gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.displayingbitmaps.ui;
import android.annotation.TargetApi;
import android.app.ActivityOptions;
import android.content.Context;
import android.content.Intent;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.ViewTreeObserver;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.Toast;
import com.example.android.common.logger.Log;
import com.example.android.displayingbitmaps.BuildConfig;
import com.example.android.displayingbitmaps.R;
import com.example.android.displayingbitmaps.provider.Images;
import com.example.android.displayingbitmaps.util.ImageCache;
import com.example.android.displayingbitmaps.util.ImageFetcher;
import com.example.android.displayingbitmaps.util.RecyclingImageView;
import com.example.android.displayingbitmaps.util.Utils;
/**
* The main fragment that powers the ImageGridActivity screen. Fairly straight forward GridView
* implementation with the key addition being the ImageWorker class w/ImageCache to load children
* asynchronously, keeping the UI nice and smooth and caching thumbnails for quick retrieval. The
* cache is retained over configuration changes like orientation change so the images are populated
* quickly if, for example, the user rotates the device.
*/
public class ImageGridFragment extends Fragment implements AdapterView.OnItemClickListener {
    private static final String TAG = "ImageGridFragment";
    // Subdirectory name for the disk cache holding thumbnail bitmaps.
    private static final String IMAGE_CACHE_DIR = "thumbs";

    // Thumbnail cell size and inter-cell spacing in pixels, resolved from resources in onCreate().
    private int mImageThumbSize;
    private int mImageThumbSpacing;
    // Adapter backing the GridView, and the async loader/cache it uses for thumbnails.
    private ImageAdapter mAdapter;
    private ImageFetcher mImageFetcher;

    /**
     * Empty constructor as per the Fragment documentation
     */
    public ImageGridFragment() {}

    /**
     * Reads thumbnail dimensions, creates the adapter, and configures the ImageFetcher with a
     * memory/disk cache retained across configuration changes (via the support FragmentManager).
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setHasOptionsMenu(true);
        mImageThumbSize = getResources().getDimensionPixelSize(R.dimen.image_thumbnail_size);
        mImageThumbSpacing = getResources().getDimensionPixelSize(R.dimen.image_thumbnail_spacing);
        mAdapter = new ImageAdapter(getActivity());
        ImageCache.ImageCacheParams cacheParams =
                new ImageCache.ImageCacheParams(getActivity(), IMAGE_CACHE_DIR);
        cacheParams.setMemCacheSizePercent(0.25f); // Set memory cache to 25% of app memory
        // The ImageFetcher takes care of loading images into our ImageView children asynchronously
        mImageFetcher = new ImageFetcher(getActivity(), mImageThumbSize);
        mImageFetcher.setLoadingImage(R.drawable.empty_photo);
        mImageFetcher.addImageCache(getActivity().getSupportFragmentManager(), cacheParams);
    }

    /**
     * Inflates the grid layout, wires up click/scroll listeners, and defers column-count
     * calculation until the GridView has been measured (via a global layout listener).
     */
    @Override
    public View onCreateView(
            LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        final View v = inflater.inflate(R.layout.image_grid_fragment, container, false);
        final GridView mGridView = (GridView) v.findViewById(R.id.gridView);
        mGridView.setAdapter(mAdapter);
        mGridView.setOnItemClickListener(this);
        mGridView.setOnScrollListener(new AbsListView.OnScrollListener() {
            @Override
            public void onScrollStateChanged(AbsListView absListView, int scrollState) {
                // Pause fetcher to ensure smoother scrolling when flinging
                if (scrollState == AbsListView.OnScrollListener.SCROLL_STATE_FLING) {
                    // Before Honeycomb pause image loading on scroll to help with performance
                    if (!Utils.hasHoneycomb()) {
                        mImageFetcher.setPauseWork(true);
                    }
                } else {
                    // Any non-fling state resumes background loading immediately.
                    mImageFetcher.setPauseWork(false);
                }
            }

            @Override
            public void onScroll(AbsListView absListView, int firstVisibleItem,
                    int visibleItemCount, int totalItemCount) {
            }
        });

        // This listener is used to get the final width of the GridView and then calculate the
        // number of columns and the width of each column. The width of each column is variable
        // as the GridView has stretchMode=columnWidth. The column width is used to set the height
        // of each view so we get nice square thumbnails.
        mGridView.getViewTreeObserver().addOnGlobalLayoutListener(
                new ViewTreeObserver.OnGlobalLayoutListener() {
                    @TargetApi(VERSION_CODES.JELLY_BEAN)
                    @Override
                    public void onGlobalLayout() {
                        // Only run once: the adapter's column count starts at 0.
                        if (mAdapter.getNumColumns() == 0) {
                            final int numColumns = (int) Math.floor(
                                    mGridView.getWidth() / (mImageThumbSize + mImageThumbSpacing));
                            if (numColumns > 0) {
                                final int columnWidth =
                                        (mGridView.getWidth() / numColumns) - mImageThumbSpacing;
                                mAdapter.setNumColumns(numColumns);
                                mAdapter.setItemHeight(columnWidth);
                                if (BuildConfig.DEBUG) {
                                    Log.d(TAG, "onCreateView - numColumns set to " + numColumns);
                                }
                                // Unregister this one-shot listener; the removal API was renamed
                                // in Jelly Bean, so pick the right variant at runtime.
                                if (Utils.hasJellyBean()) {
                                    mGridView.getViewTreeObserver()
                                            .removeOnGlobalLayoutListener(this);
                                } else {
                                    mGridView.getViewTreeObserver()
                                            .removeGlobalOnLayoutListener(this);
                                }
                            }
                        }
                    }
                });
        return v;
    }

    /** Re-enables background work and refreshes the grid when the fragment returns to the front. */
    @Override
    public void onResume() {
        super.onResume();
        mImageFetcher.setExitTasksEarly(false);
        mAdapter.notifyDataSetChanged();
    }

    /** Stops in-flight loading work and flushes the cache to disk when leaving the screen. */
    @Override
    public void onPause() {
        super.onPause();
        mImageFetcher.setPauseWork(false);
        mImageFetcher.setExitTasksEarly(true);
        mImageFetcher.flushCache();
    }

    /** Closes the image cache; after this the fetcher must not be used again. */
    @Override
    public void onDestroy() {
        super.onDestroy();
        mImageFetcher.closeCache();
    }

    /**
     * Opens the detail screen for the tapped thumbnail. The item id (not the position) is the
     * image index, because the adapter prepends a fake top row of empty views.
     */
    @TargetApi(VERSION_CODES.JELLY_BEAN)
    @Override
    public void onItemClick(AdapterView<?> parent, View v, int position, long id) {
        final Intent i = new Intent(getActivity(), ImageDetailActivity.class);
        i.putExtra(ImageDetailActivity.EXTRA_IMAGE, (int) id);
        if (Utils.hasJellyBean()) {
            // makeThumbnailScaleUpAnimation() looks kind of ugly here as the loading spinner may
            // show plus the thumbnail image in GridView is cropped. so using
            // makeScaleUpAnimation() instead.
            ActivityOptions options =
                    ActivityOptions.makeScaleUpAnimation(v, 0, 0, v.getWidth(), v.getHeight());
            getActivity().startActivity(i, options.toBundle());
        } else {
            startActivity(i);
        }
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.main_menu, menu);
    }

    /** Handles the "clear cache" menu action; other items fall through to the superclass. */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.clear_cache:
                mImageFetcher.clearCache();
                Toast.makeText(getActivity(), R.string.clear_cache_complete_toast,
                        Toast.LENGTH_SHORT).show();
                return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * The main adapter that backs the GridView. This is fairly standard except the number of
     * columns in the GridView is used to create a fake top row of empty views as we use a
     * transparent ActionBar and don't want the real top row of images to start off covered by it.
     */
    private class ImageAdapter extends BaseAdapter {

        private final Context mContext;
        // Height of a thumbnail cell; 0 until the column width has been measured.
        private int mItemHeight = 0;
        private int mNumColumns = 0;
        private int mActionBarHeight = 0;
        // Shared layout params for all thumbnail cells; rebuilt whenever the height changes.
        private GridView.LayoutParams mImageViewLayoutParams;

        public ImageAdapter(Context context) {
            super();
            mContext = context;
            mImageViewLayoutParams = new GridView.LayoutParams(
                    LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
            // Calculate ActionBar height
            TypedValue tv = new TypedValue();
            if (context.getTheme().resolveAttribute(
                    android.R.attr.actionBarSize, tv, true)) {
                mActionBarHeight = TypedValue.complexToDimensionPixelSize(
                        tv.data, context.getResources().getDisplayMetrics());
            }
        }

        @Override
        public int getCount() {
            // If columns have yet to be determined, return no items
            if (getNumColumns() == 0) {
                return 0;
            }
            // Size + number of columns for top empty row
            return Images.imageThumbUrls.length + mNumColumns;
        }

        @Override
        public Object getItem(int position) {
            // Top-row placeholders have no backing data.
            return position < mNumColumns ?
                    null : Images.imageThumbUrls[position - mNumColumns];
        }

        @Override
        public long getItemId(int position) {
            // NOTE(review): every top-row placeholder shares id 0 while hasStableIds() returns
            // true, which technically violates the stable-ids contract — confirm this is
            // intentional for the sample.
            return position < mNumColumns ? 0 : position - mNumColumns;
        }

        @Override
        public int getViewTypeCount() {
            // Two types of views, the normal ImageView and the top row of empty views
            return 2;
        }

        @Override
        public int getItemViewType(int position) {
            return (position < mNumColumns) ? 1 : 0;
        }

        @Override
        public boolean hasStableIds() {
            return true;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup container) {
            //BEGIN_INCLUDE(load_gridview_item)
            // First check if this is the top row
            if (position < mNumColumns) {
                if (convertView == null) {
                    convertView = new View(mContext);
                }
                // Set empty view with height of ActionBar
                convertView.setLayoutParams(new AbsListView.LayoutParams(
                        LayoutParams.MATCH_PARENT, mActionBarHeight));
                return convertView;
            }

            // Now handle the main ImageView thumbnails
            ImageView imageView;
            if (convertView == null) { // if it's not recycled, instantiate and initialize
                imageView = new RecyclingImageView(mContext);
                imageView.setScaleType(ImageView.ScaleType.CENTER_CROP);
                imageView.setLayoutParams(mImageViewLayoutParams);
            } else { // Otherwise re-use the converted view
                imageView = (ImageView) convertView;
            }

            // Check the height matches our calculated column width
            if (imageView.getLayoutParams().height != mItemHeight) {
                imageView.setLayoutParams(mImageViewLayoutParams);
            }

            // Finally load the image asynchronously into the ImageView, this also takes care of
            // setting a placeholder image while the background thread runs
            mImageFetcher.loadImage(Images.imageThumbUrls[position - mNumColumns], imageView);
            return imageView;
            //END_INCLUDE(load_gridview_item)
        }

        /**
         * Sets the item height. Useful for when we know the column width so the height can be set
         * to match.
         *
         * @param height the new cell height in pixels; no-op if unchanged
         */
        public void setItemHeight(int height) {
            if (height == mItemHeight) {
                return;
            }
            mItemHeight = height;
            mImageViewLayoutParams =
                    new GridView.LayoutParams(LayoutParams.MATCH_PARENT, mItemHeight);
            // Keep the fetcher's decode target size in sync with the cell size.
            mImageFetcher.setImageSize(height);
            notifyDataSetChanged();
        }

        public void setNumColumns(int numColumns) {
            mNumColumns = numColumns;
        }

        public int getNumColumns() {
            return mNumColumns;
        }
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.cubeoutput;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.steps.cubeoutput.CubeOutputMeta;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
public class CubeOutputDialog extends BaseStepDialog implements StepDialogInterface {
  private static Class<?> PKG = CubeOutputMeta.class; // for i18n purposes, needed by Translator2!!

  // Filename row: label, browse button, and variable-aware text field (plus their layout data).
  private Label wlFilename;
  private Button wbFilename;
  private TextVar wFilename;
  private FormData fdlFilename, fdbFilename, fdFilename;

  // "Add file to result" checkbox row.
  private Label wlAddToResult;
  private Button wAddToResult;
  private FormData fdlAddToResult, fdAddToResult;

  // "Do not open new file at init" checkbox row.
  private Label wlDoNotOpenNewFileInit;
  private Button wDoNotOpenNewFileInit;
  private FormData fdlDoNotOpenNewFileInit, fdDoNotOpenNewFileInit;

  // The step metadata this dialog edits in place.
  private CubeOutputMeta input;

  /**
   * @param parent the parent shell
   * @param in the step metadata to edit (a {@link CubeOutputMeta})
   * @param transMeta the owning transformation
   * @param sname the current step name; a localized default is used when null
   */
  public CubeOutputDialog( Shell parent, Object in, TransMeta transMeta, String sname ) {
    super( parent, (BaseStepMeta) in, transMeta, sname );
    input = (CubeOutputMeta) in;
    this.transMeta = transMeta;
    if ( sname != null ) {
      stepname = sname;
    } else {
      stepname = BaseMessages.getString( PKG, "CubeOutputDialog.DefaultStepName" );
    }
  }

  /**
   * Builds the dialog, populates it from the metadata, and runs the SWT event loop until the
   * dialog is disposed.
   *
   * @return the (possibly changed) step name, or null if the user cancelled
   */
  public String open() {
    Shell parent = getParent();
    Display display = parent.getDisplay();

    shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN );
    props.setLook( shell );
    setShellImage( shell, input );

    // Any edit in a text field marks the metadata as changed.
    ModifyListener lsMod = new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        input.setChanged();
      }
    };
    // Remember the incoming changed flag so cancel() can restore it.
    changed = input.hasChanged();

    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;

    shell.setLayout( formLayout );
    shell.setText( BaseMessages.getString( PKG, "CubeOutputDialog.Shell.Text" ) );

    int middle = props.getMiddlePct();
    int margin = Const.MARGIN;

    // Stepname line
    wlStepname = new Label( shell, SWT.RIGHT );
    wlStepname.setText( BaseMessages.getString( PKG, "CubeOutputDialog.Stepname.Label" ) );
    props.setLook( wlStepname );
    fdlStepname = new FormData();
    fdlStepname.left = new FormAttachment( 0, 0 );
    fdlStepname.top = new FormAttachment( 0, margin );
    fdlStepname.right = new FormAttachment( middle, -margin );
    wlStepname.setLayoutData( fdlStepname );
    wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wStepname.setText( stepname );
    props.setLook( wStepname );
    wStepname.addModifyListener( lsMod );
    fdStepname = new FormData();
    fdStepname.left = new FormAttachment( middle, 0 );
    fdStepname.top = new FormAttachment( 0, margin );
    fdStepname.right = new FormAttachment( 100, 0 );
    wStepname.setLayoutData( fdStepname );

    // Filename line
    wlFilename = new Label( shell, SWT.RIGHT );
    wlFilename.setText( BaseMessages.getString( PKG, "CubeOutputDialog.Filename.Label" ) );
    props.setLook( wlFilename );
    fdlFilename = new FormData();
    fdlFilename.left = new FormAttachment( 0, 0 );
    fdlFilename.top = new FormAttachment( wStepname, margin + 5 );
    fdlFilename.right = new FormAttachment( middle, -margin );
    wlFilename.setLayoutData( fdlFilename );
    wbFilename = new Button( shell, SWT.PUSH | SWT.CENTER );
    props.setLook( wbFilename );
    wbFilename.setText( BaseMessages.getString( PKG, "CubeOutputDialog.Browse.Button" ) );
    fdbFilename = new FormData();
    fdbFilename.right = new FormAttachment( 100, 0 );
    fdbFilename.top = new FormAttachment( wStepname, margin + 5 );
    wbFilename.setLayoutData( fdbFilename );
    wFilename = new TextVar( transMeta, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    props.setLook( wFilename );
    wFilename.addModifyListener( lsMod );
    fdFilename = new FormData();
    fdFilename.left = new FormAttachment( middle, 0 );
    fdFilename.top = new FormAttachment( wStepname, margin + 5 );
    fdFilename.right = new FormAttachment( wbFilename, -margin );
    wFilename.setLayoutData( fdFilename );

    // Open new File at Init
    wlDoNotOpenNewFileInit = new Label( shell, SWT.RIGHT );
    wlDoNotOpenNewFileInit.setText( BaseMessages.getString( PKG, "CubeOutputDialog.DoNotOpenNewFileInit.Label" ) );
    props.setLook( wlDoNotOpenNewFileInit );
    fdlDoNotOpenNewFileInit = new FormData();
    fdlDoNotOpenNewFileInit.left = new FormAttachment( 0, 0 );
    fdlDoNotOpenNewFileInit.top = new FormAttachment( wFilename, 2 * margin );
    fdlDoNotOpenNewFileInit.right = new FormAttachment( middle, -margin );
    wlDoNotOpenNewFileInit.setLayoutData( fdlDoNotOpenNewFileInit );
    wDoNotOpenNewFileInit = new Button( shell, SWT.CHECK );
    wDoNotOpenNewFileInit.setToolTipText( BaseMessages.getString(
      PKG, "CubeOutputDialog.DoNotOpenNewFileInit.Tooltip" ) );
    props.setLook( wDoNotOpenNewFileInit );
    fdDoNotOpenNewFileInit = new FormData();
    fdDoNotOpenNewFileInit.left = new FormAttachment( middle, 0 );
    fdDoNotOpenNewFileInit.top = new FormAttachment( wFilename, 2 * margin );
    fdDoNotOpenNewFileInit.right = new FormAttachment( 100, 0 );
    wDoNotOpenNewFileInit.setLayoutData( fdDoNotOpenNewFileInit );
    wDoNotOpenNewFileInit.addSelectionListener( new SelectionAdapter() {
      public void widgetSelected( SelectionEvent e ) {
        input.setChanged();
      }
    } );

    // Add File to the result files name
    wlAddToResult = new Label( shell, SWT.RIGHT );
    wlAddToResult.setText( BaseMessages.getString( PKG, "CubeOutputDialog.AddFileToResult.Label" ) );
    props.setLook( wlAddToResult );
    fdlAddToResult = new FormData();
    fdlAddToResult.left = new FormAttachment( 0, 0 );
    fdlAddToResult.top = new FormAttachment( wDoNotOpenNewFileInit, margin );
    fdlAddToResult.right = new FormAttachment( middle, -margin );
    wlAddToResult.setLayoutData( fdlAddToResult );
    wAddToResult = new Button( shell, SWT.CHECK );
    wAddToResult.setToolTipText( BaseMessages.getString( PKG, "CubeOutputDialog.AddFileToResult.Tooltip" ) );
    props.setLook( wAddToResult );
    fdAddToResult = new FormData();
    fdAddToResult.left = new FormAttachment( middle, 0 );
    fdAddToResult.top = new FormAttachment( wDoNotOpenNewFileInit, margin );
    fdAddToResult.right = new FormAttachment( 100, 0 );
    wAddToResult.setLayoutData( fdAddToResult );
    SelectionAdapter lsSelR = new SelectionAdapter() {
      public void widgetSelected( SelectionEvent arg0 ) {
        input.setChanged();
      }
    };
    wAddToResult.addSelectionListener( lsSelR );

    wOK = new Button( shell, SWT.PUSH );
    wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
    wCancel = new Button( shell, SWT.PUSH );
    wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );

    setButtonPositions( new Button[] { wOK, wCancel }, margin, wAddToResult );

    // Add listeners
    lsOK = new Listener() {
      public void handleEvent( Event e ) {
        ok();
      }
    };
    lsCancel = new Listener() {
      public void handleEvent( Event e ) {
        cancel();
      }
    };

    wOK.addListener( SWT.Selection, lsOK );
    wCancel.addListener( SWT.Selection, lsCancel );

    // Pressing Enter in a text field acts like OK.
    lsDef = new SelectionAdapter() {
      public void widgetDefaultSelected( SelectionEvent e ) {
        ok();
      }
    };

    wStepname.addSelectionListener( lsDef );
    wFilename.addSelectionListener( lsDef );

    // The browse button opens a save dialog filtered to *.cube files.
    wbFilename.addSelectionListener( new SelectionAdapter() {
      public void widgetSelected( SelectionEvent e ) {
        FileDialog dialog = new FileDialog( shell, SWT.SAVE );
        dialog.setFilterExtensions( new String[] { "*.cube", "*" } );
        if ( wFilename.getText() != null ) {
          dialog.setFileName( wFilename.getText() );
        }
        dialog.setFilterNames( new String[] {
          BaseMessages.getString( PKG, "CubeOutputDialog.FilterNames.Options.CubeFiles" ),
          BaseMessages.getString( PKG, "CubeOutputDialog.FilterNames.Options.AllFiles" ) } );
        if ( dialog.open() != null ) {
          wFilename.setText( dialog.getFilterPath()
            + System.getProperty( "file.separator" ) + dialog.getFileName() );
        }
      }
    } );

    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener( new ShellAdapter() {
      public void shellClosed( ShellEvent e ) {
        cancel();
      }
    } );

    // Set the shell size, based upon previous time...
    setSize();

    getData();
    // getData() triggered ModifyListeners; restore the original changed flag.
    input.setChanged( changed );

    shell.open();
    while ( !shell.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
    return stepname;
  }

  /**
   * Copy information from the meta-data input to the dialog fields.
   */
  public void getData() {
    if ( input.getFilename() != null ) {
      wFilename.setText( input.getFilename() );
    }
    wDoNotOpenNewFileInit.setSelection( input.isDoNotOpenNewFileInit() );
    wAddToResult.setSelection( input.isAddToResultFiles() );

    wStepname.selectAll();
    wStepname.setFocus();
  }

  /** Discards edits: clears the returned step name and restores the original changed flag. */
  private void cancel() {
    stepname = null;
    input.setChanged( changed );
    dispose();
  }

  /** Commits the dialog fields back into the metadata; a blank step name aborts silently. */
  private void ok() {
    if ( Utils.isEmpty( wStepname.getText() ) ) {
      return;
    }

    stepname = wStepname.getText(); // return value

    input.setAddToResultFiles( wAddToResult.getSelection() );
    input.setDoNotOpenNewFileInit( wDoNotOpenNewFileInit.getSelection() );
    input.setFilename( wFilename.getText() );
    dispose();
  }
}
| |
package org.grobid.core.layout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Created by zholudev on 18/08/15.
* Represents a bounding box (e.g. for a reference marker in PDF)
*/
public class BoundingBox {
    private static final Logger LOGGER = LoggerFactory.getLogger(BoundingBox.class);

    // Page the box lives on; geometry comparisons are only meaningful within one page.
    private int page;
    // Top-left corner (x, y) and dimensions; (x2, y2) is the cached bottom-right corner.
    private double x, y, width, height;
    private double x2, y2;

    private BoundingBox(int page, double x, double y, double width, double height) {
        this.page = page;
        this.x = x;
        this.y = y;
        this.width = width;
        this.height = height;
        this.x2 = x + width;
        this.y2 = y + height;
    }

    /**
     * Builds a box from its top-left (x1, y1) and bottom-right (x2, y2) corners.
     *
     * @throws IllegalArgumentException if the corners are not ordered (x1 &gt; x2 or y1 &gt; y2)
     */
    public static BoundingBox fromTwoPoints(int page, double x1, double y1, double x2, double y2) {
        if (x1 > x2 || y1 > y2) {
            throw new IllegalArgumentException("Invalid points provided: (" + x1 + ";" + y1 + ")-(" + x2 + ";" + y2 + ")");
        }
        return new BoundingBox(page, x1, y1, x2 - x1, y2 - y1);
    }

    /**
     * Parses "page,x,y,width,height" (the format produced by {@link #toString()}).
     */
    public static BoundingBox fromString(String coords) {
        String[] split = coords.split(",");
        Long pageNum = Long.valueOf(split[0], 10);
        float x = Float.parseFloat(split[1]);
        float y = Float.parseFloat(split[2]);
        float w = Float.parseFloat(split[3]);
        float h = Float.parseFloat(split[4]);
        return new BoundingBox(pageNum.intValue(), x, y, w, h);
    }

    public static BoundingBox fromPointAndDimensions(int page, double x, double y, double width, double height) {
        return new BoundingBox(page, x, y, width, height);
    }

    /** Box covering a single layout token, using the token's page and geometry. */
    public static BoundingBox fromLayoutToken(LayoutToken tok) {
        return BoundingBox.fromPointAndDimensions(tok.getPage(), tok.getX(), tok.getY(), tok.getWidth(), tok.getHeight());
    }

    /**
     * True when this box and {@code b} overlap or touch (axis-aligned test; page is NOT checked).
     */
    public boolean intersect(BoundingBox b) {
        // Boxes are disjoint iff one lies entirely to one side of the other.
        if (this.x2 < b.x) {
            return false;
        }
        if (this.x > b.x2) {
            return false;
        }
        if (this.y2 < b.y) {
            return false;
        }
        if (this.y > b.y2) {
            return false;
        }
        return true;
    }

    public int getPage() {
        return page;
    }

    public double getX() {
        return x;
    }

    public double getY() {
        return y;
    }

    public double getWidth() {
        return width;
    }

    public double getHeight() {
        return height;
    }

    public double getX2() {
        return x2;
    }

    public double getY2() {
        return y2;
    }

    /**
     * Smallest box containing both this box and {@code o}.
     *
     * @throws IllegalStateException if the boxes are on different pages
     */
    public BoundingBox boundBox(BoundingBox o) {
        if (this.page != o.page) {
            throw new IllegalStateException("Cannot compute a bounding box for different pages");
        }
        return fromTwoPoints(o.page, Math.min(this.x, o.x), Math.min(this.y, o.y), Math.max(this.x2, o.x2), Math.max(this.y2, o.y2));
    }

    /**
     * Like {@link #boundBox(BoundingBox)}, but returns this box unchanged (and logs) when the
     * other box is on a different page instead of throwing.
     */
    public BoundingBox boundBoxExcludingAnotherPage(BoundingBox o) {
        if (this.page != o.page) {
            LOGGER.debug("Cannot compute a bounding box for different pages: " + this + " and " + o + "; skipping");
            return this;
        }
        return fromTwoPoints(o.page, Math.min(this.x, o.x), Math.min(this.y, o.y), Math.max(this.x2, o.x2), Math.max(this.y2, o.y2));
    }

    /** True when {@code b} lies entirely inside this box (page is NOT checked). */
    public boolean contains(BoundingBox b) {
        return x <= b.x && y <= b.y && x2 >= b.x2 && y2 >= b.y2;
    }

    /**
     * Euclidean distance between two points.
     *
     * <p>BUG FIX: the original computed {@code (x2 - x1) * (x2 - 1)} — a typo that corrupted the
     * horizontal term of the distance for every diagonal case in {@link #distanceTo(BoundingBox)}.
     */
    private double dist(double x1, double y1, double x2, double y2) {
        double dx = x2 - x1;
        double dy = y2 - y1;
        return Math.sqrt(dx * dx + dy * dy);
    }

    /**
     * Vertical gap between this box and {@code to}; 0 when they overlap vertically.
     */
    public double verticalDistanceTo(BoundingBox to) {
        //the current box is completely "bottomer"
        boolean bottom = to.y2 < y;
        boolean top = y2 < to.y;
        if (bottom) {
            return y - to.y2;
        } else if (top) {
            return to.y - y2;
        }
        return 0;
    }

    public double area() {
        return width * height;
    }

    /**
     * Distance between the closest edges/corners of the two boxes; 0 when they overlap.
     * Boxes on different pages get a large page-difference penalty instead of a geometric value.
     */
    public double distanceTo(BoundingBox to) {
        if (this.page != to.page) {
            return 1000 * Math.abs(this.page - to.page);
        }

        //the current box is completely "lefter"
        boolean left = x2 < to.x;
        boolean right = to.x2 < x;
        boolean bottom = to.y2 < y;
        boolean top = y2 < to.y;

        // Diagonal separation: corner-to-corner distance; axis separation: edge gap.
        if (top && left) {
            return dist(x2, y2, to.x, y);
        } else if (left && bottom) {
            return dist(x2, y, to.x, to.y2);
        } else if (bottom && right) {
            return dist(x, y, to.x2, to.y2);
        } else if (right && top) {
            return dist(x, y2, to.x2, to.y);
        } else if (left) {
            return to.x - x2;
        } else if (right) {
            return x - to.x2;
        } else if (bottom) {
            return y - to.y2;
        } else if (top) {
            return to.y - y2;
        } else {
            return 0;
        }
    }

    /**
     * Intersection of this box and {@code b}, or {@code null} when they do not intersect.
     */
    public BoundingBox boundingBoxIntersection(BoundingBox b) {
        if (!this.intersect(b)) {
            return null;
        }
        double ix1 = Math.max(this.x, b.x);
        double iy1 = Math.max(this.y, b.y);
        double ix2 = Math.min(this.x2, b.x2);
        double iy2 = Math.min(this.y2, b.y2);
        return fromTwoPoints(page, ix1, iy1, ix2, iy2);
    }

    @Override
    public String toString() {
        return String.format("%d,%.2f,%.2f,%.2f,%.2f", page, x, y, width, height);
    }

    /** Body of a JSON object ("p", "x", "y", "w", "h") without the surrounding braces. */
    public String toJson() {
        StringBuilder builder = new StringBuilder();
        builder.append("\"p\":").append(page).append(", ");
        builder.append("\"x\":").append(x).append(", ");
        builder.append("\"y\":").append(y).append(", ");
        builder.append("\"w\":").append(width).append(", ");
        builder.append("\"h\":").append(height);
        return builder.toString();
    }
}
| |
package net.officefloor.benchmark;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.ThreadLocalRandom;
import io.netty.channel.unix.Socket;
import io.r2dbc.pool.PoolingConnectionFactoryProvider;
import io.r2dbc.postgresql.api.PostgresqlException;
import io.r2dbc.spi.Batch;
import io.r2dbc.spi.Connection;
import io.r2dbc.spi.ConnectionFactories;
import io.r2dbc.spi.ConnectionFactory;
import io.r2dbc.spi.ConnectionFactoryOptions;
import net.officefloor.cache.Cache;
import net.officefloor.cache.constant.ConstantCacheManagedObjectSource;
import net.officefloor.frame.api.managedobject.ManagedObject;
import net.officefloor.frame.util.ManagedObjectSourceStandAlone;
import net.officefloor.frame.util.ManagedObjectUserStandAlone;
import net.officefloor.plugin.managedobject.poll.StatePollContext;
import net.officefloor.server.RequestHandler;
import net.officefloor.server.http.parse.HttpRequestParser;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Scheduler;
import reactor.core.scheduler.Schedulers;
/**
* R2DBC server.
*
* @author Daniel Sagenschneider
*/
public class R2dbcOfficeFloorMain implements DatabaseOperations {
/**
* Database query load capacity to handle validation load.
*/
private static final int QUERY_LOAD_CAPACITY = 512 * (20 + 1); // update 20 selects then batch
/**
* Buffer size of queries.
*/
private static final int QUERY_BUFFER_SIZE = 512;
    /**
     * Run application.
     *
     * @param args command line arguments forwarded to {@link RawWoof#run}
     * @throws Throwable if the server fails to start
     */
    public static void main(String[] args) throws Throwable {
        // Increase the buffer size (note: too high and cause OOM issues)
        System.setProperty("reactor.bufferSize.small", String.valueOf(QUERY_BUFFER_SIZE));

        // Run the WoOF server
        RawWoof.run(args, (socketCount, server, port, database, username,
                password) -> new R2dbcOfficeFloorMain(socketCount, server, port, database, username, password));
    }
/**
* {@link ThreadLocal} {@link RateLimit}.
*/
private final ThreadLocal<RateLimit> threadLocalRateLimit = new ThreadLocal<RateLimit>();
/**
* {@link ThreadLocal} {@link Connection} instances.
*/
private final ThreadLocal<Connection[]> threadLocalConnections;
/**
* {@link Cache} of {@link CachedWorld}.
*/
private final Cache<Integer, CachedWorld> cache;
    /**
     * Instantiate.
     *
     * <p>Sizes the R2DBC connection pool from the socket count, eagerly creates per-thread
     * connections via a {@link ThreadLocal}, and bootstraps a polled in-memory cache of the
     * WORLD table.
     *
     * @param socketCount Number of server {@link Socket} instances.
     * @param server      Name of database server.
     * @param port        Port of database.
     * @param database    Name of database within server.
     * @param username    Username.
     * @param password    Password.
     */
    @SuppressWarnings("unchecked")
    public R2dbcOfficeFloorMain(int socketCount, String server, int port, String database, String username,
            String password) throws Throwable {

        // Must have enough connection capacity for initial load (+1 for rounding)
        int requiredConnectionsPerSocket = (QUERY_LOAD_CAPACITY / (socketCount * QUERY_BUFFER_SIZE)) + 1;
        int connectionsPerSocket = Math.max(4, requiredConnectionsPerSocket);
        System.out.println("Using " + connectionsPerSocket + " connections per socket");

        // Determine the pool size for connections
        int connectionPoolSize = socketCount * connectionsPerSocket;

        // Build the connection pool (r2dbc-pool wrapping the postgresql driver)
        ConnectionFactoryOptions factoryOptions = ConnectionFactoryOptions.builder()
                .option(ConnectionFactoryOptions.DRIVER, "pool").option(ConnectionFactoryOptions.PROTOCOL, "postgresql")
                .option(ConnectionFactoryOptions.HOST, server).option(ConnectionFactoryOptions.PORT, port)
                .option(ConnectionFactoryOptions.DATABASE, database).option(ConnectionFactoryOptions.USER, username)
                .option(ConnectionFactoryOptions.PASSWORD, password)
                .option(PoolingConnectionFactoryProvider.MAX_SIZE, connectionPoolSize).build();
        ConnectionFactory connectionFactory = ConnectionFactories.get(factoryOptions);

        // Create thread local connection (each socket thread gets its own fixed set;
        // block() here is safe as initialValue() runs before request servicing starts)
        this.threadLocalConnections = new ThreadLocal<Connection[]>() {
            @Override
            protected Connection[] initialValue() {
                Connection[] connections = new Connection[connectionsPerSocket];
                for (int i = 0; i < connections.length; i++) {
                    connections[i] = Mono.from(connectionFactory.create()).block();
                }
                return connections;
            }
        };

        // Provide the cache (stand-alone managed object source polling the WORLD table)
        ManagedObjectSourceStandAlone source = new ManagedObjectSourceStandAlone();
        source.registerInvokeProcessServicer(0, (processIndex, parameter, managedObject) -> {

            // Poll database for cached data
            StatePollContext<Map<Integer, CachedWorld>> pollContext = (StatePollContext<Map<Integer, CachedWorld>>) parameter;
            Map<Integer, CachedWorld> data = new HashMap<>();
            try {
                Flux.from(connectionFactory.create()).flatMap((connection) -> {
                    return Flux.from(connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD").execute())
                            .flatMap(result -> Flux.from(result.map((row, metadata) -> {
                                Integer id = row.get(0, Integer.class);
                                Integer randomNumber = row.get(1, Integer.class);
                                CachedWorld cachedWorld = new CachedWorld(id, randomNumber);
                                data.put(id, cachedWorld);
                                return cachedWorld;
                            }))).last().flatMap(ignore -> Mono.from(connection.close()));
                }).blockLast();
                // -1 => no expiry/re-poll interval override for this state
                pollContext.setNextState(data, -1, null);
            } catch (Exception ex) {
                // NOTE(review): poll failures are silently dropped, leaving the previous cache
                // state in place — presumably intentional best-effort refresh; confirm.
            }
        });
        ManagedObject cacheMo = new ManagedObjectUserStandAlone()
                .sourceManagedObject(source.loadManagedObjectSource(ConstantCacheManagedObjectSource.class));
        this.cache = (Cache<Integer, CachedWorld>) cacheMo.getObject();
    }
public void sendDatabaseError(Throwable failure, AbstractSendResponse response) {
// Handle issue of prepared statement not found
// (seems unsafe memory issue in R2DBC that occurs during start then stops)
if (failure instanceof PostgresqlException) {
PostgresqlException postgresqlException = (PostgresqlException) failure;
if ("26000".equals(postgresqlException.getErrorDetails().getCode())) {
// Prepared statement not existing
response.sendError(503); // consider overloaded in connection setup during warm up
}
}
// Just send the failure
response.sendError(failure);
}
/*
* ===================== DatabaseOperations ======================
*/
@Override
public void threadSetup(RequestHandler<HttpRequestParser> requestHandler) {
	// Lazily create the rate limiter for this socket servicing thread.
	// Note: this always runs before any request is serviced on the thread.
	if (this.threadLocalRateLimit.get() != null) {
		return; // already initialised for this thread
	}
	Connection[] threadConnections = this.threadLocalConnections.get();
	this.threadLocalRateLimit.set(new RateLimit(requestHandler, threadConnections));
}
@Override
public void db(DbSendResponse sender) {
	// Reject the request when no connection has spare query capacity
	RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(1);
	if (conn == null) {
		sender.sendOverloaded();
		return;
	}
	// Select one random world row and write the result back on the socket thread
	int randomId = ThreadLocalRandom.current().nextInt(1, 10001);
	Mono.from(conn.connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1")
			.bind(0, randomId).execute())
			.flatMap(result -> Mono.from(result.map((row, metadata) -> new World(
					row.get(0, Integer.class), row.get(1, Integer.class)))))
			.publishOn(conn.writeScheduler)
			.subscribe(sender::sendDb,
					error -> this.sendDatabaseError(error, sender),
					() -> conn.processed(1));
}
@Override
public void queries(int queryCount, QueriesSendResponse sender) {
	// Reject the request when no connection can take this many queries
	RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(queryCount);
	if (conn == null) {
		sender.sendOverloaded();
		return;
	}
	// Run queryCount independent single-row selects, collect and send the lot
	Flux.range(1, queryCount)
			.flatMap(index -> conn.connection
					.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1")
					.bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute())
			.flatMap(result -> Flux.from(result.map((row, metadata) -> new World(
					row.get(0, Integer.class), row.get(1, Integer.class)))))
			.collectList()
			.publishOn(conn.writeScheduler)
			.subscribe(worlds -> sender.sendQueries(worlds.toArray(World[]::new)),
					error -> this.sendDatabaseError(error, sender),
					() -> conn.processed(queryCount));
}
@Override
public void fortunes(FortunesSendResponse sender) {
	// Reject the request when no connection has spare query capacity
	RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(1);
	if (conn == null) {
		sender.sendOverloaded();
		return;
	}
	// Load every fortune row, then hand the full list to the sender
	Flux.from(conn.connection.createStatement("SELECT ID, MESSAGE FROM FORTUNE").execute())
			.flatMap(result -> Flux.from(result.map((row, metadata) -> new Fortune(
					row.get(0, Integer.class), row.get(1, String.class)))))
			.collectList()
			.publishOn(conn.writeScheduler)
			.subscribe(sender::sendFortunes,
					error -> this.sendDatabaseError(error, sender),
					() -> conn.processed(1));
}
@Override
public void update(int queryCount, UpdateSendResponse sender) {
	// One extra unit of load on top of the selects for the batched UPDATE
	int executeQueryCount = queryCount + 1; // select all and update
	// Determine if will overload queries
	RateLimitedConnection conn = this.threadLocalRateLimit.get().getAvailableConnection(executeQueryCount);
	if (conn == null) {
		sender.sendOverloaded();
		return; // rate limited
	}
	// Select the random rows, then update them all in a single batch
	Flux.range(1, queryCount)
			.flatMap(index -> conn.connection.createStatement("SELECT ID, RANDOMNUMBER FROM WORLD WHERE ID = $1")
					.bind(0, ThreadLocalRandom.current().nextInt(1, 10001)).execute())
			.flatMap(result -> Flux.from(result.map((row, metadata) -> {
				Integer id = row.get(0, Integer.class);
				Integer number = row.get(1, Integer.class);
				return new World(id, number);
			}))).collectList().flatMap(worlds -> {
				// Sort by identifier (presumably to order updates consistently across
				// concurrent batches — TODO confirm). Use Integer.compare rather than
				// subtraction, which is the overflow-safe comparator idiom.
				Collections.sort(worlds, (a, b) -> Integer.compare(a.id, b.id));
				Batch batch = conn.connection.createBatch();
				for (World world : worlds) {
					// Ensure change to random number to trigger update
					int newRandomNumber;
					do {
						newRandomNumber = ThreadLocalRandom.current().nextInt(1, 10001);
					} while (world.randomNumber == newRandomNumber);
					world.randomNumber = newRandomNumber;
					batch.add("UPDATE WORLD SET RANDOMNUMBER = " + world.randomNumber + " WHERE ID = " + world.id);
				}
				return Mono.from(batch.execute()).map((result) -> worlds);
			}).publishOn(conn.writeScheduler).subscribe(worlds -> {
				sender.sendUpdate(worlds.toArray(World[]::new));
			}, error -> {
				this.sendDatabaseError(error, sender);
			}, () -> {
				conn.processed(executeQueryCount);
			});
}
@Override
public void cached(int queryCount, CachedSendResponse sender) {
	ThreadLocalRandom random = ThreadLocalRandom.current();
	// Look up queryCount randomly-chosen worlds from the in-memory cache
	// (ids are random in [1, 10000]; duplicate ids are possible)
	CachedWorld[] cachedWorlds = new CachedWorld[queryCount];
	for (int i = 0; i < queryCount; i++) {
		int id = random.nextInt(1, 10001);
		cachedWorlds[i] = cache.get(id);
	}
	// Send cached worlds
	sender.sendCached(cachedWorlds);
}
/**
 * Per-thread rate limiter that spreads query load over a fixed set of
 * database connections, capping the in-flight queries per connection.
 */
private static class RateLimit {

	/** Connections owned by this thread, each tracking its in-flight load. */
	private final RateLimitedConnection[] rateLimitedConnections;

	/** Executes write callbacks on the socket servicing thread. */
	private final Executor socketExecutor;

	private RateLimit(RequestHandler<HttpRequestParser> requestHandler, Connection[] connections) {
		// All response writes are marshalled back onto the request handler's thread
		this.socketExecutor = (runnable) -> requestHandler.execute(() -> runnable.run());
		Scheduler writeScheduler = Schedulers.fromExecutor(this.socketExecutor);
		// Wrap each connection with rate limit tracking
		int connectionCount = connections.length;
		this.rateLimitedConnections = new RateLimitedConnection[connectionCount];
		for (int i = 0; i < connectionCount; i++) {
			this.rateLimitedConnections[i] = new RateLimitedConnection(connections[i], writeScheduler);
		}
	}

	/**
	 * Reserves capacity for the given number of queries.
	 *
	 * @param queryCount queries about to be executed
	 * @return the first connection with spare capacity, or {@code null} when
	 *         every connection is at its query buffer limit
	 */
	private RateLimitedConnection getAvailableConnection(int queryCount) {
		for (RateLimitedConnection connection : this.rateLimitedConnections) {
			int newCount = connection.activeQueries + queryCount;
			if (newCount <= QUERY_BUFFER_SIZE) {
				// Reserve the capacity on this connection
				connection.activeQueries = newCount;
				return connection;
			}
		}
		return null; // all connections at capacity
	}
}
/**
 * A database connection paired with its write scheduler and a counter of the
 * queries currently reserved against it.
 */
private static class RateLimitedConnection {

	/** Scheduler that publishes results on the owning socket thread. */
	private final Scheduler writeScheduler;

	/** Underlying database connection. */
	private final Connection connection;

	/** Number of queries currently counted against this connection. */
	private int activeQueries;

	private RateLimitedConnection(Connection connection, Scheduler writeScheduler) {
		this.connection = connection;
		this.writeScheduler = writeScheduler;
	}

	/**
	 * Releases the capacity previously reserved by
	 * {@code RateLimit.getAvailableConnection(int)}.
	 *
	 * @param queryCount number of completed queries
	 */
	private void processed(int queryCount) {
		this.activeQueries = this.activeQueries - queryCount;
	}
}
}
| |
/*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.loader.jar;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.FilePermission;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.net.URLStreamHandler;
import java.security.Permission;
/**
* {@link java.net.JarURLConnection} used to support {@link JarFile#getUrl()}.
*
* @author Phillip Webb
* @author Andy Wilkinson
*/
final class JarURLConnection extends java.net.JarURLConnection {

	// Per-thread flag: when TRUE, "not found" is reported through shared,
	// pre-built exceptions/connections to avoid exception-creation cost on hot paths.
	private static ThreadLocal<Boolean> useFastExceptions = new ThreadLocal<Boolean>();

	private static final FileNotFoundException FILE_NOT_FOUND_EXCEPTION = new FileNotFoundException(
			"Jar file or entry not found");

	private static final IllegalStateException NOT_FOUND_CONNECTION_EXCEPTION = new IllegalStateException(
			FILE_NOT_FOUND_EXCEPTION);

	private static final String SEPARATOR = "!/";

	private static final URL EMPTY_JAR_URL;

	static {
		try {
			EMPTY_JAR_URL = new URL("jar:", null, 0, "file:!/", new URLStreamHandler() {
				@Override
				protected URLConnection openConnection(URL u) throws IOException {
					// Stub URLStreamHandler to prevent the wrong JAR Handler from being
					// Instantiated and cached.
					return null;
				}
			});
		}
		catch (MalformedURLException ex) {
			throw new IllegalStateException(ex);
		}
	}

	private static final JarEntryName EMPTY_JAR_ENTRY_NAME = new JarEntryName("");

	private static final String READ_ACTION = "read";

	// Shared "not found" connection returned when fast exceptions are enabled.
	private static final JarURLConnection NOT_FOUND_CONNECTION = JarURLConnection
			.notFound();

	private final JarFile jarFile;

	private Permission permission;

	private URL jarFileUrl;

	private final JarEntryName jarEntryName;

	private JarEntry jarEntry;

	private JarURLConnection(URL url, JarFile jarFile, JarEntryName jarEntryName)
			throws IOException {
		// What we pass to super is ultimately ignored
		super(EMPTY_JAR_URL);
		this.url = url;
		this.jarFile = jarFile;
		this.jarEntryName = jarEntryName;
	}

	@Override
	public void connect() throws IOException {
		if (this.jarFile == null) {
			throw FILE_NOT_FOUND_EXCEPTION;
		}
		// Resolve the entry lazily; a named entry that cannot be found is an error
		if (!this.jarEntryName.isEmpty() && this.jarEntry == null) {
			this.jarEntry = this.jarFile.getJarEntry(getEntryName());
			if (this.jarEntry == null) {
				throwFileNotFound(this.jarEntryName, this.jarFile);
			}
		}
		this.connected = true;
	}

	@Override
	public JarFile getJarFile() throws IOException {
		connect();
		return this.jarFile;
	}

	@Override
	public URL getJarFileURL() {
		if (this.jarFile == null) {
			throw NOT_FOUND_CONNECTION_EXCEPTION;
		}
		if (this.jarFileUrl == null) {
			this.jarFileUrl = buildJarFileUrl();
		}
		return this.jarFileUrl;
	}

	// Derives the URL of the containing jar from the jar file's own URL,
	// stripping a trailing "!/" and re-wrapping nested jars with a "jar:" prefix.
	private URL buildJarFileUrl() {
		try {
			String spec = this.jarFile.getUrl().getFile();
			if (spec.endsWith(SEPARATOR)) {
				spec = spec.substring(0, spec.length() - SEPARATOR.length());
			}
			if (spec.indexOf(SEPARATOR) == -1) {
				return new URL(spec);
			}
			return new URL("jar:" + spec);
		}
		catch (MalformedURLException ex) {
			throw new IllegalStateException(ex);
		}
	}

	@Override
	public JarEntry getJarEntry() throws IOException {
		if (this.jarEntryName == null || this.jarEntryName.isEmpty()) {
			return null;
		}
		connect();
		return this.jarEntry;
	}

	@Override
	public String getEntryName() {
		if (this.jarFile == null) {
			throw NOT_FOUND_CONNECTION_EXCEPTION;
		}
		return this.jarEntryName.toString();
	}

	@Override
	public InputStream getInputStream() throws IOException {
		if (this.jarFile == null) {
			throw FILE_NOT_FOUND_EXCEPTION;
		}
		if (this.jarEntryName.isEmpty()) {
			throw new IOException("no entry name specified");
		}
		connect();
		InputStream inputStream = this.jarFile.getInputStream(this.jarEntry);
		if (inputStream == null) {
			throwFileNotFound(this.jarEntryName, this.jarFile);
		}
		return inputStream;
	}

	// Raises the shared, cheap exception when fast exceptions are enabled,
	// otherwise a descriptive one naming the entry and jar.
	private void throwFileNotFound(Object entry, JarFile jarFile)
			throws FileNotFoundException {
		if (Boolean.TRUE.equals(useFastExceptions.get())) {
			throw FILE_NOT_FOUND_EXCEPTION;
		}
		throw new FileNotFoundException(
				"JAR entry " + entry + " not found in " + jarFile.getName());
	}

	@Override
	public int getContentLength() {
		if (this.jarFile == null) {
			return -1;
		}
		try {
			if (this.jarEntryName.isEmpty()) {
				return this.jarFile.size();
			}
			JarEntry entry = getJarEntry();
			// NOTE(review): the int narrowing truncates entries larger than 2GB —
			// acceptable for this API, which is defined to return an int
			return (entry == null ? -1 : (int) entry.getSize());
		}
		catch (IOException ex) {
			return -1;
		}
	}

	@Override
	public Object getContent() throws IOException {
		connect();
		return (this.jarEntryName.isEmpty() ? this.jarFile : super.getContent());
	}

	@Override
	public String getContentType() {
		return (this.jarEntryName == null ? null : this.jarEntryName.getContentType());
	}

	@Override
	public Permission getPermission() throws IOException {
		if (this.jarFile == null) {
			throw FILE_NOT_FOUND_EXCEPTION;
		}
		if (this.permission == null) {
			this.permission = new FilePermission(
					this.jarFile.getRootJarFile().getFile().getPath(), READ_ACTION);
		}
		return this.permission;
	}

	static void setUseFastExceptions(boolean useFastExceptions) {
		JarURLConnection.useFastExceptions.set(useFastExceptions);
	}

	/**
	 * Creates a connection for the given URL, descending through nested jars
	 * (segments delimited by {@code "!/"}) as required.
	 */
	static JarURLConnection get(URL url, JarFile jarFile) throws IOException {
		String spec = extractFullSpec(url, jarFile.getPathFromRoot());
		int separator;
		int index = 0;
		while ((separator = spec.indexOf(SEPARATOR, index)) > 0) {
			String entryName = spec.substring(index, separator);
			JarEntry jarEntry = jarFile.getJarEntry(entryName);
			if (jarEntry == null) {
				return JarURLConnection.notFound(jarFile, JarEntryName.get(entryName));
			}
			jarFile = jarFile.getNestedJarFile(jarEntry);
			// FIX: indexOf returns an absolute position, so the next scan starts
			// directly after the separator. The previous "index +=" over-advanced
			// the index for more than one level of nesting.
			index = separator + SEPARATOR.length();
		}
		JarEntryName jarEntryName = JarEntryName.get(spec, index);
		if (Boolean.TRUE.equals(useFastExceptions.get())) {
			if (!jarEntryName.isEmpty()
					&& !jarFile.containsEntry(jarEntryName.toString())) {
				return NOT_FOUND_CONNECTION;
			}
		}
		return new JarURLConnection(url, jarFile, jarEntryName);
	}

	// Returns the portion of the URL's file after "!/" and the jar's path from
	// root, i.e. the spec relative to the given jar ("" when there is no entry).
	private static String extractFullSpec(URL url, String pathFromRoot) {
		String file = url.getFile();
		int separatorIndex = file.indexOf(SEPARATOR);
		if (separatorIndex < 0) {
			return "";
		}
		int specIndex = separatorIndex + SEPARATOR.length() + pathFromRoot.length();
		return file.substring(specIndex);
	}

	private static JarURLConnection notFound() {
		try {
			return notFound(null, null);
		}
		catch (IOException ex) {
			throw new IllegalStateException(ex);
		}
	}

	private static JarURLConnection notFound(JarFile jarFile, JarEntryName jarEntryName)
			throws IOException {
		if (Boolean.TRUE.equals(useFastExceptions.get())) {
			return NOT_FOUND_CONNECTION;
		}
		return new JarURLConnection(null, jarFile, jarEntryName);
	}

	/**
	 * A JarEntryName parsed from a URL String.
	 */
	static class JarEntryName {

		private final String name;

		private String contentType;

		JarEntryName(String spec) {
			this.name = decode(spec);
		}

		// Percent-decodes the spec; a spec without '%' is returned unchanged.
		private String decode(String source) {
			if (source.length() == 0 || (source.indexOf('%') < 0)) {
				return source;
			}
			ByteArrayOutputStream bos = new ByteArrayOutputStream(source.length());
			write(source, bos);
			// AsciiBytes is what is used to store the JarEntries so make it symmetric
			return AsciiBytes.toString(bos.toByteArray());
		}

		// Writes the decoded bytes of source; non-ASCII chars are first
		// re-encoded to UTF-8 percent escapes and recursively decoded to bytes.
		private void write(String source, ByteArrayOutputStream outputStream) {
			int length = source.length();
			for (int i = 0; i < length; i++) {
				int c = source.charAt(i);
				if (c > 127) {
					try {
						String encoded = URLEncoder.encode(String.valueOf((char) c),
								"UTF-8");
						write(encoded, outputStream);
					}
					catch (UnsupportedEncodingException ex) {
						throw new IllegalStateException(ex);
					}
				}
				else {
					if (c == '%') {
						if ((i + 2) >= length) {
							throw new IllegalArgumentException(
									"Invalid encoded sequence \"" + source.substring(i)
											+ "\"");
						}
						c = decodeEscapeSequence(source, i);
						i += 2;
					}
					outputStream.write(c);
				}
			}
		}

		// Decodes the two hex digits following the '%' at index i.
		private char decodeEscapeSequence(String source, int i) {
			int hi = Character.digit(source.charAt(i + 1), 16);
			int lo = Character.digit(source.charAt(i + 2), 16);
			if (hi == -1 || lo == -1) {
				throw new IllegalArgumentException(
						"Invalid encoded sequence \"" + source.substring(i) + "\"");
			}
			return ((char) ((hi << 4) + lo));
		}

		@Override
		public String toString() {
			return this.name;
		}

		public boolean isEmpty() {
			return this.name.length() == 0;
		}

		public String getContentType() {
			if (this.contentType == null) {
				this.contentType = deduceContentType();
			}
			return this.contentType;
		}

		private String deduceContentType() {
			// Guess the content type, don't bother with streams as mark is not supported
			String type = (isEmpty() ? "x-java/jar" : null);
			type = (type != null ? type : guessContentTypeFromName(toString()));
			type = (type != null ? type : "content/unknown");
			return type;
		}

		public static JarEntryName get(String spec) {
			return get(spec, 0);
		}

		public static JarEntryName get(String spec, int beginIndex) {
			if (spec.length() <= beginIndex) {
				return EMPTY_JAR_ENTRY_NAME;
			}
			return new JarEntryName(spec.substring(beginIndex));
		}

	}

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.spi.security;
import java.security.Principal;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.ObjectArrays;
import com.google.common.collect.Sets;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.plugins.tree.TreeLocation;
import org.apache.jackrabbit.oak.spi.commit.CommitHook;
import org.apache.jackrabbit.oak.spi.commit.MoveTracker;
import org.apache.jackrabbit.oak.spi.commit.ValidatorProvider;
import org.apache.jackrabbit.oak.spi.lifecycle.CompositeInitializer;
import org.apache.jackrabbit.oak.spi.lifecycle.CompositeWorkspaceInitializer;
import org.apache.jackrabbit.oak.spi.lifecycle.RepositoryInitializer;
import org.apache.jackrabbit.oak.spi.lifecycle.WorkspaceInitializer;
import org.apache.jackrabbit.oak.spi.xml.ProtectedItemImporter;
import org.osgi.framework.Constants;
/**
* Abstract base implementation for {@link SecurityConfiguration}s that can
* combine different implementations.
*/
public abstract class CompositeConfiguration<T extends SecurityConfiguration> implements SecurityConfiguration {

    /**
     * Parameter used to define the ranking of a given configuration compared to
     * other registered configuration in this aggregate. If the ranking parameter
     * is missing a new configuration will be added at the end of the list.
     */
    public static final String PARAM_RANKING = "configurationRanking";

    /**
     * Default ranking value used to insert a new configuration at the end of
     * the list.
     */
    private static final int NO_RANKING = Integer.MIN_VALUE;

    // Aggregated configurations, kept sorted from highest to lowest ranking by
    // addConfiguration. CopyOnWriteArrayList suits the read-heavy access below.
    private final List<T> configurations = new CopyOnWriteArrayList<T>();

    // Name reported by getName().
    private final String name;

    // Composite Context delegating to the contexts of all aggregated configurations.
    private final CompositeContext ctx = new CompositeContext();

    private SecurityProvider securityProvider;

    // Fallback configuration used by getConfigurations() when nothing has been added.
    private T defaultConfig;

    public CompositeConfiguration(@Nonnull String name) {
        this.name = name;
    }

    public CompositeConfiguration(@Nonnull String name, @Nonnull SecurityProvider securityProvider) {
        this.name = name;
        this.securityProvider = securityProvider;
    }

    @CheckForNull
    public T getDefaultConfig() {
        return defaultConfig;
    }

    // Sets the fallback configuration and makes its context the composite's default.
    public void setDefaultConfig(@Nonnull T defaultConfig) {
        this.defaultConfig = defaultConfig;
        ctx.defaultCtx = defaultConfig.getContext();
    }

    // Adds a configuration without extra registration parameters (no ranking fallback).
    public void addConfiguration(@Nonnull T configuration) {
        addConfiguration(configuration, ConfigurationParameters.EMPTY);
    }

    /**
     * Adds the given configuration, inserted according to its ranking: the
     * configuration's own {@link #PARAM_RANKING} wins, falling back to the OSGi
     * {@code service.ranking} from {@code params}; without any ranking the
     * configuration is appended at the end.
     */
    public void addConfiguration(@Nonnull T configuration, @Nonnull ConfigurationParameters params) {
        int ranking = configuration.getParameters().getConfigValue(PARAM_RANKING, NO_RANKING);
        if (ranking == NO_RANKING) {
            ranking = params.getConfigValue(Constants.SERVICE_RANKING, NO_RANKING);
        }
        if (ranking == NO_RANKING || configurations.isEmpty()) {
            configurations.add(configuration);
        } else {
            // Insert before the first entry with a strictly lower ranking so the
            // list stays ordered from highest to lowest ranking.
            int i = 0;
            for (T c : configurations) {
                int r = c.getParameters().getConfigValue(PARAM_RANKING, NO_RANKING);
                if (ranking > r) {
                    break;
                } else {
                    i++;
                }
            }
            configurations.add(i, configuration);
        }
        ctx.add(configuration);
    }

    // Removes the configuration and rebuilds the composite context delegates.
    public void removeConfiguration(@Nonnull T configuration) {
        configurations.remove(configuration);
        ctx.refresh(configurations);
    }

    /**
     * Returns an immutable snapshot of the aggregated configurations, or a
     * singleton list holding the default configuration when none were added.
     */
    @Nonnull
    public List<T> getConfigurations() {
        if (configurations.isEmpty() && defaultConfig != null) {
            return ImmutableList.of(defaultConfig);
        } else {
            return ImmutableList.copyOf(configurations);
        }
    }

    public void setSecurityProvider(@Nonnull SecurityProvider securityProvider) {
        this.securityProvider = securityProvider;
    }

    // Guarded accessor: the provider must have been injected before use.
    @Nonnull
    protected SecurityProvider getSecurityProvider() {
        if (securityProvider == null) {
            throw new IllegalStateException("SecurityProvider missing => CompositeConfiguration is not ready.");
        }
        return securityProvider;
    }

    //----------------------------------------------< SecurityConfiguration >---
    @Nonnull
    @Override
    public String getName() {
        return name;
    }

    // Merges the parameters of all aggregated configurations (earlier entries win
    // per ConfigurationParameters.of semantics — see that method's contract).
    @Nonnull
    @Override
    public ConfigurationParameters getParameters() {
        List<T> configs = getConfigurations();
        ConfigurationParameters[] params = new ConfigurationParameters[configs.size()];
        for (int i = 0; i < configs.size(); i++) {
            params[i] = configs.get(i).getParameters();
        }
        return ConfigurationParameters.of(params);
    }

    // Composite initializer running each aggregated workspace initializer.
    @Nonnull
    @Override
    public WorkspaceInitializer getWorkspaceInitializer() {
        return new CompositeWorkspaceInitializer(Lists.transform(getConfigurations(), new Function<T, WorkspaceInitializer>() {
            @Override
            public WorkspaceInitializer apply(T securityConfiguration) {
                return securityConfiguration.getWorkspaceInitializer();
            }
        }));
    }

    // Composite initializer running each aggregated repository initializer.
    @Nonnull
    @Override
    public RepositoryInitializer getRepositoryInitializer() {
        return new CompositeInitializer(Lists.transform(getConfigurations(), new Function<T, RepositoryInitializer>() {
            @Override
            public RepositoryInitializer apply(T securityConfiguration) {
                return securityConfiguration.getRepositoryInitializer();
            }
        }));
    }

    // Concatenation of the commit hooks of all aggregated configurations.
    @Nonnull
    @Override
    public List<? extends CommitHook> getCommitHooks(@Nonnull final String workspaceName) {
        return ImmutableList.copyOf(Iterables.concat(Lists.transform(getConfigurations(), new Function<T, List<? extends CommitHook>>() {
            @Override
            public List<? extends CommitHook> apply(T securityConfiguration) {
                return securityConfiguration.getCommitHooks(workspaceName);
            }
        })));
    }

    // Concatenation of the validators of all aggregated configurations.
    @Nonnull
    @Override
    public List<? extends ValidatorProvider> getValidators(@Nonnull final String workspaceName, @Nonnull final Set<Principal> principals, @Nonnull final MoveTracker moveTracker) {
        return ImmutableList.copyOf(Iterables.concat(Lists.transform(getConfigurations(), new Function<T, List<? extends ValidatorProvider>>() {
            @Override
            public List<? extends ValidatorProvider> apply(T securityConfiguration) {
                return securityConfiguration.getValidators(workspaceName, principals, moveTracker);
            }
        })));
    }

    // Concatenation of the protected item importers of all aggregated configurations.
    @Nonnull
    @Override
    public List<ProtectedItemImporter> getProtectedItemImporters() {
        return ImmutableList.copyOf(Iterables.concat(Lists.transform(getConfigurations(), new Function<T, List<? extends ProtectedItemImporter>>() {
            @Override
            public List<? extends ProtectedItemImporter> apply(T securityConfiguration) {
                return securityConfiguration.getProtectedItemImporters();
            }
        })));
    }

    @Nonnull
    @Override
    public Context getContext() {
        return ctx;
    }

    /**
     * Context that answers {@code true} as soon as any delegate context does.
     * While no non-default delegates are present, calls fall through to
     * {@code defaultCtx}.
     */
    private static final class CompositeContext implements Context {

        @Nonnull
        private Context defaultCtx = DEFAULT;

        @Nullable
        private Context[] delegatees = null;

        // Rebuilds the delegate array from scratch, dropping DEFAULT contexts and
        // duplicates while preserving insertion order.
        private void refresh(@Nonnull List<? extends SecurityConfiguration> configurations) {
            Set<Context> s = Sets.newLinkedHashSetWithExpectedSize(configurations.size());
            for (Context c : Iterables.transform(configurations, ContextFunction.INSTANCE)) {
                if (DEFAULT != c) {
                    s.add(c);
                }
            }
            delegatees = (s.isEmpty()) ? null : s.toArray(new Context[s.size()]);
        }

        // Appends the configuration's context unless it is DEFAULT or already present.
        private void add(@Nonnull SecurityConfiguration configuration) {
            Context c = configuration.getContext();
            if (DEFAULT != c) {
                if (delegatees == null) {
                    delegatees = new Context[] {c};
                } else {
                    for (Context ctx : delegatees) {
                        if (ctx.equals(c)) {
                            return;
                        }
                    }
                    delegatees = ObjectArrays.concat(delegatees, c);
                }
            }
        }

        @Override
        public boolean definesProperty(@Nonnull Tree parent, @Nonnull PropertyState property) {
            if (delegatees == null) {
                return defaultCtx.definesProperty(parent, property);
            }
            for (Context ctx : delegatees) {
                if (ctx.definesProperty(parent, property)) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public boolean definesContextRoot(@Nonnull Tree tree) {
            if (delegatees == null) {
                return defaultCtx.definesContextRoot(tree);
            }
            for (Context ctx : delegatees) {
                if (ctx.definesContextRoot(tree)) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public boolean definesTree(@Nonnull Tree tree) {
            if (delegatees == null) {
                return defaultCtx.definesTree(tree);
            }
            for (Context ctx : delegatees) {
                if (ctx.definesTree(tree)) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public boolean definesLocation(@Nonnull TreeLocation location) {
            if (delegatees == null) {
                return defaultCtx.definesLocation(location);
            }
            for (Context ctx : delegatees) {
                if (ctx.definesLocation(location)) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public boolean definesInternal(@Nonnull Tree tree) {
            if (delegatees == null) {
                return defaultCtx.definesInternal(tree);
            }
            for (Context ctx : delegatees) {
                if (ctx.definesInternal(tree)) {
                    return true;
                }
            }
            return false;
        }
    }

    // Singleton Guava Function extracting the Context from a configuration.
    private static final class ContextFunction implements Function<SecurityConfiguration, Context> {

        private static final ContextFunction INSTANCE = new ContextFunction();

        private ContextFunction() {}

        @Override
        public Context apply(SecurityConfiguration input) {
            return input.getContext();
        }
    }
}
| |
/*
* Copyright (c) 2012-2014. Alain Barret
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.abarhub.filerw.text;
import com.github.abarhub.filerw.ToolBox;
import com.github.abarhub.filerw.Tools;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Tests for {@code LineContentText}: construction from a class or converted
 * enum, field assignment, equals/hashCode, and show() output.
 */
public class LineContentTextTest {

    @Test
    public void test1() {
        // A constructor-supplied value is right-padded to the full record length
        int recordLength = Tools.getSize(FieldsListChamps1.class);
        LineContentText<FieldsListChamps1> line = new LineContentText<>(FieldsListChamps1.class,
                "ABC");
        assertEquals(padding("ABC", recordLength), line.getLine());
    }

    @Test
    public void test2() {
        // Without an initial value, the line is entirely padding
        int recordLength = Tools.getSize(FieldsListChamps1.class);
        LineContentText<FieldsListChamps1> line = new LineContentText<>(FieldsListChamps1.class);
        assertEquals(padding("", recordLength), line.getLine());
    }

    @Test
    public void test3() {
        // setString replaces only its field; the Nom value from the constructor stays
        LineContentText<FieldsListChamps1> line = new LineContentText<>(FieldsListChamps1.class,
                "ABC");
        line.setString(FieldsListChamps1.Prenom, "AAA");
        String expected = padding("ABC", FieldsListChamps1.Nom)
                + padding("AAA", FieldsListChamps1.Prenom)
                + padding("", FieldsListChamps1.DateNaissance);
        assertEquals(expected, line.getLine());
    }

    @Test
    public void test4() {
        // setString on an empty line fills just that field
        LineContentText<FieldsListChamps1> line = new LineContentText<>(FieldsListChamps1.class);
        line.setString(FieldsListChamps1.Prenom, "BBC");
        String expected = padding("", FieldsListChamps1.Nom)
                + padding("BBC", FieldsListChamps1.Prenom)
                + padding("", FieldsListChamps1.DateNaissance);
        assertEquals(expected, line.getLine());
    }

    @Test
    public void test5() {
        // Same behaviour when constructed from a converted enum class
        LineContentText<FieldsListChamps1> line = new LineContentText<>(Tools.convClassEnum(FieldsListChamps1.class));
        line.setString(FieldsListChamps1.Prenom, "BBC");
        String expected = padding("", FieldsListChamps1.Nom)
                + padding("BBC", FieldsListChamps1.Prenom)
                + padding("", FieldsListChamps1.DateNaissance);
        assertEquals(expected, line.getLine());
    }

    @Test
    public void testHash() {
        LineContentText<FieldsListChamps1> line = new LineContentText<>(FieldsListChamps1.class, "abc");
        LineContentText<FieldsListChamps1> line2 = new LineContentText<>(FieldsListChamps1.class, "abc");
        // hashCode is self-consistent and equal for equal content
        assertEquals(line.hashCode(), line.hashCode());
        assertEquals(line.hashCode(), line2.hashCode());
        // Mutating one line breaks hash equality with the other
        line.setString(FieldsListChamps1.Prenom, "BBC");
        assertEquals(line.hashCode(), line.hashCode());
        assertNotEquals(line.hashCode(), line2.hashCode());
    }

    @Test
    public void testEquals() {
        LineContentText<FieldsListChamps1> line = new LineContentText<>(FieldsListChamps1.class, "abc");
        LineContentText<FieldsListChamps1> line2 = new LineContentText<>(FieldsListChamps1.class, "abc");
        // equals is reflexive and value-based
        assertEquals(line, line);
        assertEquals(line, line2);
        // Mutation breaks equality; comparison with a foreign type is false
        line.setString(FieldsListChamps1.Prenom, "BBC");
        assertEquals(line, line);
        assertNotEquals(line, line2);
        assertNotEquals(line, "abc");
    }

    @Nested
    class TestConstructor {

        @Test
        public void testConstructor1() {
            // One character beyond the record length is rejected
            String s = getLine1() + "0";
            assertThrows(IllegalArgumentException.class,
                    () -> new LineContentText<>(FieldsListChamps1.class, s));
        }

        @Test
        public void testConstructor2() {
            // Same rejection through the converted-enum constructor
            String s = getLine1() + "0";
            assertThrows(IllegalArgumentException.class,
                    () -> new LineContentText<>(Tools.convClassEnum(FieldsListChamps1.class), s));
        }

        @Test
        public void testConstructor3() {
            // A full-length line is accepted and preserved verbatim
            String s = getLine1();
            LineContentText<FieldsListChamps1> lineContentText = new LineContentText<>(FieldsListChamps1.class, s);
            assertNotNull(lineContentText);
            assertEquals(s, lineContentText.getLine());
        }

        @Test
        public void testConstructor4() {
            // Same acceptance through the converted-enum constructor
            String s = getLine1();
            LineContentText<FieldsListChamps1> lineContentText = new LineContentText<>(Tools.convClassEnum(FieldsListChamps1.class), s);
            assertNotNull(lineContentText);
            assertEquals(s, lineContentText.getLine());
        }
    }

    @Test
    public void testShow() {
        // arrange
        String s = getLine1();
        LineContentText<FieldsListChamps1> lineContentText = new LineContentText<>(Tools.convClassEnum(FieldsListChamps1.class), s);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        PrintStream printStream = new PrintStream(out);
        // act
        lineContentText.show(printStream);
        // assert
        String s2 = out.toString();
        assertNotNull(s2);
        assertEquals(ToolBox.convertNewLine("Nom=abc12345123123145412\n" +
                        "Prenom=31212312300000000000\n" +
                        "DateNaissance=00000000\n"),
                ToolBox.convertNewLine(s2));
    }

    // Right-pads nom with spaces up to len characters.
    private String padding(String nom, int len) {
        StringBuilder padded = new StringBuilder(nom);
        for (int i = nom.length(); i < len; i++) {
            padded.append(' ');
        }
        return padded.toString();
    }

    // Right-pads nom with spaces to the length of the given field.
    private String padding(String nom, FieldsListChamps1 nom2) {
        return padding(nom, nom2.getLength());
    }

    // A line whose length exactly matches the FieldsListChamps1 record.
    private String getLine1() {
        return "abc123451231231454123121231230000000000" +
                "000000000";
    }
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2019 Groupon, Inc
* Copyright 2014-2019 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.util.security.api;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.annotation.Nullable;
import javax.inject.Inject;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.authz.AuthorizationException;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.mgt.DefaultSecurityManager;
import org.apache.shiro.realm.AuthorizingRealm;
import org.apache.shiro.realm.Realm;
import org.apache.shiro.subject.SimplePrincipalCollection;
import org.apache.shiro.subject.Subject;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.security.Logical;
import org.killbill.billing.security.Permission;
import org.killbill.billing.security.SecurityApiException;
import org.killbill.billing.security.api.SecurityApi;
import org.killbill.billing.util.callcontext.CallContext;
import org.killbill.billing.util.callcontext.TenantContext;
import org.killbill.billing.util.security.shiro.dao.RolesPermissionsModelDao;
import org.killbill.billing.util.security.shiro.dao.UserDao;
import org.killbill.billing.util.security.shiro.dao.UserRolesModelDao;
import org.killbill.billing.util.security.shiro.realm.KillBillJdbcRealm;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Strings;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
/**
 * Default {@link SecurityApi} implementation backed by Apache Shiro.
 *
 * <p>Authentication/authorization is delegated to the Shiro {@link Subject};
 * user/role persistence is delegated to {@link UserDao}. Permissions are
 * gathered from every configured {@link Realm} by reflectively invoking the
 * (protected) {@code AuthorizingRealm#getAuthorizationInfo} method.
 */
public class DefaultSecurityApi implements SecurityApi {

    // Custom Realm implementors are encouraged to enable DEBUG level logging for development
    private static final Logger logger = LoggerFactory.getLogger(DefaultSecurityApi.class);

    private final UserDao userDao;
    private final Set<Realm> realms;
    // Realm -> accessible getAuthorizationInfo Method. Populated once in the
    // constructor and only read afterwards.
    private final Map<Realm, Method> getAuthorizationInfoMethods = new HashMap<Realm, Method>();

    @Inject
    public DefaultSecurityApi(final UserDao userDao, final Set<Realm> realms) {
        this.userDao = userDao;
        this.realms = realms;
        buildGetAuthorizationInfoMethods();
    }

    /**
     * Authenticates the given principal/credentials pair against the configured realms.
     *
     * <p>If the very same principal is already authenticated, this is a no-op
     * (workaround for https://issues.apache.org/jira/browse/SHIRO-510). Note that in
     * the previous implementation this check ran <em>after</em> the unconditional
     * logout below, so {@code isAuthenticated()} was always false and the workaround
     * was dead code; the check must happen first.
     */
    @Override
    public synchronized void login(final Object principal, final Object credentials) {
        final Subject currentUser = SecurityUtils.getSubject();

        if (currentUser.isAuthenticated()) {
            // Workaround for https://issues.apache.org/jira/browse/SHIRO-510
            // TODO Not sure if it's a good fix?
            if (principal.equals(currentUser.getPrincipal())) {
                // Same user already logged in: keep the existing session
                return;
            }
            // Different principal: drop the current session before re-authenticating
            logout();
        }

        // UsernamePasswordToken is hardcoded in AuthenticatingRealm
        if (principal instanceof String && credentials instanceof String) {
            currentUser.login(new UsernamePasswordToken((String) principal, (String) credentials));
        } else if (principal instanceof String && credentials instanceof char[]) {
            currentUser.login(new UsernamePasswordToken((String) principal, (char[]) credentials));
        } else {
            // Fall back to a generic token for custom realms
            currentUser.login(new AuthenticationToken() {
                @Override
                public Object getPrincipal() {
                    return principal;
                }

                @Override
                public Object getCredentials() {
                    return credentials;
                }
            });
        }
    }

    /** Logs out the current Subject, if any is authenticated. */
    @Override
    public void logout() {
        final Subject currentUser = SecurityUtils.getSubject();
        if (currentUser != null && currentUser.isAuthenticated()) {
            currentUser.logout();
        }
    }

    /** Returns whether the current Subject is authenticated. */
    @Override
    public boolean isSubjectAuthenticated() {
        return SecurityUtils.getSubject().isAuthenticated();
    }

    /**
     * Aggregates the permissions of the current Subject across all realms.
     *
     * <p>Both object and string permissions are collected: the Javadoc says that
     * getObjectPermissions should contain the results from getStringPermissions,
     * but this is incorrect in practice (JdbcRealm for instance). Failures from
     * individual realms are logged at DEBUG and otherwise ignored, so one broken
     * realm cannot hide the permissions granted by the others.
     *
     * @param context tenant context (currently unused; permissions come from Shiro)
     * @return the union of all permission strings, possibly empty, never null
     */
    @Override
    public Set<String> getCurrentUserPermissions(final TenantContext context) {
        final Subject subject = SecurityUtils.getSubject();

        final Set<String> allPermissions = new HashSet<String>();
        for (final Entry<Realm, Method> realmAndMethod : getAuthorizationInfoMethods.entrySet()) {
            try {
                final AuthorizationInfo authorizationInfo = (AuthorizationInfo) realmAndMethod.getValue().invoke(realmAndMethod.getKey(), subject.getPrincipals());
                if (authorizationInfo == null) {
                    logger.debug("No AuthorizationInfo returned from Realm {}", realmAndMethod.getKey());
                } else {
                    final Collection<org.apache.shiro.authz.Permission> realmObjectPermissions = authorizationInfo.getObjectPermissions();
                    if (realmObjectPermissions == null) {
                        logger.debug("No ObjectPermissions returned from Realm {}", realmAndMethod.getKey());
                    } else {
                        for (final org.apache.shiro.authz.Permission realmPermission : realmObjectPermissions) {
                            // Note: this assumes custom realms return something sensible here
                            final String realmPermissionAsString = realmPermission.toString();
                            if (realmPermissionAsString == null) {
                                logger.debug("Null ObjectPermission#toString returned from Realm {}", realmAndMethod.getKey());
                            } else {
                                allPermissions.add(realmPermissionAsString);
                            }
                        }
                    }

                    final Collection<String> realmStringPermissions = authorizationInfo.getStringPermissions();
                    if (realmStringPermissions == null) {
                        logger.debug("No StringPermissions returned from Realm {}", realmAndMethod.getKey());
                    } else {
                        // Reuse the collection fetched above instead of calling getStringPermissions() twice
                        allPermissions.addAll(realmStringPermissions);
                    }
                }
            } catch (final IllegalAccessException | InvocationTargetException | RuntimeException e) {
                // Ignore: permissions from other realms are still returned
                logger.debug("Unable to retrieve permissions for Realm {}", realmAndMethod.getKey(), e);
            }
        }

        return allPermissions;
    }

    /**
     * Verifies the current Subject holds the given permissions.
     *
     * @param permissions permissions to check
     * @param logical     AND (all required) or OR (at least one required)
     * @throws SecurityApiException wrapping the Shiro AuthorizationException on failure
     */
    @Override
    public void checkCurrentUserPermissions(final List<Permission> permissions, final Logical logical, final TenantContext context) throws SecurityApiException {
        final String[] permissionsString = Lists.<Permission, String>transform(permissions, Functions.toStringFunction()).toArray(new String[permissions.size()]);

        try {
            final Subject subject = SecurityUtils.getSubject();
            if (permissionsString.length == 1) {
                subject.checkPermission(permissionsString[0]);
            } else if (Logical.AND.equals(logical)) {
                subject.checkPermissions(permissionsString);
            } else if (Logical.OR.equals(logical)) {
                boolean hasAtLeastOnePermission = false;
                for (final String permission : permissionsString) {
                    if (subject.isPermitted(permission)) {
                        hasAtLeastOnePermission = true;
                        break;
                    }
                }

                // Cause the exception if none match
                if (!hasAtLeastOnePermission) {
                    subject.checkPermission(permissionsString[0]);
                }
            }
        } catch (final AuthorizationException e) {
            throw new SecurityApiException(e, ErrorCode.SECURITY_NOT_ENOUGH_PERMISSIONS);
        }
    }

    /** Creates a user with the given roles; the caller's name is recorded as creator. */
    @Override
    public void addUserRoles(final String username, final String password, final List<String> roles, final CallContext callContext) throws SecurityApiException {
        userDao.insertUser(username, password, roles, callContext.getUserName());
    }

    /** Updates the password of an existing user. */
    @Override
    public void updateUserPassword(final String username, final String password, final CallContext callContext) throws SecurityApiException {
        userDao.updateUserPassword(username, password, callContext.getUserName());
    }

    /** Replaces the user's roles and invalidates any cached JDBC authorization info. */
    @Override
    public void updateUserRoles(final String username, final List<String> roles, final CallContext callContext) throws SecurityApiException {
        userDao.updateUserRoles(username, roles, callContext.getUserName());
        invalidateJDBCAuthorizationCache(username);
    }

    /** Disables the user and terminates the current session. */
    @Override
    public void invalidateUser(final String username, final CallContext callContext) throws SecurityApiException {
        userDao.invalidateUser(username, callContext.getUserName());

        // Invalidate the JSESSIONID
        logout();
    }

    /** Returns the role names associated with the given user. */
    @Override
    public List<String> getUserRoles(final String username, final TenantContext tenantContext) throws SecurityApiException {
        final List<UserRolesModelDao> permissionsModelDao = userDao.getUserRoles(username);
        return ImmutableList.copyOf(Iterables.transform(permissionsModelDao, new Function<UserRolesModelDao, String>() {
            @Nullable
            @Override
            public String apply(final UserRolesModelDao input) {
                return input.getRoleName();
            }
        }));
    }

    /** Creates a role definition after sanitizing/normalizing the permission strings. */
    @Override
    public void addRoleDefinition(final String role, final List<String> permissions, final CallContext callContext) throws SecurityApiException {
        final List<String> sanitizedPermissions = sanitizePermissions(permissions);
        userDao.addRoleDefinition(role, sanitizedPermissions, callContext.getUserName());
    }

    /** Updates a role definition after sanitizing/normalizing the permission strings. */
    @Override
    public void updateRoleDefinition(final String role, final List<String> permissions, final CallContext callContext) throws SecurityApiException {
        final List<String> sanitizedPermissions = sanitizePermissions(permissions);
        userDao.updateRoleDefinition(role, sanitizedPermissions, callContext.getUserName());
    }

    /** Returns the permission strings attached to the given role. */
    @Override
    public List<String> getRoleDefinition(final String role, final TenantContext tenantContext) {
        final List<RolesPermissionsModelDao> permissionsModelDao = userDao.getRoleDefinition(role);
        return ImmutableList.copyOf(Iterables.transform(permissionsModelDao, new Function<RolesPermissionsModelDao, String>() {
            @Nullable
            @Override
            public String apply(final RolesPermissionsModelDao input) {
                return input.getPermission();
            }
        }));
    }

    /**
     * Normalizes raw permission strings ("group:value", "group", "*", ...).
     *
     * <p>Rules: null/empty entries are dropped; a global "*" short-circuits to
     * ["*"]; "group", "group:" and "group:*" all collapse to "group:*" and
     * supersede any specific values already collected for that group.
     *
     * @throws SecurityApiException if an entry has more than one ':' separator
     */
    private List<String> sanitizePermissions(final List<String> permissionsRaw) throws SecurityApiException {
        if (permissionsRaw == null) {
            return ImmutableList.<String>of();
        }

        final Collection<String> permissions = Collections2.<String>filter(Lists.<String, String>transform(permissionsRaw,
                                                                                                           new Function<String, String>() {
                                                                                                               @Override
                                                                                                               public String apply(final String input) {
                                                                                                                   return Strings.emptyToNull(input);
                                                                                                               }
                                                                                                           }),
                                                                           Predicates.<String>notNull());

        final Map<String, Set<String>> groupToValues = new HashMap<String, Set<String>>();
        for (final String curPerm : permissions) {
            if ("*".equals(curPerm)) {
                // Global wildcard trumps everything else
                return ImmutableList.of("*");
            }

            final String[] permissionParts = curPerm.split(":");
            if (permissionParts.length != 1 && permissionParts.length != 2) {
                throw new SecurityApiException(ErrorCode.SECURITY_INVALID_PERMISSIONS, curPerm);
            }

            Set<String> groupPermissions = groupToValues.get(permissionParts[0]);
            if (groupPermissions == null) {
                groupPermissions = new HashSet<String>();
                groupToValues.put(permissionParts[0], groupPermissions);
            }
            if (permissionParts.length == 1 || "*".equals(permissionParts[1]) || Strings.emptyToNull(permissionParts[1]) == null) {
                // Group-level wildcard replaces any specific values
                groupPermissions.clear();
                groupPermissions.add("*");
            } else {
                groupPermissions.add(permissionParts[1]);
            }
        }

        final List<String> expandedPermissions = new ArrayList<String>();
        for (final String group : groupToValues.keySet()) {
            final Set<String> groupPermissions = groupToValues.get(group);
            for (final String value : groupPermissions) {
                expandedPermissions.add(String.format("%s:%s", group, value));
            }
        }
        return expandedPermissions;
    }

    /**
     * Clears the cached authorization info for the given username in the
     * KillBillJdbcRealm (if one is configured), so role changes take effect immediately.
     */
    private void invalidateJDBCAuthorizationCache(final String username) {
        final Collection<Realm> realms = ((DefaultSecurityManager) SecurityUtils.getSecurityManager()).getRealms();
        final KillBillJdbcRealm killBillJdbcRealm = (KillBillJdbcRealm) Iterables.tryFind(realms, new Predicate<Realm>() {
            @Override
            public boolean apply(@Nullable final Realm input) {
                return (input instanceof KillBillJdbcRealm);
            }
        }).orNull();

        if (killBillJdbcRealm != null) {
            final SimplePrincipalCollection principals = new SimplePrincipalCollection();
            principals.add(username, killBillJdbcRealm.getName());
            killBillJdbcRealm.clearCachedAuthorizationInfo(principals);
        }
    }

    /**
     * Resolves, once, the (protected) getAuthorizationInfo method of each
     * AuthorizingRealm, walking up the class hierarchy until it is found.
     *
     * <p>The search stops at the first (most-derived) declaration: the previous
     * implementation kept scanning superclasses and silently overwrote the match
     * with the topmost declaration.
     */
    private void buildGetAuthorizationInfoMethods() {
        for (final Realm realm : realms) {
            if (!(realm instanceof AuthorizingRealm)) {
                logger.debug("Unable to retrieve getAuthorizationInfo method from Realm {}: not an AuthorizingRealm", realm);
                continue;
            }

            Method getAuthorizationInfoMethod = null;
            Class<?> clazz = realm.getClass();
            while (clazz != null && getAuthorizationInfoMethod == null) {
                final Method[] methods = clazz.getDeclaredMethods();
                for (final Method method : methods) {
                    if ("getAuthorizationInfo".equals(method.getName())) {
                        getAuthorizationInfoMethod = method;
                        // Method is protected in AuthorizingRealm
                        getAuthorizationInfoMethod.setAccessible(true);
                        break;
                    }
                }
                clazz = clazz.getSuperclass();
            }

            if (getAuthorizationInfoMethod == null) {
                logger.debug("Unable to retrieve getAuthorizationInfo method from Realm {}", realm);
                continue;
            }

            getAuthorizationInfoMethods.put(realm, getAuthorizationInfoMethod);
        }
    }
}
| |
package edu.towson.cis.cosc442.project4.coffeemaker;
import org.junit.*;
import static org.junit.Assert.*;
/**
* The class <code>RecipeTest</code> contains tests for the class <code>{@link Recipe}</code>.
*
* @generatedBy CodePro at 3/19/17 7:50 PM
* @author DELL
* @version $Revision: 1.0 $
*/
public class RecipeTest {
/**
* Run the boolean equals(Recipe) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testEquals_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName((String) null);
Recipe r = new Recipe();
boolean result = fixture.equals(r);
// add additional test code here
assertEquals(false, result);
}
/**
* Run the boolean equals(Recipe) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testEquals_2()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
Recipe r = new Recipe();
boolean result = fixture.equals(r);
// add additional test code here
assertEquals(false, result);
}
/**
* Run the boolean equals(Recipe) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testEquals_3()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
Recipe r = new Recipe();
boolean result = fixture.equals(r);
// add additional test code here
assertEquals(false, result);
}
/**
* Run the int getAmtChocolate() method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testGetAmtChocolate_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int result = fixture.getAmtChocolate();
// add additional test code here
assertEquals(0, result);
}
/**
* Run the int getAmtCoffee() method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testGetAmtCoffee_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int result = fixture.getAmtCoffee();
// add additional test code here
assertEquals(0, result);
}
/**
* Run the int getAmtMilk() method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testGetAmtMilk_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int result = fixture.getAmtMilk();
// add additional test code here
assertEquals(0, result);
}
/**
* Run the int getAmtSugar() method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testGetAmtSugar_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int result = fixture.getAmtSugar();
// add additional test code here
assertEquals(0, result);
}
/**
* Run the String getName() method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testGetName_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
String result = fixture.getName();
// add additional test code here
assertEquals("", result);
}
/**
* Run the int getPrice() method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testGetPrice_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int result = fixture.getPrice();
// add additional test code here
assertEquals(0, result);
}
/**
* Run the void setAmtChocolate(int) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetAmtChocolate_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int amtChocolate = -1;
fixture.setAmtChocolate(amtChocolate);
// add additional test code here
}
/**
* Run the void setAmtChocolate(int) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetAmtChocolate_2()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int amtChocolate = 1;
fixture.setAmtChocolate(amtChocolate);
// add additional test code here
}
/**
* Run the void setAmtCoffee(int) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetAmtCoffee_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int amtCoffee = -1;
fixture.setAmtCoffee(amtCoffee);
// add additional test code here
}
/**
* Run the void setAmtCoffee(int) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetAmtCoffee_2()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int amtCoffee = 1;
fixture.setAmtCoffee(amtCoffee);
// add additional test code here
}
/**
* Run the void setAmtMilk(int) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetAmtMilk_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int amtMilk = -1;
fixture.setAmtMilk(amtMilk);
// add additional test code here
}
/**
* Run the void setAmtMilk(int) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetAmtMilk_2()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int amtMilk = 1;
fixture.setAmtMilk(amtMilk);
// add additional test code here
}
/**
* Run the void setAmtSugar(int) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetAmtSugar_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int amtSugar = -1;
fixture.setAmtSugar(amtSugar);
// add additional test code here
}
/**
* Run the void setAmtSugar(int) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetAmtSugar_2()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int amtSugar = 1;
fixture.setAmtSugar(amtSugar);
// add additional test code here
}
/**
* Run the void setName(String) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetName_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
String name = "";
fixture.setName(name);
// add additional test code here
}
/**
* Run the void setPrice(int) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetPrice_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int price = -1;
fixture.setPrice(price);
// add additional test code here
}
/**
* Run the void setPrice(int) method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testSetPrice_2()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
int price = 1;
fixture.setPrice(price);
// add additional test code here
}
/**
* Run the String toString() method test.
*
* @throws Exception
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Test
public void testToString_1()
throws Exception {
Recipe fixture = new Recipe();
fixture.setName("");
String result = fixture.toString();
// add additional test code here
assertEquals("", result);
}
/**
* Perform pre-test initialization.
*
* @throws Exception
* if the initialization fails for some reason
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@Before
public void setUp()
throws Exception {
// add additional set up code here
}
/**
* Perform post-test clean-up.
*
* @throws Exception
* if the clean-up fails for some reason
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
@After
public void tearDown()
throws Exception {
// Add additional tear down code here
}
/**
* Launch the test.
*
* @param args the command line arguments
*
* @generatedBy CodePro at 3/19/17 7:50 PM
*/
public static void main(String[] args) {
new org.junit.runner.JUnitCore().run(RecipeTest.class);
}
}
| |
/*
* Copyright 2014 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.scanner;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Streams;
import com.google.errorprone.BugCheckerInfo;
import com.google.errorprone.bugpatterns.AlreadyChecked;
import com.google.errorprone.bugpatterns.AlwaysThrows;
import com.google.errorprone.bugpatterns.AmbiguousMethodReference;
import com.google.errorprone.bugpatterns.AnnotateFormatMethod;
import com.google.errorprone.bugpatterns.AnnotationMirrorToString;
import com.google.errorprone.bugpatterns.AnnotationPosition;
import com.google.errorprone.bugpatterns.AnnotationValueToString;
import com.google.errorprone.bugpatterns.ArrayAsKeyOfSetOrMap;
import com.google.errorprone.bugpatterns.ArrayEquals;
import com.google.errorprone.bugpatterns.ArrayFillIncompatibleType;
import com.google.errorprone.bugpatterns.ArrayHashCode;
import com.google.errorprone.bugpatterns.ArrayToString;
import com.google.errorprone.bugpatterns.ArraysAsListPrimitiveArray;
import com.google.errorprone.bugpatterns.AssertFalse;
import com.google.errorprone.bugpatterns.AssertThrowsMultipleStatements;
import com.google.errorprone.bugpatterns.AssertionFailureIgnored;
import com.google.errorprone.bugpatterns.AsyncCallableReturnsNull;
import com.google.errorprone.bugpatterns.AsyncFunctionReturnsNull;
import com.google.errorprone.bugpatterns.AutoValueBuilderDefaultsInConstructor;
import com.google.errorprone.bugpatterns.AutoValueFinalMethods;
import com.google.errorprone.bugpatterns.AutoValueImmutableFields;
import com.google.errorprone.bugpatterns.AutoValueSubclassLeaked;
import com.google.errorprone.bugpatterns.BadAnnotationImplementation;
import com.google.errorprone.bugpatterns.BadComparable;
import com.google.errorprone.bugpatterns.BadImport;
import com.google.errorprone.bugpatterns.BadInstanceof;
import com.google.errorprone.bugpatterns.BadShiftAmount;
import com.google.errorprone.bugpatterns.BanSerializableRead;
import com.google.errorprone.bugpatterns.BareDotMetacharacter;
import com.google.errorprone.bugpatterns.BigDecimalEquals;
import com.google.errorprone.bugpatterns.BigDecimalLiteralDouble;
import com.google.errorprone.bugpatterns.BooleanParameter;
import com.google.errorprone.bugpatterns.BoxedPrimitiveConstructor;
import com.google.errorprone.bugpatterns.BoxedPrimitiveEquality;
import com.google.errorprone.bugpatterns.BugChecker;
import com.google.errorprone.bugpatterns.BugPatternNaming;
import com.google.errorprone.bugpatterns.ByteBufferBackingArray;
import com.google.errorprone.bugpatterns.CacheLoaderNull;
import com.google.errorprone.bugpatterns.CannotMockFinalClass;
import com.google.errorprone.bugpatterns.CanonicalDuration;
import com.google.errorprone.bugpatterns.CatchAndPrintStackTrace;
import com.google.errorprone.bugpatterns.CatchFail;
import com.google.errorprone.bugpatterns.CatchingUnchecked;
import com.google.errorprone.bugpatterns.ChainedAssertionLosesContext;
import com.google.errorprone.bugpatterns.ChainingConstructorIgnoresParameter;
import com.google.errorprone.bugpatterns.CharacterGetNumericValue;
import com.google.errorprone.bugpatterns.CheckNotNullMultipleTimes;
import com.google.errorprone.bugpatterns.CheckReturnValue;
import com.google.errorprone.bugpatterns.CheckedExceptionNotThrown;
import com.google.errorprone.bugpatterns.ClassCanBeStatic;
import com.google.errorprone.bugpatterns.ClassName;
import com.google.errorprone.bugpatterns.ClassNamedLikeTypeParameter;
import com.google.errorprone.bugpatterns.ClassNewInstance;
import com.google.errorprone.bugpatterns.CollectionToArraySafeParameter;
import com.google.errorprone.bugpatterns.CollectorShouldNotUseState;
import com.google.errorprone.bugpatterns.ComparableAndComparator;
import com.google.errorprone.bugpatterns.ComparableType;
import com.google.errorprone.bugpatterns.CompareToZero;
import com.google.errorprone.bugpatterns.ComparingThisWithNull;
import com.google.errorprone.bugpatterns.ComparisonContractViolated;
import com.google.errorprone.bugpatterns.ComparisonOutOfRange;
import com.google.errorprone.bugpatterns.CompileTimeConstantChecker;
import com.google.errorprone.bugpatterns.ComplexBooleanConstant;
import com.google.errorprone.bugpatterns.ComputeIfAbsentAmbiguousReference;
import com.google.errorprone.bugpatterns.ConditionalExpressionNumericPromotion;
import com.google.errorprone.bugpatterns.ConstantField;
import com.google.errorprone.bugpatterns.ConstantOverflow;
import com.google.errorprone.bugpatterns.ConstantPatternCompile;
import com.google.errorprone.bugpatterns.DangerousLiteralNullChecker;
import com.google.errorprone.bugpatterns.DateFormatConstant;
import com.google.errorprone.bugpatterns.DeadException;
import com.google.errorprone.bugpatterns.DeadThread;
import com.google.errorprone.bugpatterns.DeduplicateConstants;
import com.google.errorprone.bugpatterns.DefaultCharset;
import com.google.errorprone.bugpatterns.DefaultPackage;
import com.google.errorprone.bugpatterns.DepAnn;
import com.google.errorprone.bugpatterns.DeprecatedVariable;
import com.google.errorprone.bugpatterns.DifferentNameButSame;
import com.google.errorprone.bugpatterns.DiscardedPostfixExpression;
import com.google.errorprone.bugpatterns.DistinctVarargsChecker;
import com.google.errorprone.bugpatterns.DivZero;
import com.google.errorprone.bugpatterns.DoNotCallChecker;
import com.google.errorprone.bugpatterns.DoNotCallSuggester;
import com.google.errorprone.bugpatterns.DoNotClaimAnnotations;
import com.google.errorprone.bugpatterns.DoNotMockAutoValue;
import com.google.errorprone.bugpatterns.DoNotMockChecker;
import com.google.errorprone.bugpatterns.DoubleBraceInitialization;
import com.google.errorprone.bugpatterns.DuplicateMapKeys;
import com.google.errorprone.bugpatterns.EmptyCatch;
import com.google.errorprone.bugpatterns.EmptyIfStatement;
import com.google.errorprone.bugpatterns.EmptyTopLevelDeclaration;
import com.google.errorprone.bugpatterns.EqualsGetClass;
import com.google.errorprone.bugpatterns.EqualsHashCode;
import com.google.errorprone.bugpatterns.EqualsIncompatibleType;
import com.google.errorprone.bugpatterns.EqualsNaN;
import com.google.errorprone.bugpatterns.EqualsNull;
import com.google.errorprone.bugpatterns.EqualsReference;
import com.google.errorprone.bugpatterns.EqualsUnsafeCast;
import com.google.errorprone.bugpatterns.EqualsUsingHashCode;
import com.google.errorprone.bugpatterns.EqualsWrongThing;
import com.google.errorprone.bugpatterns.ErroneousThreadPoolConstructorChecker;
import com.google.errorprone.bugpatterns.ExpectedExceptionChecker;
import com.google.errorprone.bugpatterns.ExtendingJUnitAssert;
import com.google.errorprone.bugpatterns.ExtendsAutoValue;
import com.google.errorprone.bugpatterns.FallThrough;
import com.google.errorprone.bugpatterns.FieldCanBeFinal;
import com.google.errorprone.bugpatterns.FieldCanBeLocal;
import com.google.errorprone.bugpatterns.FieldCanBeStatic;
import com.google.errorprone.bugpatterns.Finally;
import com.google.errorprone.bugpatterns.FloatCast;
import com.google.errorprone.bugpatterns.FloatingPointAssertionWithinEpsilon;
import com.google.errorprone.bugpatterns.FloatingPointLiteralPrecision;
import com.google.errorprone.bugpatterns.ForEachIterable;
import com.google.errorprone.bugpatterns.ForOverrideChecker;
import com.google.errorprone.bugpatterns.FunctionalInterfaceClash;
import com.google.errorprone.bugpatterns.FunctionalInterfaceMethodChanged;
import com.google.errorprone.bugpatterns.FutureReturnValueIgnored;
import com.google.errorprone.bugpatterns.FuturesGetCheckedIllegalExceptionType;
import com.google.errorprone.bugpatterns.FuzzyEqualsShouldNotBeUsedInEqualsMethod;
import com.google.errorprone.bugpatterns.GetClassOnAnnotation;
import com.google.errorprone.bugpatterns.GetClassOnClass;
import com.google.errorprone.bugpatterns.GetClassOnEnum;
import com.google.errorprone.bugpatterns.HashCodeToString;
import com.google.errorprone.bugpatterns.HashtableContains;
import com.google.errorprone.bugpatterns.HidingField;
import com.google.errorprone.bugpatterns.IdentityBinaryExpression;
import com.google.errorprone.bugpatterns.IdentityHashMapBoxing;
import com.google.errorprone.bugpatterns.IdentityHashMapUsage;
import com.google.errorprone.bugpatterns.IgnoredPureGetter;
import com.google.errorprone.bugpatterns.ImmutableMemberCollection;
import com.google.errorprone.bugpatterns.ImmutableSetForContains;
import com.google.errorprone.bugpatterns.ImplementAssertionWithChaining;
import com.google.errorprone.bugpatterns.Incomparable;
import com.google.errorprone.bugpatterns.IncompatibleModifiersChecker;
import com.google.errorprone.bugpatterns.InconsistentCapitalization;
import com.google.errorprone.bugpatterns.InconsistentHashCode;
import com.google.errorprone.bugpatterns.IncorrectMainMethod;
import com.google.errorprone.bugpatterns.IncrementInForLoopAndHeader;
import com.google.errorprone.bugpatterns.IndexOfChar;
import com.google.errorprone.bugpatterns.InexactVarargsConditional;
import com.google.errorprone.bugpatterns.InfiniteRecursion;
import com.google.errorprone.bugpatterns.InitializeInline;
import com.google.errorprone.bugpatterns.InputStreamSlowMultibyteRead;
import com.google.errorprone.bugpatterns.InsecureCipherMode;
import com.google.errorprone.bugpatterns.InstanceOfAndCastMatchWrongType;
import com.google.errorprone.bugpatterns.IntLongMath;
import com.google.errorprone.bugpatterns.InterfaceWithOnlyStatics;
import com.google.errorprone.bugpatterns.InterruptedExceptionSwallowed;
import com.google.errorprone.bugpatterns.InvalidPatternSyntax;
import com.google.errorprone.bugpatterns.InvalidTimeZoneID;
import com.google.errorprone.bugpatterns.InvalidZoneId;
import com.google.errorprone.bugpatterns.IsInstanceIncompatibleType;
import com.google.errorprone.bugpatterns.IsInstanceOfClass;
import com.google.errorprone.bugpatterns.IterableAndIterator;
import com.google.errorprone.bugpatterns.IterablePathParameter;
import com.google.errorprone.bugpatterns.JMockTestWithoutRunWithOrRuleAnnotation;
import com.google.errorprone.bugpatterns.JUnit3FloatingPointComparisonWithoutDelta;
import com.google.errorprone.bugpatterns.JUnit3TestNotRun;
import com.google.errorprone.bugpatterns.JUnit4ClassAnnotationNonStatic;
import com.google.errorprone.bugpatterns.JUnit4ClassUsedInJUnit3;
import com.google.errorprone.bugpatterns.JUnit4SetUpNotRun;
import com.google.errorprone.bugpatterns.JUnit4TearDownNotRun;
import com.google.errorprone.bugpatterns.JUnit4TestNotRun;
import com.google.errorprone.bugpatterns.JUnit4TestsNotRunWithinEnclosed;
import com.google.errorprone.bugpatterns.JUnitAmbiguousTestClass;
import com.google.errorprone.bugpatterns.JUnitAssertSameCheck;
import com.google.errorprone.bugpatterns.JUnitParameterMethodNotFound;
import com.google.errorprone.bugpatterns.JavaLangClash;
import com.google.errorprone.bugpatterns.JavaUtilDateChecker;
import com.google.errorprone.bugpatterns.JdkObsolete;
import com.google.errorprone.bugpatterns.LambdaFunctionalInterface;
import com.google.errorprone.bugpatterns.LiteByteStringUtf8;
import com.google.errorprone.bugpatterns.LiteEnumValueOf;
import com.google.errorprone.bugpatterns.LiteProtoToString;
import com.google.errorprone.bugpatterns.LockNotBeforeTry;
import com.google.errorprone.bugpatterns.LockOnBoxedPrimitive;
import com.google.errorprone.bugpatterns.LogicalAssignment;
import com.google.errorprone.bugpatterns.LongDoubleConversion;
import com.google.errorprone.bugpatterns.LongFloatConversion;
import com.google.errorprone.bugpatterns.LongLiteralLowerCaseSuffix;
import com.google.errorprone.bugpatterns.LoopConditionChecker;
import com.google.errorprone.bugpatterns.LoopOverCharArray;
import com.google.errorprone.bugpatterns.LossyPrimitiveCompare;
import com.google.errorprone.bugpatterns.MathAbsoluteRandom;
import com.google.errorprone.bugpatterns.MathRoundIntLong;
import com.google.errorprone.bugpatterns.MemberName;
import com.google.errorprone.bugpatterns.MemoizeConstantVisitorStateLookups;
import com.google.errorprone.bugpatterns.MethodCanBeStatic;
import com.google.errorprone.bugpatterns.MissingBraces;
import com.google.errorprone.bugpatterns.MissingCasesInEnumSwitch;
import com.google.errorprone.bugpatterns.MissingDefault;
import com.google.errorprone.bugpatterns.MissingFail;
import com.google.errorprone.bugpatterns.MissingImplementsComparable;
import com.google.errorprone.bugpatterns.MissingOverride;
import com.google.errorprone.bugpatterns.MissingSuperCall;
import com.google.errorprone.bugpatterns.MissingTestCall;
import com.google.errorprone.bugpatterns.MisusedDayOfYear;
import com.google.errorprone.bugpatterns.MisusedWeekYear;
import com.google.errorprone.bugpatterns.MixedArrayDimensions;
import com.google.errorprone.bugpatterns.MixedDescriptors;
import com.google.errorprone.bugpatterns.MixedMutabilityReturnType;
import com.google.errorprone.bugpatterns.MockitoUsage;
import com.google.errorprone.bugpatterns.ModifiedButNotUsed;
import com.google.errorprone.bugpatterns.ModifyCollectionInEnhancedForLoop;
import com.google.errorprone.bugpatterns.ModifySourceCollectionInStream;
import com.google.errorprone.bugpatterns.ModifyingCollectionWithItself;
import com.google.errorprone.bugpatterns.MultiVariableDeclaration;
import com.google.errorprone.bugpatterns.MultipleParallelOrSequentialCalls;
import com.google.errorprone.bugpatterns.MultipleTopLevelClasses;
import com.google.errorprone.bugpatterns.MultipleUnaryOperatorsInMethodCall;
import com.google.errorprone.bugpatterns.MustBeClosedChecker;
import com.google.errorprone.bugpatterns.MutablePublicArray;
import com.google.errorprone.bugpatterns.NCopiesOfChar;
import com.google.errorprone.bugpatterns.NarrowingCompoundAssignment;
import com.google.errorprone.bugpatterns.NegativeCharLiteral;
import com.google.errorprone.bugpatterns.NestedInstanceOfConditions;
import com.google.errorprone.bugpatterns.NoAllocationChecker;
import com.google.errorprone.bugpatterns.NoFunctionalReturnType;
import com.google.errorprone.bugpatterns.NonAtomicVolatileUpdate;
import com.google.errorprone.bugpatterns.NonCanonicalStaticImport;
import com.google.errorprone.bugpatterns.NonCanonicalStaticMemberImport;
import com.google.errorprone.bugpatterns.NonCanonicalType;
import com.google.errorprone.bugpatterns.NonFinalCompileTimeConstant;
import com.google.errorprone.bugpatterns.NonOverridingEquals;
import com.google.errorprone.bugpatterns.NonRuntimeAnnotation;
import com.google.errorprone.bugpatterns.NullOptional;
import com.google.errorprone.bugpatterns.NullTernary;
import com.google.errorprone.bugpatterns.NullableConstructor;
import com.google.errorprone.bugpatterns.NullablePrimitive;
import com.google.errorprone.bugpatterns.NullablePrimitiveArray;
import com.google.errorprone.bugpatterns.NullableVoid;
import com.google.errorprone.bugpatterns.ObjectEqualsForPrimitives;
import com.google.errorprone.bugpatterns.ObjectToString;
import com.google.errorprone.bugpatterns.ObjectsHashCodePrimitive;
import com.google.errorprone.bugpatterns.OperatorPrecedence;
import com.google.errorprone.bugpatterns.OptionalEquality;
import com.google.errorprone.bugpatterns.OptionalMapToOptional;
import com.google.errorprone.bugpatterns.OptionalMapUnusedValue;
import com.google.errorprone.bugpatterns.OptionalNotPresent;
import com.google.errorprone.bugpatterns.OptionalOfRedundantMethod;
import com.google.errorprone.bugpatterns.OrphanedFormatString;
import com.google.errorprone.bugpatterns.OutlineNone;
import com.google.errorprone.bugpatterns.OverrideThrowableToString;
import com.google.errorprone.bugpatterns.Overrides;
import com.google.errorprone.bugpatterns.PackageInfo;
import com.google.errorprone.bugpatterns.PackageLocation;
import com.google.errorprone.bugpatterns.ParameterComment;
import com.google.errorprone.bugpatterns.ParameterName;
import com.google.errorprone.bugpatterns.ParametersButNotParameterized;
import com.google.errorprone.bugpatterns.PreconditionsCheckNotNullRepeated;
import com.google.errorprone.bugpatterns.PreconditionsInvalidPlaceholder;
import com.google.errorprone.bugpatterns.PreferredInterfaceType;
import com.google.errorprone.bugpatterns.PrimitiveArrayPassedToVarargsMethod;
import com.google.errorprone.bugpatterns.PrimitiveAtomicReference;
import com.google.errorprone.bugpatterns.PrivateConstructorForUtilityClass;
import com.google.errorprone.bugpatterns.PrivateSecurityContractProtoAccess;
import com.google.errorprone.bugpatterns.ProtectedMembersInFinalClass;
import com.google.errorprone.bugpatterns.ProtoBuilderReturnValueIgnored;
import com.google.errorprone.bugpatterns.ProtoFieldNullComparison;
import com.google.errorprone.bugpatterns.ProtoRedundantSet;
import com.google.errorprone.bugpatterns.ProtoStringFieldReferenceEquality;
import com.google.errorprone.bugpatterns.ProtoTruthMixedDescriptors;
import com.google.errorprone.bugpatterns.ProtocolBufferOrdinal;
import com.google.errorprone.bugpatterns.ProtosAsKeyOfSetOrMap;
import com.google.errorprone.bugpatterns.PublicApiNamedStreamShouldReturnStream;
import com.google.errorprone.bugpatterns.PublicConstructorForAbstractClass;
import com.google.errorprone.bugpatterns.RandomCast;
import com.google.errorprone.bugpatterns.RandomModInteger;
import com.google.errorprone.bugpatterns.ReachabilityFenceUsage;
import com.google.errorprone.bugpatterns.RedundantOverride;
import com.google.errorprone.bugpatterns.RedundantThrows;
import com.google.errorprone.bugpatterns.ReferenceEquality;
import com.google.errorprone.bugpatterns.RemoveUnusedImports;
import com.google.errorprone.bugpatterns.RequiredModifiersChecker;
import com.google.errorprone.bugpatterns.RestrictedApiChecker;
import com.google.errorprone.bugpatterns.RethrowReflectiveOperationExceptionAsLinkageError;
import com.google.errorprone.bugpatterns.ReturnValueIgnored;
import com.google.errorprone.bugpatterns.ReturnsNullCollection;
import com.google.errorprone.bugpatterns.RobolectricShadowDirectlyOn;
import com.google.errorprone.bugpatterns.RxReturnValueIgnored;
import com.google.errorprone.bugpatterns.SameNameButDifferent;
import com.google.errorprone.bugpatterns.SelfAssignment;
import com.google.errorprone.bugpatterns.SelfComparison;
import com.google.errorprone.bugpatterns.SelfEquals;
import com.google.errorprone.bugpatterns.ShortCircuitBoolean;
import com.google.errorprone.bugpatterns.ShouldHaveEvenArgs;
import com.google.errorprone.bugpatterns.SizeGreaterThanOrEqualsZero;
import com.google.errorprone.bugpatterns.StaticAssignmentInConstructor;
import com.google.errorprone.bugpatterns.StaticAssignmentOfThrowable;
import com.google.errorprone.bugpatterns.StaticMockMember;
import com.google.errorprone.bugpatterns.StaticQualifiedUsingExpression;
import com.google.errorprone.bugpatterns.StreamResourceLeak;
import com.google.errorprone.bugpatterns.StreamToIterable;
import com.google.errorprone.bugpatterns.StreamToString;
import com.google.errorprone.bugpatterns.StringBuilderInitWithChar;
import com.google.errorprone.bugpatterns.StringEquality;
import com.google.errorprone.bugpatterns.StringSplitter;
import com.google.errorprone.bugpatterns.StronglyTypeByteString;
import com.google.errorprone.bugpatterns.SubstringOfZero;
import com.google.errorprone.bugpatterns.SuppressWarningsDeprecated;
import com.google.errorprone.bugpatterns.SuppressWarningsWithoutExplanation;
import com.google.errorprone.bugpatterns.SwigMemoryLeak;
import com.google.errorprone.bugpatterns.SwitchDefault;
import com.google.errorprone.bugpatterns.SymbolToString;
import com.google.errorprone.bugpatterns.SystemExitOutsideMain;
import com.google.errorprone.bugpatterns.SystemOut;
import com.google.errorprone.bugpatterns.TestExceptionChecker;
import com.google.errorprone.bugpatterns.TestParametersNotInitialized;
import com.google.errorprone.bugpatterns.TheoryButNoTheories;
import com.google.errorprone.bugpatterns.ThreadJoinLoop;
import com.google.errorprone.bugpatterns.ThreadLocalUsage;
import com.google.errorprone.bugpatterns.ThreeLetterTimeZoneID;
import com.google.errorprone.bugpatterns.ThrowIfUncheckedKnownChecked;
import com.google.errorprone.bugpatterns.ThrowNull;
import com.google.errorprone.bugpatterns.ThrowSpecificExceptions;
import com.google.errorprone.bugpatterns.ThrowsUncheckedException;
import com.google.errorprone.bugpatterns.ToStringReturnsNull;
import com.google.errorprone.bugpatterns.TooManyParameters;
import com.google.errorprone.bugpatterns.TransientMisuse;
import com.google.errorprone.bugpatterns.TreeToString;
import com.google.errorprone.bugpatterns.TruthAssertExpected;
import com.google.errorprone.bugpatterns.TruthConstantAsserts;
import com.google.errorprone.bugpatterns.TruthGetOrDefault;
import com.google.errorprone.bugpatterns.TruthSelfEquals;
import com.google.errorprone.bugpatterns.TryFailRefactoring;
import com.google.errorprone.bugpatterns.TryFailThrowable;
import com.google.errorprone.bugpatterns.TryWithResourcesVariable;
import com.google.errorprone.bugpatterns.TypeEqualsChecker;
import com.google.errorprone.bugpatterns.TypeNameShadowing;
import com.google.errorprone.bugpatterns.TypeParameterNaming;
import com.google.errorprone.bugpatterns.TypeParameterQualifier;
import com.google.errorprone.bugpatterns.TypeParameterShadowing;
import com.google.errorprone.bugpatterns.TypeParameterUnusedInFormals;
import com.google.errorprone.bugpatterns.TypeToString;
import com.google.errorprone.bugpatterns.URLEqualsHashCode;
import com.google.errorprone.bugpatterns.UndefinedEquals;
import com.google.errorprone.bugpatterns.UngroupedOverloads;
import com.google.errorprone.bugpatterns.UnicodeDirectionalityCharacters;
import com.google.errorprone.bugpatterns.UnicodeEscape;
import com.google.errorprone.bugpatterns.UnicodeInCode;
import com.google.errorprone.bugpatterns.UnnecessarilyFullyQualified;
import com.google.errorprone.bugpatterns.UnnecessarilyVisible;
import com.google.errorprone.bugpatterns.UnnecessaryAnonymousClass;
import com.google.errorprone.bugpatterns.UnnecessaryAssignment;
import com.google.errorprone.bugpatterns.UnnecessaryBoxedAssignment;
import com.google.errorprone.bugpatterns.UnnecessaryBoxedVariable;
import com.google.errorprone.bugpatterns.UnnecessaryDefaultInEnumSwitch;
import com.google.errorprone.bugpatterns.UnnecessaryFinal;
import com.google.errorprone.bugpatterns.UnnecessaryLambda;
import com.google.errorprone.bugpatterns.UnnecessaryLongToIntConversion;
import com.google.errorprone.bugpatterns.UnnecessaryMethodInvocationMatcher;
import com.google.errorprone.bugpatterns.UnnecessaryMethodReference;
import com.google.errorprone.bugpatterns.UnnecessaryOptionalGet;
import com.google.errorprone.bugpatterns.UnnecessaryParentheses;
import com.google.errorprone.bugpatterns.UnnecessarySetDefault;
import com.google.errorprone.bugpatterns.UnnecessaryStaticImport;
import com.google.errorprone.bugpatterns.UnnecessaryTypeArgument;
import com.google.errorprone.bugpatterns.UnsafeFinalization;
import com.google.errorprone.bugpatterns.UnsafeLocaleUsage;
import com.google.errorprone.bugpatterns.UnsafeReflectiveConstructionCast;
import com.google.errorprone.bugpatterns.UnsynchronizedOverridesSynchronized;
import com.google.errorprone.bugpatterns.UnusedAnonymousClass;
import com.google.errorprone.bugpatterns.UnusedCollectionModifiedInPlace;
import com.google.errorprone.bugpatterns.UnusedException;
import com.google.errorprone.bugpatterns.UnusedMethod;
import com.google.errorprone.bugpatterns.UnusedNestedClass;
import com.google.errorprone.bugpatterns.UnusedVariable;
import com.google.errorprone.bugpatterns.UseCorrectAssertInTests;
import com.google.errorprone.bugpatterns.UseEnumSwitch;
import com.google.errorprone.bugpatterns.VarChecker;
import com.google.errorprone.bugpatterns.VarTypeName;
import com.google.errorprone.bugpatterns.VariableNameSameAsType;
import com.google.errorprone.bugpatterns.WaitNotInLoop;
import com.google.errorprone.bugpatterns.WildcardImport;
import com.google.errorprone.bugpatterns.WithSignatureDiscouraged;
import com.google.errorprone.bugpatterns.WrongOneof;
import com.google.errorprone.bugpatterns.XorPower;
import com.google.errorprone.bugpatterns.android.BinderIdentityRestoredDangerously;
import com.google.errorprone.bugpatterns.android.BundleDeserializationCast;
import com.google.errorprone.bugpatterns.android.FragmentInjection;
import com.google.errorprone.bugpatterns.android.FragmentNotInstantiable;
import com.google.errorprone.bugpatterns.android.HardCodedSdCardPath;
import com.google.errorprone.bugpatterns.android.IsLoggableTagLength;
import com.google.errorprone.bugpatterns.android.MislabeledAndroidString;
import com.google.errorprone.bugpatterns.android.ParcelableCreator;
import com.google.errorprone.bugpatterns.android.RectIntersectReturnValueIgnored;
import com.google.errorprone.bugpatterns.android.StaticOrDefaultInterfaceMethod;
import com.google.errorprone.bugpatterns.android.WakelockReleasedDangerously;
import com.google.errorprone.bugpatterns.apidiff.AndroidJdkLibsChecker;
import com.google.errorprone.bugpatterns.apidiff.Java7ApiChecker;
import com.google.errorprone.bugpatterns.apidiff.Java8ApiChecker;
import com.google.errorprone.bugpatterns.argumentselectiondefects.ArgumentSelectionDefectChecker;
import com.google.errorprone.bugpatterns.argumentselectiondefects.AssertEqualsArgumentOrderChecker;
import com.google.errorprone.bugpatterns.argumentselectiondefects.AutoValueConstructorOrderChecker;
import com.google.errorprone.bugpatterns.checkreturnvalue.UsingJsr305CheckReturnValue;
import com.google.errorprone.bugpatterns.collectionincompatibletype.CollectionIncompatibleType;
import com.google.errorprone.bugpatterns.collectionincompatibletype.CollectionUndefinedEquality;
import com.google.errorprone.bugpatterns.collectionincompatibletype.CompatibleWithMisuse;
import com.google.errorprone.bugpatterns.collectionincompatibletype.IncompatibleArgumentType;
import com.google.errorprone.bugpatterns.collectionincompatibletype.TruthIncompatibleType;
import com.google.errorprone.bugpatterns.flogger.FloggerArgumentToString;
import com.google.errorprone.bugpatterns.flogger.FloggerFormatString;
import com.google.errorprone.bugpatterns.flogger.FloggerLogString;
import com.google.errorprone.bugpatterns.flogger.FloggerLogVarargs;
import com.google.errorprone.bugpatterns.flogger.FloggerLogWithCause;
import com.google.errorprone.bugpatterns.flogger.FloggerMessageFormat;
import com.google.errorprone.bugpatterns.flogger.FloggerPassedAround;
import com.google.errorprone.bugpatterns.flogger.FloggerRedundantIsEnabled;
import com.google.errorprone.bugpatterns.flogger.FloggerRequiredModifiers;
import com.google.errorprone.bugpatterns.flogger.FloggerSplitLogStatement;
import com.google.errorprone.bugpatterns.flogger.FloggerStringConcatenation;
import com.google.errorprone.bugpatterns.flogger.FloggerWithCause;
import com.google.errorprone.bugpatterns.flogger.FloggerWithoutCause;
import com.google.errorprone.bugpatterns.formatstring.FormatString;
import com.google.errorprone.bugpatterns.formatstring.FormatStringAnnotationChecker;
import com.google.errorprone.bugpatterns.formatstring.InlineFormatString;
import com.google.errorprone.bugpatterns.inject.AssistedInjectAndInjectOnConstructors;
import com.google.errorprone.bugpatterns.inject.AssistedInjectAndInjectOnSameConstructor;
import com.google.errorprone.bugpatterns.inject.AutoFactoryAtInject;
import com.google.errorprone.bugpatterns.inject.CloseableProvides;
import com.google.errorprone.bugpatterns.inject.InjectOnConstructorOfAbstractClass;
import com.google.errorprone.bugpatterns.inject.InjectOnMemberAndConstructor;
import com.google.errorprone.bugpatterns.inject.InjectedConstructorAnnotations;
import com.google.errorprone.bugpatterns.inject.InvalidTargetingOnScopingAnnotation;
import com.google.errorprone.bugpatterns.inject.JavaxInjectOnAbstractMethod;
import com.google.errorprone.bugpatterns.inject.JavaxInjectOnFinalField;
import com.google.errorprone.bugpatterns.inject.MisplacedScopeAnnotations;
import com.google.errorprone.bugpatterns.inject.MoreThanOneInjectableConstructor;
import com.google.errorprone.bugpatterns.inject.MoreThanOneQualifier;
import com.google.errorprone.bugpatterns.inject.MoreThanOneScopeAnnotationOnClass;
import com.google.errorprone.bugpatterns.inject.OverlappingQualifierAndScopeAnnotation;
import com.google.errorprone.bugpatterns.inject.QualifierOrScopeOnInjectMethod;
import com.google.errorprone.bugpatterns.inject.QualifierWithTypeUse;
import com.google.errorprone.bugpatterns.inject.ScopeAnnotationOnInterfaceOrAbstractClass;
import com.google.errorprone.bugpatterns.inject.ScopeOrQualifierAnnotationRetention;
import com.google.errorprone.bugpatterns.inject.dagger.AndroidInjectionBeforeSuper;
import com.google.errorprone.bugpatterns.inject.dagger.EmptySetMultibindingContributions;
import com.google.errorprone.bugpatterns.inject.dagger.PrivateConstructorForNoninstantiableModule;
import com.google.errorprone.bugpatterns.inject.dagger.ProvidesNull;
import com.google.errorprone.bugpatterns.inject.dagger.RefersToDaggerCodegen;
import com.google.errorprone.bugpatterns.inject.dagger.ScopeOnModule;
import com.google.errorprone.bugpatterns.inject.dagger.UseBinds;
import com.google.errorprone.bugpatterns.inject.guice.AssistedInjectScoping;
import com.google.errorprone.bugpatterns.inject.guice.AssistedParameters;
import com.google.errorprone.bugpatterns.inject.guice.BindingToUnqualifiedCommonType;
import com.google.errorprone.bugpatterns.inject.guice.InjectOnFinalField;
import com.google.errorprone.bugpatterns.inject.guice.OverridesGuiceInjectableMethod;
import com.google.errorprone.bugpatterns.inject.guice.OverridesJavaxInjectableMethod;
import com.google.errorprone.bugpatterns.inject.guice.ProvidesMethodOutsideOfModule;
import com.google.errorprone.bugpatterns.inlineme.Inliner;
import com.google.errorprone.bugpatterns.inlineme.Suggester;
import com.google.errorprone.bugpatterns.inlineme.Validator;
import com.google.errorprone.bugpatterns.javadoc.AlmostJavadoc;
import com.google.errorprone.bugpatterns.javadoc.EmptyBlockTag;
import com.google.errorprone.bugpatterns.javadoc.EscapedEntity;
import com.google.errorprone.bugpatterns.javadoc.InheritDoc;
import com.google.errorprone.bugpatterns.javadoc.InvalidBlockTag;
import com.google.errorprone.bugpatterns.javadoc.InvalidInlineTag;
import com.google.errorprone.bugpatterns.javadoc.InvalidLink;
import com.google.errorprone.bugpatterns.javadoc.InvalidParam;
import com.google.errorprone.bugpatterns.javadoc.InvalidThrows;
import com.google.errorprone.bugpatterns.javadoc.InvalidThrowsLink;
import com.google.errorprone.bugpatterns.javadoc.MalformedInlineTag;
import com.google.errorprone.bugpatterns.javadoc.MissingSummary;
import com.google.errorprone.bugpatterns.javadoc.ReturnFromVoid;
import com.google.errorprone.bugpatterns.javadoc.UnescapedEntity;
import com.google.errorprone.bugpatterns.javadoc.UnrecognisedJavadocTag;
import com.google.errorprone.bugpatterns.javadoc.UrlInSee;
import com.google.errorprone.bugpatterns.nullness.EqualsBrokenForNull;
import com.google.errorprone.bugpatterns.nullness.EqualsMissingNullable;
import com.google.errorprone.bugpatterns.nullness.FieldMissingNullable;
import com.google.errorprone.bugpatterns.nullness.ParameterMissingNullable;
import com.google.errorprone.bugpatterns.nullness.ReturnMissingNullable;
import com.google.errorprone.bugpatterns.nullness.UnnecessaryCheckNotNull;
import com.google.errorprone.bugpatterns.nullness.VoidMissingNullable;
import com.google.errorprone.bugpatterns.overloading.InconsistentOverloads;
import com.google.errorprone.bugpatterns.threadsafety.DoubleCheckedLocking;
import com.google.errorprone.bugpatterns.threadsafety.GuardedByChecker;
import com.google.errorprone.bugpatterns.threadsafety.ImmutableAnnotationChecker;
import com.google.errorprone.bugpatterns.threadsafety.ImmutableChecker;
import com.google.errorprone.bugpatterns.threadsafety.ImmutableEnumChecker;
import com.google.errorprone.bugpatterns.threadsafety.ImmutableRefactoring;
import com.google.errorprone.bugpatterns.threadsafety.LockMethodChecker;
import com.google.errorprone.bugpatterns.threadsafety.StaticGuardedByInstance;
import com.google.errorprone.bugpatterns.threadsafety.SynchronizeOnNonFinalField;
import com.google.errorprone.bugpatterns.threadsafety.ThreadPriorityCheck;
import com.google.errorprone.bugpatterns.threadsafety.UnlockMethodChecker;
import com.google.errorprone.bugpatterns.time.DateChecker;
import com.google.errorprone.bugpatterns.time.DurationFrom;
import com.google.errorprone.bugpatterns.time.DurationGetTemporalUnit;
import com.google.errorprone.bugpatterns.time.DurationTemporalUnit;
import com.google.errorprone.bugpatterns.time.DurationToLongTimeUnit;
import com.google.errorprone.bugpatterns.time.FromTemporalAccessor;
import com.google.errorprone.bugpatterns.time.InstantTemporalUnit;
import com.google.errorprone.bugpatterns.time.InvalidJavaTimeConstant;
import com.google.errorprone.bugpatterns.time.JavaDurationGetSecondsGetNano;
import com.google.errorprone.bugpatterns.time.JavaDurationWithNanos;
import com.google.errorprone.bugpatterns.time.JavaDurationWithSeconds;
import com.google.errorprone.bugpatterns.time.JavaInstantGetSecondsGetNano;
import com.google.errorprone.bugpatterns.time.JavaLocalDateTimeGetNano;
import com.google.errorprone.bugpatterns.time.JavaLocalTimeGetNano;
import com.google.errorprone.bugpatterns.time.JavaPeriodGetDays;
import com.google.errorprone.bugpatterns.time.JavaTimeDefaultTimeZone;
import com.google.errorprone.bugpatterns.time.JodaConstructors;
import com.google.errorprone.bugpatterns.time.JodaDateTimeConstants;
import com.google.errorprone.bugpatterns.time.JodaDurationWithMillis;
import com.google.errorprone.bugpatterns.time.JodaInstantWithMillis;
import com.google.errorprone.bugpatterns.time.JodaNewPeriod;
import com.google.errorprone.bugpatterns.time.JodaPlusMinusLong;
import com.google.errorprone.bugpatterns.time.JodaTimeConverterManager;
import com.google.errorprone.bugpatterns.time.JodaToSelf;
import com.google.errorprone.bugpatterns.time.JodaWithDurationAddedLong;
import com.google.errorprone.bugpatterns.time.LocalDateTemporalAmount;
import com.google.errorprone.bugpatterns.time.PeriodFrom;
import com.google.errorprone.bugpatterns.time.PeriodGetTemporalUnit;
import com.google.errorprone.bugpatterns.time.PeriodTimeMath;
import com.google.errorprone.bugpatterns.time.PreferJavaTimeOverload;
import com.google.errorprone.bugpatterns.time.ProtoDurationGetSecondsGetNano;
import com.google.errorprone.bugpatterns.time.ProtoTimestampGetSecondsGetNano;
import com.google.errorprone.bugpatterns.time.StronglyTypeTime;
import com.google.errorprone.bugpatterns.time.TemporalAccessorGetChronoField;
import com.google.errorprone.bugpatterns.time.TimeUnitConversionChecker;
import com.google.errorprone.bugpatterns.time.TimeUnitMismatch;
import com.google.errorprone.bugpatterns.time.ZoneIdOfZ;
import java.util.Arrays;
/**
* Static helper class that provides {@link ScannerSupplier}s and {@link BugChecker}s for the
* built-in Error Prone checks, as opposed to plugin checks or checks used in tests.
*/
public class BuiltInCheckerSuppliers {
@SafeVarargs
public static ImmutableSet<BugCheckerInfo> getSuppliers(Class<? extends BugChecker>... checkers) {
return getSuppliers(Arrays.asList(checkers));
}
public static ImmutableSet<BugCheckerInfo> getSuppliers(
Iterable<Class<? extends BugChecker>> checkers) {
return Streams.stream(checkers)
.map(BugCheckerInfo::create)
.collect(ImmutableSet.toImmutableSet());
}
/** Returns a {@link ScannerSupplier} with all {@link BugChecker}s in Error Prone. */
public static ScannerSupplier allChecks() {
return ScannerSupplier.fromBugCheckerInfos(
Iterables.concat(ENABLED_ERRORS, ENABLED_WARNINGS, DISABLED_CHECKS));
}
/**
* Returns a {@link ScannerSupplier} with the {@link BugChecker}s that are in the ENABLED lists.
*/
public static ScannerSupplier defaultChecks() {
return allChecks()
.filter(Predicates.or(Predicates.in(ENABLED_ERRORS), Predicates.in(ENABLED_WARNINGS)));
}
/**
* Returns a {@link ScannerSupplier} with the {@link BugChecker}s that are in the ENABLED_ERRORS
* list.
*/
public static ScannerSupplier errorChecks() {
return allChecks().filter(Predicates.in(ENABLED_ERRORS));
}
  /**
   * A list of all checks with severity ERROR that are on by default.
   *
   * <p>Together with {@code ENABLED_WARNINGS} this makes up {@link #defaultChecks()}; together
   * with {@code ENABLED_WARNINGS} and {@code DISABLED_CHECKS} it makes up {@link #allChecks()}.
   *
   * <p>The entries between the {@code keep-sorted} markers are maintained in alphabetical order by
   * tooling — add new checks in sorted position and do not reorder by hand.
   */
  public static final ImmutableSet<BugCheckerInfo> ENABLED_ERRORS =
      getSuppliers(
          // keep-sorted start
          AlwaysThrows.class,
          AndroidInjectionBeforeSuper.class,
          ArrayEquals.class,
          ArrayFillIncompatibleType.class,
          ArrayHashCode.class,
          ArrayToString.class,
          ArraysAsListPrimitiveArray.class,
          AssistedInjectScoping.class,
          AssistedParameters.class,
          AsyncCallableReturnsNull.class,
          AsyncFunctionReturnsNull.class,
          AutoValueBuilderDefaultsInConstructor.class,
          AutoValueConstructorOrderChecker.class,
          BadAnnotationImplementation.class,
          BadShiftAmount.class,
          BoxedPrimitiveEquality.class,
          BundleDeserializationCast.class,
          ChainingConstructorIgnoresParameter.class,
          CheckNotNullMultipleTimes.class,
          CheckReturnValue.class,
          CollectionIncompatibleType.class,
          CollectionToArraySafeParameter.class,
          ComparableType.class,
          ComparingThisWithNull.class,
          ComparisonOutOfRange.class,
          CompatibleWithMisuse.class,
          CompileTimeConstantChecker.class,
          ComputeIfAbsentAmbiguousReference.class,
          ConditionalExpressionNumericPromotion.class,
          ConstantOverflow.class,
          DangerousLiteralNullChecker.class,
          DeadException.class,
          DeadThread.class,
          DiscardedPostfixExpression.class,
          DoNotCallChecker.class,
          DoNotMockChecker.class,
          DoubleBraceInitialization.class,
          DuplicateMapKeys.class,
          DurationFrom.class,
          DurationGetTemporalUnit.class,
          DurationTemporalUnit.class,
          DurationToLongTimeUnit.class,
          EqualsHashCode.class,
          EqualsNaN.class,
          EqualsNull.class,
          EqualsReference.class,
          EqualsWrongThing.class,
          FloggerFormatString.class,
          FloggerLogString.class,
          FloggerLogVarargs.class,
          FloggerSplitLogStatement.class,
          ForOverrideChecker.class,
          FormatString.class,
          FormatStringAnnotationChecker.class,
          FromTemporalAccessor.class,
          FunctionalInterfaceMethodChanged.class,
          FuturesGetCheckedIllegalExceptionType.class,
          GetClassOnAnnotation.class,
          GetClassOnClass.class,
          GuardedByChecker.class,
          HashtableContains.class,
          IdentityBinaryExpression.class,
          IdentityHashMapBoxing.class,
          ImmutableChecker.class,
          Incomparable.class,
          IncompatibleArgumentType.class,
          IncompatibleModifiersChecker.class,
          IndexOfChar.class,
          InexactVarargsConditional.class,
          InfiniteRecursion.class,
          InjectOnFinalField.class,
          InjectOnMemberAndConstructor.class,
          InstantTemporalUnit.class,
          InvalidJavaTimeConstant.class,
          InvalidPatternSyntax.class,
          InvalidTimeZoneID.class,
          InvalidZoneId.class,
          IsInstanceIncompatibleType.class,
          IsInstanceOfClass.class,
          IsLoggableTagLength.class,
          JUnit3TestNotRun.class,
          JUnit4ClassAnnotationNonStatic.class,
          JUnit4SetUpNotRun.class,
          JUnit4TearDownNotRun.class,
          JUnit4TestNotRun.class,
          JUnit4TestsNotRunWithinEnclosed.class,
          JUnitAssertSameCheck.class,
          JUnitParameterMethodNotFound.class,
          JavaxInjectOnAbstractMethod.class,
          JodaToSelf.class,
          LiteByteStringUtf8.class,
          LocalDateTemporalAmount.class,
          LockOnBoxedPrimitive.class,
          LoopConditionChecker.class,
          LossyPrimitiveCompare.class,
          MathRoundIntLong.class,
          MislabeledAndroidString.class,
          MisplacedScopeAnnotations.class,
          MissingSuperCall.class,
          MissingTestCall.class,
          MisusedDayOfYear.class,
          MisusedWeekYear.class,
          MixedDescriptors.class,
          MockitoUsage.class,
          ModifyingCollectionWithItself.class,
          MoreThanOneInjectableConstructor.class,
          MoreThanOneScopeAnnotationOnClass.class,
          MustBeClosedChecker.class,
          NCopiesOfChar.class,
          NonCanonicalStaticImport.class,
          NonFinalCompileTimeConstant.class,
          NonRuntimeAnnotation.class,
          NullTernary.class,
          OptionalEquality.class,
          OptionalMapUnusedValue.class,
          OptionalOfRedundantMethod.class,
          OverlappingQualifierAndScopeAnnotation.class,
          OverridesJavaxInjectableMethod.class,
          PackageInfo.class,
          ParametersButNotParameterized.class,
          ParcelableCreator.class,
          PeriodFrom.class,
          PeriodGetTemporalUnit.class,
          PeriodTimeMath.class,
          PreconditionsInvalidPlaceholder.class,
          PrivateSecurityContractProtoAccess.class,
          ProtoBuilderReturnValueIgnored.class,
          ProtoFieldNullComparison.class,
          ProtoStringFieldReferenceEquality.class,
          ProtoTruthMixedDescriptors.class,
          ProtocolBufferOrdinal.class,
          ProvidesMethodOutsideOfModule.class,
          ProvidesNull.class,
          RandomCast.class,
          RandomModInteger.class,
          RectIntersectReturnValueIgnored.class,
          RequiredModifiersChecker.class,
          RestrictedApiChecker.class,
          ReturnValueIgnored.class,
          SelfAssignment.class,
          SelfComparison.class,
          SelfEquals.class,
          ShouldHaveEvenArgs.class,
          SizeGreaterThanOrEqualsZero.class,
          StreamToString.class,
          StringBuilderInitWithChar.class,
          SubstringOfZero.class,
          SuppressWarningsDeprecated.class,
          TemporalAccessorGetChronoField.class,
          TestParametersNotInitialized.class,
          TheoryButNoTheories.class,
          ThrowIfUncheckedKnownChecked.class,
          ThrowNull.class,
          TreeToString.class,
          TruthSelfEquals.class,
          TryFailThrowable.class,
          TypeParameterQualifier.class,
          UnicodeDirectionalityCharacters.class,
          UnicodeInCode.class,
          UnnecessaryCheckNotNull.class,
          UnnecessaryTypeArgument.class,
          UnusedAnonymousClass.class,
          UnusedCollectionModifiedInPlace.class,
          Validator.class,
          VarTypeName.class,
          WrongOneof.class,
          XorPower.class,
          ZoneIdOfZ.class
          // keep-sorted end
          );
  /**
   * All checks with severity {@code WARNING} that are enabled by default.
   *
   * <p>Entries are kept alphabetically sorted between the {@code keep-sorted} markers below, which
   * are enforced by tooling — insert new checks in sorted position rather than appending.
   */
  public static final ImmutableSet<BugCheckerInfo> ENABLED_WARNINGS =
      getSuppliers(
          // keep-sorted start
          AlmostJavadoc.class,
          AlreadyChecked.class,
          AmbiguousMethodReference.class,
          AnnotateFormatMethod.class,
          ArgumentSelectionDefectChecker.class,
          ArrayAsKeyOfSetOrMap.class,
          AssertEqualsArgumentOrderChecker.class,
          AssertThrowsMultipleStatements.class,
          AssertionFailureIgnored.class,
          AssistedInjectAndInjectOnSameConstructor.class,
          AutoValueFinalMethods.class,
          AutoValueImmutableFields.class,
          AutoValueSubclassLeaked.class,
          BadComparable.class,
          BadImport.class,
          BadInstanceof.class,
          BareDotMetacharacter.class,
          BigDecimalEquals.class,
          BigDecimalLiteralDouble.class,
          BoxedPrimitiveConstructor.class,
          BugPatternNaming.class,
          ByteBufferBackingArray.class,
          CacheLoaderNull.class,
          CannotMockFinalClass.class,
          CanonicalDuration.class,
          CatchAndPrintStackTrace.class,
          CatchFail.class,
          ChainedAssertionLosesContext.class,
          CharacterGetNumericValue.class,
          ClassCanBeStatic.class,
          ClassNewInstance.class,
          CloseableProvides.class,
          CollectionUndefinedEquality.class,
          CollectorShouldNotUseState.class,
          ComparableAndComparator.class,
          CompareToZero.class,
          ComplexBooleanConstant.class,
          DateChecker.class,
          DateFormatConstant.class,
          DefaultCharset.class,
          DefaultPackage.class,
          DeprecatedVariable.class,
          DistinctVarargsChecker.class,
          DoNotCallSuggester.class,
          DoNotClaimAnnotations.class,
          DoNotMockAutoValue.class,
          DoubleCheckedLocking.class,
          EmptyBlockTag.class,
          EmptyCatch.class,
          EmptySetMultibindingContributions.class,
          EqualsGetClass.class,
          EqualsIncompatibleType.class,
          EqualsUnsafeCast.class,
          EqualsUsingHashCode.class,
          ErroneousThreadPoolConstructorChecker.class,
          EscapedEntity.class,
          ExtendingJUnitAssert.class,
          FallThrough.class,
          Finally.class,
          FloatCast.class,
          FloatingPointAssertionWithinEpsilon.class,
          FloatingPointLiteralPrecision.class,
          FloggerArgumentToString.class,
          FloggerStringConcatenation.class,
          FragmentInjection.class,
          FragmentNotInstantiable.class,
          FutureReturnValueIgnored.class,
          GetClassOnEnum.class,
          HidingField.class,
          IdentityHashMapUsage.class,
          IgnoredPureGetter.class,
          ImmutableAnnotationChecker.class,
          ImmutableEnumChecker.class,
          InconsistentCapitalization.class,
          InconsistentHashCode.class,
          IncorrectMainMethod.class,
          IncrementInForLoopAndHeader.class,
          InheritDoc.class,
          InjectOnConstructorOfAbstractClass.class,
          InjectedConstructorAnnotations.class,
          InlineFormatString.class,
          Inliner.class,
          InputStreamSlowMultibyteRead.class,
          InstanceOfAndCastMatchWrongType.class,
          IntLongMath.class,
          InvalidBlockTag.class,
          InvalidInlineTag.class,
          InvalidLink.class,
          InvalidParam.class,
          InvalidTargetingOnScopingAnnotation.class,
          InvalidThrows.class,
          InvalidThrowsLink.class,
          IterableAndIterator.class,
          JUnit3FloatingPointComparisonWithoutDelta.class,
          JUnit4ClassUsedInJUnit3.class,
          JUnitAmbiguousTestClass.class,
          JavaDurationGetSecondsGetNano.class,
          JavaDurationWithNanos.class,
          JavaDurationWithSeconds.class,
          JavaInstantGetSecondsGetNano.class,
          JavaLangClash.class,
          JavaLocalDateTimeGetNano.class,
          JavaLocalTimeGetNano.class,
          JavaPeriodGetDays.class,
          JavaTimeDefaultTimeZone.class,
          JavaUtilDateChecker.class,
          JavaxInjectOnFinalField.class,
          JdkObsolete.class,
          JodaConstructors.class,
          JodaDateTimeConstants.class,
          JodaDurationWithMillis.class,
          JodaInstantWithMillis.class,
          JodaNewPeriod.class,
          JodaPlusMinusLong.class,
          JodaTimeConverterManager.class,
          JodaWithDurationAddedLong.class,
          LiteEnumValueOf.class,
          LiteProtoToString.class,
          LockNotBeforeTry.class,
          LogicalAssignment.class,
          LongDoubleConversion.class,
          LongFloatConversion.class,
          LoopOverCharArray.class,
          MalformedInlineTag.class,
          MathAbsoluteRandom.class,
          MemoizeConstantVisitorStateLookups.class,
          MissingCasesInEnumSwitch.class,
          MissingFail.class,
          MissingImplementsComparable.class,
          MissingOverride.class,
          MissingSummary.class,
          MixedMutabilityReturnType.class,
          ModifiedButNotUsed.class,
          ModifyCollectionInEnhancedForLoop.class,
          ModifySourceCollectionInStream.class,
          MultipleParallelOrSequentialCalls.class,
          MultipleUnaryOperatorsInMethodCall.class,
          MutablePublicArray.class,
          NarrowingCompoundAssignment.class,
          NegativeCharLiteral.class,
          NestedInstanceOfConditions.class,
          NonAtomicVolatileUpdate.class,
          NonCanonicalType.class,
          NonOverridingEquals.class,
          NullOptional.class,
          NullableConstructor.class,
          NullablePrimitive.class,
          NullablePrimitiveArray.class,
          NullableVoid.class,
          ObjectEqualsForPrimitives.class,
          ObjectToString.class,
          ObjectsHashCodePrimitive.class,
          OperatorPrecedence.class,
          OptionalMapToOptional.class,
          OptionalNotPresent.class,
          OrphanedFormatString.class,
          OutlineNone.class,
          OverrideThrowableToString.class,
          Overrides.class,
          OverridesGuiceInjectableMethod.class,
          ParameterName.class,
          PreconditionsCheckNotNullRepeated.class,
          PrimitiveAtomicReference.class,
          ProtectedMembersInFinalClass.class,
          ProtoDurationGetSecondsGetNano.class,
          ProtoRedundantSet.class,
          ProtoTimestampGetSecondsGetNano.class,
          QualifierOrScopeOnInjectMethod.class,
          ReachabilityFenceUsage.class,
          ReferenceEquality.class,
          RethrowReflectiveOperationExceptionAsLinkageError.class,
          ReturnFromVoid.class,
          RobolectricShadowDirectlyOn.class,
          RxReturnValueIgnored.class,
          SameNameButDifferent.class,
          ScopeAnnotationOnInterfaceOrAbstractClass.class,
          ShortCircuitBoolean.class,
          StaticAssignmentInConstructor.class,
          StaticAssignmentOfThrowable.class,
          StaticGuardedByInstance.class,
          StaticMockMember.class,
          StreamResourceLeak.class,
          StreamToIterable.class,
          StringSplitter.class,
          Suggester.class,
          SwigMemoryLeak.class,
          SynchronizeOnNonFinalField.class,
          ThreadJoinLoop.class,
          ThreadLocalUsage.class,
          ThreadPriorityCheck.class,
          ThreeLetterTimeZoneID.class,
          TimeUnitConversionChecker.class,
          ToStringReturnsNull.class,
          TruthAssertExpected.class,
          TruthConstantAsserts.class,
          TruthGetOrDefault.class,
          TruthIncompatibleType.class,
          TypeEqualsChecker.class,
          TypeNameShadowing.class,
          TypeParameterShadowing.class,
          TypeParameterUnusedInFormals.class,
          URLEqualsHashCode.class,
          UndefinedEquals.class,
          UnescapedEntity.class,
          UnicodeEscape.class,
          UnnecessaryAssignment.class,
          UnnecessaryLambda.class,
          UnnecessaryLongToIntConversion.class,
          UnnecessaryMethodInvocationMatcher.class,
          UnnecessaryMethodReference.class,
          UnnecessaryParentheses.class,
          UnrecognisedJavadocTag.class,
          UnsafeFinalization.class,
          UnsafeReflectiveConstructionCast.class,
          UnsynchronizedOverridesSynchronized.class,
          UnusedMethod.class,
          UnusedNestedClass.class,
          UnusedVariable.class,
          UseBinds.class,
          UseCorrectAssertInTests.class,
          VariableNameSameAsType.class,
          WaitNotInLoop.class,
          WakelockReleasedDangerously.class,
          WithSignatureDiscouraged.class
          // keep-sorted end
          );
  /**
   * All checks that are off by default; users opt in per check (e.g. via {@code -Xep:} flags).
   *
   * <p>Entries are kept alphabetically sorted between the {@code keep-sorted} markers below, which
   * are enforced by tooling — insert new checks in sorted position rather than appending.
   */
  public static final ImmutableSet<BugCheckerInfo> DISABLED_CHECKS =
      getSuppliers(
          // keep-sorted start
          AndroidJdkLibsChecker.class,
          AnnotationMirrorToString.class,
          AnnotationPosition.class,
          AnnotationValueToString.class,
          AssertFalse.class,
          AssistedInjectAndInjectOnConstructors.class,
          AutoFactoryAtInject.class,
          BanSerializableRead.class,
          BinderIdentityRestoredDangerously.class, // TODO: enable this by default.
          BindingToUnqualifiedCommonType.class,
          BooleanParameter.class,
          CatchingUnchecked.class,
          CheckedExceptionNotThrown.class,
          ClassName.class,
          ClassNamedLikeTypeParameter.class,
          ComparisonContractViolated.class,
          ConstantField.class,
          ConstantPatternCompile.class,
          DeduplicateConstants.class,
          DepAnn.class,
          DifferentNameButSame.class,
          DivZero.class,
          EmptyIfStatement.class,
          EmptyTopLevelDeclaration.class,
          EqualsBrokenForNull.class,
          EqualsMissingNullable.class,
          ExpectedExceptionChecker.class,
          ExtendsAutoValue.class,
          FieldCanBeFinal.class,
          FieldCanBeLocal.class,
          FieldCanBeStatic.class,
          FieldMissingNullable.class,
          FloggerLogWithCause.class,
          FloggerMessageFormat.class,
          FloggerPassedAround.class,
          FloggerRedundantIsEnabled.class,
          FloggerRequiredModifiers.class,
          FloggerWithCause.class,
          FloggerWithoutCause.class,
          ForEachIterable.class,
          FunctionalInterfaceClash.class,
          FuzzyEqualsShouldNotBeUsedInEqualsMethod.class,
          HardCodedSdCardPath.class,
          HashCodeToString.class,
          ImmutableMemberCollection.class,
          ImmutableRefactoring.class,
          ImmutableSetForContains.class,
          ImplementAssertionWithChaining.class,
          InconsistentOverloads.class,
          InitializeInline.class,
          InsecureCipherMode.class,
          InterfaceWithOnlyStatics.class,
          InterruptedExceptionSwallowed.class,
          IterablePathParameter.class,
          JMockTestWithoutRunWithOrRuleAnnotation.class,
          Java7ApiChecker.class,
          Java8ApiChecker.class,
          LambdaFunctionalInterface.class,
          LockMethodChecker.class,
          LongLiteralLowerCaseSuffix.class,
          MemberName.class,
          MethodCanBeStatic.class,
          MissingBraces.class,
          MissingDefault.class,
          MixedArrayDimensions.class,
          MoreThanOneQualifier.class,
          MultiVariableDeclaration.class,
          MultipleTopLevelClasses.class,
          NoAllocationChecker.class,
          NoFunctionalReturnType.class,
          NonCanonicalStaticMemberImport.class,
          PackageLocation.class,
          ParameterComment.class,
          ParameterMissingNullable.class,
          PreferJavaTimeOverload.class,
          PreferredInterfaceType.class,
          PrimitiveArrayPassedToVarargsMethod.class,
          PrivateConstructorForNoninstantiableModule.class,
          PrivateConstructorForUtilityClass.class,
          ProtosAsKeyOfSetOrMap.class,
          PublicApiNamedStreamShouldReturnStream.class,
          PublicConstructorForAbstractClass.class,
          QualifierWithTypeUse.class,
          RedundantOverride.class,
          RedundantThrows.class,
          RefersToDaggerCodegen.class,
          RemoveUnusedImports.class,
          ReturnMissingNullable.class,
          ReturnsNullCollection.class,
          ScopeOnModule.class,
          ScopeOrQualifierAnnotationRetention.class,
          StaticOrDefaultInterfaceMethod.class,
          StaticQualifiedUsingExpression.class,
          StringEquality.class,
          StronglyTypeByteString.class,
          StronglyTypeTime.class,
          SuppressWarningsWithoutExplanation.class,
          SwitchDefault.class,
          SymbolToString.class,
          SystemExitOutsideMain.class,
          SystemOut.class,
          TestExceptionChecker.class,
          ThrowSpecificExceptions.class,
          ThrowsUncheckedException.class,
          TimeUnitMismatch.class,
          TooManyParameters.class,
          TransientMisuse.class,
          TryFailRefactoring.class,
          TryWithResourcesVariable.class,
          TypeParameterNaming.class,
          TypeToString.class,
          UngroupedOverloads.class,
          UnlockMethodChecker.class,
          UnnecessarilyFullyQualified.class,
          UnnecessarilyVisible.class,
          UnnecessaryAnonymousClass.class,
          UnnecessaryBoxedAssignment.class,
          UnnecessaryBoxedVariable.class,
          UnnecessaryDefaultInEnumSwitch.class,
          UnnecessaryFinal.class,
          UnnecessaryOptionalGet.class,
          UnnecessarySetDefault.class,
          UnnecessaryStaticImport.class,
          UnsafeLocaleUsage.class,
          UnusedException.class,
          UrlInSee.class,
          UseEnumSwitch.class,
          UsingJsr305CheckReturnValue.class,
          VarChecker.class,
          VoidMissingNullable.class,
          WildcardImport.class
          // keep-sorted end
          );
  /** Static holder of check suppliers; may not be instantiated. */
  private BuiltInCheckerSuppliers() {}
}
| |
package cherry.common.db.gen.dto;
import cherry.foundation.type.DeletedFlag;
import java.util.ArrayList;
import java.util.List;
import org.joda.time.LocalDateTime;
/**
 * Query-criteria ("example") class generated by MyBatis Generator for database table
 * ASYNC_PROCESS_FILE_RESULT_DETAIL.
 *
 * <p>Holds a list of {@link Criteria} groups that the generated mapper combines with OR;
 * the conditions inside one group are combined with AND.
 *
 * <p>NOTE(review): generated code — prefer regenerating over hand-editing so that the
 * {@code @mbggenerated} merge markers stay effective.
 */
public class AsyncProcessFileResultDetailCriteria {
    /**
     * SQL ORDER BY clause appended verbatim to the generated query; may be null.
     *
     * @mbggenerated
     */
    protected String orderByClause;

    /**
     * Whether the generated SELECT uses DISTINCT.
     *
     * @mbggenerated
     */
    protected boolean distinct;

    /**
     * Criteria groups that the generated mapper ORs together.
     *
     * @mbggenerated
     */
    protected List<Criteria> oredCriteria;

    /**
     * Creates an empty criteria object with no conditions.
     *
     * @mbggenerated
     */
    public AsyncProcessFileResultDetailCriteria() {
        oredCriteria = new ArrayList<Criteria>();
    }

    /**
     * Sets the raw ORDER BY clause for the generated query.
     *
     * @mbggenerated
     */
    public void setOrderByClause(String orderByClause) {
        this.orderByClause = orderByClause;
    }

    /**
     * Returns the raw ORDER BY clause, or null when unset.
     *
     * @mbggenerated
     */
    public String getOrderByClause() {
        return orderByClause;
    }

    /**
     * Enables or disables SELECT DISTINCT.
     *
     * @mbggenerated
     */
    public void setDistinct(boolean distinct) {
        this.distinct = distinct;
    }

    /**
     * Returns whether SELECT DISTINCT is enabled.
     *
     * @mbggenerated
     */
    public boolean isDistinct() {
        return distinct;
    }

    /**
     * Returns the live list of OR-ed criteria groups (not a copy).
     *
     * @mbggenerated
     */
    public List<Criteria> getOredCriteria() {
        return oredCriteria;
    }

    /**
     * Adds an existing criteria group, OR-ed with previously added groups.
     *
     * @mbggenerated
     */
    public void or(Criteria criteria) {
        oredCriteria.add(criteria);
    }

    /**
     * Creates, registers, and returns a new criteria group OR-ed with existing groups.
     *
     * @mbggenerated
     */
    public Criteria or() {
        Criteria criteria = createCriteriaInternal();
        oredCriteria.add(criteria);
        return criteria;
    }

    /**
     * Creates a criteria group; registers it only when no group exists yet (unlike {@link #or()},
     * which always registers the new group).
     *
     * @mbggenerated
     */
    public Criteria createCriteria() {
        Criteria criteria = createCriteriaInternal();
        if (oredCriteria.size() == 0) {
            oredCriteria.add(criteria);
        }
        return criteria;
    }

    /**
     * Factory for a new, unregistered criteria group.
     *
     * @mbggenerated
     */
    protected Criteria createCriteriaInternal() {
        Criteria criteria = new Criteria();
        return criteria;
    }

    /**
     * Resets this object to its freshly-constructed state.
     *
     * @mbggenerated
     */
    public void clear() {
        oredCriteria.clear();
        orderByClause = null;
        distinct = false;
    }

    /**
     * Base class holding the AND-ed conditions of one OR group. The fluent andXxx methods return
     * {@code (Criteria) this} so calls can be chained.
     *
     * @mbggenerated
     */
    protected abstract static class GeneratedCriteria {
        // Conditions of this group; all entries are ANDed together.
        protected List<Criterion> criteria;

        protected GeneratedCriteria() {
            super();
            criteria = new ArrayList<Criterion>();
        }

        /** Returns true when at least one condition has been added. */
        public boolean isValid() {
            return criteria.size() > 0;
        }

        public List<Criterion> getAllCriteria() {
            return criteria;
        }

        public List<Criterion> getCriteria() {
            return criteria;
        }

        // Adds a condition that carries no value (e.g. "X is null").
        protected void addCriterion(String condition) {
            if (condition == null) {
                throw new RuntimeException("Value for condition cannot be null");
            }
            criteria.add(new Criterion(condition));
        }

        // Adds a single-value condition; 'property' appears only in the error message.
        // A List value produces an IN-style criterion (see Criterion constructor).
        protected void addCriterion(String condition, Object value, String property) {
            if (value == null) {
                throw new RuntimeException("Value for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value));
        }

        // Adds a two-value (BETWEEN-style) condition; 'property' appears only in the error message.
        protected void addCriterion(String condition, Object value1, Object value2, String property) {
            if (value1 == null || value2 == null) {
                throw new RuntimeException("Between values for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value1, value2));
        }

        // ----- ID column -----
        public Criteria andIdIsNull() {
            addCriterion("ID is null");
            return (Criteria) this;
        }

        public Criteria andIdIsNotNull() {
            addCriterion("ID is not null");
            return (Criteria) this;
        }

        public Criteria andIdEqualTo(Long value) {
            addCriterion("ID =", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdNotEqualTo(Long value) {
            addCriterion("ID <>", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdGreaterThan(Long value) {
            addCriterion("ID >", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdGreaterThanOrEqualTo(Long value) {
            addCriterion("ID >=", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdLessThan(Long value) {
            addCriterion("ID <", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdLessThanOrEqualTo(Long value) {
            addCriterion("ID <=", value, "id");
            return (Criteria) this;
        }

        public Criteria andIdIn(List<Long> values) {
            addCriterion("ID in", values, "id");
            return (Criteria) this;
        }

        public Criteria andIdNotIn(List<Long> values) {
            addCriterion("ID not in", values, "id");
            return (Criteria) this;
        }

        public Criteria andIdBetween(Long value1, Long value2) {
            addCriterion("ID between", value1, value2, "id");
            return (Criteria) this;
        }

        public Criteria andIdNotBetween(Long value1, Long value2) {
            addCriterion("ID not between", value1, value2, "id");
            return (Criteria) this;
        }

        // ----- ASYNC_ID column -----
        public Criteria andAsyncIdIsNull() {
            addCriterion("ASYNC_ID is null");
            return (Criteria) this;
        }

        public Criteria andAsyncIdIsNotNull() {
            addCriterion("ASYNC_ID is not null");
            return (Criteria) this;
        }

        public Criteria andAsyncIdEqualTo(Long value) {
            addCriterion("ASYNC_ID =", value, "asyncId");
            return (Criteria) this;
        }

        public Criteria andAsyncIdNotEqualTo(Long value) {
            addCriterion("ASYNC_ID <>", value, "asyncId");
            return (Criteria) this;
        }

        public Criteria andAsyncIdGreaterThan(Long value) {
            addCriterion("ASYNC_ID >", value, "asyncId");
            return (Criteria) this;
        }

        public Criteria andAsyncIdGreaterThanOrEqualTo(Long value) {
            addCriterion("ASYNC_ID >=", value, "asyncId");
            return (Criteria) this;
        }

        public Criteria andAsyncIdLessThan(Long value) {
            addCriterion("ASYNC_ID <", value, "asyncId");
            return (Criteria) this;
        }

        public Criteria andAsyncIdLessThanOrEqualTo(Long value) {
            addCriterion("ASYNC_ID <=", value, "asyncId");
            return (Criteria) this;
        }

        public Criteria andAsyncIdIn(List<Long> values) {
            addCriterion("ASYNC_ID in", values, "asyncId");
            return (Criteria) this;
        }

        public Criteria andAsyncIdNotIn(List<Long> values) {
            addCriterion("ASYNC_ID not in", values, "asyncId");
            return (Criteria) this;
        }

        public Criteria andAsyncIdBetween(Long value1, Long value2) {
            addCriterion("ASYNC_ID between", value1, value2, "asyncId");
            return (Criteria) this;
        }

        public Criteria andAsyncIdNotBetween(Long value1, Long value2) {
            addCriterion("ASYNC_ID not between", value1, value2, "asyncId");
            return (Criteria) this;
        }

        // ----- RECORD_NUMBER column -----
        public Criteria andRecordNumberIsNull() {
            addCriterion("RECORD_NUMBER is null");
            return (Criteria) this;
        }

        public Criteria andRecordNumberIsNotNull() {
            addCriterion("RECORD_NUMBER is not null");
            return (Criteria) this;
        }

        public Criteria andRecordNumberEqualTo(Long value) {
            addCriterion("RECORD_NUMBER =", value, "recordNumber");
            return (Criteria) this;
        }

        public Criteria andRecordNumberNotEqualTo(Long value) {
            addCriterion("RECORD_NUMBER <>", value, "recordNumber");
            return (Criteria) this;
        }

        public Criteria andRecordNumberGreaterThan(Long value) {
            addCriterion("RECORD_NUMBER >", value, "recordNumber");
            return (Criteria) this;
        }

        public Criteria andRecordNumberGreaterThanOrEqualTo(Long value) {
            addCriterion("RECORD_NUMBER >=", value, "recordNumber");
            return (Criteria) this;
        }

        public Criteria andRecordNumberLessThan(Long value) {
            addCriterion("RECORD_NUMBER <", value, "recordNumber");
            return (Criteria) this;
        }

        public Criteria andRecordNumberLessThanOrEqualTo(Long value) {
            addCriterion("RECORD_NUMBER <=", value, "recordNumber");
            return (Criteria) this;
        }

        public Criteria andRecordNumberIn(List<Long> values) {
            addCriterion("RECORD_NUMBER in", values, "recordNumber");
            return (Criteria) this;
        }

        public Criteria andRecordNumberNotIn(List<Long> values) {
            addCriterion("RECORD_NUMBER not in", values, "recordNumber");
            return (Criteria) this;
        }

        public Criteria andRecordNumberBetween(Long value1, Long value2) {
            addCriterion("RECORD_NUMBER between", value1, value2, "recordNumber");
            return (Criteria) this;
        }

        public Criteria andRecordNumberNotBetween(Long value1, Long value2) {
            addCriterion("RECORD_NUMBER not between", value1, value2, "recordNumber");
            return (Criteria) this;
        }

        // ----- DESCRIPTION column -----
        public Criteria andDescriptionIsNull() {
            addCriterion("DESCRIPTION is null");
            return (Criteria) this;
        }

        public Criteria andDescriptionIsNotNull() {
            addCriterion("DESCRIPTION is not null");
            return (Criteria) this;
        }

        public Criteria andDescriptionEqualTo(String value) {
            addCriterion("DESCRIPTION =", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionNotEqualTo(String value) {
            addCriterion("DESCRIPTION <>", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionGreaterThan(String value) {
            addCriterion("DESCRIPTION >", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionGreaterThanOrEqualTo(String value) {
            addCriterion("DESCRIPTION >=", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionLessThan(String value) {
            addCriterion("DESCRIPTION <", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionLessThanOrEqualTo(String value) {
            addCriterion("DESCRIPTION <=", value, "description");
            return (Criteria) this;
        }

        // Caller supplies any SQL wildcards ('%', '_') in the value.
        public Criteria andDescriptionLike(String value) {
            addCriterion("DESCRIPTION like", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionNotLike(String value) {
            addCriterion("DESCRIPTION not like", value, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionIn(List<String> values) {
            addCriterion("DESCRIPTION in", values, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionNotIn(List<String> values) {
            addCriterion("DESCRIPTION not in", values, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionBetween(String value1, String value2) {
            addCriterion("DESCRIPTION between", value1, value2, "description");
            return (Criteria) this;
        }

        public Criteria andDescriptionNotBetween(String value1, String value2) {
            addCriterion("DESCRIPTION not between", value1, value2, "description");
            return (Criteria) this;
        }

        // ----- UPDATED_AT column -----
        public Criteria andUpdatedAtIsNull() {
            addCriterion("UPDATED_AT is null");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtIsNotNull() {
            addCriterion("UPDATED_AT is not null");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtEqualTo(LocalDateTime value) {
            addCriterion("UPDATED_AT =", value, "updatedAt");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtNotEqualTo(LocalDateTime value) {
            addCriterion("UPDATED_AT <>", value, "updatedAt");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtGreaterThan(LocalDateTime value) {
            addCriterion("UPDATED_AT >", value, "updatedAt");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtGreaterThanOrEqualTo(LocalDateTime value) {
            addCriterion("UPDATED_AT >=", value, "updatedAt");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtLessThan(LocalDateTime value) {
            addCriterion("UPDATED_AT <", value, "updatedAt");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtLessThanOrEqualTo(LocalDateTime value) {
            addCriterion("UPDATED_AT <=", value, "updatedAt");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtIn(List<LocalDateTime> values) {
            addCriterion("UPDATED_AT in", values, "updatedAt");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtNotIn(List<LocalDateTime> values) {
            addCriterion("UPDATED_AT not in", values, "updatedAt");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtBetween(LocalDateTime value1, LocalDateTime value2) {
            addCriterion("UPDATED_AT between", value1, value2, "updatedAt");
            return (Criteria) this;
        }

        public Criteria andUpdatedAtNotBetween(LocalDateTime value1, LocalDateTime value2) {
            addCriterion("UPDATED_AT not between", value1, value2, "updatedAt");
            return (Criteria) this;
        }

        // ----- CREATED_AT column -----
        public Criteria andCreatedAtIsNull() {
            addCriterion("CREATED_AT is null");
            return (Criteria) this;
        }

        public Criteria andCreatedAtIsNotNull() {
            addCriterion("CREATED_AT is not null");
            return (Criteria) this;
        }

        public Criteria andCreatedAtEqualTo(LocalDateTime value) {
            addCriterion("CREATED_AT =", value, "createdAt");
            return (Criteria) this;
        }

        public Criteria andCreatedAtNotEqualTo(LocalDateTime value) {
            addCriterion("CREATED_AT <>", value, "createdAt");
            return (Criteria) this;
        }

        public Criteria andCreatedAtGreaterThan(LocalDateTime value) {
            addCriterion("CREATED_AT >", value, "createdAt");
            return (Criteria) this;
        }

        public Criteria andCreatedAtGreaterThanOrEqualTo(LocalDateTime value) {
            addCriterion("CREATED_AT >=", value, "createdAt");
            return (Criteria) this;
        }

        public Criteria andCreatedAtLessThan(LocalDateTime value) {
            addCriterion("CREATED_AT <", value, "createdAt");
            return (Criteria) this;
        }

        public Criteria andCreatedAtLessThanOrEqualTo(LocalDateTime value) {
            addCriterion("CREATED_AT <=", value, "createdAt");
            return (Criteria) this;
        }

        public Criteria andCreatedAtIn(List<LocalDateTime> values) {
            addCriterion("CREATED_AT in", values, "createdAt");
            return (Criteria) this;
        }

        public Criteria andCreatedAtNotIn(List<LocalDateTime> values) {
            addCriterion("CREATED_AT not in", values, "createdAt");
            return (Criteria) this;
        }

        public Criteria andCreatedAtBetween(LocalDateTime value1, LocalDateTime value2) {
            addCriterion("CREATED_AT between", value1, value2, "createdAt");
            return (Criteria) this;
        }

        public Criteria andCreatedAtNotBetween(LocalDateTime value1, LocalDateTime value2) {
            addCriterion("CREATED_AT not between", value1, value2, "createdAt");
            return (Criteria) this;
        }

        // ----- LOCK_VERSION column -----
        public Criteria andLockVersionIsNull() {
            addCriterion("LOCK_VERSION is null");
            return (Criteria) this;
        }

        public Criteria andLockVersionIsNotNull() {
            addCriterion("LOCK_VERSION is not null");
            return (Criteria) this;
        }

        public Criteria andLockVersionEqualTo(Integer value) {
            addCriterion("LOCK_VERSION =", value, "lockVersion");
            return (Criteria) this;
        }

        public Criteria andLockVersionNotEqualTo(Integer value) {
            addCriterion("LOCK_VERSION <>", value, "lockVersion");
            return (Criteria) this;
        }

        public Criteria andLockVersionGreaterThan(Integer value) {
            addCriterion("LOCK_VERSION >", value, "lockVersion");
            return (Criteria) this;
        }

        public Criteria andLockVersionGreaterThanOrEqualTo(Integer value) {
            addCriterion("LOCK_VERSION >=", value, "lockVersion");
            return (Criteria) this;
        }

        public Criteria andLockVersionLessThan(Integer value) {
            addCriterion("LOCK_VERSION <", value, "lockVersion");
            return (Criteria) this;
        }

        public Criteria andLockVersionLessThanOrEqualTo(Integer value) {
            addCriterion("LOCK_VERSION <=", value, "lockVersion");
            return (Criteria) this;
        }

        public Criteria andLockVersionIn(List<Integer> values) {
            addCriterion("LOCK_VERSION in", values, "lockVersion");
            return (Criteria) this;
        }

        public Criteria andLockVersionNotIn(List<Integer> values) {
            addCriterion("LOCK_VERSION not in", values, "lockVersion");
            return (Criteria) this;
        }

        public Criteria andLockVersionBetween(Integer value1, Integer value2) {
            addCriterion("LOCK_VERSION between", value1, value2, "lockVersion");
            return (Criteria) this;
        }

        public Criteria andLockVersionNotBetween(Integer value1, Integer value2) {
            addCriterion("LOCK_VERSION not between", value1, value2, "lockVersion");
            return (Criteria) this;
        }

        // ----- DELETED_FLG column -----
        public Criteria andDeletedFlgIsNull() {
            addCriterion("DELETED_FLG is null");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgIsNotNull() {
            addCriterion("DELETED_FLG is not null");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgEqualTo(DeletedFlag value) {
            addCriterion("DELETED_FLG =", value, "deletedFlg");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgNotEqualTo(DeletedFlag value) {
            addCriterion("DELETED_FLG <>", value, "deletedFlg");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgGreaterThan(DeletedFlag value) {
            addCriterion("DELETED_FLG >", value, "deletedFlg");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgGreaterThanOrEqualTo(DeletedFlag value) {
            addCriterion("DELETED_FLG >=", value, "deletedFlg");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgLessThan(DeletedFlag value) {
            addCriterion("DELETED_FLG <", value, "deletedFlg");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgLessThanOrEqualTo(DeletedFlag value) {
            addCriterion("DELETED_FLG <=", value, "deletedFlg");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgIn(List<DeletedFlag> values) {
            addCriterion("DELETED_FLG in", values, "deletedFlg");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgNotIn(List<DeletedFlag> values) {
            addCriterion("DELETED_FLG not in", values, "deletedFlg");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgBetween(DeletedFlag value1, DeletedFlag value2) {
            addCriterion("DELETED_FLG between", value1, value2, "deletedFlg");
            return (Criteria) this;
        }

        public Criteria andDeletedFlgNotBetween(DeletedFlag value1, DeletedFlag value2) {
            addCriterion("DELETED_FLG not between", value1, value2, "deletedFlg");
            return (Criteria) this;
        }
    }

    /**
     * Concrete criteria group. Kept as a separate subclass of {@link GeneratedCriteria} so that
     * user-added methods survive regeneration.
     *
     * @mbggenerated do_not_delete_during_merge
     */
    public static class Criteria extends GeneratedCriteria {

        protected Criteria() {
            super();
        }
    }

    /**
     * A single SQL condition: the condition text plus zero, one, or two bound values.
     * Exactly one of the noValue/singleValue/betweenValue/listValue flags is set.
     *
     * @mbggenerated
     */
    public static class Criterion {
        private String condition;

        private Object value;

        private Object secondValue;

        private boolean noValue;

        private boolean singleValue;

        private boolean betweenValue;

        private boolean listValue;

        // Optional MyBatis type handler name; null for default handling.
        private String typeHandler;

        public String getCondition() {
            return condition;
        }

        public Object getValue() {
            return value;
        }

        public Object getSecondValue() {
            return secondValue;
        }

        public boolean isNoValue() {
            return noValue;
        }

        public boolean isSingleValue() {
            return singleValue;
        }

        public boolean isBetweenValue() {
            return betweenValue;
        }

        public boolean isListValue() {
            return listValue;
        }

        public String getTypeHandler() {
            return typeHandler;
        }

        // Condition with no bound value, e.g. "X is null".
        protected Criterion(String condition) {
            super();
            this.condition = condition;
            this.typeHandler = null;
            this.noValue = true;
        }

        // Single-value condition; a List value marks an IN-style criterion instead.
        protected Criterion(String condition, Object value, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.typeHandler = typeHandler;
            if (value instanceof List<?>) {
                this.listValue = true;
            } else {
                this.singleValue = true;
            }
        }

        protected Criterion(String condition, Object value) {
            this(condition, value, null);
        }

        // Two-value (BETWEEN-style) condition.
        protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.secondValue = secondValue;
            this.typeHandler = typeHandler;
            this.betweenValue = true;
        }

        protected Criterion(String condition, Object value, Object secondValue) {
            this(condition, value, secondValue, null);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CoordinatedStateManager;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.MiniHBaseCluster.MiniHBaseClusterRegionServer;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.UnknownRegionException;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coordination.ZkCoordinatedStateManager;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
import org.apache.zookeeper.KeeperException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* This tests AssignmentManager with a testing cluster.
*/
@SuppressWarnings("deprecation")
@Category({MasterTests.class, MediumTests.class})
public class TestAssignmentManagerOnCluster {
  // Column family used by every table created in these tests.
  private final static byte[] FAMILY = Bytes.toBytes("FAMILY");
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  // Shared cluster configuration; tuned in setUpBeforeClass before startup.
  final static Configuration conf = TEST_UTIL.getConfiguration();
  // Admin handle for the mini cluster; initialized in setUpBeforeClass.
  private static HBaseAdmin admin;
  /**
   * Starts a mini cluster (1 master, 4 region servers) wired with the test's
   * custom load balancer, region observer, master and region server classes.
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    // Use our own load balancer so tests can control region plans
    conf.setClass(HConstants.HBASE_MASTER_LOADBALANCER_CLASS,
      MyLoadBalancer.class, LoadBalancer.class);
    conf.setClass(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
      MyRegionObserver.class, RegionObserver.class);
    // Reduce the maximum attempts to speed up the test
    conf.setInt("hbase.assignment.maximum.attempts", 3);
    conf.setInt("hbase.master.maximum.ping.server.attempts", 3);
    conf.setInt("hbase.master.ping.server.retry.sleep.interval", 1);
    TEST_UTIL.startMiniCluster(1, 4, null, MyMaster.class, MyRegionServer.class);
    admin = TEST_UTIL.getHBaseAdmin();
  }
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    // Tear down the mini cluster started in setUpBeforeClass.
    TEST_UTIL.shutdownMiniCluster();
  }
  /**
   * This tests restarting the meta regionserver: meta is moved off the master
   * if needed, its hosting server is killed, and meta must come back online
   * (and be re-registered in zk) on a different server.
   */
  @Test (timeout=180000)
  public void testRestartMetaRegionServer() throws Exception {
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    boolean stoppedARegionServer = false;
    try {
      HMaster master = cluster.getMaster();
      RegionStates regionStates = master.getAssignmentManager().getRegionStates();
      ServerName metaServerName = regionStates.getRegionServerOfRegion(
        HRegionInfo.FIRST_META_REGIONINFO);
      if (master.getServerName().equals(metaServerName)) {
        // Move meta off master
        metaServerName = cluster.getLiveRegionServerThreads()
          .get(0).getRegionServer().getServerName();
        master.move(HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes(),
          Bytes.toBytes(metaServerName.getServerName()));
        TEST_UTIL.waitUntilNoRegionsInTransition(60000);
      }
      RegionState metaState =
        MetaTableLocator.getMetaRegionState(master.getZooKeeper());
      assertEquals("Meta should be not in transition",
          metaState.getState(), RegionState.State.OPEN);
      assertNotEquals("Meta should be moved off master",
        metaState.getServerName(), master.getServerName());
      assertEquals("Meta should be on the meta server",
        metaState.getServerName(), metaServerName);
      cluster.killRegionServer(metaServerName);
      stoppedARegionServer = true;
      cluster.waitForRegionServerToStop(metaServerName, 60000);
      // Wait for SSH (server shutdown handling) to finish for the dead server
      final ServerName oldServerName = metaServerName;
      final ServerManager serverManager = master.getServerManager();
      TEST_UTIL.waitFor(120000, 200, new Waiter.Predicate<Exception>() {
        @Override
        public boolean evaluate() throws Exception {
          return serverManager.isServerDead(oldServerName)
            && !serverManager.areDeadServersInProgress();
        }
      });
      TEST_UTIL.waitUntilNoRegionsInTransition(60000);
      // Now, make sure meta is assigned
      assertTrue("Meta should be assigned",
        regionStates.isRegionOnline(HRegionInfo.FIRST_META_REGIONINFO));
      // Now, make sure meta is registered in zk
      metaState = MetaTableLocator.getMetaRegionState(master.getZooKeeper());
      assertEquals("Meta should be not in transition",
          metaState.getState(), RegionState.State.OPEN);
      assertEquals("Meta should be assigned", metaState.getServerName(),
        regionStates.getRegionServerOfRegion(HRegionInfo.FIRST_META_REGIONINFO));
      assertNotEquals("Meta should be assigned on a different server",
        metaState.getServerName(), metaServerName);
    } finally {
      if (stoppedARegionServer) {
        // Restore cluster capacity for subsequent tests
        cluster.startRegionServer();
      }
    }
  }
  /**
   * This tests region assignment: a region inserted directly into meta (not
   * via table creation) is assigned, then force-assigned a second time; the
   * master must not abort and the region must stay opened.
   */
  @Test (timeout=60000)
  public void testAssignRegion() throws Exception {
    TableName table = TableName.valueOf("testAssignRegion");
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      // Register an extra region for this table directly in meta
      HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
      master.assignRegion(hri);
      AssignmentManager am = master.getAssignmentManager();
      am.waitForAssignment(hri);
      RegionStates regionStates = am.getRegionStates();
      ServerName serverName = regionStates.getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnServer(hri, serverName, 200);
      // Region is assigned now. Let's assign it again.
      // Master should not abort, and region should be assigned.
      TEST_UTIL.getHBaseAdmin().assign(hri.getRegionName());
      master.getAssignmentManager().waitForAssignment(hri);
      RegionState newState = regionStates.getRegionState(hri);
      assertTrue(newState.isOpened());
    } finally {
      TEST_UTIL.deleteTable(table);
    }
  }
  /**
   * This tests region assignment on a simulated restarted server: a fake
   * "dead" server (same host/port, older start code) is recorded online, a
   * region plan targets it, and assignment must still eventually succeed.
   */
  @Test (timeout=120000)
  public void testAssignRegionOnRestartedServer() throws Exception {
    TableName table = TableName.valueOf("testAssignRegionOnRestartedServer");
    TEST_UTIL.getMiniHBaseCluster().getConf().setInt("hbase.assignment.maximum.attempts", 20);
    TEST_UTIL.getMiniHBaseCluster().stopMaster(0);
    // restart the master so that the new conf takes effect
    TEST_UTIL.getMiniHBaseCluster().startMaster();
    ServerName deadServer = null;
    HMaster master = null;
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      final HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      master = TEST_UTIL.getHBaseCluster().getMaster();
      Set<ServerName> onlineServers = master.serverManager.getOnlineServers().keySet();
      assertFalse("There should be some servers online", onlineServers.isEmpty());
      // Use the first server as the destination server
      ServerName destServer = onlineServers.iterator().next();
      // Create a faked dead server that is still online in master:
      // same host/port as destServer but an older start code.
      deadServer = ServerName.valueOf(destServer.getHostname(),
          destServer.getPort(), destServer.getStartcode() - 100L);
      master.serverManager.recordNewServerWithLock(deadServer, ServerLoad.EMPTY_SERVERLOAD);
      final AssignmentManager am = master.getAssignmentManager();
      // Plan the assignment onto the fake dead server
      RegionPlan plan = new RegionPlan(hri, null, deadServer);
      am.addPlan(hri.getEncodedName(), plan);
      master.assignRegion(hri);
      TEST_UTIL.waitFor(60000, new Waiter.Predicate<Exception>() {
        @Override
        public boolean evaluate() throws Exception {
          return ! am.getRegionStates().isRegionInTransition(hri);
        }
      });
      assertFalse("Region should be assigned", am.getRegionStates().isRegionInTransition(hri));
    } finally {
      if (deadServer != null) {
        master.serverManager.expireServer(deadServer);
      }
      TEST_UTIL.deleteTable(table);
      // reset the value for other tests
      TEST_UTIL.getMiniHBaseCluster().getConf().setInt("hbase.assignment.maximum.attempts", 3);
      ServerName masterServerName = TEST_UTIL.getMiniHBaseCluster().getMaster().getServerName();
      TEST_UTIL.getMiniHBaseCluster().stopMaster(masterServerName);
      TEST_UTIL.getMiniHBaseCluster().startMaster();
    }
  }
  /**
   * This tests offlining a region: after admin.offline() the region must
   * reach the OFFLINE state within the polling deadline.
   */
  @Test (timeout=60000)
  public void testOfflineRegion() throws Exception {
    TableName table =
        TableName.valueOf("testOfflineRegion");
    try {
      HRegionInfo hri = createTableAndGetOneRegion(table);
      RegionStates regionStates = TEST_UTIL.getHBaseCluster().
        getMaster().getAssignmentManager().getRegionStates();
      ServerName serverName = regionStates.getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnServer(hri, serverName, 200);
      admin.offline(hri.getRegionName());
      // Poll (up to 800 ms) for the region to show up in the OFFLINE bucket
      long timeoutTime = System.currentTimeMillis() + 800;
      while (true) {
        if (regionStates.getRegionByStateOfTable(table)
            .get(RegionState.State.OFFLINE).contains(hri))
          break;
        long now = System.currentTimeMillis();
        if (now > timeoutTime) {
          fail("Failed to offline the region in time");
          break;
        }
        Thread.sleep(10);
      }
      RegionState regionState = regionStates.getRegionState(hri);
      assertTrue(regionState.isOffline());
    } finally {
      TEST_UTIL.deleteTable(table);
    }
  }
  /**
   * This tests moving a region: pick a live destination server different from
   * the current host, issue a move, and wait for the region to land there.
   */
  @Test (timeout=50000)
  public void testMoveRegion() throws Exception {
    TableName table =
        TableName.valueOf("testMoveRegion");
    try {
      HRegionInfo hri = createTableAndGetOneRegion(table);
      HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
      RegionStates regionStates = master.getAssignmentManager().getRegionStates();
      ServerName serverName = regionStates.getRegionServerOfRegion(hri);
      ServerManager serverManager = master.getServerManager();
      // Find an online server that is not the region's current host
      ServerName destServerName = null;
      List<JVMClusterUtil.RegionServerThread> regionServers =
        TEST_UTIL.getHBaseCluster().getLiveRegionServerThreads();
      for (JVMClusterUtil.RegionServerThread regionServer: regionServers) {
        HRegionServer destServer = regionServer.getRegionServer();
        destServerName = destServer.getServerName();
        if (!destServerName.equals(serverName)
            && serverManager.isServerOnline(destServerName)) {
          break;
        }
      }
      assertTrue(destServerName != null
        && !destServerName.equals(serverName));
      TEST_UTIL.getHBaseAdmin().move(hri.getEncodedNameAsBytes(),
        Bytes.toBytes(destServerName.getServerName()));
      // Wait (up to 30 s) for the region to be hosted on the destination
      long timeoutTime = System.currentTimeMillis() + 30000;
      while (true) {
        ServerName sn = regionStates.getRegionServerOfRegion(hri);
        if (sn != null && sn.equals(destServerName)) {
          TEST_UTIL.assertRegionOnServer(hri, sn, 200);
          break;
        }
        long now = System.currentTimeMillis();
        if (now > timeoutTime) {
          fail("Failed to move the region in time: "
            + regionStates.getRegionState(hri));
        }
        regionStates.waitForUpdate(50);
      }
    } finally {
      TEST_UTIL.deleteTable(table);
    }
  }
/**
* If a table is deleted, we should not be able to move it anymore.
* Otherwise, the region will be brought back.
* @throws Exception
*/
@Test (timeout=50000)
public void testMoveRegionOfDeletedTable() throws Exception {
TableName table =
TableName.valueOf("testMoveRegionOfDeletedTable");
Admin admin = TEST_UTIL.getHBaseAdmin();
try {
HRegionInfo hri = createTableAndGetOneRegion(table);
HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
AssignmentManager am = master.getAssignmentManager();
RegionStates regionStates = am.getRegionStates();
ServerName serverName = regionStates.getRegionServerOfRegion(hri);
ServerName destServerName = null;
for (int i = 0; i < 3; i++) {
HRegionServer destServer = TEST_UTIL.getHBaseCluster().getRegionServer(i);
if (!destServer.getServerName().equals(serverName)) {
destServerName = destServer.getServerName();
break;
}
}
assertTrue(destServerName != null
&& !destServerName.equals(serverName));
TEST_UTIL.deleteTable(table);
try {
admin.move(hri.getEncodedNameAsBytes(),
Bytes.toBytes(destServerName.getServerName()));
fail("We should not find the region");
} catch (IOException ioe) {
assertTrue(ioe instanceof UnknownRegionException);
}
am.balance(new RegionPlan(hri, serverName, destServerName));
assertFalse("The region should not be in transition",
regionStates.isRegionInTransition(hri));
} finally {
if (admin.tableExists(table)) {
TEST_UTIL.deleteTable(table);
}
}
}
HRegionInfo createTableAndGetOneRegion(
final TableName tableName) throws IOException, InterruptedException {
HTableDescriptor desc = new HTableDescriptor(tableName);
desc.addFamily(new HColumnDescriptor(FAMILY));
admin.createTable(desc, Bytes.toBytes("A"), Bytes.toBytes("Z"), 5);
// wait till the table is assigned
HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
long timeoutTime = System.currentTimeMillis() + 1000;
while (true) {
List<HRegionInfo> regions = master.getAssignmentManager().
getRegionStates().getRegionsOfTable(tableName);
if (regions.size() > 3) {
return regions.get(2);
}
long now = System.currentTimeMillis();
if (now > timeoutTime) {
fail("Could not find an online region");
}
Thread.sleep(10);
}
}
  /**
   * This tests assigning a region while it's closing: with close blocked by
   * the observer the region lands in FAILED_CLOSE; once unblocked, a forced
   * assign must result in exactly one open copy (no double-assignment).
   */
  @Test (timeout=60000)
  public void testAssignWhileClosing() throws Exception {
    TableName table = TableName.valueOf("testAssignWhileClosing");
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
      master.assignRegion(hri);
      AssignmentManager am = master.getAssignmentManager();
      assertTrue(am.waitForAssignment(hri));
      ServerName sn = am.getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnServer(hri, sn, 6000);
      // Make preClose throw so the unassign fails
      MyRegionObserver.preCloseEnabled.set(true);
      am.unassign(hri);
      RegionState state = am.getRegionStates().getRegionState(hri);
      assertEquals(RegionState.State.FAILED_CLOSE, state.getState());
      MyRegionObserver.preCloseEnabled.set(false);
      am.unassign(hri);
      // region is closing now, will be re-assigned automatically.
      // now, let's forcefully assign it again. it should be
      // assigned properly and no double-assignment
      am.assign(hri, true);
      // let's check if it's assigned after it's out of transition
      am.waitOnRegionToClearRegionsInTransition(hri);
      assertTrue(am.waitForAssignment(hri));
      ServerName serverName = master.getAssignmentManager().
        getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnlyOnServer(hri, serverName, 200);
    } finally {
      // Always reset the observer flag so later tests are unaffected
      MyRegionObserver.preCloseEnabled.set(false);
      TEST_UTIL.deleteTable(table);
    }
  }
  /**
   * This tests region close failure: close is blocked via the observer
   * (FAILED_CLOSE), then unblocked; the region must be closed and re-assigned.
   */
  @Test (timeout=60000)
  public void testCloseFailed() throws Exception {
    TableName table = TableName.valueOf("testCloseFailed");
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
      master.assignRegion(hri);
      AssignmentManager am = master.getAssignmentManager();
      assertTrue(am.waitForAssignment(hri));
      ServerName sn = am.getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnServer(hri, sn, 6000);
      // Make preClose throw so the unassign fails
      MyRegionObserver.preCloseEnabled.set(true);
      am.unassign(hri);
      RegionState state = am.getRegionStates().getRegionState(hri);
      assertEquals(RegionState.State.FAILED_CLOSE, state.getState());
      MyRegionObserver.preCloseEnabled.set(false);
      am.unassign(hri);
      // region may still be assigned now since it's closing,
      // let's check if it's assigned after it's out of transition
      am.waitOnRegionToClearRegionsInTransition(hri);
      // region should be closed and re-assigned
      assertTrue(am.waitForAssignment(hri));
      ServerName serverName = master.getAssignmentManager().
        getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnServer(hri, serverName, 200);
    } finally {
      // Always reset the observer flag so later tests are unaffected
      MyRegionObserver.preCloseEnabled.set(false);
      TEST_UTIL.deleteTable(table);
    }
  }
  /**
   * This tests region open failure: with the load balancer returning no plan
   * for the region, assignment must end in FAILED_OPEN with no server; once
   * the balancer is restored, assignment must succeed.
   */
  @Test (timeout=60000)
  public void testOpenFailed() throws Exception {
    TableName table = TableName.valueOf("testOpenFailed");
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      // Tell MyLoadBalancer to refuse to produce a plan for this region
      MyLoadBalancer.controledRegion = hri.getEncodedName();
      HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
      master.assignRegion(hri);
      AssignmentManager am = master.getAssignmentManager();
      assertFalse(am.waitForAssignment(hri));
      RegionState state = am.getRegionStates().getRegionState(hri);
      assertEquals(RegionState.State.FAILED_OPEN, state.getState());
      // Failed to open since no plan, so it's on no server
      assertNull(state.getServerName());
      MyLoadBalancer.controledRegion = null;
      master.assignRegion(hri);
      assertTrue(am.waitForAssignment(hri));
      ServerName serverName = master.getAssignmentManager().
        getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnServer(hri, serverName, 200);
    } finally {
      // Always reset the balancer override so later tests are unaffected
      MyLoadBalancer.controledRegion = null;
      TEST_UTIL.deleteTable(table);
    }
  }
  /**
   * This tests a region open failure which is not recoverable: a file is
   * created where the region directory should be, so opening fails. The
   * region state must still carry the opening server (see HBASE-9092) so the
   * region can be force-closed before being opened again.
   */
  @Test (timeout=60000)
  public void testOpenFailedUnrecoverable() throws Exception {
    TableName table =
        TableName.valueOf("testOpenFailedUnrecoverable");
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      FileSystem fs = FileSystem.get(conf);
      Path tableDir= FSUtils.getTableDir(FSUtils.getRootDir(conf), table);
      Path regionDir = new Path(tableDir, hri.getEncodedName());
      // create a file named the same as the region dir to
      // mess up with region opening
      fs.create(regionDir, true);
      HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
      master.assignRegion(hri);
      AssignmentManager am = master.getAssignmentManager();
      assertFalse(am.waitForAssignment(hri));
      RegionState state = am.getRegionStates().getRegionState(hri);
      assertEquals(RegionState.State.FAILED_OPEN, state.getState());
      // Failed to open due to file system issue. Region state should
      // carry the opening region server so that we can force close it
      // later on before opening it again. See HBASE-9092.
      assertNotNull(state.getServerName());
      // remove the blocking file, so that region can be opened
      fs.delete(regionDir, true);
      master.assignRegion(hri);
      assertTrue(am.waitForAssignment(hri));
      ServerName serverName = master.getAssignmentManager().
        getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnServer(hri, serverName, 200);
    } finally {
      TEST_UTIL.deleteTable(table);
    }
  }
  /**
   * Tests that server shutdown handling ignores regions of a DISABLING table
   * that are in PENDING_OPEN on the dead server: they must not be re-assigned
   * and must end up offline.
   */
  @Test (timeout=60000)
  public void testSSHWhenDisablingTableRegionsInOpeningOrPendingOpenState() throws Exception {
    final TableName table =
        TableName.valueOf
        ("testSSHWhenDisablingTableRegionsInOpeningOrPendingOpenState");
    AssignmentManager am = TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager();
    HRegionInfo hri = null;
    ServerName serverName = null;
    try {
      hri = createTableAndGetOneRegion(table);
      serverName = am.getRegionStates().getRegionServerOfRegion(hri);
      // Find a destination server different from the current host
      ServerName destServerName = null;
      HRegionServer destServer = null;
      for (int i = 0; i < 3; i++) {
        destServer = TEST_UTIL.getHBaseCluster().getRegionServer(i);
        if (!destServer.getServerName().equals(serverName)) {
          destServerName = destServer.getServerName();
          break;
        }
      }
      // Simulate the region being PENDING_OPEN on the destination server
      // while its table is DISABLING
      am.regionOffline(hri);
      am.getRegionStates().updateRegionState(hri, RegionState.State.PENDING_OPEN, destServerName);
      am.getTableStateManager().setTableState(table, TableState.State.DISABLING);
      List<HRegionInfo> toAssignRegions = am.cleanOutCrashedServerReferences(destServerName);
      assertTrue("Regions to be assigned should be empty.", toAssignRegions.isEmpty());
      assertTrue("Regions to be assigned should be empty.", am.getRegionStates()
          .getRegionState(hri).isOffline());
    } finally {
      // Restore region/table state so cleanup can proceed
      if (hri != null && serverName != null) {
        am.regionOnline(hri, serverName);
      }
      am.getTableStateManager().setTableState(table, TableState.State.ENABLED);
      TEST_UTIL.getHBaseAdmin().disableTable(table);
      TEST_UTIL.deleteTable(table);
    }
  }
  /**
   * This tests region close hanging: postClose is blocked by the observer so
   * closing stalls; once released, the region must be closed and re-assigned
   * automatically.
   */
  @Test (timeout=60000)
  public void testCloseHang() throws Exception {
    TableName table = TableName.valueOf("testCloseHang");
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
      master.assignRegion(hri);
      AssignmentManager am = master.getAssignmentManager();
      assertTrue(am.waitForAssignment(hri));
      ServerName sn = am.getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnServer(hri, sn, 6000);
      // Block postClose so the close hangs
      MyRegionObserver.postCloseEnabled.set(true);
      am.unassign(hri);
      // Let region closing move ahead. The region should be closed
      // properly and re-assigned automatically
      MyRegionObserver.postCloseEnabled.set(false);
      // region may still be assigned now since it's closing,
      // let's check if it's assigned after it's out of transition
      am.waitOnRegionToClearRegionsInTransition(hri);
      // region should be closed and re-assigned
      assertTrue(am.waitForAssignment(hri));
      ServerName serverName = master.getAssignmentManager().
        getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnServer(hri, serverName, 200);
    } finally {
      // Always reset the observer flag so later tests are unaffected
      MyRegionObserver.postCloseEnabled.set(false);
      TEST_UTIL.deleteTable(table);
    }
  }
  /**
   * This tests region close racing with open: the region is stuck in OPENING
   * (postOpen blocked), unassign is a no-op, then a forced re-assign to a
   * different server must complete with exactly one open copy.
   */
  @Test (timeout=60000)
  public void testOpenCloseRacing() throws Exception {
    TableName table = TableName.valueOf("testOpenCloseRacing");
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      meta.close();
      // Block postOpen so the open never completes
      MyRegionObserver.postOpenEnabled.set(true);
      MyRegionObserver.postOpenCalled = false;
      HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
      // Region will be opened, but it won't complete
      master.assignRegion(hri);
      long end = EnvironmentEdgeManager.currentTime() + 20000;
      // Wait till postOpen is called
      while (!MyRegionObserver.postOpenCalled ) {
        assertFalse("Timed out waiting for postOpen to be called",
          EnvironmentEdgeManager.currentTime() > end);
        Thread.sleep(300);
      }
      AssignmentManager am = master.getAssignmentManager();
      // Now let's unassign it, it should do nothing
      am.unassign(hri);
      RegionState state = am.getRegionStates().getRegionState(hri);
      ServerName oldServerName = state.getServerName();
      assertTrue(state.isOpening() && oldServerName != null);
      // Now the region is stuck in opening
      // Let's forcefully re-assign it to trigger closing/opening
      // racing. This test is to make sure this scenario
      // is handled properly.
      MyRegionObserver.postOpenEnabled.set(false);
      ServerName destServerName = null;
      int numRS = TEST_UTIL.getHBaseCluster().getLiveRegionServerThreads().size();
      for (int i = 0; i < numRS; i++) {
        HRegionServer destServer = TEST_UTIL.getHBaseCluster().getRegionServer(i);
        if (!destServer.getServerName().equals(oldServerName)) {
          destServerName = destServer.getServerName();
          break;
        }
      }
      assertNotNull(destServerName);
      assertFalse("Region should be assigned on a new region server",
        oldServerName.equals(destServerName));
      List<HRegionInfo> regions = new ArrayList<HRegionInfo>();
      regions.add(hri);
      am.assign(destServerName, regions);
      // let's check if it's assigned after it's out of transition
      am.waitOnRegionToClearRegionsInTransition(hri);
      assertTrue(am.waitForAssignment(hri));
      ServerName serverName = master.getAssignmentManager().
        getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnlyOnServer(hri, serverName, 6000);
    } finally {
      // Always reset the observer flag so later tests are unaffected
      MyRegionObserver.postOpenEnabled.set(false);
      TEST_UTIL.deleteTable(table);
    }
  }
  /**
   * Test force unassign/assign a region hosted on a dead server: with SSH
   * (server shutdown handling) held, both operations must leave the region in
   * FAILED_CLOSE; once SSH is enabled, the region must be re-assigned by SSH.
   */
  @Test (timeout=60000)
  public void testAssignRacingWithSSH() throws Exception {
    TableName table = TableName.valueOf("testAssignRacingWithSSH");
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    MyMaster master = null;
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      // Assign the region
      master = (MyMaster)cluster.getMaster();
      master.assignRegion(hri);
      // Hold SSH before killing the hosting server
      master.enableSSH(false);
      AssignmentManager am = master.getAssignmentManager();
      RegionStates regionStates = am.getRegionStates();
      ServerName metaServer = regionStates.getRegionServerOfRegion(
        HRegionInfo.FIRST_META_REGIONINFO);
      // Keep moving the region until it lands on a server without meta,
      // then kill that server
      while (true) {
        assertTrue(am.waitForAssignment(hri));
        RegionState state = regionStates.getRegionState(hri);
        ServerName oldServerName = state.getServerName();
        if (!ServerName.isSameHostnameAndPort(oldServerName, metaServer)) {
          // Kill the hosting server, which doesn't have meta on it.
          cluster.killRegionServer(oldServerName);
          cluster.waitForRegionServerToStop(oldServerName, -1);
          break;
        }
        int i = cluster.getServerWithMeta();
        HRegionServer rs = cluster.getRegionServer(i == 0 ? 1 : 0);
        oldServerName = rs.getServerName();
        master.move(hri.getEncodedNameAsBytes(),
          Bytes.toBytes(oldServerName.getServerName()));
      }
      // You can't assign a dead region before SSH
      am.assign(hri, true);
      RegionState state = regionStates.getRegionState(hri);
      assertTrue(state.isFailedClose());
      // You can't unassign a dead region before SSH either
      am.unassign(hri);
      state = regionStates.getRegionState(hri);
      assertTrue(state.isFailedClose());
      // Enable SSH so that log can be split
      master.enableSSH(true);
      // let's check if it's assigned after it's out of transition.
      // no need to assign it manually, SSH should do it
      am.waitOnRegionToClearRegionsInTransition(hri);
      assertTrue(am.waitForAssignment(hri));
      ServerName serverName = master.getAssignmentManager().
        getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnlyOnServer(hri, serverName, 6000);
    } finally {
      // Always re-enable SSH and restore cluster capacity
      if (master != null) {
        master.enableSSH(true);
      }
      TEST_UTIL.deleteTable(table);
      cluster.startRegionServer();
    }
  }
  /**
   * Test SSH waiting for an extra region server for assignment: while the
   * balancer reports no usable servers, SSH keeps retrying; once a new server
   * starts, the region must be re-assigned to a different server.
   */
  @Test (timeout=300000)
  public void testSSHWaitForServerToAssignRegion() throws Exception {
    TableName table = TableName.valueOf("testSSHWaitForServerToAssignRegion");
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    boolean startAServer = false;
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      HMaster master = cluster.getMaster();
      final ServerManager serverManager = master.getServerManager();
      // Tell MyLoadBalancer the current server count so it can starve SSH
      MyLoadBalancer.countRegionServers = Integer.valueOf(
        serverManager.countOfRegionServers());
      HRegionServer rs = TEST_UTIL.getRSForFirstRegionInTable(table);
      assertNotNull("First region should be assigned", rs);
      final ServerName serverName = rs.getServerName();
      // Wait till SSH tried to assign regions a several times
      int counter = MyLoadBalancer.counter.get() + 5;
      cluster.killRegionServer(serverName);
      startAServer = true;
      cluster.waitForRegionServerToStop(serverName, -1);
      while (counter > MyLoadBalancer.counter.get()) {
        Thread.sleep(1000);
      }
      cluster.startRegionServer();
      startAServer = false;
      // Wait till the dead server is processed by SSH
      TEST_UTIL.waitFor(120000, 1000, new Waiter.Predicate<Exception>() {
        @Override
        public boolean evaluate() throws Exception {
          return serverManager.isServerDead(serverName)
            && !serverManager.areDeadServersInProgress();
        }
      });
      TEST_UTIL.waitUntilNoRegionsInTransition(300000);
      rs = TEST_UTIL.getRSForFirstRegionInTable(table);
      assertTrue("First region should be re-assigned to a different server",
        rs != null && !serverName.equals(rs.getServerName()));
    } finally {
      // Reset the balancer override and restore cluster capacity if needed
      MyLoadBalancer.countRegionServers = null;
      TEST_UTIL.deleteTable(table);
      if (startAServer) {
        cluster.startRegionServer();
      }
    }
  }
  /**
   * Test force unassign/assign a region of a disabled table: both operations
   * must be no-ops and the region must stay offline.
   */
  @Test (timeout=60000)
  public void testAssignDisabledRegion() throws Exception {
    TableName table = TableName.valueOf("testAssignDisabledRegion");
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    MyMaster master = null;
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      // Assign the region
      master = (MyMaster)cluster.getMaster();
      master.assignRegion(hri);
      AssignmentManager am = master.getAssignmentManager();
      RegionStates regionStates = am.getRegionStates();
      assertTrue(am.waitForAssignment(hri));
      // Disable the table
      admin.disableTable(table);
      assertTrue(regionStates.isRegionOffline(hri));
      // You can't assign a disabled region
      am.assign(hri, true);
      assertTrue(regionStates.isRegionOffline(hri));
      // You can't unassign a disabled region either
      am.unassign(hri);
      assertTrue(regionStates.isRegionOffline(hri));
    } finally {
      TEST_UTIL.deleteTable(table);
    }
  }
  /**
   * Test that an offlined region is assigned by SSH: the hosting server is
   * marked aborted (then killed); once SSH finishes, the region must be back
   * online with exactly one open copy.
   */
  @Test (timeout=60000)
  public void testAssignOfflinedRegionBySSH() throws Exception {
    TableName table = TableName.valueOf("testAssignOfflinedRegionBySSH");
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    MyMaster master = null;
    try {
      HTableDescriptor desc = new HTableDescriptor(table);
      desc.addFamily(new HColumnDescriptor(FAMILY));
      admin.createTable(desc);
      Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
      HRegionInfo hri = new HRegionInfo(
        desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
      MetaTableAccessor.addRegionToMeta(meta, hri);
      // Assign the region
      master = (MyMaster)cluster.getMaster();
      master.assignRegion(hri);
      AssignmentManager am = master.getAssignmentManager();
      RegionStates regionStates = am.getRegionStates();
      ServerName metaServer = regionStates.getRegionServerOfRegion(
        HRegionInfo.FIRST_META_REGIONINFO);
      ServerName oldServerName = null;
      // Keep moving the region until it lands on a server without meta
      while (true) {
        assertTrue(am.waitForAssignment(hri));
        RegionState state = regionStates.getRegionState(hri);
        oldServerName = state.getServerName();
        if (!ServerName.isSameHostnameAndPort(oldServerName, metaServer)) {
          // Mark the hosting server aborted, but don't actually kill it.
          // It doesn't have meta on it.
          MyRegionServer.abortedServer = oldServerName;
          break;
        }
        int i = cluster.getServerWithMeta();
        HRegionServer rs = cluster.getRegionServer(i == 0 ? 1 : 0);
        oldServerName = rs.getServerName();
        master.move(hri.getEncodedNameAsBytes(),
          Bytes.toBytes(oldServerName.getServerName()));
      }
      // Make sure the region is assigned on the dead server
      assertTrue(regionStates.isRegionOnline(hri));
      assertEquals(oldServerName, regionStates.getRegionServerOfRegion(hri));
      // Kill the hosting server, which doesn't have meta on it.
      cluster.killRegionServer(oldServerName);
      cluster.waitForRegionServerToStop(oldServerName, -1);
      ServerManager serverManager = master.getServerManager();
      // Wait for SSH to finish processing the dead server
      while (!serverManager.isServerDead(oldServerName)
          || serverManager.getDeadServers().areDeadServersInProgress()) {
        Thread.sleep(100);
      }
      // Let's check if it's assigned after it's out of transition.
      // no need to assign it manually, SSH should do it
      am.waitOnRegionToClearRegionsInTransition(hri);
      assertTrue(am.waitForAssignment(hri));
      ServerName serverName = master.getAssignmentManager().
        getRegionStates().getRegionServerOfRegion(hri);
      TEST_UTIL.assertRegionOnlyOnServer(hri, serverName, 200);
    } finally {
      // Reset the aborted-server marker and restore cluster capacity
      MyRegionServer.abortedServer = null;
      TEST_UTIL.deleteTable(table);
      cluster.startRegionServer();
    }
  }
/**
 * Test disabled region is ignored by SSH.
 *
 * <p>Same setup as {@code testAssignOfflinedRegionBySSH}, but the table is
 * disabled before the hosting server is killed: after dead-server processing
 * finishes and regions-in-transition clear, the region must remain offline
 * instead of being re-assigned.
 */
@Test (timeout=60000)
public void testAssignDisabledRegionBySSH() throws Exception {
  TableName table = TableName.valueOf("testAssignDisabledRegionBySSH");
  MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
  MyMaster master;
  try {
    HTableDescriptor desc = new HTableDescriptor(table);
    desc.addFamily(new HColumnDescriptor(FAMILY));
    admin.createTable(desc);
    Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
    // Insert an extra region row [A, Z) for the table directly into meta
    HRegionInfo hri = new HRegionInfo(
      desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
    MetaTableAccessor.addRegionToMeta(meta, hri);
    // Assign the region
    master = (MyMaster)cluster.getMaster();
    master.assignRegion(hri);
    AssignmentManager am = master.getAssignmentManager();
    RegionStates regionStates = am.getRegionStates();
    ServerName metaServer = regionStates.getRegionServerOfRegion(
      HRegionInfo.FIRST_META_REGIONINFO);
    ServerName oldServerName = null;
    // Keep moving the region until it lands on a server other than the one
    // hosting meta, then mark that host as aborted without killing it yet.
    while (true) {
      assertTrue(am.waitForAssignment(hri));
      RegionState state = regionStates.getRegionState(hri);
      oldServerName = state.getServerName();
      if (!ServerName.isSameHostnameAndPort(oldServerName, metaServer)) {
        // Mark the hosting server aborted, but don't actually kill it.
        // It doesn't have meta on it.
        MyRegionServer.abortedServer = oldServerName;
        break;
      }
      int i = cluster.getServerWithMeta();
      HRegionServer rs = cluster.getRegionServer(i == 0 ? 1 : 0);
      oldServerName = rs.getServerName();
      master.move(hri.getEncodedNameAsBytes(),
        Bytes.toBytes(oldServerName.getServerName()));
    }
    // Make sure the region is assigned on the dead server
    assertTrue(regionStates.isRegionOnline(hri));
    assertEquals(oldServerName, regionStates.getRegionServerOfRegion(hri));
    // Disable the table now.
    master.disableTable(hri.getTable(), HConstants.NO_NONCE, HConstants.NO_NONCE);
    // Kill the hosting server, which doesn't have meta on it.
    cluster.killRegionServer(oldServerName);
    cluster.waitForRegionServerToStop(oldServerName, -1);
    ServerManager serverManager = master.getServerManager();
    // Wait until the master both sees the server as dead and has finished
    // processing all dead servers.
    while (!serverManager.isServerDead(oldServerName)
      || serverManager.getDeadServers().areDeadServersInProgress()) {
      Thread.sleep(100);
    }
    // Wait till no more RIT, the region should be offline.
    TEST_UTIL.waitUntilNoRegionsInTransition(60000);
    assertTrue(regionStates.isRegionOffline(hri));
  } finally {
    // Clear the fake-abort flag and restore the cluster size for later tests
    MyRegionServer.abortedServer = null;
    TEST_UTIL.deleteTable(table);
    cluster.startRegionServer();
  }
}
/**
 * Test that region state transition call is idempotent.
 *
 * <p>{@code MyRegionServer.simulateRetry} makes every
 * {@code reportRegionStateTransition} call run twice; disabling the table
 * must still take the region offline cleanly despite the duplicate reports.
 */
@Test(timeout = 60000)
public void testReportRegionStateTransition() throws Exception {
  TableName table = TableName.valueOf("testReportRegionStateTransition");
  try {
    // Every state-transition report will be sent twice from here on
    MyRegionServer.simulateRetry = true;
    HTableDescriptor desc = new HTableDescriptor(table);
    desc.addFamily(new HColumnDescriptor(FAMILY));
    admin.createTable(desc);
    Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
    // Insert an extra region row [A, Z) for the table directly into meta
    HRegionInfo hri =
        new HRegionInfo(desc.getTableName(), Bytes.toBytes("A"), Bytes.toBytes("Z"));
    MetaTableAccessor.addRegionToMeta(meta, hri);
    HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
    master.assignRegion(hri);
    AssignmentManager am = master.getAssignmentManager();
    am.waitForAssignment(hri);
    RegionStates regionStates = am.getRegionStates();
    ServerName serverName = regionStates.getRegionServerOfRegion(hri);
    // Assert that the region is actually open on the server
    TEST_UTIL.assertRegionOnServer(hri, serverName, 200);
    // Closing region should just work fine
    admin.disableTable(table);
    assertTrue(regionStates.isRegionOffline(hri));
    List<HRegionInfo> regions = TEST_UTIL.getHBaseAdmin().getOnlineRegions(serverName);
    assertTrue(!regions.contains(hri));
  } finally {
    MyRegionServer.simulateRetry = false;
    TEST_UTIL.deleteTable(table);
  }
}
/**
 * Test concurrent updates to meta when meta is not on master.
 *
 * <p>Starts a {@code RegionStateStore} with a small meta connection pool,
 * then runs 10 threads that each write 10 region state updates for distinct
 * regions, and finally verifies all 100 region rows made it into meta.
 *
 * @throws Exception
 */
@Test(timeout = 30000)
public void testUpdatesRemoteMeta() throws Exception {
  // Limit the meta connection pool so concurrent updates contend for it
  conf.setInt("hbase.regionstatestore.meta.connection", 3);
  final RegionStateStore rss =
      new RegionStateStore(new MyRegionServer(conf, new ZkCoordinatedStateManager()));
  rss.start();
  // Create 10 threads and make each do 10 puts related to region state update
  Thread[] th = new Thread[10];
  List<String> nameList = new ArrayList<String>();
  List<TableName> tableNameList = new ArrayList<TableName>();
  for (int i = 0; i < th.length; i++) {
    th[i] = new Thread() {
      @Override
      public void run() {
        HRegionInfo[] hri = new HRegionInfo[10];
        ServerName serverName = ServerName.valueOf("dummyhost", 1000, 1234);
        // NOTE: this loop's i shadows the outer loop variable
        for (int i = 0; i < 10; i++) {
          // One region per (thread name, index) pair, so every update
          // targets a distinct meta row
          hri[i] = new HRegionInfo(TableName.valueOf(Thread.currentThread().getName() + "_" + i));
          RegionState newState = new RegionState(hri[i], RegionState.State.OPEN, serverName);
          RegionState oldState =
              new RegionState(hri[i], RegionState.State.PENDING_OPEN, serverName);
          rss.updateRegionState(1, newState, oldState);
        }
      }
    };
    th[i].start();
    nameList.add(th[i].getName());
  }
  for (int i = 0; i < th.length; i++) {
    th[i].join();
  }
  // Add all the expected table names in meta to tableNameList
  for (String name : nameList) {
    for (int i = 0; i < 10; i++) {
      tableNameList.add(TableName.valueOf(name + "_" + i));
    }
  }
  List<Result> metaRows = MetaTableAccessor.fullScanRegions(admin.getConnection());
  int count = 0;
  // Check all 100 rows are in meta
  for (Result result : metaRows) {
    if (tableNameList.contains(HRegionInfo.getTable(result.getRow()))) {
      count++;
      if (count == 100) {
        break;
      }
    }
  }
  assertTrue(count == 100);
  rss.stop();
}
static class MyLoadBalancer extends StochasticLoadBalancer {
  // When set, the region with this encoded name is deliberately assigned nowhere
  static volatile String controledRegion = null;
  // When set, round-robin assignment fails until this many region servers are up
  static volatile Integer countRegionServers = null;
  // Counts how many times round-robin assignment was deliberately failed
  static AtomicInteger counter = new AtomicInteger(0);

  /** Refuses to place the controlled region anywhere; otherwise delegates. */
  @Override
  public ServerName randomAssignment(HRegionInfo regionInfo,
      List<ServerName> servers) {
    if (regionInfo.getEncodedName().equals(controledRegion)) {
      return null;
    }
    return super.randomAssignment(regionInfo, servers);
  }

  /**
   * Fails assignments (returning null and bumping the counter) while fewer
   * region servers than {@code countRegionServers} are live; otherwise
   * delegates to the stochastic balancer.
   */
  @Override
  public Map<ServerName, List<HRegionInfo>> roundRobinAssignment(
      List<HRegionInfo> regions, List<ServerName> servers) {
    Integer required = countRegionServers;
    if (required != null && services != null) {
      int live = services.getServerManager().countOfRegionServers();
      if (live < required.intValue()) {
        // Not enough region servers joined yet: record the refusal and
        // fail the assignment so it gets retried later.
        counter.incrementAndGet();
        return null;
      }
    }
    return super.roundRobinAssignment(regions, servers);
  }
}
public static class MyMaster extends HMaster {
  // Gate for server crash processing; tests flip it via enableSSH()
  AtomicBoolean enabled = new AtomicBoolean(true);

  public MyMaster(Configuration conf, CoordinatedStateManager cp)
      throws IOException, KeeperException,
        InterruptedException {
    super(conf, cp);
  }

  /**
   * Reports crash processing as enabled only while the test flag is set
   * and the superclass also considers it enabled.
   */
  @Override
  public boolean isServerCrashProcessingEnabled() {
    if (!enabled.get()) {
      return false;
    }
    return super.isServerCrashProcessingEnabled();
  }

  /**
   * Turns server-crash handling on or off; re-enabling immediately drains
   * any dead servers that queued up while it was off.
   */
  public void enableSSH(boolean enabled) {
    this.enabled.set(enabled);
    if (!enabled) {
      return;
    }
    serverManager.processQueuedDeadServers();
  }
}
public static class MyRegionServer extends MiniHBaseClusterRegionServer {
  // Server marked here reports itself aborted without actually dying
  static volatile ServerName abortedServer = null;
  // When true, every state-transition report is sent twice (retry simulation)
  static volatile boolean simulateRetry = false;

  public MyRegionServer(Configuration conf, CoordinatedStateManager cp)
      throws IOException, KeeperException,
        InterruptedException {
    super(conf, cp);
  }

  /**
   * Reports the transition to the master; with {@code simulateRetry} set,
   * an extra identical report is sent first and only the second call's
   * result is returned, exactly as before.
   */
  @Override
  public boolean reportRegionStateTransition(TransitionCode code, long openSeqNum,
      HRegionInfo... hris) {
    if (simulateRetry) {
      // Duplicate the report to simulate a client retry
      super.reportRegionStateTransition(code, openSeqNum, hris);
    }
    return super.reportRegionStateTransition(code, openSeqNum, hris);
  }

  /** Pretends to be aborted when this server is the designated abortedServer. */
  @Override
  public boolean isAborted() {
    if (getServerName().equals(abortedServer)) {
      return true;
    }
    return super.isAborted();
  }
}
public static class MyRegionObserver extends BaseRegionObserver {
  // When true, every preClose call throws to block region close
  static AtomicBoolean preCloseEnabled = new AtomicBoolean(false);
  // When true, postClose calls block until the flag is cleared
  static AtomicBoolean postCloseEnabled = new AtomicBoolean(false);
  // When true, postOpen calls block until the flag is cleared
  static AtomicBoolean postOpenEnabled = new AtomicBoolean(false);
  // Records whether postOpen has ever run
  static volatile boolean postOpenCalled = false;

  /** Fails the close when the preClose flag is on. */
  @Override
  public void preClose(ObserverContext<RegionCoprocessorEnvironment> c,
      boolean abortRequested) throws IOException {
    if (preCloseEnabled.get()) {
      throw new IOException("fail preClose from coprocessor");
    }
  }

  /** Stalls region close while the postClose flag stays on. */
  @Override
  public void postClose(ObserverContext<RegionCoprocessorEnvironment> c,
      boolean abortRequested) {
    stallOnFlag(postCloseEnabled);
  }

  /** Records the open and stalls while the postOpen flag stays on. */
  @Override
  public void postOpen(ObserverContext<RegionCoprocessorEnvironment> c) {
    postOpenCalled = true;
    stallOnFlag(postOpenEnabled);
  }

  /**
   * Sleeps in one-second steps while the flag stays set; restores the
   * interrupt status if interrupted so callers can observe it.
   */
  private void stallOnFlag(final AtomicBoolean flag) {
    try {
      while (flag.get()) {
        Thread.sleep(1000);
      }
    } catch (InterruptedException ie) {
      Thread.currentThread().interrupt();
    }
  }
}
}
| |
/*
* Copyright (c) 2009-2014, ZawodyWeb Team
* All rights reserved.
*
* This file is distributable under the Simplified BSD license. See the terms
* of the Simplified BSD license in the documentation provided with this file.
*/
package pl.umk.mat.zawodyweb.www;
import java.sql.Timestamp;
import java.util.List;
import java.util.ResourceBundle;
import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.openid4java.discovery.Identifier;
import org.restfaces.annotation.HttpAction;
import org.restfaces.annotation.Instance;
import pl.umk.mat.zawodyweb.database.DAOFactory;
import pl.umk.mat.zawodyweb.database.UserLogDAO;
import pl.umk.mat.zawodyweb.database.UsersDAO;
import pl.umk.mat.zawodyweb.database.pojo.UserLog;
import pl.umk.mat.zawodyweb.database.pojo.Users;
import pl.umk.mat.zawodyweb.ldap.LdapConnector;
import pl.umk.mat.zawodyweb.olat.jdbc.Connector;
/**
*
* @author slawek
*/
@Instance("#{sessionBean}")
public class SessionBean {

    private static final Logger logger = Logger.getLogger(SessionBean.class);

    /** Password-column marker for accounts authenticated against OLAT. */
    private static final String OLAT_PASS = "OLAT";
    /** Password-column marker for accounts authenticated against LDAP. */
    private static final String LDAP_PASS = "LDAP";
    /** Password-column marker for accounts authenticated via OpenID. */
    private static final String OPENID_PASS = "OPENID";

    private final ResourceBundle messages = ResourceBundle.getBundle("pl.umk.mat.zawodyweb.www.Messages");
    private Users currentUser = new Users();
    private Integer currentContestId;
    private boolean loggedIn;
    private Boolean rememberMe;
    /*
     * Submission-list view state kept per session.
     */
    private boolean showOnlyMySubmissions = true;
    private int submissionsContestId = 0;
    private int submissionsPageIndex = 0;
    private int submissionsUserId = 0;
    private int submissionsProblemId = 0;
    private int submissionsSeriesId = 0;
    private long submissionsLastVisit = 0;
    private OpenIdConsumer openIdConsumer;

    /**
     * @return the currentUser
     */
    public Users getCurrentUser() {
        return currentUser;
    }

    public Integer getCurrentContestId() {
        return currentContestId;
    }

    public void setCurrentContestId(Integer id) {
        currentContestId = id;
    }

    /**
     * @return the isLoggedIn
     */
    public boolean isLoggedIn() {
        return loggedIn;
    }

    /**
     * Records a successful login (lower-cased login, timestamp and remote IP)
     * in the user log table.
     */
    private void logUser() {
        UserLog ul = new UserLog();
        UserLogDAO dao = DAOFactory.DEFAULT.buildUserLogDAO();
        ul.setUsername(currentUser.getLogin().toLowerCase());
        ul.setLogdate(new Timestamp(System.currentTimeMillis()));
        ul.setIp(((HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest()).getRemoteAddr());
        dao.save(ul);
    }

    /**
     * Redirect here when using OpenID functionality.
     *
     * <p>Verifies the OpenID provider's response; on success the matching
     * account is updated (or a fresh one created) with the data returned by
     * the provider and the session is marked as logged in.
     *
     * @return the next view outcome: "start" on success, "login" on failure
     */
    @HttpAction(name = "openid", pattern = "openid")
    public String openIdResponse() {
        FacesContext context = FacesContext.getCurrentInstance();
        Identifier identifier = openIdConsumer.verifyResponse((HttpServletRequest) context.getExternalContext().getRequest());
        if (identifier == null) {
            String summary = messages.getString("bad_login_data");
            WWWHelper.AddMessage(context, FacesMessage.SEVERITY_ERROR, "login", summary, null);
            return "login";
        } else {
            /*
             * Login using OpenID successful
             */
            UsersDAO dao = DAOFactory.DEFAULT.buildUsersDAO();
            List<Users> users = dao.findByLogin(openIdConsumer.getLogin());
            Users user;
            if (users.isEmpty()) {
                // First OpenID login for this identity: register a new account
                user = new Users();
                user.setRdate(new Timestamp(System.currentTimeMillis()));
            } else {
                user = users.get(0);
                // Refuse to hijack an account that authenticates another way
                if (!OPENID_PASS.equals(user.getPass())) {
                    loggedIn = false;
                    return "login";
                }
            }
            user.setLogin(openIdConsumer.getLogin().toLowerCase());
            user.setFirstname(openIdConsumer.getFirstname());
            user.setLastname(openIdConsumer.getLastname());
            user.setEmail(openIdConsumer.getEmail());
            user.setPass(OPENID_PASS);
            user.setLdate(new Timestamp(System.currentTimeMillis()));
            dao.saveOrUpdate(user);
            currentUser = user;
            loggedIn = true;
            logUser();
        }
        return "start";
    }

    /**
     * Inserts or updates a user using OLAT data and marks the account as
     * OLAT-authenticated.
     *
     * @param dao      DAO used to persist the user
     * @param user     the account row to fill in (new or existing)
     * @param olatUser data retrieved from OLAT
     * @return the persisted user
     */
    public Users olatSaveUser(UsersDAO dao, Users user, Users olatUser) {
        user.setLogin(olatUser.getLogin().toLowerCase());
        user.setFirstname(olatUser.getFirstname());
        user.setLastname(olatUser.getLastname());
        user.setEmail(olatUser.getEmail());
        user.setSchooltype(olatUser.getSchooltype());
        user.setLdate(new Timestamp(System.currentTimeMillis()));
        user.setPass(OLAT_PASS);
        dao.saveOrUpdate(user);
        return user;
    }

    /**
     * Inserts or updates a user using LDAP data and marks the account as
     * LDAP-authenticated.
     *
     * @param dao      DAO used to persist the user
     * @param user     the account row to fill in (new or existing)
     * @param ldapUser data retrieved from LDAP
     * @return the persisted user
     */
    public Users ldapSaveUser(UsersDAO dao, Users user, Users ldapUser) {
        user.setLogin(ldapUser.getLogin().toLowerCase());
        user.setFirstname(ldapUser.getFirstname());
        user.setLastname(ldapUser.getLastname());
        user.setEmail(ldapUser.getEmail());
        user.setLdate(new Timestamp(System.currentTimeMillis()));
        user.setPass(LDAP_PASS);
        dao.saveOrUpdate(user);
        return user;
    }

    /**
     * Attempts to log the current user in.
     *
     * <p>For an existing account the backend is chosen by the marker stored
     * in the password column (LDAP, OLAT, OpenID) or, for a normal account,
     * by checking the password directly. For an unknown login the backends
     * are tried in turn (LDAP, OLAT, then OpenID) and a new account is
     * registered on success.
     *
     * @return "start" on success, {@code null} on failure or when an OpenID
     *         redirect has been issued
     */
    public String logIn() {
        if (loggedIn) {
            return "start";
        }
        FacesContext context = FacesContext.getCurrentInstance();
        Cookie cookie = new Cookie("login", currentUser.getLogin());
        // Boolean.TRUE.equals() avoids an unboxing NPE when the
        // "remember me" checkbox was never submitted (rememberMe == null).
        if (Boolean.TRUE.equals(rememberMe)) {
            cookie.setMaxAge(60 * 60 * 24 * 30); // remember for 30 days
        } else {
            cookie.setMaxAge(0); // delete the cookie
        }
        HttpServletResponse response = (HttpServletResponse) context.getExternalContext().getResponse();
        response.addCookie(cookie);
        try {
            currentUser.setLogin(currentUser.getLogin().toLowerCase());
            UsersDAO dao = DAOFactory.DEFAULT.buildUsersDAO();
            List<Users> users = dao.findByLogin(currentUser.getLogin());
            if (!users.isEmpty()) {
                Users user = users.get(0);
                if (LDAP_PASS.equals(user.getPass())) {
                    /*
                     * LDAP
                     */
                    Users ldapUser = LdapConnector.retieveUser(currentUser.getLogin(), currentUser.getPass());
                    if (ldapUser != null) {
                        currentUser = ldapSaveUser(dao, user, ldapUser);
                        loggedIn = true;
                        logUser();
                    }
                } else if (OLAT_PASS.equals(user.getPass())) {
                    /*
                     * OLAT
                     */
                    if (Connector.getInstance().checkPassword(currentUser.getLogin(), currentUser.getPass())) {
                        Users olatUser = Connector.getInstance().getUser(currentUser.getLogin());
                        currentUser = olatSaveUser(dao, user, olatUser);
                        loggedIn = true;
                        logUser();
                    }
                } else if (OPENID_PASS.equals(user.getPass())) {
                    /*
                     * OpenID: strip the servlet path from the request URL to
                     * build the return-to address, then redirect to the provider.
                     */
                    String contextPath = ((HttpServletRequest) context.getExternalContext().getRequest()).getRequestURL().toString();
                    contextPath = contextPath.replaceFirst(context.getExternalContext().getRequestServletPath() + ".*$", "");
                    openIdConsumer = new OpenIdConsumer(contextPath + "/openid.html");
                    if (openIdConsumer.authorizationRequest(currentUser.getLogin(), (HttpServletRequest) context.getExternalContext().getRequest(), response)) {
                        // Redirect issued; the flow continues in openIdResponse()
                        return null;
                    } else {
                        loggedIn = false;
                    }
                } else if (user.checkPass(currentUser.getPass())) {
                    /*
                     * Normal
                     */
                    currentUser = user;
                    loggedIn = true;
                    logUser();
                }
                if (!loggedIn) {
                    // Record the failed attempt on the account
                    user.setFdate(new Timestamp(System.currentTimeMillis()));
                    dao.saveOrUpdate(user);
                }
            } else {
                /*
                 * User not found - registering
                 */
                Users ldapUser = null;
                Users newUser = new Users();
                newUser.setRdate(new Timestamp(System.currentTimeMillis()));
                if (currentUser.getPass() != null
                        && (ldapUser = LdapConnector.retieveUser(currentUser.getLogin(), currentUser.getPass())) != null) {
                    /*
                     * LDAP
                     */
                    currentUser = ldapSaveUser(dao, newUser, ldapUser);
                    loggedIn = true;
                    logUser();
                } else if (currentUser.getPass() != null
                        && Connector.getInstance().checkPassword(currentUser.getLogin(), currentUser.getPass())) {
                    /*
                     * OLAT
                     */
                    Users olatUser = Connector.getInstance().getUser(currentUser.getLogin());
                    currentUser = olatSaveUser(dao, newUser, olatUser);
                    loggedIn = true;
                    logUser();
                } else {
                    /*
                     * OpenID (last resort for unknown logins)
                     */
                    String contextPath = ((HttpServletRequest) context.getExternalContext().getRequest()).getRequestURL().toString();
                    contextPath = contextPath.replaceFirst(context.getExternalContext().getRequestServletPath() + ".*$", "");
                    openIdConsumer = new OpenIdConsumer(contextPath + "/openid.html");
                    if (openIdConsumer.authorizationRequest(currentUser.getLogin(), (HttpServletRequest) context.getExternalContext().getRequest(), response)) {
                        // Redirect issued; the flow continues in openIdResponse()
                        return null;
                    } else {
                        loggedIn = false;
                    }
                }
            }
        } catch (Exception ex) {
            logger.error("Exception while logging in as <" + currentUser.getLogin() + ">", ex);
            loggedIn = false;
        }
        if (!loggedIn) {
            String summary = messages.getString("bad_login_data");
            WWWHelper.AddMessage(context, FacesMessage.SEVERITY_ERROR, "login", summary, null);
            return null;
        }
        return "start";
    }

    /**
     * Logs the current user out and invalidates the HTTP session, if any.
     *
     * @return the "start" outcome
     */
    @HttpAction(name = "logout", pattern = "logout")
    public String logOut() {
        FacesContext context = FacesContext.getCurrentInstance();
        HttpSession session = (HttpSession) context.getExternalContext().getSession(false);
        // getSession(false) returns null when no session exists; guard
        // against an NPE if logout is hit after the session expired.
        if (session != null) {
            session.invalidate();
        }
        currentUser = new Users();
        loggedIn = false;
        return "start";
    }

    /**
     * Looks up the "login" cookie, but only on the login page.
     *
     * @return the cookie, or {@code null} when absent or on another view
     */
    private Cookie getLoginCookie() {
        FacesContext context = FacesContext.getCurrentInstance();
        String viewId = context.getViewRoot().getViewId();
        if (viewId.equals("/login.jspx")) {
            HttpServletRequest request = (HttpServletRequest) context.getExternalContext().getRequest();
            if (request.getCookies() != null) {
                for (Cookie cookie : request.getCookies()) {
                    if (cookie.getName().equals("login")) {
                        return cookie;
                    }
                }
            }
        }
        return null;
    }

    public Boolean getRememberMe() {
        return getLoginCookie() != null;
    }

    public void setRememberMe(Boolean value) {
        rememberMe = value;
    }

    /**
     * @return the login remembered in the "login" cookie, or an empty string
     */
    public String getLogin() {
        Cookie cookie = getLoginCookie();
        return cookie == null ? StringUtils.EMPTY : cookie.getValue();
    }

    public void setLogin(String value) {
        currentUser.setLogin(value);
    }

    public boolean isShowOnlyMySubmissions() {
        return showOnlyMySubmissions;
    }

    public void setShowOnlyMySubmissions(boolean showOnlyMySubmissions) {
        // Reset paging whenever the filter actually changes
        if (showOnlyMySubmissions != this.showOnlyMySubmissions) {
            this.showOnlyMySubmissions = showOnlyMySubmissions;
            setSubmissionsPageIndex(0);
        }
    }

    /**
     * @return the submissionsContestId
     */
    public int getSubmissionsContestId() {
        return submissionsContestId;
    }

    /**
     * @return the submissionsPageIndex
     */
    public int getSubmissionsPageIndex() {
        return submissionsPageIndex;
    }

    /**
     * @param submissionsContestId the submissionsContestId to set
     */
    public void setSubmissionsContestId(int submissionsContestId) {
        this.submissionsContestId = submissionsContestId;
    }

    /**
     * @param submissionsPageIndex the submissionsPageIndex to set; negative
     *                             values are clamped to 0
     */
    public void setSubmissionsPageIndex(int submissionsPageIndex) {
        if (submissionsPageIndex < 0) {
            submissionsPageIndex = 0;
        }
        this.submissionsPageIndex = submissionsPageIndex;
    }

    /**
     * @return the submissionsLastVisit
     */
    public long getSubmissionsLastVisit() {
        return submissionsLastVisit;
    }

    /**
     * @param submissionsLastVisit the submissionsLastVisit to set
     */
    public void setSubmissionsLastVisit(long submissionsLastVisit) {
        this.submissionsLastVisit = submissionsLastVisit;
    }

    /**
     * @return the submissionsUserId
     */
    public int getSubmissionsUserId() {
        return submissionsUserId;
    }

    /**
     * @param submissionsUserId the submissionsUserId to set
     */
    public void setSubmissionsUserId(int submissionsUserId) {
        this.submissionsUserId = submissionsUserId;
    }

    /**
     * @return the submissionsProblemId
     */
    public int getSubmissionsProblemId() {
        return submissionsProblemId;
    }

    /**
     * @param submissionsProblemId the submissionsProblemId to set
     */
    public void setSubmissionsProblemId(int submissionsProblemId) {
        this.submissionsProblemId = submissionsProblemId;
    }

    /**
     * @return the submissionsSeriesId
     */
    public int getSubmissionsSeriesId() {
        return submissionsSeriesId;
    }

    /**
     * @param submissionsSeriesId the submissionsSeriesId to set
     */
    public void setSubmissionsSeriesId(int submissionsSeriesId) {
        this.submissionsSeriesId = submissionsSeriesId;
    }
}
| |
/*******************************************************************************
*
* Pentaho Mondrian Test Compatibility Kit
*
* Copyright (C) 2013-2014 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.mondrian.tck;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import mondrian.rolap.RolapUtil;
import org.apache.log4j.Logger;
import com.google.common.base.Function;
public class SqlExpectation {

  static final Logger LOGGER = RolapUtil.SQL_LOGGER;

  /** Provides the result set to validate. */
  final ResultSetProvider query;
  /** Expected column names; validated by {@link ResultSetValidator}. */
  final String[] columns;
  /** Whether {@link #columns} lists only part of the result set's columns. */
  final boolean columnsPartial;
  /** Expected rows, values delimited by pipe ( '|' ). */
  final String[] rows;
  /** Whether {@link #rows} covers only the first rows of the result set. */
  final boolean partial;
  /** Expected column types, as values from {@link java.sql.Types}. */
  final int[] types;
  /** Functions applied to the statement before execution. */
  // Made final for consistency with the rest of the fields; it is only
  // ever assigned in the constructor.
  final List<Function<Statement, Void>> statementModifiers;
  /** Cancel timeout supplied by the builder; -1 when unset. */
  final int cancelTimeout;
  final ResultSetValidator validator;

  public SqlExpectation(
      ResultSetProvider query,
      String[] columns,
      boolean columnsPartial,
      int[] types,
      String[] rows,
      boolean partial,
      int cancelTimeout,
      final List<Function<Statement, Void>> statementModifiers ) {
    this.query = query;
    this.columns = columns;
    this.columnsPartial = columnsPartial;
    this.types = types;
    this.rows = rows;
    this.partial = partial;
    this.cancelTimeout = cancelTimeout;
    this.statementModifiers = statementModifiers;
    this.validator = new ResultSetValidator( columns, columnsPartial, rows, partial, types );
  }

  /**
   * Validates the given result set's columns and rows against this
   * expectation.
   *
   * @param rs the result set to verify
   * @throws Exception when validation fails
   */
  public void verify( ResultSet rs ) throws Exception {
    // Validate column names
    validator.validateColumns( rs );
    // Validate rows
    validator.validateRows( rs );
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  /** Fluent builder for {@link SqlExpectation} instances. */
  public static class Builder {
    private ResultSetProvider query;
    private String[] columns;
    private boolean columnsPartial;
    private String[] rows;
    private int[] types;
    private int cancelTimeout = -1;
    private boolean partial = false;
    private List<Function<Statement, Void>> statementModifiers = new ArrayList<>();

    private Builder() {
    }

    /**
     * Sets the {@link ResultSetProvider} to run.
     * <p>(mandatory)
     */
    public Builder query( ResultSetProvider query ) {
      this.query = query;
      return this;
    }

    /**
     * Sets the SQL query to run. The provider built here applies all
     * registered statement modifiers, logs and executes the query, and
     * wraps any failure with the offending SQL text.
     * <p>(mandatory)
     */
    public Builder query( final String query ) {
      return query( new ResultSetProvider() {
        @Override
        public ResultSet getData( Connection conn, final Statement statement ) throws Exception {
          for ( Function<Statement, Void> statementModifier : statementModifiers ) {
            statementModifier.apply( statement );
          }
          try {
            // Run the query
            SqlExpectation.LOGGER.info( "Mondrian.tck:" + query );
            statement.execute( query );
          } catch ( Throwable t ) {
            throw new Exception(
              "Query failed to run successfully:\n"
              + query,
              t );
          }
          return statement.getResultSet();
        }
      } );
    }

    /**
     * Sets the column names expected
     * <p>(optional)
     */
    public Builder columns( String... columns ) {
      this.columns = columns;
      return this;
    }

    /**
     * Sets whether the columns provided in {@link #columns(String[])} are only the
     * part of the columns of the result set.
     * <p>(optional)
     */
    public Builder columnsPartial() {
      this.columnsPartial = true;
      return this;
    }

    /**
     * Sets the expected column types. Use values in {@link java.sql.Types}.
     * <p>(optional)
     */
    public Builder types( int... types ) {
      this.types = types;
      return this;
    }

    /**
     * Sets the expected rows. The value delimiter is pipe ( '|' ).
     * <p>(optional)
     */
    public Builder rows( String... rows ) {
      this.rows = rows;
      return this;
    }

    /**
     * Sets whether the rows provided in {@link #rows(String[])} are only the
     * first rows of the result set and we didn't intend to validate them all.
     */
    public Builder partial() {
      this.partial = true;
      return this;
    }

    /**
     * adds a function that will be run for the statement before execution
     */
    public Builder modifyStatement( Function<Statement, Void> statementModifier ) {
      statementModifiers.add( statementModifier );
      return this;
    }

    /** Sets the cancel timeout to pass to the expectation. */
    public Builder cancelTimeout( int to ) {
      this.cancelTimeout = to;
      return this;
    }

    public SqlExpectation build() {
      return new SqlExpectation( query, columns, columnsPartial, types, rows, partial, cancelTimeout, statementModifiers );
    }
  }

  /**
   * This interface has to be implemented to provide a ResultSet to validate to
   * the Expectation classes.
   *
   * <p>There are two arguments to the API, one for the connection and one for the
   * statement. Note that this is required because the statements provided by the shims
   * are not symmetrical. The bug can be represented as:
   *
   * <p><code>connection != connection.createStatement().getConnection()</code>
   */
  public interface ResultSetProvider {
    /**
     * Returns {@link java.sql.ResultSet} executed by the {@link java.sql.Statement}<br/>
     * <p>
     * Code should be like <br/>
     * {@code statement.<doSomething>; return statement.getResultSet();}
     * </p>
     */
    ResultSet getData( Connection conn, Statement statement ) throws Exception;
  }
}
| |
/*
* Copyright (c) 2013 Triforce - in association with the University of Pretoria and Epi-Use <Advance/>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package afk.ge;
import afk.ge.ems.Utils;
import afk.ge.tokyo.ems.components.BBoxComponent;
import afk.ge.tokyo.ems.components.State;
import com.hackoeur.jglm.Mat4;
import com.hackoeur.jglm.Vec3;
import com.hackoeur.jglm.Vec4;
import com.hackoeur.jglm.support.FastMath;
import static com.hackoeur.jglm.support.FastMath.*;
import java.util.ArrayList;
import java.util.List;
/**
* Oriented Bounding Box. Stored as a matrix (without scaling) and Extents( x,
* y, z ).
*
* Ported to java from this article:
* http://www.3dkingdoms.com/weekly/weekly.php?a=21
*
* @author Jonathan Kreuzer
* @author Daniel
*/
public class BBox
{
private Mat4 m;       // box-to-world transform: rotation + translation, no scaling
private Vec3 extents; // half-sizes of the box along its local x, y and z axes
/**
 * Creates an uninitialized box; call one of the set(...) methods before use.
 */
public BBox()
{
}
/**
 * Creates a box from a transform matrix and half-extents.
 *
 * @param m box-to-world transform (rotation + translation, no scaling).
 * @param extents half-sizes along the box's local axes.
 */
public BBox(final Mat4 m, final Vec3 extents)
{
    set(m, extents);
}
/**
 * Creates a box from a transform and two corner points given in box space.
 * BL = Low values corner point, BH = High values corner point.
 *
 * @param m the base transform.
 * @param bl low-values corner point.
 * @param bh high-values corner point.
 */
public BBox(final Mat4 m, final Vec3 bl, final Vec3 bh)
{
    set(m, bl, bh);
}
/**
 * Builds the box for an entity: the transform comes from the entity's state
 * and the component's offset, the extents from the component's extent —
 * both scaled by the entity's scale.
 */
public BBox(State state, BBoxComponent bBoxComponent)
{
    m = Utils.getBBoxMatrix(state, bBoxComponent.offset.multiply(state.scale));
    this.extents = bBoxComponent.extent.multiply(state.scale);
}
/**
 * Sets the box's transform and half-extents directly.
 *
 * @param m box-to-world transform.
 * @param extents half-sizes along the box's local axes.
 */
public final void set(final Mat4 m, final Vec3 extents)
{
    this.m = m;
    this.extents = extents;
}
/**
 * Sets the box from a transform and two box-space corner points: the
 * transform is shifted to the corners' midpoint and the extents become
 * half the corner-to-corner span.
 *
 * @param m the base transform.
 * @param bl low-values corner point.
 * @param bh high-values corner point.
 */
public final void set(final Mat4 m, final Vec3 bl, final Vec3 bh)
{
    this.m = m;
    // NOTE(review): if Mat4.translate() returns a new matrix rather than
    // mutating in place (jglm matrices are typically immutable), this return
    // value is discarded and the midpoint shift is silently lost — confirm
    // against the jglm version in use.
    this.m.translate((bh.add(bl)).multiply(0.5f));
    extents = (bh.subtract(bl)).multiply(0.5f);
}
/**
 * Returns the full size of the box along each local axis, i.e. twice the
 * half-extents.
 */
public Vec3 getSize()
{
    final Vec3 halfSize = extents;
    return halfSize.multiply(2.0f);
}
/**
 * Returns the box centre in world space — the translation part of the
 * box-to-world transform.
 */
public Vec3 getCenterPoint()
{
    final Mat4 transform = m;
    return transform.getTranslate();
}
/**
 * Check if a point is in this bounding box.
 *
 * @param inP the point to check.
 * @return true if the point is within the box, false otherwise.
 */
public boolean isPointInBox(final Vec3 inP)
{
    // Transform the point into box-local coordinates...
    final Vec4 local = m.getInverseSimple().multiply(inP.toPoint());
    // ...then a plain axis-aligned containment test suffices.
    return abs(local.getX()) < extents.getX()
            && abs(local.getY()) < extents.getY()
            && abs(local.getZ()) < extents.getZ();
}
/**
 * Check if a sphere overlaps any part of this bounding box.
 *
 * @param inP the center of the sphere.
 * @param fRadius the radius of the sphere.
 * @return true if the sphere overlaps with the box, false otherwise.
 */
public boolean isSphereInBox(final Vec3 inP, float fRadius)
{
    // Sphere centre expressed in box-local coordinates
    final Vec4 centre = m.getInverseSimple().multiply(inP.toPoint());
    // Accumulate, axis by axis, the squared distance from the sphere
    // centre to the closest point on the box.
    float distSq = 0;
    for (int axis = 0; axis < 3; axis++)
    {
        final float overlap = abs(centre.get(axis)) - extents.get(axis);
        if (overlap > 0)
        {
            distSq += overlap * overlap;
        }
    }
    // Overlapping iff that closest point lies within the sphere's radius
    return distSq <= fRadius * fRadius;
}
/**
 * Check if the bounding box is completely behind a plane (defined by a
 * normal and a point).
 *
 * @param inNorm the normal of the plane.
 * @param inP a point on the plane.
 * @return true if the box is completely behind the plane, false otherwise.
 */
public boolean boxOutsidePlane(final Vec3 inNorm, final Vec3 inP)
{
    // Express the plane normal in box space; roatateVector only uses the
    // rotation portion of the matrix.
    Vec3 localNorm = m.getInverseSimple().roatateVector(inNorm);
    localNorm = new Vec3(abs(localNorm.getX()), abs(localNorm.getY()), abs(localNorm.getZ()));
    // Box extent projected onto the plane normal
    final float projectedExtent = localNorm.dot(extents);
    // Signed distance from the box centre to the plane
    final float centreDistance = inNorm.dot(getCenterPoint().subtract(inP));
    // Fully outside when the centre is behind the plane by more than the
    // projected extent.
    return centreDistance < -projectedExtent;
}
/**
 * Does the Line (l1, l2) intersect the Box? Uses the separating axis test
 * with the line expressed in box-local space.
 *
 * @param l1 first point of line segment.
 * @param l2 second point of line segment.
 * @return true if line (l1, l2) intersects the box.
 */
public boolean isLineInBox(final Vec3 l1, final Vec3 l2)
{
    // Put line in box space
    Mat4 mInv = m.getInverseSimple();
    Vec4 lb1 = mInv.multiply(l1.toPoint());
    Vec4 lb2 = mInv.multiply(l2.toPoint());
    // Get line midpoint and extent
    Vec4 lMid = (lb1.add(lb2)).multiply(0.5f);
    Vec4 l = lb1.subtract(lMid);
    Vec3 lExt = new Vec3(abs(l.getX()), abs(l.getY()), abs(l.getZ()));
    // Use Separating Axis Test
    // Separation vector from box center to line center is LMid, since the line is in box space
    for (int i = 0; i < 3; i++)
    {
        if (abs(lMid.get(i)) > extents.get(i) + lExt.get(i))
        {
            return false;
        }
    }
    // Crossproducts of line and each axis.
    // BUGFIX: the Y/Z case previously computed abs(a) - b instead of
    // abs(a - b) — the abs() covered only the first product, unlike the two
    // sibling checks below; the whole cross-product difference must be
    // inside abs() for the separating axis test to be correct.
    if (abs(lMid.getY() * l.getZ() - lMid.getZ() * l.getY()) > (extents.getY() * lExt.getZ() + extents.getZ() * lExt.getY()))
    {
        return false;
    }
    if (abs(lMid.getX() * l.getZ() - lMid.getZ() * l.getX()) > (extents.getX() * lExt.getZ() + extents.getZ() * lExt.getX()))
    {
        return false;
    }
    if (abs(lMid.getX() * l.getY() - lMid.getY() * l.getX()) > (extents.getX() * lExt.getY() + extents.getY() * lExt.getX()))
    {
        return false;
    }
    // No separating axis, the line intersects
    return true;
}
/**
* Returns a 3x3 rotation matrix as vectors.
*
* @retrun an array of vectors containing the rotation part of the matrix.
*/
Vec3[] getInvRot()
{
Vec3[] pvRot = new Vec3[3];
pvRot[0] = m.<Vec4>getColumn(0).getXYZ();
pvRot[1] = m.<Vec4>getColumn(1).getXYZ();
pvRot[2] = m.<Vec4>getColumn(2).getXYZ();
return pvRot;
}
    /**
     * Check if any part of a box is inside any part of another box. Uses the
     * separating axis test over the 15 candidate axes: the three face normals
     * of each box plus the nine pairwise cross products of their axes.
     *
     * @param BBox the other box to check.
     * @return true if any part of the given box is inside this box.
     */
    public boolean isBoxInBox(BBox BBox)
    {
        Vec3 sizeA = extents;
        Vec3 sizeB = BBox.extents;
        Vec3[] rotA, rotB;
        rotA = getInvRot();
        rotB = BBox.getInvRot();
        float[][] r = new float[3][3]; // Rotation from B to A
        float[][] ar = new float[3][3]; // absolute values of R matrix, to use with box extents
        float extentA, extentB, separation;
        int i, k;
        // Calculate B to A rotation matrix
        for (i = 0; i < 3; i++)
        {
            for (k = 0; k < 3; k++)
            {
                r[i][k] = rotA[i].dot(rotB[k]);
                ar[i][k] = abs(r[i][k]);
            }
        }
        // Vector separating the centers of Box B and of Box A
        Vec3 vSepWS = BBox.getCenterPoint().subtract(getCenterPoint());
        // Rotated into Box A's coordinates
        Vec3 vSepA = new Vec3(
                vSepWS.dot(rotA[0]),
                vSepWS.dot(rotA[1]),
                vSepWS.dot(rotA[2]));
        // Test if any of A's basis vectors separate the box:
        // project B's extents through |R| onto A's axis i.
        for (i = 0; i < 3; i++)
        {
            extentA = sizeA.get(i);
            extentB = sizeB.dot(new Vec3(ar[i][0], ar[i][1], ar[i][2]));
            separation = abs(vSepA.get(i));
            if (separation > extentA + extentB)
            {
                return false;
            }
        }
        // Test if any of B's basis vectors separate the box:
        // project A's extents through |R|^T onto B's axis k.
        for (k = 0; k < 3; k++)
        {
            extentA = sizeA.dot(new Vec3(ar[0][k], ar[1][k], ar[2][k]));
            extentB = sizeB.get(k);
            separation = abs(vSepA.dot(new Vec3(r[0][k], r[1][k], r[2][k])));
            if (separation > extentA + extentB)
            {
                return false;
            }
        }
        // Now test Cross Products of each basis vector combination ( A[i], B[k] );
        // i1/i2 and k1/k2 are the two axes orthogonal to i and k respectively.
        for (i = 0; i < 3; i++)
        {
            for (k = 0; k < 3; k++)
            {
                int i1 = (i + 1) % 3, i2 = (i + 2) % 3;
                int k1 = (k + 1) % 3, k2 = (k + 2) % 3;
                extentA = sizeA.get(i1) * ar[i2][k] + sizeA.get(i2) * ar[i1][k];
                extentB = sizeB.get(k1) * ar[i][k2] + sizeB.get(k2) * ar[i][k1];
                separation = abs(vSepA.get(i2) * r[i1][k] - vSepA.get(i1) * r[i2][k]);
                if (separation > extentA + extentB)
                {
                    return false;
                }
            }
        }
        // No separating axis found, the boxes overlap
        return true;
    }
public float getEntrancePointDistance(Vec3 org, Vec3 ray)
{
List<Vec3> ps = getIntersectionPoints(org, ray);
if (ps.isEmpty())
{
return Float.POSITIVE_INFINITY;
}
float cdist = Float.POSITIVE_INFINITY;
for (int i = 0; i < ps.size(); i++)
{
float dist = ps.get(i).subtract(org).getLengthSquared();
if (dist < cdist)
{
cdist = dist;
}
}
return FastMath.sqrtFast(cdist);
}
public List<Vec3> getIntersectionPoints(Vec3 org, Vec3 ray)
{
// Put ray in box space
Mat4 mInv = m.getInverseSimple();
ray = mInv.multiply(ray.toDirection()).getXYZ();
org = mInv.multiply(org.toPoint()).getXYZ();
Vec3 mext = extents.getNegated();
ArrayList<Vec3> ps = new ArrayList<Vec3>();
for (int i = 0; i < 3; i++)
{
if (org.get(i) >= extents.get(i))
{
if (ray.get(i) >= 0)
{
return new ArrayList<Vec3>(); // ray points away from box
}
ps.addAll(lineIntersection(i, org, ray, extents));
} else if (org.get(i) <= mext.get(i))
{
if (ray.get(i) <= 0)
{
return new ArrayList<Vec3>(); // ray points away from box
}
ps.addAll(lineIntersection(i, org, ray, mext));
}
}
return ps;
}
    /**
     * Helper for getIntersectionPoints: intersect a box-space ray with the
     * three face planes selected by lext (either +extents or -extents),
     * keeping only hits that lie forward along the ray (t > 0) and within
     * the box's extents on the other two axes. Results are transformed back
     * to world space via m.
     *
     * @param xi index of the axis whose slab the ray origin lies beyond.
     * @param org the ray origin in box space.
     * @param ray the ray direction in box space.
     * @param lext face-plane coordinates (+extents or -extents per axis).
     * @return zero to three world-space intersection points.
     */
    private ArrayList<Vec3> lineIntersection(int xi, Vec3 org,
            Vec3 ray, Vec3 lext)
    {
        ArrayList<Vec3> ps = new ArrayList<Vec3>();
        // The other two axis indices, relative to xi.
        int yi = (xi + 2) % 3;
        int zi = (xi + 1) % 3;
        // Tiny divisor substituted for zero ray components: avoids division
        // by zero and instead produces a huge t that fails the range checks.
        final float JZERO = 0.00000000001f;
        // Parametric distances to the three candidate face planes.
        float t0 = (lext.get(xi) - org.get(xi))
                / (ray.get(xi) == 0 ? JZERO : ray.get(xi));
        float t1 = (lext.get(yi) - org.get(yi))
                / (ray.get(yi) == 0 ? JZERO : ray.get(yi));
        float t2 = (lext.get(zi) - org.get(zi))
                / (ray.get(zi) == 0 ? JZERO : ray.get(zi));
        // Candidate hit on the xi face plane.
        float[] r = new float[3];
        r[xi] = lext.get(xi);
        r[yi] = org.get(yi) + ray.get(yi) * t0;
        r[zi] = org.get(zi) + ray.get(zi) * t0;
        if (t0 > 0
                && Math.abs(r[yi]) <= extents.get(yi)
                && Math.abs(r[zi]) <= extents.get(zi))
        {
            ps.add(m.multiply(new Vec4(r[0], r[1], r[2],1.0f)).getXYZ());
        }
        // Candidate hit on the yi face plane.
        r = new float[3];
        r[xi] = org.get(xi) + ray.get(xi) * t1;
        r[yi] = lext.get(yi);
        r[zi] = org.get(zi) + ray.get(zi) * t1;
        if (t1 > 0
                && Math.abs(r[xi]) <= extents.get(xi)
                && Math.abs(r[zi]) <= extents.get(zi))
        {
            ps.add(m.multiply(new Vec4(r[0], r[1], r[2],1.0f)).getXYZ());
        }
        // Candidate hit on the zi face plane.
        r = new float[3];
        r[xi] = org.get(xi) + ray.get(xi) * t2;
        r[yi] = org.get(yi) + ray.get(yi) * t2;
        r[zi] = lext.get(zi);
        if (t2 > 0
                && Math.abs(r[xi]) <= extents.get(xi)
                && Math.abs(r[yi]) <= extents.get(yi))
        {
            ps.add(m.multiply(new Vec4(r[0], r[1], r[2],1.0f)).getXYZ());
        }
        return ps;
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudfront.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* A complex type that controls which HTTP methods CloudFront processes and forwards to your Amazon S3 bucket or your
* custom origin. There are three choices:
* </p>
* <ul>
* <li>
* <p>
* CloudFront forwards only <code>GET</code> and <code>HEAD</code> requests.
* </p>
* </li>
* <li>
* <p>
* CloudFront forwards only <code>GET</code>, <code>HEAD</code>, and <code>OPTIONS</code> requests.
* </p>
* </li>
* <li>
* <p>
* CloudFront forwards <code>GET, HEAD, OPTIONS, PUT, PATCH, POST</code>, and <code>DELETE</code> requests.
* </p>
* </li>
* </ul>
* <p>
* If you pick the third choice, you may need to restrict access to your Amazon S3 bucket or to your custom origin so
* users can't perform operations that you don't want them to. For example, you might not want users to have permissions
* to delete objects from your origin.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudfront-2020-05-31/AllowedMethods" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AllowedMethods implements Serializable, Cloneable {

    /**
     * The number of HTTP methods that CloudFront forwards to the origin. Valid values are 2 (for <code>GET</code> and
     * <code>HEAD</code>), 3 (for <code>GET</code>, <code>HEAD</code>, and <code>OPTIONS</code>) and 7 (for
     * <code>GET, HEAD, OPTIONS, PUT, PATCH, POST</code>, and <code>DELETE</code>).
     */
    private Integer quantity;

    /** The HTTP methods that CloudFront processes and forwards to the origin. */
    private com.amazonaws.internal.SdkInternalList<String> items;

    /** Cached-method configuration; see {@link CachedMethods}. */
    private CachedMethods cachedMethods;

    /**
     * Sets the number of HTTP methods that CloudFront forwards to the origin.
     *
     * @param quantity
     *        the method count; valid values are 2, 3 and 7 (see {@link #getQuantity()}).
     */
    public void setQuantity(Integer quantity) {
        this.quantity = quantity;
    }

    /**
     * Returns the number of HTTP methods that CloudFront forwards to the origin.
     *
     * @return the method count: 2 (for <code>GET</code> and <code>HEAD</code>), 3 (adds <code>OPTIONS</code>) or 7
     *         (adds <code>PUT, PATCH, POST</code> and <code>DELETE</code>).
     */
    public Integer getQuantity() {
        return quantity;
    }

    /**
     * Fluent variant of {@link #setQuantity(Integer)}.
     *
     * @param quantity
     *        the method count; valid values are 2, 3 and 7.
     * @return this object, so that method calls can be chained together.
     */
    public AllowedMethods withQuantity(Integer quantity) {
        setQuantity(quantity);
        return this;
    }

    /**
     * Returns the HTTP methods that CloudFront processes and forwards to the origin, lazily creating an empty list on
     * first access.
     *
     * @return the list of HTTP method names; never null.
     * @see Method
     */
    public java.util.List<String> getItems() {
        if (items == null) {
            items = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return items;
    }

    /**
     * Replaces the HTTP methods that CloudFront processes and forwards to the origin.
     *
     * @param items
     *        the HTTP method names to copy, or null to clear the list.
     * @see Method
     */
    public void setItems(java.util.Collection<String> items) {
        this.items = (items == null) ? null : new com.amazonaws.internal.SdkInternalList<String>(items);
    }

    /**
     * Appends HTTP method names to the existing list (if any). Use {@link #setItems(java.util.Collection)} or
     * {@link #withItems(java.util.Collection)} if you want to override the existing values instead.
     *
     * @param items
     *        the HTTP method names to append.
     * @return this object, so that method calls can be chained together.
     * @see Method
     */
    public AllowedMethods withItems(String... items) {
        if (this.items == null) {
            this.items = new com.amazonaws.internal.SdkInternalList<String>(items.length);
        }
        for (String method : items) {
            this.items.add(method);
        }
        return this;
    }

    /**
     * Replaces the HTTP methods that CloudFront processes and forwards to the origin.
     *
     * @param items
     *        the HTTP method names to copy, or null to clear the list.
     * @return this object, so that method calls can be chained together.
     * @see Method
     */
    public AllowedMethods withItems(java.util.Collection<String> items) {
        setItems(items);
        return this;
    }

    /**
     * Appends the given {@link Method} values (converted to strings) to the existing list (if any).
     *
     * @param items
     *        the HTTP methods to append.
     * @return this object, so that method calls can be chained together.
     * @see Method
     */
    public AllowedMethods withItems(Method... items) {
        com.amazonaws.internal.SdkInternalList<String> converted = new com.amazonaws.internal.SdkInternalList<String>(items.length);
        for (Method value : items) {
            converted.add(value.toString());
        }
        if (getItems() == null) {
            setItems(converted);
        } else {
            getItems().addAll(converted);
        }
        return this;
    }

    /**
     * Sets the cached-method configuration.
     *
     * @param cachedMethods
     *        the cached-method configuration.
     */
    public void setCachedMethods(CachedMethods cachedMethods) {
        this.cachedMethods = cachedMethods;
    }

    /**
     * Returns the cached-method configuration.
     *
     * @return the cached-method configuration, or null if unset.
     */
    public CachedMethods getCachedMethods() {
        return cachedMethods;
    }

    /**
     * Fluent variant of {@link #setCachedMethods(CachedMethods)}.
     *
     * @param cachedMethods
     *        the cached-method configuration.
     * @return this object, so that method calls can be chained together.
     */
    public AllowedMethods withCachedMethods(CachedMethods cachedMethods) {
        setCachedMethods(cachedMethods);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getQuantity() != null) {
            sb.append("Quantity: ").append(getQuantity()).append(",");
        }
        if (getItems() != null) {
            sb.append("Items: ").append(getItems()).append(",");
        }
        if (getCachedMethods() != null) {
            sb.append("CachedMethods: ").append(getCachedMethods());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof AllowedMethods)) {
            return false;
        }
        AllowedMethods that = (AllowedMethods) obj;
        // Each field matches when both sides are null or both are equal.
        if (getQuantity() == null ? that.getQuantity() != null : !getQuantity().equals(that.getQuantity())) {
            return false;
        }
        if (getItems() == null ? that.getItems() != null : !getItems().equals(that.getItems())) {
            return false;
        }
        return getCachedMethods() == null ? that.getCachedMethods() == null : getCachedMethods().equals(that.getCachedMethods());
    }

    @Override
    public int hashCode() {
        // Standard 31-based accumulation over the three fields.
        int result = 1;
        result = 31 * result + ((getQuantity() == null) ? 0 : getQuantity().hashCode());
        result = 31 * result + ((getItems() == null) ? 0 : getItems().hashCode());
        result = 31 * result + ((getCachedMethods() == null) ? 0 : getCachedMethods().hashCode());
        return result;
    }

    @Override
    public AllowedMethods clone() {
        try {
            return (AllowedMethods) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/**
* Implementation of the net.sf.geographiclib.GeodesicLine class
*
* Copyright (c) Charles Karney (2013) <charles@karney.com> and licensed
* under the MIT/X11 License. For more information, see
* http://geographiclib.sourceforge.net/
**********************************************************************/
package net.sf.geographiclib;
/**
* A geodesic line.
* <p>
* GeodesicLine facilitates the determination of a series of points on a single
* geodesic. The starting point (<i>lat1</i>, <i>lon1</i>) and the azimuth
* <i>azi1</i> are specified in the constructor. {@link #Position Position}
* returns the location of point 2 a distance <i>s12</i> along the geodesic.
* Alternatively {@link #ArcPosition ArcPosition} gives the position of point 2
* an arc length <i>a12</i> along the geodesic.
* <p>
* The calculations are accurate to better than 15 nm (15 nanometers). See
* Sec. 9 of
* <a href="http://arxiv.org/abs/1102.1215v1">arXiv:1102.1215v1</a> for
* details. The algorithms used by this class are based on series expansions
* using the flattening <i>f</i> as a small parameter. These are only accurate
* for |<i>f</i>| < 0.02; however reasonably accurate results will be
* obtained for |<i>f</i>| < 0.2.
* <p>
* The algorithms are described in
* <ul>
* <li>
* C. F. F. Karney,
* <a href="http://dx.doi.org/10.1007/s00190-012-0578-z">
* Algorithms for geodesics</a>,
* J. Geodesy <b>87</b>, 43–55 (2013);
* DOI: <a href="http://dx.doi.org/10.1007/s00190-012-0578-z">
* 10.1007/s00190-012-0578-z</a>;
* addenda: <a href="http://geographiclib.sf.net/geod-addenda.html">
* geod-addenda.html</a>.
* </ul>
* <p>
* Here's an example of using this class
* <pre>
* {@code
* import net.sf.geographiclib.*;
* public class GeodesicLineTest {
* public static void main(String[] args) {
* // Print waypoints between JFK and SIN
* Geodesic geod = Geodesic.WGS84;
* double
* lat1 = 40.640, lon1 = -73.779, // JFK
* lat2 = 1.359, lon2 = 103.989; // SIN
* GeodesicData g = geod.Inverse(lat1, lon1, lat2, lon2,
* GeodesicMask.DISTANCE | GeodesicMask.AZIMUTH);
* GeodesicLine line = new GeodesicLine(geod, lat1, lon1, g.azi1,
* GeodesicMask.DISTANCE_IN | GeodesicMask.LONGITUDE);
* double
* s12 = g.s12,
* a12 = g.a12,
* ds0 = 500e3; // Nominal distance between points = 500 km
* int num = (int)(Math.ceil(s12 / ds0)); // The number of intervals
* {
* // Use intervals of equal length
* double ds = s12 / num;
* for (int i = 0; i <= num; ++i) {
* g = line.Position(i * ds,
* GeodesicMask.LATITUDE | GeodesicMask.LONGITUDE);
* System.out.println(i + " " + g.lat2 + " " + g.lon2);
* }
* }
* {
* // Slightly faster, use intervals of equal arc length
* double da = a12 / num;
* for (int i = 0; i <= num; ++i) {
* g = line.ArcPosition(i * da,
* GeodesicMask.LATITUDE | GeodesicMask.LONGITUDE);
* System.out.println(i + " " + g.lat2 + " " + g.lon2);
* }
* }
* }
* }}</pre>
**********************************************************************/
public class GeodesicLine {
  // Sizes of the series-expansion coefficient arrays, mirrored from
  // Geodesic so they can be used without qualification.
  private static final int nC1_ = Geodesic.nC1_;
  private static final int nC1p_ = Geodesic.nC1p_;
  private static final int nC2_ = Geodesic.nC2_;
  private static final int nC3_ = Geodesic.nC3_;
  private static final int nC4_ = Geodesic.nC4_;
  // Starting point and azimuth of the line (degrees).
  private double _lat1, _lon1, _azi1;
  // Ellipsoid parameters and quantities for point 1 precomputed in the
  // constructor so that each Position/ArcPosition call is cheap.
  private double _a, _f, _b, _c2, _f1, _salp0, _calp0, _k2,
    _salp1, _calp1, _ssig1, _csig1, _dn1, _stau1, _ctau1, _somg1, _comg1,
    _A1m1, _A2m1, _A3c, _B11, _B21, _B31, _A4, _B41;
  // index zero elements of _C1a, _C1pa, _C2a, _C3a are unused
  private double _C1a[], _C1pa[], _C2a[], _C3a[],
    _C4a[]; // all the elements of _C4a are used
  // Bitmask of GeodesicMask capabilities this line was constructed with.
  private int _caps;
  /**
   * Constructor for a geodesic line starting at latitude <i>lat1</i>,
   * longitude <i>lon1</i>, and azimuth <i>azi1</i> (all in degrees), with
   * all capabilities enabled.
   * <p>
   * @param g A {@link Geodesic} object used to compute the necessary
   *   information about the GeodesicLine.
   * @param lat1 latitude of point 1 (degrees).
   * @param lon1 longitude of point 1 (degrees).
   * @param azi1 azimuth at point 1 (degrees).
   * <p>
   * <i>lat1</i> should be in the range [−90°, 90°]; <i>lon1</i>
   * and <i>azi1</i> should be in the range [−540°, 540°).
   * <p>
   * If the point is at a pole, the azimuth is defined by keeping <i>lon1</i>
   * fixed, writing <i>lat1</i> = ±(90° − ε), and
   * taking the limit ε → 0+.
   **********************************************************************/
  public GeodesicLine(Geodesic g,
                      double lat1, double lon1, double azi1) {
    this(g, lat1, lon1, azi1, GeodesicMask.ALL);
  }
  /**
   * Constructor for a geodesic line starting at latitude <i>lat1</i>,
   * longitude <i>lon1</i>, and azimuth <i>azi1</i> (all in degrees) with a
   * subset of the capabilities included.
   * <p>
   * @param g A {@link Geodesic} object used to compute the necessary
   *   information about the GeodesicLine.
   * @param lat1 latitude of point 1 (degrees).
   * @param lon1 longitude of point 1 (degrees).
   * @param azi1 azimuth at point 1 (degrees).
   * @param caps bitor'ed combination of {@link GeodesicMask} values
   *   specifying the capabilities the GeodesicLine object should possess,
   *   i.e., which quantities can be returned in calls to {@link #Position
   *   Position}.
   * <p>
   * The {@link GeodesicMask} values are
   * <ul>
   * <li>
   *   <i>caps</i> |= GeodesicMask.LATITUDE for the latitude <i>lat2</i>; this
   *   is added automatically;
   * <li>
   *   <i>caps</i> |= GeodesicMask.LONGITUDE for the longitude <i>lon2</i>;
   * <li>
   *   <i>caps</i> |= GeodesicMask.AZIMUTH for the azimuth <i>azi2</i>; this
   *   is added automatically;
   * <li>
   *   <i>caps</i> |= GeodesicMask.DISTANCE for the distance <i>s12</i>;
   * <li>
   *   <i>caps</i> |= GeodesicMask.REDUCEDLENGTH for the reduced length
   *   <i>m12</i>;
   * <li>
   *   <i>caps</i> |= GeodesicMask.GEODESICSCALE for the geodesic scales
   *   <i>M12</i> and <i>M21</i>;
   * <li>
   *   <i>caps</i> |= GeodesicMask.AREA for the area <i>S12</i>;
   * <li>
   *   <i>caps</i> |= GeodesicMask.DISTANCE_IN permits the length of the
   *   geodesic to be given in terms of <i>s12</i>; without this capability the
   *   length can only be specified in terms of arc length;
   * <li>
   *   <i>caps</i> |= GeodesicMask.ALL for all of the above;
   * </ul>
   **********************************************************************/
  public GeodesicLine(Geodesic g,
                      double lat1, double lon1, double azi1,
                      int caps) {
    // Copy the ellipsoid parameters from the Geodesic object.
    _a = g._a;
    _f = g._f;
    _b = g._b;
    _c2 = g._c2;
    _f1 = g._f1;
    // Always allow latitude and azimuth
    _caps = caps | GeodesicMask.LATITUDE | GeodesicMask.AZIMUTH;
    // Guard against underflow in salp0
    azi1 = Geodesic.AngRound(GeoMath.AngNormalize(azi1));
    lon1 = GeoMath.AngNormalize(lon1);
    _lat1 = lat1;
    _lon1 = lon1;
    _azi1 = azi1;
    // alp1 is in [0, pi]
    double alp1 = azi1 * GeoMath.degree;
    // Enforce sin(pi) == 0 and cos(pi/2) == 0.  Better to face the ensuing
    // problems directly than to skirt them.
    _salp1 = azi1 == -180 ? 0 : Math.sin(alp1);
    _calp1 = Math.abs(azi1) == 90 ? 0 : Math.cos(alp1);
    double cbet1, sbet1, phi;
    phi = lat1 * GeoMath.degree;
    // Ensure cbet1 = +epsilon at poles
    sbet1 = _f1 * Math.sin(phi);
    cbet1 = Math.abs(lat1) == 90 ? Geodesic.tiny_ : Math.cos(phi);
    { Pair p = Geodesic.SinCosNorm(sbet1, cbet1);
      sbet1 = p.first; cbet1 = p.second; }
    _dn1 = Math.sqrt(1 + g._ep2 * GeoMath.sq(sbet1));
    // Evaluate alp0 from sin(alp1) * cos(bet1) = sin(alp0),
    _salp0 = _salp1 * cbet1; // alp0 in [0, pi/2 - |bet1|]
    // Alt: calp0 = hypot(sbet1, calp1 * cbet1).  The following
    // is slightly better (consider the case salp1 = 0).
    _calp0 = GeoMath.hypot(_calp1, _salp1 * sbet1);
    // Evaluate sig with tan(bet1) = tan(sig1) * cos(alp1).
    // sig = 0 is nearest northward crossing of equator.
    // With bet1 = 0, alp1 = pi/2, we have sig1 = 0 (equatorial line).
    // With bet1 =  pi/2, alp1 = -pi, sig1 =  pi/2
    // With bet1 = -pi/2, alp1 =  0 , sig1 = -pi/2
    // Evaluate omg1 with tan(omg1) = sin(alp0) * tan(sig1).
    // With alp0 in (0, pi/2], quadrants for sig and omg coincide.
    // No atan2(0,0) ambiguity at poles since cbet1 = +epsilon.
    // With alp0 = 0, omg1 = 0 for alp1 = 0, omg1 = pi for alp1 = pi.
    _ssig1 = sbet1; _somg1 = _salp0 * sbet1;
    _csig1 = _comg1 = sbet1 != 0 || _calp1 != 0 ? cbet1 * _calp1 : 1;
    { Pair p = Geodesic.SinCosNorm(_ssig1, _csig1);
      _ssig1 = p.first; _csig1 = p.second; } // sig1 in (-pi, pi]
    // Geodesic.SinCosNorm(_somg1, _comg1); -- don't need to normalize!
    _k2 = GeoMath.sq(_calp0) * g._ep2;
    // Series expansion parameter (see Karney, Algorithms for geodesics).
    double eps = _k2 / (2 * (1 + Math.sqrt(1 + _k2)) + _k2);
    if ((_caps & GeodesicMask.CAP_C1) != 0) {
      // C1 series: needed for the distance computation.
      _A1m1 = Geodesic.A1m1f(eps);
      _C1a = new double[nC1_ + 1];
      Geodesic.C1f(eps, _C1a);
      _B11 = Geodesic.SinCosSeries(true, _ssig1, _csig1, _C1a);
      double s = Math.sin(_B11), c = Math.cos(_B11);
      // tau1 = sig1 + B11
      _stau1 = _ssig1 * c + _csig1 * s;
      _ctau1 = _csig1 * c - _ssig1 * s;
      // Not necessary because C1pa reverts C1a
      // _B11 = -SinCosSeries(true, _stau1, _ctau1, _C1pa, nC1p_);
    }
    if ((_caps & GeodesicMask.CAP_C1p) != 0) {
      // C1' series: reverted series, converts a distance to an arc length.
      _C1pa = new double[nC1p_ + 1];
      Geodesic.C1pf(eps, _C1pa);
    }
    if ((_caps & GeodesicMask.CAP_C2) != 0) {
      // C2 series: needed for the reduced length and geodesic scale.
      _C2a = new double[nC2_ + 1];
      _A2m1 = Geodesic.A2m1f(eps);
      Geodesic.C2f(eps, _C2a);
      _B21 = Geodesic.SinCosSeries(true, _ssig1, _csig1, _C2a);
    }
    if ((_caps & GeodesicMask.CAP_C3) != 0) {
      // C3 series: needed for the longitude computation.
      _C3a = new double[nC3_];
      g.C3f(eps, _C3a);
      _A3c = -_f * _salp0 * g.A3f(eps);
      _B31 = Geodesic.SinCosSeries(true, _ssig1, _csig1, _C3a);
    }
    if ((_caps & GeodesicMask.CAP_C4) != 0) {
      // C4 series: needed for the area computation.
      _C4a = new double[nC4_];
      g.C4f(eps, _C4a);
      // Multiplier = a^2 * e^2 * cos(alpha0) * sin(alpha0)
      _A4 = GeoMath.sq(_a) * _calp0 * _salp0 * g._e2;
      _B41 = Geodesic.SinCosSeries(false, _ssig1, _csig1, _C4a);
    }
  }
  /**
   * A default constructor.  If GeodesicLine.Position is called on the
   * resulting object, it returns immediately (without doing any
   * calculations).  The object can be set with a call to {@link
   * Geodesic.Line}.  Use {@link Init()} to test whether object is still in
   * this uninitialized state.  (This constructor was useful in C++, e.g.,
   * to allow vectors of GeodesicLine objects.  It may not be needed in
   * Java, so make it private.)
   **********************************************************************/
  private GeodesicLine() { _caps = 0; } // no capabilities: Position is a no-op
/**
* Compute the position of point 2 which is a distance <i>s12</i> (meters)
* from point 1.
* <p>
* @param s12 distance between point 1 and point 2 (meters); it can be
* negative.
* @return a {@link GeodesicData} object with the following fields:
* <i>lat1</i>, <i>lon1</i>, <i>azi1</i>, <i>lat2</i>, <i>lon2</i>,
* <i>azi2</i>, <i>s12</i>, <i>a12</i>. Some of these results may be
* missing if the GeodesicLine did not include the relevant capability.
* <p>
* The values of <i>lon2</i> and <i>azi2</i> returned are in the range
* [−180°, 180°).
* <p>
* The GeodesicLine object <i>must</i> have been constructed with <i>caps</i>
* |= {@link GeodesicMask#DISTANCE_IN}; otherwise no parameters are set.
**********************************************************************/
public GeodesicData Position(double s12) {
return Position(false, s12,
GeodesicMask.LATITUDE | GeodesicMask.LONGITUDE |
GeodesicMask.AZIMUTH);
}
/**
* Compute the position of point 2 which is a distance <i>s12</i> (meters)
* from point 1 and with a subset of the geodesic results returned.
* <p>
* @param s12 distance between point 1 and point 2 (meters); it can be
* negative.
* @param outmask a bitor'ed combination of {@link GeodesicMask} values
* specifying which results should be returned.
* @return a {@link GeodesicData} object including the requested results.
* <p>
* The GeodesicLine object <i>must</i> have been constructed with <i>caps</i>
* |= {@link GeodesicMask#DISTANCE_IN}; otherwise no parameters are set.
* Requesting a value which the GeodesicLine object is not capable of
* computing is not an error (no parameters will be set).
**********************************************************************/
public GeodesicData Position(double s12, int outmask) {
return Position(false, s12, outmask);
}
/**
* Compute the position of point 2 which is an arc length <i>a12</i>
* (degrees) from point 1.
* <p>
* @param a12 arc length between point 1 and point 2 (degrees); it can
* be negative.
* @return a {@link GeodesicData} object with the following fields:
* <i>lat1</i>, <i>lon1</i>, <i>azi1</i>, <i>lat2</i>, <i>lon2</i>,
* <i>azi2</i>, <i>s12</i>, <i>a12</i>. Some of these results may be
* missing if the GeodesicLine did not include the relevant capability.
* <p>
* The values of <i>lon2</i> and <i>azi2</i> returned are in the range
* [−180°, 180°).
* <p>
* The GeodesicLine object <i>must</i> have been constructed with <i>caps</i>
* |= {@link GeodesicMask#DISTANCE_IN}; otherwise no parameters are set.
**********************************************************************/
public GeodesicData ArcPosition(double a12) {
return Position(true, a12,
GeodesicMask.LATITUDE | GeodesicMask.LONGITUDE |
GeodesicMask.AZIMUTH | GeodesicMask.DISTANCE);
}
/**
* Compute the position of point 2 which is an arc length <i>a12</i>
* (degrees) from point 1 and with a subset of the geodesic results returned.
* <p>
* @param a12 arc length between point 1 and point 2 (degrees); it can
* be negative.
* @param outmask a bitor'ed combination of {@link GeodesicMask} values
* specifying which results should be returned.
* @return a {@link GeodesicData} object giving <i>lat1</i>, <i>lon2</i>,
* <i>azi2</i>, and <i>a12</i>.
* <p>
* The GeodesicLine object <i>must</i> have been constructed with <i>caps</i>
* |= {@link GeodesicMask#DISTANCE_IN}; otherwise no parameters are set.
* Requesting a value which the GeodesicLine object is not capable of
* computing is not an error (no parameters will be set).
**********************************************************************/
public GeodesicData ArcPosition(double a12, int outmask) {
return Position(true, a12, outmask);
}
/**
* The general position function. {@link #Position(double, int) Position}
* and {@link #ArcPosition(double, int) ArcPosition} are defined in terms of
* this function.
* <p>
* @param arcmode boolean flag determining the meaning of the second
* parameter; if arcmode is false, then the GeodesicLine object must have
* been constructed with <i>caps</i> |= {@link GeodesicMask#DISTANCE_IN}.
* @param s12_a12 if <i>arcmode</i> is false, this is the distance between
* point 1 and point 2 (meters); otherwise it is the arc length between
* point 1 and point 2 (degrees); it can be negative.
* @param outmask a bitor'ed combination of {@link GeodesicMask} values
* specifying which results should be returned.
* @return a {@link GeodesicData} object with the requested results.
* <p>
* The {@link GeodesicMask} values possible for <i>outmask</i> are
* <ul>
* <li>
* <i>outmask</i> |= GeodesicMask.LATITUDE for the latitude <i>lat2</i>.
* <li>
* <i>outmask</i> |= GeodesicMask.LONGITUDE for the latitude <i>lon2</i>.
* <li>
* <i>outmask</i> |= GeodesicMask.AZIMUTH for the latitude <i>azi2</i>.
* <li>
* <i>outmask</i> |= GeodesicMask.DISTANCE for the distance <i>s12</i>.
* <li>
* <i>outmask</i> |= GeodesicMask.REDUCEDLENGTH for the reduced length
* <i>m12</i>.
* <li>
* <i>outmask</i> |= GeodesicMask.GEODESICSCALE for the geodesic scales
* <i>M12</i> and <i>M21</i>.
* <li>
* <i>outmask</i> |= GeodesicMask.AREA for the area <i>S12</i>.
* </ul>
* <p>
* Requesting a value which the GeodesicLine object is not capable of
* computing is not an error; Double.NaN is returned instead.
**********************************************************************/
  public GeodesicData Position(boolean arcmode, double s12_a12,
                               int outmask) {
    // Restrict the requested results to those this line was constructed to
    // be capable of computing.
    outmask &= _caps & GeodesicMask.OUT_ALL;
    GeodesicData r = new GeodesicData();
    if (!( Init() &&
           (arcmode ||
            (_caps & GeodesicMask.DISTANCE_IN & GeodesicMask.OUT_ALL) != 0) ))
      // Uninitialized or impossible distance calculation requested
      return r;
    // Echo the starting point back in the result object.
    r.lat1 = _lat1; r.lon1 = _lon1; r.azi1 = _azi1;
    // Avoid warning about uninitialized B12.
    double sig12, ssig12, csig12, B12 = 0, AB1 = 0;
    if (arcmode) {
      // Interpret s12_a12 as spherical arc length
      r.a12 = s12_a12;
      sig12 = s12_a12 * GeoMath.degree;
      // Reduce |s12_a12| mod 180 so that exact multiples of 90 degrees give
      // exact zeros for sin/cos below.
      double s12a = Math.abs(s12_a12);
      s12a -= 180 * Math.floor(s12a / 180);
      ssig12 = s12a == 0 ? 0 : Math.sin(sig12);
      csig12 = s12a == 90 ? 0 : Math.cos(sig12);
    } else {
      // Interpret s12_a12 as distance
      r.s12 = s12_a12;
      double
        tau12 = s12_a12 / (_b * (1 + _A1m1)),
        s = Math.sin(tau12),
        c = Math.cos(tau12);
      // tau2 = tau1 + tau12
      B12 = - Geodesic.SinCosSeries(true,
                                    _stau1 * c + _ctau1 * s,
                                    _ctau1 * c - _stau1 * s,
                                    _C1pa);
      sig12 = tau12 - (B12 - _B11);
      r.a12 = sig12 / GeoMath.degree;
      ssig12 = Math.sin(sig12); csig12 = Math.cos(sig12);
      if (Math.abs(_f) > 0.01) {
        // Reverted distance series is inaccurate for |f| > 1/100, so correct
        // sig12 with 1 Newton iteration. The following table shows the
        // approximate maximum error for a = WGS_a() and various f relative to
        // GeodesicExact.
        //     erri  = the error in the inverse solution (nm)
        //     errd  = the error in the direct solution (series only) (nm)
        //     errda = the error in the direct solution (series + 1 Newton) (nm)
        //
        //        f      erri   errd  errda
        //      -1/5     12e6  1.2e9   69e6
        //      -1/10   123e3   12e6  765e3
        //      -1/20    1110  108e3   7155
        //      -1/50   18.63  200.9  27.12
        //      -1/100  18.63  23.78  23.37
        //      -1/150  18.63  21.05  20.26
        //       1/150  22.35  24.73  25.83
        //       1/100  22.35  25.03  25.31
        //       1/50   29.80  231.9  30.44
        //       1/20    5376  146e3   10e3
        //       1/10   829e3   22e6  1.5e6
        //       1/5    157e6  3.8e9  280e6
        double
          ssig2 = _ssig1 * csig12 + _csig1 * ssig12,
          csig2 = _csig1 * csig12 - _ssig1 * ssig12;
        B12 = Geodesic.SinCosSeries(true, ssig2, csig2, _C1a);
        double serr = (1 + _A1m1) * (sig12 + (B12 - _B11)) - s12_a12 / _b;
        sig12 = sig12 - serr / Math.sqrt(1 + _k2 * GeoMath.sq(ssig2));
        ssig12 = Math.sin(sig12); csig12 = Math.cos(sig12);
        // Update B12 below
      }
    }
    // Compute quantities at point 2 on the auxiliary sphere.
    double omg12, lam12, lon12;
    double ssig2, csig2, sbet2, cbet2, somg2, comg2, salp2, calp2;
    // sig2 = sig1 + sig12
    ssig2 = _ssig1 * csig12 + _csig1 * ssig12;
    csig2 = _csig1 * csig12 - _ssig1 * ssig12;
    double dn2 = Math.sqrt(1 + _k2 * GeoMath.sq(ssig2));
    if ((outmask & (GeodesicMask.DISTANCE | GeodesicMask.REDUCEDLENGTH |
                    GeodesicMask.GEODESICSCALE)) != 0) {
      // Recompute B12 at sig2 if it was not already computed above.
      if (arcmode || Math.abs(_f) > 0.01)
        B12 = Geodesic.SinCosSeries(true, ssig2, csig2, _C1a);
      AB1 = (1 + _A1m1) * (B12 - _B11);
    }
    // sin(bet2) = cos(alp0) * sin(sig2)
    sbet2 = _calp0 * ssig2;
    // Alt: cbet2 = hypot(csig2, salp0 * ssig2);
    cbet2 = GeoMath.hypot(_salp0, _calp0 * csig2);
    if (cbet2 == 0)
      // I.e., salp0 = 0, csig2 = 0. Break the degeneracy in this case
      cbet2 = csig2 = Geodesic.tiny_;
    // tan(omg2) = sin(alp0) * tan(sig2)
    somg2 = _salp0 * ssig2; comg2 = csig2; // No need to normalize
    // tan(alp0) = cos(sig2)*tan(alp2)
    salp2 = _salp0; calp2 = _calp0 * csig2; // No need to normalize
    // omg12 = omg2 - omg1
    omg12 = Math.atan2(somg2 * _comg1 - comg2 * _somg1,
                       comg2 * _comg1 + somg2 * _somg1);
    // In arcmode the distance was not set above; compute it from sig12 now.
    if ((outmask & GeodesicMask.DISTANCE) != 0 && arcmode)
      r.s12 = _b * ((1 + _A1m1) * sig12 + AB1);
    if ((outmask & GeodesicMask.LONGITUDE) != 0) {
      lam12 = omg12 + _A3c *
        ( sig12 + (Geodesic.SinCosSeries(true, ssig2, csig2, _C3a)
                   - _B31));
      lon12 = lam12 / GeoMath.degree;
      // Use GeoMath.AngNormalize2 because longitude might have wrapped
      // multiple times.
      lon12 = GeoMath.AngNormalize2(lon12);
      r.lon2 = GeoMath.AngNormalize(_lon1 + lon12);
    }
    if ((outmask & GeodesicMask.LATITUDE) != 0)
      r.lat2 = Math.atan2(sbet2, _f1 * cbet2) / GeoMath.degree;
    if ((outmask & GeodesicMask.AZIMUTH) != 0)
      // minus signs give range [-180, 180). 0- converts -0 to +0.
      r.azi2 = 0 - Math.atan2(-salp2, calp2) / GeoMath.degree;
    if ((outmask &
         (GeodesicMask.REDUCEDLENGTH | GeodesicMask.GEODESICSCALE)) != 0) {
      double
        B22 = Geodesic.SinCosSeries(true, ssig2, csig2, _C2a),
        AB2 = (1 + _A2m1) * (B22 - _B21),
        J12 = (_A1m1 - _A2m1) * sig12 + (AB1 - AB2);
      if ((outmask & GeodesicMask.REDUCEDLENGTH) != 0)
        // Add parens around (_csig1 * ssig2) and (_ssig1 * csig2) to ensure
        // accurate cancellation in the case of coincident points.
        r.m12 = _b * ((dn2 * (_csig1 * ssig2) - _dn1 * (_ssig1 * csig2))
                      - _csig1 * csig2 * J12);
      if ((outmask & GeodesicMask.GEODESICSCALE) != 0) {
        double t = _k2 * (ssig2 - _ssig1) * (ssig2 + _ssig1) / (_dn1 + dn2);
        r.M12 = csig12 + (t * ssig2 - csig2 * J12) * _ssig1 / _dn1;
        r.M21 = csig12 - (t * _ssig1 - _csig1 * J12) * ssig2 / dn2;
      }
    }
    if ((outmask & GeodesicMask.AREA) != 0) {
      double
        B42 = Geodesic.SinCosSeries(false, ssig2, csig2, _C4a);
      double salp12, calp12;
      if (_calp0 == 0 || _salp0 == 0) {
        // alp12 = alp2 - alp1, used in atan2 so no need to normalized
        salp12 = salp2 * _calp1 - calp2 * _salp1;
        calp12 = calp2 * _calp1 + salp2 * _salp1;
        // The right thing appears to happen if alp1 = +/-180 and alp2 = 0, viz
        // salp12 = -0 and alp12 = -180. However this depends on the sign
        // being attached to 0 correctly. The following ensures the correct
        // behavior.
        if (salp12 == 0 && calp12 < 0) {
          salp12 = Geodesic.tiny_ * _calp1;
          calp12 = -1;
        }
      } else {
        // tan(alp) = tan(alp0) * sec(sig)
        // tan(alp2-alp1) = (tan(alp2) -tan(alp1)) / (tan(alp2)*tan(alp1)+1)
        // = calp0 * salp0 * (csig1-csig2) / (salp0^2 + calp0^2 * csig1*csig2)
        // If csig12 > 0, write
        // csig1 - csig2 = ssig12 * (csig1 * ssig12 / (1 + csig12) + ssig1)
        // else
        // csig1 - csig2 = csig1 * (1 - csig12) + ssig12 * ssig1
        // No need to normalize
        salp12 = _calp0 * _salp0 *
          (csig12 <= 0 ? _csig1 * (1 - csig12) + ssig12 * _ssig1 :
           ssig12 * (_csig1 * ssig12 / (1 + csig12) + _ssig1));
        calp12 = GeoMath.sq(_salp0) + GeoMath.sq(_calp0) * _csig1 * csig2;
      }
      r.S12 = _c2 * Math.atan2(salp12, calp12) + _A4 * (B42 - _B41);
    }
    return r;
  }
/**
* @return true if the object has been initialized.
**********************************************************************/
private boolean Init() { return _caps != 0; }
/**
* @return <i>lat1</i> the latitude of point 1 (degrees).
**********************************************************************/
public double Latitude()
{ return Init() ? _lat1 : Double.NaN; }
/**
* @return <i>lon1</i> the longitude of point 1 (degrees).
**********************************************************************/
public double Longitude()
{ return Init() ? _lon1 : Double.NaN; }
/**
* @return <i>azi1</i> the azimuth (degrees) of the geodesic line at point 1.
**********************************************************************/
public double Azimuth()
{ return Init() ? _azi1 : Double.NaN; }
/**
* @return <i>azi0</i> the azimuth (degrees) of the geodesic line as it
* crosses the equator in a northward direction.
**********************************************************************/
public double EquatorialAzimuth() {
return Init() ?
Math.atan2(_salp0, _calp0) / GeoMath.degree : Double.NaN;
}
/**
* @return <i>a1</i> the arc length (degrees) between the northward
* equatorial crossing and point 1.
**********************************************************************/
public double EquatorialArc() {
return Init() ?
Math.atan2(_ssig1, _csig1) / GeoMath.degree : Double.NaN;
}
/**
* @return <i>a</i> the equatorial radius of the ellipsoid (meters). This is
* the value inherited from the Geodesic object used in the constructor.
**********************************************************************/
public double MajorRadius()
{ return Init() ? _a : Double.NaN; }
/**
* @return <i>f</i> the flattening of the ellipsoid. This is the value
* inherited from the Geodesic object used in the constructor.
**********************************************************************/
public double Flattening()
{ return Init() ? _f : Double.NaN; }
/**
* @return <i>caps</i> the computational capabilities that this object was
* constructed with. LATITUDE and AZIMUTH are always included.
**********************************************************************/
public int Capabilities() { return _caps; }
/**
* @param testcaps a set of bitor'ed {@link GeodesicMask} values.
* @return true if the GeodesicLine object has all these capabilities.
**********************************************************************/
public boolean Capabilities(int testcaps) {
testcaps &= GeodesicMask.OUT_ALL;
return (_caps & testcaps) == testcaps;
}
}
| |
package edu.ucsf.lava.crms.assessment.model;
import java.util.Date;
import org.springframework.util.StringUtils;
import edu.ucsf.lava.core.model.EntityBase;
import edu.ucsf.lava.core.model.EntityManager;
import edu.ucsf.lava.crms.assessment.controller.CalcUtils;
import edu.ucsf.lava.crms.people.model.Patient;
import edu.ucsf.lava.crms.scheduling.model.Visit;
/**
 * UDS FTLD Module instrument for NACC form C1F (neuropsychological test
 * scores). Holds the raw item scores plus the derived totals
 * ({@code ftdsemsu}, {@code ftdanats}, {@code ftdratio}). Several items
 * were removed in FTLD Module version 3 and are only collected and
 * uploaded for version 2.
 */
public class UdsFtldNeuropsych extends UdsInstrument {
    public static EntityManager MANAGER = new EntityBase.Manager(UdsFtldNeuropsych.class);
    public static final String UDS_FTLD_NEUROPSYCH_FORMID = "C1F";

    // Code stored in fields that do not apply to the instrument version in use.
    private static final short UNUSED_FIELD_CODE = (short) -8;
    // FTDRATIO code: ratio cannot be calculated (error/missing inputs).
    private static final float RATIO_CANNOT_CALCULATE = (float) -5.0;
    // FTDRATIO code: one or both word counts were zero, so the ratio is undefined.
    private static final float RATIO_ZERO_COUNT = (float) 88.88;

    /** No-arg constructor (required for persistence/instantiation by the framework). */
    public UdsFtldNeuropsych() {}

    /** Constructor for adding new instruments. Do instrument-specific initialization here. */
    public UdsFtldNeuropsych(Patient p, Visit v, String projName, String instrType, Date dcDate, String dcStatus) {
        super(p, v, projName, instrType, dcDate, dcStatus);
        this.setFormId(UDS_FTLD_NEUROPSYCH_FORMID);
    }

    // note: id inherited from Instrument
    protected Short ftdbentc; // delete in FTLD Module v. 3
    protected Short ftdverfc; // delete in FTLD Module v. 3
    protected Short ftdverfn; // delete in FTLD Module v. 3
    protected Short ftdvernf; // delete in FTLD Module v. 3
    protected Short ftdverlc; // delete in FTLD Module v. 3
    protected Short ftdverlr; // delete in FTLD Module v. 3
    protected Short ftdverln; // delete in FTLD Module v. 3
    protected Short ftdvertn; // delete in FTLD Module v. 3
    protected Short ftdverte; // delete in FTLD Module v. 3
    protected Short ftdverti; // delete in FTLD Module v. 3
    protected Short ftdworrc;
    protected Short ftdworrs;
    protected Short ftdworrr;
    protected Short ftdworic;
    protected Short ftdworis;
    protected Short ftdworir;
    protected Short ftdworip;
    protected Short ftdbentd; // delete in FTLD Module v. 3
    protected Short ftdbenrs; // delete in FTLD Module v. 3
    protected Short ftdsemmt;
    protected Short ftdsemaa;
    protected Short ftdsemta;
    protected Short ftdsemsu; // calculated: ftdsemaa + ftdsemta
    protected Short ftdanasw;
    protected Short ftdanaow;
    protected Short ftdanats; // calculated: ftdanasw + ftdanaow
    protected Short ftdsenas;
    protected Short ftdsenos;
    protected Short ftdsensr;
    protected Short ftdsenpr;
    protected Short ftdnounc;
    protected Short ftdverbc;
    protected Float ftdratio; // calculated: ftdnounc / ftdverbc
    protected Short ftdreaas;
    protected Short ftdreaos;
    protected Short ftdreasr;
    protected Short ftdreapr;

    // --- boilerplate bean accessors ---
    public Short getFtdbentc() { return ftdbentc; }
    public void setFtdbentc(Short ftdbentc) { this.ftdbentc = ftdbentc; }
    public Short getFtdverfc() { return ftdverfc; }
    public void setFtdverfc(Short ftdverfc) { this.ftdverfc = ftdverfc; }
    public Short getFtdverfn() { return ftdverfn; }
    public void setFtdverfn(Short ftdverfn) { this.ftdverfn = ftdverfn; }
    public Short getFtdvernf() { return ftdvernf; }
    public void setFtdvernf(Short ftdvernf) { this.ftdvernf = ftdvernf; }
    public Short getFtdverlc() { return ftdverlc; }
    public void setFtdverlc(Short ftdverlc) { this.ftdverlc = ftdverlc; }
    public Short getFtdverlr() { return ftdverlr; }
    public void setFtdverlr(Short ftdverlr) { this.ftdverlr = ftdverlr; }
    public Short getFtdverln() { return ftdverln; }
    public void setFtdverln(Short ftdverln) { this.ftdverln = ftdverln; }
    public Short getFtdvertn() { return ftdvertn; }
    public void setFtdvertn(Short ftdvertn) { this.ftdvertn = ftdvertn; }
    public Short getFtdverte() { return ftdverte; }
    public void setFtdverte(Short ftdverte) { this.ftdverte = ftdverte; }
    public Short getFtdverti() { return ftdverti; }
    public void setFtdverti(Short ftdverti) { this.ftdverti = ftdverti; }
    public Short getFtdworrc() { return ftdworrc; }
    public void setFtdworrc(Short ftdworrc) { this.ftdworrc = ftdworrc; }
    public Short getFtdworrs() { return ftdworrs; }
    public void setFtdworrs(Short ftdworrs) { this.ftdworrs = ftdworrs; }
    public Short getFtdworrr() { return ftdworrr; }
    public void setFtdworrr(Short ftdworrr) { this.ftdworrr = ftdworrr; }
    public Short getFtdworic() { return ftdworic; }
    public void setFtdworic(Short ftdworic) { this.ftdworic = ftdworic; }
    public Short getFtdworis() { return ftdworis; }
    public void setFtdworis(Short ftdworis) { this.ftdworis = ftdworis; }
    public Short getFtdworir() { return ftdworir; }
    public void setFtdworir(Short ftdworir) { this.ftdworir = ftdworir; }
    public Short getFtdworip() { return ftdworip; }
    public void setFtdworip(Short ftdworip) { this.ftdworip = ftdworip; }
    public Short getFtdbentd() { return ftdbentd; }
    public void setFtdbentd(Short ftdbentd) { this.ftdbentd = ftdbentd; }
    public Short getFtdbenrs() { return ftdbenrs; }
    public void setFtdbenrs(Short ftdbenrs) { this.ftdbenrs = ftdbenrs; }
    public Short getFtdsemmt() { return ftdsemmt; }
    public void setFtdsemmt(Short ftdsemmt) { this.ftdsemmt = ftdsemmt; }
    public Short getFtdsemaa() { return ftdsemaa; }
    public void setFtdsemaa(Short ftdsemaa) { this.ftdsemaa = ftdsemaa; }
    public Short getFtdsemta() { return ftdsemta; }
    public void setFtdsemta(Short ftdsemta) { this.ftdsemta = ftdsemta; }
    public Short getFtdsemsu() { return ftdsemsu; }
    public void setFtdsemsu(Short ftdsemsu) { this.ftdsemsu = ftdsemsu; }
    public Short getFtdanasw() { return ftdanasw; }
    public void setFtdanasw(Short ftdanasw) { this.ftdanasw = ftdanasw; }
    public Short getFtdanaow() { return ftdanaow; }
    public void setFtdanaow(Short ftdanaow) { this.ftdanaow = ftdanaow; }
    public Short getFtdanats() { return ftdanats; }
    public void setFtdanats(Short ftdanats) { this.ftdanats = ftdanats; }
    public Short getFtdsenas() { return ftdsenas; }
    public void setFtdsenas(Short ftdsenas) { this.ftdsenas = ftdsenas; }
    public Short getFtdsenos() { return ftdsenos; }
    public void setFtdsenos(Short ftdsenos) { this.ftdsenos = ftdsenos; }
    public Short getFtdsensr() { return ftdsensr; }
    public void setFtdsensr(Short ftdsensr) { this.ftdsensr = ftdsensr; }
    public Short getFtdsenpr() { return ftdsenpr; }
    public void setFtdsenpr(Short ftdsenpr) { this.ftdsenpr = ftdsenpr; }
    public Short getFtdnounc() { return ftdnounc; }
    public void setFtdnounc(Short ftdnounc) { this.ftdnounc = ftdnounc; }
    public Short getFtdverbc() { return ftdverbc; }
    public void setFtdverbc(Short ftdverbc) { this.ftdverbc = ftdverbc; }
    public Float getFtdratio() { return ftdratio; }
    public void setFtdratio(Float ftdratio) { this.ftdratio = ftdratio; }
    public Short getFtdreaas() { return ftdreaas; }
    public void setFtdreaas(Short ftdreaas) { this.ftdreaas = ftdreaas; }
    public Short getFtdreaos() { return ftdreaos; }
    public void setFtdreaos(Short ftdreaos) { this.ftdreaos = ftdreaos; }
    public Short getFtdreasr() { return ftdreasr; }
    public void setFtdreasr(Short ftdreasr) { this.ftdreasr = ftdreasr; }
    public Short getFtdreapr() { return ftdreapr; }
    public void setFtdreapr(Short ftdreapr) { this.ftdreapr = ftdreapr; }

    /**
     * Returns the result fields required for the given instrument version.
     * Version "2" additionally requires the items deleted in FTLD Module
     * v. 3. Calculated fields are intentionally excluded.
     *
     * @param version the FTLD Module version ("2" or "3")
     * @return the field names that must have values
     */
    public String[] getRequiredResultFields(String version) {
        String[] required;
        required = new String[] {
            "ftdworrc",
            "ftdworrs",
            "ftdworrr",
            "ftdworic",
            "ftdworis",
            "ftdworir",
            "ftdworip",
            "ftdsemmt",
            "ftdsemaa",
            "ftdsemta",
            "ftdanasw",
            "ftdanaow",
            "ftdsenas",
            "ftdsenos",
            "ftdsensr",
            "ftdsenpr",
            "ftdnounc",
            "ftdverbc",
            "ftdreaas",
            "ftdreaos",
            "ftdreasr",
            "ftdreapr"
        };
        // Constant-first comparison avoids an NPE if version is null.
        if ("2".equals(version)) {
            // fields deleted in FTLD Module 3
            required = StringUtils.concatenateStringArrays(required, new String[]{
                "ftdbentc",
                "ftdverfc",
                "ftdverfn",
                "ftdvernf",
                "ftdverlc",
                "ftdverlr",
                "ftdverln",
                "ftdvertn",
                "ftdverte",
                "ftdverti",
                "ftdbentd",
                "ftdbenrs"});
        }
        return required;
    }

    /** Marks the items dropped in FTLD Module v. 3 with the unused-field code. */
    public void markUnusedFields(String version) {
        if ("3".equals(version)) {
            this.ftdbentc = this.ftdverfc = this.ftdverfn = this.ftdvernf = this.ftdverlc = this.ftdverlr = this.ftdverln
                = this.ftdvertn = this.ftdverte = this.ftdverti = this.ftdbentd = this.ftdbenrs = UNUSED_FIELD_CODE;
        }
    }

    /**
     * Builds the comma-delimited record for NACC upload. Version "2"
     * records include the items deleted in FTLD Module v. 3.
     *
     * @return the CSV record for this instrument
     */
    public String getUdsUploadCsvRecord() {
        StringBuffer buffer = UdsUploadUtils.getCommonFields(this);
        // Constant-first comparison avoids an NPE if the version is unset.
        boolean version2 = "2".equals(getInstrVer());
        if (version2) {
            buffer.append(UdsUploadUtils.formatField(getFtdbentc())).append(",");
            buffer.append(UdsUploadUtils.formatField(getFtdverfc())).append(",");
            buffer.append(UdsUploadUtils.formatField(getFtdverfn())).append(",");
            buffer.append(UdsUploadUtils.formatField(getFtdvernf())).append(",");
            buffer.append(UdsUploadUtils.formatField(getFtdverlc())).append(",");
            buffer.append(UdsUploadUtils.formatField(getFtdverlr())).append(",");
            buffer.append(UdsUploadUtils.formatField(getFtdverln())).append(",");
            buffer.append(UdsUploadUtils.formatField(getFtdvertn())).append(",");
            buffer.append(UdsUploadUtils.formatField(getFtdverte())).append(",");
            buffer.append(UdsUploadUtils.formatField(getFtdverti())).append(",");
        }
        buffer.append(UdsUploadUtils.formatField(getFtdworrc())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdworrs())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdworrr())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdworic())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdworis())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdworir())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdworip())).append(",");
        if (version2) {
            buffer.append(UdsUploadUtils.formatField(getFtdbentd())).append(",");
            buffer.append(UdsUploadUtils.formatField(getFtdbenrs())).append(",");
        }
        buffer.append(UdsUploadUtils.formatField(getFtdsemmt())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdsemaa())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdsemta())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdsemsu())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdanasw())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdanaow())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdanats())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdsenas())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdsenos())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdsensr())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdsenpr())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdnounc())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdverbc())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdratio())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdreaas())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdreaos())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdreasr())).append(",");
        buffer.append(UdsUploadUtils.formatField(getFtdreapr()));
        return buffer.toString();
    }

    /**
     * Recomputes the derived totals: ftdsemsu (= ftdsemaa + ftdsemta),
     * ftdanats (= ftdanasw + ftdanaow), and ftdratio (= ftdnounc / ftdverbc,
     * with special codes for zero counts and uncalculable inputs).
     */
    @Override
    public void updateCalculatedFields() {
        super.updateCalculatedFields();
        // 95-98 are NACC error codes that CalcUtils.add propagates rather than sums.
        // NOTE(review): null guards added in case CalcUtils.add returns null for
        // missing inputs; previously this would have thrown an NPE.
        Number semSum = CalcUtils.add(new Short[] {this.ftdsemaa, this.ftdsemta}, new Short[]{95,96,97,98});
        this.ftdsemsu = (semSum == null) ? null : semSum.shortValue();
        Number anaSum = CalcUtils.add(new Short[] {this.ftdanasw, this.ftdanaow}, new Short[]{95,96,97,98});
        this.ftdanats = (anaSum == null) ? null : anaSum.shortValue();
        if (this.ftdnounc != null && this.ftdverbc != null) {
            if (this.ftdnounc > 0 && this.ftdnounc <= 16 && this.ftdverbc > 0 && this.ftdverbc <= 16) {
                this.ftdratio = this.ftdnounc.floatValue() / this.ftdverbc.floatValue();
            }
            else if (this.ftdnounc.equals((short)0) || this.ftdverbc.equals((short)0)) {
                this.ftdratio = RATIO_ZERO_COUNT;
            }
            else {
                // if either or both noun and verb are NACC (95-98) or MAC (-6 to -9) error codes
                // set it to Cannot Calculate code
                this.ftdratio = RATIO_CANNOT_CALCULATE;
            }
        }
        else {
            this.ftdratio = RATIO_CANNOT_CALCULATE;
        }
    }
}
| |
package WriterImplementation;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import Exception.DBManagerException;
import Exception.VariableManagerException;
import Model.DatabaseConfig;
import Model.Variable;
import WriterInterface.GenericWriterInterface;
/**
 * Singleton {@link GenericWriterInterface} implementation that generates PHP
 * source code via {@link FileWriterImpl}. Each method appends a PHP fragment
 * (request-input handling, PDO database access, transactions, output) to the
 * generated file identified by {@code path}.
 */
public class PHPGenericWriterImpl implements GenericWriterInterface {
    private static PHPGenericWriterImpl _genericWriterImpl;

    /** Private constructor; guards against reflective double-instantiation. */
    private PHPGenericWriterImpl() {
        if (_genericWriterImpl != null) {
            throw new InstantiationError("More instances of this object cannot be created.");
        }
    }

    private synchronized static void createInstance() {
        if (_genericWriterImpl == null) {
            _genericWriterImpl = new PHPGenericWriterImpl();
        }
    }

    /**
     * Returns the singleton instance. (The historical "getInstace" spelling
     * is kept because it is the established API name.)
     */
    public static GenericWriterInterface getInstace() {
        if (_genericWriterImpl == null) {
            createInstance();
        }
        return _genericWriterImpl;
    }

    /**
     * Opens (creating if necessary) the generated file at {@code path}.
     * Replaces the identical try/catch block previously duplicated in every
     * writer method.
     *
     * @return the file, or null if it could not be created (failure is logged)
     */
    private static File openTarget(String path) {
        try {
            return FileWriterImpl.getInstace().fileCreate(path);
        } catch (IOException ex) {
            Logger.getLogger(PHPGenericWriterImpl.class.getName()).log(Level.SEVERE, null, ex);
            return null;
        }
    }

    /** Deletes any previous file at {@code path} and writes the PHP opening tag. */
    @Override
    public void header(String path) {
        FileWriterImpl.getInstace().fileDelete(path);
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        FileWriterImpl.getInstace().writeLine("<?php ", f);
    }

    /** Emits PHP that copies each $_GET parameter into a local variable. */
    @Override
    public void getInputVars(String path, List<String> input) throws VariableManagerException {
        printInputVars(path, input, "_GET");
    }

    /** Emits PHP that copies each $_POST parameter into a local variable. */
    @Override
    public void postInputVars(String path, List<String> input) throws VariableManagerException {
        printInputVars(path, input, "_POST");
    }

    /**
     * Emits PHP copying each request variable of the given superglobal into a
     * local, and registers each as a known variable for {@code path}.
     */
    private void printInputVars(String path, List<String> input, String method) throws VariableManagerException {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        FileWriterImpl.getInstace().writeLine("if(" + method + "){", f);
        for (String var : input) {
            String line = "\t$" + var + " = $" + method + "[\"" + var + "\"];";
            FileWriterImpl.getInstace().writeLine(line, f);
            VariableManagerImpl.getInstace().addVariable(var, path);
        }
        FileWriterImpl.getInstace().writeLine("}", f);
    }

    /**
     * Writes a PHP config file holding the connection details and registers
     * the corresponding {@link DatabaseConfig} with the DB manager.
     * Bug fix: each generated assignment previously opened a PHP string
     * literal without closing it (e.g. {@code $_db_config_host="localhost;}),
     * producing unparsable PHP; the closing quote is now emitted.
     */
    @Override
    public void addDatabase(String alias, String host, String port, String user, String password, String databaseName) throws DBManagerException {
        String filename = "config/db_" + alias + ".php";
        header(filename);
        File f = openTarget(filename);
        if (f == null) {
            return;
        }
        FileWriterImpl.getInstace().writeLine("$_db_config_host=\"" + host + "\";", f);
        FileWriterImpl.getInstace().writeLine("$_db_config_port=\"" + port + "\";", f);
        FileWriterImpl.getInstace().writeLine("$_db_config_user=\"" + user + "\";", f);
        FileWriterImpl.getInstace().writeLine("$_db_config_password=\"" + password + "\";", f);
        FileWriterImpl.getInstace().writeLine("$_db_config_databaseName=\"" + databaseName + "\";", f);
        EOF(filename);
        DatabaseConfig databaseConfig = new DatabaseConfig();
        databaseConfig.setAlias(alias);
        databaseConfig.setConfigFileName(filename);
        databaseConfig.setDatabaseName(databaseName);
        databaseConfig.setPassword(password);
        databaseConfig.setPort(port);
        databaseConfig.setURL(host);
        databaseConfig.setUser(user);
        DBManagerImpl.getInstace().addDatabaseConfig(databaseConfig);
    }

    /**
     * Emits the include of the connection config followed by a PDO
     * connection. Bug fixes: the DSN is now a double-quoted PHP string
     * (single-quoted strings do not interpolate {@code $_db_config_*}
     * variables), and the handle is named {@code $dbn}, the name the
     * transaction and query generators reference (it was {@code $dbh}).
     */
    @Override
    public void useDatabase(String alias, String path) {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        // get database config
        DatabaseConfig db = DBManagerImpl.getInstace().getDatabaseConfig(alias);
        // include of connection details
        FileWriterImpl.getInstace().writeLine("include '" + db.getConfigFileName() + "';", f);
        // PDO connection to database
        String phpCode = "/* Connect to an ODBC database using driver invocation */\n" +
                "$_dsn = \"mysql:dbname=$_db_config_databaseName;host=$_db_config_host\";\n" +
                "\n" +
                "try {\n" +
                "    $dbn = new PDO($_dsn, $_db_config_user, $_db_config_password);\n" +
                "} catch (PDOException $e) {\n" +
                "    echo 'Connection failed: ' . $e->getMessage();\n" +
                "}";
        FileWriterImpl.getInstace().writeLine(phpCode, f);
    }

    /** Emits the opening of a PHP try block and {@code $dbn->beginTransaction()}. */
    @Override
    public void beginTransaction(String path) {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        String phpCode = "try {\n" +
                "    $dbn->beginTransaction();\n";
        FileWriterImpl.getInstace().writeLine(phpCode, f);
    }

    /** Emits the commit and the catch/rollback closing the transaction try block. */
    @Override
    public void endTransaction(String path) {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        String phpCode = "    $dbn->commit();\n" +
                "} catch(PDOException $ex) {\n" +
                "    //Something went wrong rollback!\n" +
                "    $dbn->rollBack();\n" +
                "    echo $ex->getMessage();\n" +
                "}";
        FileWriterImpl.getInstace().writeLine(phpCode, f);
    }

    /* (non-Javadoc)
     * example of query
     * $stmt = $db->prepare("UPDATE table SET name=? WHERE id=?");
     * $stmt->execute(array($name, $id));
     * $affected_rows = $stmt->rowCount();
     * @see WriterInterface.GenericWriterInterface#executeSqlQuery(java.lang.String, java.lang.String, java.util.List)
     */
    /**
     * Emits PHP that prepares and executes {@code query}, binding the given
     * parameters positionally. Bug fixes: removed a premature
     * {@code $dbn->query($sqlQuery);} line that referenced {@code $sqlQuery}
     * before it was defined (and executed the query twice), and the parameter
     * counter is now incremented so separating commas are emitted.
     */
    @Override
    public void executeSqlQuery(String path, String query, List<Variable> queryParameters) {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        String phpCode = "try {\n" +
                "$sqlQuery=\"" + query + "\"; \n" +
                " $stmt = $dbn->prepare($sqlQuery); \n";
        boolean useArrayOfParameters = queryParameters != null && !queryParameters.isEmpty();
        if (useArrayOfParameters) {
            phpCode += "$stmt->execute(array(";
            int paramNumber = 0;
            for (Variable variable : queryParameters) {
                if (paramNumber != 0) {
                    phpCode += ", ";
                }
                phpCode += "$" + variable.getName() + " ";
                paramNumber++; // bug fix: counter was never incremented
            }
            phpCode += "));\n";
        } else {
            phpCode += "$stmt->execute();";
        }
        phpCode += "} catch(PDOException $ex) {\n" +
                " echo \"An Error occured!\"; \n" +
                "}";
        FileWriterImpl.getInstace().writeLine(phpCode, f);
    }

    /**
     * Executes the query and emits PHP storing the full result set (an
     * associative-array fetchAll) into {@code $variableName}.
     */
    @Override
    public void executeSqlQueryAndGetResultInVariable(String path, String query, List<Variable> queryParameters, String variableName) throws VariableManagerException {
        executeSqlQuery(path, query, queryParameters);
        File f = openTarget(path);
        if (f != null) {
            FileWriterImpl.getInstace().writeLine("$" + variableName + "= $stmt->fetchAll(PDO::FETCH_ASSOC);", f);
        }
        VariableManagerImpl.getInstace().addVariable(variableName, path);
    }

    /**
     * Executes the update and emits PHP storing the affected-row count into
     * {@code $resultVariableName}.
     */
    @Override
    public void executeSqlUpdateAndGetAffectedRowsNumberIntoVariable(String path, String query, List<Variable> queryParameters, String resultVariableName) throws VariableManagerException {
        executeSqlQuery(path, query, queryParameters);
        File f = openTarget(path);
        if (f != null) {
            FileWriterImpl.getInstace().writeLine("$" + resultVariableName + "= $stmt->rowCount();", f);
        }
        VariableManagerImpl.getInstace().addVariable(resultVariableName, path);
    }

    /**
     * Emits PHP storing the row count of {@code $rowsVariableName} into
     * {@code $countResultVariableName}.
     * NOTE(review): the generated code calls {@code ->rowCount()} on the rows
     * variable; if that variable holds a fetchAll() array (as produced by
     * executeSqlQueryAndGetResultInVariable) PHP's count($var) would be
     * needed instead — confirm intended usage before changing.
     */
    @Override
    public void countResultRowsNumberAndGetResultInVariable(String path, String rowsVariableName, String countResultVariableName) throws VariableManagerException {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        String phpCode = "$" + countResultVariableName + " = $" + rowsVariableName + "->rowCount();";
        FileWriterImpl.getInstace().writeLine(phpCode, f);
        VariableManagerImpl.getInstace().addVariable(countResultVariableName, path);
    }

    /** Emits PHP storing the connection's last-inserted id into the result variable. */
    @Override
    public void getLastInsertedIdIntoVariable(String path, String resultVariableName) throws VariableManagerException {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        String phpCode = "$" + resultVariableName + " = $dbn->lastInsertId();";
        FileWriterImpl.getInstace().writeLine(phpCode, f);
        VariableManagerImpl.getInstace().addVariable(resultVariableName, path);
    }

    /** Writes the PHP closing tag. */
    @Override
    public void EOF(String path) {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        FileWriterImpl.getInstace().writeLine("?>", f);
    }

    /** Emits PHP echoing the variable as JSON. */
    @Override
    public void printVariableAsJSON(String path, String variableName) {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        FileWriterImpl.getInstace().writeLine("echo json_encode($" + variableName + ");", f);
    }

    /** Emits PHP echoing the variable XML-RPC encoded. */
    @Override
    public void printVariableAsXML(String path, String variableName) {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        FileWriterImpl.getInstace().writeLine("echo xmlrpc_encode(\"$" + variableName + "\");", f);
    }

    /**
     * Emits an assignment whose right-hand side is {@code arithmetic} with
     * each "?" placeholder replaced by the corresponding variable reference.
     */
    @Override
    public void writeArithmeticAndGetResultInVariable(String path, String arithmetic, List<Variable> variableList, String resultVariableName) {
        File f = openTarget(path);
        if (f == null) {
            return;
        }
        List<String> values = new ArrayList<>();
        for (Variable variable : variableList) {
            values.add("$" + variable.getName());
        }
        String arithmeticLine = String.format(arithmetic.replace("?", "%s"), values.toArray());
        String line = "$" + resultVariableName + "=" + arithmeticLine + ";";
        FileWriterImpl.getInstace().writeLine(line, f);
    }
}
| |
/*
* Copyright (C) 2015, BMW Car IT GmbH
*
* Author: Sebastian Mattheis <sebastian.mattheis@bmw-carit.de>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in
* writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package com.bmwcarit.barefoot.spatial;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.junit.Test;
import com.bmwcarit.barefoot.util.Triple;
import com.bmwcarit.barefoot.util.Tuple;
import com.esri.core.geometry.Geometry;
import com.esri.core.geometry.GeometryEngine;
import com.esri.core.geometry.Point;
import com.esri.core.geometry.Polyline;
import com.esri.core.geometry.WktImportFlags;
public class QuadTreeIndexTest {
private static List<Polyline> geometries() {
/*
* (p2) (p3) ----- (e1) : (p1) -> (p2) ----------------------------------------------------
* - \ / --------- (e2) : (p3) -> (p1) ----------------------------------------------------
* | (p1) | ------ (e3) : (p4) -> (p1) ----------------------------------------------------
* - / \ --------- (e4) : (p1) -> (p5) ----------------------------------------------------
* (p4) (p5) ----- (e5) : (p2) -> (p4) ----------------------------------------------------
* --------------- (e6) : (p5) -> (p3) ----------------------------------------------------
*/
String p1 = "11.3441505 48.0839963";
String p2 = "11.3421209 48.0850624";
String p3 = "11.3460348 48.0850108";
String p4 = "11.3427522 48.0832129";
String p5 = "11.3469701 48.0825356";
List<Polyline> geometries = new LinkedList<>();
geometries
.add((Polyline) GeometryEngine.geometryFromWkt("LINESTRING(" + p1 + "," + p2 + ")",
WktImportFlags.wktImportDefaults, Geometry.Type.Polyline));
geometries
.add((Polyline) GeometryEngine.geometryFromWkt("LINESTRING(" + p3 + "," + p1 + ")",
WktImportFlags.wktImportDefaults, Geometry.Type.Polyline));
geometries
.add((Polyline) GeometryEngine.geometryFromWkt("LINESTRING(" + p4 + "," + p1 + ")",
WktImportFlags.wktImportDefaults, Geometry.Type.Polyline));
geometries
.add((Polyline) GeometryEngine.geometryFromWkt("LINESTRING(" + p1 + "," + p5 + ")",
WktImportFlags.wktImportDefaults, Geometry.Type.Polyline));
geometries
.add((Polyline) GeometryEngine.geometryFromWkt("LINESTRING(" + p2 + "," + p4 + ")",
WktImportFlags.wktImportDefaults, Geometry.Type.Polyline));
geometries
.add((Polyline) GeometryEngine.geometryFromWkt("LINESTRING(" + p5 + "," + p3 + ")",
WktImportFlags.wktImportDefaults, Geometry.Type.Polyline));
return geometries;
}
@Test
public void testIndexNearest() {
    SpatialOperator spatial = new Geography();
    QuadTreeIndex index = new QuadTreeIndex();
    List<Polyline> lines = geometries();
    {
        int i = 0;
        for (Polyline line : lines) {
            index.add(i++, line);
        }
    }
    // Query the index at two different points and cross-check each result
    // against an exhaustive linear scan over all geometries.
    assertNearestMatchesLinearScan(spatial, index, lines, new Point(11.343629, 48.083797));
    assertNearestMatchesLinearScan(spatial, index, lines, new Point(11.344827, 48.083752));
}

/**
 * Asserts that {@link QuadTreeIndex#nearest(Point)} returns exactly one result and that it
 * is the same geometry a brute-force nearest-distance scan over {@code lines} finds.
 *
 * @param spatial spatial operator used to compute point-to-line distances
 * @param index index under test, already populated with {@code lines} (ids 0..n-1)
 * @param lines geometries in insertion (id) order
 * @param c query point
 */
private static void assertNearestMatchesLinearScan(SpatialOperator spatial, QuadTreeIndex index,
        List<Polyline> lines, Point c) {
    double dmin = Double.MAX_VALUE;
    int nearest = 0;
    for (int i = 0; i < lines.size(); ++i) {
        // Project the query point onto the line, then measure the distance to the projection.
        double f = spatial.intercept(lines.get(i), c);
        Point p = spatial.interpolate(lines.get(i), f);
        double d = spatial.distance(c, p);
        if (d < dmin) {
            dmin = d;
            nearest = i;
        }
    }
    Set<Tuple<Integer, Double>> points = index.nearest(c);
    assertEquals(1, points.size());
    assertEquals(nearest, (int) points.iterator().next().one());
}
@Test
public void testIndexRadius() {
    SpatialOperator spatial = new Geography();
    QuadTreeIndex index = new QuadTreeIndex();
    List<Polyline> lines = geometries();
    {
        int i = 0;
        for (Polyline line : lines) {
            index.add(i++, line);
        }
    }
    // Radius queries with shrinking radii; expected neighbor counts derived from the
    // fixed test geometry (4 lines within 50m, 1 within 10m, none within 5m).
    assertRadiusMatchesLinearScan(spatial, index, lines, new Point(11.343629, 48.083797), 50, 4);
    assertRadiusMatchesLinearScan(spatial, index, lines, new Point(11.344827, 48.083752), 10, 1);
    assertRadiusMatchesLinearScan(spatial, index, lines, new Point(11.344827, 48.083752), 5, 0);
}

/**
 * Asserts that {@link QuadTreeIndex#radius(Point, double)} returns exactly the geometries a
 * brute-force distance scan finds within radius {@code r} of point {@code c}.
 *
 * @param spatial spatial operator used to compute point-to-line distances
 * @param index index under test, already populated with {@code lines} (ids 0..n-1)
 * @param lines geometries in insertion (id) order
 * @param c query point
 * @param r search radius (same unit as {@code spatial.distance})
 * @param expected expected number of neighbors (sanity-checks the scan itself)
 */
private static void assertRadiusMatchesLinearScan(SpatialOperator spatial, QuadTreeIndex index,
        List<Polyline> lines, Point c, double r, int expected) {
    Set<Integer> neighbors = new HashSet<>();
    for (int i = 0; i < lines.size(); ++i) {
        double f = spatial.intercept(lines.get(i), c);
        Point p = spatial.interpolate(lines.get(i), f);
        double d = spatial.distance(c, p);
        if (d <= r) {
            neighbors.add(i);
        }
    }
    assertEquals(expected, neighbors.size());
    Set<Tuple<Integer, Double>> points = index.radius(c, r);
    assertEquals(neighbors.size(), points.size());
    for (Tuple<Integer, Double> point : points) {
        assertTrue(neighbors.contains(point.one()));
    }
}
@Test
public void testIndexKNearest() {
    // Real-world road segments (WKT) used as the index content for all three queries.
    final String[] wkts =
            {"LINESTRING (11.4235408 48.1010922, 11.4235077 48.101068, 11.4231626 48.1008749)",
                    "LINESTRING (11.4209648 48.0995957, 11.420596 48.0993468, 11.4203029 48.0991585)",
                    "LINESTRING (11.4234794 48.102708, 11.423487 48.1026804, 11.4235585 48.1024194, 11.4235555 48.1020172, 11.4235546 48.1018929, 11.4235942 48.1015978, 11.4236333 48.1015246)",
                    "LINESTRING (11.4342071 48.1036417, 11.434099 48.1036362)",
                    "LINESTRING (11.424749 48.1075811, 11.424589 48.1074943)",
                    "LINESTRING (11.4233874 48.10403, 11.4233392 48.1036034, 11.4233178 48.1033183, 11.423331 48.1030842, 11.4233452 48.1030028)",
                    "LINESTRING (11.426896 48.1086681, 11.4267471 48.1085396, 11.4265761 48.1084146, 11.4263784 48.1083004, 11.4261893 48.1081984, 11.4259574 48.1080886, 11.4256389 48.1079628)",
                    "LINESTRING (11.4296155 48.1040403, 11.4291788 48.104216, 11.4276585 48.1048256, 11.4267428 48.1051668, 11.4260504 48.1053754)",
                    "LINESTRING (11.4233715 48.102927, 11.4233234 48.1028774, 11.4232315 48.1028237, 11.4231667 48.1028091)",
                    "LINESTRING (11.423901 48.1067007, 11.4238735 48.1066368)",
                    "LINESTRING (11.4291276 48.1025703, 11.4283449 48.1021036)",
                    "LINESTRING (11.4196228 48.0987412, 11.419544 48.0986937, 11.4192787 48.0985466, 11.419083 48.0984455)",
                    "LINESTRING (11.4315239 48.1033907, 11.4314949 48.103268, 11.431445 48.1032298)",
                    "LINESTRING (11.4241922 48.105846, 11.4239102 48.1058577, 11.4237211 48.1058502)",
                    "LINESTRING (11.4240852 48.1070322, 11.4239786 48.1068676, 11.423901 48.1067007)",
                    "LINESTRING (11.429366 48.1026713, 11.4291276 48.1025703)",
                    "LINESTRING (11.4233715 48.102927, 11.4234794 48.102708)",
                    "LINESTRING (11.434099 48.1036362, 11.4339195 48.1036199)",
                    "LINESTRING (11.4279771 48.1021004, 11.427739 48.1021063, 11.4274622 48.1020796, 11.4272785 48.1020453)",
                    "LINESTRING (11.4203343 48.1064247, 11.4200027 48.1065159)",
                    "LINESTRING (11.4327695 48.1034661, 11.4324386 48.1034103, 11.4322638 48.1033878, 11.4320837 48.103374, 11.4317987 48.1033749, 11.4315239 48.1033907)",
                    "LINESTRING (11.4232825 48.1058113, 11.4231094 48.1057922)",
                    "LINESTRING (11.4212843 48.0998197, 11.4209648 48.0995957)",
                    "LINESTRING (11.4246926 48.1014583, 11.4242786 48.1014318, 11.4241056 48.1014114, 11.4239619 48.1013638, 11.4237734 48.1012623)",
                    "LINESTRING (11.4203029 48.0991585, 11.4202116 48.0990999)",
                    "LINESTRING (11.424589 48.1074943, 11.4245557 48.1074727, 11.4244429 48.1073825, 11.4242763 48.1072459, 11.4241703 48.107132, 11.4240852 48.1070322)",
                    "LINESTRING (11.4256389 48.1079628, 11.4252166 48.1077985)",
                    "LINESTRING (11.4296286 48.1027528, 11.4294025 48.1026865, 11.429366 48.1026713)",
                    "LINESTRING (11.423522 48.1058375, 11.4234677 48.1058286)",
                    "LINESTRING (11.4189939 48.1067931, 11.4185333 48.1069367, 11.417415 48.1073033, 11.4167637 48.1075141)",
                    "LINESTRING (11.4252269 48.1015127, 11.4250265 48.1014863, 11.4246926 48.1014583)",
                    "LINESTRING (11.4261995 48.1017542, 11.4260869 48.1017195)",
                    "LINESTRING (11.4227463 48.1057586, 11.4225241 48.1058169, 11.4222246 48.1059396)",
                    "LINESTRING (11.4315239 48.1033907, 11.4310888 48.1034897, 11.4307594 48.1035925, 11.4304346 48.1037122, 11.4299221 48.1039175)",
                    "LINESTRING (11.4272785 48.1020453, 11.4272214 48.1020346)",
                    "LINESTRING (11.4234228 48.1044161, 11.4234068 48.1042436, 11.4233968 48.1041474)",
                    "LINESTRING (11.4233968 48.1041474, 11.4233909 48.1040614, 11.4233874 48.10403)",
                    "LINESTRING (11.431288 48.1031457, 11.4309557 48.1030701)",
                    "LINESTRING (11.4283449 48.1021036, 11.4281273 48.1020855, 11.4279771 48.1021004)",
                    "LINESTRING (11.4222246 48.1059396, 11.4220781 48.1059888, 11.4213633 48.1061695)",
                    "LINESTRING (11.4346668 48.1036677, 11.4342071 48.1036417)",
                    "LINESTRING (11.4231626 48.1008749, 11.4226747 48.100628, 11.4222092 48.1003989, 11.4218485 48.1001882, 11.4212843 48.0998197)",
                    "LINESTRING (11.4246184 48.1057797, 11.4241922 48.105846)",
                    "LINESTRING (11.4237211 48.1058502, 11.4236695 48.1054044, 11.4234864 48.1048008, 11.4234491 48.1046087, 11.4234228 48.1044161)",
                    "LINESTRING (11.4254268 48.1055632, 11.4246184 48.1057797)",
                    "LINESTRING (11.4202116 48.0990999, 11.419921 48.098921)",
                    "LINESTRING (11.4260869 48.1017195, 11.425897 48.1016611, 11.4255831 48.1015849, 11.4252269 48.1015127)",
                    "LINESTRING (11.431445 48.1032298, 11.4313772 48.103178, 11.431288 48.1031457)",
                    "LINESTRING (11.4237698 48.1062692, 11.4237211 48.1058502)",
                    "LINESTRING (11.419083 48.0984455, 11.4187162 48.0983106)",
                    "LINESTRING (11.4213633 48.1061695, 11.4206946 48.1063362)",
                    "LINESTRING (11.419921 48.098921, 11.4196228 48.0987412)",
                    "LINESTRING (11.4229218 48.1057609, 11.4227463 48.1057586)",
                    "LINESTRING (11.4339195 48.1036199, 11.4337703 48.1036037, 11.4327695 48.1034661)",
                    "LINESTRING (11.4206946 48.1063362, 11.4203343 48.1064247)",
                    "LINESTRING (11.4237211 48.1058502, 11.423522 48.1058375)",
                    "LINESTRING (11.4260504 48.1053754, 11.4258914 48.1054237, 11.4254268 48.1055632)",
                    "LINESTRING (11.4266892 48.1019015, 11.4261995 48.1017542)",
                    "LINESTRING (11.4272214 48.1020346, 11.427052 48.1020029, 11.4266892 48.1019015)",
                    "LINESTRING (11.4236333 48.1015246, 11.4237734 48.1012623)",
                    "LINESTRING (11.4299221 48.1039175, 11.4296155 48.1040403)",
                    "LINESTRING (11.4272459 48.1090405, 11.426896 48.1086681)",
                    "LINESTRING (11.4238735 48.1066368, 11.4238315 48.1065376, 11.4237781 48.1063403, 11.4237698 48.1062692)",
                    "LINESTRING (11.4237734 48.1012623, 11.4235408 48.1010922)",
                    "LINESTRING (11.4187162 48.0983106, 11.4183193 48.0982035, 11.4179521 48.098084, 11.4176973 48.0979896, 11.4173972 48.0978488)",
                    "LINESTRING (11.4308462 48.1030444, 11.430253 48.1029024)",
                    "LINESTRING (11.4233452 48.1030028, 11.4233715 48.102927)",
                    "LINESTRING (11.4231094 48.1057922, 11.4229218 48.1057609)",
                    "LINESTRING (11.430253 48.1029024, 11.4296286 48.1027528)",
                    "LINESTRING (11.4309557 48.1030701, 11.4308462 48.1030444)",
                    "LINESTRING (11.4252166 48.1077985, 11.4249964 48.1077091, 11.4247926 48.1076039, 11.424749 48.1075811)",
                    "LINESTRING (11.4234677 48.1058286, 11.4232825 48.1058113)",
                    "LINESTRING (11.4200027 48.1065159, 11.4189939 48.1067931)",
                    "LINESTRING (11.4360303 48.1037204, 11.4346668 48.1036677)"};
    final SpatialOperator spatial = new Geography();
    // Three k-nearest queries, each cross-checked against a brute-force sort by distance.
    assertKNearestMatchesLinearScan(spatial, wkts, new Point(11.429859, 48.105382), 20);
    assertKNearestMatchesLinearScan(spatial, wkts, new Point(11.429859, 48.105382), 3);
    assertKNearestMatchesLinearScan(spatial, wkts, new Point(11.42096, 48.10318), 10);
}

/**
 * Builds a fresh index from {@code wkts} and asserts that
 * {@link QuadTreeIndex#knearest(Point, int)} returns the same {@code k} geometries a
 * brute-force sort by point-to-line distance finds.
 *
 * @param spatial spatial operator used to compute point-to-line distances
 * @param wkts WKT LINESTRING definitions; array position is used as geometry id
 * @param c query point
 * @param k number of nearest neighbors to request
 */
private static void assertKNearestMatchesLinearScan(final SpatialOperator spatial,
        String[] wkts, Point c, int k) {
    List<Triple<Integer, Polyline, Double>> lines = new ArrayList<>();
    QuadTreeIndex index = new QuadTreeIndex();
    int i = 0;
    for (String wkt : wkts) {
        int id = i++;
        Polyline line = (Polyline) GeometryEngine.geometryFromWkt(wkt,
                WktImportFlags.wktImportDefaults, Geometry.Type.Polyline);
        // Distance from c to its projection onto the line (the reference metric).
        double f = spatial.intercept(line, c);
        Point p = spatial.interpolate(line, f);
        double d = spatial.distance(c, p);
        lines.add(new Triple<>(id, line, d));
        index.add(id, line);
    }
    // Sort ascending by distance; Double.compare avoids the hand-rolled three-way compare.
    Collections.sort(lines, new Comparator<Triple<Integer, Polyline, Double>>() {
        @Override
        public int compare(Triple<Integer, Polyline, Double> left,
                Triple<Integer, Polyline, Double> right) {
            return Double.compare(left.three(), right.three());
        }
    });
    Set<Integer> neighbors = new HashSet<>();
    for (int j = 0; j < k; ++j) {
        neighbors.add(lines.get(j).one());
    }
    Set<Tuple<Integer, Double>> points = index.knearest(c, k);
    assertEquals(points.size(), neighbors.size());
    for (Tuple<Integer, Double> point : points) {
        assertTrue(neighbors.contains(point.one()));
    }
}
}
| |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.flowable;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.*;
import org.reactivestreams.*;
import io.reactivex.*;
import io.reactivex.Scheduler.Worker;
import io.reactivex.disposables.Disposable;
import io.reactivex.exceptions.MissingBackpressureException;
import io.reactivex.internal.disposables.*;
import io.reactivex.internal.subscriptions.SubscriptionHelper;
import io.reactivex.internal.util.BackpressureHelper;
import io.reactivex.plugins.RxJavaPlugins;
import io.reactivex.subscribers.SerializedSubscriber;
/**
 * Time-based debounce: an upstream item is relayed downstream only if the given
 * {@code timeout} elapses without a newer item arriving; every new item cancels the
 * pending emission of its predecessor. The last item before {@code onComplete} is
 * emitted immediately.
 *
 * @param <T> the value type
 */
public final class FlowableDebounceTimed<T> extends AbstractFlowableWithUpstream<T, T> {
    /** Quiet period an item must "survive" before being emitted. */
    final long timeout;
    /** Time unit of {@link #timeout}. */
    final TimeUnit unit;
    /** Scheduler supplying the worker that runs delayed emission tasks. */
    final Scheduler scheduler;

    public FlowableDebounceTimed(Flowable<T> source, long timeout, TimeUnit unit, Scheduler scheduler) {
        super(source);
        this.timeout = timeout;
        this.unit = unit;
        this.scheduler = scheduler;
    }

    @Override
    protected void subscribeActual(Subscriber<? super T> s) {
        // Serialize the downstream: signals can arrive from both the upstream thread
        // (onNext/onError/onComplete) and the worker thread (timed emissions).
        source.subscribe(new DebounceTimedSubscriber<T>(
                new SerializedSubscriber<T>(s),
                timeout, unit, scheduler.createWorker()));
    }

    /**
     * Tracks the newest upstream item and schedules its delayed emission.
     * The inherited {@link AtomicLong} value is the downstream's outstanding request count.
     */
    static final class DebounceTimedSubscriber<T> extends AtomicLong
    implements FlowableSubscriber<T>, Subscription {

        private static final long serialVersionUID = -9102637559663639004L;

        final Subscriber<? super T> downstream;
        final long timeout;
        final TimeUnit unit;
        final Scheduler.Worker worker;

        Subscription upstream;

        // Pending emission task for the most recent item (a DebounceEmitter), if any.
        Disposable timer;

        // Version counter: a scheduled emitter fires only if its captured index still
        // equals this value, i.e. no newer item arrived in the meantime.
        volatile long index;

        boolean done;

        DebounceTimedSubscriber(Subscriber<? super T> actual, long timeout, TimeUnit unit, Worker worker) {
            this.downstream = actual;
            this.timeout = timeout;
            this.unit = unit;
            this.worker = worker;
        }

        @Override
        public void onSubscribe(Subscription s) {
            if (SubscriptionHelper.validate(this.upstream, s)) {
                this.upstream = s;
                downstream.onSubscribe(this);
                // Consume the upstream unboundedly; backpressure is enforced on the
                // output side in emit(long, T, DebounceEmitter).
                s.request(Long.MAX_VALUE);
            }
        }

        @Override
        public void onNext(T t) {
            if (done) {
                return;
            }
            // Invalidate any pending emission by bumping the version counter...
            long idx = index + 1;
            index = idx;

            // ...and cancel its scheduled task, if one exists.
            Disposable d = timer;
            if (d != null) {
                d.dispose();
            }

            // Schedule this item to be emitted after the quiet period; setResource stores
            // the scheduled future so it can be cancelled by a newer item.
            DebounceEmitter<T> de = new DebounceEmitter<T>(t, idx, this);
            timer = de;
            d = worker.schedule(de, timeout, unit);
            de.setResource(d);
        }

        @Override
        public void onError(Throwable t) {
            if (done) {
                // Terminal event already signalled; route late errors to the global handler.
                RxJavaPlugins.onError(t);
                return;
            }
            done = true;
            // Drop the pending item: an error supersedes any undelivered value.
            Disposable d = timer;
            if (d != null) {
                d.dispose();
            }
            downstream.onError(t);
            worker.dispose();
        }

        @Override
        public void onComplete() {
            if (done) {
                return;
            }
            done = true;
            Disposable d = timer;
            if (d != null) {
                d.dispose();
            }
            // timer, when non-null, is always a DebounceEmitter (see onNext); emit the last
            // pending item immediately rather than waiting out its timeout.
            @SuppressWarnings("unchecked")
            DebounceEmitter<T> de = (DebounceEmitter<T>)d;
            if (de != null) {
                de.emit();
            }
            downstream.onComplete();
            worker.dispose();
        }

        @Override
        public void request(long n) {
            if (SubscriptionHelper.validate(n)) {
                // Accumulate downstream requests in the inherited AtomicLong.
                BackpressureHelper.add(this, n);
            }
        }

        @Override
        public void cancel() {
            upstream.cancel();
            worker.dispose();
        }

        /**
         * Called by an emitter whose timeout elapsed (or from onComplete); delivers the
         * value if it is still the newest one and the downstream has requested.
         */
        void emit(long idx, T t, DebounceEmitter<T> emitter) {
            // Only the most recent item may be emitted; stale emitters are ignored.
            if (idx == index) {
                long r = get();
                if (r != 0L) {
                    downstream.onNext(t);
                    BackpressureHelper.produced(this, 1);
                    emitter.dispose();
                } else {
                    // No outstanding request: debounce cannot buffer, so signal an error.
                    cancel();
                    downstream.onError(new MissingBackpressureException("Could not deliver value due to lack of requests"));
                }
            }
        }
    }

    /**
     * Scheduled task holding one upstream item; after the timeout it asks the parent to
     * emit the item if it is still the newest. Also serves as the Disposable wrapper for
     * its own scheduled future (the AtomicReference value).
     */
    static final class DebounceEmitter<T> extends AtomicReference<Disposable> implements Runnable, Disposable {

        private static final long serialVersionUID = 6812032969491025141L;

        final T value;
        final long idx;
        final DebounceTimedSubscriber<T> parent;

        // Guards against double emission: both the timer firing (run) and the parent's
        // onComplete may try to emit this value.
        final AtomicBoolean once = new AtomicBoolean();

        DebounceEmitter(T value, long idx, DebounceTimedSubscriber<T> parent) {
            this.value = value;
            this.idx = idx;
            this.parent = parent;
        }

        @Override
        public void run() {
            emit();
        }

        void emit() {
            if (once.compareAndSet(false, true)) {
                parent.emit(idx, value, this);
            }
        }

        @Override
        public void dispose() {
            DisposableHelper.dispose(this);
        }

        @Override
        public boolean isDisposed() {
            return get() == DisposableHelper.DISPOSED;
        }

        /** Stores the scheduled future so disposing this emitter cancels the task. */
        public void setResource(Disposable d) {
            DisposableHelper.replace(this, d);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.wan.wancommand;
import static org.apache.geode.distributed.ConfigurationProperties.DISTRIBUTED_SYSTEM_ID;
import static org.apache.geode.distributed.ConfigurationProperties.GROUPS;
import static org.apache.geode.distributed.ConfigurationProperties.REMOTE_LOCATORS;
import static org.apache.geode.internal.cache.wan.wancommand.WANCommandUtils.createSender;
import static org.apache.geode.internal.cache.wan.wancommand.WANCommandUtils.getMemberIdCallable;
import static org.apache.geode.internal.cache.wan.wancommand.WANCommandUtils.startSender;
import static org.apache.geode.internal.cache.wan.wancommand.WANCommandUtils.verifySenderState;
import static org.apache.geode.test.dunit.Assert.assertEquals;
import static org.apache.geode.test.dunit.Assert.assertFalse;
import static org.apache.geode.test.dunit.Assert.assertTrue;
import static org.apache.geode.test.dunit.Assert.fail;
import static org.apache.geode.test.dunit.LogWriterUtils.getLogWriter;
import static org.apache.geode.test.dunit.Wait.pause;
import java.util.List;
import java.util.Properties;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.CommandResult;
import org.apache.geode.management.internal.cli.result.TabularResultData;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.rules.ClusterStartupRule;
import org.apache.geode.test.dunit.rules.MemberVM;
import org.apache.geode.test.junit.categories.DistributedTest;
import org.apache.geode.test.junit.rules.GfshCommandRule;
/**
 * Distributed tests for the gfsh {@code stop gateway-sender} command. Each test builds a
 * two-site WAN topology (gfsh connects to site 1's locator) with gateway sender "ln" and
 * verifies the command stops the sender on exactly the targeted members.
 */
@Category(DistributedTest.class)
public class StopGatewaySenderCommandDUnitTest {

  @Rule
  public ClusterStartupRule clusterStartupRule = new ClusterStartupRule();

  @Rule
  public GfshCommandRule gfsh = new GfshCommandRule();

  private MemberVM locatorSite1;
  private MemberVM locatorSite2;
  private MemberVM server1;
  private MemberVM server2;
  private MemberVM server3;
  private MemberVM server4;
  private MemberVM server5;

  @Before
  public void before() throws Exception {
    // Site 1 locator; site 2 points back at it via remote-locators for WAN discovery.
    Properties props = new Properties();
    props.setProperty(DISTRIBUTED_SYSTEM_ID, "" + 1);
    locatorSite1 = clusterStartupRule.startLocatorVM(1, props);

    props.setProperty(DISTRIBUTED_SYSTEM_ID, "" + 2);
    props.setProperty(REMOTE_LOCATORS, "localhost[" + locatorSite1.getPort() + "]");
    locatorSite2 = clusterStartupRule.startLocatorVM(2, props);

    // Connect Gfsh to locator.
    gfsh.connectAndVerify(locatorSite1);
  }

  @Test
  public void testStopGatewaySender_ErrorConditions() throws Exception {
    Integer locator1Port = locatorSite1.getPort();

    // setup servers in Site #1 (London)
    server1 = clusterStartupRule.startServerVM(3, locator1Port);
    server1.invoke(() -> createSender("ln", 2, false, 100, 400, false, false, null, true));

    final DistributedMember server1DM = server1.invoke(getMemberIdCallable());

    // Supplying both --member and --group must be rejected with a usage error.
    String command = CliStrings.STOP_GATEWAYSENDER + " --" + CliStrings.STOP_GATEWAYSENDER__ID
        + "=ln --" + CliStrings.MEMBER + "=" + server1DM.getId() + " --" + CliStrings.GROUP
        + "=SenderGroup1";
    gfsh.executeAndAssertThat(command).statusIsError()
        .containsOutput(CliStrings.PROVIDE_EITHER_MEMBER_OR_GROUP_MESSAGE);
  }

  @Test
  public void testStopGatewaySender() throws Exception {
    Integer locator1Port = locatorSite1.getPort();

    // setup servers in Site #1 (London)
    server1 = clusterStartupRule.startServerVM(3, locator1Port);
    server2 = clusterStartupRule.startServerVM(4, locator1Port);
    server3 = clusterStartupRule.startServerVM(5, locator1Port);

    createStartAndVerifySenders(server1, server2, server3);
    pause(10000);

    // No --member/--group: the command targets every member hosting the sender.
    String command =
        CliStrings.STOP_GATEWAYSENDER + " --" + CliStrings.STOP_GATEWAYSENDER__ID + "=ln";
    CommandResult cmdResult = executeCommandWithIgnoredExceptions(command);
    verifyStopCommandSucceeded(cmdResult, "testStopGatewaySender", 3);

    verifySendersStopped(server1, server2, server3);
  }

  /**
   * test to validate that the start gateway sender starts the gateway sender on a member
   */
  @Test
  public void testStopGatewaySender_onMember() throws Exception {
    Integer locator1Port = locatorSite1.getPort();

    // setup servers in Site #1 (London)
    server1 = clusterStartupRule.startServerVM(3, locator1Port);
    createStartAndVerifySenders(server1);

    final DistributedMember server1DM = server1.invoke(getMemberIdCallable());
    pause(10000);

    String command = CliStrings.STOP_GATEWAYSENDER + " --" + CliStrings.STOP_GATEWAYSENDER__ID
        + "=ln --" + CliStrings.MEMBER + "=" + server1DM.getId();
    CommandResult cmdResult = executeCommandWithIgnoredExceptions(command);
    if (cmdResult != null) {
      String strCmdResult = cmdResult.toString();
      getLogWriter().info("testStopGatewaySender stringResult : " + strCmdResult + ">>>>");
      assertEquals(Result.Status.OK, cmdResult.getStatus());
      // Single-member form produces a message, not a tabular result.
      assertTrue(strCmdResult.contains("is stopped on member"));
    } else {
      fail("testStopGatewaySender failed as did not get CommandResult");
    }

    verifySendersStopped(server1);
  }

  /**
   * test to validate that the start gateway sender starts the gateway sender on a group of members
   */
  @Test
  public void testStopGatewaySender_Group() throws Exception {
    Integer locator1Port = locatorSite1.getPort();

    // setup servers in Site #1 (London)
    server1 = startServerWithGroups(3, "SenderGroup1", locator1Port);
    server2 = startServerWithGroups(4, "SenderGroup1", locator1Port);
    server3 = startServerWithGroups(5, "SenderGroup1", locator1Port);

    createStartAndVerifySenders(server1, server2, server3);
    pause(10000);

    String command = CliStrings.STOP_GATEWAYSENDER + " --" + CliStrings.STOP_GATEWAYSENDER__ID
        + "=ln --" + CliStrings.GROUP + "=SenderGroup1";
    CommandResult cmdResult = executeCommandWithIgnoredExceptions(command);
    verifyStopCommandSucceeded(cmdResult, "testStopGatewaySender_Group", 3);

    verifySendersStopped(server1, server2, server3);
  }

  /**
   * Test to validate the scenario gateway sender is started when one or more sender members belongs
   * to multiple groups
   */
  @Test
  public void testStopGatewaySender_MultipleGroup() throws Exception {
    Integer locator1Port = locatorSite1.getPort();

    // setup servers in Site #1 (London); server3 belongs to both targeted groups and
    // server5 to an untargeted one.
    server1 = startServerWithGroups(3, "SenderGroup1", locator1Port);
    server2 = startServerWithGroups(4, "SenderGroup1", locator1Port);
    server3 = startServerWithGroups(5, "SenderGroup1, SenderGroup2", locator1Port);
    server4 = startServerWithGroups(6, "SenderGroup2", locator1Port);
    server5 = startServerWithGroups(7, "SenderGroup3", locator1Port);

    createStartAndVerifySenders(server1, server2, server3, server4, server5);
    pause(10000);

    String command = CliStrings.STOP_GATEWAYSENDER + " --" + CliStrings.STOP_GATEWAYSENDER__ID
        + "=ln --" + CliStrings.GROUP + "=SenderGroup1,SenderGroup2";
    CommandResult cmdResult = executeCommandWithIgnoredExceptions(command);
    // Four distinct members are in SenderGroup1 or SenderGroup2 (server3 counted once).
    verifyStopCommandSucceeded(cmdResult, "testStopGatewaySender_MultipleGroup", 4);

    verifySendersStopped(server1, server2, server3, server4);
    // server5 (SenderGroup3) was not targeted, so its sender must still be running.
    server5.invoke(() -> verifySenderState("ln", true, false));
  }

  /**
   * Creates sender "ln" on each server, starts it, and verifies it is running (and not
   * paused). Operations are applied phase-by-phase across all servers, matching the
   * original per-test setup order.
   */
  private void createStartAndVerifySenders(MemberVM... servers) {
    for (MemberVM server : servers) {
      server.invoke(() -> createSender("ln", 2, false, 100, 400, false, false, null, true));
    }
    for (MemberVM server : servers) {
      server.invoke(() -> startSender("ln"));
    }
    for (MemberVM server : servers) {
      server.invoke(() -> verifySenderState("ln", true, false));
    }
  }

  /** Verifies sender "ln" is stopped (not running, not paused) on each server. */
  private void verifySendersStopped(MemberVM... servers) {
    for (MemberVM server : servers) {
      server.invoke(() -> verifySenderState("ln", false, false));
    }
  }

  /**
   * Asserts a tabular stop-command result: OK status, {@code memberCount} rows, no
   * "Error" rows, and at least one "OK" row. Fails the test if no result was returned.
   *
   * @param cmdResult the command result, possibly null
   * @param testName name used to label the logged result
   * @param memberCount expected number of result rows (one per targeted member)
   */
  private void verifyStopCommandSucceeded(CommandResult cmdResult, String testName,
      int memberCount) {
    if (cmdResult != null) {
      String strCmdResult = cmdResult.toString();
      getLogWriter().info(testName + " stringResult : " + strCmdResult + ">>>>");
      assertEquals(Result.Status.OK, cmdResult.getStatus());
      TabularResultData resultData = (TabularResultData) cmdResult.getResultData();
      List<String> status = resultData.retrieveAllValues("Result");
      assertEquals(memberCount, status.size());
      assertFalse(status.contains("Error"));
      assertTrue(status.contains("OK"));
    } else {
      fail("testStopGatewaySender failed as did not get CommandResult");
    }
  }

  /** Runs a gfsh command while ignoring transient "Could not connect" exceptions. */
  private CommandResult executeCommandWithIgnoredExceptions(String command) {
    final IgnoredException exln = IgnoredException.addIgnoredException("Could not connect");
    CommandResult commandResult = gfsh.executeCommand(command);
    exln.remove();
    return commandResult;
  }

  /** Starts a server VM that is a member of the given comma-separated groups. */
  private MemberVM startServerWithGroups(int index, String groups, int locPort) throws Exception {
    Properties props = new Properties();
    props.setProperty(GROUPS, groups);
    return clusterStartupRule.startServerVM(index, props, locPort);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.dataflow.sample.timeseriesflow.metrics.core.complex.rule;
import com.google.auto.value.AutoValue;
import com.google.common.base.Preconditions;
import com.google.dataflow.sample.timeseriesflow.DerivedAggregations.Indicators;
import com.google.dataflow.sample.timeseriesflow.TimeSeriesData.Data;
import com.google.dataflow.sample.timeseriesflow.TimeSeriesData.TSAccum;
import com.google.dataflow.sample.timeseriesflow.TimeSeriesData.TSKey;
import com.google.dataflow.sample.timeseriesflow.common.CommonUtils;
import com.google.dataflow.sample.timeseriesflow.common.TSDataUtils;
import com.google.dataflow.sample.timeseriesflow.datamap.AccumCoreNumericBuilder;
import com.google.dataflow.sample.timeseriesflow.graph.GenerateComputations;
import com.google.dataflow.sample.timeseriesflow.metrics.core.complex.fsi.vwap.VWAPGFn;
import com.google.dataflow.sample.timeseriesflow.options.TSFlowOptions;
import java.util.ArrayList;
import java.util.List;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.extensions.protobuf.ProtoCoder;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Filter;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Values;
import org.apache.beam.sdk.transforms.WithKeys;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionList;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class is intended as an example tool to demonstrate how rules could be applied to the output
* of the metrics library. The example blends several metric output streams together and applies a
* series of sample rules to that output.
*
* <p>The example makes use of several metric streams:
*
* <p>1- VWAP metric 2- The LAST known Price. 3- The LAST known Ask. 4- The LAST known Bid.
*
* <p>The rule is as follows.
*
* <p>1 - If there is a valid non zero VWAP within the {@link
* GenerateComputations#getType2SlidingWindowDuration()} then the output == VWAP
*
* <p>2 - If there is no non-zero VWAP values within the {@link
* GenerateComputations#getType2SlidingWindowDuration()} then the following rules are applied:
*
* <p>2a - If the last known price value is > a lower bound (Bid) and < a higher bound (Ask) then
* the primary is used. The lower and upper bound are based on either Type 1 or Type 2 computations.
*
* <p>2b - If the last known primary value is < a lower bound (Bid) then the lower bound is used.
* The lower bound is based on either Type 1 or Type 2 computations.
*
* <p>2c - If the last known primary value is > a higher bound (Ask) then the higher bound is used.
* The lower bound is based on either Type 1 or Type 2 computations.
*
* <p>Edge conditions: If no VWAP or Price value then no metric is output. If either lower or upper
* value is missing, then the primary value is output. An error log will also be raised.
*
* <p>Limitations:
*
* <p>This transform currently requires that the needed primary / lower and upper metrics have been
* already been added to {@link GenerateComputations} before this metric is added to the list of
* complex metrics. For example if the primary value requires the {@link VWAPGFn}, then the call to
* {@link GenerateComputations#getComplexType2Metrics()} must have {@link VWAPGFn} before this class
* in the list.
*/
@Experimental
public class ValueInBoundsGFn
    extends PTransform<PCollection<KV<TSKey, TSAccum>>, PCollection<KV<TSKey, TSAccum>>> {
  private static final Logger LOG = LoggerFactory.getLogger(ValueInBoundsGFn.class);

  /** Options for {@link ValueInBoundsGFn} fn. */
  public interface ValueInBoundsOptions extends TSFlowOptions {

    /** Major keys whose streams take part in the bound-validation rule. */
    List<String> getValueInBoundsMajorKeyName();

    void setValueInBoundsMajorKeyName(List<String> majorKeyName);

    /** Name under which the rule's result metric is stored in the output {@link TSAccum}. */
    String getValueInBoundsOutputMetricName();

    // FIX: parameter was misleadingly named "majorKeyName" (copy-paste from the setter above).
    void setValueInBoundsOutputMetricName(String outputMetricName);

    /** Minor key carrying the primary value stream (e.g. the VWAP stream). */
    String getValueInBoundsPrimaryMinorKeyName();

    void setValueInBoundsPrimaryMinorKeyName(String primaryMinorKeyName);

    /** Metric name inside the primary accum that holds the primary value. */
    String getValueInBoundsPrimaryMetricName();

    void setValueInBoundsPrimaryMetricName(String primaryMetricName);

    /** Minor key carrying the lower-bound stream (e.g. the Bid stream). */
    String getValueInBoundsLowerBoundaryMinorKeyName();

    void setValueInBoundsLowerBoundaryMinorKeyName(String lowerBoundaryMinorKeyName);

    /** Minor key carrying the upper-bound stream (e.g. the Ask stream). */
    String getValueInBoundsUpperBoundaryMinorKeyName();

    // FIX: parameter was misleadingly named "lowerBoundaryMinorKeyName" (copy-paste error).
    void setValueInBoundsUpperBoundaryMinorKeyName(String upperBoundaryMinorKeyName);
  }

  /**
   * Splits the input into accums that take part in the rule (primary / lower / upper minor keys
   * for each configured major key) and everything else. Rule accums are re-keyed by major key
   * only and grouped so that all three streams for the same key and window arrive together at
   * {@link ApplyRules}; non-rule accums pass through unchanged. The two branches are then
   * flattened back into a single output collection.
   */
  @Override
  public PCollection<KV<TSKey, TSAccum>> expand(PCollection<KV<TSKey, TSAccum>> input) {
    ValueInBoundsOptions options =
        input.getPipeline().getOptions().as(ValueInBoundsOptions.class);

    // Generate list of all keys that the user requested for bound validation.
    List<TSKey> keys = new ArrayList<>();
    for (String key : options.getValueInBoundsMajorKeyName()) {
      // The builder is reused: setMinorKeyString overwrites the previous minor key each time.
      TSKey.Builder builder = TSKey.newBuilder().setMajorKey(key);
      keys.add(builder.setMinorKeyString(options.getValueInBoundsPrimaryMinorKeyName()).build());
      keys.add(
          builder.setMinorKeyString(options.getValueInBoundsLowerBoundaryMinorKeyName()).build());
      keys.add(
          builder.setMinorKeyString(options.getValueInBoundsUpperBoundaryMinorKeyName()).build());
    }

    Filter<KV<TSKey, TSAccum>> filterInclude = Filter.by(x -> keys.contains(x.getKey()));
    Filter<KV<TSKey, TSAccum>> filterExclude = Filter.by(x -> !keys.contains(x.getKey()));

    return PCollectionList.of(input.apply("Filter_1", filterExclude))
        .and(
            input
                .apply("Filter_2", filterInclude)
                // Re-key by major key only so primary/lower/upper group together.
                .apply(WithKeys.of(x -> x.getKey().toBuilder().clearMinorKeyString().build()))
                .setCoder(KvCoder.of(ProtoCoder.of(TSKey.class), CommonUtils.getKvTSAccumCoder()))
                .apply(GroupByKey.create())
                .apply(Values.create())
                .apply(
                    ParDo.of(
                        ApplyRules.builder()
                            .setPrimaryMinorKeyName(options.getValueInBoundsPrimaryMinorKeyName())
                            .setPrimaryMetricName(options.getValueInBoundsPrimaryMetricName())
                            .setLowerBoundaryMinorKeyName(
                                options.getValueInBoundsLowerBoundaryMinorKeyName())
                            .setUpperBoundaryMinorKeyName(
                                options.getValueInBoundsUpperBoundaryMinorKeyName())
                            .setOutputMetricName(options.getValueInBoundsOutputMetricName())
                            .build())))
        .apply(Flatten.pCollections());
  }

  /**
   * Applies the in-bounds rule to one grouped bundle of accums (primary, lower, upper for a
   * single major key / window). Lower and upper accums are re-emitted unchanged; the primary
   * accum is re-emitted with the rule's result added under the configured output metric name.
   */
  @VisibleForTesting
  @AutoValue
  public abstract static class ApplyRules
      extends DoFn<Iterable<KV<TSKey, TSAccum>>, KV<TSKey, TSAccum>> {

    abstract String getOutputMetricName();

    abstract String getPrimaryMinorKeyName();

    abstract String getPrimaryMetricName();

    abstract String getLowerBoundaryMinorKeyName();

    abstract String getUpperBoundaryMinorKeyName();

    /** AutoValue builder for {@link ApplyRules}. */
    @AutoValue.Builder
    public abstract static class Builder {

      public abstract Builder setOutputMetricName(String newOutputMetricName);

      public abstract Builder setPrimaryMinorKeyName(String newPrimaryMinorKeyName);

      public abstract Builder setPrimaryMetricName(String newPrimaryMetricName);

      public abstract Builder setLowerBoundaryMinorKeyName(String newLowerBoundaryMinorKeyName);

      public abstract Builder setUpperBoundaryMinorKeyName(String newUpperBoundaryMinorKeyName);

      public abstract ApplyRules build();
    }

    public static Builder builder() {
      return new AutoValue_ValueInBoundsGFn_ApplyRules.Builder();
    }

    @ProcessElement
    public void process(
        @Element Iterable<KV<TSKey, TSAccum>> element, OutputReceiver<KV<TSKey, TSAccum>> o) {
      // Create outputKey, which is the MajorKey and MinorKey from Primary and a new Metric.
      AccumWithinBoundary output = null;
      KV<TSKey, TSAccum> primaryAccum = null;
      Data primary = null;
      Data primaryLast = null;
      Data lower = null;
      Data upper = null;
      // Single pass over the bundle, picking out the three streams by their minor key.
      for (KV<TSKey, TSAccum> kvAccum : element) {
        if (kvAccum.getKey().getMinorKeyString().equals(getPrimaryMinorKeyName())) {
          primaryAccum = kvAccum;
          output = new AccumWithinBoundary(kvAccum.getValue(), getOutputMetricName());
          primary = kvAccum.getValue().getDataStoreOrDefault(getPrimaryMetricName(), null);
          primaryLast = kvAccum.getValue().getDataStoreOrDefault(Indicators.LAST.name(), null);
        }
        if (kvAccum.getKey().getMinorKeyString().equals(getLowerBoundaryMinorKeyName())) {
          lower = kvAccum.getValue().getDataStoreOrDefault(Indicators.LAST.name(), null);
          // We will not be mutating this value so output as is now we have the data.
          o.output(kvAccum);
        }
        if (kvAccum.getKey().getMinorKeyString().equals(getUpperBoundaryMinorKeyName())) {
          upper = kvAccum.getValue().getDataStoreOrDefault(Indicators.LAST.name(), null);
          // We will not be mutating this value so output as is now we have the data.
          o.output(kvAccum);
        }
      }
      // If we have no primary then we do not output any metrics. And there is no primary to pass
      // on. (output is guaranteed non-null below: it is assigned whenever primary is.)
      if (primary == null) {
        LOG.error(
            "No Primary value available, normally caused by lack of bootstrap values or misconfiguration of primary keys {}. Metric {} will not be output",
            ImmutableList.of(getPrimaryMinorKeyName(), getPrimaryMetricName()),
            getOutputMetricName());
        return;
      }
      // If the primary has non gap-fill inputs then return primary.
      if (!output.build().getIsAllGapFillMessages()) {
        output.setMetric(primary);
        o.output(KV.of(output.build().getKey(), output.build()));
        return;
      }
      // If we do not have a LOWER or UPPER value then we auto output Primary.
      if (lower == null || upper == null) {
        output.setMetric(primary);
        o.output(KV.of(output.build().getKey(), output.build()));
        return;
      }
      // We will check against the LAST value from the minor key rather than the primary.
      // NOTE(review): primaryLast may be null here if the primary accum carries no LAST
      // indicator; checkInSpread would then receive a null primary — TODO confirm upstream
      // always populates LAST for the primary stream.
      output.setMetric(checkInSpread(primaryLast, lower, upper));
      o.output(KV.of(output.build().getKey(), output.build()));
    }
  }

  /**
   * Clamps {@code primary} to the [lower, upper] spread: returns {@code lower} when primary is
   * the minimum of (primary, lower), {@code upper} when primary is the maximum of (primary,
   * upper), otherwise primary itself.
   */
  @VisibleForTesting
  public static Data checkInSpread(Data primary, Data lower, Data upper) {
    if (TSDataUtils.findMinData(primary, lower).equals(primary)) {
      return lower;
    }
    if (TSDataUtils.findMaxValue(primary, upper).equals(primary)) {
      return upper;
    }
    return primary;
  }

  /**
   * Mutable builder around a primary {@link TSAccum} that adds the rule's result under a fixed
   * metric name.
   */
  public static class AccumWithinBoundary extends AccumCoreNumericBuilder {
    private final String metricName;

    public AccumWithinBoundary(TSAccum tsAccum, String withinBoundaryName) {
      super(tsAccum);
      Preconditions.checkNotNull(withinBoundaryName);
      metricName = withinBoundaryName;
    }

    /** Stores {@code data} under the configured output metric name. */
    public void setMetric(Data data) {
      setValue(metricName, data);
    }

    /** Returns the stored metric, or null if {@link #setMetric(Data)} was never called. */
    public Data getMetricOrNull() {
      return getValueOrNull(metricName);
    }
  }
}
| |
package me.kevingleason.androidrtc;
import android.app.ListActivity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.pubnub.api.Callback;
import com.pubnub.api.Pubnub;
import com.pubnub.api.PubnubError;
import com.pubnub.api.PubnubException;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import me.kevingleason.androidrtc.adapters.HistoryAdapter;
import me.kevingleason.androidrtc.adt.HistoryItem;
import me.kevingleason.androidrtc.util.Constants;
public class MainActivity extends ListActivity {
private SharedPreferences mSharedPreferences;
private String username;
private String stdByChannel;
private Pubnub mPubNub;
private ListView mHistoryList;
private HistoryAdapter mHistoryAdapter;
private EditText mCallNumET;
private TextView mUsernameTV;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
this.mSharedPreferences = getSharedPreferences(Constants.SHARED_PREFS, MODE_PRIVATE);
if (!this.mSharedPreferences.contains(Constants.USER_NAME)){
Intent intent = new Intent(this, LoginActivity.class);
startActivity(intent);
finish();
return;
}
this.username = this.mSharedPreferences.getString(Constants.USER_NAME, "");
this.stdByChannel = this.username + Constants.STDBY_SUFFIX;
this.mHistoryList = getListView();
this.mCallNumET = (EditText) findViewById(R.id.call_num);
this.mUsernameTV = (TextView) findViewById(R.id.main_username);
this.mUsernameTV.setText(this.username);
initPubNub();
this.mHistoryAdapter = new HistoryAdapter(this, new ArrayList<HistoryItem>(), this.mPubNub);
this.mHistoryList.setAdapter(this.mHistoryAdapter);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
switch(id){
case R.id.action_settings:
return true;
case R.id.action_sign_out:
signOut();
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onStop() {
super.onStop();
if(this.mPubNub!=null){
this.mPubNub.unsubscribeAll();
}
}
@Override
protected void onRestart() {
super.onRestart();
if(this.mPubNub==null){
initPubNub();
} else {
subscribeStdBy();
}
}
/**
* Subscribe to standby channel so that it doesn't interfere with the WebRTC Signaling.
*/
public void initPubNub(){
this.mPubNub = new Pubnub(Constants.PUB_KEY, Constants.SUB_KEY);
this.mPubNub.setUUID(this.username);
subscribeStdBy();
}
/**
* Subscribe to standby channel
*/
private void subscribeStdBy(){
try {
this.mPubNub.subscribe(this.stdByChannel, new Callback() {
@Override
public void successCallback(String channel, Object message) {
Log.d("MA-iPN", "MESSAGE: " + message.toString());
if (!(message instanceof JSONObject)) return; // Ignore if not JSONObject
JSONObject jsonMsg = (JSONObject) message;
try {
if (!jsonMsg.has(Constants.JSON_CALL_USER)) return; //Ignore Signaling messages.
String user = jsonMsg.getString(Constants.JSON_CALL_USER);
dispatchIncomingCall(user);
} catch (JSONException e){
e.printStackTrace();
}
}
@Override
public void connectCallback(String channel, Object message) {
Log.d("MA-iPN", "CONNECTED: " + message.toString());
setUserStatus(Constants.STATUS_AVAILABLE);
}
@Override
public void errorCallback(String channel, PubnubError error) {
Log.d("MA-iPN","ERROR: " + error.toString());
}
});
} catch (PubnubException e){
Log.d("HERE","HEREEEE");
e.printStackTrace();
}
}
/**
* Take the user to a video screen. USER_NAME is a required field.
* @param view button that is clicked to trigger toVideo
*/
public void makeCall(View view){
String callNum = mCallNumET.getText().toString();
if (callNum.isEmpty() || callNum.equals(this.username)){
showToast("Enter a valid user ID to call.");
return;
}
dispatchCall(callNum);
}
/**TODO: Debate who calls who. Should one be on standby? Or use State API for busy/available
* Check that user is online. If they are, dispatch the call by publishing to their standby
* channel. If the publish was successful, then change activities over to the video chat.
* The called user will then have the option to accept of decline the call. If they accept,
* they will be brought to the video chat activity as well, to connect video/audio. If
* they decline, a hangup will be issued, and the VideoChat adapter's onHangup callback will
* be invoked.
* @param callNum Number to publish a call to.
*/
public void dispatchCall(final String callNum){
final String callNumStdBy = callNum + Constants.STDBY_SUFFIX;
this.mPubNub.hereNow(callNumStdBy, new Callback() {
@Override
public void successCallback(String channel, Object message) {
Log.d("MA-dC", "HERE_NOW: " +" CH - " + callNumStdBy + " " + message.toString());
try {
int occupancy = ((JSONObject) message).getInt(Constants.JSON_OCCUPANCY);
if (occupancy == 0) {
showToast("User is not online!");
return;
}
JSONObject jsonCall = new JSONObject();
jsonCall.put(Constants.JSON_CALL_USER, username);
jsonCall.put(Constants.JSON_CALL_TIME, System.currentTimeMillis());
mPubNub.publish(callNumStdBy, jsonCall, new Callback() {
@Override
public void successCallback(String channel, Object message) {
Log.d("MA-dC", "SUCCESS: " + message.toString());
Intent intent = new Intent(MainActivity.this, VideoChatActivity.class);
intent.putExtra(Constants.USER_NAME, username);
intent.putExtra(Constants.CALL_USER, callNum); // Only accept from this number?
startActivity(intent);
}
});
} catch (JSONException e) {
e.printStackTrace();
}
}
});
}
/**
* Handle incoming calls. TODO: Implement an accept/reject functionality.
* @param userId
*/
private void dispatchIncomingCall(String userId){
showToast("Call from: " + userId);
Intent intent = new Intent(MainActivity.this, IncomingCallActivity.class);
intent.putExtra(Constants.USER_NAME, username);
intent.putExtra(Constants.CALL_USER, userId);
startActivity(intent);
}
private void setUserStatus(String status){
try {
JSONObject state = new JSONObject();
state.put(Constants.JSON_STATUS, status);
this.mPubNub.setState(this.stdByChannel, this.username, state, new Callback() {
@Override
public void successCallback(String channel, Object message) {
Log.d("MA-sUS","State Set: " + message.toString());
}
});
} catch (JSONException e){
e.printStackTrace();
}
}
private void getUserStatus(String userId){
String stdByUser = userId + Constants.STDBY_SUFFIX;
this.mPubNub.getState(stdByUser, userId, new Callback() {
@Override
public void successCallback(String channel, Object message) {
Log.d("MA-gUS", "User Status: " + message.toString());
}
});
}
/**
* Ensures that toast is run on the UI thread.
* @param message
*/
private void showToast(final String message){
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(MainActivity.this, message, Toast.LENGTH_SHORT).show();
}
});
}
/**
* Log out, remove username from SharedPreferences, unsubscribe from PubNub, and send user back
* to the LoginActivity
*/
public void signOut(){
this.mPubNub.unsubscribeAll();
SharedPreferences.Editor edit = this.mSharedPreferences.edit();
edit.remove(Constants.USER_NAME);
edit.apply();
Intent intent = new Intent(this, LoginActivity.class);
intent.putExtra("oldUsername", this.username);
startActivity(intent);
}
}
| |
/*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.test.web.servlet.request;
import javax.servlet.http.HttpServletRequest;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.Authentication;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
import org.springframework.security.oauth2.client.annotation.RegisteredOAuth2AuthorizedClient;
import org.springframework.security.oauth2.client.registration.ClientRegistration;
import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository;
import org.springframework.security.oauth2.client.registration.TestClientRegistrations;
import org.springframework.security.oauth2.client.web.OAuth2AuthorizedClientRepository;
import org.springframework.security.oauth2.core.OAuth2AccessToken;
import org.springframework.security.oauth2.core.TestOAuth2AccessTokens;
import org.springframework.security.test.context.TestSecurityContextHolder;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.oauth2Client;
import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
/**
 * Tests for {@link SecurityMockMvcRequestPostProcessors#oauth2Client()}
*
* @author Josh Cummings
* @since 5.3
*/
@ExtendWith(SpringExtension.class)
@ContextConfiguration
@WebAppConfiguration
public class SecurityMockMvcRequestPostProcessorsOAuth2ClientTests {
	@Autowired
	WebApplicationContext context;
	MockMvc mvc;
	// Build MockMvc with the Spring Security filter chain installed so the oauth2Client()
	// request post-processor finds the infrastructure it needs.
	@BeforeEach
	public void setup() {
		// @formatter:off
		this.mvc = MockMvcBuilders
				.webAppContextSetup(this.context)
				.apply(springSecurity())
				.build();
		// @formatter:on
	}
	// Prevent authentication state from one test leaking into the next.
	@AfterEach
	public void cleanup() {
		TestSecurityContextHolder.clearContext();
	}
	// Without a registration id or ClientRegistration, the post-processor must fail fast.
	@Test
	public void oauth2ClientWhenUsingDefaultsThenException() throws Exception {
		assertThatIllegalArgumentException()
				.isThrownBy(() -> oauth2Client().postProcessRequest(new MockHttpServletRequest()))
				.withMessageContaining("ClientRegistration");
	}
	// Defaults: token value "access-token" and client id "test-client" come from the
	// post-processor's built-in defaults for the given registration id.
	@Test
	public void oauth2ClientWhenUsingDefaultsThenProducesDefaultAuthorizedClient() throws Exception {
		this.mvc.perform(get("/access-token").with(oauth2Client("registration-id")))
				.andExpect(content().string("access-token"));
		this.mvc.perform(get("/client-id").with(oauth2Client("registration-id")))
				.andExpect(content().string("test-client"));
	}
	// A fully-specified ClientRegistration overrides the defaults.
	@Test
	public void oauth2ClientWhenClientRegistrationThenUses() throws Exception {
		ClientRegistration clientRegistration = TestClientRegistrations.clientRegistration()
				.registrationId("registration-id").clientId("client-id").build();
		this.mvc.perform(get("/client-id").with(oauth2Client().clientRegistration(clientRegistration)))
				.andExpect(content().string("client-id"));
	}
	// The Consumer variant customizes the default registration in place.
	@Test
	public void oauth2ClientWhenClientRegistrationConsumerThenUses() throws Exception {
		this.mvc.perform(get("/client-id")
				.with(oauth2Client("registration-id").clientRegistration((c) -> c.clientId("client-id"))))
				.andExpect(content().string("client-id"));
	}
	@Test
	public void oauth2ClientWhenPrincipalNameThenUses() throws Exception {
		this.mvc.perform(get("/principal-name").with(oauth2Client("registration-id").principalName("test-subject")))
				.andExpect(content().string("test-subject"));
	}
	@Test
	public void oauth2ClientWhenAccessTokenThenUses() throws Exception {
		OAuth2AccessToken accessToken = TestOAuth2AccessTokens.noScopes();
		this.mvc.perform(get("/access-token").with(oauth2Client("registration-id").accessToken(accessToken)))
				.andExpect(content().string("no-scopes"));
	}
	// Verifies cleanup: after a request that used the post-processor, a plain request must fall
	// back to the real (mocked) OAuth2AuthorizedClientRepository rather than the test client.
	@Test
	public void oauth2ClientWhenUsedOnceThenDoesNotAffectRemainingTests() throws Exception {
		this.mvc.perform(get("/client-id").with(oauth2Client("registration-id")))
				.andExpect(content().string("test-client"));
		OAuth2AuthorizedClient client = new OAuth2AuthorizedClient(TestClientRegistrations.clientRegistration().build(),
				"sub", TestOAuth2AccessTokens.noScopes());
		OAuth2AuthorizedClientRepository repository = this.context.getBean(OAuth2AuthorizedClientRepository.class);
		given(repository.loadAuthorizedClient(eq("registration-id"), any(Authentication.class),
				any(HttpServletRequest.class))).willReturn(client);
		this.mvc.perform(get("/client-id")).andExpect(content().string("client-id"));
		verify(repository).loadAuthorizedClient(eq("registration-id"), any(Authentication.class),
				any(HttpServletRequest.class));
	}
	// Test application context: permit-all security with the oauth2Client feature enabled, mocked
	// registration/authorized-client repositories, and a controller exposing the authorized
	// client's token, principal name and client id for assertion.
	@EnableWebSecurity
	@EnableWebMvc
	static class OAuth2ClientConfig extends WebSecurityConfigurerAdapter {
		@Override
		protected void configure(HttpSecurity http) throws Exception {
			// @formatter:off
			http
				.authorizeRequests((authz) -> authz
					.anyRequest().permitAll()
				)
				.oauth2Client();
			// @formatter:on
		}
		@Bean
		ClientRegistrationRepository clientRegistrationRepository() {
			return mock(ClientRegistrationRepository.class);
		}
		@Bean
		OAuth2AuthorizedClientRepository authorizedClientRepository() {
			return mock(OAuth2AuthorizedClientRepository.class);
		}
		@RestController
		static class PrincipalController {
			@GetMapping("/access-token")
			String accessToken(
					@RegisteredOAuth2AuthorizedClient("registration-id") OAuth2AuthorizedClient authorizedClient) {
				return authorizedClient.getAccessToken().getTokenValue();
			}
			@GetMapping("/principal-name")
			String principalName(
					@RegisteredOAuth2AuthorizedClient("registration-id") OAuth2AuthorizedClient authorizedClient) {
				return authorizedClient.getPrincipalName();
			}
			@GetMapping("/client-id")
			String clientId(
					@RegisteredOAuth2AuthorizedClient("registration-id") OAuth2AuthorizedClient authorizedClient) {
				return authorizedClient.getClientRegistration().getClientId();
			}
		}
	}
}
| |
package org.codehaus.mojo.sonar.bootstrap;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactCollector;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.dependency.tree.DependencyNode;
import org.apache.maven.shared.dependency.tree.DependencyTreeBuilder;
import org.apache.maven.shared.dependency.tree.DependencyTreeBuilderException;
import org.apache.maven.shared.dependency.tree.filter.AncestorOrSelfDependencyNodeFilter;
import org.apache.maven.shared.dependency.tree.filter.DependencyNodeFilter;
import org.apache.maven.shared.dependency.tree.filter.StateDependencyNodeFilter;
import org.apache.maven.shared.dependency.tree.traversal.BuildingDependencyNodeVisitor;
import org.apache.maven.shared.dependency.tree.traversal.CollectingDependencyNodeVisitor;
import org.apache.maven.shared.dependency.tree.traversal.DependencyNodeVisitor;
import org.apache.maven.shared.dependency.tree.traversal.FilteringDependencyNodeVisitor;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
/**
 * Builds the Maven dependency tree of a project and serializes it to the compact JSON format
 * expected by the Sonar server: an array of {@code {"k":key,"v":version,"s":scope,"d":[...]}}
 * nodes, where {@code d} holds transitive dependencies.
 */
public class DependencyCollector
{
    private final DependencyTreeBuilder dependencyTreeBuilder;

    private final ArtifactFactory artifactFactory;

    private final ArtifactRepository localRepository;

    private final ArtifactMetadataSource artifactMetadataSource;

    private final ArtifactCollector artifactCollector;

    public DependencyCollector( DependencyTreeBuilder dependencyTreeBuilder,
                                ArtifactFactory artifactFactory, ArtifactRepository localRepository,
                                ArtifactMetadataSource artifactMetadataSource, ArtifactCollector artifactCollector )
    {
        this.dependencyTreeBuilder = dependencyTreeBuilder;
        this.artifactFactory = artifactFactory;
        this.localRepository = localRepository;
        this.artifactMetadataSource = artifactMetadataSource;
        this.artifactCollector = artifactCollector;
    }

    /** Lightweight tree node: groupId:artifactId key, version, scope and children. */
    private static class Dependency
    {
        private final String key;

        private final String version;

        private String scope;

        List<Dependency> dependencies = new ArrayList<Dependency>();

        public Dependency( String key, String version )
        {
            this.key = key;
            this.version = version;
        }

        public String key()
        {
            return key;
        }

        public String version()
        {
            return version;
        }

        public String scope()
        {
            return scope;
        }

        public Dependency setScope( String scope )
        {
            this.scope = scope;
            return this;
        }

        public List<Dependency> dependencies()
        {
            return dependencies;
        }
    }

    /**
     * Walks the project's dependency tree and rebuilds it as a list of top-level
     * {@link Dependency} nodes with their transitive children attached.
     *
     * @throws IllegalStateException if the dependency tree cannot be built
     */
    private List<Dependency> collectProjectDependencies( MavenProject project )
    {
        final List<Dependency> result = new ArrayList<Dependency>();
        try
        {
            DependencyNode root =
                dependencyTreeBuilder.buildDependencyTree( project, localRepository, artifactFactory,
                                                           artifactMetadataSource, null, artifactCollector );

            // Rebuild the tree on the way down: the stack tracks the current ancestor chain so
            // each visited node is attached to its parent (or to the result list at depth one).
            DependencyNodeVisitor visitor = new BuildingDependencyNodeVisitor( new DependencyNodeVisitor()
            {
                private Deque<Dependency> stack = new ArrayDeque<Dependency>();

                public boolean visit( DependencyNode node )
                {
                    // Skip the root node itself (the project): it has no parent, or is its own parent.
                    if ( node.getParent() != null && node.getParent() != node )
                    {
                        Dependency dependency = toDependency( node );
                        if ( stack.isEmpty() )
                        {
                            result.add( dependency );
                        }
                        else
                        {
                            stack.peek().dependencies().add( dependency );
                        }
                        stack.push( dependency );
                    }
                    return true;
                }

                public boolean endVisit( DependencyNode node )
                {
                    if ( !stack.isEmpty() )
                    {
                        stack.pop();
                    }
                    return true;
                }
            } );

            // mode verbose OFF : do not show the same lib many times. First pass collects the
            // INCLUDED nodes; second pass visits only those nodes and their ancestors.
            DependencyNodeFilter filter = StateDependencyNodeFilter.INCLUDED;
            CollectingDependencyNodeVisitor collectingVisitor = new CollectingDependencyNodeVisitor();
            DependencyNodeVisitor firstPassVisitor = new FilteringDependencyNodeVisitor( collectingVisitor, filter );
            root.accept( firstPassVisitor );

            DependencyNodeFilter secondPassFilter =
                new AncestorOrSelfDependencyNodeFilter( collectingVisitor.getNodes() );
            visitor = new FilteringDependencyNodeVisitor( visitor, secondPassFilter );

            root.accept( visitor );
        }
        catch ( DependencyTreeBuilderException e )
        {
            throw new IllegalStateException( "Can not load the graph of dependencies of the project "
                + project, e );
        }
        return result;
    }

    /** Converts a raw tree node into a {@link Dependency} (key = groupId:artifactId). */
    private Dependency toDependency( DependencyNode node )
    {
        String key = String.format( "%s:%s", node.getArtifact().getGroupId(), node.getArtifact().getArtifactId() );
        String version = node.getArtifact().getBaseVersion();
        return new Dependency( key, version ).setScope( node.getArtifact().getScope() );
    }

    /**
     * Returns the project's full dependency tree serialized as a JSON array.
     */
    public String toJson( MavenProject project )
    {
        return dependenciesToJson( collectProjectDependencies( project ) );
    }

    private String dependenciesToJson( List<Dependency> deps )
    {
        StringBuilder json = new StringBuilder();
        json.append( '[' );
        serializeDeps( json, deps );
        json.append( ']' );
        return json.toString();
    }

    private void serializeDeps( StringBuilder json, List<Dependency> deps )
    {
        for ( Iterator<Dependency> dependencyIt = deps.iterator(); dependencyIt.hasNext(); )
        {
            serializeDep( json, dependencyIt.next() );
            if ( dependencyIt.hasNext() )
            {
                json.append( ',' );
            }
        }
    }

    private void serializeDep( StringBuilder json, Dependency dependency )
    {
        // FIX: values are now JSON-escaped; previously a quote or backslash in a key, version or
        // scope produced malformed JSON.
        json.append( "{" );
        json.append( "\"k\":\"" );
        json.append( escapeJson( dependency.key() ) );
        json.append( "\",\"v\":\"" );
        json.append( escapeJson( dependency.version() ) );
        json.append( "\",\"s\":\"" );
        json.append( escapeJson( dependency.scope() ) );
        json.append( "\",\"d\":[" );
        serializeDeps( json, dependency.dependencies() );
        json.append( "]" );
        json.append( "}" );
    }

    /**
     * Escapes a string for use inside a JSON double-quoted value (RFC 8259: quote, backslash and
     * control characters). A null input yields the literal text "null", preserving the output of
     * the previous StringBuilder.append(null) behavior.
     */
    private static String escapeJson( String value )
    {
        if ( value == null )
        {
            return "null";
        }
        StringBuilder sb = new StringBuilder( value.length() );
        for ( int i = 0; i < value.length(); i++ )
        {
            char c = value.charAt( i );
            switch ( c )
            {
                case '"':
                    sb.append( "\\\"" );
                    break;
                case '\\':
                    sb.append( "\\\\" );
                    break;
                case '\b':
                    sb.append( "\\b" );
                    break;
                case '\f':
                    sb.append( "\\f" );
                    break;
                case '\n':
                    sb.append( "\\n" );
                    break;
                case '\r':
                    sb.append( "\\r" );
                    break;
                case '\t':
                    sb.append( "\\t" );
                    break;
                default:
                    if ( c < 0x20 )
                    {
                        sb.append( String.format( "\\u%04x", (int) c ) );
                    }
                    else
                    {
                        sb.append( c );
                    }
                    break;
            }
        }
        return sb.toString();
    }
}
| |
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.CachePool.CacheUnit;
import org.apache.hadoop.util.StringUtils;
/**
 * Schedules the writing (spilling) of cached data to output streams on dedicated spill threads.
 */
public class SpillScheduler extends Thread {
  // Spill categories used as priorities/types for scheduled streams.
  public static final int RECEIVE=0;
  public static final int SEND=1;
  public static final int SORT=2;
  private static final Log LOG = LogFactory.getLog(SpillScheduler.class.getName());
class SpillFile{
int priority;
boolean finished;
List<CacheUnit> cus;
SpillThread spillThread;
SpillOutputStream sos;
SpillFile(SpillThread st, int prio, SpillOutputStream out) {
priority = prio;
spillThread = st;
finished = false;
cus = new LinkedList<CacheUnit>();
sos = out;
}
int size() {
return cus.size();
}
void add(CacheUnit cu){
//LOG.info(Thread.currentThread()+"add000");
synchronized(cus) {
// LOG.info(Thread.currentThread()+"add111");
while (cus.size() >= maxPerFileUpCus) {
// LOG.info(Thread.currentThread()+"add222");
try {
cus.wait(500);
// LOG.info(Thread.currentThread()+"add444");
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
cus.add(cu);
spillThread.toSpillSize.getAndIncrement();
}
// LOG.info(Thread.currentThread()+"add555");
}
CacheUnit getNext(){
// LOG.info(Thread.currentThread()+"get000");
synchronized(cus) {
// LOG.info(Thread.currentThread()+"get111");
if (cus.size() == 0) {
// LOG.info(Thread.currentThread()+"get333");
return null;
}
// LOG.info(Thread.currentThread()+"get222");
spillThread.toSpillSize.getAndDecrement();
CacheUnit cu = cus.remove(0);
// LOG.info(Thread.currentThread()+"get444");
if (cus.size() + 1 >= maxPerFileUpCus) {
// LOG.info(Thread.currentThread()+"get555");
cus.notify();
}
// LOG.info(Thread.currentThread()+"get555");
return cu;
}
}
}
  /**
   * Worker thread that round-robins over its registered output streams and writes their queued
   * cache units. NOTE(review): the run loop is defined further down this class.
   */
  class SpillThread extends Thread {
    // Registered spill files, keyed by their target stream. ConcurrentHashMap: mutated by
    // register/unregister while the spill loop iterates.
    Map<OutputStream, SpillFile> files = new ConcurrentHashMap<OutputStream, SpillFile>();
    boolean stop = false;
    int maxPriority = -1;
    boolean spilled = false;
    int round = 0;
    // Total cache units queued across all files of this thread.
    AtomicInteger toSpillSize = new AtomicInteger(0);
    OutputStream currentOs = null;
    private RoundQueue<OutputStream> spillIndeces = //for schedule task to spill
        new RoundQueue<OutputStream>();
    //private AtomicInteger index = new AtomicInteger(0);
public boolean containFile(OutputStream out) {
return files.containsKey(out);
}
    /**
     * Queues one cache unit for {@code out} and wakes the worker if it was
     * idle before the add.
     */
    public void addSpill(OutputStream out, CacheUnit cu){
        if (files.get(out) == null) {
            LOG.info(" SpillScheduler don't contain this file! Please register first!");
            return;
        }
        // Read the counter BEFORE add(): if it was zero the worker may be
        // parked in run()'s wait loop and needs a notify afterwards.
        int old = this.toSpillSize.get();
        files.get(out).add(cu);
        if(old == 0) {
            synchronized (files) {
                files.notify();
            }
        }
    }
    // True when the last unit for this stream has been written. An unknown or
    // null stream is logged and reported as finished so waiters do not hang.
    private boolean hasSpillFinished(OutputStream out) {
        if (out == null || !files.containsKey(out) || files.get(out) == null) {
            LOG.info("error hasSpillFinished");
            return true;
        } else {
            return files.get(out).finished;
        }
    }
    // Registers a stream with the given priority; no-op if already present.
    public void registerSpill(OutputStream out, int priority, SpillOutputStream sos) {
        if (files.containsKey(out)) {
            LOG.info(" out contained already");
            return;
        } else {
            files.put(out, new SpillFile(this, priority, sos));
            spillIndeces.insert(out);
        }
    }
    // Removes a stream from the round-robin rotation and drops its queue;
    // no-op when the stream was never registered with this worker.
    public void unRegisterSpill(OutputStream out) {
        if (!files.containsKey(out)) {
            return;
        }
        spillIndeces.remove(out);
        files.remove(out);
    }
    // Sum of queued units over all files (unsynchronized snapshot; used by
    // run() to resynchronize toSpillSize and by SpillScheduler.test()).
    private int getNumCus() {
        int n = 0;
        for(SpillFile sf : files.values()) {
            n += sf.size();
        }
        return n;
    }
    /**
     * Round-robin pick of the next stream with pending units, or null.
     * One "round" is a full pass over spillIndeces; when a round completes
     * with nothing spilled, the priority gate (maxPriority) is reset so
     * lower-priority files get a turn in the next round.
     */
    private OutputStream getNextOs() {
        while (toSpillSize.get() > 0) {
            OutputStream out = spillIndeces.getNext();
            round++;
            if (round >= spillIndeces.size()) {
                if (!spilled) {
                    maxPriority = -1;
                }
                spilled = false;
                round = 0;
            }
            if (out == null) {
                return null;
            }
            // NOTE(review): files.get(out) can be null if the stream is
            // unregistered concurrently; the resulting NPE is only caught by
            // run()'s Throwable handler.
            if (files.get(out).size() > 0) {
                return out;
            }
        }
        return null;
    }
    /**
     * Main drain loop: park until producers queue work, pick a stream via the
     * priority-gated round robin, write one cache unit, recycle it. The loop
     * never exits normally (stop is never set); daemon status ends it.
     */
    public void run() {
        while(!stop) {
            try {
                synchronized (files) {
                    // Park (500 ms poll) until addSpill() queues a unit.
                    while (toSpillSize.intValue() == 0) {
                        files.wait(500);
                    }
                }
                currentOs = getNextOs();
                if (currentOs == null) {
                    // Counter claimed pending work but no queue had any:
                    // log the per-file sizes and resynchronize toSpillSize
                    // with the actual total.
                    String t = "";
                    int i = 0;
                    for (SpillFile sf : files.values()) {
                        t += (", " + sf.size());
                        i += sf.size();
                    }
                    t += ("toSpillSize: " + toSpillSize.intValue());
                    toSpillSize.getAndSet(i);
                    LOG.info(t);
                    continue;
                }
                SpillFile sf = files.get(currentOs);
                // Priority gate: within a round only files at the highest
                // priority seen so far are served; the gate is reset by
                // getNextOs() after an idle round.
                if (sf.priority < maxPriority) {
                    continue;
                } else {
                    maxPriority =sf.priority;
                    spilled = true;
                }
                CacheUnit cu = sf.getNext();
                // NOTE(review): getNext() can return null if another path
                // drained the queue first; the NPE on the next line is only
                // caught by the Throwable clause below.
                cu.writeFile(currentOs);
                if (cu.isLast()) {
                    // Last unit of the file: mark done and wake any caller
                    // blocked in SpillScheduler.waitSpillFinished(out).
                    synchronized(currentOs) {
                        LOG.info(" file written finished");
                        sf.finished = true;
                        currentOs.notify();
                    }
                }
                // Hand the buffer back to the stream wrapper for reuse.
                sf.sos.returnUnit(cu);
                currentOs = null;
            } catch (InterruptedException e) {
                // NOTE(review): interrupt status is not restored; the worker
                // deliberately keeps running.
                LOG.warn(" interupted " + e);
                e.printStackTrace();
            } catch (IOException e) {
                LOG.error(" write error! " + e);
                e.printStackTrace();
                // to be improved
            } catch (Throwable throwable) {
                // Last-resort diagnostics: dump registered files and the round
                // queue, then keep the worker alive.
                String t = Thread.currentThread() + " files: " ;
                for (OutputStream out : files.keySet()) {
                    t += (out + ", ");
                }
                LOG.info(t);
                t = Thread.currentThread() + " spillIndeces: " ;
                spillIndeces.list(t);
                LOG.info(t);
                LOG.fatal("Error running spill thread " + Thread.currentThread() +
                        ": " + StringUtils.stringifyException(throwable));
            }
        }
    }
/* public void toSpillTaskRemoved(int i) {
LOG.info(Thread.currentThread() + "tstr111");
if (i == -1) {
return;
}
LOG.info(Thread.currentThread() + "tstr222");
int tNum = spillIndeces.size();
LOG.info(Thread.currentThread() + "tstr333");
// synchronized (index) {
LOG.info(Thread.currentThread() + "tstr444");
if (tNum == 0) {
LOG.info(Thread.currentThread() + "tstr666");
index.set(0);
} else if (i < index.get()) {
LOG.info(Thread.currentThread() + "tstr777");
index.set((index.get() - 1 + tNum) % tNum);
} else if (i == index.get()) {
LOG.info(Thread.currentThread() + "tstr888");
index.set(index.get()%tNum);
}
LOG.info(Thread.currentThread() + "tstr555");
//}
} */
}
// Process-wide instance, lazily created by get().
private static SpillScheduler ss = null; //singleton pattern
// Worker-thread count, from "child.io.thread.num" (default 2).
private final int numMaxThreads;
// Per-file cap on queued cache units, from "perfile.spillscheduler.cacheunit.max.num".
private final int maxPerFileUpCus;
// Monitor-loop shutdown flag; NOTE(review): never set anywhere in this file.
private boolean stop = false;
// Maps each registered stream to the index of the worker that owns it.
Map<OutputStream, Integer> file2Threads = new ConcurrentHashMap<OutputStream, Integer>();
private SpillThread[] spillThreads;
// NOTE(review): group is created but the workers are never attached to it.
private ThreadGroup threadGroup = new ThreadGroup("spillThreads");
/** Creates a scheduler with default configuration. */
SpillScheduler() {
    this(new JobConf());
}
/**
 * Creates a scheduler sized from {@code conf}: "child.io.thread.num" worker
 * threads (default 2) and a per-file queue bound of
 * "perfile.spillscheduler.cacheunit.max.num" (default 10).
 */
SpillScheduler(JobConf conf) {
    super("spillSchedulerThread");
    numMaxThreads = conf.getInt("child.io.thread.num", 2);
    maxPerFileUpCus = conf.getInt("perfile.spillscheduler.cacheunit.max.num", 10);
    initialize();
}
/** @return the per-file cap on cache units queued for spilling. */
public int getMaxPerFileUpCus() {
    return this.maxPerFileUpCus;
}
/**
 * Returns the process-wide scheduler, creating it on first use.
 *
 * <p>Synchronized: the previous unsynchronized lazy init could construct two
 * schedulers (each spawning worker threads) when first called concurrently,
 * leaking one of them.
 */
public static synchronized SpillScheduler get() {
    if (ss == null) {
        ss = new SpillScheduler();
    }
    return ss;
}
/** Marks the scheduler as daemon and creates (but does not start) the workers. */
private void initialize() {
    setDaemon(true);
    threadGroup.setDaemon(true);
    LOG.info(" numMaxThreads: " + numMaxThreads);
    spillThreads = new SpillThread[numMaxThreads];
    for (int id = 0; id < spillThreads.length; id++) {
        SpillThread worker = new SpillThread();
        worker.setDaemon(true);
        worker.setName("spillThread" + id);
        spillThreads[id] = worker;
    }
}
/*
public void start() {
LOG.info("start.");
for(SpillThread st : spillThreads) {
st.start();
}
super.start();
}*/
/**
 * Monitor loop: starts the workers on the first pass and replaces any that
 * die, checking once per second.
 *
 * <p>Fix: the previous code called {@code start()} on threads in state
 * TERMINATED. A {@link java.lang.Thread} can never be restarted — that call
 * throws IllegalThreadStateException, which escaped and killed this monitor.
 * A dead worker is now replaced with a fresh SpillThread at the same index
 * (its registrations were lost with the dead thread either way).
 */
public void run() {
    while (!stop) {
        for (int i = 0; i < spillThreads.length; i++) {
            SpillThread st = spillThreads[i];
            if (Thread.State.NEW.equals(st.getState())) {
                // First pass: workers were created in initialize() but never started.
                st.start();
            } else if (Thread.State.TERMINATED.equals(st.getState())) {
                LOG.info(st + " down! ");
                SpillThread fresh = new SpillThread();
                fresh.setDaemon(true);
                fresh.setName("spillThread" + i);
                spillThreads[i] = fresh;
                fresh.start();
            }
        }
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // Deliberately keep monitoring: interruption is not the shutdown
            // signal here (the stop flag is).
            LOG.warn("spill monitor interrupted", e);
        }
    }
}
/** Logs a per-worker summary of tracked files and queued cache units (debug aid). */
void test() {
    StringBuilder summary = new StringBuilder();
    for (SpillThread worker : spillThreads) {
        summary.append("spill thread ").append(worker.getName())
                .append(" files: ").append(worker.files.size())
                .append(" cus: ").append(worker.getNumCus())
                .append(";\n");
    }
    LOG.info(summary.toString());
}
/**
 * Registers {@code out} with the least-loaded worker (fewest tracked files)
 * and records the stream-to-worker assignment.
 */
public synchronized void registerSpill(OutputStream out, int priority, SpillOutputStream sos) {
    int ind = 0;
    int min = spillThreads[0].files.size();
    for (int i = 1; i < spillThreads.length; i++) {
        int n = spillThreads[i].files.size();
        if (n < min) {
            min = n;
            ind = i;
        }
    }
    spillThreads[ind].registerSpill(out, priority, sos);
    // Integer.valueOf uses the JDK cache; the deprecated new Integer(int)
    // always allocated a fresh object.
    this.file2Threads.put(out, Integer.valueOf(ind));
}
/**
 * Removes {@code out} from its worker and forgets the assignment; logs and
 * returns when the stream was never registered.
 *
 * <p>Fix: the previous code unboxed {@code file2Threads.get(out)} straight
 * into an {@code int}, so an unregistered stream threw NullPointerException
 * before ever reaching the "please register first" diagnostic. The upper
 * bounds check was also off by one ({@code > length} instead of {@code >=}).
 */
public synchronized void unRegisterSpill(OutputStream out) {
    Integer ind = this.file2Threads.get(out);
    if (ind == null || ind.intValue() < 0 || ind.intValue() >= spillThreads.length) {
        LOG.info(" SpillScheduler don't contain this file! Please register first!");
        return;
    }
    spillThreads[ind.intValue()].unRegisterSpill(out);
    this.file2Threads.remove(out);
}
/**
 * Routes one cache unit to the worker that owns {@code out}; logs and returns
 * when the stream was never registered.
 *
 * <p>Fix: previously the map value was auto-unboxed into an {@code int},
 * throwing NullPointerException for unregistered streams instead of hitting
 * the diagnostic; the bounds check was also off by one.
 */
public void addSpill(OutputStream out, CacheUnit cu) {
    Integer ind = this.file2Threads.get(out);
    if (ind == null || ind.intValue() < 0 || ind.intValue() >= spillThreads.length) {
        LOG.info(" SpillScheduler don't contain this file! Please register first!");
        return;
    }
    spillThreads[ind.intValue()].addSpill(out, cu);
}
/**
 * Whether the worker owning {@code out} has written its last unit. Unknown
 * streams report {@code true} so waiters do not block forever.
 *
 * <p>Fix: null-safe lookup (the old unboxing NPE'd on unregistered streams,
 * which would have crashed waitSpillFinished) and corrected off-by-one bound.
 */
private boolean hasSpillFinished(OutputStream out) {
    Integer ind = this.file2Threads.get(out);
    if (ind == null || ind.intValue() < 0 || ind.intValue() >= spillThreads.length) {
        LOG.info(" SpillScheduler don't contain this file! Please register first!");
        return true;
    }
    return spillThreads[ind.intValue()].hasSpillFinished(out);
}
/**
 * Blocks until every cache unit queued for {@code out} has been written.
 * Polls with a 500 ms timeout so a missed notify (worker signals on the
 * stream monitor in run()) cannot hang the caller forever.
 *
 * @throws InterruptedException if the calling thread is interrupted while waiting
 */
public void waitSpillFinished(OutputStream out) throws InterruptedException {
    if (out == null) {
        return;
    }
    synchronized (out) {
        while (!hasSpillFinished(out)) {
            out.wait(500);
        }
    }
}
}
| |
/*
* The MIT License
*
* Copyright 2015 Michael Hrcek <hrcekmj@clarkson.edu>.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hrcek.core;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.UIManager;
/**
*
* @author Michael Hrcek <hrcekmj@clarkson.edu>
* @author Xperia64
*/
public class Boot {
/** Prints the usage text to standard output. */
public static void showUsage() {
    String usage = showUsageAsString();
    System.out.println(usage);
}
/**
 * Builds the command-line usage text.
 *
 * @return the full multi-line usage message
 */
public static String showUsageAsString() {
    StringBuilder msg = new StringBuilder();
    msg.append("Usage:");
    msg.append("java -jar DerpyWriter.jar <arguments>\n\n");
    msg.append("\tArguments:\n");
    msg.append("\t<source files> plaintext files used for source\n");
    msg.append("\t-a [#] accuracy (default 1)\n");
    msg.append("\t-c [#] output count (default 100)\n");
    msg.append("\t-h --help display this text\n");
    msg.append("\t-o [FILE] output file (default stdout, hyphen for stdout)\n");
    msg.append("\t-t [#] thread count (default 1)\n");
    msg.append("\t-i ignore logical punctuation checking.\n");
    msg.append("\t-l [FILE] load dictionary file.\n");
    msg.append("\t-s [FILE] save dictionary file.\n");
    msg.append("\t-r only read files.\n");
    msg.append("\t-v verbose mode\n");
    msg.append("\t-w [#] [FILE] weight a file relative to the other files\n");
    msg.append("\t-nf do not format text\n");
    msg.append("\t-fo <txt,html> Output text as a format (Default plaintext)\n");
    msg.append("\t-fi <txt,normal,html> Input text as a format (Default normal)\n");
    msg.append("\t-d Enable debug mode\n");
    msg.append("\t-st Enable strict mode\n");
    return msg.toString();
}
/**
*
* @param file the filename.
* @return true if the file can be opened.
*/
/**
 * Validates a candidate filename: rejects names containing characters that
 * are illegal on common filesystems, then confirms the path can be
 * canonicalized on this platform.
 *
 * @param file the filename.
 * @return true if the file can be opened.
 */
public static boolean isFilenameValid(String file) {
    final char[] ILLEGAL_CHARACTERS = {'\n', '\r', '\t', '\0', '\f', '`', '?', '*', '<', '>', '|', '\"', ':'};
    for (char illegal : ILLEGAL_CHARACTERS) {
        if (file.indexOf(illegal) >= 0) {
            return false;
        }
    }
    try {
        // getCanonicalPath() throws IOException for OS-invalid paths.
        new File(file).getCanonicalPath();
        return true;
    } catch (IOException e) {
        return false;
    }
}
/**
 * Entry point. With no arguments the GUI is launched; otherwise the flags are
 * parsed, at least one writing source is required, the dictionary is built
 * (optionally loaded), sources are read, output is optionally generated, and
 * the dictionary is optionally saved.
 */
public static void main(String[] args) throws InterruptedException {
    if (args.length == 0) {
        DerpyGUI gui = new DerpyGUI();
    } else {
        checkFlags(args);
        if (!DerpyManager.checkIfHasWritingSource()) {
            System.out.println("This requires at least one source file");
            showUsage();
            System.exit(1);
        }
        DerpyManager.setDictionary(new Dictionary());
        // -l flag: seed the dictionary from a saved file before reading sources.
        if (DerpyManager.getInputDictionary() != null) {
            DerpyManager.loadDictionary();
        }
        DerpyManager.setWordAccuracy();
        DerpyManager.readSources();
        DerpyManager.checkIfRequestedAccuracyIsWithinAcceptableBounds();
        if (DerpyManager.shouldWrite()) {
            System.out.println(DerpyManager.write());
        } else {
            // -r flag: read-only run.
            printIfVerbose("Write skipped...");
        }
        // -s flag: persist the (possibly merged) dictionary.
        if (DerpyManager.getOutputDictionary() != null) {
            DerpyManager.saveDictionary();
        }
    }
}
/** Prints {@code msg} only when verbose mode (-v) is enabled. */
public static void printIfVerbose(String msg) {
    if (!DerpyManager.isVERBOSE()) {
        return;
    }
    System.out.println(msg);
}
/** Prints {@code msg} only when verbose mode is NOT enabled. */
public static void printIfNotVerbose(String msg) {
    if (DerpyManager.isVERBOSE()) {
        return;
    }
    System.out.println(msg);
}
/**
* Method for checking command line flags
*
* @param args The arguments
*/
/**
 * Method for checking command line flags.
 *
 * <p>Refactor: the source-registration logic (valid file / *STDIN* / invalid
 * name) appeared twice verbatim — once in the {@code -w} branch and once in
 * the default branch — differing only in the weight. It is now the single
 * helper {@link #addSource(String, int)}; behavior is unchanged.
 *
 * @param args The arguments
 */
public static void checkFlags(String[] args) {
    for (int i = 0; i < args.length; i++) {
        if (args[i].equals("-a")) {
            try {
                DerpyManager.setAccuracy(Integer.parseInt(args[++i]));
                DerpyManager.setAccuracy_write(DerpyManager.getAccuracy());
                if (DerpyManager.getAccuracy() < 0) {
                    DerpyLogger.error("Argument must be a positive integer! (i.e. \"-a 3\")");
                }
            } catch (Exception e) {
                DerpyLogger.error("Argument must be a positive integer! (i.e. \"-a 3\")");
            }
        } else if (args[i].equals("-c")) {
            try {
                DerpyManager.setOutput(Integer.parseInt(args[++i]));
                if (DerpyManager.getOutput() < 0) {
                    DerpyLogger.error("Argument must be a positive integer! (i.e. \"-c 1000\")");
                }
            } catch (Exception e) {
                DerpyLogger.error("Argument must be a positive integer! (i.e. \"-c 1000\")");
            }
        } else if (args[i].equals("-h") || args[i].equals("--help")) {
            showUsage();
            System.exit(0);
        } else if (args[i].equals("-o")) {
            // Hyphen keeps the default (stdout).
            if (!args[++i].equals("-")) {
                DerpyManager.setOutputFile(args[i]);
            }
        } else if (args[i].equals("-l")) {
            if (!args[++i].equals("-")) {
                DerpyManager.setInputDictionary(args[i]);
            }
        } else if (args[i].equals("-v")) {
            DerpyManager.setVERBOSE(true);
        } else if (args[i].equals("-nf")) {
            DerpyManager.setFormatText(false);
        } else if (args[i].equals("-s")) {
            if (!args[++i].equals("-")) {
                DerpyManager.setOutputDictionary(args[i]);
            }
        } else if (args[i].equals("-t")) {
            try {
                DerpyManager.setThreads(Integer.parseInt(args[++i]));
                if (DerpyManager.getThreads() < 1) {
                    DerpyLogger.error("Argument must be a positive integer greater than 1! (i.e. \"-t 2\")");
                }
            } catch (Exception e) {
                DerpyLogger.error("Argument must be a positive integer greater than 1! (i.e. \"-t 2\")");
            }
        } else if (args[i].equals("-i")) {
            DerpyManager.setIgnorePunctuation(true);
        } else if (args[i].equals("-r")) {
            DerpyManager.setWrite(false);
        } else if (args[i].equals("-w")) {
            try {
                int weight = Integer.parseInt(args[++i]);
                if (weight <= 0) {
                    DerpyLogger.error("Argument must be a positive integer! (i.e. \"-w 1 [filename]\")");
                } else {
                    ++i;
                    // Weighted sources disable threading.
                    DerpyManager.setThreadable(false);
                    addSource(args[i], weight);
                }
            } catch (Exception e) {
                DerpyLogger.error("Argument must be a positive integer! (i.e. \"-w 1 [filename]\")");
            }
        } else if (args[i].equals("-fo")) {
            i++;
            String fmt = args[i].toLowerCase();
            if (fmt.equals("plaintext") || fmt.equals("text") || fmt.equals("txt")) {
                DerpyManager.setFileOutputFormat(DerpyFormatter.DERPY_FORMAT_PLAINTEXT);
            } else if (fmt.equals("html") || fmt.equals("htm")) {
                DerpyManager.setFileOutputFormat(DerpyFormatter.DERPY_FORMAT_HTML);
            }
        } else if (args[i].equals("-fi")) {
            i++;
            String fmt = args[i].toLowerCase();
            if (fmt.equals("plaintext") || fmt.equals("text") || fmt.equals("txt")) {
                DerpyManager.setFileInputFormat(DerpyFormatter.DERPY_FORMAT_PLAINTEXT);
            } else if (fmt.equals("html") || fmt.equals("htm")) {
                DerpyManager.setFileInputFormat(DerpyFormatter.DERPY_FORMAT_HTML);
            } else if (fmt.equals("normal") || fmt.equals("norm")) {
                DerpyManager.setFileInputFormat(DerpyFormatter.DERPY_FORMAT_TEXT);
            }
        } else if (args[i].equals("-d")) {
            DerpyLogger.setDebugMode(true);
        } else if (args[i].equals("-st")) {
            DerpyManager.setStrictMode(true);
        } else {
            // Bare argument: treat as a source file with weight 1.
            addSource(args[i], 1);
        }
    }
}

/**
 * Registers one source with the given relative weight: an existing valid file
 * is added by absolute path; the literal *stdin* (when the name fails the
 * filename check) buffers standard input; anything else is an error.
 */
private static void addSource(String name, int weight) {
    if (isFilenameValid(name)) {
        // Silently skipped when the file does not exist (original behavior).
        if (new File(name).exists()) {
            DerpyManager.getSources().add(new File(name).getAbsolutePath());
            DerpyManager.getWeights().add(weight);
        }
    } else if (name.toLowerCase().equals("*stdin*")) {
        readStdinOnce();
        DerpyManager.getSources().add("*STDIN*");
        DerpyManager.getWeights().add(weight);
    } else {
        DerpyLogger.error("Invalid filename: " + name);
    }
}

/** Buffers all of System.in into DerpyManager.stdin the first time it is needed. */
private static void readStdinOnce() {
    if (DerpyManager.stdin == null) {
        BufferedReader derpReader = new BufferedReader(new InputStreamReader(System.in));
        StringBuilder builder = new StringBuilder();
        String aux = "";
        try {
            while ((aux = derpReader.readLine()) != null) {
                builder.append(aux);
                builder.append('\n');
            }
        } catch (IOException ex) {
            Logger.getLogger(Boot.class.getName()).log(Level.SEVERE, null, ex);
        }
        DerpyManager.stdin = builder.toString();
    }
}
}
| |
package com.brightgenerous.lucene;
import java.io.Serializable;
import java.lang.ref.SoftReference;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentHashMap;
import com.brightgenerous.commons.EqualsUtils;
import com.brightgenerous.commons.HashCodeUtils;
import com.brightgenerous.commons.ToStringUtils;
import com.brightgenerous.lang.Args;
import com.brightgenerous.lucene.delegate.LuceneUtility;
import com.brightgenerous.lucene.delegate.StringDistanceDelegater;
@SuppressWarnings("deprecation")
public class LuceneUtils implements Serializable {
// Serialization id; instance state is only the two metric flags.
private static final long serialVersionUID = -5768707421292489384L;
// Presumably reports whether the optional Lucene delegate classes resolved on
// the classpath — TODO confirm against LuceneUtility.
public static boolean resolved() {
    return LuceneUtility.RESOLVED;
}
/** Cache key identifying a metric configuration (which distances are enabled). */
static class InstanceKey implements Serializable {

    private static final long serialVersionUID = -5571606798438371038L;

    private final boolean levenstein;

    private final boolean jaroWinkler;

    public InstanceKey(boolean levenstein, boolean jaroWinkler) {
        this.levenstein = levenstein;
        this.jaroWinkler = jaroWinkler;
    }

    @Override
    public int hashCode() {
        // Same 17/37 scheme as before, folded into two steps.
        int result = (37 * 17) + (levenstein ? 1 : 0);
        return (37 * result) + (jaroWinkler ? 1 : 0);
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof InstanceKey)) {
            // Also covers obj == null.
            return false;
        }
        InstanceKey other = (InstanceKey) obj;
        return (levenstein == other.levenstein) && (jaroWinkler == other.jaroWinkler);
    }
}
// Metric flags for this instance; at least one must be enabled.
private final boolean levenstein;
private final boolean jaroWinkler;
// Private: instances are obtained through the cached static factories below.
private LuceneUtils(boolean levenstein, boolean jaroWinkler) {
    Args.isTrue(levenstein || jaroWinkler, "(levenstein || jaroWinkler) must be true.");
    this.levenstein = levenstein;
    this.jaroWinkler = jaroWinkler;
}
/** Instance with both metrics enabled (combined score). */
public static LuceneUtils get() {
    return getInstance(true, true);
}
/** Instance using only the Levenstein metric. */
public static LuceneUtils getLevenstein() {
    return getInstance(true, false);
}
/** Instance using only the Jaro-Winkler metric. */
public static LuceneUtils getJaroWinkler() {
    return getInstance(false, true);
}
// Configuration -> soft reference cache, so instances can be reclaimed under
// memory pressure. Volatile for the double-checked lazy init below.
private static volatile Map<InstanceKey, SoftReference<LuceneUtils>> cache;
/**
 * Returns the (possibly cached) utility for the given metric flags. When a
 * cached soft reference turns out to have been cleared, every cleared entry
 * is purged before a fresh instance is created and cached.
 */
protected static LuceneUtils getInstance(boolean levenstein, boolean jaroWinkler) {
    if (cache == null) {
        synchronized (LuceneUtils.class) {
            if (cache == null) {
                cache = new ConcurrentHashMap<>();
            }
        }
    }
    InstanceKey ik = new InstanceKey(levenstein, jaroWinkler);
    SoftReference<LuceneUtils> sr = cache.get(ik);
    LuceneUtils ret;
    if (sr != null) {
        ret = sr.get();
        if (ret != null) {
            return ret;
        }
        // Referent collected: sweep out all stale entries.
        Set<InstanceKey> dels = new HashSet<>();
        for (Entry<InstanceKey, SoftReference<LuceneUtils>> entry : cache.entrySet()) {
            if (entry.getValue().get() == null) {
                dels.add(entry.getKey());
            }
        }
        for (InstanceKey del : dels) {
            cache.remove(del);
        }
    }
    ret = new LuceneUtils(levenstein, jaroWinkler);
    cache.put(ik, new SoftReference<>(ret));
    return ret;
}
/** Most similar candidate under this instance's configured metrics (null for no input). */
public String near(String value, Collection<String> objs) {
    return near(value, objs, levenstein, jaroWinkler);
}
/** Array overload of {@link #near(String, Collection)}. */
public String near(String value, String[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return near(value, Arrays.asList(objs), levenstein, jaroWinkler);
}
/** Most similar object, comparing the strings produced by {@code extracter}. */
public <T> T near(String value, Extracter<T> extracter, Collection<T> objs) {
    return near(value, extracter, objs, levenstein, jaroWinkler);
}
/** Array overload of {@link #near(String, Extracter, Collection)}. */
public <T> T near(String value, Extracter<T> extracter, T[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return near(value, extracter, Arrays.asList(objs), levenstein, jaroWinkler);
}
/** Least similar candidate under this instance's configured metrics. */
public String far(String value, Collection<String> objs) {
    return far(value, objs, levenstein, jaroWinkler);
}
/** Array overload of {@link #far(String, Collection)}. */
public String far(String value, String[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return far(value, Arrays.asList(objs), levenstein, jaroWinkler);
}
/** Least similar object, comparing the strings produced by {@code extracter}. */
public <T> T far(String value, Extracter<T> extracter, Collection<T> objs) {
    return far(value, extracter, objs, levenstein, jaroWinkler);
}
/** Array overload of {@link #far(String, Extracter, Collection)}. */
public <T> T far(String value, Extracter<T> extracter, T[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return far(value, extracter, Arrays.asList(objs), levenstein, jaroWinkler);
}
/** Nearest string using the Levenstein metric alone. */
public static String nearLevenstein(String value, Collection<String> objs) {
    return near(value, objs, true, false);
}
/** Array overload of {@link #nearLevenstein(String, Collection)}. */
public static String nearLevenstein(String value, String[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return nearLevenstein(value, Arrays.asList(objs));
}
/** Nearest string using the Jaro-Winkler metric alone. */
public static String nearJaroWinkler(String value, Collection<String> objs) {
    return near(value, objs, false, true);
}
/** Array overload of {@link #nearJaroWinkler(String, Collection)}. */
public static String nearJaroWinkler(String value, String[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return nearJaroWinkler(value, Arrays.asList(objs));
}
/** Nearest string using the combined score of both metrics. */
public static String nearBoth(String value, Collection<String> objs) {
    return near(value, objs, true, true);
}
/** Array overload of {@link #nearBoth(String, Collection)}. */
public static String nearBoth(String value, String[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return nearBoth(value, Arrays.asList(objs));
}
/**
 * Core scan: returns the candidate with the highest similarity score,
 * short-circuiting on a perfect (>= 1.0) match; null when there is no input.
 */
private static String near(String value, Collection<String> objs, boolean levenstein,
        boolean jaroWinkler) {
    if ((value == null) || (objs == null) || objs.isEmpty()) {
        return null;
    }
    String best = null;
    double bestScore = Double.MIN_VALUE;
    for (String candidate : objs) {
        double score = getDistance(candidate, value, levenstein, jaroWinkler);
        if (score >= 1.0d) {
            return candidate;
        }
        if (bestScore < score) {
            bestScore = score;
            best = candidate;
        }
    }
    return best;
}
/** Nearest object by Levenstein distance over extracted strings. */
public static <T> T nearLevenstein(String value, Extracter<T> extracter, Collection<T> objs) {
    return near(value, extracter, objs, true, false);
}
/** Array overload of {@link #nearLevenstein(String, Extracter, Collection)}. */
public static <T> T nearLevenstein(String value, Extracter<T> extracter, T[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return nearLevenstein(value, extracter, Arrays.asList(objs));
}
/** Nearest object by Jaro-Winkler distance over extracted strings. */
public static <T> T nearJaroWinkler(String value, Extracter<T> extracter, Collection<T> objs) {
    return near(value, extracter, objs, false, true);
}
/** Array overload of {@link #nearJaroWinkler(String, Extracter, Collection)}. */
public static <T> T nearJaroWinkler(String value, Extracter<T> extracter, T[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return nearJaroWinkler(value, extracter, Arrays.asList(objs));
}
/** Nearest object by the combined score over extracted strings. */
public static <T> T nearBoth(String value, Extracter<T> extracter, Collection<T> objs) {
    return near(value, extracter, objs, true, true);
}
/** Array overload of {@link #nearBoth(String, Extracter, Collection)}. */
public static <T> T nearBoth(String value, Extracter<T> extracter, T[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return nearBoth(value, extracter, Arrays.asList(objs));
}
/**
 * Core scan over arbitrary objects: the comparison string for each candidate
 * comes from {@code extracter} (default extracter when null). Highest score
 * wins; a perfect (>= 1.0) score returns immediately.
 */
private static <T> T near(String value, Extracter<T> extracter, Collection<T> objs,
        boolean levenstein, boolean jaroWinkler) {
    if ((value == null) || (objs == null) || objs.isEmpty()) {
        return null;
    }
    Extracter<T> ex = (extracter != null) ? extracter : LuceneUtils.<T> getDefaultExtracter();
    T best = null;
    double bestScore = Double.MIN_VALUE;
    for (T candidate : objs) {
        double score = getDistance(ex.extract(candidate), value, levenstein, jaroWinkler);
        if (score >= 1.0d) {
            return candidate;
        }
        if (bestScore < score) {
            bestScore = score;
            best = candidate;
        }
    }
    return best;
}
/** Least similar string using the Levenstein metric alone. */
public static String farLevenstein(String value, Collection<String> objs) {
    return far(value, objs, true, false);
}
/** Array overload of {@link #farLevenstein(String, Collection)}. */
public static String farLevenstein(String value, String[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return farLevenstein(value, Arrays.asList(objs));
}
/** Least similar string using the Jaro-Winkler metric alone. */
public static String farJaroWinkler(String value, Collection<String> objs) {
    return far(value, objs, false, true);
}
/** Array overload of {@link #farJaroWinkler(String, Collection)}. */
public static String farJaroWinkler(String value, String[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return farJaroWinkler(value, Arrays.asList(objs));
}
/** Least similar string using the combined score of both metrics. */
public static String farBoth(String value, Collection<String> objs) {
    return far(value, objs, true, true);
}
/** Array overload of {@link #farBoth(String, Collection)}. */
public static String farBoth(String value, String[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return farBoth(value, Arrays.asList(objs));
}
/**
 * Core scan: returns the candidate with the LOWEST similarity score,
 * short-circuiting when a score of 0 (maximal dissimilarity) is seen.
 */
private static String far(String value, Collection<String> objs, boolean levenstein,
        boolean jaroWinkler) {
    if ((value == null) || (objs == null) || objs.isEmpty()) {
        return null;
    }
    String worst = null;
    double worstScore = Double.MAX_VALUE;
    for (String candidate : objs) {
        double score = getDistance(candidate, value, levenstein, jaroWinkler);
        if (score <= 0.0d) {
            return candidate;
        }
        if (score < worstScore) {
            worstScore = score;
            worst = candidate;
        }
    }
    return worst;
}
/** Least similar object by Levenstein distance over extracted strings. */
public static <T> T farLevenstein(String value, Extracter<T> extracter, Collection<T> objs) {
    return far(value, extracter, objs, true, false);
}
/** Array overload of {@link #farLevenstein(String, Extracter, Collection)}. */
public static <T> T farLevenstein(String value, Extracter<T> extracter, T[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return farLevenstein(value, extracter, Arrays.asList(objs));
}
/** Least similar object by Jaro-Winkler distance over extracted strings. */
public static <T> T farJaroWinkler(String value, Extracter<T> extracter, Collection<T> objs) {
    return far(value, extracter, objs, false, true);
}
/** Array overload of {@link #farJaroWinkler(String, Extracter, Collection)}. */
public static <T> T farJaroWinkler(String value, Extracter<T> extracter, T[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return farJaroWinkler(value, extracter, Arrays.asList(objs));
}
/** Least similar object by the combined score over extracted strings. */
public static <T> T farBoth(String value, Extracter<T> extracter, Collection<T> objs) {
    return far(value, extracter, objs, true, true);
}
/** Array overload of {@link #farBoth(String, Extracter, Collection)}. */
public static <T> T farBoth(String value, Extracter<T> extracter, T[] objs) {
    if ((value == null) || (objs == null) || (objs.length < 1)) {
        return null;
    }
    return farBoth(value, extracter, Arrays.asList(objs));
}
/**
 * Core scan over arbitrary objects: lowest similarity score wins; a score of
 * 0 (maximal dissimilarity) returns immediately. A null extracter falls back
 * to the default String.valueOf-based one.
 */
private static <T> T far(String value, Extracter<T> extracter, Collection<T> objs,
        boolean levenstein, boolean jaroWinkler) {
    if ((value == null) || (objs == null) || objs.isEmpty()) {
        return null;
    }
    Extracter<T> ex = (extracter != null) ? extracter : LuceneUtils.<T> getDefaultExtracter();
    T worst = null;
    double worstScore = Double.MAX_VALUE;
    for (T candidate : objs) {
        double score = getDistance(ex.extract(candidate), value, levenstein, jaroWinkler);
        if (score <= 0.0d) {
            return candidate;
        }
        if (score < worstScore) {
            worstScore = score;
            worst = candidate;
        }
    }
    return worst;
}
/** Comparator ordering strings by closeness to {@code value} (closest first), using this instance's metrics. */
public Comparator<String> comparator(String value) {
    return comparator(value, levenstein, jaroWinkler);
}
/** Closest-first string comparator using only the Levenstein metric. */
public static Comparator<String> comparatorLevenstein(String value) {
    return comparator(value, true, false);
}
/** Closest-first string comparator using only the Jaro-Winkler metric. */
public static Comparator<String> comparatorJaroWinkler(String value) {
    return comparator(value, false, true);
}
/** Closest-first string comparator using the combined score. */
public static Comparator<String> comparatorBoth(String value) {
    return comparator(value, true, true);
}
// Validates arguments and builds the distance-caching comparator.
private static Comparator<String> comparator(String value, boolean levenstein,
        boolean jaroWinkler) {
    Args.notNull(value, "value");
    Args.isTrue(levenstein || jaroWinkler, "(levenstein || jaroWinkler) must be true.");
    return new StringComparator(value, levenstein, jaroWinkler);
}
/** Closest-first comparator over arbitrary objects; {@code extracter} supplies the compared string (null uses the default extracter). */
public <T> Comparator<T> comparator(String value, Extracter<T> extracter) {
    return comparator(value, extracter, levenstein, jaroWinkler);
}
/** Levenstein-only variant of {@link #comparator(String, Extracter)}. */
public static <T> Comparator<T> comparatorLevenstein(String value, Extracter<T> extracter) {
    return comparator(value, extracter, true, false);
}
/** Jaro-Winkler-only variant of {@link #comparator(String, Extracter)}. */
public static <T> Comparator<T> comparatorJaroWinkler(String value, Extracter<T> extracter) {
    return comparator(value, extracter, false, true);
}
/** Combined-score variant of {@link #comparator(String, Extracter)}. */
public static <T> Comparator<T> comparatorBoth(String value, Extracter<T> extracter) {
    return comparator(value, extracter, true, true);
}
// Validates arguments, substitutes the default extracter when none is given.
private static <T> Comparator<T> comparator(String value, Extracter<T> extracter,
        boolean levenstein, boolean jaroWinkler) {
    Args.notNull(value, "value");
    Args.isTrue(levenstein || jaroWinkler, "(levenstein || jaroWinkler) must be true.");
    if (extracter == null) {
        extracter = getDefaultExtracter();
    }
    return new ExtractComparator<>(value, extracter, levenstein, jaroWinkler);
}
/**
 * Fallback extracter: null maps to null; a String is returned as-is; every
 * other object is rendered with String.valueOf.
 */
private static <T> Extracter<T> getDefaultExtracter() {
    return new Extracter<T>() {
        @Override
        public String extract(T obj) {
            // String.valueOf(s) returns s itself for a non-null String, so a
            // single ternary covers all three original branches.
            return (obj == null) ? null : String.valueOf(obj);
        }
    };
}
// Lazily-created metric delegates; volatile for the double-checked init below.
private static volatile StringDistanceDelegater ld;
private static volatile StringDistanceDelegater jd;
/**
 * Similarity score between obj and value. A single enabled metric returns
 * that metric's distance. With BOTH enabled the result is the sum of squares
 * of the two distances, so the combined score can exceed 1.0 — callers rely
 * on this: near() short-circuits at >= 1.0 and far() at <= 0.0.
 */
private static double getDistance(String obj, String value, boolean levenstein,
        boolean jaroWinkler) {
    if ((levenstein && (ld == null)) || (jaroWinkler && (jd == null))) {
        synchronized (LuceneUtils.class) {
            if (levenstein && (ld == null)) {
                ld = LuceneUtility.createLevensteinDistance();
            }
            if (jaroWinkler && (jd == null)) {
                jd = LuceneUtility.createJaroWinklerDistance();
            }
        }
    }
    if (levenstein && jaroWinkler) {
        return Math.pow(ld.getDistance(obj, value), 2)
                + Math.pow(jd.getDistance(obj, value), 2);
    }
    if (levenstein) {
        return ld.getDistance(obj, value);
    }
    if (jaroWinkler) {
        return jd.getDistance(obj, value);
    }
    // Unreachable for instances built through the factories (flags validated).
    throw new IllegalStateException();
}
/**
 * Orders strings by descending similarity to a fixed target (closest first),
 * falling back to natural String order (nulls first) on ties. Distances are
 * memoized per string in a synchronized WeakHashMap.
 */
static class StringComparator implements Comparator<String> {
    private final String value;
    private final boolean levenstein;
    private final boolean jaroWinkler;
    // Weak-keyed so cached entries vanish with their strings.
    private final Map<String, Double> caches = Collections
            .synchronizedMap(new WeakHashMap<String, Double>());
    public StringComparator(String value, boolean levenstein, boolean jaroWinkler) {
        this.value = value;
        this.levenstein = levenstein;
        this.jaroWinkler = jaroWinkler;
    }
    @Override
    public int compare(String o1, String o2) {
        double d1 = distance(o1);
        double d2 = distance(o2);
        // between 0 ... 1.0, correct closer to 1.0
        if (d1 == d2) {
            // Tie-break: identical refs equal, nulls sort first, else lexical.
            if (o1 == o2) {
                return 0;
            }
            if (o1 == null) {
                return -1;
            }
            if (o2 == null) {
                return 1;
            }
            return o1.compareTo(o2);
        }
        // Descending: the higher (closer) score sorts earlier.
        if (d1 < d2) {
            return 1;
        }
        return -1;
    }
    // Memoized distance lookup; double-checked under the cache's own lock.
    private double distance(String obj) {
        Double ret = caches.get(obj);
        if (ret == null) {
            synchronized (caches) {
                ret = caches.get(obj);
                if (ret == null) {
                    ret = Double.valueOf(getDistance(obj, value, levenstein, jaroWinkler));
                    caches.put(obj, ret);
                }
            }
        }
        return ret.doubleValue();
    }
}
/**
 * Orders arbitrary objects by descending similarity of their extracted
 * strings to a fixed target, tie-breaking on the extracted strings (nulls
 * first). Extraction and distance are memoized per object in a synchronized
 * WeakHashMap.
 */
static class ExtractComparator<T> implements Comparator<T> {
    private final String value;
    private final Extracter<T> extracter;
    private final boolean levenstein;
    private final boolean jaroWinkler;
    // Weak-keyed so cached entries vanish with their objects.
    private final Map<T, Holder> caches = Collections
            .synchronizedMap(new WeakHashMap<T, Holder>());
    public ExtractComparator(String value, Extracter<T> extracter, boolean levenstein,
            boolean jaroWinkler) {
        this.value = value;
        this.extracter = extracter;
        this.levenstein = levenstein;
        this.jaroWinkler = jaroWinkler;
    }
    @Override
    public int compare(T o1, T o2) {
        Holder h1 = distance(o1);
        Holder h2 = distance(o2);
        // between 0 ... 1.0, correct closer to 1.0
        if (h1.distance == h2.distance) {
            // Tie-break on the extracted strings; nulls sort first.
            if (h1.value == h2.value) {
                return 0;
            }
            if (h1.value == null) {
                return -1;
            }
            if (h2.value == null) {
                return 1;
            }
            return h1.value.compareTo(h2.value);
        }
        // Descending: the higher (closer) score sorts earlier.
        if (h1.distance < h2.distance) {
            return 1;
        }
        return -1;
    }
    // Memoized extraction + distance; double-checked under the cache's lock.
    private Holder distance(T obj) {
        Holder ret = caches.get(obj);
        if (ret == null) {
            synchronized (caches) {
                ret = caches.get(obj);
                if (ret == null) {
                    ret = new Holder();
                    ret.value = extracter.extract(obj);
                    ret.distance = getDistance(ret.value, value, levenstein, jaroWinkler);
                    caches.put(obj, ret);
                }
            }
        }
        return ret;
    }
    // Pair of (extracted string, its distance to the target).
    static class Holder {
        double distance;
        String value;
    }
}
/** Delegates to the alternate hash implementation when available, else the default. */
@Override
public int hashCode() {
    return HashCodeUtils.resolved() ? HashCodeUtils.hashCodeAlt(null, this) : super.hashCode();
}
/** Delegates to the alternate equality implementation when available, else the default. */
@Override
public boolean equals(Object obj) {
    return EqualsUtils.resolved() ? EqualsUtils.equalsAlt(null, this, obj) : super.equals(obj);
}
/** Delegates to the alternate string rendering when available, else the default. */
@Override
public String toString() {
    return ToStringUtils.resolved() ? ToStringUtils.toStringAlt(this) : super.toString();
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.painless;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.painless.action.PainlessContextClassBindingInfo;
import org.elasticsearch.painless.action.PainlessContextClassInfo;
import org.elasticsearch.painless.action.PainlessContextInfo;
import org.elasticsearch.painless.action.PainlessContextInstanceBindingInfo;
import org.elasticsearch.painless.action.PainlessContextMethodInfo;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Common helpers shared by the painless context documentation/API generators: fetches
 * context metadata from a running cluster (addressed by the {@code cluster.uri} system
 * property) and normalizes, filters and sorts class infos for rendering.
 */
public class ContextGeneratorCommon {

    /**
     * Fetches the list of painless context names and then each context's metadata from the
     * cluster, returning the contexts sorted by name.
     *
     * @return sorted list of painless context infos
     * @throws IOException if the cluster cannot be reached or a response cannot be parsed
     */
    @SuppressForbidden(reason = "retrieving data from an internal API not exposed as part of the REST client")
    public static List<PainlessContextInfo> getContextInfos() throws IOException {
        URLConnection getContextNames = new URL(
            "http://" + System.getProperty("cluster.uri") + "/_scripts/painless/_context").openConnection();
        List<String> contextNames;
        // try-with-resources: the original code leaked this parser when parsing threw.
        try (XContentParser parser =
                 JsonXContent.jsonXContent.createParser(null, null, getContextNames.getInputStream())) {
            parser.nextToken();
            parser.nextToken();
            @SuppressWarnings("unchecked")
            List<String> names = (List<String>) (Object) parser.list();
            contextNames = names;
        }
        ((HttpURLConnection) getContextNames).disconnect();
        List<PainlessContextInfo> contextInfos = new ArrayList<>();
        for (String contextName : contextNames) {
            URLConnection getContextInfo = new URL(
                "http://" + System.getProperty("cluster.uri") + "/_scripts/painless/_context?context=" + contextName).openConnection();
            // try-with-resources: the per-context parsers were never closed in the original code.
            try (XContentParser parser =
                     JsonXContent.jsonXContent.createParser(null, null, getContextInfo.getInputStream())) {
                contextInfos.add(PainlessContextInfo.fromXContent(parser));
            }
            ((HttpURLConnection) getContextInfo).disconnect();
        }
        contextInfos.sort(Comparator.comparing(PainlessContextInfo::getName));
        return contextInfos;
    }

    /**
     * Maps a java type name (possibly an array type like {@code Foo[][]}) to its display name,
     * preserving the array brackets. Unknown types are returned unchanged.
     *
     * @param javaNamesToDisplayNames lookup from java class name to display name
     * @param javaType the java type name to translate
     * @return the display name, with array brackets re-appended when applicable
     */
    public static String getType(Map<String, String> javaNamesToDisplayNames, String javaType) {
        if (javaType.endsWith("[]") == false) {
            return javaNamesToDisplayNames.getOrDefault(javaType, javaType);
        }
        int bracePosition = javaType.indexOf('[');
        String braces = javaType.substring(bracePosition);
        String type = javaType.substring(0, bracePosition);
        if (javaNamesToDisplayNames.containsKey(type)) {
            return javaNamesToDisplayNames.get(type) + braces;
        }
        return javaType;
    }

    /**
     * Builds the java-name-to-display-name map across all contexts. Imported classes show
     * their simple name; others keep the fully-qualified name. Inner-class '$' separators
     * become '.' in either case. First mapping for a name wins.
     */
    private static Map<String, String> getDisplayNames(Collection<PainlessContextInfo> contextInfos) {
        Map<String, String> javaNamesToDisplayNames = new HashMap<>();
        for (PainlessContextInfo contextInfo : contextInfos) {
            for (PainlessContextClassInfo classInfo : contextInfo.getClasses()) {
                String className = classInfo.getName();
                if (javaNamesToDisplayNames.containsKey(className) == false) {
                    if (classInfo.isImported()) {
                        javaNamesToDisplayNames.put(className,
                            className.substring(className.lastIndexOf('.') + 1).replace('$', '.'));
                    } else {
                        javaNamesToDisplayNames.put(className, className.replace('$', '.'));
                    }
                }
            }
        }
        return javaNamesToDisplayNames;
    }

    /**
     * Filters out primitives, {@code def} and internal classes, then sorts by package, with
     * imported classes before non-imported ones within a package (imported compared by simple
     * name, non-imported by fully-qualified name).
     *
     * @param unsortedClassInfos class infos to filter and sort; not modified
     * @return a new, filtered, sorted list
     */
    public static List<PainlessContextClassInfo> sortClassInfos(Collection<PainlessContextClassInfo> unsortedClassInfos) {
        List<PainlessContextClassInfo> classInfos = new ArrayList<>(unsortedClassInfos);
        classInfos.removeIf(v ->
            "void".equals(v.getName()) || "boolean".equals(v.getName()) || "byte".equals(v.getName()) ||
            "short".equals(v.getName()) || "char".equals(v.getName()) || "int".equals(v.getName()) ||
            "long".equals(v.getName()) || "float".equals(v.getName()) || "double".equals(v.getName()) ||
            "org.elasticsearch.painless.lookup.def".equals(v.getName()) ||
            isInternalClass(v.getName())
        );
        classInfos.sort((c1, c2) -> {
            String n1 = c1.getName();
            String n2 = c2.getName();
            boolean i1 = c1.isImported();
            boolean i2 = c2.isImported();
            String p1 = n1.substring(0, n1.lastIndexOf('.'));
            String p2 = n2.substring(0, n2.lastIndexOf('.'));
            int compare = p1.compareTo(p2);
            if (compare == 0) {
                if (i1 && i2) {
                    compare = n1.substring(n1.lastIndexOf('.') + 1).compareTo(n2.substring(n2.lastIndexOf('.') + 1));
                } else if (i1 == false && i2 == false) {
                    compare = n1.compareTo(n2);
                } else {
                    // exactly one is imported: imported sorts first
                    compare = Boolean.compare(i1, i2) * -1;
                }
            }
            return compare;
        });
        return classInfos;
    }

    /** Returns true for whitelisted classes that are internal and excluded from generated docs. */
    private static boolean isInternalClass(String javaName) {
        return javaName.equals("org.elasticsearch.script.ScoreScript") ||
            javaName.equals("org.elasticsearch.xpack.sql.expression.function.scalar.geo.GeoShape") ||
            javaName.equals("org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalSqlScriptUtils") ||
            javaName.equals("org.elasticsearch.xpack.sql.expression.literal.IntervalDayTime") ||
            javaName.equals("org.elasticsearch.xpack.sql.expression.literal.IntervalYearMonth") ||
            javaName.equals("org.elasticsearch.xpack.eql.expression.function.scalar.whitelist.InternalEqlScriptUtils") ||
            javaName.equals("org.elasticsearch.xpack.ql.expression.function.scalar.InternalQlScriptUtils") ||
            javaName.equals("org.elasticsearch.xpack.ql.expression.function.scalar.whitelist.InternalQlScriptUtils") ||
            javaName.equals("org.elasticsearch.script.ScoreScript$ExplanationHolder");
    }

    /**
     * Returns {@code classInfos} minus everything in {@code exclude}, leaving the input list
     * untouched.
     *
     * @param exclude class infos shared by all contexts
     * @param classInfos class infos for a single context
     * @return a new list without the common class infos
     */
    public static List<PainlessContextClassInfo> excludeCommonClassInfos(
        Set<PainlessContextClassInfo> exclude,
        List<PainlessContextClassInfo> classInfos
    ) {
        List<PainlessContextClassInfo> uniqueClassInfos = new ArrayList<>(classInfos);
        uniqueClassInfos.removeIf(exclude::contains);
        return uniqueClassInfos;
    }

    /**
     * Aggregated view over a set of painless contexts: the members common to every context
     * plus the per-context JSON models.
     */
    public static class PainlessInfos {
        public final Set<PainlessContextMethodInfo> importedMethods;
        public final Set<PainlessContextClassBindingInfo> classBindings;
        public final Set<PainlessContextInstanceBindingInfo> instanceBindings;
        public final List<PainlessInfoJson.Class> common;
        public final List<PainlessInfoJson.Context> contexts;
        public final Map<String, String> javaNamesToDisplayNames;
        public final Map<String, String> javaNamesToJavadoc;
        public final Map<String, List<String>> javaNamesToArgs;

        /**
         * Builds the aggregate without javadoc enrichment; javadoc/arg maps are left empty.
         */
        public PainlessInfos(List<PainlessContextInfo> contextInfos) {
            javaNamesToDisplayNames = getDisplayNames(contextInfos);
            javaNamesToJavadoc = new HashMap<>();
            javaNamesToArgs = new HashMap<>();
            Set<PainlessContextClassInfo> commonClassInfos = getCommon(contextInfos, PainlessContextInfo::getClasses);
            common = PainlessInfoJson.Class.fromInfos(sortClassInfos(commonClassInfos), javaNamesToDisplayNames);
            importedMethods = getCommon(contextInfos, PainlessContextInfo::getImportedMethods);
            classBindings = getCommon(contextInfos, PainlessContextInfo::getClassBindings);
            instanceBindings = getCommon(contextInfos, PainlessContextInfo::getInstanceBindings);
            contexts = contextInfos.stream()
                .map(ctx -> new PainlessInfoJson.Context(ctx, commonClassInfos, javaNamesToDisplayNames))
                .collect(Collectors.toList());
        }

        /**
         * Builds the aggregate with javadoc enrichment via {@code extractor}.
         *
         * @throws IOException if javadoc extraction fails
         */
        public PainlessInfos(List<PainlessContextInfo> contextInfos, JavadocExtractor extractor) throws IOException {
            javaNamesToDisplayNames = getDisplayNames(contextInfos);
            javaNamesToJavadoc = new HashMap<>();
            javaNamesToArgs = new HashMap<>();
            Set<PainlessContextClassInfo> commonClassInfos = getCommon(contextInfos, PainlessContextInfo::getClasses);
            common = PainlessInfoJson.Class.fromInfos(sortClassInfos(commonClassInfos), javaNamesToDisplayNames, extractor);
            importedMethods = getCommon(contextInfos, PainlessContextInfo::getImportedMethods);
            classBindings = getCommon(contextInfos, PainlessContextInfo::getClassBindings);
            instanceBindings = getCommon(contextInfos, PainlessContextInfo::getInstanceBindings);
            contexts = new ArrayList<>(contextInfos.size());
            for (PainlessContextInfo contextInfo : contextInfos) {
                contexts.add(new PainlessInfoJson.Context(contextInfo, commonClassInfos, javaNamesToDisplayNames, extractor));
            }
        }

        /**
         * Returns the elements of {@code getter.apply(context)} that occur in every context,
         * counting occurrences and keeping those seen exactly {@code contexts.size()} times.
         */
        private <T> Set<T> getCommon(List<PainlessContextInfo> contexts, Function<PainlessContextInfo,List<T>> getter) {
            Map<T, Integer> infoCounts = new HashMap<>();
            for (PainlessContextInfo contextInfo : contexts) {
                for (T info : getter.apply(contextInfo)) {
                    infoCounts.merge(info, 1, Integer::sum);
                }
            }
            return infoCounts.entrySet().stream().filter(
                e -> e.getValue() == contexts.size()
            ).map(Map.Entry::getKey).collect(Collectors.toSet());
        }
    }
}
| |
package org.barracuda.util.net;
/******************************************************************************
* Compilation: javac BinaryIn.java
* Execution: java BinaryIn input output
* Dependencies: none
*
* This library is for reading binary data from an input stream.
*
* % java BinaryIn http://introcs.cs.princeton.edu/cover.jpg output.jpg
*
******************************************************************************/
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.Socket;
import java.net.URL;
import java.net.URLConnection;
/**
* <i>Binary input</i>. This class provides methods for reading
* in bits from a binary input stream, either
* one bit at a time (as a <tt>boolean</tt>),
* 8 bits at a time (as a <tt>byte</tt> or <tt>char</tt>),
* 16 bits at a time (as a <tt>short</tt>),
* 32 bits at a time (as an <tt>int</tt> or <tt>float</tt>), or
* 64 bits at a time (as a <tt>double</tt> or <tt>long</tt>).
* <p>
* The binary input stream can be from standard input, a filename,
* a URL name, a Socket, or an InputStream.
* <p>
* All primitive types are assumed to be represented using their
* standard Java representations, in big-endian (most significant
* byte first) order.
* <p>
* The client should not intermix calls to <tt>BinaryIn</tt> with calls
* to <tt>In</tt>; otherwise unexpected behavior will result.
*
* @author Robert Sedgewick
* @author Kevin Wayne
*/
public final class BitInputStream {
    private static final int EOF = -1;          // sentinel: no more bytes available

    private BufferedInputStream input;          // underlying byte stream
    private int current;                        // the byte currently being consumed
    private int bitsLeft;                       // unread bits remaining in 'current'

    /**
     * Initializes a binary input stream from standard input.
     */
    public BitInputStream() {
        input = new BufferedInputStream(System.in);
        fillBuffer();
    }

    /**
     * Initializes a binary input stream from an <tt>InputStream</tt>.
     *
     * @param is the <tt>InputStream</tt> object
     */
    public BitInputStream(InputStream is) {
        input = new BufferedInputStream(is);
        fillBuffer();
    }

    /**
     * Initializes a binary input stream from a socket.
     *
     * @param socket the socket
     */
    public BitInputStream(Socket socket) {
        try {
            input = new BufferedInputStream(socket.getInputStream());
            fillBuffer();
        }
        catch (IOException ioe) {
            System.err.println("Could not open " + socket);
        }
    }

    /**
     * Initializes a binary input stream from a URL.
     *
     * @param url the URL
     */
    public BitInputStream(URL url) {
        try {
            URLConnection site = url.openConnection();
            input = new BufferedInputStream(site.getInputStream());
            fillBuffer();
        }
        catch (IOException ioe) {
            System.err.println("Could not open " + url);
        }
    }

    /**
     * Initializes a binary input stream from a filename or URL name.
     *
     * @param name the name of the file or URL
     */
    public BitInputStream(String name) {
        try {
            // Resolution order: local file, then classpath resource, then web URL.
            File file = new File(name);
            if (file.exists()) {
                input = new BufferedInputStream(new FileInputStream(file));
                fillBuffer();
                return;
            }
            URL url = getClass().getResource(name);
            if (url == null) {
                url = new URL(name);
            }
            URLConnection site = url.openConnection();
            input = new BufferedInputStream(site.getInputStream());
            fillBuffer();
        }
        catch (IOException ioe) {
            System.err.println("Could not open " + name);
        }
    }

    /** Loads the next byte into 'current' and resets the bit count; EOF on error. */
    private void fillBuffer() {
        try {
            current = input.read();
            bitsLeft = 8;
        }
        catch (IOException e) {
            System.err.println("EOF");
            current = EOF;
            bitsLeft = -1;
        }
    }

    /**
     * Returns true if this binary input stream exists.
     *
     * @return <tt>true</tt> if this binary input stream exists;
     *         <tt>false</tt> otherwise
     */
    public boolean exists() {
        return input != null;
    }

    /**
     * Returns true if this binary input stream is empty.
     *
     * @return <tt>true</tt> if this binary input stream is empty;
     *         <tt>false</tt> otherwise
     */
    public boolean isEmpty() {
        return current == EOF;
    }

    /**
     * Reads the next bit of data from this binary input stream and return as a boolean.
     *
     * @return the next bit of data from this binary input stream as a <tt>boolean</tt>
     * @throws RuntimeException if this binary input stream is empty
     */
    public boolean readBoolean() {
        if (isEmpty()) throw new RuntimeException("Reading from empty input stream");
        bitsLeft--;
        boolean bit = ((current >> bitsLeft) & 1) == 1;   // bits delivered MSB-first
        if (bitsLeft == 0) fillBuffer();
        return bit;
    }

    /**
     * Reads the next 8 bits from this binary input stream and return as an 8-bit char.
     *
     * @return the next 8 bits of data from this binary input stream as a <tt>char</tt>
     * @throws RuntimeException if there are fewer than 8 bits available
     */
    public char readChar() {
        if (isEmpty()) throw new RuntimeException("Reading from empty input stream");
        // Fast path: byte-aligned, so the buffered byte is the whole character.
        if (bitsLeft == 8) {
            int aligned = current;
            fillBuffer();
            return (char) (aligned & 0xff);
        }
        // Unaligned: stitch the remaining low bits of the current byte onto the
        // high bits of the freshly loaded next byte.
        int stitched = current;
        stitched <<= (8 - bitsLeft);
        int savedBits = bitsLeft;
        fillBuffer();
        if (isEmpty()) throw new RuntimeException("Reading from empty input stream");
        bitsLeft = savedBits;
        stitched |= (current >>> bitsLeft);
        return (char) (stitched & 0xff);
    }

    /**
     * Reads the next r bits from this binary input stream and return as an r-bit character.
     *
     * @param r number of bits to read (1..16)
     * @return the next r bits of data from this binary input stream as a <tt>char</tt>
     * @throws RuntimeException if r is out of range or fewer than r bits are available
     */
    public char readChar(int r) {
        if (r < 1 || r > 16) throw new RuntimeException("Illegal value of r = " + r);
        // Delegate the aligned-byte optimization to readChar().
        if (r == 8) return readChar();
        char result = 0;
        for (int bit = 0; bit < r; bit++) {
            result <<= 1;
            if (readBoolean()) result |= 1;
        }
        return result;
    }

    /**
     * Reads the remaining bytes of data from this binary input stream and return as a string.
     *
     * @return the remaining bytes of data from this binary input stream as a <tt>String</tt>
     * @throws RuntimeException if this binary input stream is empty or if the number of bits
     *         available is not a multiple of 8 (byte-aligned)
     */
    public String readString() {
        if (isEmpty()) throw new RuntimeException("Reading from empty input stream");
        StringBuilder sb = new StringBuilder();
        while (!isEmpty()) {
            sb.append(readChar());
        }
        return sb.toString();
    }

    /**
     * Reads the next 16 bits from this binary input stream and return as a 16-bit short.
     *
     * @return the next 16 bits of data from this binary input stream as a <tt>short</tt>
     * @throws RuntimeException if there are fewer than 16 bits available
     */
    public short readShort() {
        short result = 0;
        for (int b = 0; b < 2; b++) {     // big-endian: most significant byte first
            result <<= 8;
            result |= readChar();
        }
        return result;
    }

    /**
     * Reads the next 32 bits from this binary input stream and return as a 32-bit int.
     *
     * @return the next 32 bits of data from this binary input stream as an <tt>int</tt>
     * @throws RuntimeException if there are fewer than 32 bits available
     */
    public int readInt() {
        int result = 0;
        for (int b = 0; b < 4; b++) {     // big-endian: most significant byte first
            result <<= 8;
            result |= readChar();
        }
        return result;
    }

    /**
     * Reads the next r bits from this binary input stream return as an r-bit int.
     *
     * @param r number of bits to read (1..32)
     * @return the next r bits of data from this binary input stream as an <tt>int</tt>
     * @throws RuntimeException if r is out of range or fewer than r bits are available
     */
    public int readInt(int r) {
        if (r < 1 || r > 32) throw new RuntimeException("Illegal value of r = " + r);
        // Delegate the full-word optimization to readInt().
        if (r == 32) return readInt();
        int result = 0;
        for (int bit = 0; bit < r; bit++) {
            result <<= 1;
            if (readBoolean()) result |= 1;
        }
        return result;
    }

    /**
     * Reads the next 64 bits from this binary input stream and return as a 64-bit long.
     *
     * @return the next 64 bits of data from this binary input stream as a <tt>long</tt>
     * @throws RuntimeException if there are fewer than 64 bits available
     */
    public long readLong() {
        long result = 0;
        for (int b = 0; b < 8; b++) {     // big-endian: most significant byte first
            result <<= 8;
            result |= readChar();
        }
        return result;
    }

    /**
     * Reads the next 64 bits from this binary input stream and return as a 64-bit double.
     *
     * @return the next 64 bits of data from this binary input stream as a <tt>double</tt>
     * @throws RuntimeException if there are fewer than 64 bits available
     */
    public double readDouble() {
        return Double.longBitsToDouble(readLong());
    }

    /**
     * Reads the next 32 bits from this binary input stream and return as a 32-bit float.
     *
     * @return the next 32 bits of data from this binary input stream as a <tt>float</tt>
     * @throws RuntimeException if there are fewer than 32 bits available
     */
    public float readFloat() {
        return Float.intBitsToFloat(readInt());
    }

    /**
     * Reads the next 8 bits from this binary input stream and return as an 8-bit byte.
     *
     * @return the next 8 bits of data from this binary input stream as a <tt>byte</tt>
     * @throws RuntimeException if there are fewer than 8 bits available
     */
    public byte readByte() {
        return (byte) (readChar() & 0xff);
    }
}
| |
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.client.stack.config;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import scouter.client.stack.config.preprocessor.Processor;
/**
 * Holder for a stack-dump parser configuration: where a stack starts, how timestamps are
 * parsed, which thread/SQL/service/log patterns apply, and any preprocessing steps.
 * All public accessor/mutator signatures are part of the configuration API.
 */
public class ParserConfig {
    private String configFilename = null;
    private String parserName = null;
    private int stackStartLine = 2;                 // default stack start line is 2
    private String divideStack = null;
    private String timeFormat = null;
    private SimpleDateFormat dateFormat = null;     // derived from timeFormat when set
    private int timeSize = 0;
    private String timeFilter = null;
    private int timePosition = 0;
    private String threadStatus = null;
    private boolean serviceExclude = false;
    private ArrayList<String> workerThreads = new ArrayList<String>();
    private ArrayList<String> workingThreads = new ArrayList<String>();
    private ArrayList<String> sqlPatterns = new ArrayList<String>();
    private ArrayList<String> servicePatterns = new ArrayList<String>();
    private ArrayList<String> logPatterns = new ArrayList<String>();
    private ArrayList<String> excludedStacks = new ArrayList<String>();
    private ArrayList<String> singleStacks = new ArrayList<String>();
    private JmxConfig jmxConfig = null;
    private ArrayList<AnalyzerValue> analyzers = null;          // created lazily
    private ArrayList<Processor> stackPreprocessors = null;
    private Processor.TARGET stackPreprocessorTarget = null;

    public ArrayList<String> getWorkingThread() {
        return workingThreads;
    }

    public void setStackStartLine(int line) {
        stackStartLine = line;
    }

    public int getStackStartLine() {
        return stackStartLine;
    }

    public void setDivideStack(String divideStack) {
        this.divideStack = divideStack;
    }

    public String getDivideStack() {
        return divideStack;
    }

    public ArrayList<String> getSql() {
        return sqlPatterns;
    }

    public ArrayList<String> getService() {
        return servicePatterns;
    }

    public boolean isServiceExclude() {
        return serviceExclude;
    }

    public ArrayList<String> getLog() {
        return logPatterns;
    }

    public ArrayList<String> getExcludeStack() {
        return excludedStacks;
    }

    public ArrayList<String> getSingleStack() {
        return singleStacks;
    }

    public ArrayList<String> getWorkerThread() {
        return workerThreads;
    }

    public String getThreadStatus() {
        return threadStatus;
    }

    public void setThreadStatus(String status) {
        threadStatus = status;
    }

    public void setWorkerThread(ArrayList<String> workerThread) {
        workerThreads = workerThread;
    }

    public void setWorkingThread(ArrayList<String> workingThread) {
        workingThreads = workingThread;
    }

    public void setSql(ArrayList<String> sql) {
        sqlPatterns = sql;
    }

    public void setService(ArrayList<String> service) {
        servicePatterns = service;
    }

    public void setServiceExclude(boolean value) {
        serviceExclude = value;
    }

    public void setLog(ArrayList<String> log) {
        logPatterns = log;
    }

    public void setExcludeStack(ArrayList<String> excludeStack) {
        excludedStacks = excludeStack;
    }

    public void setSingleStack(ArrayList<String> singleStack) {
        singleStacks = singleStack;
    }

    public void setConfigFilename(String filename) {
        configFilename = filename;
    }

    public String getConfigFilename() {
        return configFilename;
    }

    public String getParserName() {
        return parserName;
    }

    public void setParserName(String parserName) {
        this.parserName = parserName;
    }

    public String getTimeFormat() {
        return timeFormat;
    }

    /** Stores the timestamp pattern and eagerly builds the matching SimpleDateFormat. */
    public void setTimeFormat(String timeFormat) {
        this.timeFormat = timeFormat;
        if (this.timeFormat != null)
            dateFormat = new SimpleDateFormat(this.timeFormat);
    }

    public void setStackPreprocessorList(ArrayList<Processor> list) {
        stackPreprocessors = list;
    }

    public ArrayList<Processor> getStackPreprocessorList() {
        return stackPreprocessors;
    }

    /**
     * Parses the preprocessor target attribute ("header", "stack" or "all").
     *
     * @throws RuntimeException if the attribute is missing/empty or not one of the three values
     */
    public void setStackPreprocessorTarget(String target) {
        if (target == null || target.length() == 0) {
            throw new RuntimeException("preProcessor target attribute of preprocessor is not exist!");
        }
        switch (target) {
            case "header":
                stackPreprocessorTarget = Processor.TARGET.HEADER;
                break;
            case "stack":
                stackPreprocessorTarget = Processor.TARGET.STACK;
                break;
            case "all":
                stackPreprocessorTarget = Processor.TARGET.ALL;
                break;
            default:
                throw new RuntimeException("preProcessor target (header/stack/all) of preprocessor is wrong(" + target + ")!");
        }
    }

    public Processor.TARGET getStackPreprocessorTarget() {
        return stackPreprocessorTarget;
    }

    public void setTimeSize(int size) {
        timeSize = size;
    }

    public int getTimeSize() {
        return timeSize;
    }

    public void setTimePosition(int position) {
        timePosition = position;
    }

    public int getTimePosition() {
        return timePosition;
    }

    public void setTimeFilter(String filter) {
        timeFilter = filter;
    }

    public String getTimeFilter() {
        return timeFilter;
    }

    public SimpleDateFormat getSimpleDateFormat() {
        return dateFormat;
    }

    public void setSimpleDateFormat(SimpleDateFormat format) {
        dateFormat = format;
    }

    public ArrayList<AnalyzerValue> getAnalyzerList() {
        return analyzers;
    }

    /**
     * Registers an analyzer definition; the backing list is created on first valid add.
     *
     * @return true if the value was valid and stored, false otherwise
     */
    public boolean addAnalyzer(AnalyzerValue value) {
        if (!value.isValid())
            return false;
        if (analyzers == null)
            analyzers = new ArrayList<AnalyzerValue>();
        analyzers.add(value);
        return true;
    }

    public JmxConfig getJMXConfig() {
        return jmxConfig;
    }

    public void setJMXConfig(int count, int interval, String filePath) {
        jmxConfig = new JmxConfig(count, interval, filePath);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.http;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IOUtils;
import org.apache.tez.common.security.JobTokenSecretManager;
import org.apache.tez.runtime.library.common.security.SecureShuffleUtils;
import org.apache.tez.runtime.library.common.shuffle.orderedgrouped.ShuffleHeader;
import org.apache.tez.util.StopWatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
public class HttpConnection extends BaseHttpConnection {
private static final Logger LOG = LoggerFactory.getLogger(HttpConnection.class);
// Separate ".url" logger so per-URL tracing can be enabled independently of LOG.
private static final Logger URL_LOG = LoggerFactory.getLogger(LOG.getName() + ".url");
private URL url;                                    // source URL being fetched
private final String logIdentifier;                 // tag included in log messages
@VisibleForTesting
protected volatile HttpURLConnection connection;    // live connection; null after cleanup
private volatile DataInputStream input;             // response stream; null until getInputStream()
private volatile boolean connectionSucceeed;        // set once connect() completes (name typo is pre-existing)
private volatile boolean cleanup;                   // set by cleanup(); stops further connect retries
private final JobTokenSecretManager jobTokenSecretMgr; // signs/verifies the URL hash headers
private String encHash;                             // HMAC of msgToEncode sent to the server
private String msgToEncode;                         // canonical message built from the URL
private final HttpConnectionParams httpConnParams;  // timeouts, buffer size, SSL, keep-alive
private final StopWatch stopWatch;                  // reused for per-phase timing logs
private final AtomicLong urlLogCount;               // counts validations for summary logging
/**
 * Creates an HTTP connection wrapper for fetching shuffle output.
 *
 * @param url source URL to fetch from
 * @param connParams connection tuning parameters (timeouts, buffer size, SSL, keep-alive)
 * @param logIdentifier tag used in log messages for this fetch
 * @param jobTokenSecretManager secret manager used to sign and verify the URL hash
 * @throws IOException declared for implementers; this constructor performs no I/O itself
 */
public HttpConnection(URL url, HttpConnectionParams connParams,
    String logIdentifier, JobTokenSecretManager jobTokenSecretManager) throws IOException {
    this.url = url;
    this.httpConnParams = connParams;
    this.logIdentifier = logIdentifier;
    this.jobTokenSecretMgr = jobTokenSecretManager;
    this.stopWatch = new StopWatch();
    this.urlLogCount = new AtomicLong();
    if (LOG.isDebugEnabled()) {
        LOG.debug("MapOutput URL :" + url.toString());
    }
}
@VisibleForTesting
// Builds the canonical message for this URL and its HMAC; both are later used by
// setupConnection() (request header) and validate() (reply verification).
public void computeEncHash() throws IOException {
    // generate hash of the url
    msgToEncode = SecureShuffleUtils.buildMsgFrom(url);
    encHash = SecureShuffleUtils.hashFromString(msgToEncode, jobTokenSecretMgr);
}
// Opens the HttpURLConnection and attaches security and shuffle-protocol headers.
// Does not connect yet; connect(int) drives the actual handshake and retries.
private void setupConnection() throws IOException {
    connection = (HttpURLConnection) url.openConnection();
    if (httpConnParams.isSslShuffle()) {
        // Configure for SSL
        SSLFactory sslFactory = httpConnParams.getSslFactory();
        Preconditions.checkArgument(sslFactory != null, "SSLFactory can not be null");
        sslFactory.configure(connection);
    }
    computeEncHash();
    // put url hash into http header
    connection.addRequestProperty(SecureShuffleUtils.HTTP_HEADER_URL_HASH, encHash);
    // set the read timeout
    connection.setReadTimeout(httpConnParams.getReadTimeout());
    // put shuffle version into http header
    connection.addRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
        ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
    connection.addRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
        ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
}
/**
 * Connect to source using the configured default connection timeout.
 *
 * @return true if connection was successful
 *         false if connection was previously cleaned up
 * @throws IOException upon connection failure
 */
@Override
public boolean connect() throws IOException {
    return connect(httpConnParams.getConnectionTimeout());
}
/**
 * Connect to source with a specific total timeout, retrying in UNIT_CONNECT_TIMEOUT
 * slices until the budget is exhausted. A timeout of 0 means connect with no timeout
 * (unit stays 0, which HttpURLConnection treats as infinite).
 *
 * @param connectionTimeout total connect budget in milliseconds; must be >= 0
 * @return true if connection was successful
 *         false if connection was previously cleaned up
 * @throws IOException upon connection failure or exhausted timeout
 */
private boolean connect(int connectionTimeout) throws IOException {
    stopWatch.reset().start();
    if (connection == null) {
        setupConnection();
    }
    int unit = 0;
    if (connectionTimeout < 0) {
        throw new IOException("Invalid timeout " + "[timeout = " + connectionTimeout + " ms]");
    } else if (connectionTimeout > 0) {
        unit = Math.min(UNIT_CONNECT_TIMEOUT, connectionTimeout);
    }
    // set the connect timeout to the unit-connect-timeout
    connection.setConnectTimeout(unit);
    int connectionFailures = 0;
    while (true) {
        long connectStartTime = System.currentTimeMillis();
        try {
            connection.connect();
            connectionSucceeed = true;
            break;
        } catch (IOException ioe) {
            // Don't attempt another connect if already cleanedup.
            connectionFailures++;
            if (cleanup) {
                LOG.info("Cleanup is set to true. Not attempting to"
                    + " connect again. Last exception was: ["
                    + ioe.getClass().getName() + ", " + ioe.getMessage() + "]");
                return false;
            }
            // update the total remaining connect-timeout
            connectionTimeout -= unit;
            // throw an exception if we have waited for timeout amount of time
            // note that the updated value if timeout is used here
            if (connectionTimeout <= 0) {
                throw new IOException(
                    "Failed to connect to " + url + ", #connectionFailures=" + connectionFailures, ioe);
            }
            // If the attempt failed faster than one unit (e.g. connection refused),
            // sleep out the remainder so retries don't spin tightly.
            long elapsed = System.currentTimeMillis() - connectStartTime;
            if (elapsed < unit) {
                try {
                    long sleepTime = unit - elapsed;
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Sleeping for " + sleepTime + " while establishing connection to " + url +
                            ", since connectAttempt returned in " + elapsed + " ms");
                    }
                    Thread.sleep(sleepTime);
                } catch (InterruptedException e) {
                    throw new IOException(
                        "Connection establishment sleep interrupted, #connectionFailures=" +
                        connectionFailures, e);
                }
            }
            // reset the connect timeout for the last try
            if (connectionTimeout < unit) {
                unit = connectionTimeout;
                // reset the connect time out for the final connect
                connection.setConnectTimeout(unit);
            }
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Time taken to connect to " + url.toString() +
            " " + stopWatch.now(TimeUnit.MILLISECONDS) + " ms; connectionFailures="
            + connectionFailures);
    }
    return true;
}
// Validates the HTTP response: status code, shuffle protocol name/version headers, and
// the HMAC reply hash proving the server verified our signed URL hash.
@Override
public void validate() throws IOException {
    stopWatch.reset().start();
    int rc = connection.getResponseCode();
    if (rc != HttpURLConnection.HTTP_OK) {
        throw new IOException("Got invalid response code " + rc + " from " + url
            + ": " + connection.getResponseMessage());
    }
    // get the shuffle version
    if (!ShuffleHeader.DEFAULT_HTTP_HEADER_NAME.equals(connection
        .getHeaderField(ShuffleHeader.HTTP_HEADER_NAME))
        || !ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION.equals(connection
        .getHeaderField(ShuffleHeader.HTTP_HEADER_VERSION))) {
        throw new IOException("Incompatible shuffle response version");
    }
    // get the replyHash which is HMac of the encHash we sent to the server
    String replyHash =
        connection
            .getHeaderField(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH);
    if (replyHash == null) {
        throw new IOException("security validation of TT Map output failed");
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("url=" + msgToEncode + ";encHash=" + encHash + ";replyHash="
            + replyHash);
    }
    // verify that replyHash is HMac of encHash
    SecureShuffleUtils.verifyReply(replyHash, encHash, jobTokenSecretMgr);
    if (URL_LOG.isInfoEnabled()) {
        // Following log statement will be used by tez-tool perf-analyzer for mapping attempt to NM
        // host
        // NOTE(review): "receievd" is misspelled, but this exact string may be pattern-matched
        // by the perf-analyzer tool mentioned above — confirm before correcting it.
        URL_LOG.info("for url=" + url + " sent hash and receievd reply " +
            stopWatch.now(TimeUnit.MILLISECONDS) + " ms");
    } else {
        // Log summary.
        // NOTE(review): "recieved" is misspelled in the summary message as well.
        if (urlLogCount.incrementAndGet() % 1000 == 0) {
            LOG.info("Sent hash and recieved reply for {} urls", urlLogCount);
        }
    }
}
/**
 * Get the inputstream from the connection, wrapped in a buffered
 * DataInputStream using the configured buffer size.
 *
 * @return DataInputStream over the connection's stream; if the connection
 *         never succeeded, the previously stored value (typically null)
 *         is returned unchanged
 * @throws IOException if the underlying connection cannot provide a stream
 */
@Override
public DataInputStream getInputStream() throws IOException {
  stopWatch.reset().start();
  // Only touch the connection when connect actually succeeded; otherwise
  // whatever 'input' already holds is returned as-is.
  if (connectionSucceeed) {
    input = new DataInputStream(new BufferedInputStream(
        connection.getInputStream(), httpConnParams.getBufferSize()));
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Time taken to getInputStream (connect) " + url +
        " " + stopWatch.now(TimeUnit.MILLISECONDS) + " ms");
  }
  return input;
}
/**
 * Cleanup the connection.
 *
 * <p>Closes the input stream if open; for keep-alive connections drains the
 * error stream so the underlying socket can be reused; disconnects the
 * connection when {@code disconnect} is true or keep-alive is disabled.
 * Safe to call more than once.
 *
 * @param disconnect Close the connection if this is true; otherwise respect keepalive
 * @throws IOException declared for interface compatibility; shutdown errors
 *         are logged rather than propagated
 */
@Override
public void cleanup(boolean disconnect) throws IOException {
  cleanup = true;
  stopWatch.reset().start();
  try {
    if (input != null) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Closing input on " + logIdentifier);
      }
      input.close();
      input = null;
    }
    // For keep-alive the error stream must be read to completion so the
    // connection can be returned to the pool. Refer:
    // http://docs.oracle.com/javase/6/docs/technotes/guides/net/http-keepalive.html
    // The 'connection != null' guard fixes an NPE on a repeated cleanup()
    // call: the first call may disconnect and null out 'connection' below
    // while connectionSucceeed stays true.
    if (connection != null && httpConnParams.isKeepAlive() && connectionSucceeed) {
      readErrorStream(connection.getErrorStream());
    }
    if (connection != null && (disconnect || !httpConnParams.isKeepAlive())) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Closing connection on " + logIdentifier + ", disconnectParam=" + disconnect);
      }
      connection.disconnect();
      connection = null;
    }
  } catch (IOException e) {
    // Shutdown failures are not actionable by callers; log and continue.
    if (LOG.isDebugEnabled()) {
      LOG.debug("Exception while shutting down fetcher " + logIdentifier, e);
    } else {
      LOG.info("Exception while shutting down fetcher " + logIdentifier
          + ": " + e.getMessage());
    }
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Time taken to cleanup connection to " + url +
        " " + stopWatch.now(TimeUnit.MILLISECONDS) + " ms");
  }
}
/**
 * Drain and discard the error stream, if any, so keep-alive connections can
 * be reused. Failures while draining are deliberately ignored: this is a
 * best-effort cleanup step and there is nothing useful to do on error.
 *
 * @param errorStream the connection's error stream; may be null
 */
private void readErrorStream(InputStream errorStream) {
  if (errorStream != null) {
    try {
      final DataOutputBuffer sink = new DataOutputBuffer();
      IOUtils.copyBytes(errorStream, sink, 4096);
      IOUtils.closeStream(sink);
      IOUtils.closeStream(errorStream);
    } catch (IOException ignored) {
      // Best effort only; see javadoc.
    }
  }
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.weatherNotes.services.impl;
import com.github.springtestdbunit.DbUnitTestExecutionListener;
import com.github.springtestdbunit.annotation.DatabaseSetup;
import com.weatherNotes.conf.ApplicationConfig;
import com.weatherNotes.models.PreDefinedNote;
import com.weatherNotes.models.SystemNote;
import com.weatherNotes.services.NotesService;
import java.io.FileOutputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.List;
import org.dbunit.database.DatabaseConnection;
import org.dbunit.database.IDatabaseConnection;
import org.dbunit.database.QueryDataSet;
import org.dbunit.database.search.TablesDependencyHelper;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.xml.FlatXmlDataSet;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import org.springframework.test.context.support.DirtiesContextTestExecutionListener;
import org.springframework.test.context.transaction.TransactionalTestExecutionListener;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.transaction.annotation.Transactional;
/**
 * Integration tests for the NotesServiceImpl service, exercised through the
 * {@link NotesService} interface inside a Spring test context built from
 * {@link ApplicationConfig}. Each test runs in a rolled-back transaction.
 *
 * @author abdo
 */
@RunWith(SpringJUnit4ClassRunner.class)
@WebAppConfiguration
@ContextConfiguration(classes = {ApplicationConfig.class})
@TestExecutionListeners({DependencyInjectionTestExecutionListener.class,
    DirtiesContextTestExecutionListener.class,
    TransactionalTestExecutionListener.class,
    DbUnitTestExecutionListener.class})
@Transactional()
public class NotesServiceImplTest {

    /** Service under test, injected by the Spring test context. */
    @Autowired
    NotesService notesServiceImpl;

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    @Before
    public void setUp() {
    }

    @After
    public void tearDown() {
    }

    /**
     * Test of getPreDefinedNoteByTemp method, of class NotesServiceImpl.
     */
    @Test
    public void testGetPreDefinedNoteByTemp() {
        System.out.println("getPreDefinedNoteByTemp");
        Double temp = 20.0;
        PreDefinedNote result = notesServiceImpl.getPreDefinedNoteByTemp(temp);
        assertNotNull(result);
    }

    /**
     * Test of getPreDefinedNote method, of class NotesServiceImpl.
     */
    @Test
    public void testGetPreDefinedNote() {
        System.out.println("getPreDefinedNote");
        Integer id = 1;
        PreDefinedNote result = notesServiceImpl.getPreDefinedNote(id);
        assertNotNull(result);
    }

    /**
     * Test of updateIfnotExistCreatePredefinedNote method, of class
     * NotesServiceImpl.
     */
    @Test
    public void testUpdateIfnotExistCreatePredefinedNote() {
        System.out.println("updateIfnotExistCreatePredefinedNote");
        PreDefinedNote preDefinedNote = new PreDefinedNote();
        preDefinedNote.setMaxTemp(10.0);
        preDefinedNote.setMinTemp(-5.0);
        preDefinedNote.setValue("test value");
        PreDefinedNote result = notesServiceImpl.updateIfnotExistCreatePredefinedNote(preDefinedNote);
        assertNotNull(result);
        // JUnit's assertEquals takes (expected, actual) in that order; the
        // original call had them swapped, producing misleading failure text.
        assertEquals(preDefinedNote.getMaxTemp(), result.getMaxTemp());
    }

    /**
     * Test of deletePreDefinedNote method, of class NotesServiceImpl.
     * Only verifies that deletion does not throw.
     */
    @Test
    public void testDeletePreDefinedNote() {
        System.out.println("deletePreDefinedNote");
        PreDefinedNote preDefinedNote = new PreDefinedNote();
        preDefinedNote.setId(1);
        notesServiceImpl.deletePreDefinedNote(preDefinedNote);
    }

    /**
     * Test of getAllSystemNotes method, of class NotesServiceImpl.
     */
    @Test
    public void testGetAllSystemNotes() {
        System.out.println("getAllSystemNotes");
        List<SystemNote> result = notesServiceImpl.getAllSystemNotes();
        assertNotNull(result);
    }

    /**
     * Test of updateIfnotExistCreateSystemNote method, of class
     * NotesServiceImpl.
     */
    @Test
    public void testUpdateIfnotExistCreateSystemNote() {
        System.out.println("updateIfnotExistCreateSystemNote");
        SystemNote sysNote = new SystemNote();
        sysNote.setId(1);
        sysNote.setValue(null);
        SystemNote result = notesServiceImpl.updateIfnotExistCreateSystemNote(sysNote);
        assertNotNull(result);
    }

    /**
     * Test of getSystemNote method, of class NotesServiceImpl.
     */
    @Test
    public void testGetSystemNote() {
        System.out.println("getSystemNote");
        Integer id = 1;
        SystemNote result = notesServiceImpl.getSystemNote(id);
        assertNotNull(result);
    }

    /**
     * Test of deleteSysdNote method, of class NotesServiceImpl.
     * Only verifies that deletion does not throw.
     */
    @Test
    public void testDeleteSysdNote() {
        System.out.println("deleteSysdNote");
        SystemNote sysNote = new SystemNote();
        sysNote.setId(1);
        notesServiceImpl.deleteSysdNote(sysNote);
    }

    /**
     * Test of getSysNoteByDate method, of class NotesServiceImpl.
     */
    @Test
    public void testGetSysNoteByDate() {
        System.out.println("getSysNoteByDate");
        String date = "2017-04-19";
        SystemNote result = notesServiceImpl.getSysNoteByDate(date);
        assertNotNull(result);
    }

    /**
     * Test of getAllPredefinedNotes method, of class NotesServiceImpl.
     */
    @Test
    public void testGetAllPredefinedNotes() {
        System.out.println("getAllPredefinedNotes");
        List<PreDefinedNote> result = notesServiceImpl.getAllPredefinedNotes();
        assertNotNull(result);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.layout;
import java.util.HashSet;
import java.util.Set;
import javax.xml.stream.XMLStreamException;
import org.apache.logging.log4j.core.impl.Log4jLogEvent;
import org.apache.logging.log4j.core.jackson.JsonConstants;
import org.apache.logging.log4j.core.jackson.Log4jJsonObjectMapper;
import org.apache.logging.log4j.core.jackson.Log4jXmlObjectMapper;
import org.apache.logging.log4j.core.jackson.Log4jYamlObjectMapper;
import org.apache.logging.log4j.core.jackson.XmlConstants;
import org.codehaus.stax2.XMLStreamWriter2;
import com.fasterxml.jackson.core.PrettyPrinter;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
import com.fasterxml.jackson.dataformat.xml.util.DefaultXmlPrettyPrinter;
/**
 * Builds the Jackson {@link ObjectMapper}/{@link ObjectWriter} and pretty
 * printers used by the JSON, XML and YAML layouts. Each concrete factory
 * also supplies the serialized property names of the {@link Log4jLogEvent}
 * fields that may be filtered out (source, context map, timeMillis/instant,
 * nanoTime).
 *
 * <p>Note: several method names spell "Propert" without the trailing "y";
 * the names are kept as-is because subclasses and callers bind to them.</p>
 */
abstract class JacksonFactory {

    /** Factory for the JSON layout. */
    static class JSON extends JacksonFactory {
        // Flags forwarded verbatim to Log4jJsonObjectMapper.
        private final boolean encodeThreadContextAsList;
        private final boolean includeStacktrace;
        private final boolean stacktraceAsString;
        private final boolean objectMessageAsJsonObject;

        public JSON(final boolean encodeThreadContextAsList, final boolean includeStacktrace, final boolean stacktraceAsString, final boolean objectMessageAsJsonObject) {
            this.encodeThreadContextAsList = encodeThreadContextAsList;
            this.includeStacktrace = includeStacktrace;
            this.stacktraceAsString = stacktraceAsString;
            this.objectMessageAsJsonObject = objectMessageAsJsonObject;
        }

        @Override
        protected String getPropertNameForContextMap() {
            return JsonConstants.ELT_CONTEXT_MAP;
        }

        @Override
        protected String getPropertyNameForTimeMillis() {
            return JsonConstants.ELT_TIME_MILLIS;
        }

        @Override
        protected String getPropertyNameForInstant() {
            return JsonConstants.ELT_INSTANT;
        }

        @Override
        protected String getPropertNameForSource() {
            return JsonConstants.ELT_SOURCE;
        }

        @Override
        protected String getPropertNameForNanoTime() {
            return JsonConstants.ELT_NANO_TIME;
        }

        @Override
        protected PrettyPrinter newCompactPrinter() {
            // Single-line output with no extra whitespace.
            return new MinimalPrettyPrinter();
        }

        @Override
        protected ObjectMapper newObjectMapper() {
            return new Log4jJsonObjectMapper(encodeThreadContextAsList, includeStacktrace, stacktraceAsString, objectMessageAsJsonObject);
        }

        @Override
        protected PrettyPrinter newPrettyPrinter() {
            return new DefaultPrettyPrinter();
        }
    }

    /** Factory for the XML layout. */
    static class XML extends JacksonFactory {
        // Indent level passed to Log4jXmlPrettyPrinter so <Event> nests one
        // level below the <Events> root written by XmlLayout header/footer.
        static final int DEFAULT_INDENT = 1;
        private final boolean includeStacktrace;
        private final boolean stacktraceAsString;

        public XML(final boolean includeStacktrace, final boolean stacktraceAsString) {
            this.includeStacktrace = includeStacktrace;
            this.stacktraceAsString = stacktraceAsString;
        }

        @Override
        protected String getPropertyNameForTimeMillis() {
            return XmlConstants.ELT_TIME_MILLIS;
        }

        @Override
        protected String getPropertyNameForInstant() {
            return XmlConstants.ELT_INSTANT;
        }

        @Override
        protected String getPropertNameForContextMap() {
            return XmlConstants.ELT_CONTEXT_MAP;
        }

        @Override
        protected String getPropertNameForSource() {
            return XmlConstants.ELT_SOURCE;
        }

        @Override
        protected String getPropertNameForNanoTime() {
            // NOTE(review): uses the JSON constant while every other method in
            // this class uses XmlConstants — presumably because XmlConstants
            // defines no nanoTime element; confirm against the constants class.
            return JsonConstants.ELT_NANO_TIME;
        }

        @Override
        protected PrettyPrinter newCompactPrinter() {
            // Yes, null is the proper answer.
            // (A null PrettyPrinter makes Jackson emit unindented XML.)
            return null;
        }

        @Override
        protected ObjectMapper newObjectMapper() {
            return new Log4jXmlObjectMapper(includeStacktrace, stacktraceAsString);
        }

        @Override
        protected PrettyPrinter newPrettyPrinter() {
            return new Log4jXmlPrettyPrinter(DEFAULT_INDENT);
        }
    }

    /** Factory for the YAML layout; property names are shared with JSON. */
    static class YAML extends JacksonFactory {
        private final boolean includeStacktrace;
        private final boolean stacktraceAsString;

        public YAML(final boolean includeStacktrace, final boolean stacktraceAsString) {
            this.includeStacktrace = includeStacktrace;
            this.stacktraceAsString = stacktraceAsString;
        }

        @Override
        protected String getPropertyNameForTimeMillis() {
            return JsonConstants.ELT_TIME_MILLIS;
        }

        @Override
        protected String getPropertyNameForInstant() {
            return JsonConstants.ELT_INSTANT;
        }

        @Override
        protected String getPropertNameForContextMap() {
            return JsonConstants.ELT_CONTEXT_MAP;
        }

        @Override
        protected String getPropertNameForSource() {
            return JsonConstants.ELT_SOURCE;
        }

        @Override
        protected String getPropertNameForNanoTime() {
            return JsonConstants.ELT_NANO_TIME;
        }

        @Override
        protected PrettyPrinter newCompactPrinter() {
            return new MinimalPrettyPrinter();
        }

        @Override
        protected ObjectMapper newObjectMapper() {
            return new Log4jYamlObjectMapper(false, includeStacktrace, stacktraceAsString);
        }

        @Override
        protected PrettyPrinter newPrettyPrinter() {
            return new DefaultPrettyPrinter();
        }
    }

    /**
     * When <Event>s are written into a XML file; the "Event" object is not the root element, but an element named
     * <Events> created using {@link XmlLayout#getHeader()} and {@link XmlLayout#getFooter()} methods.
     * <p>
     * {@link com.fasterxml.jackson.dataformat.xml.util.DefaultXmlPrettyPrinter} is used to print the Event object into
     * XML; hence it assumes <Event> tag as the root element, so it prints the <Event> tag without any
     * indentation. To add an indentation to the <Event> tag; hence an additional indentation for any
     * sub-elements, this class is written. As an additional task, to avoid the blank line printed after the ending
     * </Event> tag, {@link #writePrologLinefeed(XMLStreamWriter2)} method is also overridden.
     * </p>
     */
    static class Log4jXmlPrettyPrinter extends DefaultXmlPrettyPrinter {
        private static final long serialVersionUID = 1L;

        Log4jXmlPrettyPrinter(final int nesting) {
            // _nesting is a protected field of DefaultXmlPrettyPrinter.
            _nesting = nesting;
        }

        @Override
        public void writePrologLinefeed(final XMLStreamWriter2 sw) throws XMLStreamException {
            // nothing
        }

        /**
         * Sets the nesting level to 1 rather than 0, so the "Event" tag will get indentation of next level below root.
         */
        @Override
        public DefaultXmlPrettyPrinter createInstance() {
            return new Log4jXmlPrettyPrinter(XML.DEFAULT_INDENT);
        }
    }

    // Per-format property names used to build the serialization filter below.
    abstract protected String getPropertyNameForTimeMillis();

    abstract protected String getPropertyNameForInstant();

    abstract protected String getPropertNameForContextMap();

    abstract protected String getPropertNameForSource();

    abstract protected String getPropertNameForNanoTime();

    abstract protected PrettyPrinter newCompactPrinter();

    abstract protected ObjectMapper newObjectMapper();

    abstract protected PrettyPrinter newPrettyPrinter();

    /**
     * Convenience overload that excludes the instant field (i.e. emits
     * timeMillis); see the four-argument overload for details.
     */
    ObjectWriter newWriter(final boolean locationInfo, final boolean properties, final boolean compact) {
        return newWriter(locationInfo, properties, compact, false);
    }

    /**
     * Creates an ObjectWriter whose property filter drops the event fields the
     * caller did not ask for: source (unless locationInfo), context map
     * (unless properties), exactly one of instant/timeMillis depending on
     * includeMillis, and always nanoTime.
     */
    ObjectWriter newWriter(final boolean locationInfo, final boolean properties, final boolean compact,
            final boolean includeMillis) {
        final SimpleFilterProvider filters = new SimpleFilterProvider();
        final Set<String> except = new HashSet<>(3);
        if (!locationInfo) {
            except.add(this.getPropertNameForSource());
        }
        if (!properties) {
            except.add(this.getPropertNameForContextMap());
        }
        if (includeMillis) {
            except.add(getPropertyNameForInstant());
        } else {
            except.add(getPropertyNameForTimeMillis());
        }
        except.add(this.getPropertNameForNanoTime());
        filters.addFilter(Log4jLogEvent.class.getName(), SimpleBeanPropertyFilter.serializeAllExcept(except));
        final ObjectWriter writer = this.newObjectMapper().writer(compact ? this.newCompactPrinter() : this.newPrettyPrinter());
        return writer.with(filters);
    }
}
| |
/* Copyright 2009 - 2010 The Stajistics Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.stajistics.jdbc.wrapper;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.LinkedList;
import java.util.List;
import org.stajistics.Stats;
import org.stajistics.StatsFactory;
import org.stajistics.StatsKey;
import org.stajistics.aop.ProxyFactory;
import org.stajistics.jdbc.StatsJDBCConfig;
import org.stajistics.jdbc.decorator.AbstractStatementDecorator;
import org.stajistics.tracker.span.SpanTracker;
/**
 * A {@link Statement} decorator that feeds every executed SQL string (and
 * accumulated batches) to the configured SQL analyzer, wraps returned
 * {@link ResultSet}s in stats wrappers/proxies, and tracks the open/closed
 * lifespan of the statement.
 *
 * @author The Stajistics Project
 *
 */
public class StatsStatementWrapper extends AbstractStatementDecorator {

    // Not final: presumably left mutable so tests can inject a factory — confirm.
    private static StatsFactory statsFactory = Stats.getFactory(StatsStatementWrapper.class);

    private final StatsJDBCConfig config;

    /** Connection handed back by {@link #getConnection()} instead of the delegate's. */
    private final Connection connection;

    /** SQL accumulated via addBatch(), analyzed when executeBatch() runs. */
    private final List<String> batchSQL;

    private final ProxyFactory<ResultSet> resultSetProxyFactory;

    /** Tracks the statement's open span; committed on close(). */
    private final SpanTracker openClosedTracker;

    public StatsStatementWrapper(final Statement delegate,
                                 final Connection connection,
                                 final StatsJDBCConfig config) {
        super(delegate);
        if (connection == null) {
            throw new NullPointerException("connection");
        }
        if (config == null) {
            throw new NullPointerException("config");
        }
        this.connection = connection;
        this.config = config;
        batchSQL = new LinkedList<String>();
        this.resultSetProxyFactory = config.getProxyFactory(ResultSet.class);
        StatsKey openClosedKey = statsFactory.buildKey(Statement.class.getName())
                                             .withNameSuffix("open")
                                             .newKey();
        openClosedTracker = statsFactory.track(openClosedKey);
    }

    /** Routes a single SQL string to the configured analyzer. */
    private void handleSQL(final String sql) {
        config.getSQLAnalyzer()
              .analyzeSQL(sql);
    }

    /** Routes a batch of SQL strings to the configured analyzer. */
    private void handleSQL(final List<String> batchSQL) {
        config.getSQLAnalyzer()
              .analyzeSQL(batchSQL);
    }

    @Override
    public Connection getConnection() throws SQLException {
        return connection;
    }

    @Override
    public void close() throws SQLException {
        try {
            super.close();
        } finally {
            openClosedTracker.commit();
        }
    }

    @Override
    public boolean execute(final String sql) throws SQLException {
        boolean result = delegate().execute(sql);
        handleSQL(sql);
        return result;
    }

    @Override
    public boolean execute(final String sql,
                           final int autoGeneratedKeys) throws SQLException {
        boolean result = delegate().execute(sql, autoGeneratedKeys);
        handleSQL(sql);
        return result;
    }

    @Override
    public boolean execute(final String sql,
                           final int[] columnIndexes) throws SQLException {
        boolean result = delegate().execute(sql, columnIndexes);
        handleSQL(sql);
        return result;
    }

    @Override
    public boolean execute(final String sql,
                           final String[] columnNames) throws SQLException {
        // Bug fix: the original called execute(sql, columnNames) on itself,
        // recursing infinitely (StackOverflowError). Delegate like the
        // sibling overloads do.
        boolean result = delegate().execute(sql, columnNames);
        handleSQL(sql);
        return result;
    }

    @Override
    public ResultSet executeQuery(final String sql) throws SQLException {
        ResultSet rs = new StatsResultSetWrapper(delegate().executeQuery(sql), config);
        handleSQL(sql);
        rs = resultSetProxyFactory.createProxy(rs);
        return rs;
    }

    @Override
    public ResultSet getGeneratedKeys() throws SQLException {
        ResultSet rs = new StatsResultSetWrapper(delegate().getGeneratedKeys(), config);
        rs = resultSetProxyFactory.createProxy(rs);
        return rs;
    }

    @Override
    public ResultSet getResultSet() throws SQLException {
        ResultSet rs = new StatsResultSetWrapper(delegate().getResultSet(), config);
        rs = resultSetProxyFactory.createProxy(rs);
        return rs;
    }

    @Override
    public int executeUpdate(String sql) throws SQLException {
        // Made consistent with every other execute*/executeUpdate overload:
        // execute first, analyze after (so SQL that fails to execute is not
        // analyzed, matching the class's established convention).
        int result = delegate().executeUpdate(sql);
        handleSQL(sql);
        return result;
    }

    @Override
    public int executeUpdate(final String sql,
                             final int autoGeneratedKeys) throws SQLException {
        int result = delegate().executeUpdate(sql, autoGeneratedKeys);
        handleSQL(sql);
        return result;
    }

    @Override
    public int executeUpdate(final String sql,
                             final int[] columnIndexes) throws SQLException {
        int result = delegate().executeUpdate(sql, columnIndexes);
        handleSQL(sql);
        return result;
    }

    @Override
    public int executeUpdate(final String sql,
                             final String[] columnNames) throws SQLException {
        int result = delegate().executeUpdate(sql, columnNames);
        handleSQL(sql);
        return result;
    }

    @Override
    public int[] executeBatch() throws SQLException {
        int[] result = delegate().executeBatch();
        handleSQL(batchSQL);
        return result;
    }

    @Override
    public void addBatch(final String sql) throws SQLException {
        delegate().addBatch(sql);
        batchSQL.add(sql);
    }

    @Override
    public void clearBatch() throws SQLException {
        delegate().clearBatch();
        batchSQL.clear();
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jctools.queues;
import org.jctools.util.LongCell;
import org.jctools.util.Pow2;
import org.jctools.util.UnsafeAccess;
import org.jctools.util.VolatileLongCell;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Queue;
// Cache-line padding: 16 unused longs (128 bytes) so fields of subclasses do
// not share a cache line with whatever the JVM lays out before this object
// (false-sharing avoidance). The fields are intentionally never read.
abstract class FloatingCaqL0Pad {
    protected long p00, p01, p02, p03, p04, p05, p06, p07;
    protected long p10, p11, p12, p13, p14, p15, p16, p17;
}
/**
 * Cold (rarely-written) fields of the floating-counter SPSC queue: capacity,
 * mask, backing array and the head/tail counters plus their per-side caches.
 *
 * <p>Fix: this class now extends {@code FloatingCaqL0Pad} — the pad class
 * declared directly above, which was otherwise unused — instead of
 * {@code InlinedRingBufferL0Pad}, which belongs to a different queue variant
 * (apparent copy-paste slip). The pad classes contain only padding fields, so
 * behavior is unchanged.</p>
 */
abstract class FloatingCaqColdFields<E> extends FloatingCaqL0Pad {
    // Elements of padding (in slots) placed before and after the live region
    // of the backing array, again to avoid false sharing with neighbors.
    protected static final int BUFFER_PAD = 32;
    // Optional extra spacing between elements: each logical slot occupies
    // 2^SPARSE_SHIFT physical slots. 0 (dense) unless set via system property.
    protected static final int SPARSE_SHIFT = Integer.getInteger("sparse.shift", 0);
    protected final int capacity;
    protected final long mask;
    protected final E[] buffer;
    protected final VolatileLongCell tail = new VolatileLongCell(0);
    protected final VolatileLongCell head = new VolatileLongCell(0);
    // Producer-side cache of head and consumer-side cache of tail; refreshed
    // only when the cached value no longer proves progress is possible.
    protected final LongCell tailCache = new LongCell();
    protected final LongCell headCache = new LongCell();

    @SuppressWarnings("unchecked")
    FloatingCaqColdFields(int capacity) {
        // Round capacity up to a power of two so index wrapping can use a mask.
        if (Pow2.isPowerOfTwo(capacity)) {
            this.capacity = capacity;
        } else {
            this.capacity = Pow2.roundToPowerOfTwo(capacity);
        }
        mask = this.capacity - 1;
        buffer = (E[]) new Object[(this.capacity << SPARSE_SHIFT) + BUFFER_PAD * 2];
    }
}
/**
 * Lock-free array-backed queue with "floating" (heap-allocated cell) head and
 * tail counters. Per the class name this is single-producer/single-consumer:
 * offer() reads then lazySet()s tail without any CAS, and poll() does the same
 * with head, so concurrent producers (or consumers) would race.
 *
 * <p>NOTE(review): ordering here is load-bearing — elements are published via
 * a plain write followed by tail.lazySet (ordered store), and reclaimed via
 * head.lazySet. Do not reorder statements in offer()/poll().</p>
 */
public final class FloatingCountersSpscConcurrentArrayQueue<E> extends FloatingCaqColdFields<E> implements
        Queue<E> {
    // Trailing cache-line padding after the cold fields.
    protected long p00, p01, p02, p03, p04, p05, p06, p07;
    protected long p50, p51, p52, p53, p54, p55, p56;

    // Byte offset of logical slot 0 and the per-slot shift, computed from the
    // JVM's reference size so Unsafe can address array elements directly.
    private static final long ARRAY_BASE;
    private static final int ELEMENT_SHIFT;
    static {
        final int scale = UnsafeAccess.UNSAFE.arrayIndexScale(Object[].class);
        if (4 == scale) {
            ELEMENT_SHIFT = 2 + SPARSE_SHIFT;
        } else if (8 == scale) {
            ELEMENT_SHIFT = 3 + SPARSE_SHIFT;
        } else {
            throw new IllegalStateException("Unknown pointer size");
        }
        // Skip the BUFFER_PAD leading padding slots.
        ARRAY_BASE = UnsafeAccess.UNSAFE.arrayBaseOffset(Object[].class)
                + (BUFFER_PAD << (ELEMENT_SHIFT - SPARSE_SHIFT));
    }

    public FloatingCountersSpscConcurrentArrayQueue(final int capacity) {
        super(capacity);
    }

    public boolean add(final E e) {
        if (offer(e)) {
            return true;
        }
        throw new IllegalStateException("Queue is full");
    }

    /** Byte offset of the slot for the given sequence index (mask wraps it). */
    private long offset(long index) {
        return ARRAY_BASE + ((index & mask) << ELEMENT_SHIFT);
    }

    public boolean offer(final E e) {
        if (null == e) {
            throw new NullPointerException("Null is not a valid element");
        }
        final long currTail = tail.get();
        // Producer-local full check against the cached head; only re-read the
        // volatile head when the cache no longer proves there is room.
        // NOTE(review): the "+ 32" keeps the producer 32 slots behind the
        // consumer (sacrificing 32 slots of capacity) — presumably to keep the
        // indices on separate cache lines; confirm before changing.
        final long wrapPoint = currTail - capacity + 32;
        if (headCache.get() <= wrapPoint) {
            final long currHead = head.get();
            headCache.set(currHead);
            if (currHead <= wrapPoint) {
                return false;
            }
        }
        // Plain store of the element, then ordered store of tail publishes it.
        UnsafeAccess.UNSAFE.putObject(buffer, offset(currTail), e);
        tail.lazySet(currTail + 1);
        return true;
    }

    public E poll() {
        final long currHead = head.get();
        // Consumer-local empty check against the cached tail; refresh the
        // cache from the volatile tail only when it looks empty.
        if (currHead >= tailCache.get()) {
            final long currTail = tail.get();
            tailCache.set(currTail);
            if (currHead >= currTail) {
                return null;
            }
        }
        final long offset = offset(currHead);
        @SuppressWarnings("unchecked")
        final E e = (E) UnsafeAccess.UNSAFE.getObject(buffer, offset);
        // Null the slot (lets the element be GC'd), then advance head.
        UnsafeAccess.UNSAFE.putObject(buffer, offset, null);
        head.lazySet(currHead + 1);
        return e;
    }

    public E remove() {
        final E e = poll();
        if (null == e) {
            throw new NoSuchElementException("Queue is empty");
        }
        return e;
    }

    public E element() {
        final E e = peek();
        if (null == e) {
            throw new NoSuchElementException("Queue is empty");
        }
        return e;
    }

    public E peek() {
        long currentHead = head.get();
        return getElement(currentHead);
    }

    @SuppressWarnings("unchecked")
    private E getElement(long index) {
        final long offset = offset(index);
        return (E) UnsafeAccess.UNSAFE.getObject(buffer, offset);
    }

    // Sizing/inspection methods below read both counters non-atomically, so
    // under concurrent use their results are best-effort snapshots.
    public int size() {
        return (int) (tail.get() - head.get());
    }

    public boolean isEmpty() {
        return tail.get() == head.get();
    }

    public boolean contains(final Object o) {
        if (null == o) {
            return false;
        }
        for (long i = head.get(), limit = tail.get(); i < limit; i++) {
            final E e = getElement(i);
            if (o.equals(e)) {
                return true;
            }
        }
        return false;
    }

    public Iterator<E> iterator() {
        throw new UnsupportedOperationException();
    }

    public Object[] toArray() {
        throw new UnsupportedOperationException();
    }

    public <T> T[] toArray(final T[] a) {
        throw new UnsupportedOperationException();
    }

    public boolean remove(final Object o) {
        throw new UnsupportedOperationException();
    }

    public boolean containsAll(final Collection<?> c) {
        for (final Object o : c) {
            if (!contains(o)) {
                return false;
            }
        }
        return true;
    }

    public boolean addAll(final Collection<? extends E> c) {
        for (final E e : c) {
            add(e);
        }
        return true;
    }

    public boolean removeAll(final Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    public boolean retainAll(final Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    public void clear() {
        // Drain via poll() so slots are nulled with correct ordering.
        Object value;
        do {
            value = poll();
        } while (null != value);
    }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.artifact_cache;
import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.cli.SlbBuckConfig;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.facebook.buck.util.unit.SizeUnit;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Splitter;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.immutables.value.Value;
import java.net.URI;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Represents configuration specific to the {@link ArtifactCache}s.
*/
public class ArtifactCacheBuckConfig {
  // Name of the .buckconfig section all cache settings live in.
  private static final String CACHE_SECTION_NAME = "cache";
  private static final String DEFAULT_DIR_CACHE_MODE = CacheReadMode.readwrite.name();

  // Names of the fields in a [cache*] section that describe a single HTTP cache.
  private static final String HTTP_URL_FIELD_NAME = "http_url";
  private static final String HTTP_BLACKLISTED_WIFI_SSIDS_FIELD_NAME = "blacklisted_wifi_ssids";
  private static final String HTTP_MODE_FIELD_NAME = "http_mode";
  private static final String HTTP_TIMEOUT_SECONDS_FIELD_NAME = "http_timeout_seconds";
  private static final String HTTP_READ_HEADERS_FIELD_NAME = "http_read_headers";
  private static final String HTTP_WRITE_HEADERS_FIELD_NAME = "http_write_headers";
  private static final String HTTP_CACHE_ERROR_MESSAGE_NAME = "http_error_message_format";
  private static final String HTTP_MAX_STORE_SIZE = "http_max_store_size";
  private static final String HTTP_THREAD_POOL_SIZE = "http_thread_pool_size";
  private static final String HTTP_THREAD_POOL_KEEP_ALIVE_DURATION_MILLIS =
      "http_thread_pool_keep_alive_duration_millis";
  private static final ImmutableSet<String> HTTP_CACHE_DESCRIPTION_FIELDS = ImmutableSet.of(
      HTTP_URL_FIELD_NAME,
      HTTP_BLACKLISTED_WIFI_SSIDS_FIELD_NAME,
      HTTP_MODE_FIELD_NAME,
      HTTP_TIMEOUT_SECONDS_FIELD_NAME,
      HTTP_READ_HEADERS_FIELD_NAME,
      HTTP_WRITE_HEADERS_FIELD_NAME,
      HTTP_CACHE_ERROR_MESSAGE_NAME,
      HTTP_MAX_STORE_SIZE);
  private static final String HTTP_MAX_FETCH_RETRIES = "http_max_fetch_retries";

  // List of names of cache-* sections that contain the fields above. This is used to emulate
  // dicts, essentially.
  private static final String HTTP_CACHE_NAMES_FIELD_NAME = "http_cache_names";

  // Defaults applied when the corresponding field is absent from .buckconfig.
  private static final URI DEFAULT_HTTP_URL = URI.create("http://localhost:8080/");
  private static final String DEFAULT_HTTP_CACHE_MODE = CacheReadMode.readwrite.name();
  private static final long DEFAULT_HTTP_CACHE_TIMEOUT_SECONDS = 3L;
  private static final String DEFAULT_HTTP_MAX_CONCURRENT_WRITES = "1";
  private static final String DEFAULT_HTTP_WRITE_SHUTDOWN_TIMEOUT_SECONDS = "1800"; // 30 minutes
  private static final String DEFAULT_HTTP_CACHE_ERROR_MESSAGE =
      "{cache_name} cache encountered an error: {error_message}";
  private static final int DEFAULT_HTTP_MAX_FETCH_RETRIES = 2;

  // Settings for serving the local cache to other machines.
  private static final String SERVED_CACHE_ENABLED_FIELD_NAME = "serve_local_cache";
  private static final String DEFAULT_SERVED_CACHE_MODE = CacheReadMode.readonly.name();
  private static final String SERVED_CACHE_READ_MODE_FIELD_NAME = "served_local_cache_mode";
  private static final String LOAD_BALANCING_TYPE = "load_balancing_type";
  private static final LoadBalancingType DEFAULT_LOAD_BALANCING_TYPE =
      LoadBalancingType.SINGLE_SERVER;
  private static final long DEFAULT_HTTP_THREAD_POOL_SIZE = 200;
  private static final long DEFAULT_HTTP_THREAD_POOL_KEEP_ALIVE_DURATION_MILLIS =
      TimeUnit.MINUTES.toMillis(1);

  // Two-level cache: store small content-addressed entries under a metadata key.
  private static final String TWO_LEVEL_CACHING_ENABLED_FIELD_NAME = "two_level_cache_enabled";
  // Old name for "two_level_cache_minimum_size", remove eventually.
  private static final String TWO_LEVEL_CACHING_THRESHOLD_FIELD_NAME = "two_level_cache_threshold";
  private static final String TWO_LEVEL_CACHING_MIN_SIZE_FIELD_NAME =
      "two_level_cache_minimum_size";
  private static final String TWO_LEVEL_CACHING_MAX_SIZE_FIELD_NAME =
      "two_level_cache_maximum_size";
  private static final long TWO_LEVEL_CACHING_MIN_SIZE_DEFAULT = 20 * 1024L;

  private static final String HYBRID_THRIFT_ENDPOINT = "hybrid_thrift_endpoint";
  private static final String REPOSITORY = "repository";
  private static final String DEFAULT_REPOSITORY = "";
  private static final String SCHEDULE_TYPE = "schedule_type";
  private static final String DEFAULT_SCHEDULE_TYPE = "none";

  /** Strategy for picking the HTTP cache server(s) to talk to. */
  public enum LoadBalancingType {
    SINGLE_SERVER,
    CLIENT_SLB,
  }

  private final BuckConfig buckConfig;
  private final SlbBuckConfig slbConfig;
/** Wraps {@code buckConfig} and derives the SLB config from the cache section. */
public ArtifactCacheBuckConfig(BuckConfig buckConfig) {
this.buckConfig = buckConfig;
this.slbConfig = new SlbBuckConfig(buckConfig, CACHE_SECTION_NAME);
}
/**
 * Returns the configured cache repository identifier, or the empty string when
 * the repository field is unset.
 */
public String getRepository() {
  Optional<String> configured = buckConfig.getValue(CACHE_SECTION_NAME, REPOSITORY);
  return configured.or(DEFAULT_REPOSITORY);
}
/** Returns the configured schedule type, defaulting to "none" when unset. */
public String getScheduleType() {
  Optional<String> configured = buckConfig.getValue(CACHE_SECTION_NAME, SCHEDULE_TYPE);
  return configured.or(DEFAULT_SCHEDULE_TYPE);
}
/** Returns the client-side load balancer configuration for the cache section. */
public SlbBuckConfig getSlbConfig() {
return slbConfig;
}
/** Returns the hybrid thrift endpoint path, if one is configured. */
public Optional<String> getHybridThriftEndpoint() {
return buckConfig.getValue(CACHE_SECTION_NAME, HYBRID_THRIFT_ENDPOINT);
}
/** Returns the configured load balancing strategy, defaulting to a single server. */
public LoadBalancingType getLoadBalancingType() {
  Optional<LoadBalancingType> configured =
      buckConfig.getEnum(CACHE_SECTION_NAME, LOAD_BALANCING_TYPE, LoadBalancingType.class);
  return configured.or(DEFAULT_LOAD_BALANCING_TYPE);
}
/**
 * Returns the maximum number of concurrent HTTP cache store operations.
 *
 * @throws NumberFormatException if the configured value is not a valid integer.
 */
public int getHttpMaxConcurrentWrites() {
  // parseInt returns a primitive directly; Integer.valueOf would box and then unbox.
  return Integer.parseInt(
      buckConfig.getValue(CACHE_SECTION_NAME, "http_max_concurrent_writes")
          .or(DEFAULT_HTTP_MAX_CONCURRENT_WRITES));
}
/**
 * Returns how long, in seconds, to wait for pending HTTP cache writes at shutdown.
 *
 * @throws NumberFormatException if the configured value is not a valid integer.
 */
public int getHttpWriterShutdownTimeout() {
  // parseInt returns a primitive directly; Integer.valueOf would box and then unbox.
  return Integer.parseInt(
      buckConfig.getValue(CACHE_SECTION_NAME, "http_writer_shutdown_timeout_seconds")
          .or(DEFAULT_HTTP_WRITE_SHUTDOWN_TIMEOUT_SECONDS));
}
/** Returns how many times a failed cache fetch is retried (default: 2). */
public int getMaxFetchRetries() {
  Optional<Integer> configured =
      buckConfig.getInteger(CACHE_SECTION_NAME, HTTP_MAX_FETCH_RETRIES);
  return configured.or(DEFAULT_HTTP_MAX_FETCH_RETRIES);
}
/** Returns true if any configured HTTP cache allows stores (readwrite mode). */
public boolean hasAtLeastOneWriteableCache() {
  return FluentIterable.from(getHttpCaches()).anyMatch(
      new Predicate<HttpCacheEntry>() {
        @Override
        public boolean apply(HttpCacheEntry input) {
          // Enum constants are singletons: == is the idiomatic, null-safe comparison.
          return input.getCacheReadMode() == ArtifactCacheBuckConfig.CacheReadMode.readwrite;
        }
      });
}
/** Returns the local hostname to identify this machine to remote cache servers. */
public String getHostToReportToRemoteCacheServer() {
return buckConfig.getLocalhost();
}
/**
 * Returns the raw cache mode names. A user-supplied value wins, even an
 * explicitly empty {@code mode =}; otherwise the local directory cache is used.
 */
public ImmutableList<String> getArtifactCacheModesRaw() {
  if (!buckConfig.hasUserDefinedValue(CACHE_SECTION_NAME, "mode")) {
    // Nothing configured: default to the directory cache.
    return ImmutableList.of("dir");
  }
  return buckConfig.getListWithoutComments(CACHE_SECTION_NAME, "mode");
}
/**
 * Parses the raw mode names into {@link ArtifactCacheMode} values.
 *
 * @throws HumanReadableException if a configured mode name is not a valid enum constant.
 */
public ImmutableSet<ArtifactCacheMode> getArtifactCacheModes() {
return FluentIterable.from(getArtifactCacheModesRaw())
.transform(
new Function<String, ArtifactCacheMode>() {
@Override
public ArtifactCacheMode apply(String input) {
try {
return ArtifactCacheMode.valueOf(input);
} catch (IllegalArgumentException e) {
// Surface a friendly config error rather than a raw stack trace.
throw new HumanReadableException(
"Unusable %s.mode: '%s'",
CACHE_SECTION_NAME,
input);
}
}
})
.toSet();
}
/**
 * Returns the dir-cache entry to serve to other machines, or absent when
 * serving the local cache is disabled.
 */
public Optional<DirCacheEntry> getServedLocalCache() {
  if (getServingLocalCacheEnabled()) {
    DirCacheEntry served = getDirCache().withCacheReadMode(getServedLocalCacheReadMode());
    return Optional.of(served);
  }
  return Optional.absent();
}
/** Builds the local directory cache description from the configured dir, mode, and size cap. */
public DirCacheEntry getDirCache() {
return DirCacheEntry.builder()
.setCacheDir(getCacheDir())
.setCacheReadMode(getDirCacheReadMode())
.setMaxSizeBytes(getCacheDirMaxSizeBytes())
.build();
}
/**
 * Returns every configured HTTP cache. Supports both the legacy single-cache
 * fields (unnamed entry) and the newer named multi-cache configuration.
 */
public ImmutableSet<HttpCacheEntry> getHttpCaches() {
ImmutableSet.Builder<HttpCacheEntry> result = ImmutableSet.builder();
ImmutableSet<String> httpCacheNames = getHttpCacheNames();
// Legacy setup: "http" in the mode list with no named caches implies one unnamed cache.
boolean implicitLegacyCache = httpCacheNames.isEmpty() &&
getArtifactCacheModes().contains(ArtifactCacheMode.http);
if (implicitLegacyCache || legacyCacheConfigurationFieldsPresent()) {
result.add(obtainEntryForName(Optional.<String>absent()));
}
for (String cacheName : httpCacheNames) {
result.add(obtainEntryForName(Optional.of(cacheName)));
}
return result.build();
}
// It's important that this number is greater than the `-j` parallelism,
// as if it's too small, we'll overflow the reusable connection pool and
// start spamming new connections. While this isn't the best location,
// the other current option is setting this wherever we construct a `Build`
// object and have access to the `-j` argument. However, since that is
// created in several places leave it here for now.
/** Returns the HTTP cache client thread pool size (default: 200). */
public long getThreadPoolSize() {
return buckConfig.getLong(CACHE_SECTION_NAME, HTTP_THREAD_POOL_SIZE)
.or(DEFAULT_HTTP_THREAD_POOL_SIZE);
}
/** Returns how long idle pool threads are kept alive, in millis (default: 1 minute). */
public long getThreadPoolKeepAliveDurationMillis() {
return buckConfig.getLong(CACHE_SECTION_NAME, HTTP_THREAD_POOL_KEEP_ALIVE_DURATION_MILLIS)
.or(DEFAULT_HTTP_THREAD_POOL_KEEP_ALIVE_DURATION_MILLIS);
}
/** Returns whether two-level caching is enabled (default: false). */
public boolean getTwoLevelCachingEnabled() {
return buckConfig.getBooleanValue(
CACHE_SECTION_NAME,
TWO_LEVEL_CACHING_ENABLED_FIELD_NAME,
false);
}
/**
 * Returns the minimum artifact size (bytes) eligible for two-level caching.
 * Falls back to the deprecated "threshold" field name, then to 20 KiB.
 */
public long getTwoLevelCachingMinimumSize() {
return buckConfig.getValue(CACHE_SECTION_NAME, TWO_LEVEL_CACHING_MIN_SIZE_FIELD_NAME)
.or(buckConfig.getValue(CACHE_SECTION_NAME, TWO_LEVEL_CACHING_THRESHOLD_FIELD_NAME))
.transform(
new Function<String, Long>() {
@Override
public Long apply(String input) {
// Accepts human-readable sizes, e.g. "20kb".
return SizeUnit.parseBytes(input);
}
})
.or(TWO_LEVEL_CACHING_MIN_SIZE_DEFAULT);
}
/** Returns the optional upper size bound (bytes) for two-level caching; absent means no cap. */
public Optional<Long> getTwoLevelCachingMaximumSize() {
return buckConfig.getValue(CACHE_SECTION_NAME, TWO_LEVEL_CACHING_MAX_SIZE_FIELD_NAME)
.transform(
new Function<String, Long>() {
@Override
public Long apply(String input) {
return SizeUnit.parseBytes(input);
}
});
}
/** Returns the read/write mode of the local directory cache. */
private CacheReadMode getDirCacheReadMode() {
return getCacheReadMode(CACHE_SECTION_NAME, "dir_mode", DEFAULT_DIR_CACHE_MODE);
}
/** Resolves the configured local cache directory to an absolute path. */
private Path getCacheDir() {
  Path configuredDir = Paths.get(buckConfig.getLocalCacheDirectory());
  // The cache dir may legitimately live outside the project root.
  Path resolved = buckConfig.resolvePathThatMayBeOutsideTheProjectFilesystem(configuredDir);
  return Preconditions.checkNotNull(resolved);
}
/** Returns the dir cache size cap in bytes, if configured (accepts e.g. "10gb"). */
private Optional<Long> getCacheDirMaxSizeBytes() {
return buckConfig.getValue(CACHE_SECTION_NAME, "dir_max_size").transform(
new Function<String, Long>() {
@Override
public Long apply(String input) {
return SizeUnit.parseBytes(input);
}
});
}
/** Returns whether this machine should serve its local cache to others (default: false). */
private boolean getServingLocalCacheEnabled() {
return buckConfig.getBooleanValue(CACHE_SECTION_NAME, SERVED_CACHE_ENABLED_FIELD_NAME, false);
}
/** Returns the read mode exposed to clients of the served cache (default: readonly). */
private CacheReadMode getServedLocalCacheReadMode() {
return getCacheReadMode(
CACHE_SECTION_NAME,
SERVED_CACHE_READ_MODE_FIELD_NAME,
DEFAULT_SERVED_CACHE_MODE);
}
/**
 * Parses a configured cache mode string into a {@link CacheReadMode}.
 *
 * @throws HumanReadableException if the value is not a valid mode name.
 */
private CacheReadMode getCacheReadMode(String section, String fieldName, String defaultValue) {
  String cacheMode = buckConfig.getValue(section, fieldName).or(defaultValue);
  try {
    return CacheReadMode.valueOf(cacheMode);
  } catch (IllegalArgumentException e) {
    // Turn the enum parse failure into a friendly configuration error.
    throw new HumanReadableException("Unusable cache.%s: '%s'", fieldName, cacheMode);
  }
}
/**
 * Parses semicolon-separated "Name: value" pairs into a header map.
 *
 * @throws HumanReadableException if an entry is not of the form "Name: value".
 */
private ImmutableMap<String, String> getCacheHeaders(String section, String fieldName) {
  ImmutableMap.Builder<String, String> headerBuilder = ImmutableMap.builder();
  ImmutableList<String> rawHeaders = buckConfig.getListWithoutComments(
      section,
      fieldName,
      ';');
  for (String rawHeader : rawHeaders) {
    // Split on the first ':' only, so header values may themselves contain colons
    // (e.g. "Authorization: Bearer a:b"). Without limit(2) such values were truncated.
    List<String> splitHeader = Splitter.on(':')
        .limit(2)
        .omitEmptyStrings()
        .trimResults()
        .splitToList(rawHeader);
    if (splitHeader.size() != 2) {
      // Previously a malformed entry (no colon, or empty value) threw an opaque
      // IndexOutOfBoundsException from splitHeader.get(1).
      throw new HumanReadableException(
          "Invalid header '%s' in %s.%s; expected 'Name: value'.",
          rawHeader,
          section,
          fieldName);
    }
    headerBuilder.put(splitHeader.get(0), splitHeader.get(1));
  }
  return headerBuilder.build();
}
/** Returns the set of named HTTP caches declared in the config. */
private ImmutableSet<String> getHttpCacheNames() {
ImmutableList<String> httpCacheNames = buckConfig.getListWithoutComments(
CACHE_SECTION_NAME,
HTTP_CACHE_NAMES_FIELD_NAME);
return ImmutableSet.copyOf(httpCacheNames);
}
/** Returns the user-facing cache error template, falling back to the given default. */
private String getCacheErrorFormatMessage(String section, String fieldName, String defaultValue) {
return buckConfig.getValue(section, fieldName).or(defaultValue);
}
/**
 * Builds an HTTP cache description for the given name. A named cache reads its
 * settings from section "cache#&lt;name&gt;"; an absent name uses the plain
 * "cache" section (legacy single-cache configuration).
 */
private HttpCacheEntry obtainEntryForName(Optional<String> cacheName) {
final String section = Joiner.on('#').skipNulls().join(CACHE_SECTION_NAME, cacheName.orNull());
HttpCacheEntry.Builder builder = HttpCacheEntry.builder();
builder.setName(cacheName);
builder.setUrl(buckConfig.getUrl(section, HTTP_URL_FIELD_NAME).or(DEFAULT_HTTP_URL));
builder.setTimeoutSeconds(
buckConfig.getLong(section, HTTP_TIMEOUT_SECONDS_FIELD_NAME)
.or(DEFAULT_HTTP_CACHE_TIMEOUT_SECONDS).intValue());
builder.setReadHeaders(getCacheHeaders(section, HTTP_READ_HEADERS_FIELD_NAME));
builder.setWriteHeaders(getCacheHeaders(section, HTTP_WRITE_HEADERS_FIELD_NAME));
builder.setBlacklistedWifiSsids(
buckConfig.getListWithoutComments(section, HTTP_BLACKLISTED_WIFI_SSIDS_FIELD_NAME));
builder.setCacheReadMode(
getCacheReadMode(section, HTTP_MODE_FIELD_NAME, DEFAULT_HTTP_CACHE_MODE));
builder.setErrorMessageFormat(
getCacheErrorFormatMessage(
section,
HTTP_CACHE_ERROR_MESSAGE_NAME,
DEFAULT_HTTP_CACHE_ERROR_MESSAGE));
builder.setMaxStoreSize(buckConfig.getLong(section, HTTP_MAX_STORE_SIZE));
return builder.build();
}
/** Returns true if any legacy single-HTTP-cache field is set in the cache section. */
private boolean legacyCacheConfigurationFieldsPresent() {
for (String field : HTTP_CACHE_DESCRIPTION_FIELDS) {
if (buckConfig.getValue(CACHE_SECTION_NAME, field).isPresent()) {
return true;
}
}
return false;
}
/** Supported cache transports. Lowercase names match the strings used in .buckconfig. */
public enum ArtifactCacheMode {
dir,
http,
thrift_over_http,
}
/** Whether a cache may be written to. Lowercase names match .buckconfig values. */
public enum CacheReadMode {
readonly(false),
readwrite(true),
;
// True when artifacts may be stored into this cache, not only fetched.
private final boolean doStore;
CacheReadMode(boolean doStore) {
this.doStore = doStore;
}
public boolean isDoStore() {
return doStore;
}
}
/** Immutable description of a local directory cache (generated by the Immutables processor). */
@Value.Immutable
@BuckStyleImmutable
abstract static class AbstractDirCacheEntry {
public abstract Path getCacheDir();
public abstract Optional<Long> getMaxSizeBytes();
public abstract CacheReadMode getCacheReadMode();
}
/** Immutable description of a single HTTP cache (generated by the Immutables processor). */
@Value.Immutable
@BuckStyleImmutable
abstract static class AbstractHttpCacheEntry {
  public abstract Optional<String> getName();
  public abstract URI getUrl();
  public abstract int getTimeoutSeconds();
  public abstract ImmutableMap<String, String> getReadHeaders();
  public abstract ImmutableMap<String, String> getWriteHeaders();
  public abstract CacheReadMode getCacheReadMode();
  protected abstract ImmutableSet<String> getBlacklistedWifiSsids();
  public abstract String getErrorMessageFormat();
  public abstract Optional<Long> getMaxStoreSize();

  /**
   * Returns false only when connected to a wifi SSID that was explicitly
   * blacklisted from talking to the distributed cache.
   */
  public boolean isWifiUsableForDistributedCache(Optional<String> currentWifiSsid) {
    boolean onBlacklistedNetwork =
        currentWifiSsid.isPresent() &&
            getBlacklistedWifiSsids().contains(currentWifiSsid.get());
    return !onBlacklistedNetwork;
  }
}
}
| |
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.camera;
import android.content.Context;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.Camera;
import android.os.Handler;
import android.util.Log;
import android.view.SurfaceHolder;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.google.zxing.client.android.camera.open.OpenCameraInterface;
import java.io.IOException;
/**
* This object wraps the Camera service object and expects to be the only one talking to it. The
* implementation encapsulates the steps needed to take preview-sized images, which are used for
* both preview and decoding.
*
* @author dswitkin@google.com (Daniel Switkin)
*/
public final class CameraManager {

  private static final String TAG = CameraManager.class.getSimpleName();

  private static final int MIN_FRAME_WIDTH = 240;
  private static final int MIN_FRAME_HEIGHT = 240;
  private static final int MAX_FRAME_WIDTH = 1200; // = 5/8 * 1920
  private static final int MAX_FRAME_HEIGHT = 675; // = 5/8 * 1080

  private final Context context;
  private final CameraConfigurationManager configManager;
  private Camera camera;
  private AutoFocusManager autoFocusManager;
  // Scan target rectangle in screen coordinates; lazily computed, cleared on closeDriver().
  private Rect framingRect;
  // Scan target rectangle in preview-frame coordinates; derived from framingRect.
  private Rect framingRectInPreview;
  private boolean initialized;
  private boolean previewing;
  // Negative means "no preference"; a non-negative id is set via setManualCameraId().
  private int requestedCameraId = -1;
  // Framing rect dimensions requested before the driver was opened; applied in openDriver().
  private int requestedFramingRectWidth;
  private int requestedFramingRectHeight;
  /**
   * Preview frames are delivered here, which we pass on to the registered handler. Make sure to
   * clear the handler so it will only receive one message.
   */
  private final PreviewCallback previewCallback;

  public CameraManager(Context context) {
    this.context = context;
    this.configManager = new CameraConfigurationManager(context);
    previewCallback = new PreviewCallback(configManager);
  }

  /**
   * Opens the camera driver and initializes the hardware parameters.
   *
   * @param holder The surface object which the camera will draw preview frames into.
   * @throws IOException Indicates the camera driver failed to open.
   */
  public synchronized void openDriver(SurfaceHolder holder) throws IOException {
    Camera theCamera = camera;
    if (theCamera == null) {
      if (requestedCameraId >= 0) {
        theCamera = OpenCameraInterface.open(requestedCameraId);
      } else {
        theCamera = OpenCameraInterface.open();
      }
      if (theCamera == null) {
        throw new IOException();
      }
      camera = theCamera;
    }
    if (holder != null) {
      theCamera.setPreviewDisplay(holder);
    }
    if (!initialized) {
      initialized = true;
      configManager.initFromCameraParameters(theCamera);
      // Apply any framing rect that was requested before the driver existed.
      if (requestedFramingRectWidth > 0 && requestedFramingRectHeight > 0) {
        setManualFramingRect(requestedFramingRectWidth, requestedFramingRectHeight);
        requestedFramingRectWidth = 0;
        requestedFramingRectHeight = 0;
      }
    }
    Camera.Parameters parameters = theCamera.getParameters();
    String parametersFlattened = parameters == null ? null : parameters.flatten(); // Save these, temporarily
    try {
      configManager.setDesiredCameraParameters(theCamera, false);
    } catch (RuntimeException re) {
      // Driver failed
      Log.w(TAG, "Camera rejected parameters. Setting only minimal safe-mode parameters");
      Log.i(TAG, "Resetting to saved camera params: " + parametersFlattened);
      // Reset the driver to its saved parameters and retry in safe mode.
      if (parametersFlattened != null) {
        parameters = theCamera.getParameters();
        parameters.unflatten(parametersFlattened);
        try {
          theCamera.setParameters(parameters);
          configManager.setDesiredCameraParameters(theCamera, true);
        } catch (RuntimeException re2) {
          // Well, darn. Give up
          Log.w(TAG, "Camera rejected even safe-mode parameters! No configuration");
        }
      }
    }
  }

  public Camera getCamera(){
    return camera;
  }

  public Point getCameraDimensions(){ return configManager.getCameraResolution(); }

  public AutoFocusManager getFocusManager(){ return autoFocusManager; }

  public synchronized boolean isOpen() {
    return camera != null;
  }

  /**
   * Closes the camera driver if still in use.
   */
  public synchronized void closeDriver() {
    if (camera != null) {
      camera.release();
      camera = null;
      // Make sure to clear these each time we close the camera, so that any scanning rect
      // requested by intent is forgotten.
      framingRect = null;
      framingRectInPreview = null;
    }
  }

  /**
   * Asks the camera hardware to begin drawing preview frames to the screen.
   */
  public synchronized void startPreview() {
    Camera theCamera = camera;
    if (theCamera != null && !previewing) {
      theCamera.startPreview();
      previewing = true;
      autoFocusManager = new AutoFocusManager(context, camera);
    }
  }

  /**
   * Tells the camera to stop drawing preview frames.
   */
  public synchronized void stopPreview() {
    if (autoFocusManager != null) {
      autoFocusManager.stop();
      autoFocusManager = null;
    }
    if (camera != null && previewing) {
      camera.stopPreview();
      previewCallback.setHandler(null, 0);
      previewing = false;
    }
  }

  /**
   * Convenience method for {@link com.google.zxing.client.android.CaptureActivity}
   *
   * @param newSetting if {@code true}, light should be turned on if currently off. And vice versa.
   */
  public synchronized void setTorch(boolean newSetting) {
    // Check camera for null BEFORE querying torch state; the previous ordering called
    // configManager.getTorchState(camera) with a possibly-null camera and relied on the
    // callee tolerating it. Net behavior is unchanged: with no camera, nothing happens.
    if (camera != null && newSetting != configManager.getTorchState(camera)) {
      // Pause autofocus around the torch toggle so the two don't fight over parameters.
      if (autoFocusManager != null) {
        autoFocusManager.stop();
      }
      configManager.setTorch(camera, newSetting);
      if (autoFocusManager != null) {
        autoFocusManager.start();
      }
    }
  }

  /**
   * A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
   * in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
   * respectively.
   *
   * @param handler The handler to send the message to.
   * @param message The what field of the message to be sent.
   */
  public synchronized void requestPreviewFrame(Handler handler, int message) {
    Camera theCamera = camera;
    if (theCamera != null && previewing) {
      previewCallback.setHandler(handler, message);
      theCamera.setOneShotPreviewCallback(previewCallback);
    }
  }

  /**
   * Calculates the framing rect which the UI should draw to show the user where to place the
   * barcode. This target helps with alignment as well as forces the user to hold the device
   * far enough away to ensure the image will be in focus.
   *
   * @return The rectangle to draw on screen in window coordinates.
   */
  public synchronized Rect getFramingRect() {
    if (framingRect == null) {
      if (camera == null) {
        return null;
      }
      Point screenResolution = configManager.getScreenResolution();
      if (screenResolution == null) {
        // Called early, before init even finished
        return null;
      }
      int width = findDesiredDimensionInRange(screenResolution.x, MIN_FRAME_WIDTH, MAX_FRAME_WIDTH);
      int height = findDesiredDimensionInRange(screenResolution.y, MIN_FRAME_HEIGHT, MAX_FRAME_HEIGHT);
      int leftOffset = (screenResolution.x - width) / 2;
      int topOffset = (screenResolution.y - height) / 2;
      framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
      Log.d(TAG, "Calculated framing rect: " + framingRect);
    }
    return framingRect;
  }

  /** Clamps 5/8 of the given resolution into [hardMin, hardMax]. */
  private static int findDesiredDimensionInRange(int resolution, int hardMin, int hardMax) {
    int dim = 5 * resolution / 8; // Target 5/8 of each dimension
    if (dim < hardMin) {
      return hardMin;
    }
    if (dim > hardMax) {
      return hardMax;
    }
    return dim;
  }

  /**
   * Like {@link #getFramingRect} but coordinates are in terms of the preview frame,
   * not UI / screen.
   *
   * @return {@link Rect} expressing barcode scan area in terms of the preview size
   */
  public synchronized Rect getFramingRectInPreview() {
    if (framingRectInPreview == null) {
      // Renamed from "framingRect" so the local no longer shadows the field of the same name.
      Rect frame = getFramingRect();
      if (frame == null) {
        return null;
      }
      Rect rect = new Rect(frame);
      Point cameraResolution = configManager.getCameraResolution();
      Point screenResolution = configManager.getScreenResolution();
      if (cameraResolution == null || screenResolution == null) {
        // Called early, before init even finished
        return null;
      }
      // Scale each edge from screen coordinates into preview-frame coordinates.
      rect.left = rect.left * cameraResolution.x / screenResolution.x;
      rect.right = rect.right * cameraResolution.x / screenResolution.x;
      rect.top = rect.top * cameraResolution.y / screenResolution.y;
      rect.bottom = rect.bottom * cameraResolution.y / screenResolution.y;
      framingRectInPreview = rect;
    }
    return framingRectInPreview;
  }

  /**
   * Allows third party apps to specify the camera ID, rather than determine
   * it automatically based on available cameras and their orientation.
   *
   * @param cameraId camera ID of the camera to use. A negative value means "no preference".
   */
  public synchronized void setManualCameraId(int cameraId) {
    if (initialized) {
      throw new IllegalStateException();
    } else {
      requestedCameraId = cameraId;
    }
  }

  /**
   * Allows third party apps to specify the scanning rectangle dimensions, rather than determine
   * them automatically based on screen resolution.
   *
   * @param width The width in pixels to scan.
   * @param height The height in pixels to scan.
   */
  public synchronized void setManualFramingRect(int width, int height) {
    if (initialized) {
      Point screenResolution = configManager.getScreenResolution();
      if (width > screenResolution.x) {
        width = screenResolution.x;
      }
      if (height > screenResolution.y) {
        height = screenResolution.y;
      }
      int leftOffset = (screenResolution.x - width) / 2;
      int topOffset = (screenResolution.y - height) / 2;
      framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
      Log.d(TAG, "Calculated manual framing rect: " + framingRect);
      framingRectInPreview = null;
    } else {
      // Driver not open yet: remember the request; openDriver() will apply it.
      requestedFramingRectWidth = width;
      requestedFramingRectHeight = height;
    }
  }

  /**
   * A factory method to build the appropriate LuminanceSource object based on the format
   * of the preview buffers, as described by Camera.Parameters.
   *
   * @param data A preview frame.
   * @param width The width of the image.
   * @param height The height of the image.
   * @return A PlanarYUVLuminanceSource instance.
   */
  public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
    Rect rect = getFramingRectInPreview();
    if (rect == null) {
      return null;
    }
    // Go ahead and assume it's YUV rather than die.
    return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
        rect.width(), rect.height(), false);
  }

}
| |
package com.akjava.gwt.threebox2d.client;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.jbox2d.collision.AABB;
import org.jbox2d.collision.shapes.CircleShape;
import org.jbox2d.collision.shapes.PolygonShape;
import org.jbox2d.collision.shapes.Shape;
import org.jbox2d.collision.shapes.ShapeType;
import org.jbox2d.common.Vec2;
import org.jbox2d.dynamics.Body;
import org.jbox2d.dynamics.Fixture;
import org.jbox2d.dynamics.World;
import org.jbox2d.dynamics.joints.DistanceJoint;
import org.jbox2d.dynamics.joints.Joint;
import com.akjava.gwt.lib.client.CanvasUtils;
import com.akjava.gwt.stats.client.Stats;
import com.akjava.gwt.three.client.examples.renderers.CSS3DObject;
import com.akjava.gwt.three.client.examples.renderers.CSS3DRenderer;
import com.akjava.gwt.three.client.gwt.materials.MeshBasicMaterialParameter;
import com.akjava.gwt.three.client.js.THREE;
import com.akjava.gwt.three.client.js.cameras.Camera;
import com.akjava.gwt.three.client.js.core.Object3D;
import com.akjava.gwt.three.client.js.lights.Light;
import com.akjava.gwt.three.client.js.renderers.WebGLRenderer;
import com.akjava.gwt.three.client.js.scenes.Scene;
import com.akjava.gwt.three.client.js.textures.Texture;
import com.akjava.gwt.threebox2d.client.demo.EdgeDemo;
import com.akjava.gwt.threebox2d.client.demo.simple.SimpleDemo;
import com.akjava.gwt.threebox2d.client.demo.spring.SpringDemo;
import com.google.gwt.canvas.client.Canvas;
import com.google.gwt.canvas.dom.client.Context2d;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.KeyDownEvent;
import com.google.gwt.event.dom.client.KeyDownHandler;
import com.google.gwt.event.dom.client.KeyUpEvent;
import com.google.gwt.event.dom.client.KeyUpHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.text.shared.Renderer;
import com.google.gwt.user.client.Timer;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.FocusPanel;
import com.google.gwt.user.client.ui.HTMLPanel;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.RootLayoutPanel;
import com.google.gwt.user.client.ui.ValueListBox;
import com.google.gwt.user.client.ui.VerticalPanel;
public class Main implements EntryPoint {
//private Canvas canvas;
private Camera camera;
private WebGLRenderer renderer;
private Scene scene;
// Logical viewport size in pixels, used for renderer sizing and camera aspect.
int width=600;
int height=350;
// Root object the physics bodies/joints are attached to; offset so (0,0) is the left edge.
Object3D objRoot;
// Per-body and per-joint scene objects, created lazily on first draw.
Map<Body,Object3D> threeObjects=new HashMap<Body,Object3D>();
Map<Joint,Object3D> threeJoints=new HashMap<Joint,Object3D>();
List<Box2dDemo> demos=new ArrayList<Box2dDemo>();
//private String currentRendererType="css3d";
// Replaces the current renderer ("css3d" | "webgl" | "canvas") and re-attaches its DOM element.
// NOTE(review): init() runs before the same-type early return, so switching to the current
// type still resets the scene — confirm whether that reset is intentional.
private void switchRenderer(String type){
init();
if(renderer.gwtGetType().equals(type)){//same type no need to switch
return;
}
focusPanel.clear();
HTMLPanel div=new HTMLPanel("");
if(type.equals("css3d")){
renderer = THREE.CSS3DRenderer();
}else if(type.equals("webgl")){
renderer=THREE.WebGLRenderer();
//renderer.set tod aliase
renderer.setClearColor(0xffffff, 1);//what? TODO set color
}else{//canvas
renderer=THREE.CanvasRenderer();
}
renderer.setSize(width, height);
div.getElement().appendChild(renderer.getDomElement());
renderer.gwtSetType(type);
focusPanel.add(div);
}
// Pixels per Box2D world unit, used when projecting physics coordinates into the scene.
public int scale=10;
// GWT entry point: builds the UI, scene graph, camera, renderer, demo list,
// and starts the ~60fps render/step timer.
@Override
public void onModuleLoad() {
main = new MainUi();
RootLayoutPanel.get().add(main);
//RootPanel.get().add(main);
// Register the available physics demos; the first one becomes the initial selection.
demos.add(new EdgeDemo());
demos.add(new SpringDemo());
demos.add(new SimpleDemo());
//demos.add(new CarDemo());
//demos.add(new ImageWaveDemo());
//APEngine.container(new ArrayListDisplayObjectContainer());
//apeDemo = new CarDemo();
//apeDemo=new ImageWaveDemo();
//apeDemo.initialize();
//APEngine.step();
// Scene setup: objRoot is shifted left so physics x=0 maps to the viewport's left edge.
scene = THREE.Scene();
objRoot=THREE.Object3D();
objRoot.setPosition(-width/2, 0, 0);
scene.add(objRoot);
camera = THREE.PerspectiveCamera(35,(double)width/height,.1,10000);
scene.add(camera);
//camera.getPosition().setZ(700);
camera.getPosition().setZ(1000);
camera.getPosition().setX(0);
camera.getPosition().setY(-200);
light = THREE.PointLight(0xffffff);
light.setPosition(10, 0, 10);
scene.add(light);
//camera.getRotation().setZ(Math.toRadians(180)); //fliphorizontaled
// Start with the canvas renderer; buttons below allow switching at runtime.
renderer = THREE.CanvasRenderer();
renderer.gwtSetType("canvas");
renderer.setSize(width, height);
HTMLPanel div=new HTMLPanel("");
div.getElement().appendChild(renderer.getDomElement());
// FocusPanel wraps the canvas so keyboard events reach the active demo.
focusPanel = new FocusPanel();
focusPanel.add(div);
main.getCenter().add(focusPanel);
focusPanel.addKeyDownHandler(new KeyDownHandler() {
@Override
public void onKeyDown(KeyDownEvent event) {
apeDemo.keyDown(event);
}
});
focusPanel.addKeyUpHandler(new KeyUpHandler() {
@Override
public void onKeyUp(KeyUpEvent event) {
apeDemo.KeyUp(event);
}
});
focusPanel.setFocus(true);
main.getWebglButton().addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
switchRenderer("webgl");
}
});
main.getCanvasButton().addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
switchRenderer("canvas");
}
});
/* commentout
main.getCss3dButton().addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
switchRenderer("css3d");
}
});
*/
/*
canvas = Canvas.createIfSupported();
canvas.setSize("600px", "600px");
canvas.setCoordinateSpaceWidth(600);
canvas.setCoordinateSpaceHeight(600);
root.add(canvas);
*/
// FPS overlay plus the main loop: deferred re-init, physics step, and render each tick.
final Stats stats=Stats.insertStatsToRootPanel();
stats.domElement().getStyle().setWidth(90.0, Unit.PX);
//stats.setPosition(8, 0);
Timer timer=new Timer(){
@Override
public void run() {//wait?
long t=System.currentTimeMillis();
//LogUtils.log(""+(t-last));
last=t;
stats.begin();
if(doInit){
//LogUtils.log("init");
init();
doInit=false;
}
updateCanvas();
stats.end();
}
};
timer.scheduleRepeating(1000/60);
updateCanvas();
// Demo chooser + restart button.
HorizontalPanel buttons=new HorizontalPanel();
buttons.setVerticalAlignment(HorizontalPanel.ALIGN_MIDDLE);
ValueListBox<Box2dDemo> demosList=new ValueListBox<Box2dDemo>(new Renderer<Box2dDemo>() {
@Override
public String render(Box2dDemo object) {
if(object==null){
return null;
}
return object.getName();
}
@Override
public void render(Box2dDemo object, Appendable appendable)
throws IOException {
}
});
demosList.addValueChangeHandler(new ValueChangeHandler<Box2dDemo>() {
@Override
public void onValueChange(ValueChangeEvent<Box2dDemo> event) {
apeDemo=event.getValue();
init();//remove
}
});
demosList.setValue(demos.get(0));
demosList.setAcceptableValues(demos);
buttons.add(demosList);
apeDemo=demos.get(0);
Button init=new Button("Restart",new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
// Defer the reset to the timer tick so it doesn't race the render loop.
doInit=true;
focusPanel.setFocus(true);
}
});
main.getButtons().add(buttons);
buttons.add(init);
init();
}
// Timestamp of the previous timer tick (millis); used only for debug logging.
long last;
// Set by the Restart button; the timer performs the actual init() on its next tick.
boolean doInit;
// Shared color lookup available to demos.
public static Map<Object,Integer> colorMap=new HashMap<Object, Integer>();
// Rebuilds the scene from scratch and (re)initializes the current demo and its controls.
private void init(){
main.getControler().clear();
threeObjects.clear();
if(renderer.gwtGetType().equals("css3d")){
// CSS3D keeps DOM children around; clear them explicitly before rebuilding.
((CSS3DRenderer)renderer).gwtClear();
//renderer.getDomElement().getStyle().setPosition(Position.ABSOLUTE);
//renderer.getDomElement().getStyle().setTop(0,Unit.PX);
}
scene = THREE.Scene();
scene.add(camera);
objRoot=THREE.Object3D();
objRoot.setPosition(-width/2, 0, 0);
scene.add(objRoot);
scene.add(light);
if(apeDemo!=null){
apeDemo.initialize();
if(apeDemo.createControler()!=null){
main.getControler().add(apeDemo.createControler());
}
}
}
// One frame: sync every body and joint into the scene, render, then advance the physics.
private void updateCanvas() {
if(apeDemo==null){
return;//useless
}
World world=apeDemo.getWorld();
// JBox2D exposes bodies/joints as intrusive linked lists.
for(Body b=world.getBodyList();b!=null;b=b.getNext()){
drawBody(b);
}
for(Joint joint=world.getJointList();joint!=null;joint=joint.getNext()){
drawJoint(joint);
}
renderer.render(scene, camera);
apeDemo.step();
}
// Scratch vectors reused by drawJoint() every frame to avoid per-frame allocation.
Vec2 jointA=new Vec2();
Vec2 jointB=new Vec2();
Vec2 jointCenter=new Vec2();
// Renders a joint as a thin line object positioned between its two anchors.
// The scene object is created lazily on first sight of the joint and cached.
private void drawJoint(Joint joint) {
joint.getAnchorA(jointA);
joint.getAnchorB(jointB);
float radian=Box2DUtils.calculateRadian(jointA, jointB);
Box2DUtils.getCenter(jointA, jointB,jointCenter);
Object3D obj=threeJoints.get(joint);
if(obj==null){
Canvas dotCanvas=CanvasUtils.createCanvas(1, 2);//to bold
dotCanvas.getContext2d().setFillStyle("#008");
dotCanvas.getContext2d().fillRect(0, 0, 1, 2);
obj=createCanvasObject(dotCanvas,dotCanvas.getCoordinateSpaceWidth(),dotCanvas.getCoordinateSpaceHeight());
threeJoints.put(joint, obj);
objRoot.add(obj);
}
if(joint instanceof DistanceJoint){
// Stretch the 1px sprite to the joint's rest length (world units * scale / 2).
float length=((DistanceJoint)joint).getLength();
obj.setScale(length*scale/2, 1, 1);
}
// Box2D's y axis points down relative to the scene, hence the negations.
obj.setPosition(jointCenter.x*scale, -jointCenter.y*scale, 0);
obj.getRotation().setZ(-radian);
}
// Syncs one physics body to its scene object, creating and caching the object
// (a canvas-textured sprite of the body's fixtures) on first sight.
private void drawBody(Body body) {
Object3D obj=threeObjects.get(body);
if(obj==null){
Canvas bodyCanvas=createBodyCanvas(body,"#800",true);
//RootPanel.get().add(bodyCanvas);
obj=createCanvasObject(bodyCanvas,bodyCanvas.getCoordinateSpaceWidth(),bodyCanvas.getCoordinateSpaceHeight());
threeObjects.put(body, obj);
//LogUtils.log("create object:"+bodyCanvas.getCoordinateSpaceWidth()+","+bodyCanvas.getCoordinateSpaceHeight());
objRoot.add(obj);
//obj.setScale(scale, scale, scale);
}
Vec2 pos=body.getPosition();
// Box2D's y axis points down relative to the scene, hence the negations.
obj.setPosition(pos.x*scale, -pos.y*scale, 0);
obj.getRotation().setZ(-body.getAngle());
}
/*
private Object3D createColorCircleObject(int r,int g,int b,double alpha,int radius,boolean stroke){
Object3D object;
Canvas canvas=CanvasUtils.createCircleImageCanvas(r, g, b, alpha, (int)(radius), 3,stroke);
//test
Texture texture=THREE.Texture(canvas.getCanvasElement());
texture.setNeedsUpdate(true);
if(!renderer.gwtGetType().equals("css3d")){//webgl and canvas
object=THREE.Mesh(THREE.PlaneGeometry(radius*2, radius*2),
THREE.MeshBasicMaterial(MeshBasicMaterialParameter.create().map(texture).transparent(true)));
}else{
Image img=new Image(canvas.toDataUrl());
object=CSS3DObject.createObject(img.getElement());
}
return object;
}
*/
// Wraps a canvas in a renderable object: a textured plane mesh for webgl/canvas
// renderers, or a DOM-backed CSS3DObject (via a data-URL image) for css3d.
private Object3D createCanvasObject(Canvas canvas,int w,int h){
Object3D object;
Texture texture=THREE.Texture(canvas.getCanvasElement());
// Canvas-backed textures must be flagged for upload before first render.
texture.setNeedsUpdate(true);
if(!renderer.gwtGetType().equals("css3d")){
object=THREE.Mesh(THREE.PlaneGeometry(w, h),
THREE.MeshBasicMaterial(MeshBasicMaterialParameter.create().map(texture).transparent(true)));
}else{
VerticalPanel v=new VerticalPanel();
v.setSize(w+"px", h+"px");
Image img=new Image(canvas.toDataUrl());
v.add(img);
//LogUtils.log("img:"+img.getWidth()+":"+img.getHeight());
object=CSS3DObject.createObject(v.getElement());
}
return object;
}
/*
private Object3D createColorRectObject(int r,int g,int b,double alpha,int width,int height){
Object3D object;
if(!renderer.gwtGetType().equals("css3d")){
MeshBasicMaterialBuilder basicMaterial=MeshBasicMaterialBuilder.create().color(r,g,b).opacity(alpha)
.transparent(true);
object=THREE.Mesh(THREE.PlaneGeometry(width, height),
basicMaterial.build());
}else{
Image img=new Image(CanvasUtils.createColorRectImageDataUrl(r, g, b, 1, (int)width, (int)height));
object=CSS3DObject.createObject(img.getElement());
}
return object;
}
*/
// Currently running demo; swapped by the demo list box.
private Box2dDemo apeDemo;
// Keyboard-focusable wrapper around the render surface.
private FocusPanel focusPanel;
// Top-level UI, exposed statically so demos can attach controls.
public static MainUi main;
private Light light;
// Currently a no-op: the old 2D-canvas drawing path is kept here commented out
// for reference; bodies are rendered through drawBody()/createBodyCanvas() instead.
public void drawShape(Body body){
//Canvas bodyCanvas=createBodyCanvas(body,"#800",true);
//Vec2 pos=body.getPosition();
//canvas.getContext2d().drawImage(bodyCanvas.getCanvasElement(), pos.x-(float)bodyCanvas.getCoordinateSpaceWidth()/2, pos.y-(float)bodyCanvas.getCoordinateSpaceHeight()/2);
}
/**
 * Renders every fixture of a Box2D body into a newly created canvas sized to
 * the body's (symmetric) bounding box scaled by {@code scale}.
 *
 * @param body   body whose polygon/circle fixtures are drawn
 * @param style  CSS color used for the outline or fill
 * @param stroke true to stroke outlines, false to fill solid shapes
 * @return a canvas containing the rendered body, at least 1x1 px
 */
public Canvas createBodyCanvas(Body body, String style, boolean stroke) {
    List<Shape> shapes = new ArrayList<Shape>();
    for (Fixture fixture = body.getFixtureList(); fixture != null; fixture = fixture.getNext()) {
        ShapeType type = fixture.getType();
        if (type == ShapeType.POLYGON || type == ShapeType.CIRCLE) {
            // TODO support circles properly for sizing; calculateBox() already
            // treats a circle as its bounding square.
            shapes.add(fixture.getShape());
        }
    }
    // TODO swap y-coordinate (Box2D y points up, canvas y points down).
    AABB aabb = calculateBox(shapes);
    // Symmetric extent around the origin, in pixels.
    int w = (int) Math.max(Math.abs(aabb.upperBound.x), Math.abs(aabb.lowerBound.x)) * 2 * scale;
    int h = (int) Math.max(Math.abs(aabb.upperBound.y), Math.abs(aabb.lowerBound.y)) * 2 * scale;
    // Guard against degenerate bodies: a canvas needs a positive size.
    if (w <= 0) {
        w = 1;
    }
    if (h <= 0) {
        h = 1;
    }
    // Canvas-space offset of the body origin (float division so the 1px
    // fallback above is still centered correctly).
    float offx = w / 2f;
    float offy = h / 2f;
    Canvas canvas = CanvasUtils.createCanvas(w, h);
    Context2d context = canvas.getContext2d();
    context.setLineWidth(3);
    context.setFillStyle("#eee");
    for (Fixture fixture = body.getFixtureList(); fixture != null; fixture = fixture.getNext()) {
        ShapeType type = fixture.getType();
        if (type == ShapeType.POLYGON) {
            PolygonShape poly = (PolygonShape) fixture.getShape();
            int size = poly.m_vertexCount;
            context.beginPath();
            context.moveTo(poly.m_vertices[0].x * scale + offx, poly.m_vertices[0].y * scale + offy);
            for (int i = 1; i < size; i++) {
                context.lineTo(poly.m_vertices[i].x * scale + offx, poly.m_vertices[i].y * scale + offy);
            }
            // closePath() draws the closing segment back to the first vertex,
            // so the explicit lineTo(v0) of the old code is unnecessary.
            context.closePath();
            paintPath(context, style, stroke);
        } else if (type == ShapeType.CIRCLE) {
            CircleShape circle = (CircleShape) fixture.getShape();
            context.beginPath();
            // Bug fix: Context2d.arc() takes angles in RADIANS; the old end
            // angle of 360 swept the circle dozens of times. 2*PI is one turn.
            context.arc(w / 2.0, h / 2.0, circle.m_radius * scale, 0, 2 * Math.PI);
            context.closePath();
            paintPath(context, style, stroke);
        }
    }
    return canvas;
}

/** Strokes or fills the context's current path with the given CSS style. */
private void paintPath(Context2d context, String style, boolean stroke) {
    if (stroke) {
        context.setStrokeStyle(style);
        context.stroke();
    } else {
        context.setFillStyle(style);
        context.fill();
    }
}
/**
 * Computes the axis-aligned bounding box enclosing every vertex of the given
 * shapes. Circles contribute the four corners of their bounding square.
 * An empty input yields a default-constructed AABB.
 */
public AABB calculateBox(List<Shape> polygons) {
    // Collect every candidate corner point first.
    List<Vec2> points = new ArrayList<Vec2>();
    for (Shape shape : polygons) {
        if (shape.getType() == ShapeType.POLYGON) {
            PolygonShape polygon = (PolygonShape) shape;
            for (int i = 0; i < polygon.m_vertexCount; ++i) {
                points.add(polygon.m_vertices[i]);
            }
        } else if (shape.getType() == ShapeType.CIRCLE) {
            float r = ((CircleShape) shape).m_radius;
            points.add(new Vec2(-r, -r));
            points.add(new Vec2(r, -r));
            points.add(new Vec2(-r, r));
            points.add(new Vec2(r, r));
        }
    }
    AABB aabb = new AABB();
    if (points.isEmpty()) {
        return aabb;
    }
    // Seed the extremes with the first point, then widen them.
    Vec2 first = points.get(0);
    float minX = first.x;
    float minY = first.y;
    float maxX = first.x;
    float maxY = first.y;
    for (Vec2 v : points) {
        if (v.x < minX) {
            minX = v.x;
        }
        if (v.y < minY) {
            minY = v.y;
        }
        if (v.x > maxX) {
            maxX = v.x;
        }
        if (v.y > maxY) {
            maxY = v.y;
        }
    }
    aabb.lowerBound.x = minX;
    aabb.lowerBound.y = minY;
    aabb.upperBound.x = maxX;
    aabb.upperBound.y = maxY;
    return aabb;
}
}
| |
package org.zstack.core.errorcode;
import com.google.gson.JsonParser;
import com.google.gson.JsonSyntaxException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.math.NumberUtils;
import org.json.JSONException;
import org.springframework.beans.factory.annotation.Autowired;
import org.zstack.core.Platform;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.config.GlobalConfigException;
import org.zstack.core.config.GlobalConfigValidatorExtensionPoint;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.Q;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.UpdateQuery;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.thread.ThreadFacade;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.AbstractService;
import org.zstack.header.core.Completion;
import org.zstack.header.core.ReturnValueCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.*;
import org.zstack.header.message.Message;
import org.zstack.utils.CollectionDSL;
import org.zstack.utils.TimeUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.gson.JSONObjectUtil;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.path.PathUtil;
import org.zstack.utils.string.ErrorCodeElaboration;
import org.zstack.utils.string.StringSimilarity;
import java.io.File;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.*;
import java.util.stream.Collectors;
import static org.zstack.core.Platform.argerr;
import static org.zstack.core.Platform.operr;
/**
 * Service managing error-code "elaborations" — regex-based templates that map
 * raw error messages to human-readable explanations. Handles reloading
 * templates, querying them by regex/category/code, listing categories,
 * reporting errors that matched no template, and pre-validating template
 * content before it is loaded.
 *
 * Created by mingjian.deng on 2018/12/1.
 */
public class ElaborationManagerImpl extends AbstractService {
    static final CLogger logger = Utils.getLogger(ElaborationManagerImpl.class);
    @Autowired
    private CloudBus bus;
    @Autowired
    private DatabaseFacade dbf;
    @Autowired
    private ThreadFacade thdf;

    @Override
    public void handleMessage(Message msg) {
        if (msg instanceof APIReloadElaborationMsg) {
            handle((APIReloadElaborationMsg) msg);
        } else if (msg instanceof APIGetElaborationsMsg) {
            handle((APIGetElaborationsMsg) msg);
        } else if (msg instanceof APIGetElaborationCategoriesMsg) {
            handle((APIGetElaborationCategoriesMsg) msg);
        } else if (msg instanceof APIGetMissedElaborationMsg) {
            handle((APIGetMissedElaborationMsg) msg);
        } else if (msg instanceof APICheckElaborationContentMsg) {
            handle((APICheckElaborationContentMsg) msg);
        } else {
            bus.dealWithUnknownMessage(msg);
        }
    }

    /**
     * Validates elaboration templates coming either from a file/folder on disk
     * ({@code filename}) or from a raw JSON string ({@code jsonContent});
     * exactly one of the two must be non-null.
     *
     * Runs a chain of checks (file naming, JSON schema, duplicate/existing
     * regexes and error codes, missing categories/messages) and delivers the
     * accumulated problems via {@code completion}; an empty list means clean.
     */
    private void preCheckElaborationContent(String filename, String jsonContent, ReturnValueCompletion<List<ElaborationCheckResult>> completion) {
        if (filename == null && jsonContent == null) {
            completion.fail(argerr("non file or jsoncontent input"));
            return;
        }
        if (filename != null && jsonContent != null) {
            completion.fail(argerr("file or jsoncontent cannot both nonempty"));
            return;
        }
        // Templates shipped on the classpath are exempt from the
        // "already existed" checks below — they ARE the existing templates.
        final boolean isClassPathFolder = (StringSimilarity.classPathFolder != null && StringSimilarity.classPathFolder.getAbsolutePath().equalsIgnoreCase(filename));
        FlowChain checks = FlowChainBuilder.newShareFlowChain();
        checks.setName(String.format("check-elaborations-for-%s", filename));
        checks.then(new ShareFlow() {
            List<String> files = new ArrayList<>();
            Map<String, List<ErrorCodeElaboration>> contents = new HashMap<>();
            List<ElaborationCheckResult> results = new ArrayList<>();

            @Override
            public void setup() {
                if (filename != null) {
                    flow(new NoRollbackFlow() {
                        @Override
                        public void run(FlowTrigger trigger, Map data) {
                            try {
                                File folder = new File(filename);
                                if (folder.isFile()) {
                                    files.add(folder.getAbsolutePath());
                                } else {
                                    PathUtil.scanFolder(files, folder.getAbsolutePath());
                                }
                            } catch (Exception e) {
                                trigger.fail(operr("Unable to scan folder: %s", e.getMessage()));
                                return;
                            }
                            if (files.isEmpty()) {
                                trigger.fail(argerr("%s is not existed or is empty folder", filename));
                            } else {
                                trigger.next();
                            }
                        }
                    });
                    flow(new NoRollbackFlow() {
                        String __name__ = "FileNameWithoutJson";

                        @Override
                        public void run(FlowTrigger trigger, Map data) {
                            for (String file : files) {
                                String name = PathUtil.fileName(file);
                                if (!name.endsWith(".json")) {
                                    results.add(new ElaborationCheckResult(file, null, ElaborationFailedReason.FileNameWithoutJson.toString()));
                                }
                            }
                            trigger.next();
                        }
                    });
                    flow(new NoRollbackFlow() {
                        String __name__ = "InValidJsonArraySchema";

                        @Override
                        public void run(FlowTrigger trigger, Map data) {
                            // Iterate over a snapshot so invalid entries can be removed
                            // from 'files' without a ConcurrentModificationException.
                            List<String> snapshot = CollectionDSL.list();
                            snapshot.addAll(files);
                            for (String file : snapshot) {
                                File templateFile = new File(file);
                                try {
                                    String content = FileUtils.readFileToString(templateFile);
                                    // Parse twice on purpose: JsonParser reports syntax
                                    // errors, JSONObjectUtil enforces the
                                    // "array of ErrorCodeElaboration" shape.
                                    new JsonParser().parse(content);
                                    List<ErrorCodeElaboration> errs = JSONObjectUtil.toCollection(content, ArrayList.class, ErrorCodeElaboration.class);
                                    contents.put(file, errs);
                                } catch (IOException e) {
                                    trigger.fail(Platform.operr(String.format("read error elaboration template files [%s] failed, due to: %s", templateFile, e.getMessage())));
                                    return;
                                } catch (JsonSyntaxException e) {
                                    results.add(new ElaborationCheckResult(file, null, ElaborationFailedReason.InValidJsonSchema.toString()));
                                    files.remove(file);
                                } catch (JSONException e) {
                                    results.add(new ElaborationCheckResult(file, null, ElaborationFailedReason.InValidJsonArraySchema.toString()));
                                    files.remove(file);
                                } catch (Exception e) {
                                    logger.debug(e.getMessage());
                                    results.add(new ElaborationCheckResult(file, null, ElaborationFailedReason.InValidJsonArraySchema.toString()));
                                    files.remove(file);
                                }
                            }
                            trigger.next();
                        }
                    });
                } else {
                    flow(new NoRollbackFlow() {
                        String __name__ = "InValidJsonArraySchema";

                        @Override
                        public void run(FlowTrigger trigger, Map data) {
                            // Bug fix: this pseudo source name used to be declared
                            // inside the try block, so the catch blocks resolved
                            // 'filename' to the outer parameter — null on this branch —
                            // and failures were reported without a source name.
                            final String jsonName = "input-json";
                            try {
                                new JsonParser().parse(jsonContent);
                                List<ErrorCodeElaboration> errs = JSONObjectUtil.toCollection(jsonContent, ArrayList.class, ErrorCodeElaboration.class);
                                contents.put(jsonName, errs);
                            } catch (JsonSyntaxException e) {
                                results.add(new ElaborationCheckResult(jsonName, null, ElaborationFailedReason.InValidJsonSchema.toString()));
                            } catch (JSONException e) {
                                results.add(new ElaborationCheckResult(jsonName, null, ElaborationFailedReason.InValidJsonArraySchema.toString()));
                            } catch (Exception e) {
                                logger.debug(e.getMessage());
                                results.add(new ElaborationCheckResult(jsonName, null, ElaborationFailedReason.InValidJsonArraySchema.toString()));
                            }
                            trigger.next();
                        }
                    });
                }
                flow(new NoRollbackFlow() {
                    String __name__ = "RegexAlreadyExisted, DuplicatedRegex, MessageNotFound and RegexNotFound";

                    @Override
                    public void run(FlowTrigger trigger, Map data) {
                        // Tracks regexes seen so far across ALL files, so duplicates
                        // between files are flagged too.
                        HashSet<String> sets = new HashSet<>();
                        contents.forEach((f, c) -> {
                            for (ErrorCodeElaboration err : c) {
                                String content = String.format("%s.%s: [%s]", err.getCategory(), err.getCode(), err.getRegex());
                                if (err.getRegex() == null || err.getRegex().isEmpty()) {
                                    results.add(new ElaborationCheckResult(f, content, ElaborationFailedReason.RegexNotFound.toString()));
                                    continue;
                                }
                                if (err.getMessage_cn() == null || err.getMessage_cn().isEmpty()) {
                                    results.add(new ElaborationCheckResult(f, content, ElaborationFailedReason.MessageNotFound.toString()));
                                }
                                if (!isClassPathFolder && StringSimilarity.regexContained(err.getRegex())) {
                                    results.add(new ElaborationCheckResult(f, content, ElaborationFailedReason.RegexAlreadyExisted.toString()));
                                }
                                if (sets.contains(err.getRegex())) {
                                    results.add(new ElaborationCheckResult(f, content, ElaborationFailedReason.DuplicatedRegex.toString()));
                                } else {
                                    sets.add(err.getRegex());
                                }
                            }
                        });
                        trigger.next();
                    }
                });
                flow(new NoRollbackFlow() {
                    String __name__ = "CategoryNotFound and NotSameCategoriesInFile";

                    @Override
                    public void run(FlowTrigger trigger, Map data) {
                        contents.forEach((f, c) -> {
                            for (ErrorCodeElaboration err : c) {
                                String content = String.format("%s.%s: [%s]", err.getCategory(), err.getCode(), err.getRegex());
                                if (err.getCategory() == null || err.getCategory().isEmpty()) {
                                    results.add(new ElaborationCheckResult(f, content, ElaborationFailedReason.CategoryNotFound.toString()));
                                }
                            }
                        });
                        trigger.next();
                    }
                });
                flow(new NoRollbackFlow() {
                    String __name__ = "DuplicatedErrorCode and ErrorCodeAlreadyExisted";

                    @Override
                    public void run(FlowTrigger trigger, Map data) {
                        HashSet<String> sets = new HashSet<>();
                        // Bug fix: the old forEach-lambda version called trigger.fail()
                        // on a non-numeric code but the 'return' only left the lambda —
                        // iteration continued and trigger.next() still fired afterwards.
                        // A plain loop lets us stop the whole flow on failure.
                        for (Map.Entry<String, List<ErrorCodeElaboration>> entry : contents.entrySet()) {
                            String f = entry.getKey();
                            for (ErrorCodeElaboration err : entry.getValue()) {
                                if (err.getCode() == null || err.getCode().isEmpty() || err.getCategory() == null || err.getCategory().isEmpty()) {
                                    continue;
                                }
                                if (!NumberUtils.isNumber(err.getCode())) {
                                    trigger.fail(operr("elaboration code must be number!"));
                                    return;
                                }
                                String code = err.getCategory() + "." + err.getCode();
                                if (!isClassPathFolder && StringSimilarity.errorCodeContained(code)) {
                                    results.add(new ElaborationCheckResult(f, code, ElaborationFailedReason.ErrorCodeAlreadyExisted.toString()));
                                }
                                if (sets.contains(code)) {
                                    results.add(new ElaborationCheckResult(f, code, ElaborationFailedReason.DuplicatedErrorCode.toString()));
                                } else {
                                    sets.add(code);
                                }
                            }
                        }
                        trigger.next();
                    }
                });
                done(new FlowDoneHandler(completion) {
                    @Override
                    public void handle(Map data) {
                        completion.success(results);
                    }
                });
                error(new FlowErrorHandler(completion) {
                    @Override
                    public void handle(ErrorCode errCode, Map data) {
                        completion.fail(errCode);
                    }
                });
            }
        }).start();
    }

    private void handle(final APICheckElaborationContentMsg msg) {
        APICheckElaborationContentReply reply = new APICheckElaborationContentReply();
        preCheckElaborationContent(msg.getElaborateFile(), msg.getElaborateContent(), new ReturnValueCompletion<List<ElaborationCheckResult>>(msg) {
            @Override
            public void success(List<ElaborationCheckResult> returnValue) {
                // Group problems of the same kind together for readability.
                returnValue.sort(Comparator.comparing(ElaborationCheckResult::getReason));
                reply.setResults(returnValue);
                bus.reply(msg, reply);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                reply.setError(errorCode);
                bus.reply(msg, reply);
            }
        });
    }

    /**
     * Returns un-matched elaboration records seen at least {@code repeats}
     * times (default 1), optionally restricted to records newer than
     * {@code from} — either 'yyyy-MM-dd HH:mm:ss' or epoch milliseconds.
     *
     * @throws OperationFailureException if {@code from} has neither format
     */
    private List<ElaborationVO> getMissedElaborations(Long repeats, String from) {
        Long times = repeats != null ? repeats : 1L;
        if (from == null) {
            return Q.New(ElaborationVO.class).eq(ElaborationVO_.matched, false).gte(ElaborationVO_.repeats, times).list();
        }
        if (TimeUtils.isValidTimestampFormat(from)) {
            long start = TimeUtils.parseFormatStringToTimeStamp(from);
            return Q.New(ElaborationVO.class).eq(ElaborationVO_.matched, false).gte(ElaborationVO_.repeats, times).
                    gte(ElaborationVO_.lastOpDate, new Timestamp(start)).list();
        } else if (NumberUtils.isNumber(from)) {
            try {
                return Q.New(ElaborationVO.class).eq(ElaborationVO_.matched, false).gte(ElaborationVO_.repeats, times).
                        gte(ElaborationVO_.lastOpDate, new Timestamp(Long.valueOf(from))).list();
            } catch (NumberFormatException e) {
                // isNumber() accepts values that overflow Long.valueOf.
                throw new OperationFailureException(argerr("%s is not a Long value Number", from));
            }
        } else {
            throw new OperationFailureException(argerr("arg 'startTime' should format like 'yyyy-MM-dd HH:mm:ss' or '1545380003000'"));
        }
    }

    private void handle(final APIGetMissedElaborationMsg msg) {
        APIGetMissedElaborationReply reply = new APIGetMissedElaborationReply();
        List<ElaborationVO> vos = getMissedElaborations(msg.getRepeats(), msg.getStartTime());
        vos.forEach(vo -> {
            // Only report records that still have no matching template.
            ErrorCodeElaboration e = StringSimilarity.findSimilary(vo.getErrorInfo());
            if (!StringSimilarity.matched(e)) {
                reply.getInventories().add(ElaborationInventory.valueOf(vo));
            }
        });
        bus.reply(msg, reply);
    }

    /** Deletes un-matched records older than the configured retention time. */
    private void eliminateErrors() {
        String time = ElaborateGlobalConfig.ELIMILATE_TIME.value();
        // Compute the cutoff once so count and delete use the same boundary
        // (the old code evaluated "now" twice).
        Timestamp cutoff = new Timestamp(new Date().getTime() - TimeUtils.parseTimeInMillis(time));
        long count = Q.New(ElaborationVO.class).eq(ElaborationVO_.matched, false).lt(ElaborationVO_.lastOpDate, cutoff).count();
        if (count > 0) {
            logger.debug(String.format("clean [%s] records which are not matched error code in db", count));
            UpdateQuery.New(ElaborationVO.class).eq(ElaborationVO_.matched, false).lt(ElaborationVO_.lastOpDate, cutoff).delete();
        }
    }

    /**
     * Re-validates the classpath templates, reloads them, purges stale
     * un-matched records and re-marks records that now match a template.
     */
    private void refreshElaboration(final Completion completion) {
        FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
        chain.setName("refresh-elaboration");
        chain.then(new NoRollbackFlow() {
            String __name__ = "check elaboration contents first";

            @Override
            public void run(FlowTrigger trigger, Map data) {
                preCheckElaborationContent(StringSimilarity.classPathFolder.getAbsolutePath(), null, new ReturnValueCompletion<List<ElaborationCheckResult>>(trigger) {
                    @Override
                    public void success(List<ElaborationCheckResult> returnValue) {
                        if (returnValue.isEmpty()) {
                            trigger.next();
                        } else {
                            // Abort the reload on the first reported problem.
                            trigger.fail(operr("%s: %s", returnValue.get(0).getContent(), returnValue.get(0).getReason()));
                        }
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        trigger.fail(errorCode);
                    }
                });
            }
        }).then(new NoRollbackFlow() {
            String __name__ = "refresh error templates";

            @Override
            public void run(FlowTrigger trigger, Map data) {
                StringSimilarity.refreshErrorTemplates();
                eliminateErrors();
                List<ElaborationVO> vos = Q.New(ElaborationVO.class).gte(ElaborationVO_.repeats, 1).eq(ElaborationVO_.matched, false).orderBy(ElaborationVO_.lastOpDate, SimpleQuery.Od.DESC).list();
                if (!vos.isEmpty()) {
                    // A record counts as matched once its message equals any
                    // template's message_cn text.
                    List<String> messages = StringSimilarity.getElaborations().stream().map(ErrorCodeElaboration::getMessage_cn).collect(Collectors.toList());
                    for (ElaborationVO vo : vos) {
                        if (messages.contains(vo.getErrorInfo())) {
                            vo.setMatched(true);
                            dbf.updateAndRefresh(vo);
                        }
                    }
                }
                StringSimilarity.resetCachedErrors();
                trigger.next();
            }
        }).done(new FlowDoneHandler(completion) {
            @Override
            public void handle(Map data) {
                completion.success();
            }
        }).error(new FlowErrorHandler(completion) {
            @Override
            public void handle(ErrorCode errCode, Map data) {
                completion.fail(errCode);
            }
        }).start();
    }

    private void handle(final APIReloadElaborationMsg msg) {
        APIReloadElaborationEvent evt = new APIReloadElaborationEvent(msg.getId());
        // Serialize reloads: the sync signature equals the task name, so
        // concurrent refreshes of the shared template state are queued.
        thdf.chainSubmit(new ChainTask(msg) {
            @Override
            public String getSyncSignature() {
                return getName();
            }

            @Override
            public void run(SyncTaskChain chain) {
                refreshElaboration(new Completion(chain) {
                    @Override
                    public void success() {
                        bus.publish(evt);
                        chain.next();
                    }

                    @Override
                    public void fail(ErrorCode errorCode) {
                        evt.setError(errorCode);
                        bus.publish(evt);
                        chain.next();
                    }
                });
            }

            @Override
            public String getName() {
                return "reload-elaborations";
            }
        });
    }

    private void handle(final APIGetElaborationsMsg msg) {
        // Fail fast: at least one of regex/category must be supplied (the old
        // code only threw after running the — then empty — branches).
        if (msg.getCategory() == null && msg.getRegex() == null) {
            throw new OperationFailureException(Platform.argerr("input args 'regex' or 'category' must be set"));
        }
        APIGetElaborationsReply reply = new APIGetElaborationsReply();
        if (msg.getRegex() != null) {
            ErrorCodeElaboration e = StringSimilarity.findSimilary(msg.getRegex());
            if (StringSimilarity.matched(e)) {
                // Honor the category filter when both regex and category are set.
                if (msg.getCategory() == null || msg.getCategory().equalsIgnoreCase(e.getCategory())) {
                    reply.getContents().add(new ElaborationContent(e));
                }
            }
        } else {
            List<ErrorCodeElaboration> elaborations = StringSimilarity.getElaborations();
            elaborations.forEach(e -> {
                // Pseudo category "all" returns every template.
                if (msg.getCategory().equalsIgnoreCase("all")) {
                    reply.getContents().add(new ElaborationContent(e));
                    return;
                }
                if (e.getCategory().equalsIgnoreCase(msg.getCategory())) {
                    if (msg.getCode() == null || e.getCode().equalsIgnoreCase(msg.getCode())) {
                        reply.getContents().add(new ElaborationContent(e));
                    }
                }
            });
        }
        Collections.sort(reply.getContents());
        bus.reply(msg, reply);
    }

    private void handle(final APIGetElaborationCategoriesMsg msg) {
        APIGetElaborationCategoriesReply reply = new APIGetElaborationCategoriesReply();
        // Count templates per category.
        Map<String, ElaborationCategory> categoryMap = new HashMap<>();
        for (ErrorCodeElaboration elaboration : StringSimilarity.getElaborations()) {
            ElaborationCategory c = categoryMap.get(elaboration.getCategory());
            if (c != null) {
                // Already present — just bump the counter (the old re-put of the
                // same reference was redundant).
                c.setNum(c.getNum() + 1);
            } else {
                categoryMap.put(elaboration.getCategory(), new ElaborationCategory(elaboration.getCategory(), 1));
            }
        }
        categoryMap.forEach((key, value) -> reply.getCategories().add(value));
        bus.reply(msg, reply);
    }

    @Override
    public String getId() {
        return bus.makeLocalServiceId(ElaborationConsstants.SERVICE_ID);
    }

    @Override
    public boolean start() {
        installGlobalConfigValidator();
        return true;
    }

    /** Rejects retention-time values TimeUtils cannot parse. */
    private void installGlobalConfigValidator() {
        ElaborateGlobalConfig.ELIMILATE_TIME.installValidateExtension(new GlobalConfigValidatorExtensionPoint() {
            @Override
            public void validateGlobalConfig(String category, String name, String oldValue, String newValue) throws GlobalConfigException {
                if (!TimeUtils.isValidTimeFormat(newValue)) {
                    throw new GlobalConfigException(String.format("%s is not a valid format string;" +
                            " a format string consists of a number ending with suffix s/S/m/M/h/H/d/D/w/W/y/Y or without suffix;" +
                            " for example, 3h, 1y", newValue));
                }
            }
        });
    }

    @Override
    public boolean stop() {
        return true;
    }
}
| |
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import static org.openqa.selenium.Ignore.Driver.SELENESE;
import org.openqa.selenium.environment.GlobalTestEnvironment;
import org.openqa.selenium.environment.webserver.AppServer;
import static org.openqa.selenium.Ignore.Driver.IE;
import static org.openqa.selenium.Ignore.Driver.REMOTE;
import java.net.URI;
import java.net.InetAddress;
import java.util.Calendar;
import java.util.Date;
import java.util.Iterator;
import java.util.Random;
import java.util.Set;
/**
 * Tests of WebDriver's cookie management API (JUnit 3 style: every public
 * {@code testXxx} method is discovered as a test case).
 */
public class CookieImplementationTest extends AbstractDriverTestCase {
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Start every test on a known page with an empty cookie jar.
        driver.get(simpleTestPage);
        driver.manage().deleteAllCookies();
    }

    @JavascriptEnabled
    @Ignore(SELENESE)
    public void testShouldGetCookieByName() {
        // Random key avoids collisions with cookies left over from other tests.
        String key = String.format("key_%d", new Random().nextInt());
        ((JavascriptExecutor) driver).executeScript("document.cookie = arguments[0] + '=set';", key);
        Cookie cookie = driver.manage().getCookieNamed(key);
        assertEquals("set", cookie.getValue());
    }

    @JavascriptEnabled
    @Ignore(SELENESE)
    public void testShouldBeAbleToAddCookie() {
        String key = String.format("key_%d", new Random().nextInt());
        Cookie cookie = new Cookie(key, "foo");
        ((JavascriptExecutor) driver).executeScript("return document.cookie");
        driver.manage().addCookie(cookie);
        String current = (String) ((JavascriptExecutor) driver).executeScript("return document.cookie");
        assertTrue(current.contains(key));
    }

    @Ignore(SELENESE)
    public void testGetAllCookies() {
        Random random = new Random();
        String key1 = String.format("key_%d", random.nextInt());
        String key2 = String.format("key_%d", random.nextInt());
        Set<Cookie> cookies = driver.manage().getCookies();
        int count = cookies.size();
        Cookie one = new Cookie(key1, "value");
        Cookie two = new Cookie(key2, "value");
        driver.manage().addCookie(one);
        driver.manage().addCookie(two);
        driver.get(simpleTestPage);
        cookies = driver.manage().getCookies();
        assertEquals(count + 2, cookies.size());
        assertTrue(cookies.contains(one));
        assertTrue(cookies.contains(two));
    }

    @JavascriptEnabled
    @Ignore(SELENESE)
    public void testDeleteAllCookies() {
        ((JavascriptExecutor) driver).executeScript("document.cookie = 'foo=set';");
        int count = driver.manage().getCookies().size();
        assertTrue(count > 0);
        driver.manage().deleteAllCookies();
        count = driver.manage().getCookies().size();
        assertEquals(0, count);
    }

    @JavascriptEnabled
    @Ignore(SELENESE)
    public void testDeleteCookieWithName() {
        Random random = new Random();
        String key1 = String.format("key_%d", random.nextInt());
        String key2 = String.format("key_%d", random.nextInt());
        ((JavascriptExecutor) driver).executeScript("document.cookie = arguments[0] + '=set';", key1);
        ((JavascriptExecutor) driver).executeScript("document.cookie = arguments[0] + '=set';", key2);
        assertNotNull(driver.manage().getCookieNamed(key1));
        assertNotNull(driver.manage().getCookieNamed(key2));
        driver.manage().deleteCookieNamed(key1);
        // Only the named cookie may be removed.
        assertNull(driver.manage().getCookieNamed(key1));
        assertNotNull(driver.manage().getCookieNamed(key2));
    }

    @Ignore(SELENESE)
    public void testShouldNotDeleteCookiesWithASimilarName() {
        String cookieOneName = "fish";
        Cookie cookie1 = new Cookie(cookieOneName, "cod");
        Cookie cookie2 = new Cookie(cookieOneName + "x", "earth");
        WebDriver.Options options = driver.manage();
        options.addCookie(cookie1);
        options.addCookie(cookie2);
        options.deleteCookieNamed(cookieOneName);
        Set<Cookie> cookies = options.getCookies();
        // "fishx" must survive the deletion of "fish".
        assertFalse(cookies.contains(cookie1));
        assertTrue(cookies.contains(cookie2));
    }

    @Ignore(SELENESE)
    public void testAddCookiesWithDifferentPathsThatAreRelatedToOurs() {
        driver.get(simpleTestPage);
        driver.manage().deleteAllCookies();
        Cookie cookie1 = new Cookie("fish", "cod", "/common/animals");
        Cookie cookie2 = new Cookie("planet", "earth", "/common/");
        WebDriver.Options options = driver.manage();
        options.addCookie(cookie1);
        options.addCookie(cookie2);
        AppServer appServer = GlobalTestEnvironment.get().getAppServer();
        // Under /common/animals both cookies are in scope...
        driver.get(appServer.whereIs("animals"));
        Set<Cookie> cookies = options.getCookies();
        assertTrue(cookies.contains(cookie1));
        assertTrue(cookies.contains(cookie2));
        // ...one level up only the /common/ cookie remains visible.
        driver.get(appServer.whereIs(""));
        cookies = options.getCookies();
        assertFalse(cookies.contains(cookie1));
        assertTrue(cookies.contains(cookie2));
    }

    @Ignore(SELENESE)
    public void testCanSetCookiesOnADifferentPathOfTheSameHost() {
        Cookie cookie1 = new Cookie("fish", "cod", "/common/animals");
        Cookie cookie2 = new Cookie("planet", "earth", "/common/galaxy");
        WebDriver.Options options = driver.manage();
        options.addCookie(cookie1);
        options.addCookie(cookie2);
        AppServer appServer = GlobalTestEnvironment.get().getAppServer();
        // Sibling paths must not see each other's cookies.
        driver.get(appServer.whereIs("animals"));
        Set<Cookie> cookies = options.getCookies();
        assertTrue(cookies.contains(cookie1));
        assertFalse(cookies.contains(cookie2));
        driver.get(appServer.whereIs("galaxy"));
        cookies = options.getCookies();
        assertFalse(cookies.contains(cookie1));
        assertTrue(cookies.contains(cookie2));
    }

    @Ignore(SELENESE)
    public void testShouldNotBeAbleToSetDomainToSomethingThatIsUnrelatedToTheCurrentDomain() {
        Cookie cookie1 = new Cookie("fish", "cod");
        WebDriver.Options options = driver.manage();
        options.addCookie(cookie1);
        // whereElseIs() serves from a different host.
        String url = GlobalTestEnvironment.get().getAppServer().whereElseIs("simpleTest.html");
        driver.get(url);
        Cookie cookie = options.getCookieNamed("fish");
        assertNull(String.valueOf(cookie), cookie);
    }

    @Ignore(SELENESE)
    public void testShouldBeAbleToAddToADomainWhichIsRelatedToTheCurrentDomain() {
        String name = gotoValidDomainAndClearCookies();
        if (name == null) {
            System.out.println("Skipping test: unable to find domain name to use");
            return;
        }
        assertNull(driver.manage().getCookieNamed("name"));
        // Drop the first label: a cookie set on the parent domain must be
        // visible from the current (child) domain.
        String shorter = name.replaceFirst(".*?\\.", "");
        Cookie cookie =
                new Cookie("name", "value", shorter, "/", new Date(System.currentTimeMillis() + 100000));
        driver.manage().addCookie(cookie);
        assertNotNull(driver.manage().getCookieNamed("name"));
    }

    @Ignore({REMOTE, SELENESE})
    public void testShouldBeAbleToIncludeLeadingPeriodInDomainName() throws Exception {
        String name = gotoValidDomainAndClearCookies();
        // A raw IPv4 address cannot take a leading-dot domain.
        if (name == null || name.matches("\\d{1,3}(?:\\.\\d{1,3}){3}")) {
            System.out.println("Skipping test: unable to find domain name to use");
            return;
        }
        driver.manage().deleteAllCookies();
        assertNull("Looks like delete all cookies doesn't", driver.manage().getCookieNamed("name"));
        // Replace the first part of the name with a period
        String shorter = name.replaceFirst(".*?\\.", ".");
        Cookie cookie =
                new Cookie("name", "value", shorter, "/", new Date(System.currentTimeMillis() + 100000));
        driver.manage().addCookie(cookie);
        assertNotNull(driver.manage().getCookieNamed("name"));
    }

    @Ignore(SELENESE)
    public void testGetCookieDoesNotRetriveBeyondCurrentDomain() {
        Cookie cookie1 = new Cookie("fish", "cod");
        WebDriver.Options options = driver.manage();
        options.addCookie(cookie1);
        String url = GlobalTestEnvironment.get().getAppServer().whereElseIs("");
        driver.get(url);
        Set<Cookie> cookies = options.getCookies();
        assertFalse(cookies.contains(cookie1));
    }

    @Ignore({IE, SELENESE})
    public void testShouldBeAbleToSetDomainToTheCurrentDomain() throws Exception {
        URI url = new URI(driver.getCurrentUrl());
        String host = url.getHost() + ":" + url.getPort();
        Cookie cookie1 = new Cookie.Builder("fish", "cod").domain(host).build();
        WebDriver.Options options = driver.manage();
        options.addCookie(cookie1);
        driver.get(javascriptPage);
        Set<Cookie> cookies = options.getCookies();
        assertTrue(cookies.contains(cookie1));
    }

    @Ignore(SELENESE)
    public void testShouldWalkThePathToDeleteACookie() {
        // Cookies set at three nesting levels; deleting one must not disturb
        // the cookies on ancestor/descendant paths.
        Cookie cookie1 = new Cookie("fish", "cod");
        driver.manage().addCookie(cookie1);
        driver.get(childPage);
        Cookie cookie2 = new Cookie("rodent", "hamster", "/common/child");
        driver.manage().addCookie(cookie2);
        driver.get(grandchildPage);
        Cookie cookie3 = new Cookie("dog", "dalmation", "/common/child/grandchild/");
        driver.manage().addCookie(cookie3);
        driver.get(GlobalTestEnvironment.get().getAppServer().whereIs("child/grandchild"));
        driver.manage().deleteCookieNamed("rodent");
        assertNull(driver.manage().getCookieNamed("rodent"));
        Set<Cookie> cookies = driver.manage().getCookies();
        assertEquals(2, cookies.size());
        assertTrue(cookies.contains(cookie1));
        assertTrue(cookies.contains(cookie3));
        driver.manage().deleteAllCookies();
        driver.get(grandchildPage);
        cookies = driver.manage().getCookies();
        assertEquals(0, cookies.size());
    }

    @Ignore({IE, SELENESE})
    public void testShouldIgnoreThePortNumberOfTheHostWhenSettingTheCookie() throws Exception {
        URI uri = new URI(driver.getCurrentUrl());
        String host = String.format("%s:%d", uri.getHost(), uri.getPort());
        assertNull(driver.manage().getCookieNamed("name"));
        Cookie cookie = new Cookie.Builder("name", "value").domain(host).build();
        driver.manage().addCookie(cookie);
        assertNotNull(driver.manage().getCookieNamed("name"));
    }

    @Ignore(SELENESE)
    public void testCookieIntegrity() {
        String url = GlobalTestEnvironment.get().getAppServer().whereElseIs("animals");
        driver.get(url);
        driver.manage().deleteAllCookies();
        // Bug fix: expiry one day ahead. The old value omitted the *1000 and
        // only added 86400 MILLISECONDS (~86 seconds), not a day.
        long time = System.currentTimeMillis() + (60 * 60 * 24 * 1000);
        Cookie cookie1 = new Cookie("fish", "cod", "/common/animals", new Date(time));
        WebDriver.Options options = driver.manage();
        options.addCookie(cookie1);
        Set<Cookie> cookies = options.getCookies();
        Iterator<Cookie> iter = cookies.iterator();
        Cookie retrievedCookie = null;
        while (iter.hasNext()) {
            Cookie temp = iter.next();
            if (cookie1.equals(temp)) {
                retrievedCookie = temp;
                break;
            }
        }
        assertNotNull(retrievedCookie);
        //Cookie.equals only compares name, domain and path
        assertEquals(cookie1, retrievedCookie);
    }

    /**
     * Navigates to whichever configured host looks like a real dotted domain
     * name, clears its cookies, and returns that host name (or null if neither
     * host qualifies). Note: the spurious {@code @Ignore} the old code carried
     * here was removed — this is a private helper, not a test case.
     */
    private String gotoValidDomainAndClearCookies() {
        AppServer appServer = GlobalTestEnvironment.get().getAppServer();
        String name = null;
        String hostname = appServer.getHostName();
        if (hostname.matches("\\w+\\.\\w+.*")) {
            name = hostname;
            driver.get(appServer.whereIs("simpleTest.html"));
        }
        hostname = appServer.getAlternateHostName();
        if (name == null && hostname.matches("\\w+\\.\\w+.*")) {
            name = hostname;
            driver.get(appServer.whereElseIs("simpleTest.html"));
        }
        driver.manage().deleteAllCookies();
        return name;
    }
}
| |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.client5.http.impl.classic;
import java.io.IOException;
import java.util.Iterator;
import java.util.Locale;
import org.apache.hc.client5.http.classic.ExecRuntime;
import org.apache.hc.core5.http.ClassicHttpResponse;
import org.apache.hc.core5.http.Header;
import org.apache.hc.core5.http.HttpEntity;
import org.apache.hc.core5.http.ProtocolException;
import org.apache.hc.core5.http.ProtocolVersion;
import org.apache.hc.core5.util.Args;
/**
 * Backward compatibility with HttpClient 4.x.
 * <p>
 * Decorator around a {@link ClassicHttpResponse} that forwards every call to the
 * wrapped response and, on {@link #close()}, also releases the associated
 * {@link ExecRuntime} (when one was supplied).
 *
 * @since 4.3
 */
public final class CloseableHttpResponse implements ClassicHttpResponse {

    // The delegate carrying all message state; never null.
    private final ClassicHttpResponse response;
    // Connection-management hook; may be null for detached responses.
    private final ExecRuntime execRuntime;

    /**
     * Wraps the given response, returning the argument itself when it is already a
     * {@code CloseableHttpResponse}. Null-safe: a null input yields null.
     */
    static CloseableHttpResponse adapt(final ClassicHttpResponse response) {
        if (response == null) {
            return null;
        }
        if (response instanceof CloseableHttpResponse) {
            return (CloseableHttpResponse) response;
        } else {
            // Adapted responses have no runtime, so close() only closes the delegate.
            return new CloseableHttpResponse(response, null);
        }
    }

    CloseableHttpResponse(final ClassicHttpResponse response, final ExecRuntime execRuntime) {
        this.response = Args.notNull(response, "Response");
        this.execRuntime = execRuntime;
    }

    // ---- pure delegation to the wrapped response (no added behavior) ----

    @Override
    public int getCode() {
        return response.getCode();
    }

    @Override
    public HttpEntity getEntity() {
        return response.getEntity();
    }

    @Override
    public boolean containsHeader(final String name) {
        return response.containsHeader(name);
    }

    @Override
    public void setVersion(final ProtocolVersion version) {
        response.setVersion(version);
    }

    @Override
    public void setCode(final int code) {
        response.setCode(code);
    }

    @Override
    public String getReasonPhrase() {
        return response.getReasonPhrase();
    }

    @Override
    public int containsHeaders(final String name) {
        return response.containsHeaders(name);
    }

    @Override
    public void setEntity(final HttpEntity entity) {
        response.setEntity(entity);
    }

    @Override
    public ProtocolVersion getVersion() {
        return response.getVersion();
    }

    @Override
    public void setReasonPhrase(final String reason) {
        response.setReasonPhrase(reason);
    }

    @Override
    public Header[] getHeaders(final String name) {
        return response.getHeaders(name);
    }

    @Override
    public void addHeader(final Header header) {
        response.addHeader(header);
    }

    @Override
    public Locale getLocale() {
        return response.getLocale();
    }

    @Override
    public void addHeader(final String name, final Object value) {
        response.addHeader(name, value);
    }

    @Override
    public void setLocale(final Locale loc) {
        response.setLocale(loc);
    }

    @Override
    public Header getSingleHeader(final String name) throws ProtocolException {
        return response.getSingleHeader(name);
    }

    @Override
    public void setHeader(final Header header) {
        response.setHeader(header);
    }

    @Override
    public Header getFirstHeader(final String name) {
        return response.getFirstHeader(name);
    }

    @Override
    public void setHeader(final String name, final Object value) {
        response.setHeader(name, value);
    }

    @Override
    public void setHeaders(final Header... headers) {
        response.setHeaders(headers);
    }

    @Override
    public void removeHeader(final Header header) {
        response.removeHeader(header);
    }

    @Override
    public void removeHeaders(final String name) {
        response.removeHeaders(name);
    }

    @Override
    public Header getLastHeader(final String name) {
        return response.getLastHeader(name);
    }

    @Override
    public Header[] getAllHeaders() {
        return response.getAllHeaders();
    }

    @Override
    public Iterator<Header> headerIterator() {
        return response.headerIterator();
    }

    @Override
    public Iterator<Header> headerIterator(final String name) {
        return response.headerIterator(name);
    }

    @Override
    public void close() throws IOException {
        // When bound to an execution runtime, guarantee the underlying
        // connection is discarded even if closing the response or
        // disconnecting throws part-way through.
        if (execRuntime != null) {
            try {
                response.close();
                execRuntime.disconnect();
            } finally {
                execRuntime.discardConnection();
            }
        } else {
            response.close();
        }
    }

    @Override
    public String toString() {
        return response.toString();
    }
}
| |
/*
* Copyright (c) 2022 Tectonicus contributors. All rights reserved.
*
* This file is part of Tectonicus. It is subject to the license terms in the LICENSE file found in
* the top-level directory of this distribution. The full list of project contributors is contained
* in the AUTHORS file found in the same location.
*
*/
package tectonicus.view;
import java.io.File;
import java.util.HashSet;
import java.util.Set;
import java.util.StringTokenizer;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.UtilityClass;
import org.joml.Vector3f;
import tectonicus.Minecraft;
import tectonicus.configuration.ImageFormat;
import tectonicus.configuration.LightStyle;
import tectonicus.rasteriser.Rasteriser;
import tectonicus.renderer.PerspectiveCamera;
import tectonicus.world.Sign;
@UtilityClass
public class ViewUtil {
    /** Pixel dimensions of every rendered view image. */
    public static final int VIEW_WIDTH = 2048;
    public static final int VIEW_HEIGHT = 1152;

    /** Camera description derived from a view sign: position, target, up vector and FOV. */
    @Getter
    @AllArgsConstructor
    public static class Viewpoint {
        private final Vector3f eye;
        private final Vector3f lookAt;
        private final Vector3f up;
        @Setter
        private float fov;
    }

    /**
     * Collects the settings tokens from a view sign. Every sign line that starts
     * with '#' (and has more than just the '#') contributes its remainder; the
     * combined text is split on whitespace and each token is trimmed and
     * lower-cased.
     */
    private static Set<String> extractSettings(Sign sign) {
        StringBuilder toParse = new StringBuilder();
        for (int line = 0; line < 4; line++) {
            String text = sign.getText(line);
            if (text.startsWith("#") && text.length() > 1)
                toParse.append(' ').append(text.substring(1));
        }
        Set<String> settings = new HashSet<>();
        StringTokenizer tokeniser = new StringTokenizer(toParse.toString());
        while (tokeniser.hasMoreTokens()) {
            // nextToken() never returns null, so no null guard is needed.
            settings.add(tokeniser.nextToken().trim().toLowerCase());
        }
        return settings;
    }

    /**
     * Returns the integer value of the first encountered setting that starts with
     * {@code prefix} and whose remainder parses as an int (e.g. "h12" with prefix
     * "h" yields 12), or {@code defaultValue} when no such setting exists.
     */
    private static int parseIntSetting(Set<String> settings, String prefix, int defaultValue) {
        for (String setting : settings) {
            if (setting.startsWith(prefix)) {
                try {
                    return Integer.parseInt(setting.substring(prefix.length()).trim());
                } catch (NumberFormatException e) {
                    // Token merely shares the prefix (e.g. "night" for "n"); keep scanning.
                }
            }
        }
        return defaultValue;
    }

    /** Parses the "h<offset>" height-offset setting; defaults to 0. */
    private static int parseHeight(Set<String> settings) {
        int height = parseIntSetting(settings, "h", 0);
        // For 1.18 or higher we need to add 64 to get the correct height
        if (Minecraft.getChunkHeight() > 256) {
            height += 64;
        }
        return height;
    }

    /** Parses the "a<angle>" elevation setting; defaults to 90, clamped to [0, 180]. */
    private static int parseElevation(Set<String> settings) {
        int angle = parseIntSetting(settings, "a", 90);
        if (angle < 0)
            angle = 0;
        if (angle > 180)
            angle = 180;
        return angle;
    }

    /** Parses the "f<fov>" field-of-view setting; defaults to 0 (unset). */
    private static int parseFOV(Set<String> settings) {
        return parseIntSetting(settings, "f", 0);
    }

    /**
     * Builds a {@link Viewpoint} from a view sign: the eye sits at the sign's
     * block centre (plus the height offset), facing the direction the sign faces,
     * tilted by the elevation setting.
     */
    public static Viewpoint findView(Sign sign) {
        Set<String> settings = extractSettings(sign);
        final int heightOffset = parseHeight(settings);
        int elevation = parseElevation(settings);
        final int fov = parseFOV(settings);

        // Sign data encodes 16 compass directions in steps of 22.5 degrees.
        final float angleDeg = 90 / 4.0f * sign.getData() - 90;
        final float angleRad = angleDeg / 360f * 2.0f * (float)Math.PI;
        Vector3f eye = new Vector3f(sign.getX() + 0.5f, sign.getY() + 0.5f + heightOffset, sign.getZ() + 0.5f);
        Vector3f up, forward, lookAt;
        if (elevation == 0)
        {
            // Looking straight up
            up = new Vector3f((float)Math.cos(angleRad), 0, (float)Math.sin(angleRad));
            forward = new Vector3f(0, 1, 0);
        }
        else if (elevation == 180)
        {
            // Looking straight down
            up = new Vector3f((float)Math.cos(angleRad), 0, (float)Math.sin(angleRad));
            forward = new Vector3f(0, -1, 0);
        }
        else
        {
            // Use elevation angle
            final int adjustedElevation = elevation - 90; // convert into 0 straight ahead, -90 as up, +90 as down
            final float elevationRads = (adjustedElevation / 360.0f) * 2.0f * (float)Math.PI;
            final float dy = -(float)Math.tan(elevationRads);
            up = new Vector3f(0, 1, 0);
            forward = new Vector3f((float)Math.cos(angleRad), dy, (float)Math.sin(angleRad));
        }
        lookAt = new Vector3f(eye.x + forward.x, eye.y + forward.y, eye.z + forward.z);

        // Make orthogonal
        Vector3f right = new Vector3f();
        forward.cross(up, right);
        right.cross(forward, up);

        return new Viewpoint(eye, lookAt, up, fov);
    }

    /** Creates a perspective camera positioned at the given viewpoint. */
    public static PerspectiveCamera createCamera(Rasteriser rasteriser, Viewpoint view, final int drawDistance) {
        PerspectiveCamera perspectiveCamera = new PerspectiveCamera(rasteriser, VIEW_WIDTH, VIEW_HEIGHT);
        perspectiveCamera.lookAt(view.getEye(), view.getLookAt(), view.getUp(), view.getFov(), (float) VIEW_WIDTH /(float) VIEW_HEIGHT, 0.1f, drawDistance);
        return perspectiveCamera;
    }

    /** Output file for a view image, named after the sign's block coordinates. */
    public static File createViewFile(File viewsDir, Sign sign, ImageFormat imageFormat) {
        return new File(viewsDir, "View_" + sign.getX() + "_" + sign.getY() + "_" + sign.getZ() + "." + imageFormat.getExtension());
    }

    /** Returns {@link LightStyle#Night} when the sign carries a "night" setting, otherwise {@link LightStyle#Day}. */
    public static LightStyle parseLightStyle(Sign sign) {
        LightStyle style = LightStyle.Day;
        Set<String> settings = extractSettings(sign);
        if (settings.contains("night"))
            style = LightStyle.Night;
        return style;
    }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.formatter.java;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.util.IncorrectOperationException;
/**
 * Is intended to hold specific java formatting tests for alignment settings (
 * <code>Project Settings - Code Style - Alignment and Braces</code>).
 * <p>
 * Each test configures code style settings and compares formatter output against
 * an exact expected string, so whitespace in the literals below is significant.
 *
 * @author Denis Zhdanov
 * @since Apr 27, 2010 6:42:00 PM
 */
public class JavaFormatterAlignmentTest extends AbstractJavaFormatterTest {
    public void testChainedMethodsAlignment() throws Exception {
        // Inspired by IDEA-30369
        getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
        getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED;
        getSettings().getRootSettings().getIndentOptions(StdFileTypes.JAVA).CONTINUATION_INDENT_SIZE = 8;
        doTest();
    }
    public void testMethodAndChainedField() throws Exception {
        // Inspired by IDEA-79806
        getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
        doMethodTest(
            "Holder.INSTANCE\n" +
            "                .foo();",
            "Holder.INSTANCE\n" +
            "        .foo();"
        );
    }
    public void testMultipleMethodAnnotationsCommentedInTheMiddle() throws Exception {
        getSettings().BLANK_LINES_AFTER_CLASS_HEADER = 1;
        getSettings().getRootSettings().getIndentOptions(StdFileTypes.JAVA).INDENT_SIZE = 4;
        // Inspired by IDEA-53942
        doTextTest(
            "public class Test {\n" +
            "          @Override\n" +
            "//       @XmlElement(name = \"Document\", required = true, type = DocumentType.class)\n" +
            "          @XmlTransient\n" +
            "          void foo() {\n" +
            "}\n" +
            "}",
            "public class Test {\n" +
            "\n" +
            "    @Override\n" +
            "//       @XmlElement(name = \"Document\", required = true, type = DocumentType.class)\n" +
            "    @XmlTransient\n" +
            "    void foo() {\n" +
            "    }\n" +
            "}"
        );
    }
    public void testTernaryOperator() throws Exception {
        // Inspired by IDEADEV-13018
        getSettings().ALIGN_MULTILINE_TERNARY_OPERATION = true;
        doMethodTest("int i = a ? x\n" + ": y;", "int i = a ? x\n" + "          : y;");
    }
    public void testMethodCallArgumentsAndSmartTabs() throws IncorrectOperationException {
        // Inspired by IDEADEV-20144.
        getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
        getSettings().getRootSettings().getIndentOptions(StdFileTypes.JAVA).SMART_TABS = true;
        getSettings().getRootSettings().getIndentOptions(StdFileTypes.JAVA).USE_TAB_CHARACTER = true;
        doTextTest("class Foo {\n" +
                   "    void foo() {\n" +
                   "        bar(new Object[] {\n" +
                   "            \"hello1\",\n" +
                   "            \"hello2\", add(\"hello3\",\n" +
                   "                           \"world\")\n" +
                   "});" +
                   "    }}", "class Foo {\n" +
                             "\tvoid foo() {\n" +
                             "\t\tbar(new Object[]{\n" +
                             "\t\t\t\t\"hello1\",\n" +
                             "\t\t\t\t\"hello2\", add(\"hello3\",\n" +
                             "\t\t\t\t              \"world\")\n" +
                             "\t\t});\n" +
                             "\t}\n" +
                             "}");
    }
    public void testArrayInitializer() throws IncorrectOperationException {
        // Inspired by IDEADEV-16136
        getSettings().ARRAY_INITIALIZER_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS;
        getSettings().ALIGN_MULTILINE_ARRAY_INITIALIZER_EXPRESSION = true;
        doTextTest(
            "@SuppressWarnings({\"UseOfSystemOutOrSystemErr\", \"AssignmentToCollectionOrArrayFieldFromParameter\", \"ReturnOfCollectionOrArrayField\"})\n" +
            "public class Some {\n" +
            "}",
            "@SuppressWarnings({\"UseOfSystemOutOrSystemErr\",\n" +
            "                   \"AssignmentToCollectionOrArrayFieldFromParameter\",\n" +
            "                   \"ReturnOfCollectionOrArrayField\"})\n" +
            "public class Some {\n" +
            "}");
    }
    public void testMethodBrackets() throws Exception {
        // Inspired by IDEA-53013
        getSettings().ALIGN_MULTILINE_METHOD_BRACKETS = true;
        getSettings().ALIGN_MULTILINE_PARENTHESIZED_EXPRESSION = false;
        getSettings().ALIGN_MULTILINE_PARAMETERS = true;
        getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
        getSettings().CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = true;
        getSettings().METHOD_PARAMETERS_RPAREN_ON_NEXT_LINE = true;
        doClassTest(
            "public void foo(int i,\n" +
            "                  int j) {\n" +
            "}\n" +
            "\n" +
            "  public void bar() {\n" +
            "    foo(1,\n" +
            "        2);\n" +
            "  }",
            "public void foo(int i,\n" +
            "                int j\n" +
            "               ) {\n" +
            "}\n" +
            "\n" +
            "public void bar() {\n" +
            "    foo(1,\n" +
            "        2\n" +
            "       );\n" +
            "}"
        );
        // Inspired by IDEA-55306
        getSettings().ALIGN_MULTILINE_METHOD_BRACKETS = false;
        getSettings().CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = false;
        String method =
            "executeCommand(new Command<Boolean>() {\n" +
            "    public Boolean run() throws ExecutionException {\n" +
            "        return doInterrupt();\n" +
            "    }\n" +
            "});";
        doMethodTest(method, method);
    }
    public void testFieldInColumnsAlignment() {
        // Inspired by IDEA-55147
        getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
        getSettings().FIELD_ANNOTATION_WRAP = CommonCodeStyleSettings.DO_NOT_WRAP;
        getSettings().VARIABLE_ANNOTATION_WRAP = CommonCodeStyleSettings.DO_NOT_WRAP;
        doTextTest(
            "public class FormattingTest {\n" +
            "\n" +
            "    int start = 1;\n" +
            "    double end = 2;\n" +
            "\n" +
            "    int i2 = 1;\n" +
            "    double dd2,\n" +
            "        dd3 = 2;\n" +
            "\n" +
            "    // asd\n" +
            "    char ccc3 = 'a';\n" +
            "    double ddd31, ddd32 = 1;\n" +
            "\n" +
            "    private\n" +
            "    final String s4 = \"\";\n" +
            "    private\n" +
            "    transient int i4 = 1;\n" +
            "\n" +
            "    private final String s5 = \"xxx\";\n" +
            "    private transient int iiii5 = 1;\n" +
            "    /*sdf*/\n" +
            "    @MyAnnotation(value = 1, text = 2) float f5 = 1;\n" +
            "}",
            "public class FormattingTest {\n" +
            "\n" +
            "    int    start = 1;\n" +
            "    double end   = 2;\n" +
            "\n" +
            "    int    i2 = 1;\n" +
            "    double dd2,\n" +
            "            dd3 = 2;\n" +
            "\n" +
            "    // asd\n" +
            "    char   ccc3 = 'a';\n" +
            "    double ddd31, ddd32 = 1;\n" +
            "\n" +
            "    private\n" +
            "    final String s4 = \"\";\n" +
            "    private\n" +
            "    transient int i4 = 1;\n" +
            "\n" +
            "    private final     String s5    = \"xxx\";\n" +
            "    private transient int    iiii5 = 1;\n" +
            "    /*sdf*/\n" +
            "    @MyAnnotation(value = 1, text = 2) float f5 = 1;\n" +
            "}"
        );
    }
    public void testTabsAndFieldsInColumnsAlignment() throws Exception {
        // Inspired by IDEA-56242
        getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
        getIndentOptions().USE_TAB_CHARACTER = true;
        doTextTest(
            "public class Test {\n" +
            "\tprivate Long field2 = null;\n" +
            "\tprivate final Object field1 = null;\n" +
            "\tprivate int i = 1;\n" +
            "}",
            "public class Test {\n" +
            "\tprivate       Long   field2 = null;\n" +
            "\tprivate final Object field1 = null;\n" +
            "\tprivate       int    i      = 1;\n" +
            "}"
        );
    }
    // Verifies that fields are NOT aligned in columns when the setting is off.
    public void testDoNotAlignIfNotEnabled() {
        getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = false;
        doTextTest(
            "public class Test {\n" +
            "private Long field2 = null;\n" +
            "private final Object field1 = null;\n" +
            "private int i = 1;\n" +
            "}",
            "public class Test {\n" +
            "    private Long field2 = null;\n" +
            "    private final Object field1 = null;\n" +
            "    private int i = 1;\n" +
            "}"
        );
    }
    public void testAnnotatedAndNonAnnotatedFieldsInColumnsAlignment() {
        // Inspired by IDEA-60237
        getSettings().ALIGN_GROUP_FIELD_DECLARATIONS = true;
        doTextTest(
            "public class Test {\n" +
            "    @Id\n" +
            "    private final String name;\n" +
            "    @Column(length = 2 * 1024 * 1024 /* 2 MB */)\n" +
            "    private String value;\n" +
            "    private boolean required;\n" +
            "    private String unsetValue;\n" +
            "}",
            "public class Test {\n" +
            "    @Id\n" +
            "    private final String  name;\n" +
            "    @Column(length = 2 * 1024 * 1024 /* 2 MB */)\n" +
            "    private       String  value;\n" +
            "    private       boolean required;\n" +
            "    private       String  unsetValue;\n" +
            "}"
        );
    }
    public void testAlignThrowsKeyword() throws Exception {
        // Inspired by IDEA-63820
        getSettings().ALIGN_THROWS_KEYWORD = true;
        doClassTest(
            "public void test()\n" +
            "                 throws Exception {}",
            "public void test()\n" +
            "throws Exception {\n" +
            "}"
        );
        getSettings().ALIGN_THROWS_KEYWORD = false;
        doClassTest(
            "public void test()\n" +
            "                 throws Exception {}",
            "public void test()\n" +
            "        throws Exception {\n" +
            "}"
        );
    }
    // Verifies resource-list alignment in try-with-resources, both on and off.
    public void testAlignResourceList() throws Exception {
        getSettings().KEEP_SIMPLE_BLOCKS_IN_ONE_LINE = true;
        getSettings().ALIGN_MULTILINE_RESOURCES = true;
        doMethodTest("try (MyResource r1 = null;\n" +
                     "MyResource r2 = null) { }",
                     "try (MyResource r1 = null;\n" +
                     "     MyResource r2 = null) { }");
        getSettings().ALIGN_MULTILINE_RESOURCES = false;
        doMethodTest("try (MyResource r1 = null;\n" +
                     "MyResource r2 = null) { }",
                     "try (MyResource r1 = null;\n" +
                     "        MyResource r2 = null) { }");
    }
    // Chained calls that follow a chain of field accesses, alignment enabled.
    public void testChainedMethodCallsAfterFieldsChain_WithAlignment() throws Exception {
        getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
        getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS;
        doMethodTest(
            "a.current.current.current.getThis().getThis().getThis();",
            "a.current.current.current.getThis()\n" +
            "                         .getThis()\n" +
            "                         .getThis();"
        );
        doMethodTest(
            "a.current.current.current.getThis().getThis().getThis().current.getThis().getThis().getThis().getThis();",
            "a.current.current.current.getThis()\n" +
            "                         .getThis()\n" +
            "                         .getThis().current.getThis()\n" +
            "                                           .getThis()\n" +
            "                                           .getThis()\n" +
            "                                           .getThis();"
        );
        String onlyMethodCalls = "getThis().getThis().getThis();";
        String formatedMethodCalls = "getThis().getThis()\n" +
                                     "         .getThis();";
        doMethodTest(onlyMethodCalls, formatedMethodCalls);
    }
    // Same scenario as above with alignment disabled: continuation indent instead.
    public void testChainedMethodCallsAfterFieldsChain_WithoutAlignment() throws Exception {
        getSettings().ALIGN_MULTILINE_CHAINED_METHODS = false;
        getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS;
        doMethodTest(
            "a.current.current.current.getThis().getThis().getThis();",
            "a.current.current.current.getThis()\n" +
            "        .getThis()\n" +
            "        .getThis();"
        );
    }
    public void testChainedMethodCalls_WithChopDownIfLongOption() throws Exception {
        getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
        getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ON_EVERY_ITEM; // it's equal to "Chop down if long"
        getSettings().RIGHT_MARGIN = 50;
        String before = "a.current.current.getThis().getThis().getThis().getThis().getThis();";
        doMethodTest(
            before,
            "a.current.current.getThis()\n" +
            "                 .getThis()\n" +
            "                 .getThis()\n" +
            "                 .getThis()\n" +
            "                 .getThis();"
        );
        getSettings().RIGHT_MARGIN = 80;
        doMethodTest(before, before);
    }
    // "Wrap if long" should only wrap at the margin, not at every call.
    public void testChainedMethodCalls_WithWrapIfNeededOption() throws Exception {
        getSettings().ALIGN_MULTILINE_CHAINED_METHODS = false;
        getSettings().METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED;
        getSettings().RIGHT_MARGIN = 50;
        String before = "a.current.current.getThis().getThis().getThis().getThis();";
        doMethodTest(
            before,
            "a.current.current.getThis().getThis()\n" +
            "        .getThis().getThis();"
        );
        getSettings().ALIGN_MULTILINE_CHAINED_METHODS = true;
        doMethodTest(
            before,
            "a.current.current.getThis().getThis()\n" +
            "                 .getThis().getThis();"
        );
        getSettings().RIGHT_MARGIN = 75;
        doMethodTest(before, before);
    }
    // Nested call arguments should align under the first argument.
    public void testAlignMethodCalls_PassedAsParameters_InMethodCall() {
        getSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
        doMethodTest(
            "test(call1(),\n" +
            "             call2(),\n" +
            "                        call3());\n",
            "test(call1(),\n" +
            "     call2(),\n" +
            "     call3());\n"
        );
    }
    // Consecutive local variable declarations aligned in columns.
    public void testLocalVariablesAlignment() {
        getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
        doMethodTest(
            "int a = 2;\n" +
            "String myString = \"my string\"",
            "int    a        = 2;\n" +
            "String myString = \"my string\""
        );
    }
    // Only declaration statements form an alignment group; assignments do not.
    public void testAlignOnlyDeclarationStatements() {
        getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
        doMethodTest(
            "      String s;\n" +
            "   int a = 2;\n" +
            "s = \"abs\";\n" +
            "long stamp = 12;",
            "String s;\n" +
            "int    a = 2;\n" +
            "s = \"abs\";\n" +
            "long stamp = 12;"
        );
    }
    // A blank line terminates the alignment group.
    public void testDoNotAlignWhenBlankLine() {
        getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
        doMethodTest(
            "int a = 2;\n" +
            "\n" +
            "String myString = \"my string\"",
            "int a = 2;\n" +
            "\n" +
            "String myString = \"my string\""
        );
    }
    // Any non-declaration statement in between breaks the alignment group.
    public void testDoNotAlignWhenGroupInterrupted() {
        getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
        doMethodTest(
            "int a = 2;\n" +
            "System.out.println(\"hi!\")\n" +
            "String myString = \"my string\"",
            "int a = 2;\n" +
            "System.out.println(\"hi!\")\n" +
            "String myString = \"my string\""
        );
    }
    // Declarations of multiple variables on one line are excluded from alignment.
    public void testDoNotAlignMultiDeclarations() {
        getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
        doMethodTest(
            "  int a, b = 2;\n" +
            "String myString = \"my string\"",
            "int a, b = 2;\n" +
            "String myString = \"my string\""
        );
    }
    // A declaration with a multi-line initializer (anonymous class) breaks the group.
    public void testDoNotAlignMultilineParams() {
        getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
        doMethodTest(
            "int a = 12;\n" +
            "  Runnable runnable = new Runnable() {\n" +
            "    @Override\n" +
            "    public void run() {\n" +
            "        System.out.println(\"AAA!\");\n" +
            "    }\n" +
            "};",
            "int a = 12;\n" +
            "Runnable runnable = new Runnable() {\n" +
            "    @Override\n" +
            "    public void run() {\n" +
            "        System.out.println(\"AAA!\");\n" +
            "    }\n" +
            "};"
        );
        doMethodTest(
            "   Runnable runnable = new Runnable() {\n" +
            "    @Override\n" +
            "    public void run() {\n" +
            "        System.out.println(\"AAA!\");\n" +
            "    }\n" +
            "};\n" +
            "int c = 12;",
            "Runnable runnable = new Runnable() {\n" +
            "    @Override\n" +
            "    public void run() {\n" +
            "        System.out.println(\"AAA!\");\n" +
            "    }\n" +
            "};\n" +
            "int c = 12;"
        );
        doMethodTest(
            " int ac = 99;\n" +
            "Runnable runnable = new Runnable() {\n" +
            "    @Override\n" +
            "    public void run() {\n" +
            "        System.out.println(\"AAA!\");\n" +
            "    }\n" +
            "};\n" +
            "int c = 12;",
            "int ac = 99;\n" +
            "Runnable runnable = new Runnable() {\n" +
            "    @Override\n" +
            "    public void run() {\n" +
            "        System.out.println(\"AAA!\");\n" +
            "    }\n" +
            "};\n" +
            "int c = 12;"
        );
    }
    // A declaration that itself spans multiple lines does not start a group.
    public void testDoNotAlign_IfFirstMultiline() {
        getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
        doMethodTest(
            "int\n" +
            "       i = 0;\n" +
            "int[] a = new int[]{1, 2, 0x0052, 0x0053, 0x0054};\n" +
            "int var1 = 1;\n" +
            "int var2 = 2;",
            "int\n" +
            "        i = 0;\n" +
            "int[] a    = new int[]{1, 2, 0x0052, 0x0053, 0x0054};\n" +
            "int   var1 = 1;\n" +
            "int   var2 = 2;"
        );
    }
    // Alignment groups also work for declarations inside a method body.
    public void testAlign_InMethod() {
        getSettings().ALIGN_CONSECUTIVE_VARIABLE_DECLARATIONS = true;
        doClassTest(
            "public void run() {\n" +
            "\n" +
            "                int a = 2;\n" +
            "            String superString = \"\";\n" +
            "\n" +
            "     test(call1(), call2(), call3());\n" +
            "    }",
            "public void run() {\n" +
            "\n" +
            "    int    a           = 2;\n" +
            "    String superString = \"\";\n" +
            "\n" +
            "    test(call1(), call2(), call3());\n" +
            "}"
        );
        doClassTest(
            "public void run() {\n" +
            "\n" +
            "        test(call1(), call2(), call3());\n" +
            "\n" +
            "        int index = 2;\n" +
            "        String str = \"\";\n" +
            "}",
            "public void run() {\n" +
            "\n" +
            "    test(call1(), call2(), call3());\n" +
            "\n" +
            "    int    index = 2;\n" +
            "    String str   = \"\";\n" +
            "}");
    }
}
| |
/**
* This class is generated by jOOQ
*/
package edu.kit.ipd.crowdcontrol.workerservice.database.model.tables.records;
import com.google.gson.JsonElement;
import edu.kit.ipd.crowdcontrol.workerservice.database.model.tables.Worker;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record6;
import org.jooq.Row6;
import org.jooq.impl.UpdatableRecordImpl;
/**
 * This class is generated by jOOQ.
 */
// NOTE: generated code — do not edit by hand; changes will be lost the next time
// the jOOQ code generator runs against the crowdcontrol schema. Regenerate instead.
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.7.3"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class WorkerRecord extends UpdatableRecordImpl<WorkerRecord> implements Record6<Integer, JsonElement, String, String, Integer, String> {

    private static final long serialVersionUID = 1646930592;

    /**
     * Setter for <code>crowdcontrol.Worker.id_worker</code>.
     */
    public void setIdWorker(Integer value) {
        setValue(0, value);
    }

    /**
     * Getter for <code>crowdcontrol.Worker.id_worker</code>.
     */
    public Integer getIdWorker() {
        return (Integer) getValue(0);
    }

    /**
     * Setter for <code>crowdcontrol.Worker.platform_data</code>.
     */
    public void setPlatformData(JsonElement value) {
        setValue(1, value);
    }

    /**
     * Getter for <code>crowdcontrol.Worker.platform_data</code>.
     */
    public JsonElement getPlatformData() {
        return (JsonElement) getValue(1);
    }

    /**
     * Setter for <code>crowdcontrol.Worker.platform</code>.
     */
    public void setPlatform(String value) {
        setValue(2, value);
    }

    /**
     * Getter for <code>crowdcontrol.Worker.platform</code>.
     */
    public String getPlatform() {
        return (String) getValue(2);
    }

    /**
     * Setter for <code>crowdcontrol.Worker.email</code>.
     */
    public void setEmail(String value) {
        setValue(3, value);
    }

    /**
     * Getter for <code>crowdcontrol.Worker.email</code>.
     */
    public String getEmail() {
        return (String) getValue(3);
    }

    /**
     * Setter for <code>crowdcontrol.Worker.quality</code>.
     */
    public void setQuality(Integer value) {
        setValue(4, value);
    }

    /**
     * Getter for <code>crowdcontrol.Worker.quality</code>.
     */
    public Integer getQuality() {
        return (Integer) getValue(4);
    }

    /**
     * Setter for <code>crowdcontrol.Worker.identification</code>.
     */
    public void setIdentification(String value) {
        setValue(5, value);
    }

    /**
     * Getter for <code>crowdcontrol.Worker.identification</code>.
     */
    public String getIdentification() {
        return (String) getValue(5);
    }

    // -------------------------------------------------------------------------
    // Primary key information
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Record1<Integer> key() {
        return (Record1) super.key();
    }

    // -------------------------------------------------------------------------
    // Record6 type implementation
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Row6<Integer, JsonElement, String, String, Integer, String> fieldsRow() {
        return (Row6) super.fieldsRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Row6<Integer, JsonElement, String, String, Integer, String> valuesRow() {
        return (Row6) super.valuesRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field1() {
        return Worker.WORKER.ID_WORKER;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<JsonElement> field2() {
        return Worker.WORKER.PLATFORM_DATA;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field3() {
        return Worker.WORKER.PLATFORM;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field4() {
        return Worker.WORKER.EMAIL;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field5() {
        return Worker.WORKER.QUALITY;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field6() {
        return Worker.WORKER.IDENTIFICATION;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value1() {
        return getIdWorker();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public JsonElement value2() {
        return getPlatformData();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value3() {
        return getPlatform();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value4() {
        return getEmail();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value5() {
        return getQuality();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value6() {
        return getIdentification();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public WorkerRecord value1(Integer value) {
        setIdWorker(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public WorkerRecord value2(JsonElement value) {
        setPlatformData(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public WorkerRecord value3(String value) {
        setPlatform(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public WorkerRecord value4(String value) {
        setEmail(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public WorkerRecord value5(Integer value) {
        setQuality(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public WorkerRecord value6(String value) {
        setIdentification(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public WorkerRecord values(Integer value1, JsonElement value2, String value3, String value4, Integer value5, String value6) {
        value1(value1);
        value2(value2);
        value3(value3);
        value4(value4);
        value5(value5);
        value6(value6);
        return this;
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached WorkerRecord
     */
    public WorkerRecord() {
        super(Worker.WORKER);
    }

    /**
     * Create a detached, initialised WorkerRecord
     */
    public WorkerRecord(Integer idWorker, JsonElement platformData, String platform, String email, Integer quality, String identification) {
        super(Worker.WORKER);
        setValue(0, idWorker);
        setValue(1, platformData);
        setValue(2, platform);
        setValue(3, email);
        setValue(4, quality);
        setValue(5, identification);
    }
}
| |
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scray.common.serialization;
import java.lang.reflect.Field;
import java.math.BigInteger;
import java.net.InetSocketAddress;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Locale;
import java.util.Set;
import java.util.UUID;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Registration;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.util.DefaultClassResolver;
import com.esotericsoftware.kryo.util.IntMap;
import com.twitter.chill.java.InetSocketAddressSerializer;
import com.twitter.chill.java.LocaleSerializer;
import com.twitter.chill.java.SimpleDateFormatSerializer;
import com.twitter.chill.java.SqlDateSerializer;
import com.twitter.chill.java.SqlTimeSerializer;
import com.twitter.chill.java.TimestampSerializer;
import com.twitter.chill.java.URISerializer;
import com.twitter.chill.java.UUIDSerializer;
import scray.common.serialization.numbers.KryoRowTypeNumber;
import scray.common.serialization.numbers.KryoSerializerNumber;
/**
* some classes for JAVA-interoperability to prevent importing Scala
* dependencies.
*/
public class JavaKryoRowSerialization {
public static void registerSerializers(Kryo kryo) {
kryo.register(JavaColumn.class, new JavaColumnSerializer(),
KryoSerializerNumber.column.getNumber());
kryo.register(JavaRowColumn.class, new RowColumnSerializer(),
KryoSerializerNumber.rowcolumn.getNumber());
kryo.register(JavaSimpleRow.class, new JavaSimpleRowSerializer(),
KryoSerializerNumber.simplerow.getNumber());
kryo.register(JavaCompositeRow.class, new JavaCompositeRowSerializer(),
KryoSerializerNumber.compositerow.getNumber());
kryo.register(JavaBatchID.class, new BatchIDSerializer(),
KryoSerializerNumber.BatchId.getNumber());
kryo.register(Set.class, new JavaSetSerializer<>(),
KryoSerializerNumber.Set1.getNumber());
kryo.register(Set.class, new JavaSetSerializer<>(),
KryoSerializerNumber.Set2.getNumber());
kryo.register(Set.class, new JavaSetSerializer<>(),
KryoSerializerNumber.Set3.getNumber());
kryo.register(Set.class, new JavaSetSerializer<>(),
KryoSerializerNumber.Set4.getNumber());
kryo.register(Set.class, new JavaSetSerializer<>(),
KryoSerializerNumber.Set.getNumber());
kryo.register(BigInteger.class, new JavaBigIntegerSerializer(),
KryoSerializerNumber.BigInteger.getNumber());
kryo.register(java.sql.Date.class, new SqlDateSerializer(),
KryoSerializerNumber.SqlDate.getNumber());
kryo.register(java.sql.Time.class, new SqlTimeSerializer(),
KryoSerializerNumber.SqlTime.getNumber());
kryo.register(java.sql.Timestamp.class, new TimestampSerializer(),
KryoSerializerNumber.SqlTimestamp.getNumber());
kryo.register(java.net.URI.class, new URISerializer(),
KryoSerializerNumber.URI.getNumber());
kryo.register(UUID.class, new UUIDSerializer(),
KryoSerializerNumber.UUID.getNumber());
kryo.register(InetSocketAddress.class, new InetSocketAddressSerializer(),
KryoSerializerNumber.InetSockerAddress.getNumber());
kryo.register(Locale.class, new LocaleSerializer(),
KryoSerializerNumber.Locale.getNumber());
kryo.register(SimpleDateFormat.class, new SimpleDateFormatSerializer(),
KryoSerializerNumber.SimpleDateFormat.getNumber());
// useful to debug interoperability problems introduced through kryo or chill
// printRegistrations(kryo)
}
/**
* method useful for debugging serialization problems with Kryo-Scala
*
* @see scray-querying/src/main/scala/scray/common/serialization/
* KryoPoolSerialization.scala:ScrayKryoInstantiator.printRegistrations
*/
@SuppressWarnings("unused")
private static void printRegistrations(Kryo kryo) {
try {
Field field1 = Kryo.class.getDeclaredField("classResolver");
field1.setAccessible(true);
DefaultClassResolver result = (DefaultClassResolver) field1.get(kryo);
Field field2 = DefaultClassResolver.class.getDeclaredField("idToRegistration");
field2.setAccessible(true);
@SuppressWarnings("unchecked")
IntMap<Registration> result2 = (IntMap<Registration>) field2.get(result);
Field field3 = IntMap.class.getDeclaredField("keyTable");
field3.setAccessible(true);
int[] result3 = (int[]) field3.get(result2);
for (int i : result3) {
System.out.println("Kryo Java Registration: " + i + " : "
+ result2.get(i).getType().getName());
}
} catch (Exception e) {
System.err.println("Couldn't display registrations because of ");
}
}
/**
* kryo serializer for JavaColumn
*/
public static class JavaColumnSerializer extends Serializer<JavaColumn> {
@Override
public void write(Kryo k, Output o, JavaColumn v) {
o.writeString(v.getDbSystem());
o.writeString(v.getDbId());
o.writeString(v.getTableId());
o.writeString(v.getColumn());
}
@Override
public JavaColumn read(Kryo k, Input i, Class<JavaColumn> type) {
String dbSystem = i.readString();
String dbId = i.readString();
String tableId = i.readString();
String column = i.readString();
return new JavaColumn(dbSystem, dbId, tableId, column);
}
}
/**
* kryo serializer for JavaRowColumn
*/
public static class RowColumnSerializer extends
Serializer<JavaRowColumn<?>> {
@Override
public void write(Kryo k, Output o, JavaRowColumn<?> v) {
k.writeObject(o, v.getColumn());
k.writeClassAndObject(o, v.getValue());
}
@Override
@SuppressWarnings({ "unchecked", "rawtypes" })
public JavaRowColumn<?> read(Kryo k, Input i,
Class<JavaRowColumn<?>> type) {
JavaColumn column = k.readObject(i, JavaColumn.class);
return new JavaRowColumn(column, k.readClassAndObject(i));
}
}
/**
* kryo serializer for JavaSimpleRow
*/
public static class JavaSimpleRowSerializer extends
Serializer<JavaSimpleRow> {
@Override
public void write(Kryo k, Output o, JavaSimpleRow v) {
o.writeShort(v.getColumns().size());
for (JavaRowColumn<?> rowcol : v.getColumns()) {
k.writeObject(o, rowcol);
}
}
@Override
public JavaSimpleRow read(Kryo k, Input i, Class<JavaSimpleRow> type) {
ArrayList<JavaRowColumn<?>> abuf = new ArrayList<JavaRowColumn<?>>();
int number = i.readShort();
for (int j = 0; j < number; j++) {
abuf.add(k.readObject(i, JavaRowColumn.class));
}
return new JavaSimpleRow(abuf);
}
}
/**
* kryo serializer for JavaSimpleRow
*/
public static class JavaCompositeRowSerializer extends
Serializer<JavaCompositeRow> {
@Override
public void write(Kryo k, Output o, JavaCompositeRow v) {
o.writeShort(v.getRows().size());
for (JavaRow rowcol : v.getRows()) {
if (rowcol instanceof JavaSimpleRow) {
o.writeByte(KryoRowTypeNumber.simplerow.getNumber());
k.writeObject(o, (JavaSimpleRow) rowcol);
}
if (rowcol instanceof JavaCompositeRow) {
o.writeByte(KryoRowTypeNumber.compositerow.getNumber());
k.writeObject(o, (JavaCompositeRow) rowcol);
}
}
}
@Override
public JavaCompositeRow read(Kryo k, Input i,
Class<JavaCompositeRow> type) {
ArrayList<JavaRow> abuf = new ArrayList<JavaRow>();
int number = i.readShort();
for (int j = 0; j < number; j++) {
int typ = i.readByte();
if (typ == KryoRowTypeNumber.simplerow.getNumber()) {
abuf.add(k.readObject(i, JavaSimpleRow.class));
}
if (typ == KryoRowTypeNumber.compositerow.getNumber()) {
abuf.add(k.readObject(i, JavaCompositeRow.class));
}
}
return new JavaCompositeRow(abuf);
}
}
/**
* kryo serializer for BigIntegers
*/
public static class JavaBigIntegerSerializer extends Serializer<BigInteger> {
@Override
public void write(Kryo k, Output o, BigInteger bi) {
o.writeString(bi.toString());
}
@Override
public BigInteger read(Kryo k, Input i, Class<BigInteger> type) {
return new BigInteger(i.readString());
}
}
}
| |
package com.github.pixelrunstudios.ChemHelper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Ideal gas law (PV = nRT) and combined gas law calculator with unit
 * conversion support.  All pressures are normalised internally to kPa,
 * volumes to L and temperatures to K before any computation.
 */
public class IdealGas {

	/**
	 * Unit conversion table: each entry holds the amount that one base unit
	 * (kPa for pressures, L for volumes) corresponds to in the keyed unit,
	 * so {@link #convert(double, String, String)} can use the ratio
	 * factor(target) / factor(source).
	 */
	public static final Map<String, Double> conversion = new HashMap<String, Double>();

	// pressure — value of 1 kPa expressed in each unit
	public static final double kPa = 1;
	public static final double Pa = 1000;
	public static final double atm = 0.0098692;
	public static final double mmHg = 7.5006;
	public static final double torr = 7.500592;
	public static final double psi = 0.1450377;
	public static final double inHg = 0.2952998;
	public static final double bar = 0.01;

	// volume — value of 1 L expressed in each unit
	public static final double L = 1;
	public static final double mL = 1000;
	public static final double m3 = 0.001;
	public static final double floz = 35.1950797;
	public static final double qt = 0.879876992;
	public static final double gal = 0.219969248;
	public static final double ft3 = 0.0353147;

	// ideal gas constant in (kPa·L)/(mol·K)
	public static final double R = 8.3144621;

	// Temperature is handled explicitly in convert() rather than by factor:
	// C -> K = +273.15              K -> C = -273.15
	// F -> K = (x + 459.67) * 5/9.0 K -> F = (x * 9.0/5) - 459.67

	static{
		conversion.put("kPa", kPa);
		conversion.put("Pa", Pa);
		conversion.put("atm", atm);
		conversion.put("mmHg", mmHg);
		conversion.put("torr", torr);
		conversion.put("psi", psi);
		conversion.put("inHg", inHg);
		conversion.put("bar", bar);
		conversion.put("L", L);
		conversion.put("mL", mL);
		conversion.put("m3", m3);
		conversion.put("floz", floz);
		conversion.put("qt", qt);
		conversion.put("gal", gal);
		conversion.put("ft3", ft3);
	}

	/**
	 * Solves for missing quantities of one or two gas states.  The eight
	 * logical inputs are state one's pressure, volume, moles and temperature
	 * followed by state two's, each given as a value string ("" when unknown)
	 * plus its unit string (moles carry no unit parameter).
	 *
	 * Two solvable situations are handled: three of the four values of state
	 * one present (plain PV = nRT), or two values in state one and one in
	 * state two sharing a field (combined gas law via {@link #two}).
	 *
	 * @return eight entries aligned with the inputs, expressed in the input's
	 *         own units; positions that were not computed remain null.
	 */
	public static List<Double> calc(String p1S, String p1US, String v1S, String v1US, String n1S, String t1S, String t1US,
			String p2S, String p2US, String v2S, String v2US, String n2S, String t2S, String t2US){
		// indices 0..3 = P, V, n, T of state one; 4..7 = the same for state two
		ArrayList<Pair<Boolean, Pair<Double, String>>> input = new ArrayList<Pair<Boolean, Pair<Double, String>>>(8);
		input.add(isFilled(p1S, p1US));
		input.add(isFilled(v1S, v1US));
		input.add(isFilled(n1S, "mol"));
		input.add(isFilled(t1S, t1US));
		input.add(isFilled(p2S, p2US));
		input.add(isFilled(v2S, v2US));
		input.add(isFilled(n2S, "mol"));
		input.add(isFilled(t2S, t2US));
		int firstFilled = 0;
		int secondFilled = 0;
		int pCount = 0;
		// raw given values per state, keyed 0..3
		Map<Integer, Pair<Double, String>> first = new HashMap<Integer, Pair<Double, String>>();
		// the same values normalised to kPa / L / mol / K
		Map<Integer, Pair<Double, String>> firstAfter = new HashMap<Integer, Pair<Double, String>>();
		Map<Integer, Pair<Double, String>> secondAfter = new HashMap<Integer, Pair<Double, String>>();
		Map<Integer, Pair<Double, String>> second = new HashMap<Integer, Pair<Double, String>>();
		// given values plus the solved entry, converted back to input units
		Map<Integer, Pair<Double, String>> secondAfterAfter = new HashMap<Integer, Pair<Double, String>>();
		Map<Integer, Pair<Double, String>> firstAfterAfter = new HashMap<Integer, Pair<Double, String>>();
		for(Pair<Boolean, Pair<Double, String>> p : input){
			if(pCount < 4 && p.getValueOne()){
				firstFilled++;
				first.put(pCount, p.getValueTwo());
			}
			else if(pCount >= 4 && p.getValueOne()){
				secondFilled++;
				second.put(pCount-4, p.getValueTwo());
			}
			pCount++;
		}
		for(Map.Entry<Integer, Pair<Double, String>> p : first.entrySet()){
			if(p.getKey() == 0){
				firstAfter.put(p.getKey(), Pair.make(convert(p.getValue().getValueOne(), p.getValue().getValueTwo(), "kPa"), "kPa"));
			}
			if(p.getKey() == 1){
				firstAfter.put(p.getKey(), Pair.make(convert(p.getValue().getValueOne(), p.getValue().getValueTwo(), "L"), "L"));
			}
			if(p.getKey() == 2){
				// moles need no conversion
				firstAfter.put(p.getKey(), p.getValue());
			}
			if(p.getKey() == 3){
				firstAfter.put(p.getKey(), Pair.make(convert(p.getValue().getValueOne(), p.getValue().getValueTwo(), "K"), "K"));
			}
		}
		for(Map.Entry<Integer, Pair<Double, String>> p : second.entrySet()){
			if(p.getKey() == 0){
				secondAfter.put(p.getKey(), Pair.make(convert(p.getValue().getValueOne(), p.getValue().getValueTwo(), "kPa"), "kPa"));
			}
			if(p.getKey() == 1){
				secondAfter.put(p.getKey(), Pair.make(convert(p.getValue().getValueOne(), p.getValue().getValueTwo(), "L"), "L"));
			}
			if(p.getKey() == 2){
				secondAfter.put(p.getKey(), p.getValue());
			}
			if(p.getKey() == 3){
				secondAfter.put(p.getKey(), Pair.make(convert(p.getValue().getValueOne(), p.getValue().getValueTwo(), "K"), "K"));
			}
		}
		double lastVal = 0;
		if(firstFilled == 3){
			// plain ideal gas law: solve for the single missing field of state one
			int missing = -1;
			for(int i = 0; i <= 3; i++){
				if(!firstAfter.containsKey(i)){
					missing = i;
				}
			}
			lastVal = all4(firstAfter, missing);
			// convert the solved value back into the unit the caller supplied
			if(missing == 0){
				lastVal = convert(lastVal, "kPa", input.get(0).getValueTwo().getValueTwo());
			}
			if(missing == 1){
				lastVal = convert(lastVal, "L", input.get(1).getValueTwo().getValueTwo());
			}
			if(missing == 3){
				lastVal = convert(lastVal, "K", input.get(3).getValueTwo().getValueTwo());
			}
			firstAfterAfter.putAll(first);
			firstAfterAfter.put(missing, Pair.make(lastVal, "std"));
		}
		if(firstFilled == 2 && secondFilled == 1){
			// combined gas law: both states share one field, state one also has
			// a second field — solve that second field for state two
			if(check(firstAfter, secondAfter).getValueOne()){
				int sameField = check(firstAfter, secondAfter).getValueTwo().getValueOne();
				int diffField = check(firstAfter, secondAfter).getValueTwo().getValueTwo();
				double middle = two(sameField, firstAfter.get(sameField).getValueOne(), secondAfter.get(sameField).getValueOne(), diffField, firstAfter.get(diffField).getValueOne());
				double end = 0.0;
				if(diffField == 0){
					end = convert(middle, "kPa", input.get(4).getValueTwo().getValueTwo());
				}
				else if(diffField == 1){
					end = convert(middle, "L", input.get(5).getValueTwo().getValueTwo());
				}
				else if(diffField == 2){
					end = middle;
				}
				else if(diffField == 3){
					end = convert(middle, "K", input.get(7).getValueTwo().getValueTwo());
				}
				secondAfterAfter.putAll(second);
				secondAfterAfter.put(diffField, Pair.make(end, "std"));
			}
		}
		Double[] total = new Double[8];
		for(Map.Entry<Integer, Pair<Double, String>> p : firstAfterAfter.entrySet()){
			total[p.getKey()] = p.getValue().getValueOne();
		}
		for(Map.Entry<Integer, Pair<Double, String>> p : secondAfterAfter.entrySet()){
			total[p.getKey() + 4] = p.getValue().getValueOne();
		}
		return Arrays.asList(total);
	}

	/**
	 * Parses one input field.
	 *
	 * @param s the value as text, "" when the field was left blank
	 * @param u the unit the value is expressed in
	 * @return (filled?, (numeric value or 0.0, unit))
	 */
	public static Pair<Boolean, Pair<Double, String>> isFilled(String s, String u){
		if(!s.equals("")){
			return Pair.make(true, Pair.make(Double.parseDouble(s), u));
		}else{
			return Pair.make(false, Pair.make(0.0,u));
		}
	}

	/**
	 * Solves PV = nRT for the missing field given the other three normalised
	 * values (kPa, L, mol, K).
	 *
	 * @param first   map keyed 0..3 = P, V, n, T with the missing key absent
	 * @param missing index of the field to solve for
	 * @return the solved value in base units
	 */
	public static double all4(Map<Integer, Pair<Double,String>> first, int missing){
		double val = 0;
		if(missing == 0){
			val = 1.0/first.get(1).getValueOne()*R*first.get(2).getValueOne()*first.get(3).getValueOne();
		}
		else if(missing == 1){
			val = 1.0/first.get(0).getValueOne()*R*first.get(2).getValueOne()*first.get(3).getValueOne();
		}
		else if(missing == 2){
			val = 1.0*first.get(0).getValueOne()*first.get(1).getValueOne()/R/first.get(3).getValueOne();
		}
		else if(missing == 3){
			val = 1.0*first.get(0).getValueOne()*first.get(1).getValueOne()/first.get(2).getValueOne()/R;
		}
		return val;
	}

	/**
	 * Combined-law proportionality step: given a field shared by both states
	 * and a second field known only for state one, computes that second field
	 * for state two.  Fields 2 and 3 (moles, temperature) sit on the opposite
	 * side of the gas-law fraction, hence the reciprocal handling.
	 *
	 * @param sameField  index (0..3) of the field present in both states
	 * @param field1SVal shared field's value in state one
	 * @param field2SVal shared field's value in state two
	 * @param diffField  index (0..3) of the field to compute for state two
	 * @param field1DVal that field's value in state one
	 * @return the computed value for state two, in base units
	 */
	public static double two(int sameField, double field1SVal, double field2SVal, int diffField, double field1DVal){
		double a = field1SVal;
		double b = field1DVal;
		double c = field2SVal;
		if(sameField == 2 || sameField == 3){
			a = 1.0/a;
			c = 1.0/c;
		}
		if(diffField == 2 || diffField == 3){
			b = 1.0/b;
		}
		double d = a*b*1.0/c;
		if(diffField == 2 || diffField == 3){
			d = 1.0/d;
		}
		return d;
	}

	/**
	 * Converts a value between two units.  Pressure and volume conversions go
	 * through the {@link #conversion} factor table; temperature conversions
	 * to or from K are handled explicitly.
	 *
	 * @param n      the value to convert
	 * @param first  the source unit
	 * @param second the target unit
	 * @return the converted value, or -1 for an unsupported temperature pair
	 */
	public static double convert(double n, String first, String second){
		if(!first.equals("K") && !second.equals("K")){
			return n*(conversion.get(second)/conversion.get(first));
		}
		else if(first.equals("C") && second.equals("K")){
			return n+273.15;
		}else if(first.equals("K") && second.equals("C")){
			return n-273.15;
		}else if(first.equals("K") && second.equals("K")){
			return n;
		}else if(first.equals("F") && second.equals("K")){
			return (n + 459.67) * (5.0/9);
		}else if(first.equals("K") && second.equals("F")){
			return n * 9.0/5 - 459.67;
		}else{
			return -1;
		}
	}

	/**
	 * Determines whether the two states share exactly one filled field.
	 *
	 * @param first  normalised filled fields of state one (expects two entries)
	 * @param second normalised filled fields of state two (expects one entry)
	 * @return (match?, (shared field index, state one's other field index)),
	 *         or (false, (-1, -1)) when no field is shared
	 */
	public static Pair<Boolean, Pair<Integer,Integer>> check(Map<Integer, Pair<Double,String>> first, Map<Integer, Pair<Double,String>> second){
		int firstVal = -1;
		int secondVal = -1;
		int counter = 0;
		for(Map.Entry<Integer, Pair<Double,String>> f : first.entrySet()){
			if(counter == 0){
				firstVal = f.getKey();
			}
			if(counter == 1){
				secondVal = f.getKey();
			}
			counter++;
		}
		for(Map.Entry<Integer, Pair<Double,String>> s : second.entrySet()){
			if(s.getKey() == firstVal){
				return Pair.make(true, Pair.make(s.getKey(), secondVal));
			}
			else if(s.getKey() == secondVal){
				return Pair.make(true, Pair.make(s.getKey(), firstVal));
			}
		}
		return Pair.make(false, Pair.make(-1,-1));
	}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model;
import java.io.Serializable;
/**
* <p>
* Provides details of the <code>RequestCancelActivityTaskFailed</code>
* event.
* </p>
*/
public class RequestCancelActivityTaskFailedEventAttributes implements Serializable, Cloneable {
/**
* The activityId provided in the <code>RequestCancelActivityTask</code>
* decision that failed.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 256<br/>
*/
private String activityId;
/**
* The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>ACTIVITY_ID_UNKNOWN, OPERATION_NOT_PERMITTED
*/
private String cause;
/**
* The ID of the <code>DecisionTaskCompleted</code> event corresponding
* to the decision task that resulted in the
* <code>RequestCancelActivityTask</code> decision for this cancellation
* request. This information can be useful for diagnosing problems by
* tracing back the chain of events leading up to this event.
*/
private Long decisionTaskCompletedEventId;
/**
* The activityId provided in the <code>RequestCancelActivityTask</code>
* decision that failed.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 256<br/>
*
* @return The activityId provided in the <code>RequestCancelActivityTask</code>
* decision that failed.
*/
public String getActivityId() {
return activityId;
}
/**
* The activityId provided in the <code>RequestCancelActivityTask</code>
* decision that failed.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 256<br/>
*
* @param activityId The activityId provided in the <code>RequestCancelActivityTask</code>
* decision that failed.
*/
public void setActivityId(String activityId) {
this.activityId = activityId;
}
/**
* The activityId provided in the <code>RequestCancelActivityTask</code>
* decision that failed.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 256<br/>
*
* @param activityId The activityId provided in the <code>RequestCancelActivityTask</code>
* decision that failed.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public RequestCancelActivityTaskFailedEventAttributes withActivityId(String activityId) {
this.activityId = activityId;
return this;
}
/**
* The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>ACTIVITY_ID_UNKNOWN, OPERATION_NOT_PERMITTED
*
* @return The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
*
* @see RequestCancelActivityTaskFailedCause
*/
public String getCause() {
return cause;
}
/**
* The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>ACTIVITY_ID_UNKNOWN, OPERATION_NOT_PERMITTED
*
* @param cause The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
*
* @see RequestCancelActivityTaskFailedCause
*/
public void setCause(String cause) {
this.cause = cause;
}
/**
* The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>ACTIVITY_ID_UNKNOWN, OPERATION_NOT_PERMITTED
*
* @param cause The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see RequestCancelActivityTaskFailedCause
*/
public RequestCancelActivityTaskFailedEventAttributes withCause(String cause) {
this.cause = cause;
return this;
}
/**
* The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>ACTIVITY_ID_UNKNOWN, OPERATION_NOT_PERMITTED
*
* @param cause The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
*
* @see RequestCancelActivityTaskFailedCause
*/
public void setCause(RequestCancelActivityTaskFailedCause cause) {
this.cause = cause.toString();
}
/**
* The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>ACTIVITY_ID_UNKNOWN, OPERATION_NOT_PERMITTED
*
* @param cause The cause of the failure. This information is generated by the system
* and can be useful for diagnostic purposes. <note>If <b>cause</b> is
* set to OPERATION_NOT_PERMITTED, the decision failed because it lacked
* sufficient permissions. For details and example IAM policies, see <a
* href="http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using
* IAM to Manage Access to Amazon SWF Workflows</a>.</note>
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see RequestCancelActivityTaskFailedCause
*/
public RequestCancelActivityTaskFailedEventAttributes withCause(RequestCancelActivityTaskFailedCause cause) {
this.cause = cause.toString();
return this;
}
/**
* The ID of the <code>DecisionTaskCompleted</code> event corresponding
* to the decision task that resulted in the
* <code>RequestCancelActivityTask</code> decision for this cancellation
* request. This information can be useful for diagnosing problems by
* tracing back the chain of events leading up to this event.
*
* @return The ID of the <code>DecisionTaskCompleted</code> event corresponding
* to the decision task that resulted in the
* <code>RequestCancelActivityTask</code> decision for this cancellation
* request. This information can be useful for diagnosing problems by
* tracing back the chain of events leading up to this event.
*/
public Long getDecisionTaskCompletedEventId() {
return decisionTaskCompletedEventId;
}
/**
* The ID of the <code>DecisionTaskCompleted</code> event corresponding
* to the decision task that resulted in the
* <code>RequestCancelActivityTask</code> decision for this cancellation
* request. This information can be useful for diagnosing problems by
* tracing back the chain of events leading up to this event.
*
* @param decisionTaskCompletedEventId The ID of the <code>DecisionTaskCompleted</code> event corresponding
* to the decision task that resulted in the
* <code>RequestCancelActivityTask</code> decision for this cancellation
* request. This information can be useful for diagnosing problems by
* tracing back the chain of events leading up to this event.
*/
public void setDecisionTaskCompletedEventId(Long decisionTaskCompletedEventId) {
this.decisionTaskCompletedEventId = decisionTaskCompletedEventId;
}
/**
* The ID of the <code>DecisionTaskCompleted</code> event corresponding
* to the decision task that resulted in the
* <code>RequestCancelActivityTask</code> decision for this cancellation
* request. This information can be useful for diagnosing problems by
* tracing back the chain of events leading up to this event.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param decisionTaskCompletedEventId The ID of the <code>DecisionTaskCompleted</code> event corresponding
* to the decision task that resulted in the
* <code>RequestCancelActivityTask</code> decision for this cancellation
* request. This information can be useful for diagnosing problems by
* tracing back the chain of events leading up to this event.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public RequestCancelActivityTaskFailedEventAttributes withDecisionTaskCompletedEventId(Long decisionTaskCompletedEventId) {
this.decisionTaskCompletedEventId = decisionTaskCompletedEventId;
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getActivityId() != null) sb.append("ActivityId: " + getActivityId() + ",");
if (getCause() != null) sb.append("Cause: " + getCause() + ",");
if (getDecisionTaskCompletedEventId() != null) sb.append("DecisionTaskCompletedEventId: " + getDecisionTaskCompletedEventId() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getActivityId() == null) ? 0 : getActivityId().hashCode());
hashCode = prime * hashCode + ((getCause() == null) ? 0 : getCause().hashCode());
hashCode = prime * hashCode + ((getDecisionTaskCompletedEventId() == null) ? 0 : getDecisionTaskCompletedEventId().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof RequestCancelActivityTaskFailedEventAttributes == false) return false;
RequestCancelActivityTaskFailedEventAttributes other = (RequestCancelActivityTaskFailedEventAttributes)obj;
if (other.getActivityId() == null ^ this.getActivityId() == null) return false;
if (other.getActivityId() != null && other.getActivityId().equals(this.getActivityId()) == false) return false;
if (other.getCause() == null ^ this.getCause() == null) return false;
if (other.getCause() != null && other.getCause().equals(this.getCause()) == false) return false;
if (other.getDecisionTaskCompletedEventId() == null ^ this.getDecisionTaskCompletedEventId() == null) return false;
if (other.getDecisionTaskCompletedEventId() != null && other.getDecisionTaskCompletedEventId().equals(this.getDecisionTaskCompletedEventId()) == false) return false;
return true;
}
@Override
public RequestCancelActivityTaskFailedEventAttributes clone() {
try {
return (RequestCancelActivityTaskFailedEventAttributes) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!",
e);
}
}
}
| |
package org.holmes;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import org.holmes.evaluator.BooleanEvaluator;
import org.holmes.evaluator.CollectionEvaluator;
import org.holmes.evaluator.DateEvaluator;
import org.holmes.evaluator.NumberEvaluator;
import org.holmes.evaluator.ObjectEvaluator;
import org.holmes.evaluator.StringEvaluator;
import org.holmes.exception.RuleViolationException;
import org.holmes.exception.ValidationException;
import org.holmes.resolver.SimpleMessageResolver;
/**
* The main class of the framework.
*
* @author diegossilveira
*/
public class HolmesEngine {
private final List<Rule> rules;
private final ResultCollector collector;
private MessageResolver messageResolver;
private String defaultViolationDescriptor;
private HolmesEngine(ResultCollector collector) {
rules = new ArrayList<Rule>();
this.messageResolver = new SimpleMessageResolver();
this.collector = collector;
}
/**
* Initializes the engine with GREEDY {@link Op2erationMode}.
*
* @return initialized engine instance.
*/
public static HolmesEngine init() {
return new HolmesEngine(OperationMode.GREEDY.getResultCollector());
}
/**
* Initializes the engine with the given {@link OperationMode}.
*
* @param mode
* the {@link OperationMode}
* @return initialized engine instance.
*/
public static HolmesEngine init(OperationMode mode) {
return new HolmesEngine(mode.getResultCollector());
}
/**
* Initializes the engine with the given {@link ResultCollector}.
*
* @param collector
* @return initialized engine instance.
*/
public static HolmesEngine init(ResultCollector collector) {
return new HolmesEngine(collector);
}
/**
* Creates a new {@link Rule} for a {@link Boolean} target type.
*
* @param bool
* the target
* @return an appropriated {@link Evaluator} for the given target type.
*/
public BooleanEvaluator ensureThat(final Boolean bool) {
return configure(new BooleanEvaluator(bool));
}
/**
* Creates a new {@link Rule} for a {@link String} target type.
*
* @param string
* the target
* @return an appropriated {@link Evaluator} for the given target type.
*/
public StringEvaluator ensureThat(final String string) {
return configure(new StringEvaluator(string));
}
/**
* Creates a new {@link Rule} for a {@link Collection} target type.
*
* @param collection
* the target
* @return an appropriated {@link Evaluator} for the given target type.
*/
public <E> CollectionEvaluator<E> ensureThat(final Collection<E> collection) {
return configure(new CollectionEvaluator<E>(collection));
}
/**
* Creates a new {@link Rule} for a {@link Number} target type.
*
* @param number
* the target
* @return an appropriated {@link Evaluator} for the given target type.
*/
public NumberEvaluator ensureThat(final Number number) {
return configure(new NumberEvaluator(number));
}
/**
* Creates a new {@link Rule} for a {@link Date} target type.
*
* @param date
* the target
* @return an appropriated {@link Evaluator} for the given target type.
*/
public DateEvaluator ensureThat(final Date date) {
return configure(new DateEvaluator(date));
}
/**
 * Starts a new {@link Rule} that evaluates a generic {@link Object} target.
 *
 * @param object
 *            the target value under evaluation.
 * @return an appropriated {@link Evaluator} for the given target type.
 */
public <T> ObjectEvaluator<T> ensureThat(final T object) {
    final ObjectEvaluator<T> evaluator = new ObjectEvaluator<T>(object);
    return configure(evaluator);
}
/**
 * Runs all evaluation rules on the context.
 *
 * @return the result of the validation process.
 * @throws ValidationException
 *             presumably propagated when the collector finishes the result
 *             (e.g. a fail-fast collector) — TODO confirm against collector
 *             implementations.
 */
public ValidationResult run() throws ValidationException {
ValidationResult result = ValidationResult.init(messageResolver);
// Evaluate every registered rule; individual violations are handed to the
// collector inside evaluateRule and do not abort the loop.
for (Rule rule : rules) {
evaluateRule(rule, result);
}
// Give the collector a chance to finalize/aggregate once all rules ran.
collector.finish(result);
return result;
}
/**
 * Specifies a default violation descriptor used to represent violations to
 * rules which do not specify its own descriptor.
 *
 * @param defaultViolationDescriptor
 *            the violation descriptor.
 * @return this engine instance, for fluent chaining.
 */
public HolmesEngine withDefaultDescriptor(String defaultViolationDescriptor) {
this.defaultViolationDescriptor = defaultViolationDescriptor;
return this;
}
/**
 * Specifies an instance of {@link MessageResolver} used to process violation descriptors.
 *
 * @param messageResolver
 *            an instance of {@link MessageResolver}.
 * @return this engine instance, for fluent chaining.
 */
public HolmesEngine withMessageResolver(MessageResolver messageResolver) {
this.messageResolver = messageResolver;
return this;
}
/**
 * Wraps the evaluator in a simple {@link Rule}, registers that rule for the
 * next {@link #run()}, and wires the evaluator back to its rule through a
 * {@link Joint} so fluent calls on the evaluator affect the rule.
 *
 * @param evaluator the freshly created evaluator to register.
 * @return the same evaluator, for fluent chaining by the caller.
 */
private <E extends Evaluator<?>> E configure(E evaluator) {
Rule rule = Rule.simpleFor(evaluator);
evaluator.setJoint(new Joint(rule));
rules.add(rule);
return evaluator;
}
/**
 * Evaluates a single rule, falling back to the engine-wide default
 * violation descriptor when the rule does not define its own. A rule
 * violation is reported to the collector instead of being propagated, so
 * one failing rule does not stop the remaining rules from running.
 */
private void evaluateRule(Rule rule, ValidationResult result) {
try {
if(!rule.hasViolationDescriptor()) {
rule.setViolationDescriptor(defaultViolationDescriptor);
}
rule.evaluate();
} catch (RuleViolationException e) {
collector.onRuleViolation(e, result);
}
}
// these methods are package-private and used only by unit tests.
/** @return the collector in use (for unit tests). */
ResultCollector getCollector() {
return collector;
}
/** @return the message resolver in use (for unit tests). */
MessageResolver getMessageResolver() {
return messageResolver;
}
/** @return the engine-wide default violation descriptor (for unit tests). */
String getDefaultViolationDescriptor() {
return defaultViolationDescriptor;
}
}
| |
/*
* Copyright 2012 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package voldemort.utils.pool;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import org.apache.log4j.Logger;
import voldemort.store.UnreachableStoreException;
/**
 * Extends simple implementation of a per-key resource pool with a non-blocking
 * interface to enqueue requests for a resource when one becomes available. <br>
 * <ul>
 * <li>Allocates resources in FIFOish order: blocking requests via checkout are
 * FIFO and non-blocking enqueued requests are FIFO, however, there is no
 * ordering between blocking (checkout) and non-blocking
 * (registerResourceRequest).
 * <li>Pools and Queues are per key and there is no global maximum pool or queue
 * limit.
 * </ul>
 *
 * Beyond the expectations documented in KeyedResourcePool, the following is
 * expected of the user of this class:
 * <ul>
 * <li>A resource acquired via #checkout(K) or via registerResourceRequest(K,
 * AsyncResourceRequest) is checked in exactly once.
 * <li>A resource that is checked in was previously checked out or requested.
 * <li>Also, registerResourceRequest is never called after close.
 * </ul>
 */
public class QueuedKeyedResourcePool<K, V> extends KeyedResourcePool<K, V> {

    private static final Logger logger = Logger.getLogger(QueuedKeyedResourcePool.class.getName());

    // Per-key FIFO queues of outstanding asynchronous resource requests.
    private final ConcurrentMap<K, Queue<AsyncResourceRequest<V>>> requestQueueMap;

    public QueuedKeyedResourcePool(ResourceFactory<K, V> objectFactory, ResourcePoolConfig config) {
        super(objectFactory, config);
        requestQueueMap = new ConcurrentHashMap<K, Queue<AsyncResourceRequest<V>>>();
    }

    /**
     * Create a new queued pool with key type K, request type R, and value type
     * V.
     *
     * @param factory The factory that creates objects
     * @param config The pool config
     * @return The created pool
     */
    public static <K, V> QueuedKeyedResourcePool<K, V> create(ResourceFactory<K, V> factory,
                                                              ResourcePoolConfig config) {
        return new QueuedKeyedResourcePool<K, V>(factory, config);
    }

    /**
     * Create a new queued pool using the defaults for key of type K, request of
     * type R, and value of Type V.
     *
     * @param factory The factory that creates objects
     * @return The created pool
     */
    public static <K, V> QueuedKeyedResourcePool<K, V> create(ResourceFactory<K, V> factory) {
        return create(factory, new ResourcePoolConfig());
    }

    /**
     * This method is the asynchronous (nonblocking) version of
     * KeyedResourcePool.checkout. This method necessarily has a different
     * function declaration (i.e., arguments passed and return type).
     *
     * This method either checks out a resource and uses that resource or
     * enqueues a request to checkout the resource. I.e., there is a
     * non-blocking fast-path that is tried optimistically.
     *
     * @param key The key to checkout the resource for
     * @param resourceRequest the callback invoked with the resource, an
     *        exception, or a timeout notification.
     */
    public void registerResourceRequest(K key, AsyncResourceRequest<V> resourceRequest) {
        checkNotClosed();
        Queue<AsyncResourceRequest<V>> requestQueue = getRequestQueueForKey(key);
        if(requestQueue.isEmpty()) {
            // Attempt non-blocking checkout iff requestQueue is empty.
            Pool<V> resourcePool = getResourcePoolForKey(key);
            V resource = null;
            try {
                resource = attemptNonBlockingCheckout(key, resourcePool);
            } catch(Exception e) {
                destroyResource(key, resourcePool, resource);
                resource = null;
                resourceRequest.handleException(e);
                return;
            }
            if(resource != null) {
                resourceRequest.useResource(resource);
                return;
            }
        }
        requestQueue.add(resourceRequest);
        // Guard against (potential) races with checkin by invoking
        // processQueueLoop after resource request has been added to the
        // asynchronous queue.
        processQueueLoop(key);
    }

    /**
     * Used only for unit testing. Please do not use this method in other ways.
     *
     * @param key the key to attempt a non-blocking checkout for.
     * @return the checked-out resource, or null if none was available.
     * @throws Exception propagated from the underlying checkout attempt.
     */
    public V internalNonBlockingGet(K key) throws Exception {
        Pool<V> resourcePool = getResourcePoolForKey(key);
        return attemptNonBlockingCheckout(key, resourcePool);
    }

    /**
     * Pops resource requests off the queue until queue is empty or an unexpired
     * resource request is found. Invokes .handleTimeout on all expired resource
     * requests popped off the queue.
     *
     * @return null or a valid ResourceRequest
     */
    private AsyncResourceRequest<V> getNextUnexpiredResourceRequest(Queue<AsyncResourceRequest<V>> requestQueue) {
        AsyncResourceRequest<V> resourceRequest = requestQueue.poll();
        while(resourceRequest != null) {
            // Overflow-safe deadline check: System.nanoTime() values may wrap,
            // so the javadoc-mandated idiom is to compare the *difference*
            // against zero rather than compare the raw values directly.
            if(System.nanoTime() - resourceRequest.getDeadlineNs() > 0) {
                resourceRequest.handleTimeout();
                resourceRequest = requestQueue.poll();
            } else {
                break;
            }
        }
        return resourceRequest;
    }

    /**
     * Attempts to checkout a resource so that one queued request can be
     * serviced.
     *
     * @param key The key for which to process the requestQueue
     * @return true iff an item was processed from the Queue.
     */
    private boolean processQueue(K key) {
        Queue<AsyncResourceRequest<V>> requestQueue = getRequestQueueForKey(key);
        if(requestQueue.isEmpty()) {
            return false;
        }
        // Attempt to get a resource.
        Pool<V> resourcePool = getResourcePoolForKey(key);
        V resource = null;
        Exception ex = null;
        try {
            // Must attempt non-blocking checkout to ensure resources are
            // created for the pool.
            resource = attemptNonBlockingCheckout(key, resourcePool);
        } catch(Exception e) {
            destroyResource(key, resourcePool, resource);
            ex = e;
            resource = null;
        }
        // Neither we got a resource, nor an exception. So no requests can be
        // processed return
        if(resource == null && ex == null) {
            return false;
        }
        // With resource in hand, process the resource requests
        AsyncResourceRequest<V> resourceRequest = getNextUnexpiredResourceRequest(requestQueue);
        if(resourceRequest == null) {
            if(resource != null) {
                // Did not use the resource! Directly check in via super to
                // avoid circular call to processQueue().
                try {
                    super.checkin(key, resource);
                } catch(Exception e) {
                    logger.error("Exception checking in resource: ", e);
                }
            } else {
                // Poor exception, no request to tag this exception onto
                // drop it on the floor and continue as usual.
            }
            return false;
        } else {
            // We have a request here.
            if(resource != null) {
                resourceRequest.useResource(resource);
            } else {
                resourceRequest.handleException(ex);
            }
            return true;
        }
    }

    /**
     * TODO: The processQueueLoop is typically invoked from the selector (
     * serial could invoke this as well, but most likely Parallel (Selector
     * returning connection to the pool )is invoking it). When parallel requests
     * does not have connections, they enqueue the requests. The next thread to
     * check in will continue to process these requests, until the queue is
     * drained or connection is exhausted. There is no number bound on this. For
     * example If you bump the ExceededQuotaSlopTest to do more than 500
     * requests it will fail and if you put a bound on this it will pass.
     * Something that requires deeper investigation in the future.
     *
     * Attempts to repeatedly process enqueued resource requests. Tries until no
     * more progress is possible without blocking.
     *
     * @param key the key whose request queue should be drained.
     */
    private void processQueueLoop(K key) {
        while(processQueue(key)) {}
    }

    @Override
    public void reportException(K key, Exception e) {
        super.reportException(key, e);
        // The reported exception may have freed capacity; try to make progress.
        processQueueLoop(key);
    }

    /**
     * Check the given resource back into the pool
     *
     * @param key The key for the resource
     * @param resource The resource
     */
    @Override
    public void checkin(K key, V resource) {
        super.checkin(key, resource);
        // NB: Blocking checkout calls for synchronous requests get the resource
        // checked in above before processQueueLoop() attempts checkout below.
        // There is therefore a risk that asynchronous requests will be starved.
        processQueueLoop(key);
    }

    /**
     * A safe wrapper to destroy the given resource request.
     */
    protected void destroyRequest(AsyncResourceRequest<V> resourceRequest) {
        if(resourceRequest != null) {
            try {
                // To hand control back to the owner of the
                // AsyncResourceRequest, treat "destroy" as an exception since
                // there is no resource to pass into useResource, and the
                // timeout has not expired.
                Exception e = new UnreachableStoreException("Client request was terminated while waiting in the queue.");
                resourceRequest.handleException(e);
            } catch(Exception ex) {
                logger.error("Exception while destroying resource request:", ex);
            }
        }
    }

    /**
     * Destroys all resource requests in requestQueue.
     *
     * @param requestQueue The queue for which all resource requests are to be
     *        destroyed.
     */
    private void destroyRequestQueue(Queue<AsyncResourceRequest<V>> requestQueue) {
        if(requestQueue != null) {
            AsyncResourceRequest<V> resourceRequest = requestQueue.poll();
            while(resourceRequest != null) {
                destroyRequest(resourceRequest);
                resourceRequest = requestQueue.poll();
            }
        }
    }

    @Override
    protected boolean internalClose() {
        // wasOpen ensures only one thread destroys everything.
        boolean wasOpen = super.internalClose();
        if(wasOpen) {
            // ConcurrentHashMap permits removal during entrySet iteration.
            for(Entry<K, Queue<AsyncResourceRequest<V>>> entry: requestQueueMap.entrySet()) {
                Queue<AsyncResourceRequest<V>> requestQueue = entry.getValue();
                destroyRequestQueue(requestQueue);
                requestQueueMap.remove(entry.getKey());
            }
        }
        return wasOpen;
    }

    @Override
    public void reset(K key) {
        try {
            // Fail all queued async requests for this key first, then let the
            // superclass reset the underlying pool.
            Queue<AsyncResourceRequest<V>> requestQueue = getRequestQueueForExistingKey(key);
            if(requestQueue != null) {
                destroyRequestQueue(requestQueue);
            }
        } finally {
            super.reset(key);
        }
    }

    /**
     * Close the queue and the pool.
     */
    @Override
    public void close() {
        internalClose();
    }

    /*
     * Get the queue of work for the given key. If no queue exists, create one.
     */
    protected Queue<AsyncResourceRequest<V>> getRequestQueueForKey(K key) {
        Queue<AsyncResourceRequest<V>> requestQueue = requestQueueMap.get(key);
        if(requestQueue == null) {
            // Racy creation: putIfAbsent decides the winner; losers adopt the
            // queue that won.
            Queue<AsyncResourceRequest<V>> newRequestQueue = new ConcurrentLinkedQueue<AsyncResourceRequest<V>>();
            requestQueue = requestQueueMap.putIfAbsent(key, newRequestQueue);
            if(requestQueue == null) {
                requestQueue = newRequestQueue;
            }
        }
        return requestQueue;
    }

    /*
     * Get the request queue for the given key. If no queue exists, returns
     * null (never creates one).
     */
    protected Queue<AsyncResourceRequest<V>> getRequestQueueForExistingKey(K key) {
        Queue<AsyncResourceRequest<V>> requestQueue = requestQueueMap.get(key);
        return requestQueue;
    }

    /**
     * Count the number of queued resource requests for a specific pool.
     *
     * @param key The key
     * @return The count of queued resource requests. Returns 0 if no queue
     *         exists for given key.
     */
    public int getRegisteredResourceRequestCount(K key) {
        if(requestQueueMap.containsKey(key)) {
            Queue<AsyncResourceRequest<V>> requestQueue = getRequestQueueForExistingKey(key);
            // FYI: .size() is not constant time in the next call. ;)
            if(requestQueue != null) {
                return requestQueue.size();
            }
        }
        return 0;
    }

    /**
     * Count the total number of queued resource requests for all queues. The
     * result is "approximate" in the face of concurrency since individual
     * queues can change size during the aggregate count.
     *
     * @return The (approximate) aggregate count of queued resource requests.
     */
    public int getRegisteredResourceRequestCount() {
        int count = 0;
        for(Entry<K, Queue<AsyncResourceRequest<V>>> entry: this.requestQueueMap.entrySet()) {
            // FYI: .size() is not constant time in the next call. ;)
            count += entry.getValue().size();
        }
        return count;
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.sftpput;
import org.pentaho.di.job.entry.validator.AndValidator;
import org.pentaho.di.job.entry.validator.JobEntryValidatorUtils;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileType;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entries.sftp.SFTPClient;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceEntry;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
/**
 * This defines an SFTP put job entry: it uploads local files to a remote host
 * over SFTP, with configurable post-transfer handling (nothing/delete/move).
 *
 * @author Matt
 * @since 05-11-2003
 *
 */
public class JobEntrySFTPPUT extends JobEntryBase implements Cloneable, JobEntryInterface {
private static Class<?> PKG = JobEntrySFTPPUT.class; // for i18n purposes, needed by Translator2!!
// What to do with the source file after a successful put; one of the
// AFTER_FTPSPUT_* constants, indexing afterFTPSDesc/afterFTPSCode.
private int afterFTPS;
// Localized labels for the after-SFTP actions, aligned by index with afterFTPSCode.
public static final String[] afterFTPSDesc = new String[] {
BaseMessages.getString( PKG, "JobSFTPPUT.AfterSFTP.DoNothing.Label" ),
BaseMessages.getString( PKG, "JobSFTPPUT.AfterSFTP.Delete.Label" ),
BaseMessages.getString( PKG, "JobSFTPPUT.AfterSFTP.Move.Label" ), };
// Persisted codes for the after-SFTP actions, aligned by index with afterFTPSDesc.
public static final String[] afterFTPSCode = new String[] { "nothing", "delete", "move" };
public static final int AFTER_FTPSPUT_NOTHING = 0;
public static final int AFTER_FTPSPUT_DELETE = 1;
public static final int AFTER_FTPSPUT_MOVE = 2;
// Connection settings.
private String serverName;
private String serverPort;
private String userName;
private String password;
// Remote target directory, local source directory, and filename wildcard filter.
private String sftpDirectory;
private String localDirectory;
private String wildcard;
// Take inputs from the previous job entry's result rows/files instead of scanning.
private boolean copyprevious;
private boolean copypreviousfiles;
// NOTE(review): "Resut" is a long-standing typo preserved because the name is
// also used as the XML tag / repository attribute key.
private boolean addFilenameResut;
// Private-key authentication settings.
private boolean usekeyfilename;
private String keyfilename;
private String keyfilepass;
private String compression;
private boolean createRemoteFolder;
// proxy
private String proxyType;
private String proxyHost;
private String proxyPort;
private String proxyUsername;
private String proxyPassword;
// Destination folder — presumably for the AFTER_FTPSPUT_MOVE option; confirm in execute().
private String destinationfolder;
private boolean createDestinationFolder;
private boolean successWhenNoFile;
/**
 * Creates a named SFTP put job entry with sensible defaults: standard SFTP
 * port 22, no compression ("none"), no proxy, no key file, and no
 * post-transfer action.
 *
 * @param n the name of the job entry.
 */
public JobEntrySFTPPUT( String n ) {
super( n, "" );
serverName = null;
serverPort = "22";
copyprevious = false;
copypreviousfiles = false;
addFilenameResut = false;
usekeyfilename = false;
keyfilename = null;
keyfilepass = null;
compression = "none";
proxyType = null;
proxyHost = null;
proxyPort = null;
proxyUsername = null;
proxyPassword = null;
createRemoteFolder = false;
afterFTPS = AFTER_FTPSPUT_NOTHING;
destinationfolder = null;
createDestinationFolder = false;
successWhenNoFile = false;
}
/** Creates an unnamed SFTP put job entry with default settings. */
public JobEntrySFTPPUT() {
this( "" );
}
/**
 * Returns a shallow copy of this job entry. A shallow copy is sufficient
 * because every field is a String, primitive, or boolean.
 *
 * @return a clone of this job entry.
 */
@Override
public Object clone() {
    JobEntrySFTPPUT je = (JobEntrySFTPPUT) super.clone();
    return je;
}
/**
 * Serializes this job entry's settings to an XML fragment appended to the
 * base entry's XML. Passwords and passphrases are encrypted before being
 * written.
 *
 * @return the XML representation of this job entry.
 */
public String getXML() {
StringBuilder retval = new StringBuilder( 300 );
retval.append( super.getXML() );
retval.append( "      " ).append( XMLHandler.addTagValue( "servername", serverName ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "serverport", serverPort ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "username", userName ) );
retval.append( "      " ).append(
XMLHandler.addTagValue( "password", Encr.encryptPasswordIfNotUsingVariables( password ) ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "sftpdirectory", sftpDirectory ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "localdirectory", localDirectory ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "wildcard", wildcard ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "copyprevious", copyprevious ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "copypreviousfiles", copypreviousfiles ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "addFilenameResut", addFilenameResut ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "usekeyfilename", usekeyfilename ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "keyfilename", keyfilename ) );
retval.append( "      " ).append(
XMLHandler.addTagValue( "keyfilepass", Encr.encryptPasswordIfNotUsingVariables( keyfilepass ) ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "compression", compression ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "proxyType", proxyType ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "proxyHost", proxyHost ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "proxyPort", proxyPort ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "proxyUsername", proxyUsername ) );
retval.append( "      " ).append(
XMLHandler.addTagValue( "proxyPassword", Encr.encryptPasswordIfNotUsingVariables( proxyPassword ) ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "createRemoteFolder", createRemoteFolder ) );
retval.append( "      " ).append(
XMLHandler.addTagValue( "aftersftpput", getAfterSFTPPutCode( getAfterFTPS() ) ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "destinationfolder", destinationfolder ) );
retval
.append( "      " ).append( XMLHandler.addTagValue( "createdestinationfolder", createDestinationFolder ) );
retval.append( "      " ).append( XMLHandler.addTagValue( "successWhenNoFile", successWhenNoFile ) );
return retval.toString();
}
/**
 * Returns the persisted code for an "after SFTP put" action index, falling
 * back to the first code ("nothing") when the index is out of range.
 *
 * @param i the action index (one of the AFTER_FTPSPUT_* constants).
 * @return the matching persisted code string.
 */
public static String getAfterSFTPPutCode( int i ) {
    boolean outOfRange = i < 0 || i >= afterFTPSCode.length;
    return afterFTPSCode[outOfRange ? 0 : i];
}
/**
 * Restores this job entry's settings from an XML node. Encrypted passwords
 * and passphrases are decrypted while reading.
 *
 * @param entrynode the XML node holding this entry's settings.
 * @param databases shared database metadata (passed through to the base class).
 * @param slaveServers shared slave servers (passed through to the base class).
 * @param rep the repository (unused here).
 * @param metaStore the metastore (unused here).
 * @throws KettleXMLException when the node cannot be parsed.
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
Repository rep, IMetaStore metaStore ) throws KettleXMLException {
try {
super.loadXML( entrynode, databases, slaveServers );
serverName = XMLHandler.getTagValue( entrynode, "servername" );
serverPort = XMLHandler.getTagValue( entrynode, "serverport" );
userName = XMLHandler.getTagValue( entrynode, "username" );
password = Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "password" ) );
sftpDirectory = XMLHandler.getTagValue( entrynode, "sftpdirectory" );
localDirectory = XMLHandler.getTagValue( entrynode, "localdirectory" );
wildcard = XMLHandler.getTagValue( entrynode, "wildcard" );
copyprevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "copyprevious" ) );
copypreviousfiles = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "copypreviousfiles" ) );
addFilenameResut = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "addFilenameResut" ) );
usekeyfilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "usekeyfilename" ) );
keyfilename = XMLHandler.getTagValue( entrynode, "keyfilename" );
keyfilepass = Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "keyfilepass" ) );
compression = XMLHandler.getTagValue( entrynode, "compression" );
proxyType = XMLHandler.getTagValue( entrynode, "proxyType" );
proxyHost = XMLHandler.getTagValue( entrynode, "proxyHost" );
proxyPort = XMLHandler.getTagValue( entrynode, "proxyPort" );
proxyUsername = XMLHandler.getTagValue( entrynode, "proxyUsername" );
proxyPassword =
Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "proxyPassword" ) );
createRemoteFolder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "createRemoteFolder" ) );
// Backward compatibility: the legacy boolean "remove" tag maps onto the
// newer "aftersftpput" action when no explicit action was stored.
boolean remove = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "remove" ) );
setAfterFTPS( getAfterSFTPPutByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "aftersftpput" ), "" ) ) );
if ( remove && getAfterFTPS() == AFTER_FTPSPUT_NOTHING ) {
setAfterFTPS( AFTER_FTPSPUT_DELETE );
}
destinationfolder = XMLHandler.getTagValue( entrynode, "destinationfolder" );
createDestinationFolder =
"Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "createdestinationfolder" ) );
successWhenNoFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "successWhenNoFile" ) );
} catch ( KettleXMLException xe ) {
throw new KettleXMLException( "Unable to load job entry of type 'SFTPPUT' from XML node", xe );
}
}
/**
 * Resolves a persisted "after SFTP put" code to its action index.
 *
 * @param tt the persisted code (case-insensitive); may be null.
 * @return the matching index, or 0 (do nothing) when null or unknown.
 */
public static int getAfterSFTPPutByCode( String tt ) {
    if ( tt != null ) {
        for ( int index = 0; index < afterFTPSCode.length; index++ ) {
            if ( afterFTPSCode[index].equalsIgnoreCase( tt ) ) {
                return index;
            }
        }
    }
    return 0;
}
/**
 * Returns the localized label for an "after SFTP put" action index, falling
 * back to the first label when the index is out of range.
 *
 * @param i the action index (one of the AFTER_FTPSPUT_* constants).
 * @return the matching localized description.
 */
public static String getAfterSFTPPutDesc( int i ) {
    boolean inRange = i >= 0 && i < afterFTPSDesc.length;
    return inRange ? afterFTPSDesc[i] : afterFTPSDesc[0];
}
/**
 * Resolves a localized "after SFTP put" label to its action index, falling
 * back to a lookup by persisted code when the label does not match.
 *
 * @param tt the localized label or persisted code (case-insensitive); may be null.
 * @return the matching index, or 0 (do nothing) when null or unknown.
 */
public static int getAfterSFTPPutByDesc( String tt ) {
    if ( tt == null ) {
        return 0;
    }
    int index = 0;
    while ( index < afterFTPSDesc.length ) {
        if ( afterFTPSDesc[index].equalsIgnoreCase( tt ) ) {
            return index;
        }
        index++;
    }
    // If this fails, try to match using the code.
    return getAfterSFTPPutByCode( tt );
}
/**
 * Restores this job entry's settings from the repository. Encrypted
 * passwords and passphrases are decrypted while reading.
 *
 * @param rep the repository to read from.
 * @param metaStore the metastore (unused here).
 * @param id_jobentry the id of this job entry in the repository.
 * @param databases shared database metadata (unused here).
 * @param slaveServers shared slave servers (unused here).
 * @throws KettleException when the attributes cannot be read.
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ) throws KettleException {
try {
serverName = rep.getJobEntryAttributeString( id_jobentry, "servername" );
serverPort = rep.getJobEntryAttributeString( id_jobentry, "serverport" );
userName = rep.getJobEntryAttributeString( id_jobentry, "username" );
password =
Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "password" ) );
sftpDirectory = rep.getJobEntryAttributeString( id_jobentry, "sftpdirectory" );
localDirectory = rep.getJobEntryAttributeString( id_jobentry, "localdirectory" );
wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
copyprevious = rep.getJobEntryAttributeBoolean( id_jobentry, "copyprevious" );
copypreviousfiles = rep.getJobEntryAttributeBoolean( id_jobentry, "copypreviousfiles" );
addFilenameResut = rep.getJobEntryAttributeBoolean( id_jobentry, "addFilenameResut" );
usekeyfilename = rep.getJobEntryAttributeBoolean( id_jobentry, "usekeyfilename" );
keyfilename = rep.getJobEntryAttributeString( id_jobentry, "keyfilename" );
keyfilepass =
Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "keyfilepass" ) );
compression = rep.getJobEntryAttributeString( id_jobentry, "compression" );
proxyType = rep.getJobEntryAttributeString( id_jobentry, "proxyType" );
proxyHost = rep.getJobEntryAttributeString( id_jobentry, "proxyHost" );
proxyPort = rep.getJobEntryAttributeString( id_jobentry, "proxyPort" );
proxyUsername = rep.getJobEntryAttributeString( id_jobentry, "proxyUsername" );
proxyPassword =
Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "proxyPassword" ) );
createRemoteFolder = rep.getJobEntryAttributeBoolean( id_jobentry, "createRemoteFolder" );
// Backward compatibility: the legacy boolean "remove" attribute maps onto
// the newer "aftersftpput" action when no explicit action was stored.
boolean remove = rep.getJobEntryAttributeBoolean( id_jobentry, "remove" );
setAfterFTPS( getAfterSFTPPutByCode( Const.NVL(
rep.getJobEntryAttributeString( id_jobentry, "aftersftpput" ), "" ) ) );
if ( remove && getAfterFTPS() == AFTER_FTPSPUT_NOTHING ) {
setAfterFTPS( AFTER_FTPSPUT_DELETE );
}
destinationfolder = rep.getJobEntryAttributeString( id_jobentry, "destinationfolder" );
createDestinationFolder = rep.getJobEntryAttributeBoolean( id_jobentry, "createdestinationfolder" );
successWhenNoFile = rep.getJobEntryAttributeBoolean( id_jobentry, "successWhenNoFile" );
} catch ( KettleException dbe ) {
throw new KettleException( "Unable to load job entry of type 'SFTPPUT' from the repository for id_jobentry="
+ id_jobentry, dbe );
}
}
/**
 * Persists this job entry's settings to the repository. Passwords and
 * passphrases are encrypted before being written.
 *
 * @param rep the repository to write to.
 * @param metaStore the metastore (unused here).
 * @param id_job the id of the owning job in the repository.
 * @throws KettleException when the attributes cannot be saved.
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
    try {
        rep.saveJobEntryAttribute( id_job, getObjectId(), "servername", serverName );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "serverport", serverPort );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "username", userName );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "password", Encr
            .encryptPasswordIfNotUsingVariables( password ) );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "sftpdirectory", sftpDirectory );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "localdirectory", localDirectory );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "wildcard", wildcard );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "copyprevious", copyprevious );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "copypreviousfiles", copypreviousfiles );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "addFilenameResut", addFilenameResut );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "usekeyfilename", usekeyfilename );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "keyfilename", keyfilename );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "keyfilepass", Encr
            .encryptPasswordIfNotUsingVariables( keyfilepass ) );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "compression", compression );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyType", proxyType );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyHost", proxyHost );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyPort", proxyPort );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyUsername", proxyUsername );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyPassword", Encr
            .encryptPasswordIfNotUsingVariables( proxyPassword ) );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "aftersftpput", getAfterSFTPPutCode( getAfterFTPS() ) );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "createRemoteFolder", createRemoteFolder );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "destinationfolder", destinationfolder );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "createdestinationfolder", createDestinationFolder );
        rep.saveJobEntryAttribute( id_job, getObjectId(), "successWhenNoFile", successWhenNoFile );
    } catch ( KettleDatabaseException dbe ) {
        // Fixed: this is the save path, but the message previously said "load".
        throw new KettleException( "Unable to save job entry of type 'SFTPPUT' to the repository for id_job="
            + id_job, dbe );
    }
}
/**
 * @param createDestinationFolder
 *          The create destination folder flag to set.
 */
public void setCreateDestinationFolder( boolean createDestinationFolder ) {
this.createDestinationFolder = createDestinationFolder;
}
/**
 * @return Returns the create destination folder flag
 */
public boolean isCreateDestinationFolder() {
return createDestinationFolder;
}
/**
 * @param successWhenNoFile
 *          The successWhenNoFile flag to set.
 */
public void setSuccessWhenNoFile( boolean successWhenNoFile ) {
this.successWhenNoFile = successWhenNoFile;
}
/**
 * @return Returns the successWhenNoFile flag (treat "no file to transfer" as success).
 */
public boolean isSuccessWhenNoFile() {
return successWhenNoFile;
}
/**
 * @param destinationfolderin
 *          the destination folder to set (presumably used by the move-after-SFTP option — confirm in execute()).
 */
public void setDestinationFolder( String destinationfolderin ) {
this.destinationfolder = destinationfolderin;
}
/**
 * @return Returns the destination folder.
 */
public String getDestinationFolder() {
return destinationfolder;
}
/**
 * @return Returns the afterFTPS action index (one of the AFTER_FTPSPUT_* constants).
 */
public int getAfterFTPS() {
return afterFTPS;
}
/**
 * @param value
 *          The afterFTPS action index to set (one of the AFTER_FTPSPUT_* constants).
 */
public void setAfterFTPS( int value ) {
this.afterFTPS = value;
}
/**
 * @return Returns the remote directory. NOTE(review): despite the "Scp" in
 *         the name, this accessor reads the sftpDirectory field; the name is
 *         kept for compatibility with existing callers.
 */
public String getScpDirectory() {
return sftpDirectory;
}
/**
 * @param directory
 *          The remote (SFTP) directory to set.
 */
public void setScpDirectory( String directory ) {
this.sftpDirectory = directory;
}
/**
 * @return Returns the password.
 */
public String getPassword() {
return password;
}
/**
 * @param password
 *          The password to set.
 */
public void setPassword( String password ) {
this.password = password;
}
/**
 * @return Returns the serverName.
 */
public String getServerName() {
return serverName;
}
/**
 * @param serverName
 *          The serverName to set.
 */
public void setServerName( String serverName ) {
this.serverName = serverName;
}
/**
 * @return Returns the userName.
 */
public String getUserName() {
return userName;
}
/**
 * @param userName
 *          The userName to set.
 */
public void setUserName( String userName ) {
this.userName = userName;
}
/**
 * @return Returns the filename wildcard used to filter files to transfer.
 */
public String getWildcard() {
return wildcard;
}
/**
 * @param wildcard
 *          The filename wildcard to set.
 */
public void setWildcard( String wildcard ) {
this.wildcard = wildcard;
}
/**
 * @return Returns the local source directory.
 */
public String getLocalDirectory() {
return localDirectory;
}
/**
 * @param localDirectory
 *          The local source directory to set.
 */
public void setLocalDirectory( String localDirectory ) {
this.localDirectory = localDirectory;
}
/** @return whether to take arguments from the previous entry's result rows. */
public boolean isCopyPrevious() {
return copyprevious;
}
/** @param copyprevious whether to take arguments from the previous entry's result rows. */
public void setCopyPrevious( boolean copyprevious ) {
this.copyprevious = copyprevious;
}
/** @return whether to take files from the previous entry's result files. */
public boolean isCopyPreviousFiles() {
return copypreviousfiles;
}
/** @param copypreviousfiles whether to take files from the previous entry's result files. */
public void setCopyPreviousFiles( boolean copypreviousfiles ) {
this.copypreviousfiles = copypreviousfiles;
}
/** @return whether to add transferred filenames to the result. ("Resut" typo is kept for compatibility.) */
public boolean isAddFilenameResut() {
return addFilenameResut;
}
/** @return whether private-key authentication is enabled. */
public boolean isUseKeyFile() {
return usekeyfilename;
}
/** @param value whether private-key authentication is enabled. */
public void setUseKeyFile( boolean value ) {
this.usekeyfilename = value;
}
/** @return the private key file name. */
public String getKeyFilename() {
return keyfilename;
}
/** @param value the private key file name to set. */
public void setKeyFilename( String value ) {
this.keyfilename = value;
}
/** @return the private key passphrase. */
public String getKeyPassPhrase() {
return keyfilepass;
}
/** @param value the private key passphrase to set. */
public void setKeyPassPhrase( String value ) {
this.keyfilepass = value;
}
/** @param addFilenameResut whether to add transferred filenames to the result. */
public void setAddFilenameResut( boolean addFilenameResut ) {
this.addFilenameResut = addFilenameResut;
}
/**
 * @return Returns the compression.
 */
public String getCompression() {
return compression;
}
/**
 * @param compression
 *          The compression to set.
 */
public void setCompression( String compression ) {
this.compression = compression;
}
/** @return the server port (stored as a String; may contain a variable). */
public String getServerPort() {
return serverPort;
}
/** @param serverPort the server port to set. */
public void setServerPort( String serverPort ) {
this.serverPort = serverPort;
}
/** @return the proxy type. */
public String getProxyType() {
return proxyType;
}
/** @param value the proxy type to set. */
public void setProxyType( String value ) {
this.proxyType = value;
}
/** @return the proxy host. */
public String getProxyHost() {
return proxyHost;
}
/** @param value the proxy host to set. */
public void setProxyHost( String value ) {
this.proxyHost = value;
}
/** @return the proxy port (stored as a String; may contain a variable). */
public String getProxyPort() {
return proxyPort;
}
/** @param value the proxy port to set. */
public void setProxyPort( String value ) {
this.proxyPort = value;
}
/** @return the proxy user name. */
public String getProxyUsername() {
return proxyUsername;
}
/** @param value the proxy user name to set. */
public void setProxyUsername( String value ) {
this.proxyUsername = value;
}
/** @return the proxy password. */
public String getProxyPassword() {
return proxyPassword;
}
/** @param value the proxy password to set. */
public void setProxyPassword( String value ) {
this.proxyPassword = value;
}
/** @return whether the remote folder should be created when missing. */
public boolean isCreateRemoteFolder() {
return this.createRemoteFolder;
}
/** @param value whether the remote folder should be created when missing. */
public void setCreateRemoteFolder( boolean value ) {
this.createRemoteFolder = value;
}
  /**
   * Executes the SFTP upload. The list of files to send is built from one of
   * three sources: the first field of each previous-result row
   * ({@code copyprevious}), the previous result's file list
   * ({@code copypreviousfiles}), or a non-recursive scan of the local
   * directory (default). It then connects to the SFTP host (password and/or
   * private-key authentication, optional proxy), optionally creates and
   * changes into the remote directory, uploads each file, and applies the
   * configured post-transfer action (delete, move, or add to the result
   * file list).
   *
   * @param previousResult the result of the previous job entry; updated in place
   * @param nr the job entry number (not used here)
   * @return the same Result object, with the success flag and error count set
   * @throws KettleException when the remote folder is missing and may not be created
   */
  public Result execute( Result previousResult, int nr ) throws KettleException {
    Result result = previousResult;
    List<RowMetaAndData> rows = result.getRows();
    // Pessimistic default: only flipped to true after all uploads succeed.
    result.setResult( false );
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobSFTPPUT.Log.StartJobEntry" ) );
    }
    ArrayList<FileObject> myFileList = new ArrayList<FileObject>();
    // Mode 1: file names come from the first field of each previous-result row.
    if ( copyprevious ) {
      if ( rows.size() == 0 ) {
        // Nothing handed over is treated as success in this mode.
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobSFTPPUT.ArgsFromPreviousNothing" ) );
        }
        result.setResult( true );
        return result;
      }
      try {
        RowMetaAndData resultRow = null;
        // Copy the input row to the (command line) arguments
        for ( int iteration = 0; iteration < rows.size(); iteration++ ) {
          resultRow = rows.get( iteration );
          // Get file names
          String file_previous = resultRow.getString( 0, null );
          if ( !Utils.isEmpty( file_previous ) ) {
            FileObject file = KettleVFS.getFileObject( file_previous, this );
            if ( !file.exists() ) {
              // Missing file is logged but does not abort the run.
              logError( BaseMessages.getString( PKG, "JobSFTPPUT.Log.FilefromPreviousNotFound", file_previous ) );
            } else {
              myFileList.add( file );
              if ( log.isDebug() ) {
                logDebug( BaseMessages.getString( PKG, "JobSFTPPUT.Log.FilenameFromResult", file_previous ) );
              }
            }
          }
        }
      } catch ( Exception e ) {
        logError( BaseMessages.getString( PKG, "JobSFTPPUT.Error.ArgFromPrevious" ) );
        result.setNrErrors( 1 );
        // free resource
        myFileList = null;
        return result;
      }
    }
    // Mode 2: files come from the previous entries' result-file list.
    if ( copypreviousfiles ) {
      List<ResultFile> resultFiles = result.getResultFilesList();
      if ( resultFiles == null || resultFiles.size() == 0 ) {
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobSFTPPUT.ArgsFromPreviousNothingFiles" ) );
        }
        result.setResult( true );
        return result;
      }
      try {
        // Stop collecting as soon as the parent job is asked to stop.
        for ( Iterator<ResultFile> it = resultFiles.iterator(); it.hasNext() && !parentJob.isStopped(); ) {
          ResultFile resultFile = it.next();
          FileObject file = resultFile.getFile();
          if ( file != null ) {
            if ( !file.exists() ) {
              logError( BaseMessages.getString( PKG, "JobSFTPPUT.Log.FilefromPreviousNotFound", file.toString() ) );
            } else {
              myFileList.add( file );
              if ( log.isDebug() ) {
                logDebug( BaseMessages.getString( PKG, "JobSFTPPUT.Log.FilenameFromResult", file.toString() ) );
              }
            }
          }
        }
      } catch ( Exception e ) {
        logError( BaseMessages.getString( PKG, "JobSFTPPUT.Error.ArgFromPrevious" ) );
        result.setNrErrors( 1 );
        // free resource
        myFileList = null;
        return result;
      }
    }
    SFTPClient sftpclient = null;
    // String substitution..
    String realServerName = environmentSubstitute( serverName );
    String realServerPort = environmentSubstitute( serverPort );
    String realUsername = environmentSubstitute( userName );
    String realPassword = Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( password ) );
    String realSftpDirString = environmentSubstitute( sftpDirectory );
    String realWildcard = environmentSubstitute( wildcard );
    String realLocalDirectory = environmentSubstitute( localDirectory );
    String realKeyFilename = null;
    String realPassPhrase = null;
    // Destination folder (Move to)
    String realDestinationFolder = environmentSubstitute( getDestinationFolder() );
    try {
      // Let's perform some checks before starting
      // NOTE(review): the two "JobSSH2PUT.*" message keys below look copied from
      // the SSH2PUT job entry — confirm they exist in this bundle.
      if ( getAfterFTPS() == AFTER_FTPSPUT_MOVE ) {
        if ( Utils.isEmpty( realDestinationFolder ) ) {
          logError( BaseMessages.getString( PKG, "JobSSH2PUT.Log.DestinatFolderMissing" ) );
          result.setNrErrors( 1 );
          return result;
        } else {
          FileObject folder = null;
          try {
            folder = KettleVFS.getFileObject( realDestinationFolder, this );
            // Let's check if folder exists...
            if ( !folder.exists() ) {
              // Do we need to create it?
              if ( createDestinationFolder ) {
                folder.createFolder();
              } else {
                logError( BaseMessages.getString(
                  PKG, "JobSSH2PUT.Log.DestinatFolderNotExist", realDestinationFolder ) );
                result.setNrErrors( 1 );
                return result;
              }
            }
            // Normalize to the VFS filename so the move target is well-formed.
            realDestinationFolder = KettleVFS.getFilename( folder );
          } catch ( Exception e ) {
            throw new KettleException( e );
          } finally {
            if ( folder != null ) {
              try {
                folder.close();
              } catch ( Exception e ) { /* Ignore */
              }
            }
          }
        }
      }
      if ( isUseKeyFile() ) {
        // We must have here a private keyfilename
        realKeyFilename = environmentSubstitute( getKeyFilename() );
        if ( Utils.isEmpty( realKeyFilename ) ) {
          // Error..Missing keyfile
          logError( BaseMessages.getString( PKG, "JobSFTP.Error.KeyFileMissing" ) );
          result.setNrErrors( 1 );
          return result;
        }
        if ( !KettleVFS.fileExists( realKeyFilename ) ) {
          // Error.. can not reach keyfile
          logError( BaseMessages.getString( PKG, "JobSFTP.Error.KeyFileNotFound" ) );
          result.setNrErrors( 1 );
          return result;
        }
        realPassPhrase = environmentSubstitute( getKeyPassPhrase() );
      }
      // Create sftp client to host ... port defaults to 22 when unset/invalid.
      sftpclient =
        new SFTPClient(
          InetAddress.getByName( realServerName ), Const.toInt( realServerPort, 22 ), realUsername,
          realKeyFilename, realPassPhrase );
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobSFTPPUT.Log.OpenedConnection", realServerName, ""
          + realServerPort, realUsername ) );
      }
      // Set compression
      sftpclient.setCompression( getCompression() );
      // Set proxy?
      String realProxyHost = environmentSubstitute( getProxyHost() );
      if ( !Utils.isEmpty( realProxyHost ) ) {
        // Set proxy
        sftpclient.setProxy(
          realProxyHost, environmentSubstitute( getProxyPort() ), environmentSubstitute( getProxyUsername() ),
          environmentSubstitute( getProxyPassword() ), getProxyType() );
      }
      // login to ftp host ...
      sftpclient.login( realPassword );
      // Don't show the password in the logs, it's not good for security audits
      // logDetailed("logged in using password "+realPassword); // Logging this seems a bad idea! Oh well.
      // move to spool dir ...
      if ( !Utils.isEmpty( realSftpDirString ) ) {
        boolean existfolder = sftpclient.folderExists( realSftpDirString );
        if ( !existfolder ) {
          // Fail unless the entry is configured to create the remote folder.
          if ( !isCreateRemoteFolder() ) {
            throw new KettleException( BaseMessages.getString(
              PKG, "JobSFTPPUT.Error.CanNotFindRemoteFolder", realSftpDirString ) );
          }
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages
              .getString( PKG, "JobSFTPPUT.Error.CanNotFindRemoteFolder", realSftpDirString ) );
          }
          // Let's create folder
          sftpclient.createFolder( realSftpDirString );
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobSFTPPUT.Log.RemoteFolderCreated", realSftpDirString ) );
          }
        }
        sftpclient.chdir( realSftpDirString );
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobSFTPPUT.Log.ChangedDirectory", realSftpDirString ) );
        }
      } // end if
      // Mode 3 (default): non-recursive scan of the local directory for plain files.
      if ( !copyprevious && !copypreviousfiles ) {
        // Get all the files in the local directory...
        myFileList = new ArrayList<FileObject>();
        FileObject localFiles = KettleVFS.getFileObject( realLocalDirectory, this );
        FileObject[] children = localFiles.getChildren();
        if ( children != null ) {
          for ( int i = 0; i < children.length; i++ ) {
            // Get filename of file or directory
            if ( children[i].getType().equals( FileType.FILE ) ) {
              // myFileList.add(children[i].getAbsolutePath());
              myFileList.add( children[i] );
            }
          } // end for
        }
      }
      if ( myFileList == null || myFileList.size() == 0 ) {
        if ( isSuccessWhenNoFile() ) {
          // Just warn user
          if ( isBasic() ) {
            logBasic( BaseMessages.getString( PKG, "JobSFTPPUT.Error.NoFileToSend" ) );
          }
        } else {
          // Fail
          logError( BaseMessages.getString( PKG, "JobSFTPPUT.Error.NoFileToSend" ) );
          result.setNrErrors( 1 );
          return result;
        }
      }
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobSFTPPUT.Log.RowsFromPreviousResult", myFileList.size() ) );
      }
      // The wildcard filter only applies to the local-directory scan, not to
      // files handed over from previous entries.
      Pattern pattern = null;
      if ( !copyprevious && !copypreviousfiles ) {
        if ( !Utils.isEmpty( realWildcard ) ) {
          pattern = Pattern.compile( realWildcard );
        }
      }
      // Get the files in the list and execute sftp.put() for each file
      Iterator<FileObject> it = myFileList.iterator();
      while ( it.hasNext() && !parentJob.isStopped() ) {
        FileObject myFile = it.next();
        try {
          String localFilename = myFile.toString();
          String destinationFilename = myFile.getName().getBaseName();
          boolean getIt = true;
          // First see if the file matches the regular expression!
          if ( pattern != null ) {
            Matcher matcher = pattern.matcher( destinationFilename );
            getIt = matcher.matches();
          }
          if ( getIt ) {
            if ( log.isDebug() ) {
              logDebug( BaseMessages.getString(
                PKG, "JobSFTPPUT.Log.PuttingFile", localFilename, realSftpDirString ) );
            }
            sftpclient.put( myFile, destinationFilename );
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString( PKG, "JobSFTPPUT.Log.TransferedFile", localFilename ) );
            }
            // We successfully uploaded the file
            // what's next ...
            switch ( getAfterFTPS() ) {
              case AFTER_FTPSPUT_DELETE:
                myFile.delete();
                if ( log.isDetailed() ) {
                  logDetailed( BaseMessages.getString( PKG, "JobSFTPPUT.Log.DeletedFile", localFilename ) );
                }
                break;
              case AFTER_FTPSPUT_MOVE:
                FileObject destination = null;
                try {
                  destination =
                    KettleVFS.getFileObject( realDestinationFolder
                      + Const.FILE_SEPARATOR + myFile.getName().getBaseName(), this );
                  myFile.moveTo( destination );
                  if ( log.isDetailed() ) {
                    logDetailed( BaseMessages.getString( PKG, "JobSFTPPUT.Log.FileMoved", myFile, destination ) );
                  }
                } finally {
                  if ( destination != null ) {
                    destination.close();
                  }
                }
                break;
              default:
                if ( addFilenameResut ) {
                  // Add to the result files...
                  ResultFile resultFile =
                    new ResultFile( ResultFile.FILE_TYPE_GENERAL, myFile, parentJob.getJobname(), toString() );
                  result.getResultFiles().put( resultFile.getFile().toString(), resultFile );
                  if ( log.isDetailed() ) {
                    logDetailed( BaseMessages.getString(
                      PKG, "JobSFTPPUT.Log.FilenameAddedToResultFilenames", localFilename ) );
                  }
                }
                break;
            }
          }
        } finally {
          // Always release the VFS handle, even when the upload failed.
          if ( myFile != null ) {
            myFile.close();
          }
        }
      } // end for
      result.setResult( true );
      // JKU: no idea if this is needed...!
      // result.setNrFilesRetrieved(filesRetrieved);
    } catch ( Exception e ) {
      result.setNrErrors( 1 );
      logError( BaseMessages.getString( PKG, "JobSFTPPUT.Exception", e.getMessage() ) );
      logError( Const.getStackTracker( e ) );
    } finally {
      // close connection, if possible
      try {
        if ( sftpclient != null ) {
          sftpclient.disconnect();
        }
      } catch ( Exception e ) {
        // just ignore this, makes no big difference
      } // end catch
      myFileList = null;
    } // end finally
    return result;
  } // JKU: end function execute()
  /**
   * @return true — this job entry evaluates its result, so success/failure
   *         hops may follow it in the job.
   */
  public boolean evaluates() {
    return true;
  }
public List<ResourceReference> getResourceDependencies( JobMeta jobMeta ) {
List<ResourceReference> references = super.getResourceDependencies( jobMeta );
if ( !Utils.isEmpty( serverName ) ) {
String realServerName = jobMeta.environmentSubstitute( serverName );
ResourceReference reference = new ResourceReference( this );
reference.getEntries().add( new ResourceEntry( realServerName, ResourceType.SERVER ) );
references.add( reference );
}
return references;
}
  /**
   * Validates the minimal configuration of this job entry for the UI checker:
   * server name, user name not blank; local directory not blank and existing;
   * password not null; server port an integer. Remarks are appended in the
   * order the validators run.
   */
  @Override
  public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    JobEntryValidatorUtils.andValidator().validate( this, "serverName", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
    JobEntryValidatorUtils.andValidator().validate(
      this, "localDirectory", remarks, AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator(),
        JobEntryValidatorUtils.fileExistsValidator() ) );
    JobEntryValidatorUtils.andValidator().validate( this, "userName", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
    JobEntryValidatorUtils.andValidator().validate( this, "password", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );
    JobEntryValidatorUtils.andValidator().validate( this, "serverPort", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.integerValidator() ) );
  }
}
| |
/*
* Copyright (c) 2014 ChargeBee Inc
* All Rights Reserved.
*/
package com.chargebee.samples;
import com.chargebee.APIException;
import com.chargebee.Result;
import com.chargebee.exceptions.InvalidRequestException;
import com.chargebee.internal.Request;
import com.chargebee.models.Address;
import com.chargebee.models.Customer;
import com.chargebee.models.HostedPage;
import com.chargebee.models.Invoice;
import com.chargebee.models.Subscription;
import com.chargebee.org.json.*;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import static com.chargebee.samples.common.ErrorHandler.*;
import static com.chargebee.samples.common.Utils.getHostUrl;
import static com.chargebee.samples.common.Utils.validateParameters;
/*
* Self Service Portal for customers to manage their subscriptions.
*
*/
public class SelfServicePortal extends HttpServlet {

    @Override
    public String getServletInfo() {
        return "Self Service Portal for customers to manage their subscription";
    }

    /**
     * Routes GET requests to the card-update, redirect-handler and
     * invoice-pdf actions. All GET actions require an authenticated session.
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        String path = request.getServletPath();
        // NOTE(review): doGet redirects to "/ssp" while doPost uses "/ssp/" —
        // confirm both resolve to the same login page.
        if (!authenticate(request)) {
            response.sendRedirect("/ssp");
            return;
        }
        try {
            if (path.endsWith("/update_card")) {
                updateCard(request, response);
            } else if (path.endsWith("/redirect_handler")) {
                redirectHandler(request, response);
            } else if (path.endsWith("/invoice_as_pdf")) {
                invoiceAsPdf(request, response);
            } else {
                response.sendError(HttpServletResponse.SC_NOT_FOUND);
            }
        } catch (Exception e) {
            throw new RuntimeException(e); // Will be handled in error servlet.
        }
    }

    /**
     * Routes POST requests. Only /login is reachable without an
     * authenticated session.
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        String path = request.getServletPath();
        if (!path.endsWith("/login") && !authenticate(request)) {
            response.sendRedirect("/ssp/");
            return;
        }
        try {
            if (path.endsWith("/login")) {
                login(request, response);
            } else if (path.endsWith("/logout")) {
                logout(request, response);
            } else if (path.endsWith("/update_account_info")) {
                updateAccountInfo(request, response);
            } else if (path.endsWith("/update_billing_info")) {
                updateBillingInfo(request, response);
            } else if (path.endsWith("/update_shipping_address")) {
                updateShippingAddress(request, response);
            } else if (path.endsWith("/sub_cancel")) {
                subscriptionCancel(request, response);
            } else if (path.endsWith("/sub_reactivate")) {
                subscriptionReactivate(request, response);
            } else {
                response.sendError(HttpServletResponse.SC_NOT_FOUND);
            }
        } catch (Exception e) {
            throw new RuntimeException(e); // Will be handled in error servlet.
        }
    }

    /*
     * Checks that both session variables are set for the logged in user.
     */
    public static boolean authenticate(HttpServletRequest request) {
        return getSubscriptionId(request) != null && getCustomerId(request) != null;
    }

    /*
     * Gets the subscription Id from the session variable if set in session.
     * Returns null when there is no session or the attribute is absent.
     */
    public static String getSubscriptionId(HttpServletRequest request) {
        String subscriptionId = null;
        if (request.getSession(false) != null
                && request.getSession(false).getAttribute("subscription_id") != null) {
            subscriptionId = request.getSession(false)
                    .getAttribute("subscription_id").toString();
        }
        return subscriptionId;
    }

    /*
     * Gets the customer Id from the session variable if set in session.
     * Returns null when there is no session or the attribute is absent.
     */
    public static String getCustomerId(HttpServletRequest request) {
        String customerId = null;
        if (request.getSession(false) != null
                && request.getSession(false).getAttribute("customer_id") != null) {
            customerId = request.getSession(false)
                    .getAttribute("customer_id").toString();
        }
        return customerId;
    }

    /*
     * Forwards the user to the ChargeBee hosted page to update card details.
     * On success ChargeBee redirects back to /ssp/redirect_handler.
     */
    private void updateCard(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        String hostUrl = getHostUrl(request);
        try {
            Result result = HostedPage.updatePaymentMethod()
                    .customerId(getCustomerId(request))
                    .embed(Boolean.FALSE)
                    .redirectUrl(hostUrl + "/ssp/redirect_handler")
                    .cancelUrl(hostUrl + "/ssp/subscription.jsp")
                    .request();
            response.sendRedirect(result.hostedPage().url());
        } catch (Exception e) {
            // Best-effort trace; the user just gets a 500.
            e.printStackTrace();
            response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        }
    }

    /*
     * Handles the redirection from ChargeBee on successful card update.
     * Verifies the hosted page actually reached the SUCCEEDED state.
     */
    private void redirectHandler(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        String id = request.getParameter("id");
        Result result = HostedPage.retrieve(id).request();
        if (result.hostedPage().state().equals(HostedPage.State.SUCCEEDED)) {
            response.sendRedirect("/ssp/subscription.jsp");
        } else {
            response.sendError(HttpServletResponse.SC_BAD_REQUEST);
        }
    }

    /*
     * Redirects to the pdf download url for the requested invoice, after
     * verifying the invoice belongs to the logged-in subscription.
     */
    private void invoiceAsPdf(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        String invoiceId = request.getParameter("invoice_id");
        Invoice invoice = Invoice.retrieve(invoiceId).request().invoice();
        if (!getSubscriptionId(request).equals(invoice.subscriptionId())) {
            // Prevent downloading another customer's invoice.
            response.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }
        Result result = Invoice.pdf(invoiceId).request();
        response.sendRedirect(result.download().downloadUrl());
    }

    /*
     * Authenticates the user and sets the subscription id as session attribute.
     * Here the username should be a subscription id in ChargeBee and the
     * password can be anything.
     */
    private void login(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, Exception {
        if (fetchSubscription(request)) {
            response.sendRedirect("subscription.jsp");
        } else {
            response.sendRedirect("/ssp?login=failed");
        }
    }

    /*
     * Logs out the user by invalidating the session, if one exists.
     */
    private void logout(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        if (request.getSession(false) != null) {
            request.getSession(false).invalidate();
        }
        response.sendRedirect("index.jsp");
    }

    /*
     * Updates customer account details in ChargeBee and returns a JSON
     * forward target for the client.
     */
    private void updateAccountInfo(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        response.setHeader("Content-Type", "application/json;charset=utf-8");
        PrintWriter out = response.getWriter();
        validateParameters(request);
        try {
            // Fix: the Result return value was previously assigned to an unused local.
            Customer.update(getCustomerId(request))
                    .firstName(request.getParameter("first_name"))
                    .lastName(request.getParameter("last_name"))
                    .company(request.getParameter("company"))
                    .phone(request.getParameter("phone"))
                    .email(request.getParameter("email")).request();
            out.write("{ \"forward\" : \"/ssp/subscription.jsp\" }");
        } catch (InvalidRequestException e) {
            handleInvalidRequestErrors(e, response, out, null);
        } catch (Exception e) {
            handleGeneralErrors(e, response, out);
        } finally {
            out.flush();
        }
    }

    /*
     * Updates billing info of the customer in ChargeBee.
     */
    private void updateBillingInfo(HttpServletRequest req,
            HttpServletResponse resp) throws ServletException, IOException {
        resp.setHeader("Content-Type", "application/json;charset=utf-8");
        PrintWriter out = resp.getWriter();
        validateParameters(req);
        try {
            Customer.updateBillingInfo(getCustomerId(req))
                    .billingAddressFirstName(req.getParameter("billing_address[first_name]"))
                    .billingAddressLastName(req.getParameter("billing_address[last_name]"))
                    .billingAddressLine1(req.getParameter("billing_address[line1]"))
                    .billingAddressLine2(req.getParameter("billing_address[line2]"))
                    .billingAddressCity(req.getParameter("billing_address[city]"))
                    .billingAddressState(req.getParameter("billing_address[state]"))
                    .billingAddressCountry(req.getParameter("billing_address[country]"))
                    .billingAddressZip(req.getParameter("billing_address[zip]"))
                    .request();
            out.write("{\"forward\" : \"/ssp/subscription.jsp\"}");
        } catch (InvalidRequestException e) {
            handleInvalidRequestErrors(e, resp, out, null);
        } catch (Exception e) {
            handleGeneralErrors(e, resp, out);
        } finally {
            out.flush();
        }
    }

    /*
     * Updates the shipping address for the subscription in ChargeBee.
     */
    private void updateShippingAddress(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        response.setHeader("Content-Type", "application/json;charset=utf-8");
        PrintWriter out = response.getWriter();
        validateParameters(request);
        try {
            Subscription.update(getSubscriptionId(request))
                    .shippingAddressFirstName(request.getParameter("shipping_address[first_name]"))
                    .shippingAddressLastName(request.getParameter("shipping_address[last_name]"))
                    .shippingAddressLine1(request.getParameter("shipping_address[line1]"))
                    .shippingAddressLine2(request.getParameter("shipping_address[line2]"))
                    .shippingAddressCity(request.getParameter("shipping_address[city]"))
                    .shippingAddressState(request.getParameter("shipping_address[state]"))
                    .shippingAddressCountry(request.getParameter("shipping_address[country]"))
                    .shippingAddressZip(request.getParameter("shipping_address[zip]"))
                    .request();
            out.write("{ \"forward\" : \"/ssp/subscription.jsp\" }");
        } catch (InvalidRequestException e) {
            handleInvalidRequestErrors(e, response, out, null);
        } catch (Exception e) {
            handleGeneralErrors(e, response, out);
        } finally {
            out.flush();
        }
    }

    /*
     * Reactivates the subscription from cancelled/non-renewing state to
     * active state.
     */
    private void subscriptionReactivate(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        response.setHeader("Content-Type", "application/json;charset=utf-8");
        PrintWriter out = response.getWriter();
        try {
            Subscription.reactivate(getSubscriptionId(request))
                    .request();
            out.write("{ \"forward\" : \"/ssp/subscription.jsp\" }");
        } catch (InvalidRequestException e) {
            // NOTE(review): other handlers call handleInvalidRequestErrors(e, resp,
            // out, null) — confirm handleInvalidErrors exists in ErrorHandler and
            // is the intended variant here.
            handleInvalidErrors(e, response, out);
        } catch (Exception e) {
            handleGeneralErrors(e, response, out);
        } finally {
            out.flush();
        }
    }

    /*
     * Cancels the subscription, either immediately or at the end of the
     * current term when cancel_status is "cancel_on_next_renewal".
     */
    private void subscriptionCancel(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        String cancelStatus = request.getParameter("cancel_status");
        Subscription.CancelRequest subscriptionCancelParam = Subscription
                .cancel(getSubscriptionId(request));
        if ("cancel_on_next_renewal".equals(cancelStatus)) {
            subscriptionCancelParam.endOfTerm(Boolean.TRUE);
        }
        subscriptionCancelParam.request();
        response.sendRedirect("/ssp/subscription.jsp");
    }

    /*
     * Verifies the subscription id is present in ChargeBee and, if so, stores
     * subscription and customer ids in the session. Returns false on a
     * missing/unknown id; rethrows any other API error.
     */
    private boolean fetchSubscription(HttpServletRequest request) throws IOException {
        try {
            String username = request.getParameter("subscription_id");
            if (username == null || username.isEmpty()) {
                return false;
            }
            Result result = Subscription.retrieve(username).request();
            HttpSession session = request.getSession();
            session.setAttribute("subscription_id",
                    result.subscription().id());
            session.setAttribute("customer_id",
                    result.customer().id());
            return true;
        } catch (InvalidRequestException ex) {
            if ("resource_not_found".equals(ex.apiErrorCode)) {
                return false;
            }
            throw ex;
        }
    }

    /*
     * Returns the shipping address if it is found in ChargeBee, null when the
     * subscription has none.
     */
    public static Address getShippingAddress(String subscriptionId) throws IOException {
        try {
            Result result = Address.retrieve().label("shipping_address")
                    .subscriptionId(subscriptionId).request();
            return result.address();
        } catch (APIException e) {
            if (!e.code.equals("resource_not_found")) {
                throw e;
            }
            return null;
        }
    }

    public static String countryCodeFilePath() {
        return "ssp/country_code.txt";
    }

    /*
     * Reads the country name -> code mapping from the given file. Each line
     * holds ":"-separated "name,code" entries; malformed entries are skipped.
     */
    public static Map<String, String> getCountryCode(String path) throws Exception {
        BufferedReader bufferedReader = null;
        File file = new File(path);
        System.out.println("Reading Country codes from file " + file.getAbsolutePath());
        // Fix: use the parameterized type instead of the raw HashMap.
        Map<String, String> m = new HashMap<String, String>();
        try {
            bufferedReader = new BufferedReader(new FileReader(file));
            String currentLine;
            while ((currentLine = bufferedReader.readLine()) != null) {
                String[] line = currentLine.split(":");
                for (String countryCode : line) {
                    String[] cc = countryCode.split(",");
                    if (cc.length == 2) {
                        m.put(cc[0], cc[1]);
                    }
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            if (bufferedReader != null) {
                try {
                    bufferedReader.close();
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        }
        return m;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util;
import java.util.Arrays;
import java.util.List;
import opennlp.tools.ml.model.MaxentModel;
/**
* Performs k-best search over sequence. This is based on the description in
* Ratnaparkhi (1998), PhD diss, Univ. of Pennsylvania.
*
* @see Sequence
* @see SequenceValidator
* @see BeamSearchContextGenerator
*/
public class BeamSearch<T> {

  private static final Object[] EMPTY_ADDITIONAL_CONTEXT = new Object[0];

  /** Beam width (k): number of candidate sequences kept at each step. */
  protected int size;
  protected BeamSearchContextGenerator<T> cg;
  protected MaxentModel model;
  private SequenceValidator<T> validator;

  /** Scratch buffer reused by model.eval to avoid a per-call allocation. */
  private double[] probs;
  private Cache contextsCache;

  /** Log-space lower bound used when callers do not supply a minimum score. */
  private static final int zeroLog = -100000;

  /**
   * Creates new search object.
   *
   * @param size The size of the beam (k).
   * @param cg the context generator for the model.
   * @param model the model for assigning probabilities to the sequence outcomes.
   */
  public BeamSearch(int size, BeamSearchContextGenerator<T> cg, MaxentModel model) {
    this(size, cg, model, null, 0);
  }

  public BeamSearch(int size, BeamSearchContextGenerator<T> cg, MaxentModel model,
      int cacheSize) {
    this(size, cg, model, null, cacheSize);
  }

  public BeamSearch(int size, BeamSearchContextGenerator<T> cg, MaxentModel model,
      SequenceValidator<T> validator, int cacheSize) {

    this.size = size;
    this.cg = cg;
    this.model = model;
    this.validator = validator;

    if (cacheSize > 0) {
      contextsCache = new Cache(cacheSize);
    }

    this.probs = new double[model.getNumOutcomes()];
  }

  /**
   * Note:
   * This method will be private in the future because clients can now
   * pass a validator to validate the sequence.
   *
   * A null validator accepts every sequence.
   *
   * @see SequenceValidator
   */
  private boolean validSequence(int i, T[] inputSequence, String[] outcomesSequence, String outcome) {
    if (validator != null) {
      return validator.validSequence(i, inputSequence, outcomesSequence, outcome);
    }
    else {
      return true;
    }
  }

  public Sequence[] bestSequences(int numSequences, T[] sequence, Object[] additionalContext) {
    return bestSequences(numSequences, sequence, additionalContext, zeroLog);
  }

  /**
   * Returns the best sequence of outcomes based on model for this object.
   *
   * @param numSequences The maximum number of sequences to be returned.
   * @param sequence The input sequence.
   * @param additionalContext An Object[] of additional context. This is passed to the
   *          context generator blindly with the assumption that the context are appropriate.
   * @param minSequenceScore A lower bound on the score of a returned sequence.
   * @return An array of the top ranked sequences of outcomes.
   */
  public Sequence[] bestSequences(int numSequences, T[] sequence, Object[] additionalContext, double minSequenceScore) {

    Heap<Sequence> prev = new ListHeap<Sequence>(size);
    Heap<Sequence> next = new ListHeap<Sequence>(size);
    Heap<Sequence> tmp;
    prev.add(new Sequence());

    if (additionalContext == null) {
      additionalContext = EMPTY_ADDITIONAL_CONTEXT;
    }

    for (int i = 0; i < sequence.length; i++) {
      int sz = Math.min(size, prev.size());

      for (int sc = 0; prev.size() > 0 && sc < sz; sc++) {
        Sequence top = prev.extract();
        List<String> tmpOutcomes = top.getOutcomes();
        String[] outcomes = tmpOutcomes.toArray(new String[tmpOutcomes.size()]);
        String[] contexts = cg.getContext(i, sequence, outcomes, additionalContext);
        double[] scores;
        if (contextsCache != null) {
          scores = (double[]) contextsCache.get(contexts);
          if (scores == null) {
            scores = model.eval(contexts, probs);
            // Fix: eval fills and returns the shared probs buffer, which is
            // overwritten by the next eval call — cache a defensive copy so
            // cached entries are not silently corrupted.
            contextsCache.put(contexts, scores.clone());
          }
        }
        else {
          scores = model.eval(contexts, probs);
        }

        // Threshold: only the top "size" outcomes advance the beam.
        double[] sortedScores = Arrays.copyOf(scores, scores.length);
        Arrays.sort(sortedScores);
        double min = sortedScores[Math.max(0, scores.length - size)];

        for (int p = 0; p < scores.length; p++) {
          if (scores[p] < min)
            continue; //only advance first "size" outcomes
          String out = model.getOutcome(p);
          if (validSequence(i, sequence, outcomes, out)) {
            Sequence ns = new Sequence(top, out, scores[p]);
            if (ns.getScore() > minSequenceScore) {
              next.add(ns);
            }
          }
        }

        if (next.size() == 0) {//if no advanced sequences, advance all valid
          for (int p = 0; p < scores.length; p++) {
            String out = model.getOutcome(p);
            if (validSequence(i, sequence, outcomes, out)) {
              Sequence ns = new Sequence(top, out, scores[p]);
              if (ns.getScore() > minSequenceScore) {
                next.add(ns);
              }
            }
          }
        }
      }

      // make prev = next; and re-init next (we reuse existing prev set once we clear it)
      prev.clear();
      tmp = prev;
      prev = next;
      next = tmp;
    }

    int numSeq = Math.min(numSequences, prev.size());
    Sequence[] topSequences = new Sequence[numSeq];

    for (int seqIndex = 0; seqIndex < numSeq; seqIndex++) {
      topSequences[seqIndex] = prev.extract();
    }

    return topSequences;
  }

  /**
   * Returns the best sequence of outcomes based on model for this object.
   *
   * @param sequence The input sequence.
   * @param additionalContext An Object[] of additional context. This is passed to the
   *          context generator blindly with the assumption that the context are appropriate.
   *
   * @return The top ranked sequence of outcomes or null if no sequence could be found
   */
  public Sequence bestSequence(T[] sequence, Object[] additionalContext) {
    // Fix: idiomatic Java array declaration (was C-style "Sequence sequences[]").
    Sequence[] sequences = bestSequences(1, sequence, additionalContext, zeroLog);

    if (sequences.length > 0)
      return sequences[0];
    else
      return null;
  }
}
| |
package net.md_5.bungee.event;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import java.util.logging.Logger;
public class EventBus
{

    // Event class -> priority -> listener instance -> that listener's handler
    // methods for the event. Mutated only under the write lock.
    private final Map<Class<?>, Map<Byte, Map<Object, Method[]>>> byListenerAndPriority = new HashMap<>();
    // Flattened, priority-ordered dispatch table per event class, rebuilt by
    // bakeHandlers() so post() can iterate a plain array under the read lock.
    private final Map<Class<?>, EventHandlerMethod[]> byEventBaked = new HashMap<>();
    private final ReadWriteLock lock = new ReentrantReadWriteLock();
    private final Logger logger;

    public EventBus()
    {
        this( null );
    }

    /**
     * Creates an event bus which reports handler failures to the given logger,
     * or to {@link Logger#getGlobal()} when {@code logger} is null.
     */
    public EventBus(Logger logger)
    {
        this.logger = ( logger == null ) ? Logger.getGlobal() : logger;
    }

    /**
     * Dispatches {@code event} to every baked handler registered for its exact
     * class (no superclass lookup is performed). Exceptions thrown by a handler
     * body are logged; reflective access/argument failures indicate a bug and
     * are escalated as {@link Error}.
     */
    public void post(Object event)
    {
        lock.readLock().lock();
        try
        {
            EventHandlerMethod[] handlers = byEventBaked.get( event.getClass() );
            if ( handlers != null )
            {
                for ( EventHandlerMethod method : handlers )
                {
                    try
                    {
                        method.invoke( event );
                    } catch ( IllegalAccessException ex )
                    {
                        throw new Error( "Method became inaccessible: " + event, ex );
                    } catch ( IllegalArgumentException ex )
                    {
                        throw new Error( "Method rejected target/argument: " + event, ex );
                    } catch ( InvocationTargetException ex )
                    {
                        // A handler threw: log the underlying cause and keep dispatching.
                        logger.log( Level.WARNING, MessageFormat.format( "Error dispatching event {0} to listener {1}", event, method.getListener() ), ex.getCause() );
                    }
                }
            }
        } finally
        {
            lock.readLock().unlock();
        }
    }

    /**
     * Scans {@code listener}'s declared methods (inherited methods are
     * intentionally not scanned) for {@link EventHandler} annotations and
     * groups them as event class -> priority -> methods. Methods that do not
     * take exactly one argument are skipped with a log message.
     */
    private Map<Class<?>, Map<Byte, Set<Method>>> findHandlers(Object listener)
    {
        Map<Class<?>, Map<Byte, Set<Method>>> handler = new HashMap<>();
        for ( Method m : listener.getClass().getDeclaredMethods() )
        {
            EventHandler annotation = m.getAnnotation( EventHandler.class );
            if ( annotation != null )
            {
                Class<?>[] params = m.getParameterTypes();
                if ( params.length != 1 )
                {
                    logger.log( Level.INFO, "Method {0} in class {1} annotated with {2} does not have single argument", new Object[]
                    {
                        m, listener.getClass(), annotation
                    } );
                    continue;
                }
                Map<Byte, Set<Method>> prioritiesMap = handler.get( params[0] );
                if ( prioritiesMap == null )
                {
                    prioritiesMap = new HashMap<>();
                    handler.put( params[0], prioritiesMap );
                }
                Set<Method> priority = prioritiesMap.get( annotation.priority() );
                if ( priority == null )
                {
                    priority = new HashSet<>();
                    prioritiesMap.put( annotation.priority(), priority );
                }
                priority.add( m );
            }
        }
        return handler;
    }

    /**
     * Registers all {@link EventHandler} methods of {@code listener} and
     * rebakes the dispatch tables of every affected event class.
     */
    public void register(Object listener)
    {
        Map<Class<?>, Map<Byte, Set<Method>>> handler = findHandlers( listener );
        lock.writeLock().lock();
        try
        {
            for ( Map.Entry<Class<?>, Map<Byte, Set<Method>>> e : handler.entrySet() )
            {
                Map<Byte, Map<Object, Method[]>> prioritiesMap = byListenerAndPriority.get( e.getKey() );
                if ( prioritiesMap == null )
                {
                    prioritiesMap = new HashMap<>();
                    byListenerAndPriority.put( e.getKey(), prioritiesMap );
                }
                for ( Map.Entry<Byte, Set<Method>> entry : e.getValue().entrySet() )
                {
                    Map<Object, Method[]> currentPriorityMap = prioritiesMap.get( entry.getKey() );
                    if ( currentPriorityMap == null )
                    {
                        currentPriorityMap = new HashMap<>();
                        prioritiesMap.put( entry.getKey(), currentPriorityMap );
                    }
                    Method[] baked = new Method[ entry.getValue().size() ];
                    currentPriorityMap.put( listener, entry.getValue().toArray( baked ) );
                }
                bakeHandlers( e.getKey() );
            }
        } finally
        {
            lock.writeLock().unlock();
        }
    }

    /**
     * Removes all of {@code listener}'s handlers and rebakes the dispatch
     * tables of every affected event class, pruning now-empty maps.
     */
    public void unregister(Object listener)
    {
        Map<Class<?>, Map<Byte, Set<Method>>> handler = findHandlers( listener );
        lock.writeLock().lock();
        try
        {
            for ( Map.Entry<Class<?>, Map<Byte, Set<Method>>> e : handler.entrySet() )
            {
                Map<Byte, Map<Object, Method[]>> prioritiesMap = byListenerAndPriority.get( e.getKey() );
                if ( prioritiesMap != null )
                {
                    for ( Byte priority : e.getValue().keySet() )
                    {
                        Map<Object, Method[]> currentPriority = prioritiesMap.get( priority );
                        if ( currentPriority != null )
                        {
                            currentPriority.remove( listener );
                            if ( currentPriority.isEmpty() )
                            {
                                prioritiesMap.remove( priority );
                            }
                        }
                    }
                    if ( prioritiesMap.isEmpty() )
                    {
                        byListenerAndPriority.remove( e.getKey() );
                    }
                }
                bakeHandlers( e.getKey() );
            }
        } finally
        {
            lock.writeLock().unlock();
        }
    }

    /**
     * Shouldn't be called without first locking the writeLock; intended for use
     * only inside {@link #register(java.lang.Object) register(Object)} or
     * {@link #unregister(java.lang.Object) unregister(Object)}.
     */
    private void bakeHandlers(Class<?> eventClass)
    {
        Map<Byte, Map<Object, Method[]>> handlersByPriority = byListenerAndPriority.get( eventClass );
        if ( handlersByPriority != null )
        {
            List<EventHandlerMethod> handlersList = new ArrayList<>( handlersByPriority.size() * 2 );
            // Either I'm really tired, or the only way we can iterate between Byte.MIN_VALUE and Byte.MAX_VALUE inclusively,
            // with only a byte on the stack is by using a do {} while() format loop.
            byte value = Byte.MIN_VALUE;
            do
            {
                Map<Object, Method[]> handlersByListener = handlersByPriority.get( value );
                if ( handlersByListener != null )
                {
                    for ( Map.Entry<Object, Method[]> listenerHandlers : handlersByListener.entrySet() )
                    {
                        for ( Method method : listenerHandlers.getValue() )
                        {
                            EventHandlerMethod ehm = new EventHandlerMethod( listenerHandlers.getKey(), method );
                            handlersList.add( ehm );
                        }
                    }
                }
            } while ( value++ < Byte.MAX_VALUE );
            byEventBaked.put( eventClass, handlersList.toArray( new EventHandlerMethod[ handlersList.size() ] ) );
        } else
        {
            // Fix: drop the entry entirely instead of storing a null value.
            // put(eventClass, null) kept the stale key alive forever after the
            // last listener unregistered (a slow map-entry leak); post() treats
            // a missing key and a null value identically, so behavior is unchanged.
            byEventBaked.remove( eventClass );
        }
    }
}
| |
/**
*
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "[]"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright 2016 Alibaba Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.taobao.weex.utils;
import android.text.TextUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.taobao.weex.bridge.JSCallback;
import com.taobao.weex.bridge.JSCallbackCreator;
import com.taobao.weex.bridge.WXModuleManager;
import com.taobao.weex.common.IWXObject;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.math.BigDecimal;
public class WXReflectionUtils {

    /**
     * Coerces {@code value} to the requested parameter type.
     * <p>
     * Strings are passed through or JSON-serialized; primitive numeric targets
     * are converted via WXUtils; anything else is round-tripped through JSON.
     */
    public static Object parseArgument(Type paramClazz, Object value) {
        if (paramClazz == String.class) {
            return value instanceof String ? value : JSON.toJSONString(value);
        } else if (paramClazz == int.class) {
            // Fix: the old test value.getClass().isAssignableFrom(int.class) was always
            // false (a boxed value's class is never assignable from a primitive class)
            // and NPE'd on null. instanceof expresses the intent: pass through values
            // that already have the right type, convert everything else.
            return value instanceof Integer ? value : WXUtils.getInt(value);
        } else if (paramClazz == long.class) {
            return value instanceof Long ? value : WXUtils.getLong(value);
        } else if (paramClazz == double.class) {
            return value instanceof Double ? value : WXUtils.getDouble(value);
        } else if (paramClazz == float.class) {
            return value instanceof Float ? value : WXUtils.getFloat(value);
        } else {
            return JSON.parseObject(value instanceof String ? (String) value : JSON.toJSONString(value), paramClazz);
        }
    }

    /**
     * Builds the argument array for a reflective module-method invocation.
     *
     * @param paramClazzs declared parameter types of the target method
     * @param args        arguments supplied by the JS side
     * @param creator     factory turning a callback id string into a JSCallback
     * @return one converted argument per declared parameter
     * @throws Exception if a required primitive argument is missing or a
     *                   callback parameter is not a string id
     */
    public static Object[] prepareArguments(Type[] paramClazzs, JSONArray args, JSCallbackCreator creator) throws Exception {
        Object[] params = new Object[paramClazzs.length];
        Object value;
        Type paramClazz;
        for (int i = 0; i < paramClazzs.length; i++) {
            paramClazz = paramClazzs[i];
            if (i >= args.size()) {
                // Fix: the old check paramClazz.getClass().isPrimitive() inspected the
                // Type implementation's own class (never primitive), so it was always
                // false and a missing primitive argument silently became null, failing
                // later at invoke time. Check the declared parameter type itself.
                boolean primitive = paramClazz instanceof Class && ((Class<?>) paramClazz).isPrimitive();
                if (!primitive) {
                    // Trailing optional reference-typed parameter: pass null.
                    params[i] = null;
                    continue;
                } else {
                    throw new Exception("[prepareArguments] method argument list not match.");
                }
            }
            value = args.get(i);
            if (paramClazz == JSONObject.class) {
                params[i] = value;
            } else if (JSCallback.class == paramClazz) {
                if (value instanceof String) {
                    // JS passes callbacks as id strings; wrap into a real callback.
                    params[i] = creator.create((String) value);
                } else {
                    throw new Exception("Parameter type not match.");
                }
            } else {
                params[i] = WXReflectionUtils.parseArgument(paramClazz, value);
            }
        }
        return params;
    }

    /**
     * Best-effort assignment of {@code value} to the field named
     * {@code fieldName} on {@code obj} (searching superclasses), converting
     * common scalar types to match the field's declared type. Failures are
     * deliberately swallowed: this is a convenience setter, not a validator.
     */
    public static void setValue(Object obj, String fieldName, Object value) {
        if (obj == null || TextUtils.isEmpty(fieldName)) {
            return;
        }
        try {
            Field field = getDeclaredField(obj, fieldName);
            Object realValue = value;
            // Convert number-ish inputs to the field's declared scalar type.
            if (value instanceof BigDecimal || value instanceof Number || value instanceof String) {
                if (field.getType() == Float.class || field.getType() == float.class) {
                    realValue = Float.parseFloat(value.toString());
                } else if (field.getType() == Double.class || field.getType() == double.class) {
                    realValue = Double.parseDouble(value.toString());
                } else if (field.getType() == Integer.class || field.getType() == int.class) {
                    // Parse as double first so inputs like "1.0" still yield an int.
                    realValue = (int) Double.parseDouble(value.toString());
                } else if (field.getType() == Boolean.class || field.getType() == boolean.class) {
                    realValue = Boolean.valueOf(value.toString());
                }
            }
            // Booleans additionally accept any non-null value via toString().
            if (field.getType() == boolean.class || field.getType() == Boolean.class) {
                if (value != null) {
                    realValue = Boolean.valueOf(value.toString());
                }
            }
            setProperty(obj, field, realValue);
        } catch (Exception e) {
            // Intentionally best-effort: missing field or bad conversion is ignored.
            return;
        }
    }

    /**
     * Returns the first field named {@code fieldName} found on the object's
     * class or any superclass (excluding Object), or null if none exists.
     */
    public static Field getDeclaredField(Object object, String fieldName) {
        Field field = null;
        Class<?> clazz = object.getClass();
        for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
            try {
                field = clazz.getDeclaredField(fieldName);
                return field;
            } catch (Exception e) {
                // Not declared on this class; keep walking up the hierarchy.
            }
        }
        return null;
    }

    /**
     * Set property(field) of the specified object.
     *
     * @param bean  The object which has the given property
     * @param field The field to be set
     * @param value The value to be set to the field
     * @throws IllegalAccessException
     * @throws InvocationTargetException
     * @throws NoSuchMethodException
     */
    public static void setProperty(Object bean, Field field, Object value) throws IllegalAccessException,
            InvocationTargetException,
            NoSuchMethodException {
        if (bean == null || field == null) {
            return;
        }
        try {
            field.setAccessible(true);
            field.set(bean, value);
        } catch (Exception e) {
            // Intentionally best-effort; the declared throws are kept for API
            // compatibility even though failures are currently swallowed here.
        }
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.codedeploy.model;
import java.io.Serializable;
/**
* <p>
* Information about an on-premises instance tag filter.
* </p>
*/
public class TagFilter implements Serializable, Cloneable {

    /** The on-premises instance tag filter key. */
    private String key;

    /** The on-premises instance tag filter value. */
    private String value;

    /**
     * The on-premises instance tag filter type: KEY_ONLY (key only),
     * VALUE_ONLY (value only), or KEY_AND_VALUE (key and value).
     */
    private String type;

    /**
     * Sets the on-premises instance tag filter key.
     *
     * @param key
     *        The on-premises instance tag filter key.
     */
    public void setKey(String key) {
        this.key = key;
    }

    /**
     * Returns the on-premises instance tag filter key.
     *
     * @return The on-premises instance tag filter key.
     */
    public String getKey() {
        return this.key;
    }

    /**
     * Sets the on-premises instance tag filter key.
     *
     * @param key
     *        The on-premises instance tag filter key.
     * @return This object, for method chaining.
     */
    public TagFilter withKey(String key) {
        this.key = key;
        return this;
    }

    /**
     * Sets the on-premises instance tag filter value.
     *
     * @param value
     *        The on-premises instance tag filter value.
     */
    public void setValue(String value) {
        this.value = value;
    }

    /**
     * Returns the on-premises instance tag filter value.
     *
     * @return The on-premises instance tag filter value.
     */
    public String getValue() {
        return this.value;
    }

    /**
     * Sets the on-premises instance tag filter value.
     *
     * @param value
     *        The on-premises instance tag filter value.
     * @return This object, for method chaining.
     */
    public TagFilter withValue(String value) {
        this.value = value;
        return this;
    }

    /**
     * Sets the on-premises instance tag filter type: KEY_ONLY, VALUE_ONLY, or
     * KEY_AND_VALUE.
     *
     * @param type
     *        The on-premises instance tag filter type.
     * @see TagFilterType
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Returns the on-premises instance tag filter type: KEY_ONLY, VALUE_ONLY,
     * or KEY_AND_VALUE.
     *
     * @return The on-premises instance tag filter type.
     * @see TagFilterType
     */
    public String getType() {
        return this.type;
    }

    /**
     * Sets the on-premises instance tag filter type: KEY_ONLY, VALUE_ONLY, or
     * KEY_AND_VALUE.
     *
     * @param type
     *        The on-premises instance tag filter type.
     * @return This object, for method chaining.
     * @see TagFilterType
     */
    public TagFilter withType(String type) {
        this.type = type;
        return this;
    }

    /**
     * Sets the on-premises instance tag filter type from its enum form.
     *
     * @param type
     *        The on-premises instance tag filter type.
     * @see TagFilterType
     */
    public void setType(TagFilterType type) {
        this.type = type.toString();
    }

    /**
     * Sets the on-premises instance tag filter type from its enum form.
     *
     * @param type
     *        The on-premises instance tag filter type.
     * @return This object, for method chaining.
     * @see TagFilterType
     */
    public TagFilter withType(TagFilterType type) {
        setType(type);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format is fixed: "{Key: k,Value: v,Type: t}" with each part
        // present only when non-null.
        StringBuilder sb = new StringBuilder("{");
        if (key != null) {
            sb.append("Key: ").append(key).append(",");
        }
        if (value != null) {
            sb.append("Value: ").append(value).append(",");
        }
        if (type != null) {
            sb.append("Type: ").append(type);
        }
        return sb.append("}").toString();
    }

    /** Null-safe equality check used by {@link #equals(Object)}. */
    private static boolean eq(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof TagFilter)) {
            return false;
        }
        TagFilter that = (TagFilter) obj;
        return eq(this.getKey(), that.getKey())
                && eq(this.getValue(), that.getValue())
                && eq(this.getType(), that.getType());
    }

    @Override
    public int hashCode() {
        // Standard 31-based combination over (key, value, type), nulls as 0.
        int hashCode = 1;
        for (Object part : new Object[] { getKey(), getValue(), getType() }) {
            hashCode = 31 * hashCode + ((part == null) ? 0 : part.hashCode());
        }
        return hashCode;
    }

    @Override
    public TagFilter clone() {
        try {
            return (TagFilter) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
package integeruser.jgltut.tut08;
import integeruser.jgltut.Tutorial;
import integeruser.jgltut.framework.Framework;
import integeruser.jgltut.framework.Mesh;
import integeruser.jgltut.framework.Timer;
import org.joml.Matrix4f;
import org.joml.MatrixStackf;
import org.joml.Quaternionf;
import org.joml.Vector4f;
import java.util.ArrayList;
import static org.lwjgl.glfw.GLFW.*;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL20.*;
/**
* Visit https://github.com/integeruser/jgltut for info and updates.
* Original: https://bitbucket.org/alfonse/gltut/src/default/Tut%2008%20Getting%20Oriented/Interpolation.cpp
* <p>
* Part II. Positioning
* Chapter 8. Getting Oriented
* <p>
* SPACE - toggles between regular linear interpolation and slerp.
* Q,W,E,R,T,Y,U - cause the ship to interpolate to a new orientation.
*/
public class Interpolation extends Tutorial {
public static void main(String[] args) {
Framework.CURRENT_TUTORIAL_DATAPATH = "/integeruser/jgltut/tut08/data/";
new Interpolation().start(500, 500);
}
@Override
protected void init() {
initializeProgram();
ship = new Mesh("Ship.xml");
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glFrontFace(GL_CW);
glEnable(GL_DEPTH_TEST);
glDepthMask(true);
glDepthFunc(GL_LEQUAL);
glDepthRange(0.0f, 1.0f);
glfwSetKeyCallback(window, (window, key, scancode, action, mods) -> {
if (action == GLFW_PRESS) {
for (int orientIndex = 0; orientIndex < orientKeys.length; orientIndex++) {
if (key == orientKeys[orientIndex]) {
applyOrientation(orientIndex);
break;
}
}
switch (key) {
case GLFW_KEY_SPACE:
boolean slerp = orient.toggleSlerp();
System.out.printf(slerp ? "Slerp\n" : "Lerp\n");
break;
case GLFW_KEY_ESCAPE:
glfwSetWindowShouldClose(window, true);
break;
}
}
});
}
@Override
protected void display() {
orient.updateTime();
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClearDepth(1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
MatrixStackf currMatrix = new MatrixStackf();
currMatrix.translate(0.0f, 0.0f, -200.0f);
currMatrix.mul(orient.getOrient().get(new Matrix4f()));
glUseProgram(theProgram);
currMatrix.scale(3.0f, 3.0f, 3.0f);
currMatrix.rotateX((float) Math.toRadians(-90.0f));
// Set the base color for this object.
glUniform4f(baseColorUnif, 1.0f, 1.0f, 1.0f, 1.0f);
glUniformMatrix4fv(modelToCameraMatrixUnif, false, currMatrix.get(mat4Buffer));
ship.render("tint");
glUseProgram(0);
}
@Override
protected void reshape(int w, int h) {
cameraToClipMatrix.m00(frustumScale * (h / (float) w));
cameraToClipMatrix.m11(frustumScale);
glUseProgram(theProgram);
glUniformMatrix4fv(cameraToClipMatrixUnif, false, cameraToClipMatrix.get(mat4Buffer));
glUseProgram(0);
glViewport(0, 0, w, h);
}
@Override
protected void update() {
}
////////////////////////////////
private int theProgram;
private int modelToCameraMatrixUnif;
private int cameraToClipMatrixUnif;
private int baseColorUnif;
private Matrix4f cameraToClipMatrix = new Matrix4f();
private final float frustumScale = calcFrustumScale(20.0f);
private void initializeProgram() {
ArrayList<Integer> shaderList = new ArrayList<>();
shaderList.add(Framework.loadShader(GL_VERTEX_SHADER, "PosColorLocalTransform.vert"));
shaderList.add(Framework.loadShader(GL_FRAGMENT_SHADER, "ColorMultUniform.frag"));
theProgram = Framework.createProgram(shaderList);
modelToCameraMatrixUnif = glGetUniformLocation(theProgram, "modelToCameraMatrix");
cameraToClipMatrixUnif = glGetUniformLocation(theProgram, "cameraToClipMatrix");
baseColorUnif = glGetUniformLocation(theProgram, "baseColor");
float zNear = 1.0f;
float zFar = 600.0f;
cameraToClipMatrix.m00(frustumScale);
cameraToClipMatrix.m11(frustumScale);
cameraToClipMatrix.m22((zFar + zNear) / (zNear - zFar));
cameraToClipMatrix.m23(-1.0f);
cameraToClipMatrix.m32((2 * zFar * zNear) / (zNear - zFar));
glUseProgram(theProgram);
glUniformMatrix4fv(cameraToClipMatrixUnif, false, cameraToClipMatrix.get(mat4Buffer));
glUseProgram(0);
}
private float calcFrustumScale(float fovDeg) {
float fovRad = (float) Math.toRadians(fovDeg);
return (float) (1.0f / Math.tan(fovRad / 2.0f));
}
////////////////////////////////
private Mesh ship;
private Quaternionf orients[] = {
new Quaternionf(0.7071f, 0.0f, 0.0f, 0.7071f),
new Quaternionf(0.5f, -0.5f, 0.5f, 0.5f),
new Quaternionf(-0.7892f, -0.3700f, -0.02514f, -0.4895f),
new Quaternionf(0.7892f, 0.3700f, 0.02514f, 0.4895f),
new Quaternionf(-0.1591f, -0.7991f, -0.4344f, 0.3840f),
new Quaternionf(0.5208f, 0.6483f, 0.0410f, 0.5537f),
new Quaternionf(0.0f, 1.0f, 0.0f, 0.0f)
};
private int orientKeys[] = {
GLFW_KEY_Q,
GLFW_KEY_W,
GLFW_KEY_E,
GLFW_KEY_R,
GLFW_KEY_T,
GLFW_KEY_Y,
GLFW_KEY_U
};
private Orientation orient = new Orientation();
/**
 * Tracks the ship's settled orientation and animates transitions between
 * entries of {@code orients}, using either spherical (slerp) or normalized
 * linear (lerp) quaternion interpolation.
 */
private class Orientation {
    boolean isAnimating;  // true while a transition towards a new orientation is running
    boolean slerp;        // interpolation mode: true = slerp, false = lerp
    int currOrientIndex;  // index into orients[] of the last settled orientation
    Animation anim = new Animation();

    /** A single in-flight transition towards a destination orientation. */
    class Animation {
        int finalOrientIndex;  // index into orients[] being animated towards
        Timer currTimer;       // single-shot timer driving the interpolation alpha

        // Advances the timer by the frame's elapsed time; returns true once
        // the animation duration has been fully consumed.
        boolean updateTime() {
            return currTimer.update(elapsedTime);
        }

        // Begins a new animation towards orients[destinationIndex] lasting
        // `duration` (same time unit as elapsedTime / Timer).
        void startAnimation(int destinationIndex, float duration) {
            finalOrientIndex = destinationIndex;
            currTimer = new Timer(Timer.Type.SINGLE, duration);
        }

        // Interpolated orientation between `initial` and the destination at
        // the timer's current alpha, using slerp or lerp as requested.
        Quaternionf getOrient(Quaternionf initial, boolean slerp) {
            if (slerp) {
                return slerp(initial, orients[finalOrientIndex], currTimer.getAlpha());
            } else {
                return lerp(initial, orients[finalOrientIndex], currTimer.getAlpha());
            }
        }

        int getFinalIndex() {
            return finalOrientIndex;
        }
    }

    // Per-frame update: advances the running animation and, when it
    // finishes, commits the destination as the new settled orientation.
    void updateTime() {
        if (isAnimating) {
            boolean isFinished = anim.updateTime();
            if (isFinished) {
                isAnimating = false;
                currOrientIndex = anim.getFinalIndex();
            }
        }
    }

    // Starts a one-second animation to the given preset; a request for the
    // current orientation is a no-op.
    void animateToOrient(int destinationIndex) {
        if (currOrientIndex == destinationIndex) return;
        anim.startAnimation(destinationIndex, 1.0f);
        isAnimating = true;
    }

    // Flips between slerp and lerp; returns the new mode.
    boolean toggleSlerp() {
        slerp = !slerp;
        return slerp;
    }

    // Current effective orientation: interpolated while animating,
    // otherwise the settled preset.
    Quaternionf getOrient() {
        if (isAnimating) {
            return anim.getOrient(orients[currOrientIndex], slerp);
        } else {
            return orients[currOrientIndex];
        }
    }

    boolean isAnimating() {
        return isAnimating;
    }
}
/**
 * Requests an animated transition to the given preset orientation.
 * Requests arriving while an animation is already running are ignored.
 *
 * @param orientationIndex index into {@code orients}
 */
private void applyOrientation(int orientationIndex) {
    // Guard clause: never interrupt an in-flight animation.
    if (orient.isAnimating()) {
        return;
    }
    orient.animateToOrient(orientationIndex);
}
/**
 * Spherical linear interpolation between two unit quaternions.
 *
 * @param v0    start orientation (alpha == 0)
 * @param v1    end orientation (alpha == 1)
 * @param alpha interpolation factor in [0, 1]
 * @return the interpolated orientation
 */
private Quaternionf slerp(Quaternionf v0, Quaternionf v1, float alpha) {
    // Near-parallel inputs: acos() is numerically unstable close to
    // dot == 1, so fall back to normalized lerp there.
    final float DOT_THRESHOLD = 0.9995f;
    float dot = v0.dot(v1);
    if (dot > DOT_THRESHOLD) return lerp(v0, v1, alpha);
    // Clamp into acos' domain to guard against floating-point drift.
    dot = Math.min(Math.max(dot, -1.0f), 1.0f);
    float theta_0 = (float) Math.acos(dot);  // full angle between v0 and v1
    float theta = theta_0 * alpha;           // angle traversed at this alpha
    // v2 = normalize(v1 - v0*dot): the component of v1 orthogonal to v0,
    // so {v0, v2} form an orthonormal basis of the interpolation plane.
    Vector4f p = vectorize(v0).mul(dot).negate();
    Vector4f v2 = vectorize(v1).add(p).normalize();
    // result = v0*cos(theta) + v2*sin(theta)
    Vector4f a = vectorize(v0).mul((float) Math.cos(theta));
    Vector4f b = new Vector4f(v2).mul((float) Math.sin(theta));
    Vector4f res = a.add(b);
    return new Quaternionf(res.x, res.y, res.z, res.w);
}
/**
 * Normalized linear interpolation ("nlerp") between two quaternions:
 * component-wise lerp followed by renormalization onto the unit sphere.
 * Unlike slerp the angular velocity is not constant, but the path is
 * the same great arc.
 *
 * @param v0    start orientation (alpha == 0)
 * @param v1    end orientation (alpha == 1)
 * @param alpha interpolation factor in [0, 1]
 * @return the normalized interpolated orientation
 */
private Quaternionf lerp(Quaternionf v0, Quaternionf v1, float alpha) {
    Vector4f start = vectorize(v0);
    Vector4f end = vectorize(v1);
    Vector4f interp = new Vector4f(start).lerp(end, alpha);
    // Renormalize in place: a straight-line blend of unit quaternions
    // generally leaves the unit sphere.
    interp.normalize();
    return new Quaternionf(interp.x, interp.y, interp.z, interp.w);
}
/**
 * Copies a quaternion's components into an equivalent {@link Vector4f}
 * so that plain vector arithmetic can be used on it.
 *
 * @param theQuat quaternion to convert
 * @return a fresh vector carrying (x, y, z, w)
 */
private Vector4f vectorize(Quaternionf theQuat) {
    return new Vector4f(theQuat.x, theQuat.y, theQuat.z, theQuat.w);
}
}
| |
/**
* Copyright 2015 Palantir Technologies
*
* Licensed under the BSD-3 License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.atlasdb.transaction.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.util.Map;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.base.Function;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.palantir.atlasdb.cleaner.NoOpCleaner;
import com.palantir.atlasdb.keyvalue.api.Cell;
import com.palantir.atlasdb.keyvalue.api.RangeRequest;
import com.palantir.atlasdb.keyvalue.api.RowResult;
import com.palantir.atlasdb.transaction.api.AtlasDbConstraintCheckingMode;
import com.palantir.atlasdb.transaction.api.ConflictHandler;
import com.palantir.atlasdb.transaction.api.Transaction;
import com.palantir.atlasdb.transaction.api.TransactionFailedRetriableException;
import com.palantir.atlasdb.transaction.api.TransactionManager;
import com.palantir.atlasdb.transaction.api.TransactionReadSentinelBehavior;
import com.palantir.atlasdb.transaction.api.TransactionSerializableConflictException;
import com.palantir.common.base.BatchingVisitables;
import com.palantir.common.base.Throwables;
import com.palantir.common.concurrent.PTExecutors;
import com.palantir.lock.LockRefreshToken;
/**
 * Shared test suite for serializable-isolation (SSI) conflict detection.
 * Each test seeds a small table, runs overlapping transactions, and asserts
 * that commits which would violate serializability (write skew, phantom
 * reads, read/write cycles) fail with
 * {@link TransactionSerializableConflictException}.
 */
public abstract class AbstractSerializableTransactionTest extends AbstractTransactionTest {

    @Override
    protected TransactionManager getManager() {
        // Full constraint checking plus a no-op cleaner: these tests only
        // exercise conflict detection, not sweeping or background cleanup.
        return new SerializableTransactionManager(
            keyValueService,
            timestampService,
            lockClient,
            lockService,
            transactionService,
            Suppliers.ofInstance(AtlasDbConstraintCheckingMode.FULL_CONSTRAINT_CHECKING_THROWS_EXCEPTIONS),
            conflictDetectionManager,
            SweepStrategyManagers.createDefault(keyValueService),
            NoOpCleaner.INSTANCE);
    }

    @Override
    protected Transaction startTransaction() {
        // The test table is configured SERIALIZABLE; the transactions table
        // itself must ignore conflicts so commit records can be written.
        ImmutableMap<String, ConflictHandler> tablesToWriteWrite = ImmutableMap.of(
            TEST_TABLE,
            ConflictHandler.SERIALIZABLE,
            TransactionConstants.TRANSACTION_TABLE,
            ConflictHandler.IGNORE_ALL);
        return new SerializableTransaction(
            keyValueService,
            lockService,
            timestampService,
            transactionService,
            NoOpCleaner.INSTANCE,
            Suppliers.ofInstance(timestampService.getFreshTimestamp()),
            ConflictDetectionManagers.fromMap(tablesToWriteWrite),
            SweepStrategyManagers.createDefault(keyValueService),
            0L,
            ImmutableList.<LockRefreshToken>of(),
            AtlasDbConstraintCheckingMode.NO_CONSTRAINT_CHECKING,
            null,
            TransactionReadSentinelBehavior.THROW_EXCEPTION,
            true) {
            @Override
            protected Map<Cell, byte[]> transformGetsForTesting(Map<Cell, byte[]> map) {
                // Clone returned values so a test mutating a read result
                // cannot corrupt the transaction's internal read set.
                return Maps.transformValues(map, new Function<byte[], byte[]>() {
                    @Override
                    public byte[] apply(byte[] input) {
                        return input.clone();
                    }
                });
            }
        };
    }

    /** Classic write skew via row reads; the later commit (t2) must fail. */
    @Test
    public void testClassicWriteSkew() {
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", "100");
        put(t0, "row2", "col1", "100");
        t0.commit();
        Transaction t1 = startTransaction();
        Transaction t2 = startTransaction();
        withdrawMoney(t1, true, false);
        withdrawMoney(t2, false, false);
        t1.commit();
        try {
            t2.commit();
            fail();
        } catch (TransactionSerializableConflictException e) {
            // expected: committing t2 would complete a write skew
        }
    }

    /** Same as above but with the commit order reversed: t1 must fail. */
    @Test
    public void testClassicWriteSkew2() {
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", "100");
        put(t0, "row2", "col1", "100");
        t0.commit();
        Transaction t1 = startTransaction();
        Transaction t2 = startTransaction();
        withdrawMoney(t1, true, false);
        withdrawMoney(t2, false, false);
        t2.commit();
        try {
            t1.commit();
            fail();
        } catch (TransactionSerializableConflictException e) {
            // expected: committing t1 would complete a write skew
        }
    }

    /**
     * Write skew with truly concurrent commits: the barrier guarantees both
     * transactions have read before either commits, so exactly one of the
     * two commits must fail (surfaced as TransactionFailedRetriableException).
     */
    @Test(expected=TransactionFailedRetriableException.class)
    public void testConcurrentWriteSkew() throws InterruptedException, BrokenBarrierException {
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", "100");
        put(t0, "row2", "col1", "100");
        t0.commit();
        final CyclicBarrier barrier = new CyclicBarrier(2);
        final Transaction t1 = startTransaction();
        // NOTE(review): exec is never shut down; harmless in tests but leaks a pool.
        ExecutorService exec = PTExecutors.newCachedThreadPool();
        Future<?> f = exec.submit( new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                withdrawMoney(t1, true, false);
                barrier.await();
                t1.commit();
                return null;
            }
        });
        Transaction t2 = startTransaction();
        withdrawMoney(t2, false, false);
        barrier.await();
        t2.commit();
        try {
            f.get();
            fail();
        } catch (ExecutionException e) {
            // Re-throw t1's conflict as the retriable exception the test expects.
            throw Throwables.rewrapAndThrowUncheckedException(e.getCause());
        }
    }

    /** Classic write skew via single-cell reads; the later commit must fail. */
    @Test
    public void testClassicWriteSkewCell() {
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", "100");
        put(t0, "row2", "col1", "100");
        t0.commit();
        Transaction t1 = startTransaction();
        Transaction t2 = startTransaction();
        withdrawMoney(t1, true, true);
        withdrawMoney(t2, false, true);
        t1.commit();
        try {
            t2.commit();
            fail();
        } catch (TransactionSerializableConflictException e) {
            // expected: committing t2 would complete a write skew
        }
    }

    /** Cell-read write skew with the commit order reversed: t1 must fail. */
    @Test
    public void testClassicWriteSkew2Cell() {
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", "100");
        put(t0, "row2", "col1", "100");
        t0.commit();
        Transaction t1 = startTransaction();
        Transaction t2 = startTransaction();
        withdrawMoney(t1, true, true);
        withdrawMoney(t2, false, true);
        t2.commit();
        try {
            t1.commit();
            fail();
        } catch (TransactionSerializableConflictException e) {
            // expected: committing t1 would complete a write skew
        }
    }

    /** Concurrent-commit variant of the cell-read write skew. */
    @Test(expected=TransactionFailedRetriableException.class)
    public void testConcurrentWriteSkewCell() throws InterruptedException, BrokenBarrierException {
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", "100");
        put(t0, "row2", "col1", "100");
        t0.commit();
        final CyclicBarrier barrier = new CyclicBarrier(2);
        final Transaction t1 = startTransaction();
        // NOTE(review): exec is never shut down; harmless in tests but leaks a pool.
        ExecutorService exec = PTExecutors.newCachedThreadPool();
        Future<?> f = exec.submit( new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                withdrawMoney(t1, true, true);
                barrier.await();
                t1.commit();
                return null;
            }
        });
        Transaction t2 = startTransaction();
        withdrawMoney(t2, false, true);
        barrier.await();
        t2.commit();
        try {
            f.get();
            fail();
        } catch (ExecutionException e) {
            throw Throwables.rewrapAndThrowUncheckedException(e.getCause());
        }
    }

    /**
     * Reads both account balances, withdraws 150 from one of them, and
     * asserts the combined balance stays non-negative -- the invariant that
     * write skew would silently break under plain snapshot isolation.
     *
     * @param t         transaction to operate in
     * @param account   true: withdraw from row1; false: withdraw from row2
     * @param isCellGet true: read via single-cell gets; false: row gets
     */
    private void withdrawMoney(Transaction t, boolean account, boolean isCellGet) {
        long account1 = Long.valueOf(isCellGet ? getCell(t, "row1", "col1") : get(t, "row1", "col1"));
        long account2 = Long.valueOf(isCellGet ? getCell(t, "row2", "col1") : get(t, "row2", "col1"));
        if (account) {
            account1 -= 150;
        } else {
            account2 -= 150;
        }
        Assert.assertTrue(account1 + account2 >= 0);
        if (account) {
            put(t, "row1", "col1", String.valueOf(account1));
        } else {
            put(t, "row2", "col1", String.valueOf(account2));
        }
    }

    @Test
    public void testCycleWithReadOnly() {
        // readOnly has a r/w dep on t2 and t2 has a r/w on t1 and t1 has a w/r dep on readOnly
        // This creates a cycle that is valid under SI, but not SSI
        // The main issue is that readOnly reads an invalid state of the world. because it reads the updated value of
        // t1, but the old value of t2.
        String initialValue = "100";
        String newValue = "101";
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", initialValue);
        put(t0, "row2", "col1", initialValue);
        t0.commit();
        Transaction t1 = startTransaction();
        put(t1, "row1", "col1", newValue);
        Transaction t2 = startTransaction();
        String row1Get = get(t2, "row1", "col1");
        assertEquals(initialValue, row1Get);
        put(t2, "row2", "col1", row1Get);
        t1.commit();
        Transaction readOnly = startTransaction();
        assertEquals(newValue, get(readOnly, "row1", "col1"));
        assertEquals(initialValue, get(readOnly, "row2", "col1"));
        try {
            t2.commit();
            fail();
        } catch (TransactionSerializableConflictException e) {
            // expected: t2's commit must fail to break the SI-visible cycle
        }
    }

    /** Same cycle as above with an extra committed writer (t3) in between. */
    @Test
    public void testLargerCycleWithReadOnly() {
        String initialValue = "100";
        String newValue = "101";
        String newValue2 = "102";
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", initialValue);
        put(t0, "row2", "col1", initialValue);
        t0.commit();
        Transaction t1 = startTransaction();
        put(t1, "row1", "col1", newValue);
        Transaction t2 = startTransaction();
        String row1Get = get(t2, "row1", "col1");
        assertEquals(initialValue, row1Get);
        put(t2, "row2", "col1", row1Get);
        t1.commit();
        Transaction t3 = startTransaction();
        put(t3, "row1", "col1", newValue2);
        t3.commit();
        Transaction readOnly = startTransaction();
        assertEquals(newValue2, get(readOnly, "row1", "col1"));
        assertEquals(initialValue, get(readOnly, "row2", "col1"));
        try {
            t2.commit();
            fail();
        } catch (TransactionSerializableConflictException e) {
            // expected: t2's commit must fail to break the SI-visible cycle
        }
    }

    /**
     * A concurrent write landing after the range read's key space boundary
     * ("row11" sorts after the first result) is not a phantom, so both
     * commits succeed.
     */
    @Test
    public void testNonPhantomRead() {
        String initialValue = "100";
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", initialValue);
        put(t0, "row2", "col1", initialValue);
        t0.commit();
        Transaction t1 = startTransaction();
        // The read itself registers the scanned range; the returned value is unused.
        RowResult<byte[]> first = BatchingVisitables.getFirst(t1.getRange(TEST_TABLE, RangeRequest.builder().build()));
        put(t1, "row22", "col1", initialValue);
        Transaction t2 = startTransaction();
        put(t2, "row11", "col1", initialValue);
        t2.commit();
        t1.commit();
    }

    /**
     * A concurrent write of "row0" lands inside the scanned prefix of t1's
     * range read (before the first result), creating a phantom: t1 must fail.
     */
    @Test
    public void testPhantomReadFail() {
        String initialValue = "100";
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", initialValue);
        put(t0, "row2", "col1", initialValue);
        t0.commit();
        Transaction t1 = startTransaction();
        // The read itself registers the scanned range; the returned value is unused.
        RowResult<byte[]> first = BatchingVisitables.getFirst(t1.getRange(TEST_TABLE, RangeRequest.builder().build()));
        put(t1, "row22", "col1", initialValue);
        Transaction t2 = startTransaction();
        put(t2, "row0", "col1", initialValue);
        t2.commit();
        try {
            t1.commit();
            fail();
        } catch (TransactionSerializableConflictException e) {
            // expected: t2 introduced a phantom into t1's scanned range
        }
    }

    /** Phantom via a full-range scan: a new "row3" lands inside it; t1 must fail. */
    @Test
    public void testPhantomReadFail2() {
        String initialValue = "100";
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", initialValue);
        put(t0, "row2", "col1", initialValue);
        t0.commit();
        Transaction t1 = startTransaction();
        BatchingVisitables.copyToList(t1.getRange(TEST_TABLE, RangeRequest.builder().build()));
        put(t1, "row22", "col1", initialValue);
        Transaction t2 = startTransaction();
        put(t2, "row3", "col1", initialValue);
        t2.commit();
        try {
            t1.commit();
            fail();
        } catch (TransactionSerializableConflictException e) {
            // expected: t2 introduced a phantom into t1's scanned range
        }
    }

    // NOTE(review): this test body is byte-identical to testPhantomReadFail2
    // and never performs a cell-level read despite its name -- looks like a
    // copy-paste slip; TODO confirm intended coverage with the suite owners.
    @Test
    public void testCellReadWriteFailure() {
        String initialValue = "100";
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", initialValue);
        put(t0, "row2", "col1", initialValue);
        t0.commit();
        Transaction t1 = startTransaction();
        BatchingVisitables.copyToList(t1.getRange(TEST_TABLE, RangeRequest.builder().build()));
        put(t1, "row22", "col1", initialValue);
        Transaction t2 = startTransaction();
        put(t2, "row3", "col1", initialValue);
        t2.commit();
        try {
            t1.commit();
            fail();
        } catch (TransactionSerializableConflictException e) {
            // expected: conflicting concurrent write inside t1's read set
        }
    }

    /** Concurrent overwrite of a value t1's range read observed; t1 must fail. */
    @Test
    public void testCellReadWriteFailure2() {
        String initialValue = "100";
        Transaction t0 = startTransaction();
        put(t0, "row1", "col1", initialValue);
        put(t0, "row2", "col1", initialValue);
        t0.commit();
        Transaction t1 = startTransaction();
        BatchingVisitables.copyToList(t1.getRange(TEST_TABLE, RangeRequest.builder().build()));
        put(t1, "row22", "col1", initialValue);
        Transaction t2 = startTransaction();
        put(t2, "row2", "col1", "101");
        t2.commit();
        try {
            t1.commit();
            fail();
        } catch (TransactionSerializableConflictException e) {
            // expected: conflicting concurrent write inside t1's read set
        }
    }
}
| |
// Copyright (C) 2016 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.api.change;
import static com.google.common.truth.Truth.assertThat;
import static com.google.gerrit.entities.Patch.MERGE_LIST;
import static com.google.gerrit.git.ObjectIds.abbreviateName;
import static com.google.gerrit.testing.GerritJUnit.assertThrows;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.eclipse.jgit.lib.Constants.HEAD;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.acceptance.PushOneCommit;
import com.google.gerrit.common.RawInputUtil;
import com.google.gerrit.extensions.api.changes.RevisionApi;
import com.google.gerrit.extensions.common.CommitInfo;
import com.google.gerrit.extensions.common.DiffInfo;
import com.google.gerrit.extensions.restapi.BinaryResult;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import java.io.ByteArrayOutputStream;
import java.util.List;
import java.util.Set;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.revwalk.RevCommit;
import org.junit.Before;
import org.junit.Test;
/**
 * Integration tests for the auto-generated merge-list pseudo-file of a merge
 * commit: its content, its presence in file lists and diffs, and that it
 * cannot be modified or deleted through change edits.
 */
@NoHttpd
public class MergeListIT extends AbstractDaemonTest {

    private String changeId;          // change id of the merge commit under test
    private RevCommit merge;          // merge of parent1 and parent2
    private RevCommit parent1;        // tip of the first-parent chain
    private RevCommit grandParent1;
    private RevCommit parent2;        // tip of the second-parent chain
    private RevCommit grandParent2;

    @Before
    public void setup() throws Exception {
        // Remember the initial HEAD so the second chain can branch from the
        // same starting point as the first.
        ObjectId initial = repo().exactRef(HEAD).getLeaf().getObjectId();
        PushOneCommit.Result gp1 =
            pushFactory
                .create(
                    admin.newIdent(),
                    testRepo,
                    "grand parent 1",
                    ImmutableMap.of("foo", "foo-1.1", "bar", "bar-1.1"))
                .to("refs/for/master");
        grandParent1 = gp1.getCommit();
        PushOneCommit.Result p1 =
            pushFactory
                .create(
                    admin.newIdent(),
                    testRepo,
                    "parent 1",
                    ImmutableMap.of("foo", "foo-1.2", "bar", "bar-1.2"))
                .to("refs/for/master");
        parent1 = p1.getCommit();
        // reset HEAD in order to create a sibling of the first change
        testRepo.reset(initial);
        PushOneCommit.Result gp2 =
            pushFactory
                .create(
                    admin.newIdent(),
                    testRepo,
                    "grand parent 2",
                    ImmutableMap.of("foo", "foo-2.1", "bar", "bar-2.1"))
                .to("refs/for/master");
        grandParent2 = gp2.getCommit();
        PushOneCommit.Result p2 =
            pushFactory
                .create(
                    admin.newIdent(),
                    testRepo,
                    "parent 2",
                    ImmutableMap.of("foo", "foo-2.2", "bar", "bar-2.2"))
                .to("refs/for/master");
        parent2 = p2.getCommit();
        // Create the merge commit with both chains as parents.
        PushOneCommit m =
            pushFactory.create(
                admin.newIdent(), testRepo, "merge", ImmutableMap.of("foo", "foo-1", "bar", "bar-2"));
        m.setParents(ImmutableList.of(p1.getCommit(), p2.getCommit()));
        PushOneCommit.Result result = m.to("refs/for/master");
        result.assertOkStatus();
        merge = result.getCommit();
        changeId = result.getChangeId();
    }

    @Test
    public void getMergeList() throws Exception {
        // Default: list commits reachable only through the second parent.
        List<CommitInfo> mergeList = current(changeId).getMergeList().get();
        assertThat(mergeList).hasSize(2);
        assertThat(mergeList.get(0).commit).isEqualTo(parent2.name());
        assertThat(mergeList.get(1).commit).isEqualTo(grandParent2.name());
        // Marking parent 2 as uninteresting flips the list to the first chain.
        mergeList = current(changeId).getMergeList().withUninterestingParent(2).get();
        assertThat(mergeList).hasSize(2);
        assertThat(mergeList.get(0).commit).isEqualTo(parent1.name());
        assertThat(mergeList.get(1).commit).isEqualTo(grandParent1.name());
    }

    @Test
    public void getMergeListContent() throws Exception {
        // The merge list is served as file content of the MERGE_LIST pseudo-path.
        BinaryResult bin = current(changeId).file(MERGE_LIST).content();
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        bin.writeTo(os);
        String content = new String(os.toByteArray(), UTF_8);
        assertThat(content).isEqualTo(getMergeListContent(parent2, grandParent2));
    }

    @Test
    public void getFileList() throws Exception {
        // MERGE_LIST appears against either parent of a merge, but never for
        // a non-merge change.
        assertThat(getFiles(changeId)).contains(MERGE_LIST);
        assertThat(getFiles(changeId, 1)).contains(MERGE_LIST);
        assertThat(getFiles(changeId, 2)).contains(MERGE_LIST);
        assertThat(getFiles(createChange().getChangeId())).doesNotContain(MERGE_LIST);
    }

    @Test
    public void getDiffForMergeList() throws Exception {
        // The merge list always diffs as a newly-added file; its content
        // depends on which parent the diff is taken against.
        DiffInfo diff = getMergeListDiff(changeId);
        assertDiffForNewFile(diff, merge, MERGE_LIST, getMergeListContent(parent2, grandParent2));
        diff = getMergeListDiff(changeId, 1);
        assertDiffForNewFile(diff, merge, MERGE_LIST, getMergeListContent(parent2, grandParent2));
        diff = getMergeListDiff(changeId, 2);
        assertDiffForNewFile(diff, merge, MERGE_LIST, getMergeListContent(parent1, grandParent1));
    }

    @Test
    public void editMergeList() throws Exception {
        // The merge list is synthetic: editing it must be rejected.
        gApi.changes().id(changeId).edit().create();
        ResourceConflictException thrown =
            assertThrows(
                ResourceConflictException.class,
                () ->
                    gApi.changes()
                        .id(changeId)
                        .edit()
                        .modifyFile(MERGE_LIST, RawInputUtil.create("new content")));
        assertThat(thrown).hasMessageThat().contains("Invalid path: " + MERGE_LIST);
    }

    @Test
    public void deleteMergeList() throws Exception {
        // Deleting the synthetic merge list is a no-op and must be rejected.
        gApi.changes().id(changeId).edit().create();
        ResourceConflictException thrown =
            assertThrows(
                ResourceConflictException.class,
                () -> gApi.changes().id(changeId).edit().deleteFile(MERGE_LIST));
        assertThat(thrown).hasMessageThat().contains("no changes were made");
    }

    /** Builds the expected merge-list text: one abbreviated-SHA line per commit. */
    private String getMergeListContent(RevCommit... commits) {
        StringBuilder mergeList = new StringBuilder("Merge List:\n\n");
        for (RevCommit c : commits) {
            mergeList
                .append("* ")
                .append(abbreviateName(c, 8))
                .append(" ")
                .append(c.getShortMessage())
                .append("\n");
        }
        return mergeList.toString();
    }

    /** File paths of the current revision (combined diff). */
    private Set<String> getFiles(String changeId) throws Exception {
        return current(changeId).files().keySet();
    }

    /** File paths of the current revision against the given parent. */
    private Set<String> getFiles(String changeId, int parent) throws Exception {
        return current(changeId).files(parent).keySet();
    }

    private DiffInfo getMergeListDiff(String changeId) throws Exception {
        return current(changeId).file(MERGE_LIST).diff();
    }

    private DiffInfo getMergeListDiff(String changeId, int parent) throws Exception {
        return current(changeId).file(MERGE_LIST).diff(parent);
    }

    /** Shorthand for the current revision API of the given change. */
    private RevisionApi current(String changeId) throws Exception {
        return gApi.changes().id(changeId).current();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.test.mapred;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mapred.AccumuloFileOutputFormat;
import org.apache.accumulo.core.client.mapred.AccumuloInputFormat;
import org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase;
import org.apache.accumulo.core.client.sample.RowSampler;
import org.apache.accumulo.core.client.sample.SamplerConfiguration;
import org.apache.accumulo.core.conf.DefaultConfiguration;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.file.FileSKVIterator;
import org.apache.accumulo.core.file.rfile.RFileOperations;
import org.apache.accumulo.core.sample.impl.SamplerConfigurationImpl;
import org.apache.accumulo.core.util.CachedConfiguration;
import org.apache.accumulo.harness.AccumuloClusterHarness;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Integration tests for {@code AccumuloFileOutputFormat} (mapred API):
 * verifies that a local MapReduce job writes RFiles (with sampling metadata)
 * for empty and non-empty tables, and exercises a mapper that expects a
 * failure on a particular key.
 */
public class AccumuloFileOutputFormatIT extends AccumuloClusterHarness {
    private static final Logger log = LoggerFactory.getLogger(AccumuloFileOutputFormatIT.class);
    private static final int JOB_VISIBILITY_CACHE_SIZE = 3000;
    private static final String PREFIX = AccumuloFileOutputFormatIT.class.getSimpleName();
    private static final String BAD_TABLE = PREFIX + "_mapred_bad_table";
    private static final String TEST_TABLE = PREFIX + "_mapred_test_table";
    private static final String EMPTY_TABLE = PREFIX + "_mapred_empty_table";

    // Assertion failures raised inside mapper threads are stashed in these
    // statics so the test thread can re-check them after the job finishes.
    private static AssertionError e1 = null;
    private static AssertionError e2 = null;

    // Row sampler configuration attached to the output files; the write tests
    // later assert the produced RFiles carry a matching sample.
    private static final SamplerConfiguration SAMPLER_CONFIG = new SamplerConfiguration(RowSampler.class.getName()).addOption("hasher", "murmur3_32").addOption(
        "modulus", "3");

    @Rule
    public TemporaryFolder folder = new TemporaryFolder(new File(System.getProperty("user.dir") + "/target"));

    @Test
    public void testEmptyWrite() throws Exception {
        getConnector().tableOperations().create(EMPTY_TABLE);
        handleWriteTests(false);
    }

    @Test
    public void testRealWrite() throws Exception {
        Connector c = getConnector();
        c.tableOperations().create(TEST_TABLE);
        BatchWriter bw = c.createBatchWriter(TEST_TABLE, new BatchWriterConfig());
        Mutation m = new Mutation("Key");
        m.put("", "", "");
        bw.addMutation(m);
        bw.close();
        handleWriteTests(true);
    }

    /** Tool that runs a map-only job from an Accumulo table to RFile output. */
    private static class MRTester extends Configured implements Tool {
        /**
         * Mapper used for BAD_TABLE: expects collecting the third key
         * (index 2) to fail, and records any assertion mismatch in e1/e2
         * for the test thread to inspect.
         */
        private static class BadKeyMapper implements Mapper<Key,Value,Key,Value> {
            int index = 0;

            @Override
            public void map(Key key, Value value, OutputCollector<Key,Value> output, Reporter reporter) throws IOException {
                try {
                    try {
                        output.collect(key, value);
                        if (index == 2)
                            fail();
                    } catch (Exception e) {
                        log.error(e.toString(), e);
                        assertEquals(2, index);
                    }
                } catch (AssertionError e) {
                    e1 = e;
                }
                index++;
            }

            @Override
            public void configure(JobConf job) {}

            @Override
            public void close() throws IOException {
                // Exactly three keys should have been mapped (indices 0..2).
                try {
                    assertEquals(2, index);
                } catch (AssertionError e) {
                    e2 = e;
                }
            }
        }

        @Override
        public int run(String[] args) throws Exception {
            if (args.length != 2) {
                throw new IllegalArgumentException("Usage : " + MRTester.class.getName() + " <table> <outputfile>");
            }
            String table = args[0];
            JobConf job = new JobConf(getConf());
            job.setJarByClass(this.getClass());
            ConfiguratorBase.setVisibilityCacheSize(job, JOB_VISIBILITY_CACHE_SIZE);
            job.setInputFormat(AccumuloInputFormat.class);
            AccumuloInputFormat.setZooKeeperInstance(job, getCluster().getClientConfig());
            AccumuloInputFormat.setConnectorInfo(job, getAdminPrincipal(), getAdminToken());
            AccumuloInputFormat.setInputTableName(job, table);
            AccumuloFileOutputFormat.setOutputPath(job, new Path(args[1]));
            AccumuloFileOutputFormat.setSampler(job, SAMPLER_CONFIG);
            // BAD_TABLE triggers the failure-expecting mapper; all other
            // tables are passed through unchanged.
            job.setMapperClass(BAD_TABLE.equals(table) ? BadKeyMapper.class : IdentityMapper.class);
            job.setMapOutputKeyClass(Key.class);
            job.setMapOutputValueClass(Value.class);
            job.setOutputFormat(AccumuloFileOutputFormat.class);
            job.setNumReduceTasks(0);
            return JobClient.runJob(job).isSuccessful() ? 0 : 1;
        }

        public static void main(String[] args) throws Exception {
            // Force the local (in-process) MapReduce framework for the test.
            Configuration conf = new Configuration();
            conf.set("mapreduce.framework.name", "local");
            conf.set("mapreduce.cluster.local.dir", new File(System.getProperty("user.dir"), "target/mapreduce-tmp").getAbsolutePath());
            assertEquals(0, ToolRunner.run(conf, new MRTester(), args));
        }
    }

    /**
     * Runs the job against TEST_TABLE or EMPTY_TABLE and checks the output
     * directory: one "part-m-" RFile carrying the configured sample for a
     * non-empty table, none for an empty table.
     *
     * @param content true if the source table has data
     */
    private void handleWriteTests(boolean content) throws Exception {
        // testName is presumably a TestName rule inherited from the harness -- TODO confirm.
        File f = folder.newFile(testName.getMethodName());
        // The job wants to create the output directory itself, so remove the
        // placeholder file TemporaryFolder just made.
        if (f.delete()) {
            log.debug("Deleted {}", f);
        }
        MRTester.main(new String[] {content ? TEST_TABLE : EMPTY_TABLE, f.getAbsolutePath()});
        assertTrue(f.exists());
        File[] files = f.listFiles(new FileFilter() {
            @Override
            public boolean accept(File file) {
                return file.getName().startsWith("part-m-");
            }
        });
        assertNotNull(files);
        if (content) {
            assertEquals(1, files.length);
            assertTrue(files[0].exists());
            Configuration conf = CachedConfiguration.getInstance();
            DefaultConfiguration acuconf = DefaultConfiguration.getInstance();
            // Open the produced RFile and verify it embeds the sample data
            // for the sampler configured on the output format.
            FileSKVIterator sample = RFileOperations.getInstance().newReaderBuilder().forFile(files[0].toString(), FileSystem.get(conf), conf)
                .withTableConfiguration(acuconf).build().getSample(new SamplerConfigurationImpl(SAMPLER_CONFIG));
            assertNotNull(sample);
        } else {
            assertEquals(0, files.length);
        }
    }

    @Test
    public void writeBadVisibility() throws Exception {
        Connector c = getConnector();
        c.tableOperations().create(BAD_TABLE);
        BatchWriter bw = c.createBatchWriter(BAD_TABLE, new BatchWriterConfig());
        Mutation m = new Mutation("r1");
        // NOTE(review): the first two puts are identical (same cf/cq/value) --
        // looks like a copy-paste duplicate; TODO confirm intended fixture.
        m.put("cf1", "cq1", "A&B");
        m.put("cf1", "cq1", "A&B");
        m.put("cf1", "cq2", "A&");
        bw.addMutation(m);
        bw.close();
        File f = folder.newFile(testName.getMethodName());
        if (f.delete()) {
            log.debug("Deleted {}", f);
        }
        MRTester.main(new String[] {BAD_TABLE, f.getAbsolutePath()});
        // No assertion failures may have leaked out of the mapper threads.
        assertNull(e1);
        assertNull(e2);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cache.eviction.EvictableEntry;
import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx;
import org.apache.ignite.internal.util.lang.GridMetadataAwareAdapter;
import org.apache.ignite.internal.util.lang.GridTuple;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.jetbrains.annotations.Nullable;
/**
* Entry wrapper that never obscures obsolete entries from user.
*/
public class CacheEvictableEntryImpl<K, V> implements EvictableEntry<K, V> {
/** Key under which this wrapper's user metadata is stored on the underlying entry. */
private static final int META_KEY = GridMetadataAwareAdapter.EntryKey.CACHE_EVICTABLE_ENTRY_KEY.key();

/** Cached entry. */
@GridToStringInclude
protected GridCacheEntryEx cached;

/**
 * @param cached Cached entry to wrap; never unwrapped or replaced afterwards.
 */
protected CacheEvictableEntryImpl(GridCacheEntryEx cached) {
    this.cached = cached;
}
/** {@inheritDoc} */
@Override public K getKey() throws IllegalStateException {
    // Unwrap the key from its cache-object form ('false' presumably
    // suppresses copying -- standard CacheObject.value() flag).
    return cached.key().value(cached.context().cacheObjectContext(), false);
}
/** {@inheritDoc} */
@Override public boolean isCached() {
    // The entry counts as cached while it is neither obsolete nor deleted.
    boolean removed = cached.obsoleteOrDeleted();

    return !removed;
}
/** {@inheritDoc} */
@Override public boolean evict() {
    GridCacheContext<K, V> ctx = cached.context();

    try {
        assert ctx != null;

        CacheEvictionManager mgr = ctx.evicts();

        if (mgr == null) {
            // The eviction manager is only absent while the node is stopping;
            // in that case the eviction simply did not happen.
            assert ctx.kernalContext().isStopping();

            return false;
        }

        return mgr.evict(cached, null, false, null);
    }
    catch (IgniteCheckedException e) {
        // Log and report failure rather than propagate: eviction is best-effort.
        U.error(ctx.grid().log(), "Failed to evict entry from cache: " + cached, e);

        return false;
    }
}
/**
 * Peeks the entry's current value without acquiring any locks.
 *
 * @return Peeked value, or {@code null} if the entry has no value or was
 *      concurrently removed.
 */
@Nullable public V peek() {
    try {
        CacheObject val = cached.peek();

        return val != null ? val.<V>value(cached.context().cacheObjectContext(), false) : null;
    }
    catch (GridCacheEntryRemovedException ignored) {
        // Entry disappeared under us; treat as absent.
        return null;
    }
    catch (IgniteCheckedException e) {
        throw new IgniteException(e);
    }
}
/** {@inheritDoc} */
@Override public int size() {
    try {
        GridCacheContext<Object, Object> cctx = cached.context();

        KeyCacheObject key = cached.key();

        // Size is measured in serialized bytes: key bytes plus value bytes
        // (key only when the entry currently has no value).
        byte[] keyBytes = key.valueBytes(cctx.cacheObjectContext());

        byte[] valBytes = null;

        CacheObject cacheObj = cached.valueBytes();

        if (cacheObj != null)
            valBytes = cacheObj.valueBytes(cctx.cacheObjectContext());

        return valBytes == null ? keyBytes.length : keyBytes.length + valBytes.length;
    }
    catch (GridCacheEntryRemovedException ignored) {
        // Concurrently removed entry occupies no space.
        return 0;
    }
    catch (IgniteCheckedException e) {
        throw new IgniteException(e);
    }
}
/** {@inheritDoc} */
@Override public V getValue() {
    try {
        // Prefer the value seen by an active user transaction, if any.
        IgniteInternalTx tx = cached.context().tm().userTx();

        if (tx != null) {
            GridTuple<CacheObject> peek = tx.peek(cached.context(), false, cached.key());

            if (peek != null)
                return peek.get().value(cached.context().cacheObjectContext(), false);
        }

        // Detached entries are not in the cache map; read their raw value directly.
        if (cached.detached()) {
            CacheObject val = cached.rawGet();

            return val != null ? val.<V>value(cached.context().cacheObjectContext(), false) : null;
        }

        // Re-resolve the entry by key and peek; retry when the looked-up
        // entry is removed between peekEx() and peek().
        for (;;) {
            GridCacheEntryEx e = cached.context().cache().peekEx(cached.key());

            if (e == null)
                return null;

            try {
                CacheObject val = e.peek();

                return val != null ? val.<V>value(cached.context().cacheObjectContext(), false) : null;
            }
            catch (GridCacheEntryRemovedException ignored) {
                // No-op.
            }
            catch (IgniteCheckedException ex) {
                throw new IgniteException(ex);
            }
        }
    }
    catch (GridCacheFilterFailedException ignored) {
        // tx.peek() was called without filters, so filter failure is impossible.
        throw new IgniteException("Should never happen.");
    }
}
/** {@inheritDoc} */
@Nullable @Override public <T> T addMeta(T val) {
return cached.addMeta(META_KEY, val);
}
/** {@inheritDoc} */
@Nullable @Override public <T> T meta() {
return cached.meta(META_KEY);
}
/** {@inheritDoc} */
@Nullable @Override public <T> T removeMeta() {
return cached.removeMeta(META_KEY);
}
/** {@inheritDoc} */
@Override public <T> boolean removeMeta(T val) {
return cached.removeMeta(META_KEY, val);
}
/** {@inheritDoc} */
@Nullable @Override public <T> T putMetaIfAbsent(T val) {
return cached.putMetaIfAbsent(META_KEY, val);
}
/** {@inheritDoc} */
@Override public <T> boolean replaceMeta(T curVal, T newVal) {
return cached.replaceMeta(META_KEY,curVal, newVal);
}
/** {@inheritDoc} */
@Override public <T> T unwrap(Class<T> clazz) {
if (clazz.isAssignableFrom(IgniteCache.class))
return (T)cached.context().grid().cache(cached.context().name());
if (clazz.isAssignableFrom(getClass()))
return clazz.cast(this);
throw new IllegalArgumentException();
}
/** {@inheritDoc} */
@Override public int hashCode() {
return cached.key().hashCode();
}
/** {@inheritDoc} */
@Override public boolean equals(Object obj) {
if (obj == this)
return true;
if (obj instanceof CacheEvictableEntryImpl) {
CacheEvictableEntryImpl<K, V> other = (CacheEvictableEntryImpl<K, V>)obj;
return cached.key().equals(other.cached.key());
}
return false;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(CacheEvictableEntryImpl.class, this);
}
}
| |
package datastructure;
public class Factor {
String name;
public double []values = null;
public String [] variables = null;
public int [] domain=null;
//TODO implement assignmentToIndex and indexToAssignment such that they can handle variables besides binary variables, current implementation only supports binary variables
//TODO check the validity of the program after changing the code of above 2 functions
public Factor(String name,double[] values, String[] variables, int [] domain) throws Exception
{
boolean check=sanityCheck(values, variables, domain);
if (check)
{
this.name=name;
this.values=values;
this.variables=variables;
this.domain=domain;
}
else
{
throw new Exception();
}
}
private boolean sanityCheck(double[] values, String[] variables,
int[] domain) {
//check if each variable has a domain
if(variables.length!=domain.length)
{
return false;
}
// // check if values are between 0 and 1
//
// for(int i =0; i< values.length;i++)
// {
// if(values[i]<0 || values[i]>1)
// {
// return true;
// }
// }
// check if total values are complete
int size=1;
for( int i=0;i<domain.length;i++)
{
size=size*domain[i];
}
if(size!=values.length)
{
return false;
}
//TODO
//duplicate variable names not allowed
//TODO
//name must be unique
return true;
}
public int assignmentToIndex(int [] assignment) throws Exception
{
if(assignment.length!=variables.length) throw new Exception();
for(int i=0;i<assignment.length;i++)
{
if(assignment[i]>=domain[i]||assignment[i]<0) throw new Exception();
}
int index=recAssignmentToIndex(assignment,this.variables);
return index;
}
private int recAssignmentToIndex(int[] assignment, String[] variables) {
if(assignment.length==0)
{
return 0;
}
@SuppressWarnings("unused")
int indexDomain=0;
for(int i=0;i<this.variables.length;i++)
{
if(this.variables[i].equalsIgnoreCase(variables[0]))
{
indexDomain=i;
}
}
//only works for binary variables for now.. to improve check notes the general logic.
int []indexes=new int[(int) Math.pow(2, assignment.length-1)];
int start=(int) (assignment[0]*Math.pow(2, (assignment.length-1)-0));
//int end=(int) ((assignment[0]*Math.pow(this.domain[indexDomain], (assignment.length-1)-0)) + (Math.pow(2, (assignment.length-1)-0)-1));
for (int i=0;i<indexes.length;i++)
{
indexes[i]=start;
start++;
}
if(assignment.length==1)
{
return indexes[0];
}
else
{
return indexes[recAssignmentToIndex(reducedArray(assignment), reducedVariables(variables))];
}
}
public int[] indexToAssignment(int i) throws Exception
{
if(i>values.length-1)
{
throw new Exception();
}
//logic only for binary variables
String str=Integer.toBinaryString(i);
int[] assignment=null;
if(str.length()!=variables.length)
{
// System.out.println(str);
assignment=new int[variables.length];
for(int j=0;j<variables.length-str.length();j++)
{
assignment[j]=0;
}
for(int k=variables.length-str.length();k<variables.length;k++)
{
assignment[k]=Integer.parseInt(String.valueOf(str.charAt((k-(variables.length-str.length())))));
//System.out.println(assignment[k]);
}
return assignment;
}
else
{
assignment=new int[variables.length];
for(int k=0;k<str.length();k++)
{
assignment[k]=Integer.parseInt(str.substring(k, k+1));
//System.out.print(assignment[k]);
}
return assignment;
}
}
private int[] reducedArray(int [] array)
{
int [] newArray=new int[array.length-1];
for(int i=1;i<array.length;i++)
{
newArray[i-1]=array[i];
}
return newArray;
}
private String[] reducedVariables(String[] array)
{
String [] newArray=new String[array.length-1];
for(int i=1;i<array.length;i++)
{
newArray[i-1]=array[i];
}
return newArray;
}
public void printFactor()
{
System.out.println(" Factor : " + this.name);
System.out.print(" Variables : " );
for(int i=0;i<this.variables.length;i++)
{
System.out.print( " " + this.variables[i]+ " ");
}
System.out.println();
System.out.print(" Domain : ");
for(int i=0;i<this.domain.length;i++)
{
System.out.print( " " + this.domain[i]+ " ");
}
System.out.println();
System.out.println(" Values : ");
for(int i=0;i<this.values.length;i++)
{
System.out.println(" "+this.values[i]);
}
System.out.println("\n");
}
public double sumOfValues()
{
double sum=0.0;
for(int i=0;i<this.values.length;i++)
{
sum=sum+this.values[i];
}
return sum;
}
}
| |
/*
* Copyright (c) 1994, 2003, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.tools.java;
import java.util.Hashtable;
import java.util.Vector;
import java.util.Enumeration;
import java.util.List;
import java.util.Collections;
import java.io.IOException;
/**
* This class describes the classes and packages imported
* from a source file. A Hashtable called bindings is maintained
* to quickly map symbol names to classes. This table is flushed
 * every time a new import is added.
*
* A class name is resolved as follows:
* - if it is a qualified name then return the corresponding class
* - if the name corresponds to an individually imported class then return that class
* - check if the class is defined in any of the imported packages,
* if it is then return it, make sure it is defined in only one package
* - assume that the class is defined in the current package
*
* WARNING: The contents of this source file are not part of any
* supported API. Code that depends on them does so at its own risk:
* they are subject to change or removal without notice.
*/
public
class Imports implements Constants {
/**
* The current package, which is implicitly imported,
* and has precedence over other imported packages.
*/
Identifier currentPackage = idNull;
/**
* A location for the current package declaration. Used to
* report errors against the current package.
*/
long currentPackageWhere = 0;
/**
* The imported classes, including memoized imports from packages.
*/
Hashtable classes = new Hashtable();
/**
* The imported package identifiers. This will not contain duplicate
* imports for the same package. It will also not contain the
* current package.
*/
Vector packages = new Vector();
/**
* The (originally) imported classes.
* A vector of IdentifierToken.
*/
Vector singles = new Vector();
/**
* Are the import names checked yet?
* 0 = not yet checked, -1 = check in progress, 1 = checked (see resolve(Environment)).
*/
protected int checked;
/**
* Constructor, always import java.lang.
*/
public Imports(Environment env) {
addPackage(idJavaLang);
}
/**
* Check the names of the imports.
* Validates every import-on-demand and single-type import against the
* environment, reporting errors and normalizing the stored names.
* Idempotent: subsequent calls after the first completed check are no-ops.
*/
public synchronized void resolve(Environment env) {
if (checked != 0) {
return;
}
checked = -1;
// After all class information has been read, now we can
// safely inspect import information for errors.
// If we did this before all parsing was finished,
// we could get vicious circularities, since files can
// import each others' classes.
// A note: the resolution of the package java.lang takes place
// in the sun.tools.javac.BatchEnvironment#setExemptPackages().
// Make sure that the current package's name does not collide
// with the name of an existing class. (bug 4101529)
//
// This change has been backed out because, on WIN32, it
// failed to distinguish between java.awt.event and
// java.awt.Event when looking for a directory. We will
// add this back in later.
//
// if (currentPackage != idNull) {
// Identifier resolvedName =
// env.resolvePackageQualifiedName(currentPackage);
//
// Identifier className = resolvedName.getTopName();
//
// if (importable(className, env)) {
// // The name of the current package is also the name
// // of a class.
// env.error(currentPackageWhere, "package.class.conflict",
// currentPackage, className);
// }
// }
Vector resolvedPackages = new Vector();
for (Enumeration e = packages.elements() ; e.hasMoreElements() ;) {
IdentifierToken t = (IdentifierToken)e.nextElement();
Identifier nm = t.getName();
long where = t.getWhere();
// Check to see if this package is exempt from the "exists"
// check. See the note in
// sun.tools.javac.BatchEnvironment#setExemptPackages()
// for more information.
if (env.isExemptPackage(nm)) {
resolvedPackages.addElement(t);
continue;
}
// (Note: This code is moved from BatchParser.importPackage().)
try {
Identifier rnm = env.resolvePackageQualifiedName(nm);
if (importable(rnm, env)) {
// This name is a real class; better not be a package too.
if (env.getPackage(rnm.getTopName()).exists()) {
env.error(where, "class.and.package",
rnm.getTopName());
}
// Pass an "inner" name to the imports.
if (!rnm.isInner())
rnm = Identifier.lookupInner(rnm, idNull);
nm = rnm;
} else if (!env.getPackage(nm).exists()) {
env.error(where, "package.not.found", nm, "import");
} else if (rnm.isInner()) {
// nm exists, and rnm.getTopName() is a parent package
env.error(where, "class.and.package", rnm.getTopName());
}
resolvedPackages.addElement(new IdentifierToken(where, nm));
} catch (IOException ee) {
env.error(where, "io.exception", "import");
}
}
packages = resolvedPackages;
for (Enumeration e = singles.elements() ; e.hasMoreElements() ;) {
IdentifierToken t = (IdentifierToken)e.nextElement();
Identifier nm = t.getName();
long where = t.getWhere();
Identifier pkg = nm.getQualifier();
// (Note: This code is moved from BatchParser.importClass().)
nm = env.resolvePackageQualifiedName(nm);
if (!env.classExists(nm.getTopName())) {
env.error(where, "class.not.found", nm, "import");
}
// (Note: This code is moved from Imports.addClass().)
Identifier snm = nm.getFlatName().getName();
// make sure it isn't already imported explicitly
Identifier className = (Identifier)classes.get(snm);
if (className != null) {
Identifier f1 = Identifier.lookup(className.getQualifier(),
className.getFlatName());
Identifier f2 = Identifier.lookup(nm.getQualifier(),
nm.getFlatName());
if (!f1.equals(f2)) {
env.error(where, "ambig.class", nm, className);
}
}
classes.put(snm, nm);
// The code here needs to check to see, if we
// are importing an inner class, that all of its
// enclosing classes are visible to us. To check this,
// we need to construct a definition for the class.
// The code here used to call...
//
// ClassDefinition def = env.getClassDefinition(nm);
//
// ...but that interfered with the basicCheck()'ing of
// interfaces in certain cases (bug no. 4086139). Never
// fear. Instead we load the class with a call to the
// new getClassDefinitionNoCheck() which does no basicCheck() and
// lets us answer the questions we are interested in w/o
// interfering with the demand-driven nature of basicCheck().
try {
// Get a declaration
ClassDeclaration decl = env.getClassDeclaration(nm);
// Get the definition (no env argument)
ClassDefinition def = decl.getClassDefinitionNoCheck(env);
// Get the true name of the package containing this class.
// `pkg' from above is insufficient. It includes the
// names of our enclosing classes. Fix for 4086815.
Identifier importedPackage = def.getName().getQualifier();
// Walk out the outerClass chain, ensuring that each level
// is visible from our perspective.
for (; def != null; def = def.getOuterClass()) {
if (def.isPrivate()
|| !(def.isPublic()
|| importedPackage.equals(currentPackage))) {
env.error(where, "cant.access.class", def);
break;
}
}
} catch (AmbiguousClass ee) {
env.error(where, "ambig.class", ee.name1, ee.name2);
} catch (ClassNotFound ee) {
env.error(where, "class.not.found", ee.name, "import");
}
}
checked = 1;
}
/**
* Lookup a class, given the current set of imports,
* AmbiguousClass exception is thrown if the name can be
* resolved in more than one way. A ClassNotFound exception
* is thrown if the class is not found in the imported classes
* and packages.
* Successful resolutions are memoized in {@code classes}.
*/
public synchronized Identifier resolve(Environment env, Identifier nm) throws ClassNotFound {
if (tracing) env.dtEnter("Imports.resolve: " + nm);
// If the class has the special ambiguous prefix, then we will
// get the original AmbiguousClass exception by removing the
// prefix and proceeding in the normal fashion.
// (part of solution for 4059855)
if (nm.hasAmbigPrefix()) {
nm = nm.removeAmbigPrefix();
}
if (nm.isQualified()) {
// Don't bother it is already qualified
if (tracing) env.dtExit("Imports.resolve: QUALIFIED " + nm);
return nm;
}
// Lazily trigger import checking the first time a name is resolved.
if (checked <= 0) {
checked = 0;
resolve(env);
}
// Check if it was imported before
Identifier className = (Identifier)classes.get(nm);
if (className != null) {
if (tracing) env.dtExit("Imports.resolve: PREVIOUSLY IMPORTED " + nm);
return className;
}
// Note: the section below has changed a bit during the fix
// for bug 4093217. The current package is no longer grouped
// with the rest of the import-on-demands; it is now checked
// separately. Also, the list of import-on-demands is now
// guaranteed to be duplicate-free, so the code below can afford
// to be a bit simpler.
// First we look in the current package. The current package
// is given precedence over the rest of the import-on-demands,
// which means, among other things, that a class in the current
// package cannot be ambiguous.
Identifier id = Identifier.lookup(currentPackage, nm);
if (importable(id, env)) {
className = id;
} else {
// If it isn't in the current package, try to find it in
// our import-on-demands.
Enumeration e = packages.elements();
while (e.hasMoreElements()) {
IdentifierToken t = (IdentifierToken)e.nextElement();
id = Identifier.lookup(t.getName(), nm);
if (importable(id, env)) {
if (className == null) {
// We haven't found any other matching classes yet.
// Set className to what we've found and continue
// looking for an ambiguity.
className = id;
} else {
if (tracing)
env.dtExit("Imports.resolve: AMBIGUOUS " + nm);
// We've found an ambiguity.
throw new AmbiguousClass(className, id);
}
}
}
}
// Make sure a class was found
if (className == null) {
if (tracing) env.dtExit("Imports.resolve: NOT FOUND " + nm);
throw new ClassNotFound(nm);
}
// Remember the binding
classes.put(nm, className);
if (tracing) env.dtExit("Imports.resolve: FIRST IMPORT " + nm);
return className;
}
/**
* Check to see if 'id' names an importable class in `env'.
* This method was made public and static for utility.
*/
static public boolean importable(Identifier id, Environment env) {
if (!id.isInner()) {
return env.classExists(id);
} else if (!env.classExists(id.getTopName())) {
return false;
} else {
// load the top class and look inside it
try {
// There used to be a call to...
// env.getClassDeclaration(id.getTopName());
// ...here. It has been replaced with the
// two statements below. These should be functionally
// the same except for the fact that
// getClassDefinitionNoCheck() does not call
// basicCheck(). This allows us to avoid a circular
// need to do basicChecking that can arise with
// certain patterns of importing and inheritance.
// This is a fix for a variant of bug 4086139.
//
// Note: the special case code in env.getClassDefinition()
// which handles inner class names is not replicated below.
// This should be okay, as we are looking up id.getTopName(),
// not id.
ClassDeclaration decl =
env.getClassDeclaration(id.getTopName());
ClassDefinition c =
decl.getClassDefinitionNoCheck(env);
return c.innerClassExists(id.getFlatName().getTail());
} catch (ClassNotFound ee) {
return false;
}
}
}
/**
* Suppose a resolve() call has failed.
* This routine can be used silently to give a reasonable
* default qualification (the current package) to the identifier.
* This decision is recorded for future reference.
*/
public synchronized Identifier forceResolve(Environment env, Identifier nm) {
if (nm.isQualified())
return nm;
Identifier className = (Identifier)classes.get(nm);
if (className != null) {
return className;
}
// Default to the current package and memoize the decision.
className = Identifier.lookup(currentPackage, nm);
classes.put(nm, className);
return className;
}
/**
* Add a class import
*/
public synchronized void addClass(IdentifierToken t) {
singles.addElement(t);
}
// for compatibility
public void addClass(Identifier nm) throws AmbiguousClass {
addClass(new IdentifierToken(nm));
}
/**
* Add a package import, or perhaps an inner class scope.
* Ignore any duplicate imports.
*/
public synchronized void addPackage(IdentifierToken t) {
final Identifier name = t.getName();
// If this is a duplicate import for the current package,
// ignore it.
// (Identifiers are interned, so identity comparison is intentional here.)
if (name == currentPackage) {
return;
}
// If this is a duplicate of a package which has already been
// added to the list, ignore it.
final int size = packages.size();
for (int i = 0; i < size; i++) {
if (name == ((IdentifierToken)packages.elementAt(i)).getName()) {
return;
}
}
// Add the package to the list.
packages.addElement(t);
}
// for compatibility
public void addPackage(Identifier id) {
addPackage(new IdentifierToken(id));
}
/**
* Specify the current package with an IdentifierToken.
* Also records the declaration location for error reporting.
*/
public synchronized void setCurrentPackage(IdentifierToken t) {
currentPackage = t.getName();
currentPackageWhere = t.getWhere();
}
/**
* Specify the current package
*/
public synchronized void setCurrentPackage(Identifier id) {
currentPackage = id;
}
/**
* Report the current package
*/
public Identifier getCurrentPackage() {
return currentPackage;
}
/**
* Return an unmodifiable list of IdentifierToken representing
* packages specified as imports.
*/
public List getImportedPackages() {
return Collections.unmodifiableList(packages);
}
/**
* Return an unmodifiable list of IdentifierToken representing
* classes specified as imports.
*/
public List getImportedClasses() {
return Collections.unmodifiableList(singles);
}
/**
* Extend an environment with my resolve() method.
*/
public Environment newEnvironment(Environment env) {
return new ImportEnvironment(env, this);
}
}
final
class ImportEnvironment extends Environment {
/** The import table used to resolve unqualified names in this environment. */
Imports imports;
/**
* @param env     environment to wrap.
* @param imports import table that will handle name resolution.
*/
ImportEnvironment(Environment env, Imports imports) {
super(env, env.getSource());
this.imports = imports;
}
/** Resolves an identifier through the attached import table. */
public Identifier resolve(Identifier nm) throws ClassNotFound {
return imports.resolve(this, nm);
}
/** @return the import table backing this environment. */
public Imports getImports() {
return imports;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.structure.io.graphson;
import org.apache.tinkerpop.gremlin.process.traversal.Path;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.MutablePath;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree;
import org.apache.tinkerpop.gremlin.process.traversal.util.DefaultTraversalMetrics;
import org.apache.tinkerpop.gremlin.process.traversal.util.Metrics;
import org.apache.tinkerpop.gremlin.process.traversal.util.MutableMetrics;
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalExplanation;
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalMetrics;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.apache.tinkerpop.gremlin.structure.Property;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.structure.VertexProperty;
import org.apache.tinkerpop.gremlin.structure.util.Comparators;
import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedEdge;
import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedFactory;
import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedProperty;
import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex;
import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty;
import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils;
import org.apache.tinkerpop.shaded.jackson.core.JsonGenerationException;
import org.apache.tinkerpop.shaded.jackson.core.JsonGenerator;
import org.apache.tinkerpop.shaded.jackson.core.JsonParser;
import org.apache.tinkerpop.shaded.jackson.core.JsonProcessingException;
import org.apache.tinkerpop.shaded.jackson.core.JsonToken;
import org.apache.tinkerpop.shaded.jackson.databind.DeserializationContext;
import org.apache.tinkerpop.shaded.jackson.databind.JavaType;
import org.apache.tinkerpop.shaded.jackson.databind.JsonNode;
import org.apache.tinkerpop.shaded.jackson.databind.SerializerProvider;
import org.apache.tinkerpop.shaded.jackson.databind.deser.std.StdDeserializer;
import org.apache.tinkerpop.shaded.jackson.databind.jsontype.TypeSerializer;
import org.apache.tinkerpop.shaded.jackson.databind.node.ArrayNode;
import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdKeySerializer;
import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdScalarSerializer;
import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdSerializer;
import org.apache.tinkerpop.shaded.jackson.databind.type.TypeFactory;
import org.javatuples.Pair;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import static org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONUtil.safeWriteObjectField;
/**
* GraphSON serializers for graph-based objects such as vertices, edges, properties, and paths. These serializers
* present a generalized way to serialize the implementations of core interfaces.
*
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
class GraphSONSerializersV2d0 {
private GraphSONSerializersV2d0() {
}
////////////////////////////// SERIALIZERS /////////////////////////////////
final static class VertexJacksonSerializer extends StdScalarSerializer<Vertex> {

    /** When true, keys and properties are emitted in a deterministic sorted order. */
    private final boolean normalize;

    public VertexJacksonSerializer(final boolean normalize) {
        super(Vertex.class);
        this.normalize = normalize;
    }

    /**
     * Writes a vertex as {id, label, properties}.
     */
    @Override
    public void serialize(final Vertex vertex, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
            throws IOException {
        jsonGenerator.writeStartObject();
        jsonGenerator.writeObjectField(GraphSONTokens.ID, vertex.id());
        jsonGenerator.writeStringField(GraphSONTokens.LABEL, vertex.label());
        writeProperties(vertex, jsonGenerator);
        jsonGenerator.writeEndObject();
    }

    /** Writes the vertex's properties object, grouped per key; omitted when the vertex has no keys. */
    private void writeProperties(final Vertex vertex, final JsonGenerator gen) throws IOException {
        if (vertex.keys().size() == 0) {
            return;
        }
        gen.writeFieldName(GraphSONTokens.PROPERTIES);
        gen.writeStartObject();

        final List<String> orderedKeys;
        if (normalize) {
            orderedKeys = IteratorUtils.list(vertex.keys().iterator(), Comparator.naturalOrder());
        } else {
            orderedKeys = new ArrayList<>(vertex.keys());
        }

        for (final String key : orderedKeys) {
            final Iterator<VertexProperty<Object>> props;
            if (normalize) {
                props = IteratorUtils.list(vertex.properties(key), Comparators.PROPERTY_COMPARATOR).iterator();
            } else {
                props = vertex.properties(key);
            }
            // Only emit a field for keys that actually have properties.
            if (!props.hasNext()) {
                continue;
            }
            gen.writeFieldName(key);
            gen.writeStartArray();
            while (props.hasNext()) {
                gen.writeObject(props.next());
            }
            gen.writeEndArray();
        }
        gen.writeEndObject();
    }
}
final static class EdgeJacksonSerializer extends StdScalarSerializer<Edge> {

    /** When true, edge properties are written in a stable, sorted order. */
    private final boolean normalize;

    public EdgeJacksonSerializer(final boolean normalize) {
        super(Edge.class);
        this.normalize = normalize;
    }

    /**
     * Writes an edge as {id, label, inVLabel, outVLabel, inV, outV, properties}.
     */
    @Override
    public void serialize(final Edge edge, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
            throws IOException {
        jsonGenerator.writeStartObject();
        jsonGenerator.writeObjectField(GraphSONTokens.ID, edge.id());
        jsonGenerator.writeStringField(GraphSONTokens.LABEL, edge.label());
        jsonGenerator.writeStringField(GraphSONTokens.IN_LABEL, edge.inVertex().label());
        jsonGenerator.writeStringField(GraphSONTokens.OUT_LABEL, edge.outVertex().label());
        jsonGenerator.writeObjectField(GraphSONTokens.IN, edge.inVertex().id());
        jsonGenerator.writeObjectField(GraphSONTokens.OUT, edge.outVertex().id());
        writeProperties(edge, jsonGenerator);
        jsonGenerator.writeEndObject();
    }

    /** Writes the edge's properties object; omitted entirely when the edge has none. */
    private void writeProperties(final Edge edge, final JsonGenerator gen) throws IOException {
        final Iterator<Property<Object>> props;
        if (normalize) {
            props = IteratorUtils.list(edge.properties(), Comparators.PROPERTY_COMPARATOR).iterator();
        } else {
            props = edge.properties();
        }
        if (!props.hasNext()) {
            return;
        }
        gen.writeFieldName(GraphSONTokens.PROPERTIES);
        gen.writeStartObject();
        while (props.hasNext()) {
            final Property<Object> prop = props.next();
            safeWriteObjectField(gen, prop.key(), prop);
        }
        gen.writeEndObject();
    }
}
final static class PropertyJacksonSerializer extends StdScalarSerializer<Property> {
public PropertyJacksonSerializer() {
super(Property.class);
}
/**
* Writes a property as a simple {key, value} object.
*/
@Override
public void serialize(final Property property, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
throws IOException {
jsonGenerator.writeStartObject();
jsonGenerator.writeObjectField(GraphSONTokens.KEY, property.key());
jsonGenerator.writeObjectField(GraphSONTokens.VALUE, property.value());
jsonGenerator.writeEndObject();
}
}
final static class VertexPropertyJacksonSerializer extends StdScalarSerializer<VertexProperty> {
/** When true, meta-properties are written in a stable, sorted order. */
private final boolean normalize;
/** When true, the property's label is included in the output. */
private final boolean includeLabel;
public VertexPropertyJacksonSerializer(final boolean normalize, final boolean includeLabel) {
super(VertexProperty.class);
this.normalize = normalize;
this.includeLabel = includeLabel;
}
/**
* Writes a vertex property as {id, value[, label][, properties]}.
*/
@Override
public void serialize(final VertexProperty property, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
throws IOException {
jsonGenerator.writeStartObject();
jsonGenerator.writeObjectField(GraphSONTokens.ID, property.id());
jsonGenerator.writeObjectField(GraphSONTokens.VALUE, property.value());
if (includeLabel)
jsonGenerator.writeStringField(GraphSONTokens.LABEL, property.label());
tryWriteMetaProperties(property, jsonGenerator, normalize);
jsonGenerator.writeEndObject();
}
/**
* Writes the meta-properties object only when it is safe and worthwhile:
* detached properties can't consult graph features, so presence of
* meta-properties alone decides; attached properties additionally check
* that the graph supports meta-properties.
*/
private static void tryWriteMetaProperties(final VertexProperty property, final JsonGenerator jsonGenerator,
final boolean normalize) throws IOException {
// when "detached" you can't check features of the graph it detached from so it has to be
// treated differently from a regular VertexProperty implementation.
if (property instanceof DetachedVertexProperty) {
// only write meta properties key if they exist
if (property.properties().hasNext()) {
writeMetaProperties(property, jsonGenerator, normalize);
}
} else {
// still attached - so we can check the features to see if it's worth even trying to write the
// meta properties key
if (property.graph().features().vertex().supportsMetaProperties() && property.properties().hasNext()) {
writeMetaProperties(property, jsonGenerator, normalize);
}
}
}
/** Writes the meta-properties as a flat {key: value} object under PROPERTIES. */
private static void writeMetaProperties(final VertexProperty property, final JsonGenerator jsonGenerator,
final boolean normalize) throws IOException {
jsonGenerator.writeFieldName(GraphSONTokens.PROPERTIES);
jsonGenerator.writeStartObject();
final Iterator<Property<Object>> metaProperties = normalize ?
IteratorUtils.list((Iterator<Property<Object>>) property.properties(), Comparators.PROPERTY_COMPARATOR).iterator() : property.properties();
while (metaProperties.hasNext()) {
final Property<Object> metaProperty = metaProperties.next();
jsonGenerator.writeObjectField(metaProperty.key(), metaProperty.value());
}
jsonGenerator.writeEndObject();
}
}
final static class PathJacksonSerializer extends StdScalarSerializer<Path> {
public PathJacksonSerializer() {
super(Path.class);
}
/**
* Writes a path as {labels, objects}, detaching it first so contained
* graph elements are serialized as references rather than with properties.
*/
@Override
public void serialize(final Path path, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
throws IOException, JsonGenerationException {
jsonGenerator.writeStartObject();
// paths shouldn't serialize with properties if the path contains graph elements
final Path p = DetachedFactory.detach(path, false);
jsonGenerator.writeObjectField(GraphSONTokens.LABELS, p.labels());
jsonGenerator.writeObjectField(GraphSONTokens.OBJECTS, p.objects());
jsonGenerator.writeEndObject();
}
}
final static class TreeJacksonSerializer extends StdScalarSerializer<Tree> {

    public TreeJacksonSerializer() {
        super(Tree.class);
    }

    /**
     * Writes a tree level as an array of {key, value} objects, one per branch.
     */
    @Override
    public void serialize(final Tree tree, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider) throws IOException, JsonGenerationException {
        jsonGenerator.writeStartArray();
        final Iterator<Map.Entry<Element, Tree>> branches = tree.entrySet().iterator();
        while (branches.hasNext()) {
            final Map.Entry<Element, Tree> branch = branches.next();
            jsonGenerator.writeStartObject();
            jsonGenerator.writeObjectField(GraphSONTokens.KEY, branch.getKey());
            jsonGenerator.writeObjectField(GraphSONTokens.VALUE, branch.getValue());
            jsonGenerator.writeEndObject();
        }
        jsonGenerator.writeEndArray();
    }
}
final static class TraversalExplanationJacksonSerializer extends StdSerializer<TraversalExplanation> {
    public TraversalExplanationJacksonSerializer() {
        super(TraversalExplanation.class);
    }

    /**
     * Serializes a {@link TraversalExplanation} as a map carrying the original traversal,
     * one intermediate entry per applied strategy, and the final compiled traversal.
     */
    @Override
    public void serialize(final TraversalExplanation traversalExplanation, final JsonGenerator jsonGenerator,
                          final SerializerProvider serializerProvider) throws IOException {
        final Map<String, Object> explanation = new HashMap<>();
        explanation.put(GraphSONTokens.ORIGINAL, getStepsAsList(traversalExplanation.getOriginalTraversal()));

        final List<Pair<TraversalStrategy, Traversal.Admin<?, ?>>> strategyTraversals = traversalExplanation.getStrategyTraversals();

        final List<Map<String, Object>> intermediates = new ArrayList<>();
        for (final Pair<TraversalStrategy, Traversal.Admin<?, ?>> pair : strategyTraversals) {
            final Map<String, Object> stage = new HashMap<>();
            stage.put(GraphSONTokens.STRATEGY, pair.getValue0().toString());
            stage.put(GraphSONTokens.CATEGORY, pair.getValue0().getTraversalCategory().getSimpleName());
            stage.put(GraphSONTokens.TRAVERSAL, getStepsAsList(pair.getValue1()));
            intermediates.add(stage);
        }
        explanation.put(GraphSONTokens.INTERMEDIATE, intermediates);

        // the "final" traversal is the result of the last strategy application, or the
        // original traversal when no strategies were applied at all
        final Traversal.Admin<?, ?> last = strategyTraversals.isEmpty()
                ? traversalExplanation.getOriginalTraversal()
                : strategyTraversals.get(strategyTraversals.size() - 1).getValue1();
        explanation.put(GraphSONTokens.FINAL, getStepsAsList(last));

        jsonGenerator.writeObject(explanation);
    }

    // renders each step of the traversal via toString(), preserving step order
    private List<String> getStepsAsList(final Traversal.Admin<?, ?> t) {
        final List<String> steps = new ArrayList<>();
        for (final Object step : t.getSteps())
            steps.add(step.toString());
        return steps;
    }
}
final static class IntegerGraphSONSerializer extends StdScalarSerializer<Integer> {
    public IntegerGraphSONSerializer() {
        super(Integer.class);
    }

    /** Writes the boxed value as a plain JSON number. */
    @Override
    public void serialize(final Integer integer, final JsonGenerator jsonGenerator,
                          final SerializerProvider serializerProvider) throws IOException {
        // unbox explicitly so the generator's int overload is selected
        jsonGenerator.writeNumber(integer.intValue());
    }
}
final static class DoubleGraphSONSerializer extends StdScalarSerializer<Double> {
    public DoubleGraphSONSerializer() {
        super(Double.class);
    }

    /** Writes the boxed value as a plain JSON number. */
    @Override
    public void serialize(final Double doubleValue, final JsonGenerator jsonGenerator,
                          final SerializerProvider serializerProvider) throws IOException {
        // unbox explicitly so the generator's double overload is selected
        jsonGenerator.writeNumber(doubleValue.doubleValue());
    }
}
final static class TraversalMetricsJacksonSerializer extends StdScalarSerializer<TraversalMetrics> {
    public TraversalMetricsJacksonSerializer() {
        super(TraversalMetrics.class);
    }

    /**
     * Writes {@link TraversalMetrics} via an intermediate map so every field is serialized
     * with its type when required. Duration is reported in milliseconds.
     */
    @Override
    public void serialize(final TraversalMetrics traversalMetrics, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider)
            throws IOException {
        final Map<String, Object> m = new HashMap<>();
        // nanoseconds -> milliseconds as a double
        m.put(GraphSONTokens.DURATION, traversalMetrics.getDuration(TimeUnit.NANOSECONDS) / 1000000d);
        m.put(GraphSONTokens.METRICS, new ArrayList<Metrics>(traversalMetrics.getMetrics()));
        jsonGenerator.writeObject(m);
    }
}
final static class MetricsJacksonSerializer extends StdScalarSerializer<Metrics> {
    public MetricsJacksonSerializer() {
        super(Metrics.class);
    }

    /**
     * Writes a single {@link Metrics} entry; empty annotations and nested metrics are
     * omitted from the output entirely.
     */
    @Override
    public void serialize(final Metrics metrics, final JsonGenerator jsonGenerator,
                          final SerializerProvider serializerProvider) throws IOException {
        final Map<String, Object> m = new HashMap<>();
        m.put(GraphSONTokens.ID, metrics.getId());
        m.put(GraphSONTokens.NAME, metrics.getName());
        m.put(GraphSONTokens.COUNTS, metrics.getCounts());
        // nanoseconds -> milliseconds as a double
        m.put(GraphSONTokens.DURATION, metrics.getDuration(TimeUnit.NANOSECONDS) / 1000000d);
        if (!metrics.getAnnotations().isEmpty()) {
            m.put(GraphSONTokens.ANNOTATIONS, metrics.getAnnotations());
        }
        if (!metrics.getNested().isEmpty()) {
            m.put(GraphSONTokens.METRICS, new ArrayList<Metrics>(metrics.getNested()));
        }
        jsonGenerator.writeObject(m);
    }
}
/**
 * Maps in the JVM can have {@link Object} as a key, but in JSON they must be a {@link String}.
 * Graph {@link Element} keys are rendered as their id; everything else falls back to the
 * default Jackson key handling.
 */
final static class GraphSONKeySerializer extends StdKeySerializer {
    @Override
    public void serialize(final Object o, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider) throws IOException {
        ser(o, jsonGenerator, serializerProvider);
    }

    @Override
    public void serializeWithType(final Object o, final JsonGenerator jsonGenerator,
                                  final SerializerProvider serializerProvider, final TypeSerializer typeSerializer) throws IOException {
        // typed and untyped key serialization are identical for keys
        ser(o, jsonGenerator, serializerProvider);
    }

    private void ser(final Object o, final JsonGenerator jsonGenerator,
                     final SerializerProvider serializerProvider) throws IOException {
        if (Element.class.isAssignableFrom(o.getClass())) {
            jsonGenerator.writeFieldName((((Element) o).id()).toString());
        } else {
            super.serialize(o, jsonGenerator, serializerProvider);
        }
    }
}
//////////////////////////// DESERIALIZERS ///////////////////////////
static class VertexJacksonDeserializer extends StdDeserializer<Vertex> {
    public VertexJacksonDeserializer() {
        super(Vertex.class);
    }

    /**
     * Reads a GraphSON vertex object into a {@link DetachedVertex}. Fields may appear in any
     * order; the properties field is an object whose values are arrays of vertex properties.
     */
    @Override // was missing; added for consistency with the other deserializers in this class
    public Vertex deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
        final DetachedVertex.Builder v = DetachedVertex.build();
        while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
            if (jsonParser.getCurrentName().equals(GraphSONTokens.ID)) {
                jsonParser.nextToken(); // move from field name to its value
                v.setId(deserializationContext.readValue(jsonParser, Object.class));
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.LABEL)) {
                jsonParser.nextToken();
                v.setLabel(jsonParser.getText());
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.PROPERTIES)) {
                jsonParser.nextToken();
                // outer loop walks property keys, inner loop the array of values per key
                while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
                    jsonParser.nextToken();
                    while (jsonParser.nextToken() != JsonToken.END_ARRAY) {
                        v.addProperty((DetachedVertexProperty) deserializationContext.readValue(jsonParser, VertexProperty.class));
                    }
                }
            }
        }
        return v.create();
    }

    @Override
    public boolean isCachable() {
        return true;
    }
}
static class EdgeJacksonDeserializer extends StdDeserializer<Edge> {
    public EdgeJacksonDeserializer() {
        super(Edge.class);
    }

    /**
     * Reads a GraphSON edge object into a {@link DetachedEdge}. The parser is positioned at
     * the start of the edge object; fields may appear in any order.
     */
    @Override
    public Edge deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
        final DetachedEdge.Builder e = DetachedEdge.build();
        // the incident vertices are carried only as id/label pairs inside the edge's fields
        final DetachedVertex.Builder inV = DetachedVertex.build();
        final DetachedVertex.Builder outV = DetachedVertex.build();
        while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
            if (jsonParser.getCurrentName().equals(GraphSONTokens.ID)) {
                jsonParser.nextToken(); // advance from the field name to its value
                e.setId(deserializationContext.readValue(jsonParser, Object.class));
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.LABEL)) {
                jsonParser.nextToken();
                e.setLabel(jsonParser.getText());
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.OUT)) {
                jsonParser.nextToken();
                outV.setId(deserializationContext.readValue(jsonParser, Object.class));
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.OUT_LABEL)) {
                jsonParser.nextToken();
                outV.setLabel(jsonParser.getText());
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.IN)) {
                jsonParser.nextToken();
                inV.setId(deserializationContext.readValue(jsonParser, Object.class));
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.IN_LABEL)) {
                jsonParser.nextToken();
                inV.setLabel(jsonParser.getText());
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.PROPERTIES)) {
                jsonParser.nextToken();
                // properties come as an object: step over each key, then read its value
                while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
                    jsonParser.nextToken();
                    e.addProperty(deserializationContext.readValue(jsonParser, Property.class));
                }
            }
        }
        e.setInV(inV.create());
        e.setOutV(outV.create());
        return e.create();
    }

    @Override
    public boolean isCachable() {
        return true;
    }
}
static class PropertyJacksonDeserializer extends StdDeserializer<Property> {
    public PropertyJacksonDeserializer() {
        super(Property.class);
    }

    /**
     * Reads a GraphSON property object ({"key": ..., "value": ...}) into a
     * {@link DetachedProperty}. The two fields may appear in either order.
     */
    @Override
    public Property deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
        String key = null;
        Object value = null;
        while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
            if (jsonParser.getCurrentName().equals(GraphSONTokens.KEY)) {
                jsonParser.nextToken(); // advance from the field name to its value
                key = jsonParser.getText();
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.VALUE)) {
                jsonParser.nextToken();
                value = deserializationContext.readValue(jsonParser, Object.class);
            }
        }
        return new DetachedProperty<>(key, value);
    }

    @Override
    public boolean isCachable() {
        return true;
    }
}
static class PathJacksonDeserializer extends StdDeserializer<Path> {
    // target type for each step's label set: HashSet<String>
    private static final JavaType setType = TypeFactory.defaultInstance().constructCollectionType(HashSet.class, String.class);

    public PathJacksonDeserializer() {
        super(Path.class);
    }

    /**
     * Rebuilds a {@link Path} from its "labels"/"objects" arrays. The arrays are parallel:
     * entry i of "labels" is the label set for object i.
     */
    @Override
    public Path deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
        final JsonNode n = jsonParser.readValueAsTree();
        final Path p = MutablePath.make();
        final ArrayNode labels = (ArrayNode) n.get(GraphSONTokens.LABELS);
        final ArrayNode objects = (ArrayNode) n.get(GraphSONTokens.OBJECTS);
        for (int i = 0; i < objects.size(); i++) {
            // traverse() yields a parser over the sub-tree; advance once to its first token
            final JsonParser po = objects.get(i).traverse();
            po.nextToken();
            final JsonParser pl = labels.get(i).traverse();
            pl.nextToken();
            p.extend(deserializationContext.readValue(po, Object.class), deserializationContext.readValue(pl, setType));
        }
        return p;
    }

    @Override
    public boolean isCachable() {
        return true;
    }
}
static class VertexPropertyJacksonDeserializer extends StdDeserializer<VertexProperty> {
    // NOTE(review): this field is not referenced anywhere in this class -- possibly leftover
    private static final JavaType propertiesType = TypeFactory.defaultInstance().constructMapType(HashMap.class, String.class, Object.class);

    protected VertexPropertyJacksonDeserializer() {
        super(VertexProperty.class);
    }

    /**
     * Reads a GraphSON vertex property into a {@link DetachedVertexProperty}. Meta-properties
     * arrive as a flat object of key/value pairs under the properties field.
     */
    @Override
    public VertexProperty deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
        final DetachedVertexProperty.Builder vp = DetachedVertexProperty.build();
        while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
            if (jsonParser.getCurrentName().equals(GraphSONTokens.ID)) {
                jsonParser.nextToken(); // advance from the field name to its value
                vp.setId(deserializationContext.readValue(jsonParser, Object.class));
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.LABEL)) {
                jsonParser.nextToken();
                vp.setLabel(jsonParser.getText());
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.VALUE)) {
                jsonParser.nextToken();
                vp.setValue(deserializationContext.readValue(jsonParser, Object.class));
            } else if (jsonParser.getCurrentName().equals(GraphSONTokens.PROPERTIES)) {
                jsonParser.nextToken();
                // each meta-property is one field: name token, then its value
                while (jsonParser.nextToken() != JsonToken.END_OBJECT) {
                    final String key = jsonParser.getCurrentName();
                    jsonParser.nextToken();
                    final Object val = deserializationContext.readValue(jsonParser, Object.class);
                    vp.addProperty(new DetachedProperty(key, val));
                }
            }
        }
        return vp.create();
    }

    @Override
    public boolean isCachable() {
        return true;
    }
}
static class MetricsJacksonDeserializer extends AbstractObjectDeserializer<Metrics> {
    public MetricsJacksonDeserializer() {
        super(Metrics.class);
    }

    /**
     * Rebuilds a {@link MutableMetrics} from the generic map produced by the base
     * deserializer. Duration is transported in milliseconds and restored as nanoseconds;
     * counts, annotations and nested metrics are all optional.
     */
    @Override
    public Metrics createObject(final Map<String, Object> metricsData) {
        final MutableMetrics m = new MutableMetrics((String) metricsData.get(GraphSONTokens.ID), (String) metricsData.get(GraphSONTokens.NAME));
        m.setDuration(Math.round((Double) metricsData.get(GraphSONTokens.DURATION) * 1000000), TimeUnit.NANOSECONDS);

        final Map<String, Long> counts = (Map<String, Long>) metricsData.getOrDefault(GraphSONTokens.COUNTS, new HashMap<>(0));
        for (final Map.Entry<String, Long> entry : counts.entrySet()) {
            m.setCount(entry.getKey(), entry.getValue());
        }

        final Map<String, Long> annotations = (Map<String, Long>) metricsData.getOrDefault(GraphSONTokens.ANNOTATIONS, new HashMap<>(0));
        for (final Map.Entry<String, Long> entry : annotations.entrySet()) {
            m.setAnnotation(entry.getKey(), entry.getValue());
        }

        for (final MutableMetrics nested : (List<MutableMetrics>) metricsData.getOrDefault(GraphSONTokens.METRICS, new ArrayList<>(0))) {
            m.addNested(nested);
        }
        return m;
    }
}
static class TraversalMetricsJacksonDeserializer extends AbstractObjectDeserializer<TraversalMetrics> {
    public TraversalMetricsJacksonDeserializer() {
        super(TraversalMetrics.class);
    }

    /** Restores the duration (transported in milliseconds) to nanoseconds and rebuilds the metrics list. */
    @Override
    public TraversalMetrics createObject(final Map<String, Object> traversalMetricsData) {
        final long durationNanos = Math.round((Double) traversalMetricsData.get(GraphSONTokens.DURATION) * 1000000);
        final List<MutableMetrics> metrics = (List<MutableMetrics>) traversalMetricsData.get(GraphSONTokens.METRICS);
        return new DefaultTraversalMetrics(durationNanos, metrics);
    }
}
static class TreeJacksonDeserializer extends StdDeserializer<Tree> {
    public TreeJacksonDeserializer() {
        super(Tree.class);
    }

    /** Reads the serialized list of {key, value} entries back into a {@link Tree}. */
    @Override
    public Tree deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
        final Tree t = new Tree();
        final List<Map> entries = deserializationContext.readValue(jsonParser, List.class);
        for (final Map<String, Object> entry : entries) {
            t.put(entry.get(GraphSONTokens.KEY), entry.get(GraphSONTokens.VALUE));
        }
        return t;
    }

    @Override
    public boolean isCachable() {
        return true;
    }
}
static class IntegerJacksonDeserializer extends StdDeserializer<Integer> {
    protected IntegerJacksonDeserializer() {
        super(Integer.class);
    }

    /** Reads the current token as a plain {@code int}. */
    @Override
    public Integer deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
        return Integer.valueOf(jsonParser.getIntValue());
    }

    @Override
    public boolean isCachable() {
        return true;
    }
}
static class DoubleJacksonDeserializer extends StdDeserializer<Double> {
    protected DoubleJacksonDeserializer() {
        super(Double.class);
    }

    /** Reads the current token as a plain {@code double}. */
    @Override
    public Double deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
        return Double.valueOf(jsonParser.getDoubleValue());
    }

    @Override
    public boolean isCachable() {
        return true;
    }
}
}
| |
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.github.geequery.asm;
/**
* An {@link FieldVisitor} that generates Java fields in bytecode form.
*
* @author Eric Bruneton
*/
final class FieldWriter extends FieldVisitor {

    /**
     * The class writer to which this field must be added.
     */
    private final ClassWriter cw;

    /**
     * Access flags of this field.
     */
    private final int access;

    /**
     * The index of the constant pool item that contains the name of this
     * field.
     */
    private final int name;

    /**
     * The index of the constant pool item that contains the descriptor of this
     * field.
     */
    private final int desc;

    /**
     * The index of the constant pool item that contains the signature of this
     * field. Zero means "no signature attribute".
     */
    private int signature;

    /**
     * The index of the constant pool item that contains the constant value of
     * this field. Zero means "no ConstantValue attribute".
     */
    private int value;

    /**
     * The runtime visible annotations of this field. May be <tt>null</tt>.
     */
    private AnnotationWriter anns;

    /**
     * The runtime invisible annotations of this field. May be <tt>null</tt>.
     */
    private AnnotationWriter ianns;

    /**
     * The runtime visible type annotations of this field. May be <tt>null</tt>.
     */
    private AnnotationWriter tanns;

    /**
     * The runtime invisible type annotations of this field. May be
     * <tt>null</tt>.
     */
    private AnnotationWriter itanns;

    /**
     * The non standard attributes of this field. May be <tt>null</tt>.
     */
    private Attribute attrs;

    // ------------------------------------------------------------------------
    // Constructor
    // ------------------------------------------------------------------------

    /**
     * Constructs a new {@link FieldWriter} and appends it to the class
     * writer's linked list of fields.
     *
     * @param cw
     *            the class writer to which this field must be added.
     * @param access
     *            the field's access flags (see {@link Opcodes}).
     * @param name
     *            the field's name.
     * @param desc
     *            the field's descriptor (see {@link Type}).
     * @param signature
     *            the field's signature. May be <tt>null</tt>.
     * @param value
     *            the field's constant value. May be <tt>null</tt>.
     */
    FieldWriter(final ClassWriter cw, final int access, final String name,
            final String desc, final String signature, final Object value) {
        super(Opcodes.ASM6);
        // link this writer into cw's singly-linked list of fields
        if (cw.firstField == null) {
            cw.firstField = this;
        } else {
            cw.lastField.fv = this;
        }
        cw.lastField = this;
        this.cw = cw;
        this.access = access;
        // name and descriptor are interned in the constant pool immediately
        this.name = cw.newUTF8(name);
        this.desc = cw.newUTF8(desc);
        if (ClassReader.SIGNATURES && signature != null) {
            this.signature = cw.newUTF8(signature);
        }
        if (value != null) {
            this.value = cw.newConstItem(value).index;
        }
    }

    // ------------------------------------------------------------------------
    // Implementation of the FieldVisitor abstract class
    // ------------------------------------------------------------------------

    @Override
    public AnnotationVisitor visitAnnotation(final String desc,
            final boolean visible) {
        if (!ClassReader.ANNOTATIONS) {
            return null;
        }
        ByteVector bv = new ByteVector();
        // write type, and reserve space for values count
        bv.putShort(cw.newUTF8(desc)).putShort(0);
        AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv, 2);
        // prepend to the visible or invisible annotation list
        if (visible) {
            aw.next = anns;
            anns = aw;
        } else {
            aw.next = ianns;
            ianns = aw;
        }
        return aw;
    }

    @Override
    public AnnotationVisitor visitTypeAnnotation(final int typeRef,
            final TypePath typePath, final String desc, final boolean visible) {
        if (!ClassReader.ANNOTATIONS) {
            return null;
        }
        ByteVector bv = new ByteVector();
        // write target_type and target_info
        AnnotationWriter.putTarget(typeRef, typePath, bv);
        // write type, and reserve space for values count
        bv.putShort(cw.newUTF8(desc)).putShort(0);
        AnnotationWriter aw = new AnnotationWriter(cw, true, bv, bv,
                bv.length - 2);
        // prepend to the visible or invisible type annotation list
        if (visible) {
            aw.next = tanns;
            tanns = aw;
        } else {
            aw.next = itanns;
            itanns = aw;
        }
        return aw;
    }

    @Override
    public void visitAttribute(final Attribute attr) {
        // prepend to the list of non-standard attributes
        attr.next = attrs;
        attrs = attr;
    }

    @Override
    public void visitEnd() {
        // nothing to do: field content is flushed lazily by getSize()/put()
    }

    // ------------------------------------------------------------------------
    // Utility methods
    // ------------------------------------------------------------------------

    /**
     * Returns the size of this field, in bytes, as it will be written by
     * {@link #put}. Also interns in the constant pool every attribute name
     * that {@link #put} will need.
     *
     * @return the size of this field.
     */
    int getSize() {
        // 8 = access_flags (2) + name_index (2) + descriptor_index (2)
        //     + attributes_count (2); see the matching writes in put()
        int size = 8;
        if (value != 0) {
            cw.newUTF8("ConstantValue");
            // 6-byte attribute header + 2-byte constant pool index
            size += 8;
        }
        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
            // a Synthetic attribute is only needed for pre-1.5 class files or
            // when explicitly requested via the pseudo access flag
            if ((cw.version & 0xFFFF) < Opcodes.V1_5
                    || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
                cw.newUTF8("Synthetic");
                size += 6;
            }
        }
        if ((access & Opcodes.ACC_DEPRECATED) != 0) {
            cw.newUTF8("Deprecated");
            size += 6;
        }
        if (ClassReader.SIGNATURES && signature != 0) {
            cw.newUTF8("Signature");
            size += 8;
        }
        if (ClassReader.ANNOTATIONS && anns != null) {
            cw.newUTF8("RuntimeVisibleAnnotations");
            size += 8 + anns.getSize();
        }
        if (ClassReader.ANNOTATIONS && ianns != null) {
            cw.newUTF8("RuntimeInvisibleAnnotations");
            size += 8 + ianns.getSize();
        }
        if (ClassReader.ANNOTATIONS && tanns != null) {
            cw.newUTF8("RuntimeVisibleTypeAnnotations");
            size += 8 + tanns.getSize();
        }
        if (ClassReader.ANNOTATIONS && itanns != null) {
            cw.newUTF8("RuntimeInvisibleTypeAnnotations");
            size += 8 + itanns.getSize();
        }
        if (attrs != null) {
            size += attrs.getSize(cw, null, 0, -1, -1);
        }
        return size;
    }

    /**
     * Puts the content of this field into the given byte vector. The layout
     * must stay in sync with the accounting done in {@link #getSize}.
     *
     * @param out
     *            where the content of this field must be put.
     */
    void put(final ByteVector out) {
        final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC;
        // mask out the pseudo access flags that must not appear in the file
        int mask = Opcodes.ACC_DEPRECATED | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
                | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR);
        out.putShort(access & ~mask).putShort(name).putShort(desc);
        // first pass: count the attributes that will be emitted below
        int attributeCount = 0;
        if (value != 0) {
            ++attributeCount;
        }
        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
            if ((cw.version & 0xFFFF) < Opcodes.V1_5
                    || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
                ++attributeCount;
            }
        }
        if ((access & Opcodes.ACC_DEPRECATED) != 0) {
            ++attributeCount;
        }
        if (ClassReader.SIGNATURES && signature != 0) {
            ++attributeCount;
        }
        if (ClassReader.ANNOTATIONS && anns != null) {
            ++attributeCount;
        }
        if (ClassReader.ANNOTATIONS && ianns != null) {
            ++attributeCount;
        }
        if (ClassReader.ANNOTATIONS && tanns != null) {
            ++attributeCount;
        }
        if (ClassReader.ANNOTATIONS && itanns != null) {
            ++attributeCount;
        }
        if (attrs != null) {
            attributeCount += attrs.getCount();
        }
        out.putShort(attributeCount);
        // second pass: emit each attribute, in the same order as counted above
        if (value != 0) {
            out.putShort(cw.newUTF8("ConstantValue"));
            out.putInt(2).putShort(value);
        }
        if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
            if ((cw.version & 0xFFFF) < Opcodes.V1_5
                    || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
                out.putShort(cw.newUTF8("Synthetic")).putInt(0);
            }
        }
        if ((access & Opcodes.ACC_DEPRECATED) != 0) {
            out.putShort(cw.newUTF8("Deprecated")).putInt(0);
        }
        if (ClassReader.SIGNATURES && signature != 0) {
            out.putShort(cw.newUTF8("Signature"));
            out.putInt(2).putShort(signature);
        }
        if (ClassReader.ANNOTATIONS && anns != null) {
            out.putShort(cw.newUTF8("RuntimeVisibleAnnotations"));
            anns.put(out);
        }
        if (ClassReader.ANNOTATIONS && ianns != null) {
            out.putShort(cw.newUTF8("RuntimeInvisibleAnnotations"));
            ianns.put(out);
        }
        if (ClassReader.ANNOTATIONS && tanns != null) {
            out.putShort(cw.newUTF8("RuntimeVisibleTypeAnnotations"));
            tanns.put(out);
        }
        if (ClassReader.ANNOTATIONS && itanns != null) {
            out.putShort(cw.newUTF8("RuntimeInvisibleTypeAnnotations"));
            itanns.put(out);
        }
        if (attrs != null) {
            attrs.put(cw, null, 0, -1, -1, out);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.utils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
public class IOUtils {
private static final int BUFFER_SIZE = 1024 * 8;
private IOUtils() {
}
/**
* write.
*
* @param is InputStream instance.
* @param os OutputStream instance.
* @return count.
* @throws IOException
*/
public static long write(InputStream is, OutputStream os) throws IOException {
return write(is, os, BUFFER_SIZE);
}
/**
* write.
*
* @param is InputStream instance.
* @param os OutputStream instance.
* @param bufferSize buffer size.
* @return count.
* @throws IOException
*/
public static long write(InputStream is, OutputStream os, int bufferSize) throws IOException {
int read;
long total = 0;
byte[] buff = new byte[bufferSize];
while (is.available() > 0) {
read = is.read(buff, 0, buff.length);
if (read > 0) {
os.write(buff, 0, read);
total += read;
}
}
return total;
}
/**
* read string.
*
* @param reader Reader instance.
* @return String.
* @throws IOException
*/
public static String read(Reader reader) throws IOException {
StringWriter writer = new StringWriter();
try {
write(reader, writer);
return writer.getBuffer().toString();
} finally {
writer.close();
}
}
/**
* write string.
*
* @param writer Writer instance.
* @param string String.
* @throws IOException
*/
public static long write(Writer writer, String string) throws IOException {
Reader reader = new StringReader(string);
try {
return write(reader, writer);
} finally {
reader.close();
}
}
/**
* write.
*
* @param reader Reader.
* @param writer Writer.
* @return count.
* @throws IOException
*/
public static long write(Reader reader, Writer writer) throws IOException {
return write(reader, writer, BUFFER_SIZE);
}
/**
* write.
*
* @param reader Reader.
* @param writer Writer.
* @param bufferSize buffer size.
* @return count.
* @throws IOException
*/
public static long write(Reader reader, Writer writer, int bufferSize) throws IOException {
int read;
long total = 0;
char[] buf = new char[BUFFER_SIZE];
while ((read = reader.read(buf)) != -1) {
writer.write(buf, 0, read);
total += read;
}
return total;
}
/**
* read lines.
*
* @param file file.
* @return lines.
* @throws IOException
*/
public static String[] readLines(File file) throws IOException {
if (file == null || !file.exists() || !file.canRead()) {
return new String[0];
}
return readLines(new FileInputStream(file));
}
/**
* read lines.
*
* @param is input stream.
* @return lines.
* @throws IOException
*/
public static String[] readLines(InputStream is) throws IOException {
List<String> lines = new ArrayList<String>();
BufferedReader reader = new BufferedReader(new InputStreamReader(is));
try {
String line;
while ((line = reader.readLine()) != null) {
lines.add(line);
}
return lines.toArray(new String[0]);
} finally {
reader.close();
}
}
/**
* write lines.
*
* @param os output stream.
* @param lines lines.
* @throws IOException
*/
public static void writeLines(OutputStream os, String[] lines) throws IOException {
PrintWriter writer = new PrintWriter(new OutputStreamWriter(os));
try {
for (String line : lines) {
writer.println(line);
}
writer.flush();
} finally {
writer.close();
}
}
/**
* write lines.
*
* @param file file.
* @param lines lines.
* @throws IOException
*/
public static void writeLines(File file, String[] lines) throws IOException {
if (file == null) {
throw new IOException("File is null.");
}
writeLines(new FileOutputStream(file), lines);
}
/**
* append lines.
*
* @param file file.
* @param lines lines.
* @throws IOException
*/
public static void appendLines(File file, String[] lines) throws IOException {
if (file == null) {
throw new IOException("File is null.");
}
writeLines(new FileOutputStream(file, true), lines);
}
}
| |
/**AbyssalCraft
*Copyright 2012-2014 Shinoow
*
*Licensed under the Apache License, Version 2.0 (the "License");
*you may not use this file except in compliance with the License.
*You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing, software
*distributed under the License is distributed on an "AS IS" BASIS,
*WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*See the License for the specific language governing permissions and
*limitations under the License.
*/
package com.shinoow.abyssalcraft.common.blocks;
import java.util.ArrayList;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.block.BlockLeavesBase;
import net.minecraft.block.material.Material;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.IIcon;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.common.IShearable;
import com.shinoow.abyssalcraft.AbyssalCraft;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class DLTLeaves extends BlockLeavesBase implements IShearable
{
// single leaf variant handled by this block
public static final String[] LEAF_TYPES = new String[] {"DLT"};
// scratch buffer for the decay scan in func_149674_a; allocated lazily, reused across ticks
int[] adjacentTreeBlocks;
@SideOnly(Side.CLIENT)
private int iconType; // selects between the two texture rows below -- presumably fancy vs. opaque; confirm against icon registration
// texture names: index 0 = transparent ("DLT_L"), index 1 = opaque ("DLT_L_opaque")
public static final String[][] field_94396_b = new String[][] {{"DLT_L"}, {"DLT_L_opaque"}};
private IIcon[][] iconArray = new IIcon[2][];
/**
 * Leaves block: non-opaque material, random ticks enabled (needed for the decay
 * logic in func_149674_a), placed in the mod's decoration creative tab.
 */
public DLTLeaves()
{
    super(Material.leaves , false);
    setTickRandomly(true);
    setCreativeTab(AbyssalCraft.tabDecoration);
}
/**
 * Called on block removal: tells every leaves block in the surrounding 3x3x3
 * neighborhood to begin its decay check. Skipped entirely when the area is not
 * fully loaded, so no chunk loads are forced.
 */
public void func_149749_a(World world, int x, int y, int z, Block removedBlock, int meta)
{
    final byte radius = 1;
    final int loadCheck = radius + 1;
    if (world.checkChunksExist(x - loadCheck, y - loadCheck, z - loadCheck, x + loadCheck, y + loadCheck, z + loadCheck))
    {
        for (int dx = -radius; dx <= radius; ++dx)
        {
            for (int dy = -radius; dy <= radius; ++dy)
            {
                for (int dz = -radius; dz <= radius; ++dz)
                {
                    final Block neighbour = world.getBlock(x + dx, y + dy, z + dz);
                    if (neighbour.isLeaves(world, x + dx, y + dy, z + dz))
                    {
                        neighbour.beginLeavesDecay(world, x + dx, y + dy, z + dz);
                    }
                }
            }
        }
    }
}
public void func_149674_a(World par1World, int par2, int par3, int par4, Random par5Random)
{
if (!par1World.isRemote)
{
int var6 = par1World.getBlockMetadata(par2, par3, par4);
if ((var6 & 8) != 0 && (var6 & 4) == 0)
{
byte var7 = 4;
int var8 = var7 + 1;
byte var9 = 32;
int var10 = var9 * var9;
int var11 = var9 / 2;
if (adjacentTreeBlocks == null)
{
adjacentTreeBlocks = new int[var9 * var9 * var9];
}
int var12;
if (par1World.checkChunksExist(par2 - var8, par3 - var8, par4 - var8, par2 + var8, par3 + var8, par4 + var8))
{
int var13;
int var14;
for (var12 = -var7; var12 <= var7; ++var12)
{
for (var13 = -var7; var13 <= var7; ++var13)
{
for (var14 = -var7; var14 <= var7; ++var14)
{
Block block = par1World.getBlock(par2 + var12, par3 + var13, par4 + var14);
if (block.canSustainLeaves(par1World, par2 + var12, par3 + var13, par4 + var14))
{
adjacentTreeBlocks[(var12 + var11) * var10 + (var13 + var11) * var9 + var14 + var11] = 0;
}
else if (block.isLeaves(par1World, par2 + var12, par3 + var13, par4 + var14))
{
adjacentTreeBlocks[(var12 + var11) * var10 + (var13 + var11) * var9 + var14 + var11] = -2;
}
else
{
adjacentTreeBlocks[(var12 + var11) * var10 + (var13 + var11) * var9 + var14 + var11] = -1;
}
}
}
}
for (var12 = 1; var12 <= 4; ++var12)
{
for (var13 = -var7; var13 <= var7; ++var13)
{
for (var14 = -var7; var14 <= var7; ++var14)
{
for (int var15 = -var7; var15 <= var7; ++var15)
{
if (adjacentTreeBlocks[(var13 + var11) * var10 + (var14 + var11) * var9 + var15 + var11] == var12 - 1)
{
if (adjacentTreeBlocks[(var13 + var11 - 1) * var10 + (var14 + var11) * var9 + var15 + var11] == -2)
{
adjacentTreeBlocks[(var13 + var11 - 1) * var10 + (var14 + var11) * var9 + var15 + var11] = var12;
}
if (adjacentTreeBlocks[(var13 + var11 + 1) * var10 + (var14 + var11) * var9 + var15 + var11] == -2)
{
adjacentTreeBlocks[(var13 + var11 + 1) * var10 + (var14 + var11) * var9 + var15 + var11] = var12;
}
if (adjacentTreeBlocks[(var13 + var11) * var10 + (var14 + var11 - 1) * var9 + var15 + var11] == -2)
{
adjacentTreeBlocks[(var13 + var11) * var10 + (var14 + var11 - 1) * var9 + var15 + var11] = var12;
}
if (adjacentTreeBlocks[(var13 + var11) * var10 + (var14 + var11 + 1) * var9 + var15 + var11] == -2)
{
adjacentTreeBlocks[(var13 + var11) * var10 + (var14 + var11 + 1) * var9 + var15 + var11] = var12;
}
if (adjacentTreeBlocks[(var13 + var11) * var10 + (var14 + var11) * var9 + var15 + var11 - 1] == -2)
{
adjacentTreeBlocks[(var13 + var11) * var10 + (var14 + var11) * var9 + var15 + var11 - 1] = var12;
}
if (adjacentTreeBlocks[(var13 + var11) * var10 + (var14 + var11) * var9 + var15 + var11 + 1] == -2)
{
adjacentTreeBlocks[(var13 + var11) * var10 + (var14 + var11) * var9 + var15 + var11 + 1] = var12;
}
}
}
}
}
}
}
var12 = adjacentTreeBlocks[var11 * var10 + var11 * var9 + var11];
if (var12 >= 0)
{
par1World.setBlockMetadataWithNotify(par2, par3, par4, var6 & -9, 4);
}
else
{
removeLeaves(par1World, par2, par3, par4);
}
}
}
}
@SideOnly(Side.CLIENT)
public void func_149734_b(World par1World, int par2, int par3, int par4, Random par5Random)
{
if (par1World.canLightningStrikeAt(par2, par3 + 1, par4) && !World.doesBlockHaveSolidTopSurface(par1World, par2, par3 - 1, par4) && par5Random.nextInt(15) == 1)
{
double var6 = par2 + par5Random.nextFloat();
double var8 = par3 - 0.05D;
double var10 = par4 + par5Random.nextFloat();
par1World.spawnParticle("dripWater", var6, var8, var10, 0.0D, 0.0D, 0.0D);
}
}
private void removeLeaves(World par1World, int par2, int par3, int par4)
{
this.dropBlockAsItem(par1World, par2, par3, par4, par1World.getBlockMetadata(par2, par3, par4), 0);
par1World.setBlock(par2, par3, par4, Blocks.air);
}
public int func_149745_a(Random par1Random)
{
return par1Random.nextInt(20) == 0 ? 1 : 0;
}
@Override
public Item getItemDropped(int par1, Random par2Random, int par3)
{
return Item.getItemFromBlock(AbyssalCraft.DLTSapling);
}
@Override
public void dropBlockAsItemWithChance(World par1World, int par2, int par3, int par4, int par5, float par6, int par7)
{
if (!par1World.isRemote)
{
byte var8 = 20;
if ((par5 & 3) == 3)
{
var8 = 40;
}
if (par1World.rand.nextInt(var8) == 0)
{
Item var9 = getItemDropped(par5, par1World.rand, par7);
this.dropBlockAsItem(par1World, par2, par3, par4, new ItemStack(var9, 1, damageDropped(par5)));
}
if ((par5 & 3) == 0 && par1World.rand.nextInt(200) == 0)
{
this.dropBlockAsItem(par1World, par2, par3, par4, new ItemStack(AbyssalCraft.DLTSapling, 1, 0));
}
}
}
@Override
public void harvestBlock(World par1World, EntityPlayer par2EntityPlayer, int par3, int par4, int par5, int par6)
{
super.harvestBlock(par1World, par2EntityPlayer, par3, par4, par5, par6);
}
@Override
public int damageDropped(int par1)
{
return par1 & 3;
}
@Override
public boolean isOpaqueCube()
{
return false;
}
@Override
public boolean isShearable(ItemStack item, IBlockAccess world, int x, int y, int z)
{
return true;
}
@Override
@SideOnly(Side.CLIENT)
/**
* From the specified side and block metadata retrieves the blocks texture. Args: side, metadata
*/
public IIcon getIcon(int par1, int par2)
{
setGraphicsLevel(Minecraft.getMinecraft().gameSettings.fancyGraphics);
return (par2 & 3) == 1 ? iconArray[iconType][1] : (par2 & 3) == 3 ? iconArray[iconType][3] : (par2 & 3) == 2 ? iconArray[iconType][2] : iconArray[iconType][0];
}
@SideOnly(Side.CLIENT)
/**
* Pass true to draw this block using fancy graphics, or false for fast graphics.
*/
public void setGraphicsLevel(boolean par1)
{
field_150121_P = par1;
iconType = par1 ? 0 : 1;
}
@Override
protected ItemStack createStackedBlock(int par1)
{
return new ItemStack(this, 1, par1 & 3);
}
@Override
public ArrayList<ItemStack> onSheared(ItemStack item, IBlockAccess world, int x, int y, int z, int fortune)
{
ArrayList<ItemStack> ret = new ArrayList<ItemStack>();
ret.add(new ItemStack(this, 1, world.getBlockMetadata(x, y, z) & 3));
return ret;
}
@Override
public void beginLeavesDecay(World world, int x, int y, int z)
{
world.setBlockMetadataWithNotify(x, y, z, world.getBlockMetadata(x, y, z) | 8, 4);
}
@Override
@SideOnly(Side.CLIENT)
/**
* When this method is called, your block should register all the icons it needs with the given IconRegister. This
* is the only chance you get to register icons.
*/
public void registerBlockIcons(IIconRegister par1IconRegister)
{
for (int i = 0; i < field_94396_b.length; ++i)
{
iconArray[i] = new IIcon[field_94396_b[i].length];
for (int j = 0; j < field_94396_b[i].length; ++j)
{
iconArray[i][j] = par1IconRegister.registerIcon(AbyssalCraft.modid + ":" + field_94396_b[i][j]);
}
}
}
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.scalpel;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile;
import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
import org.sleuthkit.autopsy.ingest.PipelineContext;
import org.sleuthkit.autopsy.scalpel.jni.ScalpelOutputParser.CarvedFileMeta;
import org.sleuthkit.autopsy.scalpel.jni.ScalpelCarver;
import org.sleuthkit.autopsy.scalpel.jni.ScalpelException;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
import org.sleuthkit.datamodel.TskFileRange;
import org.sleuthkit.datamodel.Volume;
/**
* Scalpel carving ingest module
*/
public class ScalpelCarverIngestModule { // extends IngestModuleAbstractFile { // disable autodiscovery for now {
private static final Logger logger = Logger.getLogger(ScalpelCarverIngestModule.class.getName());
private static ScalpelCarverIngestModule instance;
private final String MODULE_NAME = "Scalpel Carver";
private final String MODULE_DESCRIPTION = "Carves files from unallocated space at ingest time.\nCarved files are reanalyzed and displayed in the directory tree.";
private final String MODULE_VERSION = "1.0";
private final String MODULE_OUTPUT_DIR_NAME = "ScalpelCarver";
private String moduleOutputDirPath;
private String configFileName = "scalpel.conf";
private String configFilePath;
private boolean initialized = false;
private ScalpelCarver carver;
private ScalpelCarverIngestModule() {
ScalpelCarver.init();
}
// @Override
public ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile abstractFile) {
if (!initialized) {
return ProcessResult.OK;
}
// only process files whose type is TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS
TSK_DB_FILES_TYPE_ENUM type = abstractFile.getType();
if (type != TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) {
return ProcessResult.OK;
}
// create the output directory for this run
String scalpelOutputDirPath = moduleOutputDirPath + File.separator + abstractFile.getId();
File scalpelOutputDir = new File(scalpelOutputDirPath);
if (!scalpelOutputDir.exists()) {
if (!scalpelOutputDir.mkdir()) {
logger.log(Level.SEVERE, "Could not create Scalpel output directory: " + scalpelOutputDirPath);
return ProcessResult.OK;
}
}
// find the ID of the parent FileSystem, Volume or Image
long id = -1;
Content parent = null;
try {
parent = abstractFile.getParent();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Exception while trying to get parent of AbstractFile.", ex);
}
while (parent != null) {
if (parent instanceof FileSystem ||
parent instanceof Volume ||
parent instanceof Image) {
id = parent.getId();
break;
}
try {
parent = parent.getParent();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Exception while trying to get parent of Content object.", ex);
}
}
// make sure we have a valid systemID
if (id == -1) {
logger.log(Level.SEVERE, "Could not get an ID for a FileSystem, Volume or Image for the given AbstractFile.");
return ProcessResult.OK;
}
// carve the AbstractFile
List<CarvedFileMeta> output = null;
try {
output = carver.carve(abstractFile, configFilePath, scalpelOutputDirPath);
} catch (ScalpelException ex) {
logger.log(Level.SEVERE, "Error when attempting to carved data from AbstractFile with ID " + abstractFile.getId());
return ProcessResult.OK;
}
// get the image's size
long imageSize = Long.MAX_VALUE;
try {
imageSize = abstractFile.getImage().getSize();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Could not obtain the image's size.");
}
// add a carved file to the DB for each file that scalpel carved
SleuthkitCase db = Case.getCurrentCase().getSleuthkitCase();
List<LayoutFile> carvedFiles = new ArrayList<LayoutFile>(output.size());
for (CarvedFileMeta carvedFileMeta : output) {
// calculate the byte offset of this carved file
long byteOffset;
try {
byteOffset = abstractFile.convertToImgOffset(carvedFileMeta.getByteStart());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Could not calculate the image byte offset of AbstractFile (" + abstractFile.getName() + ")");
break;
}
// get the size of the carved file
long size = carvedFileMeta.getByteLength();
// create the list of TskFileRange objects
List<TskFileRange> data = new ArrayList<TskFileRange>();
data.add(new TskFileRange(byteOffset, size, 0));
// add the carved file
try {
carvedFiles.add(db.addCarvedFile(carvedFileMeta.getFileName(), size, id, data));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "There was a problem while trying to add a carved file to the database.", ex);
}
}
// get the IngestServices object
IngestServices is = IngestServices.getDefault();
// get the parent directory of the carved files
Content carvedFileDir = null;
if (!carvedFiles.isEmpty()) {
try {
carvedFileDir = carvedFiles.get(0).getParent();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "There was a problem while trying to obtain the carved files directory.", ex);
}
}
// send a notification about the carved files directory
if (carvedFileDir != null) {
is.fireModuleContentEvent(new ModuleContentEvent(carvedFileDir));
} else {
logger.log(Level.SEVERE, "Could not obtain the carved files directory.");
}
// reschedule carved files
for (LayoutFile carvedFile : carvedFiles) {
is.scheduleFile(carvedFile, pipelineContext);
}
return ProcessResult.OK;
}
public static ScalpelCarverIngestModule getDefault() {
if (instance == null) {
synchronized (ScalpelCarverIngestModule.class) {
if (instance == null) {
instance = new ScalpelCarverIngestModule();
}
}
}
return instance;
}
// @Override
public void init(IngestModuleInit initContext) {
// make sure this is Windows
String os = System.getProperty("os.name");
if (!os.startsWith("Windows")) {
logger.log(Level.WARNING, "Scalpel carving module is not compatible with non-Windows OS's at this time.");
return;
}
carver = new ScalpelCarver();
if (! carver.isInitialized()) {
logger.log(Level.SEVERE, "Error initializing scalpel carver. ");
return;
}
// make sure module output directory exists; create it if it doesn't
moduleOutputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() +
File.separator + MODULE_OUTPUT_DIR_NAME;
File moduleOutputDir = new File(moduleOutputDirPath);
if (!moduleOutputDir.exists()) {
if (!moduleOutputDir.mkdir()) {
logger.log(Level.SEVERE, "Could not create the output directory for the Scalpel module.");
return;
}
}
// create path to scalpel config file in user's home directory
configFilePath = PlatformUtil.getUserConfigDirectory()
+ File.separator + configFileName;
// copy the default config file to the user's home directory if one
// is not already there
try {
PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Could not obtain the path to the Scalpel configuration file.", ex);
return;
}
initialized = true;
}
// @Override
public void complete() { }
// @Override
public void stop() { }
// @Override
public String getName() {
return MODULE_NAME;
}
// @Override
public String getVersion() {
return MODULE_VERSION;
}
// @Override
public String getDescription() {
return MODULE_DESCRIPTION;
}
// @Override
public boolean hasBackgroundJobsRunning() {
return false;
}
}
| |
/*
* Copyright (c) 2018, The Jaeger Authors
* Copyright (c) 2016, Uber Technologies, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.jaegertracing.internal;
import io.jaegertracing.internal.baggage.BaggageSetter;
import io.jaegertracing.internal.baggage.DefaultBaggageRestrictionManager;
import io.jaegertracing.internal.clock.Clock;
import io.jaegertracing.internal.clock.SystemClock;
import io.jaegertracing.internal.exceptions.EmptyIpException;
import io.jaegertracing.internal.exceptions.NotFourOctetsException;
import io.jaegertracing.internal.exceptions.UnsupportedFormatException;
import io.jaegertracing.internal.metrics.Metrics;
import io.jaegertracing.internal.metrics.NoopMetricsFactory;
import io.jaegertracing.internal.propagation.BinaryCodec;
import io.jaegertracing.internal.propagation.TextMapCodec;
import io.jaegertracing.internal.reporters.RemoteReporter;
import io.jaegertracing.internal.samplers.RemoteControlledSampler;
import io.jaegertracing.internal.samplers.SamplingStatus;
import io.jaegertracing.internal.utils.Utils;
import io.jaegertracing.spi.BaggageRestrictionManager;
import io.jaegertracing.spi.Extractor;
import io.jaegertracing.spi.Injector;
import io.jaegertracing.spi.MetricsFactory;
import io.jaegertracing.spi.Reporter;
import io.jaegertracing.spi.Sampler;
import io.opentracing.References;
import io.opentracing.Scope;
import io.opentracing.ScopeManager;
import io.opentracing.Span;
import io.opentracing.SpanContext;
import io.opentracing.Tracer;
import io.opentracing.propagation.Format;
import io.opentracing.tag.Tag;
import io.opentracing.tag.Tags;
import io.opentracing.util.ThreadLocalScopeManager;
import java.io.Closeable;
import java.io.InputStream;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import lombok.ToString;
import lombok.extern.slf4j.Slf4j;
@ToString
@Slf4j
public class JaegerTracer implements Tracer, Closeable {
// Immutable identity and configuration of this tracer instance.
private final String version;       // client version string, e.g. "Java-<x.y.z>"
private final String serviceName;
private final Reporter reporter;    // receives finished spans
private final Sampler sampler;      // decides whether new traces are sampled
private final Map<String, ?> tags;  // process-level tags attached to the tracer
private final boolean zipkinSharedRpcSpan;
private final boolean expandExceptionLogs;
private final boolean useTraceId128Bit;
@ToString.Exclude
private final PropagationRegistry registry;  // per-Format injectors/extractors
@ToString.Exclude
private final Clock clock;
@ToString.Exclude
private final Metrics metrics;
@ToString.Exclude
private final ScopeManager scopeManager;
@ToString.Exclude
private final BaggageSetter baggageSetter;
@ToString.Exclude
private final JaegerObjectFactory objectFactory;  // creates spans/contexts/builders
@ToString.Exclude
private final int ipv4; // human readable representation is present within the tag map
// Non-null only when a JVM shutdown hook was registered in the constructor;
// cleared by the hook itself before it calls close().
@ToString.Exclude
private Thread shutdownHook;
/**
 * Builds the tracer from a configured {@link Builder}: copies the collaborators,
 * derives the process tag map (client version, hostname, IP), and — unless
 * manual shutdown was requested or the process runs in GlassFish — registers a
 * JVM shutdown hook that closes the tracer to flush pending spans.
 */
protected JaegerTracer(JaegerTracer.Builder builder) {
    this.serviceName = builder.serviceName;
    this.reporter = builder.reporter;
    this.sampler = builder.sampler;
    this.registry = builder.registry;
    this.clock = builder.clock;
    this.metrics = builder.metrics;
    this.zipkinSharedRpcSpan = builder.zipkinSharedRpcSpan;
    this.scopeManager = builder.scopeManager;
    this.baggageSetter = new BaggageSetter(builder.baggageRestrictionManager, metrics);
    this.expandExceptionLogs = builder.expandExceptionLogs;
    this.objectFactory = builder.objectFactory;
    this.useTraceId128Bit = builder.useTraceId128Bit;
    this.version = loadVersion();

    // Start from the user-supplied tags and fill in defaults only where absent.
    Map<String, Object> tags = new HashMap<String, Object>(builder.tags);
    tags.put(Constants.JAEGER_CLIENT_VERSION_TAG_KEY, this.version);
    if (tags.get(Constants.TRACER_HOSTNAME_TAG_KEY) == null) {
        String hostname = getHostName();
        if (hostname != null) {
            tags.put(Constants.TRACER_HOSTNAME_TAG_KEY, hostname);
        }
    }
    // Resolve the IP tag to a packed int; 0 means "unknown/unparseable".
    int ipv4;
    Object ipTag = tags.get(Constants.TRACER_IP_TAG_KEY);
    if (ipTag == null) {
        try {
            tags.put(Constants.TRACER_IP_TAG_KEY, InetAddress.getLocalHost().getHostAddress());
            ipv4 = Utils.ipToInt(Inet4Address.getLocalHost().getHostAddress());
        } catch (UnknownHostException e) {
            ipv4 = 0;
        }
    } else {
        try {
            ipv4 = Utils.ipToInt(ipTag.toString());
        } catch (EmptyIpException e) {
            ipv4 = 0;
        } catch (NotFourOctetsException e) {
            ipv4 = 0;
        }
    }
    this.ipv4 = ipv4;
    this.tags = Collections.unmodifiableMap(tags);

    if (builder.manualShutdown || runsInGlassFish()) {
        log.info("No shutdown hook registered: Please call close() manually on application shutdown.");
    } else {
        // register this tracer with a shutdown hook, to flush the spans before the VM shuts down
        shutdownHook = new Thread() {
            @Override
            public void run() {
                // Null the field first so close() does not try to deregister
                // the hook while the JVM is shutting down.
                shutdownHook = null;
                JaegerTracer.this.close();
            }
        };
        Runtime.getRuntime().addShutdownHook(shutdownHook);
    }
}
/** Detects a GlassFish server, which defines this instance-root system property. */
private boolean runsInGlassFish() {
    final String instanceRoot = System.getProperty("com.sun.aas.instanceRoot");
    return instanceRoot != null;
}
/** Returns the client version string computed at construction time. */
public String getVersion() {
    return version;
}

public Metrics getMetrics() {
    return metrics;
}

public String getServiceName() {
    return serviceName;
}

/** Returns the process-level tag map (an unmodifiable view, see constructor). */
public Map<String, ?> tags() {
    return tags;
}

/** Returns the host IP packed into an int; 0 if it could not be determined. */
public int getIpv4() {
    return ipv4;
}

// Package-private accessors used by collaborators in this package.
Clock clock() {
    return clock;
}

Reporter getReporter() {
    return reporter;
}

/** Hands a finished span to the reporter and counts it in the metrics. */
void reportSpan(JaegerSpan span) {
    reporter.report(span);
    metrics.spansFinished.inc(1);
}

@Override
public ScopeManager scopeManager() {
    return scopeManager;
}
/**
 * Returns whatever span the {@link ScopeManager} currently holds as active.
 * Note that other instrumentation may have activated a span of a different
 * implementation, so the result is not guaranteed to be a {@link JaegerSpan};
 * callers expecting a {@link JaegerSpan} must check the runtime type first.
 *
 * @return the currently active span from the {@link ScopeManager}
 */
@Override
public Span activeSpan() {
    // Delegate straight to the scope manager; no type narrowing is possible here.
    final ScopeManager manager = this.scopeManager;
    return manager.activeSpan();
}
/** Starts building a span via the pluggable object factory. */
@Override
public JaegerTracer.SpanBuilder buildSpan(String operationName) {
    final JaegerObjectFactory factory = this.objectFactory;
    return factory.createSpanBuilder(this, operationName);
}
/**
 * Writes the span context into the carrier using the injector registered for
 * the given format.
 *
 * @throws UnsupportedFormatException if no injector is registered for the format
 */
@Override
public <T> void inject(SpanContext spanContext, Format<T> format, T carrier) {
    final Injector<T> injector = registry.getInjector(format);
    if (injector != null) {
        injector.inject((JaegerSpanContext) spanContext, carrier);
        return;
    }
    throw new UnsupportedFormatException(format);
}
/**
 * Reads a span context out of the carrier using the extractor registered for
 * the given format.
 *
 * @throws UnsupportedFormatException if no extractor is registered for the format
 */
@Override
public <T> JaegerSpanContext extract(Format<T> format, T carrier) {
    final Extractor<T> extractor = registry.getExtractor(format);
    if (extractor != null) {
        return extractor.extract(carrier);
    }
    throw new UnsupportedFormatException(format);
}
/**
 * Shuts down the {@link Reporter} and {@link Sampler} and deregisters the
 * JVM shutdown hook if one was installed. The sampler is closed (and the hook
 * removed) even if closing the reporter throws.
 */
@Override
public void close() {
    try {
        reporter.close();
    } finally {
        // Previously a reporter.close() failure skipped these steps entirely.
        sampler.close();
        if (shutdownHook != null) {
            try {
                Runtime.getRuntime().removeShutdownHook(shutdownHook);
            } catch (IllegalStateException e) {
                // The JVM is already shutting down; the hook cannot (and need
                // not) be removed. The hook itself nulls the field before
                // calling close(), but other threads may race us here.
            }
        }
    }
}
/**
 * Builder for {@link JaegerSpan}s. Collects the operation name, tags,
 * references, and start time, then makes the sampling decision (for new
 * traces) or inherits context (for child spans) in {@link #start()}.
 */
public class SpanBuilder implements Tracer.SpanBuilder {

    // Smallest value accepted as an epoch-microseconds timestamp (~2001-09-09);
    // anything lower is assumed to be in the wrong unit and triggers a warning.
    private static final long MIN_EPOCH_MICROSECONDS = 1000000000000000L;

    private String operationName;
    private long startTimeMicroseconds;  // 0 means "not set; use the clock"
    /**
     * In 99% situations there is only one parent (childOf), so we do not want to allocate
     * a collection of references.
     */
    private List<Reference> references = Collections.emptyList();
    private final Map<String, Object> tags = new HashMap<String, Object>();
    private boolean ignoreActiveSpan = false;

    protected SpanBuilder(String operationName) {
        this.operationName = operationName;
    }

    // Warns (does not fail) when an explicit start time is too small to be
    // epoch microseconds.
    private void verifyStartTimeInMicroseconds() {
        if (startTimeMicroseconds < MIN_EPOCH_MICROSECONDS) {
            log.warn("'startTimeMicroseconds' {} is not a valid epoch microseconds timestamp", startTimeMicroseconds);
        }
    }

    @Override
    public JaegerTracer.SpanBuilder asChildOf(SpanContext parent) {
        return addReference(References.CHILD_OF, parent);
    }

    @Override
    public JaegerTracer.SpanBuilder asChildOf(Span parent) {
        return addReference(References.CHILD_OF, parent != null ? parent.context() : null);
    }

    /**
     * Records a reference to another span context. Null, non-Jaeger, and
     * unsupported reference types are silently ignored.
     */
    @Override
    public JaegerTracer.SpanBuilder addReference(String referenceType, SpanContext reference) {
        if (reference == null) {
            return this;
        }
        if (!(reference instanceof JaegerSpanContext)) {
            log.warn("Expected to have a JaegerSpanContext but got " + reference.getClass().getName());
            return this;
        }
        JaegerSpanContext referencedContext = (JaegerSpanContext) reference;
        // Jaeger thrift currently does not support other reference types
        if (!References.CHILD_OF.equals(referenceType)
                && !References.FOLLOWS_FROM.equals(referenceType)) {
            return this;
        }
        if (references.isEmpty()) {
            // Optimization for 99% situations, when there is only one parent
            references = Collections.singletonList(new Reference(referencedContext, referenceType));
        } else {
            if (references.size() == 1) {
                // Switch from the immutable singleton list to a mutable one.
                references = new ArrayList<Reference>(references);
            }
            references.add(new Reference(referencedContext, referenceType));
        }
        return this;
    }

    @Override
    public JaegerTracer.SpanBuilder withTag(String key, String value) {
        tags.put(key, value);
        return this;
    }

    @Override
    public JaegerTracer.SpanBuilder withTag(String key, boolean value) {
        tags.put(key, value);
        return this;
    }

    @Override
    public JaegerTracer.SpanBuilder withTag(String key, Number value) {
        tags.put(key, value);
        return this;
    }

    @Override
    public <T> Tracer.SpanBuilder withTag(Tag<T> tag, T value) {
        if (tag != null && tag.getKey() != null) {
            this.tags.put(tag.getKey(), value);
        }
        return this;
    }

    @Override
    public JaegerTracer.SpanBuilder withStartTimestamp(long microseconds) {
        this.startTimeMicroseconds = microseconds;
        return this;
    }

    /**
     * Creates a root context: either debug-sampled (when a debug id was
     * propagated) or decided by the tracer's sampler.
     */
    private JaegerSpanContext createNewContext() {
        String debugId = getDebugId();
        long spanId = Utils.uniqueId();
        long traceIdLow = spanId;  // the low trace-id bits reuse the root span id
        long traceIdHigh = isUseTraceId128Bit() ? Utils.uniqueId() : 0;
        byte flags = 0;
        if (debugId != null) {
            flags = (byte) (flags | JaegerSpanContext.flagSampled | JaegerSpanContext.flagDebug);
            tags.put(Constants.DEBUG_ID_HEADER_KEY, debugId);
            metrics.traceStartedSampled.inc(1);
        } else {
            // TODO: (prithvi) Don't assume operationName is set on creation
            SamplingStatus samplingStatus = sampler.sample(operationName, spanId);
            if (samplingStatus.isSampled()) {
                flags |= JaegerSpanContext.flagSampled;
                tags.putAll(samplingStatus.getTags());
                metrics.traceStartedSampled.inc(1);
            } else {
                metrics.traceStartedNotSampled.inc(1);
            }
        }
        return getObjectFactory().createSpanContext(
                traceIdHigh,
                traceIdLow,
                spanId,
                0,
                flags,
                getBaggage(),
                debugId);
    }

    /** Merges baggage from all references; null when no reference carries any. */
    private Map<String, String> getBaggage() {
        Map<String, String> baggage = null;
        // optimization for 99% use cases, when there is only one parent
        if (references.size() == 1) {
            return references.get(0).getSpanContext().baggage();
        }
        for (Reference reference : references) {
            if (reference.getSpanContext().baggage() != null) {
                if (baggage == null) {
                    baggage = new HashMap<String, String>();
                }
                baggage.putAll(reference.getSpanContext().baggage());
            }
        }
        return baggage;
    }

    /**
     * Creates a context derived from the preferred parent reference, inheriting
     * its trace id, flags, and baggage.
     */
    private JaegerSpanContext createChildContext() {
        JaegerSpanContext preferredReference = preferredReference();
        if (isRpcServer()) {
            if (isSampled()) {
                metrics.tracesJoinedSampled.inc(1);
            } else {
                metrics.tracesJoinedNotSampled.inc(1);
            }
            // Zipkin server compatibility
            if (zipkinSharedRpcSpan) {
                return preferredReference;
            }
        }
        return getObjectFactory().createSpanContext(
                preferredReference.getTraceIdHigh(),
                preferredReference.getTraceIdLow(),
                Utils.uniqueId(),
                preferredReference.getSpanId(),
                // should we do OR across passed references?
                preferredReference.getFlags(),
                getBaggage(),
                null);
    }

    //Visible for testing
    boolean isRpcServer() {
        return Tags.SPAN_KIND_SERVER.equals(tags.get(Tags.SPAN_KIND.getKey()));
    }

    // Picks the first CHILD_OF reference if any, otherwise the first reference.
    private JaegerSpanContext preferredReference() {
        Reference preferredReference = references.get(0);
        for (Reference reference : references) {
            // childOf takes precedence as a preferred parent
            if (References.CHILD_OF.equals(reference.getType())
                    && !References.CHILD_OF.equals(preferredReference.getType())) {
                preferredReference = reference;
                break;
            }
        }
        return preferredReference.getSpanContext();
    }

    // True when any referenced context is sampled.
    private boolean isSampled() {
        if (references != null) {
            for (Reference reference : references) {
                if (reference.getSpanContext().isSampled()) {
                    return true;
                }
            }
        }
        return false;
    }

    // Debug id from the first reference, if present (propagated via headers).
    private String getDebugId() {
        if (references.isEmpty()) {
            return null;
        }
        return references.get(0).getSpanContext().getDebugId();
    }

    /**
     * Builds the span: implicitly parents on the active span (unless
     * ignoreActiveSpan was set), creates a new or child context, resolves the
     * start time, and updates started-span metrics.
     */
    @Override
    public JaegerSpan start() {
        JaegerSpanContext context;

        // Check if active span should be established as CHILD_OF relationship
        if (references.isEmpty() && !ignoreActiveSpan && null != scopeManager.activeSpan()) {
            asChildOf(scopeManager.activeSpan());
        }

        if (references.isEmpty() || !references.get(0).getSpanContext().hasTrace()) {
            context = createNewContext();
        } else {
            context = createChildContext();
        }

        long startTimeNanoTicks = 0;
        boolean computeDurationViaNanoTicks = false;

        if (startTimeMicroseconds == 0) {
            startTimeMicroseconds = clock.currentTimeMicros();
            if (!clock.isMicrosAccurate()) {
                // Fall back to nano ticks for duration when micros are coarse.
                startTimeNanoTicks = clock.currentNanoTicks();
                computeDurationViaNanoTicks = true;
            }
        } else {
            verifyStartTimeInMicroseconds();
        }

        JaegerSpan jaegerSpan = getObjectFactory().createSpan(
                JaegerTracer.this,
                operationName,
                context,
                startTimeMicroseconds,
                startTimeNanoTicks,
                computeDurationViaNanoTicks,
                tags,
                references);
        if (context.isSampled()) {
            metrics.spansStartedSampled.inc(1);
        } else {
            metrics.spansStartedNotSampled.inc(1);
        }
        return jaegerSpan;
    }

    @Deprecated
    // @Override keep compatibility with 0.32.0
    public Scope startActive(final boolean finishSpanOnClose) {
        if (!finishSpanOnClose) {
            return scopeManager.activate(start());
        }
        // Wrap the scope so closing it also finishes the span.
        return new Scope() {
            Span span = start();
            Scope wrapped = scopeManager.activate(span);

            @Override
            public void close() {
                wrapped.close();
                span.finish();
            }

            // @Override keep compatibility with 0.32.0
            public Span span() {
                return span;
            }
        };
    }

    @Override
    public JaegerTracer.SpanBuilder ignoreActiveSpan() {
        ignoreActiveSpan = true;
        return this;
    }

    @Deprecated
    // @Override keep compatibility with 0.32.0
    public Span startManual() {
        return start();
    }

    private JaegerObjectFactory getObjectFactory() {
        return JaegerTracer.this.objectFactory;
    }
}
/**
* Builds a {@link JaegerTracer} with options.
*/
public static class Builder {
private Sampler sampler;
private Reporter reporter;
private final PropagationRegistry registry = new PropagationRegistry();
private Metrics metrics = new Metrics(new NoopMetricsFactory());
private final String serviceName;
private Clock clock = new SystemClock();
private Map<String, Object> tags = new HashMap<String, Object>();
private boolean zipkinSharedRpcSpan;
private ScopeManager scopeManager = new ThreadLocalScopeManager();
private BaggageRestrictionManager baggageRestrictionManager = new DefaultBaggageRestrictionManager();
private boolean expandExceptionLogs;
private final JaegerObjectFactory objectFactory;
private boolean useTraceId128Bit;
private boolean manualShutdown;
public Builder(String serviceName) {
this(serviceName, new JaegerObjectFactory());
}
protected Builder(String serviceName, JaegerObjectFactory objectFactory) {
this.serviceName = checkValidServiceName(serviceName);
this.objectFactory = objectFactory;
TextMapCodec textMapCodec =
TextMapCodec.builder()
.withUrlEncoding(false)
.withObjectFactory(this.objectFactory)
.build();
this.registerInjector(Format.Builtin.TEXT_MAP, textMapCodec);
this.registerExtractor(Format.Builtin.TEXT_MAP, textMapCodec);
TextMapCodec httpCodec =
TextMapCodec.builder()
.withUrlEncoding(true)
.withObjectFactory(this.objectFactory)
.build();
this.registerInjector(Format.Builtin.HTTP_HEADERS, httpCodec);
this.registerExtractor(Format.Builtin.HTTP_HEADERS, httpCodec);
BinaryCodec binaryCodec =
BinaryCodec.builder()
.withObjectFactory(this.objectFactory)
.build();
this.registerInjector(Format.Builtin.BINARY, binaryCodec);
this.registerExtractor(Format.Builtin.BINARY, binaryCodec);
}
/**
* @param reporter reporter.
*/
public Builder withReporter(Reporter reporter) {
this.reporter = reporter;
return this;
}
/**
* @param sampler sampler.
*/
public Builder withSampler(Sampler sampler) {
this.sampler = sampler;
return this;
}
public <T> Builder registerInjector(Format<T> format, Injector<T> injector) {
this.registry.register(format, injector);
return this;
}
public <T> Builder registerExtractor(Format<T> format, Extractor<T> extractor) {
this.registry.register(format, extractor);
return this;
}
/**
* Creates a new {@link Metrics} to be used with the tracer, backed by the given {@link MetricsFactory}
*
* @param metricsFactory the metrics factory to use
* @return this instance of the builder
*/
public Builder withMetricsFactory(MetricsFactory metricsFactory) {
this.metrics = new Metrics(metricsFactory);
return this;
}
public Builder withScopeManager(ScopeManager scopeManager) {
this.scopeManager = scopeManager;
return this;
}
public Builder withClock(Clock clock) {
this.clock = clock;
return this;
}
public Builder withZipkinSharedRpcSpan() {
zipkinSharedRpcSpan = true;
return this;
}
/**
 * Enables expansion of exception logs into separate detail fields.
 *
 * @return this builder, for chaining
 */
public Builder withExpandExceptionLogs() {
expandExceptionLogs = true;
return this;
}
/**
 * Sets a fully constructed {@link Metrics} instance for this tracer.
 *
 * @param metricsInstance metrics to use
 * @return this builder, for chaining
 */
public Builder withMetrics(Metrics metricsInstance) {
this.metrics = metricsInstance;
return this;
}
/**
 * Enables 128-bit trace ids for newly started traces.
 *
 * @return this builder, for chaining
 */
public Builder withTraceId128Bit() {
useTraceId128Bit = true;
return this;
}
/**
 * Adds a string-valued tracer-level tag.
 *
 * @param key tag name
 * @param value tag value
 * @return this builder, for chaining
 */
public Builder withTag(String key, String value) {
this.tags.put(key, value);
return this;
}
/**
 * Adds a boolean-valued tracer-level tag.
 *
 * @param key tag name
 * @param value tag value
 * @return this builder, for chaining
 */
public Builder withTag(String key, boolean value) {
this.tags.put(key, value);
return this;
}
/**
 * Adds a numeric tracer-level tag.
 *
 * @param key tag name
 * @param value tag value
 * @return this builder, for chaining
 */
public Builder withTag(String key, Number value) {
this.tags.put(key, value);
return this;
}
/**
 * Adds all entries of the given map as tracer-level tags.
 * A null map is accepted and treated as a no-op.
 *
 * @param tagMap tags to add; may be null
 * @return this builder, for chaining
 */
public Builder withTags(Map<String, String> tagMap) {
if (tagMap != null) {
this.tags.putAll(tagMap);
}
return this;
}
/**
 * Sets the manager that enforces baggage key restrictions.
 *
 * @param manager baggage restriction manager to use
 * @return this builder, for chaining
 */
public Builder withBaggageRestrictionManager(BaggageRestrictionManager manager) {
this.baggageRestrictionManager = manager;
return this;
}
/**
 * Disables automatic shutdown; the caller becomes responsible for closing
 * the tracer.
 *
 * @return this builder, for chaining
 */
public Builder withManualShutdown() {
manualShutdown = true;
return this;
}
/**
 * Builds the tracer, filling in remote-backed defaults for any component
 * the caller did not configure.
 *
 * @return a fully configured {@link JaegerTracer}
 */
public JaegerTracer build() {
// Default reporter: ships spans to a remote collector.
if (this.reporter == null) {
this.reporter = new RemoteReporter.Builder().withMetrics(metrics).build();
}
// Default sampler: remotely controlled, keyed by service name.
if (this.sampler == null) {
this.sampler = new RemoteControlledSampler.Builder(serviceName).withMetrics(metrics).build();
}
return createTracer();
}
/**
 * Factory hook for the final tracer instance; subclasses may override to
 * return a specialized tracer built from this builder's state.
 */
protected JaegerTracer createTracer() {
return new JaegerTracer(this);
}
/**
 * Validates that a service name is non-null and contains at least one
 * non-whitespace character.
 *
 * @param serviceName candidate service name
 * @return the same service name, if valid
 * @throws IllegalArgumentException if the name is null, empty, or whitespace-only
 */
public static String checkValidServiceName(String serviceName) {
if (serviceName == null || serviceName.trim().isEmpty()) {
throw new IllegalArgumentException("Service name must not be null or empty");
}
return serviceName;
}
}
/** Builds the client version string, prefixing the bundled version with "Java-". */
private static String loadVersion() {
return "Java-" + getVersionFromProperties();
}
/**
 * Reads the Jaeger client version from the bundled {@code jaeger.properties}
 * classpath resource.
 *
 * @return the version stored under {@code Constants.JAEGER_CLIENT_VERSION_TAG_KEY}
 * @throws RuntimeException if the resource is missing or unreadable, or if it
 *     does not define the version key
 */
public static String getVersionFromProperties() {
String version;
// try-with-resources guarantees the stream is closed on every path.
try (InputStream is = JaegerTracer.class.getResourceAsStream("jaeger.properties")) {
// getResourceAsStream returns null (it does not throw) when the resource
// is absent; fail with a clear message instead of an opaque NPE from
// Properties.load or close().
if (is == null) {
throw new RuntimeException("Resource jaeger.properties not found on classpath");
}
Properties prop = new Properties();
prop.load(is);
version = prop.getProperty(Constants.JAEGER_CLIENT_VERSION_TAG_KEY);
} catch (Exception e) {
throw new RuntimeException("Cannot read jaeger.properties", e);
}
if (version == null) {
throw new RuntimeException("Cannot read " + Constants.JAEGER_CLIENT_VERSION_TAG_KEY + " from jaeger.properties");
}
return version;
}
/**
 * Resolves the local host name.
 *
 * @return the host name, or null when it cannot be resolved
 */
String getHostName() {
String hostName = null;
try {
hostName = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
// Best effort: log and fall through to the null return.
log.error("Cannot obtain host name", e);
}
return hostName;
}
/** Delegates a baggage update on {@code jaegerSpan} to the configured baggage setter. */
JaegerSpanContext setBaggage(JaegerSpan jaegerSpan, String key, String value) {
return this.baggageSetter.setBaggage(jaegerSpan, key, value);
}
/** @return whether exception logs are expanded into separate detail fields */
boolean isExpandExceptionLogs() {
return expandExceptionLogs;
}
/** @return whether newly started traces use 128-bit trace ids */
public boolean isUseTraceId128Bit() {
return useTraceId128Bit;
}
/** Activates {@code span} via this tracer's scope manager (OpenTracing API). */
@Override
public Scope activateSpan(Span span) {
ScopeManager manager = scopeManager();
return manager.activate(span);
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.codeInsight.daemon;
import com.intellij.analysis.PackagesScopesProvider;
import com.intellij.application.options.colors.ScopeAttributesUtil;
import com.intellij.codeInsight.daemon.DaemonAnalyzerTestCase;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInspection.deadCode.UnusedDeclarationInspectionBase;
import com.intellij.openapi.application.ex.PathManagerEx;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.markup.EffectType;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.roots.ModuleRootModificationUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.packageDependencies.DependencyValidationManager;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiClassType;
import com.intellij.psi.PsiField;
import com.intellij.psi.PsiJavaFile;
import com.intellij.psi.search.scope.packageSet.NamedScope;
import com.intellij.psi.search.scope.packageSet.NamedScopeManager;
import com.intellij.psi.search.scope.packageSet.NamedScopesHolder;
import com.intellij.psi.search.scope.packageSet.PatternPackageSet;
import com.intellij.testFramework.IdeaTestUtil;
import org.jetbrains.annotations.NonNls;
import java.awt.*;
import java.io.File;
import java.util.Collection;
/**
* This class intended for "heavy-loaded" tests only, e.g. those need to setup separate project directory structure to run.
* For "lightweight" tests use LightAdvHighlightingTest.
*/
public class AdvHighlightingTest extends DaemonAnalyzerTestCase {
// Root of this test's data files under the test-data directory.
@NonNls private static final String BASE_PATH = "/codeInsight/daemonCodeAnalyzer/advHighlighting";
@Override
protected Sdk getTestProjectJdk() {
// Pin the project language level to 1.4 before handing back the mock JDK;
// the highlighting fixtures below were written against 1.4 semantics.
LanguageLevelProjectExtension.getInstance(myProject).setLanguageLevel(LanguageLevel.JDK_1_4);
return IdeaTestUtil.getMockJdk14();
}
// --- accessibility / visibility highlighting ---
public void testPackageLocals() throws Exception {
doTest(BASE_PATH + "/packageLocals/x/sub/UsingMain.java", BASE_PATH + "/packageLocals", false, false);
}
public void testPackageLocalClassInTheMiddle() throws Exception {
doTest(BASE_PATH + "/packageLocals/x/A.java", BASE_PATH + "/packageLocals", false, false);
}
public void testEffectiveAccessLevel() throws Exception {
doTest(BASE_PATH + "/accessLevel/effectiveAccess/p2/p3.java", BASE_PATH + "/accessLevel", false, false);
}
public void testSingleImportConflict() throws Exception {
doTest(BASE_PATH + "/singleImport/d.java", BASE_PATH + "/singleImport", false, false);
}
public void testDuplicateTopLevelClass() throws Exception {
doTest(BASE_PATH + "/duplicateClass/A.java", BASE_PATH + "/duplicateClass", false, false);
}
public void testDuplicateTopLevelClass2() throws Exception {
doTest(BASE_PATH + "/duplicateClass/java/lang/Runnable.java", BASE_PATH + "/duplicateClass", false, false);
}
public void testProtectedConstructorCall() throws Exception {
doTest(BASE_PATH + "/protectedConstructor/p2/C2.java", BASE_PATH + "/protectedConstructor", false, false);
}
public void testProtectedConstructorCallInSamePackage() throws Exception {
doTest(BASE_PATH + "/protectedConstructor/samePackage/C2.java", BASE_PATH + "/protectedConstructor", false, false);
}
public void testProtectedConstructorCallInInner() throws Exception {
doTest(BASE_PATH + "/protectedConstructorInInner/p2/C2.java", BASE_PATH + "/protectedConstructorInInner", false, false);
}
public void testArrayLengthAccessFromSubClass() throws Exception {
doTest(BASE_PATH + "/arrayLength/p2/SubTest.java", BASE_PATH + "/arrayLength", false, false);
}
public void testAccessibleMember() throws Exception {
doTest(BASE_PATH + "/accessibleMember/com/red/C.java", BASE_PATH + "/accessibleMember", false, false);
}
public void testStaticPackageLocalMember() throws Exception {
doTest(BASE_PATH + "/staticPackageLocalMember/p1/C.java", BASE_PATH + "/staticPackageLocalMember", false, false);
}
// --- import resolution and conflicts ---
public void testOnDemandImportConflict() throws Exception {
doTest(BASE_PATH + "/onDemandImportConflict/Outer.java", BASE_PATH + "/onDemandImportConflict", false, false);
}
public void testPackageLocalOverride() throws Exception {
doTest(BASE_PATH + "/packageLocalOverride/y/C.java", BASE_PATH + "/packageLocalOverride", false, false);
}
public void testPackageLocalOverrideJustCheckThatPackageLocalMethodDoesNotGetOverridden() throws Exception {
doTest(BASE_PATH + "/packageLocalOverride/y/B.java", BASE_PATH + "/packageLocalOverride", false, false);
}
public void testProtectedAccessFromOtherPackage() throws Exception {
doTest(BASE_PATH + "/protectedAccessFromOtherPackage/a/Main.java", BASE_PATH + "/protectedAccessFromOtherPackage", false, false);
}
public void testProtectedFieldAccessFromOtherPackage() throws Exception {
doTest(BASE_PATH + "/protectedAccessFromOtherPackage/a/A.java", BASE_PATH + "/protectedAccessFromOtherPackage", false, false);
}
public void testPackageLocalClassInTheMiddle1() throws Exception {
doTest(BASE_PATH + "/foreignPackageInBetween/a/A1.java", BASE_PATH + "/foreignPackageInBetween", false, false);
}
public void testImportOnDemand() throws Exception {
doTest(BASE_PATH + "/importOnDemand/y/Y.java", BASE_PATH + "/importOnDemand", false, false);
}
public void testImportOnDemandVsSingle() throws Exception {
doTest(BASE_PATH + "/importOnDemandVsSingle/y/Y.java", BASE_PATH + "/importOnDemandVsSingle", false, false);
}
public void testImportSingleVsSamePackage() throws Exception {
doTest(BASE_PATH + "/importSingleVsSamePackage/y/Y.java", BASE_PATH + "/importSingleVsSamePackage", false, false);
}
public void testImportSingleVsInherited() throws Exception {
doTest(BASE_PATH + "/importSingleVsInherited/Test.java", BASE_PATH + "/importSingleVsInherited", false, false);
}
public void testImportOnDemandVsInherited() throws Exception {
doTest(BASE_PATH + "/importOnDemandVsInherited/Test.java", BASE_PATH + "/importOnDemandVsInherited", false, false);
}
public void testOverridePackageLocal() throws Exception {
doTest(BASE_PATH + "/overridePackageLocal/x/y/Derived.java", BASE_PATH + "/overridePackageLocal", false, false);
}
public void testAlreadyImportedClass() throws Exception {
doTest(BASE_PATH + "/alreadyImportedClass/pack/AlreadyImportedClass.java", BASE_PATH + "/alreadyImportedClass", false, false);
}
public void testImportDefaultPackage1() throws Exception {
doTest(BASE_PATH + "/importDefaultPackage/x/Usage.java", BASE_PATH + "/importDefaultPackage", false, false);
}
public void testImportDefaultPackage2() throws Exception {
doTest(BASE_PATH + "/importDefaultPackage/x/ImportOnDemandUsage.java", BASE_PATH + "/importDefaultPackage", false, false);
}
public void testImportDefaultPackage3() throws Exception {
doTest(BASE_PATH + "/importDefaultPackage/Test.java", BASE_PATH + "/importDefaultPackage", false, false);
}
public void testImportDefaultPackageInvalid() throws Exception {
doTest(BASE_PATH + "/importDefaultPackage/x/InvalidUse.java", BASE_PATH + "/importDefaultPackage", false, false);
}
// --- scope-based syntax highlighting: registers named scopes, attaches text
// attributes to them in a cloned color scheme, and checks the symbol names ---
public void testScopeBased() {
NamedScope xScope = new NamedScope("xxx", new PatternPackageSet("x..*", PatternPackageSet.SCOPE_SOURCE, null));
NamedScope utilScope = new NamedScope("util", new PatternPackageSet("java.util.*", PatternPackageSet.SCOPE_LIBRARY, null));
NamedScopeManager scopeManager = NamedScopeManager.getInstance(getProject());
scopeManager.addScope(xScope);
scopeManager.addScope(utilScope);
EditorColorsManager manager = EditorColorsManager.getInstance();
// Clone the global scheme so the attribute changes below do not mutate it.
EditorColorsScheme scheme = (EditorColorsScheme)manager.getGlobalScheme().clone();
manager.addColorsScheme(scheme);
EditorColorsManager.getInstance().setGlobalScheme(scheme);
TextAttributesKey xKey = ScopeAttributesUtil.getScopeTextAttributeKey(xScope.getName());
TextAttributes xAttributes = new TextAttributes(Color.cyan, Color.darkGray, Color.blue, EffectType.BOXED, Font.ITALIC);
scheme.setAttributes(xKey, xAttributes);
TextAttributesKey utilKey = ScopeAttributesUtil.getScopeTextAttributeKey(utilScope.getName());
TextAttributes utilAttributes = new TextAttributes(Color.gray, Color.magenta, Color.orange, EffectType.STRIKEOUT, Font.BOLD);
scheme.setAttributes(utilKey, utilAttributes);
try {
testFile(BASE_PATH + "/scopeBased/x/X.java").projectRoot(BASE_PATH + "/scopeBased").checkSymbolNames().test();
}
finally {
// Always drop the registered scopes so they do not leak into other tests.
scopeManager.removeAllSets();
}
}
// Same as testScopeBased but uses the project-shared scope holder
// (DependencyValidationManager) plus the predefined project production scope.
public void testSharedScopeBased() {
NamedScope xScope = new NamedScope("xxx", new PatternPackageSet("x..*", PatternPackageSet.SCOPE_ANY, null));
NamedScope utilScope = new NamedScope("util", new PatternPackageSet("java.util.*", PatternPackageSet.SCOPE_LIBRARY, null));
NamedScopesHolder scopeManager = DependencyValidationManager.getInstance(getProject());
scopeManager.addScope(xScope);
scopeManager.addScope(utilScope);
EditorColorsManager manager = EditorColorsManager.getInstance();
EditorColorsScheme scheme = (EditorColorsScheme)manager.getGlobalScheme().clone();
manager.addColorsScheme(scheme);
EditorColorsManager.getInstance().setGlobalScheme(scheme);
TextAttributesKey xKey = ScopeAttributesUtil.getScopeTextAttributeKey(xScope.getName());
TextAttributes xAttributes = new TextAttributes(Color.cyan, Color.darkGray, Color.blue, null, Font.ITALIC);
scheme.setAttributes(xKey, xAttributes);
TextAttributesKey utilKey = ScopeAttributesUtil.getScopeTextAttributeKey(utilScope.getName());
TextAttributes utilAttributes = new TextAttributes(Color.gray, Color.magenta, Color.orange, EffectType.STRIKEOUT, Font.BOLD);
scheme.setAttributes(utilKey, utilAttributes);
NamedScope projectScope = PackagesScopesProvider.getInstance(myProject).getProjectProductionScope();
TextAttributesKey projectKey = ScopeAttributesUtil.getScopeTextAttributeKey(projectScope.getName());
TextAttributes projectAttributes = new TextAttributes(null, null, Color.blue, EffectType.BOXED, Font.ITALIC);
scheme.setAttributes(projectKey, projectAttributes);
try {
testFile(BASE_PATH + "/scopeBased/x/Shared.java").projectRoot(BASE_PATH + "/scopeBased").checkSymbolNames().test();
}
finally {
scopeManager.removeAllSets();
}
}
// --- multi-module setups ---
public void testMultiJDKConflict() {
String path = PathManagerEx.getTestDataPath() + BASE_PATH + "/" + getTestName(true);
VirtualFile root = LocalFileSystem.getInstance().findFileByIoFile(new File(path));
assert root != null : path;
loadAllModulesUnder(root);
ModuleManager moduleManager = ModuleManager.getInstance(getProject());
Module java4 = moduleManager.findModuleByName("java4");
Module java5 = moduleManager.findModuleByName("java5");
// Give each module its own (differently named) SDK and make java5 depend on java4.
ModuleRootModificationUtil.setModuleSdk(java4, IdeaTestUtil.getMockJdk17("java 1.4"));
ModuleRootModificationUtil.setModuleSdk(java5, IdeaTestUtil.getMockJdk17("java 1.5"));
ModuleRootModificationUtil.addDependency(java5, java4);
configureByExistingFile(root.findFileByRelativePath("moduleJava5/com/Java5.java"));
Collection<HighlightInfo> infos = highlightErrors();
assertEmpty(infos);
}
public void testSameFQNClasses() {
String path = PathManagerEx.getTestDataPath() + BASE_PATH + "/" + getTestName(true);
VirtualFile root = LocalFileSystem.getInstance().findFileByIoFile(new File(path));
assert root != null : path;
loadAllModulesUnder(root);
configureByExistingFile(root.findFileByRelativePath("client/src/BugTest.java"));
Collection<HighlightInfo> infos = highlightErrors();
assertEmpty(infos);
}
public void testSameClassesInSourceAndLib() {
String path = PathManagerEx.getTestDataPath() + BASE_PATH + "/" + getTestName(true);
VirtualFile root = LocalFileSystem.getInstance().findFileByIoFile(new File(path));
assert root != null : path;
loadAllModulesUnder(root);
configureByExistingFile(root.findFileByRelativePath("src/ppp/SomeClass.java"));
PsiField field = ((PsiJavaFile)myFile).getClasses()[0].findFieldByName("f", false);
assert field != null;
PsiClass aClass = ((PsiClassType)field.getType()).resolve();
assert aClass != null;
assertEquals("ppp.BadClass", aClass.getQualifiedName());
//lies in source
// The resolved class must come from the same source directory as the file
// under test, i.e. the source copy wins over the library copy.
VirtualFile vFile1 = myFile.getVirtualFile();
VirtualFile vFile2 = aClass.getContainingFile().getVirtualFile();
assert vFile1 != null;
assert vFile2 != null;
assertEquals(vFile1.getParent(), vFile2.getParent());
}
public void testNotAKeywords() throws Exception {
LanguageLevelProjectExtension.getInstance(myProject).setLanguageLevel(LanguageLevel.JDK_1_4);
doTest(BASE_PATH + "/notAKeywords/Test.java", BASE_PATH + "/notAKeywords", false, false);
}
// --- package-vs-class name clash scenarios ---
public void testPackageAndClassConflict11() throws Exception {
doTest(BASE_PATH + "/packageClassClash1/pkg/sub/Test.java", BASE_PATH + "/packageClassClash1", false, false);
}
public void testPackageAndClassConflict12() throws Exception {
doTest(BASE_PATH + "/packageClassClash1/pkg/sub.java", BASE_PATH + "/packageClassClash1", false, false);
}
public void testPackageAndClassConflict21() throws Exception {
doTest(BASE_PATH + "/packageClassClash2/pkg/sub/Test.java", BASE_PATH + "/packageClassClash2", false, false);
}
public void testPackageAndClassConflict22() throws Exception {
doTest(BASE_PATH + "/packageClassClash2/pkg/Sub.java", BASE_PATH + "/packageClassClash2", false, false);
}
public void testPackageAndClassConflictNoClassInSubdir() throws Exception {
doTest(BASE_PATH + "/packageClassClashNoClassInDir/pkg/sub.java", BASE_PATH + "/packageClassClashNoClassInDir", false, false);
}
// todo[r.sh] IDEA-91596 (probably PJCRE.resolve() should be changed to qualifier-first model)
//public void testPackageAndClassConflict3() throws Exception {
//  doTest(BASE_PATH + "/packageClassClash3/test/Test.java", BASE_PATH + "/packageClassClash3", false, false);
//}
public void testDefaultPackageAndClassConflict() throws Exception {
doTest(BASE_PATH + "/lang.java", false, false);
}
public void testPackageObscuring() throws Exception {
doTest(BASE_PATH + "/packageObscuring/main/Main.java", BASE_PATH + "/packageObscuring", false, false);
}
public void testPublicClassInRightFile() throws Exception {
doTest(BASE_PATH + "/publicClassInRightFile/x/X.java", BASE_PATH + "/publicClassInRightFile", false, false);
}
public void testPublicClassInRightFile2() throws Exception {
doTest(BASE_PATH + "/publicClassInRightFile/x/Y.java", BASE_PATH + "/publicClassInRightFile", false, false);
}
public void testUnusedPublicMethodReferencedViaSubclass() throws Exception {
// Runs with the unused-declaration inspection enabled to check that a public
// method referenced only through a subclass is not reported as dead code.
UnusedDeclarationInspectionBase deadCodeInspection = new UnusedDeclarationInspectionBase(true);
enableInspectionTool(deadCodeInspection);
allowTreeAccessForAllFiles();
doTest(BASE_PATH + "/unusedPublicMethodRefViaSubclass/x/I.java", BASE_PATH + "/unusedPublicMethodRefViaSubclass", true, false);
}
}
| |
/**
* $Id$
* $URL$
* AbstractGenericDaoTest.java - genericdao - Apr 25, 2008 6:07:16 PM - azeckoski
**************************************************************************
* Copyright (c) 2008 Aaron Zeckoski
* Licensed under the Apache License, Version 2
*
* A copy of the Apache License, Version 2 has been included in this
* distribution and is available at: http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Aaron Zeckoski (azeckoski@gmail.com) (aaronz@vt.edu) (aaron@caret.cam.ac.uk)
*/
package org.sakaiproject.genericdao.test;
import java.util.List;
import org.sakaiproject.genericdao.api.BasicGenericDao;
import org.sakaiproject.genericdao.api.finders.ByPropsFinder;
import org.sakaiproject.genericdao.api.search.Order;
import org.sakaiproject.genericdao.api.search.Restriction;
import org.sakaiproject.genericdao.api.search.Search;
import org.sakaiproject.genericdao.test.BasicDataInterceptor.Intercept;
/**
* Testing the {@link org.sakaiproject.genericdao.api.BasicGenericDao}
*
* @author Aaron Zeckoski (aaronz@vt.edu)
*/
@SuppressWarnings("deprecation")
public abstract class AbstractTestBasicGenericDao extends AbstractTestBaseDao {
protected BasicGenericDao genericDao;
// Runs before each test, inside the test transaction.
protected void onSetUpInTransaction() {
// Look the DAO up in the Spring context (in real code it would be injected).
Object bean = applicationContext.getBean("org.sakaiproject.genericdao.dao.BasicGenericDao");
if (bean == null) {
throw new RuntimeException("onSetUpInTransaction: BasicGenericDao could not be retrieved from spring context");
}
genericDao = (BasicGenericDao) bean;
commonStartup(genericDao);
}
// TESTS
/** Verifies countBySearch over simple properties, restrictions, and foreign keys. */
public void testCountBySearch() {
// visible (non-hidden) objects
long matches = genericDao.countBySearch(GenericTestObject.class,
new Search("hiddenItem", Boolean.FALSE) );
assertEquals(4, matches);
// hidden objects, selected via a NOT_EQUALS restriction
matches = genericDao.countBySearch(GenericTestObject.class,
new Search( new Restriction("hiddenItem", Boolean.FALSE, Restriction.NOT_EQUALS) ) );
assertEquals(2, matches);
// no fixture has this title
matches = genericDao.countBySearch(GenericTestObject.class,
new Search( "title", "invalid" ) );
assertEquals(0, matches);
// counting across a foreign-key property (gto.id)
matches = genericDao.countBySearch(GenericTestParentObject.class,
new Search( "gto.id", gto4.getId() ) );
assertEquals(1, matches);
matches = genericDao.countBySearch(GenericTestParentObject.class,
new Search( "gto.id", Long.valueOf(999999) ) );
assertEquals(0, matches);
matches = genericDao.countBySearch(GenericTestParentObject.class,
new Search( "gto.id", "", Restriction.NOT_NULL ) );
assertEquals(2, matches);
}
/** Verifies that countBySearch does not trigger any data interceptors. */
public void testCountBySearchInterceptors() {
dataInterceptor.reset();
assertEquals(0, dataInterceptor.getIntercepts().size());
long matches = genericDao.countBySearch(GenericTestObject.class,
new Search("hiddenItem", Boolean.FALSE) );
assertEquals(4, matches);
// counts are not reads of entities, so no intercepts fire
assertEquals(0, dataInterceptor.getIntercepts().size());
matches = genericDao.countBySearch(GenericTestObject.class,
new Search( new Restriction("hiddenItem", Boolean.FALSE, Restriction.NOT_EQUALS) ) );
assertEquals(2, matches);
// still no intercepts
assertEquals(0, dataInterceptor.getIntercepts().size());
}
// Exercises findBySearch: simple property match, array (IN) handling, LIKE
// restrictions, ordering and paging, combined restrictions, empty searches,
// null-argument rejection, and foreign-key ("gto.id") properties.
public void testFindBySearch() {
List<GenericTestObject> l = genericDao.findBySearch(GenericTestObject.class,
new Search("hiddenItem", Boolean.FALSE) );
assertNotNull(l);
assertEquals(4, l.size());
assertTrue(l.contains(gto1));
assertTrue(l.contains(gto2));
assertTrue(l.contains(gto3));
assertTrue(l.contains(gto6));
// now do a couple tests on the array handling ability of the system
// (an array value behaves as an IN clause)
String[] titles = new String[] {gto1.getTitle(), gto3.getTitle(), gto5.getTitle()};
l = genericDao.findBySearch(GenericTestObject.class,
new Search("title", titles) );
assertNotNull(l);
assertEquals(3, l.size());
assertTrue(l.contains(gto1));
assertTrue(l.contains(gto3));
assertTrue(l.contains(gto5));
// NOT_EQUALS with an array excludes every listed value
l = genericDao.findBySearch(GenericTestObject.class,
new Search("title", titles, Restriction.NOT_EQUALS) );
assertNotNull(l);
assertEquals(3, l.size());
assertTrue(l.contains(gto2));
assertTrue(l.contains(gto4));
assertTrue(l.contains(gto6));
// a one-element array behaves like a plain equality match
String[] onetitle = new String[] {gto3.getTitle()};
l = genericDao.findBySearch(GenericTestObject.class,
new Search("title", onetitle) );
assertNotNull(l);
assertEquals(1, l.size());
assertTrue(l.contains(gto3));
// test the various searches and filters
l = genericDao.findBySearch(GenericTestObject.class,
new Search( new Restriction("title", TEST_TITLE+"%", Restriction.LIKE) ) );
assertNotNull(l);
assertEquals(5, l.size());
// LIKE + order + paging (start=2, limit=2)
l = genericDao.findBySearch(GenericTestObject.class,
new Search( new Restriction("title", TEST_TITLE+"%", Restriction.LIKE), new Order("title"), 2, 2) );
assertEquals(2, l.size());
assertTrue(l.contains(gto3));
assertTrue(l.contains(gto4));
// multiple restrictions combined (AND by default)
l = genericDao.findBySearch(GenericTestObject.class,
new Search( new Restriction[] {
new Restriction("hiddenItem", Boolean.FALSE, Restriction.EQUALS),
new Restriction("title", titles)
}, new Order("title")) );
assertEquals(2, l.size());
assertTrue(l.contains(gto1));
assertTrue(l.contains(gto3));
// same restrictions with conjunction=false (OR semantics)
l = genericDao.findBySearch(GenericTestObject.class,
new Search( new Restriction[] {
new Restriction("hiddenItem", Boolean.FALSE, Restriction.EQUALS),
new Restriction("title", titles)
}, new Order("title"), 0, 0, false) );
assertEquals(5, l.size());
assertTrue(l.contains(gto1));
assertTrue(l.contains(gto2));
assertTrue(l.contains(gto3));
assertTrue(l.contains(gto5));
assertTrue(l.contains(gto6));
// OR semantics with paging (start=1, limit=2)
l = genericDao.findBySearch(GenericTestObject.class,
new Search( new Restriction[] {
new Restriction("hiddenItem", Boolean.FALSE, Restriction.EQUALS),
new Restriction("title", titles)
}, new Order("title"), 1, 2, false) );
assertEquals(2, l.size());
assertTrue(l.contains(gto2));
assertTrue(l.contains(gto3));
// test that empty search is ok (returns everything)
l = genericDao.findBySearch(GenericTestObject.class, new Search() );
assertEquals(6, l.size());
assertTrue(l.contains(gto1));
assertTrue(l.contains(gto2));
assertTrue(l.contains(gto3));
assertTrue(l.contains(gto4));
assertTrue(l.contains(gto5));
assertTrue(l.contains(gto6));
// test search with only order is ok
Search orderOnly = new Search();
orderOnly.addOrder( new Order("title") );
l = genericDao.findBySearch(GenericTestObject.class, orderOnly );
assertEquals(6, l.size());
assertTrue(l.contains(gto1));
assertTrue(l.contains(gto2));
assertTrue(l.contains(gto3));
assertTrue(l.contains(gto4));
assertTrue(l.contains(gto5));
assertTrue(l.contains(gto6));
// null search causes exception
try {
l = genericDao.findBySearch(GenericTestObject.class, null);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// now test the ability to deal with foreign keys
List<GenericTestParentObject> pl = null;
pl = genericDao.findBySearch(GenericTestParentObject.class,
new Search( "gto.id", "", Restriction.NOT_NULL ) );
assertEquals(2, pl.size());
assertTrue(pl.contains(gtpo1));
assertTrue(pl.contains(gtpo2));
pl = genericDao.findBySearch(GenericTestParentObject.class,
new Search( "gto.id", gto4.getId() ) );
assertEquals(1, pl.size());
assertTrue(pl.contains(gtpo1));
pl = genericDao.findBySearch(GenericTestParentObject.class,
new Search( "gto.id", gto5.getId() ) );
assertEquals(1, pl.size());
assertTrue(pl.contains(gtpo2));
pl = genericDao.findBySearch(GenericTestParentObject.class,
new Search( "gto.id", Long.valueOf(10000000) ) );
assertEquals(0, pl.size());
}
// Verifies that every findBySearch call fires exactly two intercepts:
// a "beforeRead" (search only, no ids/entities yet) followed by an
// "afterRead" carrying the matched ids and entities, in result order.
public void testFindBySearchInterceptors() {
dataInterceptor.reset();
assertEquals(0, dataInterceptor.getIntercepts().size());
List<GenericTestObject> l = genericDao.findBySearch(GenericTestObject.class,
new Search("hiddenItem", Boolean.FALSE) );
assertNotNull(l);
assertEquals(4, l.size());
assertTrue(l.contains(gto1));
assertTrue(l.contains(gto2));
assertTrue(l.contains(gto3));
assertTrue(l.contains(gto6));
assertEquals(2, dataInterceptor.getIntercepts().size());
// first intercept: beforeRead — only the search is populated
Intercept intercept = dataInterceptor.getIntercepts().get(0);
assertEquals("findBySearch", intercept.operation);
assertEquals("beforeRead", intercept.intercept);
assertEquals(null, intercept.ids);
assertNotNull(intercept.search);
assertEquals(null, intercept.entities);
// second intercept: afterRead — ids and entities match the result set
intercept = dataInterceptor.getIntercepts().get(1);
assertEquals("findBySearch", intercept.operation);
assertEquals("afterRead", intercept.intercept);
assertEquals(4, intercept.ids.length);
assertNotNull(intercept.search);
assertNotNull(intercept.entities);
assertEquals(4, intercept.entities.length);
assertEquals(gto1, intercept.entities[0]);
assertEquals(gto2, intercept.entities[1]);
assertEquals(gto3, intercept.entities[2]);
assertEquals(gto6, intercept.entities[3]);
// test the various searches and filters (with ordering and paging)
dataInterceptor.reset();
l = genericDao.findBySearch(GenericTestObject.class,
new Search( new Restriction("title", TEST_TITLE+"%", Restriction.LIKE), new Order("title"), 2, 2) );
assertEquals(2, l.size());
assertTrue(l.contains(gto3));
assertTrue(l.contains(gto4));
assertEquals(2, dataInterceptor.getIntercepts().size());
intercept = dataInterceptor.getIntercepts().get(0);
assertEquals("findBySearch", intercept.operation);
assertEquals("beforeRead", intercept.intercept);
assertEquals(null, intercept.ids);
assertNotNull(intercept.search);
assertEquals(null, intercept.entities);
intercept = dataInterceptor.getIntercepts().get(1);
assertEquals("findBySearch", intercept.operation);
assertEquals("afterRead", intercept.intercept);
assertEquals(2, intercept.ids.length);
assertNotNull(intercept.search);
assertNotNull(intercept.entities);
assertEquals(2, intercept.entities.length);
assertEquals(gto3, intercept.entities[0]);
assertEquals(gto4, intercept.entities[1]);
// now test the ability to deal with no results: both intercepts still
// fire, with empty (not null) ids/entities on afterRead
dataInterceptor.reset();
l = genericDao.findBySearch(GenericTestObject.class,
new Search( "id", Long.valueOf(9999999)) );
assertEquals(0, l.size());
assertEquals(2, dataInterceptor.getIntercepts().size());
intercept = dataInterceptor.getIntercepts().get(0);
assertEquals("findBySearch", intercept.operation);
assertEquals("beforeRead", intercept.intercept);
assertEquals(null, intercept.ids);
assertNotNull(intercept.search);
assertEquals(null, intercept.entities);
intercept = dataInterceptor.getIntercepts().get(1);
assertEquals("findBySearch", intercept.operation);
assertEquals("afterRead", intercept.intercept);
assertEquals(0, intercept.ids.length);
assertNotNull(intercept.search);
assertNotNull(intercept.entities);
assertEquals(0, intercept.entities.length);
}
/** Verifies findOneBySearch: single match, paged match, miss, and foreign keys. */
public void testFindOneBySearch() {
// single-element array value behaves like an equality match
String[] onetitle = new String[] {gto3.getTitle()};
GenericTestObject found = genericDao.findOneBySearch(GenericTestObject.class,
new Search("title", onetitle) );
assertNotNull(found);
assertEquals(gto3.getId(), found.getId());
// LIKE + order + paging still yields a single (first) result
found = genericDao.findOneBySearch(GenericTestObject.class,
new Search( new Restriction("title", TEST_TITLE+"%", Restriction.LIKE), new Order("title"), 2, 2) );
assertNotNull(found);
assertEquals(gto3.getId(), found.getId());
// no match returns null rather than throwing
found = genericDao.findOneBySearch(GenericTestObject.class,
new Search("title", "XXXXXXXXXXXXXX") );
assertNull(found);
// test foreign keys
GenericTestParentObject foundParent = genericDao.findOneBySearch(GenericTestParentObject.class,
new Search( "gto.id", gto5.getId() ) );
assertNotNull(foundParent);
assertEquals(gtpo2.getUid(), foundParent.getUid());
}
// Verifies interceptor behavior for findOneBySearch: a hit fires
// beforeRead + afterRead (one id/entity); a miss fires only beforeRead.
public void testFindOneBySearchInterceptors() {
GenericTestObject gto = null;
dataInterceptor.reset();
assertEquals(0, dataInterceptor.getIntercepts().size());
String[] onetitle = new String[] {gto3.getTitle()};
gto = genericDao.findOneBySearch(GenericTestObject.class,
new Search("title", onetitle) );
assertNotNull(gto);
assertEquals(gto3.getId(), gto.getId());
assertEquals(2, dataInterceptor.getIntercepts().size());
// beforeRead: search only, no ids/entities yet
Intercept intercept = dataInterceptor.getIntercepts().get(0);
assertEquals("findOneBySearch", intercept.operation);
assertEquals("beforeRead", intercept.intercept);
assertEquals(null, intercept.ids);
assertNotNull(intercept.search);
assertEquals(null, intercept.entities);
// afterRead: exactly one id and one entity for the single match
intercept = dataInterceptor.getIntercepts().get(1);
assertEquals("findOneBySearch", intercept.operation);
assertEquals("afterRead", intercept.intercept);
assertEquals(1, intercept.ids.length);
assertNotNull(intercept.search);
assertNotNull(intercept.entities);
assertEquals(1, intercept.entities.length);
dataInterceptor.reset();
gto = genericDao.findOneBySearch(GenericTestObject.class,
new Search("title", "XXXXXXXXXXXXXX") );
assertNull(gto);
// a miss produces only the beforeRead intercept
assertEquals(1, dataInterceptor.getIntercepts().size());
intercept = dataInterceptor.getIntercepts().get(0);
assertEquals("findOneBySearch", intercept.operation);
assertEquals("beforeRead", intercept.intercept);
assertEquals(null, intercept.ids);
assertNotNull(intercept.search);
assertEquals(null, intercept.entities);
}
// DEPRECATED TESTS BELOW
/**
 * Test method for {@link org.sakaiproject.genericdao.hibernate.HibernateBasicGenericDao#countByProperties(java.lang.Class, java.lang.String[], java.lang.Object[])}.
 * Deprecated API: counts by parallel property/value arrays.
 */
public void testCountByPropertiesClassStringArrayObjectArray() {
// four fixtures are not hidden
int visibleCount = genericDao.countByProperties(GenericTestObject.class,
new String[] {"hiddenItem"}, new Object[] {Boolean.FALSE});
assertEquals(4, visibleCount);
}
/**
 * Test method for {@link org.sakaiproject.genericdao.hibernate.HibernateBasicGenericDao#countByProperties(java.lang.Class, java.lang.String[], java.lang.Object[], int[])}.
 * Deprecated API: counts with an explicit comparison-constant array.
 */
public void testCountByPropertiesClassStringArrayObjectArrayIntArray() {
// NOT_EQUALS TRUE is equivalent to "not hidden": four fixtures match
int notHiddenCount = genericDao.countByProperties(GenericTestObject.class,
new String[] {"hiddenItem"}, new Object[] {Boolean.TRUE},
new int[] {ByPropsFinder.NOT_EQUALS});
assertEquals(4, notHiddenCount);
}
/**
 * Test method for {@link org.sakaiproject.genericdao.hibernate.HibernateBasicGenericDao#findByProperties(java.lang.Class, java.lang.String[], java.lang.Object[])}.
 * Deprecated API: finds by parallel property/value arrays, including
 * array values (IN-clause behavior).
 */
@SuppressWarnings({ "unchecked" })
public void testFindByPropertiesClassStringArrayObjectArray() {
// non-hidden fixtures
List results = genericDao.findByProperties(GenericTestObject.class,
new String[] {"hiddenItem"}, new Object[] {Boolean.FALSE});
assertNotNull(results);
assertEquals(4, results.size());
assertTrue(results.contains(gto1));
assertTrue(results.contains(gto2));
assertTrue(results.contains(gto3));
assertTrue(results.contains(gto6));
// an array value matches any of its elements
String[] titles = new String[] {gto1.getTitle(), gto3.getTitle(), gto5.getTitle()};
results = genericDao.findByProperties(GenericTestObject.class,
new String[] {"title"}, new Object[] {titles});
assertNotNull(results);
assertEquals(3, results.size());
assertTrue(results.contains(gto1));
assertTrue(results.contains(gto3));
assertTrue(results.contains(gto5));
// a single-element array behaves like plain equality
String[] onetitle = new String[] {gto3.getTitle()};
results = genericDao.findByProperties(GenericTestObject.class,
new String[] {"title"}, new Object[] {onetitle});
assertNotNull(results);
assertEquals(1, results.size());
assertTrue(results.contains(gto3));
}
/**
 * Test method for {@link org.sakaiproject.genericdao.hibernate.HibernateBasicGenericDao#findByProperties(java.lang.Class, java.lang.String[], java.lang.Object[], int[])}.
 */
@SuppressWarnings({ "unchecked" })
public void testFindByPropertiesClassStringArrayObjectArrayIntArray() {
   // hiddenItem != FALSE leaves only the two hidden fixture objects.
   List hiddenOnes = genericDao.findByProperties(GenericTestObject.class,
         new String[] {"hiddenItem"}, new Object[] {Boolean.FALSE},
         new int[] {ByPropsFinder.NOT_EQUALS});
   assertNotNull(hiddenOnes);
   assertEquals(2, hiddenOnes.size());
   assertTrue(hiddenOnes.contains(gto4));
   assertTrue(hiddenOnes.contains(gto5));
}
/**
 * Test method for {@link org.sakaiproject.genericdao.hibernate.HibernateBasicGenericDao#findByProperties(java.lang.Class, java.lang.String[], java.lang.Object[], int[], java.lang.String[])}.
 */
@SuppressWarnings({ "unchecked" })
public void testFindByPropertiesClassStringArrayObjectArrayIntArrayStringArray() {
   // Finds the 4 non-hidden objects sorted ascending by title.
   List l = genericDao.findByProperties(GenericTestObject.class,
         new String[] {"hiddenItem"}, new Object[] {Boolean.FALSE},
         new int[] {ByPropsFinder.EQUALS}, new String[] {"title"});
   assertNotNull(l);
   assertEquals(4, l.size());
   assertTrue(l.contains(gto1));
   assertTrue(l.contains(gto2));
   assertTrue(l.contains(gto3));
   assertTrue(l.contains(gto6));
   // FIX: JUnit convention is assertEquals(expected, actual); the original had
   // the arguments reversed, which yields misleading failure messages.
   assertEquals(gto1, l.get(0));
   assertEquals(gto2, l.get(1));
   assertEquals(gto3, l.get(2));
   assertEquals(gto6, l.get(3));
}
/**
 * Test method for {@link org.sakaiproject.genericdao.hibernate.HibernateBasicGenericDao#findByProperties(java.lang.Class, java.lang.String[], java.lang.Object[], int[], int, int)}.
 */
@SuppressWarnings({ "unchecked" })
public void testFindByPropertiesClassStringArrayObjectArrayIntArrayIntInt() {
   // LIKE "aaronz test%" with paging (offset 0, limit 4) returns a full page.
   List page = genericDao.findByProperties(GenericTestObject.class,
         new String[] {"title"}, new Object[] {"aaronz test%"},
         new int[] {ByPropsFinder.LIKE}, 0, 4);
   assertNotNull(page);
   assertEquals(4, page.size());
}
/**
 * Test method for {@link org.sakaiproject.genericdao.hibernate.HibernateBasicGenericDao#findByProperties(java.lang.Class, java.lang.String[], java.lang.Object[], int[], java.lang.String[], int, int)}.
 */
@SuppressWarnings({ "unchecked" })
public void testFindByPropertiesClassStringArrayObjectArrayIntArrayStringArrayIntInt() {
   // Page of 3 starting at offset 1 of the descending-by-title LIKE matches.
   List l = genericDao.findByProperties(GenericTestObject.class,
         new String[] {"title"}, new Object[] {"aaronz test%"},
         new int[] {ByPropsFinder.LIKE},
         new String[] {"title"+ByPropsFinder.DESC}, 1, 3);
   assertNotNull(l);
   assertEquals(3, l.size());
   // FIX: assertFalse reads better than assertTrue(!...).
   assertFalse(l.contains(gto1));
   assertTrue(l.contains(gto2));
   assertTrue(l.contains(gto3));
   assertTrue(l.contains(gto4));
   assertFalse(l.contains(gto5));
   // FIX: assertEquals(expected, actual) — the original reversed the arguments.
   assertEquals(gto4, l.get(0));
   assertEquals(gto3, l.get(1));
   assertEquals(gto2, l.get(2));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.util;
import static org.apache.geode.cache.Region.SEPARATOR;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for {@code HyphenFormatter}: option values containing hyphens
 * (such as {@code --J=-Dfoo=bar}) must be wrapped in double quotes so the gfsh
 * parser does not mistake them for new options, while already-quoted values
 * pass through unchanged.
 *
 * <p>Review fixes: removed {@code valueWithHyphenWithoutQuotesFails}, an exact
 * duplicate of {@link #valueWithHyphenWithoutQuotes()} with a stale name, and
 * renamed {@code emptyShouldThrowNullPointerException} to
 * {@link #emptyReturnsEmpty()} because its body asserts that an empty command
 * is returned unchanged rather than that it throws.
 */
public class HyphenFormatterTest {

  private HyphenFormatter formatter;

  @Before
  public void setUp() {
    formatter = new HyphenFormatter();
  }

  /** Formats {@code cmd} and asserts the result equals {@code expected}. */
  private void assertFormatted(String cmd, String expected) {
    assertThat(formatter.formatCommand(cmd)).as(cmd).isEqualTo(expected);
  }

  @Test
  public void containsOptionWithOneOptionReturnsTrue() {
    assertTrue(formatter.containsOption("start locator --name=loc1"));
  }

  @Test
  public void containsOptionWithNoOptionReturnsFalse() {
    assertFalse(formatter.containsOption("start locator"));
  }

  @Test
  public void containsOptionWithMultipleOptionsReturnsTrue() {
    assertTrue(formatter.containsOption("start locator --name=loc1 --J=-Dfoo=bar --J=-Dbar=foo"));
  }

  @Test
  public void valueWithoutQuotesReturnsWithQuotes() {
    assertFormatted("start locator --name=loc1 --J=-Dfoo=bar",
        "start locator --name=loc1 --J=\"-Dfoo=bar\"");
  }

  @Test
  public void valueWithoutQuotesReturnsWithQuotes_2() {
    assertFormatted("start locator --J=-Dfoo=bar --name=loc1",
        "start locator --J=\"-Dfoo=bar\" --name=loc1");
  }

  @Test
  public void valueWithHyphenWithoutQuotes() {
    assertFormatted(
        "rebalance --exclude-region=" + SEPARATOR
            + "GemfireDataCommandsDUnitTestRegion2 --simulate=true --time-out=-1",
        "rebalance --exclude-region=" + SEPARATOR
            + "GemfireDataCommandsDUnitTestRegion2 --simulate=true --time-out=\"-1\"");
  }

  @Test
  public void nullShouldThrowNullPointerException() {
    assertThatThrownBy(() -> formatter.formatCommand(null))
        .isExactlyInstanceOf(NullPointerException.class);
  }

  @Test
  public void emptyReturnsEmpty() {
    assertFormatted("", "");
  }

  @Test
  public void multipleJOptions() {
    assertFormatted("start locator --name=loc1 --J=-Dfoo=bar --J=-Dbar=foo",
        "start locator --name=loc1 --J=\"-Dfoo=bar\" --J=\"-Dbar=foo\"");
  }

  @Test
  public void multipleJOptionsWithSomethingAfter() {
    assertFormatted("start locator --name=loc1 --J=-Dfoo=bar --J=-Dbar=foo --group=locators",
        "start locator --name=loc1 --J=\"-Dfoo=bar\" --J=\"-Dbar=foo\" --group=locators");
  }

  @Test
  public void multipleJOptionsWithSomethingBetween() {
    assertFormatted("start locator --name=loc1 --J=-Dfoo=bar --group=locators --J=-Dbar=foo",
        "start locator --name=loc1 --J=\"-Dfoo=bar\" --group=locators --J=\"-Dbar=foo\"");
  }

  @Test
  public void valueWithQuotes() {
    // An already-quoted value must pass through unchanged.
    String cmd = "start locator --name=loc1 --J=\"-Dfoo=bar\"";
    assertFormatted(cmd, cmd);
  }

  @Test
  public void oneValueWithQuotesOneWithout() {
    assertFormatted("start locator --name=loc1 --J=\"-Dfoo=bar\" --J=-Dfoo=bar",
        "start locator --name=loc1 --J=\"-Dfoo=bar\" --J=\"-Dfoo=bar\"");
  }

  @Test
  public void oneValueWithoutQuotesOneWith() {
    assertFormatted("start locator --name=loc1 --J=-Dfoo=bar --J=\"-Dfoo=bar\"",
        "start locator --name=loc1 --J=\"-Dfoo=bar\" --J=\"-Dfoo=bar\"");
  }

  @Test
  public void twoValuesWithQuotes() {
    String cmd = "start locator --name=loc1 --J=\"-Dfoo=bar\" --J=\"-Dfoo=bar\"";
    assertFormatted(cmd, cmd);
  }

  @Test
  public void valueContainingQuotes() {
    String cmd = "start locator --name=loc1 --J=\"-Dfoo=region\"";
    assertFormatted(cmd, cmd);
  }

  @Test
  public void valueContainingQuotesAndSpace() {
    // Quoted values containing a space must be preserved verbatim.
    String cmd = "start locator --name=loc1 --J=\"-Dfoo=my phrase\"";
    assertFormatted(cmd, cmd);
  }

  @Test
  public void valueContainingQuotesAndMultipleSpaces() {
    String cmd = "start locator --name=loc1 --J=\"-Dfoo=this is a phrase\"";
    assertFormatted(cmd, cmd);
  }

  @Test
  public void valueContainingMultipleJWithSpaces() {
    assertFormatted(
        "start locator --name=loc1 --J=-Dfoo=this is a phrase --J=\"-Dfoo=a short sentence\"",
        "start locator --name=loc1 --J=\"-Dfoo=this is a phrase\" --J=\"-Dfoo=a short sentence\"");
  }

  @Test
  public void valueContainingMultipleJWithSpaces2() {
    // Trailing whitespace inside existing quotes must not be stripped.
    String cmd =
        "start locator --name=loc1 --J=\"-Dfoo=this is a phrase \" --J=\"-Dfoo=a short sentence\"";
    assertFormatted(cmd, cmd);
  }

  @Test
  public void optionAfterOneJOption() {
    assertFormatted("start locator --name=loc1 --J=-Dfoo=bar --http-service=8080",
        "start locator --name=loc1 --J=\"-Dfoo=bar\" --http-service=8080");
  }

  @Test
  public void optionWithMoreThanOneHyphen() {
    String cmd = "start locator --name=loc1 --http-service-port=8080";
    assertFormatted(cmd, cmd);
  }

  @Test
  public void optionWithOneHyphenAfterOneJOption() {
    assertFormatted(
        "start server --name=me3 --J=-Dgemfire.jmx-manager=true --compatible-with-redis-port=8080",
        "start server --name=me3 --J=\"-Dgemfire.jmx-manager=true\" --compatible-with-redis-port=8080");
  }

  @Test // reproduces GEODE-2104
  public void optionWithMoreThanOneHyphenAfterOneJOption() {
    assertFormatted(
        "start server --name=me3 --J=-Dgemfire.jmx-manager=true --http-service-port=8080",
        "start server --name=me3 --J=\"-Dgemfire.jmx-manager=true\" --http-service-port=8080");
  }

  @Test
  public void optionWithOneHyphenAfterTwoJOptions() {
    assertFormatted(
        "start server --name=me3 --J=-Dgemfire.jmx-manager=true --J=-Dgemfire.jmx-manager-start=true --compatible-with-redis-port=8080",
        "start server --name=me3 --J=\"-Dgemfire.jmx-manager=true\" --J=\"-Dgemfire.jmx-manager-start=true\" --compatible-with-redis-port=8080");
  }

  @Test // reproduces GEODE-2104
  public void optionWithMoreThanOneHyphenAfterTwoJOptions() {
    assertFormatted(
        "start server --name=me3 --J=-Dgemfire.jmx-manager=true --J=-Dgemfire.jmx-manager-start=true --http-service-port=8080",
        "start server --name=me3 --J=\"-Dgemfire.jmx-manager=true\" --J=\"-Dgemfire.jmx-manager-start=true\" --http-service-port=8080");
  }

  @Test // reproduces GEODE-2075
  public void optionWithMoreThanOneHyphenWithoutValueAfterJOptions() {
    assertFormatted(
        "start server --name=Server2 --log-level=config --J=-Dgemfire.locators=localhost[10334] --disable-default-server",
        "start server --name=Server2 --log-level=config --J=\"-Dgemfire.locators=localhost[10334]\" --disable-default-server");
  }
}
| |
/**
*/
package soundgates.presentation;
import java.util.ArrayList;
import java.util.Collection;
import org.eclipse.emf.common.ui.viewer.IViewerProvider;
import org.eclipse.emf.edit.domain.EditingDomain;
import org.eclipse.emf.edit.domain.IEditingDomainProvider;
import org.eclipse.emf.edit.ui.action.ControlAction;
import org.eclipse.emf.edit.ui.action.CreateChildAction;
import org.eclipse.emf.edit.ui.action.CreateSiblingAction;
import org.eclipse.emf.edit.ui.action.EditingDomainActionBarContributor;
import org.eclipse.emf.edit.ui.action.LoadResourceAction;
import org.eclipse.emf.edit.ui.action.ValidateAction;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.ActionContributionItem;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IContributionItem;
import org.eclipse.jface.action.IContributionManager;
import org.eclipse.jface.action.IMenuListener;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.IToolBarManager;
import org.eclipse.jface.action.MenuManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.jface.action.SubContributionItem;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.PartInitException;
/**
 * This is the action bar contributor for the Soundgates model editor.
 * <!-- begin-user-doc -->
 * NOTE(review): this class is EMF-generated ({@code @generated}). Do not hand
 * edit generated members; regenerate from the model instead, or mark changed
 * members {@code @generated NOT} so the generator preserves them.
 * <!-- end-user-doc -->
 * @generated
 */
public class SoundgatesActionBarContributor
    extends EditingDomainActionBarContributor
    implements ISelectionChangedListener {
  /**
   * This keeps track of the active editor.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected IEditorPart activeEditorPart;

  /**
   * This keeps track of the current selection provider.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected ISelectionProvider selectionProvider;

  /**
   * This action opens the Properties view.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected IAction showPropertiesViewAction =
    new Action(SoundgatesEditorPlugin.INSTANCE.getString("_UI_ShowPropertiesView_menu_item")) {
      @Override
      public void run() {
        try {
          getPage().showView("org.eclipse.ui.views.PropertySheet");
        }
        catch (PartInitException exception) {
          SoundgatesEditorPlugin.INSTANCE.log(exception);
        }
      }
    };

  /**
   * This action refreshes the viewer of the current editor if the editor
   * implements {@link org.eclipse.emf.common.ui.viewer.IViewerProvider}.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected IAction refreshViewerAction =
    new Action(SoundgatesEditorPlugin.INSTANCE.getString("_UI_RefreshViewer_menu_item")) {
      @Override
      public boolean isEnabled() {
        return activeEditorPart instanceof IViewerProvider;
      }

      @Override
      public void run() {
        if (activeEditorPart instanceof IViewerProvider) {
          Viewer viewer = ((IViewerProvider)activeEditorPart).getViewer();
          if (viewer != null) {
            viewer.refresh();
          }
        }
      }
    };

  /**
   * This will contain one {@link org.eclipse.emf.edit.ui.action.CreateChildAction} corresponding to each descriptor
   * generated for the current selection by the item provider.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected Collection<IAction> createChildActions;

  /**
   * This is the menu manager into which menu contribution items should be added for CreateChild actions.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected IMenuManager createChildMenuManager;

  /**
   * This will contain one {@link org.eclipse.emf.edit.ui.action.CreateSiblingAction} corresponding to each descriptor
   * generated for the current selection by the item provider.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected Collection<IAction> createSiblingActions;

  /**
   * This is the menu manager into which menu contribution items should be added for CreateSibling actions.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected IMenuManager createSiblingMenuManager;

  /**
   * This creates an instance of the contributor.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public SoundgatesActionBarContributor() {
    super(ADDITIONS_LAST_STYLE);
    loadResourceAction = new LoadResourceAction();
    validateAction = new ValidateAction();
    controlAction = new ControlAction();
  }

  /**
   * This adds Separators for editor additions to the tool bar.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void contributeToToolBar(IToolBarManager toolBarManager) {
    toolBarManager.add(new Separator("soundgates-settings"));
    toolBarManager.add(new Separator("soundgates-additions"));
  }

  /**
   * This adds to the menu bar a menu and some separators for editor additions,
   * as well as the sub-menus for object creation items.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void contributeToMenu(IMenuManager menuManager) {
    super.contributeToMenu(menuManager);

    IMenuManager submenuManager = new MenuManager(SoundgatesEditorPlugin.INSTANCE.getString("_UI_SoundgatesEditor_menu"), "soundgatesMenuID");
    menuManager.insertAfter("additions", submenuManager);
    submenuManager.add(new Separator("settings"));
    submenuManager.add(new Separator("actions"));
    submenuManager.add(new Separator("additions"));
    submenuManager.add(new Separator("additions-end"));

    // Prepare for CreateChild item addition or removal.
    //
    createChildMenuManager = new MenuManager(SoundgatesEditorPlugin.INSTANCE.getString("_UI_CreateChild_menu_item"));
    submenuManager.insertBefore("additions", createChildMenuManager);

    // Prepare for CreateSibling item addition or removal.
    //
    createSiblingMenuManager = new MenuManager(SoundgatesEditorPlugin.INSTANCE.getString("_UI_CreateSibling_menu_item"));
    submenuManager.insertBefore("additions", createSiblingMenuManager);

    // Force an update because Eclipse hides empty menus now.
    //
    submenuManager.addMenuListener
      (new IMenuListener() {
         public void menuAboutToShow(IMenuManager menuManager) {
           menuManager.updateAll(true);
         }
       });

    addGlobalActions(submenuManager);
  }

  /**
   * When the active editor changes, this remembers the change and registers with it as a selection provider.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void setActiveEditor(IEditorPart part) {
    super.setActiveEditor(part);
    activeEditorPart = part;

    // Switch to the new selection provider.
    //
    if (selectionProvider != null) {
      selectionProvider.removeSelectionChangedListener(this);
    }
    if (part == null) {
      selectionProvider = null;
    }
    else {
      selectionProvider = part.getSite().getSelectionProvider();
      selectionProvider.addSelectionChangedListener(this);

      // Fake a selection changed event to update the menus.
      //
      if (selectionProvider.getSelection() != null) {
        selectionChanged(new SelectionChangedEvent(selectionProvider, selectionProvider.getSelection()));
      }
    }
  }

  /**
   * This implements {@link org.eclipse.jface.viewers.ISelectionChangedListener},
   * handling {@link org.eclipse.jface.viewers.SelectionChangedEvent}s by querying for the children and siblings
   * that can be added to the selected object and updating the menus accordingly.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void selectionChanged(SelectionChangedEvent event) {
    // Remove any menu items for old selection.
    //
    if (createChildMenuManager != null) {
      depopulateManager(createChildMenuManager, createChildActions);
    }
    if (createSiblingMenuManager != null) {
      depopulateManager(createSiblingMenuManager, createSiblingActions);
    }

    // Query the new selection for appropriate new child/sibling descriptors
    //
    Collection<?> newChildDescriptors = null;
    Collection<?> newSiblingDescriptors = null;

    ISelection selection = event.getSelection();
    if (selection instanceof IStructuredSelection && ((IStructuredSelection)selection).size() == 1) {
      Object object = ((IStructuredSelection)selection).getFirstElement();

      EditingDomain domain = ((IEditingDomainProvider)activeEditorPart).getEditingDomain();

      newChildDescriptors = domain.getNewChildDescriptors(object, null);
      newSiblingDescriptors = domain.getNewChildDescriptors(null, object);
    }

    // Generate actions for selection; populate and redraw the menus.
    //
    createChildActions = generateCreateChildActions(newChildDescriptors, selection);
    createSiblingActions = generateCreateSiblingActions(newSiblingDescriptors, selection);

    if (createChildMenuManager != null) {
      populateManager(createChildMenuManager, createChildActions, null);
      createChildMenuManager.update(true);
    }
    if (createSiblingMenuManager != null) {
      populateManager(createSiblingMenuManager, createSiblingActions, null);
      createSiblingMenuManager.update(true);
    }
  }

  /**
   * This generates a {@link org.eclipse.emf.edit.ui.action.CreateChildAction} for each object in <code>descriptors</code>,
   * and returns the collection of these actions.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected Collection<IAction> generateCreateChildActions(Collection<?> descriptors, ISelection selection) {
    Collection<IAction> actions = new ArrayList<IAction>();
    if (descriptors != null) {
      for (Object descriptor : descriptors) {
        actions.add(new CreateChildAction(activeEditorPart, selection, descriptor));
      }
    }
    return actions;
  }

  /**
   * This generates a {@link org.eclipse.emf.edit.ui.action.CreateSiblingAction} for each object in <code>descriptors</code>,
   * and returns the collection of these actions.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected Collection<IAction> generateCreateSiblingActions(Collection<?> descriptors, ISelection selection) {
    Collection<IAction> actions = new ArrayList<IAction>();
    if (descriptors != null) {
      for (Object descriptor : descriptors) {
        actions.add(new CreateSiblingAction(activeEditorPart, selection, descriptor));
      }
    }
    return actions;
  }

  /**
   * This populates the specified <code>manager</code> with {@link org.eclipse.jface.action.ActionContributionItem}s
   * based on the {@link org.eclipse.jface.action.IAction}s contained in the <code>actions</code> collection,
   * by inserting them before the specified contribution item <code>contributionID</code>.
   * If <code>contributionID</code> is <code>null</code>, they are simply added.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected void populateManager(IContributionManager manager, Collection<? extends IAction> actions, String contributionID) {
    if (actions != null) {
      for (IAction action : actions) {
        if (contributionID != null) {
          manager.insertBefore(contributionID, action);
        }
        else {
          manager.add(action);
        }
      }
    }
  }

  /**
   * This removes from the specified <code>manager</code> all {@link org.eclipse.jface.action.ActionContributionItem}s
   * based on the {@link org.eclipse.jface.action.IAction}s contained in the <code>actions</code> collection.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected void depopulateManager(IContributionManager manager, Collection<? extends IAction> actions) {
    if (actions != null) {
      IContributionItem[] items = manager.getItems();
      for (int i = 0; i < items.length; i++) {
        // Look into SubContributionItems
        //
        IContributionItem contributionItem = items[i];
        while (contributionItem instanceof SubContributionItem) {
          contributionItem = ((SubContributionItem)contributionItem).getInnerItem();
        }

        // Delete the ActionContributionItems with matching action.
        //
        if (contributionItem instanceof ActionContributionItem) {
          IAction action = ((ActionContributionItem)contributionItem).getAction();
          if (actions.contains(action)) {
            manager.remove(contributionItem);
          }
        }
      }
    }
  }

  /**
   * This populates the pop-up menu before it appears.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void menuAboutToShow(IMenuManager menuManager) {
    super.menuAboutToShow(menuManager);
    MenuManager submenuManager = null;

    submenuManager = new MenuManager(SoundgatesEditorPlugin.INSTANCE.getString("_UI_CreateChild_menu_item"));
    populateManager(submenuManager, createChildActions, null);
    menuManager.insertBefore("edit", submenuManager);

    submenuManager = new MenuManager(SoundgatesEditorPlugin.INSTANCE.getString("_UI_CreateSibling_menu_item"));
    populateManager(submenuManager, createSiblingActions, null);
    menuManager.insertBefore("edit", submenuManager);
  }

  /**
   * This inserts global actions before the "additions-end" separator.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected void addGlobalActions(IMenuManager menuManager) {
    menuManager.insertAfter("additions-end", new Separator("ui-actions"));
    menuManager.insertAfter("ui-actions", showPropertiesViewAction);

    // Re-evaluates the action's enablement (depends on the active editor).
    refreshViewerAction.setEnabled(refreshViewerAction.isEnabled());
    menuManager.insertAfter("ui-actions", refreshViewerAction);

    super.addGlobalActions(menuManager);
  }

  /**
   * This ensures that a delete action will clean up all references to deleted objects.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected boolean removeAllReferencesOnDelete() {
    return true;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache30;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.concurrent.locks.Lock;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.CacheLoader;
import org.apache.geode.cache.CacheLoaderException;
import org.apache.geode.cache.LoaderHelper;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.RegionExistsException;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.TimeoutException;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
import org.apache.geode.test.junit.categories.DLockTest;
/**
* This class tests distributed locking of global region entries.
*/
@Category({DLockTest.class})
public class GlobalLockingDUnitTest extends JUnit4CacheTestCase {
// Shared between the two CacheSerializableRunnable steps of testBug32356 within
// a single DUnit VM: step1 creates the subregion here so step2 can reuse it.
private static Region region_testBug32356;
/**
 * Creates the test case; delegates entirely to the JUnit4CacheTestCase setup.
 */
public GlobalLockingDUnitTest() {
  super();
}
/**
 * Returns region attributes for a <code>GLOBAL</code> region.
 *
 * @return attributes with {@link Scope#GLOBAL} set, suitable for regions whose
 *         entries support distributed locks
 */
protected RegionAttributes getGlobalAttrs() {
  AttributesFactory factory = new AttributesFactory();
  factory.setScope(Scope.GLOBAL);
  return factory.create();
}
/**
 * Returns the root region, lazily creating it with {@link #getGlobalAttrs()}
 * if it does not yet exist. Creation failures fail the test with a
 * descriptive message instead of the original opaque "Huh?".
 *
 * @return the (possibly newly created) root region
 */
protected Region getOrCreateRootRegion() {
  Region root = getRootRegion();
  if (root == null) {
    try {
      root = createRootRegion(getGlobalAttrs());
    } catch (RegionExistsException ex) {
      // getRootRegion() returned null, so the root should not already exist.
      fail("Root region unexpectedly already exists: " + ex);
    } catch (TimeoutException ex) {
      fail("Timed out creating root region: " + ex);
    }
  }
  return root;
}
////////////////////// Test Methods //////////////////////
/**
 * Regression test for bug 32356 (R2): tryLock with a zero timeout was broken
 * in the Distributed Lock Service.
 */
@Test
public void testBug32356() throws Exception {
  LogWriterUtils.getLogWriter().fine("[testBug32356]");
  Host host = Host.getHost(0);
  final String name = this.getUniqueName();
  final Object key = "32356";

  // Make every VM aware of the lock token by locking and unlocking it once.
  LogWriterUtils.getLogWriter().fine("[testBug32356] lock/unlock '32356' in all vms");
  for (int vmIndex = 0; vmIndex < 4; vmIndex++) {
    host.getVM(vmIndex).invoke(new CacheSerializableRunnable("testBug32356_step1") {
      @Override
      public void run2() throws CacheException {
        region_testBug32356 = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
        Lock entryLock = region_testBug32356.getDistributedLock(key);
        entryLock.lock();
        entryLock.unlock();
      }
    });
  }

  // A zero-wait tryLock must now succeed in every VM.
  LogWriterUtils.getLogWriter()
      .fine("[testBug32356] attempt try-lock of zero wait time in all vms");
  for (int vmIndex = 0; vmIndex < 4; vmIndex++) {
    host.getVM(vmIndex).invoke(new CacheSerializableRunnable("testBug32356_step2") {
      @Override
      public void run2() throws CacheException {
        Lock entryLock = region_testBug32356.getDistributedLock(key);
        // Bug 32356 caused this zero-timeout tryLock to fail.
        assertTrue("Found bug 32356", entryLock.tryLock());
        entryLock.unlock();
      }
    });
  }
}
/**
 * Verifies that getDistributedLock is rejected for every non-GLOBAL scope
 * (LOCAL, DISTRIBUTED_ACK, DISTRIBUTED_NO_ACK). The original repeated the same
 * try/catch block three times; it is now factored into a helper.
 */
@Test
public void testNonGlobalRegion() throws CacheException {
  String name = this.getUniqueName();
  AttributesFactory factory = new AttributesFactory(getGlobalAttrs());

  factory.setScope(Scope.LOCAL);
  assertDistributedLockUnsupported(
      getOrCreateRootRegion().createSubregion(name + "LOCAL", factory.create()));

  factory.setScope(Scope.DISTRIBUTED_ACK);
  assertDistributedLockUnsupported(
      getOrCreateRootRegion().createSubregion(name + "DACK", factory.create()));

  factory.setScope(Scope.DISTRIBUTED_NO_ACK);
  assertDistributedLockUnsupported(
      getOrCreateRootRegion().createSubregion(name + "DNOACK", factory.create()));
}

/**
 * Asserts that asking the given non-GLOBAL region for a distributed lock
 * throws IllegalStateException.
 */
private void assertDistributedLockUnsupported(Region region) {
  try {
    region.getDistributedLock("obj");
    fail("Should have thrown an IllegalStateException");
  } catch (IllegalStateException ex) {
    // expected: distributed entry locks require Scope.GLOBAL
  }
}
/**
 * Locks and unlocks a single entry's distributed lock entirely within one VM.
 */
@Test
public void testSingleVMLockUnlock() throws CacheException {
  String regionName = this.getUniqueName() + "-GLOBAL";
  Region globalRegion = getOrCreateRootRegion().createSubregion(regionName, getGlobalAttrs());
  Lock entryLock = globalRegion.getDistributedLock("obj");
  entryLock.lock();
  entryLock.unlock();
}
@Test
public void testIsLockGrantorAttribute() throws Exception {
String name = this.getUniqueName() + "-testIsLockGrantorAttribute";
AttributesFactory factory = new AttributesFactory(getGlobalAttrs());
factory.setLockGrantor(true);
Region region = getOrCreateRootRegion().createSubregion(name, factory.create());
assertEquals("Setting isLockGrantor failed to result in becoming lock grantor", true,
((org.apache.geode.internal.cache.DistributedRegion) region).getLockService()
.isLockGrantor());
}
/**
* Get the lock in one VM, try to create in other
*/
@Test
public void testCreateLockTimeout() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
final String name = this.getUniqueName();
final Object key = new Integer(5);
vm0.invoke(new CacheSerializableRunnable("Get lock") {
@Override
public void run2() throws CacheException {
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
Lock lock = r.getDistributedLock(key);
lock.lock();
}
});
vm1.invoke(new CacheSerializableRunnable("Lock timeout creating entry") {
@Override
public void run2() throws CacheException {
getOrCreateRootRegion().getCache().setLockTimeout(2);
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
try {
r.create(key, "the value");
fail("create() should have thrown TimeoutException");
} catch (TimeoutException ex) {
// pass
}
}
});
}
/**
* get the lock in one VM, try to put() in other
*/
@Test
public void testPutLockTimeout() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
final String name = this.getUniqueName();
final Object key = new Integer(5);
vm0.invoke(new CacheSerializableRunnable("Get lock") {
@Override
public void run2() throws CacheException {
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
Lock lock = r.getDistributedLock(key);
lock.lock();
}
});
vm1.invoke(new CacheSerializableRunnable("Lock timeout putting entry") {
@Override
public void run2() throws CacheException {
getOrCreateRootRegion().getCache().setLockTimeout(2);
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
try {
r.put(key, "the value");
fail("put() should have thrown TimeoutException");
} catch (TimeoutException ex) {
// pass
}
}
});
}
/**
* get lock in one VM, try to invoke loader in other
*/
@Test
public void testLoadLockTimeout() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
final String name = this.getUniqueName();
final Object key = new Integer(5);
// In first VM, get a lock on the entry
vm0.invoke(new CacheSerializableRunnable("Get lock") {
@Override
public void run2() throws CacheException {
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
Lock lock = r.getDistributedLock(key);
lock.lock();
}
});
// In second VM, do a get that tries to invoke a loader
vm1.invoke(new CacheSerializableRunnable("Lock timeout local loader") {
@Override
public void run2() throws CacheException {
getOrCreateRootRegion().getCache().setLockTimeout(2);
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
r.getAttributesMutator().setCacheLoader(new CacheLoader() {
@Override
public Object load(LoaderHelper helper) throws CacheLoaderException {
throw new CacheLoaderException("Made it into the loader!");
}
@Override
public void close() {}
});
try {
r.get(key);
fail("get() should have thrown TimeoutException");
} catch (TimeoutException ex) {
// pass
}
}
});
}
/**
* get lock in one VM, try to invalidate in other
*/
@Test
public void testInvalidateLockTimeout() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
final String name = this.getUniqueName();
final Object key = new Integer(5);
vm0.invoke(new CacheSerializableRunnable("Get lock") {
@Override
public void run2() throws CacheException {
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
Lock lock = r.getDistributedLock(key);
lock.lock();
}
});
vm1.invoke(new CacheSerializableRunnable("Lock timeout invalidating entry") {
@Override
public void run2() throws CacheException {
getOrCreateRootRegion().getCache().setLockTimeout(2);
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
try {
r.invalidate(key);
fail("invalidate() should have thrown TimeoutException");
} catch (TimeoutException ex) {
// pass
}
}
});
}
/**
* get lock in one VM, try to destroy in other
*/
@Test
public void testDestroyLockTimeout() {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
final String name = this.getUniqueName();
final Object key = new Integer(5);
vm0.invoke(new CacheSerializableRunnable("Get lock") {
@Override
public void run2() throws CacheException {
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
Lock lock = r.getDistributedLock(key);
lock.lock();
r.put(key, "value");
}
});
vm1.invoke(new CacheSerializableRunnable("Lock timeout destroying entry") {
@Override
public void run2() throws CacheException {
getOrCreateRootRegion().getCache().setLockTimeout(2);
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
r.get(key);
try {
r.destroy(key);
fail("destroy() should have thrown TimeoutException");
} catch (TimeoutException ex) {
// pass
}
}
});
}
/**
* get the lock, region.get(), region.put(), release lock
*/
@Test
public void testLockGetPut() throws CacheException {
Host host = Host.getHost(0);
VM vm0 = host.getVM(0);
VM vm1 = host.getVM(1);
final String name = this.getUniqueName() + "-GLOBAL";
final Object key = new Integer(5);
// First, create region & entry, and lock the entry, in Master VM
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
Lock lock = r.getDistributedLock(key);
lock.lock();
r.create(key, "value 1");
assertEquals("value 1", r.get(key));
// Now, make sure a locking operation times out in another VM
vm0.invoke(new CacheSerializableRunnable("Unsuccessful locking operation") {
@Override
public void run2() throws CacheException {
try {
getOrCreateRootRegion().getCache().setLockTimeout(2);
Region r2 = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
assertEquals("value 1", r2.get(key));
r2.put(key, "wrong value");
fail("put() should have thrown TimeoutException");
} catch (TimeoutException ex) {
// pass
}
}
});
// Now, in Master, do another locking operation, then release the lock
r.put(key, "value 2");
lock.unlock();
// Finally, successfully perform a locking in other VM
vm1.invoke(new CacheSerializableRunnable("Successful locking operation") {
@Override
public void run2() throws CacheException {
getOrCreateRootRegion().getCache().setLockTimeout(2);
Region r2 = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
assertEquals("value 2", r2.get(key));
r2.put(key, "value 3");
}
});
assertEquals("value 3", r.get(key));
}
/**
* Test Region.getRegionDistributedLock(), calling lock() and then unlock()
*/
@Test
public void testRegionDistributedLockSimple() throws CacheException {
final String name = this.getUniqueName();
Region r = getOrCreateRootRegion().createSubregion(name, getGlobalAttrs());
Lock lock = r.getRegionDistributedLock();
lock.lock();
lock.unlock();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.valves;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
import javax.servlet.ServletException;
import org.apache.catalina.AccessLog;
import org.apache.catalina.connector.Request;
import org.apache.catalina.connector.Response;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
/**
* <p>
* Tomcat port of <a href="http://httpd.apache.org/docs/trunk/mod/mod_remoteip.html">mod_remoteip</a>, this valve replaces the apparent
* client remote IP address and hostname for the request with the IP address list presented by a proxy or a load balancer via a request
* headers (e.g. "X-Forwarded-For").
* </p>
* <p>
* Another feature of this valve is to replace the apparent scheme (http/https) and server port with the scheme presented by a proxy or a
* load balancer via a request header (e.g. "X-Forwarded-Proto").
* </p>
* <p>
* This valve proceeds as follows:
* </p>
* <p>
* If the incoming <code>request.getRemoteAddr()</code> matches the valve's list of internal proxies :
* <ul>
* <li>Loop on the comma delimited list of IPs and hostnames passed by the preceding load balancer or proxy in the given request's Http
* header named <code>$remoteIpHeader</code> (default value <code>x-forwarded-for</code>). Values are processed in right-to-left order.</li>
* <li>For each ip/host of the list:
* <ul>
* <li>if it matches the internal proxies list, the ip/host is swallowed</li>
* <li>if it matches the trusted proxies list, the ip/host is added to the created proxies header</li>
* <li>otherwise, the ip/host is declared to be the remote ip and looping is stopped.</li>
* </ul>
* </li>
* <li>If the request http header named <code>$protocolHeader</code> (e.g. <code>x-forwarded-for</code>) equals to the value of
* <code>protocolHeaderHttpsValue</code> configuration parameter (default <code>https</code>) then <code>request.isSecure = true</code>,
* <code>request.scheme = https</code> and <code>request.serverPort = 443</code>. Note that 443 can be overwritten with the
* <code>$httpsServerPort</code> configuration parameter.</li>
* </ul>
* </p>
* <p>
* <strong>Configuration parameters:</strong>
* <table border="1">
* <tr>
* <th>RemoteIpValve property</th>
* <th>Description</th>
* <th>Equivalent mod_remoteip directive</th>
* <th>Format</th>
* <th>Default Value</th>
* </tr>
* <tr>
* <td>remoteIpHeader</td>
* <td>Name of the Http Header read by this valve that holds the list of traversed IP addresses starting from the requesting client</td>
* <td>RemoteIPHeader</td>
* <td>Compliant http header name</td>
* <td>x-forwarded-for</td>
* </tr>
* <tr>
* <td>internalProxies</td>
* <td>Regular expression that matches the IP addresses of internal proxies.
* If they appear in the <code>remoteIpHeader</code> value, they will be
* trusted and will not appear
* in the <code>proxiesHeader</code> value</td>
* <td>RemoteIPInternalProxy</td>
* <td>Regular expression (in the syntax supported by
* {@link java.util.regex.Pattern java.util.regex})</td>
* <td>10\.\d{1,3}\.\d{1,3}\.\d{1,3}|192\.168\.\d{1,3}\.\d{1,3}|169\.254\.\d{1,3}\.\d{1,3}|127\.\d{1,3}\.\d{1,3}\.\d{1,3}<br/>
* By default, 10/8, 192.168/16, 169.254/16 and 127/8 are allowed ; 172.16/12 has not been enabled by default because it is complex to
* describe with regular expressions</td>
 * </tr>
* <tr>
* <td>proxiesHeader</td>
* <td>Name of the http header created by this valve to hold the list of proxies that have been processed in the incoming
* <code>remoteIpHeader</code></td>
* <td>RemoteIPProxiesHeader</td>
* <td>Compliant http header name</td>
* <td>x-forwarded-by</td>
* </tr>
* <tr>
* <td>trustedProxies</td>
* <td>Regular expression that matches the IP addresses of trusted proxies.
* If they appear in the <code>remoteIpHeader</code> value, they will be
* trusted and will appear in the <code>proxiesHeader</code> value</td>
* <td>RemoteIPTrustedProxy</td>
* <td>Regular expression (in the syntax supported by
* {@link java.util.regex.Pattern java.util.regex})</td>
* <td> </td>
* </tr>
* <tr>
* <td>protocolHeader</td>
* <td>Name of the http header read by this valve that holds the flag that this request </td>
* <td>N/A</td>
* <td>Compliant http header name like <code>X-Forwarded-Proto</code>, <code>X-Forwarded-Ssl</code> or <code>Front-End-Https</code></td>
* <td><code>null</code></td>
* </tr>
* <tr>
* <td>protocolHeaderHttpsValue</td>
* <td>Value of the <code>protocolHeader</code> to indicate that it is an Https request</td>
* <td>N/A</td>
* <td>String like <code>https</code> or <code>ON</code></td>
* <td><code>https</code></td>
* </tr>
* <tr>
* <td>httpServerPort</td>
* <td>Value returned by {@link javax.servlet.ServletRequest#getServerPort()} when the <code>protocolHeader</code> indicates <code>http</code> protocol</td>
* <td>N/A</td>
* <td>integer</td>
* <td>80</td>
* </tr>
* <tr>
* <td>httpsServerPort</td>
* <td>Value returned by {@link javax.servlet.ServletRequest#getServerPort()} when the <code>protocolHeader</code> indicates <code>https</code> protocol</td>
* <td>N/A</td>
* <td>integer</td>
* <td>443</td>
* </tr>
* </table>
* </p>
 * <p>
* This Valve may be attached to any Container, depending on the granularity of the filtering you wish to perform.
* </p>
* <p>
* <strong>Regular expression vs. IP address blocks:</strong> <code>mod_remoteip</code> allows to use address blocks (e.g.
* <code>192.168/16</code>) to configure <code>RemoteIPInternalProxy</code> and <code>RemoteIPTrustedProxy</code> ; as Tomcat doesn't have a
* library similar to <a
* href="http://apr.apache.org/docs/apr/1.3/group__apr__network__io.html#gb74d21b8898b7c40bf7fd07ad3eb993d">apr_ipsubnet_test</a>,
* <code>RemoteIpValve</code> uses regular expression to configure <code>internalProxies</code> and <code>trustedProxies</code> in the same
* fashion as {@link RequestFilterValve} does.
* </p>
* <hr/>
* <p>
* <strong>Sample with internal proxies</strong>
* </p>
* <p>
* RemoteIpValve configuration:
* </p>
* <code><pre>
* <Valve
* className="org.apache.catalina.valves.RemoteIpValve"
* internalProxies="192\.168\.0\.10|192\.168\.0\.11"
* remoteIpHeader="x-forwarded-for"
* remoteIpProxiesHeader="x-forwarded-by"
* protocolHeader="x-forwarded-proto"
* /></pre></code>
* <p>
* Request values:
* <table border="1">
* <tr>
* <th>property</th>
* <th>Value Before RemoteIpValve</th>
* <th>Value After RemoteIpValve</th>
* </tr>
* <tr>
* <td>request.remoteAddr</td>
* <td>192.168.0.10</td>
* <td>140.211.11.130</td>
* </tr>
* <tr>
* <td>request.header['x-forwarded-for']</td>
* <td>140.211.11.130, 192.168.0.10</td>
* <td>null</td>
* </tr>
* <tr>
* <td>request.header['x-forwarded-by']</td>
* <td>null</td>
* <td>null</td>
* </tr>
* <tr>
* <td>request.header['x-forwarded-proto']</td>
* <td>https</td>
* <td>https</td>
* </tr>
* <tr>
* <td>request.scheme</td>
* <td>http</td>
* <td>https</td>
* </tr>
* <tr>
* <td>request.secure</td>
* <td>false</td>
* <td>true</td>
* </tr>
* <tr>
* <td>request.serverPort</td>
* <td>80</td>
* <td>443</td>
* </tr>
* </table>
 * Note : <code>x-forwarded-by</code> header is null because only internal proxies have been traversed by the request.
 * <code>x-forwarded-by</code> is null because all the proxies are trusted or internal.
* </p>
* <hr/>
* <p>
* <strong>Sample with trusted proxies</strong>
* </p>
* <p>
* RemoteIpValve configuration:
* </p>
* <code><pre>
* <Valve
* className="org.apache.catalina.valves.RemoteIpValve"
* internalProxies="192\.168\.0\.10|192\.168\.0\.11"
* remoteIpHeader="x-forwarded-for"
* remoteIpProxiesHeader="x-forwarded-by"
* trustedProxies="proxy1|proxy2"
* /></pre></code>
* <p>
* Request values:
* <table border="1">
* <tr>
* <th>property</th>
* <th>Value Before RemoteIpValve</th>
* <th>Value After RemoteIpValve</th>
* </tr>
* <tr>
* <td>request.remoteAddr</td>
* <td>192.168.0.10</td>
* <td>140.211.11.130</td>
* </tr>
* <tr>
* <td>request.header['x-forwarded-for']</td>
* <td>140.211.11.130, proxy1, proxy2</td>
* <td>null</td>
* </tr>
* <tr>
* <td>request.header['x-forwarded-by']</td>
* <td>null</td>
* <td>proxy1, proxy2</td>
* </tr>
* </table>
* Note : <code>proxy1</code> and <code>proxy2</code> are both trusted proxies that come in <code>x-forwarded-for</code> header, they both
* are migrated in <code>x-forwarded-by</code> header. <code>x-forwarded-by</code> is null because all the proxies are trusted or internal.
* </p>
* <hr/>
* <p>
* <strong>Sample with internal and trusted proxies</strong>
* </p>
* <p>
* RemoteIpValve configuration:
* </p>
* <code><pre>
* <Valve
* className="org.apache.catalina.valves.RemoteIpValve"
* internalProxies="192\.168\.0\.10|192\.168\.0\.11"
* remoteIpHeader="x-forwarded-for"
* remoteIpProxiesHeader="x-forwarded-by"
* trustedProxies="proxy1|proxy2"
* /></pre></code>
* <p>
* Request values:
* <table border="1">
* <tr>
* <th>property</th>
* <th>Value Before RemoteIpValve</th>
* <th>Value After RemoteIpValve</th>
* </tr>
* <tr>
* <td>request.remoteAddr</td>
* <td>192.168.0.10</td>
* <td>140.211.11.130</td>
* </tr>
* <tr>
* <td>request.header['x-forwarded-for']</td>
* <td>140.211.11.130, proxy1, proxy2, 192.168.0.10</td>
* <td>null</td>
* </tr>
* <tr>
* <td>request.header['x-forwarded-by']</td>
* <td>null</td>
* <td>proxy1, proxy2</td>
* </tr>
* </table>
* Note : <code>proxy1</code> and <code>proxy2</code> are both trusted proxies that come in <code>x-forwarded-for</code> header, they both
* are migrated in <code>x-forwarded-by</code> header. As <code>192.168.0.10</code> is an internal proxy, it does not appear in
* <code>x-forwarded-by</code>. <code>x-forwarded-by</code> is null because all the proxies are trusted or internal.
* </p>
* <hr/>
* <p>
* <strong>Sample with an untrusted proxy</strong>
* </p>
* <p>
* RemoteIpValve configuration:
* </p>
* <code><pre>
* <Valve
* className="org.apache.catalina.valves.RemoteIpValve"
* internalProxies="192\.168\.0\.10|192\.168\.0\.11"
* remoteIpHeader="x-forwarded-for"
* remoteIpProxiesHeader="x-forwarded-by"
* trustedProxies="proxy1|proxy2"
* /></pre></code>
* <p>
* Request values:
* <table border="1">
* <tr>
* <th>property</th>
* <th>Value Before RemoteIpValve</th>
* <th>Value After RemoteIpValve</th>
* </tr>
* <tr>
* <td>request.remoteAddr</td>
* <td>192.168.0.10</td>
* <td>untrusted-proxy</td>
* </tr>
* <tr>
* <td>request.header['x-forwarded-for']</td>
* <td>140.211.11.130, untrusted-proxy, proxy1</td>
* <td>140.211.11.130</td>
* </tr>
* <tr>
* <td>request.header['x-forwarded-by']</td>
* <td>null</td>
* <td>proxy1</td>
* </tr>
* </table>
 * Note : <code>x-forwarded-by</code> holds the trusted proxy <code>proxy1</code>. <code>x-forwarded-for</code> holds
 * <code>140.211.11.130</code> because <code>untrusted-proxy</code> is not trusted and thus, we can not trust that
 * <code>untrusted-proxy</code> is the actual remote ip. <code>request.remoteAddr</code> is <code>untrusted-proxy</code> that is an IP
 * verified by <code>proxy1</code>.
* </p>
*/
public class RemoteIpValve extends ValveBase {
/**
* {@link Pattern} for a comma delimited string that support whitespace characters
*/
private static final Pattern commaSeparatedValuesPattern = Pattern.compile("\\s*,\\s*");
/**
* The descriptive information related to this implementation.
*/
private static final String info = "org.apache.catalina.valves.RemoteIpValve/1.0";
/**
* Logger
*/
private static final Log log = LogFactory.getLog(RemoteIpValve.class);
/**
* Convert a given comma delimited String into an array of String
*
* @return array of String (non <code>null</code>)
*/
protected static String[] commaDelimitedListToStringArray(String commaDelimitedStrings) {
return (commaDelimitedStrings == null || commaDelimitedStrings.length() == 0) ? new String[0] : commaSeparatedValuesPattern
.split(commaDelimitedStrings);
}
/**
* Convert an array of strings in a comma delimited string
*/
protected static String listToCommaDelimitedString(List<String> stringList) {
if (stringList == null) {
return "";
}
StringBuilder result = new StringBuilder();
for (Iterator<String> it = stringList.iterator(); it.hasNext();) {
Object element = it.next();
if (element != null) {
result.append(element);
if (it.hasNext()) {
result.append(", ");
}
}
}
return result.toString();
}
/**
* @see #setHttpServerPort(int)
*/
private int httpServerPort = 80;
/**
* @see #setHttpsServerPort(int)
*/
private int httpsServerPort = 443;
private boolean changeLocalPort = false;
/**
* @see #setInternalProxies(String)
*/
private Pattern internalProxies = Pattern.compile(
"10\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}|" +
"192\\.168\\.\\d{1,3}\\.\\d{1,3}|" +
"169\\.254\\.\\d{1,3}\\.\\d{1,3}|" +
"127\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}");
/**
* @see #setProtocolHeader(String)
*/
private String protocolHeader = null;
/**
* @see #setProtocolHeaderHttpsValue(String)
*/
private String protocolHeaderHttpsValue = "https";
private String portHeader = null;
/**
* @see #setProxiesHeader(String)
*/
private String proxiesHeader = "X-Forwarded-By";
/**
* @see #setRemoteIpHeader(String)
*/
private String remoteIpHeader = "X-Forwarded-For";
/**
* @see #setRequestAttributesEnabled(boolean)
*/
private boolean requestAttributesEnabled = true;
/**
* @see RemoteIpValve#setTrustedProxies(String)
*/
private Pattern trustedProxies = null;
/**
* Default constructor that ensures {@link ValveBase#ValveBase(boolean)} is
* called with <code>true</code>.
*/
public RemoteIpValve() {
// Async requests are supported with this valve
super(true);
}
public int getHttpsServerPort() {
return httpsServerPort;
}
public int getHttpServerPort() {
return httpServerPort;
}
public boolean isChangeLocalPort() {
return changeLocalPort;
}
public void setChangeLocalPort(boolean changeLocalPort) {
this.changeLocalPort = changeLocalPort;
}
/**
* Obtain the name of the HTTP header used to override the value returned
* by {@link Request#getServerPort()} and (optionally depending on {link
* {@link #isChangeLocalPort()} {@link Request#getLocalPort()}.
*
* @return The HTTP header name
*/
public String getPortHeader() {
return portHeader;
}
/**
* Set the name of the HTTP header used to override the value returned
* by {@link Request#getServerPort()} and (optionally depending on {link
* {@link #isChangeLocalPort()} {@link Request#getLocalPort()}.
*
* @param portHeader The HTTP header name
*/
public void setPortHeader(String portHeader) {
this.portHeader = portHeader;
}
/**
* Return descriptive information about this Valve implementation.
*/
@Override
public String getInfo() {
return info;
}
/**
* @see #setInternalProxies(String)
* @return Regular expression that defines the internal proxies
*/
public String getInternalProxies() {
if (internalProxies == null) {
return null;
}
return internalProxies.toString();
}
/**
* @see #setProtocolHeader(String)
* @return the protocol header (e.g. "X-Forwarded-Proto")
*/
public String getProtocolHeader() {
return protocolHeader;
}
/**
* @see RemoteIpValve#setProtocolHeaderHttpsValue(String)
* @return the value of the protocol header for incoming https request (e.g. "https")
*/
public String getProtocolHeaderHttpsValue() {
return protocolHeaderHttpsValue;
}
/**
* @see #setProxiesHeader(String)
* @return the proxies header name (e.g. "X-Forwarded-By")
*/
public String getProxiesHeader() {
return proxiesHeader;
}
/**
* @see #setRemoteIpHeader(String)
* @return the remote IP header name (e.g. "X-Forwarded-For")
*/
public String getRemoteIpHeader() {
return remoteIpHeader;
}
/**
* @see #setRequestAttributesEnabled(boolean)
* @return <code>true</code> if the attributes will be logged, otherwise
* <code>false</code>
*/
public boolean getRequestAttributesEnabled() {
return requestAttributesEnabled;
}
/**
* @see #setTrustedProxies(String)
* @return Regular expression that defines the trusted proxies
*/
public String getTrustedProxies() {
if (trustedProxies == null) {
return null;
}
return trustedProxies.toString();
}
/**
* {@inheritDoc}
*/
@Override
public void invoke(Request request, Response response) throws IOException, ServletException {
final String originalRemoteAddr = request.getRemoteAddr();
final String originalRemoteHost = request.getRemoteHost();
final String originalScheme = request.getScheme();
final boolean originalSecure = request.isSecure();
final int originalServerPort = request.getServerPort();
if (internalProxies !=null &&
internalProxies.matcher(originalRemoteAddr).matches()) {
String remoteIp = null;
// In java 6, proxiesHeaderValue should be declared as a java.util.Deque
LinkedList<String> proxiesHeaderValue = new LinkedList<String>();
StringBuilder concatRemoteIpHeaderValue = new StringBuilder();
for (Enumeration<String> e = request.getHeaders(remoteIpHeader); e.hasMoreElements();) {
if (concatRemoteIpHeaderValue.length() > 0) {
concatRemoteIpHeaderValue.append(", ");
}
concatRemoteIpHeaderValue.append(e.nextElement());
}
String[] remoteIpHeaderValue = commaDelimitedListToStringArray(concatRemoteIpHeaderValue.toString());
int idx;
// loop on remoteIpHeaderValue to find the first trusted remote ip and to build the proxies chain
for (idx = remoteIpHeaderValue.length - 1; idx >= 0; idx--) {
String currentRemoteIp = remoteIpHeaderValue[idx];
remoteIp = currentRemoteIp;
if (internalProxies.matcher(currentRemoteIp).matches()) {
// do nothing, internalProxies IPs are not appended to the
} else if (trustedProxies != null &&
trustedProxies.matcher(currentRemoteIp).matches()) {
proxiesHeaderValue.addFirst(currentRemoteIp);
} else {
idx--; // decrement idx because break statement doesn't do it
break;
}
}
// continue to loop on remoteIpHeaderValue to build the new value of the remoteIpHeader
LinkedList<String> newRemoteIpHeaderValue = new LinkedList<String>();
for (; idx >= 0; idx--) {
String currentRemoteIp = remoteIpHeaderValue[idx];
newRemoteIpHeaderValue.addFirst(currentRemoteIp);
}
if (remoteIp != null) {
request.setRemoteAddr(remoteIp);
request.setRemoteHost(remoteIp);
// use request.coyoteRequest.mimeHeaders.setValue(str).setString(str) because request.addHeader(str, str) is no-op in Tomcat
// 6.0
if (proxiesHeaderValue.size() == 0) {
request.getCoyoteRequest().getMimeHeaders().removeHeader(proxiesHeader);
} else {
String commaDelimitedListOfProxies = listToCommaDelimitedString(proxiesHeaderValue);
request.getCoyoteRequest().getMimeHeaders().setValue(proxiesHeader).setString(commaDelimitedListOfProxies);
}
if (newRemoteIpHeaderValue.size() == 0) {
request.getCoyoteRequest().getMimeHeaders().removeHeader(remoteIpHeader);
} else {
String commaDelimitedRemoteIpHeaderValue = listToCommaDelimitedString(newRemoteIpHeaderValue);
request.getCoyoteRequest().getMimeHeaders().setValue(remoteIpHeader).setString(commaDelimitedRemoteIpHeaderValue);
}
}
if (protocolHeader != null) {
String protocolHeaderValue = request.getHeader(protocolHeader);
if (protocolHeaderValue == null) {
// don't modify the secure,scheme and serverPort attributes
// of the request
} else if (protocolHeaderHttpsValue.equalsIgnoreCase(protocolHeaderValue)) {
request.setSecure(true);
// use request.coyoteRequest.scheme instead of request.setScheme() because request.setScheme() is no-op in Tomcat 6.0
request.getCoyoteRequest().scheme().setString("https");
setPorts(request, httpsServerPort);
} else {
request.setSecure(false);
// use request.coyoteRequest.scheme instead of request.setScheme() because request.setScheme() is no-op in Tomcat 6.0
request.getCoyoteRequest().scheme().setString("http");
setPorts(request, httpServerPort);
}
}
if (log.isDebugEnabled()) {
log.debug("Incoming request " + request.getRequestURI() + " with originalRemoteAddr '" + originalRemoteAddr
+ "', originalRemoteHost='" + originalRemoteHost + "', originalSecure='" + originalSecure + "', originalScheme='"
+ originalScheme + "' will be seen as newRemoteAddr='" + request.getRemoteAddr() + "', newRemoteHost='"
+ request.getRemoteHost() + "', newScheme='" + request.getScheme() + "', newSecure='" + request.isSecure() + "'");
}
} else {
if (log.isDebugEnabled()) {
log.debug("Skip RemoteIpValve for request " + request.getRequestURI() + " with originalRemoteAddr '"
+ request.getRemoteAddr() + "'");
}
}
if (requestAttributesEnabled) {
request.setAttribute(AccessLog.REMOTE_ADDR_ATTRIBUTE,
request.getRemoteAddr());
request.setAttribute(AccessLog.REMOTE_HOST_ATTRIBUTE,
request.getRemoteHost());
request.setAttribute(AccessLog.PROTOCOL_ATTRIBUTE,
request.getProtocol());
request.setAttribute(AccessLog.SERVER_PORT_ATTRIBUTE,
Integer.valueOf(request.getServerPort()));
}
try {
getNext().invoke(request, response);
} finally {
request.setRemoteAddr(originalRemoteAddr);
request.setRemoteHost(originalRemoteHost);
request.setSecure(originalSecure);
// use request.coyoteRequest.scheme instead of request.setScheme() because request.setScheme() is no-op in Tomcat 6.0
request.getCoyoteRequest().scheme().setString(originalScheme);
request.setServerPort(originalServerPort);
}
}
private void setPorts(Request request, int defaultPort) {
int port = defaultPort;
if (portHeader != null) {
String portHeaderValue = request.getHeader(portHeader);
if (portHeaderValue != null) {
try {
port = Integer.parseInt(portHeaderValue);
} catch (NumberFormatException nfe) {
if (log.isDebugEnabled()) {
log.debug(sm.getString(
"remoteIpValve.invalidPortHeader",
portHeaderValue, portHeader), nfe);
}
}
}
}
request.setServerPort(port);
if (changeLocalPort) {
request.setLocalPort(port);
}
}
/**
* <p>
* Server Port value if the {@link #protocolHeader} is not <code>null</code> and does not indicate HTTP
* </p>
* <p>
* Default value : 80
* </p>
*/
public void setHttpServerPort(int httpServerPort) {
this.httpServerPort = httpServerPort;
}
/**
* <p>
* Server Port value if the {@link #protocolHeader} indicates HTTPS
* </p>
* <p>
* Default value : 443
* </p>
*/
public void setHttpsServerPort(int httpsServerPort) {
this.httpsServerPort = httpsServerPort;
}
/**
* <p>
* Regular expression that defines the internal proxies.
* </p>
* <p>
* Default value : 10\.\d{1,3}\.\d{1,3}\.\d{1,3}|192\.168\.\d{1,3}\.\d{1,3}|169\.254.\d{1,3}.\d{1,3}|127\.\d{1,3}\.\d{1,3}\.\d{1,3}
* </p>
*/
public void setInternalProxies(String internalProxies) {
if (internalProxies == null || internalProxies.length() == 0) {
this.internalProxies = null;
} else {
this.internalProxies = Pattern.compile(internalProxies);
}
}
/**
* <p>
* Header that holds the incoming protocol, usally named <code>X-Forwarded-Proto</code>. If <code>null</code>, request.scheme and
* request.secure will not be modified.
* </p>
* <p>
* Default value : <code>null</code>
* </p>
*/
public void setProtocolHeader(String protocolHeader) {
this.protocolHeader = protocolHeader;
}
/**
* <p>
* Case insensitive value of the protocol header to indicate that the incoming http request uses SSL.
* </p>
* <p>
* Default value : <code>https</code>
* </p>
*/
public void setProtocolHeaderHttpsValue(String protocolHeaderHttpsValue) {
this.protocolHeaderHttpsValue = protocolHeaderHttpsValue;
}
/**
* <p>
* The proxiesHeader directive specifies a header into which mod_remoteip will collect a list of all of the intermediate client IP
* addresses trusted to resolve the actual remote IP. Note that intermediate RemoteIPTrustedProxy addresses are recorded in this header,
* while any intermediate RemoteIPInternalProxy addresses are discarded.
* </p>
* <p>
* Name of the http header that holds the list of trusted proxies that has been traversed by the http request.
* </p>
* <p>
* The value of this header can be comma delimited.
* </p>
* <p>
* Default value : <code>X-Forwarded-By</code>
* </p>
*/
public void setProxiesHeader(String proxiesHeader) {
this.proxiesHeader = proxiesHeader;
}
    /**
     * <p>
     * Name of the http header from which the remote ip is extracted.
     * </p>
     * <p>
     * The value of this header can be comma delimited.
     * </p>
     * <p>
     * Default value : <code>X-Forwarded-For</code>
     * </p>
     *
     * @param remoteIpHeader name of the http header from which the remote ip is extracted
     */
    public void setRemoteIpHeader(String remoteIpHeader) {
        this.remoteIpHeader = remoteIpHeader;
    }
    /**
     * Should this valve set request attributes for IP address, Hostname,
     * protocol and port used for the request? These are typically used in
     * conjunction with the {@link AccessLog} which will otherwise log the
     * original values. Default is <code>true</code>.
     *
     * The attributes set are:
     * <ul>
     * <li>org.apache.catalina.AccessLog.RemoteAddr</li>
     * <li>org.apache.catalina.AccessLog.RemoteHost</li>
     * <li>org.apache.catalina.AccessLog.Protocol</li>
     * <li>org.apache.catalina.AccessLog.ServerPort</li>
     * </ul>
     *
     * @param requestAttributesEnabled <code>true</code> causes the attributes
     *                                 to be set, <code>false</code> disables
     *                                 the setting of the attributes.
     */
    public void setRequestAttributesEnabled(boolean requestAttributesEnabled) {
        this.requestAttributesEnabled = requestAttributesEnabled;
    }
/**
* <p>
* Regular expression defining proxies that are trusted when they appear in
* the {@link #remoteIpHeader} header.
* </p>
* <p>
* Default value : empty list, no external proxy is trusted.
* </p>
*/
public void setTrustedProxies(String trustedProxies) {
if (trustedProxies == null || trustedProxies.length() == 0) {
this.trustedProxies = null;
} else {
this.trustedProxies = Pattern.compile(trustedProxies);
}
}
}
| |
/*
* Copyright 2016 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.flush;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelOutboundInvoker;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.ChannelPromise;
import java.util.concurrent.Future;
/**
* {@link ChannelHandler} which consolidates {@link Channel#flush()} / {@link ChannelHandlerContext#flush()}
* operations (which also includes
* {@link Channel#writeAndFlush(Object)} / {@link Channel#writeAndFlush(Object, ChannelPromise)} and
* {@link ChannelOutboundInvoker#writeAndFlush(Object)} /
* {@link ChannelOutboundInvoker#writeAndFlush(Object, ChannelPromise)}).
* <p>
* Flush operations are generally speaking expensive as these may trigger a syscall on the transport level. Thus it is
* in most cases (where write latency can be traded with throughput) a good idea to try to minimize flush operations
* as much as possible.
* <p>
* If a read loop is currently ongoing, {@link #flush(ChannelHandlerContext)} will not be passed on to the next
* {@link ChannelHandler} in the {@link ChannelPipeline}, as it will pick up any pending flushes when
* {@link #channelReadComplete(ChannelHandlerContext)} is triggered.
* If no read loop is ongoing, the behavior depends on the {@code consolidateWhenNoReadInProgress} constructor argument:
* <ul>
* <li>if {@code false}, flushes are passed on to the next handler directly;</li>
* <li>if {@code true}, the invocation of the next handler is submitted as a separate task on the event loop. Under
* high throughput, this gives the opportunity to process other flushes before the task gets executed, thus
* batching multiple flushes into one.</li>
* </ul>
* If {@code explicitFlushAfterFlushes} is reached the flush will be forwarded as well (whether while in a read loop, or
* while batching outside of a read loop).
* <p>
* If the {@link Channel} becomes non-writable it will also try to execute any pending flush operations.
* <p>
* The {@link FlushConsolidationHandler} should be put as first {@link ChannelHandler} in the
* {@link ChannelPipeline} to have the best effect.
*/
public class FlushConsolidationHandler implements ChannelHandler {
    private final int explicitFlushAfterFlushes;
    private final boolean consolidateWhenNoReadInProgress;
    // Task that performs a consolidated flush outside of a read loop; null unless
    // consolidateWhenNoReadInProgress is true (see constructor).
    private final Runnable flushTask;
    // Number of intercepted flush() calls that have not yet been forwarded downstream.
    private int flushPendingCount;
    // True between channelRead(...) and channelReadComplete(...) / error / close.
    private boolean readInProgress;
    private ChannelHandlerContext ctx;
    // Handle of the scheduled consolidation task; null when none is pending.
    private Future<?> nextScheduledFlush;
    /**
     * The default number of flushes after which a flush will be forwarded to downstream handlers (whether while in a
     * read loop, or while batching outside of a read loop).
     */
    public static final int DEFAULT_EXPLICIT_FLUSH_AFTER_FLUSHES = 256;
    /**
     * Create new instance which explicit flush after {@value DEFAULT_EXPLICIT_FLUSH_AFTER_FLUSHES} pending flush
     * operations at the latest.
     */
    public FlushConsolidationHandler() {
        this(DEFAULT_EXPLICIT_FLUSH_AFTER_FLUSHES, false);
    }
    /**
     * Create new instance which doesn't consolidate flushes when no read is in progress.
     *
     * @param explicitFlushAfterFlushes the number of flushes after which an explicit flush will be done.
     */
    public FlushConsolidationHandler(int explicitFlushAfterFlushes) {
        this(explicitFlushAfterFlushes, false);
    }
    /**
     * Create new instance.
     *
     * @param explicitFlushAfterFlushes the number of flushes after which an explicit flush will be done.
     * @param consolidateWhenNoReadInProgress whether to consolidate flushes even when no read loop is currently
     *                                        ongoing.
     * @throws IllegalArgumentException if {@code explicitFlushAfterFlushes} is not positive.
     */
    public FlushConsolidationHandler(int explicitFlushAfterFlushes, boolean consolidateWhenNoReadInProgress) {
        if (explicitFlushAfterFlushes <= 0) {
            throw new IllegalArgumentException("explicitFlushAfterFlushes: "
                    + explicitFlushAfterFlushes + " (expected: > 0)");
        }
        this.explicitFlushAfterFlushes = explicitFlushAfterFlushes;
        this.consolidateWhenNoReadInProgress = consolidateWhenNoReadInProgress;
        flushTask = consolidateWhenNoReadInProgress ?
                () -> {
                    if (flushPendingCount > 0 && !readInProgress) {
                        flushPendingCount = 0;
                        // BUGFIX: clear the handle *before* flushing. ctx.flush() may trigger
                        // reentrant writeAndFlush() calls; with the previous order (flush
                        // first, null the handle afterwards) such a reentrant flush saw a
                        // non-null handle in scheduleFlush(...), skipped scheduling, and its
                        // flush was then silently lost when the handle was cleared.
                        nextScheduledFlush = null;
                        ctx.flush();
                    } // else we'll flush when the read completes
                }
                : null;
    }
    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
        this.ctx = ctx;
    }
    @Override
    public void flush(ChannelHandlerContext ctx) throws Exception {
        if (readInProgress) {
            // If there is still a read in progress we are sure we will see a channelReadComplete(...) call. Thus
            // we only need to flush if we reach the explicitFlushAfterFlushes limit.
            if (++flushPendingCount == explicitFlushAfterFlushes) {
                flushNow(ctx);
            }
        } else if (consolidateWhenNoReadInProgress) {
            // Flush immediately if we reach the threshold, otherwise schedule
            if (++flushPendingCount == explicitFlushAfterFlushes) {
                flushNow(ctx);
            } else {
                scheduleFlush(ctx);
            }
        } else {
            // Always flush directly
            flushNow(ctx);
        }
    }
    @Override
    public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
        // This may be the last event in the read loop, so flush now!
        resetReadAndFlushIfNeeded(ctx);
        ctx.fireChannelReadComplete();
    }
    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        readInProgress = true;
        ctx.fireChannelRead(msg);
    }
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        // To ensure we not miss to flush anything, do it now.
        resetReadAndFlushIfNeeded(ctx);
        ctx.fireExceptionCaught(cause);
    }
    @Override
    public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
        // Try to flush one last time if flushes are pending before disconnect the channel.
        resetReadAndFlushIfNeeded(ctx);
        ctx.disconnect(promise);
    }
    @Override
    public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
        // Try to flush one last time if flushes are pending before close the channel.
        resetReadAndFlushIfNeeded(ctx);
        ctx.close(promise);
    }
    @Override
    public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception {
        if (!ctx.channel().isWritable()) {
            // The writability of the channel changed to false, so flush all consolidated flushes now to free up memory.
            flushIfNeeded(ctx);
        }
        ctx.fireChannelWritabilityChanged();
    }
    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
        // Do not leave batched flushes behind when the handler leaves the pipeline.
        flushIfNeeded(ctx);
    }
    // Ends the current read loop (if any) and forwards any batched flushes.
    private void resetReadAndFlushIfNeeded(ChannelHandlerContext ctx) {
        readInProgress = false;
        flushIfNeeded(ctx);
    }
    // Forwards a flush downstream only if at least one flush() was intercepted.
    private void flushIfNeeded(ChannelHandlerContext ctx) {
        if (flushPendingCount > 0) {
            flushNow(ctx);
        }
    }
    // Unconditionally forwards a flush, cancelling any scheduled consolidation task first.
    private void flushNow(ChannelHandlerContext ctx) {
        cancelScheduledFlush();
        flushPendingCount = 0;
        ctx.flush();
    }
    // Schedules flushTask on the event loop unless one is already pending.
    private void scheduleFlush(final ChannelHandlerContext ctx) {
        if (nextScheduledFlush == null) {
            // Run as soon as possible, but still yield to give a chance for additional writes to enqueue.
            nextScheduledFlush = ctx.channel().eventLoop().submit(flushTask);
        }
    }
    private void cancelScheduledFlush() {
        if (nextScheduledFlush != null) {
            nextScheduledFlush.cancel(false);
            nextScheduledFlush = null;
        }
    }
}
| |
/*
* $Id: MultiSplitPane.java,v 1.1 2007/09/21 06:43:06 searle Exp $
*
* Copyright 2004 Sun Microsystems, Inc., 4150 Network Circle,
* Santa Clara, California 95054, U.S.A. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
/* Modified by Steve Searle for Apollo use:
* Back ported to Java 1.4 compatible code
*/
package org.jdesktop.swingx;
import java.awt.Color;
import java.awt.Cursor;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import javax.accessibility.AccessibleContext;
import javax.accessibility.AccessibleRole;
import javax.swing.JPanel;
import javax.swing.event.MouseInputAdapter;
import org.jdesktop.swingx.MultiSplitLayout.Divider;
import org.jdesktop.swingx.MultiSplitLayout.Node;
import java.util.*;
/**
*
* <p>
* All properties in this class are bound: when a properties value
* is changed, all PropertyChangeListeners are fired.
*
* @author Hans Muller
*/
public class MultiSplitPane extends JPanel {
    private AccessibleContext accessibleContext = null;
    private boolean continuousLayout = true;
    private DividerPainter dividerPainter = new DefaultDividerPainter();
    /**
     * Creates a MultiSplitPane with its LayoutManager set to
     * an empty MultiSplitLayout.
     */
    public MultiSplitPane() {
        super(new MultiSplitLayout());
        InputHandler inputHandler = new InputHandler();
        addMouseListener(inputHandler);
        addMouseMotionListener(inputHandler);
        addKeyListener(inputHandler);
        // Focus is needed so the ESCAPE key can cancel an in-progress drag.
        setFocusable(true);
    }
    /**
     * A convenience method that returns the layout manager cast
     * to MutliSplitLayout.
     *
     * @return this MultiSplitPane's layout manager
     * @see java.awt.Container#getLayout
     * @see #setModel
     */
    public final MultiSplitLayout getMultiSplitLayout() {
        return (MultiSplitLayout)getLayout();
    }
    /**
     * A convenience method that sets the MultiSplitLayout model.
     * Equivalent to <code>getMultiSplitLayout.setModel(model)</code>
     *
     * @param model the root of the MultiSplitLayout model
     * @see #getMultiSplitLayout
     * @see MultiSplitLayout#setModel
     */
    public final void setModel(Node model) {
        getMultiSplitLayout().setModel(model);
    }
    /**
     * A convenience method that sets the MultiSplitLayout dividerSize
     * property. Equivalent to
     * <code>getMultiSplitLayout().setDividerSize(newDividerSize)</code>.
     *
     * @param dividerSize the value of the dividerSize property
     * @see #getMultiSplitLayout
     * @see MultiSplitLayout#setDividerSize
     */
    public final void setDividerSize(int dividerSize) {
        getMultiSplitLayout().setDividerSize(dividerSize);
    }
    /**
     * Sets the value of the <code>continuousLayout</code> property.
     * If true, then the layout is revalidated continuously while
     * a divider is being moved. The default value of this property
     * is true.
     *
     * @param continuousLayout value of the continuousLayout property
     * @see #isContinuousLayout
     */
    public void setContinuousLayout(boolean continuousLayout) {
        // BUGFIX: capture the field's current value before overwriting it. The
        // previous code read the shadowing parameter, so the "old" value handed
        // to firePropertyChange always equalled the new one and bound-property
        // listeners were never notified of an actual change.
        boolean oldContinuousLayout = this.continuousLayout;
        this.continuousLayout = continuousLayout;
        firePropertyChange("continuousLayout", oldContinuousLayout, continuousLayout);
    }
    /**
     * Returns true if dragging a divider only updates
     * the layout when the drag gesture ends (typically, when the
     * mouse button is released).
     *
     * @return the value of the <code>continuousLayout</code> property
     * @see #setContinuousLayout
     */
    public boolean isContinuousLayout() {
        return continuousLayout;
    }
    /**
     * Returns the Divider that's currently being moved, typically
     * because the user is dragging it, or null.
     *
     * @return the Divider that's being moved or null.
     */
    public Divider activeDivider() {
        return dragDivider;
    }
    /**
     * Draws a single Divider. Typically used to specialize the
     * way the active Divider is painted.
     *
     * @see #getDividerPainter
     * @see #setDividerPainter
     */
    public static abstract class DividerPainter {
        /**
         * Paint a single Divider.
         *
         * @param g the Graphics object to paint with
         * @param divider the Divider to paint
         */
        public abstract void paint(Graphics g, Divider divider);
    }
    // Default painter: fills the active divider in black, but only while a
    // non-continuous drag is underway (continuous layout repaints the real UI).
    private class DefaultDividerPainter extends DividerPainter {
        public void paint(Graphics g, Divider divider) {
            if ((divider == activeDivider()) && !isContinuousLayout()) {
                Graphics2D g2d = (Graphics2D)g;
                g2d.setColor(Color.black);
                g2d.fill(divider.getBounds());
            }
        }
    }
    /**
     * The DividerPainter that's used to paint Dividers on this MultiSplitPane.
     * This property may be null.
     *
     * @return the value of the dividerPainter Property
     * @see #setDividerPainter
     */
    public DividerPainter getDividerPainter() {
        return dividerPainter;
    }
    /**
     * Sets the DividerPainter that's used to paint Dividers on this
     * MultiSplitPane. The default DividerPainter only draws
     * the activeDivider (if there is one) and then, only if
     * continuousLayout is false. The value of this property is
     * used by the paintChildren method: Dividers are painted after
     * the MultiSplitPane's children have been rendered so that
     * the activeDivider can appear "on top of" the children.
     *
     * @param dividerPainter the value of the dividerPainter property, can be null
     * @see #paintChildren
     * @see #activeDivider
     */
    public void setDividerPainter(DividerPainter dividerPainter) {
        this.dividerPainter = dividerPainter;
    }
    /**
     * Uses the DividerPainter (if any) to paint each Divider that
     * overlaps the clip Rectangle. This is done after the call to
     * <code>super.paintChildren()</code> so that Dividers can be
     * rendered "on top of" the children.
     * <p>
     * {@inheritDoc}
     */
    protected void paintChildren(Graphics g) {
        super.paintChildren(g);
        DividerPainter dp = getDividerPainter();
        Rectangle clipR = g.getClipBounds();
        if ((dp != null) && (clipR != null)) {
            // Paint on a copy of the Graphics so the painter cannot corrupt
            // the state used by subsequent painting.
            Graphics dpg = g.create();
            try {
                MultiSplitLayout msl = getMultiSplitLayout();
                Iterator divIter = msl.dividersThatOverlap(clipR).iterator();
                while (divIter.hasNext()) {
                    Divider divider = (Divider)divIter.next();
                    dp.paint(dpg, divider);
                }
            }
            finally {
                dpg.dispose();
            }
        }
    }
    // --- Divider drag state -------------------------------------------------
    private boolean dragUnderway = false;
    private MultiSplitLayout.Divider dragDivider = null;
    // Bounds of the divider when the drag started, used to undo on cancel.
    private Rectangle initialDividerBounds = null;
    private boolean oldFloatingDividers = true;
    // Offset of the mouse press within the divider, so the divider doesn't jump.
    private int dragOffsetX = 0;
    private int dragOffsetY = 0;
    // Divider travel limits derived from the adjacent nodes' bounds.
    private int dragMin = -1;
    private int dragMax = -1;
    // Begins a divider drag if the press happened on a divider with siblings
    // on both sides; computes the permitted travel range.
    private void startDrag(int mx, int my) {
        requestFocusInWindow();
        MultiSplitLayout msl = getMultiSplitLayout();
        MultiSplitLayout.Divider divider = msl.dividerAt(mx, my);
        if (divider != null) {
            MultiSplitLayout.Node prevNode = divider.previousSibling();
            MultiSplitLayout.Node nextNode = divider.nextSibling();
            if ((prevNode == null) || (nextNode == null)) {
                dragUnderway = false;
            }
            else {
                initialDividerBounds = divider.getBounds();
                dragOffsetX = mx - initialDividerBounds.x;
                dragOffsetY = my - initialDividerBounds.y;
                dragDivider = divider;
                Rectangle prevNodeBounds = prevNode.getBounds();
                Rectangle nextNodeBounds = nextNode.getBounds();
                if (dragDivider.isVertical()) {
                    dragMin = prevNodeBounds.x;
                    dragMax = nextNodeBounds.x + nextNodeBounds.width;
                    dragMax -= dragDivider.getBounds().width;
                }
                else {
                    dragMin = prevNodeBounds.y;
                    dragMax = nextNodeBounds.y + nextNodeBounds.height;
                    dragMax -= dragDivider.getBounds().height;
                }
                oldFloatingDividers = getMultiSplitLayout().getFloatingDividers();
                getMultiSplitLayout().setFloatingDividers(false);
                dragUnderway = true;
            }
        }
        else {
            dragUnderway = false;
        }
    }
    // Repaints the full travel range of the divider being dragged.
    private void repaintDragLimits() {
        Rectangle damageR = dragDivider.getBounds();
        if (dragDivider.isVertical()) {
            damageR.x = dragMin;
            damageR.width = dragMax - dragMin;
        }
        else {
            damageR.y = dragMin;
            damageR.height = dragMax - dragMin;
        }
        repaint(damageR);
    }
    // Moves the dragged divider to follow the mouse, clamped to [dragMin, dragMax].
    private void updateDrag(int mx, int my) {
        if (!dragUnderway) {
            return;
        }
        Rectangle oldBounds = dragDivider.getBounds();
        Rectangle bounds = new Rectangle(oldBounds);
        if (dragDivider.isVertical()) {
            bounds.x = mx - dragOffsetX;
            bounds.x = Math.max(bounds.x, dragMin);
            bounds.x = Math.min(bounds.x, dragMax);
        }
        else {
            bounds.y = my - dragOffsetY;
            bounds.y = Math.max(bounds.y, dragMin);
            bounds.y = Math.min(bounds.y, dragMax);
        }
        dragDivider.setBounds(bounds);
        if (isContinuousLayout()) {
            revalidate();
            repaintDragLimits();
        }
        else {
            repaint(oldBounds.union(bounds));
        }
    }
    // Resets all drag bookkeeping to its idle defaults.
    private void clearDragState() {
        dragDivider = null;
        initialDividerBounds = null;
        oldFloatingDividers = true;
        dragOffsetX = dragOffsetY = 0;
        dragMin = dragMax = -1;
        dragUnderway = false;
    }
    // Commits the drag: in non-continuous mode the layout is only validated now.
    private void finishDrag(int x, int y) {
        if (dragUnderway) {
            clearDragState();
            if (!isContinuousLayout()) {
                revalidate();
                repaint();
            }
        }
    }
    // Aborts the drag (ESCAPE): restores the divider's original bounds and the
    // layout's previous floatingDividers setting.
    private void cancelDrag() {
        if (dragUnderway) {
            dragDivider.setBounds(initialDividerBounds);
            getMultiSplitLayout().setFloatingDividers(oldFloatingDividers);
            setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
            repaint();
            revalidate();
            clearDragState();
        }
    }
    // Shows a resize cursor while hovering a divider; no-op during a drag.
    private void updateCursor(int x, int y, boolean show) {
        if (dragUnderway) {
            return;
        }
        int cursorID = Cursor.DEFAULT_CURSOR;
        if (show) {
            MultiSplitLayout.Divider divider = getMultiSplitLayout().dividerAt(x, y);
            if (divider != null) {
                cursorID = (divider.isVertical()) ?
                    Cursor.E_RESIZE_CURSOR :
                    Cursor.N_RESIZE_CURSOR;
            }
        }
        setCursor(Cursor.getPredefinedCursor(cursorID));
    }
    // Routes mouse and keyboard input to the drag state machine above.
    private class InputHandler extends MouseInputAdapter implements KeyListener {
        public void mouseEntered(MouseEvent e) {
            updateCursor(e.getX(), e.getY(), true);
        }
        public void mouseMoved(MouseEvent e) {
            updateCursor(e.getX(), e.getY(), true);
        }
        public void mouseExited(MouseEvent e) {
            updateCursor(e.getX(), e.getY(), false);
        }
        public void mousePressed(MouseEvent e) {
            startDrag(e.getX(), e.getY());
        }
        public void mouseReleased(MouseEvent e) {
            finishDrag(e.getX(), e.getY());
        }
        public void mouseDragged(MouseEvent e) {
            updateDrag(e.getX(), e.getY());
        }
        public void keyPressed(KeyEvent e) {
            if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
                cancelDrag();
            }
        }
        public void keyReleased(KeyEvent e) { }
        public void keyTyped(KeyEvent e) { }
    }
    public AccessibleContext getAccessibleContext() {
        // Lazily created, as is conventional for Swing components.
        if( accessibleContext == null ) {
            accessibleContext = new AccessibleMultiSplitPane();
        }
        return accessibleContext;
    }
    protected class AccessibleMultiSplitPane extends AccessibleJPanel {
        public AccessibleRole getAccessibleRole() {
            return AccessibleRole.SPLIT_PANE;
        }
    }
}
| |
// =================================================================================================
// Copyright 2011 Twitter, Inc.
// -------------------------------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this work except in compliance with the License.
// You may obtain a copy of the License in the LICENSE file, or at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =================================================================================================
package com.twitter.common.stats;
import com.google.common.collect.Lists;
import org.junit.Before;
import org.junit.Test;
import java.util.Collections;
import java.util.List;
import static org.junit.Assert.*;
import static org.hamcrest.CoreMatchers.*;
/**
* @author William Farner
*/
public class PercentileTest {
    // Tolerance for floating-point percentile comparisons.
    private static final double EPSILON = 1e-6;
    private static final float SAMPLE_RATE = 100;
    private static final double[] PERCENTILES = new double[] {0, 10, 50, 90, 99, 99.9, 99.99, 100};
    private Percentile<Integer> percentiles;
    @Before
    public void setUp() {
        percentiles = new Percentile<Integer>("test", SAMPLE_RATE, PERCENTILES);
    }
    @Test
    public void testNoData() {
        checkPercentiles(percentiles, 0, 0, 0, 0, 0, 0, 0, 0);
        checkValuesAreFlushed(percentiles);
    }
    @Test
    public void testSingleValue() {
        percentiles.record(10);
        checkPercentiles(percentiles, 10, 10, 10, 10, 10, 10, 10, 10);
        checkValuesAreFlushed(percentiles);
    }
    @Test
    public void testConstant() {
        for (int i = 0; i < 100; i++) {
            percentiles.record(10);
        }
        checkPercentiles(percentiles, 10, 10, 10, 10, 10, 10, 10, 10);
        checkValuesAreFlushed(percentiles);
    }
    @Test
    public void testLinear() {
        for (int i = 0; i < 10001; i++) {
            percentiles.record(i);
        }
        checkPercentiles(percentiles, 0, 1000, 5000, 9000, 9900, 9990, 9999, 10000);
        checkValuesAreFlushed(percentiles);
    }
    @Test
    public void testMultipleSampleWindows() {
        Percentile<Integer> mypercentile = new Percentile<Integer>("test", 2, null, PERCENTILES);
        for (int i = 0; i < 10000; i++) {
            mypercentile.record(i);
        }
        // Large number filler so that our percentile hit an integer index.
        mypercentile.record(90000);
        checkPercentiles(mypercentile, 0, 1000, 5000, 9000, 9900, 9990, 9999, 90000);
        for (int i = 10000; i < 20000; i++) {
            mypercentile.record(i);
        }
        checkPercentiles(mypercentile, 0, 2000, 10000, 18000, 19800, 19980, 19998, 90000);
        for (int i = 20000; i < 30000; i++) {
            mypercentile.record(i);
        }
        // Previous filler is flushed from the sample queue. Refill.
        mypercentile.record(90000);
        checkPercentiles(mypercentile, 10000, 12000, 20000, 28000, 29800, 29980, 29998, 90000);
    }
    @Test
    public void testNullSampler() {
        int N = 10001;
        Percentile<Integer> mypercentile = new Percentile<Integer>("test", 1, null, PERCENTILES);
        for (int i = 0; i < N; i++) {
            mypercentile.record(i);
        }
        assertThat(mypercentile.samples.size(), is(N));
        checkPercentiles(mypercentile, 0, 1000, 5000, 9000, 9900, 9990, 9999, 10000);
        checkValuesAreFlushed(mypercentile);
    }
    @Test
    public void testReverseLinear() {
        // BUGFIX: record the sequence in *descending* order. The previous body was a
        // verbatim copy of testLinear (ascending), so insertion-order independence
        // was never actually exercised. The expected percentiles are unchanged.
        for (int i = 10000; i >= 0; i--) {
            percentiles.record(i);
        }
        checkPercentiles(percentiles, 0, 1000, 5000, 9000, 9900, 9990, 9999, 10000);
        checkValuesAreFlushed(percentiles);
    }
    @Test
    public void testShuffledSteps() {
        List<Integer> values = Lists.newArrayList();
        for (int i = 0; i < 1000; i++) {
            for (int j = 0; j < 10; j++) {
                values.add(i);
            }
        }
        values.add(2000);
        Collections.shuffle(values);
        for (int sample : values) {
            percentiles.record(sample);
        }
        checkPercentiles(percentiles, 0, 100, 500, 900, 990, 999, 999, 2000);
        checkValuesAreFlushed(percentiles);
    }
    @Test
    public void testNegativeValues() {
        List<Integer> values = Lists.newArrayList();
        for (int i = 0; i < 1000; i++) {
            for (int j = 0; j < 10; j++) {
                values.add(-1 * i);
            }
        }
        values.add(-2000);
        Collections.shuffle(values);
        for (int sample : values) {
            percentiles.record(sample);
        }
        checkPercentiles(percentiles, -2000, -900, -500, -100, -10, -1, 0, 0);
        checkValuesAreFlushed(percentiles);
    }
    @Test
    public void testPercentileInterpolates() {
        for (int i = 0; i < 9999; i++) {
            percentiles.record(i);
        }
        checkPercentiles(percentiles, 0, 999.8, 4999, 8998.2, 9898.02, 9988.002, 9997.0002, 9998);
        checkValuesAreFlushed(percentiles);
    }
    @Test
    public void testHonorsBufferLimit() {
        for (int i = 0; i < 1000; i++) {
            percentiles.record(0);
        }
        // Now fill the buffer with a constant.
        for (int i = 0; i < Percentile.MAX_BUFFER_SIZE; i++) {
            percentiles.record(1);
        }
        assertThat(percentiles.samples.size(), is(Percentile.MAX_BUFFER_SIZE));
        checkPercentiles(percentiles, 1, 1, 1, 1, 1, 1, 1, 1);
        checkValuesAreFlushed(percentiles);
    }
    // Asserts one expected value per entry of PERCENTILES, in order.
    private void checkPercentiles(Percentile<Integer> input_percentiles, double... values) {
        assertThat(values.length, is(PERCENTILES.length));
        for (int i = 0; i < values.length; i++) {
            checkPercentile(input_percentiles, PERCENTILES[i], values[i]);
        }
    }
    private void checkValuesAreFlushed(Percentile<Integer> input_percentiles, double... values) {
        // Check that the values were flushed.
        for (int i = 0; i < values.length; i++) {
            checkPercentile(input_percentiles, PERCENTILES[i], 0);
        }
        // BUGFIX: assert on the instance under test, not the shared 'percentiles'
        // field. The old code silently ignored locally-constructed Percentile
        // instances (e.g. in testNullSampler).
        assertThat(input_percentiles.samples.isEmpty(), is(true));
    }
    private void checkPercentile(Percentile<Integer> input_percentiles,
                                 double percentile, double value) {
        assertEquals(value, input_percentiles.getPercentile(percentile).sample(), EPSILON);
    }
}
| |
/*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client.ui.menubar;
import java.util.Iterator;
import java.util.Stack;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Element;
import com.google.gwt.user.client.Command;
import com.vaadin.client.ApplicationConnection;
import com.vaadin.client.Paintable;
import com.vaadin.client.TooltipInfo;
import com.vaadin.client.UIDL;
import com.vaadin.client.annotations.OnStateChange;
import com.vaadin.client.ui.AbstractComponentConnector;
import com.vaadin.client.ui.Icon;
import com.vaadin.client.ui.SimpleManagedLayout;
import com.vaadin.client.ui.VMenuBar;
import com.vaadin.shared.ui.ComponentStateUtil;
import com.vaadin.shared.ui.Connect;
import com.vaadin.shared.ui.menubar.MenuBarConstants;
import com.vaadin.shared.ui.menubar.MenuBarState;
@Connect(com.vaadin.ui.MenuBar.class)
// Client-side connector for com.vaadin.ui.MenuBar: translates server UIDL into the
// VMenuBar widget hierarchy and keeps widget state (enabled, tabindex) in sync.
public class MenuBarConnector extends AbstractComponentConnector
        implements Paintable, SimpleManagedLayout {
    /**
     * This method must be implemented to update the client-side component from
     * UIDL data received from server.
     *
     * This method is called when the page is loaded for the first time, and
     * every time UI changes in the component are received from the server.
     */
    @Override
    public void updateFromUIDL(UIDL uidl, ApplicationConnection client) {
        if (!isRealUpdate(uidl)) {
            return;
        }
        VMenuBar widget = getWidget();
        widget.htmlContentAllowed = uidl
                .hasAttribute(MenuBarConstants.HTML_CONTENT_ALLOWED);
        widget.openRootOnHover = uidl
                .getBooleanAttribute(MenuBarConstants.OPEN_ROOT_MENU_ON_HOWER);
        widget.enabled = isEnabled();
        // For future connections
        widget.client = client;
        widget.uidlId = uidl.getId();
        // Empty the menu every time it receives new information
        if (!widget.getItems().isEmpty()) {
            widget.clearItems();
        }
        UIDL options = uidl.getChildUIDL(0);
        // A defined width means root items may not all fit: build the "more"
        // overflow item that collects collapsed root items into a submenu.
        if (null != getState()
                && !ComponentStateUtil.isUndefinedWidth(getState())) {
            UIDL moreItemUIDL = options.getChildUIDL(0);
            StringBuilder itemHTML = new StringBuilder();
            if (moreItemUIDL.hasAttribute("icon")) {
                Icon icon = client
                        .getIcon(moreItemUIDL.getStringAttribute("icon"));
                if (icon != null) {
                    itemHTML.append(icon.getElement().getString());
                }
            }
            String moreItemText = moreItemUIDL.getStringAttribute("text");
            if ("".equals(moreItemText)) {
                moreItemText = "►";
            }
            itemHTML.append(moreItemText);
            widget.moreItem = GWT.create(VMenuBar.CustomMenuItem.class);
            widget.moreItem.setHTML(itemHTML.toString());
            widget.moreItem.setCommand(VMenuBar.emptyCommand);
            widget.collapsedRootItems = new VMenuBar(true, widget);
            widget.moreItem.setSubMenu(widget.collapsedRootItems);
            widget.moreItem.addStyleName(
                    widget.getStylePrimaryName() + "-more-menuitem");
        }
        UIDL uidlItems = uidl.getChildUIDL(1);
        // Depth-first walk over the nested item UIDL. The two stacks hold the
        // parent iterator/menu while a child submenu is being populated.
        Iterator<Object> itr = uidlItems.iterator();
        Stack<Iterator<Object>> iteratorStack = new Stack<>();
        Stack<VMenuBar> menuStack = new Stack<>();
        VMenuBar currentMenu = widget;
        while (itr.hasNext()) {
            UIDL item = (UIDL) itr.next();
            VMenuBar.CustomMenuItem currentItem = null;
            final int itemId = item.getIntAttribute("id");
            boolean itemHasCommand = item.hasAttribute("command");
            boolean itemIsCheckable = item
                    .hasAttribute(MenuBarConstants.ATTRIBUTE_CHECKED);
            String itemHTML = widget.buildItemHTML(item);
            Command cmd = null;
            // Separators never get a command; other items do when the server
            // registered a command or the item is checkable.
            if (!item.hasAttribute("separator")) {
                if (itemHasCommand || itemIsCheckable) {
                    // Construct a command that fires onMenuClick(int) with the
                    // item's id-number
                    cmd = () -> widget.hostReference.onMenuClick(itemId);
                }
            }
            currentItem = currentMenu.addItem(itemHTML, cmd);
            currentItem.updateFromUIDL(item, client);
            // Child UIDLs mean this item opens a submenu: descend into it.
            if (item.getChildCount() > 0) {
                menuStack.push(currentMenu);
                iteratorStack.push(itr);
                itr = item.iterator();
                currentMenu = new VMenuBar(true, currentMenu);
                client.getVTooltip().connectHandlersToWidget(currentMenu);
                // this is the top-level style that also propagates to items -
                // any item specific styles are set above in
                // currentItem.updateFromUIDL(item, client)
                if (ComponentStateUtil.hasStyles(getState())) {
                    for (String style : getState().styles) {
                        currentMenu.addStyleDependentName(style);
                    }
                }
                currentItem.setSubMenu(currentMenu);
            }
            // Submenu exhausted: fix up its check-column style and pop back to
            // the parent menu/iterator (possibly several levels at once).
            while (!itr.hasNext() && !iteratorStack.empty()) {
                boolean hasCheckableItem = false;
                for (VMenuBar.CustomMenuItem menuItem : currentMenu
                        .getItems()) {
                    hasCheckableItem = hasCheckableItem
                            || menuItem.isCheckable();
                }
                if (hasCheckableItem) {
                    currentMenu.addStyleDependentName("check-column");
                } else {
                    currentMenu.removeStyleDependentName("check-column");
                }
                itr = iteratorStack.pop();
                currentMenu = menuStack.pop();
            }
        }
        getLayoutManager().setNeedsHorizontalLayout(this);
    }
    @Override
    public VMenuBar getWidget() {
        return (VMenuBar) super.getWidget();
    }
    @Override
    public MenuBarState getState() {
        return (MenuBarState) super.getState();
    }
    @Override
    public void layout() {
        getWidget().iLayout();
    }
    // Resolves the tooltip for the hovered DOM element, preferring the menu
    // item's own tooltip over the component-level default.
    @Override
    public TooltipInfo getTooltipInfo(Element element) {
        TooltipInfo info = null;
        // Check content of widget to find tooltip for element
        if (element != getWidget().getElement()) {
            VMenuBar.CustomMenuItem item = getWidget()
                    .getMenuItemWithElement(element);
            if (item != null) {
                info = item.getTooltip();
            }
        }
        // Use default tooltip if nothing found from DOM tree
        if (info == null) {
            info = super.getTooltipInfo(element);
        }
        return info;
    }
    @Override
    public boolean hasTooltip() {
        /*
         * Item tooltips are not processed until updateFromUIDL, so we can't be
         * sure that there are no tooltips during onStateChange when this method
         * is used.
         */
        return true;
    }
    // Mirrors the shared-state enabled flag onto the ARIA attribute.
    @OnStateChange("enabled")
    void updateEnabled() {
        if (getState().enabled) {
            getWidget().getElement().removeAttribute("aria-disabled");
        } else {
            getWidget().getElement().setAttribute("aria-disabled", "true");
        }
    }
    // Mirrors the shared-state tab index onto the DOM tabindex attribute.
    @OnStateChange("tabIndex")
    void updateTabIndex() {
        getWidget().getElement().setAttribute("tabindex",
                String.valueOf(getState().tabIndex));
    }
}
| |
package python.time;
@org.python.Module(
    __doc__ =
        "This module provides various functions to manipulate time values.\n" +
        "\n" +
        "There are two standard representations of time. One is the number\n" +
        "of seconds since the Epoch, in UTC (a.k.a. GMT). It may be an integer\n" +
        "or a floating point number (to represent fractions of seconds).\n" +
        "The Epoch is system-defined; on Unix, it is generally January 1st, 1970.\n" +
        "The actual value can be retrieved by calling gmtime(0).\n" +
        "\n" +
        "The other representation is a tuple of 9 integers giving local time.\n" +
        "The tuple items are:\n" +
        " year (including century, e.g. 1998)\n" +
        " month (1-12)\n" +
        " day (1-31)\n" +
        " hours (0-23)\n" +
        " minutes (0-59)\n" +
        " seconds (0-59)\n" +
        " weekday (0-6, Monday is 0)\n" +
        " Julian day (day in the year, 1-366)\n" +
        " DST (Daylight Savings Time) flag (-1, 0 or 1)\n" +
        "If the DST flag is 0, the time is given in the regular time zone;\n" +
        "if it is 1, the time is given in the DST time zone;\n" +
        "if it is -1, mktime() should guess based on the date and time.\n" +
        "\n" +
        "Variables:\n" +
        "\n" +
        "timezone -- difference in seconds between UTC and local standard time\n" +
        "altzone -- difference in seconds between UTC and local DST time\n" +
        "daylight -- whether local time should reflect DST\n" +
        "tzname -- tuple of (standard time zone name, DST time zone name)\n" +
        "\n" +
        "Functions:\n" +
        "\n" +
        "time() -- return current time in seconds since the Epoch as a float\n" +
        "clock() -- return CPU time since process start as a float\n" +
        "sleep() -- delay for a number of seconds given as a float\n" +
        "gmtime() -- convert seconds since Epoch to UTC tuple\n" +
        "localtime() -- convert seconds since Epoch to local time tuple\n" +
        "asctime() -- convert time tuple to string\n" +
        "ctime() -- convert time in seconds to string\n" +
        "mktime() -- convert local time tuple to seconds since Epoch\n" +
        "strftime() -- convert time tuple to string according to format specification\n" +
        "strptime() -- parse string to time tuple according to format specification\n" +
        "tzset() -- change the local timezone"
)
public class __init__ extends org.python.types.Module {
    public __init__() {
        super();
        // NOTE(review): constructing the module object (re)initialises this
        // *static* reference point, so clock() measures elapsed time since the
        // most recent module instantiation — confirm the runtime only
        // instantiates the module once.
        vm_start_time = python.platform.__init__.impl.clock();
    }
    // Platform clock reading captured at module start; baseline for clock().
    // Assumed to be in nanoseconds (see the 1e9 divisor in clock()).
    private static long vm_start_time;
    public static org.python.Object _STRUCT_TM_ITEMS;
    @org.python.Attribute
    public static org.python.Object __file__ = new org.python.types.Str("python/common/python/time/__init__.java");
    @org.python.Attribute
    public static org.python.Object __loader__ = org.python.types.NoneType.NONE; // TODO
    @org.python.Attribute
    public static org.python.Object __name__ = new org.python.types.Str("time");
    @org.python.Attribute
    public static org.python.Object __package__ = new org.python.types.Str("");
    @org.python.Attribute
    public static org.python.Object __spec__ = org.python.types.NoneType.NONE; // TODO
    public static org.python.types.Int altzone;
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object asctime() {
        throw new org.python.exceptions.NotImplementedError("time.asctime() has not been implemented.");
    }
    @org.python.Method(
        __doc__ = "clock() -> floating point number\n" +
            "\n" +
            "Return the CPU time or real time since the start of the process or since\n" +
            "the first call to clock(). This has as much precision as the system\n" +
            "records.\n"
    )
    public static org.python.Object clock() {
        long current_time = python.platform.__init__.impl.clock() - vm_start_time;
        // thread time is in nanoseconds; convert to seconds.
        return new org.python.types.Float(current_time / 1000000000.0);
    }
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object ctime() {
        throw new org.python.exceptions.NotImplementedError("time.ctime() has not been implemented.");
    }
    public static org.python.types.Int daylight;
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object get_clock_info() {
        throw new org.python.exceptions.NotImplementedError("time.get_clock_info() has not been implemented.");
    }
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object gmtime() {
        throw new org.python.exceptions.NotImplementedError("time.gmtime() has not been implemented.");
    }
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object localtime() {
        throw new org.python.exceptions.NotImplementedError("time.localtime() has not been implemented.");
    }
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object mktime() {
        throw new org.python.exceptions.NotImplementedError("time.mktime() has not been implemented.");
    }
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object monotonic() {
        throw new org.python.exceptions.NotImplementedError("time.monotonic() has not been implemented.");
    }
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object perf_counter() {
        throw new org.python.exceptions.NotImplementedError("time.perf_counter() has not been implemented.");
    }
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object process_time() {
        throw new org.python.exceptions.NotImplementedError("time.process_time() has not been implemented.");
    }
    @org.python.Method(
        __doc__ = "sleep(seconds)\n" +
            "\n" +
            "Delay execution for a given number of seconds. The argument may be\n" +
            "a floating point number for subsecond precision.\n",
        args = {"seconds"}
    )
    public static org.python.Object sleep(org.python.Object seconds) {
        try {
            // Convert seconds to milliseconds for Thread.sleep(long).
            // Fix: cast to long rather than int — the previous int cast
            // silently overflowed for delays longer than ~24.8 days
            // (Integer.MAX_VALUE milliseconds).
            java.lang.Thread.sleep((long) (((org.python.types.Float) seconds.__float__()).value * 1000.0));
        } catch (ClassCastException e) {
            // __float__() did not yield a Float: mirror CPython's TypeError.
            throw new org.python.exceptions.TypeError("a float is required");
        } catch (InterruptedException ex) {
            // Re-assert the interrupt instead of swallowing it.
            Thread.currentThread().interrupt();
        }
        return org.python.types.NoneType.NONE;
    }
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object strftime() {
        throw new org.python.exceptions.NotImplementedError("time.strftime() has not been implemented.");
    }
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object strptime() {
        throw new org.python.exceptions.NotImplementedError("time.strptime() has not been implemented.");
    }
    // public static org.python.Object struct_time;
    @org.python.Method(
        __doc__ = "time() -> floating point number\n" +
            "\n" +
            "Return the current time in seconds since the Epoch.\n" +
            "Fractions of a second may be present if the system clock provides them.\n"
    )
    public static org.python.types.Float time() {
        // Millisecond wall-clock resolution, expressed as fractional seconds.
        return new org.python.types.Float(System.currentTimeMillis() / 1000.0);
    }
    public static org.python.types.Int timezone;
    public static org.python.types.Tuple tzname;
    @org.python.Method(
        __doc__ = ""
    )
    public static org.python.Object tzset() {
        throw new org.python.exceptions.NotImplementedError("time.tzset() has not been implemented.");
    }
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.view.rest;
import java.net.URI;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
import org.fudgemsg.FudgeMsg;
import com.opengamma.engine.resource.EngineResourceReference;
import com.opengamma.engine.view.ViewDefinition;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.client.ViewClientState;
import com.opengamma.engine.view.client.ViewResultMode;
import com.opengamma.engine.view.cycle.ViewCycle;
import com.opengamma.financial.livedata.rest.DataLiveDataInjectorResource;
import com.opengamma.financial.rest.AbstractRestfulJmsResultPublisher;
import com.opengamma.id.UniqueId;
import com.opengamma.transport.jaxrs.FudgeRest;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.fudgemsg.OpenGammaFudgeContext;
import com.opengamma.util.jms.JmsConnector;
/**
* RESTful resource for a view client.
*/
@Consumes(FudgeRest.MEDIA)
@Produces(FudgeRest.MEDIA)
public class DataViewClientResource extends AbstractRestfulJmsResultPublisher {
    // Every resource method calls updateLastAccessed() first so that an active
    // remote client keeps its server-side ViewClient from being expired.
    //CSOFF: just constants
    public static final String PATH_UNIQUE_ID = "id";
    public static final String PATH_USER = "user";
    public static final String PATH_STATE = "state";
    public static final String PATH_IS_ATTACHED = "isAttached";
    public static final String PATH_ATTACH_SEARCH = "attachSearch";
    public static final String PATH_ATTACH_DIRECT = "attachDirect";
    public static final String PATH_DETACH = "detach";
    public static final String PATH_LIVE_DATA_OVERRIDE_INJECTOR = "overrides";
    public static final String PATH_RESULT_MODE = "resultMode";
    public static final String PATH_FRAGMENT_RESULT_MODE = "fragmentResultMode";
    public static final String PATH_RESUME = "resume";
    public static final String PATH_PAUSE = "pause";
    public static final String PATH_COMPLETED = "completed";
    public static final String PATH_RESULT_AVAILABLE = "resultAvailable";
    public static final String PATH_LATEST_RESULT = "latestResult";
    public static final String PATH_VIEW_DEFINITION = "viewDefinition";
    public static final String PATH_VIEW_PROCESS = "viewProcess";
    public static final String PATH_LATEST_COMPILED_VIEW_DEFINITION = "latestCompiledViewDefinition";
    public static final String PATH_VIEW_CYCLE_ACCESS_SUPPORTED = "viewCycleAccessSupported";
    public static final String PATH_CREATE_LATEST_CYCLE_REFERENCE = "createLatestCycleReference";
    public static final String PATH_CREATE_CYCLE_REFERENCE = "createCycleReference";
    public static final String PATH_TRIGGER_CYCLE = "triggerCycle";
    public static final String PATH_SET_MINIMUM_LOG_MODE = "logMode";
    public static final String PATH_UPDATE_PERIOD = "updatePeriod";
    public static final String UPDATE_PERIOD_FIELD = "updatePeriod";
    public static final String VIEW_CYCLE_ACCESS_SUPPORTED_FIELD = "isViewCycleAccessSupported";
    public static final String PATH_VIEW_PROCESS_CONTEXT_MAP = "viewProcessContextMap";
    //CSON: just constants
    /** The wrapped engine view client that all endpoints delegate to. */
    private final ViewClient _viewClient;
    /** Manager used to expose view cycle references over REST. */
    private final DataEngineResourceManagerResource<ViewCycle> _viewCycleManagerResource;
    /**
     * Creates an instance.
     *
     * @param viewClient the view client to expose, not null
     * @param viewCycleManagerResource manager for created cycle references
     * @param jmsConnector JMS connector for streaming results, may be null (no streaming)
     * @param executor executor used by the result publisher
     */
    public DataViewClientResource(ViewClient viewClient, DataEngineResourceManagerResource<ViewCycle> viewCycleManagerResource, JmsConnector jmsConnector, ExecutorService executor) {
        super(createJmsResultPublisher(viewClient, jmsConnector), executor);
        _viewClient = viewClient;
        _viewCycleManagerResource = viewCycleManagerResource;
    }
    private static ViewClientJmsResultPublisher createJmsResultPublisher(ViewClient viewClient, JmsConnector jmsConnector) {
        if (jmsConnector == null) {
            // No JMS configured: result streaming is simply unavailable.
            return null;
        } else {
            return new ViewClientJmsResultPublisher(viewClient, OpenGammaFudgeContext.getInstance(), jmsConnector);
        }
    }
    /*package*/ ViewClient getViewClient() {
        return _viewClient;
    }
    //-------------------------------------------------------------------------
    @Override
    protected boolean isTerminated() {
        return getViewClient().getState() == ViewClientState.TERMINATED;
    }
    @Override
    protected void expire() {
        shutdown();
    }
    //-------------------------------------------------------------------------
    @GET
    @Path(PATH_UNIQUE_ID)
    public Response getUniqueId() {
        updateLastAccessed();
        return responseOkObject(getViewClient().getUniqueId());
    }
    @GET
    @Path(PATH_USER)
    public Response getUser() {
        updateLastAccessed();
        return responseOkObject(getViewClient().getUser());
    }
    @GET
    @Path(PATH_STATE)
    public Response getState() {
        updateLastAccessed();
        return responseOkObject(getViewClient().getState());
    }
    //-------------------------------------------------------------------------
    @GET
    @Path(PATH_IS_ATTACHED)
    public Response isAttached() {
        updateLastAccessed();
        return responseOk(getViewClient().isAttached());
    }
    @POST
    @Consumes(FudgeRest.MEDIA)
    @Path(PATH_ATTACH_SEARCH)
    public Response attachToViewProcess(AttachToViewProcessRequest request) {
        updateLastAccessed();
        ArgumentChecker.notNull(request.getViewDefinitionId(), "viewDefinitionId");
        ArgumentChecker.notNull(request.getExecutionOptions(), "executionOptions");
        ArgumentChecker.notNull(request.isNewBatchProcess(), "isNewBatchProcess");
        getViewClient().attachToViewProcess(request.getViewDefinitionId(), request.getExecutionOptions(), request.isNewBatchProcess());
        return responseOk();
    }
    @POST
    @Consumes(FudgeRest.MEDIA)
    @Path(PATH_ATTACH_DIRECT)
    public Response attachToViewProcess(UniqueId viewProcessId) {
        updateLastAccessed();
        ArgumentChecker.notNull(viewProcessId, "viewProcessId");
        getViewClient().attachToViewProcess(viewProcessId);
        return responseOk();
    }
    @POST
    @Path(PATH_DETACH)
    public Response detachFromViewProcess() {
        updateLastAccessed();
        getViewClient().detachFromViewProcess();
        return responseOk();
    }
    @Path(PATH_LIVE_DATA_OVERRIDE_INJECTOR)
    public DataLiveDataInjectorResource getLiveDataOverrideInjector() {
        updateLastAccessed();
        return new DataLiveDataInjectorResource(getViewClient().getLiveDataOverrideInjector());
    }
    @GET
    @Path(PATH_VIEW_DEFINITION)
    public Response getLatestViewDefinition() {
        // Fix: this was the only endpoint that did not record client activity,
        // so a client polling only the view definition could be expired.
        updateLastAccessed();
        ViewDefinition result = getViewClient().getLatestViewDefinition();
        return responseOkObject(result);
    }
    @Path(PATH_VIEW_PROCESS)
    public DataViewProcessResource getViewProcess() {
        updateLastAccessed();
        return new DataViewProcessResource(getViewClient().getViewProcess());
    }
    //-------------------------------------------------------------------------
    @PUT
    @Path(PATH_UPDATE_PERIOD)
    @Consumes(FudgeRest.MEDIA)
    public Response setUpdatePeriod(FudgeMsg msg) {
        updateLastAccessed();
        // NOTE(review): getLong returns a boxed Long; a message without the
        // field would NPE on unboxing here — assumed callers always send it.
        long periodMillis = msg.getLong(UPDATE_PERIOD_FIELD);
        getViewClient().setUpdatePeriod(periodMillis);
        return responseOk();
    }
    //-------------------------------------------------------------------------
    @GET
    @Path(PATH_RESULT_MODE)
    public Response getResultMode() {
        updateLastAccessed();
        return responseOkObject(getViewClient().getResultMode());
    }
    @PUT
    @Path(PATH_RESULT_MODE)
    public Response setResultMode(ViewResultMode viewResultMode) {
        updateLastAccessed();
        getViewClient().setResultMode(viewResultMode);
        return responseOk();
    }
    //-------------------------------------------------------------------------
    @PUT
    @Path(PATH_VIEW_PROCESS_CONTEXT_MAP)
    public Response setViewProcessContextMap(Map<String, String> viewProcessContextMap) {
        updateLastAccessed();
        getViewClient().setViewProcessContextMap(viewProcessContextMap);
        return responseOk();
    }
    //-------------------------------------------------------------------------
    @GET
    @Path(PATH_FRAGMENT_RESULT_MODE)
    public Response getFragmentResultMode() {
        updateLastAccessed();
        return responseOkObject(getViewClient().getFragmentResultMode());
    }
    @PUT
    @Path(PATH_FRAGMENT_RESULT_MODE)
    public Response setFragmentResultMode(ViewResultMode viewResultMode) {
        updateLastAccessed();
        getViewClient().setFragmentResultMode(viewResultMode);
        return responseOk();
    }
    //-------------------------------------------------------------------------
    @POST
    @Path(PATH_PAUSE)
    public Response pause() {
        updateLastAccessed();
        getViewClient().pause();
        return responseOk();
    }
    @POST
    @Path(PATH_RESUME)
    public Response resume() {
        updateLastAccessed();
        getViewClient().resume();
        return responseOk();
    }
    @POST
    @Path(PATH_TRIGGER_CYCLE)
    public Response triggerCycle() {
        updateLastAccessed();
        getViewClient().triggerCycle();
        return responseOk();
    }
    @GET
    @Path(PATH_COMPLETED)
    public Response isCompleted() {
        updateLastAccessed();
        return responseOk(getViewClient().isCompleted());
    }
    @GET
    @Path(PATH_RESULT_AVAILABLE)
    public Response isResultAvailable() {
        updateLastAccessed();
        return responseOk(getViewClient().isResultAvailable());
    }
    @GET
    @Path(PATH_LATEST_RESULT)
    public Response getLatestResult() {
        updateLastAccessed();
        return responseOkObject(getViewClient().getLatestResult());
    }
    @GET
    @Path(PATH_LATEST_COMPILED_VIEW_DEFINITION)
    public Response getLatestCompiledViewDefinition() {
        updateLastAccessed();
        return responseOkObject(getViewClient().getLatestCompiledViewDefinition());
    }
    @GET
    @Path(PATH_VIEW_CYCLE_ACCESS_SUPPORTED)
    public Response isViewCycleAccessSupported() {
        updateLastAccessed();
        return responseOk(getViewClient().isViewCycleAccessSupported());
    }
    @POST
    @Path(PATH_VIEW_CYCLE_ACCESS_SUPPORTED)
    public Response setViewCycleAccessSupported(FudgeMsg msg) {
        updateLastAccessed();
        // NOTE(review): boxed Boolean unboxing — NPE if the field is absent.
        boolean isViewCycleAccessSupported = msg.getBoolean(VIEW_CYCLE_ACCESS_SUPPORTED_FIELD);
        getViewClient().setViewCycleAccessSupported(isViewCycleAccessSupported);
        return responseOk();
    }
    @POST
    @Path(PATH_CREATE_LATEST_CYCLE_REFERENCE)
    public Response createLatestCycleReference() {
        updateLastAccessed();
        EngineResourceReference<? extends ViewCycle> reference = getViewClient().createLatestCycleReference();
        return getReferenceResponse(reference);
    }
    @POST
    @Path(PATH_CREATE_CYCLE_REFERENCE)
    @Consumes(FudgeRest.MEDIA)
    public Response createCycleReference(UniqueId cycleId) {
        updateLastAccessed();
        EngineResourceReference<? extends ViewCycle> reference = getViewClient().createCycleReference(cycleId);
        return getReferenceResponse(reference);
    }
    // Wraps a cycle reference for remote access; 204 when no cycle exists.
    private Response getReferenceResponse(EngineResourceReference<? extends ViewCycle> reference) {
        updateLastAccessed();
        if (reference == null) {
            return responseOkNoContent();
        }
        URI referenceUri = _viewCycleManagerResource.manageReference(reference);
        return responseCreated(referenceUri);
    }
    //-------------------------------------------------------------------------
    @POST
    @Path(PATH_SET_MINIMUM_LOG_MODE)
    @Consumes(FudgeRest.MEDIA)
    public Response setMinimumLogMode(SetMinimumLogModeRequest request) {
        updateLastAccessed();
        ArgumentChecker.notNull(request.getMinimumLogMode(), "minimumLogMode");
        ArgumentChecker.notNull(request.getTargets(), "targets");
        getViewClient().setMinimumLogMode(request.getMinimumLogMode(), request.getTargets());
        return responseOk();
    }
    //-------------------------------------------------------------------------
    @DELETE
    public void shutdown() {
        getViewClient().shutdown();
        stopResultStream();
    }
}
| |
/*L
* Copyright SAIC
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/i-spy/LICENSE.txt for details.
*/
package gov.nih.nci.ispy.web.xml;
import gov.nih.nci.caintegrator.analysis.messaging.ClassComparisonResultEntry;
import gov.nih.nci.caintegrator.application.service.annotation.GeneExprAnnotationService;
import gov.nih.nci.caintegrator.application.service.annotation.ReporterAnnotation;
import gov.nih.nci.caintegrator.dto.query.ClassComparisonQueryDTO;
import gov.nih.nci.caintegrator.dto.query.ClinicalQueryDTO;
import gov.nih.nci.caintegrator.service.findings.ClassComparisonFinding;
import gov.nih.nci.caintegrator.service.findings.Finding;
import gov.nih.nci.ispy.service.annotation.GeneExprAnnotationServiceFactory;
import java.math.BigDecimal;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
/**
* @author LandyR
* Feb 8, 2005
*
*/
public class ClassComparisonReport{
    /**
     * Builds the XML report for a class-comparison analysis finding.
     * All functionality is exposed via the static getReportXML overloads.
     */
    public ClassComparisonReport() {
        //super();
    }
    /**
     * Convenience overload: builds the report without extended annotations.
     */
    public static Document getReportXML(Finding finding, Map filterMapParams) {
        //changed the sig to include an allannotation flag, hence this wrapper method is born
        return getReportXML(finding, filterMapParams, false);
    }
    /* (non-Javadoc)
     * @see gov.nih.nci.nautilus.ui.report.ReportGenerator#getTemplate(gov.nih.nci.nautilus.resultset.Resultant, java.lang.String)
     */
    public static Document getReportXML(Finding finding, Map filterMapParams, boolean allAnnotations) {
        allAnnotations = true; //force this for now ISPY prerelease - RCL 3/2
        DecimalFormat resultFormat = new DecimalFormat("0.0000");
        DecimalFormat sciFormat = new DecimalFormat("0.00E0");
        DecimalFormat tmpsciFormat = new DecimalFormat("###0.0000#####################");
        /*
         * this is for filtering, we will want a p-value filter for CC
         */
        ArrayList filter_string = new ArrayList(); // hashmap of genes | reporters | cytobands
        String filter_type = "show"; // show | hide
        String filter_element = "none"; // none | gene | reporter | cytoband
        if(filterMapParams.containsKey("filter_string") && filterMapParams.get("filter_string") != null)
            filter_string = (ArrayList) filterMapParams.get("filter_string");
        if(filterMapParams.containsKey("filter_type") && filterMapParams.get("filter_type") != null)
            filter_type = (String) filterMapParams.get("filter_type");
        if(filterMapParams.containsKey("filter_element") && filterMapParams.get("filter_element") != null)
            filter_element = (String) filterMapParams.get("filter_element");
        String defaultV = "--";
        String delim = " | ";
        Document document = DocumentHelper.createDocument();
        Element report = document.addElement( "Report" );
        Element cell = null;
        Element data = null;
        Element dataRow = null;
        //add the atts
        report.addAttribute("reportType", "Class Comparison");
        //fudge these for now
        report.addAttribute("groupBy", "none");
        String queryName = "none";
        queryName = finding.getTaskId();
        //set the queryName to be unique for session/cache access
        report.addAttribute("queryName", queryName);
        report.addAttribute("sessionId", "the session id");
        report.addAttribute("creationTime", "right now");
        StringBuffer sb = new StringBuffer();
        int recordCount = 0;
        int totalSamples = 0;
        //TODO: instance of
        ClassComparisonFinding ccf = (ClassComparisonFinding) finding;
        //process the query details
        ArrayList<String> queryDetails = new ArrayList();
        ClassComparisonQueryDTO ccdto = (ClassComparisonQueryDTO)ccf.getQueryDTO();
        // Fix: the original dereferenced ccdto here, *before* the null check
        // below, which made that check useless and NPE-prone. Resolve the
        // reporter type only when the DTO and its platform DE are present.
        String reporterType = (ccdto != null && ccdto.getArrayPlatformDE() != null)
                ? ccdto.getArrayPlatformDE().getValueObject() : defaultV;
        if(ccdto != null) {
            String tmp = "";
            tmp = ccdto.getQueryName()!=null ? ccdto.getQueryName() : "";
            queryDetails.add("Query Name: " + tmp);
            tmp = ccdto.getArrayPlatformDE() != null ? ccdto.getArrayPlatformDE().getValue().toString() : "";
            queryDetails.add("Array Platform: " + tmp);
            tmp = "";
            List<ClinicalQueryDTO> grps = ccdto.getComparisonGroups()!=null ? ccdto.getComparisonGroups() : new ArrayList();
            Collection grs = new ArrayList();
            for(ClinicalQueryDTO cdto : grps) {
                if(cdto.getQueryName()!=null)
                    grs.add(cdto.getQueryName());
            }
            tmp += StringUtils.join(grs.toArray(), ", ") + " (baseline)";
            queryDetails.add("Groups: " + tmp);
            /*
            noHTMLString = noHTMLString.replaceAll("<", "{");
            noHTMLString = noHTMLString.replaceAll(">", "}");
            noHTMLString = noHTMLString.replaceAll(" ", " ");
            */
            tmp = ccdto.getExprFoldChangeDE() != null ? ccdto.getExprFoldChangeDE().getValue().toString() : "";
            queryDetails.add("Fold Change: " + tmp);
            //queryDetails.add("Institutions: " + ccdto.getInstitutionDEs());
            tmp = ccdto.getMultiGroupComparisonAdjustmentTypeDE()!=null ? ccdto.getMultiGroupComparisonAdjustmentTypeDE().getValue().toString() : "";
            queryDetails.add("Multi Group: " + tmp);
            tmp = ccdto.getStatisticalSignificanceDE()!=null ? ccdto.getStatisticalSignificanceDE().getValue().toString() : "";
            queryDetails.add("Stat Sig.: " + tmp);
            tmp = ccdto.getStatisticTypeDE()!=null ? ccdto.getStatisticTypeDE().getValue().toString() : "";
            queryDetails.add("Stat Type: " + tmp);
        }
        /*
        queryDetails.add("Analysis Result name: " + ccform.getAnalysisResultName());
        queryDetails.add("Array Platform: " + ccform.getArrayPlatform());
        queryDetails.add("Baseline group: " + ccform.getBaselineGroup());
        queryDetails.add("Comparison Groups: " + ccform.getSelectedGroups()[0].toString());
        queryDetails.add("Comparison Adjustment: " + ccform.getComparisonAdjustment());
        //queryDetails.add("Comp. Adj. Coll: " + ccform.getComparisonAdjustmentCollection());
        //queryDetails.add("Existing Groups: " + ccform.getExistingGroups());
        //queryDetails.add("Existing group list: " + ccform.getExistingGroupsList());
        //queryDetails.add("Fold Change: " + ccform.getFoldChange());
        queryDetails.add("Fold Change auto: " + ccform.getFoldChangeAuto());
        //queryDetails.add("Fold change auto list: " + ccform.getFoldChangeAutoList());
        //queryDetails.add("Fold change manual: " + ccform.getFoldChangeManual());
        queryDetails.add("Stastic: " + ccform.getStatistic());
        queryDetails.add("Stastical method: " + ccform.getStatisticalMethod());
        //queryDetails.add("Stastical method coll.: " + ccform.getStatisticalMethodCollection());
        queryDetails.add("Stastical significance: " + ccform.getStatisticalSignificance());
        */
        String qd = "";
        for(String q : queryDetails){
            qd += q + " ||| ";
        }
        if(ccf != null) {
            Element details = report.addElement("Query_details");
            cell = details.addElement("Data");
            cell.addText(qd);
            cell = null;
            Element headerRow = report.addElement("Row").addAttribute("name", "headerRow");
            cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
            data = cell.addElement("Data").addAttribute("type", "header").addText("Reporter");
            data = null;
            cell = null;
            cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
            data = cell.addElement("Data").addAttribute("type", "header").addText("Group Avg");
            data = null;
            cell = null;
            cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
            String isAdj = ccf.arePvaluesAdjusted() ? " (Adjusted) " : "";
            data = cell.addElement("Data").addAttribute("type", "header").addText("P-Value"+isAdj);
            data = null;
            cell = null;
            cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
            data = cell.addElement("Data").addAttribute("type", "header").addText("Fold Change");
            data = null;
            cell = null;
            cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header");
            data = cell.addElement("Data").addAttribute("type", "header").addText("Gene Symbol");
            data = null;
            cell = null;
            //starting annotations...get them only if allAnnotations == true
            if(allAnnotations) {
                cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "csv").addAttribute("group", "header");
                data = cell.addElement("Data").addAttribute("type", "header").addText("GenBank Acc");
                data = null;
                cell = null;
                cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "csv").addAttribute("group", "header");
                data = cell.addElement("Data").addAttribute("type", "header").addText("Locus link");
                data = null;
                cell = null;
                cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "csv").addAttribute("group", "header");
                data = cell.addElement("Data").addAttribute("type", "header").addText("GO Id");
                data = null;
                cell = null;
                cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "csv").addAttribute("group", "header");
                data = cell.addElement("Data").addAttribute("type", "header").addText("Pathways");
                data = null;
                cell = null;
            }
            /* done with the headerRow and SampleRow Elements, time to add data rows */
            /*
            Map<String,ReporterResultset> reporterResultsetMap = null;
            reporterResultsetMap = ccf.getReporterAnnotationsMap();
            */
            List<ClassComparisonResultEntry> classComparisonResultEntrys = ccf.getResultEntries();
            List<String> reporterIds = new ArrayList<String>();
            for (ClassComparisonResultEntry classComparisonResultEntry: classComparisonResultEntrys){
                if(classComparisonResultEntry.getReporterId() != null){
                    reporterIds.add(classComparisonResultEntry.getReporterId());
                }
            }
            Map reporterResultsetMap = null;
            try {
                GeneExprAnnotationService geService = GeneExprAnnotationServiceFactory.getInstance();
                reporterResultsetMap = geService.getAnnotationsMapForReporters(reporterIds);
            } catch (Exception e) {
                // NOTE(review): annotation lookup failure is deliberately
                // non-fatal (the report is still produced, without gene
                // annotations) — but this should be logged, not printed.
                e.printStackTrace();
            }
            /*
            //new stuff
            AnnotationHandler h = new AnnotationHandler();
            Map reporterResultsetMap = null;
            if(allAnnotations){
                //Map<String, ReporterAnnotations> reporterResultsetMap = null;
                try {
                    reporterResultsetMap = h.getAllAnnotationsFor(reporterIds);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            else {
                //Map<String, String> reporterResultsetMap = null;
                try {
                    reporterResultsetMap = h.getGeneSymbolsFor(reporterIds);
                } catch (Exception e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
            */
            /*
            //this looks like a failsafe for the old method
            if(reporterResultsetMap == null) {
                try {
                    reporterResultsetMap = GeneExprAnnotationService.getAnnotationsMapForReporters(reporterIds);
                }
                catch(Exception e){}
            }
            */
            for(ClassComparisonResultEntry ccre : ccf.getResultEntries()) {
                dataRow = report.addElement("Row").addAttribute("name", "dataRow");
                cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "reporter").addAttribute("group", "data");
                data = cell.addElement("Data").addAttribute("type", reporterType).addText(ccre.getReporterId());
                data = null;
                cell = null;
                cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
                data = cell.addElement("Data").addAttribute("type", "header").addText(resultFormat.format(ccre.getMeanGrp1()) + " / " + resultFormat.format(ccre.getMeanBaselineGrp()));
                data = null;
                cell = null;
                cell = dataRow.addElement("Cell").addAttribute("type", "pval").addAttribute("class", "data").addAttribute("group", "data");
                //String pv = (ccre.getPvalue() == null) ? String.valueOf(ccre.getPvalue()) : "N/A";
                String pv = defaultV;
                BigDecimal bigd;
                try {
                    bigd = new BigDecimal(ccre.getPvalue());
                    pv = bigd.toPlainString();
                }
                catch (Exception e) {
                    //missing value: keep the "--" placeholder
                }
                data = cell.addElement("Data").addAttribute("type", "header").addText(pv);
                data = null;
                cell = null;
                cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data");
                data = cell.addElement("Data").addAttribute("type", "header").addText(String.valueOf(resultFormat.format(ccre.getFoldChange())));
                data = null;
                cell = null;
                //if only showing genes
                if(!allAnnotations && reporterResultsetMap != null) {
                    String reporterId = ccre.getReporterId().toUpperCase().trim();
                    String genes = reporterResultsetMap.get(reporterId)!=null ? (String)reporterResultsetMap.get(reporterId) : defaultV;
                    cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "gene").addAttribute("group", "data");
                    data = cell.addElement("Data").addAttribute("type", "header").addText(genes);
                    data = null;
                    cell = null;
                }
                else {
                    //get the gene symbols for this reporter
                    //ccre.getReporterId()
                    String genes = defaultV;
                    //start annotations
                    String accIds = defaultV;
                    String llink = defaultV;
                    String go = defaultV;
                    String pw = defaultV;
                    if(reporterResultsetMap != null ){ // && reporterIds != null
                        //int count = 0;
                        String reporterId = ccre.getReporterId().toUpperCase().trim();
                        //ReporterResultset reporterResultset = reporterResultsetMap.get(reporterId);
                        ReporterAnnotation ra = (ReporterAnnotation) reporterResultsetMap.get(reporterId);
                        //Collection<String> geneSymbols = (Collection<String>)reporterResultset.getAssiciatedGeneSymbols();
                        if(ra!=null){
                            List geneSymbols = ra.getGeneSymbols();
                            //if(geneSymbols != null)
                            // genes = geneSymbols.toString();
                            if(geneSymbols != null){
                                genes = StringUtils.join(geneSymbols.toArray(), delim);
                            }
                            Collection<String> genBank_AccIDS = (Collection<String>)ra.getGenbankAccessions();
                            if(genBank_AccIDS != null){
                                accIds = StringUtils.join(genBank_AccIDS.toArray(), delim);
                            }
                            Collection<String> locusLinkIDs = (Collection<String>)ra.getLocusLinkIds();
                            if(locusLinkIDs != null){
                                llink = StringUtils.join(locusLinkIDs.toArray(), delim);
                            }
                            Collection<String> goIds = (Collection<String>)ra.getGOIds();
                            if(goIds != null){
                                go = StringUtils.join(goIds.toArray(), delim);
                            }
                            Collection<String> pathways = (Collection<String>)ra.getPathwayIds();
                            if(pathways != null){
                                pw = StringUtils.join(pathways.toArray(), delim);
                            }
                        }
                    }
                    cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "gene").addAttribute("group", "data");
                    data = cell.addElement("Data").addAttribute("type", "header").addText(genes);
                    data = null;
                    cell = null;
                    cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "csv").addAttribute("group", "data");
                    data = cell.addElement("Data").addAttribute("type", "header").addText(accIds);
                    data = null;
                    cell = null;
                    cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "csv").addAttribute("group", "data");
                    data = cell.addElement("Data").addAttribute("type", "header").addText(llink);
                    data = null;
                    cell = null;
                    cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "csv").addAttribute("group", "data");
                    data = cell.addElement("Data").addAttribute("type", "header").addText(go);
                    data = null;
                    cell = null;
                    cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "csv").addAttribute("group", "data");
                    data = cell.addElement("Data").addAttribute("type", "header").addText(pw);
                    data = null;
                    cell = null;
                }
            }
        }
        else {
            //TODO: handle this error
            // NOTE(review): sb is appended to here but never added to the
            // returned document, so the error text is silently discarded.
            sb.append("<br><Br>Class Comparison is empty<br>");
        }
        return document;
    }
}
| |
/*
* See LICENSE file in distribution for copyright and licensing information.
*/
package ioke.lang.parser;
import java.io.Reader;
import java.io.StringReader;
import java.io.IOException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
import java.util.HashSet;
import ioke.lang.IokeObject;
import ioke.lang.Message;
import ioke.lang.Runtime;
import ioke.lang.Dict;
import ioke.lang.Number;
import ioke.lang.Symbol;
import ioke.lang.IokeSystem;
import ioke.lang.exceptions.ControlFlow;
import static ioke.lang.parser.Operators.OpEntry;
import static ioke.lang.parser.Operators.OpArity;
import static ioke.lang.parser.Operators.DEFAULT_UNARY_OPERATORS;
import static ioke.lang.parser.Operators.DEFAULT_ONLY_UNARY_OPERATORS;
/**
*
* @author <a href="mailto:ola.bini@gmail.com">Ola Bini</a>
*/
public class IokeParser {
    // Runtime in which parsed messages are created; public so callers of the
    // parser can reach it.
    public final Runtime runtime;
    // Character source being parsed.
    final Reader reader;
    // Context and message used when signalling parse-error conditions.
    final IokeObject context;
    final IokeObject message;
    // Current chain context; parseMessageChain pushes/pops nested contexts.
    protected ChainContext top = new ChainContext(null);
    // Operator tables, populated by Operators.createOrGetOpTables in the
    // constructor.
    protected final Map<String, OpEntry> operatorTable = new HashMap<String, OpEntry>();
    protected final Map<String, OpArity> trinaryOperatorTable = new HashMap<String, OpArity>();
    protected final Map<String, OpEntry> invertedOperatorTable = new HashMap<String, OpEntry>();
    // Operators that may appear in unary position / are only ever unary.
    protected final Set<String> unaryOperators = DEFAULT_UNARY_OPERATORS;
    protected final Set<String> onlyUnaryOperators = DEFAULT_ONLY_UNARY_OPERATORS;
    /**
     * Creates a parser reading from the given reader. The context and
     * message are used for error reporting when a syntax condition is
     * signalled. The operator tables are filled in from Operators.
     */
    public IokeParser(Runtime runtime, Reader reader, IokeObject context, IokeObject message) throws ControlFlow {
        this.runtime = runtime;
        this.reader = reader;
        this.context = context;
        this.message = message;
        Operators.createOrGetOpTables(this);
    }
public IokeObject parseFully() throws IOException, ControlFlow {
IokeObject result = parseMessageChain();
return result;
}
    /**
     * Parses a complete chain of messages at the current nesting level.
     * A fresh ChainContext is pushed for the duration of the parse; any
     * operators still pending are popped afterwards (999999 acts as
     * "weaker than any real precedence"), and the assembled head message
     * is returned.
     */
    protected IokeObject parseMessageChain() throws IOException, ControlFlow {
        top = new ChainContext(top);
        while(parseMessage());
        top.popOperatorsTo(999999);
        IokeObject ret = top.pop();
        top = top.parent;
        return ret;
    }
protected List<Object> parseCommaSeparatedMessageChains() throws IOException, ControlFlow {
ArrayList<Object> chain = new ArrayList<Object>();
IokeObject curr = parseMessageChain();
while(curr != null) {
chain.add(curr);
readWhiteSpace();
int rr = peek();
if(rr == ',') {
read();
curr = parseMessageChain();
if(curr == null) {
fail("Expected expression following comma");
}
} else {
if(curr != null && Message.isTerminator(curr) && Message.next(curr) == null) {
chain.remove(chain.size()-1);
}
curr = null;
}
}
return chain;
}
    // Current source position, maintained by read(); lineNumber is 1-based,
    // currentCharacter is the column within the line.
    protected int lineNumber = 1;
    protected int currentCharacter = -1;
    // True right after a '\r' was read, so that a directly following '\n'
    // is not counted as a second line break.
    protected boolean skipLF = false;
    // Two-slot pushback buffer used by peek()/peek2(); -2 means "empty".
    protected int saved2 = -2;
    protected int saved = -2;
protected int read() throws IOException {
if(saved > -2) {
int x = saved;
saved = saved2;
saved2 = -2;
if(skipLF) {
skipLF = false;
if(x == '\n') {
return x;
}
}
currentCharacter++;
switch(x) {
case '\r':
skipLF = true;
case '\n': /* Fall through */
lineNumber++;
currentCharacter = 0;
}
return x;
}
int xx = reader.read();
if(skipLF) {
skipLF = false;
if(xx == '\n') {
return xx;
}
}
currentCharacter++;
switch(xx) {
case '\r':
skipLF = true;
case '\n': /* Fall through */
lineNumber++;
currentCharacter = 0;
}
return xx;
}
protected int peek() throws IOException {
if(saved == -2) {
if(saved2 != -2) {
saved = saved2;
saved2 = -2;
} else {
saved = reader.read();
}
}
return saved;
}
protected int peek2() throws IOException {
if(saved == -2) {
saved = reader.read();
}
if(saved2 == -2) {
saved2 = reader.read();
}
return saved2;
}
    /**
     * Parses at most one message from the input and adds it to the current
     * chain context. Returns true if more messages may follow in this
     * chain, false when the chain is done (EOF or a closing token that
     * belongs to an enclosing construct). Comments and whitespace loop back
     * around without producing a message.
     */
    protected boolean parseMessage() throws IOException, ControlFlow {
        int rr;
        while(true) {
            rr = peek();
            switch(rr) {
            case -1:
                // EOF: consume it and end the chain
                read();
                return false;
            case ',':
            case ')':
            case ']':
            case '}':
                // closers belong to the enclosing construct; leave unconsumed
                return false;
            case '(':
                read();
                parseEmptyMessageSend();
                return true;
            case '[':
                read();
                parseOpenCloseMessageSend(']', "[]");
                return true;
            case '{':
                read();
                parseOpenCloseMessageSend('}', "{}");
                return true;
            case '#':
                // '#' introduces several literal forms depending on the next char
                read();
                switch(peek()) {
                case '{':
                    parseSimpleOpenCloseMessageSend('}', "set");
                    return true;
                case '/':
                    parseRegexpLiteral('/');
                    return true;
                case '[':
                    parseText('[');
                    return true;
                case 'r':
                    parseRegexpLiteral('r');
                    return true;
                case '!':
                    // "#!" — comment to end of line
                    parseComment();
                    break;
                default:
                    // plain operator starting with '#'
                    parseOperatorChars('#');
                    return true;
                }
                break;
            case '"':
                read();
                parseText('"');
                return true;
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
                read();
                parseNumber(rr);
                return true;
            case '.':
                read();
                // ".." starts a range operator, a single "." is a terminator
                if((rr = peek()) == '.') {
                    parseRange();
                } else {
                    parseTerminator('.');
                }
                return true;
            case ';':
                read();
                parseComment();
                break;
            case ' ':
            case '\u0009':
            case '\u000b':
            case '\u000c':
                read();
                readWhiteSpace();
                break;
            case '\\':
                // a free-floating backslash is only legal as a line continuation
                read();
                if((rr = peek()) == '\n') {
                    read();
                    break;
                } else {
                    // fail signals a condition; presumably it does not return
                    // normally here, so the fall-through below is not reached
                    fail("Expected newline after free-floating escape character");
                }
            case '\r':
            case '\n':
                read();
                parseTerminator(rr);
                return true;
            case '+':
            case '-':
            case '*':
            case '%':
            case '<':
            case '>':
            case '!':
            case '?':
            case '~':
            case '&':
            case '|':
            case '^':
            case '$':
            case '=':
            case '@':
            case '\'':
            case '`':
            case '/':
                read();
                parseOperatorChars(rr);
                return true;
            case ':':
                read();
                // ":foo" is a regular message (keyword-ish), bare ':' an operator
                if(isLetter(rr = peek()) || isIDDigit(rr)) {
                    parseRegularMessageSend(':');
                } else {
                    parseOperatorChars(':');
                }
                return true;
            default:
                read();
                parseRegularMessageSend(rr);
                return true;
            }
        }
    }
    /**
     * Signals a "Condition Error Parser Syntax" condition describing a
     * parse error at the given line/character. expected and got are
     * optional (pass null to omit) and become cells on the condition, as do
     * the current file, line, character and a preformatted text cell.
     */
    protected void fail(int l, int c, String message, String expected, String got) throws ControlFlow {
        String file = ((IokeSystem)IokeObject.data(runtime.system)).currentFile();
        // look up Condition Error Parser Syntax and mimic it for this error
        final IokeObject condition = IokeObject.as(IokeObject.getCellChain(runtime.condition,
                                                                           this.message,
                                                                           this.context,
                                                                           "Error",
                                                                           "Parser",
                                                                           "Syntax"), this.context).mimic(this.message, this.context);
        condition.setCell("message", this.message);
        condition.setCell("context", this.context);
        condition.setCell("receiver", this.context);
        if(expected != null) {
            condition.setCell("expected", runtime.newText(expected));
        }
        if(got != null) {
            condition.setCell("got", runtime.newText(got));
        }
        condition.setCell("file", runtime.newText(file));
        condition.setCell("line", runtime.newNumber(l));
        condition.setCell("character", runtime.newNumber(c));
        condition.setCell("text", runtime.newText(file + ":" + l + ":" + c + ": " + message));
        runtime.errorCondition(condition);
    }
    /**
     * Signals a syntax error at the current position, with no
     * expected/got details.
     */
    protected void fail(String message) throws ControlFlow {
        fail(lineNumber, currentCharacter, message, null, null);
    }
    /**
     * Skips horizontal whitespace and then consumes exactly the given
     * character, signalling a syntax error (reported at the position before
     * the whitespace) if anything else is found.
     */
    protected void parseCharacter(int c) throws IOException, ControlFlow {
        int l = lineNumber;
        int cc = currentCharacter;
        readWhiteSpace();
        int rr = read();
        if(rr != c) {
            fail(l, cc, "Expected: '" + (char)c + "' got: " + charDesc(rr), "" + (char)c, charDesc(rr));
        }
    }
    /**
     * True if the named operator is being used in unary position: it must
     * be a known unary operator and either start the chain or directly
     * follow a terminator.
     */
    protected boolean isUnary(String name) {
        return unaryOperators.contains(name) && (top.head == null || Message.isTerminator(top.last));
    }
protected static int possibleOperatorPrecedence(String name) {
if(name.length() > 0) {
char first = name.charAt(0);
switch(first) {
case '|':
return 9;
case '^':
return 8;
case '&':
return 7;
case '<':
case '>':
return 5;
case '=':
case '!':
case '?':
case '~':
case '$':
return 6;
case '+':
case '-':
return 3;
case '*':
case '/':
case '%':
return 2;
}
}
return -1;
}
    /**
     * Decides how a just-parsed message participates in operator shuffling:
     * unary operators open a unary level; known binary operators pop
     * weaker-or-equal pending operators and open a regular level;
     * assignment-style (trinary table) operators capture the previous
     * message as their first argument; inverted operators open an inverted
     * level; names with a guessable precedence (possibleOperatorPrecedence)
     * are treated as regular operators; everything else is simply appended
     * to the chain.
     */
    protected void possibleOperator(IokeObject mx) throws ControlFlow {
        String name = Message.name(mx);
        if(isUnary(name) || onlyUnaryOperators.contains(name)) {
            top.add(mx);
            top.push(-1, mx, Level.Type.UNARY);
            return;
        }
        OpEntry op = operatorTable.get(name);
        if(op != null) {
            top.popOperatorsTo(op.precedence);
            top.add(mx);
            top.push(op.precedence, mx, Level.Type.REGULAR);
        } else {
            OpArity opa = trinaryOperatorTable.get(name);
            if(opa != null) {
                if(opa.arity == 2) {
                    // binary assignment form — the previous message becomes
                    // the first argument (the assignment target)
                    IokeObject last = top.prepareAssignmentMessage();
                    mx.getArguments().add(last);
                    top.add(mx);
                    // 13 is presumably the fixed assignment precedence — it
                    // matches no table entry here; confirm against Operators
                    top.push(13, mx, Level.Type.ASSIGNMENT);
                } else {
                    // other arity: takes only the target, opens no level
                    IokeObject last = top.prepareAssignmentMessage();
                    mx.getArguments().add(last);
                    top.add(mx);
                }
            } else {
                op = invertedOperatorTable.get(name);
                if(op != null) {
                    top.popOperatorsTo(op.precedence);
                    top.add(mx);
                    top.push(op.precedence, mx, Level.Type.INVERTED);
                } else {
                    int possible = possibleOperatorPrecedence(name);
                    if(possible != -1) {
                        top.popOperatorsTo(possible);
                        top.add(mx);
                        top.push(possible, mx, Level.Type.REGULAR);
                    } else {
                        top.add(mx);
                    }
                }
            }
        }
    }
protected void parseEmptyMessageSend() throws IOException, ControlFlow {
int l = lineNumber; int cc = currentCharacter-1;
List<Object> args = parseCommaSeparatedMessageChains();
parseCharacter(')');
Message m = new Message(runtime, "");
m.setLine(l);
m.setPosition(cc);
IokeObject mx = runtime.createMessage(m);
Message.setArguments(mx, args);
top.add(mx);
}
protected void parseOpenCloseMessageSend(char end, String name) throws IOException, ControlFlow {
int l = lineNumber; int cc = currentCharacter-1;
int rr = peek();
int r2 = peek2();
Message m = new Message(runtime, name);
m.setLine(l);
m.setPosition(cc);
IokeObject mx = runtime.createMessage(m);
if(rr == end && r2 == '(') {
read();
read();
List<Object> args = parseCommaSeparatedMessageChains();
parseCharacter(')');
Message.setArguments(mx, args);
} else {
List<Object> args = parseCommaSeparatedMessageChains();
parseCharacter(end);
Message.setArguments(mx, args);
}
top.add(mx);
}
protected void parseSimpleOpenCloseMessageSend(char end, String name) throws IOException, ControlFlow {
int l = lineNumber; int cc = currentCharacter-1;
read();
List<Object> args = parseCommaSeparatedMessageChains();
parseCharacter(end);
Message m = new Message(runtime, name);
m.setLine(l);
m.setPosition(cc);
IokeObject mx = runtime.createMessage(m);
Message.setArguments(mx, args);
top.add(mx);
}
protected void parseComment() throws IOException {
int rr;
while((rr = peek()) != '\n' && rr != '\r' && rr != -1) {
read();
}
}
    // Precomputed names for range operators ("..", "...", ...) so that
    // parseRange doesn't have to rebuild the common short ones; the index
    // equals the number of dots in the operator.
    protected final static String[] RANGES = {
        "",
        ".",
        "..",
        "...",
        "....",
        ".....",
        "......",
        ".......",
        "........",
        ".........",
        "..........",
        "...........",
        "............"
    };
protected void parseRange() throws IOException, ControlFlow {
int l = lineNumber; int cc = currentCharacter-1;
int count = 2;
read();
int rr;
while((rr = peek()) == '.') {
count++;
read();
}
String result = null;
if(count < 13) {
result = RANGES[count];
} else {
StringBuilder sb = new StringBuilder();
for(int i = 0; i<count; i++) {
sb.append('.');
}
result = sb.toString();
}
Message m = new Message(runtime, result);
m.setLine(l);
m.setPosition(cc);
IokeObject mx = runtime.createMessage(m);
if(rr == '(') {
read();
List<Object> args = parseCommaSeparatedMessageChains();
parseCharacter(')');
Message.setArguments(mx, args);
top.add(mx);
} else {
possibleOperator(mx);
}
}
    /**
     * Parses a terminator ('.', '\n' or '\r') and collapses any directly
     * following terminators into this single one. Pending operators are
     * popped first — except when the chain is still empty while an operator
     * message is open, which lets an operator take the next line's
     * expression as its argument. The terminator is added as a "." message.
     */
    protected void parseTerminator(int indicator) throws IOException, ControlFlow {
        int l = lineNumber; int cc = currentCharacter-1;
        int rr;
        int rr2;
        if(indicator == '\r') {
            // swallow the LF half of a CRLF pair
            rr = peek();
            if(rr == '\n') {
                read();
            }
        }
        while(true) {
            rr = peek();
            rr2 = peek2();
            // a '.' followed by another '.' is a range operator, not a
            // terminator, so it must not be consumed here
            if((rr == '.' && rr2 != '.') ||
               (rr == '\n')) {
                read();
            } else if(rr == '\r' && rr2 == '\n') {
                read(); read();
            } else {
                break;
            }
        }
        if(!(top.last == null && top.currentLevel.operatorMessage != null)) {
            top.popOperatorsTo(999999);
        }
        Message m = new Message(runtime, ".", null, true);
        m.setLine(l);
        m.setPosition(cc);
        top.add(runtime.createMessage(m));
    }
protected void readWhiteSpace() throws IOException {
int rr;
while((rr = peek()) == ' ' ||
rr == '\u0009' ||
rr == '\u000b' ||
rr == '\u000c') {
read();
}
}
    /**
     * Parses a regexp literal. indicator is '/' for the "#/.../flags" form
     * or 'r' for the "#r[...]flags" form; the indicator itself is consumed
     * here, and for the 'r' form the '[' is expected next. Interpolation
     * via #{...} switches the resulting message from internal:createRegexp
     * to internal:compositeRegexp, with the pieces and embedded chains as
     * arguments. Trailing flag characters (x, i, u, m, s) are collected
     * into a final string argument.
     */
    protected void parseRegexpLiteral(int indicator) throws IOException, ControlFlow {
        StringBuilder sb = new StringBuilder();
        boolean slash = indicator == '/';
        int l = lineNumber; int cc = currentCharacter-1;
        read();
        if(!slash) {
            parseCharacter('[');
        }
        int rr;
        String name = "internal:createRegexp";
        List<Object> args = new ArrayList<Object>();
        while(true) {
            switch(rr = peek()) {
            case -1:
                fail("Expected end of regular expression, found EOF");
                break;
            case '/':
                // closes the slash form; literal character in the 'r' form
                read();
                if(slash) {
                    args.add(sb.toString());
                    Message m = new Message(runtime, "internal:createRegexp");
                    m.setLine(l);
                    m.setPosition(cc);
                    IokeObject mm = runtime.createMessage(m);
                    if(!name.equals("internal:createRegexp")) {
                        Message.setName(mm, name);
                    }
                    Message.setArguments(mm, args);
                    // collect flag characters into a final argument
                    sb = new StringBuilder();
                    while(true) {
                        switch(rr = peek()) {
                        case 'x':
                        case 'i':
                        case 'u':
                        case 'm':
                        case 's':
                            read();
                            sb.append((char)rr);
                            break;
                        default:
                            args.add(sb.toString());
                            top.add(mm);
                            return;
                        }
                    }
                } else {
                    sb.append((char)rr);
                }
                break;
            case ']':
                // closes the 'r' form; literal character in the slash form
                read();
                if(!slash) {
                    args.add(sb.toString());
                    Message m = new Message(runtime, "internal:createRegexp");
                    m.setLine(l);
                    m.setPosition(cc);
                    IokeObject mm = runtime.createMessage(m);
                    if(!name.equals("internal:createRegexp")) {
                        Message.setName(mm, name);
                    }
                    Message.setArguments(mm, args);
                    // collect flag characters into a final argument
                    sb = new StringBuilder();
                    while(true) {
                        switch(rr = peek()) {
                        case 'x':
                        case 'i':
                        case 'u':
                        case 'm':
                        case 's':
                            read();
                            sb.append((char)rr);
                            break;
                        default:
                            args.add(sb.toString());
                            top.add(mm);
                            return;
                        }
                    }
                } else {
                    sb.append((char)rr);
                }
                break;
            case '#':
                read();
                if((rr = peek()) == '{') {
                    // interpolation: flush text so far, parse embedded chain
                    read();
                    args.add(sb.toString());
                    sb = new StringBuilder();
                    name = "internal:compositeRegexp";
                    args.add(parseMessageChain());
                    readWhiteSpace();
                    parseCharacter('}');
                } else {
                    sb.append((char)'#');
                }
                break;
            case '\\':
                read();
                parseRegexpEscape(sb);
                break;
            default:
                read();
                sb.append((char)rr);
                break;
            }
        }
    }
    /**
     * Parses a text literal. indicator is '"' for a double-quoted string
     * (already consumed by the caller) or '[' for the "#[...]" form (the
     * '[' is consumed here). Interpolation via #{...} switches the message
     * from internal:createText to internal:concatenateText; in that case
     * the plain-string pieces are wrapped in their own internal:createText
     * messages carrying the recorded line/column of each piece.
     */
    protected void parseText(int indicator) throws IOException, ControlFlow {
        StringBuilder sb = new StringBuilder();
        boolean dquote = indicator == '"';
        int l = lineNumber; int cc = currentCharacter-1;
        if(!dquote) {
            read();
        }
        int rr;
        String name = "internal:createText";
        List<Object> args = new ArrayList<Object>();
        // per-piece source positions, parallel to args when interpolating
        List<Integer> lines = new ArrayList<Integer>();
        List<Integer> cols = new ArrayList<Integer>();
        lines.add(l); cols.add(cc);
        while(true) {
            switch(rr = peek()) {
            case -1:
                fail("Expected end of text, found EOF");
                break;
            case '"':
                // closes the double-quoted form; literal in the '[' form
                read();
                if(dquote) {
                    args.add(sb.toString());
                    Message m = new Message(runtime, "internal:createText");
                    m.setLine(l);
                    m.setPosition(cc);
                    IokeObject mm = runtime.createMessage(m);
                    if(!name.equals("internal:createText")) {
                        // interpolated: wrap plain pieces as createText messages
                        for(int i = 0; i<args.size(); i++) {
                            Object o = args.get(i);
                            if(o instanceof String) {
                                Message mx = new Message(runtime, "internal:createText", o);
                                mx.setLine(lines.get(i));
                                mx.setPosition(cols.get(i));
                                IokeObject mmx = runtime.createMessage(mx);
                                args.set(i, mmx);
                            }
                        }
                        Message.setName(mm, name);
                    }
                    Message.setArguments(mm, args);
                    top.add(mm);
                    return;
                } else {
                    sb.append((char)rr);
                }
                break;
            case ']':
                // closes the '[' form; literal in the double-quoted form
                read();
                if(!dquote) {
                    args.add(sb.toString());
                    Message m = new Message(runtime, "internal:createText");
                    m.setLine(l);
                    m.setPosition(cc);
                    IokeObject mm = runtime.createMessage(m);
                    if(!name.equals("internal:createText")) {
                        for(int i = 0; i<args.size(); i++) {
                            Object o = args.get(i);
                            if(o instanceof String) {
                                Message mx = new Message(runtime, "internal:createText", o);
                                mx.setLine(lines.get(i));
                                mx.setPosition(cols.get(i));
                                IokeObject mmx = runtime.createMessage(mx);
                                args.set(i, mmx);
                            }
                        }
                        Message.setName(mm, name);
                    }
                    Message.setArguments(mm, args);
                    top.add(mm);
                    return;
                } else {
                    sb.append((char)rr);
                }
                break;
            case '#':
                read();
                if((rr = peek()) == '{') {
                    // interpolation: flush text so far and record positions
                    read();
                    args.add(sb.toString());
                    sb = new StringBuilder();
                    lines.add(l);
                    cols.add(cc);
                    lines.add(lineNumber);
                    cols.add(currentCharacter);
                    name = "internal:concatenateText";
                    args.add(parseMessageChain());
                    readWhiteSpace();
                    l = lineNumber; cc = currentCharacter;
                    parseCharacter('}');
                } else {
                    sb.append((char)'#');
                }
                break;
            case '\\':
                read();
                parseDoubleQuoteEscape(sb);
                break;
            default:
                read();
                sb.append((char)rr);
                break;
            }
        }
    }
    /**
     * Parses one escape sequence inside a regexp literal. The backslash is
     * preserved in the output (the regexp engine interprets it later);
     * this method only validates the escape and copies its characters.
     * Signals a syntax error for unknown escapes.
     */
    protected void parseRegexpEscape(StringBuilder sb) throws IOException, ControlFlow {
        sb.append('\\');
        int rr = peek();
        switch(rr) {
        case 'u':
            // \ uXXXX — exactly four hex digits required
            read();
            sb.append((char)rr);
            for(int i = 0; i < 4; i++) {
                rr = peek();
                if((rr >= '0' && rr <= '9') ||
                   (rr >= 'a' && rr <= 'f') ||
                   (rr >= 'A' && rr <= 'F')) {
                    read();
                    sb.append((char)rr);
                } else {
                    fail("Expected four hexadecimal characters in unicode escape - got: " + charDesc(rr));
                }
            }
            break;
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
            // octal escape: up to three digits, three only if the first is <= '3'
            read();
            sb.append((char)rr);
            if(rr <= '3') {
                rr = peek();
                if(rr >= '0' && rr <= '7') {
                    read();
                    sb.append((char)rr);
                    rr = peek();
                    if(rr >= '0' && rr <= '7') {
                        read();
                        sb.append((char)rr);
                    }
                }
            } else {
                rr = peek();
                if(rr >= '0' && rr <= '7') {
                    read();
                    sb.append((char)rr);
                }
            }
            break;
        case 't':
        case 'n':
        case 'f':
        case 'r':
        case '/':
        case '\\':
        case '\n':
        case '#':
        case 'A':
        case 'd':
        case 'D':
        case 's':
        case 'S':
        case 'w':
        case 'W':
        case 'b':
        case 'B':
        case 'z':
        case 'Z':
        case '<':
        case '>':
        case 'G':
        case 'p':
        case 'P':
        case '{':
        case '}':
        case '[':
        case ']':
        case '*':
        case '(':
        case ')':
        case '$':
        case '^':
        case '+':
        case '?':
        case '.':
        case '|':
            // single-character escapes accepted verbatim
            read();
            sb.append((char)rr);
            break;
        case '\r':
            // escaped CRLF keeps both characters
            read();
            sb.append((char)rr);
            if((rr = peek()) == '\n') {
                read();
                sb.append((char)rr);
            }
            break;
        default:
            fail("Undefined regular expression escape character: " + charDesc(rr));
            break;
        }
    }
    /**
     * Parses one escape sequence inside a text literal. As with regexp
     * escapes, the backslash is kept in the buffer and interpreted later;
     * this method only validates and copies. Signals a syntax error for
     * unknown escapes.
     */
    protected void parseDoubleQuoteEscape(StringBuilder sb) throws IOException, ControlFlow {
        sb.append('\\');
        int rr = peek();
        switch(rr) {
        case 'u':
            // \ uXXXX — exactly four hex digits required
            read();
            sb.append((char)rr);
            for(int i = 0; i < 4; i++) {
                rr = peek();
                if((rr >= '0' && rr <= '9') ||
                   (rr >= 'a' && rr <= 'f') ||
                   (rr >= 'A' && rr <= 'F')) {
                    read();
                    sb.append((char)rr);
                } else {
                    fail("Expected four hexadecimal characters in unicode escape - got: " + charDesc(rr));
                }
            }
            break;
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
            // octal escape: up to three digits, three only if the first is <= '3'
            read();
            sb.append((char)rr);
            if(rr <= '3') {
                rr = peek();
                if(rr >= '0' && rr <= '7') {
                    read();
                    sb.append((char)rr);
                    rr = peek();
                    if(rr >= '0' && rr <= '7') {
                        read();
                        sb.append((char)rr);
                    }
                }
            } else {
                rr = peek();
                if(rr >= '0' && rr <= '7') {
                    read();
                    sb.append((char)rr);
                }
            }
            break;
        case 'b':
        case 't':
        case 'n':
        case 'f':
        case 'r':
        case '"':
        case ']':
        case '\\':
        case '\n':
        case '#':
        case 'e':
            // single-character escapes accepted verbatim
            read();
            sb.append((char)rr);
            break;
        case '\r':
            // escaped CRLF keeps both characters
            read();
            sb.append((char)rr);
            if((rr = peek()) == '\n') {
                read();
                sb.append((char)rr);
            }
            break;
        default:
            fail("Undefined text escape character: " + charDesc(rr));
            break;
        }
    }
    /**
     * Parses an operator message starting with the given character,
     * greedily collecting further operator characters. A directly attached
     * "(" introduces an explicit argument list; otherwise the finished
     * message goes through operator handling.
     */
    protected void parseOperatorChars(int indicator) throws IOException, ControlFlow {
        int l = lineNumber; int cc = currentCharacter-1;
        StringBuilder sb = new StringBuilder();
        sb.append((char)indicator);
        int rr;
        while(true) {
            rr = peek();
            switch(rr) {
            case '+':
            case '-':
            case '*':
            case '%':
            case '<':
            case '>':
            case '!':
            case '?':
            case '~':
            case '&':
            case '|':
            case '^':
            case '$':
            case '=':
            case '@':
            case '\'':
            case '`':
            case ':':
            case '#':
                read();
                sb.append((char)rr);
                break;
            case '/':
                // '/' may extend any operator except one starting with '#' —
                // presumably to keep "#/" free for regexp literals; confirm
                if(indicator != '#') {
                    read();
                    sb.append((char)rr);
                    break;
                }
                // FALL THROUGH
            default:
                Message m = new Message(runtime, sb.toString());
                m.setLine(l);
                m.setPosition(cc);
                IokeObject mx = runtime.createMessage(m);
                if(rr == '(') {
                    read();
                    List<Object> args = parseCommaSeparatedMessageChains();
                    parseCharacter(')');
                    Message.setArguments(mx, args);
                    top.add(mx);
                } else {
                    possibleOperator(mx);
                }
                return;
            }
        }
    }
protected void parseNumber(int indicator) throws IOException, ControlFlow {
int l = lineNumber; int cc = currentCharacter-1;
boolean decimal = false;
StringBuilder sb = new StringBuilder();
sb.append((char)indicator);
int rr = -1;
if(indicator == '0') {
rr = peek();
if(rr == 'x' || rr == 'X') {
read();
sb.append((char)rr);
rr = peek();
if((rr >= '0' && rr <= '9') ||
(rr >= 'a' && rr <= 'f') ||
(rr >= 'A' && rr <= 'F')) {
read();
sb.append((char)rr);
rr = peek();
while((rr >= '0' && rr <= '9') ||
(rr >= 'a' && rr <= 'f') ||
(rr >= 'A' && rr <= 'F')) {
read();
sb.append((char)rr);
rr = peek();
}
} else {
fail("Expected at least one hexadecimal characters in hexadecimal number literal - got: " + charDesc(rr));
}
} else {
int r2 = peek2();
if(rr == '.' && (r2 >= '0' && r2 <= '9')) {
decimal = true;
sb.append((char)rr);
sb.append((char)r2);
read(); read();
while((rr = peek()) >= '0' && rr <= '9') {
read();
sb.append((char)rr);
}
if(rr == 'e' || rr == 'E') {
read();
sb.append((char)rr);
if((rr = peek()) == '-' || rr == '+') {
read();
sb.append((char)rr);
rr = peek();
}
if(rr >= '0' && rr <= '9') {
read();
sb.append((char)rr);
while((rr = peek()) >= '0' && rr <= '9') {
read();
sb.append((char)rr);
}
} else {
fail("Expected at least one decimal character following exponent specifier in number literal - got: " + charDesc(rr));
}
}
}
}
} else {
while((rr = peek()) >= '0' && rr <= '9') {
read();
sb.append((char)rr);
}
int r2 = peek2();
if(rr == '.' && r2 >= '0' && r2 <= '9') {
decimal = true;
sb.append((char)rr);
sb.append((char)r2);
read(); read();
while((rr = peek()) >= '0' && rr <= '9') {
read();
sb.append((char)rr);
}
if(rr == 'e' || rr == 'E') {
read();
sb.append((char)rr);
if((rr = peek()) == '-' || rr == '+') {
read();
sb.append((char)rr);
rr = peek();
}
if(rr >= '0' && rr <= '9') {
read();
sb.append((char)rr);
while((rr = peek()) >= '0' && rr <= '9') {
read();
sb.append((char)rr);
}
} else {
fail("Expected at least one decimal character following exponent specifier in number literal - got: " + charDesc(rr));
}
}
} else if(rr == 'e' || rr == 'E') {
decimal = true;
read();
sb.append((char)rr);
if((rr = peek()) == '-' || rr == '+') {
read();
sb.append((char)rr);
rr = peek();
}
if(rr >= '0' && rr <= '9') {
read();
sb.append((char)rr);
while((rr = peek()) >= '0' && rr <= '9') {
read();
sb.append((char)rr);
}
} else {
fail("Expected at least one decimal character following exponent specifier in number literal - got: " + charDesc(rr));
}
}
}
// TODO: add unit specifier here
Message m = decimal ? new Message(runtime, "internal:createDecimal", sb.toString()) : new Message(runtime, "internal:createNumber", sb.toString());
m.setLine(l);
m.setPosition(cc);
top.add(runtime.createMessage(m));
}
protected void parseRegularMessageSend(int indicator) throws IOException, ControlFlow {
int l = lineNumber; int cc = currentCharacter-1;
StringBuilder sb = new StringBuilder();
sb.append((char)indicator);
int rr = -1;
while(isLetter(rr = peek()) || isIDDigit(rr) || rr == ':' || rr == '!' || rr == '?' || rr == '$') {
read();
sb.append((char)rr);
}
Message m = new Message(runtime, sb.toString());
m.setLine(l);
m.setPosition(cc);
IokeObject mx = runtime.createMessage(m);
if(rr == '(') {
read();
List<Object> args = parseCommaSeparatedMessageChains();
parseCharacter(')');
Message.setArguments(mx, args);
top.add(mx);
} else {
possibleOperator(mx);
}
}
protected boolean isLetter(int c) {
return ((c>='A' && c<='Z') ||
c=='_' ||
(c>='a' && c<='z') ||
(c>='\u00C0' && c<='\u00D6') ||
(c>='\u00D8' && c<='\u00F6') ||
(c>='\u00F8' && c<='\u1FFF') ||
(c>='\u2200' && c<='\u22FF') ||
(c>='\u27C0' && c<='\u27EF') ||
(c>='\u2980' && c<='\u2AFF') ||
(c>='\u3040' && c<='\u318F') ||
(c>='\u3300' && c<='\u337F') ||
(c>='\u3400' && c<='\u3D2D') ||
(c>='\u4E00' && c<='\u9FFF') ||
(c>='\uF900' && c<='\uFAFF'));
}
protected boolean isIDDigit(int c) {
return ((c>='0' && c<='9') ||
(c>='\u0660' && c<='\u0669') ||
(c>='\u06F0' && c<='\u06F9') ||
(c>='\u0966' && c<='\u096F') ||
(c>='\u09E6' && c<='\u09EF') ||
(c>='\u0A66' && c<='\u0A6F') ||
(c>='\u0AE6' && c<='\u0AEF') ||
(c>='\u0B66' && c<='\u0B6F') ||
(c>='\u0BE7' && c<='\u0BEF') ||
(c>='\u0C66' && c<='\u0C6F') ||
(c>='\u0CE6' && c<='\u0CEF') ||
(c>='\u0D66' && c<='\u0D6F') ||
(c>='\u0E50' && c<='\u0E59') ||
(c>='\u0ED0' && c<='\u0ED9') ||
(c>='\u1040' && c<='\u1049'));
}
protected static String charDesc(int c) {
if(c == -1) {
return "EOF";
} else if(c == 9) {
return "TAB";
} else if(c == 10 || c == 13) {
return "EOL";
} else {
return "'" + (char)c + "'";
}
}
}
| |
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.autoconfigure.endpoint;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.actuate.audit.AuditEventRepository;
import org.springframework.boot.actuate.autoconfigure.ManagementContextConfiguration;
import org.springframework.boot.actuate.autoconfigure.health.HealthMvcEndpointProperties;
import org.springframework.boot.actuate.autoconfigure.web.ManagementServerProperties;
import org.springframework.boot.actuate.condition.ConditionalOnEnabledEndpoint;
import org.springframework.boot.actuate.endpoint.Endpoint;
import org.springframework.boot.actuate.endpoint.EnvironmentEndpoint;
import org.springframework.boot.actuate.endpoint.HealthEndpoint;
import org.springframework.boot.actuate.endpoint.LoggersEndpoint;
import org.springframework.boot.actuate.endpoint.MetricsEndpoint;
import org.springframework.boot.actuate.endpoint.ShutdownEndpoint;
import org.springframework.boot.actuate.endpoint.mvc.AuditEventsMvcEndpoint;
import org.springframework.boot.actuate.endpoint.mvc.EndpointHandlerMapping;
import org.springframework.boot.actuate.endpoint.mvc.EndpointHandlerMappingCustomizer;
import org.springframework.boot.actuate.endpoint.mvc.EnvironmentMvcEndpoint;
import org.springframework.boot.actuate.endpoint.mvc.HealthMvcEndpoint;
import org.springframework.boot.actuate.endpoint.mvc.HeapdumpMvcEndpoint;
import org.springframework.boot.actuate.endpoint.mvc.LogFileMvcEndpoint;
import org.springframework.boot.actuate.endpoint.mvc.LoggersMvcEndpoint;
import org.springframework.boot.actuate.endpoint.mvc.MetricsMvcEndpoint;
import org.springframework.boot.actuate.endpoint.mvc.MvcEndpoint;
import org.springframework.boot.actuate.endpoint.mvc.MvcEndpointSecurityInterceptor;
import org.springframework.boot.actuate.endpoint.mvc.MvcEndpoints;
import org.springframework.boot.actuate.endpoint.mvc.ShutdownMvcEndpoint;
import org.springframework.boot.autoconfigure.condition.ConditionMessage;
import org.springframework.boot.autoconfigure.condition.ConditionOutcome;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.SpringBootCondition;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.context.annotation.Conditional;
import org.springframework.core.env.Environment;
import org.springframework.core.type.AnnotatedTypeMetadata;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.cors.CorsConfiguration;
/**
* Configuration to expose {@link Endpoint} instances over Spring MVC.
*
* @author Dave Syer
* @author Ben Hale
* @author Vedran Pavic
* @author Madhura Bhave
* @since 2.0.0
*/
@ManagementContextConfiguration
@EnableConfigurationProperties({ HealthMvcEndpointProperties.class,
EndpointCorsProperties.class })
public class EndpointWebMvcManagementContextConfiguration {
private final HealthMvcEndpointProperties healthMvcEndpointProperties;
private final ManagementServerProperties managementServerProperties;
private final EndpointCorsProperties corsProperties;
private final List<EndpointHandlerMappingCustomizer> mappingCustomizers;
public EndpointWebMvcManagementContextConfiguration(
HealthMvcEndpointProperties healthMvcEndpointProperties,
ManagementServerProperties managementServerProperties,
EndpointCorsProperties corsProperties,
ObjectProvider<List<EndpointHandlerMappingCustomizer>> mappingCustomizers) {
this.healthMvcEndpointProperties = healthMvcEndpointProperties;
this.managementServerProperties = managementServerProperties;
this.corsProperties = corsProperties;
List<EndpointHandlerMappingCustomizer> providedCustomizers = mappingCustomizers
.getIfAvailable();
this.mappingCustomizers = providedCustomizers == null
? Collections.<EndpointHandlerMappingCustomizer>emptyList()
: providedCustomizers;
}
@Bean
@ConditionalOnMissingBean
public EndpointHandlerMapping endpointHandlerMapping() {
Set<MvcEndpoint> endpoints = mvcEndpoints().getEndpoints();
CorsConfiguration corsConfiguration = getCorsConfiguration(this.corsProperties);
EndpointHandlerMapping mapping = new EndpointHandlerMapping(endpoints,
corsConfiguration);
mapping.setPrefix(this.managementServerProperties.getContextPath());
MvcEndpointSecurityInterceptor securityInterceptor = new MvcEndpointSecurityInterceptor(
this.managementServerProperties.getSecurity().isEnabled(),
this.managementServerProperties.getSecurity().getRoles());
mapping.setSecurityInterceptor(securityInterceptor);
for (EndpointHandlerMappingCustomizer customizer : this.mappingCustomizers) {
customizer.customize(mapping);
}
return mapping;
}
private CorsConfiguration getCorsConfiguration(EndpointCorsProperties properties) {
if (CollectionUtils.isEmpty(properties.getAllowedOrigins())) {
return null;
}
CorsConfiguration configuration = new CorsConfiguration();
configuration.setAllowedOrigins(properties.getAllowedOrigins());
if (!CollectionUtils.isEmpty(properties.getAllowedHeaders())) {
configuration.setAllowedHeaders(properties.getAllowedHeaders());
}
if (!CollectionUtils.isEmpty(properties.getAllowedMethods())) {
configuration.setAllowedMethods(properties.getAllowedMethods());
}
if (!CollectionUtils.isEmpty(properties.getExposedHeaders())) {
configuration.setExposedHeaders(properties.getExposedHeaders());
}
if (properties.getMaxAge() != null) {
configuration.setMaxAge(properties.getMaxAge());
}
if (properties.getAllowCredentials() != null) {
configuration.setAllowCredentials(properties.getAllowCredentials());
}
return configuration;
}
@Bean
@ConditionalOnMissingBean
public MvcEndpoints mvcEndpoints() {
return new MvcEndpoints();
}
@Bean
@ConditionalOnBean(EnvironmentEndpoint.class)
@ConditionalOnEnabledEndpoint("env")
public EnvironmentMvcEndpoint environmentMvcEndpoint(EnvironmentEndpoint delegate) {
return new EnvironmentMvcEndpoint(delegate);
}
@Bean
@ConditionalOnMissingBean
@ConditionalOnEnabledEndpoint("heapdump")
public HeapdumpMvcEndpoint heapdumpMvcEndpoint() {
return new HeapdumpMvcEndpoint();
}
@Bean
@ConditionalOnBean(HealthEndpoint.class)
@ConditionalOnMissingBean
@ConditionalOnEnabledEndpoint("health")
public HealthMvcEndpoint healthMvcEndpoint(HealthEndpoint delegate,
ManagementServerProperties managementServerProperties) {
HealthMvcEndpoint healthMvcEndpoint = new HealthMvcEndpoint(delegate,
this.managementServerProperties.getSecurity().isEnabled(),
managementServerProperties.getSecurity().getRoles());
if (this.healthMvcEndpointProperties.getMapping() != null) {
healthMvcEndpoint
.addStatusMapping(this.healthMvcEndpointProperties.getMapping());
}
return healthMvcEndpoint;
}
@Bean
@ConditionalOnBean(LoggersEndpoint.class)
@ConditionalOnEnabledEndpoint("loggers")
public LoggersMvcEndpoint loggersMvcEndpoint(LoggersEndpoint delegate) {
return new LoggersMvcEndpoint(delegate);
}
@Bean
@ConditionalOnBean(MetricsEndpoint.class)
@ConditionalOnEnabledEndpoint("metrics")
public MetricsMvcEndpoint metricsMvcEndpoint(MetricsEndpoint delegate) {
return new MetricsMvcEndpoint(delegate);
}
@Bean
@ConditionalOnEnabledEndpoint("logfile")
@Conditional(LogFileCondition.class)
public LogFileMvcEndpoint logfileMvcEndpoint() {
return new LogFileMvcEndpoint();
}
@Bean
@ConditionalOnBean(ShutdownEndpoint.class)
@ConditionalOnEnabledEndpoint(value = "shutdown", enabledByDefault = false)
public ShutdownMvcEndpoint shutdownMvcEndpoint(ShutdownEndpoint delegate) {
return new ShutdownMvcEndpoint(delegate);
}
/**
 * Creates the {@link AuditEventsMvcEndpoint} bean when an
 * {@link AuditEventRepository} is available and the {@code auditevents}
 * endpoint is enabled.
 */
@Bean
@ConditionalOnBean(AuditEventRepository.class)
@ConditionalOnEnabledEndpoint("auditevents")
public AuditEventsMvcEndpoint auditEventMvcEndpoint(
        AuditEventRepository auditEventRepository) {
    AuditEventsMvcEndpoint endpoint = new AuditEventsMvcEndpoint(auditEventRepository);
    return endpoint;
}
/**
 * Condition that matches when a log file location can be determined, either
 * from the standard {@code logging.file}/{@code logging.path} properties or
 * from {@code endpoints.logfile.external-file}.
 */
private static class LogFileCondition extends SpringBootCondition {

    @Override
    public ConditionOutcome getMatchOutcome(ConditionContext context,
            AnnotatedTypeMetadata metadata) {
        Environment environment = context.getEnvironment();
        ConditionMessage.Builder message = ConditionMessage.forCondition("Log File");
        // Check the standard logging properties first, then fall back to the
        // endpoint-specific external-file override.
        String file = environment.resolvePlaceholders("${logging.file:}");
        if (StringUtils.hasText(file)) {
            return ConditionOutcome.match(message.found("logging.file").items(file));
        }
        String path = environment.resolvePlaceholders("${logging.path:}");
        if (StringUtils.hasText(path)) {
            return ConditionOutcome.match(message.found("logging.path").items(path));
        }
        String externalFile = environment.getProperty("endpoints.logfile.external-file");
        if (StringUtils.hasText(externalFile)) {
            return ConditionOutcome.match(
                    message.found("endpoints.logfile.external-file").items(externalFile));
        }
        return ConditionOutcome.noMatch(message.didNotFind("logging file").atAll());
    }

}
}
| |
/*
* Copyright 2014 The gRPC Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.grpc.internal;
import static com.google.common.base.Charsets.UTF_8;
import com.google.common.base.Preconditions;
import io.grpc.KnownLength;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
/**
 * Utility methods for creating {@link ReadableBuffer} instances.
 */
public final class ReadableBuffers {
  private static final ReadableBuffer EMPTY_BUFFER = new ByteArrayWrapper(new byte[0]);

  /**
   * Returns an empty {@link ReadableBuffer} instance.
   */
  public static ReadableBuffer empty() {
    return EMPTY_BUFFER;
  }

  /**
   * Shortcut for {@code wrap(bytes, 0, bytes.length)}.
   */
  public static ReadableBuffer wrap(byte[] bytes) {
    return new ByteArrayWrapper(bytes, 0, bytes.length);
  }

  /**
   * Creates a new {@link ReadableBuffer} that is backed by the given byte array.
   *
   * @param bytes the byte array being wrapped.
   * @param offset the starting offset for the buffer within the byte array.
   * @param length the length of the buffer from the {@code offset} index.
   */
  public static ReadableBuffer wrap(byte[] bytes, int offset, int length) {
    return new ByteArrayWrapper(bytes, offset, length);
  }

  /**
   * Creates a new {@link ReadableBuffer} that is backed by the given {@link ByteBuffer}. Calls to
   * read from the buffer will increment the position of the {@link ByteBuffer}.
   */
  public static ReadableBuffer wrap(ByteBuffer bytes) {
    return new ByteReadableBufferWrapper(bytes);
  }

  /**
   * Reads an entire {@link ReadableBuffer} to a new array. After calling this method, the buffer
   * will contain no readable bytes.
   */
  public static byte[] readArray(ReadableBuffer buffer) {
    Preconditions.checkNotNull(buffer, "buffer");
    int length = buffer.readableBytes();
    byte[] bytes = new byte[length];
    buffer.readBytes(bytes, 0, length);
    return bytes;
  }

  /**
   * Reads the entire {@link ReadableBuffer} to a new {@link String} with the given charset.
   */
  public static String readAsString(ReadableBuffer buffer, Charset charset) {
    Preconditions.checkNotNull(charset, "charset");
    byte[] bytes = readArray(buffer);
    return new String(bytes, charset);
  }

  /**
   * Reads the entire {@link ReadableBuffer} to a new {@link String} using UTF-8 decoding.
   */
  public static String readAsStringUtf8(ReadableBuffer buffer) {
    return readAsString(buffer, UTF_8);
  }

  /**
   * Creates a new {@link InputStream} backed by the given buffer. Any read taken on the stream
   * will automatically increment the read position of this buffer. Closing the stream, however,
   * does not affect the original buffer.
   *
   * @param buffer the buffer backing the new {@link InputStream}.
   * @param owner if {@code true}, the returned stream will close the buffer when closed.
   */
  public static InputStream openStream(ReadableBuffer buffer, boolean owner) {
    return new BufferInputStream(owner ? buffer : ignoreClose(buffer));
  }

  /**
   * Decorates the given {@link ReadableBuffer} to ignore calls to {@link ReadableBuffer#close}.
   *
   * @param buffer the buffer to be decorated.
   * @return a wrapper around {@code buffer} that ignores calls to {@link ReadableBuffer#close}.
   */
  public static ReadableBuffer ignoreClose(ReadableBuffer buffer) {
    return new ForwardingReadableBuffer(buffer) {
      @Override
      public void close() {
        // Ignore.
      }
    };
  }

  /**
   * A {@link ReadableBuffer} that is backed by a byte array.
   */
  private static class ByteArrayWrapper extends AbstractReadableBuffer {
    // Current read position within 'bytes'; advances as data is consumed.
    int offset;
    // One past the last readable index (exclusive upper bound of this view).
    final int end;
    final byte[] bytes;

    ByteArrayWrapper(byte[] bytes) {
      this(bytes, 0, bytes.length);
    }

    ByteArrayWrapper(byte[] bytes, int offset, int length) {
      // Null-check first: the boundary check below dereferences 'bytes', so
      // checking it last could surface a confusing NullPointerException
      // instead of the intended "bytes" message.
      this.bytes = Preconditions.checkNotNull(bytes, "bytes");
      Preconditions.checkArgument(offset >= 0, "offset must be >= 0");
      Preconditions.checkArgument(length >= 0, "length must be >= 0");
      Preconditions.checkArgument(offset + length <= bytes.length,
          "offset + length exceeds array boundary");
      this.offset = offset;
      this.end = offset + length;
    }

    @Override
    public int readableBytes() {
      return end - offset;
    }

    @Override
    public void skipBytes(int length) {
      checkReadable(length);
      offset += length;
    }

    @Override
    public int readUnsignedByte() {
      checkReadable(1);
      return bytes[offset++] & 0xFF;
    }

    @Override
    public void readBytes(byte[] dest, int destIndex, int length) {
      // Bounds-check against the logical end of this view, consistent with the
      // other read methods. Without it, a wrapper over a sub-range of a larger
      // array could silently read past 'end' (System.arraycopy only enforces
      // the physical array bounds).
      checkReadable(length);
      System.arraycopy(bytes, offset, dest, destIndex, length);
      offset += length;
    }

    @Override
    public void readBytes(ByteBuffer dest) {
      Preconditions.checkNotNull(dest, "dest");
      int length = dest.remaining();
      checkReadable(length);
      dest.put(bytes, offset, length);
      offset += length;
    }

    @Override
    public void readBytes(OutputStream dest, int length) throws IOException {
      checkReadable(length);
      dest.write(bytes, offset, length);
      offset += length;
    }

    @Override
    public ByteArrayWrapper readBytes(int length) {
      checkReadable(length);
      int originalOffset = offset;
      offset += length;
      // The returned buffer shares the backing array; no copy is made.
      return new ByteArrayWrapper(bytes, originalOffset, length);
    }

    @Override
    public boolean hasArray() {
      return true;
    }

    @Override
    public byte[] array() {
      return bytes;
    }

    @Override
    public int arrayOffset() {
      return offset;
    }
  }

  /**
   * A {@link ReadableBuffer} that is backed by a {@link ByteBuffer}.
   */
  private static class ByteReadableBufferWrapper extends AbstractReadableBuffer {
    final ByteBuffer bytes;

    ByteReadableBufferWrapper(ByteBuffer bytes) {
      this.bytes = Preconditions.checkNotNull(bytes, "bytes");
    }

    @Override
    public int readableBytes() {
      return bytes.remaining();
    }

    @Override
    public int readUnsignedByte() {
      checkReadable(1);
      return bytes.get() & 0xFF;
    }

    @Override
    public void skipBytes(int length) {
      checkReadable(length);
      bytes.position(bytes.position() + length);
    }

    @Override
    public void readBytes(byte[] dest, int destOffset, int length) {
      checkReadable(length);
      bytes.get(dest, destOffset, length);
    }

    @Override
    public void readBytes(ByteBuffer dest) {
      Preconditions.checkNotNull(dest, "dest");
      int length = dest.remaining();
      checkReadable(length);
      // Change the limit so that only 'length' bytes are available.
      int prevLimit = bytes.limit();
      bytes.limit(bytes.position() + length);
      // Write the bytes and restore the original limit.
      dest.put(bytes);
      bytes.limit(prevLimit);
    }

    @Override
    public void readBytes(OutputStream dest, int length) throws IOException {
      checkReadable(length);
      if (hasArray()) {
        // Write directly from the backing array, then advance the position
        // manually since no get() call was made.
        dest.write(array(), arrayOffset(), length);
        bytes.position(bytes.position() + length);
      } else {
        // The buffer doesn't support array(). Copy the data to an intermediate buffer.
        byte[] array = new byte[length];
        bytes.get(array);
        dest.write(array);
      }
    }

    @Override
    public ByteReadableBufferWrapper readBytes(int length) {
      checkReadable(length);
      // duplicate() shares content but has independent position/limit, so the
      // returned view is unaffected by further reads on this buffer.
      ByteBuffer buffer = bytes.duplicate();
      buffer.limit(bytes.position() + length);
      bytes.position(bytes.position() + length);
      return new ByteReadableBufferWrapper(buffer);
    }

    @Override
    public boolean hasArray() {
      return bytes.hasArray();
    }

    @Override
    public byte[] array() {
      return bytes.array();
    }

    @Override
    public int arrayOffset() {
      return bytes.arrayOffset() + bytes.position();
    }
  }

  /**
   * An {@link InputStream} that is backed by a {@link ReadableBuffer}.
   */
  private static final class BufferInputStream extends InputStream implements KnownLength {
    final ReadableBuffer buffer;

    BufferInputStream(ReadableBuffer buffer) {
      this.buffer = Preconditions.checkNotNull(buffer, "buffer");
    }

    @Override
    public int available() throws IOException {
      return buffer.readableBytes();
    }

    @Override
    public int read() {
      if (buffer.readableBytes() == 0) {
        // EOF.
        return -1;
      }
      return buffer.readUnsignedByte();
    }

    @Override
    public int read(byte[] dest, int destOffset, int length) throws IOException {
      if (buffer.readableBytes() == 0) {
        // EOF.
        return -1;
      }
      length = Math.min(buffer.readableBytes(), length);
      buffer.readBytes(dest, destOffset, length);
      return length;
    }

    @Override
    public void close() throws IOException {
      buffer.close();
    }
  }

  private ReadableBuffers() {}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.ant;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import org.apache.tools.ant.BuildException;
/**
 * Ant task that implements the <code>/status</code> command, supported by the
 * mod_jk status (1.2.9) application.
 *
 * @author Peter Rossbach
 * @version $Revision: 833130 $
 * @since 5.5.9
 */
public class JKStatusUpdateTask extends AbstractCatalinaTask {

    /** Name of the worker to update (query parameter {@code w}). */
    private String worker = "lb";

    /** Worker type: {@code "lb"} (load balancer) or {@code "worker"} (lb member). */
    private String workerType = "lb";

    /** Internal id; not currently part of the generated status link. */
    private int internalid = 0;

    /** Load balancer retries (query parameter {@code lr}). */
    private Integer lbRetries;

    /** Load balancer recover time (query parameter {@code lt}); must be &gt; 59. */
    private Integer lbRecovertime;

    /** Load balancer sticky-session flag (query parameter {@code ls}). */
    private Boolean lbStickySession = Boolean.TRUE;

    /** Load balancer force-session flag (query parameter {@code lf}). */
    private Boolean lbForceSession = Boolean.FALSE;

    /** Member worker load factor (query parameter {@code wf}); must be &gt;= 1. */
    private Integer workerLoadFactor;

    /** Member worker redirect route (query parameter {@code wr}). */
    private String workerRedirect;

    /** Member worker cluster domain (query parameter {@code wc}). */
    private String workerClusterDomain;

    /** Member worker disabled flag (query parameter {@code wd}). */
    private Boolean workerDisabled = Boolean.FALSE;

    /** Member worker stopped flag (query parameter {@code ws}). */
    private Boolean workerStopped = Boolean.FALSE;

    /** Whether a load balancer (true) or a member worker (false) is updated. */
    private boolean isLBMode = true;

    /** Load balancer a member worker belongs to (query parameter {@code l}). */
    private String workerLb;

    /**
     * Creates the task, defaulting to the local mod_jk status URL.
     */
    public JKStatusUpdateTask() {
        super();
        setUrl("http://localhost/status");
    }

    /**
     * @return Returns the internalid.
     */
    public int getInternalid() {
        return internalid;
    }

    /**
     * @param internalid The internalid to set.
     */
    public void setInternalid(int internalid) {
        this.internalid = internalid;
    }

    /**
     * @return Returns the lbForceSession.
     */
    public Boolean getLbForceSession() {
        return lbForceSession;
    }

    /**
     * @param lbForceSession The lbForceSession to set.
     */
    public void setLbForceSession(Boolean lbForceSession) {
        this.lbForceSession = lbForceSession;
    }

    /**
     * @return Returns the lbRecovertime.
     */
    public Integer getLbRecovertime() {
        return lbRecovertime;
    }

    /**
     * @param lbRecovertime The lbRecovertime to set.
     */
    public void setLbRecovertime(Integer lbRecovertime) {
        this.lbRecovertime = lbRecovertime;
    }

    /**
     * @return Returns the lbRetries.
     */
    public Integer getLbRetries() {
        return lbRetries;
    }

    /**
     * @param lbRetries The lbRetries to set.
     */
    public void setLbRetries(Integer lbRetries) {
        this.lbRetries = lbRetries;
    }

    /**
     * @return Returns the lbStickySession.
     */
    public Boolean getLbStickySession() {
        return lbStickySession;
    }

    /**
     * @param lbStickySession The lbStickySession to set.
     */
    public void setLbStickySession(Boolean lbStickySession) {
        this.lbStickySession = lbStickySession;
    }

    /**
     * @return Returns the worker.
     */
    public String getWorker() {
        return worker;
    }

    /**
     * @param worker The worker to set.
     */
    public void setWorker(String worker) {
        this.worker = worker;
    }

    /**
     * @return Returns the workerType.
     */
    public String getWorkerType() {
        return workerType;
    }

    /**
     * @param workerType The workerType to set.
     */
    public void setWorkerType(String workerType) {
        this.workerType = workerType;
    }

    /**
     * @return Returns the workerLb.
     */
    public String getWorkerLb() {
        return workerLb;
    }

    /**
     * @param workerLb The workerLb to set.
     */
    public void setWorkerLb(String workerLb) {
        this.workerLb = workerLb;
    }

    /**
     * @return Returns the workerClusterDomain.
     */
    public String getWorkerClusterDomain() {
        return workerClusterDomain;
    }

    /**
     * @param workerClusterDomain The workerClusterDomain to set.
     */
    public void setWorkerClusterDomain(String workerClusterDomain) {
        this.workerClusterDomain = workerClusterDomain;
    }

    /**
     * @return Returns the workerDisabled.
     */
    public Boolean getWorkerDisabled() {
        return workerDisabled;
    }

    /**
     * @param workerDisabled The workerDisabled to set.
     */
    public void setWorkerDisabled(Boolean workerDisabled) {
        this.workerDisabled = workerDisabled;
    }

    /**
     * @return Returns the workerStopped.
     */
    public Boolean getWorkerStopped() {
        return workerStopped;
    }

    /**
     * @param workerStopped The workerStopped to set.
     */
    public void setWorkerStopped(Boolean workerStopped) {
        this.workerStopped = workerStopped;
    }

    /**
     * @return Returns the workerLoadFactor.
     */
    public Integer getWorkerLoadFactor() {
        return workerLoadFactor;
    }

    /**
     * @param workerLoadFactor The workerLoadFactor to set.
     */
    public void setWorkerLoadFactor(Integer workerLoadFactor) {
        this.workerLoadFactor = workerLoadFactor;
    }

    /**
     * @return Returns the workerRedirect.
     */
    public String getWorkerRedirect() {
        return workerRedirect;
    }

    /**
     * @param workerRedirect The workerRedirect to set.
     */
    public void setWorkerRedirect(String workerRedirect) {
        this.workerRedirect = workerRedirect;
    }

    /**
     * Execute the requested operation: validate the configured attributes,
     * build the status update link and issue the request.
     *
     * @exception BuildException if an error occurs
     */
    @Override
    public void execute() throws BuildException {
        super.execute();
        checkParameter();
        StringBuilder sb = createLink();
        execute(sb.toString(), null, null, -1);
    }

    /**
     * Create JkStatus link
     * <ul>
     * <li><b>load balance example:
     * </b>http://localhost/status?cmd=update&amp;mime=txt&amp;w=lb&amp;lf=false&amp;ls=true</li>
     * <li><b>worker example:
     * </b>http://localhost/status?cmd=update&amp;mime=txt&amp;w=node1&amp;l=lb&amp;wf=1&amp;wd=false&amp;ws=false
     * </li>
     * </ul>
     *
     * @return create jkstatus link
     */
    private StringBuilder createLink() {
        // Building URL
        StringBuilder sb = new StringBuilder();
        try {
            sb.append("?cmd=update&mime=txt");
            sb.append("&w=");
            sb.append(URLEncoder.encode(worker, getCharset()));
            if (isLBMode) {
                //http://localhost/status?cmd=update&mime=txt&w=lb&lf=false&ls=true
                if ((lbRetries != null)) { // > 0
                    sb.append("&lr=");
                    sb.append(lbRetries);
                }
                if ((lbRecovertime != null)) { // > 59
                    // BUGFIX: the recover-time parameter is "lt"; the previous
                    // "<=" looked like an HTML-entity-mangled "&lt=".
                    sb.append("&lt=");
                    sb.append(lbRecovertime);
                }
                if ((lbStickySession != null)) {
                    sb.append("&ls=");
                    sb.append(lbStickySession);
                }
                if ((lbForceSession != null)) {
                    sb.append("&lf=");
                    sb.append(lbForceSession);
                }
            } else {
                //http://localhost/status?cmd=update&mime=txt&w=node1&l=lb&wf=1&wd=false&ws=false
                if ((workerLb != null)) { // must be configured
                    sb.append("&l=");
                    sb.append(URLEncoder.encode(workerLb, getCharset()));
                }
                if ((workerLoadFactor != null)) { // >= 1
                    sb.append("&wf=");
                    sb.append(workerLoadFactor);
                }
                if ((workerDisabled != null)) {
                    sb.append("&wd=");
                    sb.append(workerDisabled);
                }
                if ((workerStopped != null)) {
                    sb.append("&ws=");
                    sb.append(workerStopped);
                }
                if ((workerRedirect != null)) { // other worker connected to this lb
                    sb.append("&wr=");
                    // BUGFIX: the redirect value was never appended, producing
                    // an empty "wr" parameter.
                    sb.append(URLEncoder.encode(workerRedirect, getCharset()));
                }
                if ((workerClusterDomain != null)) {
                    sb.append("&wc=");
                    sb.append(URLEncoder.encode(workerClusterDomain,
                            getCharset()));
                }
            }
        } catch (UnsupportedEncodingException e) {
            throw new BuildException("Invalid 'charset' attribute: "
                    + getCharset());
        }
        return sb;
    }

    /**
     * Check correct lb and worker parameters, and set {@link #isLBMode}
     * according to the configured {@code workerType}.
     *
     * @throws BuildException if a required attribute is missing or a value is
     *         out of range
     */
    protected void checkParameter() {
        if (worker == null) {
            throw new BuildException("Must specify 'worker' attribute");
        }
        if (workerType == null) {
            throw new BuildException("Must specify 'workerType' attribute");
        }
        if ("lb".equals(workerType)) {
            if (lbRecovertime == null && lbRetries == null) {
                throw new BuildException(
                        "Must specify at a lb worker either 'lbRecovertime' or "
                                + "'lbRetries' attribute");
            }
            if (lbStickySession == null || lbForceSession == null) {
                throw new BuildException("Must specify at a lb worker either "
                        + "'lbStickySession' and 'lbForceSession' attribute");
            }
            // BUGFIX: the comparisons were inverted relative to the error
            // messages (they rejected valid values and accepted invalid ones).
            if (null != lbRecovertime && 60 > lbRecovertime.intValue()) {
                throw new BuildException(
                        "The 'lbRecovertime' must be greater than 59");
            }
            if (null != lbRetries && 1 > lbRetries.intValue()) {
                throw new BuildException(
                        "The 'lbRetries' must be greater than 1");
            }
            isLBMode = true;
        } else if ("worker".equals(workerType)) {
            if (workerDisabled == null) {
                throw new BuildException(
                        "Must specify at a node worker 'workerDisabled' attribute");
            }
            if (workerStopped == null) {
                throw new BuildException(
                        "Must specify at a node worker 'workerStopped' attribute");
            }
            if (workerLoadFactor == null) {
                throw new BuildException(
                        "Must specify at a node worker 'workerLoadFactor' attribute");
            }
            if (workerClusterDomain == null) {
                throw new BuildException(
                        "Must specify at a node worker 'workerClusterDomain' attribute");
            }
            if (workerRedirect == null) {
                throw new BuildException(
                        "Must specify at a node worker 'workerRedirect' attribute");
            }
            if (workerLb == null) {
                throw new BuildException("Must specify 'workerLb' attribute");
            }
            if (workerLoadFactor.intValue() < 1) {
                throw new BuildException(
                        "The 'workerLoadFactor' must be greater or equal 1");
            }
            isLBMode = false;
        } else {
            throw new BuildException(
                    "Only 'lb' and 'worker' supported as workerType attribute");
        }
    }
}
| |
/* The following code was generated by JFlex 1.4.1 on 4/28/12 4:57 PM */
/*
* 04/24/2012
*
* LatexTokenMaker.java - Scanner for LaTeX.
*
* This library is distributed under a modified BSD license. See the included
* RSyntaxTextArea.License.txt file for details.
*/
package org.fife.ui.rsyntaxtextarea.modes;
import java.io.*;
import javax.swing.text.Segment;
import org.fife.ui.rsyntaxtextarea.*;
/**
* Scanner for the LaTeX.<p>
*
* This implementation was created using
* <a href="http://www.jflex.de/">JFlex</a> 1.4.1; however, the generated file
* was modified for performance. Memory allocation needs to be almost
* completely removed to be competitive with the handwritten lexers (subclasses
 * of <code>AbstractTokenMaker</code>), so this class has been modified so that
* Strings are never allocated (via yytext()), and the scanner never has to
* worry about refilling its buffer (needlessly copying chars around).
* We can achieve this because RSTA always scans exactly 1 line of tokens at a
* time, and hands the scanner this line as an array of characters (a Segment
* really). Since tokens contain pointers to char arrays instead of Strings
* holding their contents, there is no need for allocating new memory for
* Strings.<p>
*
* The actual algorithm generated for scanning has, of course, not been
* modified.<p>
*
* If you wish to regenerate this file yourself, keep in mind the following:
* <ul>
 * <li>The generated <code>LatexTokenMaker.java</code> file will contain two
* definitions of both <code>zzRefill</code> and <code>yyreset</code>.
* You should hand-delete the second of each definition (the ones
* generated by the lexer), as these generated methods modify the input
* buffer, which we'll never have to do.</li>
* <li>You should also change the declaration/definition of zzBuffer to NOT
* be initialized. This is a needless memory allocation for us since we
* will be pointing the array somewhere else anyway.</li>
* <li>You should NOT call <code>yylex()</code> on the generated scanner
* directly; rather, you should use <code>getTokenList</code> as you would
* with any other <code>TokenMaker</code> instance.</li>
* </ul>
*
* @author Robert Futrell
* @version 0.5
*
*/
public class LatexTokenMaker extends AbstractJFlexTokenMaker {
/** This character denotes the end of file */
public static final int YYEOF = -1;
/** lexical states */
public static final int EOL_COMMENT = 1;
public static final int YYINITIAL = 0;
/**
* Translates characters to character classes
*/
private static final String ZZ_CMAP_PACKED =
"\11\0\1\3\1\32\1\0\1\3\23\0\1\3\1\5\1\0\1\5"+
"\1\7\1\4\7\5\1\2\1\22\1\6\12\1\1\20\1\5\1\0"+
"\1\5\1\0\2\5\32\1\1\5\1\23\1\5\1\0\1\2\1\0"+
"\1\1\1\25\1\1\1\31\1\17\1\14\1\26\1\10\1\15\2\1"+
"\1\16\1\1\1\27\1\1\1\12\2\1\1\13\1\11\2\1\1\21"+
"\3\1\1\30\1\0\1\24\1\5\uff81\0";
/**
* Translates characters to character classes
*/
private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);
/**
* Translates DFA states to action switch labels.
*/
private static final int [] ZZ_ACTION = zzUnpackAction();
private static final String ZZ_ACTION_PACKED_0 =
"\2\0\2\1\1\2\1\3\1\1\1\4\1\5\4\6"+
"\1\7\3\10\4\0\2\10\4\0\2\10\2\0\1\11"+
"\1\0\1\10\3\0\1\10\1\12\2\0\1\13";
/**
 * Unpacks the compressed DFA action table. Generated by JFlex; do not edit
 * by hand.
 *
 * @return the unpacked action table, one entry per DFA state
 */
private static int [] zzUnpackAction() {
    int [] result = new int[42]; // 42 == number of DFA states (generator-fixed)
    int offset = 0;
    offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
    return result;
}
/**
 * Unpacks one run-length-encoded chunk of the action table. The packed
 * string is a sequence of (count, value) character pairs; each pair expands
 * to {@code count} copies of {@code value}.
 *
 * @param packed the packed table chunk
 * @param offset index in {@code result} at which unpacking starts
 * @param result array receiving the unpacked values
 * @return the index in {@code result} just past the last value written
 */
private static int zzUnpackAction(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
        int count = packed.charAt(i++);
        int value = packed.charAt(i++);
        do result[j++] = value; while (--count > 0);
    }
    return j;
}
/**
* Translates a state to a row index in the transition table
*/
private static final int [] ZZ_ROWMAP = zzUnpackRowMap();
private static final String ZZ_ROWMAP_PACKED_0 =
"\0\0\0\33\0\66\0\121\0\66\0\66\0\154\0\66"+
"\0\66\0\207\0\242\0\275\0\330\0\66\0\363\0\u010e"+
"\0\u0129\0\u0144\0\u015f\0\u017a\0\u0195\0\u01b0\0\u01cb\0\u01e6"+
"\0\u0201\0\u021c\0\u0237\0\u0252\0\u026d\0\u0288\0\u02a3\0\u02be"+
"\0\u02d9\0\u02f4\0\u030f\0\u02be\0\u032a\0\u0345\0\66\0\u0360"+
"\0\u037b\0\66";
/**
 * Unpacks the table mapping DFA states to rows of the transition table.
 * Generated by JFlex; do not edit by hand.
 *
 * @return the unpacked row map, one entry per DFA state
 */
private static int [] zzUnpackRowMap() {
    int [] result = new int[42]; // 42 == number of DFA states (generator-fixed)
    int offset = 0;
    offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
    return result;
}
/**
 * Unpacks one chunk of the row map. Unlike the run-length-encoded tables,
 * each 32-bit entry here is stored as two consecutive characters
 * (high 16 bits, then low 16 bits).
 *
 * @param packed the packed table chunk
 * @param offset index in {@code result} at which unpacking starts
 * @param result array receiving the unpacked values
 * @return the index in {@code result} just past the last value written
 */
private static int zzUnpackRowMap(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
        int high = packed.charAt(i++) << 16;
        result[j++] = high | packed.charAt(i++);
    }
    return j;
}
/**
* The transition table of the DFA
*/
private static final int [] ZZ_TRANS = zzUnpackTrans();
private static final String ZZ_TRANS_PACKED_0 =
"\1\3\2\4\1\5\1\6\3\3\10\4\1\3\1\4"+
"\1\3\1\7\1\10\3\4\1\10\1\4\1\11\10\12"+
"\1\13\3\12\1\14\4\12\1\15\10\12\1\16\34\0"+
"\2\4\5\0\10\4\1\0\1\4\3\0\3\4\1\0"+
"\1\4\2\0\2\17\5\0\7\17\1\20\1\0\1\17"+
"\3\0\1\21\2\17\1\0\1\17\1\0\10\12\1\0"+
"\3\12\1\0\4\12\1\0\10\12\12\0\1\22\32\0"+
"\1\23\3\0\1\24\36\0\1\25\12\0\2\17\5\0"+
"\10\17\1\0\1\17\3\0\3\17\1\0\1\17\2\0"+
"\2\17\5\0\10\17\1\0\1\17\3\0\2\17\1\26"+
"\1\0\1\17\2\0\2\17\5\0\7\17\1\27\1\0"+
"\1\17\3\0\3\17\1\0\1\17\12\0\1\30\33\0"+
"\1\31\36\0\1\32\35\0\1\33\12\0\2\17\5\0"+
"\10\17\1\0\1\17\3\0\3\17\1\0\1\34\2\0"+
"\2\17\5\0\10\17\1\0\1\17\3\0\1\17\1\35"+
"\1\17\1\0\1\17\13\0\1\36\40\0\1\37\31\0"+
"\1\31\35\0\1\40\11\0\2\17\5\0\10\17\1\0"+
"\1\17\3\0\3\17\1\41\1\17\2\0\2\17\5\0"+
"\5\17\1\42\2\17\1\0\1\17\3\0\3\17\1\0"+
"\1\17\14\0\1\31\4\0\1\37\20\0\1\43\25\0"+
"\1\40\1\44\1\0\2\44\12\40\1\44\1\40\1\44"+
"\2\0\3\40\1\0\1\40\2\0\2\45\5\0\10\45"+
"\1\0\1\45\3\0\3\45\1\0\1\45\2\0\2\17"+
"\5\0\10\17\1\0\1\17\3\0\2\17\1\46\1\0"+
"\1\17\7\0\1\40\25\0\2\45\5\0\10\45\1\0"+
"\1\45\2\0\1\47\3\45\1\0\1\45\2\0\2\17"+
"\5\0\10\17\1\0\1\17\3\0\3\17\1\50\1\17"+
"\2\0\2\51\5\0\10\51\1\0\1\51\3\0\3\51"+
"\1\0\1\51\2\0\2\51\5\0\10\51\1\0\1\51"+
"\2\0\1\52\3\51\1\0\1\51\1\0";
/**
 * Unpacks the DFA transition table. Generated by JFlex; do not edit by hand.
 *
 * @return the unpacked transition table
 */
private static int [] zzUnpackTrans() {
    int [] result = new int[918]; // 918 == total transition entries (generator-fixed)
    int offset = 0;
    offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
    return result;
}
/**
 * Unpacks one run-length-encoded chunk of the transition table. Each
 * (count, value) pair expands to {@code count} copies of {@code value - 1};
 * the generator stores values offset by one so that the "no transition"
 * marker (-1) can be encoded as the character 0.
 *
 * @param packed the packed table chunk
 * @param offset index in {@code result} at which unpacking starts
 * @param result array receiving the unpacked values
 * @return the index in {@code result} just past the last value written
 */
private static int zzUnpackTrans(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
        int count = packed.charAt(i++);
        int value = packed.charAt(i++);
        value--; // undo the generator's +1 offset (see javadoc)
        do result[j++] = value; while (--count > 0);
    }
    return j;
}
/* error codes */
private static final int ZZ_UNKNOWN_ERROR = 0;
private static final int ZZ_NO_MATCH = 1;
private static final int ZZ_PUSHBACK_2BIG = 2;
/* error messages for the codes above */
private static final String ZZ_ERROR_MSG[] = {
"Unkown internal scanner error",
"Error: could not match input",
"Error: pushback value was too large"
};
/**
* ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
*/
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();
private static final String ZZ_ATTRIBUTE_PACKED_0 =
"\2\0\1\11\1\1\2\11\1\1\2\11\4\1\1\11"+
"\3\1\4\0\2\1\4\0\2\1\2\0\1\1\1\0"+
"\1\1\3\0\1\1\1\11\2\0\1\11";
/**
 * Unpacks the per-state attribute table. Generated by JFlex; do not edit
 * by hand.
 *
 * @return the unpacked attribute table, one entry per DFA state
 */
private static int [] zzUnpackAttribute() {
    int [] result = new int[42]; // 42 == number of DFA states (generator-fixed)
    int offset = 0;
    offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
    return result;
}
/**
 * Unpacks one run-length-encoded chunk of the attribute table. The packed
 * string is a sequence of (count, value) character pairs.
 *
 * @param packed the packed table chunk
 * @param offset index in {@code result} at which unpacking starts
 * @param result array receiving the unpacked values
 * @return the index in {@code result} just past the last value written
 */
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
    int i = 0;       /* index in packed string */
    int j = offset;  /* index in unpacked array */
    int l = packed.length();
    while (i < l) {
        int count = packed.charAt(i++);
        int value = packed.charAt(i++);
        do result[j++] = value; while (--count > 0);
    }
    return j;
}
/** the input device */
private java.io.Reader zzReader;
/** the current state of the DFA */
private int zzState;
/** the current lexical state */
private int zzLexicalState = YYINITIAL;
/** this buffer contains the current text to be matched and is
the source of the yytext() string */
private char zzBuffer[];
/** the textposition at the last accepting state */
private int zzMarkedPos;
/** the current text position in the buffer */
private int zzCurrentPos;
/** startRead marks the beginning of the yytext() string in the buffer */
private int zzStartRead;
/** endRead marks the last character in the buffer, that has been read
from input */
private int zzEndRead;
/** zzAtEOF == true <=> the scanner is at the EOF */
private boolean zzAtEOF;
/* user code: */
/**
 * Constructor. This must be here because JFlex does not generate a
 * no-parameter constructor. The scanner state is initialized lazily by
 * {@link #getTokenList(Segment, int, int)} via {@code yyreset}.
 */
public LatexTokenMaker() {
}
/**
 * Adds a hyperlink token spanning {@code [start, end]} of the current buffer
 * to the linked list of tokens.
 *
 * @param start the token's starting offset in the buffer.
 * @param end the token's ending offset in the buffer.
 * @param tokenType The token's type.
 * @see #addToken(int, int, int)
 */
private void addHyperlinkToken(int start, int end, int tokenType) {
    int documentOffset = start + offsetShift;
    addToken(zzBuffer, start, end, tokenType, documentOffset, true);
}
/**
 * Adds the most recently scanned lexeme, as a token of the given type, to
 * the current linked list of tokens.
 *
 * @param tokenType The token's type.
 */
private void addToken(int tokenType) {
    int start = zzStartRead;
    int end = zzMarkedPos - 1;
    addToken(start, end, tokenType);
}
/**
 * Adds a (non-hyperlink) token spanning {@code [start, end]} of the current
 * buffer to the linked list of tokens.
 *
 * @param start the token's starting offset in the buffer.
 * @param end the token's ending offset in the buffer.
 * @param tokenType The token's type.
 * @see #addHyperlinkToken(int, int, int)
 */
private void addToken(int start, int end, int tokenType) {
    int documentOffset = start + offsetShift;
    addToken(zzBuffer, start, end, tokenType, documentOffset, false);
}
/**
 * Adds the token specified to the current linked list of tokens.
 *
 * @param array The character array.
 * @param start The starting offset in the array.
 * @param end The ending offset in the array.
 * @param tokenType The token's type.
 * @param startOffset The offset in the document at which this token
 *        occurs.
 * @param hyperlink Whether this token is a hyperlink.
 */
public void addToken(char[] array, int start, int end, int tokenType,
                     int startOffset, boolean hyperlink) {
    super.addToken(array, start,end, tokenType, startOffset, hyperlink);
    // Advance the scanner's lexeme start past the token just emitted so the
    // next token begins immediately after it.
    zzStartRead = zzMarkedPos;
}
/**
 * {@inheritDoc}
 */
public String[] getLineCommentStartAndEnd() {
    // LaTeX line comments begin with '%' and run to end of line, so there is
    // no end delimiter.
    String[] delimiters = { "%", null };
    return delimiters;
}
/**
 * Returns the first token in the linked list of tokens generated
 * from <code>text</code>. This method must be implemented by
 * subclasses so they can correctly implement syntax highlighting.
 *
 * @param text The text from which to get tokens.
 * @param initialTokenType The token type we should start with.
 * @param startOffset The offset into the document at which
 *        <code>text</code> starts.
 * @return The first <code>Token</code> in a linked list representing
 *         the syntax highlighted text.
 */
public Token getTokenList(Segment text, int initialTokenType, int startOffset) {
    resetTokenList();
    this.offsetShift = -text.offset + startOffset;
    // Start off in the proper state.
    // NOTE(review): 'initialTokenType' is ignored and scanning always starts
    // from Token.NULL (YYINITIAL) — presumably because LaTeX has no
    // multi-line token states here; confirm if adding block constructs.
    int state = Token.NULL;
    s = text;
    try {
        yyreset(zzReader);
        yybegin(state);
        return yylex();
    } catch (IOException ioe) {
        ioe.printStackTrace();
        return new Token();
    }
}
/**
 * Refills the input buffer.
 *
 * The whole Segment is handed to the scanner up front, so there is never
 * more input to pull in; EOF is reported as soon as the read position
 * passes the end of the segment.
 *
 * @return <code>true</code> if EOF was reached, otherwise
 *         <code>false</code>.
 * @exception IOException if any I/O-Error occurs.
 */
private boolean zzRefill() {
    return zzCurrentPos>=s.offset+s.count;
}
/**
 * Resets the scanner to read from a new input stream.
 * Does not close the old reader.
 *
 * All internal variables are reset, the old input stream
 * <b>cannot</b> be reused (internal buffer is discarded and lost).
 * Lexical state is set to <tt>YY_INITIAL</tt>.
 *
 * Note: this hand-modified version points the scanner directly at the
 * Segment's backing array (field {@code s} must be set before calling),
 * rather than copying characters into a private buffer.
 *
 * @param reader the new input stream
 */
public final void yyreset(java.io.Reader reader) {
    // 's' has been updated.
    zzBuffer = s.array;
    /*
     * We replaced the line below with the two below it because zzRefill
     * no longer "refills" the buffer (since the way we do it, it's always
     * "full" the first time through, since it points to the segment's
     * array). So, we assign zzEndRead here.
     */
    //zzStartRead = zzEndRead = s.offset;
    zzStartRead = s.offset;
    zzEndRead = zzStartRead + s.count - 1; // inclusive index of last char
    zzCurrentPos = zzMarkedPos = s.offset;
    zzLexicalState = YYINITIAL;
    zzReader = reader;
    zzAtEOF = false;
}
/**
 * Creates a new scanner
 * There is also a java.io.InputStream version of this constructor.
 *
 * @param in the java.io.Reader to read input from.
 */
public LatexTokenMaker(java.io.Reader in) {
    this.zzReader = in;
}
/**
 * Creates a new scanner.
 * There is also java.io.Reader version of this constructor.
 *
 * Note: wraps the stream in an InputStreamReader using the platform default
 * charset.
 *
 * @param in the java.io.Inputstream to read input from.
 */
public LatexTokenMaker(java.io.InputStream in) {
    this(new java.io.InputStreamReader(in));
}
/**
 * Unpacks the compressed character translation table. Generated by JFlex;
 * do not edit by hand.
 *
 * @param packed the packed character translation table
 * @return the unpacked character translation table
 */
private static char [] zzUnpackCMap(String packed) {
    char [] map = new char[0x10000]; // one class per 16-bit char value
    int i = 0;  /* index in packed string */
    int j = 0;  /* index in unpacked array */
    // 112 is the length of ZZ_CMAP_PACKED, hard-coded by the generator —
    // it must be regenerated if the packed table ever changes.
    while (i < 112) {
        int count = packed.charAt(i++);
        char value = packed.charAt(i++);
        do map[j++] = value; while (--count > 0);
    }
    return map;
}
/**
 * Closes the input stream and marks the scanner as being at EOF so no
 * further characters are consumed.
 *
 * @throws java.io.IOException if closing the underlying reader fails
 */
public final void yyclose() throws java.io.IOException {
    zzAtEOF = true;           /* indicate end of file */
    zzEndRead = zzStartRead;  /* invalidate buffer */
    if (zzReader != null)
        zzReader.close();
}
/**
 * Returns the current lexical state (e.g. {@link #YYINITIAL} or
 * {@link #EOL_COMMENT}).
 */
public final int yystate() {
    return zzLexicalState;
}
/**
 * Enters a new lexical state
 *
 * @param newState the new lexical state (one of the state constants
 *        declared on this class)
 */
public final void yybegin(int newState) {
    zzLexicalState = newState;
}
/**
 * Returns the text matched by the current regular expression.
 * Note: allocates a new String on every call; performance-sensitive paths
 * should read {@code zzBuffer} directly (see the class javadoc).
 */
public final String yytext() {
    return new String( zzBuffer, zzStartRead, zzMarkedPos-zzStartRead );
}
/**
 * Returns the character at position <tt>pos</tt> from the
 * matched text.
 *
 * It is equivalent to yytext().charAt(pos), but faster
 *
 * @param pos the position of the character to fetch.
 *        A value from 0 to yylength()-1.
 *
 * @return the character at position pos
 */
public final char yycharat(int pos) {
    return zzBuffer[zzStartRead+pos];
}
/**
 * Returns the length (in chars) of the matched text region.
 *
 * @return number of characters in the current match
 */
public final int yylength() {
    return zzMarkedPos-zzStartRead;
}
/**
 * Reports an error that occurred while scanning.
 *
 * In a well-formed scanner (no or only correct usage of
 * yypushback(int) and a match-all fallback rule) this method
 * will only be called with things that "Can't Possibly Happen".
 * If this method is called, something is seriously wrong
 * (e.g. a JFlex bug producing a faulty scanner etc.).
 *
 * Usual syntax/scanner level error handling should be done
 * in error fallback rules.
 *
 * @param errorCode the code of the error message to display
 */
private void zzScanError(int errorCode) {
    // Fall back to the generic message for any code outside the table,
    // mirroring the behavior of an out-of-bounds lookup.
    boolean known = errorCode >= 0 && errorCode < ZZ_ERROR_MSG.length;
    String message = known ? ZZ_ERROR_MSG[errorCode] : ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
    throw new Error(message);
}
/**
 * Pushes the specified amount of characters back into the input stream.
 *
 * They will be read again by the next call of the scanning method.
 * Implemented by simply moving the match-end marker backwards.
 *
 * @param number the number of characters to be read again.
 *               This number must not be greater than yylength()!
 */
public void yypushback(int number)  {
    if ( number > yylength() )
        zzScanError(ZZ_PUSHBACK_2BIG);

    zzMarkedPos -= number;
}
/**
 * Resumes scanning until the next regular expression is matched,
 * the end of input is encountered or an I/O-Error occurs.
 *
 * Generated DFA driver: runs the transition tables until no further
 * transition exists, then executes the action of the last accepting
 * state seen (longest-match semantics).
 *
 * @return the next token
 * @exception java.io.IOException if any I/O-Error occurs
 */
public org.fife.ui.rsyntaxtextarea.Token yylex() throws java.io.IOException {
    int zzInput;
    int zzAction;

    // Cached copies of hot fields (positions, buffer and DFA tables) so
    // the inner loop avoids repeated field loads.
    int zzCurrentPosL;
    int zzMarkedPosL;
    int zzEndReadL = zzEndRead;
    char [] zzBufferL = zzBuffer;
    char [] zzCMapL = ZZ_CMAP;

    int [] zzTransL = ZZ_TRANS;
    int [] zzRowMapL = ZZ_ROWMAP;
    int [] zzAttrL = ZZ_ATTRIBUTE;

    while (true) {
        zzMarkedPosL = zzMarkedPos;

        zzAction = -1;  // no accepting state seen yet for this token

        zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;

        zzState = zzLexicalState;

        zzForAction: {
            while (true) {

                if (zzCurrentPosL < zzEndReadL)
                    zzInput = zzBufferL[zzCurrentPosL++];
                else if (zzAtEOF) {
                    zzInput = YYEOF;
                    break zzForAction;
                }
                else {
                    // store back cached positions
                    zzCurrentPos = zzCurrentPosL;
                    zzMarkedPos = zzMarkedPosL;
                    boolean eof = zzRefill();
                    // get translated positions and possibly new buffer
                    zzCurrentPosL = zzCurrentPos;
                    zzMarkedPosL = zzMarkedPos;
                    zzBufferL = zzBuffer;
                    zzEndReadL = zzEndRead;
                    if (eof) {
                        zzInput = YYEOF;
                        break zzForAction;
                    }
                    else {
                        zzInput = zzBufferL[zzCurrentPosL++];
                    }
                }
                // DFA step: row of the current state plus the character's
                // equivalence class; -1 means no transition (match ends).
                int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
                if (zzNext == -1) break zzForAction;
                zzState = zzNext;

                int zzAttributes = zzAttrL[zzState];
                if ( (zzAttributes & 1) == 1 ) {
                    // Accepting state: remember it and extend the match;
                    // bit 8 marks "accept immediately" (no longer match).
                    zzAction = zzState;
                    zzMarkedPosL = zzCurrentPosL;
                    if ( (zzAttributes & 8) == 8 ) break zzForAction;
                }

            }
        }

        // store back cached position
        zzMarkedPos = zzMarkedPosL;

        // Execute the action of the accepting state (indirected through
        // ZZ_ACTION); the "case N+: break;" pairs are generated fall-through
        // guards, not reachable code paths of their own.
        switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
            case 1:
            { addToken(Token.IDENTIFIER);
            }
            case 12: break;
            case 8:
            { addToken(Token.FUNCTION);
            }
            case 13: break;
            case 2:
            { addToken(Token.WHITESPACE);
            }
            case 14: break;
            case 9:
            { int temp=zzStartRead; addToken(start,zzStartRead-1, Token.COMMENT_EOL); addHyperlinkToken(temp,zzMarkedPos-1, Token.COMMENT_EOL); start = zzMarkedPos;
            }
            case 15: break;
            case 3:
            { start = zzMarkedPos-1; yybegin(EOL_COMMENT);
            }
            case 16: break;
            case 5:
            { addNullToken(); return firstToken;
            }
            case 17: break;
            case 7:
            { addToken(start,zzStartRead-1, Token.COMMENT_EOL); addNullToken(); return firstToken;
            }
            case 18: break;
            case 10:
            { int temp = zzStartRead;
                addToken(temp, temp+3, Token.RESERVED_WORD);
                addToken(temp+4, temp+4, Token.SEPARATOR);
                addToken(temp+5, zzMarkedPos-2, Token.RESERVED_WORD);
                addToken(zzMarkedPos-1, zzMarkedPos-1, Token.SEPARATOR);
            }
            case 19: break;
            case 11:
            { int temp = zzStartRead;
                addToken(temp, temp+5, Token.RESERVED_WORD);
                addToken(temp+6, temp+6, Token.SEPARATOR);
                addToken(temp+7, zzMarkedPos-2, Token.RESERVED_WORD);
                addToken(zzMarkedPos-1, zzMarkedPos-1, Token.SEPARATOR);
            }
            case 20: break;
            case 6:
            {
            }
            case 21: break;
            case 4:
            { addToken(Token.SEPARATOR);
            }
            case 22: break;
            default:
                // No action matched: either clean EOF (emit the state's EOF
                // action) or a scanner-internal error.
                if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
                    zzAtEOF = true;
                    switch (zzLexicalState) {
                        case EOL_COMMENT: {
                            addToken(start,zzStartRead-1, Token.COMMENT_EOL); addNullToken(); return firstToken;
                        }
                        case 43: break;
                        case YYINITIAL: {
                            addNullToken(); return firstToken;
                        }
                        case 44: break;
                        default:
                            return null;
                    }
                }
                else {
                    zzScanError(ZZ_NO_MATCH);
                }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query.h2;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.binary.BinaryObjectBuilder;
import org.apache.ignite.cache.QueryEntity;
import org.apache.ignite.cache.QueryIndex;
import org.apache.ignite.cache.QueryIndexType;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.binary.BinaryMarshaller;
import org.apache.ignite.internal.binary.BinaryObjectImpl;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.CacheObjectContext;
import org.apache.ignite.internal.processors.cache.CacheObjectValueContext;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.query.GridQueryFieldsResult;
import org.apache.ignite.internal.processors.query.GridQueryIndexDescriptor;
import org.apache.ignite.internal.processors.query.GridQueryProperty;
import org.apache.ignite.internal.processors.query.GridQueryTypeDescriptor;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.apache.ignite.spi.IgniteSpiCloseableIterator;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.testframework.GridStringLogger;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.h2.util.JdbcUtils;
import org.jetbrains.annotations.Nullable;
/**
* Tests for all SQL based indexing SPI implementations.
*/
public abstract class GridIndexingSpiAbstractSelfTest extends GridCommonAbstractTest {
/** */
private static final TextIndex textIdx = new TextIndex(F.asList("txt"));
/** */
private static final LinkedHashMap<String, String> fieldsAA = new LinkedHashMap<>();
/** */
private static final LinkedHashMap<String, String> fieldsAB = new LinkedHashMap<>();
/** */
private static final LinkedHashMap<String, String> fieldsBA = new LinkedHashMap<>();
/** */
private IgniteEx ignite0;
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
    IgniteConfiguration cfg = super.getConfiguration(gridName);

    // All tests in this class assume binary marshalling of cache values.
    cfg.setMarshaller(new BinaryMarshaller());

    return cfg;
}
/*
 * Fields initialization: AA is the base field set, AB extends it with a
 * text column, BA extends it with a boolean column.
 */
static {
    fieldsAA.put("id", Long.class.getName());
    fieldsAA.put("name", String.class.getName());
    fieldsAA.put("age", Integer.class.getName());

    fieldsAB.putAll(fieldsAA);
    fieldsAB.put("txt", String.class.getName());

    fieldsBA.putAll(fieldsAA);
    fieldsBA.put("sex", Boolean.class.getName());
}
/** */
private static TypeDesc typeAA = new TypeDesc("A", "A", Collections.<String, Class<?>>emptyMap(), null);
/** */
private static TypeDesc typeAB = new TypeDesc("A", "B", Collections.<String, Class<?>>emptyMap(), textIdx);
/** */
private static TypeDesc typeBA = new TypeDesc("B", "A", Collections.<String, Class<?>>emptyMap(), null);
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
    // Single-node grid; every test method starts its own node 0.
    ignite0 = startGrid(0);
}
/**
 * Builds the configuration for cache "A": two query entities ("A" and "B",
 * keyed by Integer), where entity "B" additionally carries a full-text
 * index on its "txt" field.
 */
private CacheConfiguration cacheACfg() {
    CacheConfiguration<?,?> cfg = new CacheConfiguration<>(DEFAULT_CACHE_NAME);
    cfg.setName("A");

    QueryEntity entityA = new QueryEntity(Integer.class.getName(), "A");
    entityA.setFields(fieldsAA);

    QueryEntity entityB = new QueryEntity(Integer.class.getName(), "B");
    entityB.setFields(fieldsAB);

    // Full-text index over the "txt" column of entity "B".
    QueryIndex txtIndex = new QueryIndex("txt");
    txtIndex.setIndexType(QueryIndexType.FULLTEXT);
    entityB.setIndexes(Collections.singleton(txtIndex));

    cfg.setQueryEntities(Arrays.asList(entityA, entityB));

    return cfg;
}
/**
 * Builds the configuration for cache "B": a single query entity "A"
 * (keyed by Integer) whose fields include the boolean "sex" column.
 */
private CacheConfiguration cacheBCfg() {
    CacheConfiguration cfg = new CacheConfiguration(DEFAULT_CACHE_NAME);
    cfg.setName("B");

    QueryEntity entity = new QueryEntity(Integer.class.getName(), "A");
    entity.setFields(fieldsBA);

    cfg.setQueryEntities(Collections.singleton(entity));

    return cfg;
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
    // Tear down every node so each test method starts from a clean grid.
    stopAllGrids();
}
/**
 * Builds a binary object with the common (id, name, age) field set.
 *
 * @param typeName Binary type name to build.
 * @param id Id.
 * @param name Name.
 * @param age Age.
 * @return Builder populated with the AA fields (not yet built).
 */
private BinaryObjectBuilder aa(String typeName, long id, String name, int age) {
    return ignite0.binary().builder(typeName)
        .setField("id", id)
        .setField("name", name)
        .setField("age", age);
}
/**
 * Builds a binary object of type "B": the AA fields plus a text field.
 *
 * @param id Id.
 * @param name Name.
 * @param age Age.
 * @param txt Text.
 * @return Builder populated with the AB fields (not yet built).
 */
private BinaryObjectBuilder ab(long id, String name, int age, String txt) {
    // setField returns the builder, so the text column can be chained on.
    return aa("B", id, name, age).setField("txt", txt);
}
/**
 * Builds a binary object of type "A" for cache "B": the AA fields plus
 * a boolean field.
 *
 * @param id Id.
 * @param name Name.
 * @param age Age.
 * @param sex Sex.
 * @return Builder populated with the BA fields (not yet built).
 */
private BinaryObjectBuilder ba(long id, String name, int age, boolean sex) {
    // setField returns the builder, so the boolean column can be chained on.
    return aa("A", id, name, age).setField("sex", sex);
}
/**
 * Extracts the value half of a (key, value) query result row.
 *
 * @param row Row.
 * @return Value (second tuple element).
 * @throws IgniteSpiException If failed.
 */
private BinaryObjectImpl value(IgniteBiTuple<Integer, BinaryObjectImpl> row) throws IgniteSpiException {
    return row.get2();
}
/**
 * Fetches the node's H2 indexing SPI via reflection on the query
 * processor's private "idx" field.
 *
 * @return Indexing.
 */
private IgniteH2Indexing getIndexing() {
    return U.field(ignite0.context().query(), "idx");
}
/**
 * Overridden by subclasses that exercise the off-heap code path.
 *
 * @return {@code true} if OFF-HEAP mode should be tested.
 */
protected boolean offheap() {
    return false;
}
/**
 * Wraps an int key in the test's stub {@link KeyCacheObject} implementation.
 *
 * @param key Key.
 * @return Cache object.
 */
private KeyCacheObject key(int key) {
    return new TestCacheObject(key);
}
/**
 * End-to-end check of the H2 indexing SPI: empty-result queries, rejection
 * of multi-alias selects, SQL queries with ordering, full-text queries and
 * fields queries with metadata, against two caches with three query types.
 *
 * @throws Exception If failed.
 */
public void testSpi() throws Exception {
    IgniteH2Indexing spi = getIndexing();

    IgniteCache<Integer, BinaryObject> cacheA = ignite0.createCache(cacheACfg());
    IgniteCache<Integer, BinaryObject> cacheB = ignite0.createCache(cacheBCfg());

    // Queries over empty caches must return empty cursors.
    assertFalse(spi.queryLocalSql(spi.schema(typeAA.cacheName()), "select * from A.A", null, Collections.emptySet(),
        typeAA.name(), null, null).hasNext());

    assertFalse(spi.queryLocalSql(spi.schema(typeAB.cacheName()), "select * from A.B", null, Collections.emptySet(),
        typeAB.name(), null, null).hasNext());

    assertFalse(spi.queryLocalSql(spi.schema(typeBA.cacheName()), "select * from B.A", null, Collections.emptySet(),
        typeBA.name(), null, null).hasNext());

    assertFalse(spi.queryLocalSql(spi.schema(typeBA.cacheName()), "select * from B.A, A.B, A.A", null,
        Collections.emptySet(), typeBA.name(), null, null).hasNext());

    // Selecting several aliases at once is not supported and must fail.
    try {
        spi.queryLocalSql(spi.schema(typeBA.cacheName()), "select aa.*, ab.*, ba.* from A.A aa, A.B ab, B.A ba",
            null, Collections.emptySet(), typeBA.name(), null, null).hasNext();

        fail("Enumerations of aliases in select block must be prohibited");
    }
    catch (IgniteCheckedException ignored) {
        // all fine
    }

    assertFalse(spi.queryLocalSql(spi.schema(typeAB.cacheName()), "select ab.* from A.B ab", null,
        Collections.emptySet(), typeAB.name(), null, null).hasNext());

    assertFalse(spi.queryLocalSql(spi.schema(typeBA.cacheName()), "select ba.* from B.A as ba", null,
        Collections.emptySet(), typeBA.name(), null, null).hasNext());

    // Populate: later puts with the same key overwrite earlier ones
    // (key 1 in cacheA ends up as the "Vasya" B-object; key 1 in cacheB
    // ends up as "Kolya").
    cacheA.put(1, aa("A", 1, "Vasya", 10).build());
    cacheA.put(1, ab(1, "Vasya", 20, "Some text about Vasya goes here.").build());
    cacheB.put(1, ba(2, "Petya", 25, true).build());
    cacheB.put(1, ba(2, "Kolya", 25, true).build());
    cacheA.put(2, aa("A", 2, "Valera", 19).build());
    cacheA.put(3, aa("A", 3, "Borya", 18).build());
    cacheA.put(4, ab(4, "Vitalya", 20, "Very Good guy").build());

    // Query data.
    Iterator<IgniteBiTuple<Integer, BinaryObjectImpl>> res = spi.queryLocalSql(spi.schema(typeAA.cacheName()),
        "from a order by age", null, Collections.emptySet(), typeAA.name(), null, null);

    assertTrue(res.hasNext());
    assertEquals(aa("A", 3, "Borya", 18).build(), value(res.next()));
    assertTrue(res.hasNext());
    assertEquals(aa("A", 2, "Valera", 19).build(), value(res.next()));
    assertFalse(res.hasNext());

    res = spi.queryLocalSql(spi.schema(typeAA.cacheName()), "select aa.* from a aa order by aa.age", null,
        Collections.emptySet(), typeAA.name(), null, null);

    assertTrue(res.hasNext());
    assertEquals(aa("A", 3, "Borya", 18).build(), value(res.next()));
    assertTrue(res.hasNext());
    assertEquals(aa("A", 2, "Valera", 19).build(), value(res.next()));
    assertFalse(res.hasNext());

    res = spi.queryLocalSql(spi.schema(typeAB.cacheName()), "from b order by name", null, Collections.emptySet(),
        typeAB.name(), null, null);

    assertTrue(res.hasNext());
    assertEquals(ab(1, "Vasya", 20, "Some text about Vasya goes here.").build(), value(res.next()));
    assertTrue(res.hasNext());
    assertEquals(ab(4, "Vitalya", 20, "Very Good guy").build(), value(res.next()));
    assertFalse(res.hasNext());

    res = spi.queryLocalSql(spi.schema(typeAB.cacheName()), "select bb.* from b as bb order by bb.name", null,
        Collections.emptySet(), typeAB.name(), null, null);

    assertTrue(res.hasNext());
    assertEquals(ab(1, "Vasya", 20, "Some text about Vasya goes here.").build(), value(res.next()));
    assertTrue(res.hasNext());
    assertEquals(ab(4, "Vitalya", 20, "Very Good guy").build(), value(res.next()));
    assertFalse(res.hasNext());

    res = spi.queryLocalSql(spi.schema(typeBA.cacheName()), "from a", null, Collections.emptySet(), typeBA.name(),
        null, null);

    assertTrue(res.hasNext());
    assertEquals(ba(2, "Kolya", 25, true).build(), value(res.next()));
    assertFalse(res.hasNext());

    // Text queries
    Iterator<IgniteBiTuple<Integer, BinaryObjectImpl>> txtRes = spi.queryLocalText(spi.schema(typeAB.cacheName()),
        "good", typeAB.name(), null);

    assertTrue(txtRes.hasNext());
    assertEquals(ab(4, "Vitalya", 20, "Very Good guy").build(), value(txtRes.next()));
    assertFalse(txtRes.hasNext());

    // Fields query
    GridQueryFieldsResult fieldsRes =
        spi.queryLocalSqlFields(spi.schema("A"), "select a.a.name n1, a.a.age a1, b.a.name n2, " +
            "b.a.age a2 from a.a, b.a where a.a.id = b.a.id ", Collections.emptySet(), null, false, 0, null);

    String[] aliases = {"N1", "A1", "N2", "A2"};
    Object[] vals = { "Valera", 19, "Kolya", 25};

    IgniteSpiCloseableIterator<List<?>> it = fieldsRes.iterator();

    assertTrue(it.hasNext());

    List<?> fields = it.next();

    assertEquals(4, fields.size());

    int i = 0;

    // Column aliases come back upper-cased by H2; check metadata and values.
    for (Object f : fields) {
        assertEquals(aliases[i], fieldsRes.metaData().get(i).fieldName());
        assertEquals(vals[i++], f);
    }

    assertFalse(it.hasNext());

    // Remove
    cacheA.remove(2);
    cacheB.remove(1);
}
/**
 * Test long queries write explain warnings into log.
 *
 * Swaps the indexing SPI's logger for a string logger, keeps enlarging a
 * SYSTEM_RANGE aggregate until a single query exceeds the long-query
 * warning timeout, then asserts the EXPLAIN output was logged.
 *
 * @throws Exception If failed.
 */
public void testLongQueries() throws Exception {
    IgniteH2Indexing spi = getIndexing();

    ignite0.createCache(cacheACfg());

    long longQryExecTime = IgniteConfiguration.DFLT_LONG_QRY_WARN_TIMEOUT;

    GridStringLogger log = new GridStringLogger(false, this.log);

    IgniteLogger oldLog = GridTestUtils.getFieldValue(spi, "log");

    try {
        // Inject the capturing logger; restored in the finally block.
        GridTestUtils.setFieldValue(spi, "log", log);

        String sql = "select sum(x) FROM SYSTEM_RANGE(?, ?)";

        long now = U.currentTimeMillis();
        long time = now;

        long range = 1000000L;

        // Triple the range each pass until one execution takes at least
        // 1.5x the warning timeout, guaranteeing a warning is emitted.
        while (now - time <= longQryExecTime * 3 / 2) {
            time = now;
            range *= 3;

            GridQueryFieldsResult res = spi.queryLocalSqlFields(spi.schema("A"), sql, Arrays.<Object>asList(1,
                range), null, false, 0, null);

            assert res.iterator().hasNext();

            now = U.currentTimeMillis();
        }

        String res = log.toString();

        assertTrue(res.contains("/* PUBLIC.RANGE_INDEX */"));
    }
    finally {
        GridTestUtils.setFieldValue(spi, "log", oldLog);
    }
}
/**
 * Index descriptor stub describing an unnamed full-text index over a
 * fixed set of fields.
 */
private static class TextIndex implements GridQueryIndexDescriptor {
    /** Indexed fields (unmodifiable view of the collection passed in). */
    private final Collection<String> idxFields;

    /**
     * @param fields Fields.
     */
    private TextIndex(Collection<String> fields) {
        idxFields = Collections.unmodifiableCollection(fields);
    }

    /** {@inheritDoc} */
    @Override public String name() {
        return null;  // Anonymous index.
    }

    /** {@inheritDoc} */
    @Override public Collection<String> fields() {
        return idxFields;
    }

    /** {@inheritDoc} */
    @Override public boolean descending(String field) {
        return false;  // All fields ascending.
    }

    /** {@inheritDoc} */
    @Override public QueryIndexType type() {
        return QueryIndexType.FULLTEXT;
    }

    /** {@inheritDoc} */
    @Override public int inlineSize() {
        return 0;  // Inlining not used for full-text indexes.
    }
}
/**
 * Type descriptor stub: a fixed (cache, type) pair whose values are maps
 * from field name to field value, with an optional full-text index.
 */
private static class TypeDesc implements GridQueryTypeDescriptor {
    /** Type name. */
    private final String name;

    /** Name of the cache this type lives in. */
    private final String cacheName;

    /** Value field name -> field class. */
    private final Map<String, Class<?>> valFields;

    /** Optional full-text index descriptor ({@code null} if none). */
    private final GridQueryIndexDescriptor textIdx;

    /**
     * @param cacheName Cache name.
     * @param name Type name.
     * @param valFields Fields.
     * @param textIdx Fulltext index.
     */
    private TypeDesc(String cacheName, String name, Map<String, Class<?>> valFields, GridQueryIndexDescriptor textIdx) {
        this.name = name;
        this.cacheName = cacheName;
        this.valFields = Collections.unmodifiableMap(valFields);
        this.textIdx = textIdx;
    }

    /** {@inheritDoc} */
    @Override public String affinityKey() {
        return null;
    }

    /** {@inheritDoc} */
    @Override public String name() {
        return name;
    }

    /** {@inheritDoc} */
    @Override public String tableName() {
        return null;
    }

    /**
     * @return Cache name.
     */
    String cacheName() {
        return cacheName;
    }

    /** {@inheritDoc} */
    @Override public Map<String, Class<?>> fields() {
        return valFields;
    }

    /**
     * {@inheritDoc}
     *
     * Returns a fresh read-only property that looks the field up in the
     * value map via {@link #value(String, Object, Object)}; setValue is
     * unsupported.
     */
    @Override public GridQueryProperty property(final String name) {
        return new GridQueryProperty() {
            /** */
            @Override public Object value(Object key, Object val) throws IgniteCheckedException {
                return TypeDesc.this.value(name, key, val);
            }

            /** Write access is not supported by this stub. */
            @Override public void setValue(Object key, Object val, Object propVal) throws IgniteCheckedException {
                throw new UnsupportedOperationException();
            }

            /** */
            @Override public String name() {
                return name;
            }

            /** */
            @Override public Class<?> type() {
                return Object.class;
            }

            /** */
            @Override public boolean key() {
                return false;
            }

            /** */
            @Override public GridQueryProperty parent() {
                return null;
            }
        };
    }

    /**
     * {@inheritDoc}
     *
     * Treats the value object as a field map and returns the entry for
     * {@code field}, or {@code null} if absent.
     */
    @SuppressWarnings("unchecked")
    @Override public <T> T value(String field, Object key, Object val) throws IgniteSpiException {
        assert !F.isEmpty(field);

        assert key instanceof Integer;

        Map<String, T> m = (Map<String, T>)val;

        if (m.containsKey(field))
            return m.get(field);

        return null;
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public void setValue(String field, Object key, Object val, Object propVal) throws IgniteCheckedException {
        assert !F.isEmpty(field);

        assert key instanceof Integer;

        Map<String, Object> m = (Map<String, Object>)val;

        m.put(field, propVal);
    }

    /** */
    @Override public Map<String, GridQueryIndexDescriptor> indexes() {
        return Collections.emptyMap();
    }

    /** */
    @Override public GridQueryIndexDescriptor textIndex() {
        return textIdx;
    }

    /** */
    @Override public Class<?> valueClass() {
        return Object.class;
    }

    /** */
    @Override public Class<?> keyClass() {
        return Integer.class;
    }

    /** */
    @Override public String keyTypeName() {
        return null;
    }

    /** */
    @Override public String valueTypeName() {
        return null;
    }

    /**
     * NOTE(review): returns {@code true} only when NO field-level text
     * index is configured — i.e. the whole value is text-indexed instead.
     * Confirm this inversion is intentional.
     */
    @Override public boolean valueTextIndex() {
        return textIdx == null;
    }

    /** */
    @Override public int typeId() {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public String keyFieldName() {
        return null;
    }

    /** {@inheritDoc} */
    @Override public String valueFieldName() {
        return null;
    }

    /** {@inheritDoc} */
    @Nullable @Override public String keyFieldAlias() {
        return null;
    }

    /** {@inheritDoc} */
    @Nullable @Override public String valueFieldAlias() {
        return null;
    }
}
/**
 * Minimal {@link KeyCacheObject} stub wrapping an arbitrary value.
 * Only the operations the tests exercise are implemented; marshalling
 * and message-protocol methods throw {@link UnsupportedOperationException}.
 */
private static class TestCacheObject implements KeyCacheObject {
    /** Wrapped value. */
    private Object val;

    /** Partition number (settable, defaults to 0). */
    private int part;

    /**
     * @param val Value.
     */
    private TestCacheObject(Object val) {
        this.val = val;
    }

    /** {@inheritDoc} */
    @Override public void onAckReceived() {
        // No-op.
    }

    /**
     * {@inheritDoc}
     *
     * Unchecked cast: the caller is responsible for requesting the
     * wrapped value's actual type; the copy flag is ignored.
     */
    @Nullable @Override public <T> T value(CacheObjectValueContext ctx, boolean cpy) {
        return (T)val;
    }

    /** {@inheritDoc} */
    @Override public int partition() {
        return part;
    }

    /** {@inheritDoc} */
    @Override public void partition(int part) {
        this.part = part;
    }

    /** {@inheritDoc} */
    @Override public byte[] valueBytes(CacheObjectValueContext ctx) throws IgniteCheckedException {
        // Delegate serialization to H2's JDBC utilities.
        return JdbcUtils.serialize(val, null);
    }

    /** {@inheritDoc} */
    @Override public boolean putValue(ByteBuffer buf) throws IgniteCheckedException {
        return false;
    }

    /** {@inheritDoc} */
    @Override public int putValue(long addr) throws IgniteCheckedException {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public boolean putValue(final ByteBuffer buf, final int off, final int len)
        throws IgniteCheckedException {
        return false;
    }

    /** {@inheritDoc} */
    @Override public int valueBytesLength(CacheObjectContext ctx) throws IgniteCheckedException {
        return 0;
    }

    /** {@inheritDoc} */
    @Override public byte cacheObjectType() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public boolean isPlatformType() {
        return true;
    }

    /** {@inheritDoc} */
    @Override public KeyCacheObject copy(int part) {
        // Shallow: the same instance is reused regardless of partition.
        return this;
    }

    /** {@inheritDoc} */
    @Override public CacheObject prepareForCache(CacheObjectContext ctx) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public void finishUnmarshal(CacheObjectValueContext ctx, ClassLoader ldr) throws IgniteCheckedException {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public void prepareMarshal(CacheObjectValueContext ctx) throws IgniteCheckedException {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public short directType() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @Override public boolean internal() {
        return false;
    }
}
}
| |
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.apis.graphics;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.CharBuffer;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL;
import javax.microedition.khronos.opengles.GL10;
import javax.microedition.khronos.opengles.GL11;
import javax.microedition.khronos.opengles.GL11Ext;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLSurfaceView;
import android.opengl.GLU;
import android.opengl.GLUtils;
import android.os.SystemClock;
import com.example.android.apis.R;
public class MatrixPaletteRenderer implements GLSurfaceView.Renderer{
private Context mContext;
private Grid mGrid;
private int mTextureID;
/** A grid is a topologically rectangular array of vertices.
 *
 * This grid class is customized for the vertex data required for this
 * example.
 *
 * The vertex and index data are held in VBO objects because on most
 * GPUs VBO objects are the fastest way of rendering static vertex
 * and index data.
 *
 */
private static class Grid {
    // Size of vertex data elements in bytes:
    final static int FLOAT_SIZE = 4;
    final static int CHAR_SIZE = 2;

    // Vertex structure:
    // float x, y, z;
    // float u, v;
    // float weight0, weight1;
    // byte palette0, palette1, pad0, pad1;
    final static int VERTEX_SIZE = 8 * FLOAT_SIZE;
    // Offsets into a vertex, in float units...
    final static int VERTEX_TEXTURE_BUFFER_INDEX_OFFSET = 3;
    final static int VERTEX_WEIGHT_BUFFER_INDEX_OFFSET = 5;
    // ...and in bytes (palette indices are packed bytes).
    final static int VERTEX_PALETTE_INDEX_OFFSET = 7 * FLOAT_SIZE;

    private int mVertexBufferObjectId;
    private int mElementBufferObjectId;

    // These buffers are used to hold the vertex and index data while
    // constructing the grid. Once createBufferObjects() is called
    // the buffers are nulled out to save memory.
    private ByteBuffer mVertexByteBuffer;
    private FloatBuffer mVertexBuffer;
    private CharBuffer mIndexBuffer;

    private int mW;
    private int mH;
    private int mIndexCount;

    /**
     * Allocates staging buffers for a w x h vertex grid and fills the
     * triangle-list index buffer (two triangles per quad).
     *
     * @param w number of vertex columns; must fit in a char index
     * @param h number of vertex rows; must fit in a char index
     * @throws IllegalArgumentException if a dimension or the total vertex
     *         count cannot be addressed by 16-bit indices
     */
    public Grid(int w, int h) {
        if (w < 0 || w >= 65536) {
            throw new IllegalArgumentException("w");
        }
        if (h < 0 || h >= 65536) {
            throw new IllegalArgumentException("h");
        }
        // Multiply as long: for large w and h the int product w * h can
        // overflow and wrap negative, silently passing a guard written as
        // "w * h >= 65536" even though the grid exceeds char index range.
        if ((long) w * h >= 65536) {
            throw new IllegalArgumentException("w * h >= 65536");
        }

        mW = w;
        mH = h;
        int size = w * h;

        mVertexByteBuffer = ByteBuffer.allocateDirect(VERTEX_SIZE * size)
            .order(ByteOrder.nativeOrder());
        mVertexBuffer = mVertexByteBuffer.asFloatBuffer();

        int quadW = mW - 1;
        int quadH = mH - 1;
        int quadCount = quadW * quadH;
        int indexCount = quadCount * 6;  // 2 triangles * 3 indices per quad
        mIndexCount = indexCount;
        mIndexBuffer = ByteBuffer.allocateDirect(CHAR_SIZE * indexCount)
            .order(ByteOrder.nativeOrder()).asCharBuffer();

        /*
         * Initialize triangle list mesh.
         *
         *     [0]-----[ 1] ...
         *      |    /   |
         *      |   /    |
         *      |  /     |
         *     [w]-----[w+1] ...
         *      |        |
         *
         */

        {
            int i = 0;
            for (int y = 0; y < quadH; y++) {
                for (int x = 0; x < quadW; x++) {
                    char a = (char) (y * mW + x);
                    char b = (char) (y * mW + x + 1);
                    char c = (char) ((y + 1) * mW + x);
                    char d = (char) ((y + 1) * mW + x + 1);

                    mIndexBuffer.put(i++, a);
                    mIndexBuffer.put(i++, c);
                    mIndexBuffer.put(i++, b);

                    mIndexBuffer.put(i++, b);
                    mIndexBuffer.put(i++, c);
                    mIndexBuffer.put(i++, d);
                }
            }
        }
    }

    /**
     * Writes one vertex: position, texture coordinate, two bone weights
     * (which must sum to exactly 1.0f) and two palette indices.
     *
     * @param i column index, 0 <= i < w
     * @param j row index, 0 <= j < h
     */
    public void set(int i, int j, float x, float y, float z,
        float u, float v,
        float w0, float w1,
        int p0, int p1) {
        if (i < 0 || i >= mW) {
            throw new IllegalArgumentException("i");
        }
        if (j < 0 || j >= mH) {
            throw new IllegalArgumentException("j");
        }

        // Exact float comparison: callers are expected to pass weights
        // that sum to precisely 1.0f (e.g. w1 = 1.0f - w0).
        if (w0 + w1 != 1.0f) {
            throw new IllegalArgumentException("Weights must add up to 1.0f");
        }

        int index = mW * j + i;

        // Float-typed fields go through the float view...
        mVertexBuffer.position(index * VERTEX_SIZE / FLOAT_SIZE);
        mVertexBuffer.put(x);
        mVertexBuffer.put(y);
        mVertexBuffer.put(z);
        mVertexBuffer.put(u);
        mVertexBuffer.put(v);
        mVertexBuffer.put(w0);
        mVertexBuffer.put(w1);

        // ...while the packed palette bytes go through the byte view.
        mVertexByteBuffer.position(index * VERTEX_SIZE + VERTEX_PALETTE_INDEX_OFFSET);
        mVertexByteBuffer.put((byte) p0);
        mVertexByteBuffer.put((byte) p1);
    }

    /**
     * Uploads the staged vertex and index data into GL buffer objects and
     * releases the staging buffers. Must be called on the GL thread with a
     * current context; the grid is immutable afterwards.
     */
    public void createBufferObjects(GL gl) {
        // Generate the vertex and element buffer IDs
        int[] vboIds = new int[2];
        GL11 gl11 = (GL11) gl;
        gl11.glGenBuffers(2, vboIds, 0);
        mVertexBufferObjectId = vboIds[0];
        mElementBufferObjectId = vboIds[1];

        // Upload the vertex data
        gl11.glBindBuffer(GL11.GL_ARRAY_BUFFER, mVertexBufferObjectId);
        mVertexByteBuffer.position(0);
        gl11.glBufferData(GL11.GL_ARRAY_BUFFER, mVertexByteBuffer.capacity(), mVertexByteBuffer, GL11.GL_STATIC_DRAW);

        gl11.glBindBuffer(GL11.GL_ELEMENT_ARRAY_BUFFER, mElementBufferObjectId);
        mIndexBuffer.position(0);
        gl11.glBufferData(GL11.GL_ELEMENT_ARRAY_BUFFER, mIndexBuffer.capacity() * CHAR_SIZE, mIndexBuffer, GL11.GL_STATIC_DRAW);

        // We don't need the in-memory data any more
        mVertexBuffer = null;
        mVertexByteBuffer = null;
        mIndexBuffer = null;
    }

    /**
     * Draws the grid from the VBOs, enabling the matrix-palette and weight
     * arrays for skinning; restores array/bind state before returning.
     */
    public void draw(GL10 gl) {
        GL11 gl11 = (GL11) gl;
        GL11Ext gl11Ext = (GL11Ext) gl;

        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl11.glBindBuffer(GL11.GL_ARRAY_BUFFER, mVertexBufferObjectId);
        gl11.glVertexPointer(3, GL10.GL_FLOAT, VERTEX_SIZE, 0);
        gl11.glTexCoordPointer(2, GL10.GL_FLOAT, VERTEX_SIZE, VERTEX_TEXTURE_BUFFER_INDEX_OFFSET * FLOAT_SIZE);

        gl.glEnableClientState(GL11Ext.GL_MATRIX_INDEX_ARRAY_OES);
        gl.glEnableClientState(GL11Ext.GL_WEIGHT_ARRAY_OES);
        gl11Ext.glWeightPointerOES(2, GL10.GL_FLOAT, VERTEX_SIZE, VERTEX_WEIGHT_BUFFER_INDEX_OFFSET * FLOAT_SIZE);
        gl11Ext.glMatrixIndexPointerOES(2, GL10.GL_UNSIGNED_BYTE, VERTEX_SIZE, VERTEX_PALETTE_INDEX_OFFSET );

        gl11.glBindBuffer(GL11.GL_ELEMENT_ARRAY_BUFFER, mElementBufferObjectId);
        gl11.glDrawElements(GL10.GL_TRIANGLES, mIndexCount, GL10.GL_UNSIGNED_SHORT, 0);

        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL11Ext.GL_MATRIX_INDEX_ARRAY_OES);
        gl.glDisableClientState(GL11Ext.GL_WEIGHT_ARRAY_OES);
        gl11.glBindBuffer(GL11.GL_ARRAY_BUFFER, 0);
        gl11.glBindBuffer(GL11.GL_ELEMENT_ARRAY_BUFFER, 0);
    }
}
/**
 * @param context context used to load the texture bitmap from resources
 *                in {@code onSurfaceCreated}.
 */
public MatrixPaletteRenderer(Context context) {
    mContext = context;
}
/**
 * One-time GL setup for a (re)created surface: render-state defaults,
 * texture creation/upload from the raw resource, and grid construction.
 * Runs on the GL thread; everything created here must be recreated each
 * time the surface is, because the GL context may have been lost.
 */
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    /*
     * By default, OpenGL enables features that improve quality
     * but reduce performance. One might want to tweak that
     * especially on software renderer.
     */
    gl.glDisable(GL10.GL_DITHER);

    /*
     * Some one-time OpenGL initialization can be made here
     * probably based on features of this particular context
     */
    gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT,
        GL10.GL_FASTEST);

    gl.glClearColor(.5f, .5f, .5f, 1);
    gl.glShadeModel(GL10.GL_SMOOTH);
    gl.glEnable(GL10.GL_DEPTH_TEST);
    gl.glEnable(GL10.GL_TEXTURE_2D);

    /*
     * Create our texture. This has to be done each time the
     * surface is created.
     */

    int[] textures = new int[1];
    gl.glGenTextures(1, textures, 0);

    mTextureID = textures[0];
    gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextureID);

    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
        GL10.GL_NEAREST);
    gl.glTexParameterf(GL10.GL_TEXTURE_2D,
        GL10.GL_TEXTURE_MAG_FILTER,
        GL10.GL_LINEAR);

    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
        GL10.GL_CLAMP_TO_EDGE);
    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
        GL10.GL_CLAMP_TO_EDGE);

    gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE,
        GL10.GL_REPLACE);

    InputStream is = mContext.getResources()
        .openRawResource(R.raw.robot);
    Bitmap bitmap;
    try {
        // NOTE(review): decodeStream can return null on a corrupt
        // resource, which would NPE at texImage2D below — confirm the
        // sample accepts that failure mode.
        bitmap = BitmapFactory.decodeStream(is);
    } finally {
        try {
            is.close();
        } catch(IOException e) {
            // Ignore.
        }
    }

    GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
    bitmap.recycle();

    mGrid = generateWeightedGrid(gl);
}
/**
 * GLSurfaceView.Renderer callback: renders one frame. Sets up the camera,
 * loads two palette matrices (identity view and a time-animated rotation),
 * and draws the skinned grid, which blends each vertex between the two.
 *
 * @param gl GL interface to render with; must also implement GL11Ext for
 *           the OES_matrix_palette entry points (cast below)
 */
public void onDrawFrame(GL10 gl) {
/*
* By default, OpenGL enables features that improve quality
* but reduce performance. One might want to tweak that
* especially on software renderer.
*/
gl.glDisable(GL10.GL_DITHER);
gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE,
GL10.GL_MODULATE);
/*
* Usually, the first thing one might want to do is to clear
* the screen. The most efficient way of doing this is to use
* glClear().
*/
gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
gl.glEnable(GL10.GL_DEPTH_TEST);
gl.glEnable(GL10.GL_CULL_FACE);
/*
* Now we're ready to draw some 3D objects
*/
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glLoadIdentity();
// Camera at z = -5 looking at the origin, +Y up.
GLU.gluLookAt(gl, 0, 0, -5, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glActiveTexture(GL10.GL_TEXTURE0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, mTextureID);
gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
GL10.GL_REPEAT);
gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
GL10.GL_REPEAT);
// 4-second animation cycle; cosine gives a smooth -1..1 sweep,
// scaled to +/-135 degrees of rotation.
long time = SystemClock.uptimeMillis() % 4000L;
// Rock back and forth
double animationUnit = ((double) time) / 4000;
float unitAngle = (float) Math.cos(animationUnit * 2 * Math.PI);
float angle = unitAngle * 135f;
gl.glEnable(GL11Ext.GL_MATRIX_PALETTE_OES);
gl.glMatrixMode(GL11Ext.GL_MATRIX_PALETTE_OES);
GL11Ext gl11Ext = (GL11Ext) gl;
// matrix 0: no transformation
gl11Ext.glCurrentPaletteMatrixOES(0);
gl11Ext.glLoadPaletteFromModelViewMatrixOES();
// matrix 1: rotate by "angle"
// (glRotatef applies to the current palette matrix here, since the
// matrix mode is GL_MATRIX_PALETTE_OES)
gl.glRotatef(angle, 0, 0, 1.0f);
gl11Ext.glCurrentPaletteMatrixOES(1);
gl11Ext.glLoadPaletteFromModelViewMatrixOES();
mGrid.draw(gl);
// Restore fixed-function transform so palette state doesn't leak.
gl.glDisable(GL11Ext.GL_MATRIX_PALETTE_OES);
}
/**
 * Adjusts the viewport and projection to a new surface size.
 *
 * @param gl GL interface
 * @param w  new surface width in pixels
 * @param h  new surface height in pixels
 */
public void onSurfaceChanged(GL10 gl, int w, int h) {
    gl.glViewport(0, 0, w, h);
    // The projection only needs updating when the viewport is resized:
    // keep a unit-tall frustum and widen it to the surface's aspect ratio.
    final float aspect = (float) w / h;
    gl.glMatrixMode(GL10.GL_PROJECTION);
    gl.glLoadIdentity();
    gl.glFrustumf(-aspect, aspect, -1, 1, 3, 7);
}
/**
 * Builds a cylinder as a (uSteps+1) x (vSteps+1) grid of skinned vertices.
 * Each vertex carries two blend weights for palette matrices 0 and 1; the
 * weights vary linearly along the cylinder's height so the top and bottom
 * follow different palette matrices. Buffer objects are created on the GL
 * before returning.
 */
private Grid generateWeightedGrid(GL gl) {
    final int uSteps = 20;
    final int vSteps = 20;
    final float radius = 0.25f;
    final float height = 2.0f;

    Grid grid = new Grid(uSteps + 1, vSteps + 1);
    for (int vIdx = 0; vIdx <= vSteps; vIdx++) {
        for (int uIdx = 0; uIdx <= uSteps; uIdx++) {
            // Position on the cylinder surface (Y runs along the axis,
            // centered on the origin).
            double theta = Math.PI * 2 * uIdx / uSteps;
            float px = radius * (float) Math.cos(theta);
            float py = height * ((float) vIdx / vSteps - 0.5f);
            float pz = radius * (float) Math.sin(theta);
            // Texture coordinates wrap four times around and along the tube.
            float texU = -4.0f * (float) uIdx / uSteps;
            float texV = -4.0f * (float) vIdx / vSteps;
            // Blend weights for the two palette matrices (indices 0 and 1),
            // interpolating linearly with height.
            float weight0 = (float) vIdx / vSteps;
            float weight1 = 1.0f - weight0;
            grid.set(uIdx, vIdx, px, py, pz, texU, texV, weight0, weight1, 0, 1);
        }
    }
    grid.createBufferObjects(gl);
    return grid;
}
}
| |
/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 3.0 */
package avrora.test.probes;
import java.io.*;
/**
 * An implementation of interface CharStream, where the stream is assumed to
 * contain only ASCII characters (without unicode processing).
 *
 * <p>Originally generated by JavaCC (SimpleCharStream version 3.0). Hand-fixed:
 * the {@code InputStream}-based constructor and {@code ReInit} previously
 * ignored their {@code buffersize} argument and hard-coded 4096 (a known
 * defect of this generator version), and buffer-expansion failures now
 * preserve the original cause.</p>
 */
public class SimpleCharStream
{
  /** Generated parsers check this; {@code false} means per-instance streams. */
  public static final boolean staticFlag = false;
  /** Current capacity of {@link #buffer}. */
  int bufsize;
  /** Index up to which the buffer may be filled before wrapping/expanding. */
  int available;
  /** Buffer index of the first character of the current token, or -1. */
  int tokenBegin;
  /** Buffer index of the most recently read character. */
  public int bufpos = -1;
  /** Line number of each buffered character (parallel to {@link #buffer}). */
  protected int[] bufline;
  /** Column number of each buffered character (parallel to {@link #buffer}). */
  protected int[] bufcolumn;

  protected int column = 0;
  protected int line = 1;

  protected boolean prevCharIsCR = false;
  protected boolean prevCharIsLF = false;

  protected Reader inputStream;

  protected char[] buffer;
  /** Number of valid characters currently held in {@link #buffer}. */
  protected int maxNextCharInd = 0;
  /** Number of characters pushed back via {@link #backup(int)}. */
  protected int inBuf = 0;

  /**
   * Grows the buffer (and the parallel line/column arrays) by 2048 entries,
   * shifting the current token to the start of the new buffer.
   *
   * @param wrapAround true when the token currently wraps around the end of
   *                   the circular buffer and must be un-wrapped while copying
   */
  protected void ExpandBuff(boolean wrapAround)
  {
     char[] newbuffer = new char[bufsize + 2048];
     int[] newbufline = new int[bufsize + 2048];
     int[] newbufcolumn = new int[bufsize + 2048];

     try
     {
        if (wrapAround)
        {
           System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
           System.arraycopy(buffer, 0, newbuffer,
                                             bufsize - tokenBegin, bufpos);
           buffer = newbuffer;

           System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
           System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos);
           bufline = newbufline;

           System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
           System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos);
           bufcolumn = newbufcolumn;

           maxNextCharInd = (bufpos += (bufsize - tokenBegin));
        }
        else
        {
           System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
           buffer = newbuffer;

           System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
           bufline = newbufline;

           System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
           bufcolumn = newbufcolumn;

           maxNextCharInd = (bufpos -= tokenBegin);
        }
     }
     catch (Throwable t)
     {
        // Preserve the cause instead of flattening it to a message string.
        throw new Error(t);
     }

     bufsize += 2048;
     available = bufsize;
     tokenBegin = 0;
  }

  /**
   * Refills the buffer from the underlying reader, expanding or wrapping the
   * circular buffer as needed.
   *
   * @throws IOException on end of stream (the generated token manager treats
   *         this as EOF) or on a genuine read error
   */
  protected void FillBuff() throws IOException
  {
     if (maxNextCharInd == available)
     {
        if (available == bufsize)
        {
           if (tokenBegin > 2048)
           {
              bufpos = maxNextCharInd = 0;
              available = tokenBegin;
           }
           else if (tokenBegin < 0)
              bufpos = maxNextCharInd = 0;
           else
              ExpandBuff(false);
        }
        else if (available > tokenBegin)
           available = bufsize;
        else if ((tokenBegin - available) < 2048)
           ExpandBuff(true);
        else
           available = tokenBegin;
     }

     int i;
     try {
        if ((i = inputStream.read(buffer, maxNextCharInd,
                                    available - maxNextCharInd)) == -1)
        {
           inputStream.close();
           throw new IOException();
        }
        else
           maxNextCharInd += i;
     }
     catch(IOException e) {
        --bufpos;
        backup(0);
        if (tokenBegin == -1)
           tokenBegin = bufpos;
        throw e;
     }
  }

  /**
   * Marks the start of a new token and returns its first character.
   *
   * @return the first character of the token
   * @throws IOException if the stream is exhausted or unreadable
   */
  public char BeginToken() throws IOException
  {
     tokenBegin = -1;
     char c = readChar();
     tokenBegin = bufpos;

     return c;
  }

  /**
   * Updates {@link #line}/{@link #column} tracking for the character just
   * read, handling CR, LF, CRLF and tab (8-column tab stops).
   */
  protected void UpdateLineColumn(char c)
  {
     column++;

     if (prevCharIsLF)
     {
        prevCharIsLF = false;
        line += (column = 1);
     }
     else if (prevCharIsCR)
     {
        prevCharIsCR = false;
        if (c == '\n')
        {
           prevCharIsLF = true;
        }
        else
           line += (column = 1);
     }

     switch (c)
     {
        case '\r' :
           prevCharIsCR = true;
           break;
        case '\n' :
           prevCharIsLF = true;
           break;
        case '\t' :
           column--;
           column += (8 - (column & 07));
           break;
        default :
           break;
     }

     bufline[bufpos] = line;
     bufcolumn[bufpos] = column;
  }

  /**
   * Reads the next character, serving pushed-back characters first.
   *
   * @throws IOException if the stream is exhausted or unreadable
   */
  public char readChar() throws IOException
  {
     if (inBuf > 0)
     {
        --inBuf;

        if (++bufpos == bufsize)
           bufpos = 0;

        return buffer[bufpos];
     }

     if (++bufpos >= maxNextCharInd)
        FillBuff();

     char c = buffer[bufpos];

     UpdateLineColumn(c);
     return (c);
  }

  /**
   * @deprecated
   * @see #getEndColumn
   */
  public int getColumn() {
     return bufcolumn[bufpos];
  }

  /**
   * @deprecated
   * @see #getEndLine
   */
  public int getLine() {
     return bufline[bufpos];
  }

  /** Column of the last character read. */
  public int getEndColumn() {
     return bufcolumn[bufpos];
  }

  /** Line of the last character read. */
  public int getEndLine() {
     return bufline[bufpos];
  }

  /** Column of the first character of the current token. */
  public int getBeginColumn() {
     return bufcolumn[tokenBegin];
  }

  /** Line of the first character of the current token. */
  public int getBeginLine() {
     return bufline[tokenBegin];
  }

  /**
   * Pushes back the last {@code amount} characters so they are re-served by
   * subsequent {@link #readChar()} calls.
   */
  public void backup(int amount) {

    inBuf += amount;
    if ((bufpos -= amount) < 0)
       bufpos += bufsize;
  }

  /**
   * Creates a stream over {@code dstream} starting at the given position with
   * the given buffer capacity.
   */
  public SimpleCharStream(Reader dstream, int startline,
  int startcolumn, int buffersize)
  {
    inputStream = dstream;
    line = startline;
    column = startcolumn - 1;

    available = bufsize = buffersize;
    buffer = new char[buffersize];
    bufline = new int[buffersize];
    bufcolumn = new int[buffersize];
  }

  public SimpleCharStream(Reader dstream, int startline,
                          int startcolumn)
  {
     this(dstream, startline, startcolumn, 4096);
  }

  public SimpleCharStream(Reader dstream)
  {
     this(dstream, 1, 1, 4096);
  }

  /**
   * Re-targets this stream at {@code dstream}, reusing the existing buffers
   * when {@code buffersize} matches the current capacity.
   */
  public void ReInit(Reader dstream, int startline,
  int startcolumn, int buffersize)
  {
    inputStream = dstream;
    line = startline;
    column = startcolumn - 1;

    if (buffer == null || buffersize != buffer.length)
    {
      available = bufsize = buffersize;
      buffer = new char[buffersize];
      bufline = new int[buffersize];
      bufcolumn = new int[buffersize];
    }
    prevCharIsLF = prevCharIsCR = false;
    tokenBegin = inBuf = maxNextCharInd = 0;
    bufpos = -1;
  }

  public void ReInit(Reader dstream, int startline,
                     int startcolumn)
  {
     ReInit(dstream, startline, startcolumn, 4096);
  }

  public void ReInit(Reader dstream)
  {
     ReInit(dstream, 1, 1, 4096);
  }

  public SimpleCharStream(InputStream dstream, int startline,
  int startcolumn, int buffersize)
  {
     // FIX: previously passed a hard-coded 4096 here, ignoring buffersize.
     this(new InputStreamReader(dstream), startline, startcolumn, buffersize);
  }

  public SimpleCharStream(InputStream dstream, int startline,
                          int startcolumn)
  {
     this(dstream, startline, startcolumn, 4096);
  }

  public SimpleCharStream(InputStream dstream)
  {
     this(dstream, 1, 1, 4096);
  }

  public void ReInit(InputStream dstream, int startline,
  int startcolumn, int buffersize)
  {
     // FIX: previously passed a hard-coded 4096 here, ignoring buffersize.
     ReInit(new InputStreamReader(dstream), startline, startcolumn, buffersize);
  }

  public void ReInit(InputStream dstream)
  {
     ReInit(dstream, 1, 1, 4096);
  }

  public void ReInit(InputStream dstream, int startline,
                     int startcolumn)
  {
     ReInit(dstream, startline, startcolumn, 4096);
  }

  /**
   * Returns the text of the current token, handling the case where it wraps
   * around the end of the circular buffer.
   */
  public String GetImage()
  {
     if (bufpos >= tokenBegin)
        return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
     else
        return new String(buffer, tokenBegin, bufsize - tokenBegin) +
                              new String(buffer, 0, bufpos + 1);
  }

  /**
   * Returns the last {@code len} characters read, handling buffer wrap-around.
   */
  public char[] GetSuffix(int len)
  {
     char[] ret = new char[len];

     if ((bufpos + 1) >= len)
        System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
     else
     {
        System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
                                                          len - bufpos - 1);
        System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1);
     }

     return ret;
  }

  /** Releases the internal buffers; the stream is unusable afterwards. */
  public void Done()
  {
     buffer = null;
     bufline = null;
     bufcolumn = null;
  }

  /**
   * Method to adjust line and column numbers for the start of a token.<BR>
   */
  public void adjustBeginLineColumn(int newLine, int newCol)
  {
     int start = tokenBegin;
     int len;

     if (bufpos >= tokenBegin)
     {
        len = bufpos - tokenBegin + inBuf + 1;
     }
     else
     {
        len = bufsize - tokenBegin + bufpos + 1 + inBuf;
     }

     int i = 0, j = 0, k;
     int nextColDiff, columnDiff = 0;

     while (i < len &&
            bufline[j = start % bufsize] == bufline[k = ++start % bufsize])
     {
        bufline[j] = newLine;
        nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j];
        bufcolumn[j] = newCol + columnDiff;
        columnDiff = nextColDiff;
        i++;
     }

     if (i < len)
     {
        bufline[j] = newLine++;
        bufcolumn[j] = newCol + columnDiff;

        while (i++ < len)
        {
           if (bufline[j = start % bufsize] != bufline[++start % bufsize])
              bufline[j] = newLine++;
           else
              bufline[j] = newLine;
        }
     }

     line = bufline[j];
     column = bufcolumn[j];
  }
}
| |
package org.jdbx.demo;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import javax.sql.DataSource;
import org.jdbx.*;
/**
* DocSnippets contains all code snippets from UserGuide.md
* to make sure that they compile.
*/
@SuppressWarnings({"unused","resource"})
public class DocSnippets
{
public List<City> readmeEx1Jdbc() throws SQLException
{
List<City> list = new ArrayList<>();
try (Statement stmt = con.createStatement()) {
ResultSet result = stmt.executeQuery("SELECT * FROM Cities ORDER BY name");
while (result.next())
list.add(City.read(result));
}
return list;
}
public List<City> readmeEx2Jdbc() throws SQLException
{
return Jdbx.query(con, "SELECT * FROM Cities ORDER BY name").rows().read(City::read);
}
public Integer readmeEx2Jdbc(Connection con, String firstName, String lastName)
{
try (PreparedStatement pstmt = con.prepareStatement("INSERT INTO Users VALUES (DEFAULT, ?, ?)",
new String[] { "id" })) {
pstmt.setString(1, firstName);
pstmt.setString(2, lastName);
if (pstmt.executeUpdate() != 1)
throw new IllegalStateException("insert failed");
Integer id = null;
ResultSet result = pstmt.getGeneratedKeys();
if (result.next())
id = result.getObject(1, Integer.class);
if (id == null)
throw new IllegalStateException("id not returned");
return id;
}
catch (SQLException e) {
throw new IllegalStateException("sql error", e);
}
}
public Integer readmeEx2Jdbx(Connection con, String firstName, String lastName)
{
try (PrepStmt pstmt = new PrepStmt(con)) {
return pstmt.init().returnCols("id").sql("INSERT INTO Users VALUES (DEFAULT, ?, ?)")
.params(firstName, lastName)
.createUpdate().runGetCol(Integer.class).requireCount(1).requireValue();
}
}
public void stmtsCreateClose()
{
// createing
new StaticStmt(con);
new PrepStmt(con);
new CallStmt(con);
new StaticStmt(ds);
new PrepStmt(ds);
new CallStmt(ds);
// close
try (StaticStmt stmt = new StaticStmt(con)) {
}
// configure
stmt.options()
.setQueryTimeoutSeconds(20)
.setFetchRows(5000)
.setResultType(ResultType.SCROLL_SENSITIVE)
.setResultConcurrency(Concurrency.READ_ONLY);
int seconds = stmt.options().getQueryTimeoutSeconds();
}
public void stmtsInit()
{
pstmt.init("INSERT INTO Users VALUES (DEFAULT, ?, ?)");
cstmt.init("{call getUserName(?, ?)}");
pstmt.init().returnCols("id").sql("INSERT INTO Users VALUES (DEFAULT, ?, ?)");
pstmt.init().namedParams().sql("UPDATE Users SET name = :name");
}
public void stmtsOptions()
{
stmt.options().setQueryTimeoutSeconds(20).setFetchRows(5000);
int timeoutSecs = stmt.options().getQueryTimeoutSeconds();
}
public void stmtsParams()
{
pstmt.param(1).setString("John");
pstmt.param(2).setString("Doe");
pstmt.param(2).setInt(42);
pstmt.param(1, "John").param(2, "Doe");
pstmt.params("John", "Doe");
pstmt.init().namedParams().sql("INSERT INTO Users VALUES (DEFAULT, :lastname, :firstname, :lastname + ', ' + :firstname)");
pstmt.param("lastname").setString("John");
pstmt.param("firstname").setString("Doe");
cstmt.init("{call GetUserName(?,?,?)}");
cstmt.param(1).setLong(831L);
cstmt.param(2).out(java.sql.Types.VARCHAR);
cstmt.param(3).out(java.sql.Types.VARCHAR);
cstmt.execute(); // explained in next chapters
String lastName = cstmt.param(2).getString();
String firstName = cstmt.param(3).getString();
pstmt.clearParams();
cstmt.clearParams();
}
public List<City> queryRunningEx1() throws Exception
{
String sql = "SELECT * FROM Cities ORDER BY name";
if (jdbc)
{
// JDBC:
Statement stmt = con.createStatement();
ResultSet result = stmt.executeQuery(sql);
List<City> cities = new ArrayList<>();
while (result.next()) {
City city = City.read(result);
cities.add(city);
}
return cities;
}
else {
// JDBX
try (StaticStmt stmt = new StaticStmt(con)) {
return stmt.query(sql).rows().read(City::read);
}
}
}
public String queryRunningEx2() throws Exception
{
String sql = "SELECT name FROM Cities WHERE code = ?";
if (jdbc)
{
// JDBC:
try (PreparedStatement pstmt = con.prepareStatement(sql)) {
pstmt.setString(1, "MUC");
ResultSet result = pstmt.executeQuery();
String name = null;
if (result.next())
name = result.getString(1);
return name;
}
}
else
{
// JDBX
try (PrepStmt pstmt = new PrepStmt(con)) {
return pstmt.init(sql).params("MUC").query().row().col().getString();
}
}
}
public void queryResultClass() throws Exception
{
qr = stmt.query(sql);
qr = pstmt.init(sql).params("a", "b").query();
}
public void querySingleRow() throws Exception
{
qr.row().col(); // returns a builder to retrieve a value of the first column
qr.row().col().getString(); // returns the value of the first column as String
qr.row().col(3); // returns a builder to retrieve a value of the third column
qr.row().col(3).getInteger(); // returns the value of the third column as Integer
qr.row().col("sort"); // returns a builder to retrieve a value of the "sort" column
qr.row().col("sort").getInt(); // returns the value of "sort" column as int
qr.row().cols(); // returns the value of all columns, as Object[]
qr.row().cols(1,3,7); // returns the value of columns 1,3,7, as Object[]
qr.row().map(); // returns a Map<String,Object> mapping column name to value
qr.row().read(City::read); // returns the value returned by the reader function
qr.row().required().col().getString();
qr.row().unique().col().getString();
}
public void queryAllRows() throws Exception
{
qr.rows();
qr.rows().col(); // return values of first column
qr.rows().col().getString(); // return values of first column as List<String>
qr.rows().col(3); // return values of column by column number
qr.rows().col(3).getDouble(); // return values of third column, as List<Double>
qr.rows().col("sort"); // return values of column by name
qr.rows().col("sort").getInteger(); // return values of "sort" column, as List<Integer>
qr.rows().cols(); // return values of all columns, as List<Object[]>
qr.rows().cols(1,3,7); // return values of columns 1,3,7, as List<Object[]>
qr.rows().map(); // return a List<Map<String,Object>>
qr.rows().max(5);
qr.skip(3).rows();
}
public void queryCursor() throws Exception
{
qc.col(); // first column
qc.col().getString(); // first column as String
qc.col(3); // column by number
qc.col(3).getDouble(); // third column as double
qc.col("sort"); // column by name
qc.col("sort").getInteger(); // "sort" column, as Integer
qc.cols(1,3,7); // columns 1,3,7, as Object[]
qc.map(); // returns a Map<String,Object>
qr.row().read(City::read);
qr.rows().read(City::read);
}
public static class City1
{
public static City1 read(QueryCursor qc) {
City1 city = new City1();
city.setCode(qc.col(1).getString());
city.setName(qc.col(2).getString());
return city;
}
public void setCode(String value)
{
}
public void setName(String value)
{
}
}
public void queryCursorNav() throws Exception
{
while (qc.next()) {
// read the result row
}
stmt.options().setResultType(ResultType.SCROLL_SENSITIVE).setResultConcurrency(Concurrency.CONCUR_UPDATABLE);
// qr is obtained from stmt
qc = stmt.query("sql").cursor();
qc.position().isBeforeFirst();
// also: .isAfterLast(), .isLast()
qc.move().first() ;
qc.move().absolute(5);
qc.move().relative(2);
// also: .relative(), .afterLast(), .beforeFirst(), .first(), .etc.
qc.row().update();
qc.row().refresh();
// also: .insert(), .isUpdated(), .delete(), .isDeleted(), etc.
}
public void queryCursorObtain() throws Exception
{
stmt.options().setResultConcurrency(Concurrency.CONCUR_UPDATABLE);
qc.col("status").setString("ok");
qc.row().update();
qc.row().refresh();
}
public void update() throws Exception
{
stmt.update(sql);
pstmt.update();
}
public void updateRun() throws Exception
{
long count;
count = stmt.update(null).count();
count = pstmt.update().count();
stmt.update(null).requireCount(1);
}
public void updateClass() throws Exception
{
u = stmt.createUpdate(sql);
u = pstmt.init(sql).createUpdate();
}
public void updateReturnCols() throws Exception
{
UpdateResult<Integer> result = stmt.createUpdate("INSERT INTO Users VALUES (DEFAULT, 'John', 'Doe'")
.returnAutoKeyCols() // step 1: tell the Update to return auto-generated key columns
.runGetCol(Integer.class); // step 2: run the update, extract the new inserted primary key value as Integer
long inserted = result.count();
Integer newId = result.value();
newId = stmt.createUpdate("INSERT INTO ...")
.returnAutoKeyCols()
.runGetCol(Integer.class)
.requireCount(1) // could throw an Exception
.requireValue(); // could throw an Exception
stmt.createUpdate(sql).returnCols(1, 5, 7);
stmt.createUpdate(sql).returnCols("id", "timestamp");
newId = pstmt.init().returnAutoKeyCols().sql("INSERT INTO Users VALUES (DEFAULT, ?, ?)")
.params("John", "Doe")
.createUpdate()
.runGetCol(Integer.class)
.requireValue();
}
public void updateAutoGen() throws Exception
{
long updated = stmt.createUpdate("UPDATE MegaTable SET timestamp = NOW()")
.enableLargeCount() // configures the Update
.run() // runs the Update and returns the UpdateResult
.count(); // returns update count
}
public void execute() throws Exception
{
ExecuteResult result = stmt.execute(sql);
while (result.next()) {
if (result.isQueryResult())
result.getQueryResult();
else
result.getUpdateResult();
}
}
public void batches() throws Exception
{
stmt.batch()
.add("INSERT INTO BatchDemo (name) VALUES ('A')")
.add("INSERT INTO BatchDemo (name) VALUES ('B'), ('C'), ('D')")
.run() // returns a BatchResult
.requireSize(2)
.requireCount(0, 1)
.requireCount(1, 3);
pstmt.init("INSERT INTO BatchDemo (name) VALUES (?)");
pstmt.params("A").batch().add();
pstmt.params("B").batch().add()
.run() // returns a BatchResult
.requireSize(2)
.requireCount(0, 1)
.requireCount(1, 1);
}
public void runningSingleCommands() throws Exception
{
int cityCount = Jdbx.query(con, "SELECT COUNT(*) FROM Cities").row().col().getInt();
Jdbx.update(con, "INSERT INTO Status (flag) VALUES (?)", "F").requireCount(1);
}
public void multiStmts() throws Exception
{
try (MultiStmt mstmt = new MultiStmt(con))
{
StaticStmt s1 = mstmt.newStaticStmt();
PrepStmt s2 = mstmt.newPrepStmt();
}
}
private String sql;
private StaticStmt stmt;
private PrepStmt pstmt;
private CallStmt cstmt;
private Connection con;
private DataSource ds;
private QueryResult qr;
private QueryCursor qc;
private Update u;
private boolean jdbc;
}
| |
package org.meluskyc.codebriefcase.activity;
import android.app.LoaderManager;
import android.app.SearchManager;
import android.content.Context;
import android.content.CursorLoader;
import android.content.Intent;
import android.content.Loader;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.NavigationView;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SearchView;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SubMenu;
import android.view.View;
import org.meluskyc.codebriefcase.R;
import org.meluskyc.codebriefcase.adapter.ItemsCursorAdapter;
import org.meluskyc.codebriefcase.database.CodeBriefcaseContract.Item;
import org.meluskyc.codebriefcase.database.CodeBriefcaseContract.ItemSearch;
import org.meluskyc.codebriefcase.database.CodeBriefcaseContract.Qualified;
import org.meluskyc.codebriefcase.database.CodeBriefcaseContract.Tag;
import org.meluskyc.codebriefcase.utils.AppUtils;
import java.util.HashMap;
public class MainActivity extends BaseActivity implements LoaderManager.LoaderCallbacks<Cursor>,
        SearchView.OnQueryTextListener {

    /**
     * Open {@link AddEditActivity} with an item's ID
     */
    public static final String EXTRA_ITEM_ID = "itemId";

    /**
     * {@code SearchView} search text
     */
    public static final String EXTRA_SEARCH_QUERY = "searchQuery";
    private String searchQuery;

    /**
     * Loader IDs
     */
    private final int ITEMS_LOADER = 1;
    private final int TAGS_LOADER = 2;

    /**
     * No filter on {@code ListView}
     */
    private final int FILTER_NONE = -1;

    /**
     * {@code RecyclerView} filter starred
     */
    private final int FILTER_STARRED = -2;

    /**
     * {@code RecyclerView} filter. Initialize to none.
     */
    private long filter = FILTER_NONE;

    /**
     * Adapter for the {@code RecyclerView}
     */
    private ItemsCursorAdapter itemsAdapter;

    /**
     * store the tag ID for each tag filter
     */
    private HashMap<String, Long> filterIdsMap;

    private Toolbar toolbar;
    private DrawerLayout drawerLayout;
    private NavigationView navigationView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
        super.onCreate(savedInstanceState);
        filterIdsMap = new HashMap<>();
        searchQuery = "";
        setContentView(R.layout.activity_main);
        setupFab();
        setupToolbar();
        setupDrawer();
        setupListView();
        LoaderManager loaderManager = getLoaderManager();
        loaderManager.initLoader(ITEMS_LOADER, null, this);
        loaderManager.initLoader(TAGS_LOADER, null, this);
        if (savedInstanceState != null) {
            // NOTE(review): the query is restored after the loaders are
            // initialized, so the first load uses an empty query; onResume
            // restarts the loaders, which re-applies the restored query.
            searchQuery = savedInstanceState.getString(EXTRA_SEARCH_QUERY);
        }
    }

    /**
     * Sets up the floating action button.
     */
    private void setupFab() {
        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.main_fab);
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                startActivity(new Intent(MainActivity.this, AddEditActivity.class));
            }
        });
    }

    /**
     * Sets up the toolbar.
     */
    private void setupToolbar() {
        toolbar = (Toolbar) findViewById(R.id.main_toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    }

    /**
     * Sets up the list view.
     */
    private void setupListView() {
        RecyclerView recyclerview = (RecyclerView) findViewById(R.id.main_list_items);
        recyclerview.setLayoutManager(new LinearLayoutManager(this));
        itemsAdapter = new ItemsCursorAdapter(this);
        itemsAdapter.setHasStableIds(true);
        //recyclerview.setHasFixedSize(true);
        recyclerview.setAdapter(itemsAdapter);
    }

    /**
     * Sets up the navigation drawer. The navigation drawer contains static links
     * to activities and a dynamically generated list of tag filters.
     */
    private void setupDrawer() {
        toolbar.setNavigationIcon(R.drawable.ic_drawer);
        toolbar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                drawerLayout.openDrawer(GravityCompat.START);
            }
        });
        navigationView = (NavigationView) findViewById(R.id.main_nav);
        drawerLayout = (DrawerLayout) findViewById(R.id.main_drawer_layout);
        navigationView.setNavigationItemSelectedListener(
                new NavigationView.OnNavigationItemSelectedListener() {
                    @Override
                    public boolean onNavigationItemSelected(MenuItem menuItem) {
                        menuItem.setChecked(true);
                        switch (menuItem.getItemId()) {
                            case R.id.nav_web:
                                menuItem.setChecked(false);
                                startActivity(new Intent(MainActivity.this, WebActivity.class));
                                break;
                            case R.id.nav_settings:
                                menuItem.setChecked(false);
                                startActivity(new Intent(MainActivity.this,
                                        SettingsActivity.class));
                                break;
                            case R.id.nav_about:
                                menuItem.setChecked(false);
                                startActivity(new Intent(MainActivity.this,
                                        AboutActivity.class));
                                break;
                            default:
                                // Tag-filter entries are generated dynamically and keyed
                                // by the item's title. FIX: guard against a missing map
                                // entry, which previously caused a NullPointerException
                                // when the null Long was auto-unboxed for comparison.
                                Long tagId = filterIdsMap.get(menuItem.toString());
                                if (tagId == null) {
                                    menuItem.setChecked(false);
                                    break;
                                }
                                if (filter == tagId.longValue()) {
                                    // Tapping the active filter clears it.
                                    filter = FILTER_NONE;
                                    menuItem.setChecked(false);
                                } else {
                                    filter = tagId.longValue();
                                }
                                getLoaderManager().restartLoader(ITEMS_LOADER, null,
                                        MainActivity.this);
                                break;
                        }
                        drawerLayout.closeDrawers();
                        return true;
                    }
                });
    }

    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle args) {
        switch (id) {
            case ITEMS_LOADER:
                if (TextUtils.isEmpty(searchQuery)) {
                    final Uri uri;
                    if (filter == FILTER_STARRED) {
                        uri = Item.buildStarredUri();
                    } else {
                        uri = Item.buildTagDirUri(filter);
                    }
                    return new CursorLoader(this, uri, new String[]{Qualified.ITEM_ID,
                            Item.ITEM_TAG_PRIMARY, Item.ITEM_DESCRIPTION,
                            Item.ITEM_DATE_UPDATED, Item.ITEM_TAG_SECONDARY,
                            Tag.TAG_COLOR, Item.ITEM_STARRED}, null, null,
                            Item.ITEM_DATE_UPDATED + " DESC");
                } else {
                    return new CursorLoader(this, Item.buildSearchUri(searchQuery),
                            new String[]{ItemSearch.ITEM_SEARCH_DOCID + " AS _id",
                                    Item.ITEM_TAG_PRIMARY, Item.ITEM_DESCRIPTION,
                                    Item.ITEM_DATE_UPDATED, Item.ITEM_TAG_SECONDARY,
                                    Tag.TAG_COLOR, Item.ITEM_STARRED}, null, null,
                            Item.ITEM_DATE_UPDATED + " DESC");
                }
            case TAGS_LOADER:
                return new CursorLoader(this, Item.buildTagDirUri(),
                        new String[]{AppUtils.formatQueryDistinctParameter(Qualified.TAG_ID),
                                Item.ITEM_TAG_PRIMARY}, null, null,
                        Item.ITEM_TAG_PRIMARY + " COLLATE NOCASE ASC");
            default:
                return null;
        }
    }

    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
        switch (loader.getId()) {
            case ITEMS_LOADER:
                itemsAdapter.setCursor(cursor);
                break;
            case TAGS_LOADER:
                // Rebuild the drawer's filter submenu from the tag cursor.
                filterIdsMap.clear();
                Menu menu = navigationView.getMenu();
                menu.removeGroup(R.id.nav_filters);
                SubMenu submenu = menu.addSubMenu(R.id.nav_filters, Menu.NONE, Menu.NONE,
                        getString(R.string.Filter));
                submenu
                        .add(getString(R.string.starred))
                        .setIcon(R.drawable.ic_drawer_star)
                        .setCheckable(true);
                filterIdsMap.put(getString(R.string.starred), (long) FILTER_STARRED);
                while (cursor.moveToNext()) {
                    MenuItem newItem = submenu
                            .add(cursor.getString(cursor.getColumnIndex(Item.ITEM_TAG_PRIMARY)))
                            .setCheckable(true);
                    filterIdsMap.put(newItem.toString(),
                            cursor.getLong(cursor.getColumnIndex(Tag.TAG_ID)));
                }
                submenu.setGroupCheckable(R.id.nav_filters, true, true);
                break;
            default:
                break;
        }
    }

    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
        switch (loader.getId()) {
            case ITEMS_LOADER:
                itemsAdapter.setCursor(null);
                break;
            default:
                break;
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        LoaderManager loaderManager = getLoaderManager();
        loaderManager.restartLoader(ITEMS_LOADER, null, this);
        loaderManager.restartLoader(TAGS_LOADER, null, this);
    }

    @Override
    protected void onSaveInstanceState(Bundle bundle) {
        super.onSaveInstanceState(bundle);
        bundle.putString(EXTRA_SEARCH_QUERY, searchQuery);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_main, menu);
        SearchManager searchManager =
                (SearchManager) getSystemService(Context.SEARCH_SERVICE);
        SearchView searchView =
                (SearchView) menu.findItem(R.id.main_menu_search).getActionView();
        searchView.setSearchableInfo(
                searchManager.getSearchableInfo(getComponentName()));
        searchView.setOnQueryTextListener(this);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        return super.onOptionsItemSelected(item);
    }

    @Override
    public boolean onQueryTextSubmit(String query) {
        return false;
    }

    @Override
    public boolean onQueryTextChange(String newText) {
        searchQuery = TextUtils.isEmpty(newText) ? "" : newText;
        getLoaderManager().restartLoader(ITEMS_LOADER, null, this);
        itemsAdapter.notifyDataSetChanged();
        return true;
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesisfirehose.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/firehose-2015-08-04/CreateDeliveryStream" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateDeliveryStreamRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The name of the delivery stream. This name must be unique per AWS account in the same AWS Region. If the delivery
* streams are in different accounts or different Regions, you can have multiple delivery streams with the same
* name.
* </p>
*/
private String deliveryStreamName;
/**
* <p>
* The delivery stream type. This parameter can be one of the following values:
* </p>
* <ul>
* <li>
* <p>
* <code>DirectPut</code>: Provider applications access the delivery stream directly.
* </p>
* </li>
* <li>
* <p>
* <code>KinesisStreamAsSource</code>: The delivery stream uses a Kinesis data stream as a source.
* </p>
* </li>
* </ul>
*/
private String deliveryStreamType;
/**
 * <p>
 * When a Kinesis data stream is used as the source for the delivery stream, a
 * <a>KinesisStreamSourceConfiguration</a> containing the Kinesis data stream Amazon Resource Name (ARN) and the
 * role ARN for the source stream.
 * </p>
 */
private KinesisStreamSourceConfiguration kinesisStreamSourceConfiguration;
/**
 * <p>
 * Used to specify the type and Amazon Resource Name (ARN) of the KMS key needed for Server-Side Encryption (SSE).
 * </p>
 */
private DeliveryStreamEncryptionConfigurationInput deliveryStreamEncryptionConfigurationInput;
/**
 * <p>
 * [Deprecated] The destination in Amazon S3. You can specify only one destination.
 * </p>
 */
@Deprecated
private S3DestinationConfiguration s3DestinationConfiguration;
/**
 * <p>
 * The destination in Amazon S3. You can specify only one destination.
 * </p>
 */
private ExtendedS3DestinationConfiguration extendedS3DestinationConfiguration;
/**
 * <p>
 * The destination in Amazon Redshift. You can specify only one destination.
 * </p>
 */
private RedshiftDestinationConfiguration redshiftDestinationConfiguration;
/**
 * <p>
 * The destination in Amazon ES. You can specify only one destination.
 * </p>
 */
private ElasticsearchDestinationConfiguration elasticsearchDestinationConfiguration;
/**
 * Destination configuration for Amazon OpenSearch Service. The service model
 * ships no documentation for this member; judging by the sibling destination
 * fields it presumably also allows only one destination — confirm against the
 * service API reference.
 */
private AmazonopensearchserviceDestinationConfiguration amazonopensearchserviceDestinationConfiguration;
/**
 * <p>
 * The destination in Splunk. You can specify only one destination.
 * </p>
 */
private SplunkDestinationConfiguration splunkDestinationConfiguration;
/**
 * <p>
 * Enables configuring Kinesis Firehose to deliver data to any HTTP endpoint destination. You can specify only one
 * destination.
 * </p>
 */
private HttpEndpointDestinationConfiguration httpEndpointDestinationConfiguration;
/**
 * <p>
 * A set of tags to assign to the delivery stream. A tag is a key-value pair that you can define and assign to AWS
 * resources. Tags are metadata. For example, you can add friendly names and descriptions or other types of
 * information that can help you distinguish the delivery stream. For more information about tags, see <a
 * href="https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html">Using Cost Allocation
 * Tags</a> in the AWS Billing and Cost Management User Guide.
 * </p>
 * <p>
 * You can specify up to 50 tags when creating a delivery stream.
 * </p>
 */
private java.util.List<Tag> tags;
/**
 * Sets the name of the delivery stream. The name must be unique per AWS
 * account within a Region; streams in different accounts or Regions may
 * share the same name.
 *
 * @param deliveryStreamName
 *        the delivery stream name
 */
public void setDeliveryStreamName(String deliveryStreamName) {
    this.deliveryStreamName = deliveryStreamName;
}
/**
 * Returns the name of the delivery stream (unique per AWS account within a
 * Region).
 *
 * @return the delivery stream name, or {@code null} if unset
 */
public String getDeliveryStreamName() {
    return deliveryStreamName;
}
/**
 * Fluent variant of {@link #setDeliveryStreamName(String)}.
 *
 * @param deliveryStreamName
 *        the delivery stream name (unique per AWS account within a Region)
 * @return this request, for call chaining
 */
public CreateDeliveryStreamRequest withDeliveryStreamName(String deliveryStreamName) {
    this.deliveryStreamName = deliveryStreamName;
    return this;
}
/**
 * Sets the delivery stream type. Valid values are
 * {@code DirectPut} (producer applications write to the stream directly) and
 * {@code KinesisStreamAsSource} (the stream reads from a Kinesis data
 * stream).
 *
 * @param deliveryStreamType
 *        the delivery stream type
 * @see DeliveryStreamType
 */
public void setDeliveryStreamType(String deliveryStreamType) {
    this.deliveryStreamType = deliveryStreamType;
}
/**
 * Returns the delivery stream type: {@code DirectPut} (producers write
 * directly) or {@code KinesisStreamAsSource} (sourced from a Kinesis data
 * stream).
 *
 * @return the delivery stream type, or {@code null} if unset
 * @see DeliveryStreamType
 */
public String getDeliveryStreamType() {
    return deliveryStreamType;
}
/**
 * Fluent variant of {@link #setDeliveryStreamType(String)}. Valid values are
 * {@code DirectPut} and {@code KinesisStreamAsSource}.
 *
 * @param deliveryStreamType
 *        the delivery stream type
 * @return this request, for call chaining
 * @see DeliveryStreamType
 */
public CreateDeliveryStreamRequest withDeliveryStreamType(String deliveryStreamType) {
    this.deliveryStreamType = deliveryStreamType;
    return this;
}
/**
 * Fluent setter taking the {@link DeliveryStreamType} enum. Valid values are
 * {@code DirectPut} (producer applications write to the stream directly) and
 * {@code KinesisStreamAsSource} (the stream reads from a Kinesis data
 * stream).
 *
 * @param deliveryStreamType
 *        the delivery stream type; must not be {@code null}
 * @return this request, for call chaining
 * @see DeliveryStreamType
 */
public CreateDeliveryStreamRequest withDeliveryStreamType(DeliveryStreamType deliveryStreamType) {
    // Route through the String setter for consistency with the other fluent
    // setters in this class (previously assigned the field directly).
    setDeliveryStreamType(deliveryStreamType.toString());
    return this;
}
/**
 * Sets the Kinesis stream source: a <a>KinesisStreamSourceConfiguration</a>
 * with the source stream's ARN and role ARN, used when a Kinesis data stream
 * feeds this delivery stream.
 *
 * @param kinesisStreamSourceConfiguration
 *        the Kinesis stream source configuration
 */
public void setKinesisStreamSourceConfiguration(KinesisStreamSourceConfiguration kinesisStreamSourceConfiguration) {
    this.kinesisStreamSourceConfiguration = kinesisStreamSourceConfiguration;
}
/**
 * Returns the Kinesis stream source configuration (source stream ARN and
 * role ARN) used when a Kinesis data stream feeds this delivery stream.
 *
 * @return the Kinesis stream source configuration, or {@code null} if unset
 */
public KinesisStreamSourceConfiguration getKinesisStreamSourceConfiguration() {
    return kinesisStreamSourceConfiguration;
}
/**
 * Fluent variant of {@link #setKinesisStreamSourceConfiguration}.
 *
 * @param kinesisStreamSourceConfiguration
 *        the Kinesis stream source configuration
 * @return this request, for call chaining
 */
public CreateDeliveryStreamRequest withKinesisStreamSourceConfiguration(KinesisStreamSourceConfiguration kinesisStreamSourceConfiguration) {
    this.kinesisStreamSourceConfiguration = kinesisStreamSourceConfiguration;
    return this;
}
/**
 * Sets the type and ARN of the KMS key used for Server-Side Encryption
 * (SSE).
 *
 * @param deliveryStreamEncryptionConfigurationInput
 *        the SSE key configuration
 */
public void setDeliveryStreamEncryptionConfigurationInput(DeliveryStreamEncryptionConfigurationInput deliveryStreamEncryptionConfigurationInput) {
    this.deliveryStreamEncryptionConfigurationInput = deliveryStreamEncryptionConfigurationInput;
}
/**
 * Returns the type and ARN of the KMS key used for Server-Side Encryption
 * (SSE).
 *
 * @return the SSE key configuration, or {@code null} if unset
 */
public DeliveryStreamEncryptionConfigurationInput getDeliveryStreamEncryptionConfigurationInput() {
    return deliveryStreamEncryptionConfigurationInput;
}
/**
 * Fluent variant of {@link #setDeliveryStreamEncryptionConfigurationInput}.
 *
 * @param deliveryStreamEncryptionConfigurationInput
 *        the SSE key configuration
 * @return this request, for call chaining
 */
public CreateDeliveryStreamRequest withDeliveryStreamEncryptionConfigurationInput(
        DeliveryStreamEncryptionConfigurationInput deliveryStreamEncryptionConfigurationInput) {
    this.deliveryStreamEncryptionConfigurationInput = deliveryStreamEncryptionConfigurationInput;
    return this;
}
/**
 * Sets the Amazon S3 destination. Only one destination may be specified.
 *
 * @param s3DestinationConfiguration
 *        the S3 destination configuration
 * @deprecated superseded by the extended S3 destination configuration
 */
@Deprecated
public void setS3DestinationConfiguration(S3DestinationConfiguration s3DestinationConfiguration) {
    this.s3DestinationConfiguration = s3DestinationConfiguration;
}
/**
 * Returns the (deprecated) Amazon S3 destination configuration.
 *
 * @return the S3 destination configuration, or {@code null} if unset
 * @deprecated superseded by the extended S3 destination configuration
 */
@Deprecated
public S3DestinationConfiguration getS3DestinationConfiguration() {
    return s3DestinationConfiguration;
}
/**
 * Fluent variant of {@link #setS3DestinationConfiguration}.
 *
 * @param s3DestinationConfiguration
 *        the S3 destination configuration
 * @return this request, for call chaining
 * @deprecated superseded by the extended S3 destination configuration
 */
@Deprecated
public CreateDeliveryStreamRequest withS3DestinationConfiguration(S3DestinationConfiguration s3DestinationConfiguration) {
    this.s3DestinationConfiguration = s3DestinationConfiguration;
    return this;
}
/**
 * Sets the Amazon S3 destination. Only one destination may be specified.
 *
 * @param extendedS3DestinationConfiguration
 *        the extended S3 destination configuration
 */
public void setExtendedS3DestinationConfiguration(ExtendedS3DestinationConfiguration extendedS3DestinationConfiguration) {
    this.extendedS3DestinationConfiguration = extendedS3DestinationConfiguration;
}
/**
 * Returns the Amazon S3 destination configuration.
 *
 * @return the extended S3 destination configuration, or {@code null} if unset
 */
public ExtendedS3DestinationConfiguration getExtendedS3DestinationConfiguration() {
    return extendedS3DestinationConfiguration;
}
/**
 * Fluent variant of {@link #setExtendedS3DestinationConfiguration}.
 *
 * @param extendedS3DestinationConfiguration
 *        the extended S3 destination configuration
 * @return this request, for call chaining
 */
public CreateDeliveryStreamRequest withExtendedS3DestinationConfiguration(ExtendedS3DestinationConfiguration extendedS3DestinationConfiguration) {
    this.extendedS3DestinationConfiguration = extendedS3DestinationConfiguration;
    return this;
}
/**
 * Sets the Amazon Redshift destination. Only one destination may be
 * specified.
 *
 * @param redshiftDestinationConfiguration
 *        the Redshift destination configuration
 */
public void setRedshiftDestinationConfiguration(RedshiftDestinationConfiguration redshiftDestinationConfiguration) {
    this.redshiftDestinationConfiguration = redshiftDestinationConfiguration;
}
/**
 * Returns the Amazon Redshift destination configuration.
 *
 * @return the Redshift destination configuration, or {@code null} if unset
 */
public RedshiftDestinationConfiguration getRedshiftDestinationConfiguration() {
    return redshiftDestinationConfiguration;
}
/**
 * Fluent variant of {@link #setRedshiftDestinationConfiguration}.
 *
 * @param redshiftDestinationConfiguration
 *        the Redshift destination configuration
 * @return this request, for call chaining
 */
public CreateDeliveryStreamRequest withRedshiftDestinationConfiguration(RedshiftDestinationConfiguration redshiftDestinationConfiguration) {
    this.redshiftDestinationConfiguration = redshiftDestinationConfiguration;
    return this;
}
/**
 * Sets the Amazon ES destination. Only one destination may be specified.
 *
 * @param elasticsearchDestinationConfiguration
 *        the Elasticsearch destination configuration
 */
public void setElasticsearchDestinationConfiguration(ElasticsearchDestinationConfiguration elasticsearchDestinationConfiguration) {
    this.elasticsearchDestinationConfiguration = elasticsearchDestinationConfiguration;
}
/**
 * Returns the Amazon ES destination configuration.
 *
 * @return the Elasticsearch destination configuration, or {@code null} if unset
 */
public ElasticsearchDestinationConfiguration getElasticsearchDestinationConfiguration() {
    return elasticsearchDestinationConfiguration;
}
/**
 * Fluent variant of {@link #setElasticsearchDestinationConfiguration}.
 *
 * @param elasticsearchDestinationConfiguration
 *        the Elasticsearch destination configuration
 * @return this request, for call chaining
 */
public CreateDeliveryStreamRequest withElasticsearchDestinationConfiguration(ElasticsearchDestinationConfiguration elasticsearchDestinationConfiguration) {
    this.elasticsearchDestinationConfiguration = elasticsearchDestinationConfiguration;
    return this;
}
/**
 * Sets the Amazon OpenSearch Service destination configuration. The service
 * model provides no documentation for this member; presumably only one
 * destination may be specified, as with the sibling destination setters —
 * confirm against the service API reference.
 *
 * @param amazonopensearchserviceDestinationConfiguration
 *        the OpenSearch Service destination configuration
 */
public void setAmazonopensearchserviceDestinationConfiguration(
        AmazonopensearchserviceDestinationConfiguration amazonopensearchserviceDestinationConfiguration) {
    this.amazonopensearchserviceDestinationConfiguration = amazonopensearchserviceDestinationConfiguration;
}
/**
 * Returns the Amazon OpenSearch Service destination configuration.
 *
 * @return the OpenSearch Service destination configuration, or {@code null} if unset
 */
public AmazonopensearchserviceDestinationConfiguration getAmazonopensearchserviceDestinationConfiguration() {
    return this.amazonopensearchserviceDestinationConfiguration;
}
/**
 * Fluent variant of {@link #setAmazonopensearchserviceDestinationConfiguration}.
 *
 * @param amazonopensearchserviceDestinationConfiguration
 *        the OpenSearch Service destination configuration
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public CreateDeliveryStreamRequest withAmazonopensearchserviceDestinationConfiguration(
        AmazonopensearchserviceDestinationConfiguration amazonopensearchserviceDestinationConfiguration) {
    setAmazonopensearchserviceDestinationConfiguration(amazonopensearchserviceDestinationConfiguration);
    return this;
}
/**
 * Sets the Splunk destination. Only one destination may be specified.
 *
 * @param splunkDestinationConfiguration
 *        the Splunk destination configuration
 */
public void setSplunkDestinationConfiguration(SplunkDestinationConfiguration splunkDestinationConfiguration) {
    this.splunkDestinationConfiguration = splunkDestinationConfiguration;
}
/**
 * Returns the Splunk destination configuration.
 *
 * @return the Splunk destination configuration, or {@code null} if unset
 */
public SplunkDestinationConfiguration getSplunkDestinationConfiguration() {
    return splunkDestinationConfiguration;
}
/**
 * Fluent variant of {@link #setSplunkDestinationConfiguration}.
 *
 * @param splunkDestinationConfiguration
 *        the Splunk destination configuration
 * @return this request, for call chaining
 */
public CreateDeliveryStreamRequest withSplunkDestinationConfiguration(SplunkDestinationConfiguration splunkDestinationConfiguration) {
    this.splunkDestinationConfiguration = splunkDestinationConfiguration;
    return this;
}
/**
 * Sets the HTTP endpoint destination, allowing Kinesis Firehose to deliver
 * data to any HTTP endpoint. Only one destination may be specified.
 *
 * @param httpEndpointDestinationConfiguration
 *        the HTTP endpoint destination configuration
 */
public void setHttpEndpointDestinationConfiguration(HttpEndpointDestinationConfiguration httpEndpointDestinationConfiguration) {
    this.httpEndpointDestinationConfiguration = httpEndpointDestinationConfiguration;
}
/**
 * Returns the HTTP endpoint destination configuration.
 *
 * @return the HTTP endpoint destination configuration, or {@code null} if unset
 */
public HttpEndpointDestinationConfiguration getHttpEndpointDestinationConfiguration() {
    return httpEndpointDestinationConfiguration;
}
/**
 * Fluent variant of {@link #setHttpEndpointDestinationConfiguration}.
 *
 * @param httpEndpointDestinationConfiguration
 *        the HTTP endpoint destination configuration
 * @return this request, for call chaining
 */
public CreateDeliveryStreamRequest withHttpEndpointDestinationConfiguration(HttpEndpointDestinationConfiguration httpEndpointDestinationConfiguration) {
    this.httpEndpointDestinationConfiguration = httpEndpointDestinationConfiguration;
    return this;
}
/**
 * Returns the tags to assign to the delivery stream (at most 50). See <a
 * href="https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html">Using
 * Cost Allocation Tags</a> in the AWS Billing and Cost Management User
 * Guide.
 *
 * @return the internal tag list (not a copy), or {@code null} if none were set
 */
public java.util.List<Tag> getTags() {
    return this.tags;
}
/**
 * Replaces the delivery stream's tags with a defensive copy of the given
 * collection (at most 50 tags). Passing {@code null} clears the tags.
 *
 * @param tags
 *        the tags to assign, or {@code null} to clear
 */
public void setTags(java.util.Collection<Tag> tags) {
    this.tags = (tags == null) ? null : new java.util.ArrayList<Tag>(tags);
}
/**
 * Appends the given tags to any already set (at most 50 total). Use
 * {@link #setTags(java.util.Collection)} or
 * {@link #withTags(java.util.Collection)} to replace the existing values
 * instead.
 *
 * @param tags
 *        the tags to append
 * @return this request, for call chaining
 */
public CreateDeliveryStreamRequest withTags(Tag... tags) {
    if (this.tags == null) {
        this.tags = new java.util.ArrayList<Tag>(tags.length);
    }
    java.util.Collections.addAll(this.tags, tags);
    return this;
}
/**
 * Fluent variant of {@link #setTags(java.util.Collection)}: replaces any
 * existing tags with a copy of the given collection (at most 50 tags).
 *
 * @param tags
 *        the tags to assign, or {@code null} to clear
 * @return this request, for call chaining
 */
public CreateDeliveryStreamRequest withTags(java.util.Collection<Tag> tags) {
    this.setTags(tags);
    return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Appends every non-null member as "Name: value,". Note: if Tags is null
    // but an earlier member is set, the output keeps a trailing comma before
    // the closing brace — a codegen quirk preserved for output compatibility.
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    if (getDeliveryStreamName() != null)
        sb.append("DeliveryStreamName: ").append(getDeliveryStreamName()).append(",");
    if (getDeliveryStreamType() != null)
        sb.append("DeliveryStreamType: ").append(getDeliveryStreamType()).append(",");
    if (getKinesisStreamSourceConfiguration() != null)
        sb.append("KinesisStreamSourceConfiguration: ").append(getKinesisStreamSourceConfiguration()).append(",");
    if (getDeliveryStreamEncryptionConfigurationInput() != null)
        sb.append("DeliveryStreamEncryptionConfigurationInput: ").append(getDeliveryStreamEncryptionConfigurationInput()).append(",");
    if (getS3DestinationConfiguration() != null)
        sb.append("S3DestinationConfiguration: ").append(getS3DestinationConfiguration()).append(",");
    if (getExtendedS3DestinationConfiguration() != null)
        sb.append("ExtendedS3DestinationConfiguration: ").append(getExtendedS3DestinationConfiguration()).append(",");
    if (getRedshiftDestinationConfiguration() != null)
        sb.append("RedshiftDestinationConfiguration: ").append(getRedshiftDestinationConfiguration()).append(",");
    if (getElasticsearchDestinationConfiguration() != null)
        sb.append("ElasticsearchDestinationConfiguration: ").append(getElasticsearchDestinationConfiguration()).append(",");
    if (getAmazonopensearchserviceDestinationConfiguration() != null)
        sb.append("AmazonopensearchserviceDestinationConfiguration: ").append(getAmazonopensearchserviceDestinationConfiguration()).append(",");
    if (getSplunkDestinationConfiguration() != null)
        sb.append("SplunkDestinationConfiguration: ").append(getSplunkDestinationConfiguration()).append(",");
    if (getHttpEndpointDestinationConfiguration() != null)
        sb.append("HttpEndpointDestinationConfiguration: ").append(getHttpEndpointDestinationConfiguration()).append(",");
    if (getTags() != null)
        sb.append("Tags: ").append(getTags());
    sb.append("}");
    return sb.toString();
}
// Member-wise equality over all request fields, in declaration order.
// Codegen idioms used throughout:
//   "a == null ^ b == null"   -> true when exactly one side is null (unequal);
//   "x.equals(y) == false"    -> null-guarded inequality (the preceding null
//                                checks guarantee x is non-null here).
// NOTE(review): the instanceof test means subclasses compare equal to base
// instances with the same state — standard for this generated hierarchy.
@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (obj == null)
        return false;
    if (obj instanceof CreateDeliveryStreamRequest == false)
        return false;
    CreateDeliveryStreamRequest other = (CreateDeliveryStreamRequest) obj;
    if (other.getDeliveryStreamName() == null ^ this.getDeliveryStreamName() == null)
        return false;
    if (other.getDeliveryStreamName() != null && other.getDeliveryStreamName().equals(this.getDeliveryStreamName()) == false)
        return false;
    if (other.getDeliveryStreamType() == null ^ this.getDeliveryStreamType() == null)
        return false;
    if (other.getDeliveryStreamType() != null && other.getDeliveryStreamType().equals(this.getDeliveryStreamType()) == false)
        return false;
    if (other.getKinesisStreamSourceConfiguration() == null ^ this.getKinesisStreamSourceConfiguration() == null)
        return false;
    if (other.getKinesisStreamSourceConfiguration() != null
            && other.getKinesisStreamSourceConfiguration().equals(this.getKinesisStreamSourceConfiguration()) == false)
        return false;
    if (other.getDeliveryStreamEncryptionConfigurationInput() == null ^ this.getDeliveryStreamEncryptionConfigurationInput() == null)
        return false;
    if (other.getDeliveryStreamEncryptionConfigurationInput() != null
            && other.getDeliveryStreamEncryptionConfigurationInput().equals(this.getDeliveryStreamEncryptionConfigurationInput()) == false)
        return false;
    if (other.getS3DestinationConfiguration() == null ^ this.getS3DestinationConfiguration() == null)
        return false;
    if (other.getS3DestinationConfiguration() != null && other.getS3DestinationConfiguration().equals(this.getS3DestinationConfiguration()) == false)
        return false;
    if (other.getExtendedS3DestinationConfiguration() == null ^ this.getExtendedS3DestinationConfiguration() == null)
        return false;
    if (other.getExtendedS3DestinationConfiguration() != null
            && other.getExtendedS3DestinationConfiguration().equals(this.getExtendedS3DestinationConfiguration()) == false)
        return false;
    if (other.getRedshiftDestinationConfiguration() == null ^ this.getRedshiftDestinationConfiguration() == null)
        return false;
    if (other.getRedshiftDestinationConfiguration() != null
            && other.getRedshiftDestinationConfiguration().equals(this.getRedshiftDestinationConfiguration()) == false)
        return false;
    if (other.getElasticsearchDestinationConfiguration() == null ^ this.getElasticsearchDestinationConfiguration() == null)
        return false;
    if (other.getElasticsearchDestinationConfiguration() != null
            && other.getElasticsearchDestinationConfiguration().equals(this.getElasticsearchDestinationConfiguration()) == false)
        return false;
    if (other.getAmazonopensearchserviceDestinationConfiguration() == null ^ this.getAmazonopensearchserviceDestinationConfiguration() == null)
        return false;
    if (other.getAmazonopensearchserviceDestinationConfiguration() != null
            && other.getAmazonopensearchserviceDestinationConfiguration().equals(this.getAmazonopensearchserviceDestinationConfiguration()) == false)
        return false;
    if (other.getSplunkDestinationConfiguration() == null ^ this.getSplunkDestinationConfiguration() == null)
        return false;
    if (other.getSplunkDestinationConfiguration() != null
            && other.getSplunkDestinationConfiguration().equals(this.getSplunkDestinationConfiguration()) == false)
        return false;
    if (other.getHttpEndpointDestinationConfiguration() == null ^ this.getHttpEndpointDestinationConfiguration() == null)
        return false;
    if (other.getHttpEndpointDestinationConfiguration() != null
            && other.getHttpEndpointDestinationConfiguration().equals(this.getHttpEndpointDestinationConfiguration()) == false)
        return false;
    if (other.getTags() == null ^ this.getTags() == null)
        return false;
    if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
        return false;
    return true;
}
// 31-based polynomial hash over the same members (and order) as equals();
// null members contribute 0, keeping hashCode consistent with equals.
@Override
public int hashCode() {
    final int prime = 31;
    int hashCode = 1;
    hashCode = prime * hashCode + ((getDeliveryStreamName() == null) ? 0 : getDeliveryStreamName().hashCode());
    hashCode = prime * hashCode + ((getDeliveryStreamType() == null) ? 0 : getDeliveryStreamType().hashCode());
    hashCode = prime * hashCode + ((getKinesisStreamSourceConfiguration() == null) ? 0 : getKinesisStreamSourceConfiguration().hashCode());
    hashCode = prime * hashCode
            + ((getDeliveryStreamEncryptionConfigurationInput() == null) ? 0 : getDeliveryStreamEncryptionConfigurationInput().hashCode());
    hashCode = prime * hashCode + ((getS3DestinationConfiguration() == null) ? 0 : getS3DestinationConfiguration().hashCode());
    hashCode = prime * hashCode + ((getExtendedS3DestinationConfiguration() == null) ? 0 : getExtendedS3DestinationConfiguration().hashCode());
    hashCode = prime * hashCode + ((getRedshiftDestinationConfiguration() == null) ? 0 : getRedshiftDestinationConfiguration().hashCode());
    hashCode = prime * hashCode + ((getElasticsearchDestinationConfiguration() == null) ? 0 : getElasticsearchDestinationConfiguration().hashCode());
    hashCode = prime * hashCode
            + ((getAmazonopensearchserviceDestinationConfiguration() == null) ? 0 : getAmazonopensearchserviceDestinationConfiguration().hashCode());
    hashCode = prime * hashCode + ((getSplunkDestinationConfiguration() == null) ? 0 : getSplunkDestinationConfiguration().hashCode());
    hashCode = prime * hashCode + ((getHttpEndpointDestinationConfiguration() == null) ? 0 : getHttpEndpointDestinationConfiguration().hashCode());
    hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
    return hashCode;
}
@Override
public CreateDeliveryStreamRequest clone() {
    // Covariant override: delegate to the superclass clone and narrow the
    // static type so callers need no cast.
    final CreateDeliveryStreamRequest copy = (CreateDeliveryStreamRequest) super.clone();
    return copy;
}
}
| |
/*
Copyright [2016] [Taqdir Ali]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.uclab.mm.datamodel.dc;
import java.io.Serializable;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import java.util.Date;
import java.util.GregorianCalendar;
/**
 * JPA entity representing an application user, including denormalized
 * descriptive fields (gender, marital status, occupation, activity level).
 *
 * <p>Entity identity ({@link #equals(Object)} / {@link #hashCode()}) is based
 * solely on the generated surrogate key {@code id}.
 *
 * @author Taqdir
 */
@Entity
public class Users implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Surrogate primary key generated by the persistence provider. */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id = null;

    private Long userID;
    private String firstName;
    private String lastName;
    private String middleName;
    private int genderId;
    // NOTE(review): was previously a GregorianCalendar; now carried as a
    // String. The expected date format is not visible here -- confirm with
    // callers before parsing.
    private String dateOfBirth;
    private String contactNumber;
    private String emailAddress;
    // NOTE(review): "martial" is a long-standing typo for "marital"; kept
    // because the accessor names form the public interface used by callers.
    private int martialStatusId;
    private int activityLevelId;
    private String activityLevelDescription;
    private int occupationId;
    private String password;
    private String requestType;
    private int userTypeID;
    private String userTypeDescription;
    private String genderDescription;
    private String maritalStatusDescription;
    private String occupationDescription;

    /** @return the surrogate primary key, or null if not yet persisted */
    public Long getId() {
        return id;
    }

    /** @param id the surrogate primary key to set */
    public void setId(Long id) {
        this.id = id;
    }

    @Override
    public int hashCode() {
        // Same result as the hand-rolled null check this replaces:
        // 0 for a null id, id.hashCode() otherwise.
        return java.util.Objects.hashCode(id);
    }

    @Override
    public boolean equals(Object object) {
        // Identity is the primary key only, so two transient instances whose
        // ids are both null compare equal -- same behavior as the generated
        // implementation this replaces.
        if (!(object instanceof Users)) {
            return false;
        }
        final Users other = (Users) object;
        return java.util.Objects.equals(this.id, other.id);
    }

    @Override
    public String toString() {
        return "org.uclab.mm.datamodel.dc.Users[ id=" + id + " ]";
    }

    /** @return the business user identifier */
    public Long getUserID() {
        return userID;
    }

    /** @param userID the business user identifier to set */
    public void setUserID(Long userID) {
        this.userID = userID;
    }

    /** @return the first name */
    public String getFirstName() {
        return firstName;
    }

    /** @param firstName the first name to set */
    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    /** @return the last name */
    public String getLastName() {
        return lastName;
    }

    /** @param lastName the last name to set */
    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    /** @return the middle name */
    public String getMiddleName() {
        return middleName;
    }

    /** @param middleName the middle name to set */
    public void setMiddleName(String middleName) {
        this.middleName = middleName;
    }

    /** @return the gender lookup id */
    public int getGenderId() {
        return genderId;
    }

    /** @param genderId the gender lookup id to set */
    public void setGenderId(int genderId) {
        this.genderId = genderId;
    }

    /** @return the date of birth (format defined by callers) */
    public String getDateOfBirth() {
        return dateOfBirth;
    }

    /** @param dateOfBirth the date of birth to set */
    public void setDateOfBirth(String dateOfBirth) {
        this.dateOfBirth = dateOfBirth;
    }

    /** @return the contact number */
    public String getContactNumber() {
        return contactNumber;
    }

    /** @param contactNumber the contact number to set */
    public void setContactNumber(String contactNumber) {
        this.contactNumber = contactNumber;
    }

    /** @return the email address */
    public String getEmailAddress() {
        return emailAddress;
    }

    /** @param emailAddress the email address to set */
    public void setEmailAddress(String emailAddress) {
        this.emailAddress = emailAddress;
    }

    /** @return the marital-status lookup id ("martial" typo preserved) */
    public int getMartialStatusId() {
        return martialStatusId;
    }

    /** @param martialStatusId the marital-status lookup id to set */
    public void setMartialStatusId(int martialStatusId) {
        this.martialStatusId = martialStatusId;
    }

    /** @return the activity-level lookup id */
    public int getActivityLevelId() {
        return activityLevelId;
    }

    /** @param activityLevelId the activity-level lookup id to set */
    public void setActivityLevelId(int activityLevelId) {
        this.activityLevelId = activityLevelId;
    }

    /** @return the occupation lookup id */
    public int getOccupationId() {
        return occupationId;
    }

    /** @param occupationId the occupation lookup id to set */
    public void setOccupationId(int occupationId) {
        this.occupationId = occupationId;
    }

    /** @return the password */
    public String getPassword() {
        return password;
    }

    /** @param password the password to set */
    public void setPassword(String password) {
        this.password = password;
    }

    /** @return the request type */
    public String getRequestType() {
        return requestType;
    }

    /** @param requestType the request type to set */
    public void setRequestType(String requestType) {
        this.requestType = requestType;
    }

    /** @return the activity-level description */
    public String getActivityLevelDescription() {
        return activityLevelDescription;
    }

    /** @param activityLevelDescription the activity-level description to set */
    public void setActivityLevelDescription(String activityLevelDescription) {
        this.activityLevelDescription = activityLevelDescription;
    }

    /** @return the user-type id */
    public int getUserTypeID() {
        return userTypeID;
    }

    /** @param userTypeID the user-type id to set */
    public void setUserTypeID(int userTypeID) {
        this.userTypeID = userTypeID;
    }

    /** @return the user-type description */
    public String getUserTypeDescription() {
        return userTypeDescription;
    }

    /** @param userTypeDescription the user-type description to set */
    public void setUserTypeDescription(String userTypeDescription) {
        this.userTypeDescription = userTypeDescription;
    }

    /** @return the gender description */
    public String getGenderDescription() {
        return genderDescription;
    }

    /** @param genderDescription the gender description to set */
    public void setGenderDescription(String genderDescription) {
        this.genderDescription = genderDescription;
    }

    /** @return the marital-status description */
    public String getMaritalStatusDescription() {
        return maritalStatusDescription;
    }

    /** @param maritalStatusDescription the marital-status description to set */
    public void setMaritalStatusDescription(String maritalStatusDescription) {
        this.maritalStatusDescription = maritalStatusDescription;
    }

    /** @return the occupation description */
    public String getOccupationDescription() {
        return occupationDescription;
    }

    /** @param occupationDescription the occupation description to set */
    public void setOccupationDescription(String occupationDescription) {
        this.occupationDescription = occupationDescription;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.text.NumberFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumMap;
import java.util.Formatter;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
/**
 * Aggregate the storage type information for a set of blocks: counts, per
 * unique combination of (specified storage policy, actual replica storage
 * types), how many blocks use that combination, and renders the result as a
 * report of policy-compliant and non-compliant blocks.
 */
public class StoragePolicySummary {

  // Block count per unique (specified policy, sorted storage types) pair.
  Map<StorageTypeAllocation, Long> storageComboCounts = new HashMap<>();
  // All known policies; used to resolve which policy a given storage-type
  // combination actually satisfies.
  final BlockStoragePolicy[] storagePolicies;
  // Total blocks added so far; denominator for the percentage column.
  int totalBlocks;

  StoragePolicySummary(BlockStoragePolicy[] storagePolicies) {
    this.storagePolicies = storagePolicies;
  }

  // Add a storage type combination.
  // Note: the StorageTypeAllocation constructor sorts storageTypes in place.
  void add(StorageType[] storageTypes, BlockStoragePolicy policy) {
    StorageTypeAllocation storageCombo =
        new StorageTypeAllocation(storageTypes, policy);
    Long count = storageComboCounts.get(storageCombo);
    if (count == null) {
      storageComboCounts.put(storageCombo, 1l);
      // Resolve the actual policy only on first sight of a combination;
      // subsequent equal keys just bump the count below.
      storageCombo.setActualStoragePolicy(
          getStoragePolicy(storageCombo.getStorageTypes()));
    } else {
      storageComboCounts.put(storageCombo, count.longValue()+1);
    }
    totalBlocks++;
  }

  // sort the storageType combinations based on the total blocks counts
  // in descending order
  static List<Entry<StorageTypeAllocation, Long>> sortByComparator(
      Map<StorageTypeAllocation, Long> unsortMap) {
    List<Entry<StorageTypeAllocation, Long>> storageAllocations =
        new LinkedList<>(unsortMap.entrySet());
    // Sorting the list based on values (o2 before o1 => descending).
    Collections.sort(storageAllocations,
        new Comparator<Entry<StorageTypeAllocation, Long>>() {
          public int compare(Entry<StorageTypeAllocation, Long> o1,
              Entry<StorageTypeAllocation, Long> o2)
          {
            return o2.getValue().compareTo(o1.getValue());
          }
        });
    return storageAllocations;
  }

  /**
   * Renders two sections: blocks whose actual storage matches the specified
   * policy, then (if any) blocks that do not match, each line showing the
   * combination, block count and percentage of total blocks.
   */
  public String toString() {
    StringBuilder compliantBlocksSB = new StringBuilder();
    compliantBlocksSB
        .append("\nBlocks satisfying the specified storage policy:")
        .append("\nStorage Policy"
            + "                  # of blocks       % of blocks\n");
    StringBuilder nonCompliantBlocksSB = new StringBuilder();
    Formatter compliantFormatter = new Formatter(compliantBlocksSB);
    Formatter nonCompliantFormatter = new Formatter(nonCompliantBlocksSB);
    NumberFormat percentFormat = NumberFormat.getPercentInstance();
    percentFormat.setMinimumFractionDigits(4);
    percentFormat.setMaximumFractionDigits(4);
    // Emit combinations in descending order of block count.
    for (Map.Entry<StorageTypeAllocation, Long> storageComboCount:
        sortByComparator(storageComboCounts)) {
      double percent = (double) storageComboCount.getValue() /
          (double) totalBlocks;
      StorageTypeAllocation sta = storageComboCount.getKey();
      if (sta.policyMatches()) {
        compliantFormatter.format("%-25s %10d  %20s%n",
            sta.getStoragePolicyDescriptor(),
            storageComboCount.getValue(),
            percentFormat.format(percent));
      } else {
        // Write the non-compliant header lazily, only when the first
        // non-matching combination is seen.
        if (nonCompliantBlocksSB.length() == 0) {
          nonCompliantBlocksSB
              .append("\nBlocks NOT satisfying the specified storage policy:")
              .append("\nStorage Policy                  ")
              .append(
                  "Specified Storage Policy      # of blocks       % of blocks\n");
        }
        nonCompliantFormatter.format("%-35s %-20s %10d  %20s%n",
            sta.getStoragePolicyDescriptor(),
            sta.getSpecifiedStoragePolicy().getName(),
            storageComboCount.getValue(),
            percentFormat.format(percent));
      }
    }
    if (nonCompliantBlocksSB.length() == 0) {
      nonCompliantBlocksSB.append("\nAll blocks satisfy specified storage policy.\n");
    }
    compliantFormatter.close();
    nonCompliantFormatter.close();
    return compliantBlocksSB.toString() + nonCompliantBlocksSB;
  }

  /**
   *
   * @param storageTypes - sorted array of storageTypes
   * @return Storage Policy which matches the specific storage Combination,
   *         or null when no configured policy matches
   */
  private BlockStoragePolicy getStoragePolicy(StorageType[] storageTypes) {
    for (BlockStoragePolicy storagePolicy:storagePolicies) {
      StorageType[] policyStorageTypes = storagePolicy.getStorageTypes();
      // Sort a copy so the policy's own array is never reordered.
      policyStorageTypes = Arrays.copyOf(policyStorageTypes, policyStorageTypes.length);
      Arrays.sort(policyStorageTypes);
      if (policyStorageTypes.length <= storageTypes.length) {
        int i = 0;
        // The policy's sorted types must be a prefix of the block's sorted types.
        for (; i < policyStorageTypes.length; i++) {
          if (policyStorageTypes[i] != storageTypes[i]) {
            break;
          }
        }
        if (i < policyStorageTypes.length) {
          continue;
        }
        // Any extra replicas beyond the policy's length must all use the
        // policy's last (greatest after sorting) storage type.
        int j=policyStorageTypes.length;
        for (; j < storageTypes.length; j++) {
          if (policyStorageTypes[i-1] != storageTypes[j]) {
            break;
          }
        }
        if (j==storageTypes.length) {
          return storagePolicy;
        }
      }
    }
    return null;
  }

  /**
   * Internal class which represents a unique Storage type combination:
   * the policy the block was configured with plus the (sorted) storage types
   * its replicas actually occupy. Used as a HashMap key, so equals/hashCode
   * cover exactly those two fields; actualStoragePolicy is derived state and
   * deliberately excluded from identity.
   */
  static class StorageTypeAllocation {
    private final BlockStoragePolicy specifiedStoragePolicy;
    private final StorageType[] storageTypes;
    // Resolved lazily by StoragePolicySummary.add(); may remain null.
    private BlockStoragePolicy actualStoragePolicy;

    StorageTypeAllocation(StorageType[] storageTypes,
        BlockStoragePolicy specifiedStoragePolicy) {
      // NOTE: sorts the caller's array in place.
      Arrays.sort(storageTypes);
      this.storageTypes = storageTypes;
      this.specifiedStoragePolicy = specifiedStoragePolicy;
    }

    StorageType[] getStorageTypes() {
      return storageTypes;
    }

    BlockStoragePolicy getSpecifiedStoragePolicy() {
      return specifiedStoragePolicy;
    }

    void setActualStoragePolicy(BlockStoragePolicy actualStoragePolicy) {
      this.actualStoragePolicy = actualStoragePolicy;
    }

    BlockStoragePolicy getActualStoragePolicy() {
      return actualStoragePolicy;
    }

    // Render a type->count map as "TYPE:count,TYPE:count" (trailing comma
    // stripped).
    private static String getStorageAllocationAsString
      (Map<StorageType, Integer> storageType_countmap) {
      StringBuilder sb = new StringBuilder();
      for (Map.Entry<StorageType, Integer>
          storageTypeCountEntry:storageType_countmap.entrySet()) {
        sb.append(storageTypeCountEntry.getKey().name()+ ":"
            + storageTypeCountEntry.getValue() + ",");
      }
      if (sb.length() > 1) {
        sb.deleteCharAt(sb.length()-1);
      }
      return sb.toString();
    }

    // Tally this allocation's storage types per type and format them.
    private String getStorageAllocationAsString() {
      Map<StorageType, Integer> storageType_countmap =
          new EnumMap<>(StorageType.class);
      for (StorageType storageType: storageTypes) {
        Integer count = storageType_countmap.get(storageType);
        if (count == null) {
          storageType_countmap.put(storageType, 1);
        } else {
          storageType_countmap.put(storageType, count.intValue()+1);
        }
      }
      return (getStorageAllocationAsString(storageType_countmap));
    }

    // Human-readable descriptor: "TYPE:n,...(PolicyName)" when an actual
    // policy was resolved, or just the allocation string otherwise.
    String getStoragePolicyDescriptor() {
      StringBuilder storagePolicyDescriptorSB = new StringBuilder();
      if (actualStoragePolicy!=null) {
        storagePolicyDescriptorSB.append(getStorageAllocationAsString())
            .append("(")
            .append(actualStoragePolicy.getName())
            .append(")");
      } else {
        storagePolicyDescriptorSB.append(getStorageAllocationAsString());
      }
      return storagePolicyDescriptorSB.toString();
    }

    // True when the resolved actual policy equals the specified one.
    boolean policyMatches() {
      return specifiedStoragePolicy.equals(actualStoragePolicy);
    }

    @Override
    public String toString() {
      return specifiedStoragePolicy.getName() + "|" + getStoragePolicyDescriptor();
    }

    @Override
    public int hashCode() {
      return Objects.hash(specifiedStoragePolicy,Arrays.hashCode(storageTypes));
    }

    @Override
    public boolean equals(Object another) {
      return (another instanceof StorageTypeAllocation &&
          Objects.equals(specifiedStoragePolicy,
              ((StorageTypeAllocation)another).specifiedStoragePolicy) &&
          Arrays.equals(storageTypes,
              ((StorageTypeAllocation)another).storageTypes));
    }
  }
}
| |
package mil.nga.giat.geowave.store.data.field;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Calendar;
import java.util.Date;
import mil.nga.giat.geowave.index.ByteArrayId;
import mil.nga.giat.geowave.index.StringUtils;
import mil.nga.giat.geowave.store.GeometryUtils;
import mil.nga.giat.geowave.store.TimeUtils;
import org.apache.commons.lang3.ArrayUtils;
import com.vividsolutions.jts.geom.Geometry;
/**
 * All of the primitive {@link FieldWriter} implementations supported by the
 * data model, plus a generic wrapper ({@code BasicWriter} itself) that pairs a
 * delegate writer with an optional per-row visibility handler.
 */
public class BasicWriter<RowType, FieldType> implements FieldWriter<RowType, FieldType> {

    /** Optional visibility computation; null means fields carry no visibility. */
    private FieldVisibilityHandler<RowType, Object> visibilityHandler;
    /** Delegate that performs the actual value serialization. */
    private FieldWriter<?, FieldType> writer;

    public BasicWriter(final FieldWriter<?, FieldType> writer) {
        this(writer, null);
    }

    public BasicWriter(
            final FieldWriter<?, FieldType> writer,
            final FieldVisibilityHandler<RowType, Object> visibilityHandler) {
        this.writer = writer;
        this.visibilityHandler = visibilityHandler;
    }

    @Override
    public byte[] getVisibility(
            final RowType rowValue,
            final ByteArrayId fieldId,
            final FieldType fieldValue) {
        // Without a handler, every field gets an empty (unrestricted) visibility.
        if (visibilityHandler == null) {
            return new byte[0];
        }
        return visibilityHandler.getVisibility(rowValue, fieldId, fieldValue);
    }

    @Override
    public byte[] writeField(final FieldType fieldValue) {
        return writer.writeField(fieldValue);
    }

    /** Serializes a Boolean as a single byte: 1 for true, 0 for false or null. */
    public static class BooleanWriter implements FieldWriter<Object, Boolean> {
        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Boolean fieldValue) {
            return new byte[0];
        }

        @Override
        public byte[] writeField(final Boolean fieldValue) {
            final boolean truthy = (fieldValue != null) && fieldValue;
            return new byte[] { truthy ? (byte) 1 : (byte) 0 };
        }
    }

    /** Serializes a Byte as itself (single-byte array). */
    public static class ByteWriter implements FieldWriter<Object, Byte> {
        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Byte fieldValue) {
            return new byte[0];
        }

        @Override
        public byte[] writeField(final Byte fieldValue) {
            return new byte[] { fieldValue };
        }
    }

    /** Serializes a Short as 2 bytes (ByteBuffer default big-endian order). */
    public static class ShortWriter implements FieldWriter<Object, Short> {
        @Override
        public byte[] writeField(final Short fieldValue) {
            return ByteBuffer.allocate(2).putShort(fieldValue).array();
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Short fieldValue) {
            return new byte[0];
        }
    }

    /** Serializes a Float as 4 bytes. */
    public static class FloatWriter implements FieldWriter<Object, Float> {
        @Override
        public byte[] writeField(final Float fieldValue) {
            return ByteBuffer.allocate(4).putFloat(fieldValue).array();
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Float fieldValue) {
            return new byte[0];
        }
    }

    /** Serializes a Double as 8 bytes. */
    public static class DoubleWriter implements FieldWriter<Object, Double> {
        @Override
        public byte[] writeField(final Double fieldValue) {
            return ByteBuffer.allocate(8).putDouble(fieldValue).array();
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Double fieldValue) {
            return new byte[0];
        }
    }

    /**
     * Serializes a BigDecimal through {@code doubleValue()} into 8 bytes.
     * Note this is lossy for values that exceed double precision.
     */
    public static class BigDecimalWriter implements FieldWriter<Object, BigDecimal> {
        @Override
        public byte[] writeField(final BigDecimal fieldValue) {
            return ByteBuffer.allocate(8).putDouble(fieldValue.doubleValue()).array();
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final BigDecimal fieldValue) {
            return new byte[0];
        }
    }

    /** Serializes an Integer as 4 bytes. */
    public static class IntWriter implements FieldWriter<Object, Integer> {
        @Override
        public byte[] writeField(final Integer fieldValue) {
            return ByteBuffer.allocate(4).putInt(fieldValue).array();
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Integer fieldValue) {
            return new byte[0];
        }
    }

    /** Serializes a Long as 8 bytes. */
    public static class LongWriter implements FieldWriter<Object, Long> {
        @Override
        public byte[] writeField(final Long fieldValue) {
            return ByteBuffer.allocate(8).putLong(fieldValue).array();
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Long fieldValue) {
            return new byte[0];
        }
    }

    /** Serializes a BigInteger via its two's-complement byte representation. */
    public static class BigIntegerWriter implements FieldWriter<Object, BigInteger> {
        @Override
        public byte[] writeField(final BigInteger fieldValue) {
            return fieldValue.toByteArray();
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final BigInteger fieldValue) {
            return new byte[0];
        }
    }

    /** Serializes a String through StringUtils; null becomes an empty array. */
    public static class StringWriter implements FieldWriter<Object, String> {
        @Override
        public byte[] writeField(final String fieldValue) {
            return (fieldValue == null) ? new byte[0] : StringUtils.stringToBinary(fieldValue);
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final String fieldValue) {
            return new byte[0];
        }
    }

    /** Serializes a JTS Geometry through GeometryUtils; null becomes empty. */
    public static class GeometryWriter implements FieldWriter<Object, Geometry> {
        @Override
        public byte[] writeField(final Geometry fieldValue) {
            return (fieldValue == null) ? new byte[0] : GeometryUtils.geometryToBinary(fieldValue);
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Geometry fieldValue) {
            return new byte[0];
        }
    }

    /** Serializes a Date as its epoch-millis long (8 bytes); null becomes empty. */
    public static class DateWriter implements FieldWriter<Object, Date> {
        @Override
        public byte[] writeField(final Date fieldData) {
            if (fieldData == null) {
                return new byte[0];
            }
            return ByteBuffer.allocate(8).putLong(fieldData.getTime()).array();
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Date fieldValue) {
            return new byte[0];
        }
    }

    /** Serializes a Calendar as GMT epoch millis (8 bytes); null becomes empty. */
    public static class CalendarWriter implements FieldWriter<Object, Calendar> {
        @Override
        public byte[] writeField(final Calendar cal) {
            if (cal == null) {
                return new byte[0];
            }
            return ByteBuffer.allocate(8).putLong(TimeUtils.calendarToGMTMillis(cal)).array();
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Calendar fieldValue) {
            return new byte[0];
        }
    }

    /** Unboxes a Byte[] to byte[]; null becomes an empty array. */
    public static class ByteArrayWriter implements FieldWriter<Object, Byte[]> {
        @Override
        public byte[] writeField(final Byte[] fieldValue) {
            return (fieldValue == null) ? new byte[0] : ArrayUtils.toPrimitive(fieldValue);
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final Byte[] fieldValue) {
            return new byte[0];
        }
    }

    /** Passes a byte[] through unchanged; null becomes an empty array. */
    public static class PrimitiveByteArrayWriter implements FieldWriter<Object, byte[]> {
        @Override
        public byte[] writeField(final byte[] fieldValue) {
            return (fieldValue == null) ? new byte[0] : fieldValue;
        }

        @Override
        public byte[] getVisibility(
                final Object rowValue, final ByteArrayId fieldId, final byte[] fieldValue) {
            return new byte[0];
        }
    }
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.impl.publishers;
import org.apache.commons.io.FileUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.StatusLine;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.util.EntityUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.APIProvider;
import org.wso2.carbon.apimgt.api.model.API;
import org.wso2.carbon.apimgt.api.model.APIIdentifier;
import org.wso2.carbon.apimgt.api.model.APIStore;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.APIManagerConfiguration;
import org.wso2.carbon.apimgt.impl.APIManagerConfigurationService;
import org.wso2.carbon.apimgt.impl.TestUtils;
import org.wso2.carbon.apimgt.impl.importexport.APIImportExportException;
import org.wso2.carbon.apimgt.impl.importexport.APIImportExportManager;
import org.wso2.carbon.apimgt.impl.importexport.ExportFormat;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.user.core.UserStoreException;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.core.tenant.TenantManager;
import org.wso2.carbon.utils.CarbonUtils;
import java.io.File;
import java.net.HttpURLConnection;
import java.net.URL;
@RunWith(PowerMockRunner.class)
@PrepareForTest({EntityUtils.class, CarbonUtils.class, ServiceReferenceHolder.class, URL.class, HttpURLConnection.class,
FileUtils.class, WSO2APIPublisher.class, MultipartEntityBuilder.class})
public class WSO2APIPublisherTestCase {
private int tenantID = -1234;
private String username = "admin";
private String tenantDomain = "carbon.super";
private String apiIdentifier = "P1_API1_v1.0.0";
private String storeName = "Sample";
private String storeUserName = "admin";
private String storePassword = "admin";
private String storeEndpoint = "https://localhost:9292/sample";
private String storeRedirectURL = "http://localhost:9292/redirect";
private APIStore store;
private APIIdentifier identifier;
private API api;
private WSO2APIPublisher wso2APIPublisher;
private TenantManager tenantManager;
private String apiArtifactDir = "/tmp/test";
private StatusLine statusLine;
private APIImportExportManager apiImportExportManager;
private CloseableHttpClient defaultHttpClient;
@Before
public void init() throws Exception {
    // Store under test, fully configured with endpoint and credentials.
    store = new APIStore();
    store.setDisplayName(storeName);
    store.setUsername(storeUserName);
    store.setPassword(storePassword);
    store.setEndpoint(storeEndpoint);
    identifier = new APIIdentifier(apiIdentifier);
    api = new API(identifier);
    // Publisher wrapper backed by a mocked HTTP client so no real network I/O occurs.
    defaultHttpClient = Mockito.mock(CloseableHttpClient.class);
    wso2APIPublisher = new WSO2APIPublisherWrapper(defaultHttpClient, username, Mockito.mock(APIProvider.class));
    CloseableHttpResponse httpResponse = Mockito.mock(CloseableHttpResponse.class);
    // Carbon service-holder -> realm-service -> tenant-manager chain used for tenant-id lookups.
    ServiceReferenceHolder serviceReferenceHolder = TestUtils.getServiceReferenceHolder();
    RealmService realmService = Mockito.mock(RealmService.class);
    tenantManager = Mockito.mock(TenantManager.class);
    Mockito.when(serviceReferenceHolder.getRealmService()).thenReturn(realmService);
    Mockito.when(realmService.getTenantManager()).thenReturn(tenantManager);
    // Shared response skeleton; individual tests stub statusLine / EntityUtils per scenario.
    HttpEntity entity = Mockito.mock(HttpEntity.class);
    statusLine = Mockito.mock(StatusLine.class);
    Mockito.doReturn(statusLine).when(httpResponse).getStatusLine();
    Mockito.doReturn(entity).when(httpResponse).getEntity();
    PowerMockito.mockStatic(EntityUtils.class);
    // APIM configuration mock supplying the external-store redirect URL property.
    APIManagerConfigurationService apiManagerConfigurationService = Mockito.mock(APIManagerConfigurationService.class);
    Mockito.when(serviceReferenceHolder.getAPIManagerConfigurationService()).thenReturn(apiManagerConfigurationService);
    APIManagerConfiguration apiManagerConfiguration = Mockito.mock(APIManagerConfiguration.class);
    Mockito.when(apiManagerConfigurationService.getAPIManagerConfiguration()).thenReturn(apiManagerConfiguration);
    Mockito.when(apiManagerConfiguration.getFirstProperty(APIConstants.EXTERNAL_API_STORES + "."
            + APIConstants.EXTERNAL_API_STORES_STORE_URL)).thenReturn(storeRedirectURL);
    // Any GET/POST/DELETE the publisher constructs resolves to the shared mocked response.
    HttpGet httpGet = Mockito.mock(HttpGet.class);
    HttpPost httpPost = Mockito.mock(HttpPost.class);
    HttpDelete httpDelete = Mockito.mock(HttpDelete.class);
    PowerMockito.whenNew(HttpGet.class).withAnyArguments().thenReturn(httpGet);
    PowerMockito.whenNew(HttpPost.class).withAnyArguments().thenReturn(httpPost);
    PowerMockito.whenNew(HttpDelete.class).withAnyArguments().thenReturn(httpDelete);
    Mockito.doReturn(httpResponse).when(defaultHttpClient).execute(httpPost);
    Mockito.doReturn(httpResponse).when(defaultHttpClient).execute(httpGet);
    Mockito.doReturn(httpResponse).when(defaultHttpClient).execute(httpDelete);
    // Multipart bodies are mocked so POSTs need no real exported file content.
    MultipartEntityBuilder multipartEntityBuilder = Mockito.mock(MultipartEntityBuilder.class);
    PowerMockito.mockStatic(MultipartEntityBuilder.class);
    Mockito.when(MultipartEntityBuilder.create()).thenReturn(multipartEntityBuilder);
    Mockito.when(multipartEntityBuilder.build()).thenReturn(Mockito.mock(HttpEntity.class));
    Mockito.doNothing().when(httpPost).setEntity(Matchers.any());
    // Import/export manager is mocked; tests stub exportAPIArtifacts as needed.
    apiImportExportManager = Mockito.mock(APIImportExportManager.class);
    PowerMockito.whenNew(APIImportExportManager.class).withAnyArguments().thenReturn(apiImportExportManager);
}
@Test
public void testPublishToStoreWithNullStoreArguments() {
    // Error path: a store configured without endpoint or credentials must be rejected.
    final APIStore incompleteStore = new APIStore();
    incompleteStore.setDisplayName(storeName);
    final String expected = "External APIStore endpoint URL or credentials are not defined. " +
            "Cannot proceed with publishing API to the APIStore - " + incompleteStore.getDisplayName();
    try {
        wso2APIPublisher.publishToStore(api, incompleteStore);
        Assert.fail("APIManagement exception not thrown for error scenario");
    } catch (APIManagementException e) {
        Assert.assertEquals(expected, e.getMessage());
    }
}
@Test
public void testPublishAndUpdateToStore() throws Exception {
    Mockito.when(tenantManager.getTenantId(tenantDomain)).thenReturn(tenantID);
    Mockito.doReturn(apiArtifactDir).when(apiImportExportManager).exportAPIArtifacts(Matchers.any(API.class),
            Matchers.anyBoolean(), Matchers.any(ExportFormat.class));
    // Unauthenticated (HTTP 401) responses must surface as APIManagementExceptions
    // for both the publish and the update flows.
    Mockito.doReturn(HttpStatus.SC_UNAUTHORIZED).when(statusLine).getStatusCode();
    final String errorBody = "{\"code\":401,\"message\":\"\",\"description\":\"Unauthenticated request\"," +
            "\"moreInfo\":\"\",\"error\":[]}";
    PowerMockito.when(EntityUtils.toString(Matchers.any())).thenReturn(errorBody);
    final String expectedError = "Import API service call received unsuccessful response: " + errorBody
            + " status: " + HttpStatus.SC_UNAUTHORIZED;
    try {
        wso2APIPublisher.publishToStore(api, store);
        Assert.fail("APIManagement exception not thrown for error scenario");
    } catch (APIManagementException e) {
        Assert.assertEquals(expectedError, e.getMessage());
    }
    try {
        wso2APIPublisher.updateToStore(api, store);
        Assert.fail("APIManagement exception not thrown for error scenario");
    } catch (APIManagementException e) {
        Assert.assertEquals(expectedError, e.getMessage());
    }
    // Happy path: an HTTP 200 with a success body makes both operations return true.
    Mockito.doReturn(HttpStatus.SC_OK).when(statusLine).getStatusCode();
    PowerMockito.when(EntityUtils.toString(Matchers.any())).thenReturn("API imported successfully.");
    Assert.assertTrue("API Publish is unsuccessful", wso2APIPublisher.publishToStore(api, store));
    Assert.assertTrue("API Update is unsuccessful", wso2APIPublisher.updateToStore(api, store));
}
@Test
public void testFailureWhileExportingAPI() throws Exception {
    // Failure mode 1: the API export itself throws.
    PowerMockito.doThrow(new APIImportExportException("Error while exporting API")).when(apiImportExportManager)
            .exportAPIArtifacts(Matchers.any(API.class), Matchers.anyBoolean(), Matchers.any(ExportFormat.class));
    try {
        wso2APIPublisher.publishToStore(api, store);
        Assert.fail("APIManagement exception not thrown for error scenario");
    } catch (APIManagementException e) {
        final String expected = "Error while exporting API: " + api.getId().getApiName() + " version: "
                + api.getId().getVersion();
        Assert.assertEquals(expected, e.getMessage());
    }
    // Failure mode 2: resolving the tenant id throws before the export is reached.
    PowerMockito.doThrow(
            new UserStoreException("Error in getting the tenant id with tenant domain: " + tenantDomain + "."))
            .when(tenantManager).getTenantId(tenantDomain);
    try {
        wso2APIPublisher.publishToStore(api, store);
        Assert.fail("APIManagement exception not thrown for error scenario");
    } catch (APIManagementException e) {
        final String expected = "Error while getting tenantId for tenant domain: " + tenantDomain
                + " when exporting API:" + api.getId().getApiName() + " version: " + api.getId().getVersion();
        Assert.assertEquals(expected, e.getMessage());
    }
}
/**
 * Exercises WSO2APIPublisher#isAPIAvailable against mocked external-store search
 * responses:
 * - count == 2 (duplicates) must raise APIManagementException,
 * - count == 0 must return false (API absent),
 * - count == 1 must return true (API present).
 * The HTTP layer is mocked; EntityUtils.toString is stubbed to return each payload.
 */
@Test
public void testCheckingAPIExists() throws Exception {
//Test error path when multiple APIs received for search request
Mockito.doReturn(HttpStatus.SC_OK).when(statusLine).getStatusCode();
// NOTE(review): these mocked payloads contain trailing commas after the "version"
// entries, which is not strict JSON — presumably the production parser is lenient;
// confirm before reusing these fixtures elsewhere.
String apiGetResponse = "{\n" +
"   \"count\": 2,\n" +
"   \"list\": [\n" +
"      {\n" +
"         \"id\": \"735ad20d-f382-4ab3-8000-97fce885c853\",\n" +
"         \"name\": \"API1\",\n" +
"         \"version\": \"v1.0.0\",\n" +
"      },\n" +
"      {\n" +
"         \"id\": \"2346e0cc-926c-4b1d-8624-9d08371494c6\",\n" +
"         \"name\": \"TestAPI1\",\n" +
"         \"version\": \"v1.0.0\",\n" +
"      }\n" +
"   ],\n" +
"   \"pagination\": {\n" +
"      \"offset\": 0,\n" +
"      \"limit\": 25,\n" +
"      \"total\": 2,\n" +
"      \"next\": \"\",\n" +
"      \"previous\": \"\"\n" +
"   }\n" +
"}";
PowerMockito.when(EntityUtils.toString(Matchers.any())).thenReturn(apiGetResponse);
String errorMessage = "Duplicate APIs exists in external store for API name:"
+ identifier.getApiName() + " version: " + identifier.getVersion();
try {
wso2APIPublisher.isAPIAvailable(api, store);
Assert.fail("APIManagement exception not thrown for error scenario");
} catch (APIManagementException e) {
Assert.assertEquals(errorMessage, e.getMessage());
}
//Test successful API non existence response
// Empty result list: isAPIAvailable must report the API as absent.
apiGetResponse = "{\n" +
"   \"count\": 0,\n" +
"   \"list\": [],\n" +
"   \"pagination\": {\n" +
"      \"offset\": 0,\n" +
"      \"limit\": 25,\n" +
"      \"total\": 0,\n" +
"      \"next\": \"\",\n" +
"      \"previous\": \"\"\n" +
"   }\n" +
"}";
PowerMockito.when(EntityUtils.toString(Matchers.any())).thenReturn(apiGetResponse);
Assert.assertFalse("API Exists response received", wso2APIPublisher.isAPIAvailable(api, store));
//Test successful API existence response
// Exactly one match: isAPIAvailable must report the API as present.
apiGetResponse = "{\n" +
"   \"count\": 1,\n" +
"   \"list\": [\n" +
"      {\n" +
"         \"id\": \"735ad20d-f382-4ab3-8000-97fce885c853\",\n" +
"         \"name\": \"API1\",\n" +
"         \"version\": \"1.0.0\",\n" +
"      }\n" +
"   ],\n" +
"   \"pagination\": {\n" +
"      \"offset\": 0,\n" +
"      \"limit\": 25,\n" +
"      \"total\": 1,\n" +
"      \"next\": \"\",\n" +
"      \"previous\": \"\"\n" +
"   }\n" +
"}";
PowerMockito.when(EntityUtils.toString(Matchers.any())).thenReturn(apiGetResponse);
Assert.assertTrue("API non exists response received", wso2APIPublisher.isAPIAvailable(api, store));
}
/**
 * Exercises WSO2APIPublisher#deleteFromStore against mocked external-store responses:
 * - deleting an API that does not exist in the store must raise APIManagementException,
 * - a successful lookup followed by deletion must return true,
 * - an HTTP 500 from the DELETE call must raise APIManagementException carrying the
 *   status code and the raw error payload.
 */
@Test
public void testDeletingAPI() throws Exception {
//Test error path when deleting non existing API
Mockito.doReturn(HttpStatus.SC_OK).when(statusLine).getStatusCode();
// Lookup payload with count == 0: the publisher should refuse to delete.
String apiGetResponse = "{\n" +
"   \"count\": 0,\n" +
"   \"list\": [],\n" +
"   \"pagination\": {\n" +
"      \"offset\": 0,\n" +
"      \"limit\": 25,\n" +
"      \"total\": 0,\n" +
"      \"next\": \"\",\n" +
"      \"previous\": \"\"\n" +
"   }\n" +
"}";
PowerMockito.when(EntityUtils.toString(Matchers.any())).thenReturn(apiGetResponse);
try {
wso2APIPublisher.deleteFromStore(identifier, store);
Assert.fail("APIManagement exception not thrown for error scenario");
} catch (APIManagementException e) {
String errorMessage = "API: " + identifier.getApiName() + " version: " + identifier.getVersion()
+ " does not exist in external store: " + store.getName();
Assert.assertEquals(errorMessage, e.getMessage());
}
//Test successful API deletion
// Lookup payload with count == 1: deletion should proceed and succeed.
// NOTE(review): the trailing comma after "version" is not strict JSON — presumably
// the production parser is lenient; confirm before reusing this fixture.
apiGetResponse = "{\n" +
"   \"count\": 1,\n" +
"   \"list\": [\n" +
"      {\n" +
"         \"id\": \"735ad20d-f382-4ab3-8000-97fce885c853\",\n" +
"         \"name\": \"API1\",\n" +
"         \"version\": \"1.0.0\",\n" +
"      }\n" +
"   ],\n" +
"   \"pagination\": {\n" +
"      \"offset\": 0,\n" +
"      \"limit\": 25,\n" +
"      \"total\": 1,\n" +
"      \"next\": \"\",\n" +
"      \"previous\": \"\"\n" +
"   }\n" +
"}";
PowerMockito.when(EntityUtils.toString(Matchers.any())).thenReturn(apiGetResponse);
Assert.assertTrue("API deletion failed", wso2APIPublisher.deleteFromStore(identifier, store));
//Test error path API deletion failed due to server error
String apiDeleteResponse = "{\"code\":500,\"message\":\"\",\"description\":\"Internal Server Error\"," +
"\"moreInfo\":\"\",\"error\":[]}";
// Build a dedicated mock chain (HttpDelete -> response -> status line/entity) so the
// failing DELETE does not disturb the shared mocks used by the earlier assertions.
HttpDelete httpDeleteFail = Mockito.mock(HttpDelete.class);
PowerMockito.whenNew(HttpDelete.class).withAnyArguments().thenReturn(httpDeleteFail);
CloseableHttpResponse deletionFailedResponse = Mockito.mock(CloseableHttpResponse.class);
StatusLine deletionFailedStatusLine = Mockito.mock(StatusLine.class);
Mockito.doReturn(deletionFailedStatusLine).when(deletionFailedResponse).getStatusLine();
HttpEntity deletionFailedEntity = Mockito.mock(HttpEntity.class);
Mockito.doReturn(deletionFailedEntity).when(deletionFailedResponse).getEntity();
PowerMockito.when(EntityUtils.toString(deletionFailedEntity)).thenReturn(apiDeleteResponse);
Mockito.doReturn(deletionFailedResponse).when(defaultHttpClient).execute(httpDeleteFail);
//Error path when deleting API
Mockito.doReturn(HttpStatus.SC_INTERNAL_SERVER_ERROR).when(deletionFailedStatusLine).getStatusCode();
try {
wso2APIPublisher.deleteFromStore(identifier, store);
Assert.fail("APIManagement exception not thrown for error scenario");
} catch (APIManagementException e) {
String errorMessage = "API Delete service call received unsuccessful response status: "
+ HttpStatus.SC_INTERNAL_SERVER_ERROR + " response: " + apiDeleteResponse;
Assert.assertEquals(errorMessage, e.getMessage());
}
}
}
| |
/*
Derby - Class org.apache.derbyTesting.functionTests.tests.derbynet.SecureServerTest
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.derbynet;
import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.ArrayList;
import java.util.Arrays;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.Derby;
import org.apache.derbyTesting.junit.NetworkServerTestSetup;
import org.apache.derbyTesting.junit.SecurityManagerSetup;
import org.apache.derbyTesting.junit.SpawnedProcess;
import org.apache.derbyTesting.junit.SupportFilesSetup;
import org.apache.derbyTesting.junit.TestConfiguration;
import org.apache.derbyTesting.functionTests.util.PrivilegedFileOpsForTests;
/**
* This Junit test class tests whether the server comes up under a security
* manager as expected.
*/
public class SecureServerTest extends BaseJDBCTestCase
{
    ///////////////////////////////////////////////////////////////////////////////////
    //
    // CONSTANTS
    //
    ///////////////////////////////////////////////////////////////////////////////////

    // basic properties file which tests that properties are picked up from derby.properties
    private static final String BASIC = "functionTests/tests/derbynet/SecureServerTest.derby.properties";

    private static final String SST_USER_NAME = "MARY";
    private static final String SST_PASSWORD = "marypwd";

    // Wildcard host specifications the server accepts (DERBY-2811).
    private static final String HOSTW = "0.0.0.0";
    private static final String ALTW = "0.00.000.0";
    // IPv6 wildcard; currently rejected by the server, so only referenced from a
    // commented-out case in suite().
    private static final String IPV6W = "::";

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // INNER CLASSES
    //
    ///////////////////////////////////////////////////////////////////////////////////

    /**
     * <p>
     * Possible outcomes for the experiment of bringing up the server.
     * </p>
     */
    public static final class Outcome
    {
        // Immutable value object: both fields are set once in the constructor.
        private final boolean _serverShouldComeUp;
        private final String _expectedServerOutput;

        public Outcome
            (
             boolean serverShouldComeUp,
             String expectedServerOutput
             )
        {
            _serverShouldComeUp = serverShouldComeUp;
            _expectedServerOutput = expectedServerOutput;
        }

        /** True if the server is expected to boot in this configuration. */
        public boolean serverShouldComeUp() { return _serverShouldComeUp; }
        /** Substring that must appear in the server's console output. */
        public String expectedServerOutput() { return _expectedServerOutput; }
    }

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // STATE
    //
    ///////////////////////////////////////////////////////////////////////////////////

    // Server boots but no security manager message is expected (empty substring
    // matches any output).
    private static final Outcome RUNNING_SECURITY_NOT_BOOTED = new Outcome( true, "" );
    // Server boots and must report that the Basic security policy was installed.
    private static final Outcome RUNNING_SECURITY_BOOTED = new Outcome( true, serverBootedOK() );

    /** Reference to the enclosing NetworkServerTestSetup. */
    private NetworkServerTestSetup nsTestSetup;

    // startup state
    private boolean _unsecureSet;
    private boolean _authenticationRequired;
    private String _customDerbyProperties;
    private String _wildCardHost;

    // expected outcomes
    private Outcome _outcome;

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // CONSTRUCTORS
    //
    ///////////////////////////////////////////////////////////////////////////////////

    /**
     * Build one test case for a specific server configuration.
     *
     * @param unsecureSet true if -noSecurityManager is passed to the server
     * @param authenticationRequired true to enable BUILTIN authentication
     * @param customDerbyProperties properties file to stage as derby.properties, or null
     * @param wildCardHost wildcard host spec for the -h option, or null
     * @param outcome expected boot result and console output
     */
    public SecureServerTest
        (
         boolean unsecureSet,
         boolean authenticationRequired,
         String customDerbyProperties,
         String wildCardHost,
         Outcome outcome
         )
    {
        super( "testServerStartup" );

        _unsecureSet = unsecureSet;
        _authenticationRequired = authenticationRequired;
        _customDerbyProperties = customDerbyProperties;
        _wildCardHost = wildCardHost;

        _outcome = outcome;
    }

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // JUnit MACHINERY
    //
    ///////////////////////////////////////////////////////////////////////////////////

    /**
     * Tests to run.
     */
    public static Test suite()
    {
        //NetworkServerTestSetup.setWaitTime( 10000L );

        TestSuite suite = new TestSuite("SecureServerTest");

        // Server booting requires that we run from the jar files
        if ( !TestConfiguration.loadingFromJars() ) { return suite; }

        // Need derbynet.jar in the classpath!
        if (!Derby.hasServer())
            return suite;

        // O = Overriden
        // A = Authenticated
        // C = Custom properties
        // W = Wildcard host
        //
        //      .addTest( decorateTest( O,        A,       C,     W,    Outcome ) );
        //
        suite.addTest( decorateTest( false,  false,  null,  null, RUNNING_SECURITY_BOOTED ) );
        suite.addTest( decorateTest( false,  false,  BASIC, null, RUNNING_SECURITY_BOOTED ) );
        suite.addTest( decorateTest( false,  true,   null,  null, RUNNING_SECURITY_BOOTED ) );
        suite.addTest( decorateTest( false,  true,   null,  HOSTW, RUNNING_SECURITY_BOOTED ) );
        suite.addTest( decorateTest( false,  true,   null,  ALTW, RUNNING_SECURITY_BOOTED ) );

        // this wildcard port is rejected by the server right now
        //suite.addTest( decorateTest( false,  true,   null,  IPV6W, RUNNING_SECURITY_BOOTED ) );
        suite.addTest( decorateTest( true,   false,  null,  null, RUNNING_SECURITY_NOT_BOOTED ) );
        suite.addTest( decorateTest( true,   true,   null,  null, RUNNING_SECURITY_NOT_BOOTED ) );

        return suite;
    }

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // TEST DECORATION
    //
    ///////////////////////////////////////////////////////////////////////////////////

    /**
     * <p>
     * Compose the required decorators to bring up the server in the correct
     * configuration.
     * </p>
     */
    private static Test decorateTest
        (
         boolean unsecureSet,
         boolean authenticationRequired,
         String customDerbyProperties,
         String wildCardHost,
         Outcome outcome
         )
    {
        SecureServerTest secureServerTest = new SecureServerTest
            (
             unsecureSet,
             authenticationRequired,
             customDerbyProperties,
             wildCardHost,
             outcome
             );

        String[] startupProperties = getStartupProperties( authenticationRequired, customDerbyProperties );
        String[] startupArgs = getStartupArgs( unsecureSet, wildCardHost );

        NetworkServerTestSetup networkServerTestSetup =
            new NetworkServerTestSetup
            (
             secureServerTest,
             startupProperties,
             startupArgs,
             secureServerTest._outcome.serverShouldComeUp()
             );

        secureServerTest.nsTestSetup = networkServerTestSetup;

        Test testSetup =
            SecurityManagerSetup.noSecurityManager(networkServerTestSetup);

        // if using the custom derby.properties, copy the custom properties to a visible place
        if ( customDerbyProperties != null )
        {
            testSetup = new SupportFilesSetup
                (
                 testSetup,
                 null,
                 new String[] { "functionTests/tests/derbynet/SecureServerTest.derby.properties" },
                 null,
                 new String[] { "derby.properties" }
                 );
        }

        Test test = TestConfiguration.defaultServerDecorator( testSetup );
        // DERBY-2109: add support for user credentials
        test = TestConfiguration.changeUserDecorator( test,
                                                      SST_USER_NAME,
                                                      SST_PASSWORD );

        return test;
    }

    /**
     * <p>
     * Return an array of startup args suitable for booting a server.
     * </p>
     */
    private static String[] getStartupArgs( boolean setUnsecureOption, String wildCardHost )
    {
        ArrayList<String> list = new ArrayList<String>();

        if ( setUnsecureOption )
        {
            list.add( "-noSecurityManager" );
        }

        if ( wildCardHost != null )
        {
            list.add( NetworkServerTestSetup.HOST_OPTION );
            list.add( wildCardHost );
        }

        return list.toArray(new String[list.size()]);
    }

    /**
     * <p>
     * Return a set of startup properties suitable for SystemPropertyTestSetup.
     * </p>
     */
    private static String[] getStartupProperties( boolean authenticationRequired, String customDerbyProperties )
    {
        ArrayList<String> list = new ArrayList<String>();

        if ( authenticationRequired )
        {
            list.add( "derby.connection.requireAuthentication=true" );
            list.add( "derby.authentication.provider=BUILTIN" );
            list.add( "derby.user." + SST_USER_NAME + "=" + SST_PASSWORD );
        }

        if ( customDerbyProperties != null )
        {
            // Point derby.system.home at the directory where SupportFilesSetup
            // staged the custom derby.properties.
            list.add( "derby.system.home=extinout" );
        }

        return list.toArray(new String[list.size()]);
    }

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // JUnit TESTS
    //
    ///////////////////////////////////////////////////////////////////////////////////

    /**
     * Verify if the server came up and if so, was a security manager installed.
     */
    public void testServerStartup()
        throws Exception
    {
        String myName = toString();
        boolean serverCameUp = serverCameUp();
        String serverOutput = getServerOutput();
        boolean outputOK = ( serverOutput.indexOf( _outcome.expectedServerOutput() ) >= 0 );

        assertEquals( myName + ": serverCameUp = " + serverCameUp, _outcome.serverShouldComeUp(), serverCameUp );

        if (!(runsWithEmma() || runsWithJaCoCo())) {
            // With Emma we run without the security manager, so we can't
            // assert on seeing it.
            assertTrue( myName + "\nExpected: " +
                        _outcome.expectedServerOutput() +
                        "\nBut saw: " + serverOutput , outputOK );
        }

        //
        // make sure that the default policy lets us connect to the server if the hostname was
        // wildcarded (DERBY-2811)
        //
        if ( _authenticationRequired && ( _wildCardHost != null ) ) { connectToServer(); }

        //
        // make sure that we can run sysinfo and turn on tracing (DERBY-3086)
        //
        runsysinfo();
        enableTracing();
        setTraceDirectory();
        disableTracing();
    }

    /** Turn server tracing off and verify the server acknowledged it. */
    private void disableTracing() throws Exception {
        String traceOffOutput = runServerCommand(
            new String[] { "trace", "off" });

        println( "Output for trace off command:\n\n" + traceOffOutput );

        if ( traceOffOutput.indexOf( "Trace turned off for all sessions." ) < 0 )
        { fail( "Failed to turn trace off:\n\n:" + traceOffOutput ); }
    }

    /**
     * Change the trace directory, verify the server acknowledged the change,
     * check the server still answers a ping, and confirm the directory exists.
     */
    private void setTraceDirectory() throws Exception {
        String traceDirectoryOutput = runServerCommand(
            new String[] { "tracedirectory", "trace" });
        println( "Output for tracedirectory trace command:\n\n" + traceDirectoryOutput );

        if ( traceDirectoryOutput.indexOf( "Trace directory changed to trace." ) < 0 )
        { fail( "Unexpected output in setting trace directory:" + traceDirectoryOutput ); }

        String pingOutput = runServerCommand( new String[] { "ping" } );

        if (pingOutput.indexOf("Connection obtained for host:") < 0)
        { fail ("Failed ping after changing trace directory: " + pingOutput);}
        assertTrue("directory trace does not exist",
                   PrivilegedFileOpsForTests.exists(new File("trace")));
    }

    /**
     * Open (and immediately close) a client connection using the configured
     * credentials, proving the default policy permits connections when the
     * server was started on a wildcard host (DERBY-2811).
     */
    private void connectToServer()
        throws Exception
    {
        final TestConfiguration config = getTestConfiguration();
        String url
            = ( "jdbc:derby://localhost:" + config.getPort()
                + "/" + "wombat;create=true"
                + ";user=" + config.getUserName()
                + ";password=" + config.getUserPassword() );

        println( "XXX in connectToServer(). url = " + url );

        // just try to get a connection
        Class.forName( "org.apache.derby.jdbc.ClientDriver" );

        Connection conn = DriverManager.getConnection(  url );

        assertNotNull( "Connection should not be null...", conn );

        conn.close();
    }

    /** Run the sysinfo command and fail on any security exception in its output. */
    private void runsysinfo()
        throws Exception
    {
        String sysinfoOutput = runServerCommand(
            new String[] { "sysinfo" } );

        if ( sysinfoOutput.indexOf( "Security Exception:" ) > -1 )
        { fail( "Security exceptions in sysinfo output:\n\n:" + sysinfoOutput ); }
    }

    /** Turn server tracing on and verify the server acknowledged it. */
    private void enableTracing()
        throws Exception
    {
        String traceOnOutput = runServerCommand(
            new String[] { "trace", "on" } );

        println( "Output for trace on command:\n\n" + traceOnOutput );

        if ( traceOnOutput.indexOf( "Trace turned on for all sessions." ) < 0 )
        { fail( "Security exceptions in output of trace enabling command:\n\n:" + traceOnOutput ); }
    }

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // Object OVERLOADS
    //
    ///////////////////////////////////////////////////////////////////////////////////

    public String toString()
    {
        StringBuilder buffer = new StringBuilder();

        buffer.append( "SecureServerTest( " );
        buffer.append( "Opened = " ); buffer.append( _unsecureSet);
        buffer.append( ", Authenticated= " ); buffer.append( _authenticationRequired );
        buffer.append( ", CustomDerbyProperties= " ); buffer.append( _customDerbyProperties );
        buffer.append( ", WildCardHost= " ); buffer.append( _wildCardHost );
        buffer.append( " )" );

        return buffer.toString();
    }

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // MINIONS
    //
    ///////////////////////////////////////////////////////////////////////////////////

    /**
     * <p>
     * Run a NetworkServerControl command.
     * </p>
     *
     * @param commandSpecifics command name plus its arguments, e.g. {"trace", "on"}
     * @return the full console output of the spawned command process
     */
    private String runServerCommand( String[] commandSpecifics )
        throws Exception
    {
        String portNumber = Integer.toString( getTestConfiguration().getPort() );

        ArrayList<String> cmdList = new ArrayList<String>();
        cmdList.add("-Demma.verbosity.level=silent");
        cmdList.add("org.apache.derby.drda.NetworkServerControl");
        cmdList.add("-p");
        cmdList.add(portNumber);
        cmdList.addAll(Arrays.asList(commandSpecifics));

        // Pass a zero-length array so toArray always allocates a fresh, correctly
        // sized String[]. Previously commandSpecifics itself was passed as the
        // destination array (with a redundant cast), which only produced the right
        // result because cmdList is always longer, forcing a new allocation anyway.
        String[] cmd = cmdList.toArray(new String[0]);

        Process serverProcess = execJavaCmd(cmd);
        SpawnedProcess spawned = new SpawnedProcess(serverProcess,
                                                    cmdList.toString());

        // Ensure it completes without failures.
        assertEquals(0, spawned.complete());
        return spawned.getFullServerOutput();
    }

    /** Drain and return the server process's output produced since the last read. */
    private String getServerOutput()
        throws Exception
    {
        return nsTestSetup.getServerProcess().getNextServerOutput();
    }

    /** Console message the server prints when the Basic security policy is installed. */
    private static String serverBootedOK()
    {
        return "Security manager installed using the Basic server security policy.";
    }

    /** Ping the server to decide whether it came up. */
    private boolean serverCameUp()
        throws Exception
    {
        return NetworkServerTestSetup.pingForServerUp(
            NetworkServerTestSetup.getNetworkServerControl(),
            nsTestSetup.getServerProcess().getProcess(), true);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.ingest.Pipeline;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.AfterClass;
import org.junit.Before;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.Objects;
import java.util.stream.Collectors;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
/**
 * Base class for high-level REST client integration tests. Lazily wraps the shared
 * low-level {@link RestClient} in a {@link RestHighLevelClient} that survives for the
 * whole test class, and offers helpers that randomly exercise either the sync or the
 * async variant of a client method, plus common ingest/search/index utilities.
 */
public abstract class ESRestHighLevelClientTestCase extends ESRestTestCase {

    // Shared across all tests in a class; created on first use, torn down in @AfterClass.
    private static RestHighLevelClient restHighLevelClient;

    @Before
    public void initHighLevelClient() throws IOException {
        super.initClient();
        if (restHighLevelClient == null) {
            restHighLevelClient = new HighLevelClient(client());
        }
    }

    @AfterClass
    public static void cleanupClient() throws IOException {
        IOUtils.close(restHighLevelClient);
        restHighLevelClient = null;
    }

    /** @return the shared high-level client initialized by {@link #initHighLevelClient()} */
    protected static RestHighLevelClient highLevelClient() {
        return restHighLevelClient;
    }

    /**
     * Executes the provided request using either the sync method or its async variant, both provided as functions
     */
    protected static <Req, Resp> Resp execute(Req request, SyncMethod<Req, Resp> syncMethod,
                                              AsyncMethod<Req, Resp> asyncMethod) throws IOException {
        return execute(request, syncMethod, asyncMethod, RequestOptions.DEFAULT);
    }

    /**
     * Executes the provided request using either the sync method or its async variant, both provided as functions
     */
    protected static <Req, Resp> Resp execute(Req request, SyncMethod<Req, Resp> syncMethod,
                                              AsyncMethod<Req, Resp> asyncMethod, RequestOptions options) throws IOException {
        // Randomly cover both code paths so every test exercises sync and async over time.
        if (randomBoolean()) {
            return syncMethod.execute(request, options);
        } else {
            PlainActionFuture<Resp> future = PlainActionFuture.newFuture();
            asyncMethod.execute(request, options, future);
            return future.actionGet();
        }
    }

    /**
     * Executes the provided request using either the sync method or its async
     * variant, both provided as functions. This variant is used when the call does
     * not have a request object (only headers and the request path).
     */
    protected static <Resp> Resp execute(SyncMethodNoRequest<Resp> syncMethodNoRequest, AsyncMethodNoRequest<Resp> asyncMethodNoRequest,
                                         RequestOptions requestOptions) throws IOException {
        if (randomBoolean()) {
            return syncMethodNoRequest.execute(requestOptions);
        } else {
            PlainActionFuture<Resp> future = PlainActionFuture.newFuture();
            asyncMethodNoRequest.execute(requestOptions, future);
            return future.actionGet();
        }
    }

    /** Sync client call taking a request object. */
    @FunctionalInterface
    protected interface SyncMethod<Request, Response> {
        Response execute(Request request, RequestOptions options) throws IOException;
    }

    /** Sync client call with no request object (path/headers only). */
    @FunctionalInterface
    protected interface SyncMethodNoRequest<Response> {
        Response execute(RequestOptions options) throws IOException;
    }

    /** Async client call taking a request object. */
    @FunctionalInterface
    protected interface AsyncMethod<Request, Response> {
        void execute(Request request, RequestOptions options, ActionListener<Response> listener);
    }

    /** Async client call with no request object (path/headers only). */
    @FunctionalInterface
    protected interface AsyncMethodNoRequest<Response> {
        void execute(RequestOptions options, ActionListener<Response> listener);
    }

    /**
     * Minimal concrete subclass used only to reach RestHighLevelClient's protected
     * constructor with the test's named xcontent registry.
     */
    private static class HighLevelClient extends RestHighLevelClient {
        private HighLevelClient(RestClient restClient) {
            super(restClient, (client) -> {}, new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents());
        }
    }

    /**
     * Fills the given builder with a fixed two-processor ("set" + "convert")
     * ingest pipeline definition and returns it.
     */
    protected static XContentBuilder buildRandomXContentPipeline(XContentBuilder pipelineBuilder) throws IOException {
        pipelineBuilder.startObject();
        {
            pipelineBuilder.field(Pipeline.DESCRIPTION_KEY, "some random set of processors");
            pipelineBuilder.startArray(Pipeline.PROCESSORS_KEY);
            {
                pipelineBuilder.startObject().startObject("set");
                {
                    pipelineBuilder
                        .field("field", "foo")
                        .field("value", "bar");
                }
                pipelineBuilder.endObject().endObject();
                pipelineBuilder.startObject().startObject("convert");
                {
                    pipelineBuilder
                        .field("field", "rank")
                        .field("type", "integer");
                }
                pipelineBuilder.endObject().endObject();
            }
            pipelineBuilder.endArray();
        }
        pipelineBuilder.endObject();
        return pipelineBuilder;
    }

    /** Same pipeline as above, rendered in a randomly chosen XContent type. */
    protected static XContentBuilder buildRandomXContentPipeline() throws IOException {
        XContentType xContentType = randomFrom(XContentType.values());
        XContentBuilder pipelineBuilder = XContentBuilder.builder(xContentType.xContent());
        return buildRandomXContentPipeline(pipelineBuilder);
    }

    /**
     * Creates an ingest pipeline with a single "set" processor that adds
     * {@code fieldName=value} to every document.
     * NOTE: the method name misspells "Pipeline"; it is kept as-is because external
     * test classes call it — renaming would break the public interface.
     */
    protected static void createFieldAddingPipleine(String id, String fieldName, String value) throws IOException {
        XContentBuilder pipeline = jsonBuilder()
            .startObject()
                .startArray("processors")
                    .startObject()
                        .startObject("set")
                            .field("field", fieldName)
                            .field("value", value)
                        .endObject()
                    .endObject()
                .endArray()
            .endObject();

        createPipeline(new PutPipelineRequest(id, BytesReference.bytes(pipeline), XContentType.JSON));
    }

    /** Registers the fixed random-xcontent pipeline under the given id. */
    protected static void createPipeline(String pipelineId) throws IOException {
        XContentBuilder builder = buildRandomXContentPipeline();
        createPipeline(new PutPipelineRequest(pipelineId, BytesReference.bytes(builder), builder.contentType()));
    }

    /** Puts the pipeline via the high-level client and asserts it was acknowledged. */
    protected static void createPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
        assertTrue(execute(
            putPipelineRequest, highLevelClient().ingest()::putPipeline, highLevelClient().ingest()::putPipelineAsync).isAcknowledged());
    }

    /** Applies persistent and transient cluster settings and asserts acknowledgement. */
    protected static void clusterUpdateSettings(Settings persistentSettings,
                                                Settings transientSettings) throws IOException {
        ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
        request.persistentSettings(persistentSettings);
        request.transientSettings(transientSettings);
        assertTrue(execute(
            request, highLevelClient().cluster()::putSettings, highLevelClient().cluster()::putSettingsAsync).isAcknowledged());
    }

    @Override
    protected Settings restClientSettings() {
        // Both system properties are mandatory for these tests; fail fast if missing.
        final String user = Objects.requireNonNull(System.getProperty("tests.rest.cluster.username"));
        final String pass = Objects.requireNonNull(System.getProperty("tests.rest.cluster.password"));
        final String token = "Basic " + Base64.getEncoder().encodeToString((user + ":" + pass).getBytes(StandardCharsets.UTF_8));
        return Settings.builder()
            .put(super.restClientSettings())
            .put(ThreadContext.PREFIX + ".Authorization", token)
            .build();
    }

    /** Refreshes the given indices and returns all hits of a match-all search. */
    protected Iterable<SearchHit> searchAll(String... indices) throws IOException {
        SearchRequest searchRequest = new SearchRequest(indices);
        return searchAll(searchRequest);
    }

    /** Refreshes the request's target indices, then runs the search and returns its hits. */
    protected Iterable<SearchHit> searchAll(SearchRequest searchRequest) throws IOException {
        refreshIndexes(searchRequest.indices());
        SearchResponse search = highLevelClient().search(searchRequest, RequestOptions.DEFAULT);
        return search.getHits();
    }

    /** Issues a _refresh on the given indices and asserts the call succeeded. */
    protected void refreshIndexes(String... indices) throws IOException {
        // String.join replaces the previous Arrays.stream(...).collect(joining(","))
        // pipeline: same result, simpler and allocation-light.
        String joinedIndices = String.join(",", indices);
        Response refreshResponse = client().performRequest(new Request("POST", "/" + joinedIndices + "/_refresh"));
        assertEquals(200, refreshResponse.getStatusLine().getStatusCode());
    }

    /** Creates an index with 8-10 primary shards and no replicas. */
    protected void createIndexWithMultipleShards(String index) throws IOException {
        CreateIndexRequest indexRequest = new CreateIndexRequest(index);
        int shards = randomIntBetween(8, 10);
        indexRequest.settings(Settings.builder()
            .put("index.number_of_shards", shards)
            .put("index.number_of_replicas", 0)
        );
        highLevelClient().indices().create(indexRequest, RequestOptions.DEFAULT);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.