gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.cordova;
import java.util.ArrayList;
import java.util.Locale;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.app.AlertDialog;
import android.annotation.SuppressLint;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.Configuration;
import android.graphics.Color;
import android.media.AudioManager;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.webkit.WebViewClient;
import android.widget.FrameLayout;
/**
* This class is the main Android activity that represents the Cordova
* application. It should be extended by the user to load the specific
* html file that contains the application.
*
* As an example:
*
* <pre>
* package org.apache.cordova.examples;
*
* import android.os.Bundle;
* import org.apache.cordova.*;
*
* public class Example extends CordovaActivity {
* @Override
* public void onCreate(Bundle savedInstanceState) {
* super.onCreate(savedInstanceState);
* super.init();
* // Load your application
* loadUrl(launchUrl);
* }
* }
* </pre>
*
* Cordova xml configuration: Cordova uses a configuration file at
* res/xml/config.xml to specify its settings. See "The config.xml File"
* guide in cordova-docs at http://cordova.apache.org/docs for the documentation
* for the configuration. The use of the set*Property() methods is
* deprecated in favor of the config.xml file.
*
*/
public class CordovaActivity extends Activity {
    public static String TAG = "CordovaActivity";

    /** The web view that hosts the Cordova application. */
    protected CordovaWebView appView;

    // Internal activity lifecycle states. Private, so safe to make final.
    private static final int ACTIVITY_STARTING = 0;
    private static final int ACTIVITY_RUNNING = 1;
    private static final int ACTIVITY_EXITING = 2;

    /**
     * Keep app running when pause is received. (default = true)
     * If true, then the JavaScript and native code continue to run in the
     * background when another application (activity) is started.
     */
    protected boolean keepRunning = true;

    /** Flag to (re)apply immersive mode when the window regains focus, if Fullscreen is set. */
    protected boolean immersiveMode;

    // Read from config.xml:
    protected CordovaPreferences preferences;
    protected String launchUrl;
    protected ArrayList<PluginEntry> pluginEntries;
    protected CordovaInterfaceImpl cordovaInterface;

    /**
     * Called when the activity is first created. Loads the Cordova
     * configuration and applies window features (title bar, fullscreen)
     * before {@code super.onCreate()} so they take effect.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        LOG.i(TAG, "Apache Cordova native platform version " + CordovaWebView.CORDOVA_VERSION + " is starting");
        LOG.d(TAG, "CordovaActivity.onCreate()");
        // Need to activate preferences before super.onCreate to avoid
        // "requestFeature() must be called before adding content" exception.
        loadConfig();
        if (!preferences.getBoolean("ShowTitle", false)) {
            getWindow().requestFeature(Window.FEATURE_NO_TITLE);
        }
        if (preferences.getBoolean("SetFullscreen", false)) {
            // Use the project LOG facade, consistent with every other log call in this class.
            LOG.d(TAG, "The SetFullscreen configuration is deprecated in favor of Fullscreen, and will be removed in a future version.");
            preferences.set("Fullscreen", true);
        }
        if (preferences.getBoolean("Fullscreen", false)) {
            // On KitKat+ defer to immersive mode (applied in onWindowFocusChanged);
            // older releases fall back to the FLAG_FULLSCREEN window flag.
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
                immersiveMode = true;
            } else {
                getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                        WindowManager.LayoutParams.FLAG_FULLSCREEN);
            }
        } else {
            getWindow().setFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN,
                    WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
        }
        super.onCreate(savedInstanceState);
        cordovaInterface = makeCordovaInterface();
        if (savedInstanceState != null) {
            cordovaInterface.restoreInstanceState(savedInstanceState);
        }
    }

    /**
     * Creates the web view, attaches it to the activity and initializes the
     * plugin machinery. Invoked lazily from {@link #loadUrl(String)} when no
     * web view exists yet; subclasses may also call it directly.
     */
    protected void init() {
        appView = makeWebView();
        createViews();
        if (!appView.isInitialized()) {
            appView.init(cordovaInterface, pluginEntries, preferences);
        }
        cordovaInterface.onCordovaInit(appView.getPluginManager());
        // Wire the hardware volume controls to control media if desired.
        String volumePref = preferences.getString("DefaultVolumeStream", "");
        if ("media".equals(volumePref.toLowerCase(Locale.ENGLISH))) {
            setVolumeControlStream(AudioManager.STREAM_MUSIC);
        }
    }

    /**
     * Parses res/xml/config.xml and populates {@link #preferences},
     * {@link #launchUrl} and {@link #pluginEntries}.
     */
    @SuppressWarnings("deprecation")
    protected void loadConfig() {
        ConfigXmlParser parser = new ConfigXmlParser();
        parser.parse(this);
        preferences = parser.getPreferences();
        preferences.setPreferencesBundle(getIntent().getExtras());
        launchUrl = parser.getLaunchUrl();
        pluginEntries = parser.getPluginEntries();
        Config.parser = parser;
    }

    /**
     * Sizes the web view to fill the activity, makes it the content view and
     * applies the optional BackgroundColor preference.
     */
    //Suppressing warnings in AndroidStudio
    @SuppressWarnings({"deprecation", "ResourceType"})
    protected void createViews() {
        //Why are we setting a constant as the ID? This should be investigated
        appView.getView().setId(100);
        appView.getView().setLayoutParams(new FrameLayout.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.MATCH_PARENT));
        setContentView(appView.getView());
        if (preferences.contains("BackgroundColor")) {
            int backgroundColor = preferences.getInteger("BackgroundColor", Color.BLACK);
            // Background of activity:
            appView.getView().setBackgroundColor(backgroundColor);
        }
        appView.getView().requestFocusFromTouch();
    }

    /**
     * Construct the default web view object.
     *
     * Override this to customize the webview that is used.
     */
    protected CordovaWebView makeWebView() {
        return new CordovaWebViewImpl(makeWebViewEngine());
    }

    /** Creates the engine backing the web view, honoring the configured preferences. */
    protected CordovaWebViewEngine makeWebViewEngine() {
        return CordovaWebViewImpl.createEngine(this, preferences);
    }

    /** Creates the CordovaInterface, routing onMessage back to this activity for backwards compatibility. */
    protected CordovaInterfaceImpl makeCordovaInterface() {
        return new CordovaInterfaceImpl(this) {
            @Override
            public Object onMessage(String id, Object data) {
                // Plumb this to CordovaActivity.onMessage for backwards compatibility
                return CordovaActivity.this.onMessage(id, data);
            }
        };
    }

    /**
     * Load the url into the webview, initializing the view first if needed.
     */
    public void loadUrl(String url) {
        if (appView == null) {
            init();
        }
        // If keepRunning
        this.keepRunning = preferences.getBoolean("KeepRunning", true);
        appView.loadUrlIntoView(url, true);
    }

    /**
     * Called when the system is about to start resuming a previous activity.
     */
    @Override
    protected void onPause() {
        super.onPause();
        LOG.d(TAG, "Paused the activity.");
        if (this.appView != null) {
            // CB-9382 If there is an activity that started for result and main activity is waiting for callback
            // result, we shouldn't stop WebView Javascript timers, as activity for result might be using them
            boolean keepRunning = this.keepRunning || this.cordovaInterface.activityResultCallback != null;
            this.appView.handlePause(keepRunning);
        }
    }

    /**
     * Called when the activity receives a new intent; forwarded to plugins.
     **/
    @Override
    protected void onNewIntent(Intent intent) {
        super.onNewIntent(intent);
        //Forward to plugins
        if (this.appView != null)
            this.appView.onNewIntent(intent);
    }

    /**
     * Called when the activity will start interacting with the user.
     */
    @Override
    protected void onResume() {
        super.onResume();
        LOG.d(TAG, "Resumed the activity.");
        if (this.appView == null) {
            return;
        }
        // Force window to have focus, so application always
        // receive user input. Workaround for some devices (Samsung Galaxy Note 3 at least)
        this.getWindow().getDecorView().requestFocus();
        this.appView.handleResume(this.keepRunning);
    }

    /**
     * Called when the activity is no longer visible to the user.
     */
    @Override
    protected void onStop() {
        super.onStop();
        LOG.d(TAG, "Stopped the activity.");
        if (this.appView == null) {
            return;
        }
        this.appView.handleStop();
    }

    /**
     * Called when the activity is becoming visible to the user.
     */
    @Override
    protected void onStart() {
        super.onStart();
        LOG.d(TAG, "Started the activity.");
        if (this.appView == null) {
            return;
        }
        this.appView.handleStart();
    }

    /**
     * The final call you receive before your activity is destroyed.
     */
    @Override
    public void onDestroy() {
        LOG.d(TAG, "CordovaActivity.onDestroy()");
        super.onDestroy();
        if (this.appView != null) {
            appView.handleDestroy();
        }
    }

    /**
     * Called when view focus is changed; re-applies immersive mode if enabled.
     */
    @Override
    public void onWindowFocusChanged(boolean hasFocus) {
        super.onWindowFocusChanged(hasFocus);
        if (hasFocus && immersiveMode) {
            final int uiOptions = View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                    | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                    | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
                    | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
                    | View.SYSTEM_UI_FLAG_FULLSCREEN
                    | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
            getWindow().getDecorView().setSystemUiVisibility(uiOptions);
        }
    }

    @SuppressLint("NewApi")
    @Override
    public void startActivityForResult(Intent intent, int requestCode, Bundle options) {
        // Capture requestCode here so that it is captured in the setActivityResultCallback() case.
        cordovaInterface.setActivityResultRequestCode(requestCode);
        super.startActivityForResult(intent, requestCode, options);
    }

    /**
     * Called when an activity you launched exits, giving you the requestCode you started it with,
     * the resultCode it returned, and any additional data from it.
     *
     * @param requestCode The request code originally supplied to startActivityForResult(),
     *                    allowing you to identify who this result came from.
     * @param resultCode  The integer result code returned by the child activity through its setResult().
     * @param intent      An Intent, which can return result data to the caller (various data can be attached to Intent "extras").
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
        LOG.d(TAG, "Incoming Result. Request code = " + requestCode);
        super.onActivityResult(requestCode, resultCode, intent);
        cordovaInterface.onActivityResult(requestCode, resultCode, intent);
    }

    /**
     * Report an error to the host application. These errors are unrecoverable (i.e. the main resource is unavailable).
     * The errorCode parameter corresponds to one of the ERROR_* constants.
     *
     * @param errorCode   The error code corresponding to an ERROR_* value.
     * @param description A String describing the error.
     * @param failingUrl  The url that failed to load.
     */
    public void onReceivedError(final int errorCode, final String description, final String failingUrl) {
        final CordovaActivity me = this;
        // If errorUrl specified, then load it
        final String errorUrl = preferences.getString("errorUrl", null);
        if ((errorUrl != null) && (!failingUrl.equals(errorUrl)) && (appView != null)) {
            // Load URL on UI thread
            me.runOnUiThread(new Runnable() {
                public void run() {
                    me.appView.showWebPage(errorUrl, false, true, null);
                }
            });
        }
        // If not, then display error dialog
        else {
            // Host-lookup failures are treated as transient; anything else exits the app.
            final boolean exit = !(errorCode == WebViewClient.ERROR_HOST_LOOKUP);
            me.runOnUiThread(new Runnable() {
                public void run() {
                    if (exit) {
                        me.appView.getView().setVisibility(View.GONE);
                        me.displayError("Application Error", description + " (" + failingUrl + ")", "OK", exit);
                    }
                }
            });
        }
    }

    /**
     * Display an error dialog and optionally exit application.
     */
    public void displayError(final String title, final String message, final String button, final boolean exit) {
        final CordovaActivity me = this;
        me.runOnUiThread(new Runnable() {
            public void run() {
                try {
                    AlertDialog.Builder dlg = new AlertDialog.Builder(me);
                    dlg.setMessage(message);
                    dlg.setTitle(title);
                    dlg.setCancelable(false);
                    dlg.setPositiveButton(button,
                            new DialogInterface.OnClickListener() {
                                public void onClick(DialogInterface dialog, int which) {
                                    dialog.dismiss();
                                    if (exit) {
                                        finish();
                                    }
                                }
                            });
                    // Builder.show() creates and displays the dialog itself; a separate
                    // create() call would build a second, unused dialog instance.
                    dlg.show();
                } catch (Exception e) {
                    // If the dialog cannot be shown (e.g. activity finishing), just exit.
                    finish();
                }
            }
        });
    }

    /*
     * Hook in Cordova for menu plugins
     */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        if (appView != null) {
            appView.getPluginManager().postMessage("onCreateOptionsMenu", menu);
        }
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onPrepareOptionsMenu(Menu menu) {
        if (appView != null) {
            appView.getPluginManager().postMessage("onPrepareOptionsMenu", menu);
        }
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (appView != null) {
            appView.getPluginManager().postMessage("onOptionsItemSelected", item);
        }
        return true;
    }

    /**
     * Called when a message is sent to plugin.
     *
     * @param id   The message id
     * @param data The message data
     * @return Object or null
     */
    public Object onMessage(String id, Object data) {
        if ("onReceivedError".equals(id)) {
            JSONObject d = (JSONObject) data;
            try {
                this.onReceivedError(d.getInt("errorCode"), d.getString("description"), d.getString("url"));
            } catch (JSONException e) {
                e.printStackTrace();
            }
        } else if ("exit".equals(id)) {
            finish();
        }
        return null;
    }

    /** Persists Cordova plugin state alongside the framework's saved instance state. */
    @Override
    protected void onSaveInstanceState(Bundle outState) {
        cordovaInterface.onSaveInstanceState(outState);
        super.onSaveInstanceState(outState);
    }

    /**
     * Called by the system when the device configuration changes while your activity is running.
     *
     * @param newConfig The new device configuration
     */
    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        if (this.appView == null) {
            return;
        }
        PluginManager pm = this.appView.getPluginManager();
        if (pm != null) {
            pm.onConfigurationChanged(newConfig);
        }
    }
}
| |
/**
*/
package microserviceMetamodel.impl;
import microserviceMetamodel.AnomalyInjector;
import microserviceMetamodel.AnotherMicroserviceMetamodelFactory;
import microserviceMetamodel.AnotherMicroserviceMetamodelPackage;
import microserviceMetamodel.Configuration;
import microserviceMetamodel.DependencyModel;
import microserviceMetamodel.Endpoint;
import microserviceMetamodel.ExecutionEnvironment;
import microserviceMetamodel.Host;
import microserviceMetamodel.InfrastructureModel;
import microserviceMetamodel.MetaModelStructure;
import microserviceMetamodel.Microservice;
import microserviceMetamodel.MicroserviceOperationTimeSeriesPoint;
import microserviceMetamodel.MicroserviceRepository;
import microserviceMetamodel.MicroserviceType;
import microserviceMetamodel.OperationToOperationCallingDependency;
import microserviceMetamodel.PhysicalHost;
import microserviceMetamodel.RESTOperation;
import microserviceMetamodel.RESTVerb;
import microserviceMetamodel.TimeSeries;
import microserviceMetamodel.TimeSeriesPoint;
import microserviceMetamodel.Version;
import microserviceMetamodel.VirtualHost;
import microserviceMetamodel.util.AnotherMicroserviceMetamodelValidator;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.EValidator;
import org.eclipse.emf.ecore.impl.EPackageImpl;
import org.eclipse.emf.ecore.xml.type.XMLTypePackage;
/**
* <!-- begin-user-doc -->
* An implementation of the model <b>Package</b>.
* <!-- end-user-doc -->
* @generated
*/
public class AnotherMicroserviceMetamodelPackageImpl extends EPackageImpl implements AnotherMicroserviceMetamodelPackage {
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass microserviceEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass configurationEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass executionEnvironmentEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass anomalyInjectorEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass versionEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass endpointEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass restOperationEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass microserviceTypeEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass microserviceRepositoryEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass hostEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass containerEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass virtualHostEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass physicalHostEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass infrastructureModelEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass dependencyModelEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass operationToOperationCallingDependencyEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass timeSeriesEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass timeSeriesPointEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass microserviceOperationTimeSeriesPointEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EClass metaModelStructureEClass = null;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private EEnum restVerbEEnum = null;
/**
* Creates an instance of the model <b>Package</b>, registered with
* {@link org.eclipse.emf.ecore.EPackage.Registry EPackage.Registry} by the package
* package URI value.
* <p>Note: the correct way to create the package is via the static
* factory method {@link #init init()}, which also performs
* initialization of the package, or returns the registered package,
* if one already exists.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see org.eclipse.emf.ecore.EPackage.Registry
* @see microserviceMetamodel.AnotherMicroserviceMetamodelPackage#eNS_URI
* @see #init()
* @generated
*/
private AnotherMicroserviceMetamodelPackageImpl() {
// Private: instances must be obtained via the static init() factory, which
// also registers this package in the global EPackage.Registry.
super(eNS_URI, AnotherMicroserviceMetamodelFactory.eINSTANCE);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private static boolean isInited = false;
/**
* Creates, registers, and initializes the <b>Package</b> for this model, and for any others upon which it depends.
*
* <p>This method is used to initialize {@link AnotherMicroserviceMetamodelPackage#eINSTANCE} when that field is accessed.
* Clients should not invoke it directly. Instead, they should simply access that field to obtain the package.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #eNS_URI
* @see #createPackageContents()
* @see #initializePackageContents()
* @generated
*/
public static AnotherMicroserviceMetamodelPackage init() {
// Idempotent: after the first call, return the already-registered package.
if (isInited) return (AnotherMicroserviceMetamodelPackage)EPackage.Registry.INSTANCE.getEPackage(AnotherMicroserviceMetamodelPackage.eNS_URI);
// Obtain or create and register package
AnotherMicroserviceMetamodelPackageImpl theAnotherMicroserviceMetamodelPackage = (AnotherMicroserviceMetamodelPackageImpl)(EPackage.Registry.INSTANCE.get(eNS_URI) instanceof AnotherMicroserviceMetamodelPackageImpl ? EPackage.Registry.INSTANCE.get(eNS_URI) : new AnotherMicroserviceMetamodelPackageImpl());
// Set before initialization so re-entrant calls short-circuit above.
isInited = true;
// Initialize simple dependencies
XMLTypePackage.eINSTANCE.eClass();
// Create package meta-data objects
theAnotherMicroserviceMetamodelPackage.createPackageContents();
// Initialize created meta-data
theAnotherMicroserviceMetamodelPackage.initializePackageContents();
// Register package validator
// (descriptor defers validator lookup until validation actually runs)
EValidator.Registry.INSTANCE.put
(theAnotherMicroserviceMetamodelPackage,
new EValidator.Descriptor() {
public EValidator getEValidator() {
return AnotherMicroserviceMetamodelValidator.INSTANCE;
}
});
// Mark meta-data to indicate it can't be changed
theAnotherMicroserviceMetamodelPackage.freeze();
// Update the registry and return the package
EPackage.Registry.INSTANCE.put(AnotherMicroserviceMetamodelPackage.eNS_URI, theAnotherMicroserviceMetamodelPackage);
return theAnotherMicroserviceMetamodelPackage;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getMicroservice() {
return microserviceEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroservice_Environment() {
return (EReference)microserviceEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroservice_Endpoints() {
return (EReference)microserviceEClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroservice_MicroserviceType() {
return (EReference)microserviceEClass.getEStructuralFeatures().get(2);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroservice_Version() {
return (EReference)microserviceEClass.getEStructuralFeatures().get(3);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getMicroservice_Uuid() {
return (EAttribute)microserviceEClass.getEStructuralFeatures().get(4);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getConfiguration() {
return configurationEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getConfiguration_Microservices() {
return (EReference)configurationEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getExecutionEnvironment() {
return executionEnvironmentEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getExecutionEnvironment_Microservices() {
return (EReference)executionEnvironmentEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getAnomalyInjector() {
return anomalyInjectorEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getVersion() {
return versionEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getVersion_VersionString() {
return (EAttribute)versionEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getEndpoint() {
return endpointEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getEndpoint_RestOperations() {
return (EReference)endpointEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getEndpoint_IpAddress() {
return (EAttribute)endpointEClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getEndpoint_Port() {
return (EAttribute)endpointEClass.getEStructuralFeatures().get(2);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getEndpoint_Url() {
return (EAttribute)endpointEClass.getEStructuralFeatures().get(3);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getEndpoint_Protocol() {
return (EAttribute)endpointEClass.getEStructuralFeatures().get(4);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getRESTOperation() {
return restOperationEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getRESTOperation_Name() {
return (EAttribute)restOperationEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getRESTOperation_SubPath() {
return (EAttribute)restOperationEClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getRESTOperation_RestVerb() {
return (EAttribute)restOperationEClass.getEStructuralFeatures().get(2);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getMicroserviceType() {
return microserviceTypeEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroserviceType_RestOperations() {
return (EReference)microserviceTypeEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroserviceType_Microservices() {
return (EReference)microserviceTypeEClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroserviceType_Versions() {
return (EReference)microserviceTypeEClass.getEStructuralFeatures().get(2);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getMicroserviceType_Identifier() {
return (EAttribute)microserviceTypeEClass.getEStructuralFeatures().get(3);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroserviceType_Dependencies() {
return (EReference)microserviceTypeEClass.getEStructuralFeatures().get(4);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getMicroserviceRepository() {
return microserviceRepositoryEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroserviceRepository_MicroserviceTypes() {
return (EReference)microserviceRepositoryEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getHost() {
return hostEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getHost_Containers() {
return (EReference)hostEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EAttribute getHost_Hostname() {
return (EAttribute)hostEClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getContainer() {
return containerEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getContainer_Host() {
return (EReference)containerEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getVirtualHost() {
return virtualHostEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getVirtualHost_ParentHost() {
return (EReference)virtualHostEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getPhysicalHost() {
return physicalHostEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getPhysicalHost_VirtualHosts() {
return (EReference)physicalHostEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getInfrastructureModel() {
return infrastructureModelEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getInfrastructureModel_Hosts() {
return (EReference)infrastructureModelEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getDependencyModel() {
return dependencyModelEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getDependencyModel_OperationToOperationCallingDependencies() {
return (EReference)dependencyModelEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
// Meta object for the OperationToOperationCallingDependency class.
public EClass getOperationToOperationCallingDependency() {
return operationToOperationCallingDependencyEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
// NOTE(review): callingMicroservice maps to feature index 4 while the other
// four features occupy indices 0-3 — verify this ordering matches the
// feature declaration order in the .ecore model / createPackageContents().
public EReference getOperationToOperationCallingDependency_CallingMicroservice() {
return (EReference)operationToOperationCallingDependencyEClass.getEStructuralFeatures().get(4);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getOperationToOperationCallingDependency_CalledMicroservice() {
return (EReference)operationToOperationCallingDependencyEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getOperationToOperationCallingDependency_CalledOperation() {
return (EReference)operationToOperationCallingDependencyEClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getOperationToOperationCallingDependency_CallingOperation() {
return (EReference)operationToOperationCallingDependencyEClass.getEStructuralFeatures().get(2);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getOperationToOperationCallingDependency_CallingVersion() {
return (EReference)operationToOperationCallingDependencyEClass.getEStructuralFeatures().get(3);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getTimeSeries() {
return timeSeriesEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getTimeSeries_TimeSeriesPoints() {
return (EReference)timeSeriesEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getTimeSeriesPoint() {
return timeSeriesPointEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getTimeSeriesPoint_TimeSeries() {
return (EReference)timeSeriesPointEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getMicroserviceOperationTimeSeriesPoint() {
return microserviceOperationTimeSeriesPointEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroserviceOperationTimeSeriesPoint_Endpoint() {
return (EReference)microserviceOperationTimeSeriesPointEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMicroserviceOperationTimeSeriesPoint_RestOperation() {
return (EReference)microserviceOperationTimeSeriesPointEClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EClass getMetaModelStructure() {
return metaModelStructureEClass;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMetaModelStructure_Infrastructure() {
return (EReference)metaModelStructureEClass.getEStructuralFeatures().get(0);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMetaModelStructure_Configurations() {
return (EReference)metaModelStructureEClass.getEStructuralFeatures().get(1);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMetaModelStructure_Dependencies() {
return (EReference)metaModelStructureEClass.getEStructuralFeatures().get(2);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EReference getMetaModelStructure_TimeSeries() {
return (EReference)metaModelStructureEClass.getEStructuralFeatures().get(3);
}
/**
 * Returns the meta object for the 'MicroserviceRepository' containment
 * reference of class 'MetaModelStructure' (structural feature at index 4).
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for the reference 'Microservice Repository'.
 * @generated
 */
public EReference getMetaModelStructure_MicroserviceRepository() {
return (EReference)metaModelStructureEClass.getEStructuralFeatures().get(4);
}
/**
 * Returns the meta object (EEnum) for enum 'RESTVerb'
 * (literals GET, PUT, POST, DELETE).
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the meta object for enum 'REST Verb'.
 * @generated
 */
public EEnum getRESTVerb() {
return restVerbEEnum;
}
/**
 * Returns the factory that creates the instances of the model, obtained
 * from this package's registered EFactory instance.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the factory that creates the instances of the model.
 * @generated
 */
public AnotherMicroserviceMetamodelFactory getAnotherMicroserviceMetamodelFactory() {
return (AnotherMicroserviceMetamodelFactory)getEFactoryInstance();
}
/**
 * Guard flag: set to true once createPackageContents() has run, so the
 * package contents are built at most once.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
private boolean isCreated = false;
/**
 * Creates the meta-model objects for the package. This method is
 * guarded to have no effect on any invocation but its first.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void createPackageContents() {
// Run-once guard (see isCreated).
if (isCreated) return;
isCreated = true;
// Create classes and their features.
// NOTE: features are created in the order of their feature-ID constants;
// the resulting index positions matter, since the typed accessors above
// retrieve them via getEStructuralFeatures().get(n). Do not reorder.
microserviceEClass = createEClass(MICROSERVICE);
createEReference(microserviceEClass, MICROSERVICE__ENVIRONMENT);
createEReference(microserviceEClass, MICROSERVICE__ENDPOINTS);
createEReference(microserviceEClass, MICROSERVICE__MICROSERVICE_TYPE);
createEReference(microserviceEClass, MICROSERVICE__VERSION);
createEAttribute(microserviceEClass, MICROSERVICE__UUID);
configurationEClass = createEClass(CONFIGURATION);
createEReference(configurationEClass, CONFIGURATION__MICROSERVICES);
executionEnvironmentEClass = createEClass(EXECUTION_ENVIRONMENT);
createEReference(executionEnvironmentEClass, EXECUTION_ENVIRONMENT__MICROSERVICES);
anomalyInjectorEClass = createEClass(ANOMALY_INJECTOR);
versionEClass = createEClass(VERSION);
createEAttribute(versionEClass, VERSION__VERSION_STRING);
endpointEClass = createEClass(ENDPOINT);
createEReference(endpointEClass, ENDPOINT__REST_OPERATIONS);
createEAttribute(endpointEClass, ENDPOINT__IP_ADDRESS);
createEAttribute(endpointEClass, ENDPOINT__PORT);
createEAttribute(endpointEClass, ENDPOINT__URL);
createEAttribute(endpointEClass, ENDPOINT__PROTOCOL);
restOperationEClass = createEClass(REST_OPERATION);
createEAttribute(restOperationEClass, REST_OPERATION__NAME);
createEAttribute(restOperationEClass, REST_OPERATION__SUB_PATH);
createEAttribute(restOperationEClass, REST_OPERATION__REST_VERB);
microserviceTypeEClass = createEClass(MICROSERVICE_TYPE);
createEReference(microserviceTypeEClass, MICROSERVICE_TYPE__REST_OPERATIONS);
createEReference(microserviceTypeEClass, MICROSERVICE_TYPE__MICROSERVICES);
createEReference(microserviceTypeEClass, MICROSERVICE_TYPE__VERSIONS);
createEAttribute(microserviceTypeEClass, MICROSERVICE_TYPE__IDENTIFIER);
createEReference(microserviceTypeEClass, MICROSERVICE_TYPE__DEPENDENCIES);
microserviceRepositoryEClass = createEClass(MICROSERVICE_REPOSITORY);
createEReference(microserviceRepositoryEClass, MICROSERVICE_REPOSITORY__MICROSERVICE_TYPES);
hostEClass = createEClass(HOST);
createEReference(hostEClass, HOST__CONTAINERS);
createEAttribute(hostEClass, HOST__HOSTNAME);
containerEClass = createEClass(CONTAINER);
createEReference(containerEClass, CONTAINER__HOST);
virtualHostEClass = createEClass(VIRTUAL_HOST);
createEReference(virtualHostEClass, VIRTUAL_HOST__PARENT_HOST);
physicalHostEClass = createEClass(PHYSICAL_HOST);
createEReference(physicalHostEClass, PHYSICAL_HOST__VIRTUAL_HOSTS);
infrastructureModelEClass = createEClass(INFRASTRUCTURE_MODEL);
createEReference(infrastructureModelEClass, INFRASTRUCTURE_MODEL__HOSTS);
dependencyModelEClass = createEClass(DEPENDENCY_MODEL);
createEReference(dependencyModelEClass, DEPENDENCY_MODEL__OPERATION_TO_OPERATION_CALLING_DEPENDENCIES);
operationToOperationCallingDependencyEClass = createEClass(OPERATION_TO_OPERATION_CALLING_DEPENDENCY);
createEReference(operationToOperationCallingDependencyEClass, OPERATION_TO_OPERATION_CALLING_DEPENDENCY__CALLED_MICROSERVICE);
createEReference(operationToOperationCallingDependencyEClass, OPERATION_TO_OPERATION_CALLING_DEPENDENCY__CALLED_OPERATION);
createEReference(operationToOperationCallingDependencyEClass, OPERATION_TO_OPERATION_CALLING_DEPENDENCY__CALLING_OPERATION);
createEReference(operationToOperationCallingDependencyEClass, OPERATION_TO_OPERATION_CALLING_DEPENDENCY__CALLING_VERSION);
createEReference(operationToOperationCallingDependencyEClass, OPERATION_TO_OPERATION_CALLING_DEPENDENCY__CALLING_MICROSERVICE);
timeSeriesEClass = createEClass(TIME_SERIES);
createEReference(timeSeriesEClass, TIME_SERIES__TIME_SERIES_POINTS);
timeSeriesPointEClass = createEClass(TIME_SERIES_POINT);
createEReference(timeSeriesPointEClass, TIME_SERIES_POINT__TIME_SERIES);
microserviceOperationTimeSeriesPointEClass = createEClass(MICROSERVICE_OPERATION_TIME_SERIES_POINT);
createEReference(microserviceOperationTimeSeriesPointEClass, MICROSERVICE_OPERATION_TIME_SERIES_POINT__ENDPOINT);
createEReference(microserviceOperationTimeSeriesPointEClass, MICROSERVICE_OPERATION_TIME_SERIES_POINT__REST_OPERATION);
metaModelStructureEClass = createEClass(META_MODEL_STRUCTURE);
createEReference(metaModelStructureEClass, META_MODEL_STRUCTURE__INFRASTRUCTURE);
createEReference(metaModelStructureEClass, META_MODEL_STRUCTURE__CONFIGURATIONS);
createEReference(metaModelStructureEClass, META_MODEL_STRUCTURE__DEPENDENCIES);
createEReference(metaModelStructureEClass, META_MODEL_STRUCTURE__TIME_SERIES);
createEReference(metaModelStructureEClass, META_MODEL_STRUCTURE__MICROSERVICE_REPOSITORY);
// Create enums
restVerbEEnum = createEEnum(REST_VERB);
}
/**
 * Guard flag: set to true once initializePackageContents() has run, so the
 * meta-model is initialized at most once.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
private boolean isInitialized = false;
/**
 * Complete the initialization of the package and its meta-model. This
 * method is guarded to have no effect on any invocation but its first.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void initializePackageContents() {
// Run-once guard (see isInitialized).
if (isInitialized) return;
isInitialized = true;
// Initialize package
setName(eNAME);
setNsPrefix(eNS_PREFIX);
setNsURI(eNS_URI);
// Obtain other dependent packages
XMLTypePackage theXMLTypePackage = (XMLTypePackage)EPackage.Registry.INSTANCE.getEPackage(XMLTypePackage.eNS_URI);
// Create type parameters
// Set bounds for type parameters
// Add supertypes to classes
hostEClass.getESuperTypes().add(this.getExecutionEnvironment());
containerEClass.getESuperTypes().add(this.getExecutionEnvironment());
virtualHostEClass.getESuperTypes().add(this.getHost());
physicalHostEClass.getESuperTypes().add(this.getHost());
microserviceOperationTimeSeriesPointEClass.getESuperTypes().add(this.getTimeSeriesPoint());
// Initialize classes, features, and operations; add parameters.
// Several references below are paired via their eOpposite argument
// (e.g. Microservice.environment <-> ExecutionEnvironment.microservices);
// both sides must be initialized consistently.
initEClass(microserviceEClass, Microservice.class, "Microservice", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getMicroservice_Environment(), this.getExecutionEnvironment(), this.getExecutionEnvironment_Microservices(), "environment", null, 1, 1, Microservice.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMicroservice_Endpoints(), this.getEndpoint(), null, "endpoints", null, 1, -1, Microservice.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMicroservice_MicroserviceType(), this.getMicroserviceType(), this.getMicroserviceType_Microservices(), "microserviceType", null, 1, 1, Microservice.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMicroservice_Version(), this.getVersion(), null, "version", null, 1, 1, Microservice.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getMicroservice_Uuid(), theXMLTypePackage.getString(), "uuid", null, 1, 1, Microservice.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(configurationEClass, Configuration.class, "Configuration", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getConfiguration_Microservices(), this.getMicroservice(), null, "microservices", null, 1, -1, Configuration.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(executionEnvironmentEClass, ExecutionEnvironment.class, "ExecutionEnvironment", IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getExecutionEnvironment_Microservices(), this.getMicroservice(), this.getMicroservice_Environment(), "microservices", null, 0, -1, ExecutionEnvironment.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(anomalyInjectorEClass, AnomalyInjector.class, "AnomalyInjector", IS_ABSTRACT, IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEClass(versionEClass, Version.class, "Version", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getVersion_VersionString(), ecorePackage.getEString(), "versionString", null, 1, 1, Version.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(endpointEClass, Endpoint.class, "Endpoint", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getEndpoint_RestOperations(), this.getRESTOperation(), null, "restOperations", null, 1, -1, Endpoint.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getEndpoint_IpAddress(), ecorePackage.getEString(), "ipAddress", null, 1, 1, Endpoint.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, IS_DERIVED, IS_ORDERED);
initEAttribute(getEndpoint_Port(), theXMLTypePackage.getIntObject(), "port", null, 1, 1, Endpoint.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getEndpoint_Url(), ecorePackage.getEString(), "url", null, 0, 1, Endpoint.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getEndpoint_Protocol(), ecorePackage.getEString(), "protocol", null, 1, 1, Endpoint.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(restOperationEClass, RESTOperation.class, "RESTOperation", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEAttribute(getRESTOperation_Name(), ecorePackage.getEString(), "name", null, 1, 1, RESTOperation.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getRESTOperation_SubPath(), ecorePackage.getEString(), "subPath", null, 1, 1, RESTOperation.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getRESTOperation_RestVerb(), this.getRESTVerb(), "restVerb", null, 1, 1, RESTOperation.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(microserviceTypeEClass, MicroserviceType.class, "MicroserviceType", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getMicroserviceType_RestOperations(), this.getRESTOperation(), null, "restOperations", null, 0, -1, MicroserviceType.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMicroserviceType_Microservices(), this.getMicroservice(), this.getMicroservice_MicroserviceType(), "microservices", null, 0, -1, MicroserviceType.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMicroserviceType_Versions(), this.getVersion(), null, "versions", null, 1, -1, MicroserviceType.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getMicroserviceType_Identifier(), ecorePackage.getEString(), "identifier", null, 1, 1, MicroserviceType.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMicroserviceType_Dependencies(), this.getOperationToOperationCallingDependency(), this.getOperationToOperationCallingDependency_CallingMicroservice(), "dependencies", null, 0, -1, MicroserviceType.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(microserviceRepositoryEClass, MicroserviceRepository.class, "MicroserviceRepository", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getMicroserviceRepository_MicroserviceTypes(), this.getMicroserviceType(), null, "microserviceTypes", null, 1, -1, MicroserviceRepository.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(hostEClass, Host.class, "Host", IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getHost_Containers(), this.getContainer(), this.getContainer_Host(), "containers", null, 0, -1, Host.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEAttribute(getHost_Hostname(), ecorePackage.getEString(), "hostname", null, 1, 1, Host.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(containerEClass, microserviceMetamodel.Container.class, "Container", IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getContainer_Host(), this.getHost(), this.getHost_Containers(), "host", null, 1, 1, microserviceMetamodel.Container.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(virtualHostEClass, VirtualHost.class, "VirtualHost", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getVirtualHost_ParentHost(), this.getPhysicalHost(), this.getPhysicalHost_VirtualHosts(), "parentHost", null, 1, 1, VirtualHost.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(physicalHostEClass, PhysicalHost.class, "PhysicalHost", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getPhysicalHost_VirtualHosts(), this.getVirtualHost(), this.getVirtualHost_ParentHost(), "virtualHosts", null, 0, -1, PhysicalHost.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(infrastructureModelEClass, InfrastructureModel.class, "InfrastructureModel", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getInfrastructureModel_Hosts(), this.getPhysicalHost(), null, "hosts", null, 1, -1, InfrastructureModel.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(dependencyModelEClass, DependencyModel.class, "DependencyModel", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getDependencyModel_OperationToOperationCallingDependencies(), this.getOperationToOperationCallingDependency(), null, "operationToOperationCallingDependencies", null, 0, -1, DependencyModel.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(operationToOperationCallingDependencyEClass, OperationToOperationCallingDependency.class, "OperationToOperationCallingDependency", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getOperationToOperationCallingDependency_CalledMicroservice(), this.getMicroserviceType(), null, "calledMicroservice", null, 1, 1, OperationToOperationCallingDependency.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getOperationToOperationCallingDependency_CalledOperation(), this.getRESTOperation(), null, "calledOperation", null, 1, 1, OperationToOperationCallingDependency.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getOperationToOperationCallingDependency_CallingOperation(), this.getRESTOperation(), null, "callingOperation", null, 1, 1, OperationToOperationCallingDependency.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getOperationToOperationCallingDependency_CallingVersion(), this.getVersion(), null, "callingVersion", null, 1, 1, OperationToOperationCallingDependency.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getOperationToOperationCallingDependency_CallingMicroservice(), this.getMicroserviceType(), this.getMicroserviceType_Dependencies(), "callingMicroservice", null, 1, 1, OperationToOperationCallingDependency.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(timeSeriesEClass, TimeSeries.class, "TimeSeries", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getTimeSeries_TimeSeriesPoints(), this.getTimeSeriesPoint(), this.getTimeSeriesPoint_TimeSeries(), "timeSeriesPoints", null, 0, -1, TimeSeries.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(timeSeriesPointEClass, TimeSeriesPoint.class, "TimeSeriesPoint", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getTimeSeriesPoint_TimeSeries(), this.getTimeSeries(), this.getTimeSeries_TimeSeriesPoints(), "timeSeries", null, 1, 1, TimeSeriesPoint.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(microserviceOperationTimeSeriesPointEClass, MicroserviceOperationTimeSeriesPoint.class, "MicroserviceOperationTimeSeriesPoint", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getMicroserviceOperationTimeSeriesPoint_Endpoint(), this.getEndpoint(), null, "endpoint", null, 1, 1, MicroserviceOperationTimeSeriesPoint.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMicroserviceOperationTimeSeriesPoint_RestOperation(), this.getRESTOperation(), null, "restOperation", null, 1, 1, MicroserviceOperationTimeSeriesPoint.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEClass(metaModelStructureEClass, MetaModelStructure.class, "MetaModelStructure", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
initEReference(getMetaModelStructure_Infrastructure(), this.getInfrastructureModel(), null, "infrastructure", null, 1, 1, MetaModelStructure.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMetaModelStructure_Configurations(), this.getConfiguration(), null, "configurations", null, 1, -1, MetaModelStructure.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMetaModelStructure_Dependencies(), this.getDependencyModel(), null, "dependencies", null, 1, 1, MetaModelStructure.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMetaModelStructure_TimeSeries(), this.getTimeSeries(), null, "timeSeries", null, 1, 1, MetaModelStructure.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
initEReference(getMetaModelStructure_MicroserviceRepository(), this.getMicroserviceRepository(), null, "microserviceRepository", null, 1, 1, MetaModelStructure.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
// Initialize enums and add enum literals
initEEnum(restVerbEEnum, RESTVerb.class, "RESTVerb");
addEEnumLiteral(restVerbEEnum, RESTVerb.GET);
addEEnumLiteral(restVerbEEnum, RESTVerb.PUT);
addEEnumLiteral(restVerbEEnum, RESTVerb.POST);
addEEnumLiteral(restVerbEEnum, RESTVerb.DELETE);
// Create resource
createResource(eNS_URI);
// Create annotations
// http://www.eclipse.org/OCL/Import
createImportAnnotations();
// http://www.eclipse.org/emf/2002/Ecore
createEcoreAnnotations();
// http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot
createPivotAnnotations();
}
/**
 * Initializes the annotations for <b>http://www.eclipse.org/OCL/Import</b>.
 * Maps the alias 'ecore.xml.type' to the XMLType namespace URI.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void createImportAnnotations() {
String source = "http://www.eclipse.org/OCL/Import";
addAnnotation
(this,
source,
new String[] {
"ecore.xml.type", "http://www.eclipse.org/emf/2003/XMLType"
});
}
/**
 * Initializes the annotations for <b>http://www.eclipse.org/emf/2002/Ecore</b>.
 * Registers the OCL Pivot delegates on the package and declares the named
 * constraints on the classes; the constraint bodies live in the Pivot
 * annotations (see createPivotAnnotations()).
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void createEcoreAnnotations() {
String source = "http://www.eclipse.org/emf/2002/Ecore";
// Delegate invocation, setting, and validation to the OCL Pivot evaluator.
addAnnotation
(this,
source,
new String[] {
"invocationDelegates", "http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot",
"settingDelegates", "http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot",
"validationDelegates", "http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot"
});
addAnnotation
(microserviceEClass,
source,
new String[] {
"constraints", "typeAndVersionMatch"
});
addAnnotation
(operationToOperationCallingDependencyEClass,
source,
new String[] {
"constraints", "calledCorrespond callingCorrespond callingVersionFits"
});
addAnnotation
(microserviceOperationTimeSeriesPointEClass,
source,
new String[] {
"constraints", "nonNullReferences correspondingReferences"
});
}
/**
 * Initializes the annotations for <b>http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot</b>.
 * Supplies the OCL bodies for the constraints declared under the Ecore
 * annotation source; each key must match a declared constraint name.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void createPivotAnnotations() {
String source = "http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot";
// A microservice's version must be one of the versions of its type.
addAnnotation
(microserviceEClass,
source,
new String[] {
"typeAndVersionMatch", "microserviceType.versions->select(v:Version | v = version)->notEmpty()"
});
// Called/calling operations and versions must belong to the referenced types.
addAnnotation
(operationToOperationCallingDependencyEClass,
source,
new String[] {
"calledCorrespond", "calledMicroservice.restOperations->select(rop:RESTOperation | rop=calledOperation)->notEmpty()",
"callingCorrespond", "callingMicroservice.restOperations->select(rop:RESTOperation | rop=callingOperation)->notEmpty()",
"callingVersionFits", "callingMicroservice.versions->select(ver:Version|ver=callingVersion)->notEmpty()"
});
// Time-series points must reference a non-null endpoint/operation pair
// where the operation belongs to the endpoint.
addAnnotation
(microserviceOperationTimeSeriesPointEClass,
source,
new String[] {
"nonNullReferences", "endpoint <> null and restOperation <> null",
"correspondingReferences", "endpoint.restOperations->select(rop:RESTOperation | rop = restOperation)->notEmpty()"
});
}
} //AnotherMicroserviceMetamodelPackageImpl
| |
/*
* Copyright 2016, Yahoo! Inc. Licensed under the terms of the Apache License 2.0. See LICENSE file
* at the project root for terms.
*/
package com.yahoo.sketches.frequencies;
import static com.yahoo.sketches.frequencies.PreambleUtil.SER_VER;
import static com.yahoo.sketches.frequencies.PreambleUtil.extractBufferLength;
import static com.yahoo.sketches.frequencies.PreambleUtil.extractEmptyFlag;
import static com.yahoo.sketches.frequencies.PreambleUtil.extractFamilyID;
import static com.yahoo.sketches.frequencies.PreambleUtil.extractInitialSize;
import static com.yahoo.sketches.frequencies.PreambleUtil.extractLowerK;
import static com.yahoo.sketches.frequencies.PreambleUtil.extractPreLongs;
import static com.yahoo.sketches.frequencies.PreambleUtil.extractSerVer;
import static com.yahoo.sketches.frequencies.PreambleUtil.extractUpperK;
import static com.yahoo.sketches.frequencies.PreambleUtil.insertBufferLength;
import static com.yahoo.sketches.frequencies.PreambleUtil.insertEmptyFlag;
import static com.yahoo.sketches.frequencies.PreambleUtil.insertFamilyID;
import static com.yahoo.sketches.frequencies.PreambleUtil.insertInitialSize;
import static com.yahoo.sketches.frequencies.PreambleUtil.insertLowerK;
import static com.yahoo.sketches.frequencies.PreambleUtil.insertPreLongs;
import static com.yahoo.sketches.frequencies.PreambleUtil.insertSerVer;
import static com.yahoo.sketches.frequencies.PreambleUtil.insertUpperK;
import java.util.Arrays;
import com.yahoo.memory.Memory;
import com.yahoo.memory.NativeMemory;
import com.yahoo.sketches.hashmaps.HashMapReverseEfficient;
/**
* Implements frequent items sketch on the Java heap.
*
* <p>The frequent-items sketch is useful for keeping approximate counters for keys (map from key
* (long) to value (long)). The sketch is initialized with a value k. The sketch will keep roughly k
* counters when it is full size. More specifically, when k is a power of 2, a HashMap will be
* created with 2*k cells, and the number of counters will typically oscillate between roughly .75*k
* and 1.5*k. The space usage of the sketch is therefore proportional to k when it reaches full
* size.
*
* <p>When the sketch is updated with a key and increment, the corresponding counter is incremented or,
* if there is no counter for that key, a new counter is created. If the sketch reaches its maximal
* allowed size, it decrements all of the counters (by an approximately computed median), and
* removes any non-positive counters.
*
* <p>The logic of the frequent-items sketch is such that the stored counts and real counts are never
* too different. More specifically, for any key KEY, the sketch can return an estimate of the true
* frequency of KEY, along with upper and lower bounds on the frequency (that hold
* deterministically). For our implementation, it is guaranteed that, with high probability over the
* randomness of the implementation, the difference between the upper bound and the estimate is at
 * most (4/3)*(n/k), where n denotes the stream length (i.e., sum of all the item frequencies), and
* similarly for the lower bound and the estimate. In practice, the difference is usually much
* smaller.
*
* <p>Background: This code implements a variant of what is commonly known as the "Misra-Gries
* algorithm" or "Frequent Items". Variants of it were discovered and rediscovered and redesigned
* several times over the years. a) "Finding repeated elements", Misra, Gries, 1982 b)
* "Frequency estimation of internet packet streams with limited space" Demaine, Lopez-Ortiz, Munro,
* 2002 c) "A simple algorithm for finding frequent elements in streams and bags" Karp, Shenker,
* Papadimitriou, 2003 d) "Efficient Computation of Frequent and Top-k Elements in Data Streams"
* Metwally, Agrawal, Abbadi, 2006
*
* <p>Uses HashMapReverseEfficient
*
* @author Justin Thaler
*/
public class FrequentItems extends FrequencyEstimator {
/**
 * We start by allocating a small data structure capable of explicitly storing very small streams
 * in full, and growing it as the stream grows. The following constant controls the size of the
 * initial data structure.
 */
static final int MIN_FREQUENT_ITEMS_SIZE = 4; // This is somewhat arbitrary
/**
 * This is a constant large enough that computing the median of SAMPLE_SIZE
 * randomly selected entries from a list of numbers and outputting
 * the empirical median will give a constant-factor approximation to the
 * true median with high probability
 */
static final int SAMPLE_SIZE = 256;
/**
 * The current number of counters that the data structure can support.
 * NOTE(review): upper-case K shadows the accuracy parameter k below in all
 * but case; consider renaming (e.g. curCapacity) in a follow-up.
 */
private int K;
/**
 * The value of k passed to the constructor. Used to determine the maximum number of counters the
 * sketch can support, and remembered by the sketch for use in resetting to a virgin state.
 */
private int k;
/**
 * Initial number of counters supported by the data structure
 */
private int initialSize;
/**
 * Hash map mapping stored keys to approximate counts
 */
private HashMapReverseEfficient counters;
/**
 * The number of counters to be supported when sketch is full size
 */
private int maxK;
/**
 * Tracks the total number of decrements performed on sketch.
 */
private long offset;
/**
 * An upper bound on the error in any estimated count due to merging with other FrequentItems
 * sketches.
 */
private long mergeError;
/**
 * The sum of all frequencies of the stream so far.
 */
private long streamLength = 0;
/**
 * The maximum number of samples used to compute approximate median of counters when doing
 * decrement
 */
private int sampleSize;
// **CONSTRUCTOR**********************************************************
/**
 * Constructs a FrequentItems sketch.
 *
 * @param k Determines the accuracy of the estimates returned by the sketch.
 * @param initialCapacity determines the initial size of the sketch.
 *
 *        The guarantee of the sketch is that with high probability, any returned estimate will
 *        have error at most (4/3)*(n/k), where n is the true sum of frequencies in the stream. In
 *        practice, the error is typically much smaller. The space usage of the sketch is
 *        proportional to k. If fewer than ~k different keys are inserted then the counts will be
 *        exact. More precisely, if k is a power of 2,then when the sketch reaches full size, the
 *        data structure's HashMap will contain 2*k cells. Assuming that the LOAD_FACTOR of the
 *        HashMap is set to 0.75, the number of cells of the hash table that are actually filled
 *        should oscillate between roughly .75*k and 1.5 * k.
 * @throws IllegalArgumentException if k or initialCapacity is zero or negative.
 */
public FrequentItems(final int k, final int initialCapacity) {
  if (k <= 0) {
    throw new IllegalArgumentException("Received negative or zero value for k.");
  }
  // Fix: previously only k was validated; a non-positive initialCapacity
  // would be passed straight into the backing hash map.
  if (initialCapacity <= 0) {
    throw new IllegalArgumentException("Received negative or zero value for initialCapacity.");
  }
  // Set initial size of the counters data structure so it can exactly store
  // a stream with initialCapacity distinct elements.
  this.K = initialCapacity;
  counters = new HashMapReverseEfficient(this.K);
  this.k = k;
  this.initialSize = initialCapacity;
  // Set maxK to be the maximum number of counters that can be supported
  // by a HashMap with the appropriate number of cells (specifically,
  // 2*k cells if k is a power of 2) and a load that does not exceed
  // the designated load factor.
  final int maxHashMapLength = Integer.highestOneBit(4 * k - 1);
  this.maxK = (int) (maxHashMapLength * counters.LOAD_FACTOR);
  this.offset = 0;
  // Cap the number of median samples at the number of available counters.
  this.sampleSize = Math.min(this.maxK, SAMPLE_SIZE);
}
/**
 * Constructs a sketch with accuracy parameter k and the default initial
 * capacity MIN_FREQUENT_ITEMS_SIZE.
 *
 * @param k Determines the accuracy of the estimates returned by the sketch.
 */
public FrequentItems(final int k) {
this(k, MIN_FREQUENT_ITEMS_SIZE);
}
/**
 * Returns the number of counters currently in use (the "number of non-zeros").
 *
 * @return the number of positive counters in the sketch.
 */
public int nnz() {
return counters.getSize();
}
@Override
public long getEstimate(final long key) {
  // Returns the stored count plus the global purge offset if the key is
  // tracked; otherwise 0.
  // Fix: the original called counters.get(key) twice (once for the test,
  // once for the result) — a redundant second hash lookup. Look up once.
  final long count = counters.get(key);
  return (count > 0) ? (count + offset) : 0;
}
@Override
public long getEstimateUpperBound(final long key) {
  // Tracked key: its estimate can undershoot by at most the merge slack.
  // Untracked key: the true count is bounded by the purge offset plus the
  // merge slack.
  final long estimate = getEstimate(key);
  return (estimate > 0) ? (estimate + mergeError) : (mergeError + offset);
}
@Override
public long getEstimateLowerBound(final long key) {
  // The estimate may overshoot the true count by at most offset + mergeError;
  // the lower bound is never negative.
  final long lowerBound = getEstimate(key) - offset - mergeError;
  return Math.max(lowerBound, 0L);
}
@Override
public long getMaxError() {
  // Maximum overestimate of any returned count: purge shifts (offset) plus
  // error inherited from merged sketches (mergeError).
  return offset + mergeError;
}
@Override
public void update(final long key) {
  // Convenience overload: increment key's count by 1.
  update(key, 1);
}
@Override
public void update(final long key, final long increment) {
  // Adds increment to key's counter, growing the table (up to maxK) when it
  // fills, and purging (decrement-and-evict) once more than maxK counters
  // are in use.
  this.streamLength += increment;
  counters.adjust(key, increment);
  final int size = this.nnz();
  // if the data structure needs to be grown
  if ((size >= this.K) && (this.K < this.maxK)) {
    // grow the size of the data structure
    final int newSize = Math.max(Math.min(this.maxK, 2 * this.K), 1);
    this.K = newSize;
    final HashMapReverseEfficient newTable = new HashMapReverseEfficient(newSize);
    final long[] keys = this.counters.getKeys();
    final long[] values = this.counters.getValues();
    // getKeys()/getValues() presumably return only active entries; the
    // assert below relies on this — TODO confirm against HashMapReverseEfficient
    assert keys.length == size;
    for (int i = 0; i < size; i++) {
      newTable.adjust(keys[i], values[i]);
    }
    this.counters = newTable;
  }
  if (size > this.maxK) {
    purge();
    assert (this.nnz() <= this.maxK);
  }
}
/**
 * This function is called when a key is processed that is not currently assigned a counter, and
 * all the counters are in use. This function estimates the median of the counters in the sketch
 * via sampling, decrements all counts by this estimate, throws out all counters that are no
 * longer positive, and increments offset accordingly.
 */
private void purge() {
  // Sample up to sampleSize active counter values. limit <= nnz() guarantees
  // the scan below finds enough active slots and terminates.
  final int limit = Math.min(this.sampleSize, nnz());
  final long[] values = counters.ProtectedGetValues();
  int numSamples = 0;
  int i = 0;
  final long[] samples = new long[limit];
  while (numSamples < limit) {
    if (counters.isActive(i)) {
      samples[numSamples] = values[i];
      numSamples++;
    }
    i++;
  }
  // The median of the sample estimates the median of all counter values.
  Arrays.sort(samples, 0, numSamples);
  final long val = samples[limit / 2];
  // Shift every counter down by the median, drop non-positive counters, and
  // fold the shift into offset so surviving estimates are unchanged.
  counters.adjustAllValuesBy(-1 * val);
  counters.keepOnlyLargerThan(0);
  this.offset += val;
}
@Override
public FrequencyEstimator merge(final FrequencyEstimator other) {
  // Folds the other sketch's counters into this one. The other sketch's
  // maximum error is absorbed into this sketch's mergeError.
  if (!(other instanceof FrequentItems)) {
    throw new IllegalArgumentException("FrequentItems can only merge with other FrequentItems");
  }
  final FrequentItems that = (FrequentItems) other;
  this.streamLength += that.streamLength;
  this.mergeError += that.getMaxError();
  final long[] keys = that.counters.getKeys();
  final long[] values = that.counters.getValues();
  // iterate in reverse, matching the original insertion order
  for (int i = keys.length; i-- > 0;) {
    update(keys[i], values[i]);
  }
  return this;
}
@Override
public long[] getFrequentKeys(final long threshold) {
  // Returns all keys whose estimate's upper bound meets the threshold.
  // BUG FIX: the counting pass used getEstimate(...) while the filling pass
  // used getEstimateUpperBound(...). The upper bound is >= the estimate, so
  // when mergeError/offset > 0 the second pass could match more keys than
  // were counted, overflowing freqKeys with ArrayIndexOutOfBoundsException.
  // Both passes now use the same predicate (the upper bound, per the
  // "candidate frequent keys" intent).
  int count = 0;
  final long[] keys = counters.ProtectedGetKey();
  // first, count the number of candidate frequent keys
  for (int i = counters.getLength(); i-- > 0;) {
    if (counters.isActive(i) && (getEstimateUpperBound(keys[i]) >= threshold)) {
      count++;
    }
  }
  // allocate an array to store the candidate frequent keys, and then compute them
  final long[] freqKeys = new long[count];
  count = 0;
  for (int i = counters.getLength(); i-- > 0;) {
    if (counters.isActive(i) && (getEstimateUpperBound(keys[i]) >= threshold)) {
      freqKeys[count] = keys[i];
      count++;
    }
  }
  return freqKeys;
}
@Override
public int getK() {
  // Current capacity: the number of counters the table holds before growing.
  return this.K;
}
@Override
public long getStreamLength() {
  // Sum of all increments processed (including those from merged sketches).
  return this.streamLength;
}
@Override
public int getMaxK() {
  // Hard cap on the number of counters, fixed at construction from k.
  return this.maxK;
}
@Override
public boolean isEmpty() {
  // Empty means no active counters (note: purge can empty a non-empty sketch's
  // counters only if all values drop to the median, so this tracks counters,
  // not streamLength).
  return nnz() == 0;
}
@Override
public void reset() {
  // Restores the sketch to its freshly-constructed state: original initial
  // capacity, empty counters, zeroed error terms and stream length.
  this.K = this.initialSize;
  counters = new HashMapReverseEfficient(this.K);
  this.offset = 0;
  this.mergeError = 0;
  this.streamLength = 0;
}
/**
 * Returns the number of bytes required to store this sketch as an array of bytes.
 *
 * <p>An empty sketch needs 20 bytes; otherwise a 48-byte preamble plus
 * 16 bytes (one long for the key, one for the value) per stored counter.
 *
 * @return the number of bytes required to store this sketch as an array of bytes.
 */
public int getStorageBytes() {
  return isEmpty() ? 20 : 48 + (16 * nnz());
}
/**
 * Returns summary information about this sketch.
 *
 * <p>The output is the six comma-separated header fields (k, mergeError, offset,
 * streamLength, K, initialSize) followed by the serialized counters table, and is
 * consumed by {@link #StringToFrequentItems(String)}.
 *
 * @return a string specifying the FrequentItems object
 */
@Override
public String toString() {
  final StringBuilder sb = new StringBuilder();
  // FIX: pin Locale.ROOT so %d always emits ASCII digits. The default-locale
  // overload can produce locale-specific digit characters, which
  // StringToFrequentItems's Integer/Long.parseInt cannot parse.
  // (Fully qualified because the file's import block is out of this view.)
  sb.append(String.format(java.util.Locale.ROOT,
      "%d,%d,%d,%d,%d,%d,", k, mergeError, offset, streamLength, K, initialSize));
  // maxK, sampleSize are deterministic functions of k, so we don't need them in the serialization
  sb.append(counters.hashMapReverseEfficientToString());
  return sb.toString();
}
/**
 * Turns a string specifying a FrequentItems object into a FrequentItems object.
 *
 * <p>Expected format: the six comma-separated header fields written by
 * {@code toString()} (k, mergeError, offset, streamLength, K, initialSize)
 * followed by the serialized counters table.
 *
 * @param string String specifying a FrequentItems object
 * @return a FrequentItems object corresponding to the string
 * @throws IllegalArgumentException if fewer than 6 comma-separated fields are present
 * @throws NumberFormatException if a header field is not a valid number
 */
public static FrequentItems StringToFrequentItems(final String string) {
  final String[] tokens = string.split(",");
  if (tokens.length < 6) {
    throw new IllegalArgumentException(
        "Tried to make FrequentItems out of string not long enough to specify relevant parameters.");
  }
  final int k = Integer.parseInt(tokens[0]);
  final long mergeError = Long.parseLong(tokens[1]);
  final long offset = Long.parseLong(tokens[2]);
  final long streamLength = Long.parseLong(tokens[3]);
  final int K = Integer.parseInt(tokens[4]);
  final int initialSize = Integer.parseInt(tokens[5]);
  final FrequentItems sketch = new FrequentItems(k, K);
  // overwrite the constructor defaults with the deserialized state
  sketch.mergeError = mergeError;
  sketch.offset = offset;
  sketch.streamLength = streamLength;
  sketch.initialSize = initialSize;
  // the remaining tokens (from index 6 on) encode the counters table
  sketch.counters = HashMapReverseEfficient.StringArrayToHashMapReverseEfficient(tokens, 6);
  return sketch;
}
// @formatter:off
/**
* @return byte array that looks as follows:
*
* <pre>
*
* || 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 |
* 0 |||--------k---------------------------|--flag--| FamID | SerVer | PreambleLongs |
* || 15 | 14 | 13 | 12 | 11 | 10 | 9 | 8 |
* 1 ||---------------------------------mergeError--------------------------------------|
* || 23 | 22 | 21 | 20 | 19 | 18 | 17 | 16 |
* 2 ||---------------------------------offset------------------------------------------|
* || 31 | 30 | 29 | 28 | 27 | 26 | 25 | 24 |
* 3 ||-----------------------------------streamLength----------------------------------|
* || 39 | 38 | 37 | 36 | 35 | 34 | 33 | 32 |
* 4 ||------initialSize--------------------|-------------------K-----------------------|
* || 47 | 46 | 45 | 44 | 43 | 42 | 41 | 40 |
* 5 ||------------(unused)-----------------|--------bufferlength-----------------------|
* || 55 | 54 | 53 | 52 | 51 | 50 | 49 | 48 |
* 6 ||----------start of keys buffer, followed by values buffer------------------------|
* </pre>
**/
// @formatter:on
public byte[] toByteArray() {
  // Serializes the sketch per the byte layout documented above: one preamble
  // long when empty, otherwise 6 preamble longs followed by the keys array
  // and then the values array.
  final int preLongs, arrLongs;
  final boolean empty = isEmpty();
  if (empty) {
    preLongs = 1;
    arrLongs = 1;
  } else {
    preLongs = 6;
    // one long per stored key plus one per stored value
    arrLongs = preLongs + 2 * nnz();
  }
  final byte[] outArr = new byte[arrLongs << 3]; // 8 bytes per long
  final NativeMemory mem = new NativeMemory(outArr);
  // build first prelong
  long pre0 = 0L;
  pre0 = insertPreLongs(preLongs, pre0);
  pre0 = insertSerVer(SER_VER, pre0);
  pre0 = insertFamilyID(10, pre0);
  if (empty) {
    pre0 = insertEmptyFlag(1, pre0);
  }
  else {
    pre0 = insertEmptyFlag(0, pre0);
  }
  pre0 = insertLowerK(this.k, pre0);
  if (empty) {
    mem.putLong(0, pre0);
  } else {
    final long[] preArr = new long[6];
    preArr[0] = pre0;
    preArr[1] = this.mergeError;
    preArr[2] = this.offset;
    preArr[3] = this.streamLength;
    long pre1 = 0L;
    pre1 = insertUpperK(this.K, pre1);
    pre1 = insertInitialSize(this.initialSize, pre1);
    preArr[4] = pre1;
    long pre2 = 0L;
    pre2 = insertBufferLength(nnz(), pre2);
    preArr[5] = pre2;
    mem.putLongArray(0, preArr, 0, 6);
    // keys begin at byte 48 (after the 6 preamble longs); values follow the keys
    mem.putLongArray(48, counters.getKeys(), 0, this.nnz());
    mem.putLongArray(48 + (this.nnz() << 3), counters.getValues(), 0, this.nnz());
  }
  return outArr;
}
/**
 * Serializes this sketch into the given destination Memory.
 *
 * @param dstMem the destination Memory; its capacity must be at least
 *     {@code getStorageBytes()} bytes
 * @throws IllegalArgumentException if dstMem is too small to hold the serialized sketch
 */
public void putMemory(final Memory dstMem) {
  final byte[] byteArr = toByteArray();
  final int arrLen = byteArr.length;
  final long memCap = dstMem.getCapacity();
  if (memCap < arrLen) {
    throw new IllegalArgumentException(
        "Destination Memory not large enough: " + memCap + " < " + arrLen);
  }
  dstMem.putByteArray(0, byteArr, 0, arrLen);
}
/**
 * Heapifies the given srcMem, which must be a Memory image of a FrequentItems sketch
 * (the layout produced by {@code toByteArray()}).
 *
 * @param srcMem a Memory image of a sketch. <a href="{@docRoot}/resources/dictionary.html#mem"
 * >See Memory</a>
 * @return a FrequentItems on the Java heap.
 * @throws IllegalArgumentException if srcMem is smaller than one preamble long (8 bytes)
 */
static FrequentItems getInstance(final Memory srcMem) {
  final long memCapBytes = srcMem.getCapacity();
  if (memCapBytes < 8) {
    throw new IllegalArgumentException("Memory too small: " + memCapBytes);
  }
  final long pre0 = srcMem.getLong(0);
  final int preambleLongs = extractPreLongs(pre0);
  // NOTE(review): image validation below uses asserts, so it is skipped when
  // assertions are disabled; a corrupt image would then fail later instead.
  assert ((preambleLongs == 1) || (preambleLongs == 6));
  final int serVer = extractSerVer(pre0);
  assert (serVer == 1);
  final int familyID = extractFamilyID(pre0);
  assert (familyID == 10);
  final int emptyFlag = extractEmptyFlag(pre0);
  final int k = extractLowerK(pre0);
  if (emptyFlag == 1) {
    // empty image: only pre0 is present; reconstruct a fresh sketch from k
    return new FrequentItems(k);
  }
  // Not empty, must have valid preamble
  final long[] remainderPreArr = new long[5];
  srcMem.getLongArray(8, remainderPreArr, 0, 5);
  final long mergeError = remainderPreArr[0];
  final long offset = remainderPreArr[1];
  final long streamLength = remainderPreArr[2];
  final long pre1 = remainderPreArr[3];
  final long pre2 = remainderPreArr[4];
  final int K = extractUpperK(pre1);
  final int initialSize = extractInitialSize(pre1);
  final int bufferLength = extractBufferLength(pre2);
  final FrequentItems hfi = new FrequentItems(k, K);
  hfi.initialSize = initialSize;
  hfi.offset = offset;
  hfi.mergeError = mergeError;
  final long[] keyArray = new long[bufferLength];
  final long[] valueArray = new long[bufferLength];
  // keys start at byte 48 (after the 6 preamble longs); values follow
  srcMem.getLongArray(48, keyArray, 0, bufferLength);
  srcMem.getLongArray(48 + 8 * bufferLength, valueArray, 0, bufferLength);
  for (int i = 0; i < bufferLength; i++) {
    hfi.update(keyArray[i], valueArray[i]);
  }
  // the update() calls above inflated streamLength; restore the serialized value
  hfi.streamLength = streamLength;
  return hfi;
}
}
| |
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.oss.driver.api.core.type.codec;
import com.datastax.oss.driver.api.core.ProtocolVersion;
import com.datastax.oss.driver.api.core.cql.Row;
import com.datastax.oss.driver.api.core.data.TupleValue;
import com.datastax.oss.driver.api.core.data.UdtValue;
import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata;
import com.datastax.oss.driver.api.core.type.DataType;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import com.datastax.oss.driver.shaded.guava.common.base.Preconditions;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.nio.ByteBuffer;
/**
* Manages the two-way conversion between a CQL type and a Java type.
*
* <p>Type codec implementations:
*
* <ol>
* <li><em>must</em> be thread-safe.
* <li><em>must</em> perform fast and never block.
* <li><em>must</em> support all native protocol versions; it is not possible to use different
* codecs for the same types but under different protocol versions.
* <li><em>must</em> comply with the native protocol specifications; failing to do so will result
* in unexpected results and could cause the driver to crash.
* <li><em>should</em> be stateless and immutable.
* <li><em>should</em> interpret {@code null} values and empty byte buffers (i.e. <code>
* {@link ByteBuffer#remaining()} == 0</code>) in a <em>reasonable</em> way; usually, {@code
* NULL} CQL values should map to {@code null} references, but exceptions exist; e.g. for
* varchar types, a {@code NULL} CQL value maps to a {@code null} reference, whereas an empty
* buffer maps to an empty String. For collection types, it is also admitted that {@code NULL}
* CQL values map to empty Java collections instead of {@code null} references. In any case,
* the codec's behavior with respect to {@code null} values and empty ByteBuffers should be
* clearly documented.
* <li>for Java types that have a primitive equivalent, <em>should</em> implement the appropriate
* "primitive" codec interface, e.g. {@link PrimitiveBooleanCodec} for {@code boolean}. This
* allows the driver to avoid the overhead of boxing when using primitive accessors such as
* {@link Row#getBoolean(int)}.
* <li>when decoding, <em>must</em> not consume {@link ByteBuffer} instances by performing
* relative read operations that modify their current position; codecs should instead prefer
* absolute read methods or, if necessary, {@link ByteBuffer#duplicate() duplicate} their byte
* buffers prior to reading them.
* </ol>
*/
public interface TypeCodec<JavaTypeT> {

  /** The Java type handled by this codec. */
  @NonNull
  GenericType<JavaTypeT> getJavaType();

  /** The CQL type handled by this codec. */
  @NonNull
  DataType getCqlType();

  /**
   * Whether this codec is capable of processing the given Java type.
   *
   * <p>The default implementation is <em>invariant</em> with respect to the passed argument
   * (through the usage of {@link GenericType#equals(Object)}) and <em>it's strongly recommended not
   * to modify this behavior</em>. This means that a codec will only ever accept the <em>exact</em>
   * Java type that it has been created for.
   *
   * <p>If the argument represents a Java primitive type, its wrapper type is considered instead.
   */
  default boolean accepts(@NonNull GenericType<?> javaType) {
    Preconditions.checkNotNull(javaType);
    return getJavaType().equals(javaType.wrap());
  }

  /**
   * Whether this codec is capable of processing the given Java class.
   *
   * <p>This implementation simply compares the given class (or its wrapper type if it is a
   * primitive type) against this codec's runtime (raw) class; it is <em>invariant</em> with respect
   * to the passed argument (through the usage of {@link Class#equals(Object)}) and <em>it's strongly
   * recommended not to modify this behavior</em>. This means that a codec will only ever return
   * {@code true} for the <em>exact</em> runtime (raw) Java class that it has been created for.
   *
   * <p>Implementors are encouraged to override this method if there is a more efficient way. In
   * particular, if the codec targets a final class, the check can be done with a simple {@code ==}.
   */
  default boolean accepts(@NonNull Class<?> javaClass) {
    Preconditions.checkNotNull(javaClass);
    // substitute the wrapper class for each primitive before comparing
    if (javaClass.isPrimitive()) {
      if (javaClass == Boolean.TYPE) {
        javaClass = Boolean.class;
      } else if (javaClass == Character.TYPE) {
        javaClass = Character.class;
      } else if (javaClass == Byte.TYPE) {
        javaClass = Byte.class;
      } else if (javaClass == Short.TYPE) {
        javaClass = Short.class;
      } else if (javaClass == Integer.TYPE) {
        javaClass = Integer.class;
      } else if (javaClass == Long.TYPE) {
        javaClass = Long.class;
      } else if (javaClass == Float.TYPE) {
        javaClass = Float.class;
      } else if (javaClass == Double.TYPE) {
        javaClass = Double.class;
      }
    }
    return getJavaType().getRawType().equals(javaClass);
  }

  /**
   * Whether this codec is capable of encoding the given Java object.
   *
   * <p>The object's Java type is inferred from its runtime (raw) type, contrary to {@link
   * #accepts(GenericType)} which is capable of handling generic types.
   *
   * <p>Contrary to other {@code accept} methods, this method's default implementation is
   * <em>covariant</em> with respect to the passed argument (through the usage of {@link
   * Class#isAssignableFrom(Class)}) and <em>it's strongly recommended not to modify this
   * behavior</em>. This means that, by default, a codec will accept <em>any subtype</em> of the
   * Java type that it has been created for. This is so because codec lookups by arbitrary Java
   * objects only make sense when attempting to encode, never when attempting to decode, and indeed
   * the {@linkplain #encode(Object, ProtocolVersion) encode} method is covariant with {@code
   * JavaTypeT}.
   *
   * <p>It can only handle non-parameterized types; codecs handling parameterized types, such as
   * collection types, must override this method and perform some sort of "manual" inspection of the
   * actual type parameters.
   *
   * <p>Similarly, codecs that only accept a partial subset of all possible values must override
   * this method and manually inspect the object to check if it complies or not with the codec's
   * limitations.
   *
   * <p>Finally, if the codec targets a non-generic Java class, it might be possible to implement
   * this method with a simple {@code instanceof} check.
   */
  default boolean accepts(@NonNull Object value) {
    Preconditions.checkNotNull(value);
    return getJavaType().getRawType().isAssignableFrom(value.getClass());
  }

  /** Whether this codec is capable of processing the given CQL type. */
  default boolean accepts(@NonNull DataType cqlType) {
    Preconditions.checkNotNull(cqlType);
    return this.getCqlType().equals(cqlType);
  }

  /**
   * Encodes the given value in the binary format of the CQL type handled by this codec.
   *
   * <ul>
   *   <li>Null values should be gracefully handled and no exception should be raised; they should
   *       be considered as the equivalent of a NULL CQL value;
   *   <li>Codecs for CQL collection types should not permit null elements;
   *   <li>Codecs for CQL collection types should treat a {@code null} input as the equivalent of an
   *       empty collection.
   * </ul>
   */
  @Nullable
  ByteBuffer encode(@Nullable JavaTypeT value, @NonNull ProtocolVersion protocolVersion);

  /**
   * Decodes a value from the binary format of the CQL type handled by this codec.
   *
   * <ul>
   *   <li>Null or empty buffers should be gracefully handled and no exception should be raised;
   *       they should be considered as the equivalent of a NULL CQL value and, in most cases,
   *       should map to {@code null} or a default value for the corresponding Java type, if
   *       applicable;
   *   <li>Codecs for CQL collection types should clearly document whether they return immutable
   *       collections or not (note that the driver's default collection codecs return
   *       <em>mutable</em> collections);
   *   <li>Codecs for CQL collection types should avoid returning {@code null}; they should return
   *       empty collections instead (the driver's default collection codecs all comply with this
   *       rule);
   *   <li>The provided {@link ByteBuffer} should never be consumed by read operations that modify
   *       its current position; if necessary, {@link ByteBuffer#duplicate() duplicate} it before
   *       consuming.
   * </ul>
   */
  @Nullable
  JavaTypeT decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion);

  /**
   * Formats the given value as a valid CQL literal according to the CQL type handled by this codec.
   *
   * <p>Implementors should take care of quoting and escaping the resulting CQL literal where
   * applicable. Null values should be accepted; in most cases, implementations should return the
   * CQL keyword {@code "NULL"} for {@code null} inputs.
   *
   * <p>Implementing this method is not strictly mandatory. It is used:
   *
   * <ol>
   *   <li>by the request logger, if parameter logging is enabled;
   *   <li>to format the INITCOND in {@link AggregateMetadata#describe(boolean)};
   *   <li>in the {@code toString()} representation of some driver objects (such as {@link UdtValue}
   *       and {@link TupleValue}), which is only used in driver logs;
   *   <li>for literal values in the query builder (see {@code QueryBuilder#literal(Object,
   *       CodecRegistry)} and {@code QueryBuilder#literal(Object, TypeCodec)}).
   * </ol>
   *
   * If you choose not to implement this method, don't throw an exception but instead return a
   * constant string (for example "XxxCodec.format not implemented").
   */
  @NonNull
  String format(@Nullable JavaTypeT value);

  /**
   * Parse the given CQL literal into an instance of the Java type handled by this codec.
   *
   * <p>Implementors should take care of unquoting and unescaping the given CQL string where
   * applicable. Null values and empty strings should be accepted, as well as the string {@code
   * "NULL"}; in most cases, implementations should interpret these inputs as equivalent to a
   * {@code null} reference.
   *
   * <p>Implementing this method is not strictly mandatory: internally, the driver only uses it to
   * parse the INITCOND when building the {@link AggregateMetadata metadata of an aggregate
   * function} (and in most cases it will use a built-in codec, unless the INITCOND has a custom
   * type).
   *
   * <p>If you choose not to implement this method, don't throw an exception but instead return
   * {@code null}.
   */
  @Nullable
  JavaTypeT parse(@Nullable String value);
}
| |
package org.cugos.wkg;
import org.junit.Test;
import java.util.Arrays;
import static org.junit.Assert.*;
public class MultiPolygonTest {
// Builds a MultiPolygon programmatically: a single 2D polygon whose outer ring
// is a triangle and which carries two inner rings; verifies the WKT rendering,
// the dimension, and that no SRID is set.
@Test
public void create() {
  MultiPolygon multiPolygon = new MultiPolygon(
      Arrays.asList(
          new Polygon(
              new LinearRing(Arrays.asList(
                  Coordinate.create2D(40, 40),
                  Coordinate.create2D(20, 45),
                  Coordinate.create2D(45, 30),
                  Coordinate.create2D(40, 40)
              ), Dimension.Two),
              Arrays.asList(
                  new LinearRing(Arrays.asList(
                      Coordinate.create2D(20, 35),
                      Coordinate.create2D(10, 30),
                      Coordinate.create2D(10, 10),
                      Coordinate.create2D(30, 5),
                      Coordinate.create2D(45, 20),
                      Coordinate.create2D(20, 35)
                  ), Dimension.Two),
                  new LinearRing(Arrays.asList(
                      Coordinate.create2D(30, 20),
                      Coordinate.create2D(20, 15),
                      Coordinate.create2D(20, 25),
                      Coordinate.create2D(30, 20)
                  ), Dimension.Two)
              ),
              Dimension.Two
          )
      ),
      Dimension.Two
  );
  assertEquals("MULTIPOLYGON (((40.0 40.0, 20.0 45.0, 45.0 30.0, 40.0 40.0), (20.0 35.0, 10.0 30.0, 10.0 10.0, 30.0 5.0, 45.0 20.0, 20.0 35.0), (30.0 20.0, 20.0 15.0, 20.0 25.0, 30.0 20.0)))", multiPolygon.toString());
  assertEquals(Dimension.Two, multiPolygon.getDimension());
  assertNull(multiPolygon.getSrid());
}
// A factory-created empty MultiPolygon has no polygons and renders as EMPTY WKT.
@Test
public void createEmpty() {
  MultiPolygon multiPolygon = MultiPolygon.createEmpty();
  assertTrue(multiPolygon.isEmpty());
  assertTrue(multiPolygon.getPolygons().isEmpty());
  assertEquals("MULTIPOLYGON EMPTY", multiPolygon.toString());
}
// Parsing "MULTIPOLYGON EMPTY" yields an empty MultiPolygon that round-trips
// back to the same WKT.
@Test
public void empty() {
  Geometry geometry = new WKTReader().read("MULTIPOLYGON EMPTY");
  assertNotNull(geometry);
  assertTrue(geometry instanceof MultiPolygon);
  MultiPolygon multiPolygon = (MultiPolygon) geometry;
  assertTrue(multiPolygon.isEmpty());
  assertTrue(multiPolygon.getPolygons().isEmpty());
  assertEquals("MULTIPOLYGON EMPTY", multiPolygon.toString());
}
// Parses a 2D multipolygon (triangle plus a polygon with one hole) and checks
// SRID, dimension, coordinate count, every ring's coordinates, and the WKT
// round-trip.
@Test
public void twoDimensional() {
  WKTReader reader = new WKTReader();
  Geometry geometry = reader.read("MULTIPOLYGON (" +
      "((40 40, 20 45, 45 30, 40 40))," +
      "((20 35, 10 30, 10 10, 30 5, 45 20, 20 35),(30 20, 20 15, 20 25, 30 20))" +
      ")");
  assertNotNull(geometry);
  assertTrue(geometry instanceof MultiPolygon);
  MultiPolygon mp = (MultiPolygon) geometry;
  assertNull(mp.getSrid());
  assertEquals(Dimension.Two, mp.getDimension());
  assertEquals(14, mp.getNumberOfCoordinates());
  assertEquals(2, mp.getPolygons().size());
  // 0
  Polygon polygon = mp.getPolygons().get(0);
  assertNull(polygon.getSrid());
  assertEquals(Dimension.Two, polygon.getDimension());
  assertNotNull(polygon.getOuterLinearRing());
  assertEquals(4, polygon.getOuterLinearRing().getCoordinates().size());
  assertEquals(0, polygon.getInnerLinearRings().size());
  LinearRing ring = polygon.getOuterLinearRing();
  assertEquals(Coordinate.create2D(40, 40), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create2D(20, 45), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create2D(45, 30), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create2D(40, 40), ring.getCoordinates().get(3));
  // 1
  polygon = mp.getPolygons().get(1);
  assertNull(polygon.getSrid());
  assertEquals(Dimension.Two, polygon.getDimension());
  assertNotNull(polygon.getOuterLinearRing());
  assertEquals(1, polygon.getInnerLinearRings().size());
  ring = polygon.getOuterLinearRing();
  assertEquals(6, ring.getCoordinates().size());
  assertEquals(Coordinate.create2D(20, 35), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create2D(10, 30), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create2D(10, 10), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create2D(30, 5), ring.getCoordinates().get(3));
  assertEquals(Coordinate.create2D(45, 20), ring.getCoordinates().get(4));
  assertEquals(Coordinate.create2D(20, 35), ring.getCoordinates().get(5));
  ring = polygon.getInnerLinearRings().get(0);
  assertEquals(4, ring.getCoordinates().size());
  assertEquals(Coordinate.create2D(30, 20), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create2D(20, 15), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create2D(20, 25), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create2D(30, 20), ring.getCoordinates().get(3));
  // WKT
  assertEquals("MULTIPOLYGON (" +
      "((40.0 40.0, 20.0 45.0, 45.0 30.0, 40.0 40.0)), " +
      "((20.0 35.0, 10.0 30.0, 10.0 10.0, 30.0 5.0, 45.0 20.0, 20.0 35.0), " +
      "(30.0 20.0, 20.0 15.0, 20.0 25.0, 30.0 20.0))" +
      ")", mp.toString());
}
// Same geometry as twoDimensional() but with an EWKT SRID prefix; verifies the
// SRID propagates from the MultiPolygon to each contained Polygon and appears
// in the WKT round-trip.
@Test
public void twoDimensionalWithSrid() {
  WKTReader reader = new WKTReader();
  Geometry geometry = reader.read("SRID=4326;MULTIPOLYGON (" +
      "((40 40, 20 45, 45 30, 40 40))," +
      "((20 35, 10 30, 10 10, 30 5, 45 20, 20 35),(30 20, 20 15, 20 25, 30 20))" +
      ")");
  assertNotNull(geometry);
  assertTrue(geometry instanceof MultiPolygon);
  MultiPolygon mp = (MultiPolygon) geometry;
  assertEquals("4326", mp.getSrid());
  assertEquals(Dimension.Two, mp.getDimension());
  assertEquals(2, mp.getPolygons().size());
  // 0
  Polygon polygon = mp.getPolygons().get(0);
  assertEquals("4326", polygon.getSrid());
  assertEquals(Dimension.Two, polygon.getDimension());
  assertNotNull(polygon.getOuterLinearRing());
  assertEquals(4, polygon.getOuterLinearRing().getCoordinates().size());
  assertEquals(0, polygon.getInnerLinearRings().size());
  LinearRing ring = polygon.getOuterLinearRing();
  assertEquals(Coordinate.create2D(40, 40), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create2D(20, 45), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create2D(45, 30), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create2D(40, 40), ring.getCoordinates().get(3));
  // 1
  polygon = mp.getPolygons().get(1);
  assertEquals("4326", polygon.getSrid());
  assertEquals(Dimension.Two, polygon.getDimension());
  assertNotNull(polygon.getOuterLinearRing());
  assertEquals(1, polygon.getInnerLinearRings().size());
  ring = polygon.getOuterLinearRing();
  assertEquals(6, ring.getCoordinates().size());
  assertEquals(Coordinate.create2D(20, 35), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create2D(10, 30), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create2D(10, 10), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create2D(30, 5), ring.getCoordinates().get(3));
  assertEquals(Coordinate.create2D(45, 20), ring.getCoordinates().get(4));
  assertEquals(Coordinate.create2D(20, 35), ring.getCoordinates().get(5));
  ring = polygon.getInnerLinearRings().get(0);
  assertEquals(4, ring.getCoordinates().size());
  assertEquals(Coordinate.create2D(30, 20), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create2D(20, 15), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create2D(20, 25), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create2D(30, 20), ring.getCoordinates().get(3));
  // WKT
  assertEquals("SRID=4326;MULTIPOLYGON (" +
      "((40.0 40.0, 20.0 45.0, 45.0 30.0, 40.0 40.0)), " +
      "((20.0 35.0, 10.0 30.0, 10.0 10.0, 30.0 5.0, 45.0 20.0, 20.0 35.0), " +
      "(30.0 20.0, 20.0 15.0, 20.0 25.0, 30.0 20.0))" +
      ")", mp.toString());
}
// Parses a measured 2D multipolygon ("MULTIPOLYGON M"): each coordinate carries
// an M value; verifies dimension TwoMeasured, all ring coordinates, and the WKT
// round-trip.
@Test
public void twoDimensionalMeasured() {
  WKTReader reader = new WKTReader();
  Geometry geometry = reader.read("MULTIPOLYGON M (" +
      "((40 40 1, 20 45 2, 45 30 3, 40 40 1))," +
      "((20 35 5, 10 30 6, 10 10 7, 30 5 8, 45 20 9, 20 35 5),(30 20 1, 20 15 2, 20 25 3, 30 20 1))" +
      ")");
  assertNotNull(geometry);
  assertTrue(geometry instanceof MultiPolygon);
  MultiPolygon mp = (MultiPolygon) geometry;
  assertNull(mp.getSrid());
  assertEquals(Dimension.TwoMeasured, mp.getDimension());
  assertEquals(2, mp.getPolygons().size());
  // 0
  Polygon polygon = mp.getPolygons().get(0);
  assertNull(polygon.getSrid());
  assertEquals(Dimension.TwoMeasured, polygon.getDimension());
  assertNotNull(polygon.getOuterLinearRing());
  assertEquals(4, polygon.getOuterLinearRing().getCoordinates().size());
  assertEquals(0, polygon.getInnerLinearRings().size());
  LinearRing ring = polygon.getOuterLinearRing();
  assertEquals(Coordinate.create2DM(40, 40, 1), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create2DM(20, 45, 2), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create2DM(45, 30, 3), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create2DM(40, 40, 1), ring.getCoordinates().get(3));
  // 1
  polygon = mp.getPolygons().get(1);
  assertNull(polygon.getSrid());
  assertEquals(Dimension.TwoMeasured, polygon.getDimension());
  assertNotNull(polygon.getOuterLinearRing());
  assertEquals(1, polygon.getInnerLinearRings().size());
  ring = polygon.getOuterLinearRing();
  assertEquals(6, ring.getCoordinates().size());
  assertEquals(Coordinate.create2DM(20, 35, 5), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create2DM(10, 30, 6), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create2DM(10, 10, 7), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create2DM(30, 5, 8), ring.getCoordinates().get(3));
  assertEquals(Coordinate.create2DM(45, 20, 9), ring.getCoordinates().get(4));
  assertEquals(Coordinate.create2DM(20, 35, 5), ring.getCoordinates().get(5));
  ring = polygon.getInnerLinearRings().get(0);
  assertEquals(4, ring.getCoordinates().size());
  assertEquals(Coordinate.create2DM(30, 20, 1), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create2DM(20, 15, 2), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create2DM(20, 25, 3), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create2DM(30, 20, 1), ring.getCoordinates().get(3));
  // WKT
  assertEquals("MULTIPOLYGON M (" +
      "((40.0 40.0 1.0, 20.0 45.0 2.0, 45.0 30.0 3.0, 40.0 40.0 1.0)), " +
      "((20.0 35.0 5.0, 10.0 30.0 6.0, 10.0 10.0 7.0, 30.0 5.0 8.0, 45.0 20.0 9.0, 20.0 35.0 5.0), " +
      "(30.0 20.0 1.0, 20.0 15.0 2.0, 20.0 25.0 3.0, 30.0 20.0 1.0))" +
      ")", mp.toString());
}
// Parses a 3D multipolygon ("MULTIPOLYGON Z"): same coordinate values as the
// measured variant but interpreted as Z; verifies dimension Three, all ring
// coordinates, and the WKT round-trip.
@Test
public void threeDimensional() {
  WKTReader reader = new WKTReader();
  Geometry geometry = reader.read("MULTIPOLYGON Z (" +
      "((40 40 1, 20 45 2, 45 30 3, 40 40 1))," +
      "((20 35 5, 10 30 6, 10 10 7, 30 5 8, 45 20 9, 20 35 5),(30 20 1, 20 15 2, 20 25 3, 30 20 1))" +
      ")");
  assertNotNull(geometry);
  assertTrue(geometry instanceof MultiPolygon);
  MultiPolygon mp = (MultiPolygon) geometry;
  assertNull(mp.getSrid());
  assertEquals(Dimension.Three, mp.getDimension());
  assertEquals(2, mp.getPolygons().size());
  // 0
  Polygon polygon = mp.getPolygons().get(0);
  assertNull(polygon.getSrid());
  assertEquals(Dimension.Three, polygon.getDimension());
  assertNotNull(polygon.getOuterLinearRing());
  assertEquals(4, polygon.getOuterLinearRing().getCoordinates().size());
  assertEquals(0, polygon.getInnerLinearRings().size());
  LinearRing ring = polygon.getOuterLinearRing();
  assertEquals(Coordinate.create3D(40, 40, 1), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create3D(20, 45, 2), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create3D(45, 30, 3), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create3D(40, 40, 1), ring.getCoordinates().get(3));
  // 1
  polygon = mp.getPolygons().get(1);
  assertNull(polygon.getSrid());
  assertEquals(Dimension.Three, polygon.getDimension());
  assertNotNull(polygon.getOuterLinearRing());
  assertEquals(1, polygon.getInnerLinearRings().size());
  ring = polygon.getOuterLinearRing();
  assertEquals(6, ring.getCoordinates().size());
  assertEquals(Coordinate.create3D(20, 35, 5), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create3D(10, 30, 6), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create3D(10, 10, 7), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create3D(30, 5, 8), ring.getCoordinates().get(3));
  assertEquals(Coordinate.create3D(45, 20, 9), ring.getCoordinates().get(4));
  assertEquals(Coordinate.create3D(20, 35, 5), ring.getCoordinates().get(5));
  ring = polygon.getInnerLinearRings().get(0);
  assertEquals(4, ring.getCoordinates().size());
  assertEquals(Coordinate.create3D(30, 20, 1), ring.getCoordinates().get(0));
  assertEquals(Coordinate.create3D(20, 15, 2), ring.getCoordinates().get(1));
  assertEquals(Coordinate.create3D(20, 25, 3), ring.getCoordinates().get(2));
  assertEquals(Coordinate.create3D(30, 20, 1), ring.getCoordinates().get(3));
  // WKT
  assertEquals("MULTIPOLYGON Z (" +
      "((40.0 40.0 1.0, 20.0 45.0 2.0, 45.0 30.0 3.0, 40.0 40.0 1.0)), " +
      "((20.0 35.0 5.0, 10.0 30.0 6.0, 10.0 10.0 7.0, 30.0 5.0 8.0, 45.0 20.0 9.0, 20.0 35.0 5.0), " +
      "(30.0 20.0 1.0, 20.0 15.0 2.0, 20.0 25.0 3.0, 30.0 20.0 1.0))" +
      ")", mp.toString());
}
@Test
public void threeDimensionalMeasured() {
    // Parse a two-polygon ZM (3D + measured) multipolygon; the second polygon has one hole.
    WKTReader reader = new WKTReader();
    Geometry geometry = reader.read("MULTIPOLYGON ZM (" +
            "((40 40 1 1, 20 45 2 2, 45 30 3 3, 40 40 1 1))," +
            "((20 35 5 5, 10 30 6 6, 10 10 7 7, 30 5 8 8, 45 20 9 9, 20 35 5 5),(30 20 1 1, 20 15 2 2, 20 25 3 3, 30 20 1 1))" +
            ")");
    assertNotNull(geometry);
    assertTrue(geometry instanceof MultiPolygon);
    MultiPolygon multi = (MultiPolygon) geometry;
    assertNull(multi.getSrid());
    assertEquals(Dimension.ThreeMeasured, multi.getDimension());
    assertEquals(2, multi.getPolygons().size());

    // First polygon: a closed four-coordinate outer ring, no holes.
    Polygon first = multi.getPolygons().get(0);
    assertNull(first.getSrid());
    assertEquals(Dimension.ThreeMeasured, first.getDimension());
    assertNotNull(first.getOuterLinearRing());
    assertEquals(0, first.getInnerLinearRings().size());
    LinearRing outer = first.getOuterLinearRing();
    assertEquals(4, outer.getCoordinates().size());
    assertEquals(Coordinate.create3DM(40, 40, 1, 1), outer.getCoordinates().get(0));
    assertEquals(Coordinate.create3DM(20, 45, 2, 2), outer.getCoordinates().get(1));
    assertEquals(Coordinate.create3DM(45, 30, 3, 3), outer.getCoordinates().get(2));
    assertEquals(Coordinate.create3DM(40, 40, 1, 1), outer.getCoordinates().get(3));

    // Second polygon: six-coordinate outer ring plus a single four-coordinate hole.
    Polygon second = multi.getPolygons().get(1);
    assertNull(second.getSrid());
    assertEquals(Dimension.ThreeMeasured, second.getDimension());
    assertNotNull(second.getOuterLinearRing());
    assertEquals(1, second.getInnerLinearRings().size());
    outer = second.getOuterLinearRing();
    assertEquals(6, outer.getCoordinates().size());
    assertEquals(Coordinate.create3DM(20, 35, 5, 5), outer.getCoordinates().get(0));
    assertEquals(Coordinate.create3DM(10, 30, 6, 6), outer.getCoordinates().get(1));
    assertEquals(Coordinate.create3DM(10, 10, 7, 7), outer.getCoordinates().get(2));
    assertEquals(Coordinate.create3DM(30, 5, 8, 8), outer.getCoordinates().get(3));
    assertEquals(Coordinate.create3DM(45, 20, 9, 9), outer.getCoordinates().get(4));
    assertEquals(Coordinate.create3DM(20, 35, 5, 5), outer.getCoordinates().get(5));
    LinearRing inner = second.getInnerLinearRings().get(0);
    assertEquals(4, inner.getCoordinates().size());
    assertEquals(Coordinate.create3DM(30, 20, 1, 1), inner.getCoordinates().get(0));
    assertEquals(Coordinate.create3DM(20, 15, 2, 2), inner.getCoordinates().get(1));
    assertEquals(Coordinate.create3DM(20, 25, 3, 3), inner.getCoordinates().get(2));
    assertEquals(Coordinate.create3DM(30, 20, 1, 1), inner.getCoordinates().get(3));

    // Round-trip: serializing back to WKT yields canonical formatting (doubles, ZM marker).
    assertEquals("MULTIPOLYGON ZM (" +
            "((40.0 40.0 1.0 1.0, 20.0 45.0 2.0 2.0, 45.0 30.0 3.0 3.0, 40.0 40.0 1.0 1.0)), " +
            "((20.0 35.0 5.0 5.0, 10.0 30.0 6.0 6.0, 10.0 10.0 7.0 7.0, 30.0 5.0 8.0 8.0, 45.0 20.0 9.0 9.0, 20.0 35.0 5.0 5.0), " +
            "(30.0 20.0 1.0 1.0, 20.0 15.0 2.0 2.0, 20.0 25.0 3.0 3.0, 30.0 20.0 1.0 1.0))" +
            ")", multi.toString());
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2018, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package jenkins.telemetry;
import com.google.common.annotations.VisibleForTesting;
import hudson.Extension;
import hudson.ExtensionList;
import hudson.ExtensionPoint;
import hudson.ProxyConfiguration;
import hudson.model.AsyncPeriodicWork;
import hudson.model.TaskListener;
import hudson.model.UsageStatistics;
import jenkins.model.Jenkins;
import jenkins.util.SystemProperties;
import net.sf.json.JSONObject;
import org.apache.commons.codec.digest.DigestUtils;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import edu.umd.cs.findbugs.annotations.CheckForNull;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.time.LocalDate;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Extension point for collecting JEP-214 telemetry.
*
* Implementations should provide a {@code description.jelly} file with additional details about their purpose and
* behavior which will be included in {@code help-usageStatisticsCollected.jelly} for {@link UsageStatistics}.
*
* @see <a href="https://www.jenkins.io/jep/214">JEP-214</a>
*
* @since 2.143
*/
public abstract class Telemetry implements ExtensionPoint {

    // https://webhook.site is a nice stand-in for this during development; just needs to end in ? to submit the ID as query parameter
    @Restricted(NoExternalUse.class)
    @VisibleForTesting
    static String ENDPOINT = SystemProperties.getString(Telemetry.class.getName() + ".endpoint", "https://uplink.jenkins.io/events");

    private static final Logger LOGGER = Logger.getLogger(Telemetry.class.getName());

    /**
     * ID of this collector, typically an alphanumeric string (and punctuation).
     *
     * Good IDs are globally unique and human readable (i.e. no UUIDs).
     *
     * For a periodically updated list of all public implementations, see https://www.jenkins.io/doc/developer/extensions/jenkins-core/#telemetry
     *
     * @return ID of the collector, never null or empty
     */
    @NonNull
    public String getId() {
        return getClass().getName();
    }

    /**
     * User friendly display name for this telemetry collector, ideally localized.
     *
     * @return display name, never null or empty
     */
    @NonNull
    public abstract String getDisplayName();

    /**
     * Start date for the collection.
     * Will be checked in Jenkins to not collect outside the defined time span.
     * This does not have to be precise enough for time zones to be a consideration.
     *
     * @return collection start date (inclusive)
     */
    @NonNull
    public abstract LocalDate getStart();

    /**
     * End date for the collection.
     * Will be checked in Jenkins to not collect outside the defined time span.
     * This does not have to be precise enough for time zones to be a consideration.
     *
     * @return collection end date (inclusive)
     */
    @NonNull
    public abstract LocalDate getEnd();

    /**
     * Returns the content to be sent to the telemetry service.
     *
     * This method is called periodically, once per content submission.
     *
     * @return The JSON payload, or null if no content should be submitted.
     */
    @CheckForNull
    public abstract JSONObject createContent();

    /**
     * @return all registered telemetry collectors
     */
    public static ExtensionList<Telemetry> all() {
        return ExtensionList.lookup(Telemetry.class);
    }

    /**
     * @since 2.147
     * @return whether to collect telemetry
     */
    public static boolean isDisabled() {
        if (UsageStatistics.DISABLED) {
            return true;
        }
        Jenkins jenkins = Jenkins.getInstanceOrNull();
        return jenkins == null || !jenkins.isUsageStatisticsCollected();
    }

    /**
     * Returns true iff we're in the time period during which this is supposed to collect data.
     * Both {@link #getStart()} and {@link #getEnd()} are treated as inclusive, matching the
     * boundary checks performed by {@link TelemetryReporter} before submission.
     *
     * @return true iff we're in the time period during which this is supposed to collect data
     *
     * @since 2.202
     */
    public boolean isActivePeriod() {
        LocalDate now = LocalDate.now();
        // Inclusive on both ends: the original strict isAfter/isBefore check wrongly excluded
        // the start and end dates themselves, disagreeing with TelemetryReporter#execute.
        return !now.isBefore(getStart()) && !now.isAfter(getEnd());
    }

    /**
     * Periodic work that gathers the payload of every active {@link Telemetry} implementation
     * and submits it to {@link #ENDPOINT} once per day.
     */
    @Extension
    public static class TelemetryReporter extends AsyncPeriodicWork {

        public TelemetryReporter() {
            super("telemetry collection");
        }

        @Override
        public long getRecurrencePeriod() {
            return TimeUnit.HOURS.toMillis(24);
        }

        @Override
        protected void execute(TaskListener listener) throws IOException, InterruptedException {
            if (isDisabled()) {
                LOGGER.info("Collection of anonymous usage statistics is disabled, skipping telemetry collection and submission");
                return;
            }
            Telemetry.all().forEach(telemetry -> {
                // Compute "today" once so both boundary checks agree within this iteration.
                final LocalDate today = LocalDate.now();
                if (telemetry.getStart().isAfter(today)) {
                    LOGGER.config("Skipping telemetry for '" + telemetry.getId() + "' as it is configured to start later");
                    return;
                }
                if (telemetry.getEnd().isBefore(today)) {
                    LOGGER.config("Skipping telemetry for '" + telemetry.getId() + "' as it is configured to end in the past");
                    return;
                }

                JSONObject data;
                try {
                    data = telemetry.createContent();
                } catch (Exception e) {
                    LOGGER.log(Level.WARNING, "Failed to build telemetry content for: '" + telemetry.getId() + "'", e);
                    // Do not submit anything for this collector: the original fell through and
                    // submitted an empty JSON object, which pollutes the collected data.
                    return;
                }
                if (data == null) {
                    LOGGER.log(Level.CONFIG, "Skipping telemetry for '" + telemetry.getId() + "' as it has no data");
                    return;
                }

                JSONObject wrappedData = new JSONObject();
                wrappedData.put("type", telemetry.getId());
                wrappedData.put("payload", data);
                // Correlator is hashed together with the trial ID so different trials from the
                // same instance cannot be cross-referenced.
                String correlationId = ExtensionList.lookupSingleton(Correlator.class).getCorrelationId();
                wrappedData.put("correlator", DigestUtils.sha256Hex(correlationId + telemetry.getId()));

                submit(telemetry, wrappedData.toString());
            });
        }

        /**
         * Submits one JSON body to {@link #ENDPOINT} for the given collector.
         * Failures are logged at low visibility only; see inline comment.
         */
        private static void submit(Telemetry telemetry, String body) {
            try {
                URL url = new URL(ENDPOINT);
                URLConnection conn = ProxyConfiguration.open(url);
                if (!(conn instanceof HttpURLConnection)) {
                    LOGGER.config("URL did not result in an HttpURLConnection: " + ENDPOINT);
                    return;
                }
                HttpURLConnection http = (HttpURLConnection) conn;
                http.setRequestProperty("Content-Type", "application/json; charset=utf-8");
                http.setDoOutput(true); // implies POST
                if (LOGGER.isLoggable(Level.FINEST)) {
                    LOGGER.finest("Submitting JSON: " + body);
                }
                try (OutputStream out = http.getOutputStream();
                     OutputStreamWriter writer = new OutputStreamWriter(out, StandardCharsets.UTF_8)) {
                    writer.append(body);
                }
                LOGGER.config("Telemetry submission received response '" + http.getResponseCode() + " " + http.getResponseMessage() + "' for: " + telemetry.getId());
            } catch (MalformedURLException e) {
                LOGGER.config("Malformed endpoint URL: " + ENDPOINT + " for telemetry: " + telemetry.getId());
            } catch (IOException e) {
                // deliberately low visibility, as temporary infra problems aren't a big deal and we'd
                // rather have some unsuccessful submissions than admins opting out to clean up logs
                LOGGER.log(Level.CONFIG, "Failed to submit telemetry: " + telemetry.getId() + " to: " + ENDPOINT, e);
            }
        }
    }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.registry;
import com.google.common.base.CaseFormat;
import net.minecraft.advancements.CriteriaTriggers;
import net.minecraft.advancements.CriterionTrigger;
import net.minecraft.advancements.FrameType;
import net.minecraft.resources.ResourceLocation;
import net.minecraft.world.Difficulty;
import net.minecraft.world.InteractionHand;
import net.minecraft.world.entity.EquipmentSlot;
import net.minecraft.world.entity.HumanoidArm;
import net.minecraft.world.entity.MobCategory;
import net.minecraft.world.entity.ai.attributes.AttributeModifier;
import net.minecraft.world.entity.animal.Fox;
import net.minecraft.world.entity.animal.MushroomCow;
import net.minecraft.world.entity.animal.Panda;
import net.minecraft.world.entity.animal.TropicalFish;
import net.minecraft.world.entity.boss.enderdragon.phases.EnderDragonPhase;
import net.minecraft.world.entity.monster.Phantom;
import net.minecraft.world.entity.monster.SpellcasterIllager;
import net.minecraft.world.entity.player.ChatVisiblity;
import net.minecraft.world.entity.projectile.AbstractArrow;
import net.minecraft.world.entity.raid.Raid;
import net.minecraft.world.entity.vehicle.Boat;
import net.minecraft.world.item.ArmorMaterials;
import net.minecraft.world.item.DyeColor;
import net.minecraft.world.item.FireworkRocketItem;
import net.minecraft.world.item.Rarity;
import net.minecraft.world.item.Tiers;
import net.minecraft.world.level.GameType;
import net.minecraft.world.level.TickPriority;
import net.minecraft.world.level.block.Mirror;
import net.minecraft.world.level.block.Rotation;
import net.minecraft.world.level.block.entity.BannerPattern;
import net.minecraft.world.level.block.state.properties.AttachFace;
import net.minecraft.world.level.block.state.properties.ChestType;
import net.minecraft.world.level.block.state.properties.ComparatorMode;
import net.minecraft.world.level.block.state.properties.DoorHingeSide;
import net.minecraft.world.level.block.state.properties.Half;
import net.minecraft.world.level.block.state.properties.NoteBlockInstrument;
import net.minecraft.world.level.block.state.properties.PistonType;
import net.minecraft.world.level.block.state.properties.RailShape;
import net.minecraft.world.level.block.state.properties.RedstoneSide;
import net.minecraft.world.level.block.state.properties.SlabType;
import net.minecraft.world.level.block.state.properties.StairsShape;
import net.minecraft.world.level.block.state.properties.StructureMode;
import net.minecraft.world.level.levelgen.Heightmap;
import net.minecraft.world.scores.Team;
import net.minecraft.world.scores.criteria.ObjectiveCriteria;
import org.spongepowered.api.ResourceKey;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.advancement.criteria.trigger.Trigger;
import org.spongepowered.api.advancement.criteria.trigger.Triggers;
import org.spongepowered.api.item.FireworkShape;
import org.spongepowered.api.item.FireworkShapes;
import org.spongepowered.api.registry.DefaultedRegistryReference;
import org.spongepowered.api.registry.Registry;
import org.spongepowered.api.registry.RegistryKey;
import org.spongepowered.api.registry.RegistryType;
import org.spongepowered.api.registry.RegistryTypes;
import org.spongepowered.api.scoreboard.criteria.Criteria;
import org.spongepowered.api.scoreboard.criteria.Criterion;
import org.spongepowered.common.accessor.advancements.CriteriaTriggersAccessor;
import org.spongepowered.common.accessor.world.entity.animal.MushroomCow_MushroomTypeAccessor;
import org.spongepowered.common.accessor.world.item.ArmorMaterialsAccessor;
import org.spongepowered.common.accessor.world.level.GameRulesAccessor;
import org.spongepowered.common.accessor.world.level.block.entity.BannerPatternAccessor;
import org.spongepowered.common.advancement.criterion.SpongeDummyTrigger;
import org.spongepowered.common.advancement.criterion.SpongeScoreTrigger;
import java.util.Collection;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
/**
 * Registers Sponge registries that are backed by Vanilla (Minecraft) enums and
 * registry-like singletons, mapping each Vanilla value to a Sponge {@link ResourceKey}.
 */
final class VanillaRegistryLoader {

    private final SpongeRegistryHolder holder;

    /**
     * Entry point: creates a loader for {@code holder} and registers all
     * enum-backed and instance-backed registries.
     */
    public static void load(final SpongeRegistryHolder holder) {
        final VanillaRegistryLoader loader = new VanillaRegistryLoader(holder);
        loader.loadEnumRegistries();
        loader.loadInstanceRegistries();
    }

    private VanillaRegistryLoader(final SpongeRegistryHolder holder) {
        this.holder = holder;
    }

    /** Registers registries whose values are Vanilla singleton instances (not enums). */
    private void loadInstanceRegistries() {
        this.holder.createRegistry(RegistryTypes.CRITERION, VanillaRegistryLoader.criterion());
        this.manualName(RegistryTypes.DRAGON_PHASE_TYPE, EnderDragonPhase.getCount(), map -> {
            map.put(EnderDragonPhase.HOLDING_PATTERN, "holding_pattern");
            map.put(EnderDragonPhase.STRAFE_PLAYER, "strafe_player");
            map.put(EnderDragonPhase.LANDING_APPROACH, "landing_approach");
            map.put(EnderDragonPhase.LANDING, "landing");
            map.put(EnderDragonPhase.TAKEOFF, "takeoff");
            map.put(EnderDragonPhase.SITTING_FLAMING, "sitting_flaming");
            map.put(EnderDragonPhase.SITTING_SCANNING, "sitting_scanning");
            map.put(EnderDragonPhase.SITTING_ATTACKING, "sitting_attacking");
            map.put(EnderDragonPhase.CHARGING_PLAYER, "charging_player");
            map.put(EnderDragonPhase.DYING, "dying");
            map.put(EnderDragonPhase.HOVERING, "hover");
        });
        this.holder.createRegistry(RegistryTypes.FIREWORK_SHAPE, VanillaRegistryLoader.fireworkShape());
        // Triggers are dynamic: registering a Sponge trigger also registers it with Vanilla.
        this.holder.createRegistry(RegistryTypes.TRIGGER, VanillaRegistryLoader.trigger(), true,
                (k, trigger) -> CriteriaTriggersAccessor.invoker$register((CriterionTrigger<?>) trigger));
        this.knownName(RegistryTypes.GAME_RULE, GameRulesAccessor.accessor$GAME_RULE_TYPES().keySet(), rule -> CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, rule.getId()));
    }

    /** Registers registries whose values are Vanilla enum constants. */
    private void loadEnumRegistries() {
        this.knownName(RegistryTypes.ARMOR_MATERIAL, ArmorMaterials.values(), am -> ((ArmorMaterialsAccessor) (Object) am).accessor$name());
        this.knownName(RegistryTypes.ATTACHMENT_SURFACE, AttachFace.values(), AttachFace::getSerializedName);
        this.manualName(RegistryTypes.ATTRIBUTE_OPERATION, AttributeModifier.Operation.values(), map -> {
            // names come from net.minecraft.world.level.storage.loot.functions.SetAttributesFunction.Modifier#operationFromString
            map.put(AttributeModifier.Operation.ADDITION, "addition");
            map.put(AttributeModifier.Operation.MULTIPLY_BASE, "multiply_base");
            map.put(AttributeModifier.Operation.MULTIPLY_TOTAL, "multiply_total");
        });
        this.knownName(RegistryTypes.BOAT_TYPE, Boat.Type.values(), Boat.Type::getName);
        this.knownName(RegistryTypes.CHEST_ATTACHMENT_TYPE, ChestType.values(), ChestType::getSerializedName);
        this.manualName(RegistryTypes.COLLISION_RULE, Team.CollisionRule.values(), map -> {
            map.put(Team.CollisionRule.ALWAYS, "always");
            map.put(Team.CollisionRule.NEVER, "never");
            map.put(Team.CollisionRule.PUSH_OTHER_TEAMS, "push_other_teams");
            map.put(Team.CollisionRule.PUSH_OWN_TEAM, "push_own_team");
        });
        this.knownName(RegistryTypes.COMPARATOR_MODE, ComparatorMode.values(), ComparatorMode::getSerializedName);
        this.knownName(RegistryTypes.DIFFICULTY, Difficulty.values(), Difficulty::getKey);
        this.knownName(RegistryTypes.DYE_COLOR, DyeColor.values(), DyeColor::getSerializedName);
        this.knownName(RegistryTypes.DOOR_HINGE, DoorHingeSide.values(), DoorHingeSide::getSerializedName);
        this.manualName(RegistryTypes.EQUIPMENT_GROUP, EquipmentSlot.Type.values(), map -> {
            map.put(EquipmentSlot.Type.ARMOR, "worn");
            map.put(EquipmentSlot.Type.HAND, "held");
        });
        this.manualName(RegistryTypes.EQUIPMENT_TYPE, EquipmentSlot.values(), map -> {
            map.put(EquipmentSlot.CHEST, "chest");
            map.put(EquipmentSlot.FEET, "feet");
            map.put(EquipmentSlot.HEAD, "head");
            map.put(EquipmentSlot.LEGS, "legs");
            map.put(EquipmentSlot.MAINHAND, "main_hand");
            map.put(EquipmentSlot.OFFHAND, "off_hand");
        });
        this.knownName(RegistryTypes.FOX_TYPE, Fox.Type.values(), Fox.Type::getName);
        this.manualName(RegistryTypes.GAME_MODE, GameType.values(), map -> {
            map.put(GameType.NOT_SET, "not_set"); // getName returns "" (empty string) // TODO(kashike): 1.17
            map.put(GameType.SURVIVAL, GameType.SURVIVAL.getName());
            map.put(GameType.CREATIVE, GameType.CREATIVE.getName());
            map.put(GameType.ADVENTURE, GameType.ADVENTURE.getName());
            map.put(GameType.SPECTATOR, GameType.SPECTATOR.getName());
        });
        this.automaticName(RegistryTypes.HAND_PREFERENCE, HumanoidArm.values());
        this.automaticName(RegistryTypes.HAND_TYPE, InteractionHand.values());
        this.knownName(RegistryTypes.INSTRUMENT_TYPE, NoteBlockInstrument.values(), NoteBlockInstrument::getSerializedName);
        this.automaticName(RegistryTypes.ITEM_RARITY, Rarity.values());
        this.automaticName(RegistryTypes.ITEM_TIER, Tiers.values());
        this.knownName(RegistryTypes.MOOSHROOM_TYPE, MushroomCow.MushroomType.values(), type -> ((MushroomCow_MushroomTypeAccessor) (Object) type).accessor$type());
        this.knownName(RegistryTypes.OBJECTIVE_DISPLAY_MODE, ObjectiveCriteria.RenderType.values(), ObjectiveCriteria.RenderType::getId);
        this.knownName(RegistryTypes.PANDA_GENE, Panda.Gene.values(), Panda.Gene::getName);
        this.automaticName(RegistryTypes.PHANTOM_PHASE, Phantom.AttackPhase.values());
        this.automaticName(RegistryTypes.PICKUP_RULE, AbstractArrow.Pickup.values());
        this.automaticName(RegistryTypes.MIRROR, Mirror.values());
        this.automaticName(RegistryTypes.CHAT_VISIBILITY, ChatVisiblity.values());
        this.knownName(RegistryTypes.PISTON_TYPE, PistonType.values(), PistonType::getSerializedName);
        this.knownName(RegistryTypes.PORTION_TYPE, Half.values(), Half::getSerializedName);
        this.automaticName(RegistryTypes.RAID_STATUS, Raid.RaidStatus.values());
        this.automaticName(RegistryTypes.ROTATION, Rotation.values());
        this.knownName(RegistryTypes.RAIL_DIRECTION, RailShape.values(), RailShape::getSerializedName);
        this.knownName(RegistryTypes.SLAB_PORTION, SlabType.values(), SlabType::getSerializedName);
        this.automaticName(RegistryTypes.SPELL_TYPE, SpellcasterIllager.IllagerSpell.values());
        this.knownName(RegistryTypes.STAIR_SHAPE, StairsShape.values(), StairsShape::getSerializedName);
        this.knownName(RegistryTypes.STRUCTURE_MODE, StructureMode.values(), StructureMode::getSerializedName);
        this.automaticName(RegistryTypes.TASK_PRIORITY, TickPriority.values());
        this.manualName(RegistryTypes.VISIBILITY, Team.Visibility.values(), map -> {
            map.put(Team.Visibility.ALWAYS, "always");
            map.put(Team.Visibility.NEVER, "never");
            map.put(Team.Visibility.HIDE_FOR_OTHER_TEAMS, "hide_for_other_teams");
            map.put(Team.Visibility.HIDE_FOR_OWN_TEAM, "hide_for_own_team");
        });
        this.knownName(RegistryTypes.WIRE_ATTACHMENT_TYPE, RedstoneSide.values(), RedstoneSide::getSerializedName);
        this.knownName(RegistryTypes.ADVANCEMENT_TYPE, FrameType.values(), FrameType::getName);
        this.knownName(RegistryTypes.BANNER_PATTERN_SHAPE, BannerPattern.values(), b -> ((BannerPatternAccessor) (Object) b).accessor$filename());
        this.automaticName(RegistryTypes.TROPICAL_FISH_SHAPE, TropicalFish.Pattern.values());
        this.automaticName(RegistryTypes.HEIGHT_TYPE, Heightmap.Types.values());
        this.knownName(RegistryTypes.ENTITY_CATEGORY, MobCategory.values(), MobCategory::getName);
    }

    /** Builds the scoreboard criterion registry from Vanilla {@link ObjectiveCriteria} singletons. */
    private static RegistryLoader<Criterion> criterion() {
        return RegistryLoader.of(l -> {
            l.add(Criteria.AIR, k -> (Criterion) ObjectiveCriteria.AIR);
            l.add(Criteria.ARMOR, k -> (Criterion) ObjectiveCriteria.ARMOR);
            l.add(Criteria.DEATH_COUNT, k -> (Criterion) ObjectiveCriteria.DEATH_COUNT);
            l.add(Criteria.DUMMY, k -> (Criterion) ObjectiveCriteria.DUMMY);
            l.add(Criteria.EXPERIENCE, k -> (Criterion) ObjectiveCriteria.EXPERIENCE);
            l.add(Criteria.FOOD, k -> (Criterion) ObjectiveCriteria.FOOD);
            l.add(Criteria.HEALTH, k -> (Criterion) ObjectiveCriteria.HEALTH);
            l.add(Criteria.LEVEL, k -> (Criterion) ObjectiveCriteria.LEVEL);
            l.add(Criteria.PLAYER_KILL_COUNT, k -> (Criterion) ObjectiveCriteria.KILL_COUNT_PLAYERS);
            l.add(Criteria.TOTAL_KILL_COUNT, k -> (Criterion) ObjectiveCriteria.KILL_COUNT_ALL);
            l.add(Criteria.TRIGGER, k -> (Criterion) ObjectiveCriteria.TRIGGER);
        });
    }

    /** Builds the firework shape registry from Vanilla {@link FireworkRocketItem.Shape} constants. */
    private static RegistryLoader<FireworkShape> fireworkShape() {
        return RegistryLoader.of(l -> {
            l.add(FireworkRocketItem.Shape.BURST.getId(), FireworkShapes.BURST, () -> (FireworkShape) (Object) FireworkRocketItem.Shape.BURST);
            l.add(FireworkRocketItem.Shape.CREEPER.getId(), FireworkShapes.CREEPER, () -> (FireworkShape) (Object) FireworkRocketItem.Shape.CREEPER);
            l.add(FireworkRocketItem.Shape.LARGE_BALL.getId(), FireworkShapes.LARGE_BALL, () -> (FireworkShape) (Object) FireworkRocketItem.Shape.LARGE_BALL);
            l.add(FireworkRocketItem.Shape.SMALL_BALL.getId(), FireworkShapes.SMALL_BALL, () -> (FireworkShape) (Object) FireworkRocketItem.Shape.SMALL_BALL);
            l.add(FireworkRocketItem.Shape.STAR.getId(), FireworkShapes.STAR, () -> (FireworkShape) (Object) FireworkRocketItem.Shape.STAR);
        });
    }

    /** Builds the advancement trigger registry from Vanilla triggers plus Sponge's dummy/score triggers. */
    private static RegistryLoader<Trigger<?>> trigger() {
        return RegistryLoader.of(l -> {
            l.add(Triggers.BAD_OMEN, k -> (Trigger) CriteriaTriggers.BAD_OMEN);
            l.add(Triggers.BEE_NEST_DESTROYED, k -> (Trigger) CriteriaTriggers.BEE_NEST_DESTROYED);
            l.add(Triggers.BRED_ANIMALS, k -> (Trigger) CriteriaTriggers.BRED_ANIMALS);
            l.add(Triggers.BREWED_POTION, k -> (Trigger) CriteriaTriggers.BREWED_POTION);
            l.add(Triggers.CHANGED_DIMENSION, k -> (Trigger) CriteriaTriggers.CHANGED_DIMENSION);
            l.add(Triggers.CHANNELED_LIGHTNING, k -> (Trigger) CriteriaTriggers.CHANNELED_LIGHTNING);
            l.add(Triggers.CONSTRUCT_BEACON, k -> (Trigger) CriteriaTriggers.CONSTRUCT_BEACON);
            l.add(Triggers.CONSUME_ITEM, k -> (Trigger) CriteriaTriggers.CONSUME_ITEM);
            l.add(Triggers.CURED_ZOMBIE_VILLAGER, k -> (Trigger) CriteriaTriggers.CURED_ZOMBIE_VILLAGER);
            l.add(Triggers.EFFECTS_CHANGED, k -> (Trigger) CriteriaTriggers.EFFECTS_CHANGED);
            l.add(Triggers.ENCHANTED_ITEM, k -> (Trigger) CriteriaTriggers.ENCHANTED_ITEM);
            l.add(Triggers.ENTER_BLOCK, k -> (Trigger) CriteriaTriggers.ENTER_BLOCK);
            l.add(Triggers.ENTITY_HURT_PLAYER, k -> (Trigger) CriteriaTriggers.ENTITY_HURT_PLAYER);
            l.add(Triggers.ENTITY_KILLED_PLAYER, k -> (Trigger) CriteriaTriggers.ENTITY_KILLED_PLAYER);
            l.add(Triggers.FILLED_BUCKET, k -> (Trigger) CriteriaTriggers.FILLED_BUCKET);
            l.add(Triggers.FISHING_ROD_HOOKED, k -> (Trigger) CriteriaTriggers.FISHING_ROD_HOOKED);
            l.add(Triggers.GENERATE_LOOT, k -> (Trigger) CriteriaTriggers.GENERATE_LOOT);
            l.add(Triggers.HONEY_BLOCK_SIDE, k -> (Trigger) CriteriaTriggers.HONEY_BLOCK_SLIDE);
            l.add(Triggers.IMPOSSIBLE, k -> (Trigger) CriteriaTriggers.IMPOSSIBLE);
            l.add(Triggers.INVENTORY_CHANGED, k -> (Trigger) CriteriaTriggers.INVENTORY_CHANGED);
            l.add(Triggers.ITEM_DURABILITY_CHANGED, k -> (Trigger) CriteriaTriggers.ITEM_DURABILITY_CHANGED);
            l.add(Triggers.ITEM_PICKED_UP_BY_ENTITY, k -> (Trigger) CriteriaTriggers.ITEM_PICKED_UP_BY_ENTITY);
            l.add(Triggers.ITEM_USED_ON_BLOCK, k -> (Trigger) CriteriaTriggers.ITEM_USED_ON_BLOCK);
            l.add(Triggers.KILLED_BY_CROSSBOW, k -> (Trigger) CriteriaTriggers.KILLED_BY_CROSSBOW);
            l.add(Triggers.LEVITATION, k -> (Trigger) CriteriaTriggers.LEVITATION);
            l.add(Triggers.LOCATION, k -> (Trigger) CriteriaTriggers.LOCATION);
            l.add(Triggers.NETHER_TRAVEL, k -> (Trigger) CriteriaTriggers.NETHER_TRAVEL);
            l.add(Triggers.PLACED_BLOCK, k -> (Trigger) CriteriaTriggers.PLACED_BLOCK);
            l.add(Triggers.PLAYER_HURT_ENTITY, k -> (Trigger) CriteriaTriggers.PLAYER_HURT_ENTITY);
            l.add(Triggers.PLAYER_INTERACTED_WITH_ENTITY, k -> (Trigger) CriteriaTriggers.PLAYER_INTERACTED_WITH_ENTITY);
            l.add(Triggers.PLAYER_KILLED_ENTITY, k -> (Trigger) CriteriaTriggers.PLAYER_KILLED_ENTITY);
            l.add(Triggers.RAID_WIN, k -> (Trigger) CriteriaTriggers.RAID_WIN);
            l.add(Triggers.RECIPE_UNLOCKED, k -> (Trigger) CriteriaTriggers.RECIPE_UNLOCKED);
            l.add(Triggers.SHOT_CROSSBOW, k -> (Trigger) CriteriaTriggers.SHOT_CROSSBOW);
            l.add(Triggers.SLEPT_IN_BED, k -> (Trigger) CriteriaTriggers.SLEPT_IN_BED);
            l.add(Triggers.SUMMONED_ENTITY, k -> (Trigger) CriteriaTriggers.SUMMONED_ENTITY);
            l.add(Triggers.TAME_ANIMAL, k -> (Trigger) CriteriaTriggers.TAME_ANIMAL);
            l.add(Triggers.TARGET_BLOCK_HIT, k -> (Trigger) CriteriaTriggers.TARGET_BLOCK_HIT);
            l.add(Triggers.TICK, k -> (Trigger) CriteriaTriggers.TICK);
            l.add(Triggers.USED_ENDER_EYE, k -> (Trigger) CriteriaTriggers.USED_ENDER_EYE);
            l.add(Triggers.USED_TOTEM, k -> (Trigger) CriteriaTriggers.USED_TOTEM);
            l.add(Triggers.VILLAGER_TRADE, k -> (Trigger) CriteriaTriggers.TRADE);
            final DefaultedRegistryReference<Trigger<?>> dummyKey =
                    RegistryKey.of(RegistryTypes.TRIGGER, ResourceKey.sponge("dummy")).asDefaultedReference(Sponge::game);
            l.add(dummyKey, k -> (Trigger) (Object) SpongeDummyTrigger.DUMMY_TRIGGER);
            final DefaultedRegistryReference<Trigger<?>> scoreKey =
                    RegistryKey.of(RegistryTypes.TRIGGER, ResourceKey.sponge("score")).asDefaultedReference(Sponge::game);
            l.add(scoreKey, k -> (Trigger) (Object) SpongeScoreTrigger.SCORE_TRIGGER);
        });
    }

    // The following methods are named for clarity above.

    /** Names each enum constant from its lowercased {@link Enum#name()}. */
    @SuppressWarnings("UnusedReturnValue")
    private <A, I extends Enum<I>> Registry<A> automaticName(final RegistryType<A> type, final I[] values) {
        return this.naming(type, values, value -> value.name().toLowerCase(Locale.ROOT));
    }

    /** Names each enum constant via a caller-supplied name function. */
    @SuppressWarnings("UnusedReturnValue")
    private <A, I extends Enum<I>> Registry<A> knownName(final RegistryType<A> type, final I[] values, final Function<I, String> name) {
        return this.naming(type, values, name);
    }

    /** Names each value of an arbitrary collection via a caller-supplied name function. */
    @SuppressWarnings("UnusedReturnValue")
    private <A, I> Registry<A> knownName(final RegistryType<A> type, final Collection<I> values, final Function<I, String> name) {
        final Map<I, String> map = new HashMap<>();
        for (final I value : values) {
            map.put(value, name.apply(value));
        }
        return this.naming(type, values.size(), map);
    }

    /** Names each enum constant from an explicitly populated mapping. */
    @SuppressWarnings("UnusedReturnValue")
    private <A, I extends Enum<I>> Registry<A> manualName(final RegistryType<A> type, final I[] values, final Consumer<Manual<A, I>> byName) {
        final Map<I, String> map = new HashMap<>(values.length);
        byName.accept(map::put);
        return this.naming(type, values, map);
    }

    /** Names a fixed count of non-enum values from an explicitly populated mapping. */
    @SuppressWarnings("UnusedReturnValue")
    private <A, I> Registry<A> manualName(final RegistryType<A> type, final int values, final Consumer<Manual<A, I>> byName) {
        final Map<I, String> map = new HashMap<>(values);
        byName.accept(map::put);
        return this.naming(type, values, map);
    }

    @SuppressWarnings("UnusedReturnValue")
    private <A, I extends Enum<I>> Registry<A> naming(final RegistryType<A> type, final I[] values, final Function<I, String> name) {
        final Map<I, String> map = new HashMap<>();
        for (final I value : values) {
            map.put(value, name.apply(value));
        }
        return this.naming(type, values, map);
    }

    @SuppressWarnings("UnusedReturnValue")
    private <A, I extends Enum<I>> Registry<A> naming(final RegistryType<A> type, final I[] values, final Map<I, String> byName) {
        return this.naming(type, values.length, byName);
    }

    /**
     * Creates the registry, verifying every expected value was named.
     *
     * @throws IllegalStateException if the name map does not cover exactly {@code values} entries
     */
    @SuppressWarnings({"UnusedReturnValue", "unchecked"})
    private <A, I> Registry<A> naming(final RegistryType<A> type, final int values, final Map<I, String> byName) {
        if (values != byName.size()) {
            // Message fixed: was "is has value mismatch"
            throw new IllegalStateException(type.location() + " in " + type.root() + " has value mismatch: " + values + " / " + byName.size());
        }
        return this.holder.createRegistry(type, () -> {
            final Map<ResourceKey, A> map = new HashMap<>();
            for (final Map.Entry<I, String> value : byName.entrySet()) {
                final String rawId = value.getValue();
                // To address Vanilla shortcomings, some mods will manually prefix their modid onto values they put into Vanilla registry-like
                // registrars. We need to account for that possibility
                if (rawId.contains(":")) {
                    map.put((ResourceKey) (Object) new ResourceLocation(rawId), (A) value.getKey());
                } else {
                    map.put(ResourceKey.sponge(rawId), (A) value.getKey());
                }
            }
            return map;
        }, false);
    }

    /** Write-only sink used by the {@code manualName} overloads to collect value → name pairs. */
    @SuppressWarnings("unused")
    private interface Manual<A, I> {
        void put(final I value, final String key);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.permissions.graph;
import java.lang.reflect.Method;
import java.util.HashMap;
import org.junit.Assert;
import org.apache.jena.graph.Graph ;
import org.apache.jena.permissions.AccessDeniedException;
import org.apache.jena.permissions.Factory;
import org.apache.jena.permissions.SecurityEvaluator;
import org.apache.jena.permissions.SecurityEvaluatorParameters;
import org.apache.jena.permissions.SecurityEvaluator.Action;
import org.apache.jena.permissions.graph.SecuredGraph;
import org.apache.jena.permissions.graph.SecuredPrefixMapping;
import org.apache.jena.shared.PrefixMapping ;
import org.apache.jena.shared.impl.PrefixMappingImpl ;
import org.apache.jena.sparql.graph.GraphFactory ;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith( value = SecurityEvaluatorParameters.class )
public class SecuredPrefixMappingTest
{
public static void runTests( final SecurityEvaluator securityEvaluator,
final PrefixMapping prefixMapping ) throws Exception
{
final PrefixMapping pm = prefixMapping;
Assert.assertNotNull("PrefixMapping may not be null", pm);
Assert.assertTrue("PrefixMapping should be secured",
pm instanceof SecuredPrefixMapping);
final SecuredPrefixMappingTest pmTest = new SecuredPrefixMappingTest(
securityEvaluator) {
@Override
public void setup()
{
this.securedMapping = (SecuredPrefixMapping) pm;
}
};
Method lockTest = null;
for (final Method m : pmTest.getClass().getMethods())
{
if (m.isAnnotationPresent(Test.class))
{
// lock test must come last
if (m.getName().equals("testLock"))
{
lockTest = m;
}
else
{
pmTest.setup();
m.invoke(pmTest);
}
}
}
Assert.assertNotNull( "Did not find 'testLock' method", lockTest );
pmTest.setup();
lockTest.invoke(pmTest);
}
private final SecurityEvaluator securityEvaluator;
private final Object principal;
protected SecuredPrefixMapping securedMapping;
public SecuredPrefixMappingTest( final SecurityEvaluator securityEvaluator )
{
this.securityEvaluator = securityEvaluator;
this.principal = securityEvaluator.getPrincipal();
}
@Before
public void setup()
{
final Graph g = GraphFactory.createDefaultGraph();
final SecuredGraph sg = Factory.getInstance(securityEvaluator,
"http://example.com/testGraph", g);
this.securedMapping = sg.getPrefixMapping();
}
@Test
public void testExpandPrefix()
{
try
{
securedMapping.expandPrefix("foo");
if (!securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testGetNsPrefixMap()
{
try
{
securedMapping.getNsPrefixMap();
if (!securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testGetNsPrefixURI()
{
try
{
securedMapping.getNsPrefixURI("foo");
if (!securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testGetNsURIPrefix()
{
try
{
securedMapping.getNsURIPrefix("http://example.com/foo");
if (!securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testLock()
{
try
{
securedMapping.lock();
if (!securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testQnameFor()
{
try
{
securedMapping.qnameFor("http://example.com/foo/bar");
if (!securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testRemoveNsPrefix()
{
try
{
securedMapping.removeNsPrefix("foo");
if (!securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testSamePrefixMappingAs()
{
try
{
securedMapping.samePrefixMappingAs(GraphFactory
.createDefaultGraph().getPrefixMapping());
if (!securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testSetNsPrefix()
{
try
{
securedMapping.setNsPrefix("foo", "http://example.com/foo");
if (!securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
try
{
securedMapping.setNsPrefixes(GraphFactory.createDefaultGraph()
.getPrefixMapping());
if (!securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
try
{
securedMapping.setNsPrefixes(new HashMap<String, String>());
if (!securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testShortForm()
{
try
{
securedMapping.shortForm("http://example.com/foo/bar");
if (!securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Read,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testWithDefaultMappings()
{
PrefixMapping pm = new PrefixMappingImpl();
pm.setNsPrefix( "example", "http://example.com");
try
{
// make sure that it must update
securedMapping.withDefaultMappings(pm);
if (!securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail("Should have thrown AccessDenied Exception");
}
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
@Test
public void testWithDefaultMappingsNoAdd()
{
PrefixMapping pm = new PrefixMappingImpl();
try
{
// make sure that it must update
securedMapping.withDefaultMappings(pm);
// if (!securityEvaluator.evaluate(Action.Update,
// securedMapping.getModelNode()))
// {
// Assert.fail("Should have thrown AccessDenied Exception");
// }
}
catch (final AccessDeniedException e)
{
if (securityEvaluator.evaluate(principal, Action.Update,
securedMapping.getModelNode()))
{
Assert.fail(String
.format("Should not have thrown AccessDenied Exception: %s - %s",
e, e.getTriple()));
}
}
}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.spoon.job;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
import java.util.ResourceBundle;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.ToolBar;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.logging.JobEntryLogTable;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogStatus;
import org.pentaho.di.core.logging.LogTableField;
import org.pentaho.di.core.logging.LogTableInterface;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.di.ui.spoon.XulSpoonResourceBundle;
import org.pentaho.di.ui.spoon.XulSpoonSettingsManager;
import org.pentaho.di.ui.spoon.delegates.SpoonDelegate;
import org.pentaho.di.ui.xul.KettleXulLoader;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.XulLoader;
import org.pentaho.ui.xul.components.XulToolbarbutton;
import org.pentaho.ui.xul.containers.XulToolbar;
import org.pentaho.ui.xul.impl.XulEventHandler;
public class JobHistoryDelegate extends SpoonDelegate implements XulEventHandler {
  private static Class<?> PKG = JobGraph.class; // for i18n purposes, needed by Translator2!!
  /** XUL definition of the history toolbar. */
  private static final String XUL_FILE_TRANS_GRID_TOOLBAR = "ui/job-history-toolbar.xul";
  /** The job graph whose execution history this delegate displays. */
  private JobGraph jobGraph;
  /** Tab item hosting the history view inside the job graph's extra view. */
  private CTabItem jobHistoryTab;
  private XulToolbar toolbar;
  /** Composite holding the toolbar and the per-log-table tab folder. */
  private Composite jobHistoryComposite;
  private JobMeta jobMeta;
  /** Nested tab folder with one tab per log table of the job. */
  private CTabFolder tabFolder;
  // Toolbar buttons toggled while a history query is in progress.
  private XulToolbarbutton refreshButton;
  private XulToolbarbutton fetchNextBatchButton;
  private XulToolbarbutton fetchAllButton;
  /** One model/tab per log table, index-aligned with jobMeta.getLogTables(). */
  private JobHistoryLogTab[] models;
  /** How much history to fetch: the first batch, one more batch, or everything. */
  private enum Mode {
    INITIAL, NEXT_BATCH, ALL
  }
  /**
   * Creates the history delegate for the given job graph.
   *
   * @param spoon
   *          Spoon instance
   * @param jobGraph
   *          JobGraph instance
   */
  public JobHistoryDelegate( Spoon spoon, JobGraph jobGraph ) {
    super( spoon );
    this.jobGraph = jobGraph;
  }
  /**
   * Shows (or creates) the job-history tab in the job graph's extra view and,
   * unless disabled in the options, triggers an initial history load for every
   * log table.
   */
  public void addJobHistory() {
    // First, see if we need to add the extra view...
    //
    if ( jobGraph.extraViewComposite == null || jobGraph.extraViewComposite.isDisposed() ) {
      jobGraph.addExtraView();
    } else {
      if ( jobHistoryTab != null && !jobHistoryTab.isDisposed() ) {
        // just set this one active and get out...
        //
        jobGraph.extraViewTabFolder.setSelection( jobHistoryTab );
        return;
      }
    }
    jobMeta = jobGraph.getManagedObject();
    // Add a tab to display the logging history tables...
    //
    jobHistoryTab = new CTabItem( jobGraph.extraViewTabFolder, SWT.NONE );
    jobHistoryTab.setImage( GUIResource.getInstance().getImageShowHistory() );
    jobHistoryTab.setText( BaseMessages.getString( PKG, "Spoon.TransGraph.HistoryTab.Name" ) );
    // Create a composite, slam everything on there like it was in the history tab.
    //
    jobHistoryComposite = new Composite( jobGraph.extraViewTabFolder, SWT.NONE );
    jobHistoryComposite.setLayout( new FormLayout() );
    spoon.props.setLook( jobHistoryComposite );
    // Toolbar must exist before addLogTableTabs(): the tab folder is anchored below it.
    addToolBar();
    Control toolbarControl = (Control) toolbar.getManagedObject();
    toolbarControl.setLayoutData( new FormData() );
    FormData fd = new FormData();
    fd.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fd.top = new FormAttachment( 0, 0 );
    fd.right = new FormAttachment( 100, 0 );
    toolbarControl.setLayoutData( fd );
    toolbarControl.setParent( jobHistoryComposite );
    addLogTableTabs();
    tabFolder.setSelection( 0 );
    // Re-enable the "fetch next batch" button whenever the user switches log-table tabs.
    tabFolder.addSelectionListener( new SelectionListener() {
      @Override
      public void widgetSelected( SelectionEvent arg0 ) {
        setMoreRows( true );
      }
      @Override
      public void widgetDefaultSelected( SelectionEvent arg0 ) {
      }
    } );
    jobHistoryComposite.pack();
    jobHistoryTab.setControl( jobHistoryComposite );
    jobGraph.extraViewTabFolder.setSelection( jobHistoryTab );
    if ( !Props.getInstance().disableInitialExecutionHistory() ) {
      refreshAllHistory();
    }
  }
  /**
   * Creates the nested tab folder with one JobHistoryLogTab per log table of
   * the current job, anchored below the toolbar.
   */
  private void addLogTableTabs() {
    // Create a nested tab folder in the tab item, on the history composite...
    //
    tabFolder = new CTabFolder( jobHistoryComposite, SWT.MULTI );
    spoon.props.setLook( tabFolder, Props.WIDGET_STYLE_TAB );
    FormData fdTabFolder = new FormData();
    fdTabFolder.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fdTabFolder.top = new FormAttachment( (Control) toolbar.getManagedObject(), 0 );
    fdTabFolder.right = new FormAttachment( 100, 0 );
    fdTabFolder.bottom = new FormAttachment( 100, 0 );
    tabFolder.setLayoutData( fdTabFolder );
    // One tab (and model) per log table, kept index-aligned with jobMeta.getLogTables().
    models = new JobHistoryLogTab[jobMeta.getLogTables().size()];
    for ( int i = 0; i < models.length; i++ ) {
      models[i] = new JobHistoryLogTab( tabFolder, jobMeta.getLogTables().get( i ) );
    }
  }
  /**
   * Loads the history toolbar from its XUL definition, registers this delegate
   * as event handler, and caches the buttons that are toggled while queries run.
   */
  private void addToolBar() {
    try {
      XulLoader loader = new KettleXulLoader();
      loader.setSettingsManager( XulSpoonSettingsManager.getInstance() );
      ResourceBundle bundle = new XulSpoonResourceBundle( Spoon.class );
      XulDomContainer xulDomContainer = loader.loadXul( XUL_FILE_TRANS_GRID_TOOLBAR, bundle );
      xulDomContainer.addEventHandler( this );
      toolbar = (XulToolbar) xulDomContainer.getDocumentRoot().getElementById( "nav-toolbar" );
      refreshButton = (XulToolbarbutton) xulDomContainer.getDocumentRoot().getElementById( "refresh-history" );
      fetchNextBatchButton =
        (XulToolbarbutton) xulDomContainer.getDocumentRoot().getElementById( "fetch-next-batch-history" );
      fetchAllButton = (XulToolbarbutton) xulDomContainer.getDocumentRoot().getElementById( "fetch-all-history" );
      ToolBar swtToolBar = (ToolBar) toolbar.getManagedObject();
      swtToolBar.layout( true, true );
    } catch ( Throwable t ) {
      // Catches Throwable (not Exception): any XUL loading failure is logged
      // and surfaced as an error dialog instead of propagating.
      log.logError( Const.getStackTracker( t ) );
      new ErrorDialog( jobHistoryComposite.getShell(),
        BaseMessages.getString( PKG, "Spoon.Exception.ErrorReadingXULFile.Title" ),
        BaseMessages.getString( PKG, "Spoon.Exception.ErrorReadingXULFile.Message", XUL_FILE_TRANS_GRID_TOOLBAR ),
        new Exception( t ) );
    }
  }
  /**
   * Public for XUL: clears the log table behind the currently selected tab.
   */
  public void clearLogTable() {
    clearLogTable( tabFolder.getSelectionIndex() );
  }
  /**
   * User requested to clear the log table.<br>
   * Better ask confirmation
   *
   * @param index index of the log-table tab whose table should be truncated
   */
  private void clearLogTable( int index ) {
    JobHistoryLogTab model = models[index];
    LogTableInterface logTable = model.logTable;
    // Nothing to clear when no log table is configured.
    if ( logTable.isDefined() ) {
      String schemaTable = logTable.getQuotedSchemaTableCombination();
      DatabaseMeta databaseMeta = logTable.getDatabaseMeta();
      MessageBox mb = new MessageBox( jobGraph.getShell(), SWT.YES | SWT.NO | SWT.ICON_QUESTION );
      //CHECKSTYLE:LineLength:OFF
      mb.setMessage( BaseMessages.getString( PKG, "JobGraph.Dialog.AreYouSureYouWantToRemoveAllLogEntries.Message", schemaTable ) );
      mb.setText( BaseMessages.getString( PKG, "JobGraph.Dialog.AreYouSureYouWantToRemoveAllLogEntries.Title" ) );
      if ( mb.open() == SWT.YES ) {
        Database database = new Database( loggingObject, databaseMeta );
        try {
          database.connect();
          database.truncateTable( schemaTable );
        } catch ( Exception e ) {
          new ErrorDialog( jobGraph.getShell(),
            BaseMessages.getString( PKG, "JobGraph.Dialog.ErrorClearningLoggingTable.Title" ),
            BaseMessages.getString( PKG, "JobGraph.Dialog.AreYouSureYouWantToRemoveAllLogEntries.Message" ), e );
        } finally {
          database.disconnect();
          // Refresh (and blank the log text) even when truncation failed, so
          // the view reflects whatever state the table is actually in.
          refreshHistory();
          if ( model.logDisplayText != null ) {
            model.logDisplayText.setText( "" );
          }
        }
      }
    }
  }
  /**
   * Public for XUL: gathers per-job-entry results for the selected history row
   * (from the job entry log table, when defined) and prints them. The actual
   * replay is unfinished — see the NOTE(review) comments below.
   */
  public void replayHistory() {
    JobHistoryLogTab model = models[tabFolder.getSelectionIndex()];
    int idx = model.logDisplayTableView.getSelectionIndex();
    if ( idx >= 0 ) {
      String[] fields = model.logDisplayTableView.getItem( idx );
      // Column 0 of the history grid holds the batch id; -1 when unparsable.
      int batchId = Const.toInt( fields[0], -1 );
      // String dateString = fields[13];
      // Date replayDate = XMLHandler.stringToDate(dateString);
      List<JobEntryCopyResult> results = null;
      boolean gotResults = false;
      // We check in the Job Entry Logging to see the results from all the various job entries that were executed.
      //
      JobEntryLogTable jeLogTable = jobMeta.getJobEntryLogTable();
      if ( jeLogTable.isDefined() ) {
        try {
          DatabaseMeta databaseMeta = jobMeta.getJobEntryLogTable().getDatabaseMeta();
          Database db = new Database( Spoon.loggingObject, databaseMeta );
          try {
            db.connect();
            String schemaTable =
              databaseMeta.getQuotedSchemaTableCombination( jeLogTable.getActualSchemaName(), jeLogTable
                .getActualTableName() );
            // Select all job-entry log rows belonging to the chosen batch.
            String sql =
              "SELECT * FROM "
                + schemaTable + " WHERE " + databaseMeta.quoteField( jeLogTable.getKeyField().getFieldName() )
                + " = " + batchId;
            List<Object[]> rows = db.getRows( sql, 0 );
            RowMetaInterface rowMeta = db.getReturnRowMeta();
            results = new ArrayList<JobEntryCopyResult>();
            // Resolve the column indexes for the fields we need from the log table layout.
            int jobEntryNameIndex =
              rowMeta.indexOfValue( jeLogTable
                .findField( JobEntryLogTable.ID.JOBENTRYNAME.toString() ).getFieldName() );
            int jobEntryResultIndex =
              rowMeta
                .indexOfValue( jeLogTable.findField( JobEntryLogTable.ID.RESULT.toString() ).getFieldName() );
            int jobEntryErrorsIndex =
              rowMeta
                .indexOfValue( jeLogTable.findField( JobEntryLogTable.ID.ERRORS.toString() ).getFieldName() );
            LogTableField copyNrField = jeLogTable.findField( JobEntryLogTable.ID.COPY_NR.toString() );
            // The copy-nr column is optional: -1 when absent or disabled.
            int jobEntryCopyNrIndex =
              copyNrField == null ? -1 : ( copyNrField.isEnabled() ? rowMeta.indexOfValue( copyNrField
                .getFieldName() ) : -1 );
            for ( Object[] row : rows ) {
              String jobEntryName = rowMeta.getString( row, jobEntryNameIndex );
              boolean jobEntryResult = rowMeta.getBoolean( row, jobEntryResultIndex );
              long errors = rowMeta.getInteger( row, jobEntryErrorsIndex );
              long copyNr = jobEntryCopyNrIndex < 0 ? 0 : rowMeta.getInteger( row, jobEntryCopyNrIndex );
              JobEntryCopyResult result =
                new JobEntryCopyResult( jobEntryName, jobEntryResult, errors, (int) copyNr );
              results.add( result );
            }
          } finally {
            db.disconnect();
          }
          gotResults = true;
        } catch ( Exception e ) {
          new ErrorDialog(
            spoon.getShell(), BaseMessages.getString(
              PKG, "JobHistoryDelegate.ReplayHistory.UnexpectedErrorReadingJobEntryHistory.Text" ),
            BaseMessages.getString(
              PKG, "JobHistoryDelegate.ReplayHistory.UnexpectedErrorReadingJobEntryHistory.Message" ), e );
        }
      } else {
        MessageBox box = new MessageBox( spoon.getShell(), SWT.ICON_ERROR | SWT.OK );
        box.setText( BaseMessages.getString( PKG, "JobHistoryDelegate.ReplayHistory.NoJobEntryTable.Text" ) );
        box.setMessage( BaseMessages.getString( PKG, "JobHistoryDelegate.ReplayHistory.NoJobEntryTable.Message" ) );
        box.open();
      }
      // spoon.executeJob(jobGraph.getManagedObject(), true, false, replayDate, false);
      if ( !gotResults ) {
        // For some reason we have no execution results, simply list all the job entries so the user can choose...
        //
        results = new ArrayList<JobEntryCopyResult>();
        for ( JobEntryCopy copy : jobMeta.getJobCopies() ) {
          results.add( new JobEntryCopyResult( copy.getName(), null, null, copy.getNr() ) );
        }
      }
      // OK, now that we have our list of job entries, let's first try to find the first job-entry that had a false
      // result or where errors>0
      // If the error was handled, we look further for a more appropriate target.
      //
      // NOTE(review): neither 'selection' nor 'start' is ever reassigned inside
      // this loop, so whenever the start entry has next entries (nrNext > 0) the
      // loop never terminates. This looks like unfinished replay logic (see the
      // "TODO: replay" below) — confirm before relying on this code path.
      JobEntryCopy selection = null;
      boolean more = true;
      JobEntryCopy start = jobMeta.findStart();
      while ( selection == null && more ) {
        int nrNext = jobMeta.findNrNextJobEntries( start );
        more = nrNext > 0;
        for ( int n = 0; n < nrNext; n++ ) {
          JobEntryCopy copy = jobMeta.findNextJobEntry( start, n );
          // See if we can find a result for this job entry...
          //
          JobEntryCopyResult result = JobEntryCopyResult.findResult( results, copy );
          if ( result != null ) {
            System.out.println( "TODO: replay" );
            // Do nothing???
          }
        }
      }
      // Present all job entries to the user.
      // NOTE(review): debugging output on stdout; presumably a placeholder for a
      // real selection dialog.
      for ( JobEntryCopyResult result : results ) {
        System.out.println( "Job entry copy result -- Name="
          + result.getJobEntryName() + ", result=" + result.getResult() + ", errors=" + result.getErrors()
          + ", nr=" + result.getCopyNr() );
      }
    }
  }
  /**
   * Public for XUL: reloads the first batch of history rows for the currently
   * selected log-table tab.
   */
  public void refreshHistory() {
    refreshHistory( tabFolder.getSelectionIndex(), Mode.INITIAL );
  }
private void refreshAllHistory() {
for ( int i = 0; i < models.length; i++ ) {
refreshHistory( i, Mode.INITIAL );
}
}
  /**
   * Background thread refreshes history data
   *
   * @param index index of the log-table tab to refresh
   * @param fetchMode how much to fetch (initial batch, next batch, or all)
   */
  private void refreshHistory( final int index, final Mode fetchMode ) {
    new Thread( new Runnable() {
      public void run() {
        // do gui stuff here: disable the toolbar buttons and re-read the log
        // table definition on the SWT thread before querying.
        spoon.getDisplay().syncExec( new Runnable() {
          public void run() {
            setQueryInProgress( true );
            JobHistoryLogTab model = models[index];
            model.setLogTable( jobMeta.getLogTables().get( index ) );
          }
        } );
        // The database work runs on this background thread, off the SWT thread.
        final boolean moreRows = getHistoryData( index, fetchMode );
        // do gui stuff here: publish the results and re-enable the buttons.
        spoon.getDisplay().syncExec( new Runnable() {
          public void run() {
            displayHistoryData( index );
            setQueryInProgress( false );
            setMoreRows( moreRows );
          }
        } );
      }
    } ).start();
  }
  /** Enables the "fetch next batch" button only while more rows may be available. */
  private void setMoreRows( final boolean moreRows ) {
    fetchNextBatchButton.setDisabled( !moreRows );
  }
  /**
   * Don't allow more queries until this one finishes.
   *
   * @param inProgress
   *          is query in progress
   */
  private void setQueryInProgress( final boolean inProgress ) {
    refreshButton.setDisabled( inProgress );
    fetchNextBatchButton.setDisabled( inProgress );
    fetchAllButton.setDisabled( inProgress );
  }
private boolean getHistoryData( final int index, final Mode mode ) {
final int BATCH_SIZE = Props.getInstance().getLinesInHistoryFetchSize();
boolean moreRows = false;
JobHistoryLogTab model = models[index];
LogTableInterface logTable = model.logTable;
// See if there is a job loaded that has a connection table specified.
//
if ( jobMeta != null && !Const.isEmpty( jobMeta.getName() ) && logTable.isDefined() ) {
Database database = null;
try {
DatabaseMeta logConnection = logTable.getDatabaseMeta();
// open a connection
database = new Database( loggingObject, logConnection );
database.shareVariablesWith( jobMeta );
database.connect();
int queryLimit = 0;
switch ( mode ) {
case ALL:
model.batchCount = 0;
queryLimit = Props.getInstance().getMaxNrLinesInHistory();
break;
case NEXT_BATCH:
model.batchCount++;
queryLimit = BATCH_SIZE * model.batchCount;
break;
case INITIAL:
model.batchCount = 1;
queryLimit = BATCH_SIZE;
break;
default:
break;
}
database.setQueryLimit( queryLimit );
// First, we get the information out of the database table...
//
String schemaTable = logTable.getQuotedSchemaTableCombination();
StringBuilder sql = new StringBuilder( "SELECT " );
boolean first = true;
for ( LogTableField field : logTable.getFields() ) {
if ( field.isEnabled() && field.isVisible() ) {
if ( !first ) {
sql.append( ", " );
}
first = false;
sql.append( logConnection.quoteField( field.getFieldName() ) );
}
}
sql.append( " FROM " ).append( schemaTable );
RowMetaAndData params = new RowMetaAndData();
// Do we need to limit the amount of data?
//
LogTableField nameField = logTable.getNameField();
LogTableField keyField = logTable.getKeyField();
if ( nameField != null ) {
sql
.append( " WHERE " ).append( logConnection.quoteField( nameField.getFieldName() ) ).append(
" LIKE ?" );
params
.addValue( new ValueMeta( "transname_literal", ValueMetaInterface.TYPE_STRING ), jobMeta.getName() );
}
if ( keyField != null && keyField.isEnabled() ) {
sql
.append( " ORDER BY " ).append( logConnection.quoteField( keyField.getFieldName() ) ).append(
" DESC" );
}
ResultSet resultSet = database.openQuery( sql.toString(), params.getRowMeta(), params.getData() );
List<Object[]> rows = new ArrayList<Object[]>();
Object[] rowData = database.getRow( resultSet );
int rowsFetched = 1;
while ( rowData != null ) {
rows.add( rowData );
rowData = database.getRow( resultSet );
rowsFetched++;
}
if ( rowsFetched >= queryLimit ) {
moreRows = true;
}
database.closeQuery( resultSet );
models[index].rows = rows;
} catch ( Exception e ) {
LogChannel.GENERAL.logError( "Unable to get rows of data from logging table " + models[index].logTable, e );
models[index].rows = new ArrayList<Object[]>();
} finally {
if ( database != null ) {
database.disconnect();
}
}
} else {
models[index].rows = new ArrayList<Object[]>();
}
return moreRows;
}
private void displayHistoryData( final int index ) {
JobHistoryLogTab model = models[index];
ColumnInfo[] colinf = model.logDisplayTableView.getColumns();
// Now, we're going to display the data in the table view
//
if ( model.logDisplayTableView == null || model.logDisplayTableView.isDisposed() ) {
return;
}
int selectionIndex = model.logDisplayTableView.getSelectionIndex();
model.logDisplayTableView.table.clearAll();
List<Object[]> rows = model.rows;
if ( rows != null && rows.size() > 0 ) {
// OK, now that we have a series of rows, we can add them to the table view...
//
for ( Object[] rowData : rows ) {
TableItem item = new TableItem( model.logDisplayTableView.table, SWT.NONE );
for ( int c = 0; c < colinf.length; c++ ) {
ColumnInfo column = colinf[c];
ValueMetaInterface valueMeta = column.getValueMeta();
String string = null;
try {
string = valueMeta.getString( rowData[c] );
} catch ( KettleValueException e ) {
log.logError( "history data conversion issue", e );
}
item.setText( c + 1, Const.NVL( string, "" ) );
}
// Add some color
//
Long errors = null;
LogStatus status = null;
LogTableField errorsField = model.logTable.getErrorsField();
if ( errorsField != null ) {
int index1 = model.logTableFields.indexOf( errorsField );
try {
errors = colinf[index1].getValueMeta().getInteger( rowData[index1] );
} catch ( KettleValueException e ) {
log.logError( "history data conversion issue", e );
}
}
LogTableField statusField = model.logTable.getStatusField();
if ( statusField != null ) {
int index1 = model.logTableFields.indexOf( statusField );
String statusString = null;
try {
statusString = colinf[index1].getValueMeta().getString( rowData[index1] );
} catch ( KettleValueException e ) {
log.logError( "history data conversion issue", e );
}
if ( statusString != null ) {
status = LogStatus.findStatus( statusString );
}
}
if ( errors != null && errors > 0L ) {
item.setBackground( GUIResource.getInstance().getColorRed() );
} else if ( status != null && LogStatus.STOP.equals( status ) ) {
item.setBackground( GUIResource.getInstance().getColorYellow() );
}
}
model.logDisplayTableView.removeEmptyRows();
model.logDisplayTableView.setRowNums();
model.logDisplayTableView.optWidth( true );
} else {
model.logDisplayTableView.clearAll( false );
// new TableItem(wFields.get(tabIndex).table, SWT.NONE); // Give it an item to prevent errors on various
// platforms.
}
if ( selectionIndex >= 0 && selectionIndex < model.logDisplayTableView.getItemCount() ) {
model.logDisplayTableView.table.select( selectionIndex );
showLogEntry();
}
}
private void showLogEntry() {
JobHistoryLogTab model = models[tabFolder.getSelectionIndex()];
Text text = model.logDisplayText;
if ( text == null || text.isDisposed() ) {
return;
}
List<Object[]> list = model.rows;
if ( list == null || list.size() == 0 ) {
String message;
if ( model.logTable.isDefined() ) {
message = BaseMessages.getString( PKG, "JobHistory.PleaseRefresh.Message" );
} else {
message = BaseMessages.getString( PKG, "JobHistory.HistoryConfiguration.Message" );
}
text.setText( message );
return;
}
// grab the selected line in the table:
int nr = model.logDisplayTableView.table.getSelectionIndex();
if ( nr >= 0 && nr < list.size() ) {
// OK, grab this one from the buffer...
Object[] row = list.get( nr );
// What is the name of the log field?
//
LogTableField logField = model.logTable.getLogField();
if ( logField != null ) {
int index = model.logTableFields.indexOf( logField );
if ( index >= 0 ) {
String logText = row[index].toString();
text.setText( Const.NVL( logText, "" ) );
text.setSelection( text.getText().length() );
text.showSelection();
} else {
text.setText( BaseMessages.getString( PKG, "JobHistory.HistoryConfiguration.NoLoggingFieldDefined" ) );
}
}
}
}
  /**
   * @return the jobHistoryTab (null until addJobHistory() has created it)
   */
  public CTabItem getJobHistoryTab() {
    return jobHistoryTab;
  }
/*
* (non-Javadoc)
*
* @see org.pentaho.ui.xul.impl.XulEventHandler#getData()
*/
public Object getData() {
return null;
}
/*
* (non-Javadoc)
*
* @see org.pentaho.ui.xul.impl.XulEventHandler#getName()
*/
public String getName() {
return "history";
}
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.ui.xul.impl.XulEventHandler#getXulDomContainer()
 */
public XulDomContainer getXulDomContainer() {
  // Not backed by a XUL DOM container.
  return null;
}
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.ui.xul.impl.XulEventHandler#setData(java.lang.Object)
 */
public void setData( Object data ) {
  // Intentionally a no-op: this handler keeps no external data object.
}
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.ui.xul.impl.XulEventHandler#setName(java.lang.String)
 */
public void setName( String name ) {
  // Intentionally a no-op: the handler name is fixed (see getName()).
}
/*
 * (non-Javadoc)
 *
 * @see org.pentaho.ui.xul.impl.XulEventHandler#setXulDomContainer(org.pentaho.ui.xul.XulDomContainer)
 */
public void setXulDomContainer( XulDomContainer xulDomContainer ) {
  // Intentionally a no-op: this handler does not use a XUL DOM container.
}
/**
 * XUL event: fetches the next batch of records for the current log table.
 */
public void fetchNextBatch() {
  refreshHistory( tabFolder.getSelectionIndex(), Mode.NEXT_BATCH );
}
/**
 * XUL event: loads all records for the current log table.
 */
public void fetchAll() {
  refreshHistory( tabFolder.getSelectionIndex(), Mode.ALL );
}
/**
 * One tab in the job history view: displays the rows of a single log table
 * (job log, channel log, ...) in a table view, optionally with a read-only
 * text area underneath showing the raw log text of the selected row.
 */
private class JobHistoryLogTab extends CTabItem {
  // Enabled AND visible fields of the log table, in display order.
  private List<LogTableField> logTableFields = new ArrayList<LogTableField>();
  // History rows currently loaded for this tab.
  private List<Object[]> rows;
  private LogTableInterface logTable;
  // Widget for the raw log text; null when the log table has no log field.
  private Text logDisplayText;
  private TableView logDisplayTableView;
  /**
   * Number of batches fetched so far. When the next batch is fetched, the number of rows displayed will be the max of
   * batchCount * BATCH_SIZE and resultSet row count.
   */
  public int batchCount;
  /**
   * Builds the tab UI: a vertical sash with the history table on top and,
   * if the log table defines a log field, the log text area below it.
   */
  public JobHistoryLogTab( CTabFolder tabFolder, LogTableInterface logTable ) {
    super( tabFolder, SWT.NONE );
    setLogTable( logTable );
    setText( logTable.getLogTableType() );
    Composite logTableComposite = new Composite( tabFolder, SWT.NONE );
    logTableComposite.setLayout( new FormLayout() );
    spoon.props.setLook( logTableComposite );
    setControl( logTableComposite );
    SashForm sash = new SashForm( logTableComposite, SWT.VERTICAL );
    sash.setLayout( new FillLayout() );
    // Make the sash fill the whole tab area.
    FormData fdSash = new FormData();
    fdSash.left = new FormAttachment( 0, 0 ); // First one in the left top corner
    fdSash.top = new FormAttachment( 0, 0 );
    fdSash.right = new FormAttachment( 100, 0 );
    fdSash.bottom = new FormAttachment( 100, 0 );
    sash.setLayoutData( fdSash );
    logDisplayTableView = createJobLogTableView( sash );
    if ( logTable.getLogField() != null ) {
      // Log field present: split 70% table / 30% log text.
      logDisplayText = new Text( sash, SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL | SWT.READ_ONLY );
      spoon.props.setLook( logDisplayText );
      logDisplayText.setVisible( true );
      FormData fdText = new FormData();
      fdText.left = new FormAttachment( 0, 0 );
      fdText.top = new FormAttachment( 0, 0 );
      fdText.right = new FormAttachment( 100, 0 );
      fdText.bottom = new FormAttachment( 100, 0 );
      logDisplayText.setLayoutData( fdText );
      sash.setWeights( new int[] { 70, 30, } );
    } else {
      // No log field: the table gets all the space.
      logDisplayText = null;
      sash.setWeights( new int[] { 100, } );
    }
  }
  /**
   * Sets the log table for this tab, recomputes the visible field list and
   * rebuilds the table view (the column set depends on the log table).
   */
  public void setLogTable( LogTableInterface logTable ) {
    this.logTable = logTable;
    logTableFields.clear();
    for ( LogTableField field : logTable.getFields() ) {
      if ( field.isEnabled() && field.isVisible() ) {
        logTableFields.add( field );
      }
    }
    // Recreate table view as log table has changed
    if ( logDisplayTableView != null ) {
      Composite tableParent = logDisplayTableView.getParent();
      TableView newTable = createJobLogTableView( tableParent );
      // Keep the new table at the old table's z-order position before
      // disposing the old one, so the layout stays intact.
      newTable.moveAbove( logDisplayTableView );
      logDisplayTableView.dispose();
      tableParent.layout( false );
      logDisplayTableView = newTable;
    }
  }
  /**
   * Creates a read-only table view with one column per visible non-log field,
   * applying per-type alignment and conversion masks for rendering.
   */
  private TableView createJobLogTableView( Composite parent ) {
    List<ColumnInfo> columnList = new ArrayList<ColumnInfo>();
    for ( LogTableField field : logTableFields ) {
      if ( !field.isLogField() ) {
        ColumnInfo column = new ColumnInfo( field.getName(), ColumnInfo.COLUMN_TYPE_TEXT, false, true );
        int valueType = field.getDataType();
        String conversionMask = null;
        switch ( field.getDataType() ) {
          case ValueMetaInterface.TYPE_INTEGER:
            conversionMask = "###,###,##0";
            column.setAllignement( SWT.RIGHT );
            break;
          case ValueMetaInterface.TYPE_DATE:
            conversionMask = "yyyy/MM/dd HH:mm:ss";
            column.setAllignement( SWT.CENTER );
            break;
          case ValueMetaInterface.TYPE_NUMBER:
            conversionMask = " ###,###,##0.00;-###,###,##0.00";
            column.setAllignement( SWT.RIGHT );
            break;
          case ValueMetaInterface.TYPE_STRING:
            column.setAllignement( SWT.LEFT );
            break;
          case ValueMetaInterface.TYPE_BOOLEAN:
            DatabaseMeta databaseMeta = logTable.getDatabaseMeta();
            if ( databaseMeta != null ) {
              if ( !databaseMeta.supportsBooleanDataType() ) {
                // Boolean gets converted to String!
                //
                valueType = ValueMetaInterface.TYPE_STRING;
              }
            }
            break;
          default:
            break;
        }
        ValueMetaInterface valueMeta = new ValueMeta( field.getFieldName(), valueType, field.getLength(), -1 );
        if ( conversionMask != null ) {
          valueMeta.setConversionMask( conversionMask );
        }
        column.setValueMeta( valueMeta );
        columnList.add( column );
      }
    }
    TableView tableView = new TableView( jobMeta, parent, SWT.BORDER | SWT.FULL_SELECTION | SWT.SINGLE,
      columnList.toArray( new ColumnInfo[columnList.size()] ), 1,
      true, // readonly!
      null,
      spoon.props );
    // Show the log text of the selected row in the text area below the table.
    tableView.table.addSelectionListener( new SelectionAdapter() {
      public void widgetSelected( SelectionEvent arg0 ) {
        showLogEntry();
      }
    } );
    return tableView;
  }
}
}
| |
package com.brotherjing.danmakubay.activities;
import android.os.Handler;
import android.os.Message;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.FrameLayout;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.SeekBar;
import com.brotherjing.danmakubay.R;
import com.brotherjing.danmakubay.api.API_SPF;
import com.brotherjing.danmakubay.utils.DataUtil;
import com.brotherjing.danmakubay.utils.ViewUtil;
import com.brotherjing.simpledanmakuview.Danmaku;
import com.brotherjing.simpledanmakuview.DanmakuView;
import java.lang.ref.WeakReference;
/**
 * Settings screen for the danmaku (scrolling comment) overlay. Lets the user
 * tune speed, text size, display height, display area and background, with a
 * live preview driven by a periodic handler message. Settings are persisted
 * to shared preferences when the toolbar title is tapped.
 */
public class DanmakuSettingActivity extends AppCompatActivity {
    /** Handler message: emit one preview danmaku and reschedule. */
    private static final int MESSAGE_SEND = 1;
    /** Handler message posted on destroy (no-op marker in handleMessage). */
    private static final int MESSAGE_FINISH = 2;

    SeekBar sb, sb_text_size, sb_height;
    RadioGroup rg, rg_speed;
    RadioButton rb1, rb2, rb01, rb02, rb03;
    CheckBox cb;
    FrameLayout ll;
    DanmakuView preview;
    View background;

    // Current (unsaved) setting values, mirrored into the preview widgets.
    int speed, speed_level, text_size, danmaku_height;
    boolean all_app, show_bg;
    Danmaku.DanmakuSpeed danmakuSpeed;

    private final MyHandler handler = new MyHandler(this);

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_danmaku_setting);
        initToolbar();
        initView();
        initData();
        refreshView();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Start the periodic preview loop.
        handler.sendEmptyMessageDelayed(MESSAGE_SEND, 500);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Stop the preview loop so the handler no longer fires.
        handler.removeMessages(MESSAGE_SEND);
        handler.sendEmptyMessage(MESSAGE_FINISH);
    }

    /** Pushes the current setting values into all widgets. */
    private void refreshView() {
        if (all_app) rg.check(rb2.getId());
        else rg.check(rb1.getId());
        switch (speed_level) {
            case API_SPF.SPEED_LEVEL_SLOW: rg_speed.check(rb01.getId()); break;
            case API_SPF.SPEED_LEVEL_NORMAL: rg_speed.check(rb02.getId()); break;
            case API_SPF.SPEED_LEVEL_FAST: rg_speed.check(rb03.getId()); break;
            default: break;
        }
        cb.setChecked(show_bg);
        sb.setProgress(speed);
        sb_text_size.setProgress(text_size);
        ViewGroup.LayoutParams params = ll.getLayoutParams();
        params.height = ViewUtil.dp2px(DanmakuSettingActivity.this, 20 + danmaku_height);
        ll.setLayoutParams(params);
        ll.requestLayout();
    }

    /** Loads the persisted settings (with defaults) from shared preferences. */
    private void initData() {
        speed = DataUtil.getInt(API_SPF.SPF_SETTING, API_SPF.ITEM_DANMAKU_SPEED, 50);
        speed_level = DataUtil.getInt(API_SPF.SPF_SETTING, API_SPF.ITEM_DANMAKU_SPEED_LEVEL, API_SPF.SPEED_LEVEL_NORMAL);
        show_bg = DataUtil.getBoolean(API_SPF.SPF_SETTING, API_SPF.ITEM_SHOW_BG, true);
        all_app = DataUtil.getBoolean(API_SPF.SPF_SETTING, API_SPF.ITEM_DISPLAY_AREA, true);
        text_size = DataUtil.getInt(API_SPF.SPF_SETTING, API_SPF.ITEM_TEXT_SIZE, 50);
        danmaku_height = DataUtil.getInt(API_SPF.SPF_SETTING, API_SPF.ITEM_DANMAKU_HEIGHT, 180);
    }

    /** Binds widgets and wires up all change listeners. */
    private void initView() {
        sb = f(R.id.sb_danmaku_speed);
        sb_text_size = f(R.id.sb_text_size);
        sb_height = f(R.id.sb_danmaku_height);
        rg = f(R.id.rg_display_area);
        rg_speed = f(R.id.rg_danmaku_speed);
        cb = f(R.id.cb_show_bg);
        rb1 = f(R.id.rb1);
        rb2 = f(R.id.rb2);
        rb01 = f(R.id.rb01);
        rb02 = f(R.id.rb02);
        rb03 = f(R.id.rb03);
        ll = f(R.id.ll);
        preview = f(R.id.danmaku_view);
        background = f(R.id.background);
        preview.setMode(DanmakuView.MODE_NO_OVERDRAW);
        sb.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                speed = progress;
                // Higher speed => fewer milliseconds per frame.
                preview.setMSPF(20 - (speed - 50) / 10);
            }
            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }
            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });
        sb_text_size.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                text_size = progress;
                // Progress 50 maps to the default 18sp text size.
                preview.setTextSize(18 + (progress - 50) / 10);
            }
            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }
            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });
        sb_height.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                danmaku_height = progress;
                ViewGroup.LayoutParams params = ll.getLayoutParams();
                params.height = ViewUtil.dp2px(DanmakuSettingActivity.this, 20 + danmaku_height);
                ll.setLayoutParams(params);
                ll.requestLayout();
            }
            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }
            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });
        rg.setOnCheckedChangeListener((group, checkedId) -> {
            if (checkedId == rb1.getId()) all_app = false;
            else if (checkedId == rb2.getId()) all_app = true;
        });
        rg_speed.setOnCheckedChangeListener((group, checkedId) -> {
            if (checkedId == rb01.getId()) {
                speed_level = API_SPF.SPEED_LEVEL_SLOW;
                danmakuSpeed = Danmaku.DanmakuSpeed.SLOW;
            } else if (checkedId == rb02.getId()) {
                speed_level = API_SPF.SPEED_LEVEL_NORMAL;
                danmakuSpeed = Danmaku.DanmakuSpeed.NORMAL;
            } else if (checkedId == rb03.getId()) {
                speed_level = API_SPF.SPEED_LEVEL_FAST;
                danmakuSpeed = Danmaku.DanmakuSpeed.FAST;
            }
        });
        cb.setOnCheckedChangeListener((buttonView, isChecked) -> {
            if (isChecked) {
                show_bg = true;
                background.setVisibility(View.VISIBLE);
            } else {
                show_bg = false;
                background.setVisibility(View.GONE);
            }
        });
    }

    /**
     * Sets up the toolbar; tapping the title persists all settings and
     * closes the activity.
     */
    private void initToolbar() {
        Toolbar toolbar = f(R.id.toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        toolbar.setTitle(R.string.set_danmaku);
        toolbar.findViewById(R.id.tv_title).setOnClickListener(view -> {
            DataUtil.putInt(API_SPF.SPF_SETTING, API_SPF.ITEM_DANMAKU_SPEED, speed);
            DataUtil.putBoolean(API_SPF.SPF_SETTING, API_SPF.ITEM_DISPLAY_AREA, all_app);
            DataUtil.putBoolean(API_SPF.SPF_SETTING, API_SPF.ITEM_SHOW_BG, show_bg);
            DataUtil.putInt(API_SPF.SPF_SETTING, API_SPF.ITEM_TEXT_SIZE, text_size);
            DataUtil.putInt(API_SPF.SPF_SETTING, API_SPF.ITEM_DANMAKU_SPEED_LEVEL, speed_level);
            DataUtil.putInt(API_SPF.SPF_SETTING, API_SPF.ITEM_DANMAKU_HEIGHT, danmaku_height);
            finish();
        });
        ViewUtil.initStatusBar(this);
    }

    /** Convenience findViewById with an inferred (unchecked) target type. */
    @SuppressWarnings("unchecked")
    private <T extends View> T f(int resId) {
        return (T) super.findViewById(resId);
    }

    /**
     * Static handler holding only a WeakReference to the activity so the
     * pending preview messages cannot leak it.
     */
    final static class MyHandler extends Handler {
        private WeakReference<DanmakuSettingActivity> reference;

        public MyHandler(DanmakuSettingActivity activity) {
            reference = new WeakReference<>(activity);
        }

        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            if (msg.what == MESSAGE_SEND) {
                // The activity may already have been collected (that is the
                // point of the WeakReference): stop instead of throwing an NPE.
                DanmakuSettingActivity activity = reference.get();
                if (activity == null) {
                    return;
                }
                Danmaku danmaku = new Danmaku("23333333");
                danmaku.setSpeed(activity.danmakuSpeed);
                activity.preview.addDanmaku(danmaku);
                sendEmptyMessageDelayed(MESSAGE_SEND, 500);
            }
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        if (id == android.R.id.home) {
            if (!super.onOptionsItemSelected(item)) {
                finish();
            }
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
}
| |
/*******************************************************************************
* Copyright 2015 Junichi Tatemura
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.nec.strudel.management.resource;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
/**
* The information of a resource consists of
* <ul>
* <li>domain: the domain of this resource type.
* <li>type: the type of resource. The pair of (domain, type) should uniquely
* identify the class (type) of this resource.
* <li>name: the name of resource, which can be assigned to individual instances
* of this resource type.
* <li>id: the id of resource, which can be assigned to individual instances of
* this resource type. When it is used, a tuple (domain, type, name, id) should
* uniquely identify the instance of this resource.
* <li>description: the description of this resource (type).
* <li>attributes (ResourceAttribute): A set of named attributes with access
* methods (getters and/or setters).
* </ul>
*
* @author tatemura
*
*/
public class ResourceInfo {
    private String domain;
    private String type;
    private String name;
    private String id;
    private String description;
    private ResourceAttribute[] attributes = new ResourceAttribute[0];

    public ResourceInfo() {
    }

    public String getDomain() {
        return domain;
    }

    public void setDomain(String domain) {
        this.domain = domain;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public ResourceAttribute[] getAttributes() {
        return attributes;
    }

    public void setAttributes(ResourceAttribute... attributes) {
        this.attributes = attributes;
    }

    /**
     * <ul>
     * <li>domain name: if it is specified in the managed object annotation use
     * it. Otherwise, use the package name of this object.
     * <li>type: use the specified one if it is in the ManagedObject annotation.
     * Otherwise, use the class name of this object.
     * <li>name: use the result of a method call if the ResourceName annotation
     * is specified. Otherwise, omit this property.
     * <li>id: use the result of a method call if the ResourceId annotation is
     * specified. Otherwise, omit this property.
     * </ul>
     *
     * @param resource
     *            object
     * @return resource info
     */
    public static ResourceInfo of(Object resource) {
        Class<?> cls = resource.getClass();
        ResourceInfo info = of(cls);
        String name = findValueByMethod(resource, ResourceName.class);
        if (name != null) {
            info.setName(name);
        }
        String id = findValueByMethod(resource, ResourceId.class);
        if (id != null) {
            info.setId(id);
        }
        return info;
    }

    /**
     * Builds the class-level part of the resource info (domain, type,
     * description, attributes) from the {@link ManagedResource} annotation,
     * falling back to the package and class name when unspecified.
     *
     * @param cls resource class
     * @return resource info without instance name/id
     */
    public static ResourceInfo of(Class<?> cls) {
        ManagedResource resource = cls.getAnnotation(ManagedResource.class);
        String domain = "";
        String type = "";
        String description = "";
        if (resource != null) {
            domain = resource.domain();
            type = resource.type();
            description = resource.description();
        }
        if (domain.isEmpty()) {
            domain = packageNameOf(cls);
        }
        if (type.isEmpty()) {
            type = classNameOf(cls);
        }
        ResourceInfo info = new ResourceInfo();
        info.setDomain(domain);
        info.setType(type);
        info.setDescription(description);
        info.setAttributes(findAttrs(cls));
        return info;
    }

    /**
     * Returns the package part of the class name, or "" for classes in the
     * default package (where the binary name contains no dot).
     */
    private static String packageNameOf(Class<?> cls) {
        String cname = cls.getName();
        int idx = cname.lastIndexOf(".");
        // Guard the default package: substring(0, -1) would throw
        // StringIndexOutOfBoundsException.
        return idx < 0 ? "" : cname.substring(0, idx);
    }

    /**
     * Returns the simple (package-less) part of the binary class name. For a
     * default-package class, lastIndexOf returns -1 and the whole name is kept.
     */
    private static String classNameOf(Class<?> cls) {
        String cname = cls.getName();
        return cname.substring(cname.lastIndexOf(".") + 1);
    }

    /**
     * Invokes the first public method annotated with {@code ann} and returns
     * its result as a String, or null if no such method exists, the call
     * fails, or it returns null.
     */
    private static String findValueByMethod(Object obj,
            Class<? extends Annotation> ann) {
        Class<?> cls = obj.getClass();
        Method method = findMethod(cls, ann);
        if (method != null) {
            try {
                Object value = method.invoke(obj);
                if (value != null) {
                    return value.toString();
                }
            } catch (IllegalAccessException ex) {
                // ignore (return null)
            } catch (IllegalArgumentException ex) {
                // ignore (return null)
            } catch (InvocationTargetException ex) {
                // ignore (return null)
            }
        }
        return null;
    }

    /** Finds the first public method carrying the given annotation, if any. */
    private static Method findMethod(Class<?> cls,
            Class<? extends Annotation> ann) {
        for (Method m : cls.getMethods()) {
            if (m.getAnnotation(ann) != null) {
                return m;
            }
        }
        return null;
    }

    /**
     * Collects the resource attributes of the class: every method annotated
     * with {@link Getter} or {@link Setter} contributes to the attribute of
     * the corresponding name (getter and setter pairs are merged).
     */
    private static ResourceAttribute[] findAttrs(Class<?> cls) {
        Map<String, ResourceAttribute> attrs =
                new HashMap<String, ResourceAttribute>();
        for (Method m : cls.getMethods()) {
            Getter getter = m.getAnnotation(Getter.class);
            if (getter != null) {
                ResourceAttribute attr = attrFor(attrs, toAttrName(getter, m));
                attr.setDescription(getter.description());
                attr.setGetter(m);
            } else {
                Setter setter = m.getAnnotation(Setter.class);
                if (setter != null) {
                    attrFor(attrs, toAttrName(setter, m)).setSetter(m);
                }
            }
        }
        return attrs.values().toArray(
                new ResourceAttribute[attrs.size()]);
    }

    /** Gets or lazily creates the attribute entry for the given name. */
    private static ResourceAttribute attrFor(
            Map<String, ResourceAttribute> attrs, String name) {
        ResourceAttribute attr = attrs.get(name);
        if (attr == null) {
            attr = new ResourceAttribute(name);
            attrs.put(name, attr);
        }
        return attr;
    }

    /** Attribute name for a getter: explicit name, or method name sans "get". */
    static String toAttrName(Getter getter, Method method) {
        if (getter.name().isEmpty()) {
            String methodName = method.getName();
            if (methodName.startsWith("get")) {
                return methodName.substring("get".length());
            } else {
                return methodName;
            }
        } else {
            return getter.name();
        }
    }

    /** Attribute name for a setter: explicit name, or method name sans "set". */
    static String toAttrName(Setter setter, Method method) {
        if (setter.name().isEmpty()) {
            String methodName = method.getName();
            if (methodName.startsWith("set")) {
                return methodName.substring("set".length());
            } else {
                return methodName;
            }
        } else {
            return setter.name();
        }
    }
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.util;
import com.google.j2objc.annotations.WeakOuter;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
/**
* ArrayMap is a generic key->value mapping data structure that is
* designed to be more memory efficient than a traditional {@link java.util.HashMap}.
* It keeps its mappings in an array data structure -- an integer array of hash
* codes for each item, and an Object array of the key/value pairs. This allows it to
* avoid having to create an extra object for every entry put in to the map, and it
* also tries to control the growth of the size of these arrays more aggressively
* (since growing them only requires copying the entries in the array, not rebuilding
* a hash map).
*
* <p>Note that this implementation is not intended to be appropriate for data structures
* that may contain large numbers of items. It is generally slower than a traditional
* HashMap, since lookups require a binary search and adds and removes require inserting
* and deleting entries in the array. For containers holding up to hundreds of items,
* the performance difference is not significant, less than 50%.</p>
*
* <p>Because this container is intended to better balance memory use, unlike most other
* standard Java containers it will shrink its array as items are removed from it. Currently
* you have no control over this shrinking -- if you set a capacity and then remove an
* item, it may reduce the capacity to better match the current size. In the future an
* explicit call to set the capacity should turn off this aggressive shrinking behavior.</p>
*/
public final class ArrayMap<K, V> implements Map<K, V> {
    private static final boolean DEBUG = false;
    private static final String TAG = "ArrayMap";
    /**
     * The minimum amount by which the capacity of a ArrayMap will increase.
     * This is tuned to be relatively space-efficient.
     */
    private static final int BASE_SIZE = 4;
    /**
     * Maximum number of entries to have in array caches.
     */
    private static final int CACHE_SIZE = 10;
    /**
     * @hide Special immutable empty ArrayMap.
     */
    public static final ArrayMap EMPTY = new ArrayMap(true);
    /**
     * Caches of small array objects to avoid spamming garbage. The cache
     * Object[] variable is a pointer to a linked list of array objects.
     * The first entry in the array is a pointer to the next array in the
     * list; the second entry is a pointer to the int[] hash code array for it.
     */
    static Object[] mBaseCache;
    static int mBaseCacheSize;
    static Object[] mTwiceBaseCache;
    static int mTwiceBaseCacheSize;
    /**
     * Special hash array value that indicates the container is immutable.
     */
    static final int[] EMPTY_IMMUTABLE_INTS = new int[0];
    // Sorted hash codes of the keys; kept parallel to mArray.
    int[] mHashes;
    // Keys and values interleaved: key at index (i<<1), value at (i<<1)+1.
    Object[] mArray;
    // Number of mappings currently stored.
    int mSize;
    // Lazily-created helper backing the keySet()/values()/entrySet() views.
    MapCollections<K, V> mCollections;
/**
 * Returns the index of the entry whose key equals {@code key}, given the
 * key's hash code. If absent, returns the one's complement (~insertionPoint)
 * of the index where the entry should be inserted to keep mHashes sorted.
 */
int indexOf(Object key, int hash) {
    final int N = mSize;
    // Important fast case: if nothing is in here, nothing to look for.
    if (N == 0) {
        return ~0;
    }
    int index = ContainerHelpers.binarySearch(mHashes, N, hash);
    // If the hash code wasn't found, then we have no entry for this key.
    if (index < 0) {
        return index;
    }
    // If the key at the returned index matches, that's what we want.
    if (key.equals(mArray[index<<1])) {
        return index;
    }
    // Search for a matching key after the index.
    // (Duplicate hash codes form a contiguous run around the found index.)
    int end;
    for (end = index + 1; end < N && mHashes[end] == hash; end++) {
        if (key.equals(mArray[end << 1])) return end;
    }
    // Search for a matching key before the index.
    for (int i = index - 1; i >= 0 && mHashes[i] == hash; i--) {
        if (key.equals(mArray[i << 1])) return i;
    }
    // Key not found -- return negative value indicating where a
    // new entry for this key should go. We use the end of the
    // hash chain to reduce the number of array entries that will
    // need to be copied when inserting.
    return ~end;
}
/**
 * Like {@link #indexOf}, but for the null key, which is stored under hash 0.
 * Returns the entry index, or ~insertionPoint when the null key is absent.
 */
int indexOfNull() {
    final int N = mSize;
    // Important fast case: if nothing is in here, nothing to look for.
    if (N == 0) {
        return ~0;
    }
    int index = ContainerHelpers.binarySearch(mHashes, N, 0);
    // If the hash code wasn't found, then we have no entry for this key.
    if (index < 0) {
        return index;
    }
    // If the key at the returned index matches, that's what we want.
    if (null == mArray[index<<1]) {
        return index;
    }
    // Search for a matching key after the index.
    // (Other keys may legitimately hash to 0, so scan the run of 0-hashes.)
    int end;
    for (end = index + 1; end < N && mHashes[end] == 0; end++) {
        if (null == mArray[end << 1]) return end;
    }
    // Search for a matching key before the index.
    for (int i = index - 1; i >= 0 && mHashes[i] == 0; i--) {
        if (null == mArray[i << 1]) return i;
    }
    // Key not found -- return negative value indicating where a
    // new entry for this key should go. We use the end of the
    // hash chain to reduce the number of array entries that will
    // need to be copied when inserting.
    return ~end;
}
/**
 * Allocates mHashes/mArray for {@code size} entries, reusing a cached pair
 * of arrays when the request matches one of the two cached sizes (BASE_SIZE
 * or BASE_SIZE*2). Throws if this map is the shared immutable instance.
 */
private void allocArrays(final int size) {
    if (mHashes == EMPTY_IMMUTABLE_INTS) {
        throw new UnsupportedOperationException("ArrayMap is immutable");
    }
    if (size == (BASE_SIZE*2)) {
        synchronized (ArrayMap.class) {
            if (mTwiceBaseCache != null) {
                // Pop the head of the cache list; slot 0 links to the next
                // cached array, slot 1 holds the matching int[] hash array.
                final Object[] array = mTwiceBaseCache;
                mArray = array;
                mTwiceBaseCache = (Object[])array[0];
                mHashes = (int[])array[1];
                array[0] = array[1] = null;
                mTwiceBaseCacheSize--;
                if (DEBUG) Log.d(TAG, "Retrieving 2x cache " + mHashes
                        + " now have " + mTwiceBaseCacheSize + " entries");
                return;
            }
        }
    } else if (size == BASE_SIZE) {
        synchronized (ArrayMap.class) {
            if (mBaseCache != null) {
                final Object[] array = mBaseCache;
                mArray = array;
                mBaseCache = (Object[])array[0];
                mHashes = (int[])array[1];
                array[0] = array[1] = null;
                mBaseCacheSize--;
                if (DEBUG) Log.d(TAG, "Retrieving 1x cache " + mHashes
                        + " now have " + mBaseCacheSize + " entries");
                return;
            }
        }
    }
    // No cached arrays available (or an uncached size): allocate fresh.
    mHashes = new int[size];
    mArray = new Object[size<<1];
}
/**
 * Returns a hashes/array pair to the appropriate static cache when its
 * capacity matches one of the two cached sizes, clearing entry slots so no
 * keys or values are retained. {@code size} is the number of live entries.
 */
private static void freeArrays(final int[] hashes, final Object[] array, final int size) {
    if (hashes.length == (BASE_SIZE*2)) {
        synchronized (ArrayMap.class) {
            if (mTwiceBaseCacheSize < CACHE_SIZE) {
                // Link into the cache list: slot 0 -> next, slot 1 -> hashes.
                array[0] = mTwiceBaseCache;
                array[1] = hashes;
                for (int i=(size<<1)-1; i>=2; i--) {
                    array[i] = null;
                }
                mTwiceBaseCache = array;
                mTwiceBaseCacheSize++;
                if (DEBUG) Log.d(TAG, "Storing 2x cache " + array
                        + " now have " + mTwiceBaseCacheSize + " entries");
            }
        }
    } else if (hashes.length == BASE_SIZE) {
        synchronized (ArrayMap.class) {
            if (mBaseCacheSize < CACHE_SIZE) {
                array[0] = mBaseCache;
                array[1] = hashes;
                for (int i=(size<<1)-1; i>=2; i--) {
                    array[i] = null;
                }
                mBaseCache = array;
                mBaseCacheSize++;
                if (DEBUG) Log.d(TAG, "Storing 1x cache " + array
                        + " now have " + mBaseCacheSize + " entries");
            }
        }
    }
}
/**
 * Create a new empty ArrayMap. The default capacity of an array map is 0, and
 * will grow once items are added to it.
 */
public ArrayMap() {
    mSize = 0;
    mHashes = ContainerHelpers.EMPTY_INTS;
    mArray = ContainerHelpers.EMPTY_OBJECTS;
}
/**
 * Create a new ArrayMap with a given initial capacity; a capacity of 0 uses
 * the shared empty arrays until the first insertion.
 */
public ArrayMap(int capacity) {
    mSize = 0;
    if (capacity != 0) {
        allocArrays(capacity);
    } else {
        mHashes = ContainerHelpers.EMPTY_INTS;
        mArray = ContainerHelpers.EMPTY_OBJECTS;
    }
}
/**
 * Marker constructor for the shared {@link #EMPTY} instance: installing
 * EMPTY_IMMUTABLE_INTS as the hash array makes allocArrays() refuse any
 * future mutation.
 */
private ArrayMap(boolean immutable) {
    mSize = 0;
    mArray = ContainerHelpers.EMPTY_OBJECTS;
    mHashes = EMPTY_IMMUTABLE_INTS;
}
/**
 * Create a new ArrayMap with the mappings from the given ArrayMap; a null
 * source yields an empty map.
 */
public ArrayMap(ArrayMap map) {
    this();
    if (map == null) {
        return;
    }
    putAll(map);
}
/**
 * Make the array map empty. All storage is released (possibly back into the
 * small-array caches).
 */
@Override
public void clear() {
    if (mSize <= 0) {
        return;
    }
    freeArrays(mHashes, mArray, mSize);
    mHashes = ContainerHelpers.EMPTY_INTS;
    mArray = ContainerHelpers.EMPTY_OBJECTS;
    mSize = 0;
}
/**
 * @hide
 * Like {@link #clear}, but doesn't reduce the capacity of the ArrayMap:
 * entry slots are nulled out so references are dropped, arrays are kept.
 */
public void erase() {
    if (mSize <= 0) {
        return;
    }
    final Object[] array = mArray;
    int i = mSize << 1;
    while (--i >= 0) {
        array[i] = null;
    }
    mSize = 0;
}
/**
 * Ensure the array map can hold at least <var>minimumCapacity</var> items,
 * reallocating and copying the current entries when it cannot.
 */
public void ensureCapacity(int minimumCapacity) {
    if (mHashes.length >= minimumCapacity) {
        return; // already large enough
    }
    final int[] oldHashes = mHashes;
    final Object[] oldArray = mArray;
    allocArrays(minimumCapacity);
    if (mSize > 0) {
        System.arraycopy(oldHashes, 0, mHashes, 0, mSize);
        System.arraycopy(oldArray, 0, mArray, 0, mSize << 1);
    }
    freeArrays(oldHashes, oldArray, mSize);
}
/**
 * Check whether a key exists in the array.
 *
 * @param key The key to search for (null is a valid key).
 * @return Returns true if the key exists, else false.
 */
@Override
public boolean containsKey(Object key) {
    final int index = (key == null) ? indexOfNull() : indexOf(key, key.hashCode());
    return index >= 0;
}
/**
 * Returns the index of the first entry whose value equals {@code value}
 * (linear scan over the value slots), or -1 if no such entry exists.
 */
int indexOfValue(Object value) {
    final Object[] array = mArray;
    final int count = mSize;
    if (value == null) {
        for (int i = 0; i < count; i++) {
            if (array[(i << 1) + 1] == null) {
                return i;
            }
        }
    } else {
        for (int i = 0; i < count; i++) {
            if (value.equals(array[(i << 1) + 1])) {
                return i;
            }
        }
    }
    return -1;
}
/**
 * Check whether a value exists in the array. This requires a linear search
 * through the entire array.
 *
 * @param value The value to search for.
 * @return Returns true if the value exists, else false.
 */
@Override
public boolean containsValue(Object value) {
    return indexOfValue(value) != -1;
}
/**
 * Retrieve a value from the array.
 * @param key The key of the value to retrieve (null is a valid key).
 * @return Returns the value associated with the given key,
 * or null if there is no such key.
 */
@Override
public V get(Object key) {
    final int index = (key == null) ? indexOfNull() : indexOf(key, key.hashCode());
    if (index < 0) {
        return null;
    }
    return (V) mArray[(index << 1) + 1];
}
/**
 * Return the key at the given index in the array.
 * @param index The desired index, must be between 0 and {@link #size()}-1.
 * @return Returns the key stored at the given index.
 */
public K keyAt(int index) {
    final int offset = index << 1; // keys live at even slots
    return (K) mArray[offset];
}
/**
 * Return the value at the given index in the array.
 * @param index The desired index, must be between 0 and {@link #size()}-1.
 * @return Returns the value stored at the given index.
 */
public V valueAt(int index) {
    final int offset = (index << 1) + 1; // values live at odd slots
    return (V) mArray[offset];
}
/**
 * Set the value at a given index in the array.
 * @param index The desired index, must be between 0 and {@link #size()}-1.
 * @param value The new value to store at this index.
 * @return Returns the previous value at the given index.
 */
public V setValueAt(int index, V value) {
    final int offset = (index << 1) + 1;
    final V previous = (V) mArray[offset];
    mArray[offset] = value;
    return previous;
}
/**
 * Return true if the array map contains no items.
 */
@Override
public boolean isEmpty() {
    return mSize < 1;
}
/**
 * Add a new value to the array map.
 * @param key The key under which to store the value. <b>Must not be null.</b> If
 * this key already exists in the array, its value will be replaced.
 * @param value The value to store for the given key.
 * @return Returns the old value that was stored for the given key, or null if there
 * was no such key.
 */
@Override
public V put(K key, V value) {
    final int hash;
    int index;
    if (key == null) {
        hash = 0;
        index = indexOfNull();
    } else {
        hash = key.hashCode();
        index = indexOf(key, hash);
    }
    if (index >= 0) {
        // Key already present: overwrite the value slot and return the old one.
        index = (index<<1) + 1;
        final V old = (V)mArray[index];
        mArray[index] = value;
        return old;
    }
    // indexOf returned ~insertionPoint; recover the insertion index.
    index = ~index;
    if (mSize >= mHashes.length) {
        // Growth policy: 4 -> 8 -> then 1.5x each time.
        final int n = mSize >= (BASE_SIZE*2) ? (mSize+(mSize>>1))
                : (mSize >= BASE_SIZE ? (BASE_SIZE*2) : BASE_SIZE);
        if (DEBUG) Log.d(TAG, "put: grow from " + mHashes.length + " to " + n);
        final int[] ohashes = mHashes;
        final Object[] oarray = mArray;
        allocArrays(n);
        if (mHashes.length > 0) {
            if (DEBUG) Log.d(TAG, "put: copy 0-" + mSize + " to 0");
            System.arraycopy(ohashes, 0, mHashes, 0, ohashes.length);
            System.arraycopy(oarray, 0, mArray, 0, oarray.length);
        }
        freeArrays(ohashes, oarray, mSize);
    }
    if (index < mSize) {
        // Shift subsequent entries right to open the slot at index.
        if (DEBUG) Log.d(TAG, "put: move " + index + "-" + (mSize-index)
                + " to " + (index+1));
        System.arraycopy(mHashes, index, mHashes, index + 1, mSize - index);
        System.arraycopy(mArray, index << 1, mArray, (index + 1) << 1, (mSize - index) << 1);
    }
    mHashes[index] = hash;
    mArray[index<<1] = key;
    mArray[(index<<1)+1] = value;
    mSize++;
    return null;
}
/**
 * Special fast path for appending items to the end of the array without validation.
 * The array must already be large enough to contain the item, and the key's
 * hash must not sort before the current last entry (otherwise this logs a
 * warning and falls back to the normal {@link #put}).
 * @hide
 */
public void append(K key, V value) {
    int index = mSize;
    final int hash = key == null ? 0 : key.hashCode();
    if (index >= mHashes.length) {
        throw new IllegalStateException("Array is full");
    }
    if (index > 0 && mHashes[index-1] > hash) {
        // Out-of-order append would break the sorted-hash invariant; warn
        // (with a synthesized stack trace) and do a regular insertion.
        RuntimeException e = new RuntimeException("here");
        e.fillInStackTrace();
        Log.w(TAG, "New hash " + hash
                + " is before end of array hash " + mHashes[index-1]
                + " at index " + index + " key " + key, e);
        put(key, value);
        return;
    }
    mSize = index+1;
    mHashes[index] = hash;
    index <<= 1;
    mArray[index] = key;
    mArray[index+1] = value;
}
/**
 * Perform a {@link #put(Object, Object)} of all key/value pairs in <var>array</var>.
 * When this map is empty, the source's internal arrays are bulk-copied
 * instead of inserting entry by entry.
 * @param array The array whose contents are to be retrieved.
 */
public void putAll(ArrayMap<? extends K, ? extends V> array) {
    final int count = array.mSize;
    ensureCapacity(mSize + count);
    if (mSize != 0) {
        for (int i = 0; i < count; i++) {
            put(array.keyAt(i), array.valueAt(i));
        }
    } else if (count > 0) {
        System.arraycopy(array.mHashes, 0, mHashes, 0, count);
        System.arraycopy(array.mArray, 0, mArray, 0, count << 1);
        mSize = count;
    }
}
/**
 * Remove an existing key from the array map.
 * @param key The key of the mapping to remove (null is a valid key).
 * @return Returns the value that was stored under the key, or null if there
 * was no such key.
 */
@Override
public V remove(Object key) {
    final int index = (key == null) ? indexOfNull() : indexOf(key, key.hashCode());
    return (index >= 0) ? removeAt(index) : null;
}
    /**
     * Remove the key/value mapping at the given index.
     * @param index The desired index, must be between 0 and {@link #size()}-1.
     * @return Returns the value that was stored at this index.
     */
    public V removeAt(int index) {
        // The value sits in the odd slot of the key/value pair.
        final Object old = mArray[(index << 1) + 1];
        if (mSize <= 1) {
            // Now empty: release the backing arrays and fall back to the
            // shared zero-length sentinels.
            if (DEBUG) Log.d(TAG, "remove: shrink from " + mHashes.length + " to 0");
            freeArrays(mHashes, mArray, mSize);
            mHashes = ContainerHelpers.EMPTY_INTS;
            mArray = ContainerHelpers.EMPTY_OBJECTS;
            mSize = 0;
        } else {
            if (mHashes.length > (BASE_SIZE*2) && mSize < mHashes.length/3) {
                // Shrunk enough to reduce size of arrays.  We don't allow it to
                // shrink smaller than (BASE_SIZE*2) to avoid flapping between
                // that and BASE_SIZE.
                final int n = mSize > (BASE_SIZE*2) ? (mSize + (mSize>>1)) : (BASE_SIZE*2);
                if (DEBUG) Log.d(TAG, "remove: shrink from " + mHashes.length + " to " + n);
                // allocArrays replaces mHashes/mArray, so keep references to the
                // old arrays and copy everything except the removed entry over.
                final int[] ohashes = mHashes;
                final Object[] oarray = mArray;
                allocArrays(n);
                mSize--;
                if (index > 0) {
                    if (DEBUG) Log.d(TAG, "remove: copy from 0-" + index + " to 0");
                    System.arraycopy(ohashes, 0, mHashes, 0, index);
                    System.arraycopy(oarray, 0, mArray, 0, index << 1);
                }
                if (index < mSize) {
                    if (DEBUG) Log.d(TAG, "remove: copy from " + (index+1) + "-" + mSize
                            + " to " + index);
                    System.arraycopy(ohashes, index + 1, mHashes, index, mSize - index);
                    System.arraycopy(oarray, (index + 1) << 1, mArray, index << 1,
                            (mSize - index) << 1);
                }
            } else {
                // Keep the current arrays: close the gap in place ...
                mSize--;
                if (index < mSize) {
                    if (DEBUG) Log.d(TAG, "remove: move " + (index+1) + "-" + mSize
                            + " to " + index);
                    System.arraycopy(mHashes, index + 1, mHashes, index, mSize - index);
                    System.arraycopy(mArray, (index + 1) << 1, mArray, index << 1,
                            (mSize - index) << 1);
                }
                // ... and null the vacated trailing slots so the removed
                // key/value can be garbage collected.
                mArray[mSize << 1] = null;
                mArray[(mSize << 1) + 1] = null;
            }
        }
        return (V)old;
    }
    /**
     * Return the number of items in this array map.
     */
    @Override
    public int size() {
        // mSize counts occupied entries, not the backing array capacity.
        return mSize;
    }
/**
* {@inheritDoc}
*
* <p>This implementation returns false if the object is not a map, or
* if the maps have different sizes. Otherwise, for each key in this map,
* values of both maps are compared. If the values for any key are not
* equal, the method returns false, otherwise it returns true.
*/
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object instanceof Map) {
Map<?, ?> map = (Map<?, ?>) object;
if (size() != map.size()) {
return false;
}
try {
for (int i=0; i<mSize; i++) {
K key = keyAt(i);
V mine = valueAt(i);
Object theirs = map.get(key);
if (mine == null) {
if (theirs != null || !map.containsKey(key)) {
return false;
}
} else if (!mine.equals(theirs)) {
return false;
}
}
} catch (NullPointerException ignored) {
return false;
} catch (ClassCastException ignored) {
return false;
}
return true;
}
return false;
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
final int[] hashes = mHashes;
final Object[] array = mArray;
int result = 0;
for (int i = 0, v = 1, s = mSize; i < s; i++, v+=2) {
Object value = array[v];
result += hashes[i] ^ (value == null ? 0 : value.hashCode());
}
return result;
}
/**
* {@inheritDoc}
*
* <p>This implementation composes a string by iterating over its mappings. If
* this map contains itself as a key or a value, the string "(this Map)"
* will appear in its place.
*/
@Override
public String toString() {
if (isEmpty()) {
return "{}";
}
StringBuilder buffer = new StringBuilder(mSize * 28);
buffer.append('{');
for (int i=0; i<mSize; i++) {
if (i > 0) {
buffer.append(", ");
}
Object key = keyAt(i);
if (key != this) {
buffer.append(key);
} else {
buffer.append("(this Map)");
}
buffer.append('=');
Object value = valueAt(i);
if (value != this) {
buffer.append(value);
} else {
buffer.append("(this Map)");
}
}
buffer.append('}');
return buffer.toString();
}
    // ------------------------------------------------------------------------
    // Interop with traditional Java containers.  Not as efficient as using
    // specialized collection APIs.
    // ------------------------------------------------------------------------
    // Lazily creates (and caches in mCollections) the adapter that backs the
    // standard entrySet()/keySet()/values() views. Each colXxx callback simply
    // delegates to this ArrayMap's state.
    private MapCollections<K, V> getCollection() {
        @WeakOuter
        class InteropMapCollections extends MapCollections<K, V> {
            @Override
            protected int colGetSize() {
                return mSize;
            }
            @Override
            protected Object colGetEntry(int index, int offset) {
                // offset 0 selects the key slot, 1 the value slot of the pair.
                return mArray[(index<<1) + offset];
            }
            @Override
            protected int colIndexOfKey(Object key) {
                return key == null ? indexOfNull() : indexOf(key, key.hashCode());
            }
            @Override
            protected int colIndexOfValue(Object value) {
                return indexOfValue(value);
            }
            @Override
            protected Map<K, V> colGetMap() {
                return ArrayMap.this;
            }
            @Override
            protected void colPut(K key, V value) {
                put(key, value);
            }
            @Override
            protected V colSetValue(int index, V value) {
                return setValueAt(index, value);
            }
            @Override
            protected void colRemoveAt(int index) {
                removeAt(index);
            }
            @Override
            protected void colClear() {
                clear();
            }
        }
        if (mCollections == null) {
            mCollections = new InteropMapCollections();
        }
        return mCollections;
    }
    /**
     * Determine if the array map contains all of the keys in the given collection.
     * @param collection The collection whose contents are to be checked against.
     * @return Returns true if this array map contains a key for every entry
     * in <var>collection</var>, else returns false.
     */
    public boolean containsAll(Collection<?> collection) {
        // Delegates to the shared MapCollections helper.
        return MapCollections.containsAllHelper(this, collection);
    }
/**
* Perform a {@link #put(Object, Object)} of all key/value pairs in <var>map</var>
* @param map The map whose contents are to be retrieved.
*/
@Override
public void putAll(Map<? extends K, ? extends V> map) {
ensureCapacity(mSize + map.size());
for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) {
put(entry.getKey(), entry.getValue());
}
}
    /**
     * Remove all keys in the array map that exist in the given collection.
     * @param collection The collection whose contents are to be used to remove keys.
     * @return Returns true if any keys were removed from the array map, else false.
     */
    public boolean removeAll(Collection<?> collection) {
        // Delegates to the shared MapCollections helper.
        return MapCollections.removeAllHelper(this, collection);
    }
    /**
     * Remove all keys in the array map that do <b>not</b> exist in the given collection.
     * @param collection The collection whose contents are to be used to determine which
     * keys to keep.
     * @return Returns true if any keys were removed from the array map, else false.
     */
    public boolean retainAll(Collection<?> collection) {
        // Delegates to the shared MapCollections helper.
        return MapCollections.retainAllHelper(this, collection);
    }
    /**
     * Return a {@link java.util.Set} for iterating over and interacting with all mappings
     * in the array map.
     *
     * <p><b>Note:</b> this is a very inefficient way to access the array contents, it
     * requires generating a number of temporary objects.</p>
     *
     * <p><b>Note:</b></p> the semantics of this
     * Set are subtly different than that of a {@link java.util.HashMap}: most important,
     * the {@link java.util.Map.Entry Map.Entry} object returned by its iterator is a single
     * object that exists for the entire iterator, so you can <b>not</b> hold on to it
     * after calling {@link java.util.Iterator#next() Iterator.next}.</p>
     */
    @Override
    public Set<Map.Entry<K, V>> entrySet() {
        // View backed by the cached MapCollections adapter.
        return getCollection().getEntrySet();
    }
    /**
     * Return a {@link java.util.Set} for iterating over and interacting with all keys
     * in the array map.
     *
     * <p><b>Note:</b> this is a fairly inefficient way to access the array contents, it
     * requires generating a number of temporary objects.</p>
     */
    @Override
    public Set<K> keySet() {
        // View backed by the cached MapCollections adapter.
        return getCollection().getKeySet();
    }
    /**
     * Return a {@link java.util.Collection} for iterating over and interacting with all values
     * in the array map.
     *
     * <p><b>Note:</b> this is a fairly inefficient way to access the array contents, it
     * requires generating a number of temporary objects.</p>
     */
    @Override
    public Collection<V> values() {
        // View backed by the cached MapCollections adapter.
        return getCollection().getValues();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.quickfixj;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.StartupListener;
import org.apache.camel.support.DefaultComponent;
import org.apache.camel.spi.Metadata;
import quickfix.LogFactory;
import quickfix.MessageFactory;
import quickfix.MessageStoreFactory;
import quickfix.SessionSettings;
public class QuickfixjComponent extends DefaultComponent implements StartupListener {
    private static final String PARAMETER_LAZY_CREATE_ENGINE = "lazyCreateEngine";
    // Guards all access to engines, provisionalEngines and endpoints.
    private final Object engineInstancesLock = new Object();
    // Engines keyed by settings resource name ("remaining" part of the endpoint URI).
    private final Map<String, QuickfixjEngine> engines = new HashMap<>();
    // Engines created before the CamelContext finished starting; they are started
    // and promoted into "engines" from onCamelContextStarted.
    private final Map<String, QuickfixjEngine> provisionalEngines = new HashMap<>();
    private final Map<String, QuickfixjEndpoint> endpoints = new HashMap<>();
    private Map<String, QuickfixjConfiguration> configurations = new HashMap<>();
    @Metadata(label = "advanced")
    private MessageStoreFactory messageStoreFactory;
    @Metadata(label = "advanced")
    private LogFactory logFactory;
    @Metadata(label = "advanced")
    private MessageFactory messageFactory;
    private boolean lazyCreateEngines;
    public QuickfixjComponent() {
    }
    public QuickfixjComponent(CamelContext context) {
        super(context);
    }
    /**
     * Creates (or reuses) the endpoint for the given URI. The backing QuickFIX/J engine is
     * looked up by the settings resource name; if none exists yet it is created from a
     * pre-registered configuration or by loading the settings resource, then either started
     * immediately (when the CamelContext is already running) or parked in provisionalEngines
     * to be started later from the StartupListener callback.
     */
    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        // Look up the engine instance based on the settings file ("remaining")
        QuickfixjEngine engine;
        synchronized (engineInstancesLock) {
            QuickfixjEndpoint endpoint = endpoints.get(uri);
            if (endpoint == null) {
                engine = engines.get(remaining);
                if (engine == null) {
                    engine = provisionalEngines.get(remaining);
                }
                if (engine == null) {
                    QuickfixjConfiguration configuration = configurations.get(remaining);
                    SessionSettings settings;
                    if (configuration != null) {
                        settings = configuration.createSessionSettings();
                    } else {
                        settings = QuickfixjEngine.loadSettings(remaining);
                    }
                    // Endpoint-level lazyCreateEngine parameter overrides the component default.
                    Boolean lazyCreateEngineForEndpoint = super.getAndRemoveParameter(parameters, PARAMETER_LAZY_CREATE_ENGINE, Boolean.TYPE);
                    if (lazyCreateEngineForEndpoint == null) {
                        lazyCreateEngineForEndpoint = isLazyCreateEngines();
                    }
                    engine = new QuickfixjEngine(uri, settings, messageStoreFactory, logFactory, messageFactory,
                            lazyCreateEngineForEndpoint);
                    // only start engine if CamelContext is already started, otherwise the engines gets started
                    // automatic later when CamelContext has been started using the StartupListener
                    if (getCamelContext().getStatus().isStarted()) {
                        startQuickfixjEngine(engine);
                        engines.put(remaining, engine);
                    } else {
                        // engines to be started later
                        provisionalEngines.put(remaining, engine);
                    }
                }
                endpoint = new QuickfixjEndpoint(engine, uri, this);
                endpoint.setConfigurationName(remaining);
                endpoint.setLazyCreateEngine(engine.isLazy());
                engine.addEventListener(endpoint);
                endpoints.put(uri, endpoint);
            }
            return endpoint;
        }
    }
    @Override
    protected void doStart() throws Exception {
        super.doStart();
        // we defer starting quickfix engines till the onCamelContextStarted callback
    }
    @Override
    protected void doStop() throws Exception {
        // stop engines when stopping component
        synchronized (engineInstancesLock) {
            for (QuickfixjEngine engine : engines.values()) {
                engine.stop();
            }
        }
        super.doStop();
    }
    @Override
    protected void doShutdown() throws Exception {
        // cleanup when shutting down; hold engineInstancesLock for consistency with
        // every other access to these maps (createEndpoint, doStop, onCamelContextStarted)
        synchronized (engineInstancesLock) {
            engines.clear();
            provisionalEngines.clear();
            endpoints.clear();
        }
        super.doShutdown();
    }
    // Starts the engine unless it was created lazy, in which case it starts on first use.
    private void startQuickfixjEngine(QuickfixjEngine engine) throws Exception {
        if (!engine.isLazy()) {
            log.info("Starting QuickFIX/J engine: {}", engine.getUri());
            engine.start();
        } else {
            log.info("QuickFIX/J engine: {} will start lazily", engine.getUri());
        }
    }
    // Test Support
    Map<String, QuickfixjEngine> getEngines() {
        return Collections.unmodifiableMap(engines);
    }
    // Test Support
    Map<String, QuickfixjEngine> getProvisionalEngines() {
        return Collections.unmodifiableMap(provisionalEngines);
    }
    /**
     * To use the given MessageFactory
     */
    public void setMessageFactory(MessageFactory messageFactory) {
        this.messageFactory = messageFactory;
    }
    /**
     * To use the given LogFactory
     */
    public void setLogFactory(LogFactory logFactory) {
        this.logFactory = logFactory;
    }
    /**
     * To use the given MessageStoreFactory
     */
    public void setMessageStoreFactory(MessageStoreFactory messageStoreFactory) {
        this.messageStoreFactory = messageStoreFactory;
    }
    public Map<String, QuickfixjConfiguration> getConfigurations() {
        return configurations;
    }
    /**
     * To use the given map of pre configured QuickFix configurations mapped to the key
     */
    public void setConfigurations(Map<String, QuickfixjConfiguration> configurations) {
        this.configurations = configurations;
    }
    public boolean isLazyCreateEngines() {
        return this.lazyCreateEngines;
    }
    /**
     * If set to <code>true</code>, the engines will be created and started when needed (when first message
     * is send)
     */
    public void setLazyCreateEngines(boolean lazyCreateEngines) {
        this.lazyCreateEngines = lazyCreateEngines;
    }
    /**
     * Starts all engines once the CamelContext is fully started, then promotes the
     * provisional (pre-start) engines into the main engine map.
     */
    @Override
    public void onCamelContextStarted(CamelContext camelContext, boolean alreadyStarted) throws Exception {
        // only start quickfix engines when CamelContext have finished starting
        synchronized (engineInstancesLock) {
            for (QuickfixjEngine engine : engines.values()) {
                startQuickfixjEngine(engine);
            }
            for (Map.Entry<String, QuickfixjEngine> entry : provisionalEngines.entrySet()) {
                startQuickfixjEngine(entry.getValue());
                engines.put(entry.getKey(), entry.getValue());
            }
            provisionalEngines.clear();
        }
    }
}
| |
/*************************************************************************
* Copyright (c) 2015 Lemberg Solutions
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**************************************************************************/
package com.ls.demo.demo1;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Intent;
import android.content.res.AssetManager;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.location.Location;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import android.widget.LinearLayout;
import com.ls.widgets.map.MapWidget;
import com.ls.widgets.map.config.OfflineMap;
import com.ls.widgets.map.events.MapTouchedEvent;
import com.ls.widgets.map.interfaces.Layer;
import com.ls.widgets.map.interfaces.MapEventsListener;
import com.ls.widgets.map.interfaces.OnLocationChangedListener;
import com.ls.widgets.map.interfaces.OnMapTouchListener;
import com.ls.widgets.map.model.MapLayer;
import java.io.IOException;
import java.io.InputStream;
/**
 * Demo activity that shows an offline {@link MapWidget} full screen and wires up
 * basic interactions: touch dialogs, zoom logging, location following, menu-driven
 * zoom/scale actions and layer toggling via the 1/2 hardware keys.
 */
public class BrowseMapActivity
    extends Activity
{
    // Layer ids used to group map objects by POI category.
    public static final long LAYER_ATTRACTIONS = 1000;
    public static final long LAYER_KIDS = 2000;
    public static final long LAYER_SPORT_AND_LEASURE = 3000;
    public static final long PIN_LAYER = 4000;
    private MapWidget mapWidget;
    private Model model;
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.main);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        this.model = new Model();
        LinearLayout layout = (LinearLayout) findViewById(R.id.mainLayout);
        // Build the offline map widget and one layer per POI category.
        mapWidget = new MapWidget(this, OfflineMap.MAP_ROOT);
        mapWidget.setBackgroundColor(Color.GREEN);
        MapLayer layer = mapWidget.createLayer(LAYER_ATTRACTIONS);
        initLayer(layer, Model.CAT_MAIN_ATTRACTIONS);
        layer = mapWidget.createLayer(LAYER_KIDS);
        initLayer(layer, Model.CAT_KIDS);
        layer = mapWidget.createLayer(LAYER_SPORT_AND_LEASURE);
        initLayer(layer, Model.CAT_SPORT_AND_LEISURE);
        mapWidget.getConfig().setMapCenteringEnabled(false);
        mapWidget.createLayer(PIN_LAYER);
        mapWidget.setAnimationEnabled(true);
        // Demo: show a dialog describing every touch event on the map.
        mapWidget.setOnMapTouchListener(new OnMapTouchListener() {
            @Override
            public void onTouch(MapWidget v, MapTouchedEvent event){
                AlertDialog.Builder builder = new AlertDialog.Builder(v.getContext());
                builder.setMessage("OnTouch, X: " + event.getScreenX() + " Y: " + event.getScreenY()
                        + " MAPX: " + event.getMapX() + " MAPY: " + event.getMapY() +
                        " Touched Count: " + event.getTouchedObjectIds().size());
                builder.create().show();
            }
        });
        // Demo: log the four zoom lifecycle callbacks.
        mapWidget.addMapEventsListener(new MapEventsListener() {
            public void onPreZoomOut()
            {
                Log.i("BrowseMapActivity", "On Map will zoom out");
            }
            public void onPreZoomIn()
            {
                Log.i("BrowseMapActivity", "On Map will zoom in");
            }
            public void onPostZoomOut()
            {
                Log.i("BrowseMapActivity", "On Map did zoom out");
            }
            public void onPostZoomIn()
            {
                Log.i("BrowseMapActivity", "On Map did zoom in");
            }
        });
        // Keep the map centered on the device as the location changes.
        mapWidget.setOnLocationChangedListener(new OnLocationChangedListener()
        {
            @Override
            public void onLocationChanged(MapWidget v, Location location)
            {
                v.scrollMapTo(location);
            }
        });
        mapWidget.setMinZoomLevel(1);
        layout.addView(mapWidget);
    }
    /**
     * Makes the given layer visible. Populating the layer with POIs for
     * {@code theCategoryId} is not implemented in this demo; the parameter is
     * kept so callers and future implementations are unaffected.
     */
    private void initLayer(Layer theLayer, String theCategoryId)
    {
        theLayer.setVisible(true);
    }
    /**
     * Loads the marker icon for the given category from the application assets.
     *
     * @param theCatId a category id from {@link Model}; an unrecognized id leaves
     *                 the path pointing at the bare icons directory, so opening it fails
     * @return the drawable decoded from the asset stream
     * @throws IOException if the asset cannot be opened
     */
    public Drawable getIcon(String theCatId) throws IOException
    {
        String path = "media/icons/";
        if (Model.CAT_MAIN_ATTRACTIONS.equalsIgnoreCase(theCatId))
        {
            path += "map_icon_leisure.png";
        } else if (Model.CAT_KIDS.equalsIgnoreCase(theCatId))
        {
            path += "map_icon_meals.png";
        } else if (Model.CAT_SPORT_AND_LEISURE.equalsIgnoreCase(theCatId))
        {
            path += "map_icon_others_3.png";
        }
        AssetManager manager = getAssets();
        InputStream input = manager.open(path);
        try {
            return Drawable.createFromStream(input, null);
        } finally {
            // Drawable.createFromStream decodes the bitmap before returning, so the
            // stream can be closed here; previously it was leaked.
            input.close();
        }
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.map_menu, menu);
        return true;
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle item selection
        int i = item.getItemId();
        if (i == R.id.zoom_in) {
            try {
                mapWidget.zoomIn();
            } catch (Exception e) {
                Log.e("BrowseMapActivity", "Exception while zoom in. " + e);
            }
            return true;
        }
        else if (i == R.id.zoom_out) {
            try {
                mapWidget.zoomOut();
            } catch (Exception e) {
                Log.e("BrowseMapActivity", "Exception while zoom out. " + e);
            }
            return true;
        }
        else if (i == R.id.double_size) {
            mapWidget.setScale(2.0f);
            return true;
        }
        else if (i == R.id.original_size) {
            mapWidget.setScale(1.0f);
            return true;
        }
        else if (i == R.id.half_size) {
            mapWidget.setScale(0.5f);
            return true;
        }
        else if (i == R.id.open_map) {
            // Launches a second instance of this activity.
            Intent intent = new Intent(this, BrowseMapActivity.class);
            startActivity(intent);
            return true;
        }
        else {
            return super.onOptionsItemSelected(item);
        }
    }
    /** Toggles visibility of layer 0 or 1 when the 1 or 2 hardware key is pressed. */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        Layer layer = null;
        switch (keyCode) {
        case KeyEvent.KEYCODE_1:
            layer = mapWidget.getLayer(0);
            break;
        case KeyEvent.KEYCODE_2:
            layer = mapWidget.getLayer(1);
            break;
        }
        if (layer != null) {
            layer.setVisible(!layer.isVisible());
            return true;
        } else
            return super.onKeyDown(keyCode, event);
    }
    @Override
    protected void onStop() {
        super.onStop();
    }
    @Override
    protected void onPause() {
        super.onPause();
    }
    @Override
    protected void onDestroy()
    {
        super.onDestroy();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.common.task;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import com.google.common.util.concurrent.ListenableFuture;
import org.apache.druid.data.input.FiniteFirehoseFactory;
import org.apache.druid.data.input.FirehoseFactory;
import org.apache.druid.data.input.FirehoseFactoryToInputSourceAdaptor;
import org.apache.druid.data.input.InputFormat;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.InputSource;
import org.apache.druid.data.input.Rows;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.hll.HyperLogLogCollector;
import org.apache.druid.indexer.Checks;
import org.apache.druid.indexer.IngestionState;
import org.apache.druid.indexer.Property;
import org.apache.druid.indexer.TaskStatus;
import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
import org.apache.druid.indexer.partitions.HashedPartitionsSpec;
import org.apache.druid.indexer.partitions.PartitionsSpec;
import org.apache.druid.indexer.partitions.SecondaryPartitionType;
import org.apache.druid.indexing.common.IngestionStatsAndErrorsTaskReport;
import org.apache.druid.indexing.common.IngestionStatsAndErrorsTaskReportData;
import org.apache.druid.indexing.common.TaskRealtimeMetricsMonitorBuilder;
import org.apache.druid.indexing.common.TaskReport;
import org.apache.druid.indexing.common.TaskToolbox;
import org.apache.druid.indexing.common.actions.SegmentTransactionalInsertAction;
import org.apache.druid.indexing.common.actions.TaskActionClient;
import org.apache.druid.indexing.common.stats.TaskRealtimeMetricsMonitor;
import org.apache.druid.indexing.common.task.batch.parallel.PartialHashSegmentGenerateTask;
import org.apache.druid.indexing.common.task.batch.parallel.iterator.DefaultIndexTaskInputRowIteratorBuilder;
import org.apache.druid.indexing.common.task.batch.partition.CompletePartitionAnalysis;
import org.apache.druid.indexing.common.task.batch.partition.HashPartitionAnalysis;
import org.apache.druid.indexing.common.task.batch.partition.LinearPartitionAnalysis;
import org.apache.druid.indexing.common.task.batch.partition.PartitionAnalysis;
import org.apache.druid.indexing.overlord.sampler.InputSourceSampler;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.JodaUtils;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.UOE;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.guava.Comparators;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.common.parsers.CloseableIterator;
import org.apache.druid.segment.IndexMerger;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.incremental.AppendableIndexSpec;
import org.apache.druid.segment.incremental.ParseExceptionHandler;
import org.apache.druid.segment.incremental.RowIngestionMeters;
import org.apache.druid.segment.indexing.BatchIOConfig;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.IngestionSpec;
import org.apache.druid.segment.indexing.RealtimeIOConfig;
import org.apache.druid.segment.indexing.TuningConfig;
import org.apache.druid.segment.indexing.granularity.ArbitraryGranularitySpec;
import org.apache.druid.segment.indexing.granularity.GranularitySpec;
import org.apache.druid.segment.realtime.FireDepartment;
import org.apache.druid.segment.realtime.FireDepartmentMetrics;
import org.apache.druid.segment.realtime.appenderator.Appenderator;
import org.apache.druid.segment.realtime.appenderator.AppenderatorConfig;
import org.apache.druid.segment.realtime.appenderator.BaseAppenderatorDriver;
import org.apache.druid.segment.realtime.appenderator.BatchAppenderatorDriver;
import org.apache.druid.segment.realtime.appenderator.SegmentsAndCommitMetadata;
import org.apache.druid.segment.realtime.appenderator.TransactionalSegmentPublisher;
import org.apache.druid.segment.realtime.firehose.ChatHandler;
import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.HashBasedNumberedShardSpec;
import org.apache.druid.timeline.partition.NumberedShardSpec;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.joda.time.Interval;
import org.joda.time.Period;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import java.util.function.Predicate;
public class IndexTask extends AbstractBatchIndexTask implements ChatHandler
{
public static final HashFunction HASH_FUNCTION = Hashing.murmur3_128();
private static final Logger log = new Logger(IndexTask.class);
private static final String TYPE = "index";
  // Derives the task lock group id from the spec's appendToExisting flag and datasource.
  private static String makeGroupId(IndexIngestionSpec ingestionSchema)
  {
    return makeGroupId(ingestionSchema.ioConfig.appendToExisting, ingestionSchema.dataSchema.getDataSource());
  }
private static String makeGroupId(boolean isAppendToExisting, String dataSource)
{
if (isAppendToExisting) {
// Shared locking group for all tasks that append, since they are OK to run concurrently.
return StringUtils.format("%s_append_%s", TYPE, dataSource);
} else {
// Return null, one locking group per task.
return null;
}
}
  private final IndexIngestionSpec ingestionSchema;
  // Current lifecycle phase; initialized to NOT_STARTED in the constructor.
  private IngestionState ingestionState;
  // @MonotonicNonNull: these fields are null until assigned once, then never reset.
  @MonotonicNonNull
  private ParseExceptionHandler determinePartitionsParseExceptionHandler;
  @MonotonicNonNull
  private ParseExceptionHandler buildSegmentsParseExceptionHandler;
  @MonotonicNonNull
  private AuthorizerMapper authorizerMapper;
  @MonotonicNonNull
  private RowIngestionMeters determinePartitionsMeters;
  @MonotonicNonNull
  private RowIngestionMeters buildSegmentsMeters;
  // NOTE(review): appears to hold a failure description set during task execution
  // (assignment not visible in this chunk) — confirm against the rest of the class.
  @Nullable
  private String errorMsg;
  // JSON deserialization entry point: derives the groupId and dataSource from the spec
  // and delegates to the full constructor.
  @JsonCreator
  public IndexTask(
      @JsonProperty("id") final String id,
      @JsonProperty("resource") final TaskResource taskResource,
      @JsonProperty("spec") final IndexIngestionSpec ingestionSchema,
      @JsonProperty("context") final Map<String, Object> context
  )
  {
    this(
        id,
        makeGroupId(ingestionSchema),
        taskResource,
        ingestionSchema.dataSchema.getDataSource(),
        ingestionSchema,
        context
    );
  }
  // Full constructor; getOrMakeId generates a task id when none is supplied.
  public IndexTask(
      String id,
      String groupId,
      TaskResource resource,
      String dataSource,
      IndexIngestionSpec ingestionSchema,
      Map<String, Object> context
  )
  {
    super(
        getOrMakeId(id, TYPE, dataSource),
        groupId,
        resource,
        dataSource,
        context
    );
    this.ingestionSchema = ingestionSchema;
    // The task has not begun any ingestion work yet.
    this.ingestionState = IngestionState.NOT_STARTED;
  }
/**
 * Returns the task type, {@code "index"}.
 */
@Override
public String getType()
{
return TYPE;
}
/**
 * Validates that any explicitly-given partitionsSpec is a supported type (linear or hash) and then attempts
 * to take the locks required for the configured intervals.
 *
 * @throws UOE if a range (or other unsupported) partitionsSpec was configured
 */
@Override
public boolean isReady(TaskActionClient taskActionClient) throws Exception
{
  final IndexTuningConfig tuningConfig = getIngestionSchema().getTuningConfig();
  final PartitionsSpec partitionsSpec = tuningConfig == null ? null : tuningConfig.getPartitionsSpec();
  if (partitionsSpec != null
      && partitionsSpec.getType() != SecondaryPartitionType.LINEAR
      && partitionsSpec.getType() != SecondaryPartitionType.HASH) {
    throw new UOE("partitionsSpec[%s] is not supported", partitionsSpec.getClass().getName());
  }
  return determineLockGranularityAndTryLock(taskActionClient, ingestionSchema.dataSchema.getGranularitySpec());
}
/**
 * Locks on existing segments are required unless this is a best-effort-rollup append task (the only mode
 * that can safely run concurrently with other writers).
 */
@Override
public boolean requireLockExistingSegments()
{
  final boolean bestEffortAppend = ingestionSchema.ioConfig.isAppendToExisting()
                                   && !isGuaranteedRollup(ingestionSchema.ioConfig, ingestionSchema.tuningConfig);
  return !bestEffortAppend;
}
/**
 * Finds the existing segments in the given intervals that this task must lock, delegating to
 * {@code findInputSegments} with the (deprecated) firehose factory from the ioConfig.
 */
@Override
public List<DataSegment> findSegmentsToLock(TaskActionClient taskActionClient, List<Interval> intervals)
throws IOException
{
return findInputSegments(
getDataSource(),
taskActionClient,
intervals,
ingestionSchema.ioConfig.firehoseFactory
);
}
/**
 * Perfect (guaranteed) rollup is in effect iff the spec's ioConfig/tuningConfig request it.
 */
@Override
public boolean isPerfectRollup()
{
return isGuaranteedRollup(ingestionSchema.ioConfig, ingestionSchema.tuningConfig);
}
/**
 * Returns the configured segment granularity, or null when an {@link ArbitraryGranularitySpec} is used
 * (arbitrary intervals have no single segment granularity).
 */
@Nullable
@Override
public Granularity getSegmentGranularity()
{
  final GranularitySpec granularitySpec = ingestionSchema.getDataSchema().getGranularitySpec();
  return granularitySpec instanceof ArbitraryGranularitySpec ? null : granularitySpec.getSegmentGranularity();
}
/**
 * HTTP endpoint returning saved unparseable-event messages per ingestion phase.
 *
 * @param full when non-null, report both phases regardless of the current ingestion state
 */
@GET
@Path("/unparseableEvents")
@Produces(MediaType.APPLICATION_JSON)
public Response getUnparseableEvents(
    @Context final HttpServletRequest req,
    @QueryParam("full") String full
)
{
  IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);

  // Decide which phases to report on: "full" forces both, otherwise it depends on the current state.
  final boolean includeDeterminePartitions;
  final boolean includeBuildSegments;
  if (full != null) {
    includeDeterminePartitions = true;
    includeBuildSegments = true;
  } else {
    includeDeterminePartitions = ingestionState == IngestionState.DETERMINE_PARTITIONS;
    includeBuildSegments = ingestionState == IngestionState.BUILD_SEGMENTS
                           || ingestionState == IngestionState.COMPLETED;
  }

  final Map<String, List<String>> events = new HashMap<>();
  if (includeDeterminePartitions) {
    events.put(
        RowIngestionMeters.DETERMINE_PARTITIONS,
        IndexTaskUtils.getMessagesFromSavedParseExceptions(
            determinePartitionsParseExceptionHandler.getSavedParseExceptions()
        )
    );
  }
  if (includeBuildSegments) {
    events.put(
        RowIngestionMeters.BUILD_SEGMENTS,
        IndexTaskUtils.getMessagesFromSavedParseExceptions(
            buildSegmentsParseExceptionHandler.getSavedParseExceptions()
        )
    );
  }
  return Response.ok(events).build();
}
/**
 * Builds the row-stats payload ("totals" and "movingAverages") for the phases selected by {@code full}
 * (non-null means both phases) or, failing that, by the current ingestion state.
 */
private Map<String, Object> doGetRowStats(String full)
{
  final boolean includeDeterminePartitions;
  final boolean includeBuildSegments;
  if (full != null) {
    includeDeterminePartitions = true;
    includeBuildSegments = true;
  } else {
    includeDeterminePartitions = ingestionState == IngestionState.DETERMINE_PARTITIONS;
    includeBuildSegments = ingestionState == IngestionState.BUILD_SEGMENTS
                           || ingestionState == IngestionState.COMPLETED;
  }

  final Map<String, Object> totalsMap = new HashMap<>();
  final Map<String, Object> averagesMap = new HashMap<>();
  if (includeDeterminePartitions) {
    totalsMap.put(RowIngestionMeters.DETERMINE_PARTITIONS, determinePartitionsMeters.getTotals());
    averagesMap.put(RowIngestionMeters.DETERMINE_PARTITIONS, determinePartitionsMeters.getMovingAverages());
  }
  if (includeBuildSegments) {
    totalsMap.put(RowIngestionMeters.BUILD_SEGMENTS, buildSegmentsMeters.getTotals());
    averagesMap.put(RowIngestionMeters.BUILD_SEGMENTS, buildSegmentsMeters.getMovingAverages());
  }

  final Map<String, Object> returnMap = new HashMap<>();
  returnMap.put("totals", totalsMap);
  returnMap.put("movingAverages", averagesMap);
  return returnMap;
}
/**
 * HTTP endpoint returning row stats (totals and moving averages) per ingestion phase.
 *
 * @param full when non-null, report both phases regardless of the current ingestion state
 */
@GET
@Path("/rowStats")
@Produces(MediaType.APPLICATION_JSON)
public Response getRowStats(
@Context final HttpServletRequest req,
@QueryParam("full") String full
)
{
IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);
return Response.ok(doGetRowStats(full)).build();
}
/**
 * HTTP endpoint returning a live "ingestionStatsAndErrors" report: the current ingestion state, saved
 * unparseable events, and row stats.
 *
 * @param full when non-null, row stats cover both phases regardless of the current ingestion state
 */
@GET
@Path("/liveReports")
@Produces(MediaType.APPLICATION_JSON)
public Response getLiveReports(
    @Context final HttpServletRequest req,
    @QueryParam("full") String full
)
{
  IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper);

  final Map<String, Object> payload = new HashMap<>();
  payload.put("ingestionState", ingestionState);
  payload.put("unparseableEvents", getTaskCompletionUnparseableEvents());
  payload.put("rowStats", doGetRowStats(full));

  final Map<String, Object> ingestionStatsAndErrors = new HashMap<>();
  ingestionStatsAndErrors.put("taskId", getId());
  ingestionStatsAndErrors.put("payload", payload);
  ingestionStatsAndErrors.put("type", "ingestionStatsAndErrors");

  final Map<String, Object> returnMap = new HashMap<>();
  returnMap.put("ingestionStatsAndErrors", ingestionStatsAndErrors);
  return Response.ok(returnMap).build();
}
/**
 * Returns the full ingestion spec; serialized as the "spec" property of the task JSON.
 */
@JsonProperty("spec")
public IndexIngestionSpec getIngestionSchema()
{
return ingestionSchema;
}
/**
 * Runs the two batch ingestion phases: (1) determine partitions — scanning the input when intervals or
 * shard counts are unknown — and (2) build and publish segments. Registers this task as a chat handler so
 * the live-report HTTP endpoints work while the task runs, and writes a completion report before returning
 * a failure status.
 *
 * @return a success status when segments were published, otherwise a failure status carrying the error
 */
@Override
public TaskStatus runTask(final TaskToolbox toolbox)
{
try {
log.debug("Found chat handler of class[%s]", toolbox.getChatHandlerProvider().getClass().getName());
if (toolbox.getChatHandlerProvider().get(getId()).isPresent()) {
// This is a workaround for ParallelIndexSupervisorTask to avoid double registering when it runs in the
// sequential mode. See ParallelIndexSupervisorTask.runSequential().
// Note that all HTTP endpoints are not available in this case. This works only for
// ParallelIndexSupervisorTask because it doesn't support APIs for live ingestion reports.
log.warn("Chat handler is already registered. Skipping chat handler registration.");
} else {
toolbox.getChatHandlerProvider().register(getId(), this, false);
}
// Late-initialize the meters and parse-exception handlers used by both phases (the @MonotonicNonNull fields).
this.authorizerMapper = toolbox.getAuthorizerMapper();
this.determinePartitionsMeters = toolbox.getRowIngestionMetersFactory().createRowIngestionMeters();
this.buildSegmentsMeters = toolbox.getRowIngestionMetersFactory().createRowIngestionMeters();
this.determinePartitionsParseExceptionHandler = new ParseExceptionHandler(
determinePartitionsMeters,
ingestionSchema.getTuningConfig().isLogParseExceptions(),
ingestionSchema.getTuningConfig().getMaxParseExceptions(),
ingestionSchema.getTuningConfig().getMaxSavedParseExceptions()
);
this.buildSegmentsParseExceptionHandler = new ParseExceptionHandler(
buildSegmentsMeters,
ingestionSchema.getTuningConfig().isLogParseExceptions(),
ingestionSchema.getTuningConfig().getMaxParseExceptions(),
ingestionSchema.getTuningConfig().getMaxSavedParseExceptions()
);
// Intervals must be discovered by scanning the input when the granularity spec doesn't declare them.
final boolean determineIntervals = !ingestionSchema.getDataSchema()
.getGranularitySpec()
.bucketIntervals()
.isPresent();
final InputSource inputSource = ingestionSchema.getIOConfig().getNonNullInputSource(
ingestionSchema.getDataSchema().getParser()
);
final File tmpDir = toolbox.getIndexingTmpDir();
// Phase 1: determine which intervals/buckets the data falls into.
ingestionState = IngestionState.DETERMINE_PARTITIONS;
// Initialize maxRowsPerSegment and maxTotalRows lazily
final IndexTuningConfig tuningConfig = ingestionSchema.tuningConfig;
final PartitionsSpec partitionsSpec = tuningConfig.getGivenOrDefaultPartitionsSpec();
final PartitionAnalysis partitionAnalysis = determineShardSpecs(
toolbox,
inputSource,
tmpDir,
partitionsSpec
);
final List<Interval> allocateIntervals = new ArrayList<>(partitionAnalysis.getAllIntervalsToIndex());
final DataSchema dataSchema;
if (determineIntervals) {
// Locks weren't taken up front (intervals were unknown), so acquire them now for the discovered intervals.
if (!determineLockGranularityAndTryLock(toolbox.getTaskActionClient(), allocateIntervals)) {
throw new ISE("Failed to get locks for intervals[%s]", allocateIntervals);
}
dataSchema = ingestionSchema.getDataSchema().withGranularitySpec(
ingestionSchema.getDataSchema()
.getGranularitySpec()
.withIntervals(JodaUtils.condenseIntervals(allocateIntervals))
);
} else {
dataSchema = ingestionSchema.getDataSchema();
}
// Phase 2: read the input (again, if it was scanned above) and build/publish the segments.
ingestionState = IngestionState.BUILD_SEGMENTS;
return generateAndPublishSegments(
toolbox,
dataSchema,
inputSource,
tmpDir,
partitionAnalysis
);
}
catch (Exception e) {
log.error(e, "Encountered exception in %s.", ingestionState);
errorMsg = Throwables.getStackTraceAsString(e);
// Persist a completion report containing the error before returning the failure status.
toolbox.getTaskReportFileWriter().write(getId(), getTaskCompletionReports());
return TaskStatus.failure(
getId(),
errorMsg
);
}
finally {
toolbox.getChatHandlerProvider().unregister(getId());
}
}
/**
 * Builds the "ingestionStatsAndErrors" completion report written at the end of the task: final ingestion
 * state, unparseable events, row stats, and the error message (if any).
 */
private Map<String, TaskReport> getTaskCompletionReports()
{
return TaskReport.buildTaskReports(
new IngestionStatsAndErrorsTaskReport(
getId(),
new IngestionStatsAndErrorsTaskReportData(
ingestionState,
getTaskCompletionUnparseableEvents(),
getTaskCompletionRowStats(),
errorMsg
)
)
);
}
/**
 * Collects the saved unparseable-event messages from both phases for the completion report. The returned
 * map is left empty when neither phase saved any parse exceptions.
 */
private Map<String, Object> getTaskCompletionUnparseableEvents()
{
  final List<String> determinePartitionsMessages = IndexTaskUtils.getMessagesFromSavedParseExceptions(
      determinePartitionsParseExceptionHandler.getSavedParseExceptions()
  );
  final List<String> buildSegmentsMessages = IndexTaskUtils.getMessagesFromSavedParseExceptions(
      buildSegmentsParseExceptionHandler.getSavedParseExceptions()
  );

  final Map<String, Object> unparseableEventsMap = new HashMap<>();
  if (determinePartitionsMessages != null || buildSegmentsMessages != null) {
    unparseableEventsMap.put(RowIngestionMeters.DETERMINE_PARTITIONS, determinePartitionsMessages);
    unparseableEventsMap.put(RowIngestionMeters.BUILD_SEGMENTS, buildSegmentsMessages);
  }
  return unparseableEventsMap;
}
/**
 * Returns the final row-count totals for both phases, keyed by phase name, for the completion report.
 */
private Map<String, Object> getTaskCompletionRowStats()
{
  final Map<String, Object> metrics = new HashMap<>();
  metrics.put(RowIngestionMeters.DETERMINE_PARTITIONS, determinePartitionsMeters.getTotals());
  metrics.put(RowIngestionMeters.BUILD_SEGMENTS, buildSegmentsMeters.getTotals());
  return metrics;
}
/**
 * Determines intervals and shardSpecs for the input data, scanning it only when necessary.
 * <p/>
 * Intervals must be determined by scanning when they are not specified in {@link GranularitySpec}; shard
 * counts must be determined when perfect rollup is required but the number of shards was not given in
 * {@link IndexTuningConfig}. If neither is needed, a {@link PartitionAnalysis} is built directly from the
 * configured intervals. Otherwise the whole input is read once; for perfect rollup,
 * {@link HashBasedNumberedShardSpec} hash partitioning is used (single-dimension partitioning may be
 * supported in the future).
 *
 * @return a partition analysis indicating how many buckets to create per interval
 */
private PartitionAnalysis determineShardSpecs(
    final TaskToolbox toolbox,
    final InputSource inputSource,
    final File tmpDir,
    @Nonnull final PartitionsSpec partitionsSpec
) throws IOException
{
  final ObjectMapper jsonMapper = toolbox.getJsonMapper();
  final GranularitySpec granularitySpec = ingestionSchema.getDataSchema().getGranularitySpec();

  // Must determine intervals if unknown, since we acquire all locks before processing any data.
  final boolean determineIntervals = !granularitySpec.bucketIntervals().isPresent();
  // Must determine partitions if rollup is guaranteed and the user didn't provide a specific value.
  final boolean determineNumPartitions = partitionsSpec.needsDeterminePartitions(false);

  if (determineIntervals || determineNumPartitions) {
    // Scan the input to find the intervals containing data and prime the HLL collectors.
    log.info("Determining intervals and shardSpecs");
    return createShardSpecsFromInput(
        jsonMapper,
        ingestionSchema,
        inputSource,
        tmpDir,
        granularitySpec,
        partitionsSpec,
        determineIntervals
    );
  }

  // Both intervals and shard counts are known up front; no input scan is needed.
  log.info("Skipping determine partition scan");
  switch (partitionsSpec.getType()) {
    case HASH:
      return PartialHashSegmentGenerateTask.createHashPartitionAnalysisFromPartitionsSpec(
          granularitySpec,
          (HashedPartitionsSpec) partitionsSpec,
          null // not overriding numShards
      );
    case LINEAR:
      return createLinearPartitionAnalysis(granularitySpec, (DynamicPartitionsSpec) partitionsSpec);
    default:
      throw new UOE("%s", partitionsSpec.getClass().getName());
  }
}
/**
 * Builds a {@link LinearPartitionAnalysis} with one bucket for each configured interval; linear
 * partitioning grows the number of segments dynamically while indexing.
 */
private static LinearPartitionAnalysis createLinearPartitionAnalysis(
    GranularitySpec granularitySpec,
    @Nonnull DynamicPartitionsSpec partitionsSpec
)
{
  final LinearPartitionAnalysis partitionAnalysis = new LinearPartitionAnalysis(partitionsSpec);
  for (Interval interval : granularitySpec.bucketIntervals().get()) {
    partitionAnalysis.updateBucket(interval, 1);
  }
  return partitionAnalysis;
}
/**
 * Scans the input once (via {@link #collectIntervalsAndShardSpecs}) and derives a {@link PartitionAnalysis}
 * giving the number of buckets per interval. For hash partitioning the bucket count is taken from numShards
 * or estimated from the HLL cardinality and maxRowsPerSegment; for linear partitioning it is always 1.
 * Range partitioning is not supported by this task.
 */
private PartitionAnalysis createShardSpecsFromInput(
ObjectMapper jsonMapper,
IndexIngestionSpec ingestionSchema,
InputSource inputSource,
File tmpDir,
GranularitySpec granularitySpec,
@Nonnull PartitionsSpec partitionsSpec,
boolean determineIntervals
) throws IOException
{
assert partitionsSpec.getType() != SecondaryPartitionType.RANGE;
long determineShardSpecsStartMillis = System.currentTimeMillis();
// One (optional) HLL collector per interval containing data; absent when partitions needn't be determined.
final Map<Interval, Optional<HyperLogLogCollector>> hllCollectors = collectIntervalsAndShardSpecs(
jsonMapper,
ingestionSchema,
inputSource,
tmpDir,
granularitySpec,
partitionsSpec,
determineIntervals
);
final PartitionAnalysis<Integer, ?> partitionAnalysis;
if (partitionsSpec.getType() == SecondaryPartitionType.LINEAR) {
partitionAnalysis = new LinearPartitionAnalysis((DynamicPartitionsSpec) partitionsSpec);
} else if (partitionsSpec.getType() == SecondaryPartitionType.HASH) {
partitionAnalysis = new HashPartitionAnalysis((HashedPartitionsSpec) partitionsSpec);
} else {
throw new UOE("%s", partitionsSpec.getClass().getName());
}
for (final Map.Entry<Interval, Optional<HyperLogLogCollector>> entry : hllCollectors.entrySet()) {
final Interval interval = entry.getKey();
final int numBucketsPerInterval;
if (partitionsSpec.getType() == SecondaryPartitionType.HASH) {
final HashedPartitionsSpec hashedPartitionsSpec = (HashedPartitionsSpec) partitionsSpec;
final HyperLogLogCollector collector = entry.getValue().orNull();
if (partitionsSpec.needsDeterminePartitions(false)) {
// Estimate bucket count so each bucket holds roughly maxRowsPerSegment distinct rows.
final long numRows = Preconditions.checkNotNull(collector, "HLL collector").estimateCardinalityRound();
final int nonNullMaxRowsPerSegment = partitionsSpec.getMaxRowsPerSegment() == null
? PartitionsSpec.DEFAULT_MAX_ROWS_PER_SEGMENT
: partitionsSpec.getMaxRowsPerSegment();
numBucketsPerInterval = (int) Math.ceil((double) numRows / nonNullMaxRowsPerSegment);
log.info(
"Estimated [%,d] rows of data for interval [%s], creating [%,d] shards",
numRows,
interval,
numBucketsPerInterval
);
} else {
// numShards was given explicitly; fall back to a single bucket when unset.
numBucketsPerInterval = hashedPartitionsSpec.getNumShards() == null ? 1 : hashedPartitionsSpec.getNumShards();
log.info("Creating [%,d] buckets for interval [%s]", numBucketsPerInterval, interval);
}
} else {
numBucketsPerInterval = 1;
}
partitionAnalysis.updateBucket(interval, numBucketsPerInterval);
}
log.info("Found intervals and shardSpecs in %,dms", System.currentTimeMillis() - determineShardSpecsStartMillis);
return partitionAnalysis;
}
/**
 * Reads every input row and returns a map from bucket interval to an HLL collector that estimates the
 * cardinality of rows in that interval. When partitions don't need to be determined, the values are
 * {@code Optional.absent()} and only the interval keys matter. Rows outside the configured intervals are
 * filtered out, unless intervals are being determined, in which case every parseable row is kept.
 */
private Map<Interval, Optional<HyperLogLogCollector>> collectIntervalsAndShardSpecs(
ObjectMapper jsonMapper,
IndexIngestionSpec ingestionSchema,
InputSource inputSource,
File tmpDir,
GranularitySpec granularitySpec,
@Nonnull PartitionsSpec partitionsSpec,
boolean determineIntervals
) throws IOException
{
// Sorted so the later per-interval processing happens in time order.
final Map<Interval, Optional<HyperLogLogCollector>> hllCollectors = new TreeMap<>(
Comparators.intervalsByStartThenEnd()
);
final Granularity queryGranularity = granularitySpec.getQueryGranularity();
// Drop null rows and, when intervals are fixed, rows whose timestamp falls in no configured bucket.
final Predicate<InputRow> rowFilter = inputRow -> {
if (inputRow == null) {
return false;
}
if (determineIntervals) {
return true;
}
final Optional<Interval> optInterval = granularitySpec.bucketInterval(inputRow.getTimestamp());
return optInterval.isPresent();
};
try (final CloseableIterator<InputRow> inputRowIterator = AbstractBatchIndexTask.inputSourceReader(
tmpDir,
ingestionSchema.getDataSchema(),
inputSource,
inputSource.needsFormat() ? getInputFormat(ingestionSchema) : null,
rowFilter,
determinePartitionsMeters,
determinePartitionsParseExceptionHandler
)) {
while (inputRowIterator.hasNext()) {
final InputRow inputRow = inputRowIterator.next();
final Interval interval;
if (determineIntervals) {
interval = granularitySpec.getSegmentGranularity().bucket(inputRow.getTimestamp());
} else {
final Optional<Interval> optInterval = granularitySpec.bucketInterval(inputRow.getTimestamp());
// this interval must exist since it passed the rowFilter
assert optInterval.isPresent();
interval = optInterval.get();
}
if (partitionsSpec.needsDeterminePartitions(false)) {
// Feed the hashed group key (query-granularity timestamp + dimensions) into the interval's collector.
hllCollectors.computeIfAbsent(interval, intv -> Optional.of(HyperLogLogCollector.makeLatestCollector()));
List<Object> groupKey = Rows.toGroupKey(
queryGranularity.bucketStart(inputRow.getTimestamp()).getMillis(),
inputRow
);
hllCollectors.get(interval).get()
.add(HASH_FUNCTION.hashBytes(jsonMapper.writeValueAsBytes(groupKey)).asBytes());
} else {
// we don't need to determine partitions but we still need to determine intervals, so add an Optional.absent()
// for the interval and don't instantiate a HLL collector
hllCollectors.putIfAbsent(interval, Optional.absent());
}
determinePartitionsMeters.incrementProcessed();
}
}
// These metrics are reported in generateAndPublishSegments()
if (determinePartitionsMeters.getThrownAway() > 0) {
log.warn("Unable to find a matching interval for [%,d] events", determinePartitionsMeters.getThrownAway());
}
if (determinePartitionsMeters.getUnparseable() > 0) {
log.warn("Unable to parse [%,d] events", determinePartitionsMeters.getUnparseable());
}
return hllCollectors;
}
/**
 * This method reads input data row by row and adds the read row to a proper segment using {@link BaseAppenderatorDriver}.
 * If there is no segment for the row, a new one is created. Segments can be published in the middle of reading inputs
 * if {@link DynamicPartitionsSpec} is used and one of below conditions are satisfied.
 *
 * <ul>
 * <li>
 * If the number of rows in a segment exceeds {@link DynamicPartitionsSpec#maxRowsPerSegment}
 * </li>
 * <li>
 * If the number of rows added to {@link BaseAppenderatorDriver} so far exceeds {@link DynamicPartitionsSpec#maxTotalRows}
 * </li>
 * </ul>
 * <p>
 * At the end of this method, all the remaining segments are published.
 *
 * @return the last {@link TaskStatus}
 */
private TaskStatus generateAndPublishSegments(
final TaskToolbox toolbox,
final DataSchema dataSchema,
final InputSource inputSource,
final File tmpDir,
final PartitionAnalysis partitionAnalysis
) throws IOException, InterruptedException
{
// FireDepartment is used here only as a metrics container for the build-segments phase.
final FireDepartment fireDepartmentForMetrics =
new FireDepartment(dataSchema, new RealtimeIOConfig(null, null), null);
FireDepartmentMetrics buildSegmentsFireDepartmentMetrics = fireDepartmentForMetrics.getMetrics();
if (toolbox.getMonitorScheduler() != null) {
final TaskRealtimeMetricsMonitor metricsMonitor = TaskRealtimeMetricsMonitorBuilder.build(
this,
fireDepartmentForMetrics,
buildSegmentsMeters
);
toolbox.getMonitorScheduler().addMonitor(metricsMonitor);
}
final PartitionsSpec partitionsSpec = partitionAnalysis.getPartitionsSpec();
final IndexTuningConfig tuningConfig = ingestionSchema.getTuningConfig();
final long pushTimeout = tuningConfig.getPushTimeout();
// Choose the segment allocator / sequence-name strategy matching the secondary partitioning type.
final SegmentAllocatorForBatch segmentAllocator;
final SequenceNameFunction sequenceNameFunction;
switch (partitionsSpec.getType()) {
case HASH:
case RANGE:
final SegmentAllocatorForBatch localSegmentAllocator = SegmentAllocators.forNonLinearPartitioning(
toolbox,
getDataSource(),
getId(),
dataSchema.getGranularitySpec(),
null,
(CompletePartitionAnalysis) partitionAnalysis
);
sequenceNameFunction = localSegmentAllocator.getSequenceNameFunction();
segmentAllocator = localSegmentAllocator;
break;
case LINEAR:
segmentAllocator = SegmentAllocators.forLinearPartitioning(
toolbox,
getId(),
null,
dataSchema,
getTaskLockHelper(),
ingestionSchema.getIOConfig().isAppendToExisting(),
partitionAnalysis.getPartitionsSpec()
);
sequenceNameFunction = segmentAllocator.getSequenceNameFunction();
break;
default:
throw new UOE("[%s] secondary partition type is not supported", partitionsSpec.getType());
}
// Publisher that transactionally inserts (and possibly overwrites) the pushed segments.
final TransactionalSegmentPublisher publisher = (segmentsToBeOverwritten, segmentsToPublish, commitMetadata) ->
toolbox.getTaskActionClient()
.submit(SegmentTransactionalInsertAction.overwriteAction(segmentsToBeOverwritten, segmentsToPublish));
// A supervising task (e.g. compaction) may ask us to track the appenderator under its own id.
String effectiveId = getContextValue(CompactionTask.CTX_KEY_APPENDERATOR_TRACKING_TASK_ID, null);
if (effectiveId == null) {
effectiveId = getId();
}
final Appenderator appenderator = BatchAppenderators.newAppenderator(
effectiveId,
toolbox.getAppenderatorsManager(),
buildSegmentsFireDepartmentMetrics,
toolbox,
dataSchema,
tuningConfig,
buildSegmentsMeters,
buildSegmentsParseExceptionHandler
);
// Tracks whether to abandon in-flight data (closeNow) instead of a graceful close in the finally block.
boolean exceptionOccurred = false;
try (final BatchAppenderatorDriver driver = BatchAppenderators.newDriver(appenderator, toolbox, segmentAllocator)) {
driver.startJob();
// Read the input and add rows to the driver; dynamic partitioning may push segments incrementally here.
InputSourceProcessor.process(
dataSchema,
driver,
partitionsSpec,
inputSource,
inputSource.needsFormat() ? getInputFormat(ingestionSchema) : null,
tmpDir,
sequenceNameFunction,
new DefaultIndexTaskInputRowIteratorBuilder(),
buildSegmentsMeters,
buildSegmentsParseExceptionHandler,
pushTimeout
);
// If we use timeChunk lock, then we don't have to specify what segments will be overwritten because
// it will just overwrite all segments overlapped with the new segments.
final Set<DataSegment> inputSegments = getTaskLockHelper().isUseSegmentLock()
? getTaskLockHelper().getLockedExistingSegments()
: null;
final boolean storeCompactionState = getContextValue(
Tasks.STORE_COMPACTION_STATE_KEY,
Tasks.DEFAULT_STORE_COMPACTION_STATE
);
final Function<Set<DataSegment>, Set<DataSegment>> annotateFunction =
compactionStateAnnotateFunction(
storeCompactionState,
toolbox,
ingestionSchema.getTuningConfig()
);
// Probably we can publish atomicUpdateGroup along with segments.
final SegmentsAndCommitMetadata published =
awaitPublish(driver.publishAll(inputSegments, publisher, annotateFunction), pushTimeout);
appenderator.close();
ingestionState = IngestionState.COMPLETED;
if (published == null) {
log.error("Failed to publish segments, aborting!");
errorMsg = "Failed to publish segments.";
toolbox.getTaskReportFileWriter().write(getId(), getTaskCompletionReports());
return TaskStatus.failure(
getId(),
errorMsg
);
} else {
log.info(
"Processed[%,d] events, unparseable[%,d], thrownAway[%,d].",
buildSegmentsMeters.getProcessed(),
buildSegmentsMeters.getUnparseable(),
buildSegmentsMeters.getThrownAway()
);
log.info("Published [%s] segments", published.getSegments().size());
log.debugSegments(published.getSegments(), "Published segments");
toolbox.getTaskReportFileWriter().write(getId(), getTaskCompletionReports());
return TaskStatus.success(getId());
}
}
catch (TimeoutException | ExecutionException e) {
exceptionOccurred = true;
throw new RuntimeException(e);
}
catch (Exception e) {
exceptionOccurred = true;
throw e;
}
finally {
if (exceptionOccurred) {
appenderator.closeNow();
} else {
appenderator.close();
}
}
}
/**
 * Waits for the publish future to complete. A timeout of 0 means "wait indefinitely".
 */
private static SegmentsAndCommitMetadata awaitPublish(
    ListenableFuture<SegmentsAndCommitMetadata> publishFuture,
    long publishTimeout
) throws ExecutionException, InterruptedException, TimeoutException
{
  return publishTimeout == 0
         ? publishFuture.get()
         : publishFuture.get(publishTimeout, TimeUnit.MILLISECONDS);
}
/**
 * Returns the non-null {@link InputFormat} from the ioConfig; throws NPE with "inputFormat" if absent.
 */
private static InputFormat getInputFormat(IndexIngestionSpec ingestionSchema)
{
return ingestionSchema.getIOConfig().getNonNullInputFormat();
}
/**
 * Ingestion spec for {@link IndexTask}: dataSchema + ioConfig + tuningConfig. Validates that the deprecated
 * parser and the newer inputSource/inputFormat configuration are not mixed, and substitutes an all-default
 * tuningConfig when none is given.
 */
public static class IndexIngestionSpec extends IngestionSpec<IndexIOConfig, IndexTuningConfig>
{
private final DataSchema dataSchema;
private final IndexIOConfig ioConfig;
private final IndexTuningConfig tuningConfig;
@JsonCreator
public IndexIngestionSpec(
@JsonProperty("dataSchema") DataSchema dataSchema,
@JsonProperty("ioConfig") IndexIOConfig ioConfig,
@JsonProperty("tuningConfig") IndexTuningConfig tuningConfig
)
{
super(dataSchema, ioConfig, tuningConfig);
// The deprecated parser and the newer inputSource cannot be combined.
if (dataSchema.getParserMap() != null && ioConfig.getInputSource() != null) {
throw new IAE("Cannot use parser and inputSource together. Try using inputFormat instead of parser.");
}
// When the inputSource needs a format, exactly one of parser or inputFormat must be present.
if (ioConfig.getInputSource() != null && ioConfig.getInputSource().needsFormat()) {
Checks.checkOneNotNullOrEmpty(
ImmutableList.of(
new Property<>("parser", dataSchema.getParserMap()),
new Property<>("inputFormat", ioConfig.getInputFormat())
)
);
}
this.dataSchema = dataSchema;
this.ioConfig = ioConfig;
// Fall back to an all-default tuning config when none was provided.
this.tuningConfig = tuningConfig == null ? new IndexTuningConfig() : tuningConfig;
}
@Override
@JsonProperty("dataSchema")
public DataSchema getDataSchema()
{
return dataSchema;
}
@Override
@JsonProperty("ioConfig")
public IndexIOConfig getIOConfig()
{
return ioConfig;
}
@Override
@JsonProperty("tuningConfig")
public IndexTuningConfig getTuningConfig()
{
return tuningConfig;
}
}
/**
 * IOConfig for {@link IndexTask}. Exactly one of the deprecated "firehose" or the newer "inputSource" must
 * be provided; "inputFormat" can only be used together with "inputSource".
 */
@JsonTypeName("index")
public static class IndexIOConfig implements BatchIOConfig
{
private static final boolean DEFAULT_APPEND_TO_EXISTING = false;
private final FirehoseFactory firehoseFactory;
private final InputSource inputSource;
private final InputFormat inputFormat;
private final boolean appendToExisting;
@JsonCreator
public IndexIOConfig(
@Deprecated @JsonProperty("firehose") @Nullable FirehoseFactory firehoseFactory,
@JsonProperty("inputSource") @Nullable InputSource inputSource,
@JsonProperty("inputFormat") @Nullable InputFormat inputFormat,
@JsonProperty("appendToExisting") @Nullable Boolean appendToExisting
)
{
// Exactly one of firehose/inputSource must be set, and inputFormat belongs to inputSource only.
Checks.checkOneNotNullOrEmpty(
ImmutableList.of(new Property<>("firehose", firehoseFactory), new Property<>("inputSource", inputSource))
);
if (firehoseFactory != null && inputFormat != null) {
throw new IAE("Cannot use firehose and inputFormat together. Try using inputSource instead of firehose.");
}
this.firehoseFactory = firehoseFactory;
this.inputSource = inputSource;
this.inputFormat = inputFormat;
this.appendToExisting = appendToExisting == null ? DEFAULT_APPEND_TO_EXISTING : appendToExisting;
}
// old constructor for backward compatibility
@Deprecated
public IndexIOConfig(FirehoseFactory firehoseFactory, @Nullable Boolean appendToExisting)
{
this(firehoseFactory, null, null, appendToExisting);
}
@Nullable
@JsonProperty("firehose")
@JsonInclude(Include.NON_NULL)
@Deprecated
public FirehoseFactory getFirehoseFactory()
{
return firehoseFactory;
}
@Nullable
@Override
@JsonProperty
public InputSource getInputSource()
{
return inputSource;
}
/**
 * Returns {@link InputFormat}. Can be null if {@link DataSchema#parserMap} is specified.
 * Also can be null in {@link InputSourceSampler}.
 */
@Nullable
@Override
@JsonProperty
public InputFormat getInputFormat()
{
return inputFormat;
}
/**
 * Returns the configured inputSource, or adapts the deprecated firehose into an {@link InputSource}.
 * Note the cast: the firehose is expected to be a FiniteFirehoseFactory in that case — it will throw a
 * ClassCastException otherwise.
 */
public InputSource getNonNullInputSource(@Nullable InputRowParser inputRowParser)
{
if (inputSource == null) {
return new FirehoseFactoryToInputSourceAdaptor(
(FiniteFirehoseFactory) firehoseFactory,
inputRowParser
);
} else {
return inputSource;
}
}
public InputFormat getNonNullInputFormat()
{
return Preconditions.checkNotNull(inputFormat, "inputFormat");
}
@Override
@JsonProperty
public boolean isAppendToExisting()
{
return appendToExisting;
}
}
/**
 * Tuning config for {@link IndexTask}. Consolidates several deprecated top-level partitioning properties
 * (targetPartitionSize, maxRowsPerSegment, maxTotalRows, numShards, partitionDimensions) into a single
 * {@code partitionsSpec}.
 */
public static class IndexTuningConfig implements AppenderatorConfig
{
private static final IndexSpec DEFAULT_INDEX_SPEC = new IndexSpec();
private static final int DEFAULT_MAX_PENDING_PERSISTS = 0;
private static final boolean DEFAULT_GUARANTEE_ROLLUP = false;
private static final boolean DEFAULT_REPORT_PARSE_EXCEPTIONS = false;
// 0 means "wait forever" when publishing; see awaitPublish().
private static final long DEFAULT_PUSH_TIMEOUT = 0;
private final AppendableIndexSpec appendableIndexSpec;
private final int maxRowsInMemory;
private final long maxBytesInMemory;
private final int maxColumnsToMerge;
// null if all partitionsSpec related params are null. see getDefaultPartitionsSpec() for details.
@Nullable
private final PartitionsSpec partitionsSpec;
private final IndexSpec indexSpec;
private final IndexSpec indexSpecForIntermediatePersists;
private final File basePersistDirectory;
private final int maxPendingPersists;
/**
 * This flag is to force _perfect rollup mode_. {@link IndexTask} will scan the whole input data twice to 1) figure
 * out proper shard specs for each segment and 2) generate segments. Note that perfect rollup mode basically assumes
 * that no more data will be appended in the future. As a result, in perfect rollup mode,
 * {@link HashBasedNumberedShardSpec} is used for shards.
 */
private final boolean forceGuaranteedRollup;
private final boolean reportParseExceptions;
private final long pushTimeout;
private final boolean logParseExceptions;
private final int maxParseExceptions;
private final int maxSavedParseExceptions;
@Nullable
private final SegmentWriteOutMediumFactory segmentWriteOutMediumFactory;
/**
 * Resolves the effective {@link PartitionsSpec} from either an explicit spec or the deprecated top-level
 * properties. Returns null when nothing was specified, so a default can be chosen lazily later.
 *
 * @throws IAE if the explicit spec is incompatible with the requested rollup mode
 */
@Nullable
private static PartitionsSpec getPartitionsSpec(
    boolean forceGuaranteedRollup,
    @Nullable PartitionsSpec partitionsSpec,
    @Nullable Integer maxRowsPerSegment,
    @Nullable Long maxTotalRows,
    @Nullable Integer numShards,
    @Nullable List<String> partitionDimensions
)
{
  if (partitionsSpec == null) {
    // No explicit partitionsSpec: build one from the deprecated top-level properties when any were given.
    if (forceGuaranteedRollup) {
      if (maxRowsPerSegment != null
          || numShards != null
          || (partitionDimensions != null && !partitionDimensions.isEmpty())) {
        return new HashedPartitionsSpec(maxRowsPerSegment, numShards, partitionDimensions);
      } else {
        return null;
      }
    } else {
      if (maxRowsPerSegment != null || maxTotalRows != null) {
        return new DynamicPartitionsSpec(maxRowsPerSegment, maxTotalRows);
      } else {
        return null;
      }
    }
  } else {
    if (forceGuaranteedRollup) {
      if (!partitionsSpec.isForceGuaranteedRollupCompatibleType()) {
        // Pass the class name as a format argument instead of concatenating it into the format text,
        // matching the "%s" style used elsewhere in this file (see the UOE throws): a '%' in the first
        // argument would otherwise be misinterpreted as a format specifier.
        throw new IAE("%s cannot be used for perfect rollup", partitionsSpec.getClass().getSimpleName());
      }
    } else {
      if (!(partitionsSpec instanceof DynamicPartitionsSpec)) {
        throw new IAE("DynamicPartitionsSpec must be used for best-effort rollup");
      }
    }
    return partitionsSpec;
  }
}
/**
 * Jackson creator. Maps the many deprecated top-level properties onto the consolidated private constructor:
 * rowFlushBoundary -> maxRowsInMemory, publishTimeout -> pushTimeout, and
 * targetPartitionSize/maxRowsPerSegment/maxTotalRows/numShards/partitionDimensions -> partitionsSpec.
 * targetPartitionSize and maxRowsPerSegment are mutually exclusive.
 */
@JsonCreator
public IndexTuningConfig(
@JsonProperty("targetPartitionSize") @Deprecated @Nullable Integer targetPartitionSize,
@JsonProperty("maxRowsPerSegment") @Deprecated @Nullable Integer maxRowsPerSegment,
@JsonProperty("appendableIndexSpec") @Nullable AppendableIndexSpec appendableIndexSpec,
@JsonProperty("maxRowsInMemory") @Nullable Integer maxRowsInMemory,
@JsonProperty("maxBytesInMemory") @Nullable Long maxBytesInMemory,
@JsonProperty("maxTotalRows") @Deprecated @Nullable Long maxTotalRows,
@JsonProperty("rowFlushBoundary") @Deprecated @Nullable Integer rowFlushBoundary_forBackCompatibility,
@JsonProperty("numShards") @Deprecated @Nullable Integer numShards,
@JsonProperty("partitionDimensions") @Deprecated @Nullable List<String> partitionDimensions,
@JsonProperty("partitionsSpec") @Nullable PartitionsSpec partitionsSpec,
@JsonProperty("indexSpec") @Nullable IndexSpec indexSpec,
@JsonProperty("indexSpecForIntermediatePersists") @Nullable IndexSpec indexSpecForIntermediatePersists,
@JsonProperty("maxPendingPersists") @Nullable Integer maxPendingPersists,
@JsonProperty("forceGuaranteedRollup") @Nullable Boolean forceGuaranteedRollup,
@JsonProperty("reportParseExceptions") @Deprecated @Nullable Boolean reportParseExceptions,
@JsonProperty("publishTimeout") @Deprecated @Nullable Long publishTimeout,
@JsonProperty("pushTimeout") @Nullable Long pushTimeout,
@JsonProperty("segmentWriteOutMediumFactory") @Nullable
SegmentWriteOutMediumFactory segmentWriteOutMediumFactory,
@JsonProperty("logParseExceptions") @Nullable Boolean logParseExceptions,
@JsonProperty("maxParseExceptions") @Nullable Integer maxParseExceptions,
@JsonProperty("maxSavedParseExceptions") @Nullable Integer maxSavedParseExceptions,
@JsonProperty("maxColumnsToMerge") @Nullable Integer maxColumnsToMerge
)
{
this(
appendableIndexSpec,
maxRowsInMemory != null ? maxRowsInMemory : rowFlushBoundary_forBackCompatibility,
maxBytesInMemory != null ? maxBytesInMemory : 0,
getPartitionsSpec(
forceGuaranteedRollup == null ? DEFAULT_GUARANTEE_ROLLUP : forceGuaranteedRollup,
partitionsSpec,
maxRowsPerSegment == null ? targetPartitionSize : maxRowsPerSegment,
maxTotalRows,
numShards,
partitionDimensions
),
indexSpec,
indexSpecForIntermediatePersists,
maxPendingPersists,
forceGuaranteedRollup,
reportParseExceptions,
pushTimeout != null ? pushTimeout : publishTimeout,
null,
segmentWriteOutMediumFactory,
logParseExceptions,
maxParseExceptions,
maxSavedParseExceptions,
maxColumnsToMerge
);
// Validate after delegation: both properties map to the same setting, so only one may be given.
Preconditions.checkArgument(
targetPartitionSize == null || maxRowsPerSegment == null,
"Can't use targetPartitionSize and maxRowsPerSegment together"
);
}
/** No-arg constructor (e.g. for Jackson); every field falls back to its documented default. */
private IndexTuningConfig()
{
  this(null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);
}
/**
 * Canonical constructor. Every parameter may be null, in which case the
 * corresponding default is substituted. All other constructors ultimately
 * delegate here.
 */
private IndexTuningConfig(
    @Nullable AppendableIndexSpec appendableIndexSpec,
    @Nullable Integer maxRowsInMemory,
    @Nullable Long maxBytesInMemory,
    @Nullable PartitionsSpec partitionsSpec,
    @Nullable IndexSpec indexSpec,
    @Nullable IndexSpec indexSpecForIntermediatePersists,
    @Nullable Integer maxPendingPersists,
    @Nullable Boolean forceGuaranteedRollup,
    @Nullable Boolean reportParseExceptions,
    @Nullable Long pushTimeout,
    @Nullable File basePersistDirectory,
    @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory,
    @Nullable Boolean logParseExceptions,
    @Nullable Integer maxParseExceptions,
    @Nullable Integer maxSavedParseExceptions,
    @Nullable Integer maxColumnsToMerge
)
{
  this.appendableIndexSpec = (appendableIndexSpec != null) ? appendableIndexSpec : DEFAULT_APPENDABLE_INDEX;
  this.maxRowsInMemory = (maxRowsInMemory != null) ? maxRowsInMemory : TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY;
  // 0 is a "not set" sentinel; the effective value is derived lazily.
  // @see #getMaxBytesInMemoryOrDefault()
  this.maxBytesInMemory = (maxBytesInMemory != null) ? maxBytesInMemory : 0;
  this.maxColumnsToMerge = (maxColumnsToMerge != null)
                           ? maxColumnsToMerge
                           : IndexMerger.UNLIMITED_MAX_COLUMNS_TO_MERGE;
  // Deliberately kept nullable; see getGivenOrDefaultPartitionsSpec().
  this.partitionsSpec = partitionsSpec;
  this.indexSpec = (indexSpec != null) ? indexSpec : DEFAULT_INDEX_SPEC;
  // Intermediate persists reuse the main index spec unless overridden;
  // note this reads the field assigned just above, not the parameter.
  this.indexSpecForIntermediatePersists = (indexSpecForIntermediatePersists != null)
                                          ? indexSpecForIntermediatePersists
                                          : this.indexSpec;
  this.maxPendingPersists = (maxPendingPersists != null) ? maxPendingPersists : DEFAULT_MAX_PENDING_PERSISTS;
  this.forceGuaranteedRollup = (forceGuaranteedRollup != null) ? forceGuaranteedRollup : DEFAULT_GUARANTEE_ROLLUP;
  this.reportParseExceptions = (reportParseExceptions != null)
                               ? reportParseExceptions
                               : DEFAULT_REPORT_PARSE_EXCEPTIONS;
  this.pushTimeout = (pushTimeout != null) ? pushTimeout : DEFAULT_PUSH_TIMEOUT;
  this.basePersistDirectory = basePersistDirectory;
  this.segmentWriteOutMediumFactory = segmentWriteOutMediumFactory;
  if (this.reportParseExceptions) {
    // Legacy strict mode: fail on the first parse error and keep at most one saved example.
    this.maxParseExceptions = 0;
    this.maxSavedParseExceptions = (maxSavedParseExceptions != null) ? Math.min(1, maxSavedParseExceptions) : 0;
  } else {
    this.maxParseExceptions = (maxParseExceptions != null)
                              ? maxParseExceptions
                              : TuningConfig.DEFAULT_MAX_PARSE_EXCEPTIONS;
    this.maxSavedParseExceptions = (maxSavedParseExceptions != null)
                                   ? maxSavedParseExceptions
                                   : TuningConfig.DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS;
  }
  this.logParseExceptions = (logParseExceptions != null)
                            ? logParseExceptions
                            : TuningConfig.DEFAULT_LOG_PARSE_EXCEPTIONS;
}
/**
 * Returns a copy of this config with the given base persist directory;
 * every other setting is carried over unchanged. This is how the directory
 * (which is not JSON-settable) gets injected at runtime.
 */
@Override
public IndexTuningConfig withBasePersistDirectory(File dir)
{
  return new IndexTuningConfig(
      appendableIndexSpec,
      maxRowsInMemory,
      maxBytesInMemory,
      partitionsSpec,
      indexSpec,
      indexSpecForIntermediatePersists,
      maxPendingPersists,
      forceGuaranteedRollup,
      reportParseExceptions,
      pushTimeout,
      dir,
      segmentWriteOutMediumFactory,
      logParseExceptions,
      maxParseExceptions,
      maxSavedParseExceptions,
      maxColumnsToMerge
  );
}
/**
 * Returns a copy of this config with the given partitionsSpec; every other
 * setting is carried over unchanged.
 */
public IndexTuningConfig withPartitionsSpec(PartitionsSpec partitionsSpec)
{
  return new IndexTuningConfig(
      appendableIndexSpec,
      maxRowsInMemory,
      maxBytesInMemory,
      partitionsSpec,
      indexSpec,
      indexSpecForIntermediatePersists,
      maxPendingPersists,
      forceGuaranteedRollup,
      reportParseExceptions,
      pushTimeout,
      basePersistDirectory,
      segmentWriteOutMediumFactory,
      logParseExceptions,
      maxParseExceptions,
      maxSavedParseExceptions,
      maxColumnsToMerge
  );
}
// --- Jackson-serialized getters; values were defaulted in the constructor. ---

@JsonProperty
@Override
public AppendableIndexSpec getAppendableIndexSpec()
{
  return appendableIndexSpec;
}

@JsonProperty
@Override
public int getMaxRowsInMemory()
{
  return maxRowsInMemory;
}

@JsonProperty
@Override
public long getMaxBytesInMemory()
{
  // 0 is the "not set" sentinel; the effective default is derived lazily downstream.
  return maxBytesInMemory;
}

@JsonProperty
@Nullable
@Override
public PartitionsSpec getPartitionsSpec()
{
  // May be null when the user supplied neither partitionsSpec nor any of the
  // deprecated partition properties; see getGivenOrDefaultPartitionsSpec().
  return partitionsSpec;
}

/**
 * Returns the configured partitionsSpec, or the default spec for the
 * configured rollup mode: hashed partitioning for guaranteed (perfect)
 * rollup, dynamic partitioning for best-effort rollup.
 */
public PartitionsSpec getGivenOrDefaultPartitionsSpec()
{
  if (partitionsSpec != null) {
    return partitionsSpec;
  }
  return forceGuaranteedRollup
         ? new HashedPartitionsSpec(null, null, null)
         : new DynamicPartitionsSpec(null, null);
}

@JsonProperty
@Override
public IndexSpec getIndexSpec()
{
  return indexSpec;
}

@JsonProperty
@Override
public IndexSpec getIndexSpecForIntermediatePersists()
{
  // Defaults to the main indexSpec when not explicitly configured.
  return indexSpecForIntermediatePersists;
}

@JsonProperty
@Override
public int getMaxPendingPersists()
{
  return maxPendingPersists;
}
/**
 * Always returns true, doesn't affect the version being built.
 * Kept (and serialized) only for backward compatibility of the JSON shape.
 */
@Deprecated
@JsonProperty
public boolean isBuildV9Directly()
{
  return true;
}

@JsonProperty
public boolean isForceGuaranteedRollup()
{
  return forceGuaranteedRollup;
}

@JsonProperty
@Override
public boolean isReportParseExceptions()
{
  return reportParseExceptions;
}

@JsonProperty
public long getPushTimeout()
{
  return pushTimeout;
}

@Nullable
@Override
@JsonProperty
public SegmentWriteOutMediumFactory getSegmentWriteOutMediumFactory()
{
  // Null means "use the system default medium".
  return segmentWriteOutMediumFactory;
}

@Override
@JsonProperty
public int getMaxColumnsToMerge()
{
  return maxColumnsToMerge;
}

@JsonProperty
public boolean isLogParseExceptions()
{
  return logParseExceptions;
}

@JsonProperty
public int getMaxParseExceptions()
{
  // Forced to 0 when reportParseExceptions is set; see the constructor.
  return maxParseExceptions;
}

@JsonProperty
public int getMaxSavedParseExceptions()
{
  // Capped at 1 when reportParseExceptions is set; see the constructor.
  return maxSavedParseExceptions;
}
/**
 * Return the max number of rows per segment. This returns null if it's not specified in tuningConfig.
 * Deprecated in favor of {@link #getGivenOrDefaultPartitionsSpec()}.
 */
@Nullable
@Override
@Deprecated
@JsonProperty
public Integer getMaxRowsPerSegment()
{
  return partitionsSpec == null ? null : partitionsSpec.getMaxRowsPerSegment();
}

/**
 * Return the max number of total rows in appenderator. This returns null if it's not specified in tuningConfig.
 * Deprecated in favor of {@link #getGivenOrDefaultPartitionsSpec()}.
 */
@Override
@Nullable
@Deprecated
@JsonProperty
public Long getMaxTotalRows()
{
  // Only dynamic partitioning carries a maxTotalRows setting.
  return partitionsSpec instanceof DynamicPartitionsSpec
         ? ((DynamicPartitionsSpec) partitionsSpec).getMaxTotalRows()
         : null;
}

/** Deprecated view onto the partitionsSpec: shard count of a hashed spec, else null. */
@Deprecated
@Nullable
@JsonProperty
public Integer getNumShards()
{
  return partitionsSpec instanceof HashedPartitionsSpec
         ? ((HashedPartitionsSpec) partitionsSpec).getNumShards()
         : null;
}

/** Deprecated view onto the partitionsSpec: partition dimensions of a hashed spec, else empty. */
@Deprecated
@JsonProperty
public List<String> getPartitionDimensions()
{
  return partitionsSpec instanceof HashedPartitionsSpec
         ? ((HashedPartitionsSpec) partitionsSpec).getPartitionDimensions()
         : Collections.emptyList();
}

@Override
public File getBasePersistDirectory()
{
  // Null until injected via withBasePersistDirectory().
  return basePersistDirectory;
}

@Override
public Period getIntermediatePersistPeriod()
{
  return new Period(Integer.MAX_VALUE); // intermediate persist doesn't make much sense for batch jobs
}
/**
 * Value equality over every configuration field. Keep the field list in sync
 * with {@link #hashCode()} so the equals/hashCode contract holds.
 */
@Override
public boolean equals(Object o)
{
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  IndexTuningConfig that = (IndexTuningConfig) o;
  return Objects.equals(appendableIndexSpec, that.appendableIndexSpec) &&
         maxRowsInMemory == that.maxRowsInMemory &&
         maxBytesInMemory == that.maxBytesInMemory &&
         maxColumnsToMerge == that.maxColumnsToMerge &&
         maxPendingPersists == that.maxPendingPersists &&
         forceGuaranteedRollup == that.forceGuaranteedRollup &&
         reportParseExceptions == that.reportParseExceptions &&
         pushTimeout == that.pushTimeout &&
         logParseExceptions == that.logParseExceptions &&
         maxParseExceptions == that.maxParseExceptions &&
         maxSavedParseExceptions == that.maxSavedParseExceptions &&
         Objects.equals(partitionsSpec, that.partitionsSpec) &&
         Objects.equals(indexSpec, that.indexSpec) &&
         Objects.equals(indexSpecForIntermediatePersists, that.indexSpecForIntermediatePersists) &&
         Objects.equals(basePersistDirectory, that.basePersistDirectory) &&
         Objects.equals(segmentWriteOutMediumFactory, that.segmentWriteOutMediumFactory);
}

/** Hashes the same fields that {@link #equals(Object)} compares. */
@Override
public int hashCode()
{
  return Objects.hash(
      appendableIndexSpec,
      maxRowsInMemory,
      maxBytesInMemory,
      maxColumnsToMerge,
      partitionsSpec,
      indexSpec,
      indexSpecForIntermediatePersists,
      basePersistDirectory,
      maxPendingPersists,
      forceGuaranteedRollup,
      reportParseExceptions,
      pushTimeout,
      logParseExceptions,
      maxParseExceptions,
      maxSavedParseExceptions,
      segmentWriteOutMediumFactory
  );
}

/** Diagnostic dump of all tuning fields (appendableIndexSpec intentionally omitted). */
@Override
public String toString()
{
  return "IndexTuningConfig{" +
         "maxRowsInMemory=" + maxRowsInMemory +
         ", maxBytesInMemory=" + maxBytesInMemory +
         ", maxColumnsToMerge=" + maxColumnsToMerge +
         ", partitionsSpec=" + partitionsSpec +
         ", indexSpec=" + indexSpec +
         ", indexSpecForIntermediatePersists=" + indexSpecForIntermediatePersists +
         ", basePersistDirectory=" + basePersistDirectory +
         ", maxPendingPersists=" + maxPendingPersists +
         ", forceGuaranteedRollup=" + forceGuaranteedRollup +
         ", reportParseExceptions=" + reportParseExceptions +
         ", pushTimeout=" + pushTimeout +
         ", logParseExceptions=" + logParseExceptions +
         ", maxParseExceptions=" + maxParseExceptions +
         ", maxSavedParseExceptions=" + maxSavedParseExceptions +
         ", segmentWriteOutMediumFactory=" + segmentWriteOutMediumFactory +
         '}';
}
}
}
| |
/*
* Copyright (c) 2003-2012 Fred Hutchinson Cancer Research Center
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fhcrc.cpl.toolbox.proteomics.feature;
import org.fhcrc.cpl.toolbox.datastructure.Pair;
import org.fhcrc.cpl.toolbox.proteomics.MSRun;
import org.fhcrc.cpl.toolbox.proteomics.feature.extraInfo.*;
import org.fhcrc.cpl.toolbox.proteomics.feature.FeatureGrouper;
import org.fhcrc.cpl.toolbox.proteomics.Clusterer2D;
import org.fhcrc.cpl.toolbox.ApplicationContext;
import org.fhcrc.cpl.toolbox.commandline.arguments.BooleanArgumentDefinition;
import org.fhcrc.cpl.toolbox.commandline.arguments.ArgumentValidationException;
import org.fhcrc.cpl.toolbox.proteomics.MassCalibrationUtilities;
import org.fhcrc.cpl.toolbox.proteomics.feature.filehandler.*;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.lang.math.IntRange;
import java.awt.*;
import java.io.*;
import java.util.*;
import java.util.List;
import org.apache.log4j.Logger;
/**
* User: migra
* Date: Sep 13, 2004
* Time: 3:05:56 PM
*/
public class FeatureSet implements Cloneable
{
static Logger _log = Logger.getLogger(FeatureSet.class);

// Supported on-disk feature-file formats.
public static final int FEATURE_FILE_FORMAT_MSINSPECTTSV = 0;
public static final int FEATURE_FILE_FORMAT_APML = 1;
public static final int FEATURE_FILE_FORMAT_HARDKLOR = 2;
public static final int DEFAULT_FEATURE_FILE_FORMAT = FEATURE_FILE_FORMAT_MSINSPECTTSV;

//maintain loading status
protected int _loadStatus = FEATURESET_LOAD_NOT_LOADED;
protected String _loadStatusMessage = null;

// The features themselves; kept sorted by (mz, scan) after a successful file load.
protected Feature[] _features;
// Arbitrary key/value metadata (source file, applied filters, quantitation params, ...).
private Map<String, Object> _properties = new HashMap<String, Object>();
private String _tag; // optional tag for this feature set
private Color _color;          // display color
private int _style = 0;        // display style index
private File _sourceFile;      // file this set was loaded from; may be null for in-memory sets
private boolean _displayed = true;

//loading status codes
public static final int FEATURESET_LOAD_NOT_LOADED = -1;
public static final int FEATURESET_LOAD_SUCCESS = 0;
public static final int FEATURESET_LOAD_ERROR_FILE_NOT_FOUND = 1;
public static final int FEATURESET_LOAD_ERROR_BAD_FILE_FORMAT = 2;
public static final int FEATURESET_LOAD_ERROR_NO_FEATURES_FOUND = 3;
public static final int FEATURESET_LOAD_ERROR_UNKNOWN = 4;

//different types of intensities to use in quantitation
public static final int TOTAL_INTENSITY=0;
public static final int MAX_INTENSITY=1;
public static final int RECALCULATED_INTENSITY=2;
//default to totalIntensity
public static final int DEFAULT_INTENSITY_TYPE=TOTAL_INTENSITY;

// How many daltons to extract from run on either side of each feature,
// when comparing features
static final int MZ_WINDOW_SIZE = 3;

// Each scan contains a list of (mz, intensity) pairs. The m/z values are typically not sampled
// regularly, so we often resample onto a regular grid before doing other processing. This value
// sets the number of samples to provide per one m/z unit. The value 36 is typical throughout
// msInspect (changing it could actually be a little tricky).
static final int RESAMPLING_FREQUENCY = 36;

//track the known extra information types for this FeatureSet
protected List<FeatureExtraInformationDef> extraInformationTypes;

//no-arg constructor used by feature-file handlers
public FeatureSet()
{
}
/**
 * Loads a FeatureSet from a file. Never throws on failure: the outcome is
 * reported through the load-status code and message (see getLoadStatus()),
 * so callers must check the status before using the set.
 *
 * @param file  feature file to load; null is tolerated and reported as a load error
 * @param color display color for this set
 */
public FeatureSet(File file, Color color)
{
    //initialize loading status values
    setLoadStatus(FEATURESET_LOAD_NOT_LOADED);
    setLoadStatusMessage("Features not yet loaded");
    //this used to throw a NullPointerException
    if (file == null)
    {
        setLoadStatus(FEATURESET_LOAD_ERROR_FILE_NOT_FOUND);
        setLoadStatusMessage("Error loading features: no file specified");
        return;
    }
    try
    {
        _sourceFile = file;
        // loadFeatureFile sets the load status itself
        loadFeatureFile(file);
        //check for load success before continuing
        if (getLoadStatus() == FEATURESET_LOAD_SUCCESS)
        {
            // Much downstream code (e.g. findNearestFeatureIndex) relies on this sort order.
            Arrays.sort(_features, new Feature.MzScanAscComparator());
            _color = color;
        }
    }
    catch (Exception e)
    {
        // Broad catch is deliberate: any loader failure becomes a status code, not a throw.
        setLoadStatus(FEATURESET_LOAD_ERROR_UNKNOWN);
        _log.error("Feature-loading exception",e);
        setLoadStatusMessage("Unknown error loading features from file ");
    }
    if (getLoadStatus() != FEATURESET_LOAD_SUCCESS)
    {
        //all the non-success statuses require the filename appended
        setLoadStatusMessage(getLoadStatusMessage() + file.getName());
    }
}
/**
 * Loads a FeatureSet from a file with no display color.
 * NOTE(review): the delegate constructor reports failures via load status and
 * does not actually throw, so the {@code throws Exception} here appears vestigial.
 */
public FeatureSet(File file) throws Exception
{
    this(file, null);
}
/**
 * Wraps an existing feature array (not copied). The set's extra-information
 * types are inferred from the features' properties; no source file is set.
 */
public FeatureSet(Feature[] features)
{
    _features = features;
    inferExtraInformationTypesFromFeatures();
}
/**
 * For each feature, determine its set of extra information types,
 * using the property names set on the feature.
 *
 * Replaces this FeatureSet's full list of information types with the union
 * of the types found on the features (duplicates collapsed via a Set).
 */
public void inferExtraInformationTypesFromFeatures()
{
    Set<FeatureExtraInformationDef> extraInfoSet =
            new HashSet<FeatureExtraInformationDef>();
    for (Feature feature : _features)
    {
        for (FeatureExtraInformationDef featureInfoDef :
                feature.determineExtraInformationTypes())
        {
            extraInfoSet.add(featureInfoDef);
        }
    }
    // Rebuild the list from scratch so stale types don't linger.
    removeAllExtraInformationTypes();
    for (FeatureExtraInformationDef infoDef : extraInfoSet)
    {
        addExtraInformationType(infoDef);
    }
}
/** Wraps an existing feature array (not copied) and assigns a display color. */
public FeatureSet(Feature[] features, Color color)
{
    this(features);
    _color = color;
}
public FeatureSet(Spectrum.Peak[] peaks, Color color)
{
_color = color;
_features = new Feature[peaks.length];
for (int i = 0; i < peaks.length; i++)
{
Spectrum.Peak p = peaks[i];
_features[i] = new Feature(p.scan, p.scan, p.scan, p.mz, p.intensity, 1, 0.0F, p.intensity);
}
}
// --- Load-status accessors; see the FEATURESET_LOAD_* constants. ---

/** Current load status, one of the FEATURESET_LOAD_* codes. */
public int getLoadStatus()
{
    return _loadStatus;
}

public void setLoadStatus(int loadStatus)
{
    _loadStatus = loadStatus;
}

/** Human-readable companion to the load-status code. */
public String getLoadStatusMessage()
{
    return _loadStatusMessage;
}

public void setLoadStatusMessage(String loadStatusMessage)
{
    _loadStatusMessage = loadStatusMessage;
}
/**
 * Populate the Time attribute of every Feature in a FeatureSet
 * with the time corresponding to its scan.
 *
 * This is rather inefficient. It would be much, much better if MSRun
 * kept ahold of a map of scan numbers to times.
 * //TODO: when we start getting pepXml files with retention times
 * //TODO: populated, need to check and see if they're populated
 * //TODO: before going back to run
 * @param run run to look retention times up in
 */
public void populateTimesForMS2Features(MSRun run)
{
    for (Feature feature : getFeatures())
    {
        //first hoping that time is populated.
        //If not (if time <= 0), recalculating from scan
        //if this throws an NPE, so be it
        if (feature.getTime() <= 0)
        {
            int ms2ScanIndex =
                    run.getIndexForMS2ScanNum(feature.getScan());
            // A negative result indicates an inexact match; flip the sign to
            // use the nearest index. NOTE(review): assumes the negated value is
            // a valid index into the MS2 scan array — confirm against MSRun.
            if (ms2ScanIndex < 0)
                ms2ScanIndex = -ms2ScanIndex;
            MSRun.MSScan scan = run.getMS2Scan(ms2ScanIndex);
            feature.setTime((float)
                    scan.getDoubleRetentionTime());
        }
    }
}
/**
 * Populate the startTime and endTime properties of every Feature in a FeatureSet
 * with the times corresponding to its start and end scans.
 * @param run run to look retention times up in
 */
public void populateTimesForMS1Features(MSRun run)
{
    for (Feature feature : getFeatures())
    {
        //first, set the time appropriately (if it's already set, believe that setting)
        //Then, check whether scanFirst and scanLast are the same as scan. If so, set the
        //start and end times the same. Otherwise, check the run to find the real ones out.
        //All this complexity is to avoid hitting the run, which is expensive.
        if (feature.getTime() <= 0)
            feature.setTime((float) run.getScan(run.getIndexForScanNum(feature.getScan())).getDoubleRetentionTime());
        // Start time: reuse the apex time when the feature spans a single scan.
        if (feature.scanFirst == feature.scan)
            TimeExtraInfoDef.setStartTime(feature, feature.getTime());
        else
        {
            int fScanFirstIndex = run.getIndexForScanNum(feature.scanFirst);
            TimeExtraInfoDef.setStartTime(feature,
                    run.getScan(fScanFirstIndex).getDoubleRetentionTime());
        }
        // End time: same strategy as start time.
        if (feature.scanLast == feature.scan)
            TimeExtraInfoDef.setEndTime(feature, feature.getTime());
        else
        {
            int fScanLastIndex = run.getIndexForScanNum(feature.scanLast);
            TimeExtraInfoDef.setEndTime(feature,
                    run.getScan(fScanLastIndex).getDoubleRetentionTime());
        }
    }
}
/**
 *
 * Select a subset of a feature array.
 * Input array MUST be sorted with the Spectrum.Feature.MzScanAscComparator
 *
 * A feature is kept only if it satisfies EVERY criterion of the selector
 * (intensity, charge, m/z, mass, peak count, scan range, KL score, scan
 * count, total intensity, time, PeptideProphet, AMT FDR, mass-defect
 * deviation, sum-squares distance, accurate-m/z flag).
 *
 * @param features candidate features, sorted as noted above
 * @param sel What features to select. Features will not be merged
 * @return the accepted features, in their original order
 */
public static Feature[] selectFeatures(Feature[] features, FeatureSelector sel)
{
    ArrayList<Feature> list = new ArrayList<Feature>();
    for (Feature f : features)
    {
        if (f.intensity >= sel.getMinIntensity() &&
                f.charge <= sel.getMaxCharge() && f.charge >= sel.getMinCharge() &&
                f.mz >= sel.getMinMz() && f.mz <= sel.getMaxMz() &&
                f.mass >= sel.getMinMass() && f.mass <= sel.getMaxMass() &&
                f.peaks >= sel.getMinPeaks() && f.peaks <= sel.getMaxPeaks() &&
                f.scan >= sel.getScanFirst() && f.scan <= sel.getScanLast() &&
                f.kl <= sel.getMaxKL() && f.scanCount >= sel.getMinScans() &&
                f.totalIntensity >= sel.getMinTotalIntensity() &&
                f.time >= sel.getMinTime() && f.time <= sel.getMaxTime() &&
                MS2ExtraInfoDef.getPeptideProphet(f) >= sel.getMinPProphet() &&
                // AMT FDR filter is a no-op when the threshold is 1 (i.e. disabled)
                (sel.getMaxAMTFDR() == 1 || (AmtExtraInfoDef.hasMatchFDR(f) &&
                        AmtExtraInfoDef.getMatchFDR(f) < sel.getMaxAMTFDR())) &&
                // Mass-defect filter is a no-op when the threshold is MAX_VALUE (i.e. disabled)
                (sel.getMaxMassDeviationPPM() == Integer.MAX_VALUE ||
                        Math.abs(MassCalibrationUtilities.calculateMassDefectDeviationPPM(f.getMass(),
                                MassCalibrationUtilities.DEFAULT_THEORETICAL_MASS_WAVELENGTH)) <=
                                sel.getMaxMassDeviationPPM()) &&
                f.getSumSquaresDist() <= sel.getMaxSumSquaresDist() &&
                (!sel.isAccurateMzOnly() || f.isAccurateMZ())
        )
        {
            list.add(f);
        }
    }
    return list.toArray(new Feature[list.size()]);
}
/**
 * Repeatedly merges features that fall within the selector's scan/m-z gap
 * tolerances into feature ranges, iterating to a fixed point (a pass that
 * merges nothing). Input must be sorted by MzScanAscComparator. Ranges
 * shorter than sel.getMinScans() scans are dropped at the end.
 *
 * @param features sorted candidate features; null is tolerated (returns null)
 * @param sel      selection criteria plus maxScanGap / maxMzGap merge tolerances
 * @return merged, filtered ranges re-sorted by (mz, scan)
 */
public static Feature[] mergeFeatures(Feature[] features, FeatureSelector sel)
{
    if (null == features)
        return null;
    ArrayList<Feature> featureRangeList;
    while (true) //Loop until we can't compress any more
    {
        // Index of the first range still close enough (in m/z) to match
        // anything later in this sorted pass; earlier ranges are skipped.
        int trail = 0;
        featureRangeList = new ArrayList<Feature>();
        for (int i = 0; i < features.length; i++)
        {
            Feature f = features[i];
            if (f.intensity >= sel.getMinIntensity() && f.charge <= sel.getMaxCharge() &&
                    f.mz >= sel.getMinMz() && f.mz <= sel.getMaxMz() && f.kl <= sel.getMaxKL()
                    && f.peaks >= sel.getMinPeaks()
                    && f.scan >= sel.getScanFirst() && f.scan <= sel.getScanLast()
                    //TODO: properly this should be handled in such a way that I don't have to
                    //reference this column name
                    && f.getIntProperty("peptideprophet",0) >= sel.getMinPProphet())
            {
                boolean newRange = true;
                //Scan all the feature ranges & merge.
                int j;
                for (j = trail; j < featureRangeList.size(); j++)
                {
                    Feature fr = (Feature) featureRangeList.get(j);
                    // NOTE(review): named "ppmGap" but this is a plain m/z
                    // difference in Daltons, matching sel.maxMzGap's units.
                    float ppmGap = (f.mz - fr.mz);
                    if (ppmGap > sel.maxMzGap)
                    {
                        //Don't need to look at this next time since we're sorted.
                        trail = j + 1;
                        continue;
                    }
                    if (fr.isFeatureInRange(f, sel.maxScanGap, sel.maxMzGap))
                    {
                        fr.addFeatureToRange(f);
                        newRange = false;
                        break;
                    }
                    // Ranges are sorted by m/z, so once fr is too far above f
                    // no later range can match either.
                    if (-ppmGap > sel.maxMzGap)
                        break;
                }
                if (newRange)
                    featureRangeList.add(new Feature(f));
            }
        }
        if (featureRangeList.size() == features.length)
            break; //OK, no more compressing can be done
        features = (Feature[]) featureRangeList.toArray(new Feature[featureRangeList.size()]);
        Arrays.sort(features, new Feature.MzScanAscComparator());
    }
    //Now filter by length
    ArrayList<Feature> filteredByLength = new ArrayList<Feature>();
    for (Feature fr : featureRangeList)
    {
        if (fr.getScanCount() >= sel.getMinScans())
            filteredByLength.add(fr);
    }
    //Re-sort because merging might lead to subtly out-of-order peaks
    Feature[] ranges = (Feature[]) filteredByLength.toArray(new Feature[filteredByLength.size()]);
    Arrays.sort(ranges, new Feature.MzScanAscComparator());
    return ranges;
}
/**
 * Returns the features matching the selector, or null if this set holds no
 * features at all.
 */
public Feature[] getFeatures(FeatureSelector sel)
{
    if (null == _features)
        return null;
    return selectFeatures(_features, sel);
}

/** Returns the backing feature array (not a copy). */
public Feature[] getFeatures()
{
    return _features;
}

/** Replaces the backing feature array (not copied). */
public void setFeatures(Feature[] features)
{
    _features = features;
}

/** Optional free-form tag for this feature set. */
public String getTag()
{
    return _tag;
}

public void setTag(String tag)
{
    _tag = tag;
}
/**
 * Returns a new FeatureSet containing only the features accepted by the
 * selector. The result is a shallow clone of this set whose feature array is
 * replaced; its properties map records the filter string applied and, when a
 * source file is known, its original path under "origSourceFile".
 */
public FeatureSet filter(FeatureSelector sel)
{
    FeatureSet filtered = (FeatureSet) this.clone();
    filtered.setFeatures(getFeatures(sel));
    filtered.setColor(this.getColor());
    // Copy the properties so the clone stops sharing the original's map.
    Map<String, Object> props = new HashMap<String, Object>(this.getProperties());
    if (null != this.getSourceFile())
        props.put("origSourceFile", this.getSourceFile().getPath());
    props.put("filter", sel.toString());
    filtered.setProperties(props);
    return filtered;
}
/** Finds the feature nearest to (scan, mz) with no distance limit. */
public Feature findNearestFeature(int scan, float mz)
{
    return findNearestFeature(scan, mz, Integer.MAX_VALUE, Float.MAX_VALUE);
}
/**
 * Finds the index of the feature nearest to (scan, mz), searching outward in
 * both directions from the binary-search insertion point in the (mz, scan)
 * sorted array. Distance is Euclidean over (scanDist, mzDist) — note the two
 * axes have different units, so this weights one scan equal to one m/z unit.
 *
 * @param maxScanDistance candidates farther than this in scans are skipped
 * @param maxMzDistance   candidates farther than this in m/z are skipped
 * @return index of the nearest feature, or -1 if none qualifies
 */
public int findNearestFeatureIndex(int scan, float mz, int maxScanDistance, float maxMzDistance)
{
    Feature feature = new Feature(scan, mz, 1);
    int index = Arrays.binarySearch(_features, feature, new Feature.MzScanAscComparator());
    double minDistance = Float.MAX_VALUE;
    int nearestFeature = -1;
    if (index >= 0)
        return index;
    int pos = -index - 1;
    //Didn't find an exact match. Search for nearest feature in 2 dimensions
    // Walk downward in m/z from the insertion point.
    for (int i = pos - 1; i >= 0; i--)
    {
        Feature f = _features[i];
        float mzDist = Math.abs(mz - f.mz);
        // Since the array is mz-sorted and total distance >= mzDist, once the
        // m/z gap exceeds the best distance so far nothing closer can follow.
        if (mzDist > maxMzDistance || mzDist > minDistance)
            break;
        int scanDist = Math.abs(scan - f.scan);
        if (scanDist > maxScanDistance)
            continue;
        double distance = Math.sqrt(scanDist * scanDist + mzDist * mzDist);
        if (distance < minDistance)
        {
            minDistance = distance;
            nearestFeature = i;
        }
    }
    // Walk upward in m/z from the insertion point.
    for (int i = pos; i < _features.length; i++)
    {
        Feature f = _features[i];
        float mzDist = Math.abs(mz - f.mz);
        if (mzDist > minDistance || mzDist > maxMzDistance) //Not going to get any closer since sorted by Mz
            return nearestFeature;
        int scanDist = Math.abs(scan - f.scan);
        if (scanDist > maxScanDistance)
            continue;
        double distance = Math.sqrt(scanDist * scanDist + mzDist * mzDist);
        if (distance < minDistance)
        {
            minDistance = distance;
            nearestFeature = i;
        }
    }
    return nearestFeature;
}
/**
 * Finds the feature nearest to (scan, mz) within the given scan/m-z limits,
 * or null if no feature qualifies.
 */
public Feature findNearestFeature(int scan, float mz, int maxScanDistance, float maxMzDistance)
{
    int index = findNearestFeatureIndex(scan, mz, maxScanDistance, maxMzDistance);
    return index < 0 ? null : _features[index];
}
/**
 * Shallow copy: the features array and the properties map are SHARED with
 * the original, not duplicated (the explicit setters below just re-assign
 * what super.clone() already copied, for clarity). Use {@link #deepCopy()}
 * for an independent copy of the features.
 */
@Override
public Object clone()
{
    try
    {
        FeatureSet fs = (FeatureSet) super.clone();
        fs.setProperties(this._properties);
        fs.setTag(this._tag);
        fs.setSourceFile(this._sourceFile); // Used for column headers
        return fs;
    }
    catch (CloneNotSupportedException x)
    {
        // FeatureSet implements Cloneable, so this cannot happen. The old code
        // returned null here, which merely deferred an NPE to callers such as
        // filter() that dereference the result unconditionally.
        throw new AssertionError("FeatureSet implements Cloneable", x);
    }
}
/**
 * Deep-copies everything except the properties map. The properties map
 * is shallow-copied (same value objects, new map) because nothing guarantees
 * the property values clone() nicely.
 *
 * Fixes over the previous version: sets built in memory (e.g. via
 * {@code FeatureSet(Feature[])}) have a null source file, which used to
 * trigger a NullPointerException here; the raw HashMap and manual key loop
 * are replaced by a typed copy constructor.
 *
 * @return an independent copy of this set
 */
public FeatureSet deepCopy()
{
    Feature[] features = new Feature[_features.length];
    for (int i = 0; i < _features.length; i++)
        features[i] = (Feature) _features[i].clone();
    FeatureSet fs = new FeatureSet(features, _color);
    fs.setDisplayed(_displayed);
    // Guard against in-memory sets with no backing file.
    if (_sourceFile != null)
        fs.setSourceFile(new File(_sourceFile.getAbsolutePath()));
    //shallow copy of the properties map: new map, shared values
    fs.setProperties(new HashMap<String, Object>(_properties));
    return fs;
}
/**
 * Combine features that represent the same peptide. Features
 * must be within a scanDiff,massDiff window to be considered same.
 * <p/>
 * This removes redundancy caused by multiple charge states. It
 * also combines features of the same mass/charge that are close
 * together (e.g. a small feature that is split by ion competition)
 * <p/>
 * CONSIDER(mbellew) the mass (mz) tolerance could be tighter
 * for features w/ same charge
 *
 * @param scanDiff
 * @param massDiff
 * @param sumIntensities If this is true, resulting features have intensity and
 * totalIntensity values that are the sums of component features. If false,
 * resulting features retain their original intensities
 * charge state
 * @return a clone of this set whose features are the collapsed buckets
 */
public FeatureSet deconvolute(int scanDiff, double massDiff, boolean sumIntensities)
{
    // Cluster the features in (mass, scan) space; each bucket becomes one output feature.
    FeatureGrouper grouper = new FeatureGrouper();
    grouper.addSet(this);
    grouper.setGroupByMass(true);
    grouper.split2D(massDiff, scanDiff);
    Clusterer2D.BucketSummary[] buckets = grouper.summarize();
    Feature[] deconvoluted = new Feature[buckets.length];
    int numPeptideConflicts = 0;
    int numPreservedPeptides = 0;
    for (int i = 0; i < buckets.length; i++)
    {
        Clusterer2D.BucketSummary bucket = buckets[i];
        Feature deconvolutedFeature = null;
        if (bucket.featureCount == 1)
        {
            // Singleton bucket: just clone the feature.
            deconvolutedFeature = (Feature) FeatureGrouper.getFeatures(bucket)[0].clone();
            deconvolutedFeature.setChargeStates(1);
        }
        else
        {
            // Multi-feature bucket: keep the member with the highest total
            // intensity as the representative, while accumulating intensity
            // sums, charge-state counts, and a human-readable description.
            Feature[] bucketFeatures = FeatureGrouper.getFeatures(bucket);
            Feature best = bucketFeatures[0];
            float sumIntensity = 0.0f;
            float sumTotalIntensity = 0.0f;
            //NOTE: this won't work with stuff of charge > 10. Then again, what will?
            int[] chargeStateCounts = new int[10];
            String description = "";
            for (Feature f : bucketFeatures)
            {
                if (description.length() > 0)
                    description += ", ";
                chargeStateCounts[f.charge]++;
                description += f.charge;
                if (null != f.getDescription())
                    description += " (" + f.getDescription() + ")";
                sumIntensity += f.intensity;
                sumTotalIntensity += f.totalIntensity;
                if (f.totalIntensity > best.totalIntensity)
                    best = f;
            }
            deconvolutedFeature = (Feature) best.clone();
            int numChargeStates = 0;
            for (int j=0; j<chargeStateCounts.length; j++)
                if (chargeStateCounts[j] > 0)
                    numChargeStates++;
            if (sumIntensities)
            {
                deconvolutedFeature.setIntensity(sumIntensity);
                deconvolutedFeature.setTotalIntensity(sumTotalIntensity);
            }
            deconvolutedFeature.setChargeStates(numChargeStates);
            deconvolutedFeature.setDescription(description);
            //if there's MS2 data in this FeatureSet, then we need to make sure
            //that the collapsed feature contains the peptide and protein ID carried
            //by its components.
            //If there are conflicts, we leave the existing ID on the collapsed feature
            //alone, or if it had none, don't assign
            //TODO: somehow move this to MS2ExtraInfoDef?
            if (this.hasExtraInformationType(MS2ExtraInfoDef.getSingletonInstance()))
            {
                Set<String> featurePeptides = new HashSet<String>();
                Set<String> featureProteins = new HashSet<String>();
                for (Feature f : bucketFeatures)
                {
                    String featurePeptide = MS2ExtraInfoDef.getFirstPeptide(f);
                    if (featurePeptide != null)
                    {
                        featurePeptides.add(featurePeptide);
                        String featureProtein = MS2ExtraInfoDef.getFirstProtein(f);
                        if (featureProtein != null)
                            featureProteins.add(featureProtein);
                    }
                }
                // Assign a peptide only when the members agree unanimously and
                // the representative doesn't already carry one.
                if (featurePeptides.size() == 1 &&
                        MS2ExtraInfoDef.getFirstPeptide(deconvolutedFeature) == null)
                {
                    MS2ExtraInfoDef.setSinglePeptide(deconvolutedFeature,
                            featurePeptides.iterator().next());
                    numPreservedPeptides++;
                    if (featureProteins.size() == 1 &&
                            MS2ExtraInfoDef.getFirstProtein(deconvolutedFeature) == null)
                        MS2ExtraInfoDef.addProtein(deconvolutedFeature,
                                featureProteins.iterator().next());
                }
                else
                {
                    if (featurePeptides.size() > 1)
                        numPeptideConflicts++;
                }
            }
        }
        // Remember which source features this output feature was built from.
        deconvolutedFeature.comprised = FeatureGrouper.getFeatures(bucket);
        deconvoluted[i] = deconvolutedFeature;
    }
    //reporting on peptides preserved and conflicts
    if (this.hasExtraInformationType(MS2ExtraInfoDef.getSingletonInstance()))
    {
        _log.debug("deconvolute: peptides actively preserved: " + numPreservedPeptides);
        _log.debug("deconvolute: peptide conflicts: " + numPeptideConflicts);
    }
    FeatureSet fs = (FeatureSet) this.clone();
    //Make map modifiable & set properties.
    Map props = new HashMap();
    props.putAll(this.getProperties());
    if (null != this.getSourceFile())
        props.put("origSourceFile", this.getSourceFile());
    props.put("deconvoluteScanDiff", String.valueOf(scanDiff));
    props.put("deconvoluteMassDiff", String.valueOf(massDiff));
    fs.setFeatures(deconvoluted);
    fs.setProperties(props);
    return fs;
}
/**
 * Relative quantitation using ICAT label defaults...
 */
public FeatureSet icat()
{
    return quant(AnalyzeICAT.icatLabel);
}
/**
 * Relative quantitation using an explicit light/heavy label, without MS1 run access.
 */
public FeatureSet quant(float light, float heavy, char residue, int maxLabelCount)
{
    return quant(light, heavy, residue, maxLabelCount, null);
}

/**
 * Relative quantitation using an explicit light/heavy label.
 *
 * @param light         light-label mass
 * @param heavy         heavy-label mass (label delta = heavy - light)
 * @param residue       labeled residue
 * @param maxLabelCount maximum labels per peptide
 * @param run           MS1 run, needed only for recalculated intensities; may be null
 */
public FeatureSet quant(float light, float heavy, char residue, int maxLabelCount, MSRun run)
{
    float delta = heavy - light;
    AnalyzeICAT.IsotopicLabel label = new AnalyzeICAT.IsotopicLabel(light, delta, residue, maxLabelCount);
    return quant(label, run);
}

/** Relative quantitation with a prebuilt label and no MS1 run access. */
public FeatureSet quant(AnalyzeICAT.IsotopicLabel label)
{
    return quant(label, null);
}
//Sorts (light, heavy) feature pairs by the first scan of their overlapping
//scan range, ascending. Processing pairs in scan order keeps MS1 data access
//localized, which makes reading the run faster.
static Comparator comparePairScanAsc = new Comparator()
{
    public int compare(Object a, Object b)
    {
        Pair pairA = (Pair) a;
        Pair pairB = (Pair) b;
        IntRange rangeA = Feature.findOverlappingScanRange((Feature) pairA.first, (Feature) pairA.second);
        IntRange rangeB = Feature.findOverlappingScanRange((Feature) pairB.first, (Feature) pairB.second);
        //A pair with no overlapping range sorts as if it started at scan 0
        int scanA = (rangeA == null) ? 0 : rangeA.getMinimumInteger();
        int scanB = (rangeB == null) ? 0 : rangeB.getMinimumInteger();
        if (scanA < scanB)
            return -1;
        if (scanA > scanB)
            return 1;
        return 0;
    }
};
//by default, use totalIntensity of each partner
public FeatureSet quant(AnalyzeICAT.IsotopicLabel label, MSRun run)
{
    return quant(label, TOTAL_INTENSITY, run);
}

/**
 * Relative quantitation with the default mass/time pairing tolerances.
 *
 * @param intensityType one of TOTAL_INTENSITY, MAX_INTENSITY, RECALCULATED_INTENSITY
 */
public FeatureSet quant(AnalyzeICAT.IsotopicLabel label, int intensityType, MSRun run)
{
    return quant(label, intensityType, run, AnalyzeICAT.DEFAULT_DELTA_MASS,
            AnalyzeICAT.DEFAULT_DELTA_MASS_TYPE, AnalyzeICAT.DEFAULT_DELTA_TIME);
}
/**
 * Relative quantitation using an explicit isotopic label.
 *
 * Pairs light/heavy features via AnalyzeICAT, computes a light/heavy
 * intensity ratio per pair (using the requested intensity type), reconciles
 * any peptide identifications between partners, and returns a cloned
 * FeatureSet containing one combined feature per pair plus all unpaired
 * features.
 *
 * @param label             the isotopic label definition
 * @param intensityType     TOTAL_INTENSITY, RECALCULATED_INTENSITY or MAX_INTENSITY
 * @param run               MS1 run; must be non-null for RECALCULATED_INTENSITY
 * @param massTolerance     mass tolerance for pairing
 * @param massToleranceType interpretation of massTolerance, per AnalyzeICAT
 * @param timeTolerance     elution-time tolerance for pairing
 * @return a new FeatureSet with isotopic-label extra info, or null when
 *         RECALCULATED_INTENSITY was requested without a run
 */
public FeatureSet quant(AnalyzeICAT.IsotopicLabel label, int intensityType, MSRun run,
                        float massTolerance, int massToleranceType, float timeTolerance)
{
    boolean pairsOnly = false;
    ArrayList pairs = AnalyzeICAT.analyze(getFeatures(), label, massTolerance,
            massToleranceType, timeTolerance);
    // TODO: option to output unpaired features
    // Identity map used as a set of the features consumed by some pair.
    Map<Feature,Feature> icatFeatures = new IdentityHashMap<Feature,Feature>(3 * pairs.size());
    ArrayList<Feature> list = new ArrayList<Feature>(_features.length);
    //
    // output paired features
    //
    // Consider: may want to make ratio MAX_VALUE or MIN_VALUE when heavy is zero.
    // +/- Inf (or NaN) may confuse downstream tools.
    // Give us the option of using either the total, recalculated, or maximum intensity
    // of each partner. Total has problems when one partner shows longer
    // elution time (or runs into a different co-eluting peptide).
    // TODO: This is just a surrogate for using the same range of scans
    // for each partner (max intensity has problems too).
    // Recalculated intensity requires access to the MS1 run.
    if (intensityType == TOTAL_INTENSITY)
        Collections.sort(pairs, comparePairScanAsc);
    for (int i = 0; i < pairs.size(); i++)
    {
        Pair p = (Pair) pairs.get(i);
        Feature light = (Feature) p.first;
        Feature heavy = (Feature) p.second;
        Feature f = new Feature(light);
        f.setTotalIntensity(heavy.totalIntensity + light.totalIntensity);
        if (intensityType == TOTAL_INTENSITY)
        {
            IsotopicLabelExtraInfoDef.setHeavyIntensity(f, heavy.totalIntensity);
            IsotopicLabelExtraInfoDef.setLightIntensity(f, light.totalIntensity);
        }
        else if (intensityType == RECALCULATED_INTENSITY)
        {
            if (run == null)
            {
                _log.error("No run specified, unable to recalculate intensities.");
                return null;
            }
            IntRange overlappingScanRange = Feature.findOverlappingScanRange(light, heavy);
            IsotopicLabelExtraInfoDef.setLightIntensity(f,
                    light.calculateFeatureIntensityInRange(run, MZ_WINDOW_SIZE, overlappingScanRange,
                            RESAMPLING_FREQUENCY));
            IsotopicLabelExtraInfoDef.setHeavyIntensity(f,
                    heavy.calculateFeatureIntensityInRange(run, MZ_WINDOW_SIZE, overlappingScanRange,
                            RESAMPLING_FREQUENCY));
        }
        else if (intensityType == MAX_INTENSITY)
        {
            IsotopicLabelExtraInfoDef.setHeavyIntensity(f, heavy.intensity);
            IsotopicLabelExtraInfoDef.setLightIntensity(f, light.intensity);
            // smearing the total out doesn't seem to work better than just using the max
            // f.setHeavyIntensity(heavy.totalIntensity/heavy.scanCount);
            // f.setLightIntensity(light.totalIntensity/light.scanCount);
        }
        IsotopicLabelExtraInfoDef.setRatio(f,
                IsotopicLabelExtraInfoDef.getLightIntensity(f) /
                IsotopicLabelExtraInfoDef.getHeavyIntensity(f));
        IsotopicLabelExtraInfoDef.setLabelCount(f,
                Math.round((heavy.mass - light.mass) / label.getHeavy()));
        f.setProperty("label", label);
        f.setChargeStates(Math.max(light.getChargeStates(), heavy.getChargeStates()));
        // we used to do this in order to subtract out the light label. Not doing that any more
        // (mass will be consistent with m/z and charge), so no need to update mass at all
        //        f.updateMass();

        // Deal with any peptide identifications, likely supplied by AMT.
        // If both light and heavy have the same ID, or any in common, or only
        // one has an ID, keep it. If light and heavy have different IDs, toss
        // them both out.
        List<String> heavyPeptides = MS2ExtraInfoDef.getPeptideList(heavy);
        // BUG FIX: this previously read the light list from 'heavy' as well,
        // so the conflict logic below compared heavy against itself and
        // light's identifications were ignored entirely.
        List<String> lightPeptides = MS2ExtraInfoDef.getPeptideList(light);
        if (heavyPeptides != null || lightPeptides != null)
        {
            if (heavyPeptides == null)
            {
                MS2ExtraInfoDef.setPeptideList(f, lightPeptides.get(0));
            }
            else if (lightPeptides == null)
            {
                MS2ExtraInfoDef.setPeptideList(f, heavyPeptides.get(0));
            }
            else
            {
                // both heavy and light peptides exist
                if (heavyPeptides.size() == 1 && lightPeptides.size() == 1)
                {
                    if (heavyPeptides.get(0).equals(lightPeptides.get(0)))
                        MS2ExtraInfoDef.setPeptideList(f, heavyPeptides.get(0));
                    else
                        MS2ExtraInfoDef.removeAllPeptides(f);
                }
                else
                {
                    // keep one peptide the two partners agree on, if any
                    Set<String> commonPeptides = new HashSet<String>();
                    for (String heavyPeptide : heavyPeptides)
                        if (lightPeptides.contains(heavyPeptide))
                            commonPeptides.add(heavyPeptide);
                    if (commonPeptides.size() == 0)
                        MS2ExtraInfoDef.removeAllPeptides(f);
                    else
                        MS2ExtraInfoDef.setPeptideList(f, commonPeptides.iterator().next());
                }
            }
            // Now that we've figured out what peptide to assign, make sure it
            // has the right number of labeled residues. If not, unset it.
            if (MS2ExtraInfoDef.getFirstPeptide(f) != null)
            {
                int numLabeledResidues = 0;
                String featurePeptide = MS2ExtraInfoDef.getFirstPeptide(f);
                for (int j = 0; j < featurePeptide.length(); j++)
                    if (featurePeptide.charAt(j) == label.getResidue())
                        numLabeledResidues++;
                if (numLabeledResidues != IsotopicLabelExtraInfoDef.getLabelCount(f))
                {
                    MS2ExtraInfoDef.removeAllPeptides(f);
                }
            }
        }
        list.add(f);
        icatFeatures.put(light, light);
        icatFeatures.put(heavy, heavy);
    }
    //
    // output remaining (unpaired) features
    //
    if (!pairsOnly)
    {
        for (int i = 0; i < _features.length; i++)
        {
            Feature f = _features[i];
            if (icatFeatures.containsKey(f))
                continue;
            list.add(new Feature(f));
        }
    }
    FeatureSet fs = (FeatureSet) this.clone();
    fs.addExtraInformationType(new IsotopicLabelExtraInfoDef());
    fs.getProperties().put("label", label.toString());
    fs.setFeatures(list.toArray(new Feature[0]));
    return fs;
}
/**
 * Return the best (nearest) hit from a List of FeatureSets.
 *
 * Distance is Euclidean over (scan, mz); each set contributes its nearest
 * candidate via findNearestFeature, and the closest across all sets wins.
 *
 * @param featureSets list of FeatureSet, may be null
 * @param x           scan coordinate
 * @param y           m/z coordinate
 * @param maxScan     scan search window passed to findNearestFeature
 * @param maxMz       m/z search window passed to findNearestFeature
 * @return the nearest feature, or null if none found
 */
public static Feature hitTest(java.util.List featureSets, int x, float y, int maxScan, float maxMz)
{
    Feature feature = null;
    double minDistance = Double.MAX_VALUE;
    if (null == featureSets)
        return null;
    for (int i = 0; i < featureSets.size(); i++)
    {
        FeatureSet fs = (FeatureSet) featureSets.get(i);
        Feature feat = fs.findNearestFeature(x, y, maxScan, maxMz);
        if (null == feat)
            continue;
        double distance = Math.sqrt(Math.pow(feat.scan - x, 2) + Math.pow(feat.mz - y, 2));
        if (distance < minDistance)
        {
            // BUG FIX: minDistance was never updated, so every candidate
            // closer than Double.MAX_VALUE overwrote 'feature' and the
            // method effectively returned the LAST set's hit, not the best.
            minDistance = distance;
            feature = feat;
        }
    }
    return feature;
}
/**
* Loads features from a feature file into this FeatureSet, auto-detecting the
* file format (pepXML, APML, Hardklor, or native msInspect TSV). On success,
* copies the loaded features, properties, extra-information types and tag into
* this object and records a success load status; on failure, records an error
* load status and message instead of throwing (except for an unrecognized
* format, which throws IllegalArgumentException).
* @param file the file containing features to load
* @throws Exception on load failures other than bad file format (IOException is handled internally)
*/
public void loadFeatureFile(File file) throws Exception
{
//first check if the file exists
if (file == null || !file.exists())
{
setLoadStatus(FEATURESET_LOAD_ERROR_FILE_NOT_FOUND);
setLoadStatusMessage("Error loading features: unable to find file ");
return;
}
//Handler precedence: pepXML, then APML, then Hardklor, then native TSV.
//Each handler sniffs the file itself via canHandleFile().
FeatureSetFileHandler fileHandler = null;
if (PepXMLFeatureFileHandler.getSingletonInstance().canHandleFile(file))
{
//try loading it as a pepXML file
_log.debug("Loading as PepXML file");
fileHandler = PepXMLFeatureFileHandler.getSingletonInstance();
}
else if (APMLFeatureFileHandler.getSingletonInstance().canHandleFile(file))
{
//try loading it as an APML file
_log.debug("Loading as APML file");
fileHandler = APMLFeatureFileHandler.getSingletonInstance();
}
else if (HardklorFeatureFileHandler.getSingletonInstance().canHandleFile(file))
{
//try loading it as a Hardklor file
_log.debug("Loading as Hardklor file");
fileHandler = HardklorFeatureFileHandler.getSingletonInstance();
}
else if (NativeTSVFeatureFileHandler.getSingletonInstance().canHandleFile(file))
{
//if not an xml file, assume tab-separated value file
_log.debug("Loading as msInspect .tsv file");
fileHandler = NativeTSVFeatureFileHandler.getSingletonInstance();
}
else
{
throw new IllegalArgumentException("Unknown feature file type. Doesn't seem to be APML, pepXML or msInspect TSV file. Quitting.");
}
try
{
FeatureSetFileHandler loadedFeatureSet = fileHandler.loadFeatureSet(file);
//This is a bit cumbersome: load up the file in the handler, then take
//the resulting FeatureSet and copy all the important stuff here.
_features = loadedFeatureSet.getFeatures();
_log.debug("Loaded " + _features.length + " features from file");
setProperties(loadedFeatureSet._properties);
for(FeatureExtraInformationDef infoType : loadedFeatureSet.getExtraInformationTypes())
addExtraInformationType(infoType);
setTag(loadedFeatureSet._tag);
setSourceFile(file);
//if we got here, load was successful
setLoadStatus(FeatureSet.FEATURESET_LOAD_SUCCESS);
setLoadStatusMessage(_features.length + " Features loaded successfully");
}
catch (IOException e)
{
//in case of Exceptions, assume no features found,
//problem with file format
_log.error("User attempted to load bad feature file, filename = " + file.getName() +
", exception message = " + e.getMessage(),e);
setLoadStatus(FeatureSet.FEATURESET_LOAD_ERROR_BAD_FILE_FORMAT);
setLoadStatusMessage("Error loading features: bad file format in file ");
}
//If the catch block above fired, leave the error status in place and bail.
if (getLoadStatus() != FEATURESET_LOAD_SUCCESS)
return;
//"success" case
//if no features found, report
if (_features == null || _features.length == 0)
{
_log.info("User attempted to load file with no features");
setLoadStatus(FEATURESET_LOAD_ERROR_NO_FEATURES_FOUND);
setLoadStatusMessage("Error loading features: no features found in file ");
return;
}
}
/** @return the display color for this feature set, defaulting to red when unset */
public Color getColor()
{
    return (_color != null) ? _color : Color.RED;
}
/** Sets the display color for this feature set. */
public void setColor(Color color)
{
    this._color = color;
}
/** @return the rendering style code for this feature set */
public int getStyle()
{
    return this._style;
}
/** Sets the rendering style code for this feature set. */
public void setStyle(int style)
{
    this._style = style;
}
/** @return the file this feature set was loaded from, or null */
public File getSourceFile()
{
    return this._sourceFile;
}
/** Records the file this feature set was loaded from. */
public void setSourceFile(File sourceFile)
{
    this._sourceFile = sourceFile;
}
/** @return whether this feature set is currently displayed */
public boolean isDisplayed()
{
    return this._displayed;
}
/** Sets whether this feature set is currently displayed. */
public void setDisplayed(boolean displayed)
{
    this._displayed = displayed;
}
/** @return the property map for this feature set (may be null if never initialized) */
public Map<String,Object> getProperties()
{
    return this._properties;
}
/**
 * Looks up a single property by name.
 * @return the property value, or null if the property map is uninitialized
 *         or has no such entry
 */
public Object getProperty(String propertyName)
{
    return (_properties == null) ? null : _properties.get(propertyName);
}
/**
 * Replaces this set's properties with a defensive copy of the given map;
 * a null argument resets to an empty map.
 */
public void setProperties(Map<String,Object> properties)
{
    _properties = (properties == null)
            ? new HashMap<String,Object>()
            : new HashMap<String,Object>(properties);
}
/**
 * Stores a single property, lazily creating the property map, and logs the
 * value's class at debug level.
 */
public void setProperty(String propertyName, Object propertyValue)
{
    if (_properties == null)
        _properties = new HashMap<String,Object>();
    _properties.put(propertyName, propertyValue);
    if (_log.isDebugEnabled())
    {
        String className = (propertyValue == null) ? null : propertyValue.getClass().getName();
        _log.debug("setProperty: " + propertyName + ", class " + className);
    }
}
/** Saves this feature set back to the file it was loaded from. */
public void save() throws IOException
{
    this.save(getSourceFile());
}
/** Saves this feature set to the given file without the scan window dump. */
public void save(File file) throws IOException
{
    this.save(file, false);
}
/** Saves this feature set to the given file in the native msInspect TSV format. */
public void save(File file, boolean dumpWindow) throws IOException
{
    this.save(file, dumpWindow, NativeTSVFeatureFileHandler.FILE_TYPE_NAME);
}
/**
 * Saves this feature set to a PrintWriter in the requested file format
 * (APML, Hardklor, or — for any other type name — native msInspect TSV).
 *
 * @param outPW      destination writer
 * @param dumpWindow whether to dump the scan window
 * @param fileType   a FILE_TYPE_NAME constant from one of the handlers
 */
public void save(PrintWriter outPW, boolean dumpWindow, String fileType) throws IOException
{
    FeatureSetFileHandler fileHandler = createFileHandlerForType(fileType, dumpWindow);
    fileHandler.saveFeatureSet(this, outPW);
}

/**
 * Saves this feature set to a file in the requested file format
 * (APML, Hardklor, or — for any other type name — native msInspect TSV).
 *
 * @param outFile    destination file
 * @param dumpWindow whether to dump the scan window
 * @param fileType   a FILE_TYPE_NAME constant from one of the handlers
 */
public void save(File outFile, boolean dumpWindow, String fileType) throws IOException
{
    FeatureSetFileHandler fileHandler = createFileHandlerForType(fileType, dumpWindow);
    fileHandler.saveFeatureSet(this, outFile);
}

/**
 * Builds the file handler for the given file type name and applies the
 * dumpWindow setting. Extracted so the two save(...) overloads above share
 * one copy of the handler-selection logic instead of duplicating it.
 */
private static FeatureSetFileHandler createFileHandlerForType(String fileType, boolean dumpWindow)
{
    FeatureSetFileHandler fileHandler;
    if (APMLFeatureFileHandler.FILE_TYPE_NAME.equals(fileType))
        fileHandler = new APMLFeatureFileHandler();
    else if (HardklorFeatureFileHandler.FILE_TYPE_NAME.equals(fileType))
        fileHandler = new HardklorFeatureFileHandler();
    else
        fileHandler = new NativeTSVFeatureFileHandler();
    fileHandler.setDumpWindow(dumpWindow);
    return fileHandler;
}
/** Saves this feature set to a writer in native TSV format, without the window dump. */
public void save(PrintWriter out)
{
    this.save(out, false);
}
/** Saves this feature set to a writer in the native msInspect TSV format. */
public void save(PrintWriter out, boolean dumpWindow)
{
    NativeTSVFeatureFileHandler handler = new NativeTSVFeatureFileHandler();
    handler.setDumpWindow(dumpWindow);
    handler.saveFeatureSet(this, out);
}
/** Saves in pepXml format, numbering spectrum queries from 1. */
public void savePepXml(File outFile)
        throws IOException
{
    this.savePepXml(outFile, 1);
}
/**
 * Saves this feature set in pepXml format.
 *
 * @param outFile                  destination file
 * @param firstSpectrumQueryIndex  index assigned to the first spectrum query
 */
public void savePepXml(File outFile, int firstSpectrumQueryIndex)
        throws IOException
{
    PepXMLFeatureFileHandler handler = new PepXMLFeatureFileHandler();
    handler.setFirstSpectrumQueryIndex(firstSpectrumQueryIndex);
    handler.saveFeatureSet(this, outFile);
}
/**
 * Filter criteria for selecting a subset of features: charge, m/z, mass,
 * intensity, scan, time, KL, peak-count and quality thresholds. Defaults are
 * chosen so that an unmodified selector accepts everything; toString() emits
 * only the criteria that differ from those defaults, as command-line flags.
 */
public static class FeatureSelector implements Cloneable
{
    int minCharge = -10;
    int maxCharge = 10;
    // UNDONE: default these ranges based on getLowMz(), getHighMz()
    float maxMz = 10000;
    float minMz = 0f;
    float minIntensity = 0f;
    private float minTotalIntensity = 0f;
    int minScans = 0;
    int scanFirst = 0;
    int scanLast = Integer.MAX_VALUE;
    double maxKL = Double.MAX_VALUE;
    int minPeaks = 0;
    int maxPeaks = Integer.MAX_VALUE;
    float minMass = 0f;
    float maxMass = Float.MAX_VALUE;
    float minTime = 0f;
    float maxTime = Float.MAX_VALUE;
    int maxMassDeviationPPM = Integer.MAX_VALUE;
    //dhmay adding 2/25/2007
    float maxSumSquaresDist = Float.MAX_VALUE;
    float minPProphet = 0;
    float maxAMTFDR = 1f;
    int maxScanGap = 3;
    float maxMzGap = .12f;
    //dhmay adding 2/25/2009
    boolean accurateMzOnly = false;

    /** Two selectors are equal when every filter criterion matches. */
    public boolean equals(Object o)
    {
        if (null == o || !(o instanceof FeatureSelector))
            return false;
        FeatureSelector fs = (FeatureSelector) o;
        return getMinCharge() == fs.getMinCharge() && getMaxCharge() == fs.getMaxCharge() &&
            getMaxMz() == fs.getMaxMz() && getMinMz() == fs.getMinMz() &&
            getMinIntensity() == fs.getMinIntensity() && getMinScans() == fs.getMinScans() &&
            maxScanGap == fs.maxScanGap && maxMzGap == fs.maxMzGap && getScanFirst() == fs.getScanFirst() &&
            getScanLast() == fs.getScanLast() &&
            getMaxKL() == fs.getMaxKL() && getMinPeaks() == fs.getMinPeaks()
            && getMaxPeaks() == fs.getMaxPeaks()
            && getMinTotalIntensity() == fs.getMinTotalIntensity()
            && getMinMass() == fs.getMinMass() && getMaxMass() == fs.getMaxMass()
            && getMinTime() == fs.getMinTime() && getMaxTime() == fs.getMaxTime()
            && getMinPProphet() == fs.getMinPProphet() &&
            getMaxMassDeviationPPM() == fs.getMaxMassDeviationPPM() &&
            getMaxSumSquaresDist() == fs.getMaxSumSquaresDist() &&
            isAccurateMzOnly() == fs.isAccurateMzOnly() &&
            fs.getMaxAMTFDR() == getMaxAMTFDR();
    }

    /**
     * BUG FIX: equals() was overridden without hashCode(), breaking the
     * Object contract (equal selectors could land in different hash buckets).
     * Combines every field that equals() compares. Avoids java.util.Objects
     * so no new import is needed at this file's language level.
     */
    public int hashCode()
    {
        int result = minCharge;
        result = 31 * result + maxCharge;
        result = 31 * result + Float.floatToIntBits(maxMz);
        result = 31 * result + Float.floatToIntBits(minMz);
        result = 31 * result + Float.floatToIntBits(minIntensity);
        result = 31 * result + Float.floatToIntBits(minTotalIntensity);
        result = 31 * result + minScans;
        result = 31 * result + scanFirst;
        result = 31 * result + scanLast;
        long klBits = Double.doubleToLongBits(maxKL);
        result = 31 * result + (int) (klBits ^ (klBits >>> 32));
        result = 31 * result + minPeaks;
        result = 31 * result + maxPeaks;
        result = 31 * result + Float.floatToIntBits(minMass);
        result = 31 * result + Float.floatToIntBits(maxMass);
        result = 31 * result + Float.floatToIntBits(minTime);
        result = 31 * result + Float.floatToIntBits(maxTime);
        result = 31 * result + maxMassDeviationPPM;
        result = 31 * result + Float.floatToIntBits(maxSumSquaresDist);
        result = 31 * result + Float.floatToIntBits(minPProphet);
        result = 31 * result + Float.floatToIntBits(maxAMTFDR);
        result = 31 * result + maxScanGap;
        result = 31 * result + Float.floatToIntBits(maxMzGap);
        result = 31 * result + (accurateMzOnly ? 1 : 0);
        return result;
    }

    /**
     * Emits only the criteria that differ from a default-constructed
     * selector, formatted as " --name=value" command-line flags.
     */
    public String toString()
    {
        StringBuffer sb = new StringBuffer();
        FeatureSelector unchanged = new FeatureSelector();
        String[] props = new String[] {"minCharge", "maxCharge", "minMz", "maxMz", "minMass", "maxMass", "minIntensity", "minTotalIntensity", "maxKL",
            "minPeaks", "maxPeaks", "scanFirst", "scanLast", "minTime", "maxTime", "minScans", "minPProphet",
            "maxMassDeviationPPM","maxSumSquaresDist","accurateMzOnly","maxAMTFDR"};
        try
        {
            for (String prop : props)
            {
                Object val = BeanUtils.getSimpleProperty(this, prop);
                Object orig = BeanUtils.getSimpleProperty(unchanged, prop);
                if (!val.equals(orig))
                    sb.append(" --" + prop + "=" + val.toString());
            }
        }
        catch (Exception x)
        {
            ApplicationContext.errorMessage("FeatureSelector: ", x);
        }
        return sb.toString();
    }

    /**
     * Applies one "--name=value" command-line filter parameter to this
     * selector.
     *
     * @return true if the parameter name was recognized, false otherwise
     */
    public boolean setFilterParam(String paramName, String paramVal)
    {
        //TODO: Should use reflection here since all names match
        if ("--minMz".equalsIgnoreCase(paramName))
            setMinMz(Float.parseFloat(paramVal));
        else if ("--maxMz".equalsIgnoreCase(paramName))
            setMaxMz(Float.parseFloat(paramVal));
        else if ("--minMass".equalsIgnoreCase(paramName))
            setMinMass(Float.parseFloat(paramVal));
        else if ("--maxMass".equalsIgnoreCase(paramName))
            setMaxMass(Float.parseFloat(paramVal));
        else if ("--minCharge".equalsIgnoreCase(paramName))
            setMinCharge(Integer.parseInt(paramVal));
        else if ("--maxCharge".equalsIgnoreCase(paramName))
            setMaxCharge(Integer.parseInt(paramVal));
        else if ("--minPeaks".equalsIgnoreCase(paramName))
            setMinPeaks(Integer.parseInt(paramVal));
        else if ("--maxPeaks".equalsIgnoreCase(paramName))
            setMaxPeaks(Integer.parseInt(paramVal));
        else if ("--minScanCount".equalsIgnoreCase(paramName))
            setMinScans(Integer.parseInt(paramVal));
        else if ("--scanFirst".equalsIgnoreCase(paramName))
            setScanFirst(Integer.parseInt(paramVal));
        else if ("--scanLast".equalsIgnoreCase(paramName))
            setScanLast(Integer.parseInt(paramVal));
        else if ("--minScans".equalsIgnoreCase(paramName))
            setMinScans(Integer.parseInt(paramVal));
        else if ("--maxKL".equalsIgnoreCase(paramName))
            setMaxKL(Double.parseDouble(paramVal));
        else if ("--minIntensity".equalsIgnoreCase(paramName))
            setMinIntensity(Float.parseFloat(paramVal));
        else if ("--minTime".equalsIgnoreCase(paramName))
            setMinTime(Float.parseFloat(paramVal));
        else if ("--maxTime".equalsIgnoreCase(paramName))
            setMaxTime(Float.parseFloat(paramVal));
        else if ("--minTotalIntensity".equalsIgnoreCase(paramName))
            setMinTotalIntensity(Float.parseFloat(paramVal));
        else if ("--minPProphet".equalsIgnoreCase(paramName))
            setMinPProphet(Float.parseFloat(paramVal));
        else if ("--maxMassDeviationPPM".equalsIgnoreCase(paramName))
            setMaxMassDeviationPPM(Integer.parseInt(paramVal));
        else if ("--maxSumSquaresDist".equalsIgnoreCase(paramName))
            // BUG FIX: was Integer.parseInt, which threw
            // NumberFormatException for fractional values even though the
            // threshold is a float
            setMaxSumSquaresDist(Float.parseFloat(paramVal));
        else if ("--accMzOnly".equalsIgnoreCase(paramName))
            try
            {
                setAccurateMzOnly((Boolean) new BooleanArgumentDefinition("dummy").convertArgumentValue(paramVal));
            }
            catch (ArgumentValidationException e)
            {
                // deliberately ignored: an unparseable boolean leaves the
                // default accurateMzOnly value in place
            }
        else if ("--maxamtfdr".equalsIgnoreCase(paramName))
            setMaxAMTFDR(Float.parseFloat(paramVal));
        else
            return false;
        return true;
    }

    public Object clone()
    {
        try
        {
            return super.clone();
        }
        catch (Exception x)
        {
            //Impossible: this class implements Cloneable
            return null;
        }
    }

    public int getMaxMassDeviationPPM()
    {
        return maxMassDeviationPPM;
    }
    public void setMaxMassDeviationPPM(int maxMassDeviationPPM)
    {
        this.maxMassDeviationPPM = maxMassDeviationPPM;
    }
    public int getMinCharge()
    {
        return minCharge;
    }
    public void setMinCharge(int minCharge)
    {
        this.minCharge = minCharge;
    }
    public int getMaxCharge()
    {
        return maxCharge;
    }
    public void setMaxCharge(int maxCharge)
    {
        this.maxCharge = maxCharge;
    }
    public float getMaxMz()
    {
        return maxMz;
    }
    public void setMaxMz(float maxMz)
    {
        this.maxMz = maxMz;
    }
    public float getMinMz()
    {
        return minMz;
    }
    public void setMinMz(float minMz)
    {
        this.minMz = minMz;
    }
    public float getMinPProphet()
    {
        return minPProphet;
    }
    public void setMinPProphet(float minPProphet)
    {
        this.minPProphet = minPProphet;
    }
    public float getMinIntensity()
    {
        return minIntensity;
    }
    public void setMinIntensity(float minIntensity)
    {
        this.minIntensity = minIntensity;
    }
    public int getMinScans()
    {
        return minScans;
    }
    public void setMinScans(int minScans)
    {
        this.minScans = minScans;
    }
    public int getScanFirst()
    {
        return scanFirst;
    }
    public void setScanFirst(int scanFirst)
    {
        this.scanFirst = scanFirst;
    }
    public int getScanLast()
    {
        return scanLast;
    }
    public void setScanLast(int scanLast)
    {
        this.scanLast = scanLast;
    }
    public double getMaxKL()
    {
        return maxKL;
    }
    public void setMaxKL(double maxKL)
    {
        this.maxKL = maxKL;
    }
    public int getMinPeaks()
    {
        return minPeaks;
    }
    public void setMinPeaks(int minPeaks)
    {
        this.minPeaks = minPeaks;
    }
    public int getMaxPeaks()
    {
        return maxPeaks;
    }
    public void setMaxPeaks(int maxPeaks)
    {
        this.maxPeaks = maxPeaks;
    }
    public float getMinTotalIntensity()
    {
        return minTotalIntensity;
    }
    public void setMinTotalIntensity(float minTotalIntensity)
    {
        this.minTotalIntensity = minTotalIntensity;
    }
    public float getMinMass()
    {
        return minMass;
    }
    public void setMinMass(float minMass)
    {
        this.minMass = minMass;
    }
    public float getMaxMass()
    {
        return maxMass;
    }
    public void setMaxMass(float maxMass)
    {
        this.maxMass = maxMass;
    }
    public float getMinTime()
    {
        return minTime;
    }
    public void setMinTime(float minTime)
    {
        this.minTime = minTime;
    }
    public float getMaxTime()
    {
        return maxTime;
    }
    public void setMaxTime(float maxTime)
    {
        this.maxTime = maxTime;
    }
    public float getMaxSumSquaresDist()
    {
        return maxSumSquaresDist;
    }
    public void setMaxSumSquaresDist(float maxSumSquaresDistance)
    {
        this.maxSumSquaresDist = maxSumSquaresDistance;
    }
    public boolean isAccurateMzOnly()
    {
        return accurateMzOnly;
    }
    public void setAccurateMzOnly(boolean accurateMzOnly)
    {
        this.accurateMzOnly = accurateMzOnly;
    }
    public float getMaxAMTFDR()
    {
        return maxAMTFDR;
    }
    public void setMaxAMTFDR(float maxAMTFDR)
    {
        this.maxAMTFDR = maxAMTFDR;
    }
}
/**
 * Returns the extra-information types attached to this feature set,
 * lazily creating the backing list on first access.
 */
public List<FeatureExtraInformationDef> getExtraInformationTypes()
{
    if (null == extraInformationTypes)
        extraInformationTypes = new ArrayList<FeatureExtraInformationDef>();
    return extraInformationTypes;
}
/** @return the extra-information types as an array */
public FeatureExtraInformationDef[] getExtraInformationTypesArray()
{
    List<FeatureExtraInformationDef> types = getExtraInformationTypes();
    return types.toArray(new FeatureExtraInformationDef[0]);
}
/** Attaches an extra-information type, ignoring duplicates. */
public void addExtraInformationType(FeatureExtraInformationDef infoType)
{
    List<FeatureExtraInformationDef> types = getExtraInformationTypes();
    if (!types.contains(infoType))
        types.add(infoType);
}
/** @return true if the given extra-information type is attached to this set */
public boolean hasExtraInformationType(FeatureExtraInformationDef infoType)
{
    List<FeatureExtraInformationDef> types = getExtraInformationTypes();
    return types.contains(infoType);
}
/** Detaches every extra-information type by replacing the list with a fresh one. */
public void removeAllExtraInformationTypes()
{
    this.extraInformationTypes = new ArrayList<FeatureExtraInformationDef>();
}
}
| |
/*
* Created on 25.10.2006
*/
package net.demo.http;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.StringTokenizer;
/**
* <pre>
* Copyright (c) 2006 Dominik Schulz
* Copyright (c) 2006 Florian Lindner
* Copyright (c) 2006 Philip Hartmann
*
* This file is part of jHTTPd.
*
* jHTTPd is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* jHTTPd is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with jHTTPd; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
* </pre>
*
*
* This class stores our webserver config. Since we will have more configuration
* options than a simple port I think this would make sense.
*
* @link http://www.faqs.org/rfcs/rfc1945.html
* @author Dominik
*/
public class Config {
private int runningThreads = 0;
/**
* The port to bind the listen socket to
*/
private int port = 0;
private String serverRoot = ".";
private String defaultCharset = "ISO-8859-1";
private String defaultType = "text/plain";
private boolean directoryListView = false;
private HashMap<String, String> mimeTypes = new HashMap<String, String>();
public static final String CRLF = "\r\n";
public static final String CMD_QUIT = "QUIT";
public static final String CMD_MIME = "MIME";
public static final String CMD_CONFIG = "CONFIG";
public static final String CONFIG_FILE = "jhttpd.conf";
public static final String HTTP_METHOD_GET = "GET";
public static final String HTTP_METHOD_HEAD = "HEAD";
public static final String HTTP_METHOD_POST = "POST";
public static final String PRODUCT_NAME = "jHTTPd";
public static final String PRODUCT_VERSION = "0.1a";
public static final String SIGNATURE = PRODUCT_NAME + " " + PRODUCT_VERSION + " HTTP 1.0 Server";
/**
* Successful 2xx This class of status code indicates that the client's
* request was successfully received, understood, and accepted. 200 OK The
* request has succeeded. The information returned with the response is
* dependent on the method used in the request, as follows: GET an entity
* corresponding to the requested resource is sent in the response; HEAD the
* response must only contain the header information and no Entity-Body;
* POST an entity describing or containing the result of the action.
*/
public static final String HTTP_STATUS_200 = "HTTP/1.0 200 OK";
/**
* 201 Created The request has been fulfilled and resulted in a new resource
* being created. The newly created resource can be referenced by the URI(s)
* returned in the entity of the response. The origin server should create
* the resource before using this Status-Code. If the action cannot be
* carried out immediately, the server must include in the response body a
* description of when the resource will be available; otherwise, the server
* should respond with 202 (accepted). Of the methods defined by this
* specification, only POST can create a resource.
*/
public static final String HTTP_STATUS_201 = "HTTP/1.0 201 Created";
/**
* 202 Accepted The request has been accepted for processing, but the
* processing has not been completed. The request may or may not eventually
* be acted upon, as it may be disallowed when processing actually takes
* place. There is no facility for re-sending a status code from an
* asynchronous operation such as this. The 202 response is intentionally
* non-committal. Its purpose is to allow a server to accept a request for
* some other process (perhaps a batch-oriented process that is only run
* once per day) without requiring that the user agent's connection to the
* server persist until the process is completed. The entity returned with
* this response should include an indication of the request's current
* status and either a pointer to a status monitor or some estimate of when
* the user can expect the request to be fulfilled.
*/
public static final String HTTP_STATUS_202 = "HTTP/1.0 202 Accepted";
/**
* 204 No Content The server has fulfilled the request but there is no new
* information to send back. If the client is a user agent, it should not
* change its document view from that which caused the request to be
* generated. This response is primarily intended to allow input for scripts
* or other actions to take place without causing a change to the user
* agent's active document view. The response may include new
* metainformation in the form of entity headers, which should apply to the
* document currently in the user agent's active view.
*/
public static final String HTTP_STATUS_204 = "HTTP/1.0 204 No Content";
/**
* 300 Multiple Choices This response code is not directly used by HTTP/1.0
* applications, but serves as the default for interpreting the 3xx class of
* responses. The requested resource is available at one or more locations.
* Unless it was a HEAD request, the response should include an entity
* containing a list of resource characteristics and locations from which
* the user or user agent can choose the one most appropriate. If the server
* has a preferred choice, it should include the URL in a Location field;
* user agents may use this field value for automatic redirection.
*/
public static final String HTTP_STATUS_300 = "HTTP/1.0 300 Multiple Choices";
/**
* 301 Moved Permanently The requested resource has been assigned a new
* permanent URL and any future references to this resource should be done
* using that URL. Clients with link editing capabilities should
* automatically relink references to the Request-URI to the new reference
* returned by the server, where possible. The new URL must be given by the
* Location field in the response. Unless it was a HEAD request, the
* Entity-Body of the response should contain a short note with a hyperlink
* to the new URL. If the 301 status code is received in response to a
* request using the POST method, the user agent must not automatically
* redirect the request unless it can be confirmed by the user, since this
* might change the conditions under which the request was issued. Note:
* When automatically redirecting a POST request after receiving a 301
* status code, some existing user agents will erroneously change it into a
* GET request.
*/
public static final String HTTP_STATUS_301 = "HTTP/1.0 301 Moved Permanently";
/**
* 302 Moved Temporarily The requested resource resides temporarily under a
* different URL. Since the redirection may be altered on occasion, the
* client should continue to use the Request-URI for future requests. The
* URL must be given by the Location field in the response. Unless it was a
* HEAD request, the Entity-Body of the response should contain a short note
* with a hyperlink to the new URI(s). If the 302 status code is received in
* response to a request using the POST method, the user agent must not
* automatically redirect the request unless it can be confirmed by the
* user, since this might change the conditions under which the request was
* issued. Note: When automatically redirecting a POST request after
* receiving a 302 status code, some existing user agents will erroneously
* change it into a GET request.
*/
public static final String HTTP_STATUS_302 = "HTTP/1.0 302 Moved Temporarily";
/**
* 304 Not Modified If the client has performed a conditional GET request
* and access is allowed, but the document has not been modified since the
* date and time specified in the If-Modified-Since field, the server must
* respond with this status code and not send an Entity-Body to the client.
* Header fields contained in the response should only include information
* which is relevant to cache managers or which may have changed
* independently of the entity's Last-Modified date. Examples of relevant
* header fields include: Date, Server, and Expires. A cache should update
* its cached entity to reflect any new field values given in the 304
* response.
*/
public static final String HTTP_STATUS_304 = "HTTP/1.0 304 Not Modified";
/**
* 9.4 Client Error 4xx The 4xx class of status code is intended for cases
* in which the client seems to have erred. If the client has not completed
* the request when a 4xx code is received, it should immediately cease
* sending data to the server. Except when responding to a HEAD request, the
* server should include an entity containing an explanation of the error
* situation, and whether it is a temporary or permanent condition. These
* status codes are applicable to any request method. Note: If the client is
* sending data, server implementations on TCP should be careful to ensure
* that the client acknowledges receipt of the packet(s) containing the
* response prior to closing the input connection. If the client continues
* sending data to the server after the close, the server's controller will
* send a reset packet to the client, which may erase the client's
* unacknowledged input buffers before they can be read and interpreted by
* the HTTP application. 400 Bad Request The request could not be understood
* by the server due to malformed syntax. The client should not repeat the
* request without modifications.
*/
public static final String HTTP_STATUS_400 = "HTTP/1.0 400 Bad Request";
/**
* 401 Unauthorized The request requires user authentication. The response
* must include a WWW-Authenticate header field (Section 10.16) containing a
* challenge applicable to the requested resource. The client may repeat the
* request with a suitable Authorization header field (Section 10.2). If the
* request already included Authorization credentials, then the 401 response
* indicates that authorization has been refused for those credentials. If
* the 401 response contains the same challenge as the prior response, and
* the user agent has already attempted authentication at least once, then
* the user should be presented the entity that was given in the response,
* since that entity may include relevant diagnostic information. HTTP
* access authentication is explained in Section 11.
*/
public static final String HTTP_STATUS_401 = "HTTP/1.0 401 Unauthorized";
/**
* 403 Forbidden The server understood the request, but is refusing to
* fulfill it. Authorization will not help and the request should not be
* repeated. If the request method was not HEAD and the server wishes to
* make public why the request has not been fulfilled, it should describe
* the reason for the refusal in the entity body. This status code is
* commonly used when the server does not wish to reveal exactly why the
* request has been refused, or when no other response is applicable.
*/
public static final String HTTP_STATUS_403 = "HTTP/1.0 403 Forbidden";
/**
* 404 Not Found The server has not found anything matching the Request-URI.
* No indication is given of whether the condition is temporary or
* permanent. If the server does not wish to make this information available
* to the client, the status code 403 (forbidden) can be used instead.
*/
public static final String HTTP_STATUS_404 = "HTTP/1.0 404 Not Found";
/**
* 9.5 Server Error 5xx Response status codes beginning with the digit "5"
* indicate cases in which the server is aware that it has erred or is
* incapable of performing the request. If the client has not completed the
* request when a 5xx code is received, it should immediately cease sending
* data to the server. Except when responding to a HEAD request, the server
* should include an entity containing an explanation of the error
* situation, and whether it is a temporary or permanent condition. These
* response codes are applicable to any request method and there are no
* required header fields. 500 Internal Server Error The server encountered
* an unexpected condition which prevented it from fulfilling the request.
*/
public static final String HTTP_STATUS_500 = "HTTP/1.0 500 Internal Server Error";
/**
* 501 Not Implemented The server does not support the functionality
* required to fulfill the request. This is the appropriate response when
* the server does not recognize the request method and is not capable of
* supporting it for any resource.
*/
public static final String HTTP_STATUS_501 = "HTTP/1.0 501 Not Implemented";
/**
* 502 Bad Gateway The server, while acting as a gateway or proxy, received
* an invalid response from the upstream server it accessed in attempting to
* fulfill the request.
*/
public static final String HTTP_STATUS_502 = "HTTP/1.0 502 Bad Gateway";
/**
* 503 Service Unavailable The server is currently unable to handle the
* request due to a temporary overloading or maintenance of the server. The
* implication is that this is a temporary condition which will be
* alleviated after some delay. Note: The existence of the 503 status code
* does not imply that a server must use it when becoming overloaded. Some
* servers may wish to simply refuse the connection.
*/
public static final String HTTP_STATUS_503 = "HTTP/1.0 503 Service Unavailable";
public static final String CMD_STATUS = "STATUS";
/**
 * Sets the TCP port the server listens on.
 *
 * @param port the port number
 */
public void setPort(int port) {
    this.port = port;
}
/**
 * Sets the TCP port the server listens on from its decimal string form.
 *
 * @param port the port number as a decimal string
 * @throws NumberFormatException if {@code port} is not a valid integer
 */
public void setPort(String port) {
    this.setPort(Integer.parseInt(port));
}
/**
 * Returns the TCP port the server listens on.
 *
 * @return the configured port number
 */
public int getPort() {
    return this.port;
}
/**
 * Looks up the MIME type registered for a file extension.
 *
 * @param extension the file extension to look up
 * @return the mapped MIME type, or the configured default type when the
 *         extension has no registered mapping
 */
public String getMimeType(String extension) {
    if (!mimeTypes.containsKey(extension)) {
        return this.defaultType;
    }
    return mimeTypes.get(extension);
}
/**
 * Returns the default MIME type used when an extension has no mapping.
 *
 * @return the default MIME type
 */
public String getMimeType() {
    return this.defaultType;
}
/**
 * Reads an Apache-style configuration file and applies the recognized
 * directives to this configuration: {@code AddType}, {@code ServerRoot},
 * {@code Listen}, {@code AddDefaultCharset}, {@code DefaultType} and
 * {@code DirectoryListView}. Comment lines (starting with {@code #}),
 * blank lines and unknown or argument-less directives are ignored.
 *
 * @param configFile path of the configuration file to read
 * @return {@code true} once the file has been fully processed
 * @throws IOException if the file cannot be opened or read
 * @throws NumberFormatException if a {@code Listen} directive carries a
 *         non-numeric port value
 */
public boolean readConfig(String configFile) throws IOException {
    BufferedReader in = new BufferedReader(new FileReader(configFile));
    try {
        String currentLine;
        while ((currentLine = in.readLine()) != null) {
            if (currentLine.startsWith("#")) {
                continue; // skip comments
            }
            StringTokenizer tokens = new StringTokenizer(currentLine);
            if (!tokens.hasMoreTokens()) {
                // Blank line: the original called nextToken() unconditionally
                // here and crashed with NoSuchElementException.
                continue;
            }
            String directive = tokens.nextToken();
            if (directive.equals("AddType") && tokens.hasMoreTokens()) {
                // Syntax: AddType <mime-type> <extension> [<extension> ...]
                String mimeType = tokens.nextToken();
                while (tokens.hasMoreTokens()) {
                    mimeTypes.put(tokens.nextToken(), mimeType);
                }
            } else if (directive.equals("ServerRoot") && tokens.hasMoreTokens()) {
                this.serverRoot = tokens.nextToken();
            } else if (directive.equals("Listen") && tokens.hasMoreTokens()) {
                this.port = Integer.parseInt(tokens.nextToken());
            } else if (directive.equals("AddDefaultCharset") && tokens.hasMoreTokens()) {
                this.defaultCharset = tokens.nextToken();
            } else if (directive.equals("DefaultType") && tokens.hasMoreTokens()) {
                this.defaultType = tokens.nextToken();
            } else if (directive.equals("DirectoryListView") && tokens.hasMoreTokens()) {
                // equalsIgnoreCase avoids the locale pitfalls of
                // toLowerCase().equals("yes") (e.g. the Turkish dotless i).
                this.directoryListView = tokens.nextToken().equalsIgnoreCase("yes");
            }
        }
    } finally {
        // The original leaked this reader on every call.
        in.close();
    }
    return true;
}
/**
 * Reports whether a directory listing should be produced for folder requests.
 *
 * @return {@code true} when directory listing is enabled
 */
public boolean isDirectoryListView() {
    return this.directoryListView;
}
/**
 * Returns the charset advertised for responses by default.
 *
 * @return the default charset name
 */
public String getDefaultCharset() {
    return this.defaultCharset;
}
/**
 * Returns the document root directory the server serves files from.
 *
 * @return the server root path
 */
public String getServerRoot() {
    return this.serverRoot;
}
/** Thread-safely decrements the running-thread counter. */
public synchronized void countDown() {
    this.runningThreads = this.runningThreads - 1;
}
/** Thread-safely increments the running-thread counter. */
public synchronized void countUp() {
    this.runningThreads = this.runningThreads + 1;
}
/**
 * Returns the worker-thread count as tracked by {@code countUp()} /
 * {@code countDown()}.
 *
 * @return the number of running threads
 */
public int getRunningThreads() {
    return this.runningThreads;
}
/**
 * Returns the extension-to-MIME-type map.
 *
 * <p>NOTE(review): this exposes the internal mutable map directly, so callers
 * can alter the configuration through it — confirm that is intended.</p>
 *
 * @return the mimeTypes map
 */
public HashMap<String, String> getMimeTypes() {
    return this.mimeTypes;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.cloudsearch.v1.model;
/**
* Represents an item to be pushed to the indexing queue.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Search API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class PushItem extends com.google.api.client.json.GenericJson {
/**
* Content hash of the item according to the repository. If specified, this is used to determine
* how to modify this item's status. Setting this field and the type field results in argument
* error. The maximum length is 2048 characters.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String contentHash;
/**
* Metadata hash of the item according to the repository. If specified, this is used to determine
* how to modify this item's status. Setting this field and the type field results in argument
* error. The maximum length is 2048 characters.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String metadataHash;
/**
* Provides additional document state information for the connector, such as an alternate
* repository ID and other metadata. The maximum length is 8192 bytes.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String payload;
/**
* Queue to which this item belongs to. The default queue is chosen if this field is not
* specified. The maximum length is 512 characters.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String queue;
/**
* Populate this field to store Connector or repository error details. This information is
* displayed in the Admin Console. This field may only be populated when the Type is
* REPOSITORY_ERROR.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private RepositoryError repositoryError;
/**
* Structured data hash of the item according to the repository. If specified, this is used to
* determine how to modify this item's status. Setting this field and the type field results in
* argument error. The maximum length is 2048 characters.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String structuredDataHash;
/**
* The type of the push operation that defines the push behavior.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* Content hash of the item according to the repository. If specified, this is used to determine
* how to modify this item's status. Setting this field and the type field results in argument
* error. The maximum length is 2048 characters.
* @return value or {@code null} for none
*/
public java.lang.String getContentHash() {
return contentHash;
}
/**
* Content hash of the item according to the repository. If specified, this is used to determine
* how to modify this item's status. Setting this field and the type field results in argument
* error. The maximum length is 2048 characters.
* @param contentHash contentHash or {@code null} for none
*/
public PushItem setContentHash(java.lang.String contentHash) {
this.contentHash = contentHash;
return this;
}
/**
* Metadata hash of the item according to the repository. If specified, this is used to determine
* how to modify this item's status. Setting this field and the type field results in argument
* error. The maximum length is 2048 characters.
* @return value or {@code null} for none
*/
public java.lang.String getMetadataHash() {
return metadataHash;
}
/**
* Metadata hash of the item according to the repository. If specified, this is used to determine
* how to modify this item's status. Setting this field and the type field results in argument
* error. The maximum length is 2048 characters.
* @param metadataHash metadataHash or {@code null} for none
*/
public PushItem setMetadataHash(java.lang.String metadataHash) {
this.metadataHash = metadataHash;
return this;
}
/**
* Provides additional document state information for the connector, such as an alternate
* repository ID and other metadata. The maximum length is 8192 bytes.
* @see #decodePayload()
* @return value or {@code null} for none
*/
public java.lang.String getPayload() {
return payload;
}
/**
* Provides additional document state information for the connector, such as an alternate
* repository ID and other metadata. The maximum length is 8192 bytes.
* @see #getPayload()
* @return Base64 decoded value or {@code null} for none
*
* @since 1.14
*/
public byte[] decodePayload() {
return com.google.api.client.util.Base64.decodeBase64(payload);
}
/**
* Provides additional document state information for the connector, such as an alternate
* repository ID and other metadata. The maximum length is 8192 bytes.
* @see #encodePayload()
* @param payload payload or {@code null} for none
*/
public PushItem setPayload(java.lang.String payload) {
this.payload = payload;
return this;
}
/**
* Provides additional document state information for the connector, such as an alternate
* repository ID and other metadata. The maximum length is 8192 bytes.
* @see #setPayload()
*
* <p>
* The value is encoded Base64 or {@code null} for none.
* </p>
*
* @since 1.14
*/
public PushItem encodePayload(byte[] payload) {
this.payload = com.google.api.client.util.Base64.encodeBase64URLSafeString(payload);
return this;
}
/**
* Queue to which this item belongs to. The default queue is chosen if this field is not
* specified. The maximum length is 512 characters.
* @return value or {@code null} for none
*/
public java.lang.String getQueue() {
return queue;
}
/**
* Queue to which this item belongs to. The default queue is chosen if this field is not
* specified. The maximum length is 512 characters.
* @param queue queue or {@code null} for none
*/
public PushItem setQueue(java.lang.String queue) {
this.queue = queue;
return this;
}
/**
* Populate this field to store Connector or repository error details. This information is
* displayed in the Admin Console. This field may only be populated when the Type is
* REPOSITORY_ERROR.
* @return value or {@code null} for none
*/
public RepositoryError getRepositoryError() {
return repositoryError;
}
/**
* Populate this field to store Connector or repository error details. This information is
* displayed in the Admin Console. This field may only be populated when the Type is
* REPOSITORY_ERROR.
* @param repositoryError repositoryError or {@code null} for none
*/
public PushItem setRepositoryError(RepositoryError repositoryError) {
this.repositoryError = repositoryError;
return this;
}
/**
* Structured data hash of the item according to the repository. If specified, this is used to
* determine how to modify this item's status. Setting this field and the type field results in
* argument error. The maximum length is 2048 characters.
* @return value or {@code null} for none
*/
public java.lang.String getStructuredDataHash() {
return structuredDataHash;
}
/**
* Structured data hash of the item according to the repository. If specified, this is used to
* determine how to modify this item's status. Setting this field and the type field results in
* argument error. The maximum length is 2048 characters.
* @param structuredDataHash structuredDataHash or {@code null} for none
*/
public PushItem setStructuredDataHash(java.lang.String structuredDataHash) {
this.structuredDataHash = structuredDataHash;
return this;
}
/**
* The type of the push operation that defines the push behavior.
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* The type of the push operation that defines the push behavior.
* @param type type or {@code null} for none
*/
public PushItem setType(java.lang.String type) {
this.type = type;
return this;
}
@Override
public PushItem set(String fieldName, Object value) {
return (PushItem) super.set(fieldName, value);
}
@Override
public PushItem clone() {
return (PushItem) super.clone();
}
}
| |
/*
* Copyright (c) 2009 University of Durham, England All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met: *
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer. * Redistributions in binary
* form must reproduce the above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or other materials provided
* with the distribution. * Neither the name of 'SynergyNet' nor the names of
* its contributors may be used to endorse or promote products derived from this
* software without specific prior written permission. THIS SOFTWARE IS PROVIDED
* BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package apps.conceptmap;
// import java.io.File;
// import synergynet.table.apps.conceptmap.graphcomponents.nodes.PDFNode;
// import synergynet.table.apps.conceptmap.graphcomponents.nodes.PPTNode;
// import synergynet.table.apps.conceptmap.graphcomponents.nodes.TextNode;
import synergynetframework.appsystem.contentsystem.ContentSystem;
import synergynetframework.appsystem.table.appdefinitions.DefaultSynergyNetApp;
import synergynetframework.appsystem.table.appregistry.ApplicationInfo;
import synergynetframework.appsystem.table.appregistry.menucontrol.HoldTopRightConfirmVisualExit;
import apps.conceptmap.graphcomponents.nodes.GraphNode;
import apps.conceptmap.graphcomponents.nodes.ImageTextNode;
import apps.conceptmap.utility.GraphManager;
/**
* The Class ConceptMapApp.
*/
public class ConceptMapApp extends DefaultSynergyNetApp {

	/** The running instance, assigned when {@link #addContent()} executes. */
	private static ConceptMapApp instance;

	/**
	 * Gets the single instance of ConceptMapApp.
	 *
	 * @return single instance of ConceptMapApp, or {@code null} before
	 *         {@link #addContent()} has run
	 */
	public static ConceptMapApp getInstance() {
		return instance;
	}

	/** The content system backing this app. */
	protected ContentSystem content;

	/** Manages the concept-map graph (nodes and their links). */
	protected GraphManager gmanager;

	/**
	 * Instantiates a new concept map app.
	 *
	 * @param info the application registry info
	 */
	public ConceptMapApp(ApplicationInfo info) {
		super(info);
	}

	/**
	 * Builds the initial scene: wires up the content system and graph manager,
	 * installs the exit menu controller, then seeds the map with sample nodes.
	 */
	@Override
	public void addContent() {
		instance = this;
		content = ContentSystem.getContentSystemForSynergyNetApp(this);
		gmanager = new GraphManager(content);
		setMenuController(new HoldTopRightConfirmVisualExit(this));
		// All sample nodes share the same styling; build them through one
		// helper instead of repeating the ~15-line setup per node.
		createSampleNode("sampleconceptmap/tree.jpg", "Trees");
		createSampleNode("sampleconceptmap/wood.jpg", "Wood");
		createSampleNode("sampleconceptmap/house.gif", "House");
		createSampleNode("sampleconceptmap/oxygen.jpg", "Oxygen");
		createSampleNode("sampleconceptmap/animals.jpg", "Animals");
	}

	/**
	 * Creates one sample image+text node with the shared look-and-feel and
	 * places it at (100, 100).
	 *
	 * @param imagePath image resource path, relative to this class
	 * @param text label text shown on the node
	 * @return the configured node
	 */
	private ImageTextNode createSampleNode(String imagePath, String text) {
		ImageTextNode node = new ImageTextNode(content, gmanager);
		node.setImageResource(ConceptMapApp.class.getResource(imagePath));
		node.setText(text);
		node.getImageTextLabel().setBorderSize(20);
		node.getImageTextLabel().setBorderColour(GraphConfig.nodeBorderColor);
		node.getImageTextLabel().setBackgroundColour(GraphConfig.nodeBackgroundColor);
		node.getImageTextLabel().setFont(GraphConfig.nodeTextFont);
		node.getImageTextLabel().setTextColour(GraphConfig.nodeForegroundColor);
		node.setLinkButtonLocation(GraphNode.TOP_RIGHT_CORNER);
		node.setEditPointLocation(GraphNode.TOP_LEFT_CORNER);
		node.setCloseButtonLocation(GraphNode.TOP_LEFT_CORNER);
		node.setLocation(100, 100);
		return node;
	}

	/**
	 * Gets the content system.
	 *
	 * @return the content system
	 */
	public ContentSystem getContentSystem() {
		return content;
	}

	/**
	 * Gets the graph manager.
	 *
	 * @return the graph manager
	 */
	public GraphManager getGraphManager() {
		return gmanager;
	}

	/** Forwards per-frame updates to the content system once it exists. */
	@Override
	protected void stateUpdate(float tpf) {
		super.stateUpdate(tpf);
		if (content != null) {
			content.update(tpf);
		}
	}
}
| |
package com.carlosefonseca.common.widgets;
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Build;
import android.os.Handler;
import android.text.Editable;
import android.text.InputFilter;
import android.text.Spanned;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.support.annotation.Nullable;
import com.carlosefonseca.common.R;
@SuppressWarnings("UnusedDeclaration")
@SuppressWarnings("UnusedDeclaration")
public class HorizontalNumberPicker extends LinearLayout {

    private EditText textView;
    private Button minusBT, plusBT;
    private HorizontalNumberPicker view;

    // Current picker value, kept within [min, max] by the handlers below.
    int value = 0;
    // Raw number last typed by the user, before clamping.
    int input;
    // input clamped to [min, max]; when it differs from input the field text is rewritten.
    int filteredInput;
    int min = 0;
    int max = Integer.MAX_VALUE;
    int id = Integer.MIN_VALUE;
    CFNumberPickerDelegate delegate;

    public HorizontalNumberPicker(Context context) {
        super(context);
        init(context);
    }

    public HorizontalNumberPicker(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public HorizontalNumberPicker(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context);
    }

    /** Inflates the picker layout and wires up the +/- buttons and the text field. */
    private void init(Context context) {
        LayoutInflater layoutInflater =
                (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        view = (HorizontalNumberPicker) layoutInflater.inflate(R.layout.cf_number_picker, this);
        assert view != null;
        minusBT = (Button) view.findViewById(R.id.minus);
        plusBT = (Button) view.findViewById(R.id.plus);
        if (!view.isInEditMode()) {
            minusBT.setOnClickListener(minus_action);
            plusBT.setOnClickListener(plus_action);
        }
        textView = (EditText) findViewById(R.id.value);
        if (!view.isInEditMode()) {
            textView.addTextChangedListener(text_action);
            textView.setOnKeyListener(key_listener);
            // Restrict keyboard input to digits.
            textView.setFilters(new InputFilter[]{filter});
        }
    }

    /*
     CONFIG
     */

    @Override
    public int getId() {
        return id;
    }

    @Override
    public void setId(int id) {
        this.id = id;
    }

    public HorizontalNumberPicker setDelegate(CFNumberPickerDelegate delegate) {
        this.delegate = delegate;
        return this;
    }

    public int getMin() {
        return min;
    }

    public void setMin(int min) {
        this.min = min;
    }

    public int getMax() {
        return max;
    }

    public void setMax(int max) {
        this.max = max;
    }

    public int getValue() {
        return value;
    }

    /** Sets the value and refreshes the display. NOTE(review): does not clamp to [min, max]. */
    public void setValue(int value) {
        this.value = value;
        updateNumberDisplay();
    }

    /*
     ACTIONS
     */

    OnClickListener minus_action = new OnClickListener() {
        @Override
        public void onClick(View view) {
            value = Math.max(--value, min);
            updateNumberDisplay();
            notifyValueChange();
        }
    };

    OnClickListener plus_action = new OnClickListener() {
        @Override
        public void onClick(View view) {
            value = Math.min(++value, max);
            updateNumberDisplay();
            notifyValueChange();
        }
    };

    TextWatcher text_action = new TextWatcher() {
        @Override
        public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) {
        }

        @Override
        public void onTextChanged(CharSequence arg0, int arg1, int arg2, int arg3) {
            textChanged(arg0);
        }

        @Override
        public void afterTextChanged(Editable editable) {
            // If the typed number had to be clamped, rewrite the field with the
            // clamped value and reset the bookkeeping.
            if (input != filteredInput) {
                editable.clear();
                editable.append(String.valueOf(filteredInput));
                filteredInput = 0;
                input = 0;
            }
        }
    };

    /** Parses the field text, clamps it to [min, max], and notifies the delegate. */
    private void textChanged(CharSequence text) {
        if (text.length() == 0) {
            value = 0;
        } else {
            try {
                input = Integer.parseInt(String.valueOf(text));
            } catch (NumberFormatException e) {
                // The digit filter rules out signs and letters, but a long enough run
                // of digits still overflows int and crashed the original parse; treat
                // such input as "too large" and let the clamp below handle it.
                input = Integer.MAX_VALUE;
            }
            filteredInput = Math.min(input, max);
            filteredInput = Math.max(filteredInput, min);
            value = filteredInput;
        }
        notifyValueChange();
    }

    /**
     * Handles Backspace: re-parses the text shortly after the deletion lands.
     */
    OnKeyListener key_listener = new OnKeyListener() {
        @Override
        public boolean onKey(final View view, int keyCode, KeyEvent keyEvent) {
            if (keyCode == KeyEvent.KEYCODE_DEL) {
                new Handler().postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        textChanged(((EditText) view).getText());
                    }
                }, 10);
            }
            return false;
        }
    };

    private void updateNumberDisplay() {
        textView.setText(String.valueOf(value));
    }

    @Override
    public void setEnabled(boolean enabled) {
        super.setEnabled(enabled);
        minusBT.setEnabled(enabled);
        plusBT.setEnabled(enabled);
        textView.setEnabled(enabled);
    }

    /*
     DELEGATE
     */

    /** Callback invoked whenever the picker value changes. */
    public interface CFNumberPickerDelegate {
        public void valueChanged(int value);
    }

    public void notifyValueChange() {
        if (delegate != null) {
            delegate.valueChanged(value);
        }
    }

    /*
     STUFF
     */

    /** Rejects any input character that is not a decimal digit. */
    final InputFilter filter = new InputFilter() {
        @Override
        @Nullable
        public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int dstart, int dend) {
            for (int i = start; i < end; i++) {
                if (!Character.isDigit(source.charAt(i))) {
                    return "";
                }
            }
            return null;
        }
    };
}
| |
package cz.habarta.typescript.generator.compiler;
import cz.habarta.typescript.generator.DateMapping;
import cz.habarta.typescript.generator.EnumMapping;
import cz.habarta.typescript.generator.Extension;
import cz.habarta.typescript.generator.IdentifierCasing;
import cz.habarta.typescript.generator.NullabilityDefinition;
import cz.habarta.typescript.generator.OptionalPropertiesDeclaration;
import cz.habarta.typescript.generator.RestNamespacing;
import cz.habarta.typescript.generator.Settings;
import cz.habarta.typescript.generator.TsParameter;
import cz.habarta.typescript.generator.TsProperty;
import cz.habarta.typescript.generator.TsType;
import cz.habarta.typescript.generator.TypeProcessor;
import cz.habarta.typescript.generator.TypeScriptGenerator;
import cz.habarta.typescript.generator.emitter.EmitterExtension;
import cz.habarta.typescript.generator.emitter.TsAccessibilityModifier;
import cz.habarta.typescript.generator.emitter.TsAliasModel;
import cz.habarta.typescript.generator.emitter.TsAssignmentExpression;
import cz.habarta.typescript.generator.emitter.TsBeanCategory;
import cz.habarta.typescript.generator.emitter.TsBeanModel;
import cz.habarta.typescript.generator.emitter.TsCallExpression;
import cz.habarta.typescript.generator.emitter.TsConstructorModel;
import cz.habarta.typescript.generator.emitter.TsEnumModel;
import cz.habarta.typescript.generator.emitter.TsExpression;
import cz.habarta.typescript.generator.emitter.TsExpressionStatement;
import cz.habarta.typescript.generator.emitter.TsHelper;
import cz.habarta.typescript.generator.emitter.TsIdentifierReference;
import cz.habarta.typescript.generator.emitter.TsMemberExpression;
import cz.habarta.typescript.generator.emitter.TsMethodModel;
import cz.habarta.typescript.generator.emitter.TsModel;
import cz.habarta.typescript.generator.emitter.TsModifierFlags;
import cz.habarta.typescript.generator.emitter.TsObjectLiteral;
import cz.habarta.typescript.generator.emitter.TsParameterModel;
import cz.habarta.typescript.generator.emitter.TsPropertyDefinition;
import cz.habarta.typescript.generator.emitter.TsPropertyModel;
import cz.habarta.typescript.generator.emitter.TsReturnStatement;
import cz.habarta.typescript.generator.emitter.TsStatement;
import cz.habarta.typescript.generator.emitter.TsStringLiteral;
import cz.habarta.typescript.generator.emitter.TsSuperExpression;
import cz.habarta.typescript.generator.emitter.TsTaggedTemplateLiteral;
import cz.habarta.typescript.generator.emitter.TsTemplateLiteral;
import cz.habarta.typescript.generator.emitter.TsThisExpression;
import cz.habarta.typescript.generator.parser.BeanModel;
import cz.habarta.typescript.generator.parser.EnumModel;
import cz.habarta.typescript.generator.parser.MethodModel;
import cz.habarta.typescript.generator.parser.MethodParameterModel;
import cz.habarta.typescript.generator.parser.Model;
import cz.habarta.typescript.generator.parser.PathTemplate;
import cz.habarta.typescript.generator.parser.PropertyAccess;
import cz.habarta.typescript.generator.parser.PropertyModel;
import cz.habarta.typescript.generator.parser.RestApplicationModel;
import cz.habarta.typescript.generator.parser.RestMethodModel;
import cz.habarta.typescript.generator.parser.RestQueryParam;
import cz.habarta.typescript.generator.type.JTypeWithNullability;
import cz.habarta.typescript.generator.util.GenericsResolver;
import cz.habarta.typescript.generator.util.Pair;
import cz.habarta.typescript.generator.util.Utils;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Compiles Java model to TypeScript model.
* <ol>
* <li>
* Transforms Model to TsModel.
* TypeProcessor (chain) is used to transform Java types to TypeScript types.
* Symbols are used instead of final type names.
* </li>
* <li>
* Applies needed transformations:
* <ul>
* <li>Dates to strings or numbers.</li>
* <li>Enums to string literal union types.</li>
* </ul>
* </li>
* <li>
* Resolves Symbols type names. This maps Java class names to TypeScript identifiers using any relevant options from Settings.
* </li>
* </ol>
*/
public class ModelCompiler {
private final Settings settings;
private final TypeProcessor typeProcessor;
/**
 * Creates a compiler that translates Java types using the given
 * {@code typeProcessor} under the supplied {@code settings}.
 */
public ModelCompiler(Settings settings, TypeProcessor typeProcessor) {
    this.typeProcessor = typeProcessor;
    this.settings = settings;
}
/**
 * Points in the {@link #javaToTypeScript} pipeline at which extension
 * transformers can hook in. Constant names are part of the extension API;
 * their order mirrors the pipeline order.
 */
public enum TransformationPhase {
    // Presumably applied to the Java model before the TS model is built — the
    // call site is outside this view; TODO confirm.
    BeforeTsModel,
    // Applied before the enum representation transforms run.
    BeforeEnums,
    // Applied just before symbolTable.resolveSymbolNames().
    BeforeSymbolResolution,
    // Applied after sortTypeDeclarations(), as the final phase.
    AfterDeclarationSorting,
}
/**
 * Compiles the parsed Java {@code model} into a TypeScript model.
 *
 * <p>The transformation phases run in a deliberate, fixed order — several
 * later phases (enum key maps, symbol resolution, declaration sorting)
 * depend on the results of earlier ones, so do not reorder casually.</p>
 *
 * @param model the Java model produced by the parser
 * @return the transformed TypeScript model with resolved symbol names
 */
public TsModel javaToTypeScript(Model model) {
    final SymbolTable symbolTable = new SymbolTable(settings);
    // Extensions may rewrite the Java model before any TS model exists.
    final List<Extension.TransformerDefinition> extensionTransformers = getExtensionTransformers();
    model = applyExtensionModelTransformers(symbolTable, model, extensionTransformers);
    // Base Java -> TS translation; type names stay symbolic until resolved below.
    TsModel tsModel = processModel(symbolTable, model);
    tsModel = addCustomTypeAliases(symbolTable, tsModel);
    // Bean shaping: inheritance/implementation handling and property order.
    tsModel = removeInheritedProperties(symbolTable, tsModel);
    tsModel = addImplementedProperties(symbolTable, tsModel);
    tsModel = sortPropertiesDeclarations(symbolTable, tsModel);
    if (settings.generateConstructors) {
        tsModel = addConstructors(symbolTable, tsModel);
    }
    // REST: emit interfaces and/or clients for the discovered REST applications.
    if (settings.isGenerateRest()) {
        final Symbol responseSymbol = createRestResponseType(symbolTable, tsModel);
        // Optional extra-options type, possibly as a generic variable.
        final TsType optionsType = settings.restOptionsType != null
            ? new TsType.VerbatimType(settings.restOptionsType)
            : null;
        final TsType.GenericVariableType optionsGenericVariable = settings.restOptionsTypeIsGeneric
            ? new TsType.GenericVariableType(settings.restOptionsType)
            : null;
        final List<RestApplicationModel> restApplicationsWithInterface = model.getRestApplications().stream()
            .filter(restApplication -> restApplication.getType().generateInterface.apply(settings))
            .collect(Collectors.toList());
        final List<RestApplicationModel> restApplicationsWithClient = model.getRestApplications().stream()
            .filter(restApplication -> restApplication.getType().generateClient.apply(settings))
            .collect(Collectors.toList());
        if (!restApplicationsWithInterface.isEmpty()) {
            createRestInterfaces(tsModel, symbolTable, restApplicationsWithInterface, responseSymbol, optionsGenericVariable, optionsType);
        }
        if (!restApplicationsWithClient.isEmpty()) {
            createRestClients(tsModel, symbolTable, restApplicationsWithClient, responseSymbol, optionsGenericVariable, optionsType);
        }
    }
    // dates
    tsModel = transformDates(symbolTable, tsModel);
    // enums: extension transformers run first, then the configured enum mapping.
    tsModel = applyExtensionTransformers(symbolTable, model, tsModel, TransformationPhase.BeforeEnums, extensionTransformers);
    tsModel = addEnumValuesToJavadoc(tsModel);
    if (settings.enumMemberCasing != null && settings.enumMemberCasing != IdentifierCasing.keepOriginal) {
        tsModel = transformEnumMembersCase(tsModel);
    }
    if (!settings.areDefaultStringEnumsOverriddenByExtension()) {
        // Default mapping (mapEnum == null) is a string-literal union.
        if (settings.mapEnum == null || settings.mapEnum == EnumMapping.asUnion || settings.mapEnum == EnumMapping.asInlineUnion) {
            tsModel = transformEnumsToUnions(tsModel);
        }
        if (settings.mapEnum == EnumMapping.asInlineUnion) {
            tsModel = inlineEnums(tsModel, symbolTable);
        }
        if (settings.mapEnum == EnumMapping.asNumberBasedEnum) {
            tsModel = transformEnumsToNumberBasedEnum(tsModel);
        }
    }
    // after enum transformations transform Maps with rest of the enums (not unions) used in keys
    tsModel = transformNonStringEnumKeyMaps(symbolTable, tsModel);
    // tagged unions
    tsModel = createAndUseTaggedUnions(symbolTable, tsModel);
    // nullable types and optional properties
    tsModel = makeUndefinablePropertiesAndParametersOptional(symbolTable, tsModel);
    tsModel = transformNullableTypes(symbolTable, tsModel);
    tsModel = eliminateUndefinedFromOptionalPropertiesAndParameters(symbolTable, tsModel);
    tsModel = transformOptionalProperties(symbolTable, tsModel);
    tsModel = applyExtensionTransformers(symbolTable, model, tsModel, TransformationPhase.BeforeSymbolResolution, extensionTransformers);
    // Symbols receive their final TypeScript names only now, after all type rewriting.
    symbolTable.resolveSymbolNames();
    tsModel = sortTypeDeclarations(symbolTable, tsModel);
    tsModel = applyExtensionTransformers(symbolTable, model, tsModel, TransformationPhase.AfterDeclarationSorting, extensionTransformers);
    return tsModel;
}
private List<Extension.TransformerDefinition> getExtensionTransformers() {
final List<Extension.TransformerDefinition> transformers = new ArrayList<>();
for (EmitterExtension emitterExtension : settings.extensions) {
if (emitterExtension instanceof Extension) {
final Extension extension = (Extension) emitterExtension;
transformers.addAll(extension.getTransformers());
}
}
return transformers;
}
private static Model applyExtensionModelTransformers(SymbolTable symbolTable, Model model,
List<Extension.TransformerDefinition> transformerDefinitions
) {
for (Extension.TransformerDefinition definition : transformerDefinitions) {
if (definition.phase == TransformationPhase.BeforeTsModel) {
model = definition.transformer.transformModel(symbolTable, model);
}
}
return model;
}
private static TsModel applyExtensionTransformers(SymbolTable symbolTable, Model model, TsModel tsModel,
TransformationPhase phase, List<Extension.TransformerDefinition> transformerDefinitions
) {
final TsModelTransformer.Context context = new TsModelTransformer.Context(symbolTable, model);
for (Extension.TransformerDefinition definition : transformerDefinitions) {
if (definition.phase == phase) {
tsModel = definition.tsTransformer.transformModel(context, tsModel);
}
}
return tsModel;
}
    /**
     * Converts a single Java type to its TypeScript counterpart by wrapping it
     * in a synthetic one-property bean, compiling that one-bean model through
     * the full pipeline, and reading the compiled property's type back out.
     */
    public TsType javaToTypeScript(Type type) {
        // synthetic bean: Object origin, single property named "property" of the requested type
        final BeanModel beanModel = new BeanModel(Object.class, Object.class, null, null, null, Collections.<Type>emptyList(),
                Collections.singletonList(new PropertyModel("property", type, false, null, null, null, null, null)), null);
        final Model model = new Model(Collections.singletonList(beanModel), Collections.<EnumModel>emptyList(), null);
        final TsModel tsModel = javaToTypeScript(model);
        // the bean we created is the only one, its only property carries the answer
        return tsModel.getBeans().get(0).getProperties().get(0).getTsType();
    }
private TsModel processModel(SymbolTable symbolTable, Model model) {
final Map<Type, List<BeanModel>> children = createChildrenMap(model);
final List<TsBeanModel> beans = new ArrayList<>();
for (BeanModel bean : model.getBeans()) {
beans.add(processBean(symbolTable, model, children, bean));
}
final List<TsEnumModel> enums = new ArrayList<>();
final List<TsEnumModel> stringEnums = new ArrayList<>();
for (EnumModel enumModel : model.getEnums()) {
final TsEnumModel tsEnumModel = processEnum(symbolTable, enumModel);
enums.add(tsEnumModel);
if (tsEnumModel.getKind() == EnumKind.StringBased) {
stringEnums.add(tsEnumModel);
}
}
return new TsModel().withBeans(beans).withEnums(enums).withOriginalStringEnums(stringEnums);
}
private Map<Type, List<BeanModel>> createChildrenMap(Model model) {
final Map<Type, List<BeanModel>> children = new LinkedHashMap<>();
for (BeanModel bean : model.getBeans()) {
for (Type ancestor : bean.getParentAndInterfaces()) {
final Type processedAncestor = Utils.getRawClassOrNull(ancestor);
if (!children.containsKey(processedAncestor)) {
children.put(processedAncestor, new ArrayList<>());
}
children.get(processedAncestor).add(bean);
}
}
return children;
}
private <T> TsBeanModel processBean(SymbolTable symbolTable, Model model, Map<Type, List<BeanModel>> children, BeanModel bean) {
final boolean isClass = mappedToClass(bean.getOrigin());
final List<TsType> extendsList = new ArrayList<>();
final List<TsType> implementsList = new ArrayList<>();
final TsType parentType = typeFromJava(symbolTable, bean.getParent());
if (parentType != null && !parentType.equals(TsType.Any)) {
final boolean isParentMappedToClass = mappedToClass(getOriginClass(symbolTable, parentType));
if (isClass && !isParentMappedToClass) {
implementsList.add(parentType);
} else {
extendsList.add(parentType);
}
}
final List<TsType> interfaces = new ArrayList<>();
for (Type aInterface : bean.getInterfaces()) {
final TsType interfaceType = typeFromJava(symbolTable, aInterface);
if (!interfaceType.equals(TsType.Any)) {
interfaces.add(interfaceType);
}
}
if (isClass) {
implementsList.addAll(interfaces);
} else {
extendsList.addAll(interfaces);
}
final List<TsPropertyModel> properties = processProperties(symbolTable, model, bean);
boolean isTaggedUnion = false;
if (bean.getDiscriminantProperty() != null && bean.getProperty(bean.getDiscriminantProperty()) == null) {
isTaggedUnion = true;
boolean isDisciminantProperty = true;
final List<BeanModel> selfAndDescendants = getSelfAndDescendants(bean, children);
final List<TsType.StringLiteralType> literals = new ArrayList<>();
for (BeanModel descendant : selfAndDescendants) {
if (descendant.getDiscriminantProperty() == null || descendant.getProperty(bean.getDiscriminantProperty()) != null) {
// do not handle bean as tagged union if any descendant or it itself has duplicate discriminant property
isTaggedUnion = false;
isDisciminantProperty = false;
}
if (descendant.getDiscriminantLiteral() != null) {
literals.add(new TsType.StringLiteralType(descendant.getDiscriminantLiteral()));
}
}
final List<BeanModel> descendants = selfAndDescendants.subList(1, selfAndDescendants.size());
for (BeanModel descendant : descendants) {
// do not handle bean as tagged union if any descendant has "non-related" generic parameter
final List<String> mappedGenericVariables = GenericsResolver.mapGenericVariablesToBase(descendant.getOrigin(), bean.getOrigin());
if (mappedGenericVariables.contains(null)) {
isTaggedUnion = false;
}
}
final TsType discriminantType = isDisciminantProperty && !literals.isEmpty()
? new TsType.UnionType(literals)
: TsType.String;
final TsModifierFlags modifiers = TsModifierFlags.None.setReadonly(settings.declarePropertiesAsReadOnly);
properties.add(0, new TsPropertyModel(bean.getDiscriminantProperty(), discriminantType, modifiers, /*ownProperty*/ true, null));
}
final TsBeanModel tsBean = new TsBeanModel(
bean.getOrigin(),
TsBeanCategory.Data,
isClass,
symbolTable.getSymbol(bean.getOrigin()),
getTypeParameters(bean.getOrigin()),
parentType,
extendsList,
implementsList,
properties,
/*constructor*/ null,
/*methods*/ null,
bean.getComments());
return isTaggedUnion
? tsBean.withTaggedUnion(bean.getTaggedUnionClasses(), bean.getDiscriminantProperty(), bean.getDiscriminantLiteral())
: tsBean;
}
private boolean mappedToClass(Class<?> cls) {
return cls != null && !cls.isInterface() && settings.getMapClassesAsClassesFilter().test(cls.getName());
}
private static List<TsType.GenericVariableType> getTypeParameters(Class<?> cls) {
final List<TsType.GenericVariableType> typeParameters = new ArrayList<>();
for (TypeVariable<?> typeParameter : cls.getTypeParameters()) {
typeParameters.add(new TsType.GenericVariableType(typeParameter.getName()));
}
return typeParameters;
}
private List<TsPropertyModel> processProperties(SymbolTable symbolTable, Model model, BeanModel bean) {
return processProperties(symbolTable, model, bean, "", "");
}
private List<TsPropertyModel> processProperties(SymbolTable symbolTable, Model model, BeanModel bean, String prefix, String suffix) {
final List<TsPropertyModel> properties = new ArrayList<>();
for (PropertyModel property : bean.getProperties()) {
boolean pulled = false;
final PropertyModel.PullProperties pullProperties = property.getPullProperties();
if (pullProperties != null) {
final Type type = JTypeWithNullability.getPlainType(property.getType());
if (type instanceof Class<?>) {
final BeanModel pullBean = model.getBean((Class<?>) type);
if (pullBean != null) {
properties.addAll(processProperties(symbolTable, model, pullBean, prefix + pullProperties.prefix, pullProperties.suffix + suffix));
pulled = true;
}
}
}
if (!pulled) {
properties.add(processProperty(symbolTable, bean, property, prefix, suffix));
}
}
return properties;
}
private static List<BeanModel> getSelfAndDescendants(BeanModel bean, Map<Type, List<BeanModel>> children) {
final List<BeanModel> descendants = new ArrayList<>();
descendants.add(bean);
final List<BeanModel> directDescendants = children.get(bean.getOrigin());
if (directDescendants != null) {
for (BeanModel descendant : directDescendants) {
descendants.addAll(getSelfAndDescendants(descendant, children));
}
}
return descendants;
}
private TsPropertyModel processProperty(SymbolTable symbolTable, BeanModel bean, PropertyModel property, String prefix, String suffix) {
final TsType type = typeFromJava(symbolTable, property.getType(), property.getContext(), property.getName(), bean.getOrigin());
final TsType tsType = property.isOptional() ? type.optional() : type;
final TsModifierFlags modifiers = TsModifierFlags.None.setReadonly(settings.declarePropertiesAsReadOnly);
final List<String> comments = settings.generateReadonlyAndWriteonlyJSDocTags
? Utils.concat(property.getComments(), getPropertyAccessComments(property.getAccess()))
: property.getComments();
return new TsPropertyModel(prefix + property.getName() + suffix, tsType, modifiers, /*ownProperty*/ false, comments);
}
private static List<String> getPropertyAccessComments(PropertyAccess access) {
final String accessTag =
access == PropertyAccess.ReadOnly ? "@readonly" :
access == PropertyAccess.WriteOnly ? "@writeonly" :
null;
return accessTag != null ? Collections.singletonList(accessTag) : null;
}
private TsEnumModel processEnum(SymbolTable symbolTable, EnumModel enumModel) {
final Symbol beanIdentifier = symbolTable.getSymbol(enumModel.getOrigin());
TsEnumModel tsEnumModel = TsEnumModel.fromEnumModel(beanIdentifier, enumModel, isEnumNonConst(enumModel));
return tsEnumModel;
}
private boolean isEnumNonConst(EnumModel enumModel) {
boolean isNonConst = settings.nonConstEnums;
if (!isNonConst) {
for (Class<? extends Annotation> nonConstAnnotation : settings.nonConstEnumAnnotations) {
if (enumModel.getOrigin().isAnnotationPresent(nonConstAnnotation)) {
isNonConst = true;
break;
}
}
}
return isNonConst;
}
    // Convenience overload: no usage information is attached (warnings about
    // unsupported types will not mention where the type was used).
    private TsType typeFromJava(SymbolTable symbolTable, Type javaType) {
        return typeFromJava(symbolTable, javaType, null, null);
    }
    // Convenience overload: no explicit type context; usedInProperty/usedInClass
    // are only used to enrich warning messages for unsupported types.
    private TsType typeFromJava(SymbolTable symbolTable, Type javaType, String usedInProperty, Class<?> usedInClass) {
        return typeFromJava(symbolTable, javaType, null, usedInProperty, usedInClass);
    }
private TsType typeFromJava(SymbolTable symbolTable, Type javaType, Object typeContext, String usedInProperty, Class<?> usedInClass) {
if (javaType == null) {
return null;
}
final TypeProcessor.Context context = new TypeProcessor.Context(symbolTable, typeProcessor, typeContext);
final TypeProcessor.Result result = context.processType(javaType);
if (result != null) {
return result.getTsType();
} else {
if (usedInClass != null && usedInProperty != null) {
TypeScriptGenerator.getLogger().warning(String.format("Unsupported type '%s' used in '%s.%s'", javaType, usedInClass.getSimpleName(), usedInProperty));
} else {
TypeScriptGenerator.getLogger().warning(String.format("Unsupported type '%s'", javaType));
}
return TsType.Any;
}
}
private TsModel addCustomTypeAliases(SymbolTable symbolTable, TsModel tsModel) {
final List<TsAliasModel> aliases = new ArrayList<>(tsModel.getTypeAliases());
for (Settings.CustomTypeAlias customTypeAlias : settings.getValidatedCustomTypeAliases()) {
final Symbol name = symbolTable.getSyntheticSymbol(customTypeAlias.tsType.rawName);
final List<TsType.GenericVariableType> typeParameters = customTypeAlias.tsType.typeParameters != null
? customTypeAlias.tsType.typeParameters.stream()
.map(TsType.GenericVariableType::new)
.collect(Collectors.toList())
: null;
final TsType definition = new TsType.VerbatimType(customTypeAlias.tsDefinition);
aliases.add(new TsAliasModel(null, name, typeParameters, definition, null));
}
return tsModel.withTypeAliases(aliases);
}
private TsModel removeInheritedProperties(SymbolTable symbolTable, TsModel tsModel) {
final List<TsBeanModel> beans = new ArrayList<>();
for (TsBeanModel bean : tsModel.getBeans()) {
final Map<String, TsType> inheritedPropertyTypes = getInheritedProperties(symbolTable, tsModel, bean.getAllParents());
final List<TsPropertyModel> properties = new ArrayList<>();
for (TsPropertyModel property : bean.getProperties()) {
if (property.isOwnProperty() || !Objects.equals(property.getTsType(), inheritedPropertyTypes.get(property.getName()))) {
properties.add(property);
}
}
beans.add(bean.withProperties(properties));
}
return tsModel.withBeans(beans);
}
    /**
     * For beans mapped to TS classes, copies properties declared by implemented
     * interfaces into the class body (TS classes must materialize interface
     * members). Properties already present on the class or inherited from its
     * superclass chain are skipped.
     */
    private TsModel addImplementedProperties(SymbolTable symbolTable, TsModel tsModel) {
        final List<TsBeanModel> beans = new ArrayList<>();
        for (TsBeanModel bean : tsModel.getBeans()) {
            if (bean.isClass()) {
                final List<TsPropertyModel> resultProperties = new ArrayList<>(bean.getProperties());
                // names already provided by the class itself...
                final Set<String> classPropertyNames = new LinkedHashSet<>();
                for (TsPropertyModel property : bean.getProperties()) {
                    classPropertyNames.add(property.getName());
                }
                // ...or by its superclass chain
                classPropertyNames.addAll(getInheritedProperties(symbolTable, tsModel, bean.getExtendsList()).keySet());
                final List<TsPropertyModel> implementedProperties = getImplementedProperties(symbolTable, tsModel, bean.getImplementsList());
                // reversed + insert-at-0 keeps the interface properties in their
                // original declaration order at the front of the class body
                Collections.reverse(implementedProperties);
                for (TsPropertyModel implementedProperty : implementedProperties) {
                    if (!classPropertyNames.contains(implementedProperty.getName())) {
                        resultProperties.add(0, implementedProperty);
                        classPropertyNames.add(implementedProperty.getName());
                    }
                }
                beans.add(bean.withProperties(resultProperties));
            } else {
                beans.add(bean);
            }
        }
        return tsModel.withBeans(beans);
    }
private TsModel addConstructors(SymbolTable symbolTable, TsModel tsModel) {
final List<TsBeanModel> beans = new ArrayList<>();
for (TsBeanModel bean : tsModel.getBeans()) {
final Symbol beanIdentifier = symbolTable.getSymbol(bean.getOrigin());
final List<TsType.GenericVariableType> typeParameters = getTypeParameters(bean.getOrigin());
final TsType.ReferenceType dataType = typeParameters.isEmpty()
? new TsType.ReferenceType(beanIdentifier)
: new TsType.GenericReferenceType(beanIdentifier, typeParameters);
final List<TsStatement> body = new ArrayList<>();
if (bean.getParent() != null) {
body.add(new TsExpressionStatement(
new TsCallExpression(
new TsSuperExpression(),
new TsIdentifierReference("data")
)
));
}
for (TsPropertyModel property : bean.getProperties()) {
final Map<String, TsType> inheritedProperties = ModelCompiler.getInheritedProperties(symbolTable, tsModel, Utils.listFromNullable(bean.getParent()));
if (!inheritedProperties.containsKey(property.getName())) {
body.add(new TsExpressionStatement(new TsAssignmentExpression(
new TsMemberExpression(new TsThisExpression(), property.name),
new TsMemberExpression(new TsIdentifierReference("data"), property.name)
)));
}
}
if (bean.isClass()) {
final TsConstructorModel constructor = new TsConstructorModel(
TsModifierFlags.None,
Arrays.asList(new TsParameterModel("data", dataType)),
body,
/*comments*/ null
);
beans.add(bean.withConstructor(constructor));
} else {
beans.add(bean);
}
}
return tsModel.withBeans(beans);
}
    /**
     * Collects all properties (name -> type) inherited through the given parent
     * types, walking each parent's ancestor chain recursively.
     * Ordering matters: a parent's own properties are put AFTER its ancestors'
     * properties, so the nearest declaration in the chain wins on name clashes.
     * Parents without a known bean model (e.g. library types) are skipped.
     */
    public static Map<String, TsType> getInheritedProperties(SymbolTable symbolTable, TsModel tsModel, List<TsType> parents) {
        final Map<String, TsType> properties = new LinkedHashMap<>();
        for (TsType parentType : parents) {
            final TsBeanModel parent = tsModel.getBean(getOriginClass(symbolTable, parentType));
            if (parent != null) {
                // ancestors first, then the parent itself so it overrides on clashes
                properties.putAll(getInheritedProperties(symbolTable, tsModel, parent.getAllParents()));
                for (TsPropertyModel property : parent.getProperties()) {
                    properties.put(property.getName(), property.getTsType());
                }
            }
        }
        return properties;
    }
private static List<TsPropertyModel> getImplementedProperties(SymbolTable symbolTable, TsModel tsModel, List<TsType> interfaces) {
final List<TsPropertyModel> properties = new ArrayList<>();
for (TsType aInterface : interfaces) {
final TsBeanModel bean = tsModel.getBean(getOriginClass(symbolTable, aInterface));
if (bean != null) {
properties.addAll(getImplementedProperties(symbolTable, tsModel, bean.getExtendsList()));
properties.addAll(bean.getProperties());
}
}
return properties;
}
private Symbol createRestResponseType(SymbolTable symbolTable, TsModel tsModel) {
// response type
final Symbol responseSymbol = symbolTable.getSyntheticSymbol("RestResponse");
final TsType.GenericVariableType varR = new TsType.GenericVariableType("R");
final TsAliasModel responseTypeAlias;
if (settings.restResponseType != null) {
responseTypeAlias = new TsAliasModel(null, responseSymbol, Arrays.asList(varR), new TsType.VerbatimType(settings.restResponseType), null);
} else {
final TsType.GenericReferenceType responseTypeDefinition = new TsType.GenericReferenceType(symbolTable.getSyntheticSymbol("Promise"), varR);
responseTypeAlias = new TsAliasModel(null, responseSymbol, Arrays.asList(varR), responseTypeDefinition, null);
}
tsModel.getTypeAliases().add(responseTypeAlias);
return responseSymbol;
}
private void createRestInterfaces(TsModel tsModel, SymbolTable symbolTable, List<RestApplicationModel> restApplications,
Symbol responseSymbol, TsType.GenericVariableType optionsGenericVariable, TsType optionsType) {
final List<TsType.GenericVariableType> typeParameters = Utils.listFromNullable(optionsGenericVariable);
final Map<Symbol, List<TsMethodModel>> groupedMethods = processRestMethods(tsModel, restApplications, symbolTable, null, responseSymbol, optionsType, false);
for (Map.Entry<Symbol, List<TsMethodModel>> entry : groupedMethods.entrySet()) {
final TsBeanModel interfaceModel = new TsBeanModel(null, TsBeanCategory.Service, false, entry.getKey(), typeParameters, null, null, null, null, null, entry.getValue(), null);
tsModel.getBeans().add(interfaceModel);
}
}
    /**
     * Generates REST client classes: a shared {@code HttpClient} interface with
     * a single generic {@code request} method, one client class per method
     * container delegating to it, and the uriEncoding template-tag helper.
     * When interfaces are also generated, client classes get a "Client" suffix
     * and implement the corresponding interface.
     */
    private void createRestClients(TsModel tsModel, SymbolTable symbolTable, List<RestApplicationModel> restApplications,
            Symbol responseSymbol, TsType.GenericVariableType optionsGenericVariable, TsType optionsType) {
        final Symbol httpClientSymbol = symbolTable.getSyntheticSymbol("HttpClient");
        final List<TsType.GenericVariableType> typeParameters = Utils.listFromNullable(optionsGenericVariable);
        // HttpClient interface
        final TsType.GenericVariableType returnGenericVariable = new TsType.GenericVariableType("R");
        // request<R>(requestConfig: { method, url, queryParams?, data?, copyFn?, options? }): RestResponse<R>
        tsModel.getBeans().add(new TsBeanModel(null, TsBeanCategory.ServicePrerequisite, false, httpClientSymbol, typeParameters, null, null, null, null, null, Arrays.asList(
                new TsMethodModel("request", TsModifierFlags.None, Arrays.asList(returnGenericVariable), Arrays.asList(
                        new TsParameterModel("requestConfig", new TsType.ObjectType(
                                new TsProperty("method", TsType.String),
                                new TsProperty("url", TsType.String),
                                new TsProperty("queryParams", new TsType.OptionalType(TsType.Any)),
                                new TsProperty("data", new TsType.OptionalType(TsType.Any)),
                                new TsProperty("copyFn", new TsType.OptionalType(new TsType.FunctionType(Arrays.asList(new TsParameter("data", returnGenericVariable)), returnGenericVariable))),
                                optionsType != null ? new TsProperty("options", new TsType.OptionalType(optionsType)) : null
                        ))
                ), new TsType.GenericReferenceType(responseSymbol, returnGenericVariable), null, null)
        ), null));
        // application client classes
        final TsType.ReferenceType httpClientType = optionsGenericVariable != null
                ? new TsType.GenericReferenceType(httpClientSymbol, optionsGenericVariable)
                : new TsType.ReferenceType(httpClientSymbol);
        // constructor(protected httpClient: HttpClient) {} — shared by all client classes
        final TsConstructorModel constructor = new TsConstructorModel(
                TsModifierFlags.None,
                Arrays.asList(new TsParameterModel(TsAccessibilityModifier.Protected, "httpClient", httpClientType)),
                Collections.<TsStatement>emptyList(),
                null
        );
        final boolean bothInterfacesAndClients = settings.generateJaxrsApplicationInterface || settings.generateSpringApplicationInterface;
        // when only clients are generated, the "Client" suffix participates in grouping
        final String groupingSuffix = bothInterfacesAndClients ? null : "Client";
        final Map<Symbol, List<TsMethodModel>> groupedMethods = processRestMethods(tsModel, restApplications, symbolTable, groupingSuffix, responseSymbol, optionsType, true);
        for (Map.Entry<Symbol, List<TsMethodModel>> entry : groupedMethods.entrySet()) {
            final Symbol symbol = bothInterfacesAndClients ? symbolTable.addSuffixToSymbol(entry.getKey(), "Client") : entry.getKey();
            final TsType interfaceType = bothInterfacesAndClients ? new TsType.ReferenceType(entry.getKey()) : null;
            final TsBeanModel clientModel = new TsBeanModel(null, TsBeanCategory.Service, true, symbol, typeParameters, null, null,
                    Utils.listFromNullable(interfaceType), null, constructor, entry.getValue(), null);
            tsModel.getBeans().add(clientModel);
        }
        // helper
        tsModel.getHelpers().add(TsHelper.loadFromResource("/helpers/uriEncoding.ts"));
    }
private Map<Symbol, List<TsMethodModel>> processRestMethods(TsModel tsModel, List<RestApplicationModel> restApplications, SymbolTable symbolTable, String nameSuffix, Symbol responseSymbol, TsType optionsType, boolean implement) {
final Map<Symbol, List<TsMethodModel>> result = new LinkedHashMap<>();
final Map<Symbol, List<Pair<RestApplicationModel, RestMethodModel>>> groupedMethods = groupingByMethodContainer(restApplications, symbolTable, nameSuffix);
for (Map.Entry<Symbol, List<Pair<RestApplicationModel, RestMethodModel>>> entry : groupedMethods.entrySet()) {
result.put(entry.getKey(), processRestMethodGroup(tsModel, symbolTable, entry.getValue(), responseSymbol, optionsType, implement));
}
return result;
}
private List<TsMethodModel> processRestMethodGroup(TsModel tsModel, SymbolTable symbolTable, List<Pair<RestApplicationModel, RestMethodModel>> methods, Symbol responseSymbol, TsType optionsType, boolean implement) {
final List<TsMethodModel> resultMethods = new ArrayList<>();
final Map<String, Long> methodNamesCount = groupingByMethodName(methods);
for (Pair<RestApplicationModel, RestMethodModel> pair : methods) {
final RestApplicationModel restApplication = pair.getValue1();
final RestMethodModel method = pair.getValue2();
final boolean createLongName = methodNamesCount.get(method.getName()) > 1;
resultMethods.add(processRestMethod(tsModel, symbolTable, restApplication.getApplicationPath(), responseSymbol, method, createLongName, optionsType, implement));
}
return resultMethods;
}
    /**
     * Groups every (application, method) pair by its container symbol (see
     * getContainerSymbol); within each group methods are sorted by path.
     * NOTE(review): Collectors.groupingBy returns a map with unspecified
     * iteration order (HashMap) — group order appears to depend on symbol
     * hashing; confirm whether deterministic output ordering is required here.
     */
    private Map<Symbol, List<Pair<RestApplicationModel, RestMethodModel>>> groupingByMethodContainer(List<RestApplicationModel> restApplications, SymbolTable symbolTable, String nameSuffix) {
        return restApplications.stream()
                .flatMap(restApplication -> restApplication.getMethods().stream().map(method -> Pair.of(restApplication, method)))
                .collect(Collectors.groupingBy(
                        pair -> getContainerSymbol(pair.getValue1(), symbolTable, nameSuffix, pair.getValue2()),
                        Utils.toSortedList(Comparator.comparing(pair -> pair.getValue2().getPath()))
                ));
    }
    /**
     * Picks the symbol under which a REST method is grouped, according to the
     * restNamespacing setting: per root resource class, per annotation value
     * (falling back to the application when the value is missing or not a
     * valid identifier), or per application (default).
     */
    private Symbol getContainerSymbol(RestApplicationModel restApplication, SymbolTable symbolTable, String nameSuffix, RestMethodModel method) {
        if (settings.restNamespacing == RestNamespacing.perResource) {
            return symbolTable.getSymbol(method.getRootResource(), nameSuffix);
        }
        if (settings.restNamespacing == RestNamespacing.byAnnotation) {
            final Annotation annotation = method.getRootResource().getAnnotation(settings.restNamespacingAnnotation);
            // which annotation element carries the group name; defaults to "value"
            final String element = settings.restNamespacingAnnotationElement != null ? settings.restNamespacingAnnotationElement : "value";
            final String annotationValue = Utils.getAnnotationElementValue(annotation, element, String.class);
            if (annotationValue != null) {
                if (isValidIdentifierName(annotationValue)) {
                    return symbolTable.getSyntheticSymbol(annotationValue, nameSuffix);
                } else {
                    TypeScriptGenerator.getLogger().warning(String.format("Ignoring annotation value '%s' since it is not a valid identifier, '%s' will be in default namespace", annotationValue, method.getOriginClass().getName() + "." + method.getName()));
                }
            }
        }
        // default namespace: one container per REST application
        final String applicationName = getApplicationName(restApplication);
        return symbolTable.getSyntheticSymbol(applicationName, nameSuffix);
    }
private static String getApplicationName(RestApplicationModel restApplication) {
return restApplication.getApplicationName() != null ? restApplication.getApplicationName() : "RestApplication";
}
private static Map<String, Long> groupingByMethodName(List<Pair<RestApplicationModel, RestMethodModel>> methods) {
return methods.stream()
.map(pair -> pair.getValue2())
.collect(Collectors.groupingBy(RestMethodModel::getName, Collectors.counting()));
}
    /**
     * Compiles one Java REST method into a TS method model.
     * Parameter order: path params, entity (body) param, a single combined
     * "queryParams" object, then the optional "options" param. The return type
     * is wrapped in RestResponse&lt;R&gt;. When {@code implement} is true a body
     * delegating to {@code this.httpClient.request(...)} is generated.
     */
    private TsMethodModel processRestMethod(TsModel tsModel, SymbolTable symbolTable, String pathPrefix, Symbol responseSymbol, RestMethodModel method, boolean createLongName, TsType optionsType, boolean implement) {
        final String path = Utils.joinPath(pathPrefix, method.getPath());
        final PathTemplate pathTemplate = PathTemplate.parse(path);
        final List<String> comments = Utils.concat(method.getComments(), Arrays.asList(
                "HTTP " + method.getHttpMethod() + " /" + path,
                "Java method: " + method.getOriginClass().getName() + "." + method.getName()
        ));
        final List<TsParameterModel> parameters = new ArrayList<>();
        // path params
        for (MethodParameterModel parameter : method.getPathParams()) {
            parameters.add(processParameter(symbolTable, method, parameter));
        }
        // entity param
        if (method.getEntityParam() != null) {
            parameters.add(processParameter(symbolTable, method, method.getEntityParam()));
        }
        // query params — all merged into one "queryParams" object parameter whose
        // type is an intersection of object literals and QueryParams bean types
        final List<RestQueryParam> queryParams = method.getQueryParams();
        final TsParameterModel queryParameter;
        if (queryParams != null && !queryParams.isEmpty()) {
            final List<TsType> types = new ArrayList<>();
            if (queryParams.stream().anyMatch(param -> param instanceof RestQueryParam.Map)) {
                // a map-style param makes the whole object an open string index signature
                types.add(new TsType.IndexedArrayType(TsType.String, TsType.Any));
            } else {
                // consecutive single params are batched into one object literal type;
                // a bean param flushes the current batch to preserve declaration order
                final List<TsProperty> currentSingles = new ArrayList<>();
                final Runnable flushSingles = () -> {
                    if (!currentSingles.isEmpty()) {
                        types.add(new TsType.ObjectType(currentSingles));
                        currentSingles.clear();
                    }
                };
                for (RestQueryParam restQueryParam : queryParams) {
                    if (restQueryParam instanceof RestQueryParam.Single) {
                        final MethodParameterModel queryParam = ((RestQueryParam.Single) restQueryParam).getQueryParam();
                        final TsType type = typeFromJava(symbolTable, queryParam.getType(), method.getName(), method.getOriginClass());
                        currentSingles.add(new TsProperty(queryParam.getName(), restQueryParam.required ? type : new TsType.OptionalType(type)));
                    }
                    if (restQueryParam instanceof RestQueryParam.Bean) {
                        final BeanModel queryBean = ((RestQueryParam.Bean) restQueryParam).getBean();
                        flushSingles.run();
                        // one shared "...QueryParams" interface per bean class, created on first use
                        final Symbol queryParamsSymbol = symbolTable.getSymbol(queryBean.getOrigin(), "QueryParams");
                        if (tsModel.getBean(queryParamsSymbol) == null) {
                            tsModel.getBeans().add(new TsBeanModel(
                                    queryBean.getOrigin(),
                                    TsBeanCategory.Data,
                                    /*isClass*/false,
                                    queryParamsSymbol,
                                    /*typeParameters*/null,
                                    /*parent*/null,
                                    /*extendsList*/null,
                                    /*implementsList*/null,
                                    processProperties(symbolTable, null, queryBean),
                                    /*constructor*/null,
                                    /*methods*/null,
                                    /*comments*/null
                            ));
                        }
                        types.add(new TsType.ReferenceType(queryParamsSymbol));
                    }
                }
                flushSingles.run();
            }
            boolean allQueryParamsOptional = queryParams.stream().noneMatch(queryParam -> queryParam.required);
            TsType.IntersectionType queryParamType = new TsType.IntersectionType(types);
            queryParameter = new TsParameterModel("queryParams", allQueryParamsOptional ? new TsType.OptionalType(queryParamType) : queryParamType);
            parameters.add(queryParameter);
        } else {
            queryParameter = null;
        }
        if (optionsType != null) {
            final TsParameterModel optionsParameter = new TsParameterModel("options", new TsType.OptionalType(optionsType));
            parameters.add(optionsParameter);
        }
        // return type
        final TsType returnType = typeFromJava(symbolTable, method.getReturnType(), method.getName(), method.getOriginClass());
        final TsType wrappedReturnType = new TsType.GenericReferenceType(responseSymbol, returnType);
        // method name — overloaded Java names get "$HTTPMETHOD$path" appended,
        // with slashes turned into underscores and other non-word chars stripped
        final String nameSuffix;
        if (createLongName) {
            nameSuffix = "$" + method.getHttpMethod() + "$" + pathTemplate.format("", "", false)
                    .replaceAll("/", "_")
                    .replaceAll("\\W", "");
        } else {
            nameSuffix = "";
        }
        // implementation: return this.httpClient.request({ method, url, queryParams?, data?, options? })
        final List<TsStatement> body;
        if (implement) {
            body = new ArrayList<>();
            body.add(new TsReturnStatement(
                    new TsCallExpression(
                            new TsMemberExpression(new TsMemberExpression(new TsThisExpression(), "httpClient"), "request"),
                            new TsObjectLiteral(
                                    new TsPropertyDefinition("method", new TsStringLiteral(method.getHttpMethod())),
                                    new TsPropertyDefinition("url", processPathTemplate(pathTemplate)),
                                    queryParameter != null ? new TsPropertyDefinition("queryParams", new TsIdentifierReference("queryParams")) : null,
                                    method.getEntityParam() != null ? new TsPropertyDefinition("data", new TsIdentifierReference(method.getEntityParam().getName())) : null,
                                    optionsType != null ? new TsPropertyDefinition("options", new TsIdentifierReference("options")) : null
                            )
                    )
            ));
        } else {
            body = null;
        }
        // method
        final TsMethodModel tsMethodModel = new TsMethodModel(method.getName() + nameSuffix, TsModifierFlags.None, null, parameters, wrappedReturnType, body, comments);
        return tsMethodModel;
    }
private TsParameterModel processParameter(SymbolTable symbolTable, MethodModel method, MethodParameterModel parameter) {
final String parameterName = parameter.getName();
final TsType parameterType = typeFromJava(symbolTable, parameter.getType(), method.getName(), method.getOriginClass());
return new TsParameterModel(parameterName, parameterType);
}
private static TsTemplateLiteral processPathTemplate(PathTemplate pathTemplate) {
final List<TsExpression> spans = new ArrayList<>();
for (PathTemplate.Part part : pathTemplate.getParts()) {
if (part instanceof PathTemplate.Literal) {
final PathTemplate.Literal literal = (PathTemplate.Literal) part;
spans.add(new TsStringLiteral(literal.getLiteral()));
}
if (part instanceof PathTemplate.Parameter) {
final PathTemplate.Parameter parameter = (PathTemplate.Parameter) part;
spans.add(new TsIdentifierReference(parameter.getValidName()));
}
}
return new TsTaggedTemplateLiteral(new TsIdentifierReference("uriEncoding"), spans);
}
    /**
     * Rewrites every occurrence of the Date marker type in bean properties to
     * a DateAsNumber or DateAsString alias according to settings.mapDate.
     * The corresponding alias is added to the model only if it was actually
     * used (the transformer closure records uses in the typeAliases set).
     */
    private TsModel transformDates(SymbolTable symbolTable, TsModel tsModel) {
        final TsAliasModel dateAsNumber = new TsAliasModel(null, symbolTable.getSyntheticSymbol("DateAsNumber"), null, TsType.Number, null);
        final TsAliasModel dateAsString = new TsAliasModel(null, symbolTable.getSyntheticSymbol("DateAsString"), null, TsType.String, null);
        // LinkedHashSet: keeps existing aliases' order and de-duplicates the added ones
        final LinkedHashSet<TsAliasModel> typeAliases = new LinkedHashSet<>(tsModel.getTypeAliases());
        final TsModel model = transformBeanPropertyTypes(tsModel, new TsType.Transformer() {
            @Override
            public TsType transform(TsType.Context context, TsType type) {
                if (type == TsType.Date) {
                    if (settings.mapDate == DateMapping.asNumber) {
                        typeAliases.add(dateAsNumber);
                        return new TsType.ReferenceType(dateAsNumber.getName());
                    }
                    if (settings.mapDate == DateMapping.asString) {
                        typeAliases.add(dateAsString);
                        return new TsType.ReferenceType(dateAsString.getName());
                    }
                }
                // mapDate == asDate (or no match): leave the type untouched
                return type;
            }
        });
        return model.withTypeAliases(new ArrayList<>(typeAliases));
    }
// Word-boundary regex, compiled once: the pattern is constant and Pattern.compile
// is relatively expensive, while the original compiled it on every call.
private static final Pattern WORD_BOUNDARY_PATTERN = Pattern.compile(String.join("|",
        "_", // example: UPPER CASE
        "(?<=\\p{javaUpperCase})" + "(?=\\p{javaUpperCase}\\p{javaLowerCase})", // example: XML Http
        "(?<=[^_\\p{javaUpperCase}])" + "(?=\\p{javaUpperCase})", // example: camel Case
        "(?<=[\\p{javaUpperCase}\\p{javaLowerCase}])" + "(?=[^\\p{javaUpperCase}\\p{javaLowerCase}])", // example: string 2
        "(?<=[^_\\p{javaUpperCase}\\p{javaLowerCase}])" + "(?=[\\p{javaUpperCase}\\p{javaLowerCase}])" // example: 2 json
));

/**
 * Splits an identifier into words on underscores and on camel-case/digit boundaries.
 * Examples: "XMLHttpRequest" -> [XML, Http, Request], "UPPER_CASE" -> [UPPER, CASE],
 * "string2json" -> [string, 2, json].
 * Note: a leading separator yields a leading empty word (e.g. "_FOO" -> ["", "FOO"]),
 * matching {@link String#split} semantics.
 */
static List<String> splitIdentifierIntoWords(String identifier) {
    return Arrays.asList(WORD_BOUNDARY_PATTERN.split(identifier));
}
/**
 * Converts an identifier to the enum-member casing configured in
 * {@code settings.enumMemberCasing} (PascalCase or camelCase); any other
 * setting returns the identifier unchanged.
 */
private String convertIdentifierCasing(String identifier) {
    final List<String> words = splitIdentifierIntoWords(identifier);
    // splitIdentifierIntoWords can produce empty fragments for leading separators
    // (e.g. "_FOO" splits to ["", "FOO"]); filter them out, otherwise
    // word.substring(0, 1) throws StringIndexOutOfBoundsException.
    // NOTE(review): toUpperCase()/toLowerCase() use the default locale as in the
    // original code — confirm whether Locale.ROOT is wanted for Turkish-like locales.
    final String pascalCase = words.stream()
        .filter(word -> !word.isEmpty())
        .map(word -> word.substring(0, 1).toUpperCase() + word.substring(1).toLowerCase())
        .collect(Collectors.joining());
    if (pascalCase.isEmpty()) {
        // Identifier consisted only of separators; nothing sensible to re-case.
        return identifier;
    }
    if (settings.enumMemberCasing == IdentifierCasing.PascalCase) {
        return pascalCase;
    }
    if (settings.enumMemberCasing == IdentifierCasing.camelCase) {
        return pascalCase.substring(0, 1).toLowerCase() + pascalCase.substring(1);
    }
    return identifier;
}
/**
 * Renames every enum member according to the configured identifier casing
 * (see {@code convertIdentifierCasing}); enum values are left untouched.
 */
private TsModel transformEnumMembersCase(TsModel tsModel) {
    final List<TsEnumModel> originalEnums = tsModel.getEnums();
    final LinkedHashSet<TsEnumModel> convertedEnums = new LinkedHashSet<>();
    for (TsEnumModel enumModel : originalEnums) {
        final List<EnumMemberModel> renamedMembers = enumModel.getMembers().stream()
            .map(member -> member.withPropertyName(convertIdentifierCasing(member.getPropertyName())))
            .collect(Collectors.toList());
        convertedEnums.add(enumModel.withMembers(renamedMembers));
    }
    return tsModel.withRemovedEnums(originalEnums).withAddedEnums(new ArrayList<>(convertedEnums));
}
/**
 * Replaces every string-based enum with a type alias whose definition is a
 * union of the members' literal types (string literals, or number literals
 * for numeric values).
 */
private TsModel transformEnumsToUnions(TsModel tsModel) {
    final List<TsEnumModel> stringEnums = tsModel.getEnums(EnumKind.StringBased);
    final LinkedHashSet<TsAliasModel> aliases = new LinkedHashSet<>(tsModel.getTypeAliases());
    for (TsEnumModel enumModel : stringEnums) {
        final List<TsType> literalTypes = new ArrayList<>();
        for (EnumMemberModel member : enumModel.getMembers()) {
            final Object value = member.getEnumValue();
            if (value instanceof Number) {
                literalTypes.add(new TsType.NumberLiteralType((Number) value));
            } else {
                literalTypes.add(new TsType.StringLiteralType(String.valueOf(value)));
            }
        }
        aliases.add(new TsAliasModel(enumModel.getOrigin(), enumModel.getName(), null,
                new TsType.UnionType(literalTypes), enumModel.getComments()));
    }
    return tsModel.withRemovedEnums(stringEnums).withTypeAliases(new ArrayList<>(aliases));
}
/**
 * Inlines enum references: wherever a property type references an enum that
 * has a corresponding type alias, the alias's definition is substituted
 * directly and the (now unused) alias is removed from the model.
 */
private TsModel inlineEnums(final TsModel tsModel, final SymbolTable symbolTable) {
    final Set<TsAliasModel> inlinedAliases = new LinkedHashSet<>();
    final TsModel transformed = transformBeanPropertyTypes(tsModel, new TsType.Transformer() {
        @Override
        public TsType transform(TsType.Context context, TsType tsType) {
            if (!(tsType instanceof TsType.EnumReferenceType)) {
                return tsType;
            }
            final TsAliasModel alias = tsModel.getTypeAlias(getOriginClass(symbolTable, tsType));
            if (alias == null) {
                return tsType;
            }
            inlinedAliases.add(alias);
            return alias.getDefinition();
        }
    });
    return transformed.withRemovedTypeAliases(new ArrayList<>(inlinedAliases));
}
/**
 * Converts string-based enums to number-based ones by dropping every member's
 * explicit value (a null Number lets the generator assign sequential values).
 */
private TsModel transformEnumsToNumberBasedEnum(TsModel tsModel) {
    final List<TsEnumModel> stringEnums = tsModel.getEnums(EnumKind.StringBased);
    final LinkedHashSet<TsEnumModel> numberBasedEnums = new LinkedHashSet<>();
    for (TsEnumModel enumModel : stringEnums) {
        final List<EnumMemberModel> members = enumModel.getMembers().stream()
            .map(member -> new EnumMemberModel(member.getPropertyName(), (Number) null, member.getOriginalField(), member.getComments()))
            .collect(Collectors.toList());
        numberBasedEnums.add(enumModel.withMembers(members));
    }
    return tsModel.withRemovedEnums(stringEnums).withAddedEnums(new ArrayList<>(numberBasedEnums));
}
/**
 * TypeScript mapped types keyed by number-based enums do not behave like plain
 * string-keyed maps; for maps whose key is such an enum (or when all enums are
 * mapped to number-based enums), fall back to a string-indexed type
 * {@code { [index: string]: V }}.
 */
private TsModel transformNonStringEnumKeyMaps(SymbolTable symbolTable, TsModel tsModel) {
    return transformBeanPropertyTypes(tsModel, new TsType.Transformer() {
        @Override
        public TsType transform(TsType.Context context, TsType tsType) {
            if (!(tsType instanceof TsType.MappedType)) {
                return tsType;
            }
            final TsType.MappedType mappedType = (TsType.MappedType) tsType;
            if (!(mappedType.parameterType instanceof TsType.EnumReferenceType)) {
                return tsType;
            }
            final TsType.EnumReferenceType enumType = (TsType.EnumReferenceType) mappedType.parameterType;
            final Class<?> enumClass = symbolTable.getSymbolClass(enumType.symbol);
            // Find the enum model originating from the key's enum class, if any.
            TsEnumModel enumModel = null;
            for (TsEnumModel candidate : tsModel.getEnums()) {
                if (Objects.equals(candidate.getOrigin(), enumClass)) {
                    enumModel = candidate;
                    break;
                }
            }
            final boolean hasNonStringKeys = settings.mapEnum == EnumMapping.asNumberBasedEnum
                    || (enumModel != null && enumModel.getKind() == EnumKind.NumberBased)
                    || (enumModel != null && enumModel.getMembers().stream().anyMatch(member -> !(member.getEnumValue() instanceof String)));
            return hasNonStringKeys
                    ? new TsType.IndexedArrayType(TsType.String, mappedType.type)
                    : tsType;
        }
    });
}
/**
 * Applies {@link #addEnumValuesToJavadoc(TsEnumModel)} to every enum in the model.
 */
private static TsModel addEnumValuesToJavadoc(TsModel tsModel) {
    final List<TsEnumModel> documentedEnums = new ArrayList<>();
    for (TsEnumModel enumModel : tsModel.getEnums()) {
        documentedEnums.add(addEnumValuesToJavadoc(enumModel));
    }
    return tsModel.withEnums(documentedEnums);
}
/**
 * Appends a "Values:" section to an enum's comments listing each member value
 * (with the member's own comments, when present). Enums with no comments at
 * all are returned unchanged.
 */
private static TsEnumModel addEnumValuesToJavadoc(TsEnumModel enumModel) {
    final boolean hasComments = enumModel.getComments() != null && !enumModel.getComments().isEmpty();
    boolean hasMemberComments = false;
    for (EnumMemberModel member : enumModel.getMembers()) {
        if (member.getComments() != null && !member.getComments().isEmpty()) {
            hasMemberComments = true;
            break;
        }
    }
    if (!hasComments && !hasMemberComments) {
        return enumModel;
    }
    final List<String> comments = new ArrayList<>(Utils.listFromNullable(enumModel.getComments()));
    if (hasComments) {
        // Blank line separating the original comments from the generated section.
        comments.add("");
    }
    comments.add("Values:");
    for (EnumMemberModel member : enumModel.getMembers()) {
        String line = "- `" + member.getEnumValue() + "`";
        if (member.getComments() != null) {
            line += " - " + String.join(" ", member.getComments());
        }
        comments.add(line);
    }
    return enumModel.withComments(comments);
}
/**
 * Two-phase tagged-union support.
 * Phase 1: for every bean that has tagged-union subclasses and a discriminant
 * property, creates a type alias (registered under the "Union" suffix) whose
 * definition is a union of references to all subclasses, and attaches the alias
 * to the bean. Phase 2: rewrites property/parameter/return types so that
 * references to such a base bean point to the union alias instead.
 * Disabled entirely when {@code settings.disableTaggedUnions} is set.
 */
private TsModel createAndUseTaggedUnions(final SymbolTable symbolTable, TsModel tsModel) {
    if (settings.disableTaggedUnions) {
        return tsModel;
    }
    // create tagged unions
    final List<TsBeanModel> beans = new ArrayList<>();
    final LinkedHashSet<TsAliasModel> typeAliases = new LinkedHashSet<>(tsModel.getTypeAliases());
    for (TsBeanModel bean : tsModel.getBeans()) {
        if (!bean.getTaggedUnionClasses().isEmpty() && bean.getDiscriminantProperty() != null) {
            // Registers the "Union" symbol for this bean's origin class;
            // phase 2 below looks it up via hasSymbol(cls, "Union").
            final Symbol unionName = symbolTable.getSymbol(bean.getOrigin(), "Union");
            final boolean isGeneric = !bean.getTypeParameters().isEmpty();
            final List<TsType> unionTypes = new ArrayList<>();
            for (Class<?> cls : bean.getTaggedUnionClasses()) {
                final TsType type;
                if (isGeneric && cls.getTypeParameters().length != 0) {
                    // Generic subclass of a generic base: map the subclass's type
                    // variables onto the base's so the alias can reuse the base's
                    // type parameters.
                    final List<String> mappedGenericVariables = GenericsResolver.mapGenericVariablesToBase(cls, bean.getOrigin());
                    type = new TsType.GenericReferenceType(
                            symbolTable.getSymbol(cls),
                            mappedGenericVariables.stream()
                                    .map(TsType.GenericVariableType::new)
                                    .collect(Collectors.toList()));
                } else {
                    type = new TsType.ReferenceType(symbolTable.getSymbol(cls));
                }
                unionTypes.add(type);
            }
            final TsType.UnionType union = new TsType.UnionType(unionTypes);
            final TsAliasModel tsAliasModel = new TsAliasModel(bean.getOrigin(), unionName, bean.getTypeParameters(), union, null);
            beans.add(bean.withTaggedUnionAlias(tsAliasModel));
            typeAliases.add(tsAliasModel);
        } else {
            // Bean does not participate in a tagged union; keep it unchanged.
            beans.add(bean);
        }
    }
    final TsModel modelWithTaggedUnions = tsModel.withBeans(beans).withTypeAliases(new ArrayList<>(typeAliases));
    // use tagged unions
    final TsModel modelWithUsedTaggedUnions = transformBeanPropertyTypes(modelWithTaggedUnions, new TsType.Transformer() {
        @Override
        public TsType transform(TsType.Context context, TsType tsType) {
            final Class<?> cls = getOriginClass(symbolTable, tsType);
            if (cls != null) {
                // Only classes that got a "Union" symbol in phase 1 are rewritten.
                final Symbol unionSymbol = symbolTable.hasSymbol(cls, "Union");
                if (unionSymbol != null) {
                    if (tsType instanceof TsType.GenericReferenceType) {
                        // Preserve the original type arguments on the union reference.
                        final TsType.GenericReferenceType genericReferenceType = (TsType.GenericReferenceType) tsType;
                        return new TsType.GenericReferenceType(unionSymbol, genericReferenceType.typeArguments);
                    } else {
                        return new TsType.ReferenceType(unionSymbol);
                    }
                }
            }
            return tsType;
        }
    });
    return modelWithUsedTaggedUnions;
}
// example: transforms property `text: string | undefined` to `text?: string | undefined`
private TsModel makeUndefinablePropertiesAndParametersOptional(final SymbolTable symbolTable, TsModel tsModel) {
final NullabilityDefinition nullabilityDefinition = settings.getNullabilityDefinition();
if (!nullabilityDefinition.containsUndefined()) {
return tsModel;
}
return tsModel.withBeans(tsModel.getBeans().stream()
.map(bean -> {
bean = bean.withProperties(bean.getProperties().stream()
.map(property -> property.withTsType(makeNullableTypeOptional(property.getTsType())))
.collect(Collectors.toList())
);
bean = bean.withMethods(bean.getMethods().stream()
.map(method -> method.withParameters(method.getParameters().stream()
.map(parameter -> parameter.withTsType(makeNullableTypeOptional(parameter.getTsType())))
.collect(Collectors.toList())
))
.collect(Collectors.toList())
);
return bean;
})
.collect(Collectors.toList())
);
}
/** Wraps a nullable type in an optional type; any other type is returned as-is. */
private static TsType makeNullableTypeOptional(TsType type) {
    if (type instanceof TsType.NullableType) {
        return new TsType.OptionalType(type);
    }
    return type;
}
/**
 * Materializes nullable types. With inline nullability each nullable type
 * becomes a union with the configured null-ish types; otherwise the types are
 * left as-is and a single `type Nullable&lt;T&gt; = T | ...` alias is added to the
 * model (only if at least one nullable type was encountered).
 */
private TsModel transformNullableTypes(final SymbolTable symbolTable, TsModel tsModel) {
    final NullabilityDefinition nullabilityDefinition = settings.getNullabilityDefinition();
    final AtomicBoolean nullableAliasNeeded = new AtomicBoolean(false);
    TsModel result = transformBeanPropertyTypes(tsModel, new TsType.Transformer() {
        @Override
        public TsType transform(TsType.Context context, TsType tsType) {
            if (!(tsType instanceof TsType.NullableType)) {
                return tsType;
            }
            final TsType.NullableType nullableType = (TsType.NullableType) tsType;
            if (nullabilityDefinition.isInline()) {
                return new TsType.UnionType(nullableType.type).add(nullabilityDefinition.getTypes());
            }
            nullableAliasNeeded.set(true);
            return tsType;
        }
    });
    // type Nullable<T> = T | ...
    if (nullableAliasNeeded.get()) {
        final TsType.GenericVariableType typeVariable = new TsType.GenericVariableType("T");
        result = result.withAddedTypeAliases(Arrays.asList(new TsAliasModel(
            /*origin*/ null,
            symbolTable.getSyntheticSymbol(TsType.NullableType.AliasName),
            Arrays.asList(typeVariable),
            new TsType.UnionType(typeVariable).add(nullabilityDefinition.getTypes()),
            /*comments*/ null
        )));
    }
    return result;
}
// example: transforms property `text?: string | null | undefined` to `text?: string | null`
/**
 * Drops the redundant `undefined` member from the union of every optional bean
 * property and method parameter (see eliminateUndefinedFromOptionalType).
 */
private TsModel eliminateUndefinedFromOptionalPropertiesAndParameters(final SymbolTable symbolTable, TsModel tsModel) {
    final List<TsBeanModel> updatedBeans = new ArrayList<>();
    for (TsBeanModel bean : tsModel.getBeans()) {
        final List<TsPropertyModel> properties = new ArrayList<>();
        for (TsPropertyModel property : bean.getProperties()) {
            properties.add(property.withTsType(eliminateUndefinedFromOptionalType(property.getTsType())));
        }
        final List<TsMethodModel> methods = new ArrayList<>();
        for (TsMethodModel method : bean.getMethods()) {
            final List<TsParameterModel> parameters = new ArrayList<>();
            for (TsParameterModel parameter : method.getParameters()) {
                parameters.add(parameter.withTsType(eliminateUndefinedFromOptionalType(parameter.getTsType())));
            }
            methods.add(method.withParameters(parameters));
        }
        updatedBeans.add(bean.withProperties(properties).withMethods(methods));
    }
    return tsModel.withBeans(updatedBeans);
}
/**
 * For an optional union type, removes `undefined` from the union
 * (example: `string | null | undefined` under `?` becomes `string | null`).
 * Any other shape is returned unchanged.
 */
private static TsType eliminateUndefinedFromOptionalType(TsType type) {
    if (!(type instanceof TsType.OptionalType)) {
        return type;
    }
    final TsType innerType = ((TsType.OptionalType) type).type;
    if (!(innerType instanceof TsType.UnionType)) {
        return type;
    }
    final TsType.UnionType unionType = (TsType.UnionType) innerType;
    if (!unionType.types.contains(TsType.Undefined)) {
        return type;
    }
    return new TsType.OptionalType(unionType.remove(Arrays.asList(TsType.Undefined)));
}
/**
 * Rewrites optional properties of data beans according to
 * {@code settings.optionalPropertiesDeclaration}: the `?` marker can be
 * replaced by (or combined with) `| null` / `| undefined` unions.
 * Other declaration modes leave the property untouched.
 */
private TsModel transformOptionalProperties(final SymbolTable symbolTable, TsModel tsModel) {
    final List<TsBeanModel> updatedBeans = new ArrayList<>();
    for (TsBeanModel bean : tsModel.getBeans()) {
        if (bean.getCategory() != TsBeanCategory.Data) {
            updatedBeans.add(bean);
            continue;
        }
        final List<TsPropertyModel> properties = new ArrayList<>();
        for (TsPropertyModel property : bean.getProperties()) {
            if (property.getTsType() instanceof TsType.OptionalType) {
                final TsType innerType = ((TsType.OptionalType) property.getTsType()).type;
                switch (settings.optionalPropertiesDeclaration) {
                    case nullableType:
                        property = property.withTsType(
                                TsType.UnionType.combine(Arrays.asList(innerType, TsType.Null)));
                        break;
                    case questionMarkAndNullableType:
                        property = property.withTsType(
                                new TsType.OptionalType(
                                        TsType.UnionType.combine(Arrays.asList(innerType, TsType.Null))));
                        break;
                    case nullableAndUndefinableType:
                        property = property.withTsType(
                                TsType.UnionType.combine(Arrays.asList(innerType, TsType.Null, TsType.Undefined)));
                        break;
                    case undefinableType:
                        property = property.withTsType(
                                TsType.UnionType.combine(Arrays.asList(innerType, TsType.Undefined)));
                        break;
                    default:
                        // Other declaration modes keep the `?` marker as-is.
                        break;
                }
            }
            properties.add(property);
        }
        updatedBeans.add(bean.withProperties(properties));
    }
    return tsModel.withBeans(updatedBeans);
}
/**
 * Sorts each bean's property list in place when declaration sorting is enabled.
 */
private TsModel sortPropertiesDeclarations(SymbolTable symbolTable, TsModel tsModel) {
    if (!settings.sortDeclarations) {
        return tsModel;
    }
    for (TsBeanModel bean : tsModel.getBeans()) {
        Collections.sort(bean.getProperties());
    }
    return tsModel;
}
/**
 * Optionally sorts all type declarations, then re-orders classes so that every
 * parent class is declared before its subclasses (required for valid output).
 */
private TsModel sortTypeDeclarations(SymbolTable symbolTable, TsModel tsModel) {
    final List<TsBeanModel> beans = tsModel.getBeans();
    final List<TsAliasModel> aliases = tsModel.getTypeAliases();
    final List<TsEnumModel> enums = tsModel.getEnums();
    if (settings.sortDeclarations || settings.sortTypeDeclarations) {
        Collections.sort(beans);
        Collections.sort(aliases);
        Collections.sort(enums);
    }
    // LinkedHashSet keeps first-insertion order while de-duplicating beans that
    // are visited both directly and as someone's parent.
    final LinkedHashSet<TsBeanModel> parentsFirst = new LinkedHashSet<>();
    for (TsBeanModel bean : beans) {
        addOrderedClass(symbolTable, tsModel, bean, parentsFirst);
    }
    return tsModel
        .withBeans(new ArrayList<>(parentsFirst))
        .withTypeAliases(aliases)
        .withEnums(enums);
}
/**
 * Recursively appends a bean to the ordered set, ancestors first: for classes,
 * the whole parent chain is added before the bean itself.
 */
private static void addOrderedClass(SymbolTable symbolTable, TsModel tsModel, TsBeanModel bean, LinkedHashSet<TsBeanModel> orderedBeans) {
    final boolean hasParentClass = bean.isClass() && bean.getParent() != null;
    if (hasParentClass) {
        final TsBeanModel parentBean = tsModel.getBean(getOriginClass(symbolTable, bean.getParent()));
        if (parentBean != null) {
            addOrderedClass(symbolTable, tsModel, parentBean, orderedBeans);
        }
    }
    orderedBeans.add(bean);
}
/**
 * Applies a type transformer to every bean's property types, method parameter
 * types and method return types. A fresh TsType.Context is used per bean and
 * the original visitation order (properties, then per-method parameters and
 * return type) is preserved, as the context may accumulate state.
 */
private static TsModel transformBeanPropertyTypes(TsModel tsModel, TsType.Transformer transformer) {
    final List<TsBeanModel> transformedBeans = new ArrayList<>();
    for (TsBeanModel bean : tsModel.getBeans()) {
        final TsType.Context context = new TsType.Context();
        final List<TsPropertyModel> properties = new ArrayList<>();
        for (TsPropertyModel property : bean.getProperties()) {
            properties.add(property.withTsType(
                    TsType.transformTsType(context, property.getTsType(), transformer)));
        }
        final List<TsMethodModel> methods = new ArrayList<>();
        for (TsMethodModel method : bean.getMethods()) {
            final List<TsParameterModel> parameters = new ArrayList<>();
            for (TsParameterModel parameter : method.getParameters()) {
                parameters.add(new TsParameterModel(
                        parameter.getAccessibilityModifier(),
                        parameter.getName(),
                        TsType.transformTsType(context, parameter.getTsType(), transformer)));
            }
            methods.add(new TsMethodModel(
                    method.getName(),
                    method.getModifiers(),
                    method.getTypeParameters(),
                    parameters,
                    TsType.transformTsType(context, method.getReturnType(), transformer),
                    method.getBody(),
                    method.getComments()));
        }
        transformedBeans.add(bean.withProperties(properties).withMethods(methods));
    }
    return tsModel.withBeans(transformedBeans);
}
/**
 * Resolves the Java class a reference type originates from; returns null for
 * non-reference types or unresolvable symbols.
 */
private static Class<?> getOriginClass(SymbolTable symbolTable, TsType type) {
    return type instanceof TsType.ReferenceType
        ? symbolTable.getSymbolClass(((TsType.ReferenceType) type).symbol)
        : null;
}
/**
 * Produces a valid TypeScript identifier from an arbitrary name: dashes become
 * camel-case boundaries, invalid characters are removed, and reserved words
 * are prefixed with an underscore.
 */
public static String getValidIdentifierName(String name) {
    final String identifier = removeInvalidIdentifierCharacters(replaceDashPattern(name));
    if (SymbolTable.isReservedWord(identifier)) {
        return "_" + identifier;
    }
    return identifier;
}
/**
 * Converts dash-separated names to camel case: each dash followed by a
 * non-dash character is replaced by the upper-cased character
 * (example: "foo-bar" becomes "fooBar"; a trailing or doubled dash is kept).
 */
private static String replaceDashPattern(String name) {
    final StringBuilder result = new StringBuilder(name.length());
    int i = 0;
    while (i < name.length()) {
        final char c = name.charAt(i);
        if (c == '-' && i + 1 < name.length() && name.charAt(i + 1) != '-') {
            result.append(Character.toUpperCase(name.charAt(i + 1)));
            i += 2;
        } else {
            result.append(c);
            i++;
        }
    }
    return result.toString();
}
/**
 * Strips characters that are not valid in a TypeScript identifier. Until the
 * first kept character, candidates must satisfy the stricter identifier-start
 * rule; afterwards the identifier-part rule applies.
 */
private static String removeInvalidIdentifierCharacters(String name) {
    final StringBuilder result = new StringBuilder();
    for (int i = 0; i < name.length(); i++) {
        final char c = name.charAt(i);
        final boolean valid = result.length() == 0
            ? isValidIdentifierStart(c)
            : isValidIdentifierPart(c);
        if (valid) {
            result.append(c);
        }
    }
    return result.toString();
}
/**
 * Returns true if the given name is a syntactically valid identifier:
 * non-empty, with a valid start character followed by valid part characters.
 */
public static boolean isValidIdentifierName(String name) {
    if (name == null || name.isEmpty()) {
        return false;
    }
    if (!isValidIdentifierStart(name.charAt(0))) {
        return false;
    }
    for (int i = 1; i < name.length(); i++) {
        if (!isValidIdentifierPart(name.charAt(i))) {
            return false;
        }
    }
    return true;
}
// https://github.com/Microsoft/TypeScript/blob/master/doc/spec-ARCHIVED.md#222-property-names
// http://www.ecma-international.org/ecma-262/6.0/index.html#sec-names-and-keywords
/** True for characters allowed as the first character of an identifier. */
private static boolean isValidIdentifierStart(char start) {
    return start == '$' || start == '_' || Character.isUnicodeIdentifierStart(start);
}
/** True for characters allowed after the first character; U+200C (ZWNJ) and U+200D (ZWJ) are explicitly permitted. */
private static boolean isValidIdentifierPart(char c) {
    return c == '$' || c == '_' || c == '\u200C' || c == '\u200D' || Character.isUnicodeIdentifierPart(c);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.solr.query;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import org.apache.jackrabbit.oak.api.Result;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.index.solr.configuration.DefaultSolrConfiguration;
import org.apache.jackrabbit.oak.plugins.index.solr.configuration.OakSolrConfiguration;
import org.apache.jackrabbit.oak.plugins.index.solr.configuration.OakSolrConfigurationProvider;
import org.apache.jackrabbit.oak.plugins.index.solr.server.SolrServerProvider;
import org.apache.jackrabbit.oak.plugins.memory.PropertyValues;
import org.apache.jackrabbit.oak.InitialContentHelper;
import org.apache.jackrabbit.oak.query.NodeStateNodeTypeInfoProvider;
import org.apache.jackrabbit.oak.query.QueryEngineSettings;
import org.apache.jackrabbit.oak.query.ast.NodeTypeInfo;
import org.apache.jackrabbit.oak.query.ast.NodeTypeInfoProvider;
import org.apache.jackrabbit.oak.query.ast.Operator;
import org.apache.jackrabbit.oak.query.ast.SelectorImpl;
import org.apache.jackrabbit.oak.query.index.FilterImpl;
import org.apache.jackrabbit.oak.spi.query.Cursor;
import org.apache.jackrabbit.oak.spi.query.Filter;
import org.apache.jackrabbit.oak.spi.query.IndexRow;
import org.apache.jackrabbit.oak.spi.query.QueryIndex;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.SolrParams;
import org.junit.Before;
import org.junit.Test;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Tests for {@link SolrQueryIndex}
*/
public class SolrQueryIndexTest {
private NodeState nodeState;
@Before
public void setUp() throws Exception {
    // Minimal content tree with an embedded-Solr index definition under /oak:index/solr.
    final NodeBuilder builder = EMPTY_NODE.builder();
    builder.child("oak:index").child("solr")
            .setProperty(JCR_PRIMARYTYPE, "oak:QueryIndexDefinition")
            .setProperty("type", "solr")
            .child("server").setProperty("solrServerType", "embedded");
    nodeState = builder.getNodeState();
}
@Test
public void testNoIndexPlanWithNoRestrictions() throws Exception {
    // A filter without any restrictions should not be served by the Solr index.
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final SelectorImpl selector = mock(SelectorImpl.class);
    final FilterImpl filter = new FilterImpl(selector, "", new QueryEngineSettings());
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(0, plans.size());
}
@Test
public void testNoPlanWithPathRestrictions() throws Exception {
    // A path restriction alone yields no plan while the index is not configured for path restrictions.
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where isdescendantnode(a, '/test')", new QueryEngineSettings());
    filter.restrictPath("/test", Filter.PathRestriction.ALL_CHILDREN);
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(0, plans.size());
}
@Test
public void testNoPlanWithOnlyPathRestrictionsEnabled() throws Exception {
    // pathRestrictions enabled but no property restrictions configured: still no plan.
    final NodeBuilder builder = nodeState.builder();
    builder.child("oak:index").child("solr").setProperty("pathRestrictions", true);
    nodeState = builder.getNodeState();
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where isdescendantnode(a, '/test')", new QueryEngineSettings());
    filter.restrictPath("/test", Filter.PathRestriction.ALL_CHILDREN);
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(0, plans.size());
}
@Test
public void testPlanWithPropertyAndPathRestrictionsEnabled() throws Exception {
    // With both pathRestrictions and propertyRestrictions enabled, a filter carrying
    // both kinds of restriction gets exactly one plan.
    final NodeBuilder builder = nodeState.builder();
    builder.child("oak:index").child("solr")
            .setProperty("pathRestrictions", true)
            .setProperty("propertyRestrictions", true);
    nodeState = builder.getNodeState();
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where isdescendantnode(a, '/test')", new QueryEngineSettings());
    filter.restrictPath("/test", Filter.PathRestriction.ALL_CHILDREN);
    filter.restrictProperty("foo", Operator.EQUAL, PropertyValues.newString("bar"));
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(1, plans.size());
}
@Test
public void testNoPlanWithPropertyRestrictions() throws Exception {
    // Property restriction without propertyRestrictions enabled on the index: no plan.
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where name = 'hello')", new QueryEngineSettings());
    filter.restrictProperty("name", Operator.EQUAL, PropertyValues.newString("hello"));
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(0, plans.size());
}
@Test
public void testPlanWithPropertyRestrictionsEnabled() throws Exception {
    // Once propertyRestrictions is enabled, a property restriction yields one plan.
    final NodeBuilder builder = nodeState.builder();
    builder.child("oak:index").child("solr")
            .setProperty("propertyRestrictions", true);
    nodeState = builder.getNodeState();
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where name = 'hello')", new QueryEngineSettings());
    filter.restrictProperty("name", Operator.EQUAL, PropertyValues.newString("hello"));
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(1, plans.size());
}
@Test
public void testNoPlanWithPrimaryTypeRestrictions() throws Exception {
    // A jcr:primaryType restriction alone is not served with default index settings.
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where jcr:primaryType = 'nt:unstructured')", new QueryEngineSettings());
    filter.restrictProperty("jcr:primaryType", Operator.EQUAL, PropertyValues.newString("nt:unstructured"));
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(0, plans.size());
}
@Test
public void testNoPlanWithOnlyPrimaryTypeRestrictionsEnabled() throws Exception {
    // primaryTypes enabled but propertyRestrictions disabled: still no plan.
    final NodeBuilder builder = nodeState.builder();
    builder.child("oak:index").child("solr").setProperty("primaryTypes", true);
    nodeState = builder.getNodeState();
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where jcr:primaryType = 'nt:unstructured')", new QueryEngineSettings());
    filter.restrictProperty("jcr:primaryType", Operator.EQUAL, PropertyValues.newString("nt:unstructured"));
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(0, plans.size());
}
@Test
public void testPlanWithPropertyAndPrimaryTypeRestrictionsEnabled() throws Exception {
    // With propertyRestrictions and primaryTypes both enabled, the combined
    // primary-type + property restriction gets exactly one plan.
    final NodeBuilder builder = nodeState.builder();
    builder.child("oak:index").child("solr")
            .setProperty("propertyRestrictions", true)
            .setProperty("primaryTypes", true);
    nodeState = builder.getNodeState();
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where jcr:primaryType = 'nt:unstructured')", new QueryEngineSettings());
    filter.restrictProperty("jcr:primaryType", Operator.EQUAL, PropertyValues.newString("nt:unstructured"));
    filter.restrictProperty("name", Operator.EQUAL, PropertyValues.newString("hello"));
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(1, plans.size());
}
@Test
public void testNoPlanWithPropertyRestrictionsEnabledButPropertyIgnored() throws Exception {
    // Even with propertyRestrictions enabled, a property listed in
    // ignoredProperties must not produce a plan.
    final NodeBuilder builder = nodeState.builder();
    builder.child("oak:index").child("solr")
            .setProperty("ignoredProperties", Collections.singleton("name"), Type.STRINGS)
            .setProperty("propertyRestrictions", true);
    nodeState = builder.getNodeState();
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where name = 'hello')", new QueryEngineSettings());
    filter.restrictProperty("name", Operator.EQUAL, PropertyValues.newString("hello"));
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(0, plans.size()); // there's no plan matching the filter
}
@Test
public void testNoPlanWithPropertyRestrictionsEnabledButNotUsedProperty() throws Exception {
    // usedProperties acts as an allow-list: a restriction on a property
    // outside of it produces no plan.
    final NodeBuilder builder = nodeState.builder();
    builder.child("oak:index").child("solr")
            .setProperty("usedProperties", Collections.singleton("foo"), Type.STRINGS)
            .setProperty("propertyRestrictions", true);
    nodeState = builder.getNodeState();
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where name = 'hello')", new QueryEngineSettings());
    filter.restrictProperty("name", Operator.EQUAL, PropertyValues.newString("hello"));
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(0, plans.size());
}
@Test
public void testPlanWithPropertyRestrictionsEnabledAndUsedProperty() throws Exception {
    // A restriction on a property listed in usedProperties yields one plan.
    final NodeBuilder builder = nodeState.builder();
    builder.child("oak:index").child("solr")
            .setProperty("usedProperties", Collections.singleton("name"), Type.STRINGS)
            .setProperty("propertyRestrictions", true);
    nodeState = builder.getNodeState();
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where name = 'hello')", new QueryEngineSettings());
    filter.restrictProperty("name", Operator.EQUAL, PropertyValues.newString("hello"));
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(1, plans.size());
}
@Test
public void testNoPlanWithPropertyNotListedInUsedProperties() throws Exception {
    // Same allow-list check as above, restricting a different, unlisted property.
    final NodeBuilder builder = nodeState.builder();
    builder.child("oak:index").child("solr")
            .setProperty("usedProperties", Collections.singleton("name"), Type.STRINGS)
            .setProperty("propertyRestrictions", true);
    nodeState = builder.getNodeState();
    final SelectorImpl selector = newSelector(nodeState, "a");
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final FilterImpl filter = new FilterImpl(selector, "select * from [nt:base] as a where foo = 'bar')", new QueryEngineSettings());
    filter.restrictProperty("foo", Operator.EQUAL, PropertyValues.newString("bar"));
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(0, plans.size());
}
@Test
public void testUnion() throws Exception {
    // A UNION query with only path/fulltext restrictions gets no plan with default settings.
    final SelectorImpl selector = mock(SelectorImpl.class);
    final SolrQueryIndex index = new SolrQueryIndex(null, null, null);
    final String sqlQuery = "select [jcr:path], [jcr:score], [rep:excerpt] from [nt:hierarchyNode] as a where" +
            " isdescendantnode(a, '/content') and contains([jcr:content/*], 'founded') union select [jcr:path]," +
            " [jcr:score], [rep:excerpt] from [nt:hierarchyNode] as a where isdescendantnode(a, '/content') and " +
            "contains([jcr:content/jcr:title], 'founded') union select [jcr:path], [jcr:score], [rep:excerpt]" +
            " from [nt:hierarchyNode] as a where isdescendantnode(a, '/content') and " +
            "contains([jcr:content/jcr:description], 'founded') order by [jcr:score] desc";
    final FilterImpl filter = new FilterImpl(selector, sqlQuery, new QueryEngineSettings());
    final List<QueryIndex.IndexPlan> plans = index.getPlans(filter, new LinkedList<QueryIndex.OrderEntry>(), nodeState);
    assertEquals(0, plans.size());
}
    @Test
    public void testSize() throws Exception {
        // Full-text query over the initial repository content.
        NodeState root = InitialContentHelper.INITIAL_CONTENT;
        SelectorImpl selector = newSelector(root, "a");
        String sqlQuery = "select [jcr:path], [jcr:score] from [nt:base] as a where" +
                " contains([jcr:content/*], 'founded')";
        // NOTE(review): getSearchingSolrServer() is not stubbed on this mock,
        // so the cursor presumably falls back to estimated sizes -- confirm.
        SolrServerProvider solrServerProvider = mock(SolrServerProvider.class);
        OakSolrConfigurationProvider configurationProvider = mock(OakSolrConfigurationProvider.class);
        OakSolrConfiguration configuration = new DefaultSolrConfiguration() {
            @Override
            public boolean useForPropertyRestrictions() {
                return true;
            }
        };
        when(configurationProvider.getConfiguration()).thenReturn(configuration);
        SolrQueryIndex solrQueryIndex = new SolrQueryIndex(null, configurationProvider, solrServerProvider);
        FilterImpl filter = new FilterImpl(selector, sqlQuery, new QueryEngineSettings());
        List<QueryIndex.IndexPlan> plans = solrQueryIndex.getPlans(filter, null, root);
        for (QueryIndex.IndexPlan p : plans) {
            Cursor cursor = solrQueryIndex.query(p, root);
            assertNotNull(cursor);
            // EXACT and APPROXIMATION are expected to be close to each other,
            // while FAST_APPROXIMATION is a much coarser guess and far off.
            long sizeExact = cursor.getSize(Result.SizePrecision.EXACT, 100000);
            long sizeApprox = cursor.getSize(Result.SizePrecision.APPROXIMATION, 100000);
            long sizeFastApprox = cursor.getSize(Result.SizePrecision.FAST_APPROXIMATION, 100000);
            assertTrue(Math.abs(sizeExact - sizeApprox) < 10);
            assertTrue(Math.abs(sizeExact - sizeFastApprox) > 10000);
        }
    }
    @Test
    public void testNoMoreThanThreeSolrRequests() throws Exception {
        NodeState root = InitialContentHelper.INITIAL_CONTENT;
        SelectorImpl selector = newSelector(root, "a");
        String sqlQuery = "select [jcr:path], [jcr:score] from [nt:base] as a where" +
                " contains([jcr:content/*], 'founded')";
        SolrClient solrServer = mock(SolrClient.class);
        SolrServerProvider solrServerProvider = mock(SolrServerProvider.class);
        when(solrServerProvider.getSearchingSolrServer()).thenReturn(solrServer);
        OakSolrConfigurationProvider configurationProvider = mock(OakSolrConfigurationProvider.class);
        // A small page size (10 rows) forces the cursor to page through results.
        OakSolrConfiguration configuration = new DefaultSolrConfiguration() {
            @Override
            public boolean useForPropertyRestrictions() {
                return true;
            }
            @Override
            public int getRows() {
                return 10;
            }
        };
        when(configurationProvider.getConfiguration()).thenReturn(configuration);
        SolrQueryIndex solrQueryIndex = new SolrQueryIndex(null, configurationProvider, solrServerProvider);
        FilterImpl filter = new FilterImpl(selector, sqlQuery, new QueryEngineSettings());
        // CountingResponse counts each getResults() call as one Solr round trip.
        CountingResponse response = new CountingResponse(0);
        when(solrServer.query(any(SolrParams.class))).thenReturn(response);
        List<QueryIndex.IndexPlan> plans = solrQueryIndex.getPlans(filter, null, root);
        for (QueryIndex.IndexPlan p : plans) {
            Cursor cursor = solrQueryIndex.query(p, root);
            assertNotNull(cursor);
            // Drain the cursor completely ...
            while (cursor.hasNext()) {
                IndexRow row = cursor.next();
                assertNotNull(row);
            }
            // ... and verify that no more than three Solr requests were issued.
            assertEquals(3, response.getCounter());
        }
    }
    @Test
    public void testNoNegativeCost() throws Exception {
        NodeState root = InitialContentHelper.INITIAL_CONTENT;
        NodeBuilder builder = root.builder();
        builder.child("oak:index").child("solr")
                .setProperty("usedProperties", Collections.singleton("name"), Type.STRINGS)
                .setProperty("propertyRestrictions", true)
                .setProperty("type", "solr");
        nodeState = builder.getNodeState();
        SelectorImpl selector = newSelector(root, "a");
        // NOTE(review): both native-query strings below look unterminated
        // (missing closing quote/parenthesis) -- presumably intentional to
        // stress the cost estimation with odd input; confirm before changing.
        String query = "select * from [nt:base] as a where native('solr','select?q=searchKeywords:\"foo\"^20 text:\"foo\"^1 " +
                "description:\"foo\"^8 something:\"foo\"^3 headline:\"foo\"^5 title:\"foo\"^10 &q.op=OR'";
        String sqlQuery = "select * from [nt:base] a where native('solr','" + query + "'";
        SolrClient solrServer = mock(SolrClient.class);
        SolrServerProvider solrServerProvider = mock(SolrServerProvider.class);
        when(solrServerProvider.getSearchingSolrServer()).thenReturn(solrServer);
        OakSolrConfigurationProvider configurationProvider = mock(OakSolrConfigurationProvider.class);
        OakSolrConfiguration configuration = new DefaultSolrConfiguration() {
            @Override
            public boolean useForPropertyRestrictions() {
                return true;
            }
            @Override
            public int getRows() {
                return 10;
            }
        };
        when(configurationProvider.getConfiguration()).thenReturn(configuration);
        SolrQueryIndex solrQueryIndex = new SolrQueryIndex(null, configurationProvider, solrServerProvider);
        FilterImpl filter = new FilterImpl(selector, sqlQuery, new QueryEngineSettings());
        filter.restrictProperty("native*solr", Operator.EQUAL, PropertyValues.newString(query));
        CountingResponse response = new CountingResponse(0);
        when(solrServer.query(any(SolrParams.class))).thenReturn(response);
        List<QueryIndex.IndexPlan> plans = solrQueryIndex.getPlans(filter, null, nodeState);
        for (QueryIndex.IndexPlan p : plans) {
            // All cost figures and their combination must be non-negative.
            double costPerEntry = p.getCostPerEntry();
            assertTrue(costPerEntry >= 0);
            double costPerExecution = p.getCostPerExecution();
            assertTrue(costPerExecution >= 0);
            long estimatedEntryCount = p.getEstimatedEntryCount();
            assertTrue(estimatedEntryCount >= 0);
            double c = p.getCostPerExecution() + estimatedEntryCount * p.getCostPerEntry();
            assertTrue(c >= 0);
        }
    }
private static SelectorImpl newSelector(NodeState root, String name) {
NodeTypeInfoProvider types = new NodeStateNodeTypeInfoProvider(root);
NodeTypeInfo type = types.getNodeTypeInfo("nt:base");
return new SelectorImpl(type, name);
}
private class CountingResponse extends QueryResponse {
private int counter;
public CountingResponse(int counter) {
this.counter = counter;
}
@Override
public SolrDocumentList getResults() {
SolrDocumentList results = new SolrDocumentList();
for (int i = 0; i < 1000; i++) {
results.add(new SolrDocument());
}
results.setNumFound(1000);
counter++;
return results;
}
public int getCounter() {
return counter;
}
}
}
| |
/*
* Copyright 2011 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.walkaround.slob.server;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.FetchOptions;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.Query;
import com.google.appengine.api.taskqueue.Queue;
import com.google.appengine.api.taskqueue.TaskHandle;
import com.google.appengine.api.taskqueue.TaskOptions;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.common.collect.ImmutableList;
import com.google.walkaround.slob.shared.ChangeData;
import com.google.walkaround.slob.shared.ChangeRejected;
import com.google.walkaround.slob.shared.ClientId;
import com.google.walkaround.slob.shared.InvalidSnapshot;
import com.google.walkaround.slob.shared.SlobId;
import com.google.walkaround.slob.shared.SlobModel;
import com.google.walkaround.slob.shared.SlobModel.Slob;
import com.google.walkaround.util.server.RetryHelper.PermanentFailure;
import com.google.walkaround.util.server.RetryHelper.RetryableFailure;
import com.google.walkaround.util.server.appengine.CheckedDatastore.CheckedIterator;
import com.google.walkaround.util.server.appengine.CheckedDatastore.CheckedPreparedQuery;
import com.google.walkaround.util.server.appengine.CheckedDatastore.CheckedTransaction;
import junit.framework.TestCase;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import javax.annotation.Nullable;
/**
 * Tests for {@code MutationLog}, in particular its estimate of the number of
 * bytes staged by an {@code Appender} as deltas and snapshots accumulate.
 *
 * @author ohler@google.com (Christian Ohler)
 */
public class MutationLogTest extends TestCase {

  private static final Logger log = Logger.getLogger(MutationLogTest.class.getName());

  // NOTE(ohler): testSizeEstimates() does NOT pass regardless of what these
  // constants are.  It was off by one when I ran with other values.  Still good
  // enough, didn't investigate in detail.
  private static final String SNAPSHOT_STRING =
      "snapshotsnapshotsnapshotsnapshotsnapshotsnapshotsnapshotsnapshotsnapshotsnapshotsnapshot"
      + "snapshotsnapshotsnapshotsnapshotsnapshotsnapshotsnapshotsnapshotsnapshotsnapshotsnapshot";

  // Entity kinds and object id used when computing expected datastore entity sizes.
  private static final String ROOT_ENTITY_KIND = "Wavelet";
  private static final String DELTA_ENTITY_KIND = "WaveletDelta";
  private static final String SNAPSHOT_ENTITY_KIND = "WaveletSnapshot";
  private static final String OBJECT_ID = "obj";

  /**
   * Minimal {@link SlobModel}: its slobs accept any change and always snapshot
   * to {@link #SNAPSHOT_STRING}, keeping the size arithmetic predictable.
   */
  private static class TestModel implements SlobModel {
    public class TestObject implements Slob {
      @Override @Nullable public String snapshot() {
        return SNAPSHOT_STRING;
      }

      public void apply(ChangeData<String> payload) throws ChangeRejected {
        // accept any payload, do nothing with it
      }

      public String getIndexedContent() {
        throw new AssertionError("Not implemented");
      }
    }

    @Override
    public Slob create(@Nullable String snapshot) throws InvalidSnapshot {
      return new TestObject();
    }

    @Override
    public List<String> transform(List<ChangeData<String>> clientOps,
        List<ChangeData<String>> serverOps) throws ChangeRejected {
      throw new AssertionError("Not implemented");
    }
  }

  private final LocalServiceTestHelper helper =
      new LocalServiceTestHelper(
          new LocalDatastoreServiceTestConfig());

  @Override protected void setUp() throws Exception {
    super.setUp();
    helper.setUp();
  }

  @Override protected void tearDown() throws Exception {
    helper.tearDown();
    super.tearDown();
  }

  public void testSizeEstimates() throws Exception {
    // Stub transaction: reads see an empty datastore; everything else is
    // unimplemented because the appender only *stages* bytes in this test.
    CheckedTransaction tx = new CheckedTransaction() {
      @Override
      public Entity get(Key key) throws PermanentFailure, RetryableFailure {
        return null;
      }

      @Override
      public Map<Key, Entity> get(Iterable<Key> keys) throws PermanentFailure, RetryableFailure {
        throw new AssertionError("Not implemented");
      }

      @Override
      public CheckedPreparedQuery prepare(Query q) {
        return new CheckedPreparedQuery() {
          public CheckedIterator asIterator(final FetchOptions options)
              throws PermanentFailure, RetryableFailure {
            return CheckedIterator.EMPTY;
          }

          public List<Entity> asList(final FetchOptions options)
              throws PermanentFailure, RetryableFailure {
            return ImmutableList.of();
          }

          public int countEntities(final FetchOptions options)
              throws PermanentFailure, RetryableFailure {
            throw new AssertionError("Not implemented");
          }

          public Entity asSingleEntity() throws PermanentFailure, RetryableFailure {
            throw new AssertionError("Not implemented");
          }
        };
      }

      @Override
      public Key put(Entity e) throws PermanentFailure, RetryableFailure {
        throw new AssertionError("Not implemented");
      }

      @Override
      public List<Key> put(Iterable<Entity> e) throws PermanentFailure, RetryableFailure {
        throw new AssertionError("Not implemented");
      }

      @Override
      public void delete(Key... keys) throws PermanentFailure, RetryableFailure {
        throw new AssertionError("Not implemented");
      }

      @Override
      public TaskHandle enqueueTask(Queue queue, TaskOptions task)
          throws PermanentFailure, RetryableFailure {
        throw new AssertionError("Not implemented");
      }

      @Override
      public void rollback() {
        throw new AssertionError("Not implemented");
      }

      @Override
      public void commit() throws PermanentFailure, RetryableFailure {
        throw new AssertionError("Not implemented");
      }

      @Override
      public boolean isActive() {
        throw new AssertionError("Not implemented");
      }

      @Override
      public void close() {
        throw new AssertionError("Not implemented");
      }
    };
    SlobId objectId = new SlobId(OBJECT_ID);
    ClientId clientId = new ClientId("s");
    String payload = "{\"a\": 5}";
    // I didn't track down exactly where the 12 comes from.
    int encodingOverhead = 12;
    int idSize = ROOT_ENTITY_KIND.length() + OBJECT_ID.length() + encodingOverhead;
    int versionSize = 8;
    int deltaSize = idSize + DELTA_ENTITY_KIND.length() + versionSize
        + MutationLog.DELTA_CLIENT_ID_PROPERTY.length() + clientId.getId().length()
        + MutationLog.DELTA_OP_PROPERTY.length() + payload.length();
    int snapshotSize = idSize + SNAPSHOT_ENTITY_KIND.length() + versionSize
        + MutationLog.SNAPSHOT_DATA_PROPERTY.length() + SNAPSHOT_STRING.length();
    log.info("deltaSize=" + deltaSize
        + ", snapshotSize=" + snapshotSize);
    assertEquals(56, deltaSize);
    assertEquals(225, snapshotSize);
    ChangeData<String> delta = new ChangeData<String>(clientId, payload);
    MutationLog mutationLog =
        new MutationLog(ROOT_ENTITY_KIND, DELTA_ENTITY_KIND, SNAPSHOT_ENTITY_KIND,
            new MutationLog.DefaultDeltaEntityConverter(),
            tx, objectId, new TestModel());
    MutationLog.Appender appender = mutationLog.prepareAppender().getAppender();
    assertEquals(0, appender.estimatedBytesStaged());
    // The appender stages a snapshot after the 9th and again after the 14th
    // delta; expectedSnapshots[i] is the snapshot count once (i + 1) deltas
    // have been appended.  This loop makes the same assertions as the fifteen
    // copy-pasted append/assert pairs it replaces.
    int[] expectedSnapshots = {0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2};
    for (int i = 0; i < expectedSnapshots.length; i++) {
      appender.append(delta);
      assertEquals((i + 1) * deltaSize + expectedSnapshots[i] * snapshotSize,
          appender.estimatedBytesStaged());
    }
  }
}
| |
/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.ui;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.logging.Logger;
import org.jsoup.nodes.Attribute;
import org.jsoup.nodes.Attributes;
import org.jsoup.nodes.Element;
import com.vaadin.event.ActionManager;
import com.vaadin.event.ConnectorActionManager;
import com.vaadin.event.ShortcutListener;
import com.vaadin.server.AbstractClientConnector;
import com.vaadin.server.AbstractErrorMessage.ContentMode;
import com.vaadin.server.ComponentSizeValidator;
import com.vaadin.server.ErrorMessage;
import com.vaadin.server.ErrorMessage.ErrorLevel;
import com.vaadin.server.Extension;
import com.vaadin.server.Resource;
import com.vaadin.server.Responsive;
import com.vaadin.server.SizeWithUnit;
import com.vaadin.server.Sizeable;
import com.vaadin.server.UserError;
import com.vaadin.server.VaadinSession;
import com.vaadin.shared.AbstractComponentState;
import com.vaadin.shared.ComponentConstants;
import com.vaadin.shared.ui.ComponentStateUtil;
import com.vaadin.shared.util.SharedUtil;
import com.vaadin.ui.Field.ValueChangeEvent;
import com.vaadin.ui.declarative.DesignAttributeHandler;
import com.vaadin.ui.declarative.DesignContext;
import com.vaadin.util.ReflectTools;
/**
* An abstract class that defines default implementation for the
* {@link Component} interface. Basic UI components that are not derived from an
* external component can inherit this class to easily qualify as Vaadin
* components. Most components in Vaadin do just that.
*
* @author Vaadin Ltd.
* @since 3.0
*/
@SuppressWarnings("serial")
public abstract class AbstractComponent extends AbstractClientConnector
implements Component {
    /* Private members */

    /**
     * Application specific data object. The component does not use or modify
     * this.
     */
    private Object applicationData;

    /**
     * The internal error message of the component.
     */
    private ErrorMessage componentError = null;

    /**
     * Locale of this component.
     */
    private Locale locale;

    /**
     * The component should receive focus (if {@link Focusable}) when attached.
     */
    private boolean delayedFocus;

    /* Sizeable fields */

    // Explicit size; SIZE_UNDEFINED means "not set".
    private float width = SIZE_UNDEFINED;
    private float height = SIZE_UNDEFINED;
    private Unit widthUnit = Unit.PIXELS;
    private Unit heightUnit = Unit.PIXELS;

    /**
     * Keeps track of the Actions added to this component; the actual
     * handling/notifying is delegated, usually to the containing window.
     */
    private ConnectorActionManager actionManager;

    // Server-side visibility flag; an invisible component is not rendered.
    private boolean visible = true;

    // Direct parent reference; null when the component has no parent.
    private HasComponents parent;

    // Tri-state: stays null until setImmediate(boolean) is explicitly called.
    private Boolean explicitImmediateValue;

    // Attribute name used by the declarative design format for plain-text mode.
    protected static final String DESIGN_ATTR_PLAIN_TEXT = "plain-text";

    /* Constructor */

    /**
     * Constructs a new Component.
     */
    public AbstractComponent() {
        // ComponentSizeValidator.setCreationLocation(this);
    }
    /* Get/Set component properties */

    /**
     * Sets the component id, stored in the shared state and usable e.g. as a
     * testing hook on the client side.
     *
     * @see com.vaadin.ui.Component#setId(java.lang.String)
     */
    @Override
    public void setId(String id) {
        getState().id = id;
    }
    /**
     * Returns the component id from the shared state without marking it dirty.
     *
     * @see com.vaadin.ui.Component#getId()
     */
    @Override
    public String getId() {
        return getState(false).id;
    }
    /**
     * Sets the component id (legacy name; delegates to {@link #setId(String)}).
     *
     * @deprecated As of 7.0. Use {@link #setId(String)}
     */
    @Deprecated
    public void setDebugId(String id) {
        setId(id);
    }
    /**
     * Gets the component id (legacy name; delegates to {@link #getId()}).
     *
     * @deprecated As of 7.0. Use {@link #getId()}
     */
    @Deprecated
    public String getDebugId() {
        return getId();
    }
/*
* Gets the component's style. Don't add a JavaDoc comment here, we use the
* default documentation from implemented interface.
*/
@Override
public String getStyleName() {
String s = "";
if (ComponentStateUtil.hasStyles(getState(false))) {
for (final Iterator<String> it = getState(false).styles.iterator(); it
.hasNext();) {
s += it.next();
if (it.hasNext()) {
s += " ";
}
}
}
return s;
}
/*
* Sets the component's style. Don't add a JavaDoc comment here, we use the
* default documentation from implemented interface.
*/
@Override
public void setStyleName(String style) {
if (style == null || "".equals(style)) {
getState().styles = null;
return;
}
if (getState().styles == null) {
getState().styles = new ArrayList<String>();
}
List<String> styles = getState().styles;
styles.clear();
StringTokenizer tokenizer = new StringTokenizer(style, " ");
while (tokenizer.hasMoreTokens()) {
styles.add(tokenizer.nextToken());
}
}
    /** Sets the primary style name in the shared state. */
    @Override
    public void setPrimaryStyleName(String style) {
        getState().primaryStyleName = style;
    }
    /** Returns the primary style name without marking the state dirty. */
    @Override
    public String getPrimaryStyleName() {
        return getState(false).primaryStyleName;
    }
@Override
public void addStyleName(String style) {
if (style == null || "".equals(style)) {
return;
}
if (style.contains(" ")) {
// Split space separated style names and add them one by one.
StringTokenizer tokenizer = new StringTokenizer(style, " ");
while (tokenizer.hasMoreTokens()) {
addStyleName(tokenizer.nextToken());
}
return;
}
if (getState().styles == null) {
getState().styles = new ArrayList<String>();
}
List<String> styles = getState().styles;
if (!styles.contains(style)) {
styles.add(style);
}
}
@Override
public void removeStyleName(String style) {
if (ComponentStateUtil.hasStyles(getState())) {
StringTokenizer tokenizer = new StringTokenizer(style, " ");
while (tokenizer.hasMoreTokens()) {
getState().styles.remove(tokenizer.nextToken());
}
}
}
    /**
     * Adds or removes a style name. Multiple styles can be specified as a
     * space-separated list of style names.
     *
     * If the {@code add} parameter is true, the style name is added to the
     * component. If the {@code add} parameter is false, the style name is
     * removed from the component.
     * <p>
     * Functionally this is equivalent to using {@link #addStyleName(String)} or
     * {@link #removeStyleName(String)}
     *
     * @since 7.5
     * @param style
     *            the style name to be added or removed
     * @param add
     *            <code>true</code> to add the given style, <code>false</code>
     *            to remove it
     * @see #addStyleName(String)
     * @see #removeStyleName(String)
     */
    public void setStyleName(String style, boolean add) {
        // Pure delegation; all parsing/deduplication happens in the callees.
        if (add) {
            addStyleName(style);
        } else {
            removeStyleName(style);
        }
    }
    /**
     * Gets the component's caption from the shared state (read-only access).
     *
     * @see com.vaadin.ui.Component#getCaption()
     */
    @Override
    public String getCaption() {
        return getState(false).caption;
    }
    /**
     * Sets the component's caption <code>String</code>. Caption is the visible
     * name of the component. This method will trigger a
     * {@link RepaintRequestEvent}.
     *
     * @param caption
     *            the new caption <code>String</code> for the component.
     */
    @Override
    public void setCaption(String caption) {
        getState().caption = caption;
    }
    /**
     * Sets whether the caption is rendered as HTML.
     * <p>
     * If set to true, the captions are rendered in the browser as HTML and the
     * developer is responsible for ensuring no harmful HTML is used. If set to
     * false, the caption is rendered in the browser as plain text.
     * <p>
     * The default is false, i.e. to render the caption as plain text.
     *
     * @param captionAsHtml
     *            true if the captions are rendered as HTML, false if rendered
     *            as plain text
     */
    public void setCaptionAsHtml(boolean captionAsHtml) {
        getState().captionAsHtml = captionAsHtml;
    }
    /**
     * Checks whether captions are rendered as HTML.
     * <p>
     * The default is false, i.e. to render the caption as plain text.
     *
     * @return true if the captions are rendered as HTML, false if rendered as
     *         plain text
     */
    public boolean isCaptionAsHtml() {
        return getState(false).captionAsHtml;
    }
/*
* Don't add a JavaDoc comment here, we use the default documentation from
* implemented interface.
*/
@Override
public Locale getLocale() {
if (locale != null) {
return locale;
}
HasComponents parent = getParent();
if (parent != null) {
return parent.getLocale();
}
final VaadinSession session = getSession();
if (session != null) {
return session.getLocale();
}
return null;
}
    /**
     * Sets the locale of this component.
     *
     * <pre>
     * // Component for which the locale is meaningful
     * InlineDateField date = new InlineDateField(&quot;Datum&quot;);
     *
     * // German language specified with ISO 639-1 language
     * // code and ISO 3166-1 alpha-2 country code.
     * date.setLocale(new Locale(&quot;de&quot;, &quot;DE&quot;));
     *
     * date.setResolution(DateField.RESOLUTION_DAY);
     * layout.addComponent(date);
     * </pre>
     *
     *
     * @param locale
     *            the locale to become this component's locale.
     */
    public void setLocale(Locale locale) {
        this.locale = locale;
        if (locale != null && isAttached()) {
            // Register the locale so its data is transferred to the client.
            getUI().getLocaleService().addLocale(locale);
        }
        markAsDirty();
    }
    /**
     * Gets the component's icon resource, stored under the shared icon key.
     *
     * @see com.vaadin.ui.Component#getIcon()
     */
    @Override
    public Resource getIcon() {
        return getResource(ComponentConstants.ICON_RESOURCE);
    }
    /**
     * Sets the component's icon. This method will trigger a
     * {@link RepaintRequestEvent}.
     *
     * @param icon
     *            the icon to be shown with the component's caption.
     */
    @Override
    public void setIcon(Resource icon) {
        setResource(ComponentConstants.ICON_RESOURCE, icon);
    }
    /**
     * Returns the enabled flag from the shared state (read-only access).
     *
     * @see com.vaadin.ui.Component#isEnabled()
     */
    @Override
    public boolean isEnabled() {
        return getState(false).enabled;
    }
    /**
     * Stores the enabled flag in the shared state, marking it dirty.
     *
     * @see com.vaadin.ui.Component#setEnabled(boolean)
     */
    @Override
    public void setEnabled(boolean enabled) {
        getState().enabled = enabled;
    }
/*
* (non-Javadoc)
*
* @see com.vaadin.client.Connector#isConnectorEnabled()
*/
@Override
public boolean isConnectorEnabled() {
if (!isVisible()) {
return false;
} else if (!isEnabled()) {
return false;
} else if (!super.isConnectorEnabled()) {
return false;
} else if ((getParent() instanceof SelectiveRenderer)
&& !((SelectiveRenderer) getParent()).isRendered(this)) {
return false;
} else {
return true;
}
}
    /**
     * Returns the explicitly set immediate value.
     *
     * @return the explicitly set immediate value or null if
     *         {@link #setImmediate(boolean)} has not been explicitly invoked
     */
    protected Boolean getExplicitImmediateValue() {
        return explicitImmediateValue;
    }
/**
* Returns the immediate mode of the component.
* <p>
* Certain operations such as adding a value change listener will set the
* component into immediate mode if {@link #setImmediate(boolean)} has not
* been explicitly called with false.
*
* @return true if the component is in immediate mode (explicitly or
* implicitly set), false if the component if not in immediate mode
*/
public boolean isImmediate() {
if (explicitImmediateValue != null) {
return explicitImmediateValue;
} else if (hasListeners(ValueChangeEvent.class)) {
/*
* Automatic immediate for fields that developers are interested
* about.
*/
return true;
} else {
return false;
}
}
    /**
     * Sets the component's immediate mode to the specified status.
     *
     * @param immediate
     *            the boolean value specifying if the component should be in the
     *            immediate mode after the call.
     */
    public void setImmediate(boolean immediate) {
        // Remember the explicit choice so isImmediate() no longer infers it.
        explicitImmediateValue = immediate;
        getState().immediate = immediate;
    }
    /**
     * Returns the server-side visibility flag.
     *
     * @see com.vaadin.ui.Component#isVisible()
     */
    @Override
    public boolean isVisible() {
        return visible;
    }
/*
* (non-Javadoc)
*
* @see com.vaadin.ui.Component#setVisible(boolean)
*/
@Override
public void setVisible(boolean visible) {
if (isVisible() == visible) {
return;
}
this.visible = visible;
if (visible) {
/*
* If the visibility state is toggled from invisible to visible it
* affects all children (the whole hierarchy) in addition to this
* component.
*/
markAsDirtyRecursive();
}
if (getParent() != null) {
// Must always repaint the parent (at least the hierarchy) when
// visibility of a child component changes.
getParent().markAsDirty();
}
}
    /**
     * Returns the component's description from the shared state.
     *
     * @see com.vaadin.ui.Component#getDescription()
     */
    @Override
    public String getDescription() {
        return getState(false).description;
    }
    /**
     * Sets the component's description. See {@link #getDescription()} for more
     * information on what the description is. This method will trigger a
     * {@link RepaintRequestEvent}.
     *
     * The description is displayed as HTML in tooltips or directly in certain
     * components so care should be taken to avoid creating the possibility for
     * HTML injection and possibly XSS vulnerabilities.
     *
     * @param description
     *            the new description string for the component.
     */
    public void setDescription(String description) {
        getState().description = description;
    }
    /**
     * Returns the component's direct parent, or null when not attached.
     *
     * @see com.vaadin.ui.Component#getParent()
     */
    @Override
    public HasComponents getParent() {
        return parent;
    }
    /**
     * Sets the parent connector. A component that already has a parent must
     * first be released (parent set to null) before a new one may be assigned;
     * attach/detach events fire as the attach state changes.
     *
     * @see com.vaadin.ui.Component#setParent(com.vaadin.ui.HasComponents)
     */
    @Override
    public void setParent(HasComponents parent) {
        // If the parent is not changed, don't do anything
        if (parent == null ? this.parent == null : parent.equals(this.parent)) {
            return;
        }
        if (parent != null && this.parent != null) {
            throw new IllegalStateException(getClass().getName()
                    + " already has a parent.");
        }
        // Send a detach event if the component is currently attached
        if (isAttached()) {
            detach();
        }
        // Connect to new parent
        this.parent = parent;
        // Send attach event if the component is now attached
        if (isAttached()) {
            attach();
        }
    }
/**
* Returns the closest ancestor with the given type.
* <p>
* To find the Window that contains the component, use {@code Window w =
* getParent(Window.class);}
* </p>
*
* @param <T>
* The type of the ancestor
* @param parentType
* The ancestor class we are looking for
* @return The first ancestor that can be assigned to the given class. Null
* if no ancestor with the correct type could be found.
*/
public <T extends HasComponents> T findAncestor(Class<T> parentType) {
HasComponents p = getParent();
while (p != null) {
if (parentType.isAssignableFrom(p.getClass())) {
return parentType.cast(p);
}
p = p.getParent();
}
return null;
}
    /**
     * Gets the error message for this component.
     *
     * @return ErrorMessage containing the description of the error state of the
     *         component or null, if the component contains no errors. Extending
     *         classes should override this method if they support other error
     *         message types such as validation errors or buffering errors. The
     *         returned error message contains information about all the errors.
     */
    public ErrorMessage getErrorMessage() {
        return componentError;
    }
    /**
     * Gets the component's error message as set via
     * {@link #setComponentError(ErrorMessage)}.
     *
     * @return the component's error message.
     */
    public ErrorMessage getComponentError() {
        return componentError;
    }
    /**
     * Sets the component's error message. The message may contain certain XML
     * tags; fires a component error event and repaints the component.
     *
     * @param componentError
     *            the new <code>ErrorMessage</code> of the component.
     */
    public void setComponentError(ErrorMessage componentError) {
        this.componentError = componentError;
        fireComponentErrorEvent();
        markAsDirty();
    }
    /**
     * Tests the read-only flag from the shared state (read-only access).
     *
     * @see com.vaadin.ui.Component#isReadOnly()
     */
    @Override
    public boolean isReadOnly() {
        return getState(false).readOnly;
    }
    /**
     * Stores the read-only flag in the shared state, marking it dirty.
     *
     * @see com.vaadin.ui.Component#setReadOnly(boolean)
     */
    @Override
    public void setReadOnly(boolean readOnly) {
        getState().readOnly = readOnly;
    }
    /**
     * Notifies the component that it has been attached: applies any pending
     * focus request, hooks up the action manager and registers the locale.
     *
     * @see com.vaadin.ui.Component#attach()
     */
    @Override
    public void attach() {
        super.attach();
        if (delayedFocus) {
            // Focus was requested while detached; apply it now.
            focus();
        }
        setActionManagerViewer();
        if (locale != null) {
            getUI().getLocaleService().addLocale(locale);
        }
    }
    /**
     * Detaches the component from the application, releasing the action
     * manager's viewer reference.
     *
     * @see com.vaadin.ui.Component#detach()
     */
    @Override
    public void detach() {
        super.detach();
        if (actionManager != null) {
            // Remove any existing viewer. UI cast is just to make the
            // compiler happy
            actionManager.setViewer((UI) null);
        }
    }
    /**
     * Sets the focus for this component if the component is {@link Focusable}.
     * If no session is available yet (component not attached), the request is
     * deferred until {@link #attach()}.
     */
    protected void focus() {
        if (this instanceof Focusable) {
            final VaadinSession session = getSession();
            if (session != null) {
                getUI().setFocusedComponent((Focusable) this);
                delayedFocus = false;
            } else {
                delayedFocus = true;
            }
        }
    }
/**
* Build CSS compatible string representation of height.
*
* @return CSS height
*/
private String getCSSHeight() {
return getHeight() + getHeightUnits().getSymbol();
}
/**
* Build CSS compatible string representation of width.
*
* @return CSS width
*/
private String getCSSWidth() {
return getWidth() + getWidthUnits().getSymbol();
}
    /**
     * Returns the shared state bean with information to be sent from the server
     * to the client.
     *
     * Subclasses should override this method and set any relevant fields of the
     * state returned by super.getState().
     *
     * @since 7.0
     *
     * @return updated component shared state
     */
    @Override
    protected AbstractComponentState getState() {
        return (AbstractComponentState) super.getState();
    }
    /**
     * Returns the shared state bean, optionally without flagging the component
     * for a state resend.
     *
     * @param markAsDirty
     *            true to mark the component dirty, false for read-only access
     *            to the state
     * @return the component shared state
     */
    @Override
    protected AbstractComponentState getState(boolean markAsDirty) {
        return (AbstractComponentState) super.getState(markAsDirty);
    }
@Override
public void beforeClientResponse(boolean initial) {
super.beforeClientResponse(initial);
// TODO This logic should be on the client side and the state should
// simply be a data object with "width" and "height".
if (getHeight() >= 0
&& (getHeightUnits() != Unit.PERCENTAGE || ComponentSizeValidator
.parentCanDefineHeight(this))) {
getState().height = "" + getCSSHeight();
} else {
getState().height = "";
}
if (getWidth() >= 0
&& (getWidthUnits() != Unit.PERCENTAGE || ComponentSizeValidator
.parentCanDefineWidth(this))) {
getState().width = "" + getCSSWidth();
} else {
getState().width = "";
}
ErrorMessage error = getErrorMessage();
if (null != error) {
getState().errorMessage = error.getFormattedHtmlMessage();
} else {
getState().errorMessage = null;
}
getState().immediate = isImmediate();
}
    /* General event framework */
    // Reflection handle to Component.Listener#componentEvent(Component.Event),
    // resolved once and reused by addListener/removeListener below.
    private static final Method COMPONENT_EVENT_METHOD = ReflectTools
            .findMethod(Component.Listener.class, "componentEvent",
                    Component.Event.class);
    /* Component event framework */
    /*
     * Registers a new listener to listen events generated by this component.
     * Don't add a JavaDoc comment here, we use the default documentation from
     * implemented interface.
     */
    @Override
    public void addListener(Component.Listener listener) {
        // Dispatches generic component events to the listener method resolved
        // via reflection in COMPONENT_EVENT_METHOD.
        addListener(Component.Event.class, listener, COMPONENT_EVENT_METHOD);
    }
    /*
     * Removes a previously registered listener from this component. Don't add a
     * JavaDoc comment here, we use the default documentation from implemented
     * interface.
     */
    @Override
    public void removeListener(Component.Listener listener) {
        removeListener(Component.Event.class, listener, COMPONENT_EVENT_METHOD);
    }
/**
* Emits the component event. It is transmitted to all registered listeners
* interested in such events.
*/
protected void fireComponentEvent() {
fireEvent(new Component.Event(this));
}
/**
* Emits the component error event. It is transmitted to all registered
* listeners interested in such events.
*/
protected void fireComponentErrorEvent() {
fireEvent(new Component.ErrorEvent(getComponentError(), this));
}
    /**
     * Sets the data object, that can be used for any application specific data.
     * The component does not use or modify this data.
     *
     * @param data
     *            the Application specific data.
     * @since 3.1
     */
    public void setData(Object data) {
        // Opaque to the framework; stored as-is and only ever returned by
        // getData().
        applicationData = data;
    }
    /**
     * Gets the application specific data. See {@link #setData(Object)}.
     *
     * @return the Application specific data set with setData function.
     * @since 3.1
     */
    public Object getData() {
        return applicationData;
    }
    /* Sizeable and other size related methods */
    /*
     * (non-Javadoc)
     *
     * @see com.vaadin.server.Sizeable#getHeight()
     */
    @Override
    public float getHeight() {
        return height;
    }
    /*
     * (non-Javadoc)
     *
     * @see com.vaadin.server.Sizeable#getHeightUnits()
     */
    @Override
    public Unit getHeightUnits() {
        return heightUnit;
    }
    /*
     * (non-Javadoc)
     *
     * @see com.vaadin.server.Sizeable#getWidth()
     */
    @Override
    public float getWidth() {
        return width;
    }
    /*
     * (non-Javadoc)
     *
     * @see com.vaadin.server.Sizeable#getWidthUnits()
     */
    @Override
    public Unit getWidthUnits() {
        return widthUnit;
    }
/*
* (non-Javadoc)
*
* @see com.vaadin.server.Sizeable#setHeight(float, Unit)
*/
@Override
public void setHeight(float height, Unit unit) {
if (unit == null) {
throw new IllegalArgumentException("Unit can not be null");
}
this.height = height;
heightUnit = unit;
markAsDirty();
// ComponentSizeValidator.setHeightLocation(this);
}
    /*
     * (non-Javadoc)
     *
     * @see com.vaadin.server.Sizeable#setSizeFull()
     */
    @Override
    public void setSizeFull() {
        // 100% x 100%: fill the available area of the parent.
        setWidth(100, Unit.PERCENTAGE);
        setHeight(100, Unit.PERCENTAGE);
    }
    /*
     * (non-Javadoc)
     *
     * @see com.vaadin.server.Sizeable#setSizeUndefined()
     */
    @Override
    public void setSizeUndefined() {
        // Undefined size: the component shrinks to its content.
        setWidthUndefined();
        setHeightUndefined();
    }
    /*
     * (non-Javadoc)
     *
     * @see com.vaadin.server.Sizeable#setWidthUndefined()
     */
    @Override
    public void setWidthUndefined() {
        // -1 is the sentinel for "undefined" in the Sizeable contract.
        setWidth(-1, Unit.PIXELS);
    }
    /*
     * (non-Javadoc)
     *
     * @see com.vaadin.server.Sizeable#setHeightUndefined()
     */
    @Override
    public void setHeightUndefined() {
        setHeight(-1, Unit.PIXELS);
    }
/*
* (non-Javadoc)
*
* @see com.vaadin.server.Sizeable#setWidth(float, Unit)
*/
@Override
public void setWidth(float width, Unit unit) {
if (unit == null) {
throw new IllegalArgumentException("Unit can not be null");
}
this.width = width;
widthUnit = unit;
markAsDirty();
// ComponentSizeValidator.setWidthLocation(this);
}
/*
* (non-Javadoc)
*
* @see com.vaadin.server.Sizeable#setWidth(java.lang.String)
*/
@Override
public void setWidth(String width) {
SizeWithUnit size = SizeWithUnit.parseStringSize(width);
if (size != null) {
setWidth(size.getSize(), size.getUnit());
} else {
setWidth(-1, Unit.PIXELS);
}
}
/*
* (non-Javadoc)
*
* @see com.vaadin.server.Sizeable#setHeight(java.lang.String)
*/
@Override
public void setHeight(String height) {
SizeWithUnit size = SizeWithUnit.parseStringSize(height);
if (size != null) {
setHeight(size.getSize(), size.getUnit());
} else {
setHeight(-1, Unit.PIXELS);
}
}
    /*
     * Reads the component configuration from a declarative design element.
     * Attribute order matters: default attributes first, then the explicitly
     * handled ones (immediate, locale, size, error, tabindex), finally a scan
     * for unsupported attributes.
     *
     * @see com.vaadin.ui.Component#readDesign(org.jsoup.nodes.Element,
     * com.vaadin.ui.declarative.DesignContext)
     */
    @Override
    public void readDesign(Element design, DesignContext designContext) {
        Attributes attr = design.attributes();
        // handle default attributes
        for (String attribute : getDefaultAttributes()) {
            if (design.hasAttr(attribute)) {
                DesignAttributeHandler.assignValue(this, attribute,
                        design.attr(attribute));
            }
        }
        // handle immediate
        if (attr.hasKey("immediate")) {
            setImmediate(DesignAttributeHandler.getFormatter().parse(
                    attr.get("immediate"), Boolean.class));
        }
        // handle locale
        if (attr.hasKey("locale")) {
            setLocale(getLocaleFromString(attr.get("locale")));
        }
        // handle width and height
        readSize(attr);
        // handle component error: the attribute value is interpreted as HTML
        if (attr.hasKey("error")) {
            UserError error = new UserError(attr.get("error"),
                    ContentMode.HTML, ErrorLevel.ERROR);
            setComponentError(error);
        }
        // Tab index when applicable
        if (design.hasAttr("tabindex") && this instanceof Focusable) {
            ((Focusable) this).setTabIndex(DesignAttributeHandler
                    .readAttribute("tabindex", design.attributes(),
                            Integer.class));
        }
        // check for unsupported attributes; ":"-prefixed attributes are
        // design-internal and intentionally skipped
        Set<String> supported = new HashSet<String>();
        supported.addAll(getDefaultAttributes());
        supported.addAll(getCustomAttributes());
        for (Attribute a : attr) {
            if (!a.getKey().startsWith(":") && !supported.contains(a.getKey())) {
                getLogger().info(
                        "Unsupported attribute found when reading from design : "
                                + a.getKey());
            }
        }
    }
/**
* Constructs a Locale corresponding to the given string. The string should
* consist of one, two or three parts with '_' between the different parts
* if there is more than one part. The first part specifies the language,
* the second part the country and the third part the variant of the locale.
*
* @param localeString
* the locale specified as a string
* @return the Locale object corresponding to localeString
*/
private Locale getLocaleFromString(String localeString) {
if (localeString == null) {
return null;
}
String[] parts = localeString.split("_");
if (parts.length > 3) {
throw new RuntimeException("Cannot parse the locale string: "
+ localeString);
}
switch (parts.length) {
case 1:
return new Locale(parts[0]);
case 2:
return new Locale(parts[0], parts[1]);
default:
return new Locale(parts[0], parts[1], parts[2]);
}
}
/**
* Toggles responsiveness of this component.
*
* @since 7.5.0
* @param responsive
* boolean enables responsiveness, false disables
*/
public void setResponsive(boolean responsive) {
if (responsive) {
// make responsive if necessary
if (!isResponsive()) {
Responsive.makeResponsive(this);
}
} else {
// remove responsive extensions
List<Extension> extensions = new ArrayList<Extension>(
getExtensions());
for (Extension e : extensions) {
if (e instanceof Responsive) {
removeExtension(e);
}
}
}
}
    /**
     * Returns true if the component is responsive
     *
     * @since 7.5.0
     * @return true if the component is responsive
     */
    public boolean isResponsive() {
        // Responsiveness is represented by an attached Responsive extension.
        for (Extension e : getExtensions()) {
            if (e instanceof Responsive) {
                return true;
            }
        }
        return false;
    }
    /**
     * Reads the size of this component from the given design attributes. The
     * shorthand attributes "size-auto" and "size-full" set both dimensions;
     * otherwise the per-dimension "width-auto"/"width-full"/"width" (and the
     * corresponding height attributes) are consulted.
     *
     * @param attributes
     *            the design attributes
     */
    private void readSize(Attributes attributes) {
        // read width
        if (attributes.hasKey("width-auto") || attributes.hasKey("size-auto")) {
            this.setWidth(null);
        } else if (attributes.hasKey("width-full")
                || attributes.hasKey("size-full")) {
            this.setWidth("100%");
        } else if (attributes.hasKey("width")) {
            this.setWidth(attributes.get("width"));
        }
        // read height
        if (attributes.hasKey("height-auto") || attributes.hasKey("size-auto")) {
            this.setHeight(null);
        } else if (attributes.hasKey("height-full")
                || attributes.hasKey("size-full")) {
            this.setHeight("100%");
        } else if (attributes.hasKey("height")) {
            this.setHeight(attributes.get("height"));
        }
    }
    /**
     * Writes the size related attributes for the component if they differ from
     * the defaults. Prefers the shorthand attributes "size-full"/"size-auto"
     * when both dimensions match.
     *
     * @param attributes
     *            the attribute map where the attributes are written
     * @param defaultInstance
     *            the default instance of the class for fetching the default
     *            values
     */
    private void writeSize(Attributes attributes, Component defaultInstance) {
        if (hasEqualSize(defaultInstance)) {
            // we have default values -> ignore
            return;
        }
        boolean widthFull = getWidth() == 100f
                && getWidthUnits().equals(Sizeable.Unit.PERCENTAGE);
        boolean heightFull = getHeight() == 100f
                && getHeightUnits().equals(Sizeable.Unit.PERCENTAGE);
        boolean widthAuto = getWidth() == -1;
        boolean heightAuto = getHeight() == -1;
        // first try the full shorthands
        if (widthFull && heightFull) {
            attributes.put("size-full", "");
        } else if (widthAuto && heightAuto) {
            attributes.put("size-auto", "");
        } else {
            // handle width
            if (!hasEqualWidth(defaultInstance)) {
                if (widthFull) {
                    attributes.put("width-full", "");
                } else if (widthAuto) {
                    attributes.put("width-auto", "");
                } else {
                    String widthString = DesignAttributeHandler.getFormatter()
                            .format(getWidth()) + getWidthUnits().getSymbol();
                    attributes.put("width", widthString);
                }
            }
            if (!hasEqualHeight(defaultInstance)) {
                // handle height
                if (heightFull) {
                    attributes.put("height-full", "");
                } else if (heightAuto) {
                    attributes.put("height-auto", "");
                } else {
                    String heightString = DesignAttributeHandler.getFormatter()
                            .format(getHeight()) + getHeightUnits().getSymbol();
                    attributes.put("height", heightString);
                }
            }
        }
    }
    /**
     * Tests if the given component has equal width with this instance. Both
     * the magnitude and the unit must match.
     *
     * @param component
     *            the component for the width comparison
     * @return true if the widths are equal
     */
    private boolean hasEqualWidth(Component component) {
        return getWidth() == component.getWidth()
                && getWidthUnits().equals(component.getWidthUnits());
    }
    /**
     * Tests if the given component has equal height with this instance. Both
     * the magnitude and the unit must match.
     *
     * @param component
     *            the component for the height comparison
     * @return true if the heights are equal
     */
    private boolean hasEqualHeight(Component component) {
        return getHeight() == component.getHeight()
                && getHeightUnits().equals(component.getHeightUnits());
    }
    /**
     * Tests if the given component has equal size (width and height) with this
     * instance.
     *
     * @param component
     *            the component for the size comparison
     * @return true if the sizes are equal
     */
    private boolean hasEqualSize(Component component) {
        return hasEqualWidth(component) && hasEqualHeight(component);
    }
/**
* Returns a collection of attributes that do not require custom handling
* when reading or writing design. These are typically attributes of some
* primitive type. The default implementation searches setters with
* primitive values
*
* @return a collection of attributes that can be read and written using the
* default approach.
*/
private Collection<String> getDefaultAttributes() {
Collection<String> attributes = DesignAttributeHandler
.getSupportedAttributes(this.getClass());
attributes.removeAll(getCustomAttributes());
return attributes;
}
/**
* Returns a collection of attributes that should not be handled by the
* basic implementation of the {@link readDesign} and {@link writeDesign}
* methods. Typically these are handled in a custom way in the overridden
* versions of the above methods
*
* @since 7.4
*
* @return the collection of attributes that are not handled by the basic
* implementation
*/
protected Collection<String> getCustomAttributes() {
ArrayList<String> l = new ArrayList<String>(
Arrays.asList(customAttributes));
if (this instanceof Focusable) {
l.add("tab-index");
l.add("tabindex");
}
return l;
}
private static final String[] customAttributes = new String[] { "width",
"height", "debug-id", "error", "width-auto", "height-auto",
"width-full", "height-full", "size-auto", "size-full", "immediate",
"locale", "read-only", "_id" };
    /*
     * Writes the component configuration to a declarative design element,
     * emitting only values that differ from the defaults of the class.
     *
     * @see com.vaadin.ui.Component#writeDesign(org.jsoup.nodes.Element,
     * com.vaadin.ui.declarative.DesignContext)
     */
    @Override
    public void writeDesign(Element design, DesignContext designContext) {
        AbstractComponent def = designContext.getDefaultInstance(this);
        Attributes attr = design.attributes();
        // handle default attributes
        for (String attribute : getDefaultAttributes()) {
            DesignAttributeHandler.writeAttribute(this, attribute, attr, def);
        }
        // handle immediate: only written when the value was set explicitly
        if (explicitImmediateValue != null) {
            DesignAttributeHandler.writeAttribute("immediate", attr,
                    explicitImmediateValue, def.isImmediate(), Boolean.class);
        }
        // handle locale: only written when it differs from the parent's locale
        if (getLocale() != null
                && (getParent() == null || !getLocale().equals(
                        getParent().getLocale()))) {
            design.attr("locale", getLocale().toString());
        }
        // handle size
        writeSize(attr, def);
        // handle component error
        String errorMsg = getComponentError() != null ? getComponentError()
                .getFormattedHtmlMessage() : null;
        String defErrorMsg = def.getComponentError() != null ? def
                .getComponentError().getFormattedHtmlMessage() : null;
        if (!SharedUtil.equals(errorMsg, defErrorMsg)) {
            attr.put("error", errorMsg);
        }
        // handle tab index
        if (this instanceof Focusable) {
            DesignAttributeHandler.writeAttribute("tabindex", attr,
                    ((Focusable) this).getTabIndex(),
                    ((Focusable) def).getTabIndex(), Integer.class);
        }
    }
    /*
     * Actions
     */
    /**
     * Gets the {@link ActionManager} used to manage the
     * {@link ShortcutListener}s added to this component.
     *
     * @return the ActionManager in use, created lazily on first access
     */
    protected ActionManager getActionManager() {
        if (actionManager == null) {
            actionManager = new ConnectorActionManager(this);
            // If we are already attached, hook the manager up to its viewer
            // immediately.
            setActionManagerViewer();
        }
        return actionManager;
    }
    /**
     * Set a viewer for the action manager to be the parent sub window (if the
     * component is in a window) or the UI (otherwise). This is still a
     * simplification of the real case as this should be handled by the parent
     * VOverlay (on the client side) if the component is inside an VOverlay
     * component.
     */
    private void setActionManagerViewer() {
        if (actionManager != null && getUI() != null) {
            // Attached and has action manager
            Window w = findAncestor(Window.class);
            if (w != null) {
                actionManager.setViewer(w);
            } else {
                actionManager.setViewer(getUI());
            }
        }
    }
    /**
     * Adds a shortcut listener to this component. The backing
     * {@link ActionManager} is created on demand.
     *
     * @param shortcut
     *            the shortcut listener to add
     */
    public void addShortcutListener(ShortcutListener shortcut) {
        getActionManager().addAction(shortcut);
    }
    /**
     * Removes a previously added shortcut listener. Does nothing if no action
     * manager has been created yet.
     *
     * @param shortcut
     *            the shortcut listener to remove
     */
    public void removeShortcutListener(ShortcutListener shortcut) {
        if (actionManager != null) {
            actionManager.removeAction(shortcut);
        }
    }
/**
* Determine whether a <code>content</code> component is equal to, or the
* ancestor of this component.
*
* @param content
* the potential ancestor element
* @return <code>true</code> if the relationship holds
*/
protected boolean isOrHasAncestor(Component content) {
if (content instanceof HasComponents) {
for (Component parent = this; parent != null; parent = parent
.getParent()) {
if (parent.equals(content)) {
return true;
}
}
}
return false;
}
private static final Logger getLogger() {
return Logger.getLogger(AbstractComponent.class.getName());
}
}
| |
/*******************************************************************************
* Copyright FUJITSU LIMITED 2017
*******************************************************************************/
package org.oscm.serviceprovisioningservice.bean;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.ejb.SessionContext;
import javax.persistence.Query;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.MockitoAnnotations;
import org.oscm.dataservice.local.DataService;
import org.oscm.domobjects.CatalogEntry;
import org.oscm.domobjects.Marketplace;
import org.oscm.domobjects.Organization;
import org.oscm.domobjects.OrganizationRole;
import org.oscm.domobjects.OrganizationToRole;
import org.oscm.domobjects.PlatformUser;
import org.oscm.domobjects.Product;
import org.oscm.domobjects.PublicLandingpage;
import org.oscm.domobjects.RevenueShareModel;
import org.oscm.domobjects.Subscription;
import org.oscm.domobjects.enums.RevenueShareModelType;
import org.oscm.landingpageService.local.LandingpageServiceLocal;
import org.oscm.internal.types.enumtypes.OrganizationRoleType;
import org.oscm.internal.types.enumtypes.ServiceStatus;
import org.oscm.internal.types.enumtypes.ServiceType;
import org.oscm.internal.types.exception.IllegalArgumentException;
import org.oscm.internal.types.exception.ObjectNotFoundException;
import org.oscm.internal.types.exception.OperationNotPermittedException;
import org.oscm.internal.types.exception.SaaSSystemException;
import org.oscm.internal.types.exception.ServiceOperationException;
import org.oscm.internal.types.exception.ServiceStateException;
import org.oscm.internal.types.exception.ValidationException;
import org.oscm.internal.types.exception.ValidationException.ReasonEnum;
public class ServiceProvisioningPartnerServiceLocalBeanTest {
    // Bean under test (spied so individual methods can be stubbed in setup()).
    private ServiceProvisioningPartnerServiceLocalBean partnerBean;
    // Mocked data service injected into the bean.
    private DataService ds;
    // Mocked landing page service injected into the bean.
    private LandingpageServiceLocal lpService;
    // Argument captors initialized by MockitoAnnotations.initMocks(this);
    // presumably used by verification in tests outside this excerpt.
    @Captor
    ArgumentCaptor<Marketplace> lpCaptor;
    @Captor
    ArgumentCaptor<Product> productCaptor;
    // Readable flags for the loadProduct(key, performStatusCheck) parameter.
    static boolean STATUS_CHECK_NEEDED = true;
    static boolean STATUS_CHECK_NOT_NEEDED = false;
    /**
     * Creates the spied bean under test and wires in mocked collaborators
     * before each test.
     */
    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
        partnerBean = spy(new ServiceProvisioningPartnerServiceLocalBean());
        ds = mock(DataService.class);
        lpService = mock(LandingpageServiceLocal.class);
        partnerBean.dm = ds;
        partnerBean.sessionCtx = mock(SessionContext.class);
        partnerBean.landingpageService = lpService;
        // Landing page removal is irrelevant here; make it a no-op.
        doNothing().when(lpService).removeProductFromLandingpage(
                any(Marketplace.class), any(Product.class));
    }
    /** Loading a DELETED product with the status check enabled must fail. */
    @Test(expected = ServiceStateException.class)
    public void loadProduct_StatusCheck_DeletedService() throws Exception {
        // given
        Product prod = new Product();
        prod.setStatus(ServiceStatus.DELETED);
        doReturn(prod).when(ds).getReference(eq(Product.class),
                eq(prod.getKey()));
        // when
        Product loadedProd = partnerBean.loadProduct(prod.getKey(),
                STATUS_CHECK_NEEDED);
        // then
        assertEquals(prod, loadedProd);
    }
    /** Loading an OBSOLETE product with the status check enabled must fail. */
    @Test(expected = ServiceStateException.class)
    public void loadProduct_StatusCheck_ObsoleteService() throws Exception {
        // given
        Product prod = new Product();
        prod.setStatus(ServiceStatus.OBSOLETE);
        doReturn(prod).when(ds).getReference(eq(Product.class),
                eq(prod.getKey()));
        // when
        Product loadedProd = partnerBean.loadProduct(prod.getKey(),
                STATUS_CHECK_NEEDED);
        // then
        assertEquals(prod, loadedProd);
    }
    /** A SUSPENDED product passes the status check and is returned. */
    @Test
    public void loadProduct_StatusCheck_SuspendedService() throws Exception {
        // given
        Product prod = new Product();
        prod.setStatus(ServiceStatus.SUSPENDED);
        doReturn(prod).when(ds).getReference(eq(Product.class),
                eq(prod.getKey()));
        // when
        Product loadedProd = partnerBean.loadProduct(prod.getKey(),
                STATUS_CHECK_NEEDED);
        // then
        assertEquals(prod, loadedProd);
    }
    /** An INACTIVE product passes the status check and is returned. */
    @Test
    public void loadProduct_StatusCheck_InactiveService() throws Exception {
        // given
        Product prod = new Product();
        prod.setStatus(ServiceStatus.INACTIVE);
        doReturn(prod).when(ds).getReference(eq(Product.class),
                eq(prod.getKey()));
        // when
        Product loadedProd = partnerBean.loadProduct(prod.getKey(),
                STATUS_CHECK_NEEDED);
        // then
        assertEquals(prod, loadedProd);
    }
    /** An ACTIVE product passes the status check and is returned. */
    @Test
    public void loadProduct_StatusCheck_ActiveService() throws Exception {
        // given
        Product prod = new Product();
        prod.setStatus(ServiceStatus.ACTIVE);
        doReturn(prod).when(ds).getReference(eq(Product.class),
                eq(prod.getKey()));
        // when
        Product loadedProd = partnerBean.loadProduct(prod.getKey(),
                STATUS_CHECK_NEEDED);
        // then
        assertEquals(prod, loadedProd);
    }
public void loadProduct_NoStatusCheck_DeletedService() throws Exception {
// given
Product prod = new Product();
prod.setStatus(ServiceStatus.DELETED);
doReturn(prod).when(ds).getReference(eq(Product.class),
eq(prod.getKey()));
// when
Product loadedProd = partnerBean.loadProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
// then
assertEquals(prod, loadedProd);
}
public void loadProduct_NoStatusCheck_ObsoleteService() throws Exception {
// given
Product prod = new Product();
prod.setStatus(ServiceStatus.OBSOLETE);
doReturn(prod).when(ds).getReference(eq(Product.class),
eq(prod.getKey()));
// when
Product loadedProd = partnerBean.loadProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
// then
assertEquals(prod, loadedProd);
}
    /** A SUSPENDED product is returned when the status check is disabled. */
    @Test
    public void loadProduct_NoStatusCheck_SuspendedService() throws Exception {
        // given
        Product prod = new Product();
        prod.setStatus(ServiceStatus.SUSPENDED);
        doReturn(prod).when(ds).getReference(eq(Product.class),
                eq(prod.getKey()));
        // when
        Product loadedProd = partnerBean.loadProduct(prod.getKey(),
                STATUS_CHECK_NOT_NEEDED);
        // then
        assertEquals(prod, loadedProd);
    }
    /** An INACTIVE product is returned when the status check is disabled. */
    @Test
    public void loadProduct_NoStatusCheck_InactiveService() throws Exception {
        // given
        Product prod = new Product();
        prod.setStatus(ServiceStatus.INACTIVE);
        doReturn(prod).when(ds).getReference(eq(Product.class),
                eq(prod.getKey()));
        // when
        Product loadedProd = partnerBean.loadProduct(prod.getKey(),
                STATUS_CHECK_NOT_NEEDED);
        // then
        assertEquals(prod, loadedProd);
    }
    /** An ACTIVE product is returned when the status check is disabled. */
    @Test
    public void loadProduct_NoStatusCheck_ActiveService() throws Exception {
        // given
        Product prod = new Product();
        prod.setStatus(ServiceStatus.ACTIVE);
        doReturn(prod).when(ds).getReference(eq(Product.class),
                eq(prod.getKey()));
        // when
        Product loadedProd = partnerBean.loadProduct(prod.getKey(),
                STATUS_CHECK_NOT_NEEDED);
        // then
        assertEquals(prod, loadedProd);
    }
    /** Platform operators may access any product without an ownership check. */
    @Test
    public void verifyOwningPermission_AsPlatformOperator() throws Exception {
        // given
        Organization org = new Organization();
        Set<OrganizationToRole> roles = new HashSet<OrganizationToRole>();
        roles.add(createOrgToRole(OrganizationRoleType.PLATFORM_OPERATOR));
        roles.add(createOrgToRole(OrganizationRoleType.SUPPLIER));
        roles.add(createOrgToRole(OrganizationRoleType.TECHNOLOGY_PROVIDER));
        org.setGrantedRoles(roles);
        PlatformUser user = new PlatformUser();
        user.setOrganization(org);
        doReturn(user).when(ds).getCurrentUser();
        // when
        partnerBean.verifyOwningPermission(new Product());
        // then no check is done
    }
    /** Marketplace owners may access any product without an ownership check. */
    @Test
    public void verifyOwningPermission_AsMarketplaceOwner() throws Exception {
        // given
        Organization org = new Organization();
        Set<OrganizationToRole> roles = new HashSet<OrganizationToRole>();
        roles.add(createOrgToRole(OrganizationRoleType.MARKETPLACE_OWNER));
        roles.add(createOrgToRole(OrganizationRoleType.SUPPLIER));
        roles.add(createOrgToRole(OrganizationRoleType.TECHNOLOGY_PROVIDER));
        org.setGrantedRoles(roles);
        PlatformUser user = new PlatformUser();
        user.setOrganization(org);
        doReturn(user).when(ds).getCurrentUser();
        // when
        partnerBean.verifyOwningPermission(new Product());
        // then no check is done
    }
    /** A supplier/technology provider must not access a foreign product. */
    @Test(expected = OperationNotPermittedException.class)
    public void verifyOwningPermission_AsSupTp() throws Exception {
        // given
        Organization org = new Organization();
        Set<OrganizationToRole> roles = new HashSet<OrganizationToRole>();
        roles.add(createOrgToRole(OrganizationRoleType.SUPPLIER));
        roles.add(createOrgToRole(OrganizationRoleType.TECHNOLOGY_PROVIDER));
        roles.add(createOrgToRole(OrganizationRoleType.CUSTOMER));
        org.setGrantedRoles(roles);
        PlatformUser user = new PlatformUser();
        user.setOrganization(org);
        doReturn(user).when(ds).getCurrentUser();
        // when
        partnerBean.verifyOwningPermission(new Product());
    }
    /** A broker must not access a foreign product. */
    @Test(expected = OperationNotPermittedException.class)
    public void verifyOwningPermission_AsBroker() throws Exception {
        // given
        Organization org = new Organization();
        Set<OrganizationToRole> roles = new HashSet<OrganizationToRole>();
        roles.add(createOrgToRole(OrganizationRoleType.BROKER));
        roles.add(createOrgToRole(OrganizationRoleType.CUSTOMER));
        org.setGrantedRoles(roles);
        PlatformUser user = new PlatformUser();
        user.setOrganization(org);
        doReturn(user).when(ds).getCurrentUser();
        // when
        partnerBean.verifyOwningPermission(new Product());
    }
    /** A reseller must not access a foreign product. */
    @Test(expected = OperationNotPermittedException.class)
    public void verifyOwningPermission_AsReseller() throws Exception {
        // given
        Organization org = new Organization();
        Set<OrganizationToRole> roles = new HashSet<OrganizationToRole>();
        roles.add(createOrgToRole(OrganizationRoleType.RESELLER));
        roles.add(createOrgToRole(OrganizationRoleType.CUSTOMER));
        org.setGrantedRoles(roles);
        PlatformUser user = new PlatformUser();
        user.setOrganization(org);
        doReturn(user).when(ds).getCurrentUser();
        // when
        partnerBean.verifyOwningPermission(new Product());
    }
    /** A plain customer must not access a foreign product. */
    @Test(expected = OperationNotPermittedException.class)
    public void verifyOwningPermission_AsCustomer() throws Exception {
        // given
        Organization org = new Organization();
        Set<OrganizationToRole> roles = new HashSet<OrganizationToRole>();
        roles.add(createOrgToRole(OrganizationRoleType.CUSTOMER));
        org.setGrantedRoles(roles);
        PlatformUser user = new PlatformUser();
        user.setOrganization(org);
        doReturn(user).when(ds).getCurrentUser();
        // when
        partnerBean.verifyOwningPermission(new Product());
    }
    /** The vendor organization of a product passes the ownership check. */
    @Test
    public void verifyOwningPermission_WithOwnProduct() throws Exception {
        // given
        Organization org = new Organization();
        Set<OrganizationToRole> roles = new HashSet<OrganizationToRole>();
        roles.add(createOrgToRole(OrganizationRoleType.SUPPLIER));
        roles.add(createOrgToRole(OrganizationRoleType.TECHNOLOGY_PROVIDER));
        roles.add(createOrgToRole(OrganizationRoleType.CUSTOMER));
        org.setGrantedRoles(roles);
        PlatformUser user = new PlatformUser();
        user.setOrganization(org);
        doReturn(user).when(ds).getCurrentUser();
        Product prod = new Product();
        prod.setVendor(org);
        // when
        partnerBean.verifyOwningPermission(prod);
        // then no exception occurs
    }
private OrganizationToRole createOrgToRole(OrganizationRoleType role) {
OrganizationToRole orgToRole = new OrganizationToRole();
OrganizationRole orgRole = new OrganizationRole();
orgRole.setRoleName(role);
orgToRole.setOrganizationRole(orgRole);
return orgToRole;
}
    /** A plain partner-specific copy (template set, no owner) is allowed. */
    @Test
    public void checkTemplateOrPartnerSpecificCopy_PartnerCopiesAllowed()
            throws Exception {
        // given a non subscription-specific copy
        Product prod = new Product();
        prod.setTemplate(new Product());
        // when
        partnerBean.checkTemplateOrPartnerSpecificCopy(prod);
    }
    /** A subscription-specific copy must be rejected. */
    @Test(expected = ServiceOperationException.class)
    public void checkTemplateOrPartnerSpecificCopy_SubscriptionCopiesNotAllowed()
            throws Exception {
        // given a subscription-specific copy
        Product prod = new Product();
        prod.setTemplate(new Product());
        prod.setOwningSubscription(new Subscription());
        // when
        partnerBean.checkTemplateOrPartnerSpecificCopy(prod);
    }
    /** A customer-specific copy must be rejected. */
    @Test(expected = ServiceOperationException.class)
    public void checkTemplateOrPartnerSpecificCopy_CustomerSpecificCopiesNotAllowed()
            throws Exception {
        // given a customer-specific copy (target customer set)
        Product prod = new Product();
        prod.setTemplate(new Product());
        prod.setTargetCustomer(new Organization());
        // when
        partnerBean.checkTemplateOrPartnerSpecificCopy(prod);
    }
    /** Missing price models on a partner copy's catalog entry must fail. */
    @Test(expected = SaaSSystemException.class)
    public void validateRevenueShareOfProductCopy_NoPriceModel() {
        // given a catalog entry with a product which is a partner-specific copy
        // where the price model for the catalog entry is null
        Product prod = new Product();
        prod.setTemplate(new Product());
        CatalogEntry entry = new CatalogEntry();
        entry.setProduct(prod);
        // when
        partnerBean.validateRevenueShareOfProductCopy(entry);
    }
    /** Broker and reseller price models present: validation passes. */
    @Test
    public void validateRevenueShareOfProductCopy() {
        // given a catalog entry with a product which is a partner-specific copy
        // where both partner price models are set
        Product prod = new Product();
        prod.setTemplate(new Product());
        CatalogEntry entry = new CatalogEntry();
        entry.setProduct(prod);
        entry.setBrokerPriceModel(createRevenueModel(RevenueShareModelType.BROKER_REVENUE_SHARE));
        entry.setResellerPriceModel(createRevenueModel(RevenueShareModelType.RESELLER_REVENUE_SHARE));
        // when
        partnerBean.validateRevenueShareOfProductCopy(entry);
    }
@Test(expected = NullPointerException.class)
public void getPriceModelsForEntry_NoBrokerPriceModelForMarketplace() {
// given a catalog entry whose marketplace does not have a broker price
// model
CatalogEntry ce = new CatalogEntry();
ce.setMarketplace(new Marketplace("mId"));
// when
partnerBean.getPriceModelsForEntry(ce);
}
@Test(expected = NullPointerException.class)
public void getPriceModelsForEntry_NoResellerPriceModelForMarketplace() {
// given a catalog entry whose marketplace does not have a reseller
// price model
CatalogEntry ce = new CatalogEntry();
Marketplace mp = new Marketplace("mId");
mp.setBrokerPriceModel(createRevenueModel(RevenueShareModelType.BROKER_REVENUE_SHARE));
ce.setMarketplace(mp);
// when
partnerBean.getPriceModelsForEntry(ce);
}
// When the catalog entry itself has no partner price models, both broker and
// reseller models are taken from the marketplace defaults (zero share here).
@Test
public void getPriceModelsForEntry_NoBrokerPriceModelForCatalogEntry() {
// given a catalog entry with no broker price model defined.
CatalogEntry ce = new CatalogEntry();
Marketplace mp = new Marketplace("mId");
mp.setBrokerPriceModel(createRevenueModel(RevenueShareModelType.BROKER_REVENUE_SHARE));
mp.setResellerPriceModel(createRevenueModel(RevenueShareModelType.RESELLER_REVENUE_SHARE));
ce.setMarketplace(mp);
// when
Map<RevenueShareModelType, RevenueShareModel> revenueShareModels = partnerBean
.getPriceModelsForEntry(ce);
// then both model types are present and mirror the marketplace defaults
assertNotNull(revenueShareModels);
RevenueShareModel brokerPriceModel = revenueShareModels
.get(RevenueShareModelType.BROKER_REVENUE_SHARE);
RevenueShareModel resellerPriceModel = revenueShareModels
.get(RevenueShareModelType.RESELLER_REVENUE_SHARE);
assertNotNull(brokerPriceModel);
assertNotNull(resellerPriceModel);
assertEquals(BigDecimal.ZERO, brokerPriceModel.getRevenueShare());
assertEquals(RevenueShareModelType.BROKER_REVENUE_SHARE,
brokerPriceModel.getRevenueShareModelType());
assertEquals(BigDecimal.ZERO, resellerPriceModel.getRevenueShare());
assertEquals(RevenueShareModelType.RESELLER_REVENUE_SHARE,
resellerPriceModel.getRevenueShareModelType());
}
// When the catalog entry has only a broker price model, the reseller model is
// taken from the marketplace default; both end up in the returned map.
@Test
public void getPriceModelsForEntry_NoResellerPriceModelForCatalogEntry() {
// given a catalog entry with no reseller price model defined.
CatalogEntry ce = new CatalogEntry();
Marketplace mp = new Marketplace("mId");
mp.setBrokerPriceModel(createRevenueModel(RevenueShareModelType.BROKER_REVENUE_SHARE));
mp.setResellerPriceModel(createRevenueModel(RevenueShareModelType.RESELLER_REVENUE_SHARE));
ce.setBrokerPriceModel(createRevenueModel(RevenueShareModelType.BROKER_REVENUE_SHARE));
ce.setMarketplace(mp);
// when
Map<RevenueShareModelType, RevenueShareModel> revenueShareModels = partnerBean
.getPriceModelsForEntry(ce);
// then
assertNotNull(revenueShareModels);
RevenueShareModel brokerPriceModel = revenueShareModels
.get(RevenueShareModelType.BROKER_REVENUE_SHARE);
RevenueShareModel resellerPriceModel = revenueShareModels
.get(RevenueShareModelType.RESELLER_REVENUE_SHARE);
assertNotNull(brokerPriceModel);
assertNotNull(resellerPriceModel);
assertEquals(BigDecimal.ZERO, brokerPriceModel.getRevenueShare());
assertEquals(RevenueShareModelType.BROKER_REVENUE_SHARE,
brokerPriceModel.getRevenueShareModelType());
assertEquals(BigDecimal.ZERO, resellerPriceModel.getRevenueShare());
assertEquals(RevenueShareModelType.RESELLER_REVENUE_SHARE,
resellerPriceModel.getRevenueShareModelType());
}
// Passing a null revenue share model is rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void saveOperatorRevenueShare_IllegalArgumentException()
throws Exception {
// when
partnerBean.saveOperatorRevenueShare(101L, null, 1);
}
// An unknown service key propagates the ObjectNotFoundException from the
// data manager.
@Test(expected = ObjectNotFoundException.class)
public void saveOperatorRevenueShare_ObjectNotFound() throws Exception {
// given
doThrow(new ObjectNotFoundException()).when(partnerBean.dm)
.getReference(eq(Product.class), eq(101L));
// when
partnerBean.saveOperatorRevenueShare(101L, new RevenueShareModel(), 0);
}
// Saving an operator revenue share on a product copy (template set) is not a
// valid operation and must fail with ServiceOperationException.
@Test(expected = ServiceOperationException.class)
public void saveOperatorRevenueShare_ServiceOperationException()
throws Exception {
// given a product that is a copy of a template
Product template = givenProduct(100L);
Product product = givenProduct(101L);
product.setTemplate(template);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(101L));
doNothing().when(partnerBean).verifyOwningPermission(eq(product));
// when
partnerBean.saveOperatorRevenueShare(101L, new RevenueShareModel(), 0);
}
// A SaaSSystemException raised by the operator-revenue-share validation is
// propagated unchanged with its message intact.
@Test
public void saveOperatorRevenueShare_MandatoryForTemplates()
throws Exception {
// given
Product product = givenProduct(101L);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(101L));
doNothing().when(partnerBean).verifyOwningPermission(eq(product));
doThrow(
new SaaSSystemException(
"Template without operator revenue share")).when(
partnerBean).validateOperatorRevenueShare(eq(product));
// when
try {
partnerBean.saveOperatorRevenueShare(101L, new RevenueShareModel(),
0);
fail();
} catch (SaaSSystemException e) {
// then the validation message is preserved
assertTrue(e.getMessage().contains(
"Template without operator revenue share"));
}
}
// A revenue share outside [0,100] triggers a ValidationException
// (VALUE_NOT_IN_RANGE) and marks the transaction rollback-only.
@Test
public void saveOperatorRevenueShare_ValidationException() throws Exception {
// given a template product with an existing operator price model
RevenueShareModel oldRevenue = givenRevenueShareModel(1L,
RevenueShareModelType.OPERATOR_REVENUE_SHARE);
oldRevenue.setRevenueShare(BigDecimal.valueOf(10L));
Product product = givenProduct(1L);
product.setType(ServiceType.TEMPLATE);
product.getCatalogEntries().get(0).setOperatorPriceModel(oldRevenue);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(1L));
doNothing().when(partnerBean).verifyOwningPermission(eq(product));
// 101 is out of the valid percentage range
RevenueShareModel newRevenue = givenRevenueShareModel(2L,
RevenueShareModelType.OPERATOR_REVENUE_SHARE);
newRevenue.setRevenueShare(BigDecimal.valueOf(101L));
try {
// when
partnerBean.saveOperatorRevenueShare(1L, newRevenue, 0);
fail();
} catch (ValidationException e) {
// then
assertEquals(ReasonEnum.VALUE_NOT_IN_RANGE, e.getReason());
assertEquals(
ServiceProvisioningPartnerServiceLocalBean.FIELD_REVENUE_SHARE
+ " for "
+ RevenueShareModelType.OPERATOR_REVENUE_SHARE,
e.getMember());
verify(partnerBean.sessionCtx, times(1)).setRollbackOnly();
}
}
// Happy path: a valid new share value is written into the existing operator
// price model of the template's catalog entry.
@Test
public void saveOperatorRevenueShare() throws Exception {
// given
RevenueShareModel revenueToBeUpdated = givenRevenueShareModel(1L,
RevenueShareModelType.OPERATOR_REVENUE_SHARE);
revenueToBeUpdated.setRevenueShare(BigDecimal.valueOf(10L));
Product product = givenProduct(1L);
product.setType(ServiceType.TEMPLATE);
product.getCatalogEntries().get(0)
.setOperatorPriceModel(revenueToBeUpdated);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(1L));
doNothing().when(partnerBean).verifyOwningPermission(eq(product));
RevenueShareModel newRevenue = givenRevenueShareModel(1L,
RevenueShareModelType.OPERATOR_REVENUE_SHARE);
newRevenue.setRevenueShare(BigDecimal.valueOf(20L));
// when
partnerBean.saveOperatorRevenueShare(1L, newRevenue, 0);
// then the existing model was updated in place
assertEquals(BigDecimal.valueOf(20L),
revenueToBeUpdated.getRevenueShare());
}
// An unknown service key propagates ObjectNotFoundException from the
// data manager.
@Test(expected = ObjectNotFoundException.class)
public void getOperatorRevenueShare_ObjectNotFound() throws Exception {
// given
doThrow(new ObjectNotFoundException()).when(partnerBean.dm)
.getReference(eq(Product.class), eq(101L));
// when
partnerBean.getOperatorRevenueShare(101L);
}
// A failed owning-permission check is propagated as
// OperationNotPermittedException.
@Test(expected = OperationNotPermittedException.class)
public void getOperatorRevenueShare_OperationNotPermittedException()
throws Exception {
// given
Product product = givenProduct(101L);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(101L));
doThrow(new OperationNotPermittedException()).when(partnerBean)
.verifyOwningPermission(eq(product));
// when
partnerBean.getOperatorRevenueShare(101L);
}
// A SaaSSystemException from the operator-revenue-share validation is
// propagated unchanged when reading the share.
@Test
public void getOperatorRevenueShare_MandatoryForTemplates()
throws Exception {
// given
Product product = givenProduct(101L);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(101L));
doNothing().when(partnerBean).verifyOwningPermission(eq(product));
doThrow(
new SaaSSystemException(
"Template without operator revenue share")).when(
partnerBean).validateOperatorRevenueShare(eq(product));
// when
try {
partnerBean.getOperatorRevenueShare(101L);
fail();
} catch (SaaSSystemException e) {
// then the validation message is preserved
assertTrue(e.getMessage().contains(
"Template without operator revenue share"));
}
}
// Happy path: the operator price model attached to the template's catalog
// entry is returned.
@Test
public void getOperatorRevenueShare() throws Exception {
// given
Product product = givenProduct(101L);
product.setType(ServiceType.TEMPLATE);
RevenueShareModel revenue = new RevenueShareModel();
revenue.setRevenueShare(BigDecimal.TEN);
product.getCatalogEntries().get(0).setOperatorPriceModel(revenue);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(101L));
doNothing().when(partnerBean).verifyOwningPermission(eq(product));
// when
RevenueShareModel result = partnerBean.getOperatorRevenueShare(101L);
// then
assertEquals(BigDecimal.TEN, result.getRevenueShare());
}
// For a partner template (service copy) there is no operator price model,
// so the result is null.
@Test
public void getOperatorRevenueShare_ServiceCopy() throws Exception {
// given
Product product = givenProduct(101L);
product.setType(ServiceType.PARTNER_TEMPLATE);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(101L));
doNothing().when(partnerBean).verifyOwningPermission(eq(product));
// when
RevenueShareModel result = partnerBean.getOperatorRevenueShare(101L);
// then
assertNull(result);
}
// A template with an operator price model on its catalog entry passes
// validation silently.
@Test
public void validateOperatorRevenueShare() {
// given
Product product = givenProduct(101L);
product.setType(ServiceType.TEMPLATE);
RevenueShareModel revenue = new RevenueShareModel();
revenue.setRevenueShare(BigDecimal.TEN);
product.getCatalogEntries().get(0).setOperatorPriceModel(revenue);
// when - must not throw
partnerBean.validateOperatorRevenueShare(product);
}
// A template without an operator price model is an inconsistency and fails
// with SaaSSystemException naming the service key.
@Test
public void validateOperatorRevenueShare_TemplateWithoutRevenue() {
// given
Product product = givenProduct(101L);
product.setType(ServiceType.TEMPLATE);
// when
try {
partnerBean.validateOperatorRevenueShare(product);
fail();
} catch (SaaSSystemException e) {
assertTrue(e
.getMessage()
.contains(
"The catalog entry for the service template 101 does not have an operator price model."));
}
}
// A partner template copy without an operator price model is valid and
// passes silently.
@Test
public void validateOperatorRevenueShare_Copy() {
// given
Product product = givenProduct(101L);
product.setType(ServiceType.PARTNER_TEMPLATE);
// when - must not throw
partnerBean.validateOperatorRevenueShare(product);
}
// A copy must NOT have an operator price model; if it does, validation fails
// with SaaSSystemException naming the service key.
@Test
public void validateOperatorRevenueShare_CopyWithRevenue() {
// given
Product product = givenProduct(101L);
product.setType(ServiceType.PARTNER_TEMPLATE);
RevenueShareModel revenue = new RevenueShareModel();
revenue.setRevenueShare(BigDecimal.TEN);
product.getCatalogEntries().get(0).setOperatorPriceModel(revenue);
// when
try {
partnerBean.validateOperatorRevenueShare(product);
fail();
} catch (SaaSSystemException e) {
assertTrue(e
.getMessage()
.contains(
"The catalog entry for the service copy 101 has an operator price model."));
}
}
// An unknown service key propagates ObjectNotFoundException from the
// data manager.
@Test(expected = ObjectNotFoundException.class)
public void getDefaultOperatorRevenueShare_ObjectNotFound()
throws Exception {
// given
doThrow(new ObjectNotFoundException()).when(partnerBean.dm)
.getReference(eq(Product.class), eq(101L));
// when
partnerBean.getDefaultOperatorRevenueShare(101L);
}
// A failed owning-permission check is propagated as
// OperationNotPermittedException.
@Test(expected = OperationNotPermittedException.class)
public void getDefaultOperatorRevenueShare_OperationNotPermittedException()
throws Exception {
// given
Product product = givenProduct(101L);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(101L));
doThrow(new OperationNotPermittedException()).when(partnerBean)
.verifyOwningPermission(eq(product));
// when
partnerBean.getDefaultOperatorRevenueShare(101L);
}
// A SaaSSystemException from the default operator-revenue-share validation
// is propagated unchanged with its message intact.
@Test
public void getDefaultOperatorRevenueShare_SaaSSystemException()
throws Exception {
// given
Product product = givenProduct(101L);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(101L));
doNothing().when(partnerBean).verifyOwningPermission(eq(product));
doThrow(
new SaaSSystemException(
"Template without operator revenue share")).when(
partnerBean).validateDefaultOperatorRevenueShare(eq(product));
// when
try {
partnerBean.getDefaultOperatorRevenueShare(101L);
fail();
} catch (SaaSSystemException e) {
assertTrue(e.getMessage().contains(
"Template without operator revenue share"));
}
}
// Happy path: the default operator price model is read from the supplier
// vendor organization of the product.
@Test
public void getDefaultOperatorRevenueShare() throws Exception {
// given
Product product = givenProduct(101L);
RevenueShareModel revenue = new RevenueShareModel();
revenue.setRevenueShare(BigDecimal.TEN);
product.setVendor(givenOrganization("oId",
OrganizationRoleType.SUPPLIER));
product.getVendor().setOperatorPriceModel(revenue);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(101L));
doNothing().when(partnerBean).verifyOwningPermission(eq(product));
// when
RevenueShareModel result = partnerBean
.getDefaultOperatorRevenueShare(101L);
// then
assertEquals(BigDecimal.TEN, result.getRevenueShare());
}
// A vendor organization without a supplier role has no default operator
// price model, so the result is null.
@Test
public void getDefaultOperatorRevenueShare_ServiceCopy() throws Exception {
// given
Product product = givenProduct(101L);
Organization supplier = new Organization();
product.setVendor(supplier);
doReturn(product).when(partnerBean.dm).getReference(eq(Product.class),
eq(101L));
doNothing().when(partnerBean).verifyOwningPermission(eq(product));
// when
RevenueShareModel result = partnerBean
.getDefaultOperatorRevenueShare(101L);
// then
assertNull(result);
}
// A supplier vendor with an operator price model passes validation silently.
@Test
public void validateDefaultOperatorRevenueShare() {
// given
Product product = givenProduct(101L);
RevenueShareModel revenue = new RevenueShareModel();
revenue.setRevenueShare(BigDecimal.TEN);
product.setVendor(givenOrganization("oId",
OrganizationRoleType.SUPPLIER));
product.getVendor().setOperatorPriceModel(revenue);
// when - must not throw
partnerBean.validateDefaultOperatorRevenueShare(product);
}
// A supplier vendor without an operator price model is an inconsistency and
// fails with SaaSSystemException naming the organization id.
@Test
public void validateDefaultOperatorRevenueShare_TemplateWithoutRevenue() {
// given
Product product = givenProduct(101L);
product.setVendor(givenOrganization("oId",
OrganizationRoleType.SUPPLIER));
// when
try {
partnerBean.validateDefaultOperatorRevenueShare(product);
fail();
} catch (SaaSSystemException e) {
assertTrue(e
.getMessage()
.contains(
"The supplier organization oId does not have an operator price model."));
}
}
// A non-supplier vendor without an operator price model is valid and passes
// silently.
@Test
public void validateDefaultOperatorRevenueShare_Copy() {
// given
Product product = givenProduct(101L);
product.setVendor(givenOrganization("oId",
OrganizationRoleType.TECHNOLOGY_PROVIDER));
// when - must not throw
partnerBean.validateDefaultOperatorRevenueShare(product);
}
// A non-supplier vendor must NOT have an operator price model; if it does,
// validation fails with SaaSSystemException naming the organization id.
@Test
public void validateDefaultOperatorRevenueShare_CopyWithRevenue() {
// given
Product product = givenProduct(101L);
RevenueShareModel revenue = new RevenueShareModel();
revenue.setRevenueShare(BigDecimal.TEN);
product.setVendor(givenOrganization("oId",
OrganizationRoleType.TECHNOLOGY_PROVIDER));
product.getVendor().setOperatorPriceModel(revenue);
// when
try {
partnerBean.validateDefaultOperatorRevenueShare(product);
fail();
} catch (SaaSSystemException e) {
assertTrue(e
.getMessage()
.contains(
"The non supplier organization oId has an operator price model."));
}
}
// Happy path with status check: a partner copy whose catalog entry defines
// broker and reseller price models returns both in the map.
@Test
public void getRevenueShareModelsForProduct() throws Exception {
// given a product whose catalog entry has revenue share models
Product prod = new Product();
List<CatalogEntry> entries = new ArrayList<CatalogEntry>();
CatalogEntry ce = new CatalogEntry();
ce.setProduct(prod);
entries.add(ce);
prod.setCatalogEntries(entries);
ce.setBrokerPriceModel(createRevenueModel(RevenueShareModelType.BROKER_REVENUE_SHARE));
ce.setResellerPriceModel(createRevenueModel(RevenueShareModelType.RESELLER_REVENUE_SHARE));
prod.setTemplate(new Product());
// a local spy is used here so the shared partnerBean stubbing is untouched
ServiceProvisioningPartnerServiceLocalBean mockBean = spy(new ServiceProvisioningPartnerServiceLocalBean());
doReturn(prod).when(mockBean).loadProduct(prod.getKey(),
STATUS_CHECK_NEEDED);
doNothing().when(mockBean).verifyOwningPermission(prod);
// when
Map<RevenueShareModelType, RevenueShareModel> revenueShareModels = mockBean
.getRevenueShareModelsForProduct(prod.getKey(),
STATUS_CHECK_NEEDED);
RevenueShareModel brokerPriceModel = revenueShareModels
.get(RevenueShareModelType.BROKER_REVENUE_SHARE);
RevenueShareModel resellerPriceModel = revenueShareModels
.get(RevenueShareModelType.RESELLER_REVENUE_SHARE);
// then
assertNotNull(revenueShareModels);
assertNotNull(brokerPriceModel);
assertNotNull(resellerPriceModel);
assertEquals(BigDecimal.ZERO, brokerPriceModel.getRevenueShare());
assertEquals(RevenueShareModelType.BROKER_REVENUE_SHARE,
brokerPriceModel.getRevenueShareModelType());
assertEquals(BigDecimal.ZERO, resellerPriceModel.getRevenueShare());
assertEquals(RevenueShareModelType.RESELLER_REVENUE_SHARE,
resellerPriceModel.getRevenueShareModelType());
}
// A partner copy whose catalog entry has no partner price models is an
// inconsistency and fails with SaaSSystemException.
@Test(expected = SaaSSystemException.class)
public void getRevenueShareModelsForProduct_NoPriceModelForCopy()
throws Exception {
// given a product which is a partner-specific copy
// where the price model for the catalog entry is null
Product prod = new Product();
List<CatalogEntry> entries = new ArrayList<CatalogEntry>();
CatalogEntry ce = new CatalogEntry();
ce.setProduct(prod);
entries.add(ce);
prod.setCatalogEntries(entries);
prod.setTemplate(new Product());
doReturn(prod).when(partnerBean).loadProduct(prod.getKey(),
STATUS_CHECK_NEEDED);
doNothing().when(partnerBean).verifyOwningPermission(prod);
// when
partnerBean.getRevenueShareModelsForProduct(prod.getKey(),
STATUS_CHECK_NEEDED);
}
// A template whose catalog entry has neither partner price models nor a
// marketplace is accepted without an exception (templates need no partner
// price models and there is no marketplace to copy defaults from).
// NOTE: renamed from getPartnerRevenueShareForService_... so the test name
// matches the method under test, consistent with the sibling tests.
@Test
public void getRevenueShareModelsForProduct_NoPriceModelAndMarketplaceForTemplate()
throws Exception {
// given a product which is a template where the price model of the
// catalog entry is null and where the marketplace of the catalog entry
// is null
Product prod = new Product();
List<CatalogEntry> entries = new ArrayList<CatalogEntry>();
CatalogEntry ce = new CatalogEntry();
ce.setProduct(prod);
entries.add(ce);
prod.setCatalogEntries(entries);
prod.setTemplate(prod);
doReturn(prod).when(partnerBean).loadProduct(prod.getKey(),
STATUS_CHECK_NEEDED);
doNothing().when(partnerBean).verifyOwningPermission(prod);
// when - must not throw
partnerBean.getRevenueShareModelsForProduct(prod.getKey(),
STATUS_CHECK_NEEDED);
}
// A template published on a marketplace without a broker price model fails
// with an NPE when the marketplace defaults are read.
@Test(expected = NullPointerException.class)
public void getRevenueShareModelsForProduct_NoBrokerPriceModelForMarketplace()
throws Exception {
// given
Product prod = new Product();
List<CatalogEntry> entries = new ArrayList<CatalogEntry>();
CatalogEntry ce = new CatalogEntry();
ce.setMarketplace(new Marketplace("mId"));
entries.add(ce);
prod.setCatalogEntries(entries);
prod.setTemplate(prod);
doReturn(prod).when(partnerBean).loadProduct(prod.getKey(),
STATUS_CHECK_NEEDED);
doNothing().when(partnerBean).verifyOwningPermission(prod);
// when
partnerBean.getRevenueShareModelsForProduct(prod.getKey(),
STATUS_CHECK_NEEDED);
}
// A marketplace with a broker but no reseller price model fails with an NPE
// when the reseller default is read.
@Test(expected = NullPointerException.class)
public void getRevenueShareModelsForProduct_NoResellerPriceModelForMarketplace()
throws Exception {
// given
Product prod = new Product();
List<CatalogEntry> entries = new ArrayList<CatalogEntry>();
CatalogEntry ce = new CatalogEntry();
Marketplace mp = new Marketplace("mId");
mp.setBrokerPriceModel(createRevenueModel(RevenueShareModelType.BROKER_REVENUE_SHARE));
ce.setMarketplace(mp);
entries.add(ce);
prod.setCatalogEntries(entries);
prod.setTemplate(prod);
doReturn(prod).when(partnerBean).loadProduct(prod.getKey(),
STATUS_CHECK_NEEDED);
doNothing().when(partnerBean).verifyOwningPermission(prod);
// when
partnerBean.getRevenueShareModelsForProduct(prod.getKey(),
STATUS_CHECK_NEEDED);
}
// Same as the broker-price-model NPE case, but without the service status
// check (STATUS_CHECK_NOT_NEEDED).
@Test(expected = NullPointerException.class)
public void getRevenueShareModelsForProduct_AllStates_NoBrokerPriceModelForMarketplace()
throws Exception {
// given
Product prod = new Product();
List<CatalogEntry> entries = new ArrayList<CatalogEntry>();
CatalogEntry ce = new CatalogEntry();
ce.setMarketplace(new Marketplace("mId"));
entries.add(ce);
prod.setCatalogEntries(entries);
prod.setTemplate(prod);
doReturn(prod).when(partnerBean).loadProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
doNothing().when(partnerBean).verifyOwningPermission(prod);
// when
partnerBean.getRevenueShareModelsForProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
}
// Same as the copy-without-price-models case, but without the service status
// check (STATUS_CHECK_NOT_NEEDED).
@Test(expected = SaaSSystemException.class)
public void getRevenueShareModelsForProduct_AllStates_NoPriceModelForCopy()
throws Exception {
// given a product which is a partner-specific copy
// where the price model for the catalog entry is null
Product prod = new Product();
List<CatalogEntry> entries = new ArrayList<CatalogEntry>();
CatalogEntry ce = new CatalogEntry();
ce.setProduct(prod);
entries.add(ce);
prod.setCatalogEntries(entries);
prod.setTemplate(new Product());
doReturn(prod).when(partnerBean).loadProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
doNothing().when(partnerBean).verifyOwningPermission(prod);
// when
partnerBean.getRevenueShareModelsForProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
}
// Same as the reseller-price-model NPE case, but without the service status
// check (STATUS_CHECK_NOT_NEEDED).
@Test(expected = NullPointerException.class)
public void getRevenueShareModelsForProduct_AllStates_NoResellerPriceModelForMarketplace()
throws Exception {
// given
Product prod = new Product();
List<CatalogEntry> entries = new ArrayList<CatalogEntry>();
CatalogEntry ce = new CatalogEntry();
Marketplace mp = new Marketplace("mId");
mp.setBrokerPriceModel(createRevenueModel(RevenueShareModelType.BROKER_REVENUE_SHARE));
ce.setMarketplace(mp);
entries.add(ce);
prod.setCatalogEntries(entries);
prod.setTemplate(prod);
doReturn(prod).when(partnerBean).loadProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
doNothing().when(partnerBean).verifyOwningPermission(prod);
// when
partnerBean.getRevenueShareModelsForProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
}
// Happy path without the service status check: both partner price models of
// the catalog entry are returned.
@Test
public void getRevenueShareModelsForProduct_AllStates() throws Exception {
// given a product whose catalog entry has revenue share models
Product prod = new Product();
List<CatalogEntry> entries = new ArrayList<CatalogEntry>();
CatalogEntry ce = new CatalogEntry();
ce.setProduct(prod);
entries.add(ce);
prod.setCatalogEntries(entries);
ce.setBrokerPriceModel(createRevenueModel(RevenueShareModelType.BROKER_REVENUE_SHARE));
ce.setResellerPriceModel(createRevenueModel(RevenueShareModelType.RESELLER_REVENUE_SHARE));
prod.setTemplate(new Product());
// a local spy is used here so the shared partnerBean stubbing is untouched
ServiceProvisioningPartnerServiceLocalBean mockBean = spy(new ServiceProvisioningPartnerServiceLocalBean());
doReturn(prod).when(mockBean).loadProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
doNothing().when(mockBean).verifyOwningPermission(prod);
// when
Map<RevenueShareModelType, RevenueShareModel> revenueShareModels = mockBean
.getRevenueShareModelsForProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
RevenueShareModel brokerPriceModel = revenueShareModels
.get(RevenueShareModelType.BROKER_REVENUE_SHARE);
RevenueShareModel resellerPriceModel = revenueShareModels
.get(RevenueShareModelType.RESELLER_REVENUE_SHARE);
// then
assertNotNull(revenueShareModels);
assertNotNull(brokerPriceModel);
assertNotNull(resellerPriceModel);
assertEquals(BigDecimal.ZERO, brokerPriceModel.getRevenueShare());
assertEquals(RevenueShareModelType.BROKER_REVENUE_SHARE,
brokerPriceModel.getRevenueShareModelType());
assertEquals(BigDecimal.ZERO, resellerPriceModel.getRevenueShare());
assertEquals(RevenueShareModelType.RESELLER_REVENUE_SHARE,
resellerPriceModel.getRevenueShareModelType());
}
// Without the status check even a DELETED product yields its partner price
// models.
@Test
public void getRevenueShareModelsForProduct_AllStates_DELETEDStatus()
throws Exception {
// given a product whose catalog entry has revenue share models
Product prod = new Product();
List<CatalogEntry> entries = new ArrayList<CatalogEntry>();
CatalogEntry ce = new CatalogEntry();
ce.setProduct(prod);
entries.add(ce);
prod.setCatalogEntries(entries);
prod.setStatus(ServiceStatus.DELETED);
ce.setBrokerPriceModel(createRevenueModel(RevenueShareModelType.BROKER_REVENUE_SHARE));
ce.setResellerPriceModel(createRevenueModel(RevenueShareModelType.RESELLER_REVENUE_SHARE));
prod.setTemplate(new Product());
// a local spy is used here so the shared partnerBean stubbing is untouched
ServiceProvisioningPartnerServiceLocalBean mockBean = spy(new ServiceProvisioningPartnerServiceLocalBean());
doReturn(prod).when(mockBean).loadProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
doNothing().when(mockBean).verifyOwningPermission(prod);
// when
Map<RevenueShareModelType, RevenueShareModel> revenueShareModels = mockBean
.getRevenueShareModelsForProduct(prod.getKey(),
STATUS_CHECK_NOT_NEEDED);
RevenueShareModel brokerPriceModel = revenueShareModels
.get(RevenueShareModelType.BROKER_REVENUE_SHARE);
RevenueShareModel resellerPriceModel = revenueShareModels
.get(RevenueShareModelType.RESELLER_REVENUE_SHARE);
// then
assertNotNull(revenueShareModels);
assertNotNull(brokerPriceModel);
assertNotNull(resellerPriceModel);
assertEquals(BigDecimal.ZERO, brokerPriceModel.getRevenueShare());
assertEquals(RevenueShareModelType.BROKER_REVENUE_SHARE,
brokerPriceModel.getRevenueShareModelType());
assertEquals(BigDecimal.ZERO, resellerPriceModel.getRevenueShare());
assertEquals(RevenueShareModelType.RESELLER_REVENUE_SHARE,
resellerPriceModel.getRevenueShareModelType());
}
// Resolves the (single) catalog entry of a product by service key.
@Test
public void getCatalogEntryForProduct() throws Exception {
// given
long serviceKey = 11111;
Product prod = new Product();
prod.setKey(serviceKey);
long catalogEntryKey = 22222;
CatalogEntry ce = new CatalogEntry();
ce.setKey(catalogEntryKey);
List<CatalogEntry> catalogEntries = new ArrayList<CatalogEntry>();
catalogEntries.add(ce);
prod.setCatalogEntries(catalogEntries);
doReturn(prod).when(partnerBean.dm).getReference(Product.class,
prod.getKey());
// when
CatalogEntry result = partnerBean.getCatalogEntryForProduct(serviceKey);
// then
assertNotNull(result);
}
// An unknown template key propagates ObjectNotFoundException from the
// data service.
@Test(expected = ObjectNotFoundException.class)
public void getPartnerProductsForTemplate_ObjectNotFoundExecption()
throws Exception {
// given
when(ds.getReference(eq(Product.class), anyLong())).thenThrow(
new ObjectNotFoundException());
// when
partnerBean.getPartnerProductsForTemplate(0);
// then - expected exception
}
// Passing a product that is itself a copy (template reference set) is not a
// template and fails with ServiceOperationException.
@Test(expected = ServiceOperationException.class)
public void getPartnerProductsForTemplate_NotTemplateError()
throws Exception {
// given
Product product = new Product();
product.setTemplate(new Product());
when(ds.getReference(eq(Product.class), anyLong())).thenReturn(product);
// when
partnerBean.getPartnerProductsForTemplate(1);
// then - expected exception
}
// Builds a revenue share model of the requested type with a zero share,
// as used by the marketplace/catalog-entry fixtures above.
private static RevenueShareModel createRevenueModel(
RevenueShareModelType type) {
RevenueShareModel model = new RevenueShareModel();
model.setRevenueShareModelType(type);
model.setRevenueShare(BigDecimal.ZERO);
return model;
}
// Verifies that the named query is parameterized with the vendor key, the
// requested product types, and the status filter (OBSOLETE, DELETED).
@Test
public void executeQueryLoadTemplateServices() {
// given
Query q = mock(Query.class);
when(ds.createNamedQuery(anyString())).thenReturn(q);
doReturn(new ArrayList<Product>()).when(q).getResultList();
Organization vendor = new Organization();
vendor.setKey(1L);
// when
partnerBean.executeQueryLoadTemplateServices(
EnumSet.of(ServiceType.TEMPLATE), vendor);
// then
verify(q).setParameter("vendorKey", Long.valueOf(vendor.getKey()));
verify(q)
.setParameter("productTypes", EnumSet.of(ServiceType.TEMPLATE));
verify(q).setParameter("filterOutWithStatus",
EnumSet.of(ServiceStatus.OBSOLETE, ServiceStatus.DELETED));
}
// Verifies that the template query is executed for the organization of the
// current user.
@SuppressWarnings("unchecked")
@Test
public void loadSuppliedTemplateServices() {
// given
Organization o = new Organization();
PlatformUser u = new PlatformUser();
u.setOrganization(o);
when(ds.getCurrentUser()).thenReturn(u);
doReturn(new ArrayList<Product>()).when(partnerBean)
.executeQueryLoadTemplateServices(any(EnumSet.class),
any(Organization.class));
// when
partnerBean.loadSuppliedTemplateServices();
// then the query is scoped to the caller's organization
verify(partnerBean).executeQueryLoadTemplateServices(
any(EnumSet.class), eq(o));
}
// Verifies that the landing page service is called with the marketplace and
// product of the catalog entry.
@Test
public void removeProductFromLandingpage_Ok() throws Exception {
// given
Product product = givenProduct(1111);
// when
partnerBean.removeProductFromLandingpage(product);
// then
verify(lpService, times(1)).removeProductFromLandingpage(
lpCaptor.capture(), productCaptor.capture());
assertEquals("mp", lpCaptor.getValue().getMarketplaceId());
assertEquals(1111, productCaptor.getValue().getKey());
}
// Builds a product with the given key and a single catalog entry that is
// published on marketplace "mp" with a public landing page attached.
private Product givenProduct(long key) {
Marketplace marketplace = new Marketplace();
marketplace.setMarketplaceId("mp");
PublicLandingpage page = new PublicLandingpage();
page.setMarketplace(marketplace);
marketplace.setPublicLandingpage(page);
Product product = new Product();
product.setKey(key);
CatalogEntry entry = new CatalogEntry();
entry.setProduct(product);
entry.setMarketplace(marketplace);
List<CatalogEntry> catalogEntries = new ArrayList<CatalogEntry>();
catalogEntries.add(entry);
product.setCatalogEntries(catalogEntries);
return product;
}
// Builds an organization with the given id and grants it the given roles.
private Organization givenOrganization(String oId,
OrganizationRoleType... roles) {
Organization organization = new Organization();
organization.setOrganizationId(oId);
Set<OrganizationToRole> grantedRoles = new HashSet<OrganizationToRole>();
for (OrganizationRoleType currentRole : roles) {
grantedRoles.add(createOrgToRole(currentRole));
}
organization.setGrantedRoles(grantedRoles);
return organization;
}
// Builds a revenue share model with the given key and type and a zero share.
private RevenueShareModel givenRevenueShareModel(long key,
RevenueShareModelType type) {
RevenueShareModel model = new RevenueShareModel();
model.setRevenueShareModelType(type);
model.setRevenueShare(BigDecimal.ZERO);
model.setKey(key);
return model;
}
}
| |
/*
* Copyright 2017 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.Date;
import io.realm.entities.AllJavaTypes;
import io.realm.entities.BacklinksTarget;
import io.realm.entities.NullTypes;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@RunWith(AndroidJUnit4.class)
public class LinkingObjectsQueryTests extends QueryTests {
// All the basic tests for is[Not](Equal|Null) are in RealmQueryTests
// Query on a field descriptor starting with a backlink
// Build a simple object graph.
// The test objects are:
// gen1
// / \
// gen2A gen2B
// \\ //
// gen3
// / = object ref
// // = list ref
// Queries with a field descriptor whose FIRST element is a linking-objects
// (backlink) field; only gen1 has backlinks via the object field.
@Test
public void query_startWithBacklink() {
realm.beginTransaction();
AllJavaTypes gen1 = realm.createObject(AllJavaTypes.class, 10);
AllJavaTypes gen2A = realm.createObject(AllJavaTypes.class, 1);
gen2A.setFieldObject(gen1);
AllJavaTypes gen2B = realm.createObject(AllJavaTypes.class, 2);
gen2B.setFieldObject(gen1);
AllJavaTypes gen3 = realm.createObject(AllJavaTypes.class, 3);
RealmList<AllJavaTypes> parents = gen3.getFieldList();
parents.add(gen2A);
parents.add(gen2B);
realm.commitTransaction();
// row 0: backlink to rows 1 and 2; row 1 link to row 0, included
// row 1: no backlink, not included
// row 2: no backlink, not included
// row 3: no backlink, not included
// summary: 1 row (gen1)
RealmResults<AllJavaTypes> result = realm.where(AllJavaTypes.class)
.greaterThan(AllJavaTypes.FIELD_LO_OBJECT + "." + AllJavaTypes.FIELD_OBJECT + "." + AllJavaTypes.FIELD_ID, 1)
.findAll();
assertEquals(1, result.size());
assertTrue(result.contains(gen1));
}
// Query on a field descriptor that has a backlink in the middle
// Build a simple object graph.
// The test objects are:
// gen1
// / \
// gen2A gen2B
// \\ //
// gen3
// / = object ref
// // = list ref
// Queries with a field descriptor that contains a linking-objects (backlink)
// field in the MIDDLE of the path; gen2A and gen2B match via gen1's backlinks.
@Test
public void query_backlinkInMiddle() {
realm.beginTransaction();
AllJavaTypes gen1 = realm.createObject(AllJavaTypes.class, 10);
AllJavaTypes gen2A = realm.createObject(AllJavaTypes.class, 1);
gen2A.setFieldObject(gen1);
AllJavaTypes gen2B = realm.createObject(AllJavaTypes.class, 2);
gen2B.setFieldObject(gen1);
AllJavaTypes gen3 = realm.createObject(AllJavaTypes.class, 3);
RealmList<AllJavaTypes> parents = gen3.getFieldList();
parents.add(gen2A);
parents.add(gen2B);
realm.commitTransaction();
// row 0: no link, not included
// row 1: link to row 0, backlink to rows 1 and 2, row 2 has id < 2, included
// row 2: link to row 0, backlink to rows 1 and 2, row 2 has id < 2, included
// row 3: no link, not included
// summary: 2 rows (gen2A and gen2B)
RealmResults<AllJavaTypes> result = realm.where(AllJavaTypes.class)
.lessThan(AllJavaTypes.FIELD_OBJECT + "." + AllJavaTypes.FIELD_LO_OBJECT + "." + AllJavaTypes.FIELD_ID, 2)
.findAll();
assertEquals(2, result.size());
assertTrue(result.contains(gen2A));
assertTrue(result.contains(gen2B));
}
// Tests isNull through a backlink object field (FIELD_LO_OBJECT) for each
// nullable column type.
@Test
public void isNull_object() {
populateTestRealmForNullTests(realm);
// 1 String
assertEquals(1, realm.where(NullTypes.class).isNull(
NullTypes.FIELD_LO_OBJECT + "." + NullTypes.FIELD_STRING_NULL).count());
// 2 Bytes
assertEquals(1, realm.where(NullTypes.class).isNull(
NullTypes.FIELD_LO_OBJECT + "." + NullTypes.FIELD_BYTES_NULL).count());
// 3 Boolean
assertEquals(1, realm.where(NullTypes.class).isNull(
NullTypes.FIELD_LO_OBJECT + "." + NullTypes.FIELD_BOOLEAN_NULL).count());
// 4 Byte
assertEquals(1, realm.where(NullTypes.class).isNull(
NullTypes.FIELD_LO_OBJECT + "." + NullTypes.FIELD_BYTE_NULL).count());
// 5 Short
assertEquals(1, realm.where(NullTypes.class).isNull(
NullTypes.FIELD_LO_OBJECT + "." + NullTypes.FIELD_SHORT_NULL).count());
// 6 Integer
assertEquals(1, realm.where(NullTypes.class).isNull(
NullTypes.FIELD_LO_OBJECT + "." + NullTypes.FIELD_INTEGER_NULL).count());
// 7 Long
assertEquals(1, realm.where(NullTypes.class).isNull(
NullTypes.FIELD_LO_OBJECT + "." + NullTypes.FIELD_LONG_NULL).count());
// 8 Float
assertEquals(1, realm.where(NullTypes.class).isNull(
NullTypes.FIELD_LO_OBJECT + "." + NullTypes.FIELD_FLOAT_NULL).count());
// 9 Double
assertEquals(1, realm.where(NullTypes.class).isNull(
NullTypes.FIELD_LO_OBJECT + "." + NullTypes.FIELD_DOUBLE_NULL).count());
// 10 Date
assertEquals(1, realm.where(NullTypes.class).isNull(
NullTypes.FIELD_LO_OBJECT + "." + NullTypes.FIELD_DATE_NULL).count());
}
// Tests isNull on link's nullable field.
@Test
public void isNull_list() {
    populateTestRealmForNullTests(realm);
    // Same probes as isNull_object, but reached through the list backlink.
    String[] nullableFields = {
            NullTypes.FIELD_STRING_NULL,   // 1 String
            NullTypes.FIELD_BYTES_NULL,    // 2 Bytes
            NullTypes.FIELD_BOOLEAN_NULL,  // 3 Boolean
            NullTypes.FIELD_BYTE_NULL,     // 4 Byte
            NullTypes.FIELD_SHORT_NULL,    // 5 Short
            NullTypes.FIELD_INTEGER_NULL,  // 6 Integer
            NullTypes.FIELD_LONG_NULL,     // 7 Long
            NullTypes.FIELD_FLOAT_NULL,    // 8 Float
            NullTypes.FIELD_DOUBLE_NULL,   // 9 Double
            NullTypes.FIELD_DATE_NULL      // 10 Date
    };
    for (String field : nullableFields) {
        String path = NullTypes.FIELD_LO_LIST + "." + field;
        assertEquals(1, realm.where(NullTypes.class).isNull(path).count());
    }
}
@Test
public void isNull_unsupported() {
    // isNull() must reject a terminal linking-objects field.
    // Tests for other unsupported null types are in RealmQueryTests.
    try {
        realm.where(NullTypes.class).isNull(NullTypes.FIELD_LO_OBJECT).count();
        fail("isNull should throw on type LINKING_OBJECT(14) targeting an OBJECT");
    } catch (IllegalArgumentException expected) {
        assertEquals("Illegal Argument: LinkingObject from field fieldObjectNull is not nullable.", expected.getMessage());
    }
    try {
        realm.where(NullTypes.class).isNull(NullTypes.FIELD_LO_LIST).count();
        fail("isNull should throw on type LINKING_OBJECT(14) targeting a LIST");
    } catch (IllegalArgumentException expected) {
        assertEquals("Illegal Argument: LinkingObject from field fieldListNull is not nullable.", expected.getMessage());
    }
}
@Test
public void isNull_unsupportedLinkedTypes() {
    // A linking-objects field must also be rejected when it terminates a
    // nested field path. Other unsupported null types: see RealmQueryTests.
    try {
        realm.where(NullTypes.class).isNull(NullTypes.FIELD_OBJECT_NULL + "." + NullTypes.FIELD_LO_OBJECT);
        fail("isNull should throw on nested linked fields (LINKING_OBJECT => OBJECT)");
    } catch (IllegalArgumentException expected) {
        assertEquals("Illegal Argument: LinkingObject from field fieldObjectNull is not nullable.", expected.getMessage());
    }
    try {
        realm.where(NullTypes.class).isNull(NullTypes.FIELD_OBJECT_NULL + "." + NullTypes.FIELD_LO_LIST);
        fail("isNull should throw on nested linked fields (LINKING_OBJECT => LIST)");
    } catch (IllegalArgumentException expected) {
        assertEquals("Illegal Argument: LinkingObject from field fieldListNull is not nullable.", expected.getMessage());
    }
}
// Tests isNotNull on link's nullable field.
@Test
public void isNotNull_object() {
    populateTestRealmForNullTests(realm);
    // Every nullable column type probed through the object backlink; exactly
    // one reachable row holds a non-null value for each column.
    String[] nullableFields = {
            NullTypes.FIELD_STRING_NULL,   // 1 String
            NullTypes.FIELD_BYTES_NULL,    // 2 Bytes
            NullTypes.FIELD_BOOLEAN_NULL,  // 3 Boolean
            NullTypes.FIELD_BYTE_NULL,     // 4 Byte
            NullTypes.FIELD_SHORT_NULL,    // 5 Short
            NullTypes.FIELD_INTEGER_NULL,  // 6 Integer
            NullTypes.FIELD_LONG_NULL,     // 7 Long
            NullTypes.FIELD_FLOAT_NULL,    // 8 Float
            NullTypes.FIELD_DOUBLE_NULL,   // 9 Double
            NullTypes.FIELD_DATE_NULL      // 10 Date
    };
    for (String field : nullableFields) {
        String path = NullTypes.FIELD_LO_OBJECT + "." + field;
        assertEquals(1, realm.where(NullTypes.class).isNotNull(path).count());
    }
}
// Tests isNotNull on link's nullable field.
@Test
public void isNotNull_list() {
    populateTestRealmForNullTests(realm);
    // Same probes as isNotNull_object, but reached through the list backlink.
    String[] nullableFields = {
            NullTypes.FIELD_STRING_NULL,   // 1 String
            NullTypes.FIELD_BYTES_NULL,    // 2 Bytes
            NullTypes.FIELD_BOOLEAN_NULL,  // 3 Boolean
            NullTypes.FIELD_BYTE_NULL,     // 4 Byte
            NullTypes.FIELD_SHORT_NULL,    // 5 Short
            NullTypes.FIELD_INTEGER_NULL,  // 6 Integer
            NullTypes.FIELD_LONG_NULL,     // 7 Long
            NullTypes.FIELD_FLOAT_NULL,    // 8 Float
            NullTypes.FIELD_DOUBLE_NULL,   // 9 Double
            NullTypes.FIELD_DATE_NULL      // 10 Date
    };
    for (String field : nullableFields) {
        String path = NullTypes.FIELD_LO_LIST + "." + field;
        assertEquals(1, realm.where(NullTypes.class).isNotNull(path).count());
    }
}
@Test
public void isNotNull_unsupported() {
    // isNotNull() must reject a terminal linking-objects field.
    // Tests for other unsupported not null types are in RealmQueryTests.
    try {
        realm.where(NullTypes.class).isNotNull(NullTypes.FIELD_LO_OBJECT).count();
        fail("isNotNull should throw on type LINKING_OBJECT(14) targeting an OBJECT");
    } catch (IllegalArgumentException expected) {
        assertEquals("Illegal Argument: LinkingObject from field fieldObjectNull is not nullable.", expected.getMessage());
    }
    try {
        realm.where(NullTypes.class).isNotNull(NullTypes.FIELD_LO_LIST).count();
        fail("isNotNull should throw on type LINKING_OBJECT(14) targeting a LIST");
    } catch (IllegalArgumentException expected) {
        assertEquals("Illegal Argument: LinkingObject from field fieldListNull is not nullable.", expected.getMessage());
    }
}
@Test
public void isNotNull_unsupportedLinkedTypes() {
    // A linking-objects field must also be rejected when it terminates a
    // nested field path. Other unsupported not null types: RealmQueryTests.
    try {
        realm.where(NullTypes.class).isNotNull(NullTypes.FIELD_OBJECT_NULL + "." + NullTypes.FIELD_LO_OBJECT);
        fail("isNotNull should throw on nested linked fields (LINKING_OBJECT => OBJECT)");
    } catch (IllegalArgumentException expected) {
        assertEquals("Illegal Argument: LinkingObject from field fieldObjectNull is not nullable.", expected.getMessage());
    }
    try {
        realm.where(NullTypes.class).isNotNull(NullTypes.FIELD_OBJECT_NULL + "." + NullTypes.FIELD_LO_LIST);
        fail("isNotNull should throw on nested linked fields (LINKING_OBJECT => LIST)");
    } catch (IllegalArgumentException expected) {
        assertEquals("Illegal Argument: LinkingObject from field fieldListNull is not nullable.", expected.getMessage());
    }
}
@Test
public void isEmpty_linkingObjects() {
    createIsEmptyDataSet(realm);
    for (RealmFieldType type : SUPPORTED_IS_EMPTY_TYPES) {
        if (type != RealmFieldType.LINKING_OBJECTS) {
            continue; // non-backlink types are tested in RealmQueryTests
        }
        // Row 0: backlink to row 0; not included
        // Row 1: backlink to row 1; not included
        // Row 2: no backlink; included
        assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(AllJavaTypes.FIELD_LO_OBJECT).count());
        // Only row 1 has a linklist (and a backlink)
        assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(AllJavaTypes.FIELD_LO_LIST).count());
    }
}
@Test
public void isEmpty_multipleModelClasses() {
    createLinkedDataSet(realm);
    // Exactly one target in the linked data set has no parent backlinks.
    long orphanCount = realm.where(BacklinksTarget.class).isEmpty(BacklinksTarget.FIELD_PARENTS).count();
    assertEquals(1, orphanCount);
}
@Test(expected = IllegalArgumentException.class)
public void equalTo_linkingObjectLast() {
    createLinkedDataSet(realm);
    // A backlink field cannot be the terminal element of an equalTo() query.
    RealmQuery<BacklinksTarget> query = realm.where(BacklinksTarget.class);
    query.equalTo(BacklinksTarget.FIELD_PARENTS, "parents");
}
@Test
public void isEmpty_acrossLink() {
    createIsEmptyDataSet(realm);
    for (RealmFieldType type : SUPPORTED_IS_EMPTY_TYPES) {
        if (type != RealmFieldType.LINKING_OBJECTS) {
            continue; // non-backlink types are tested in RealmQueryTests
        }
        // Rows 0 and 1 are not included as they are linked to another row through FIELD_OBJECT
        // Row 2 is included (no link)
        assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(AllJavaTypes.FIELD_OBJECT + "." + AllJavaTypes.FIELD_LO_OBJECT).count());
        // Row 0 has link to row 0 which has a backlink (list); not included
        // Row 1 has link to row 1 which has a backlink (list); not included
        // Row 2 has no link; included
        assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(AllJavaTypes.FIELD_OBJECT + "." + AllJavaTypes.FIELD_LO_LIST).count());
    }
}
@Test
public void isEmpty_acrossLinkingObjectObjectLink() {
    createIsEmptyDataSet(realm);
    // All paths start from the object backlink; the terminal field varies.
    String prefix = AllJavaTypes.FIELD_LO_OBJECT + ".";
    for (RealmFieldType type : SUPPORTED_IS_EMPTY_TYPES) {
        switch (type) {
            case STRING:
                assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(prefix + AllJavaTypes.FIELD_STRING).count());
                break;
            case BINARY:
                assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(prefix + AllJavaTypes.FIELD_BINARY).count());
                break;
            case LIST:
                // Row 0: backlink to row 0, linklist is empty; included
                // Row 1: backlink to row 1, linklist to row 0; not included
                // Row 2: no backlink; included
                assertEquals(2, realm.where(AllJavaTypes.class).isEmpty(prefix + AllJavaTypes.FIELD_LIST).count());
                break;
            case LINKING_OBJECTS:
                // Both row 0 and 1 have a link/backlink; not included
                // row 2 has no link/backlink and an empty list; included
                assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(prefix + AllJavaTypes.FIELD_LO_OBJECT).count());
                assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(prefix + AllJavaTypes.FIELD_LO_LIST).count());
                break;
            default:
                fail("Unknown type: " + type);
        }
    }
}
@Test
public void isEmpty_acrossLinkingObjectListLink() {
    createIsEmptyDataSet(realm);
    assertEquals(3, realm.where(AllJavaTypes.class).findAll().size());
    // All paths start from the list backlink; the terminal field varies.
    String prefix = AllJavaTypes.FIELD_LO_LIST + ".";
    for (RealmFieldType type : SUPPORTED_IS_EMPTY_TYPES) {
        switch (type) {
            case STRING:
                // Row 2 included (has no backlink)
                assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(prefix + AllJavaTypes.FIELD_STRING).count());
                break;
            case BINARY:
                // Row 2 included (has no backlink)
                assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(prefix + AllJavaTypes.FIELD_BINARY).count());
                break;
            case LIST:
                assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(prefix + AllJavaTypes.FIELD_LIST).count());
                break;
            case LINKING_OBJECTS:
                // Row 0: Backlink (list) to row 1, row 1 backlink to row 1; not included
                // Row 1: Backlink (list) to row 2, row 2 no backlink; included
                // Row 2: No backlink (list); included
                assertEquals(2, realm.where(AllJavaTypes.class).isEmpty(prefix + AllJavaTypes.FIELD_LO_OBJECT).count());
                // Step 1:
                // Row 0 skipped; FIELD_LO_LIST.count > 0
                // Row 1 included; FIELD_LO_LIST.count() == 0
                //
                // Step 2: now checking Row 2
                // Row 0 included: goes to Row 1 where FIELD_LO_LIST.count() == 0
                assertEquals(2, realm.where(AllJavaTypes.class).isEmpty(prefix + AllJavaTypes.FIELD_LO_LIST).count());
                break;
            default:
                fail("Unknown type: " + type);
        }
    }
}
@Test
public void isNotEmpty() {
    createIsNotEmptyDataSet(realm);
    // NOTE(review): despite the method name, these assertions call isEmpty()
    // against the not-empty data set — confirm this inversion is intentional.
    for (RealmFieldType type : SUPPORTED_IS_NOT_EMPTY_TYPES) {
        if (type != RealmFieldType.LINKING_OBJECTS) {
            continue; // non-backlink types are tested in RealmQueryTests
        }
        // Row 0 and 1 have a link/backlink so no row is empty
        assertEquals(0, realm.where(AllJavaTypes.class).isEmpty(AllJavaTypes.FIELD_LO_OBJECT).count());
        assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(AllJavaTypes.FIELD_LO_LIST).count());
    }
}
@Test
public void isNotEmpty_acrossLink() {
    createIsNotEmptyDataSet(realm);
    // NOTE(review): despite the method name, these assertions call isEmpty()
    // against the not-empty data set — confirm this inversion is intentional.
    for (RealmFieldType type : SUPPORTED_IS_NOT_EMPTY_TYPES) {
        if (type != RealmFieldType.LINKING_OBJECTS) {
            continue; // non-backlink types are tested in RealmQueryTests
        }
        // tested in LinkingObjectsQueryTests;
        // Row 0 and Row 1 have link/backlink - no empty
        assertEquals(0, realm.where(AllJavaTypes.class).isEmpty(AllJavaTypes.FIELD_OBJECT + "." + AllJavaTypes.FIELD_LO_OBJECT).count());
        assertEquals(1, realm.where(AllJavaTypes.class).isEmpty(AllJavaTypes.FIELD_OBJECT + "." + AllJavaTypes.FIELD_LO_LIST).count());
    }
}
@Test
public void isNotEmpty_acrossLinkingObjectObjectLink() {
    createIsEmptyDataSet(realm);
    // All paths start from the object backlink; the terminal field varies.
    String prefix = AllJavaTypes.FIELD_LO_OBJECT + ".";
    for (RealmFieldType type : SUPPORTED_IS_EMPTY_TYPES) {
        switch (type) {
            case STRING:
                // Row 0: Follow link to row 0, and FIELD_STRING is empty ("")
                // Row 1: Follow link to row 1, and FIELD_STRING is not empty ("Foo")
                assertEquals(1, realm.where(AllJavaTypes.class).isNotEmpty(prefix + AllJavaTypes.FIELD_STRING).count());
                break;
            case BINARY:
                assertEquals(1, realm.where(AllJavaTypes.class).isNotEmpty(prefix + AllJavaTypes.FIELD_BINARY).count());
                break;
            case LIST:
                assertEquals(1, realm.where(AllJavaTypes.class).isNotEmpty(prefix + AllJavaTypes.FIELD_LIST).count());
                break;
            case LINKING_OBJECTS:
                // Both row 0 and 1 have a link/backlink
                assertEquals(2, realm.where(AllJavaTypes.class).isNotEmpty(prefix + AllJavaTypes.FIELD_LO_OBJECT).count());
                // Row 0: Backlink to row 0, backlink list to row 1; included
                // Row 1: Backlink to row 1, backlink list to row 2; included
                // Row 2: No backlink; not empty
                assertEquals(2, realm.where(AllJavaTypes.class).isNotEmpty(prefix + AllJavaTypes.FIELD_LO_LIST).count());
                break;
            default:
                fail("Unknown type: " + type);
        }
    }
}
@Test
public void isNotEmpty_acrossLinkingObjectListLink() {
    createIsEmptyDataSet(realm);
    assertEquals(3, realm.where(AllJavaTypes.class).findAll().size());
    // All paths start from the list backlink; the terminal field varies.
    String prefix = AllJavaTypes.FIELD_LO_LIST + ".";
    for (RealmFieldType type : SUPPORTED_IS_EMPTY_TYPES) {
        switch (type) {
            case STRING:
                // Row 0: Backlink list to row 1, string not empty ("Foo"); included
                // Row 1: Backlink list to row 2, string is empty; not included
                // Row 2: No backlink list; not included
                assertEquals(1, realm.where(AllJavaTypes.class).isNotEmpty(prefix + AllJavaTypes.FIELD_STRING).count());
                break;
            case BINARY:
                assertEquals(1, realm.where(AllJavaTypes.class).isNotEmpty(prefix + AllJavaTypes.FIELD_BINARY).count());
                break;
            case LIST:
                // Row 0: Backlink list to row 1, list to row 0; included
                // Row 1: Backlink list to row 2, list to row 1; included
                // Row 2: No backlink list; not included
                assertEquals(2, realm.where(AllJavaTypes.class).isNotEmpty(prefix + AllJavaTypes.FIELD_LIST).count());
                break;
            case LINKING_OBJECTS:
                assertEquals(1, realm.where(AllJavaTypes.class).isNotEmpty(prefix + AllJavaTypes.FIELD_LO_OBJECT).count());
                // Row 0: Backlink list to row 1, backlink list to row 2; included
                // Row 1: Backlink list to row 2, empty backlink list; not included
                // Row 2: Empty backlink list; not included
                assertEquals(1, realm.where(AllJavaTypes.class).isNotEmpty(prefix + AllJavaTypes.FIELD_LO_LIST).count());
                break;
            default:
                fail("Unknown type: " + type);
        }
    }
}
// Similar to the version in TestHelper, but with more Backlinks
// Creates 3 NullTypes objects. The objects are self-referenced (link) in
// order to test link queries.
//
// +-+--------+------+---------+--------+--------------------+----------+
// | | string | link | numeric | binary | numeric (not null) | linklist |
// +-+--------+------+---------+--------+--------------------+----------+
// |0| Fish | 0 | 1 | {0} | 1 | [0] |
// |1| null | 2 | null | null | 0 | [2] |
// |2| Horse | null | 3 | {1,2} | 3 | null |
// +-+--------+------+---------+--------+--------------------+----------+
private void populateTestRealmForNullTests(Realm testRealm) {
    // Per-row column values; index i corresponds to row i in the table above.
    String[] strings = {"Fish", null, "Horse"};                     // 1 String
    byte[][] byteArrays = {new byte[]{0}, null, new byte[]{1, 2}};  // 2 Bytes
    Boolean[] bools = {false, null, true};                          // 3 Boolean
    // Numeric fields will be 1, 0/null, 3
    Date[] dateValues = {new Date(0), null, new Date(10000)};       // 10 Date
    NullTypes[] persisted = new NullTypes[3];
    testRealm.beginTransaction();
    for (int i = 0; i < 3; i++) {
        NullTypes unmanaged = new NullTypes();
        unmanaged.setId(i + 1);
        // 1 String
        unmanaged.setFieldStringNull(strings[i]);
        if (strings[i] != null) {
            unmanaged.setFieldStringNotNull(strings[i]);
        }
        // 2 Bytes
        unmanaged.setFieldBytesNull(byteArrays[i]);
        if (byteArrays[i] != null) {
            unmanaged.setFieldBytesNotNull(byteArrays[i]);
        }
        // 3 Boolean
        unmanaged.setFieldBooleanNull(bools[i]);
        if (bools[i] != null) {
            unmanaged.setFieldBooleanNotNull(bools[i]);
        }
        // Row 1 (index 1) keeps all numeric columns null/default.
        if (i != 1) {
            int value = i + 1;
            // 4 Byte
            unmanaged.setFieldByteNull((byte) value);
            unmanaged.setFieldByteNotNull((byte) value);
            // 5 Short
            unmanaged.setFieldShortNull((short) value);
            unmanaged.setFieldShortNotNull((short) value);
            // 6 Integer
            unmanaged.setFieldIntegerNull(value);
            unmanaged.setFieldIntegerNotNull(value);
            // 7 Long
            unmanaged.setFieldLongNull((long) value);
            unmanaged.setFieldLongNotNull((long) value);
            // 8 Float
            unmanaged.setFieldFloatNull((float) value);
            unmanaged.setFieldFloatNotNull((float) value);
            // 9 Double
            unmanaged.setFieldDoubleNull((double) value);
            unmanaged.setFieldDoubleNotNull((double) value);
        }
        // 10 Date
        unmanaged.setFieldDateNull(dateValues[i]);
        if (dateValues[i] != null) {
            unmanaged.setFieldDateNotNull(dateValues[i]);
        }
        persisted[i] = testRealm.copyToRealm(unmanaged);
    }
    // Wire up the link and linklist columns on the now-managed objects.
    persisted[0].setFieldObjectNull(persisted[0]);
    persisted[1].setFieldObjectNull(persisted[2]);
    persisted[2].setFieldObjectNull(null);
    persisted[0].getFieldListNull().add(persisted[1]);
    persisted[1].getFieldListNull().add(persisted[2]);
    persisted[2].getFieldListNull().clear(); // just to be sure
    testRealm.commitTransaction();
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package dao;
import config.MySQL;
import dao.sql.BoletimSQL;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import model.Aluno;
import model.Boletim;
import model.Disciplina;
import util.CRUD;
import util.JDBC;
import util.Model;
/**
*
* @author evson
*/
/**
 * Singleton DAO that persists {@link Boletim} rows and keeps an in-memory
 * identity cache (id -> Boletim) consistent with the database and with the
 * owning {@link Aluno}/{@link Disciplina} objects.
 *
 * <p>NOTE(review): uses a single shared {@link Connection} and an unsynchronized
 * cache map — presumably single-threaded desktop use; confirm before sharing
 * across threads.</p>
 *
 * @author evson
 */
public final class BoletimDAO implements CRUD<Integer, Boletim> {

    private static final BoletimDAO DAO = new BoletimDAO();
    private final Connection CONNECTION = JDBC.getConnection(MySQL.DRIVER, MySQL.URL, MySQL.USER, MySQL.PASSWORD);
    /** Identity cache: boletim id -> cached instance. */
    private final Map<Integer, Boletim> BOLETINS = new HashMap<>();

    private BoletimDAO() {
    }

    /** @return the single shared instance. */
    public static BoletimDAO getInstance() {
        return DAO;
    }

    /** @return the live identity cache (callers must not replace entries). */
    public Map<Integer, Boletim> getBoletins() {
        return BOLETINS;
    }

    /**
     * Binds a nullable grade to the statement, writing SQL NULL when absent.
     * Avoids the NPE that auto-unboxing a null Double into setDouble causes.
     */
    private static void setNullableDouble(PreparedStatement statement, int index, Double value) throws SQLException {
        if (value != null) {
            statement.setDouble(index, value);
        }
        else {
            statement.setNull(index, Types.DOUBLE);
        }
    }

    /**
     * Reads a nullable DOUBLE column as Double. Unlike getDouble, this keeps
     * SQL NULL as null instead of collapsing it to 0.0.
     */
    private static Double getNullableDouble(ResultSet result, int index) throws SQLException {
        return (Double) result.getObject(index);
    }

    /** Rolls the current transaction back and restores auto-commit, logging failures. */
    private void rollbackQuietly() {
        try {
            CONNECTION.rollback();
            CONNECTION.setAutoCommit(true);
        } catch (SQLException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Inserts the given {@link Boletim}, assigns it the generated id and caches it.
     * Errors are logged; a failed insert is rolled back.
     */
    @Override
    public void create(Model model) {
        try {
            Boletim boletim = (Boletim) model;
            CONNECTION.setAutoCommit(false);
            try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.CREATE, PreparedStatement.RETURN_GENERATED_KEYS)) {
                setNullableDouble(statement, 1, boletim.getTesteB1());
                setNullableDouble(statement, 2, boletim.getProvaB1());
                setNullableDouble(statement, 3, boletim.getTesteB2());
                setNullableDouble(statement, 4, boletim.getProvaB2());
                statement.setInt(5, boletim.getAluno().getId());
                statement.setInt(6, boletim.getDisciplina().getId());
                statement.executeUpdate();
                try (ResultSet result = statement.getGeneratedKeys()) {
                    result.next();
                    boletim.setId(result.getInt(1));
                }
                BOLETINS.put(boletim.getId(), boletim);
            }
            CONNECTION.commit();
            CONNECTION.setAutoCommit(true);
        }
        catch (SQLException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
            rollbackQuietly();
        }
        catch (ClassCastException | NullPointerException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Returns the cached {@link Boletim} with the given model's id, loading it
     * from the database (and wiring it to its Aluno/Disciplina) on a cache miss.
     *
     * @return the resolved boletim, or null when lookup fails (error is logged)
     */
    @Override
    public Boletim get(Model model) {
        Boletim boletim = null;
        try {
            boletim = BOLETINS.get(((Boletim) model).getId());
            if (boletim == null) {
                boletim = (Boletim) model;
                try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.GET)) {
                    statement.setInt(1, boletim.getId());
                    try (ResultSet result = statement.executeQuery()) {
                        result.next();
                        // getObject keeps absent grades as null; getDouble would
                        // silently turn SQL NULL into 0.0 (bug fix, matches query()).
                        boletim.setTesteB1(getNullableDouble(result, 2));
                        boletim.setProvaB1(getNullableDouble(result, 3));
                        boletim.setTesteB2(getNullableDouble(result, 4));
                        boletim.setProvaB2(getNullableDouble(result, 5));
                        Aluno aluno = AlunoDAO.getInstance().get(new Aluno(result.getInt(6), null, null, null));
                        Disciplina disciplina = DisciplinaDAO.getInstance().get(new Disciplina(result.getInt(7), null, null));
                        aluno.addBoletim(boletim);
                        disciplina.addBoletim(boletim);
                        boletim.setAluno(aluno);
                        boletim.setDisciplina(disciplina);
                        BOLETINS.put(boletim.getId(), boletim);
                    }
                }
            }
        }
        catch (ClassCastException | SQLException | NullPointerException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
        return boletim;
    }

    /**
     * Finds the boletim for the given (aluno, disciplina) pair, preferring the
     * cache and falling back to the database.
     *
     * @return the matching boletim, or null when lookup fails (error is logged)
     */
    public Boletim get(Aluno aluno, Disciplina disciplina) {
        Boletim boletim = null;
        try {
            for (Boletim cached : BOLETINS.values()) {
                if (cached.getAluno().equals(aluno) && cached.getDisciplina().equals(disciplina)) {
                    return cached;
                }
            }
            try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.GET_ID_ALUNO_AND_ID_DISCIPLINA)) {
                statement.setInt(1, aluno.getId());
                statement.setInt(2, disciplina.getId());
                try (ResultSet result = statement.executeQuery()) {
                    result.next();
                    // Null-preserving reads (bug fix, matches query()).
                    boletim = new Boletim(result.getInt(1),
                            getNullableDouble(result, 2), getNullableDouble(result, 3),
                            getNullableDouble(result, 4), getNullableDouble(result, 5),
                            aluno, disciplina);
                    aluno.addBoletim(boletim);
                    disciplina.addBoletim(boletim);
                    BOLETINS.put(boletim.getId(), boletim);
                }
            }
        }
        catch (ClassCastException | SQLException | NullPointerException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
        return boletim;
    }

    /**
     * Loads every boletim, reusing cached instances where possible.
     *
     * @return id -> boletim map (empty on failure; error is logged)
     */
    @Override
    public Map<Integer, Boletim> query() {
        Map<Integer, Boletim> boletins = new HashMap<>();
        try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.QUERY);
             ResultSet result = statement.executeQuery()) {
            while (result.next()) {
                Boletim boletim = BOLETINS.get(result.getInt(1));
                if (boletim == null) {
                    Aluno aluno = AlunoDAO.getInstance().get(new Aluno(result.getInt(6), null, null, null));
                    Disciplina disciplina = DisciplinaDAO.getInstance().get(new Disciplina(result.getInt(7), null, null));
                    boletim = new Boletim(result.getInt(1),
                            getNullableDouble(result, 2), getNullableDouble(result, 3),
                            getNullableDouble(result, 4), getNullableDouble(result, 5),
                            aluno, disciplina);
                    aluno.addBoletim(boletim);
                    disciplina.addBoletim(boletim);
                    BOLETINS.put(boletim.getId(), boletim);
                }
                boletins.put(boletim.getId(), boletim);
            }
        }
        catch (SQLException | NullPointerException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
        return boletins;
    }

    /**
     * Loads the boletins belonging to the given {@link Aluno} or
     * {@link Disciplina}; any other model yields an empty map.
     *
     * @return id -> boletim map (empty on failure; error is logged)
     */
    public Map<Integer, Boletim> query(Model model) {
        Map<Integer, Boletim> boletins = new HashMap<>();
        try {
            if (model instanceof Aluno) {
                Aluno aluno = (Aluno) model;
                try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.QUERY_ID_ALUNO)) {
                    statement.setInt(1, aluno.getId());
                    try (ResultSet result = statement.executeQuery()) {
                        while (result.next()) {
                            Boletim boletim = BOLETINS.get(result.getInt(1));
                            if (boletim == null) {
                                Disciplina disciplina = DisciplinaDAO.getInstance().get(new Disciplina(result.getInt(7), null, null));
                                // Null-preserving reads (bug fix, matches query()).
                                boletim = new Boletim(result.getInt(1),
                                        getNullableDouble(result, 2), getNullableDouble(result, 3),
                                        getNullableDouble(result, 4), getNullableDouble(result, 5),
                                        aluno, disciplina);
                                aluno.addBoletim(boletim);
                                disciplina.addBoletim(boletim);
                                BOLETINS.put(boletim.getId(), boletim);
                            }
                            boletins.put(boletim.getId(), boletim);
                        }
                    }
                }
            }
            else if (model instanceof Disciplina) {
                Disciplina disciplina = (Disciplina) model;
                try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.QUERY_ID_DISCIPLINA)) {
                    statement.setInt(1, disciplina.getId());
                    try (ResultSet result = statement.executeQuery()) {
                        while (result.next()) {
                            Boletim boletim = BOLETINS.get(result.getInt(1));
                            if (boletim == null) {
                                Aluno aluno = AlunoDAO.getInstance().get(new Aluno(result.getInt(6), null, null, null));
                                // Null-preserving reads (bug fix, matches query()).
                                boletim = new Boletim(result.getInt(1),
                                        getNullableDouble(result, 2), getNullableDouble(result, 3),
                                        getNullableDouble(result, 4), getNullableDouble(result, 5),
                                        aluno, disciplina);
                                aluno.addBoletim(boletim);
                                disciplina.addBoletim(boletim);
                                BOLETINS.put(boletim.getId(), boletim);
                            }
                            boletins.put(boletim.getId(), boletim);
                        }
                    }
                }
            }
        }
        catch (ClassCastException | SQLException | NullPointerException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
        return boletins;
    }

    /**
     * Updates the four grade columns of the given {@link Boletim} and refreshes
     * the cache entry. Errors are logged; a failed update is rolled back.
     */
    @Override
    public void update(Model model) {
        try {
            Boletim boletim = (Boletim) model;
            CONNECTION.setAutoCommit(false);
            try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.UPDATE_ID)) {
                // Bug fix: grades may legitimately be null — write SQL NULL like
                // create() does instead of NPE-ing on Double auto-unboxing.
                setNullableDouble(statement, 1, boletim.getTesteB1());
                setNullableDouble(statement, 2, boletim.getProvaB1());
                setNullableDouble(statement, 3, boletim.getTesteB2());
                setNullableDouble(statement, 4, boletim.getProvaB2());
                statement.setInt(5, boletim.getId());
                statement.executeUpdate();
                BOLETINS.put(boletim.getId(), boletim);
            }
            CONNECTION.commit();
            CONNECTION.setAutoCommit(true);
        }
        catch (SQLException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
            rollbackQuietly();
        }
        catch (ClassCastException | NullPointerException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Deletes every boletim and clears all caches (including the per-aluno and
     * per-disciplina maps). Errors are logged; a failure is rolled back.
     */
    public void delete() {
        try {
            CONNECTION.setAutoCommit(false);
            try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.DELETE)) {
                statement.executeUpdate();
            }
            for (Aluno aluno : AlunoDAO.getInstance().getAlunos().values()) {
                aluno.getBoletins().clear();
            }
            for (Disciplina disciplina : DisciplinaDAO.getInstance().getDisciplinas().values()) {
                disciplina.getBoletins().clear();
            }
            BOLETINS.clear();
            CONNECTION.commit();
            CONNECTION.setAutoCommit(true);
        }
        catch (SQLException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
            rollbackQuietly();
        }
        catch (NullPointerException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Deletes the boletins associated with the given model: a single
     * {@link Boletim}, all boletins of an {@link Aluno}, or all boletins of a
     * {@link Disciplina}. Caches are kept in sync; failures roll back.
     */
    @Override
    public void delete(Model model) {
        try {
            CONNECTION.setAutoCommit(false);
            if (model instanceof Boletim) {
                Boletim boletim = (Boletim) model;
                try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.DELETE_ID)) {
                    statement.setInt(1, boletim.getId());
                    statement.executeUpdate();
                }
                boletim.getAluno().removeBoletim(boletim);
                boletim.getDisciplina().removeBoletim(boletim);
                BOLETINS.remove(boletim.getId());
            }
            else if (model instanceof Aluno) {
                Aluno aluno = (Aluno) model;
                try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.DELETE_ID_ALUNO)) {
                    statement.setInt(1, aluno.getId());
                    statement.executeUpdate();
                }
                for (Boletim boletim : aluno.getBoletins().values()) {
                    boletim.getDisciplina().removeBoletim(boletim);
                    BOLETINS.remove(boletim.getId());
                }
                aluno.getBoletins().clear();
            }
            else if (model instanceof Disciplina) {
                Disciplina disciplina = (Disciplina) model;
                try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.DELETE_ID_DISCIPLINA)) {
                    statement.setInt(1, disciplina.getId());
                    statement.executeUpdate();
                }
                for (Boletim boletim : disciplina.getBoletins().values()) {
                    boletim.getAluno().removeBoletim(boletim);
                    BOLETINS.remove(boletim.getId());
                }
                disciplina.getBoletins().clear();
            }
            CONNECTION.commit();
            CONNECTION.setAutoCommit(true);
        }
        catch (SQLException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
            rollbackQuietly();
        }
        catch (ClassCastException | NullPointerException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Deletes the single boletim linking the given aluno and disciplina and
     * detaches it from both owners and the cache. Failures roll back.
     */
    public void delete(Aluno aluno, Disciplina disciplina) {
        try {
            CONNECTION.setAutoCommit(false);
            try (PreparedStatement statement = CONNECTION.prepareStatement(BoletimSQL.DELETE_ID_ALUNO_AND_ID_DISCIPLINA)) {
                statement.setInt(1, aluno.getId());
                statement.setInt(2, disciplina.getId());
                statement.executeUpdate();
            }
            // Bug fix: find the match first, then remove — the original removed
            // from aluno's map while iterating over its values() view.
            Boletim match = null;
            for (Boletim boletim : aluno.getBoletins().values()) {
                if (boletim.getDisciplina().equals(disciplina)) {
                    match = boletim;
                    break;
                }
            }
            if (match != null) {
                aluno.removeBoletim(match);
                disciplina.removeBoletim(match);
                BOLETINS.remove(match.getId());
            }
            CONNECTION.commit();
            CONNECTION.setAutoCommit(true);
        }
        catch (SQLException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
            rollbackQuietly();
        }
        catch (NullPointerException ex) {
            Logger.getLogger(BoletimDAO.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
| |
//
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.start;
import static org.eclipse.jetty.start.UsageException.*;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import org.eclipse.jetty.start.Props.Prop;
import org.eclipse.jetty.start.config.ConfigSource;
import org.eclipse.jetty.start.config.ConfigSources;
import org.eclipse.jetty.start.config.DirConfigSource;
/**
* The Arguments required to start Jetty.
*/
public class StartArgs
{
    /** Resolved Jetty version string; also published as the "jetty.version" system property. */
    public static final String VERSION;
    static
    {
        // Resolution order: explicit -Djetty.version, then the jar manifest
        // implementation version (only when the vendor matches Jetty), then "TEST".
        String ver = System.getProperty("jetty.version",null);
        if (ver == null)
        {
            Package pkg = StartArgs.class.getPackage();
            if ((pkg != null) && "Eclipse.org - Jetty".equals(pkg.getImplementationVendor()) && (pkg.getImplementationVersion() != null))
            {
                ver = pkg.getImplementationVersion();
            }
        }
        if (ver == null)
        {
            ver = "TEST";
        }
        VERSION = ver;
        // side effect: make the resolved version visible to the rest of the JVM
        System.setProperty("jetty.version",VERSION);
    }
    /** Default server bootstrap class, overridable via "jetty.server" / "main.class". */
    private static final String SERVER_MAIN = "org.eclipse.jetty.xml.XmlConfiguration";
    /** List of enabled modules */
    private Set<String> modules = new HashSet<>();
    /** Map of enabled modules to the source of where that activation occurred */
    private Map<String, List<String>> sources = new HashMap<>();
    /** Map of properties to where that property was declared */
    private Map<String, String> propertySource = new HashMap<>();
    /** List of all active [files] sections from enabled modules */
    private List<FileArg> files = new ArrayList<>();
    /** Classpath built from all active [lib] sections of enabled modules and --lib args */
    private Classpath classpath;
    /** List of all active [xml] sections from enabled modules */
    private List<Path> xmls = new ArrayList<>();
    /** JVM arguments, found via command line and in all active [exec] sections from enabled modules */
    private List<String> jvmArgs = new ArrayList<>();
    /** List of all xml references found directly on command line or start.ini */
    private List<String> xmlRefs = new ArrayList<>();
    /** List of all property references found directly on command line or start.ini */
    private List<String> propertyFileRefs = new ArrayList<>();
    /** List of all property files */
    private List<Path> propertyFiles = new ArrayList<>();
    /** Start properties (not JVM system properties) accumulated during parsing. */
    private Props properties = new Props();
    /** Keys of JVM system properties set via -D args or promoted start properties. */
    private Set<String> systemPropertyKeys = new HashSet<>();
    /** Raw (unexpanded) --lib references, expanded later by expandLibs(). */
    private List<String> rawLibs = new ArrayList<>();
    // jetty.base - build out commands
    /** --add-to-startd=[module,[module]] */
    private List<String> addToStartdIni = new ArrayList<>();
    /** --add-to-start=[module,[module]] */
    private List<String> addToStartIni = new ArrayList<>();
    // module inspection commands
    /** --write-module-graph=[filename] */
    private String moduleGraphFilename;
    /** Collection of all modules */
    private Modules allModules;
    /** Should the server be run? */
    private boolean run = true;
    // Mode flags; most informational modes also force run = false.
    private boolean download = false;
    private boolean help = false;
    private boolean stopCommand = false;
    private boolean listModules = false;
    private boolean listClasspath = false;
    private boolean listConfig = false;
    private boolean version = false;
    private boolean dryRun = false;
    private boolean exec = false;
    private boolean approveAllLicenses = false;
    private boolean testingMode = false;
    public StartArgs()
    {
        classpath = new Classpath();
    }
private void addFile(Module module, String uriLocation)
{
FileArg arg = new FileArg(module, uriLocation);
if (!files.contains(arg))
{
files.add(arg);
}
}
public void addSystemProperty(String key, String value)
{
this.systemPropertyKeys.add(key);
System.setProperty(key,value);
}
private void addUniqueXmlFile(String xmlRef, Path xmlfile) throws IOException
{
if (!FS.canReadFile(xmlfile))
{
throw new IOException("Cannot read file: " + xmlRef);
}
xmlfile = FS.toRealPath(xmlfile);
if (!xmls.contains(xmlfile))
{
xmls.add(xmlfile);
}
}
private void addUniquePropertyFile(String propertyFileRef, Path propertyFile) throws IOException
{
if (!FS.canReadFile(propertyFile))
{
throw new IOException("Cannot read file: " + propertyFileRef);
}
propertyFile = FS.toRealPath(propertyFile);
if (!propertyFiles.contains(propertyFile))
{
propertyFiles.add(propertyFile);
}
}
public void dumpActiveXmls(BaseHome baseHome)
{
System.out.println();
System.out.println("Jetty Active XMLs:");
System.out.println("------------------");
if (xmls.isEmpty())
{
System.out.println(" (no xml files specified)");
return;
}
for (Path xml : xmls)
{
System.out.printf(" %s%n",baseHome.toShortForm(xml.toAbsolutePath()));
}
}
public void dumpEnvironment(BaseHome baseHome)
{
// Java Details
System.out.println();
System.out.println("Java Environment:");
System.out.println("-----------------");
dumpSystemProperty("java.home");
dumpSystemProperty("java.vm.vendor");
dumpSystemProperty("java.vm.version");
dumpSystemProperty("java.vm.name");
dumpSystemProperty("java.vm.info");
dumpSystemProperty("java.runtime.name");
dumpSystemProperty("java.runtime.version");
dumpSystemProperty("java.io.tmpdir");
dumpSystemProperty("user.dir");
dumpSystemProperty("user.language");
dumpSystemProperty("user.country");
// Jetty Environment
System.out.println();
System.out.println("Jetty Environment:");
System.out.println("-----------------");
dumpProperty("jetty.version");
dumpProperty("jetty.home");
dumpProperty("jetty.base");
// Jetty Configuration Environment
System.out.println();
System.out.println("Config Search Order:");
System.out.println("--------------------");
for (ConfigSource config : baseHome.getConfigSources())
{
System.out.printf(" %s",config.getId());
if (config instanceof DirConfigSource)
{
DirConfigSource dirsource = (DirConfigSource)config;
if (dirsource.isPropertyBased())
{
System.out.printf(" -> %s",dirsource.getDir());
}
}
System.out.println();
}
// Jetty Se
System.out.println();
}
public void dumpJvmArgs()
{
System.out.println();
System.out.println("JVM Arguments:");
System.out.println("--------------");
if (jvmArgs.isEmpty())
{
System.out.println(" (no jvm args specified)");
return;
}
for (String jvmArgKey : jvmArgs)
{
String value = System.getProperty(jvmArgKey);
if (value != null)
{
System.out.printf(" %s = %s%n",jvmArgKey,value);
}
else
{
System.out.printf(" %s%n",jvmArgKey);
}
}
}
public void dumpProperties()
{
System.out.println();
System.out.println("Properties:");
System.out.println("-----------");
List<String> sortedKeys = new ArrayList<>();
for (Prop prop : properties)
{
if (prop.origin.equals(Props.ORIGIN_SYSPROP))
{
continue; // skip
}
sortedKeys.add(prop.key);
}
if (sortedKeys.isEmpty())
{
System.out.println(" (no properties specified)");
return;
}
Collections.sort(sortedKeys);
for (String key : sortedKeys)
{
dumpProperty(key);
}
}
private void dumpProperty(String key)
{
Prop prop = properties.getProp(key);
if (prop == null)
{
System.out.printf(" %s (not defined)%n",key);
}
else
{
System.out.printf(" %s = %s%n",key,properties.expand(prop.value));
if (StartLog.isDebugEnabled())
{
System.out.printf(" origin: %s%n",prop.origin);
while (prop.overrides != null)
{
prop = prop.overrides;
System.out.printf(" (overrides)%n");
System.out.printf(" %s = %s%n",key,properties.expand(prop.value));
System.out.printf(" origin: %s%n",prop.origin);
}
}
}
}
public void dumpSystemProperties()
{
System.out.println();
System.out.println("System Properties:");
System.out.println("------------------");
if (systemPropertyKeys.isEmpty())
{
System.out.println(" (no system properties specified)");
return;
}
List<String> sortedKeys = new ArrayList<>();
sortedKeys.addAll(systemPropertyKeys);
Collections.sort(sortedKeys);
for (String key : sortedKeys)
{
String value = System.getProperty(key);
System.out.printf(" %s = %s%n",key,properties.expand(value));
}
}
private void dumpSystemProperty(String key)
{
System.out.printf(" %s = %s%n",key,System.getProperty(key));
}
/**
* Ensure that the System Properties are set (if defined as a System property, or start.config property, or start.ini property)
*
* @param key
* the key to be sure of
*/
private void ensureSystemPropertySet(String key)
{
if (systemPropertyKeys.contains(key))
{
return; // done
}
if (properties.containsKey(key))
{
String val = properties.expand(properties.getString(key));
if (val == null)
{
return; // no value to set
}
// setup system property
systemPropertyKeys.add(key);
System.setProperty(key,val);
}
}
/**
* Expand any command line added <code>--lib</code> lib references.
*
* @param baseHome
* @throws IOException
*/
public void expandLibs(BaseHome baseHome) throws IOException
{
StartLog.debug("Expanding Libs");
for (String rawlibref : rawLibs)
{
StartLog.debug("rawlibref = " + rawlibref);
String libref = properties.expand(rawlibref);
StartLog.debug("expanded = " + libref);
// perform path escaping (needed by windows)
libref = libref.replaceAll("\\\\([^\\\\])","\\\\\\\\$1");
for (Path libpath : baseHome.getPaths(libref))
{
classpath.addComponent(libpath.toFile());
}
}
}
/**
* Build up the Classpath and XML file references based on enabled Module list.
*
* @param baseHome
* @param activeModules
* @throws IOException
*/
public void expandModules(BaseHome baseHome, List<Module> activeModules) throws IOException
{
StartLog.debug("Expanding Modules");
for (Module module : activeModules)
{
// Find and Expand Libraries
for (String rawlibref : module.getLibs())
{
StartLog.debug("rawlibref = " + rawlibref);
String libref = properties.expand(rawlibref);
StartLog.debug("expanded = " + libref);
for (Path libpath : baseHome.getPaths(libref))
{
classpath.addComponent(libpath.toFile());
}
}
for (String jvmArg : module.getJvmArgs())
{
exec = true;
jvmArgs.add(jvmArg);
}
// Find and Expand XML files
for (String xmlRef : module.getXmls())
{
// Straight Reference
Path xmlfile = baseHome.getPath(xmlRef);
addUniqueXmlFile(xmlRef,xmlfile);
}
// Register Download operations
for (String file : module.getFiles())
{
StartLog.debug("Adding module specified file: %s",file);
addFile(module,file);
}
}
}
    /** @return module names requested via --add-to-startd */
    public List<String> getAddToStartdIni()
    {
        return addToStartdIni;
    }
    /** @return module names requested via --add-to-start */
    public List<String> getAddToStartIni()
    {
        return addToStartIni;
    }
    /** @return the collection of all known modules (set via setAllModules) */
    public Modules getAllModules()
    {
        return allModules;
    }
    /** @return the classpath assembled from modules and --lib references */
    public Classpath getClasspath()
    {
        return classpath;
    }
    /** @return the names of all enabled modules */
    public Set<String> getEnabledModules()
    {
        return this.modules;
    }
    /** @return the active [files] download operations */
    public List<FileArg> getFiles()
    {
        return files;
    }
    /** @return the collected JVM arguments */
    public List<String> getJvmArgs()
    {
        return jvmArgs;
    }
    /**
     * Assemble the command line used to start (or fork) the server.
     * <p>
     * Order is significant: java binary + JVM args + system properties + classpath +
     * main class (only when {@code addJavaInit} is true), then properties (inline or
     * via temp file), then xml files, then property files.
     *
     * @param baseHome the home/base resolver
     * @param addJavaInit true to include the java binary, JVM args, -cp and main class
     * @return the assembled command line
     * @throws IOException if the temporary properties file cannot be written
     */
    public CommandLineBuilder getMainArgs(BaseHome baseHome, boolean addJavaInit) throws IOException
    {
        CommandLineBuilder cmd = new CommandLineBuilder();
        if (addJavaInit)
        {
            cmd.addRawArg(CommandLineBuilder.findJavaBin());
            for (String x : jvmArgs)
            {
                cmd.addRawArg(x);
            }
            cmd.addRawArg("-Djava.io.tmpdir=" + System.getProperty("java.io.tmpdir"));
            cmd.addRawArg("-Djetty.home=" + baseHome.getHome());
            cmd.addRawArg("-Djetty.base=" + baseHome.getBase());
            // System Properties
            for (String propKey : systemPropertyKeys)
            {
                String value = System.getProperty(propKey);
                cmd.addEqualsArg("-D" + propKey,value);
            }
            cmd.addRawArg("-cp");
            cmd.addRawArg(classpath.toString());
            cmd.addRawArg(getMainClassname());
        }
        // Special Stop/Shutdown properties
        ensureSystemPropertySet("STOP.PORT");
        ensureSystemPropertySet("STOP.KEY");
        ensureSystemPropertySet("STOP.WAIT");
        // pass properties as args or as a file
        if (dryRun || isExec())
        {
            // inline key=value args so the printed / forked command is self-contained
            for (Prop p : properties)
                cmd.addRawArg(CommandLineBuilder.quote(p.key)+"="+CommandLineBuilder.quote(p.value));
        }
        else if (properties.size() > 0)
        {
            // otherwise hand the properties over via a temporary file argument
            File prop_file = File.createTempFile("start",".properties");
            prop_file.deleteOnExit();
            try (FileOutputStream out = new FileOutputStream(prop_file))
            {
                properties.store(out,"start.jar properties");
            }
            cmd.addRawArg(prop_file.getAbsolutePath());
        }
        for (Path xml : xmls)
        {
            cmd.addRawArg(xml.toAbsolutePath().toString());
        }
        for (Path propertyFile : propertyFiles)
        {
            cmd.addRawArg(propertyFile.toAbsolutePath().toString());
        }
        return cmd;
    }
public String getMainClassname()
{
String mainclass = System.getProperty("jetty.server",SERVER_MAIN);
return System.getProperty("main.class",mainclass);
}
    /** @return the --write-module-graph output filename, or null if not requested */
    public String getModuleGraphFilename()
    {
        return moduleGraphFilename;
    }
    /** @return the start properties collected during parsing */
    public Props getProperties()
    {
        return properties;
    }
    /** @return the sources that enabled the given module, or null if not enabled */
    public List<String> getSources(String module)
    {
        return sources.get(module);
    }
    /** @return the active xml configuration files */
    public List<Path> getXmlFiles()
    {
        return xmls;
    }
    /** @return true if any JVM arguments were collected */
    public boolean hasJvmArgs()
    {
        return jvmArgs.size() > 0;
    }
    /** @return true if any tracked system property other than the implicit
     *          jetty.home / jetty.base / main.class keys was set */
    public boolean hasSystemProperties()
    {
        for (String key : systemPropertyKeys)
        {
            // ignored keys
            if ("jetty.home".equals(key) || "jetty.base".equals(key) || "main.class".equals(key))
            {
                // skip
                continue;
            }
            return true;
        }
        return false;
    }
    /** @return true if --approve-all-licenses was given */
    public boolean isApproveAllLicenses()
    {
        return approveAllLicenses;
    }
    /** @return true if a download / build-out operation was requested */
    public boolean isDownload()
    {
        return download;
    }
    /** @return true if --dry-run / --exec-print was given */
    public boolean isDryRun()
    {
        return dryRun;
    }
    /** @return true if the server must be run in a forked JVM */
    public boolean isExec()
    {
        return exec;
    }
    /** @return true if the main class is the default XmlConfiguration bootstrap */
    public boolean isNormalMainClass()
    {
        return SERVER_MAIN.equals(getMainClassname());
    }
    /** @return true if --help / -? was given */
    public boolean isHelp()
    {
        return help;
    }
    /** @return true if the classpath listing was requested */
    public boolean isListClasspath()
    {
        return listClasspath;
    }
    /** @return true if --list-config was given */
    public boolean isListConfig()
    {
        return listConfig;
    }
    /** @return true if --list-modules was given */
    public boolean isListModules()
    {
        return listModules;
    }
    /** @return true if the server should actually be started */
    public boolean isRun()
    {
        return run;
    }
    /** @return true if --stop was given */
    public boolean isStopCommand()
    {
        return stopCommand;
    }
    /** @return true if --testing-mode was given */
    public boolean isTestingModeEnabled()
    {
        return testingMode;
    }
    /** @return true if version information was requested */
    public boolean isVersion()
    {
        return version;
    }
public void parse(ConfigSources sources)
{
ListIterator<ConfigSource> iter = sources.reverseListIterator();
while (iter.hasPrevious())
{
ConfigSource source = iter.previous();
for (RawArgs.Entry arg : source.getArgs())
{
parse(arg.getLine(),arg.getOrigin());
}
}
}
    /** Parse a single argument line, replacing any previously declared property of the same key. */
    public void parse(final String rawarg, String source)
    {
        parse(rawarg,source,true);
    }
/**
* Parse a single line of argument.
*
* @param rawarg the raw argument to parse
* @param source the origin of this line of argument
* @param replaceProps true if properties in this parse replace previous ones, false to not replace.
*/
private void parse(final String rawarg, String source, boolean replaceProps)
{
if (rawarg == null)
{
return;
}
StartLog.debug("parse(\"%s\", \"%s\", %b)",rawarg,source,replaceProps);
final String arg = rawarg.trim();
if (arg.length() <= 0)
{
return;
}
if (arg.startsWith("#"))
{
return;
}
if ("--help".equals(arg) || "-?".equals(arg))
{
help = true;
run = false;
return;
}
if ("--debug".equals(arg) || arg.startsWith("--start-log-file"))
{
// valid, but handled in StartLog instead
return;
}
if ("--testing-mode".equals(arg))
{
System.setProperty("org.eclipse.jetty.start.testing","true");
testingMode = true;
return;
}
if (arg.startsWith("--include-jetty-dir="))
{
// valid, but handled in ConfigSources instead
return;
}
if ("--stop".equals(arg))
{
stopCommand = true;
run = false;
return;
}
if (arg.startsWith("--download="))
{
addFile(null,Props.getValue(arg));
run = false;
download = true;
return;
}
if (arg.equals("--create-files"))
{
run = false;
download = true;
return;
}
if ("--list-classpath".equals(arg) || "--version".equals(arg) || "-v".equals(arg) || "--info".equals(arg))
{
listClasspath = true;
run = false;
return;
}
if ("--list-config".equals(arg))
{
listConfig = true;
run = false;
return;
}
if ("--dry-run".equals(arg) || "--exec-print".equals(arg))
{
dryRun = true;
run = false;
return;
}
// Enable forked execution of Jetty server
if ("--exec".equals(arg))
{
exec = true;
return;
}
// Enable forked execution of Jetty server
if ("--approve-all-licenses".equals(arg))
{
approveAllLicenses = true;
return;
}
// Arbitrary Libraries
if (arg.startsWith("--lib="))
{
String cp = Props.getValue(arg);
if (cp != null)
{
StringTokenizer t = new StringTokenizer(cp,File.pathSeparator);
while (t.hasMoreTokens())
{
rawLibs.add(t.nextToken());
}
}
return;
}
// Module Management
if ("--list-modules".equals(arg))
{
listModules = true;
run = false;
return;
}
// jetty.base build-out : add to ${jetty.base}/start.d/
if (arg.startsWith("--add-to-startd="))
{
List<String> moduleNames = Props.getValues(arg);
addToStartdIni.addAll(moduleNames);
run = false;
download = true;
return;
}
// jetty.base build-out : add to ${jetty.base}/start.ini
if (arg.startsWith("--add-to-start="))
{
List<String> moduleNames = Props.getValues(arg);
addToStartIni.addAll(moduleNames);
run = false;
download = true;
return;
}
// Enable a module
if (arg.startsWith("--module="))
{
List<String> moduleNames = Props.getValues(arg);
enableModules(source,moduleNames);
return;
}
// Create graphviz output of module graph
if (arg.startsWith("--write-module-graph="))
{
this.moduleGraphFilename = Props.getValue(arg);
run = false;
return;
}
// Start property (syntax similar to System property)
if (arg.startsWith("-D"))
{
String[] assign = arg.substring(2).split("=",2);
systemPropertyKeys.add(assign[0]);
switch (assign.length)
{
case 2:
System.setProperty(assign[0],assign[1]);
setProperty(assign[0],assign[1],source,replaceProps);
break;
case 1:
System.setProperty(assign[0],"");
setProperty(assign[0],"",source,replaceProps);
break;
default:
break;
}
return;
}
// Anything else with a "-" is considered a JVM argument
if (arg.startsWith("-"))
{
// Only add non-duplicates
if (!jvmArgs.contains(arg))
{
jvmArgs.add(arg);
}
return;
}
// Is this a raw property declaration?
int idx = arg.indexOf('=');
if (idx >= 0)
{
String key = arg.substring(0,idx);
String value = arg.substring(idx + 1);
if (replaceProps)
{
if (propertySource.containsKey(key))
{
StartLog.warn("Property %s in %s already set in %s",key,source,propertySource.get(key));
}
propertySource.put(key,source);
}
if ("OPTION".equals(key) || "OPTIONS".equals(key))
{
StringBuilder warn = new StringBuilder();
warn.append("The behavior of the argument ");
warn.append(arg).append(" (seen in ").append(source);
warn.append(") has changed, and is now considered a normal property. ");
warn.append(key).append(" no longer controls what libraries are on your classpath,");
warn.append(" use --module instead. See --help for details.");
StartLog.warn(warn.toString());
}
setProperty(key,value,source,replaceProps);
return;
}
// Is this an xml file?
if (FS.isXml(arg))
{
// only add non-duplicates
if (!xmlRefs.contains(arg))
{
xmlRefs.add(arg);
}
return;
}
if (FS.isPropertyFile(arg))
{
// only add non-duplicates
if (!propertyFileRefs.contains(arg))
{
propertyFileRefs.add(arg);
}
return;
}
// Anything else is unrecognized
throw new UsageException(ERR_BAD_ARG,"Unrecognized argument: \"%s\" in %s",arg,source);
}
private void enableModules(String source, List<String> moduleNames)
{
for (String moduleName : moduleNames)
{
modules.add(moduleName);
List<String> list = sources.get(moduleName);
if (list == null)
{
list = new ArrayList<String>();
sources.put(moduleName,list);
}
list.add(source);
}
}
public void parseModule(Module module)
{
if(module.hasDefaultConfig())
{
for(String line: module.getDefaultConfig())
{
parse(line,module.getFilesystemRef(),false);
}
}
}
public void resolveExtraXmls(BaseHome baseHome) throws IOException
{
// Find and Expand XML files
for (String xmlRef : xmlRefs)
{
// Straight Reference
Path xmlfile = baseHome.getPath(xmlRef);
if (!FS.exists(xmlfile))
{
xmlfile = baseHome.getPath("etc/" + xmlRef);
}
addUniqueXmlFile(xmlRef,xmlfile);
}
}
public void resolvePropertyFiles(BaseHome baseHome) throws IOException
{
// Find and Expand property files
for (String propertyFileRef : propertyFileRefs)
{
// Straight Reference
Path propertyFile = baseHome.getPath(propertyFileRef);
if (!FS.exists(propertyFile))
{
propertyFile = baseHome.getPath("etc/" + propertyFileRef);
}
addUniquePropertyFile(propertyFileRef,propertyFile);
}
}
    /** Install the collection of all known modules (used by module inspection commands). */
    public void setAllModules(Modules allModules)
    {
        this.allModules = allModules;
    }
private void setProperty(String key, String value, String source, boolean replaceProp)
{
// Special / Prevent override from start.ini's
if (key.equals("jetty.home"))
{
properties.setProperty("jetty.home",System.getProperty("jetty.home"),source);
return;
}
// Special / Prevent override from start.ini's
if (key.equals("jetty.base"))
{
properties.setProperty("jetty.base",System.getProperty("jetty.base"),source);
return;
}
// Normal
if (replaceProp)
{
// always override
properties.setProperty(key,value,source);
}
else
{
// only set if unset
if (!properties.containsKey(key))
{
properties.setProperty(key,value,source);
}
}
}
    /** Override whether the server should be run (used by build-out / informational flows). */
    public void setRun(boolean run)
    {
        this.run = run;
    }
@Override
public String toString()
{
StringBuilder builder = new StringBuilder();
builder.append("StartArgs [enabledModules=");
builder.append(modules);
builder.append(", xmlRefs=");
builder.append(xmlRefs);
builder.append(", properties=");
builder.append(properties);
builder.append(", jvmArgs=");
builder.append(jvmArgs);
builder.append("]");
return builder.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.aggregate;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.jackrabbit.oak.api.PropertyValue;
import org.apache.jackrabbit.oak.query.fulltext.FullTextAnd;
import org.apache.jackrabbit.oak.query.fulltext.FullTextExpression;
import org.apache.jackrabbit.oak.query.fulltext.FullTextOr;
import org.apache.jackrabbit.oak.query.fulltext.FullTextTerm;
import org.apache.jackrabbit.oak.query.fulltext.FullTextVisitor;
import org.apache.jackrabbit.oak.query.index.FilterImpl;
import org.apache.jackrabbit.oak.spi.query.Cursor;
import org.apache.jackrabbit.oak.spi.query.Cursors;
import org.apache.jackrabbit.oak.spi.query.Cursors.AbstractCursor;
import org.apache.jackrabbit.oak.spi.query.Filter;
import org.apache.jackrabbit.oak.spi.query.IndexRow;
import org.apache.jackrabbit.oak.spi.query.QueryIndex.FulltextQueryIndex;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import com.google.common.base.Function;
import com.google.common.base.Predicates;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
/**
* A virtual full-text that can aggregate nodes based on aggregate definitions.
* Internally, it uses another full-text index.
*/
public class AggregateIndex implements FulltextQueryIndex {
    /** The wrapped full-text index; may be null (getCost() then reports infinite cost). */
    private final FulltextQueryIndex baseIndex;
    /**
     * Create an aggregating wrapper around the given full-text index.
     *
     * @param baseIndex the underlying full-text index, possibly null
     */
    public AggregateIndex(FulltextQueryIndex baseIndex) {
        this.baseIndex = baseIndex;
    }
@Override
public double getCost(Filter filter, NodeState rootState) {
if (baseIndex == null) {
return Double.POSITIVE_INFINITY;
}
double localCost = Double.POSITIVE_INFINITY;
FullTextExpression e = filter.getFullTextConstraint();
if (e != null && hasCompositeExpression(e)) {
localCost = flattenCost(e, filter, baseIndex, rootState);
}
double baseCost = baseIndex.getCost(filter, rootState);
return Math.min(localCost, baseCost) - 0.05;
}
    /**
     * Run the query, aggregating results when a node aggregator is configured.
     * <p>
     * NOTE(review): unlike getCost()/getPlan(), there is no null check on baseIndex
     * here — presumably the query engine only calls query() on an index whose
     * getCost() returned a finite value (i.e. baseIndex != null); confirm.
     */
    @Override
    public Cursor query(Filter filter, NodeState rootState) {
        // pass-through impl
        if (baseIndex.getNodeAggregator() == null) {
            return baseIndex.query(filter, rootState);
        }
        return newCursor(filter, baseIndex, rootState);
    }
private static Cursor newCursor(Filter f, FulltextQueryIndex index,
NodeState state) {
FullTextExpression e = f.getFullTextConstraint();
if (hasCompositeExpression(e)) {
Cursor c = flatten(e, f, index, state);
if (c != null) {
return c;
}
}
return new AggregationCursor(index.query(newAggregationFilter(f, null),
state), index.getNodeAggregator(), state);
}
private static boolean hasCompositeExpression(FullTextExpression ft) {
if (ft == null) {
return false;
}
final AtomicReference<Boolean> composite = new AtomicReference<Boolean>();
composite.set(false);
ft.accept(new FullTextVisitor() {
@Override
public boolean visit(FullTextTerm term) {
return true;
}
@Override
public boolean visit(FullTextAnd and) {
composite.set(true);
return true;
}
@Override
public boolean visit(FullTextOr or) {
composite.set(true);
return true;
}
});
return composite.get() && !hasNegativeContains(ft);
}
private static boolean hasNegativeContains(FullTextExpression ft) {
if (ft == null) {
return false;
}
final AtomicReference<Boolean> hasNegative = new AtomicReference<Boolean>();
hasNegative.set(false);
ft.accept(new FullTextVisitor.FullTextVisitorBase() {
@Override
public boolean visit(FullTextTerm term) {
if (term.isNot()) {
hasNegative.set(true);
}
return true;
}
});
return hasNegative.get();
}
    /**
     * Recursively convert a full-text expression into a cursor:
     * terms become aggregating cursors, AND becomes an intersection cursor,
     * OR becomes a concatenation cursor.
     */
    private static Cursor flatten(FullTextExpression constraint,
            final Filter filter, final FulltextQueryIndex index,
            final NodeState state) {
        if (constraint == null) {
            return null;
        }
        final AtomicReference<Cursor> result = new AtomicReference<Cursor>();
        constraint.accept(new FullTextVisitor() {
            @Override
            public boolean visit(FullTextTerm term) {
                // leaf: query the base index for this single term
                result.set(filterToCursor(newAggregationFilter(filter, term),
                        index, state));
                return true;
            }
            @Override
            public boolean visit(FullTextAnd and) {
                // NOTE(review): assumes and.list is non-empty (iterator.next()
                // without hasNext check) — presumably guaranteed by the parser; confirm.
                Iterator<FullTextExpression> iterator = and.list.iterator();
                Cursor c = flatten(iterator.next(), filter, index, state);
                while (iterator.hasNext()) {
                    FullTextExpression input = iterator.next();
                    Cursor newC = flatten(input, filter, index, state);
                    c = Cursors.newIntersectionCursor(c, newC,
                            filter.getQueryEngineSettings());
                }
                result.set(c);
                return true;
            }
            @Override
            public boolean visit(FullTextOr or) {
                // union: concatenate the cursors of all alternatives
                List<Cursor> cursors = Lists.transform(or.list,
                        new Function<FullTextExpression, Cursor>() {
                            @Override
                            public Cursor apply(FullTextExpression input) {
                                return flatten(input, filter, index, state);
                            }
                        });
                result.set(Cursors.newConcatCursor(cursors,
                        filter.getQueryEngineSettings()));
                return true;
            }
        });
        return result.get();
    }
private static double flattenCost(FullTextExpression constraint,
final Filter filter, final FulltextQueryIndex index,
final NodeState state) {
if (constraint == null) {
return Double.POSITIVE_INFINITY;
}
final AtomicReference<Double> result = new AtomicReference<Double>();
result.set(0d);
constraint.accept(new FullTextVisitor() {
@Override
public boolean visit(FullTextTerm term) {
result.set(result.get() + index.getCost(newAggregationFilter(filter, term), state));
return true;
}
@Override
public boolean visit(FullTextAnd and) {
for (FullTextExpression input : and.list) {
double d = flattenCost(input, filter, index, state);
result.set(result.get() + d);
}
return true;
}
@Override
public boolean visit(FullTextOr or) {
for (FullTextExpression input : or.list) {
double d = flattenCost(input, filter, index, state);
result.set(result.get() + d);
}
return true;
}
});
return result.get();
}
private static Cursor filterToCursor(Filter f, FulltextQueryIndex index,
NodeState state) {
return new AggregationCursor(index.query(f, state),
index.getNodeAggregator(), state);
}
private static Filter newAggregationFilter(Filter filter, FullTextExpression exp) {
FilterImpl f = new FilterImpl(filter);
// disables node type checks for now
f.setMatchesAllTypes(true);
if (exp != null) {
f.setFullTextConstraint(exp);
}
return f;
}
@Override
public String getPlan(Filter filter, NodeState rootState) {
if (baseIndex == null) {
return "aggregate no-index";
}
return "aggregate " + baseIndex.getPlan(filter, rootState);
}
@Override
public String getIndexName() {
if (baseIndex == null) {
return "aggregate no-index";
}
return "aggregate " + baseIndex.getIndexName();
}
@Override
public NodeAggregator getNodeAggregator() {
return baseIndex.getNodeAggregator();
}
/**
* An aggregation aware cursor.
*/
private static class AggregationCursor extends AbstractCursor {
private final Cursor cursor;
private final NodeAggregator aggregator;
private final NodeState rootState;
private boolean init;
private boolean closed;
/**
* the current row
*/
private IndexRow currentRow;
/**
* the path of the current item of the cursor
*/
private String currentPath;
/**
* all of the item's known aggregates
*/
private Iterator<String> aggregates;
/**
* should enforce uniqueness of the aggregated paths
*/
private Set<String> seenPaths = new HashSet<String>();
public AggregationCursor(Cursor cursor, NodeAggregator aggregator,
NodeState rootState) {
this.cursor = cursor;
this.aggregator = aggregator;
this.rootState = rootState;
}
@Override
public boolean hasNext() {
if (!closed && !init) {
fetchNext();
init = true;
}
return !closed;
}
private void fetchNext() {
if (aggregates != null && aggregates.hasNext()) {
currentPath = aggregates.next();
init = true;
return;
}
aggregates = null;
if (cursor.hasNext()) {
currentRow = cursor.next();
String path = currentRow.getPath();
aggregates = Iterators.filter(Iterators.concat(
Iterators.singletonIterator(path),
aggregator.getParents(rootState, path)), Predicates
.not(Predicates.in(seenPaths)));
fetchNext();
return;
}
closed = true;
}
@Override
public IndexRow next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
seenPaths.add(currentPath);
init = false;
if (currentRow.getPath().equals(currentPath)) {
return currentRow;
}
// create a new overlayed index row,
// where the path is different but all other
// properties are kept
return new IndexRow() {
@Override
public String getPath() {
return currentPath;
}
@Override
public PropertyValue getValue(String columnName) {
return currentRow.getValue(columnName);
}
};
}
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2021 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.registry.maven;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.Platform;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.connection.DBPAuthInfo;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.runtime.encode.PasswordEncrypter;
import org.jkiss.dbeaver.runtime.encode.SimpleStringEncrypter;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.utils.CommonUtils;
import org.jkiss.utils.xml.XMLBuilder;
import org.jkiss.utils.xml.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.*;
public class MavenRegistry
{
    private static final Log log = Log.getLog(MavenRegistry.class);
    // Identity of the implicit local repository created in loadStandardRepositories()
    public static final String MAVEN_LOCAL_REPO_ID = "local";
    public static final String MAVEN_LOCAL_REPO_NAME = "Local Repository";
    public static final String MAVEN_LOCAL_REPO_FOLDER = "maven-local";
    // Lazily created singleton; access only through getInstance()
    private static MavenRegistry instance = null;
    // Artifact reference prefixes (declared via the extension point) that must be skipped
    private final List<String> ignoredArtifactVersions = new ArrayList<>();
public synchronized static MavenRegistry getInstance()
{
if (instance == null) {
instance = new MavenRegistry();
instance.init();
}
return instance;
}
    // All known repositories (standard + custom); ordered by sortRepositories()
    private final List<MavenRepository> repositories = new ArrayList<>();
    private MavenRepository localRepository;
    // Cache for not found artifact ids. Avoid multiple remote metadata reading
    private final Set<String> notFoundArtifacts = new HashSet<>();
    private static final PasswordEncrypter ENCRYPTOR = new SimpleStringEncrypter();
    // Private: use getInstance(); real setup happens in init(), not here
    private MavenRegistry()
    {
    }
boolean isVersionIgnored(String ref) {
for (String ver : ignoredArtifactVersions) {
if (ref.startsWith(ver)) {
return true;
}
}
return false;
}
private void init() {
loadStandardRepositories();
loadCustomRepositories();
sortRepositories();
}
private void loadStandardRepositories() {
// Load repositories info
{
IConfigurationElement[] extElements = Platform.getExtensionRegistry().getConfigurationElementsFor(MavenRepository.EXTENSION_ID);
for (IConfigurationElement ext : extElements) {
if ("repository".equals(ext.getName())) {
MavenRepository repository = new MavenRepository(ext);
repositories.add(repository);
} else if ("ignoreArtifactVersion".equals(ext.getName())) {
ignoredArtifactVersions.add(ext.getAttribute("id"));
}
}
}
// Create local repository
String localRepoURL;
try {
localRepoURL = Platform.getInstallLocation().getDataArea(MAVEN_LOCAL_REPO_FOLDER).toString();
} catch (IOException e) {
localRepoURL = Platform.getInstallLocation().getURL().toString() + "/" + MAVEN_LOCAL_REPO_FOLDER;
}
localRepository = new MavenRepository(
MAVEN_LOCAL_REPO_ID,
MAVEN_LOCAL_REPO_NAME,
localRepoURL,
MavenRepository.RepositoryType.LOCAL);
}
public void setCustomRepositories(List<MavenRepository> customRepositories) {
// Clear not-found cache
notFoundArtifacts.clear();
// Remove old custom repos
for (Iterator<MavenRepository> iter = this.repositories.iterator(); iter.hasNext(); ) {
if (iter.next().getType() == MavenRepository.RepositoryType.CUSTOM) {
iter.remove();
}
}
// Add new and reorder
this.repositories.addAll(customRepositories);
sortRepositories();
}
public void loadCustomRepositories() {
final File cfgFile = getConfigurationFile();
if (cfgFile.exists()) {
try {
final Document reposDocument = XMLUtils.parseDocument(cfgFile);
for (Element repoElement : XMLUtils.getChildElementList(reposDocument.getDocumentElement(), "repository")) {
String repoID = repoElement.getAttribute("id");
MavenRepository repo = findRepository(repoID);
if (repo == null) {
String repoName = repoElement.getAttribute("name");
String repoURL = repoElement.getAttribute("url");
repo = new MavenRepository(
repoID,
repoName,
repoURL,
MavenRepository.RepositoryType.CUSTOM);
List<String> scopes = new ArrayList<>();
for (Element scopeElement : XMLUtils.getChildElementList(repoElement, "scope")) {
scopes.add(scopeElement.getAttribute("group"));
}
repo.setScopes(scopes);
repositories.add(repo);
}
repo.setOrder(CommonUtils.toInt(repoElement.getAttribute("order")));
repo.setEnabled(CommonUtils.toBoolean(repoElement.getAttribute("enabled")));
final String authUser = repoElement.getAttribute("auth-user");
if (!CommonUtils.isEmpty(authUser)) {
repo.getAuthInfo().setUserName(authUser);
String authPassword = repoElement.getAttribute("auth-password");
if (!CommonUtils.isEmpty(authPassword)) {
repo.getAuthInfo().setUserPassword(ENCRYPTOR.decrypt(authPassword));
}
}
}
} catch (Exception e) {
log.error("Error parsing maven repositories configuration", e);
}
}
}
@NotNull
public List<MavenRepository> getRepositories() {
return repositories;
}
@Nullable
public MavenArtifactVersion findArtifact(@NotNull DBRProgressMonitor monitor, @Nullable MavenArtifactVersion owner, @NotNull MavenArtifactReference ref) {
String fullId = ref.getId();
if (notFoundArtifacts.contains(fullId)) {
return null;
}
MavenArtifactVersion artifact = findInRepositories(monitor, owner, ref);
if (artifact != null) {
return artifact;
}
// Not found
notFoundArtifacts.add(fullId);
return null;
}
public void resetArtifactInfo(MavenArtifactReference artifactReference) {
notFoundArtifacts.remove(artifactReference.getId());
for (MavenRepository repository : repositories) {
repository.resetArtifactCache(artifactReference);
}
localRepository.resetArtifactCache(artifactReference);
}
@Nullable
private MavenArtifactVersion findInRepositories(@NotNull DBRProgressMonitor monitor, MavenArtifactVersion owner, @NotNull MavenArtifactReference ref) {
MavenRepository currentRepository = owner == null ? null : owner.getArtifact().getRepository();
if (currentRepository != null) {
MavenArtifactVersion artifact = currentRepository.findArtifact(monitor, ref);
if (artifact != null) {
return artifact;
}
}
// Try all available repositories (without resolve)
for (MavenRepository repository : repositories) {
if (!repository.isEnabled()) {
continue;
}
if (repository != currentRepository) {
if (!repository.getScopes().isEmpty()) {
// Check scope (group id)
if (!repository.getScopes().contains(ref.getGroupId())) {
continue;
}
}
MavenArtifactVersion artifact = repository.findArtifact(monitor, ref);
if (artifact != null) {
return artifact;
}
}
}
if (owner != null) {
// Try context repositories
for (MavenRepository repository : owner.getActiveRepositories()) {
if (repository != currentRepository) {
MavenArtifactVersion artifact = repository.findArtifact(monitor, ref);
if (artifact != null) {
return artifact;
}
}
}
}
if (localRepository != currentRepository) {
MavenArtifactVersion artifact = localRepository.findArtifact(monitor, ref);
if (artifact != null) {
return artifact;
}
}
log.warn("Maven artifact '" + ref + "' not found in any available repository.");
return null;
}
public MavenRepository findRepository(String id) {
for (MavenRepository repository : repositories) {
if (repository.getId().equals(id)) {
return repository;
}
}
return null;
}
public void saveConfiguration() {
sortRepositories();
try (OutputStream is = new FileOutputStream(getConfigurationFile())) {
XMLBuilder xml = new XMLBuilder(is, GeneralUtils.UTF8_ENCODING);
xml.setButify(true);
try (final XMLBuilder.Element e1 = xml.startElement("maven")) {
for (MavenRepository repository : repositories) {
try (final XMLBuilder.Element e2 = xml.startElement("repository")) {
xml.addAttribute("id", repository.getId());
xml.addAttribute("order", repository.getOrder());
xml.addAttribute("enabled", repository.isEnabled());
if (repository.getType() != MavenRepository.RepositoryType.GLOBAL) {
xml.addAttribute("url", repository.getUrl());
xml.addAttribute("name", repository.getName());
if (!CommonUtils.isEmpty(repository.getDescription())) {
xml.addAttribute("description", repository.getDescription());
}
for (String scope : repository.getScopes()) {
try (final XMLBuilder.Element e3 = xml.startElement("scope")) {
xml.addAttribute("group", scope);
}
}
final DBPAuthInfo authInfo = repository.getAuthInfo();
if (!CommonUtils.isEmpty(authInfo.getUserName())) {
xml.addAttribute("auth-user", authInfo.getUserName());
if (!CommonUtils.isEmpty(authInfo.getUserPassword())) {
xml.addAttribute("auth-password", ENCRYPTOR.encrypt(authInfo.getUserPassword()));
}
}
}
}
}
}
xml.flush();
} catch (Exception e) {
log.error("Error saving Maven registry", e);
}
}
private void sortRepositories() {
repositories.sort(Comparator.comparingInt(MavenRepository::getOrder));
}
private static File getConfigurationFile()
{
return DBWorkbench.getPlatform().getConfigurationFile("maven-repositories.xml");
}
}
| |
/*
* DisplayPreferences2.java Created on March 31, 2010, 9:54 PM
*/
package multiplicity3.config.display;
import java.awt.Color;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.util.Arrays;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
import multiplicity3.config.PreferencesItem;
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;
import com.jme3.system.AppSettings;
import com.jme3.system.JmeSystem;
/**
* @author dcs0ah1
*/
@SuppressWarnings(
{ "rawtypes", "unchecked" })
public class DisplayConfigPanel extends JPanel implements PreferencesItem
{
    /** The Constant serialVersionUID. */
    private static final long serialVersionUID = 8757133417077939163L;

    /** Text field for the minimum alpha bit depth. */
    private JTextField alphaBitsField = new JTextField();

    /** The alpha bits label. */
    private JLabel alphaBitsLabel = new JLabel();

    /** Text field for the minimum number of anti-alias samples. */
    private JTextField antiAliasField = new JTextField();

    /** The anti alias label. */
    private JLabel antiAliasLabel = new JLabel();

    /** Text field for the minimum depth-buffer bit depth. */
    private JTextField depthBitsField = new JTextField();

    /** The depth bits label. */
    private JLabel depthBitsLabel = new JLabel();

    /** Combo box listing the available display modes. */
    private JComboBox displaySelector = new JComboBox();

    /** The display size label. */
    private JLabel displaySizeLabel = new JLabel();

    /** Text field for the physical display width in metres. */
    private JTextField displayWidthField = new JTextField();

    /** The display width label. */
    private JLabel displayWidthLabel = new JLabel();

    /** Check box toggling full-screen mode. */
    private JCheckBox fullScreen = new JCheckBox();

    /** Label for the input type selector. */
    private JLabel inputType = new JLabel();

    /** Combo box listing the supported input types. */
    private JComboBox jcb = new JComboBox();

    /** The preferences store backing this panel. */
    private final DisplayPrefsItem prefs;

    /** Text field for the minimum stencil bit depth. */
    private JTextField stencilBitsField = new JTextField();

    /** The Stencil bits label. */
    private JLabel StencilBitsLabel = new JLabel();

    /** Label for the TUIO port option (visible only for TUIO input). */
    private JLabel tuioLabel = new JLabel();

    /** Text field for the TUIO port (visible only for TUIO input). */
    private JTextField tuioTextbox = new JTextField();

    /**
     * Instantiates a new display config panel backed by the given preferences.
     *
     * @param prefs
     *            the preferences store to read and update
     */
    public DisplayConfigPanel(DisplayPrefsItem prefs)
    {
        this.prefs = prefs;
        initComponents();
        loadCurrentSettings();
    }

    /*
     * (non-Javadoc)
     * @see multiplicity3.config.PreferencesItem#getConfigurationPanel()
     */
    @Override
    public JPanel getConfigurationPanel()
    {
        return this;
    }

    /*
     * (non-Javadoc)
     * @see multiplicity3.config.PreferencesItem#getConfigurationPanelName()
     */
    @Override
    public String getConfigurationPanelName()
    {
        return "Display";
    }

    /**
     * Gets the display mode matching the saved preferences, or null when none
     * of the given modes matches.
     *
     * @param modes
     *            the modes to search
     * @return the matching display mode, or null
     */
    public DisplayMode getCurrentDisplayMode(DisplayMode[] modes)
    {
        for (DisplayMode m : modes)
        {
            if (matchesPrefs(m))
            {
                return m;
            }
        }
        return null;
    }

    /**
     * True when the given mode matches the width, height, colour depth and
     * refresh rate stored in the preferences.
     *
     * @param m
     *            the mode to test
     * @return whether the mode matches the saved preferences
     */
    private boolean matchesPrefs(DisplayMode m)
    {
        return (m.getHeight() == prefs.getHeight()) && (m.getWidth() == prefs.getWidth()) && (m.getBitsPerPixel() == prefs.getBitsPerPixel()) && (m.getFrequency() == prefs.getFrequency());
    }

    /**
     * Stores the newly selected display mode into the preferences.
     *
     * @param evt
     *            the selection event
     */
    private void displaySelectorItemStateChanged(java.awt.event.ItemEvent evt)
    {
        if ((evt.getStateChange() == ItemEvent.SELECTED) && (displaySelector != null))
        {
            DisplayMode m = (DisplayMode) displaySelector.getSelectedItem();
            setSelectedDisplayMode(m);
        }
    }

    /**
     * Gets the index of the display mode matching the saved preferences.
     *
     * @param modes
     *            the modes to search
     * @return the index of the matching mode, or -1 when none matches
     */
    private int getCurrentDisplayModeIndex(DisplayMode[] modes)
    {
        for (int i = 0; i < modes.length; i++)
        {
            if (matchesPrefs(modes[i]))
            {
                return i;
            }
        }
        return -1;
    }

    /**
     * Queries LWJGL for the available display modes, sorted by
     * {@link DisplayModeComparator}.
     * <p>
     * Fix: returns an empty array (never null) when the query fails, so callers
     * need no null check. Note this call is expensive: it initialises JME and
     * creates/destroys an LWJGL display, so callers should query once and reuse
     * the result.
     *
     * @return the sorted display modes, possibly empty
     */
    private DisplayMode[] getDisplayModes()
    {
        try
        {
            AppSettings settings = new AppSettings(true);
            JmeSystem.initialize(settings);
            DisplayMode[] modes = Display.getAvailableDisplayModes();
            Arrays.sort(modes, new DisplayModeComparator());
            Display.destroy();
            return modes;
        }
        catch (LWJGLException e)
        {
            e.printStackTrace();
            // Fix: previously returned null here, causing an NPE downstream.
            return new DisplayMode[0];
        }
    }

    /**
     * Parses a float from the text field, colouring the field red on bad input.
     *
     * @param tf
     *            the text field to parse
     * @param previousValue
     *            the value to keep when the field is empty or invalid
     * @return the parsed float, or previousValue
     */
    private float getFloatFromTextField(JTextField tf, float previousValue)
    {
        if (tf.getText().length() > 0)
        {
            try
            {
                float num = Float.parseFloat(tf.getText());
                tf.setForeground(Color.black);
                return num;
            }
            catch (NumberFormatException ex)
            {
                // Signal the invalid entry without discarding the old setting.
                tf.setForeground(Color.red);
            }
        }
        return previousValue;
    }

    /**
     * Parses an int from the text field, colouring the field red on bad input.
     *
     * @param tf
     *            the text field to parse
     * @param previousValue
     *            the value to keep when the field is empty or invalid
     * @return the parsed int, or previousValue
     */
    private int getIntegerFromTextField(JTextField tf, int previousValue)
    {
        if (tf.getText().length() > 0)
        {
            try
            {
                int num = Integer.parseInt(tf.getText());
                tf.setForeground(Color.black);
                return num;
            }
            catch (NumberFormatException ex)
            {
                // Signal the invalid entry without discarding the old setting.
                tf.setForeground(Color.red);
            }
        }
        return previousValue;
    }

    /**
     * Builds the widgets, wires their listeners to the preferences store and
     * lays everything out with absolute positions.
     */
    private void initComponents()
    {
        displaySizeLabel.setText("Display Size:");
        displaySizeLabel.setName("jLabel1");
        // Placeholder entries; initDisplaySelector() replaces them with real modes.
        displaySelector.setModel(new DefaultComboBoxModel(new String[]
        { "Item 1", "Item 2", "Item 3", "Item 4" }));
        displaySelector.setName("displaySelector");
        displaySelector.addItemListener(new java.awt.event.ItemListener()
        {
            @Override
            public void itemStateChanged(java.awt.event.ItemEvent evt)
            {
                displaySelectorItemStateChanged(evt);
            }
        });
        fullScreen.setText("Full Screen");
        fullScreen.setHorizontalTextPosition(SwingConstants.LEADING);
        fullScreen.setName("fullScreen");
        fullScreen.addActionListener(new java.awt.event.ActionListener()
        {
            @Override
            public void actionPerformed(java.awt.event.ActionEvent evt)
            {
                prefs.setFullScreen(fullScreen.isSelected());
            }
        });
        antiAliasLabel.setText("Anti-alias min samples:");
        antiAliasLabel.setName("jLabel2");
        antiAliasField.setText("jTextField1");
        antiAliasField.setName("antiAlias");
        antiAliasField.addKeyListener(new java.awt.event.KeyAdapter()
        {
            @Override
            public void keyReleased(java.awt.event.KeyEvent evt)
            {
                prefs.setMinimumAntiAliasSamples(getIntegerFromTextField(antiAliasField, prefs.getMinimumAntiAliasSamples()));
            }
        });
        StencilBitsLabel.setText("Stencil bits:");
        StencilBitsLabel.setName("jLabel3");
        stencilBitsField.setText("jTextField1");
        stencilBitsField.setName("stencilBits");
        stencilBitsField.addKeyListener(new java.awt.event.KeyAdapter()
        {
            @Override
            public void keyReleased(java.awt.event.KeyEvent evt)
            {
                prefs.setStencilBits(getIntegerFromTextField(stencilBitsField, prefs.getStencilBits()));
            }
        });
        alphaBitsLabel.setText("Alpha bits:");
        alphaBitsLabel.setName("jLabel4");
        alphaBitsField.setText("jTextField1");
        alphaBitsField.setName("alphaBits");
        alphaBitsField.addKeyListener(new java.awt.event.KeyAdapter()
        {
            @Override
            public void keyReleased(java.awt.event.KeyEvent evt)
            {
                prefs.setAlphaBits(getIntegerFromTextField(alphaBitsField, prefs.getAlphaBits()));
            }
        });
        depthBitsLabel.setText("Depth bits:");
        depthBitsLabel.setName("jLabel5");
        depthBitsField.setText("jTextField1");
        depthBitsField.setName("depthBits");
        depthBitsField.addKeyListener(new java.awt.event.KeyAdapter()
        {
            @Override
            public void keyReleased(java.awt.event.KeyEvent evt)
            {
                prefs.setDepthBits(getIntegerFromTextField(depthBitsField, prefs.getDepthBits()));
            }
        });
        displayWidthLabel.setText("Display Width (m):");
        displayWidthLabel.setName("jLabel6");
        displayWidthField.setText("jTextField1");
        displayWidthField.setName("displayWidth");
        displayWidthField.addKeyListener(new java.awt.event.KeyAdapter()
        {
            @Override
            public void keyReleased(java.awt.event.KeyEvent evt)
            {
                prefs.setRealWidth(getFloatFromTextField(displayWidthField, prefs.getRealWidth()));
            }
        });
        inputType.setText("Input Type:");
        initInputSelector();
        jcb.addActionListener(new ActionListener()
        {
            @Override
            public void actionPerformed(ActionEvent e)
            {
                prefs.setInputType((String) jcb.getSelectedItem());
                // TUIO options only make sense for the TUIO input type.
                updateTuioOptionsVisibility();
            }
        });
        tuioLabel.setText("TUIO Port: ");
        tuioTextbox.setText("" + prefs.getTuioPort());
        tuioTextbox.addKeyListener(new java.awt.event.KeyAdapter()
        {
            @Override
            public void keyReleased(java.awt.event.KeyEvent evt)
            {
                prefs.setTuioPort(getIntegerFromTextField(tuioTextbox, prefs.getTuioPort()));
            }
        });
        // Absolute layout with hand-placed bounds.
        setLayout(null);
        inputType.setBounds(new Rectangle(30, 30, 100, 24));
        jcb.setBounds(new Rectangle(130, 30, 200, 24));
        tuioLabel.setBounds(new Rectangle(30, 60, 250, 24));
        tuioTextbox.setBounds(new Rectangle(130, 60, 150, 24));
        displaySizeLabel.setBounds(new Rectangle(30, 90, 100, 24));
        displaySelector.setBounds(new Rectangle(130, 90, 200, 24));
        fullScreen.setBounds(new Rectangle(350, 90, 125, 24));
        displayWidthLabel.setBounds(new Rectangle(30, 165, 175, 24));
        displayWidthField.setBounds(new Rectangle(200, 165, 60, 24));
        antiAliasLabel.setBounds(new Rectangle(30, 195, 175, 24));
        antiAliasField.setBounds(new Rectangle(200, 195, 35, 24));
        StencilBitsLabel.setBounds(new Rectangle(275, 195, 150, 24));
        stencilBitsField.setBounds(new Rectangle(360, 195, 35, 24));
        alphaBitsLabel.setBounds(new Rectangle(30, 225, 150, 24));
        alphaBitsField.setBounds(new Rectangle(200, 225, 35, 24));
        depthBitsLabel.setBounds(new Rectangle(275, 225, 150, 24));
        depthBitsField.setBounds(new Rectangle(360, 225, 35, 24));
        updateTuioOptionsVisibility();
        add(inputType);
        add(jcb);
        add(tuioLabel);
        add(tuioTextbox);
        add(displaySizeLabel);
        add(displaySelector);
        add(fullScreen);
        add(displayWidthLabel);
        add(displayWidthField);
        add(antiAliasLabel);
        add(antiAliasField);
        add(StencilBitsLabel);
        add(stencilBitsField);
        add(alphaBitsLabel);
        add(alphaBitsField);
        add(depthBitsLabel);
        add(depthBitsField);
    }

    /**
     * (Re)populates the display-mode selector.
     * <p>
     * Fix: the mode list is queried once and reused — the original called
     * getDisplayModes() four times, and every call initialises JME and
     * creates/destroys an LWJGL display. Also guards against an empty mode
     * list, which previously caused a NullPointerException or
     * ArrayIndexOutOfBoundsException when the LWJGL query failed.
     */
    private void initDisplaySelector()
    {
        DisplayMode[] modes = getDisplayModes();
        // Capture the saved display mode first: adding items fires the selection
        // listener, which overwrites the preferences.
        DisplayMode currentMode = getCurrentDisplayMode(modes);
        displaySelector.removeAllItems();
        for (DisplayMode dm : modes)
        {
            displaySelector.addItem(dm);
        }
        if (modes.length == 0)
        {
            // Nothing to select; leave the stored preferences untouched.
            return;
        }
        if (currentMode == null)
        {
            // We didn't already have a saved display mode; pick the first.
            currentMode = modes[0];
        }
        // Restore the saved (or default) display mode into the preferences,
        // then reflect it in the selector.
        setSelectedDisplayMode(currentMode);
        displaySelector.setSelectedIndex(getCurrentDisplayModeIndex(modes));
    }

    /**
     * Initialises the full-screen check box from the preferences.
     */
    private void initFullScreen()
    {
        fullScreen.setSelected(prefs.getFullScreen());
    }

    /**
     * Populates the input-type selector and selects the saved input type.
     */
    private void initInputSelector()
    {
        jcb.removeAllItems();
        for (String input : DisplayPrefsItem.INPUT_TYPES)
        {
            jcb.addItem(input);
        }
        jcb.setSelectedItem(prefs.getInputType());
    }

    /**
     * Loads every widget's value from the preferences store.
     */
    private void loadCurrentSettings()
    {
        initDisplaySelector();
        initFullScreen();
        antiAliasField.setText("" + prefs.getMinimumAntiAliasSamples());
        alphaBitsField.setText("" + prefs.getAlphaBits());
        stencilBitsField.setText("" + prefs.getStencilBits());
        depthBitsField.setText("" + prefs.getDepthBits());
        displayWidthField.setText("" + prefs.getRealWidth());
    }

    /**
     * Stores the given display mode's parameters into the preferences.
     *
     * @param m
     *            the new selected display mode
     */
    private void setSelectedDisplayMode(DisplayMode m)
    {
        prefs.setWidth(m.getWidth());
        prefs.setHeight(m.getHeight());
        prefs.setBitsPerPixel(m.getBitsPerPixel());
        prefs.setFrequency(m.getFrequency());
    }

    /**
     * Shows the TUIO options only when the TUIO input type is selected.
     */
    private void updateTuioOptionsVisibility()
    {
        boolean tuioSelected = prefs.getInputType().equals(DisplayPrefsItem.INPUT_TYPES[1]);
        tuioLabel.setVisible(tuioSelected);
        tuioTextbox.setVisible(tuioSelected);
    }
}
| |
/**
* Handles game turn management.
*/
package com.jrdbnntt.aggravation.game;
import java.awt.Color;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import javax.swing.JButton;
import com.jrdbnntt.aggravation.Aggravation;
import com.jrdbnntt.aggravation.Util.Log;
import com.jrdbnntt.aggravation.board.Marble;
import com.jrdbnntt.aggravation.board.space.HomeSpace;
import com.jrdbnntt.aggravation.board.space.Space;
import com.jrdbnntt.aggravation.game.Player.Status;
public class Game implements ActionListener {
public static enum Status {
NEW, STARTED, ENDED,
WAITING_FOR_ROLL,
WAITING_FOR_MARBLE_SELECTION,
WAITING_FOR_MOVE_CHOICE,
PROCESSING
}
public static final String
AK_ROLL = "AK_ROLL";
private static Game currInstance;
private static final int DIE_SIDES = 6;
private Player[] players = new Player[Aggravation.MAX_PLAYERS];
private ArrayList<Integer> turnOrder = new ArrayList<Integer>(); //Randomized order of player indexes
private GameDisplay display;
private Game.Status currentStatus = Game.Status.NEW;
private int roll = 0; //current turn role
private Random rand = new Random(System.currentTimeMillis());
//Turn vars
private int currPlayerIndex; //Current index in turnOrder
private Player currPlayer; //Ref to current player
private Map<Space,ArrayList<Space>> allPossDst; //possible moves from choice
private Space selectedSource; //has the marble
private Space selectedDestination; //has no marble, where it is moved to
private boolean marbleMoved;
public Game() {
init();
}
public static void load() {
Game.currInstance = new Game();
}
public static Game getCurrentInstance() {
return Game.currInstance;
}
/**
* Initializes empty game=
*/
public void init() {
//initialize with a null set of players
for(int i = 0; i < this.players.length; ++i)
this.players[i] = null;
this.setStatus(Game.Status.NEW);
Log.v("GAME","Initialized.");
}
public void start(Player[] pSet) {
this.setStatus(Game.Status.STARTED);
definePlayers(pSet);
this.display = new GameDisplay();
this.startTurn();
}
public void end() {
this.setStatus(Game.Status.ENDED);
display.getToolBox().addLogMessage(currPlayer + " has won!", false);
//update statuses
for(int i : turnOrder)
players[i].setStatus(Player.Status.LOSER);
currPlayer.setStatus(Player.Status.WINNER);
display.refresh();
}
/**
* Sets up the players in the game
*/
private void definePlayers(Player[] pSet) {
//prompt for user creation TODO
for(int i = 0; i < pSet.length; ++i) {
this.players[i] = pSet[i];
}
//Create turn order
turnOrder = new ArrayList<Integer>();
for(int i = 0; i < Aggravation.MAX_PLAYERS; ++i)
if(this.players[i] != null)
turnOrder.add(i);
Collections.shuffle(turnOrder);
currPlayerIndex = 0;
String str = turnOrder.size()+" Players defined with order ";
for(int i : turnOrder)
str+= i+" ";
Log.v("GAME", str);
}
/**
* Updates player information based upon current player + game status
*/
public void updatePlayers() {
//Reset statuses
for(int i : turnOrder)
players[i].setStatus(Player.Status.WAITING);
currPlayer.setStatus(Player.Status.CURRENT_PLAYER);
}
/**
* Retrieve player by index
* @param i
* @throws NullPointerException when player has not been set
* @return
*/
public Player getPlayer(int i) throws NullPointerException {
return this.players[i];
}
public ArrayList<Integer> getTurnOrder() {
return this.turnOrder;
}
public Player getCurrentPlayer() {
return players[turnOrder.get(currPlayerIndex)];
}
public GameDisplay getDisplay() {
return this.display;
}
/**
* Handle the current turn
*/
private void startTurn() {
currPlayer = getCurrentPlayer();
Log.d("GAME", "New turn started for Player #"
+ turnOrder.get(currPlayerIndex) + ", \'"
+ currPlayer+"\'");
marbleMoved = false;
display.getToolBox().addLogMessage(
currPlayer
+" it is your turn to roll!",false);
display.getToolBox().getRollButton().setEnabled(true);
display.getToolBox().getRollButton().addActionListener(this);
updatePlayers();
this.setStatus(Game.Status.WAITING_FOR_ROLL);
display.refresh();
}
private void endCurrentTurn() {
boolean gameOver = true;
for(Space s : display.getBoard().getPlayerHomes(currPlayer)) {
if(!s.hasMarble())
gameOver = false;
}
if(gameOver) {
this.end();
this.setStatus(Game.Status.ENDED);
} else {
//Switch to next player, or stay the same if 6
if(roll != 6 || !marbleMoved) {
++currPlayerIndex;
if(currPlayerIndex == turnOrder.size())
currPlayerIndex = 0;
}
this.startTurn();
}
}
@Override
public void actionPerformed(ActionEvent e) {
Log.v("GAME-ACTION", e.getActionCommand());
switch(e.getActionCommand()) {
case Game.AK_ROLL:
display.getToolBox().getRollButton().setEnabled(false);
if(this.currentStatus == Game.Status.WAITING_FOR_ROLL) {
this.setStatus(Game.Status.PROCESSING);
roll = this.rand.nextInt(Game.DIE_SIDES)+1;
display.getToolBox().addLogMessage(currPlayer + " rolls " + roll,false);
Log.d("GAME", "Player "+currPlayer + " rolled a " + roll);
this.findPossibleDestinations();
String str = "Possible Destinations ("+allPossDst.size()+"): ";
for(Space key : allPossDst.keySet()) {
str += key + "[";
for(Space dst : allPossDst.get(key))
str += dst + " ";
str += "] ";
}
Log.d("GAME", str);
if(allPossDst.isEmpty()) {
//player cannot move
Log.d("GAME", "Player cannot move, skipping");
display.getToolBox().addLogMessage(currPlayer+" cannot move any marbles!");
display.refresh();
this.endCurrentTurn();
} else {
this.setStatus(Game.Status.WAITING_FOR_MARBLE_SELECTION);
display.getToolBox().addLogMessage(currPlayer + ", Please select one of your marbles to move "+roll);
display.refresh();
}
}
}
}
private void setStatus(Status s) {
Log.d("GAME-Status Change",s.name());
this.currentStatus = s;
}
public Game.Status getStatus() {
return this.currentStatus;
}
public void onSpaceClicked(Space s) {
final String KEY = "GAME-SpaceSelect";
switch(this.currentStatus) {
case WAITING_FOR_MARBLE_SELECTION:
if(chooseMarbleSource(s)) {
Log.d(KEY, currPlayer + " selected marble at " + s);
}
break;
case WAITING_FOR_MOVE_CHOICE:
if(checkMarbleDestination(s)) {
this.setStatus(Game.Status.PROCESSING);
Log.d(KEY, currPlayer + " selected open space at " + s);
this.selectedDestination = s;
this.makeMove();
} else if(chooseMarbleSource(s)) {
Log.d(KEY, currPlayer + " selected marble at " + s);
} else {
Log.d("GAME", "Player " + currPlayer+ " chose invalid move for marble at "+selectedSource);
}
break;
default:
break;
}
}
/**
* Checks to see if the current player can move this marble, then does so if is valid.
* @return
*/
private boolean chooseMarbleSource(Space s) {
if(s.hasMarble() && s.getMarble().getOwner() == currPlayer) {
this.setStatus(Game.Status.PROCESSING);
this.selectedSource = s;
s.setFocus(true);
if(allPossDst.get(selectedSource) == null) {
//player cannot move
Log.d("GAME", "Player "+currPlayer+" selected invalid marble at " + selectedSource);
s.setFocus(false);
selectedSource = null;
setStatus(Game.Status.WAITING_FOR_MARBLE_SELECTION);
display.getToolBox().addLogMessage("You cannot move that marble!");
display.refresh();
return false;
} else {
this.setStatus(Game.Status.WAITING_FOR_MOVE_CHOICE);
display.getToolBox().addLogMessage(currPlayer+", Please select a space to move the marble " + roll);
display.refresh();
return true;
}
} else {
return false;
}
}
/**
* Checks to see if the current player can move the currentSource here.
* @return
*/
private boolean checkMarbleDestination(Space s) {
for(Space valid : allPossDst.get(selectedSource))
if(s == valid)
return true;
return false;
}
/**
* Performs marble movement
*/
private void makeMove() {
Marble m = this.selectedSource.getMarble();
//make swap
this.selectedSource.clearMarble();
this.selectedSource.setFocus(false);
this.selectedDestination.setMarble(m);
Log.d("GAME-Move", "Marble moved from "+this.selectedSource+" to "+this.selectedDestination);
this.marbleMoved = true;
this.endCurrentTurn();
}
/**
* Finds all the possible moves the player could chose
*/
private void findPossibleDestinations() {
allPossDst = new HashMap<Space, ArrayList<Space>>();
for(Space initial : display.getBoard().getPlayerMarbleSpaces(currPlayer)) {
ArrayList<Space> possDst = new ArrayList<Space>();
switch(initial.getType()) {
case BASE:
if(roll == 1 || roll == 6) {
//can exit base
Space start = display.getBoard().getPlayerStart(currPlayer);
if(!start.hasMarble() || start.getMarble().getOwner() != currPlayer)
possDst.add(start);
}
break;
case CENTER:
if(roll == 1) {
//can exit center
Space[] corners = display.getBoard().getCorners();
for(Space c : corners) {
if(!c.hasMarble() || c.getMarble().getOwner() != currPlayer)
possDst.add(c);
}
}
break;
case LOOP:
case CORNER:
case HOME:
findPaths(initial, possDst, initial, roll);
}
if(!possDst.isEmpty())
allPossDst.put(initial, possDst);
}
}
private void findPaths(Space initial, ArrayList<Space> possDst, Space src, int moves) {
Boolean goodDst;
if(moves == 0) {
possDst.add(src);
} else {
Space[] adj = display.getBoard().getNextSpaces(src);
for(Space s : adj) {
//End path if cannot continue
Log.e("PATH", ""+src);
if(!s.hasMarble() || s.getMarble().getOwner() != currPlayer) {
goodDst = true;
switch(src.getType()) {
case LOOP:
goodDst = !((s.getType() == Space.Type.HOME &&
((HomeSpace)s).getOwner() != currPlayer) || //cannot enter another player's home
src == display.getBoard().getPlayerHomeEntrance(currPlayer)); //must go home if at entrance
break;
case CENTER:
goodDst = moves == roll; //can only move once from center
break;
case CORNER:
goodDst = !((s.getType() == Space.Type.CORNER &&
initial.getType() != Space.Type.CORNER) || //have moved along non-corner
(src != initial &&
initial.getType() == Space.Type.CORNER &&
s.getType() != Space.Type.CORNER)); //only moving among corners
break;
default: break;
}
if(goodDst)
findPaths(initial, possDst,s, moves - 1);
}
}
}
}
}
| |
/* The contents of this file are subject to the license and copyright terms
* detailed in the license directory at the root of the source tree (also
* available online at http://fedora-commons.org/license/).
*/
package org.fcrepo.server.security;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.xml.transform.Transformer;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.fcrepo.common.Constants;
import org.fcrepo.server.Server;
import org.fcrepo.server.config.ModuleConfiguration;
import org.fcrepo.server.errors.GeneralException;
import org.fcrepo.server.validation.ValidationUtility;
import org.fcrepo.utilities.FileUtils;
import org.fcrepo.utilities.XmlTransformUtility;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.security.xacml.sunxacml.AbstractPolicy;
import org.jboss.security.xacml.sunxacml.EvaluationCtx;
import org.jboss.security.xacml.sunxacml.PolicySet;
import org.jboss.security.xacml.sunxacml.attr.AttributeValue;
import org.jboss.security.xacml.sunxacml.attr.BagAttribute;
import org.jboss.security.xacml.sunxacml.attr.StringAttribute;
import org.jboss.security.xacml.sunxacml.combine.OrderedDenyOverridesPolicyAlg;
import org.jboss.security.xacml.sunxacml.combine.PolicyCombiningAlgorithm;
import org.jboss.security.xacml.sunxacml.cond.EvaluationResult;
import org.jboss.security.xacml.sunxacml.ctx.Status;
import org.jboss.security.xacml.sunxacml.finder.PolicyFinder;
import org.jboss.security.xacml.sunxacml.finder.PolicyFinderResult;
/**
* XACML PolicyFinder for Fedora.
* <p>
* This provides repository-wide policies and object-specific policies,
* when available.
*/
public class PolicyFinderModule
extends org.jboss.security.xacml.sunxacml.finder.PolicyFinderModule {
// SLF4J logger for this policy finder module.
private static final Logger logger =
LoggerFactory.getLogger(PolicyFinderModule.class);
// Status codes returned when policy evaluation fails; holds the single processing-error code.
private static final List<String> ERROR_CODE_LIST = new ArrayList<String>(1);
static {
ERROR_CODE_LIST.add(Status.STATUS_PROCESSING_ERROR);
}
private static final String DEFAULT = "default";
// Combining algorithm used when the module configuration names none.
private static final String DEFAULT_XACML_COMBINING_ALGORITHM = "org.jboss.security.xacml.sunxacml.combine.OrderedDenyOverridesPolicyAlg";
// Base directory (relative to the server home) for Fedora's internal-use policies.
private static final String XACML_DIST_BASE = "fedora-internal-use";
private static final String DEFAULT_REPOSITORY_POLICIES_DIRECTORY =
XACML_DIST_BASE
+ "/fedora-internal-use-repository-policies-approximating-2.0";
// Directory holding the generated backend-service policies.
private static final String BACKEND_POLICIES_ACTIVE_DIRECTORY =
XACML_DIST_BASE + "/fedora-internal-use-backend-service-policies";
private static final String BE_SECURITY_XML_LOCATION =
"config/beSecurity.xml";
// XSL stylesheet used to build backend policies from beSecurity.xml.
private static final String BACKEND_POLICIES_XSL_LOCATION =
XACML_DIST_BASE + "/build-backend-policy.xsl";
// Keys of the module-configuration parameters read by the constructor.
private static final String COMBINING_ALGORITHM_KEY = "XACML-COMBINING-ALGORITHM";
private static final String REPOSITORY_POLICIES_DIRECTORY_KEY =
"REPOSITORY-POLICIES-DIRECTORY";
private static final String POLICY_SCHEMA_PATH_KEY = "POLICY-SCHEMA-PATH";
private static final String VALIDATE_REPOSITORY_POLICIES_KEY =
"VALIDATE-REPOSITORY-POLICIES";
private static final String VALIDATE_OBJECT_POLICIES_FROM_DATASTREAM_KEY =
"VALIDATE-OBJECT-POLICIES-FROM-DATASTREAM";
// Commonly used attribute/URI constants.
private static final URI STRING_ATTRIBUTE = URI.create(StringAttribute.identifier);
private static final URI EMPTY_URI = URI.create("");
// Empty policy set used as the initial (no-policies-loaded) repository policy set.
@SuppressWarnings("unchecked")
private static final PolicySet EMPTY_SET = toPolicySet(Collections.EMPTY_LIST, new OrderedDenyOverridesPolicyAlg());
// Combining algorithm instantiated from configuration (or the default above).
private final PolicyCombiningAlgorithm m_combiningAlgorithm;
// Absolute path of the Fedora server home directory.
private final String m_serverHome;
// Directory containing repository-wide policies, from configuration.
private final String m_repositoryPolicyDirectoryPath;
// Directory containing the active backend-service policies (under server home).
private final String m_repositoryBackendPolicyDirectoryPath;
// Whether repository / object policies are schema-validated, from configuration.
private final boolean m_validateRepositoryPolicies;
private final boolean m_validateObjectPoliciesFromDatastream;
private final PolicyParser m_policyParser;
private final PolicyLoader m_policyLoader;
// Loaded repository-wide policies and the combined set built from them.
private final List<AbstractPolicy> m_repositoryPolicies;
private PolicySet m_repositoryPolicySet = EMPTY_SET;
public PolicyFinderModule(Server server,
PolicyLoader policyLoader,
ModuleConfiguration authorizationConfig)
throws GeneralException {
m_serverHome = server.getHomeDir().getAbsolutePath();
m_policyLoader = policyLoader;
m_repositoryBackendPolicyDirectoryPath = m_serverHome + File.separator
+ BACKEND_POLICIES_ACTIVE_DIRECTORY;
String repositoryPolicyDirectoryPath =
authorizationConfig.getParameter(REPOSITORY_POLICIES_DIRECTORY_KEY, true);
if (repositoryPolicyDirectoryPath == null) repositoryPolicyDirectoryPath = "";
m_repositoryPolicyDirectoryPath = repositoryPolicyDirectoryPath;
String combAlgClass = authorizationConfig.getParameter(COMBINING_ALGORITHM_KEY);
if (combAlgClass == null) combAlgClass = DEFAULT_XACML_COMBINING_ALGORITHM;
try {
m_combiningAlgorithm =
(PolicyCombiningAlgorithm) Class
.forName(combAlgClass).newInstance();
} catch (Exception e) {
throw new GeneralException(e.getMessage(), e);
}
String validatePolicies = authorizationConfig.getParameter(VALIDATE_REPOSITORY_POLICIES_KEY);
try {
m_validateRepositoryPolicies = (validatePolicies != null) ? Boolean.parseBoolean(validatePolicies) : false;
} catch (Exception e) {
throw new GeneralException("bad init parm boolean value for "
+ VALIDATE_REPOSITORY_POLICIES_KEY, e);
}
validatePolicies = authorizationConfig.getParameter(VALIDATE_OBJECT_POLICIES_FROM_DATASTREAM_KEY);
try {
m_validateObjectPoliciesFromDatastream = (validatePolicies != null) ? Boolean.parseBoolean(validatePolicies) : false;
} catch (Exception e) {
throw new GeneralException("bad init parm boolean value for "
+ VALIDATE_OBJECT_POLICIES_FROM_DATASTREAM_KEY, e);
}
// Initialize the policy parser given the POLICY_SCHEMA_PATH_KEY
String schemaPath = authorizationConfig.getParameter(POLICY_SCHEMA_PATH_KEY);
if (schemaPath != null) {
File schema;
if (schemaPath.startsWith(File.separator)){ // absolute
schema = new File(schemaPath);
} else {
schema = new File(new File(m_serverHome), schemaPath);
}
try {
FileInputStream in = new FileInputStream(schema);
m_policyParser = new PolicyParser(in);
ValidationUtility.setPolicyParser(m_policyParser);
} catch (Exception e) {
throw new GeneralException("Error loading policy"
+ " schema: " + schema.getAbsolutePath(), e);
}
} else {
throw new GeneralException("Policy schema path not"
+ " specified. Must be given as " + POLICY_SCHEMA_PATH_KEY);
}
m_repositoryPolicies = new ArrayList<AbstractPolicy>();
}
/**
* Does nothing at init time.
*/
@Override
public void init(PolicyFinder finder) {
try {
logger.info("Loading repository policies...");
setupActivePolicyDirectories();
m_repositoryPolicies.clear();
Map<String,AbstractPolicy> repositoryPolicies =
m_policyLoader.loadPolicies(m_policyParser,
m_validateRepositoryPolicies,
new File(m_repositoryBackendPolicyDirectoryPath));
repositoryPolicies.putAll(
m_policyLoader.loadPolicies(m_policyParser,
m_validateRepositoryPolicies,
new File(m_repositoryPolicyDirectoryPath)));
m_repositoryPolicies.addAll(repositoryPolicies.values());
m_repositoryPolicySet = toPolicySet(m_repositoryPolicies, m_combiningAlgorithm);
} catch (Throwable t) {
logger.error("Error loading repository policies: " + t.toString(), t);
}
}
private final void generateBackendPolicies() throws Exception {
logger.info("Generating backend policies...");
FileUtils.deleteContents(new File(m_repositoryBackendPolicyDirectoryPath));
BackendPolicies backendPolicies =
new BackendPolicies(m_serverHome + File.separator
+ BE_SECURITY_XML_LOCATION);
Hashtable<String, String> tempfiles = backendPolicies.generateBackendPolicies();
try {
Iterator<String> iterator = tempfiles.keySet().iterator();
Transformer transformer = null;
while (iterator.hasNext()) {
if (transformer == null) {
File f =
new File(m_serverHome + File.separator
+ BACKEND_POLICIES_XSL_LOCATION); // <<stylesheet
// location
StreamSource ss = new StreamSource(f);
transformer = XmlTransformUtility.getTransformer(ss); // xformPath
} else {
transformer.reset();
}
String key = iterator.next();
File infile = new File(tempfiles.get(key));
FileInputStream fis = new FileInputStream(infile);
FileOutputStream fos =
new FileOutputStream(m_repositoryBackendPolicyDirectoryPath
+ File.separator + key);
transformer.transform(new StreamSource(fis),
new StreamResult(fos));
}
} finally {
// we're done with temp files now, so delete them
Iterator<String> iter = tempfiles.keySet().iterator();
while (iter.hasNext()) {
File tempFile = new File(tempfiles.get(iter.next()));
tempFile.delete();
}
}
}
private void setupActivePolicyDirectories() throws Exception {
File repoPolicyDir = new File(m_repositoryPolicyDirectoryPath + File.separator + DEFAULT);
if (!repoPolicyDir.exists()){
repoPolicyDir.mkdirs();
File source = new File(m_serverHome + File.separator + DEFAULT_REPOSITORY_POLICIES_DIRECTORY);
FileUtils.copy(source, repoPolicyDir);
}
generateBackendPolicies();
}
/**
* Always returns true, indicating that this impl supports finding policies
* based on a request.
*/
@Override
public boolean isRequestSupported() {
return true;
}
/**
* Gets a deny-biased policy set that includes all repository-wide and
* object-specific policies.
*/
@Override
public PolicyFinderResult findPolicy(EvaluationCtx context) {
PolicyFinderResult policyFinderResult = null;
PolicySet policySet = m_repositoryPolicySet;
try {
String pid = getPid(context);
if (pid != null && !pid.isEmpty()) {
AbstractPolicy objectPolicyFromObject =
m_policyLoader.loadObjectPolicy(m_policyParser.copy(),
pid,
m_validateObjectPoliciesFromDatastream);
if (objectPolicyFromObject != null) {
List<AbstractPolicy> policies = new ArrayList<AbstractPolicy>(m_repositoryPolicies);
policies.add(objectPolicyFromObject);
policySet = toPolicySet(policies, m_combiningAlgorithm);
}
}
policyFinderResult = new PolicyFinderResult(policySet);
} catch (Exception e) {
logger.warn("PolicyFinderModule seriously failed to evaluate a policy ", e);
policyFinderResult =
new PolicyFinderResult(new Status(ERROR_CODE_LIST, e
.getMessage()));
}
return policyFinderResult;
}
// get the pid from the context, or null if unable
public static String getPid(EvaluationCtx context) {
EvaluationResult attribute
= context.getResourceAttribute(STRING_ATTRIBUTE,
Constants.OBJECT.PID.attributeId,
null);
BagAttribute element = getAttributeFromEvaluationResult(attribute);
if (element == null) {
logger.debug("PolicyFinderModule:getPid exit on can't get pid on request callback");
return null;
}
if (!(element.getType().equals(STRING_ATTRIBUTE))) {
logger.debug("PolicyFinderModule:getPid exit on couldn't get pid from xacml request non-string returned");
return null;
}
return (element.size() == 1) ? (String) element.getValue() : null;
}
// copy of code in AttributeFinderModule; consider refactoring
private static final BagAttribute getAttributeFromEvaluationResult(EvaluationResult attribute) {
if (attribute.indeterminate()) {
return null;
}
if (attribute.getStatus() != null
&& !Status.STATUS_OK.equals(attribute.getStatus())) {
return null;
}
AttributeValue attributeValue = attribute.getAttributeValue();
if (!(attributeValue instanceof BagAttribute)) {
return null;
}
return (BagAttribute) attributeValue;
}
private static PolicySet toPolicySet(List<AbstractPolicy> policies, PolicyCombiningAlgorithm alg) {
return new PolicySet(EMPTY_URI,
alg,
null /*
* no general target beyond those of
* multiplexed individual policies
*/,
policies);
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cognitosync.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * <p>
 * The input for the SetIdentityPoolConfiguration operation.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cognito-sync-2014-06-30/SetIdentityPoolConfiguration"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SetIdentityPoolConfigurationRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito;
     * identifies the pool to modify.
     */
    private String identityPoolId;

    /** Options to apply to this identity pool for push synchronization. */
    private PushSync pushSync;

    /** Options to apply to this identity pool for Amazon Cognito streams. */
    private CognitoStreams cognitoStreams;

    /**
     * Sets the ID of the identity pool to modify.
     *
     * @param identityPoolId
     *        A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
     *        Cognito. This is the ID of the pool to modify.
     */
    public void setIdentityPoolId(String identityPoolId) {
        this.identityPoolId = identityPoolId;
    }

    /**
     * Returns the ID of the identity pool to modify.
     *
     * @return A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
     *         Cognito. This is the ID of the pool to modify.
     */
    public String getIdentityPoolId() {
        return identityPoolId;
    }

    /**
     * Fluent variant of {@link #setIdentityPoolId(String)}.
     *
     * @param identityPoolId
     *        A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon
     *        Cognito. This is the ID of the pool to modify.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SetIdentityPoolConfigurationRequest withIdentityPoolId(String identityPoolId) {
        this.identityPoolId = identityPoolId;
        return this;
    }

    /**
     * Sets the push-synchronization options for this identity pool.
     *
     * @param pushSync
     *        Options to apply to this identity pool for push synchronization.
     */
    public void setPushSync(PushSync pushSync) {
        this.pushSync = pushSync;
    }

    /**
     * Returns the push-synchronization options for this identity pool.
     *
     * @return Options to apply to this identity pool for push synchronization.
     */
    public PushSync getPushSync() {
        return pushSync;
    }

    /**
     * Fluent variant of {@link #setPushSync(PushSync)}.
     *
     * @param pushSync
     *        Options to apply to this identity pool for push synchronization.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SetIdentityPoolConfigurationRequest withPushSync(PushSync pushSync) {
        this.pushSync = pushSync;
        return this;
    }

    /**
     * Sets the Amazon Cognito streams options for this identity pool.
     *
     * @param cognitoStreams
     *        Options to apply to this identity pool for Amazon Cognito streams.
     */
    public void setCognitoStreams(CognitoStreams cognitoStreams) {
        this.cognitoStreams = cognitoStreams;
    }

    /**
     * Returns the Amazon Cognito streams options for this identity pool.
     *
     * @return Options to apply to this identity pool for Amazon Cognito streams.
     */
    public CognitoStreams getCognitoStreams() {
        return cognitoStreams;
    }

    /**
     * Fluent variant of {@link #setCognitoStreams(CognitoStreams)}.
     *
     * @param cognitoStreams
     *        Options to apply to this identity pool for Amazon Cognito streams.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SetIdentityPoolConfigurationRequest withCognitoStreams(CognitoStreams cognitoStreams) {
        this.cognitoStreams = cognitoStreams;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getIdentityPoolId() != null) {
            sb.append("IdentityPoolId: ").append(getIdentityPoolId()).append(",");
        }
        if (getPushSync() != null) {
            sb.append("PushSync: ").append(getPushSync()).append(",");
        }
        if (getCognitoStreams() != null) {
            sb.append("CognitoStreams: ").append(getCognitoStreams());
        }
        return sb.append("}").toString();
    }

    // Null-safe equality helper: true when both are null or a.equals(b).
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof SetIdentityPoolConfigurationRequest)) {
            return false;
        }
        SetIdentityPoolConfigurationRequest other = (SetIdentityPoolConfigurationRequest) obj;
        return fieldEquals(getIdentityPoolId(), other.getIdentityPoolId())
                && fieldEquals(getPushSync(), other.getPushSync())
                && fieldEquals(getCognitoStreams(), other.getCognitoStreams());
    }

    @Override
    public int hashCode() {
        // Standard 31-prime accumulation over the fields, null hashing to 0;
        // yields the same values as the hand-unrolled original.
        int hashCode = 1;
        for (Object field : new Object[] { getIdentityPoolId(), getPushSync(), getCognitoStreams() }) {
            hashCode = 31 * hashCode + ((field == null) ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public SetIdentityPoolConfigurationRequest clone() {
        return (SetIdentityPoolConfigurationRequest) super.clone();
    }
}
| |
/**
* Copyright (C) 2015 Maxime Falaize (maxime.falaize@gmail.com)
* <p/>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mfalaize.zipdiff;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipInputStream;
/**
 * Checks and compiles differences between two zip files.
 * It also has the ability to exclude entries from the comparison
 * based on a regular expression.
 *
 * @author Sean C. Sullivan
 */
public class DifferenceCalculator {

    private static final Logger LOGGER = LoggerFactory.getLogger(DifferenceCalculator.class);

    private ZipFile file1;
    private ZipFile file2;

    // Comparison options; all default to comparing strictly except CVS files.
    private boolean ignoreTimestamps = false;
    private boolean ignoreCVSFiles = false;
    private boolean compareCRCValues = true;

    // Combined pattern of all entry names to exclude; null means exclude none.
    private Pattern filesToIgnorePattern;

    /**
     * Constructor taking 2 filenames to compare
     *
     * @throws java.io.IOException
     */
    public DifferenceCalculator(String filename1, String filename2) throws java.io.IOException {
        this(new File(filename1), new File(filename2));
    }

    /**
     * Constructor taking 2 Files to compare
     *
     * @throws java.io.IOException
     */
    public DifferenceCalculator(File f1, File f2) throws java.io.IOException {
        this(new ZipFile(f1), new ZipFile(f2));
    }

    /**
     * Constructor taking 2 ZipFiles to compare
     */
    public DifferenceCalculator(ZipFile zf1, ZipFile zf2) {
        file1 = zf1;
        file2 = zf2;
    }

    /**
     * Sets the entry-name exclusion patterns. The patterns are combined into a
     * single alternation regex; passing null or an empty set clears exclusion.
     *
     * @param patterns A set of regular expressions that when matched against a ZipEntry
     *                 then that ZipEntry will be ignored from the comparison.
     * @see java.util.regex
     */
    public void setFilenameRegexToIgnore(Set<String> patterns) {
        if (patterns == null || patterns.isEmpty()) {
            filesToIgnorePattern = null;
        } else {
            // Build "(p1)|(p2)|..." with a StringBuilder instead of repeated
            // String concatenation in a loop.
            StringBuilder regex = new StringBuilder();
            for (String pattern : patterns) {
                if (regex.length() > 0) {
                    regex.append('|');
                }
                regex.append('(').append(pattern).append(')');
            }
            filesToIgnorePattern = Pattern.compile(regex.toString());
            LOGGER.debug("Regular expression is : {}", regex);
        }
    }

    /**
     * returns true if fileToIgnorePattern matches the filename given.
     *
     * @param filepath The file path
     * @param entryName The name of the file to check to see if it should be ignored.
     * @return true if the file should be ignored.
     */
    protected boolean ignoreThisFile(String filepath, String entryName) {
        if (entryName == null) {
            return false;
        } else if (isCVSFile(filepath, entryName) && (ignoreCVSFiles())) {
            return true;
        } else if (filesToIgnorePattern == null) {
            return false;
        } else {
            Matcher m = filesToIgnorePattern.matcher(entryName);
            boolean match = m.matches();
            if (match) {
                LOGGER.debug("Found a match against : {} so excluding", entryName);
            }
            return match;
        }
    }

    /**
     * Returns true if the path or entry name lies under a CVS/ directory.
     *
     * @param filepath the path prefix of the entry
     * @param entryName the entry name
     * @return true if either component contains "CVS/"
     */
    protected boolean isCVSFile(String filepath, String entryName) {
        return entryName != null && ((filepath.contains("CVS/")) || (entryName.contains("CVS/")));
    }

    /**
     * Ensure that the comparison checks against the CRCs of the entries.
     *
     * @param b true ensures that CRCs will be checked
     */
    public void setCompareCRCValues(boolean b) {
        compareCRCValues = b;
    }

    /**
     * @return true if this instance will check the CRCs of each ZipEntry
     */
    public boolean getCompareCRCValues() {
        return compareCRCValues;
    }

    /**
     * Opens the ZipFile and builds up a map of all the entries. The key is the name of
     * the entry and the value is the ZipEntry itself.
     * <p>
     * Note: the given ZipFile is closed before returning, so it cannot be
     * reused afterwards (getDifferences() can therefore run only once per
     * instance).
     *
     * @param zf The ZipFile for which to build up the map of ZipEntries
     * @return The map containing all the ZipEntries. The key being the name of the ZipEntry.
     * @throws java.io.IOException
     */
    protected Map<String, ZipEntry> buildZipEntryMap(ZipFile zf) throws java.io.IOException {
        Map<String, ZipEntry> zipEntryMap = new HashMap<String, ZipEntry>();
        try {
            Enumeration<? extends ZipEntry> entries = zf.entries();
            while (entries.hasMoreElements()) {
                ZipEntry entry = entries.nextElement();
                InputStream is = null;
                try {
                    is = zf.getInputStream(entry);
                    processZipEntry("", entry, is, zipEntryMap);
                } finally {
                    if (is != null) {
                        is.close();
                    }
                }
            }
        } finally {
            zf.close();
        }
        return zipEntryMap;
    }

    /**
     * Will place ZipEntries for a given ZipEntry into the given Map. More ZipEntries will result
     * if zipEntry is itself a ZipFile. All embedded ZipFiles will be processed with their names
     * prefixed onto the names of their ZipEntries.
     *
     * @param prefix The prefix of the ZipEntry that should be added to the key. Typically used
     * when processing embedded ZipFiles. The name of the embedded ZipFile would be the prefix of
     * all the embedded ZipEntries.
     * @param zipEntry The ZipEntry to place into the Map. If it is a ZipFile then all its ZipEntries
     * will also be placed in the Map.
     * @param is The InputStream of the corresponding ZipEntry.
     * @param zipEntryMap The Map in which to place all the ZipEntries into. The key will
     * be the name of the ZipEntry.
     * @throws IOException
     */
    protected void processZipEntry(String prefix, ZipEntry zipEntry, InputStream is, Map<String, ZipEntry> zipEntryMap) throws IOException {
        if (ignoreThisFile(prefix, zipEntry.getName())) {
            LOGGER.debug("ignoring file: {}", zipEntry.getName());
        } else {
            String name = prefix + zipEntry.getName();
            LOGGER.debug("processing ZipEntry: {}", name);
            if (zipEntry.isDirectory()) {
                zipEntryMap.put(name, zipEntry);
            } else if (isZipFile(name)) {
                // Recurse into nested archives, keying their entries under the
                // embedded archive's name.
                processEmbeddedZipFile(zipEntry.getName() + "/", is, zipEntryMap);
                zipEntryMap.put(name, zipEntry);
            } else {
                zipEntryMap.put(name, zipEntry);
            }
        }
    }

    /**
     * Walks the entries of an embedded zip stream, adding each to the map with
     * the given prefix. The ZipInputStream is deliberately not closed here:
     * closing it would close the caller's underlying entry stream.
     *
     * @param prefix key prefix for entries of the embedded archive
     * @param is stream positioned at the embedded archive's bytes
     * @param m map receiving the embedded entries
     * @throws java.io.IOException on read error
     */
    protected void processEmbeddedZipFile(String prefix, InputStream is, Map<String, ZipEntry> m) throws java.io.IOException {
        ZipInputStream zis = new ZipInputStream(is);
        ZipEntry entry = zis.getNextEntry();
        while (entry != null) {
            processZipEntry(prefix, entry, zis, m);
            zis.closeEntry();
            entry = zis.getNextEntry();
        }
    }

    /**
     * Returns true if the filename has a valid zip extension.
     * i.e. jar, war, ear, zip etc.
     *
     * @param filename The name of the file to check.
     * @return true if it has a valid extension.
     */
    public static boolean isZipFile(String filename) {
        if (filename == null) {
            return false;
        }
        // Use a fixed locale for the case-fold: the default-locale
        // toLowerCase() misclassifies ".ZIP" under e.g. the Turkish locale.
        String lowercaseName = filename.toLowerCase(Locale.ENGLISH);
        return lowercaseName.endsWith(".zip") || lowercaseName.endsWith(".ear") || lowercaseName.endsWith(".war")
                || lowercaseName.endsWith(".rar") || lowercaseName.endsWith(".jar");
    }

    /**
     * Calculates all the differences between two zip files.
     * It builds up the 2 maps of ZipEntries for the two files
     * and then compares them.
     *
     * @param zf1 The first ZipFile to compare
     * @param zf2 The second ZipFile to compare
     * @return All the differences between the two files.
     * @throws java.io.IOException
     */
    protected Differences calculateDifferences(ZipFile zf1, ZipFile zf2) throws java.io.IOException {
        Map<String, ZipEntry> map1 = buildZipEntryMap(zf1);
        Map<String, ZipEntry> map2 = buildZipEntryMap(zf2);
        return calculateDifferences(map1, map2);
    }

    /**
     * Given two Maps of ZipEntries it will generate a Differences of all the
     * differences found between the two maps.
     *
     * @return All the differences found between the two maps
     */
    protected Differences calculateDifferences(Map<String, ZipEntry> m1, Map<String, ZipEntry> m2) {
        Differences d = new Differences();
        Set<String> names1 = m1.keySet();
        Set<String> names2 = m2.keySet();
        Set<String> allNames = new HashSet<String>();
        allNames.addAll(names1);
        allNames.addAll(names2);
        for (String name : allNames) {
            if (!ignoreThisFile("", name)) {
                if (names1.contains(name) && (!names2.contains(name))) {
                    d.fileRemoved(name, m1.get(name));
                } else if (names2.contains(name) && (!names1.contains(name))) {
                    d.fileAdded(name, m2.get(name));
                } else if (names1.contains(name) && (names2.contains(name))) {
                    ZipEntry entry1 = m1.get(name);
                    ZipEntry entry2 = m2.get(name);
                    if (!entriesMatch(entry1, entry2)) {
                        d.fileChanged(name, entry1, entry2);
                    }
                } else {
                    // allNames is the union of names1/names2, so every name is
                    // in at least one set; reaching here means a logic error.
                    throw new IllegalStateException("unexpected state");
                }
            }
        }
        return d;
    }

    /**
     * returns true if the two entries are equivalent in type, name, size, compressed size
     * and time or CRC.
     *
     * @param entry1 The first ZipEntry to compare
     * @param entry2 The second ZipEntry to compare
     * @return true if the entries are equivalent.
     */
    protected boolean entriesMatch(ZipEntry entry1, ZipEntry entry2) {
        boolean result;
        result =
                (entry1.isDirectory() == entry2.isDirectory())
                        && (entry1.getSize() == entry2.getSize())
                        && (entry1.getCompressedSize() == entry2.getCompressedSize())
                        && (entry1.getName().equals(entry2.getName()));
        if (!isIgnoringTimestamps()) {
            result = result && (entry1.getTime() == entry2.getTime());
        }
        if (getCompareCRCValues()) {
            result = result && (entry1.getCrc() == entry2.getCrc());
        }
        return result;
    }

    /** @param b true to ignore entry modification times when comparing */
    public void setIgnoreTimestamps(boolean b) {
        ignoreTimestamps = b;
    }

    /** @return true if entry modification times are ignored when comparing */
    public boolean isIgnoringTimestamps() {
        return ignoreTimestamps;
    }

    /** @return true if CVS/ entries are excluded from the comparison */
    public boolean ignoreCVSFiles() {
        return ignoreCVSFiles;
    }

    /** @param b true to exclude CVS/ entries from the comparison */
    public void setIgnoreCVSFiles(boolean b) {
        ignoreCVSFiles = b;
    }

    /**
     * @return all the differences found between the two zip files.
     * @throws java.io.IOException
     */
    public Differences getDifferences() throws java.io.IOException {
        Differences d = calculateDifferences(file1, file2);
        d.setFilename1(file1.getName());
        d.setFilename2(file2.getName());
        return d;
    }
}
| |
/*
* Copyright (c) 2003-2006 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package slickdesktop;
import java.awt.AWTEvent;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.KeyboardFocusManager;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.event.ContainerEvent;
import java.awt.event.ContainerListener;
import java.awt.event.FocusEvent;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.beans.PropertyVetoException;
import java.lang.reflect.InvocationTargetException;
import java.nio.IntBuffer;
import java.util.HashMap;
import java.util.Map;
import javax.swing.JComponent;
import javax.swing.JDesktopPane;
import javax.swing.JInternalFrame;
import javax.swing.JPanel;
import javax.swing.JRootPane;
import javax.swing.JScrollPane;
import javax.swing.JTabbedPane;
import javax.swing.JViewport;
import javax.swing.Popup;
import javax.swing.PopupFactory;
import javax.swing.RepaintManager;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.plaf.basic.BasicInternalFrameUI;
import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.GL11;
import org.newdawn.slick.GameContainer;
import org.newdawn.slick.Input;
/**
*
* @author bjgil
*/
public class SlickDesktop
implements org.newdawn.slick.InputListener
{
private SlickDesktopImageGraphics graphics;
private JDesktopPane desktop;
private org.newdawn.slick.Image texture;
private boolean initialized;
private int width;
private int height;
private boolean showingJFrame = false;
private final Frame awtWindow;
private int desktopWidth;
private int desktopHeight;
private boolean synchronizingThreadsOnUpdate;
private static int desktopsUsed = 0;
private final LockRunnable paintLockRunnable = new LockRunnable();
/**
* Reports whether the desktop is currently shown in its real AWT frame
* (debug mode) rather than rendered to the texture.
*
* @see #setShowingJFrame
* @return true if frame is displayed
*/
public boolean isShowingJFrame() {
return showingJFrame;
}
/**
* Shows or hides the desktop in a real AWT frame instead of on the rendered
* quad; intended only for debugging the Swing content.
*
* @param showingJFrame true to display the desktop in a JFrame instead on this quad.
* @deprecated for debugging only
*/
public void setShowingJFrame( boolean showingJFrame ) {
this.showingJFrame = showingJFrame;
// toggle the hidden AWT window and force a redraw in the new mode
awtWindow.setVisible( showingJFrame );
awtWindow.repaint();
}
/**
* Creates the offscreen desktop: an invisible, undecorated AWT Frame hosting a
* transparent JDesktopPane whose painting is redirected to this object's
* graphics (see the Frame's getGraphics override below).
*
* @param name the desktop's name (currently unused; kept for API symmetry)
*/
public SlickDesktop( String name ) {
// super( name );
// inputHandler = new InputHandler();
// The Frame overrides trick Swing into painting even though the window is
// never actually shown on screen.
awtWindow = new Frame() {
private static final long serialVersionUID = 1L;
// Pretend to be showing so Swing performs painting/layout.
public boolean isShowing() {
return true;
}
public boolean isVisible() {
// Report invisible during requestFocus calls to keep AWT from trying
// to focus the hidden native window; otherwise visible once initialized.
if ( awtWindow.isFocusableWindow()
&& new Throwable().getStackTrace()[1].getMethodName().startsWith( "requestFocus" ) ) {
return false;
}
return initialized || super.isVisible();
}
// Redirect painting to the offscreen image graphics unless the debug
// frame is actually being displayed.
public Graphics getGraphics() {
if ( !showingJFrame ) {
return graphics == null ? super.getGraphics() : graphics.create();
}
return super.getGraphics();
}
// Pretend to be focused so Swing delivers key events normally.
public boolean isFocused() {
return true;
}
};
awtWindow.setFocusableWindowState( false );
Container contentPane = awtWindow;
awtWindow.setUndecorated( true );
// Make the whole ancestor chain non-opaque so the texture stays transparent.
dontDrawBackground( contentPane );
desktop = new JDesktopPane() {
private static final long serialVersionUID = 1L;
public void paint( Graphics g ) {
// Clear the backing image first when rendering offscreen, so stale
// pixels from the previous frame do not show through.
if ( !isShowingJFrame() ) {
g.clearRect( 0, 0, getWidth(), getHeight() );
}
super.paint( g );
}
public boolean isOptimizedDrawingEnabled() {
return false;
}
};
// desktop.setDragMode( JDesktopPane.OUTLINE_DRAG_MODE );
new ScrollPaneRepaintFixListener().addTo( desktop );
final Color transparent = new Color( 0, 0, 0, 0 );
desktop.setBackground( transparent );
desktop.setFocusable( true );
// Clicking empty desktop space moves keyboard focus back to the desktop.
desktop.addMouseListener( new MouseAdapter() {
public void mousePressed( MouseEvent e ) {
desktop.requestFocusInWindow();
}
} );
// this internal frame is a workaround for key binding problems in JDK1.5
// todo: this workaround does not seem to work on mac
if ( System.getProperty( "os.name" ).toLowerCase().indexOf( "mac" ) < 0 ) {
final JInternalFrame internalFrame = new JInternalFrame();
// Strip the internal frame's UI chrome so it is an invisible wrapper.
internalFrame.setUI( new BasicInternalFrameUI( internalFrame ) {
protected void installComponents() {
}
} );
internalFrame.setOpaque( false );
internalFrame.setBackground( null );
internalFrame.getContentPane().setLayout( new BorderLayout() );
internalFrame.getContentPane().add( desktop, BorderLayout.CENTER );
internalFrame.setVisible( true );
internalFrame.setBorder( null );
contentPane.add( internalFrame );
}
else {
// this would have suited for JDK1.4:
contentPane.add( desktop, BorderLayout.CENTER );
}
awtWindow.pack();
// Double buffering would fight the direct-to-image painting done here.
RepaintManager.currentManager( null ).setDoubleBufferingEnabled( false );
}
/**
* Creates a desktop of the given size without mip-mapping.
*
* @param name the desktop's name
* @param width desired desktop width in pixels
* @param height desired desktop height in pixels
*/
public SlickDesktop( String name, final int width, final int height ) {
this( name, width, height, false );
}
/**
* Creates a desktop of the given size.
*
* @param name the desktop's name
* @param width desired desktop width in pixels
* @param height desired desktop height in pixels
* @param mipMapping NOTE(review): currently ignored — the corresponding
*        argument to SlickDesktopImageGraphics.createInstance in setup() is
*        commented out; confirm before relying on it
*/
public SlickDesktop( String name, final int width, final int height, boolean mipMapping ) {
this( name );
setup( width, height );
}
/**
* Sizes the desktop and creates the offscreen graphics backing it.
* The backing image is rounded up to power-of-two dimensions while the
* Swing desktop keeps the requested size. May be called only once.
*
* @param width desired desktop width in pixels
* @param height desired desktop height in pixels
* @throws IllegalStateException if called more than once
* @throws RuntimeException if the offscreen graphics cannot be created
*/
public void setup( int width, int height ) {
if ( initialized ) {
throw new IllegalStateException( "may be called only once" );
}
// Texture dimensions: next power of two (clamped to GL max texture size).
this.width = powerOf2SizeIfNeeded( width );
this.height = powerOf2SizeIfNeeded( height );
// The visible desktop keeps the caller's requested size.
desktop.setPreferredSize( new Dimension( width, height ) );
desktopWidth = width;
desktopHeight = height;
awtWindow.pack();
graphics = SlickDesktopImageGraphics.createInstance( this.width, this.height/*, mipMapping ? 2 : 0*/ );
if( graphics == null )
{
throw new RuntimeException( "SD2_ImageGraphics - could not instantiate graphics object" );
}
// Install the custom popup factory once, shared across all desktops.
if ( desktopsUsed == 0 ) {
PopupFactory.setSharedInstance( new MyPopupFactory() );
}
desktopsUsed++;
this.setFocusOwner( desktop );
initialized = true;
setSynchronizingThreadsOnUpdate( true );
desktop.repaint();
}
/**
* Reports whether the update and Swing threads are synchronized during
* repaints (avoids flickering, eats some performance).
*
* @return true if update and swing thread should be synchronized
*/
public boolean isSynchronizingThreadsOnUpdate() {
return synchronizingThreadsOnUpdate;
}
/**
 * Choose if update and swing thread should be synchronized (avoids flickering,
 * eats some performance).
 *
 * @param synchronizingThreadsOnUpdate true to synchronize
 */
public void setSynchronizingThreadsOnUpdate( boolean synchronizingThreadsOnUpdate ) {
    if ( this.synchronizingThreadsOnUpdate == synchronizingThreadsOnUpdate ) {
        return; // unchanged — nothing to do
    }
    this.synchronizingThreadsOnUpdate = synchronizingThreadsOnUpdate;
}
/**
 * Switches antialiasing on for the given graphics context, reusing the
 * context's existing hint set when one is present.
 */
private void enableAntiAlias( Graphics2D graphics ) {
    RenderingHints renderHints = graphics.getRenderingHints();
    if ( renderHints != null ) {
        renderHints.put( RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON );
    }
    else {
        renderHints = new RenderingHints( RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON );
    }
    graphics.setRenderingHints( renderHints );
}
/**
 * Makes the given container and all of its ancestors transparent so that
 * nothing but the actual widgets is painted onto the desktop texture.
 */
private static void dontDrawBackground( Container container ) {
    // iterative walk up the parent chain (replaces the original recursion)
    for ( Container current = container; current != null; current = current.getParent() ) {
        current.setBackground( null );
        if ( current instanceof JComponent ) {
            ( (JComponent) current ).setOpaque( false );
        }
    }
}
/**
 * Rounds a texture dimension up to the next power of two, clamped to the
 * OpenGL maximum texture size.
 *
 * @param size requested size in pixels (values &lt;= 1 yield 1)
 * @return smallest power of two &gt;= size, never larger than GL_MAX_TEXTURE_SIZE
 */
private static int powerOf2SizeIfNeeded( int size/*, boolean generateMipMaps*/ ) {
    // Smallest power of two >= size (size <= 1 yields 1). Integer.highestOneBit
    // replaces the old doubling loop, which overflowed into an endless loop for
    // sizes above 2^30 because the clamp only happened after the loop.
    int powerOf2Size = Integer.highestOneBit( Math.max( size, 1 ) );
    if ( powerOf2Size < size ) {
        // round up; guard the shift against int overflow for size > 2^30
        powerOf2Size = powerOf2Size < ( 1 << 30 ) ? powerOf2Size << 1 : Integer.MAX_VALUE;
    }
    // clamp to the maximum texture size supported by the GL driver
    IntBuffer temp = BufferUtils.createIntBuffer(16);
    GL11.glGetInteger(GL11.GL_MAX_TEXTURE_SIZE, temp);
    int max = temp.get(0);
    if( powerOf2Size > max )
    {
        powerOf2Size = max;
    }
    return powerOf2Size;
}
// cached image of the last render; reused while the desktop is not dirty
private org.newdawn.slick.Image ret = null;

/**
 * Renders the Swing desktop into an image when it is dirty, otherwise returns
 * the cached image. When thread synchronization is enabled, the render thread
 * blocks (up to 100 ms) until the Swing thread has executed the lock runnable,
 * so painting does not overlap a Swing update.
 */
public org.newdawn.slick.Image render( GameContainer gameContainer, org.newdawn.slick.Graphics g ) {
    if ( graphics.isDirty() ) {
        ret = null;
        final boolean synchronizingThreadsOnUpdate = this.synchronizingThreadsOnUpdate;
        if ( synchronizingThreadsOnUpdate ) {
            synchronized ( paintLockRunnable ) {
                try {
                    paintLockRunnable.wait = true;
                    SwingUtilities.invokeLater( paintLockRunnable );
                    // wait (bounded) until the Swing thread picks up the runnable
                    paintLockRunnable.wait( 100 );
                } catch ( InterruptedException e ) {
                    e.printStackTrace();
                }
            }
        }
        try {
            // NOTE(review): graphics is dereferenced above (isDirty) before this
            // null check — confirm whether graphics can become null concurrently
            // (e.g. via dispose()) or whether one of the two checks is redundant
            if ( graphics != null ) {
                ret = graphics.render( g, false );
            }
        } finally {
            if ( synchronizingThreadsOnUpdate ) {
                synchronized ( paintLockRunnable ) {
                    // release the Swing thread blocked in LockRunnable.run()
                    paintLockRunnable.notifyAll();
                }
            }
        }
    }
    return ret;
}
/** @return the Swing desktop pane that hosts all widgets of this desktop */
public JDesktopPane getJDesktop() {
    return desktop;
}
/**
 * @return the component that currently owns the keyboard focus, or null when
 *         focus was explicitly cleared via {@link #setFocusOwner(Component)}
 */
public Component getFocusOwner() {
    return focusCleared ? null : this.awtWindow.getFocusOwner();
}
/**
 * Runnable posted to the Swing thread to rendezvous with the render thread:
 * it wakes the render thread waiting on this object and then optionally blocks
 * (up to 200 ms) until rendering has finished.
 */
private class LockRunnable implements Runnable {
    // set by the render thread before posting; cleared here before waiting
    private boolean wait = false;

    public void run() {
        synchronized ( paintLockRunnable ) {
            // wake the render thread blocked in render()
            notifyAll();
            if ( wait ) {
                try {
                    //wait for repaint to finish
                    wait = false;
                    paintLockRunnable.wait( 200 );
                } catch ( InterruptedException e ) {
                    e.printStackTrace();
                }
            }
        }
    }
}
/**
 * Call this method if the desktop is no longer needed. Removes this from the
 * scenegraph; later use is not possible any more.
 */
public void dispose() {
    if ( desktop != null ) {
        try {
            // tear down the Swing side on the event dispatch thread
            SwingUtilities.invokeAndWait( new Runnable() {
                public void run() {
                    desktop.removeAll();
                    awtWindow.dispose();
                }
            } );
        } catch ( InterruptedException e ) {
            e.printStackTrace();
        } catch ( InvocationTargetException e ) {
            e.printStackTrace();
        }
        desktop = null;
        desktopsUsed--;
        // last desktop gone: restore the default heavy-weight popup factory
        if ( desktopsUsed == 0 ) {
            PopupFactory.setSharedInstance( new PopupFactory() );
        }
    }
}
// component the mouse was over during the previous mouse event (enter/exit tracking)
private Component lastComponent;
// component that captured the mouse on button press; receives drags and the release
private Component grabbedMouse;
// swing button id that initiated the current grab
private int grabbedMouseButton;
// press position; a release near it still counts as a click (Integer.MIN_VALUE = no press pending)
private int downX = 0;
private int downY = 0;
// timestamp of the last click, for double/multi-click detection
private long lastClickTime = 0;
private int clickCount = 0;
// maximum press-to-release movement (pixels) that still counts as a click
private static final int MAX_CLICKED_OFFSET = 4;
/**
 * Finds the deepest visible component at the given desktop coordinates.
 *
 * @return the component under (x, y), or null when only the desktop itself is hit
 */
public Component componentAt( int x, int y ) {
    final Component hit = componentAt( x, y, desktop, true );
    return hit == desktop ? null : hit;
}
/**
 * Recursively finds the deepest component containing (x, y) below {@code parent},
 * translating the coordinates into each child's space while descending.
 *
 * @param scanRootPanes when true, JRootPanes are traversed via their content pane
 * @return the deepest matching component, {@code parent} itself, or null when
 *         (x, y) lies outside {@code parent}
 */
private Component componentAt( int x, int y, Component parent, boolean scanRootPanes ) {
    if ( scanRootPanes && parent instanceof JRootPane ) {
        JRootPane rootPane = (JRootPane) parent;
        parent = rootPane.getContentPane();
    }
    Component child = parent;
    if ( !parent.contains( x, y ) ) {
        child = null;
    }
    else {
        synchronized ( parent.getTreeLock() ) {
            if ( parent instanceof Container ) {
                Container container = (Container) parent;
                int ncomponents = container.getComponentCount();
                // children are scanned in z-order; the first visible hit wins
                for ( int i = 0; i < ncomponents; i++ ) {
                    Component comp = container.getComponent( i );
                    if ( comp != null
                            && comp.isVisible()
                            && comp.contains( x - comp.getX(), y - comp.getY() ) ) {
                        child = comp;
                        break;
                    }
                }
            }
        }
    }
    if ( child != null ) {
        // tabbed panes: descend into the selected tab, not the tab header widgets
        if ( parent instanceof JTabbedPane && child != parent ) {
            child = ( (JTabbedPane) parent ).getSelectedComponent();
        }
        x -= child.getX();
        y -= child.getY();
    }
    return child != parent && child != null ? componentAt( x, y, child, scanRootPanes ) : child;
}
// true while focus was explicitly cleared via setFocusOwner(null)
private boolean focusCleared = false;

/**
 * Moves keyboard focus to the given component, dispatching the matching
 * FOCUS_LOST / FOCUS_GAINED events, and selects any enclosing JInternalFrame.
 *
 * @param comp new focus owner, or null to clear focus
 */
public void setFocusOwner( Component comp ) {
    if ( comp == null || comp.isFocusable() ) {
        // select internal frames on the path to the root so the focused frame is on top
        for ( Component p = comp; p != null; p = p.getParent() ) {
            if ( p instanceof JInternalFrame ) {
                try {
                    ( (JInternalFrame) p ).setSelected( true );
                } catch ( PropertyVetoException e ) {
                    e.printStackTrace();
                }
            }
        }
        // temporarily allow the hidden window to hold focus while we move it
        awtWindow.setFocusableWindowState( true );
        Component oldFocusOwner = getFocusOwner();
        // focusing the desktop itself counts as "no focus owner"
        if ( comp == desktop ) {
            comp = null;
        }
        if ( oldFocusOwner != comp ) {
            if ( oldFocusOwner != null ) {
                dispatchEvent( oldFocusOwner, new FocusEvent( oldFocusOwner,
                        FocusEvent.FOCUS_LOST, false, comp ) );
            }
            KeyboardFocusManager.getCurrentKeyboardFocusManager().clearGlobalFocusOwner();
            if ( comp != null ) {
                dispatchEvent( comp, new FocusEvent( comp,
                        FocusEvent.FOCUS_GAINED, false, oldFocusOwner ) );
            }
        }
        awtWindow.setFocusableWindowState( false );
    }
    focusCleared = comp == null;
}
/**
 * Dispatches an AWT event to the receiver, honoring the modal component filter:
 * events are dropped unless the receiver is inside the modal component (if any).
 * Must be called on the Swing event dispatch thread.
 */
private void dispatchEvent( final Component receiver, final AWTEvent event ) {
    if ( getModalComponent() == null || SwingUtilities.isDescendingFrom( receiver, getModalComponent() ) ) {
        if ( !SwingUtilities.isEventDispatchThread() ) {
            throw new IllegalStateException( "not in swing thread!" );
        }
        receiver.dispatchEvent( event );
    }
}
// Slick input this desktop listens to; set via setInput()
private Input input = null;

/** Registers this desktop as a listener of the given Slick input. */
public void setInput(Input input)
{
    this.input = input;
    input.addListener( this );
}

/**
 * NOTE(review): showingJFrame is declared elsewhere in this class — presumably
 * true while the desktop is displayed in a real JFrame, in which case Slick
 * input is ignored; confirm against the field's declaration.
 */
public boolean isAcceptingInput()
{
    return !showingJFrame;
}
/** Slick callback; nothing to do here. */
public void inputEnded()
{
}

/** Converts a Slick key press into AWT key events on the Swing thread. */
public void keyPressed(int keyCode, char character)
{
    createKeyEvent( keyCode, character, true );
}

/** Converts a Slick key release into AWT key events on the Swing thread. */
public void keyReleased(int keyCode, char character)
{
    createKeyEvent( keyCode, character, false );
}
/**
 * Forwards a key event to the Swing thread synchronously; blocks the calling
 * (render) thread until the AWT key events have been dispatched.
 */
private void createKeyEvent( final int keyCode, final char character, final boolean pressed )
{
    try {
        SwingUtilities.invokeAndWait( new Runnable() {
            public void run() {
                sendAWTKeyEvent( keyCode, pressed, character );
            }
        } );
    } catch ( InterruptedException e ) {
        e.printStackTrace();
    } catch ( InvocationTargetException e ) {
        e.printStackTrace();
    }
}
// reusable mutable key for lookups in the 'characters' map, avoiding an
// allocation per key event; only ever used for get(), never stored in the map
private static Int anInt = new Int( 0 );

/**
 * Mutable int wrapper used as a HashMap key. Mutability is only safe here
 * because instances stored in the map are never mutated afterwards — only the
 * shared lookup key 'anInt' is.
 */
private static class Int {
    public Int( int value ) {
        this.value = value;
    }

    public boolean equals( Object obj ) {
        return obj instanceof Int && ( (Int) obj ).value == value;
    }

    public int hashCode() {
        return value;
    }

    int value;
}

/** Mutable char holder, so a stored character can be updated in place. */
private static class Char {
    public Char( char value ) {
        this.value = value;
    }

    char value;
}
/**
 * From keyCode (Int) to character (Char): remembers the character sent with
 * KEY_PRESSED so the matching KEY_RELEASED can carry the same character.
 */
private Map<Int,Char> characters = new HashMap<Int,Char>();

/**
 * Converts a Slick key event into AWT KEY_PRESSED / KEY_TYPED / KEY_RELEASED
 * events and dispatches them to the focus owner (or the desktop when nothing
 * has focus). Must run on the Swing thread.
 */
private void sendAWTKeyEvent( int keyCode, boolean pressed, char character ) {
    keyCode = AWTKeyInput.toAWTCode( keyCode );
    if ( keyCode != 0 ) {
        Component focusOwner = getFocusOwner();
        if ( focusOwner == null ) {
            focusOwner = desktop;
        }
        if ( character == '\0' ) {
            character = KeyEvent.CHAR_UNDEFINED;
        }
        if ( focusOwner != null ) {
            if ( pressed ) {
                KeyEvent event = new KeyEvent( focusOwner, KeyEvent.KEY_PRESSED,
                        System.currentTimeMillis(), getCurrentModifiers( -1 ),
                        keyCode, character );
                dispatchEvent( focusOwner, event );
                // remember the character for the matching release (lookup via shared key)
                anInt.value = keyCode;
                Char c = characters.get( anInt );
                if ( c == null ) {
                    characters.put( new Int( keyCode ), new Char( character ) );
                }
                else {
                    c.value = character;
                }
                // printable keys additionally produce a KEY_TYPED event
                if ( character != KeyEvent.CHAR_UNDEFINED ) {
                    dispatchEvent( focusOwner, new KeyEvent( focusOwner, KeyEvent.KEY_TYPED,
                            System.currentTimeMillis(), getCurrentModifiers( -1 ),
                            0, character ) );
                }
            }
            if ( !pressed ) {
                // reuse the character recorded at press time for the release event
                anInt.value = keyCode;
                Char c = characters.get( anInt );
                if ( c != null ) {
                    character = c.value;
                    //TODO: repeat input
                    // if ( character != KeyEvent.CHAR_UNDEFINED ) {
                    // dispatchEvent( focusOwner, new KeyEvent( focusOwner, KeyEvent.KEY_TYPED,
                    // System.currentTimeMillis(), getCurrentModifiers( -1 ),
                    // 0, character ) );
                    // }
                }
                dispatchEvent( focusOwner, new KeyEvent( focusOwner, KeyEvent.KEY_RELEASED,
                        System.currentTimeMillis(), getCurrentModifiers( -1 ),
                        keyCode, character ) );
            }
        }
    }
}
/** Slick callback; mouse wheel is currently not forwarded to Swing. */
public void mouseWheelMoved(int i)
{
}

/** Converts a Slick mouse press into AWT mouse events on the Swing thread. */
public void mousePressed(int button, int x, int y)
{
    createMouseEvent( button, x, y, true );
}

/** Converts a Slick mouse release into AWT mouse events on the Swing thread. */
public void mouseReleased(int button, int x, int y)
{
    createMouseEvent( button, x, y, false );
}
/**
 * Forwards a mouse event to the Swing thread synchronously. Slick buttons are
 * 0-based while AWT/Swing buttons are 1-based, hence the {@code button + 1}.
 */
private void createMouseEvent( final int button, final int x, final int y, final boolean pressed )
{
    try {
        SwingUtilities.invokeAndWait( new Runnable() {
            public void run() {
                sendAWTMouseEvent( x, y, pressed, button + 1 );
            }
        } );
    } catch ( InterruptedException e ) {
        e.printStackTrace();
    } catch ( InvocationTargetException e ) {
        e.printStackTrace();
    }
}
// SwingUtilities.convertPoint has been observed to throw InternalError here;
// after the first failure we permanently fall back to manual conversion
// (NOTE(review): the exact trigger of the InternalError is not visible in this file)
private boolean useConvertPoint = true;

/**
 * Converts (x, y) from {@code parent}'s coordinate space into {@code comp}'s.
 * Falls back to walking the parent chain when SwingUtilities.convertPoint fails.
 */
private Point convertPoint( Component parent, int x, int y, Component comp ) {
    if ( useConvertPoint ) {
        try {
            return SwingUtilities.convertPoint( parent, x, y, comp );
        } catch ( InternalError e ) {
            useConvertPoint = false;
        }
    }
    // manual fallback: subtract each ancestor's offset up to 'parent'
    if ( comp != null ) {
        while ( comp != parent ) {
            x -= comp.getX();
            y -= comp.getY();
            if ( comp.getParent() == null ) {
                break;
            }
            comp = comp.getParent();
        }
    }
    return new Point( x, y );
}
/**
 * Builds the AWT button mask for all currently held mouse buttons plus the
 * given swing button. Sets both the legacy *_MASK and extended *_DOWN_MASK
 * bits so old and new AWT consumers see the buttons.
 *
 * @param swingButton swing button constant (MouseEvent.BUTTON1..3), or
 *                    MouseEvent.NOBUTTON for none
 */
private int getButtonMask( int swingButton ) {
    int buttonMask = 0;
    if ( input.isMouseButtonDown( 0 ) || swingButton == MouseEvent.BUTTON1 ) {
        buttonMask |= InputEvent.BUTTON1_MASK;
        buttonMask |= InputEvent.BUTTON1_DOWN_MASK;
    }
    if ( input.isMouseButtonDown( 1 ) || swingButton == MouseEvent.BUTTON2 ) {
        buttonMask |= InputEvent.BUTTON2_MASK;
        buttonMask |= InputEvent.BUTTON2_DOWN_MASK;
    }
    if ( input.isMouseButtonDown( 2 ) || swingButton == MouseEvent.BUTTON3 ) {
        buttonMask |= InputEvent.BUTTON3_MASK;
        buttonMask |= InputEvent.BUTTON3_DOWN_MASK;
    }
    return buttonMask;
}
/**
 * Builds the full AWT modifier mask for the current keyboard state (alt,
 * alt-graph, ctrl, shift — both legacy and extended bits) combined with the
 * button mask for the given swing button.
 *
 * @param swingButton swing button constant (MouseEvent.BUTTON1..3), or a
 *                    negative value / NOBUTTON when no button is involved
 */
private int getCurrentModifiers( int swingButton ) {
    // parameter renamed from the misspelled 'swingBtton'; private method, so
    // callers are unaffected
    int modifiers = 0;
    if ( isKeyDown( Input.KEY_LMENU ) ) {
        modifiers |= InputEvent.ALT_DOWN_MASK;
        modifiers |= InputEvent.ALT_MASK;
    }
    if ( isKeyDown( Input.KEY_RMENU ) ) {
        modifiers |= InputEvent.ALT_GRAPH_DOWN_MASK;
        modifiers |= InputEvent.ALT_GRAPH_MASK;
    }
    if ( isKeyDown( Input.KEY_LCONTROL ) || isKeyDown( Input.KEY_RCONTROL ) ) {
        modifiers |= InputEvent.CTRL_DOWN_MASK;
        modifiers |= InputEvent.CTRL_MASK;
    }
    if ( isKeyDown( Input.KEY_LSHIFT ) || isKeyDown( Input.KEY_RSHIFT ) ) {
        modifiers |= InputEvent.SHIFT_DOWN_MASK;
        modifiers |= InputEvent.SHIFT_MASK;
    }
    return modifiers | getButtonMask( swingButton );
}
/** @return true if the given Slick key is currently held down */
private boolean isKeyDown( int key ) {
    return input.isKeyDown( key );
}
/**
 * Dispatches MOUSE_ENTERED events to every component on the path from
 * {@code lastComponent} (exclusive) down to {@code comp} (inclusive),
 * outermost ancestor first — the recursion ascends before dispatching.
 */
private void sendEnteredEvent( Component comp, Component lastComponent, int buttonMask, Point pos ) {
    if ( comp != null && comp != lastComponent ) {
        sendEnteredEvent( comp.getParent(), lastComponent, buttonMask, pos );
        pos = convertPoint( lastComponent, pos.x, pos.y, comp );
        final MouseEvent event = new MouseEvent( comp,
                MouseEvent.MOUSE_ENTERED,
                System.currentTimeMillis(), buttonMask, pos.x, pos.y, 0, false, 0 );
        dispatchEvent( comp, event );
    }
}
/**
 * Dispatches a MOUSE_EXITED event to the component the mouse just left.
 * NOTE(review): the click count is 1 here while sendEnteredEvent uses 0 —
 * confirm whether this asymmetry is intentional.
 */
private void sendExitedEvent( Component lastComponent, int buttonMask, Point pos ) {
    final MouseEvent event = new MouseEvent( lastComponent,
            MouseEvent.MOUSE_EXITED,
            System.currentTimeMillis(), buttonMask, pos.x, pos.y, 1, false, 0 );
    dispatchEvent( lastComponent, event );
}
// maximum time between clicks (ms) that still counts as a multi-click
private static final int DOUBLE_CLICK_TIME = 300;

/**
 * Translates a raw mouse event into AWT mouse events for the desktop:
 * enter/exit events when the hovered component changes, press/release/click
 * events with click counting and mouse grabbing, and move/drag events for
 * pure motion. Must run on the Swing thread.
 *
 * @param x           desktop x coordinate
 * @param y           desktop y coordinate
 * @param pressed     true for press, false for release/motion
 * @param swingButton swing button constant, or MouseEvent.NOBUTTON for motion
 */
private void sendAWTMouseEvent( int x, int y, boolean pressed, int swingButton ) {
    Component comp = componentAt( x, y, desktop, false );
    final int eventType;
    if ( swingButton > MouseEvent.NOBUTTON ) {
        eventType = pressed ? MouseEvent.MOUSE_PRESSED : MouseEvent.MOUSE_RELEASED;
    }
    else {
        // no button in this event: moved if none is held, dragged otherwise
        eventType = getButtonMask( MouseEvent.NOBUTTON ) == 0 ? MouseEvent.MOUSE_MOVED : MouseEvent.MOUSE_DRAGGED;
    }
    final long time = System.currentTimeMillis();
    if ( lastComponent != comp ) {
        //enter/leave events
        // exit every component from the previous one up to a common ancestor
        while ( lastComponent != null && ( comp == null || !SwingUtilities.isDescendingFrom( comp, lastComponent ) ) )
        {
            final Point pos = convertPoint( desktop, x, y, lastComponent );
            sendExitedEvent( lastComponent, getCurrentModifiers( swingButton ), pos );
            lastComponent = lastComponent.getParent();
        }
        final Point pos = convertPoint( desktop, x, y, lastComponent );
        if ( lastComponent == null ) {
            lastComponent = desktop;
        }
        sendEnteredEvent( comp, lastComponent, getCurrentModifiers( swingButton ), pos );
        lastComponent = comp;
        // hovering a new component invalidates any pending click tracking
        downX = Integer.MIN_VALUE;
        downY = Integer.MIN_VALUE;
        lastClickTime = 0;
    }
    if ( comp != null ) {
        boolean clicked = false;
        if ( swingButton > MouseEvent.NOBUTTON ) {
            if ( pressed ) {
                // start a grab: the pressed component receives all drags and the release
                grabbedMouse = comp;
                grabbedMouseButton = swingButton;
                downX = x;
                downY = y;
                setFocusOwner( componentAt( x, y, desktop, true ) );
            }
            else if ( grabbedMouseButton == swingButton && grabbedMouse != null ) {
                comp = grabbedMouse;
                grabbedMouse = null;
                // a release close enough to the press position counts as a click
                if ( Math.abs( downX - x ) <= MAX_CLICKED_OFFSET && Math.abs( downY - y ) < MAX_CLICKED_OFFSET ) {
                    // fast successive clicks accumulate into double/triple clicks
                    if ( lastClickTime + DOUBLE_CLICK_TIME > time ) {
                        clickCount++;
                    }
                    else {
                        clickCount = 1;
                    }
                    clicked = true;
                    lastClickTime = time;
                }
                downX = Integer.MIN_VALUE;
                downY = Integer.MIN_VALUE;
            }
        }
        else if ( grabbedMouse != null ) {
            // motion during a grab is delivered to the grabbed component
            comp = grabbedMouse;
        }
        final Point pos = convertPoint( desktop, x, y, comp );
        final MouseEvent event = new MouseEvent( comp,
                eventType,
                time, getCurrentModifiers( swingButton ), pos.x, pos.y, clickCount,
                swingButton == MouseEvent.BUTTON2 && pressed, // todo: should this be platform dependent? (e.g. mac)
                swingButton >= 0 ? swingButton : 0 );
        dispatchEvent( comp, event );
        if ( clicked ) {
            // CLICKED seems to need special glass pane handling o_O
            comp = componentAt( x, y, desktop, true );
            final Point clickedPos = convertPoint( desktop, x, y, comp );
            final MouseEvent clickedEvent = new MouseEvent( comp,
                    MouseEvent.MOUSE_CLICKED,
                    time, getCurrentModifiers( swingButton ), clickedPos.x, clickedPos.y, clickCount,
                    false, swingButton );
            dispatchEvent( comp, clickedEvent );
        }
    }
    else if ( pressed ) {
        // clicked no component at all
        setFocusOwner( null );
    }
}
/**
 * Slick callback for mouse motion; forwards the new position to the Swing
 * thread synchronously as a button-less mouse event (move or drag).
 */
public void mouseMoved(int oldx, int oldy, int newx, int newy)
{
    final int awtX = newx;
    final int awtY = newy;
    try {
        SwingUtilities.invokeAndWait( new Runnable() {
            public void run() {
                sendAWTMouseEvent( awtX, awtY, false, MouseEvent.NOBUTTON );
            }
        } );
    } catch ( InterruptedException e ) {
        e.printStackTrace();
    } catch ( InvocationTargetException e ) {
        e.printStackTrace();
    }
}
// Slick controller callbacks: controllers are not mapped to Swing, so all of
// these are intentionally empty.

public void controllerLeftPressed(int i)
{
}

public void controllerLeftReleased(int i)
{
}

public void controllerRightPressed(int i)
{
}

public void controllerRightReleased(int i)
{
}

public void controllerUpPressed(int i)
{
}

public void controllerUpReleased(int i)
{
}

public void controllerDownPressed(int i)
{
}

public void controllerDownReleased(int i)
{
}

public void controllerButtonPressed(int i, int i0)
{
}

public void controllerButtonReleased(int i, int i0)
{
}
/**
 * @return current modal component
 * @see #setModalComponent(java.awt.Component)
 */
public Component getModalComponent() {
    return this.modalComponent;
}

/**
 * @see #setModalComponent(java.awt.Component)
 */
private Component modalComponent;

/**
 * Filter the swing event to allow events to the specified component and its children only.
 * Note: this does not prevent shortcuts and mnemonics to work for the other components!
 *
 * @param value component that can be exclusively accessed (including children),
 *              or null to disable modal filtering
 */
public void setModalComponent( final Component value ) {
    this.modalComponent = value;
}
/**
 * Popup implementation rendered as a light-weight panel directly on the
 * desktop pane instead of a heavy-weight native window, so it shows up in the
 * off-screen rendered desktop texture.
 */
private static class LightWeightPopup extends Popup {
    // topmost layer index, so the popup paints above all desktop content
    private static final Integer INTEGER_MAX_VALUE = Integer.MAX_VALUE;

    public LightWeightPopup( JComponent desktop ) {
        this.desktop = desktop;
        new ScrollPaneRepaintFixListener().addTo( panel );
    }

    private final JComponent desktop;
    JPanel panel = new JPanel( new BorderLayout() );

    /**
     * Places the popup contents on the desktop at (x, y), clamped so the
     * popup stays fully inside the desktop bounds.
     */
    public void adjust( Component owner, Component contents, int x, int y ) {
        panel.setVisible( false );
        desktop.add( panel, INTEGER_MAX_VALUE );
        panel.removeAll();
        panel.add( contents, BorderLayout.CENTER );
        if ( contents instanceof JComponent ) {
            JComponent jComponent = (JComponent) contents;
            // double buffering is pointless when rendering into an off-screen image
            jComponent.setDoubleBuffered( false );
        }
        panel.setSize( panel.getPreferredSize() );
        // keep the popup inside the desktop bounds
        y = Math.min( y, desktop.getHeight() - panel.getHeight() );
        x = Math.min( x, desktop.getWidth() - panel.getWidth() );
        panel.setLocation( x, y );
        contents.invalidate();
        panel.validate();
    }

    public void show() {
        panel.setVisible( true );
    }

    public void hide() {
        // remember the covered area so it can be repainted after removal
        Rectangle bounds = panel.getBounds();
        desktop.remove( panel );
        desktop.repaint( bounds );
    }
}
/**
 * Popup factory that hosts popups as light-weight panels on the owning
 * JDesktopPane. Falls back to the default factory when the owner is not
 * inside a desktop pane.
 */
private static class MyPopupFactory extends PopupFactory {
    private final PopupFactory defaultPopupFactory = new PopupFactory();

    public Popup getPopup( Component owner, Component contents, int x, int y ) throws IllegalArgumentException {
        // Walk up from the owner looking for the hosting JDesktopPane. Use a
        // separate cursor: the old code reassigned 'owner' while walking, so on
        // failure it logged "hierarchy of null" and passed a null owner to the
        // default factory (which throws IllegalArgumentException on null owner).
        Component desktopCandidate = owner;
        while ( !( desktopCandidate instanceof JDesktopPane ) ) {
            desktopCandidate = desktopCandidate.getParent();
            if ( desktopCandidate == null ) {
                System.out.println( "SlickDesktop Popup creation failed, default popup created - desktop not found in component hierarchy of " + owner );
                return defaultPopupFactory.getPopup( owner, contents, x, y );
            }
        }
        SlickDesktop.LightWeightPopup popup = new SlickDesktop.LightWeightPopup( (JComponent) desktopCandidate );
        popup.adjust( desktopCandidate, contents, x, y );
        return popup;
    }
}
/**
 * Container listener that recursively attaches itself to a component tree and
 * installs a repaint fix for JScrollPane viewports (scrolling would otherwise
 * not trigger a repaint of the off-screen desktop).
 */
private static class ScrollPaneRepaintFixListener implements ContainerListener {
    public void componentAdded( ContainerEvent e ) {
        Component child = e.getChild();
        componentAdded( child );
    }

    /** Hooks this listener (and the viewport repaint fix) into a newly added subtree. */
    private void componentAdded( Component child ) {
        if ( child instanceof Container ) {
            Container container = (Container) child;
            // addTo() already registers this listener on the container. The old
            // code additionally called container.addContainerListener(this) here,
            // registering the listener twice and making Swing deliver every
            // container event twice.
            addTo( container );
        }
        if ( child instanceof JScrollPane ) {
            final JScrollPane scrollPane = (JScrollPane) child;
            // note: the listener added here is only a fix for repaint problems with scrolling
            subscribeRepaintListener( scrollPane.getViewport() );
        }
    }

    /** Registers this listener on the container and recursively on all children. */
    private void addTo( Container container ) {
        container.addContainerListener( this );
        for ( int i = 0; i < container.getComponentCount(); i++ ) {
            componentAdded( container.getComponent( i ) );
        }
    }

    /** Unregisters this listener from the container and recursively from all children. */
    private void removeFrom( Container container ) {
        container.removeContainerListener( this );
        for ( int i = 0; i < container.getComponentCount(); i++ ) {
            componentRemoved( container.getComponent( i ) );
        }
    }

    /** Adds the repaint change listener to the viewport unless it is already subscribed. */
    private void subscribeRepaintListener( JViewport viewport ) {
        for ( int i = 0; i < viewport.getChangeListeners().length; i++ ) {
            ChangeListener listener = viewport.getChangeListeners()[i];
            if ( listener instanceof ScrollPaneRepaintChangeListener ) {
                // listener already subscribed
                return;
            }
        }
        viewport.addChangeListener( new ScrollPaneRepaintChangeListener( viewport ) );
    }

    public void componentRemoved( ContainerEvent e ) {
        Component child = e.getChild();
        componentRemoved( child );
    }

    private void componentRemoved( Component child ) {
        if ( child instanceof Container ) {
            Container container = (Container) child;
            removeFrom( container );
        }
    }

    /** Repaints the viewport whenever its scroll position changes. */
    private static class ScrollPaneRepaintChangeListener implements ChangeListener {
        private final Component component;

        public ScrollPaneRepaintChangeListener( Component component ) {
            this.component = component;
        }

        public void stateChanged( ChangeEvent e ) {
            component.repaint();
        }
    }
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.packaging.test;
import org.elasticsearch.packaging.util.FileUtils;
import org.elasticsearch.packaging.util.Packages;
import org.elasticsearch.packaging.util.Shell.Result;
import org.junit.BeforeClass;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static java.nio.file.StandardOpenOption.APPEND;
import static org.elasticsearch.packaging.util.FileExistenceMatchers.fileDoesNotExist;
import static org.elasticsearch.packaging.util.FileExistenceMatchers.fileExists;
import static org.elasticsearch.packaging.util.FileUtils.append;
import static org.elasticsearch.packaging.util.FileUtils.assertPathsDoNotExist;
import static org.elasticsearch.packaging.util.FileUtils.assertPathsExist;
import static org.elasticsearch.packaging.util.FileUtils.fileWithGlobExist;
import static org.elasticsearch.packaging.util.FileUtils.mv;
import static org.elasticsearch.packaging.util.FileUtils.rm;
import static org.elasticsearch.packaging.util.FileUtils.slurp;
import static org.elasticsearch.packaging.util.Packages.SYSTEMD_SERVICE;
import static org.elasticsearch.packaging.util.Packages.assertInstalled;
import static org.elasticsearch.packaging.util.Packages.assertRemoved;
import static org.elasticsearch.packaging.util.Packages.installPackage;
import static org.elasticsearch.packaging.util.Packages.remove;
import static org.elasticsearch.packaging.util.Packages.restartElasticsearch;
import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation;
import static org.elasticsearch.packaging.util.Platforms.getOsRelease;
import static org.elasticsearch.packaging.util.Platforms.isSystemd;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyString;
import static org.hamcrest.core.Is.is;
import static org.junit.Assume.assumeThat;
import static org.junit.Assume.assumeTrue;
/**
 * Packaging tests for the rpm/deb Elasticsearch distributions: installation,
 * startup/shutdown, JVM option handling, removal, reinstallation, and
 * systemd-specific behavior. Test methods are numbered because they build on
 * state left behind by earlier tests.
 */
public class PackageTests extends PackagingTestCase {

    /** Runs this suite only for package (rpm/deb) distributions. */
    @BeforeClass
    public static void filterDistros() {
        assumeTrue("rpm or deb", distribution.isPackage());
    }

    /** Installs the package and verifies the resulting layout. */
    public void test10InstallPackage() throws Exception {
        assertRemoved(distribution());
        installation = installPackage(sh, distribution());
        assertInstalled(distribution());
        verifyPackageInstallation(installation, distribution(), sh);
        setFileSuperuser("test_superuser", "test_superuser_password");
    }

    /** A fresh install must have no plugins. */
    public void test20PluginsCommandWhenNoPlugins() {
        assertThat(sh.run(installation.bin("elasticsearch-plugin") + " list").stdout, is(emptyString()));
    }

    /** The systemd service must not be enabled right after installation. */
    public void test30DaemonIsNotEnabledOnRestart() {
        if (isSystemd()) {
            sh.run("systemctl daemon-reload");
            String isEnabledOutput = sh.runIgnoreExitCode("systemctl is-enabled elasticsearch.service").stdout.trim();
            assertThat(isEnabledOutput, equalTo("disabled"));
        }
    }

    /** Installing the package must not start the server. */
    public void test31InstallDoesNotStartServer() {
        assertThat(sh.run("ps aux").stdout, not(containsString("org.elasticsearch.bootstrap.Elasticsearch")));
    }

    /**
     * Starts Elasticsearch with ES_JAVA_HOME pointing at the system JDK and
     * verifies (via the logs) that this JDK was actually used. Restores the
     * original env file afterwards.
     */
    private void assertRunsWithJavaHome() throws Exception {
        byte[] originalEnvFile = Files.readAllBytes(installation.envFile);
        try {
            Files.write(installation.envFile, List.of("ES_JAVA_HOME=" + systemJavaHome), APPEND);
            startElasticsearch();
            runElasticsearchTests();
            stopElasticsearch();
        } finally {
            Files.write(installation.envFile, originalEnvFile);
        }
        assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "elasticsearch*.log.gz"), containsString(systemJavaHome));
    }

    /** ES_JAVA_HOME must override the bundled JDK. */
    public void test32JavaHomeOverride() throws Exception {
        // we always run with java home when no bundled jdk is included, so this test would be repetitive
        assumeThat(distribution().hasJdk, is(true));
        assertRunsWithJavaHome();
    }

    /** The bundled JDK must be used even when no java is on the PATH. */
    public void test33RunsIfJavaNotOnPath() throws Exception {
        assumeThat(distribution().hasJdk, is(true));
        // we don't require java be installed but some images have it
        String backupPath = "/usr/bin/java." + getClass().getSimpleName() + ".bak";
        if (Files.exists(Paths.get("/usr/bin/java"))) {
            sh.run("sudo mv /usr/bin/java " + backupPath);
        }
        try {
            startElasticsearch();
            runElasticsearchTests();
            stopElasticsearch();
        } finally {
            // restore the system java binary if we moved it aside
            if (Files.exists(Paths.get(backupPath))) {
                sh.run("sudo mv " + backupPath + " /usr/bin/java");
            }
        }
    }

    /** A custom heap setting from the jvm.options.d directory must take effect. */
    public void test34CustomJvmOptionsDirectoryFile() throws Exception {
        setHeap("512m");
        startElasticsearch();
        final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
        assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
        stopElasticsearch();
    }

    /** Startup must work, log to the right places, and not spam the journal. */
    public void test40StartServer() throws Exception {
        String start = sh.runIgnoreExitCode("date ").stdout.trim();
        startElasticsearch();
        // nothing should be logged to the journal since startup (all output goes to the log files)
        String journalEntries = sh.runIgnoreExitCode(
            "journalctl _SYSTEMD_UNIT=elasticsearch.service " + "--since \"" + start + "\" --output cat | wc -l"
        ).stdout.trim();
        assertThat(journalEntries, equalTo("0"));
        assertPathsExist(installation.pidDir.resolve("elasticsearch.pid"));
        assertPathsExist(installation.logs.resolve("elasticsearch_server.json"));
        runElasticsearchTests();
        verifyPackageInstallation(installation, distribution(), sh); // check startup script didn't change permissions
        stopElasticsearch();
    }

    /** With the bundled JDK moved away, ES_JAVA_HOME must still work. */
    public void test42BundledJdkRemoved() throws Exception {
        assumeThat(distribution().hasJdk, is(true));
        Path relocatedJdk = installation.bundledJdk.getParent().resolve("jdk.relocated");
        try {
            mv(installation.bundledJdk, relocatedJdk);
            assertRunsWithJavaHome();
        } finally {
            mv(relocatedJdk, installation.bundledJdk);
        }
    }

    /** Removing the package must stop the service and delete the managed paths. */
    public void test50Remove() throws Exception {
        // add fake bin directory as if a plugin was installed
        Files.createDirectories(installation.bin.resolve("myplugin"));
        remove(distribution());
        // removing must stop the service
        assertThat(sh.run("ps aux").stdout, not(containsString("org.elasticsearch.bootstrap.Elasticsearch")));
        if (isSystemd()) {
            final int statusExitCode;
            // Before version 231 systemctl returned exit code 3 for both services that were stopped, and nonexistent
            // services [1]. In version 231 and later it returns exit code 4 for non-existent services.
            //
            // The exception is Centos, OEL or RHEL 7 where it returns exit code 4 for non-existent services from a systemd reporting a
            // version earlier than 231. Centos 6 does not have an /etc/os-release, but that's fine because it also doesn't use systemd.
            //
            // [1] https://github.com/systemd/systemd/pull/3385
            if (getOsRelease().contains("ID=\"centos\"")
                || getOsRelease().contains("ID=\"ol\"")
                || getOsRelease().contains("ID=\"rhel\"")) {
                statusExitCode = 4;
            } else {
                final Result versionResult = sh.run("systemctl --version");
                final Matcher matcher = Pattern.compile("^systemd (\\d+)").matcher(versionResult.stdout);
                matcher.find();
                final int version = Integer.parseInt(matcher.group(1));
                statusExitCode = version < 231 ? 3 : 4;
            }
            assertThat(sh.runIgnoreExitCode("systemctl status elasticsearch.service").exitCode, is(statusExitCode));
            assertThat(sh.runIgnoreExitCode("systemctl is-enabled elasticsearch.service").exitCode, is(1));
        }
        assertPathsDoNotExist(
            installation.bin,
            installation.lib,
            installation.modules,
            installation.plugins,
            installation.logs,
            installation.pidDir
        );
        assertThat(SYSTEMD_SERVICE, fileDoesNotExist());
    }

    /** Reinstalling after removal must produce a clean, verifiable installation. */
    public void test60Reinstall() throws Exception {
        try {
            install();
            assertInstalled(distribution());
            verifyPackageInstallation(installation, distribution(), sh);
            remove(distribution());
            assertRemoved(distribution());
        } finally {
            cleanup();
        }
    }

    /** The service must survive a restart while running. */
    public void test70RestartServer() throws Exception {
        try {
            install();
            assertInstalled(distribution());
            // Recreate file realm users that have been deleted in earlier tests
            setFileSuperuser("test_superuser", "test_superuser_password");
            startElasticsearch();
            restartElasticsearch(sh, installation);
            runElasticsearchTests();
            stopElasticsearch();
        } finally {
            cleanup();
        }
    }

    /** es.total_memory_bytes must override heap auto-sizing (40% of "total"). */
    public void test71JvmOptionsTotalMemoryOverride() throws Exception {
        try {
            install();
            assertPathsExist(installation.envFile);
            setHeap(null);
            // Recreate file realm users that have been deleted in earlier tests
            setFileSuperuser("test_superuser", "test_superuser_password");
            withCustomConfig(tempConf -> {
                // Work as though total system memory is 850MB
                append(installation.envFile, "ES_JAVA_OPTS=\"-Des.total_memory_bytes=891289600\"");
                startElasticsearch();
                final String nodesStatsResponse = makeRequest("https://localhost:9200/_nodes/stats");
                assertThat(nodesStatsResponse, containsString("\"adjusted_total_in_bytes\":891289600"));
                // 40% of 850MB
                assertThat(sh.run("ps auwwx").stdout, containsString("-Xms340m -Xmx340m"));
                stopElasticsearch();
            });
        } finally {
            cleanup();
        }
    }

    /** A deleted PID directory must be recreated on startup. */
    public void test72TestRuntimeDirectory() throws Exception {
        try {
            install();
            FileUtils.rm(installation.pidDir);
            startElasticsearch();
            assertPathsExist(installation.pidDir);
            stopElasticsearch();
        } finally {
            cleanup();
        }
    }

    /** GC logging must be enabled by default. */
    public void test73gcLogsExist() throws Exception {
        install();
        startElasticsearch();
        // it can be gc.log or gc.log.0.current
        assertThat(installation.logs, fileWithGlobExist("gc.log*"));
        stopElasticsearch();
    }

    // TEST CASES FOR SYSTEMD ONLY

    /**
     * # Simulates the behavior of a system restart:
     * # the PID directory is deleted by the operating system
     * # but it should not block ES from starting
     * # see https://github.com/elastic/elasticsearch/issues/11594
     */
    public void test80DeletePID_DIRandRestart() throws Exception {
        assumeTrue(isSystemd());
        rm(installation.pidDir);
        sh.run("systemd-tmpfiles --create");
        startElasticsearch();
        final Path pidFile = installation.pidDir.resolve("elasticsearch.pid");
        assertThat(pidFile, fileExists());
        stopElasticsearch();
    }

    /** ES_JAVA_OPTS from the env file must be honored with a custom config path. */
    public void test81CustomPathConfAndJvmOptions() throws Exception {
        assumeTrue(isSystemd());
        assertPathsExist(installation.envFile);
        stopElasticsearch();
        // Recreate file realm users that have been deleted in earlier tests
        setFileSuperuser("test_superuser", "test_superuser_password");
        withCustomConfig(tempConf -> {
            append(installation.envFile, "ES_JAVA_OPTS=\"-Xmx512m -Xms512m -XX:-UseCompressedOops\"");
            startElasticsearch();
            final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
            assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
            assertThat(nodesResponse, containsString("\"using_compressed_ordinary_object_pointers\":\"false\""));
            stopElasticsearch();
        });
        cleanup();
    }

    /** Installation must succeed even when systemd-sysctl is masked. */
    public void test83SystemdMask() throws Exception {
        try {
            assumeTrue(isSystemd());
            sh.run("systemctl mask systemd-sysctl.service");
            install();
            sh.run("systemctl unmask systemd-sysctl.service");
        } finally {
            cleanup();
        }
    }

    /** The unit file must raise the process resource limits. */
    public void test84serviceFileSetsLimits() throws Exception {
        // Limits are changed on systemd platforms only
        assumeTrue(isSystemd());
        install();
        startElasticsearch();
        final Path pidFile = installation.pidDir.resolve("elasticsearch.pid");
        assertThat(pidFile, fileExists());
        String pid = slurp(pidFile).trim();
        String maxFileSize = sh.run("cat /proc/%s/limits | grep \"Max file size\" | awk '{ print $4 }'", pid).stdout.trim();
        assertThat(maxFileSize, equalTo("unlimited"));
        String maxProcesses = sh.run("cat /proc/%s/limits | grep \"Max processes\" | awk '{ print $3 }'", pid).stdout.trim();
        assertThat(maxProcesses, equalTo("4096"));
        String maxOpenFiles = sh.run("cat /proc/%s/limits | grep \"Max open files\" | awk '{ print $4 }'", pid).stdout.trim();
        assertThat(maxOpenFiles, equalTo("65535"));
        String maxAddressSpace = sh.run("cat /proc/%s/limits | grep \"Max address space\" | awk '{ print $4 }'", pid).stdout.trim();
        assertThat(maxAddressSpace, equalTo("unlimited"));
        stopElasticsearch();
    }

    /** Startup errors must reach the journal even in quiet mode. */
    public void test90DoNotCloseStderrWhenQuiet() throws Exception {
        assumeTrue(isSystemd());
        assertPathsExist(installation.envFile);
        stopElasticsearch();
        withCustomConfig(tempConf -> {
            // Create a startup problem by adding an invalid YAML line to the config
            append(tempConf.resolve("elasticsearch.yml"), "discovery.seed_hosts:15172.30.5.3416172.30.5.35, 172.30.5.17]\n");
            // Make sure we don't pick up the journal entries for previous ES instances.
            Packages.JournaldWrapper journald = new Packages.JournaldWrapper(sh);
            runElasticsearchStartCommand(null, true, false);
            assertBusy(() -> {
                final Result logs = journald.getLogs();
                assertThat(logs.stdout, containsString("Failed to load settings from [elasticsearch.yml]"));
            });
        });
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.filecache;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.DateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.LinkedList;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.TaskDistributedCacheManager.CacheFile;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapred.InvalidJobConfException;
import org.apache.hadoop.mapred.TaskController;
import org.apache.hadoop.mapred.TaskTracker;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.RunJar;
import org.apache.hadoop.mapreduce.security.TokenCache;
/**
* Manages a single machine's instance of a cross-job
* cache. This class would typically be instantiated
* by a TaskTracker (or something that emulates it,
* like LocalJobRunner).
*
* <b>This class is internal to Hadoop, and should not be treated as a public
* interface.</b>
*/
public class TrackerDistributedCacheManager {
// cacheID to cacheStatus mapping
private TreeMap<String, CacheStatus> cachedArchives =
new TreeMap<String, CacheStatus>();
private Map<JobID, TaskDistributedCacheManager> jobArchives =
Collections.synchronizedMap(
new HashMap<JobID, TaskDistributedCacheManager>());
private final TaskController taskController;
private static final FsPermission PUBLIC_CACHE_OBJECT_PERM =
FsPermission.createImmutable((short) 0755);
// For holding the properties of each cache base directory: used to
// enforce the total-size and subdirectory-count budgets per base dir.
static class CacheDir {
    long size;     // total bytes of localized cache objects under this base dir
    long subdirs;  // number of localized unique sub-directories under it
}
private TreeMap<Path, CacheDir> baseDirProperties =
new TreeMap<Path, CacheDir>();
// default total cache size (10GB)
private static final long DEFAULT_CACHE_SIZE = 10737418240L;
private static final long DEFAULT_CACHE_SUBDIR_LIMIT = 10000;
private long allowedCacheSize;
private long allowedCacheSubdirs;
private static final Log LOG =
LogFactory.getLog(TrackerDistributedCacheManager.class);
private final LocalFileSystem localFs;
private LocalDirAllocator lDirAllocator;
private Configuration trackerConf;
private static final Random random = new Random();
/**
 * Creates a cache manager backed by the local filesystem.
 *
 * @param conf tracker configuration; "local.cache.size" and
 *        "mapreduce.tasktracker.local.cache.numberdirectories" bound the
 *        cache size and subdirectory count respectively
 * @param controller used to delete private cache entries as the owning user
 * @throws IOException if the local filesystem cannot be obtained
 */
public TrackerDistributedCacheManager(Configuration conf,
                                      TaskController controller
                                      ) throws IOException {
    this.localFs = FileSystem.getLocal(conf);
    this.trackerConf = conf;
    this.lDirAllocator = new LocalDirAllocator("mapred.local.dir");
    // setting the cache size to a default of 10GB
    this.allowedCacheSize = conf.getLong
        ("local.cache.size", DEFAULT_CACHE_SIZE);
    // setting the cache number of subdirectories limit to a default of 10000
    this.allowedCacheSubdirs = conf.getLong
        ("mapreduce.tasktracker.local.cache.numberdirectories",
         DEFAULT_CACHE_SUBDIR_LIMIT);
    this.taskController = controller;
}
/**
 * Get the locally cached file or archive; it could either be
 * previously cached (and valid) or copy it from the {@link FileSystem} now.
 *
 * @param cache the cache to be localized, this should be specified as
 * new URI(scheme://scheme-specific-part/absolute_path_to_file#LINKNAME).
 * @param conf The Configuration file which contains the filesystem
 * @param subDir The base cache subDir where you want to localize the
 *  files/archives
 * @param fileStatus The file status on the dfs.
 * @param isArchive if the cache is an archive or a file. In case it is an
 *  archive with a .zip or .jar or .tar or .tgz or .tar.gz extension it will
 *  be unzipped/unjarred/untarred automatically
 *  and the directory where the archive is unzipped/unjarred/untarred is
 *  returned as the Path.
 *  In case of a file, the path to the file is returned
 * @param confFileStamp this is the hdfs file modification timestamp to verify
 * that the file to be cached hasn't changed since the job started
 * @param isPublic to know the cache file is accessible to public or private
 * @param file the CacheFile that will hold a reference to this cache entry
 * @return the path to directory where the archives are unjarred in case of
 * archives, the path to the file where the file is copied locally
 * @throws IOException
 */
Path getLocalCache(URI cache, Configuration conf,
    String subDir, FileStatus fileStatus,
    boolean isArchive, long confFileStamp,
    boolean isPublic, CacheFile file) throws IOException {
  String key;
  String user = getLocalizedCacheOwner(isPublic);
  key = getKey(cache, conf, confFileStamp, user);
  CacheStatus lcacheStatus;
  Path localizedPath = null;
  Path localPath = null;
  // Phase 1 (global lock): find or create the CacheStatus for this key and
  // bump its refcount so compactCache() cannot delete it concurrently.
  synchronized (cachedArchives) {
    lcacheStatus = cachedArchives.get(key);
    if (lcacheStatus == null) {
      // was never localized
      String uniqueString
        = (String.valueOf(random.nextLong())
           + "_" + cache.hashCode()
           + "_" + (confFileStamp % Integer.MAX_VALUE));
      String cachePath = new Path (subDir,
        new Path(uniqueString, makeRelative(cache, conf))).toString();
      localPath = lDirAllocator.getLocalPathForWrite(cachePath,
        fileStatus.getLen(), trackerConf, isPublic);
      // The base dir is the allocated path with the cache-relative suffix
      // stripped off.
      lcacheStatus =
        new CacheStatus(new Path(localPath.toString().replace(cachePath, "")),
                        localPath, new Path(subDir), uniqueString,
                        isPublic ? null : user);
      cachedArchives.put(key, lcacheStatus);
    }
    //mark the cache for use.
    file.setStatus(lcacheStatus);
    synchronized (lcacheStatus) {
      lcacheStatus.refcount++;
    }
  }
  try {
    // Phase 2: do the localization, after releasing the global lock;
    // work is serialized per cache entry via the entry's own monitor.
    synchronized (lcacheStatus) {
      if (!lcacheStatus.isInited()) {
        if (isPublic) {
          localizedPath = localizePublicCacheObject(conf,
                                                    cache,
                                                    confFileStamp,
                                                    lcacheStatus, fileStatus,
                                                    isArchive);
        } else {
          localizedPath = localPath;
          if (!isArchive) {
            //for private archives, the lengths come over RPC from the
            //JobLocalizer since the JobLocalizer is the one who expands
            //archives and gets the total length
            lcacheStatus.size = fileStatus.getLen();
            // Increase the size and sub directory count of the cache
            // from baseDirSize and baseDirNumberSubDir.
            addCacheInfoUpdate(lcacheStatus);
          }
        }
        lcacheStatus.initComplete();
      } else {
        // Already localized by an earlier caller; just verify freshness.
        localizedPath = checkCacheStatusValidity(conf, cache, confFileStamp,
                                                 lcacheStatus, fileStatus, isArchive);
      }
    }
    // Phase 3: read this base dir's usage and, if over budget,
    // try deleting stuff if you can.
    long size = 0;
    long numberSubdirs = 0;
    synchronized (lcacheStatus) {
      synchronized (baseDirProperties) {
        CacheDir cacheDir = baseDirProperties.get(lcacheStatus.getBaseDir());
        if (cacheDir != null) {
          size = cacheDir.size;
          numberSubdirs = cacheDir.subdirs;
        } else {
          LOG.warn("Cannot find size and number of subdirectories of" +
                   " baseDir: " + lcacheStatus.getBaseDir());
        }
      }
    }
    if (allowedCacheSize < size || allowedCacheSubdirs < numberSubdirs) {
      // try some cache deletions
      compactCache(conf);
    }
  } catch (IOException ie) {
    synchronized (lcacheStatus) {
      // release this cache on failure so compactCache() may reclaim it
      lcacheStatus.refcount -= 1;
      throw ie;
    }
  }
  return localizedPath;
}
/**
 * This is the opposite of getLocalCache. When you are done with
 * using the cache, you need to release it so its reference count drops
 * and compactCache() can eventually reclaim the local files.
 *
 * @param status the cache entry previously pinned by getLocalCache
 * @throws IOException
 */
void releaseCache(CacheStatus status) throws IOException {
    synchronized (status) {
        status.refcount--;
    }
}
/**
 * Records the measured on-disk size of a cache entry and folds it into the
 * per-base-directory accounting. A size of zero is ignored.
 *
 * @param status the cache entry to update
 * @param size measured size in bytes; zero means "unknown, skip"
 * @throws IOException
 */
void setSize(CacheStatus status, long size) throws IOException {
    if (size == 0) {
        return; // nothing measured; leave the entry untouched
    }
    synchronized (status) {
        status.size = size;
        addCacheInfoUpdate(status);
    }
}
/*
 * This method is called from unit tests.
 * Returns the current reference count of the given cache entry,
 * read under the entry's monitor for a consistent value.
 */
int getReferenceCount(CacheStatus status) throws IOException {
    synchronized (status) {
        return status.refcount;
    }
}
/**
 * Get the user who should "own" the localized distributed cache file.
 * If the cache is public, the tasktracker user is the owner. If private,
 * the user that the task is running as, is the owner.
 *
 * @param isPublic whether the cache object is publicly visible
 * @return the owner as a shortname string
 * @throws IOException
 */
static String getLocalizedCacheOwner(boolean isPublic) throws IOException {
    // Public entries belong to the daemon (login) user; private entries
    // belong to whoever the current task is running as.
    UserGroupInformation owner = isPublic
        ? UserGroupInformation.getLoginUser()
        : UserGroupInformation.getCurrentUser();
    return owner.getShortUserName();
}
// To delete the caches which have a refcount of zero: unlinks zero-ref
// entries under the global lock, then deletes their local files after the
// lock is released. (Fix: removed the unused local `potentialDeletee`.)
private void compactCache(Configuration conf) throws IOException {
    List<CacheStatus> deleteList = new LinkedList<CacheStatus>();
    // try deleting cache Status with refcount of zero
    synchronized (cachedArchives) {
        for (Iterator<String> it = cachedArchives.keySet().iterator();
             it.hasNext();) {
            String cacheId = it.next();
            CacheStatus lcacheStatus = cachedArchives.get(cacheId);
            // if reference count is zero
            // mark the cache for deletion
            if (lcacheStatus.refcount == 0) {
                // delete this cache entry from the global list
                // and mark the localized file for deletion
                deleteList.add(lcacheStatus);
                it.remove();
            }
        }
    }
    // do the deletion, after releasing the global lock
    for (CacheStatus lcacheStatus : deleteList) {
        synchronized (lcacheStatus) {
            Path localizedDir = lcacheStatus.getLocalizedUniqueDir();
            if (lcacheStatus.user == null) {
                // Public entry: owned by the daemon user, delete directly.
                LOG.info("Deleted path " + localizedDir);
                try {
                    localFs.delete(localizedDir, true);
                } catch (IOException e) {
                    LOG.warn("Could not delete distributed cache empty directory "
                             + localizedDir, e);
                }
            } else {
                // Private entry: must be deleted as the owning user via the
                // task controller, using a path relative to the user dir.
                LOG.info("Deleted path " + localizedDir + " as " + lcacheStatus.user);
                String base = lcacheStatus.getBaseDir().toString();
                String userDir = TaskTracker.getUserDir(lcacheStatus.user);
                int skip = base.length() + 1 + userDir.length() + 1;
                String relative = localizedDir.toString().substring(skip);
                taskController.deleteAsUser(lcacheStatus.user, relative);
            }
            // Subtract this entry from the base-dir size/subdir accounting.
            deleteCacheInfoUpdate(lcacheStatus);
        }
    }
}
/*
 * Returns the relative path this cache will be localized in:
 * <host>/<absolute_path>, where host falls back to the URI scheme and,
 * if both are absent, to the host (or scheme) of the cluster's default
 * filesystem. E.g. hdfs://hostname:port/abs_path -> hostname/abs_path.
 */
String makeRelative(URI cache, Configuration conf)
    throws IOException {
    String host = cache.getHost() != null ? cache.getHost()
                                          : cache.getScheme();
    if (host == null) {
        // Neither authority nor scheme on the URI: derive the prefix from
        // the default filesystem of this cluster.
        URI defaultUri = FileSystem.get(conf).getUri();
        host = defaultUri.getHost() != null ? defaultUri.getHost()
                                            : defaultUri.getScheme();
    }
    String relative = host + cache.getPath();
    // remove windows device colon so the result is usable as a subpath
    return relative.replace(":/", "/");
}
/**
 * Verifies an already-localized cache entry is still fresh with respect to
 * the source file's timestamp; returns the localized path if so.
 *
 * @throws IOException if the source file changed since the job started
 */
private Path checkCacheStatusValidity(Configuration conf,
    URI cache, long confFileStamp,
    CacheStatus cacheStatus,
    FileStatus fileStatus,
    boolean isArchive
    ) throws IOException {
    FileSystem fs = FileSystem.get(cache, conf);
    // Has to be fresh: ifExistsAndFresh throws (or returns false) when the
    // source timestamp no longer matches the job's recorded timestamp.
    if (!ifExistsAndFresh(conf, fs, cache, confFileStamp,
                          cacheStatus, fileStatus)) {
        throw new IOException("Stale cache file: " + cacheStatus.localizedLoadPath +
                              " for cache-file: " + cache);
    }
    LOG.info(String.format("Using existing cache of %s->%s",
                           cache.toString(), cacheStatus.localizedLoadPath));
    return cacheStatus.localizedLoadPath;
}
/**
 * Returns a boolean to denote whether a cache file is visible to all
 * (public) or not: the leaf must be readable by others and every ancestor
 * directory must be executable by others.
 *
 * @param conf configuration used to resolve the filesystem
 * @param uri location of the cache object
 * @return true if the path in the uri is visible to all, false otherwise
 * @throws IOException
 */
static boolean isPublic(Configuration conf, URI uri) throws IOException {
    FileSystem fs = FileSystem.get(uri, conf);
    Path leaf = new Path(uri.getPath());
    // The leaf-level file must be readable by others.
    if (!checkPermissionOfOther(fs, leaf, FsAction.READ)) {
        return false;
    }
    // Every directory on the way up must be traversable (executable) by
    // others, otherwise the file is effectively private.
    for (Path dir = leaf.getParent(); dir != null; dir = dir.getParent()) {
        if (!checkPermissionOfOther(fs, dir, FsAction.EXECUTE)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks for a given path whether the Other permissions on it
 * imply the permission in the passed FsAction.
 *
 * @param fs filesystem the path lives on
 * @param path path whose permission bits are inspected
 * @param action the action others must be allowed to perform
 * @return true if others are granted {@code action} on {@code path}
 * @throws IOException if the file status cannot be fetched
 */
private static boolean checkPermissionOfOther(FileSystem fs, Path path,
    FsAction action) throws IOException {
    FileStatus status = fs.getFileStatus(path);
    FsPermission perms = status.getPermission();
    // Return the implication directly instead of if/return-true/return-false.
    return perms.getOtherAction().implies(action);
}
// Builds a unique scratch path next to `base` by appending a random
// "-work-<n>" suffix; used as a temporary download/unpack directory.
private static Path createRandomPath(Path base) throws IOException {
    String suffix = "-work-" + random.nextLong();
    return new Path(base.toString() + suffix);
}
/**
 * Download a given path to the local file system.
 * @param conf the job's configuration
 * @param source the source to copy from
 * @param destination where to copy the file. must be local fs
 * @param desiredTimestamp the required modification timestamp of the source
 * @param isArchive is this an archive that should be expanded
 * @param permission the desired permissions of the file.
 * @return for archives, the number of bytes in the unpacked directory;
 *         0 if the object was already present or another downloader won
 * @throws IOException
 */
public static long downloadCacheObject(Configuration conf,
                                       URI source,
                                       Path destination,
                                       long desiredTimestamp,
                                       boolean isArchive,
                                       FsPermission permission
                                       ) throws IOException {
    FileSystem sourceFs = FileSystem.get(source, conf);
    FileSystem localFs = FileSystem.getLocal(conf);
    Path sourcePath = new Path(source.getPath());
    // Refuse to download if the source changed after the job captured its
    // timestamp — the job would otherwise see inconsistent data.
    long modifiedTime =
        sourceFs.getFileStatus(sourcePath).getModificationTime();
    if (modifiedTime != desiredTimestamp) {
        DateFormat df = DateFormat.getDateTimeInstance(DateFormat.SHORT,
                                                       DateFormat.SHORT);
        throw new IOException("The distributed cache object " + source +
                              " changed during the job from " +
                              df.format(new Date(desiredTimestamp)) + " to " +
                              df.format(new Date(modifiedTime)));
    }
    // For archives the payload lives one level below `destination`, in a
    // directory that will hold the expanded contents.
    Path parchive = null;
    if (isArchive) {
        parchive = new Path(destination, destination.getName());
    } else {
        parchive = destination;
    }
    // if the file already exists, we are done
    if (localFs.exists(parchive)) {
        return 0;
    }
    // the final directory for the object
    Path finalDir = parchive.getParent();
    // the work directory for the object: download into a random scratch
    // dir first, then atomically promote via rename.
    Path workDir = createRandomPath(finalDir);
    LOG.info("Creating " + destination.getName() + " in " + workDir + " with " +
             permission);
    if (!localFs.mkdirs(workDir, permission)) {
        throw new IOException("Mkdirs failed to create directory " + workDir);
    }
    Path workFile = new Path(workDir, parchive.getName());
    sourceFs.copyToLocalFile(sourcePath, workFile);
    localFs.setPermission(workFile, permission);
    if (isArchive) {
        String tmpArchive = workFile.getName().toLowerCase();
        File srcFile = new File(workFile.toString());
        File destDir = new File(workDir.toString());
        LOG.info(String.format("Extracting %s to %s",
                               srcFile.toString(), destDir.toString()));
        if (tmpArchive.endsWith(".jar")) {
            RunJar.unJar(srcFile, destDir);
        } else if (tmpArchive.endsWith(".zip")) {
            FileUtil.unZip(srcFile, destDir);
        } else if (isTarFile(tmpArchive)) {
            FileUtil.unTar(srcFile, destDir);
        } else {
            LOG.warn(String.format(
                "Cache file %s specified as archive, but not valid extension.",
                srcFile.toString()));
            // else will not do anything
            // and copy the file into the dir as it is
        }
        // Expanded contents must be traversable/readable by everyone.
        FileUtil.chmod(destDir.toString(), "ugo+rx", true);
    }
    // promote the output to the final location
    if (!localFs.rename(workDir, finalDir)) {
        localFs.delete(workDir, true);
        if (!localFs.exists(finalDir)) {
            throw new IOException("Failed to promote distributed cache object " +
                                  workDir + " to " + finalDir);
        }
        // someone else promoted first
        return 0;
    }
    LOG.info(String.format("Cached %s as %s",
                           source.toString(), destination.toString()));
    long cacheSize =
        FileUtil.getDU(new File(parchive.getParent().toString()));
    return cacheSize;
}
//the method which actually copies the caches locally and unjars/unzips them
// and does chmod for the files; also records the downloaded size in the
// per-base-directory accounting.
Path localizePublicCacheObject(Configuration conf,
                               URI cache, long confFileStamp,
                               CacheStatus cacheStatus,
                               FileStatus fileStatus,
                               boolean isArchive) throws IOException {
    long size = downloadCacheObject(conf, cache, cacheStatus.localizedLoadPath,
                                    confFileStamp, isArchive,
                                    PUBLIC_CACHE_OBJECT_PERM);
    cacheStatus.size = size;
    // Increase the size and sub directory count of the cache
    // from baseDirSize and baseDirNumberSubDir.
    addCacheInfoUpdate(cacheStatus);
    LOG.info(String.format("Cached %s as %s",
                           cache.toString(), cacheStatus.localizedLoadPath));
    return cacheStatus.localizedLoadPath;
}
// Recognizes the tar-style archive extensions that FileUtil.unTar handles.
private static boolean isTarFile(String filename) {
    for (String extension : new String[] {".tgz", ".tar.gz", ".tar"}) {
        if (filename.endsWith(extension)) {
            return true;
        }
    }
    return false;
}
// Checks if the cache has already been localized and is fresh; returns
// true when the source timestamp matches the job's recorded timestamp,
// otherwise logs and throws. (Note: this method never returns false —
// a stale file always raises IOException.)
private boolean ifExistsAndFresh(Configuration conf, FileSystem fs,
                                 URI cache, long confFileStamp,
                                 CacheStatus lcacheStatus,
                                 FileStatus fileStatus)
    throws IOException {
    long dfsFileStamp;
    if (fileStatus != null) {
        dfsFileStamp = fileStatus.getModificationTime();
    } else {
        // No status supplied: fetch the timestamp from the source filesystem.
        dfsFileStamp = DistributedCache.getTimestamp(conf, cache);
    }
    // ensure that the file on hdfs hasn't been modified since the job started
    if (dfsFileStamp != confFileStamp) {
        LOG.fatal("File: " + cache + " has changed on HDFS since job started");
        throw new IOException("File: " + cache +
                              " has changed on HDFS since job started");
    }
    return true;
}
// Builds the cachedArchives map key: relative path + timestamp + owner,
// so the same URI localized for different users/timestamps gets distinct
// entries.
String getKey(URI cache, Configuration conf, long timeStamp, String user)
    throws IOException {
    StringBuilder key = new StringBuilder(makeRelative(cache, conf));
    key.append(timeStamp).append(user);
    return key.toString();
}
/**
 * This method creates symlinks for all files in a given dir in another
 * directory, when symlink creation is enabled in the configuration.
 *
 * Should not be used outside of DistributedCache code.
 *
 * @param conf the configuration
 * @param jobCacheDir the target directory for creating symlinks
 * @param workDir the directory in which the symlinks are created
 * @throws IOException
 */
public static void createAllSymlink(Configuration conf, File jobCacheDir,
                                    File workDir)
    throws IOException{
    // Silently skip when either directory is missing/not a directory.
    if ((jobCacheDir == null || !jobCacheDir.isDirectory()) ||
        workDir == null || (!workDir.isDirectory())) {
        return;
    }
    boolean createSymlink = DistributedCache.getSymlink(conf);
    if (createSymlink){
        File[] list = jobCacheDir.listFiles();
        if (list == null) {
            // listFiles() returns null on an I/O error; previously this
            // would have thrown NullPointerException.
            LOG.warn("Could not list files in " + jobCacheDir);
            return;
        }
        for (File entry : list) {
            String target = entry.getAbsolutePath();
            String link = new File(workDir, entry.getName()).toString();
            LOG.info(String.format("Creating symlink: %s <- %s", target, link));
            int ret = FileUtil.symLink(target, link);
            if (ret != 0) {
                // Log and continue: one failed link should not abort the rest.
                LOG.warn(String.format("Failed to create symlink: %s <- %s", target,
                                       link));
            }
        }
    }
}
/**
 * Mutable bookkeeping record for one localized cache object. Callers must
 * synchronize on the instance when reading or writing refcount/size/inited.
 */
static class CacheStatus {
    // the local load path of this cache
    Path localizedLoadPath;
    //the base dir where the cache lies
    Path localizedBaseDir;
    //the size of this cache in bytes (0 until measured/downloaded)
    long size;
    // number of instances using this cache; guarded by this instance's lock
    int refcount;
    // is it initialized (fully localized and ready for use)?
    boolean inited = false;
    // The sub directory (tasktracker/archive or tasktracker/user/archive),
    // under which the file will be localized
    Path subDir;
    // unique string used in the construction of local load path
    String uniqueString;
    // The user that owns the cache entry or null if it is public
    final String user;
    /**
     * @param baseDir base directory the entry is accounted against
     * @param localLoadPath full local path the object is localized at
     * @param subDir tasktracker-relative sub directory
     * @param uniqueString random component of the local path
     * @param user owning user, or null for a public entry
     */
    public CacheStatus(Path baseDir, Path localLoadPath, Path subDir,
                       String uniqueString, String user) {
        super();
        this.localizedLoadPath = localLoadPath;
        this.refcount = 0;
        this.localizedBaseDir = baseDir;
        this.size = 0;
        this.subDir = subDir;
        this.uniqueString = uniqueString;
        this.user = user;
    }
    Path getBaseDir(){
        return this.localizedBaseDir;
    }
    // mark it as initialized
    void initComplete() {
        inited = true;
    }
    // is it initialized?
    boolean isInited() {
        return inited;
    }
    // Full unique directory: <baseDir>/<subDir>/<uniqueString>.
    Path getLocalizedUniqueDir() {
        return new Path(localizedBaseDir, new Path(subDir, uniqueString));
    }
}
/**
 * Clear the entire contents of the cache and delete the backing files. This
 * should only be used when the server is reinitializing, because the users
 * are going to lose their files.
 */
public void purgeCache() {
    synchronized (cachedArchives) {
        // Best-effort deletion of every localized object; a failure on one
        // entry must not stop the purge of the rest.
        for (CacheStatus status : cachedArchives.values()) {
            try {
                localFs.delete(status.localizedLoadPath, true);
            } catch (IOException ie) {
                LOG.debug("Error cleaning up cache", ie);
            }
        }
        cachedArchives.clear();
    }
}
/**
 * Creates and registers a per-job cache manager bound to this tracker-wide
 * manager, so the job's entries can later be looked up by JobID.
 */
public TaskDistributedCacheManager
    newTaskDistributedCacheManager(JobID jobId,
                                   Configuration taskConf) throws IOException {
    TaskDistributedCacheManager result =
        new TaskDistributedCacheManager(this, taskConf);
    jobArchives.put(jobId, result);
    return result;
}
/**
 * Forwards the measured archive sizes to the job's cache manager; a no-op
 * when no manager is registered for the given job.
 */
public void setArchiveSizes(JobID jobId, long[] sizes) throws IOException {
    TaskDistributedCacheManager manager = jobArchives.get(jobId);
    if (manager == null) {
        return; // job unknown or already cleaned up
    }
    manager.setSizes(sizes);
}
/**
 * Determines sizes and modification timestamps of files to be cached, and
 * stores those in the configuration as comma-separated lists. This is
 * intended to be used internally by JobClient after all cache files have
 * been added.
 *
 * This is an internal method!
 *
 * @param job Configuration of a job.
 * @throws IOException
 */
public static void determineTimestamps(Configuration job) throws IOException {
    URI[] tarchives = DistributedCache.getCacheArchives(job);
    if (tarchives != null) {
        FileStatus status = DistributedCache.getFileStatus(job, tarchives[0]);
        // StringBuilder instead of StringBuffer: these are method-local,
        // so no synchronization is needed.
        StringBuilder archiveFileSizes =
            new StringBuilder(String.valueOf(status.getLen()));
        StringBuilder archiveTimestamps =
            new StringBuilder(String.valueOf(status.getModificationTime()));
        for (int i = 1; i < tarchives.length; i++) {
            status = DistributedCache.getFileStatus(job, tarchives[i]);
            archiveFileSizes.append(",");
            archiveFileSizes.append(String.valueOf(status.getLen()));
            archiveTimestamps.append(",");
            archiveTimestamps.append(String.valueOf(
                status.getModificationTime()));
        }
        job.set(DistributedCache.CACHE_ARCHIVES_SIZES,
                archiveFileSizes.toString());
        DistributedCache.setArchiveTimestamps(job, archiveTimestamps.toString());
    }
    URI[] tfiles = DistributedCache.getCacheFiles(job);
    if (tfiles != null) {
        FileStatus status = DistributedCache.getFileStatus(job, tfiles[0]);
        StringBuilder fileSizes =
            new StringBuilder(String.valueOf(status.getLen()));
        StringBuilder fileTimestamps = new StringBuilder(String.valueOf(
            status.getModificationTime()));
        for (int i = 1; i < tfiles.length; i++) {
            status = DistributedCache.getFileStatus(job, tfiles[i]);
            fileSizes.append(",");
            fileSizes.append(String.valueOf(status.getLen()));
            fileTimestamps.append(",");
            fileTimestamps.append(String.valueOf(status.getModificationTime()));
        }
        job.set(DistributedCache.CACHE_FILES_SIZES, fileSizes.toString());
        DistributedCache.setFileTimestamps(job, fileTimestamps.toString());
    }
}
/**
 * Determines the visibilities of the distributed cache files and
 * archives. The visibility of a cache path is "public" if the leaf component
 * has READ permissions for others, and the parent subdirs have
 * EXECUTE permissions for others.
 * @param job
 * @throws IOException
 */
public static void determineCacheVisibilities(Configuration job)
    throws IOException {
    URI[] tarchives = DistributedCache.getCacheArchives(job);
    if (tarchives != null) {
        // StringBuilder instead of StringBuffer: method-local, no
        // synchronization needed.
        StringBuilder archiveVisibilities =
            new StringBuilder(String.valueOf(isPublic(job, tarchives[0])));
        for (int i = 1; i < tarchives.length; i++) {
            archiveVisibilities.append(",");
            archiveVisibilities.append(String.valueOf(isPublic(job, tarchives[i])));
        }
        setArchiveVisibilities(job, archiveVisibilities.toString());
    }
    URI[] tfiles = DistributedCache.getCacheFiles(job);
    if (tfiles != null) {
        StringBuilder fileVisibilities =
            new StringBuilder(String.valueOf(isPublic(job, tfiles[0])));
        for (int i = 1; i < tfiles.length; i++) {
            fileVisibilities.append(",");
            fileVisibilities.append(String.valueOf(isPublic(job, tfiles[i])));
        }
        setFileVisibilities(job, fileVisibilities.toString());
    }
}
// Parses an array of "true"/"false" strings into a boolean array;
// propagates null input as null.
private static boolean[] parseBooleans(String[] strs) {
    if (strs == null) {
        return null;
    }
    final int count = strs.length;
    boolean[] parsed = new boolean[count];
    for (int idx = 0; idx < count; idx++) {
        parsed[idx] = Boolean.parseBoolean(strs[idx]);
    }
    return parsed;
}
/**
 * Get the booleans on whether the files are public or not. Used by
 * internal DistributedCache and MapReduce code.
 * @param conf The configuration which stored the visibilities
 * @return array of booleans, or null if no visibilities are recorded
 */
public static boolean[] getFileVisibilities(Configuration conf) {
    return parseBooleans(conf.getStrings(JobContext.CACHE_FILE_VISIBILITIES));
}
/**
 * Get the booleans on whether the archives are public or not. Used by
 * internal DistributedCache and MapReduce code.
 * @param conf The configuration which stored the visibilities
 * @return array of booleans, or null if no visibilities are recorded
 */
public static boolean[] getArchiveVisibilities(Configuration conf) {
    return parseBooleans(conf.getStrings(JobContext.
                                         CACHE_ARCHIVES_VISIBILITIES));
}
/**
 * Records the public/private visibility of the archives to be
 * localized into the configuration.
 *
 * @param conf Configuration which stores the visibilities
 * @param booleans comma separated list of booleans (true - public)
 * The order should be the same as the order in which the archives are added.
 */
static void setArchiveVisibilities(Configuration conf, String booleans) {
    conf.set(JobContext.CACHE_ARCHIVES_VISIBILITIES, booleans);
}
/**
 * Records the public/private visibility of the files to be localized
 * into the configuration.
 *
 * @param conf Configuration which stores the visibilities
 * @param booleans comma separated list of booleans (true - public)
 * The order should be the same as the order in which the files are added.
 */
static void setFileVisibilities(Configuration conf, String booleans) {
    conf.set(JobContext.CACHE_FILE_VISIBILITIES, booleans);
}
/**
 * For each archive or cache file - get the corresponding delegation token
 * from the namenodes involved, storing them into {@code credentials}.
 * @param job
 * @param credentials
 * @throws IOException
 */
public static void getDelegationTokens(Configuration job,
                                       Credentials credentials)
    throws IOException {
    URI[] tarchives = DistributedCache.getCacheArchives(job);
    URI[] tfiles = DistributedCache.getCacheFiles(job);
    // Collect all archive and file paths into a single array.
    int size = (tarchives!=null? tarchives.length : 0) + (tfiles!=null ? tfiles.length :0);
    Path[] ps = new Path[size];
    int i = 0;
    if (tarchives != null) {
        for (i=0; i < tarchives.length; i++) {
            ps[i] = new Path(tarchives[i].toString());
        }
    }
    // `i` now indexes past the archives; files are appended after them.
    if (tfiles != null) {
        for(int j=0; j< tfiles.length; j++) {
            ps[i+j] = new Path(tfiles[j].toString());
        }
    }
    TokenCache.obtainTokensForNamenodes(credentials, ps, job);
}
/**
 * This is part of the framework API. It's called within the job
 * submission code only, not by users. In the non-error case it has
 * no side effects and returns normally. If there's a URI in both
 * mapred.cache.files and mapred.cache.archives, it throws its
 * exception.
 * @param conf a {@link Configuration} to be checked for duplication
 * in cached URIs
 * @throws InvalidJobConfException
 **/
public static void validate(Configuration conf)
    throws InvalidJobConfException {
    final String[] archiveStrings
        = conf.getStrings(DistributedCache.CACHE_ARCHIVES);
    final String[] fileStrings = conf.getStrings(DistributedCache.CACHE_FILES);
    Path thisSubject = null;
    // Only possible to collide when both lists are present.
    if (archiveStrings != null && fileStrings != null) {
        // Normalize every archive URI to its core location, then check each
        // file URI against that set.
        final Set<Path> archivesSet = new HashSet<Path>();
        for (String archiveString : archiveStrings) {
            archivesSet.add(coreLocation(archiveString, conf));
        }
        for (String fileString : fileStrings) {
            thisSubject = coreLocation(fileString, conf);
            if (archivesSet.contains(thisSubject)) {
                throw new InvalidJobConfException
                    ("The core URI, \""
                     + thisSubject
                     + "\" is listed both in " + DistributedCache.CACHE_FILES
                     + " and in " + DistributedCache.CACHE_ARCHIVES + " .");
            }
        }
    }
}
/**
 * Normalizes a cache URI string into a fully-qualified Path used for
 * duplicate detection: when symlinks are enabled the URI fragment (the
 * symlink name) is stripped, then the path is qualified against its
 * filesystem.
 *
 * @throws InvalidJobConfException if the URI is malformed or its
 *         filesystem cannot be resolved
 */
private static Path coreLocation(String uriString, Configuration conf)
    throws InvalidJobConfException {
    // lose the fragment, if it's likely to be a symlink name
    if (DistributedCache.getSymlink(conf)) {
        try {
            URI uri = new URI(uriString);
            uriString
                = (new URI(uri.getScheme(), uri.getAuthority(), uri.getPath(),
                           null, null)
                   .toString());
        } catch (URISyntaxException e) {
            throw new InvalidJobConfException
                ("Badly formatted URI: " + uriString, e);
        }
    }
    Path path = new Path(uriString);
    try {
        path = path.makeQualified(path.getFileSystem(conf));
    } catch (IOException e) {
        throw new InvalidJobConfException
            ("Invalid file system in distributed cache for the URI: "
             + uriString, e);
    }
    return path;
}
/**
 * Decrement the size and sub directory count of the cache from baseDirSize
 * and baseDirNumberSubDir. Have to lock lcacheStatus before calling this.
 * @param cacheStatus cache status of the cache that is deleted
 */
private void deleteCacheInfoUpdate(CacheStatus cacheStatus) {
    if (!cacheStatus.inited) {
        // if it is not created yet, do nothing.
        return;
    }
    // decrement the size of the cache from baseDirSize
    synchronized (baseDirProperties) {
        CacheDir cacheDir = baseDirProperties.get(cacheStatus.getBaseDir());
        if (cacheDir != null) {
            cacheDir.size -= cacheStatus.size;
            cacheDir.subdirs--;
        } else {
            // Accounting entry missing: addCacheInfoUpdate was never called
            // for this base dir, or it was already removed.
            LOG.warn("Cannot find size and number of subdirectories of" +
                     " baseDir: " + cacheStatus.getBaseDir());
        }
    }
}
/**
 * Update the maps baseDirSize and baseDirNumberSubDir when adding cache.
 * Increase the size and sub directory count of the cache from baseDirSize
 * and baseDirNumberSubDir. Have to lock lcacheStatus before calling this.
 * @param cacheStatus cache status of the cache that is added
 */
private void addCacheInfoUpdate(CacheStatus cacheStatus) {
    long cacheSize = cacheStatus.size;
    // increment the size of the cache in baseDirSize
    // (note: the previous comment incorrectly said "decrement")
    synchronized (baseDirProperties) {
        CacheDir cacheDir = baseDirProperties.get(cacheStatus.getBaseDir());
        if (cacheDir != null) {
            cacheDir.size += cacheSize;
            cacheDir.subdirs++;
        } else {
            // First entry for this base dir: create its accounting record.
            cacheDir = new CacheDir();
            cacheDir.size = cacheSize;
            cacheDir.subdirs = 1;
            baseDirProperties.put(cacheStatus.getBaseDir(), cacheDir);
        }
    }
}
}
| |
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.dogecoin.core;
import com.google.dogecoin.net.discovery.PeerDiscovery;
import com.google.dogecoin.net.discovery.PeerDiscoveryException;
import com.google.dogecoin.params.UnitTestParams;
import com.google.dogecoin.store.MemoryBlockStore;
import com.google.dogecoin.utils.TestUtils;
import com.google.dogecoin.utils.Threading;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.SettableFuture;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.math.BigInteger;
import java.net.InetSocketAddress;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.*;
// TX announcement and broadcast is tested in TransactionBroadcastTest.
/**
 * Functional tests for PeerGroup behaviour against a mocked network: connect/disconnect
 * listener callbacks, peer discovery retry, transaction and block download coordination
 * (single download peer), ping scheduling, download-peer selection by chain height,
 * reconnect backoff ordering, and Bloom filter recalculation. Each test runs once per
 * client-manager implementation (NIO and blocking) via the Parameterized runner.
 */
@RunWith(value = Parameterized.class)
public class PeerGroupTest extends TestWithPeerGroup {
    static final NetworkParameters params = UnitTestParams.get();
    // Queues fed by the event listener below so tests can block until a callback fires.
    private BlockingQueue<Peer> connectedPeers;
    private BlockingQueue<Peer> disconnectedPeers;
    private PeerEventListener listener;
    // Per-peer count of every message seen in onPreMessageReceived.
    private Map<Peer, AtomicInteger> peerToMessageCount;
    @Parameterized.Parameters
    public static Collection<ClientType[]> parameters() {
        return Arrays.asList(new ClientType[] {ClientType.NIO_CLIENT_MANAGER},
                new ClientType[] {ClientType.BLOCKING_CLIENT_MANAGER});
    }
    public PeerGroupTest(ClientType clientType) {
        super(clientType);
    }
    @Override
    @Before
    public void setUp() throws Exception {
        peerToMessageCount = new HashMap<Peer, AtomicInteger>();
        connectedPeers = new LinkedBlockingQueue<Peer>();
        disconnectedPeers = new LinkedBlockingQueue<Peer>();
        // Records connection lifecycle events and counts inbound messages; registered
        // explicitly by the tests that need it (e.g. listener(), peerPriority()).
        listener = new AbstractPeerEventListener() {
            @Override
            public void onPeerConnected(Peer peer, int peerCount) {
                connectedPeers.add(peer);
            }
            @Override
            public void onPeerDisconnected(Peer peer, int peerCount) {
                disconnectedPeers.add(peer);
            }
            @Override
            public Message onPreMessageReceived(Peer peer, Message m) {
                AtomicInteger messageCount = peerToMessageCount.get(peer);
                if (messageCount == null) {
                    messageCount = new AtomicInteger(0);
                    peerToMessageCount.put(peer, messageCount);
                }
                messageCount.incrementAndGet();
                // Just pass the message right through for further processing.
                return m;
            }
        };
        super.setUp(new MemoryBlockStore(UnitTestParams.get()));
        peerGroup.addWallet(wallet);
    }
    @After
    public void tearDown() throws Exception {
        super.tearDown();
        Utils.finishMockSleep();
        peerGroup.stopAndWait();
    }
    @Test
    public void listener() throws Exception {
        // Verifies connect/disconnect callbacks fire exactly once per peer, and that
        // removing the listener works (and is idempotent-negative the second time).
        peerGroup.startAndWait();
        peerGroup.addEventListener(listener);
        // Create a couple of peers.
        InboundMessageQueuer p1 = connectPeer(1);
        InboundMessageQueuer p2 = connectPeer(2);
        connectedPeers.take();
        connectedPeers.take();
        pingAndWait(p1);
        pingAndWait(p2);
        Threading.waitForUserCode();
        assertEquals(0, disconnectedPeers.size());
        p1.close();
        disconnectedPeers.take();
        assertEquals(0, disconnectedPeers.size());
        p2.close();
        disconnectedPeers.take();
        assertEquals(0, disconnectedPeers.size());
        assertTrue(peerGroup.removeEventListener(listener));
        assertFalse(peerGroup.removeEventListener(listener));
    }
    @Test
    public void peerDiscoveryPolling() throws InterruptedException {
        // Check that if peer discovery fails, we keep trying until we have some nodes to talk with.
        final CountDownLatch latch = new CountDownLatch(1);
        final AtomicBoolean result = new AtomicBoolean();
        peerGroup.addPeerDiscovery(new PeerDiscovery() {
            public InetSocketAddress[] getPeers(long unused, TimeUnit unused2) throws PeerDiscoveryException {
                if (!result.getAndSet(true)) {
                    // Pretend we are not connected to the internet.
                    throw new PeerDiscoveryException("test failure");
                } else {
                    // Return a bogus address.
                    latch.countDown();
                    return new InetSocketAddress[]{new InetSocketAddress("localhost", 1)};
                }
            }
            public void shutdown() {
            }
        });
        peerGroup.startAndWait();
        latch.await();
        // Check that we did indeed throw an exception. If we got here it means we threw and then PeerGroup tried
        // again a bit later.
        assertTrue(result.get());
    }
    @Test
    public void receiveTxBroadcast() throws Exception {
        // Check that when we receive transactions on all our peers, we do the right thing.
        peerGroup.startAndWait();
        // Create a couple of peers.
        InboundMessageQueuer p1 = connectPeer(1);
        InboundMessageQueuer p2 = connectPeer(2);
        // Check the peer accessors.
        assertEquals(2, peerGroup.numConnectedPeers());
        Set<Peer> tmp = new HashSet<Peer>(peerGroup.getConnectedPeers());
        Set<Peer> expectedPeers = new HashSet<Peer>();
        expectedPeers.add(peerOf(p1));
        expectedPeers.add(peerOf(p2));
        // NOTE(review): JUnit convention is assertEquals(expected, actual); arguments here
        // are reversed, which only affects the failure message, not correctness.
        assertEquals(tmp, expectedPeers);
        BigInteger value = Utils.toNanoCoins(1, 0);
        Transaction t1 = TestUtils.createFakeTx(unitTestParams, value, address);
        InventoryMessage inv = new InventoryMessage(unitTestParams);
        inv.addTransaction(t1);
        // Note: we start with p2 here to verify that transactions are downloaded from whichever peer announces first
        // which does not have to be the same as the download peer (which is really the "block download peer").
        inbound(p2, inv);
        assertTrue(outbound(p2) instanceof GetDataMessage);
        inbound(p1, inv);
        assertNull(outbound(p1)); // Only one peer is used to download.
        inbound(p2, t1);
        assertNull(outbound(p1));
        // Asks for dependency.
        GetDataMessage getdata = (GetDataMessage) outbound(p2);
        assertNotNull(getdata);
        inbound(p2, new NotFoundMessage(unitTestParams, getdata.getItems()));
        pingAndWait(p2);
        assertEquals(value, wallet.getBalance(Wallet.BalanceType.ESTIMATED));
        peerGroup.stopAndWait();
    }
    @Test
    public void singleDownloadPeer1() throws Exception {
        // Check that we don't attempt to retrieve blocks on multiple peers.
        peerGroup.startAndWait();
        // Create a couple of peers.
        InboundMessageQueuer p1 = connectPeer(1);
        InboundMessageQueuer p2 = connectPeer(2);
        assertEquals(2, peerGroup.numConnectedPeers());
        // Set up a little block chain. We heard about b1 but not b2 (it is pending download). b3 is solved whilst we
        // are downloading the chain.
        Block b1 = TestUtils.createFakeBlock(blockStore).block;
        blockChain.add(b1);
        Block b2 = TestUtils.makeSolvedTestBlock(b1);
        Block b3 = TestUtils.makeSolvedTestBlock(b2);
        // Peer 1 and 2 receives an inv advertising a newly solved block.
        InventoryMessage inv = new InventoryMessage(params);
        inv.addBlock(b3);
        // Only peer 1 tries to download it.
        inbound(p1, inv);
        pingAndWait(p1);
        assertTrue(outbound(p1) instanceof GetDataMessage);
        assertNull(outbound(p2));
        // Peer 1 goes away, peer 2 becomes the download peer and thus queries the remote mempool.
        final SettableFuture<Void> p1CloseFuture = SettableFuture.create();
        peerOf(p1).addEventListener(new AbstractPeerEventListener() {
            @Override
            public void onPeerDisconnected(Peer peer, int peerCount) {
                p1CloseFuture.set(null);
            }
        });
        closePeer(peerOf(p1));
        p1CloseFuture.get();
        // Peer 2 fetches it next time it hears an inv (should it fetch immediately?).
        inbound(p2, inv);
        assertTrue(outbound(p2) instanceof GetDataMessage);
        peerGroup.stop();
    }
    @Test
    public void singleDownloadPeer2() throws Exception {
        // Check that we don't attempt multiple simultaneous block chain downloads, when adding a new peer in the
        // middle of an existing chain download.
        // Create a couple of peers.
        peerGroup.startAndWait();
        // Create a couple of peers.
        InboundMessageQueuer p1 = connectPeer(1);
        // Set up a little block chain.
        Block b1 = TestUtils.createFakeBlock(blockStore).block;
        Block b2 = TestUtils.makeSolvedTestBlock(b1);
        Block b3 = TestUtils.makeSolvedTestBlock(b2);
        // Expect a zero hash getblocks on p1. This is how the process starts.
        peerGroup.startBlockChainDownload(new AbstractPeerEventListener() {
        });
        GetBlocksMessage getblocks = (GetBlocksMessage) outbound(p1);
        assertEquals(Sha256Hash.ZERO_HASH, getblocks.getStopHash());
        // We give back an inv with some blocks in it.
        InventoryMessage inv = new InventoryMessage(params);
        inv.addBlock(b1);
        inv.addBlock(b2);
        inv.addBlock(b3);
        inbound(p1, inv);
        assertTrue(outbound(p1) instanceof GetDataMessage);
        // We hand back the first block.
        inbound(p1, b1);
        // Now we successfully connect to another peer. There should be no messages sent.
        InboundMessageQueuer p2 = connectPeer(2);
        Message message = (Message)outbound(p2);
        assertNull(message == null ? "" : message.toString(), message);
        peerGroup.stop();
    }
    @Test
    public void transactionConfidence() throws Exception {
        // Checks that we correctly count how many peers broadcast a transaction, so we can establish some measure of
        // its trustworthyness assuming an untampered with internet connection.
        peerGroup.startAndWait();
        final Transaction[] event = new Transaction[2];
        peerGroup.addEventListener(new AbstractPeerEventListener() {
            @Override
            public void onTransaction(Peer peer, Transaction t) {
                event[0] = t;
            }
        }, Threading.SAME_THREAD);
        InboundMessageQueuer p1 = connectPeer(1);
        InboundMessageQueuer p2 = connectPeer(2);
        InboundMessageQueuer p3 = connectPeer(3);
        Transaction tx = TestUtils.createFakeTx(params, Utils.toNanoCoins(20, 0), address);
        InventoryMessage inv = new InventoryMessage(params);
        inv.addTransaction(tx);
        // Peer 2 advertises the tx but does not receive it yet.
        inbound(p2, inv);
        assertTrue(outbound(p2) instanceof GetDataMessage);
        assertEquals(0, tx.getConfidence().numBroadcastPeers());
        assertTrue(peerGroup.getMemoryPool().maybeWasSeen(tx.getHash()));
        assertNull(event[0]);
        // Peer 1 advertises the tx, we don't do anything as it's already been requested.
        inbound(p1, inv);
        assertNull(outbound(p1));
        // Peer 2 gets sent the tx and requests the dependency.
        inbound(p2, tx);
        assertTrue(outbound(p2) instanceof GetDataMessage);
        tx = event[0];  // We want to use the canonical copy delivered by the PeerGroup from now on.
        assertNotNull(tx);
        event[0] = null;
        // Peer 1 (the download peer) advertises the tx, we download it.
        inbound(p1, inv); // returns getdata
        inbound(p1, tx); // returns nothing after a queue drain.
        // Two peers saw this tx hash.
        assertEquals(2, tx.getConfidence().numBroadcastPeers());
        assertTrue(tx.getConfidence().wasBroadcastBy(peerOf(p1).getAddress()));
        assertTrue(tx.getConfidence().wasBroadcastBy(peerOf(p2).getAddress()));
        tx.getConfidence().addEventListener(new TransactionConfidence.Listener() {
            public void onConfidenceChanged(Transaction tx, TransactionConfidence.Listener.ChangeReason reason) {
                event[1] = tx;
            }
        });
        // A straggler reports in.
        inbound(p3, inv);
        pingAndWait(p3);
        Threading.waitForUserCode();
        assertEquals(tx, event[1]);
        assertEquals(3, tx.getConfidence().numBroadcastPeers());
        assertTrue(tx.getConfidence().wasBroadcastBy(peerOf(p3).getAddress()));
    }
    @Test
    public void testWalletCatchupTime() throws Exception {
        // Check the fast catchup time was initialized to something around the current runtime minus a week.
        // The wallet was already added to the peer in setup.
        // NOTE(review): the constant is named WEEK but equals 3 days (86400 * 3). Presumably
        // tuned to this coin's PeerGroup catch-up window — confirm against PeerGroup before changing.
        final int WEEK = 86400 * 3;
        final long now = Utils.currentTimeMillis() / 1000;
        peerGroup.startAndWait();
        assertTrue(peerGroup.getFastCatchupTimeSecs() > now - WEEK - 10000);
        Wallet w2 = new Wallet(params);
        ECKey key1 = new ECKey();
        key1.setCreationTimeSeconds(now - 86400);  // One day ago.
        w2.addKey(key1);
        peerGroup.addWallet(w2);
        peerGroup.waitForJobQueue();
        assertEquals(peerGroup.getFastCatchupTimeSecs(), now - 86400 - WEEK);
        // Adding a key to the wallet should update the fast catchup time, but asynchronously and in the background
        // due to the need to avoid complicated lock inversions.
        ECKey key2 = new ECKey();
        key2.setCreationTimeSeconds(now - 100000);
        w2.addKey(key2);
        peerGroup.waitForJobQueue();
        assertEquals(peerGroup.getFastCatchupTimeSecs(), now - WEEK - 100000);
    }
    @Test
    public void noPings() throws Exception {
        // With the ping interval set to zero, no pings are sent so the last ping time stays unset.
        peerGroup.startAndWait();
        peerGroup.setPingIntervalMsec(0);
        VersionMessage versionMessage = new VersionMessage(params, 2);
        versionMessage.clientVersion = FilteredBlock.MIN_PROTOCOL_VERSION;
        versionMessage.localServices = VersionMessage.NODE_NETWORK;
        connectPeer(1, versionMessage);
        peerGroup.waitForPeers(1).get();
        assertFalse(peerGroup.getConnectedPeers().get(0).getLastPingTime() < Long.MAX_VALUE);
    }
    @Test
    public void pings() throws Exception {
        // With a short ping interval, pings are sent repeatedly and the last ping time gets recorded.
        peerGroup.startAndWait();
        peerGroup.setPingIntervalMsec(100);
        VersionMessage versionMessage = new VersionMessage(params, 2);
        versionMessage.clientVersion = FilteredBlock.MIN_PROTOCOL_VERSION;
        versionMessage.localServices = VersionMessage.NODE_NETWORK;
        InboundMessageQueuer p1 = connectPeer(1, versionMessage);
        Ping ping = (Ping) outbound(p1);
        inbound(p1, new Pong(ping.getNonce()));
        pingAndWait(p1);
        assertTrue(peerGroup.getConnectedPeers().get(0).getLastPingTime() < Long.MAX_VALUE);
        // The call to outbound should block until a ping arrives.
        ping = (Ping) waitForOutbound(p1);
        inbound(p1, new Pong(ping.getNonce()));
        assertTrue(peerGroup.getConnectedPeers().get(0).getLastPingTime() < Long.MAX_VALUE);
    }
    @Test
    public void downloadPeerSelection() throws Exception {
        // The download peer should be the first peer announcing the most common chain height.
        peerGroup.startAndWait();
        VersionMessage versionMessage2 = new VersionMessage(params, 2);
        versionMessage2.clientVersion = FilteredBlock.MIN_PROTOCOL_VERSION;
        versionMessage2.localServices = VersionMessage.NODE_NETWORK;
        VersionMessage versionMessage3 = new VersionMessage(params, 3);
        versionMessage3.clientVersion = FilteredBlock.MIN_PROTOCOL_VERSION;
        versionMessage3.localServices = VersionMessage.NODE_NETWORK;
        assertNull(peerGroup.getDownloadPeer());
        Peer a = connectPeer(1, versionMessage2).peer;
        assertEquals(2, peerGroup.getMostCommonChainHeight());
        assertEquals(a, peerGroup.getDownloadPeer());
        connectPeer(2, versionMessage2);
        assertEquals(2, peerGroup.getMostCommonChainHeight());
        assertEquals(a, peerGroup.getDownloadPeer());  // No change.
        Peer c = connectPeer(3, versionMessage3).peer;
        assertEquals(2, peerGroup.getMostCommonChainHeight());
        assertEquals(a, peerGroup.getDownloadPeer());  // No change yet.
        connectPeer(4, versionMessage3);
        assertEquals(3, peerGroup.getMostCommonChainHeight());
        assertEquals(c, peerGroup.getDownloadPeer());  // Switch to first peer advertising new height.
        // New peer with a higher protocol version but same chain height.
        //TODO: When PeerGroup.selectDownloadPeer.PREFERRED_VERSION is not equal to vMinRequiredProtocolVersion,
        // reenable this test
        /*VersionMessage versionMessage4 = new VersionMessage(params, 3);
        versionMessage4.clientVersion = 100000;
        versionMessage4.localServices = VersionMessage.NODE_NETWORK;
        InboundMessageQueuer d = connectPeer(5, versionMessage4);
        assertEquals(d.peer, peerGroup.getDownloadPeer());*/
    }
    @Test
    public void peerTimeoutTest() throws Exception {
        // A peer that never completes the version handshake should be disconnected after the
        // connect timeout (100ms here) without ever firing onPeerConnected.
        peerGroup.startAndWait();
        peerGroup.setConnectTimeoutMillis(100);
        final SettableFuture<Void> peerConnectedFuture = SettableFuture.create();
        final SettableFuture<Void> peerDisconnectedFuture = SettableFuture.create();
        peerGroup.addEventListener(new AbstractPeerEventListener() {
            @Override public void onPeerConnected(Peer peer, int peerCount) {
                peerConnectedFuture.set(null);
            }
            @Override public void onPeerDisconnected(Peer peer, int peerCount) {
                peerDisconnectedFuture.set(null);
            }
        }, Threading.SAME_THREAD);
        connectPeerWithoutVersionExchange(0);
        Thread.sleep(50);
        assertFalse(peerConnectedFuture.isDone() || peerDisconnectedFuture.isDone());
        Thread.sleep(60);
        assertTrue(!peerConnectedFuture.isDone());
        assertTrue(!peerConnectedFuture.isDone() && peerDisconnectedFuture.isDone());
    }
    @Test
    public void peerPriority() throws Exception {
        // Exercises reconnect ordering under mock time: recently-successful peers are retried
        // first because they carry a lower exponential backoff.
        final List<InetSocketAddress> addresses = Lists.newArrayList(
                new InetSocketAddress("localhost", 2000),
                new InetSocketAddress("localhost", 2001),
                new InetSocketAddress("localhost", 2002)
        );
        peerGroup.addEventListener(listener);
        peerGroup.addPeerDiscovery(new PeerDiscovery() {
            public InetSocketAddress[] getPeers(long unused, TimeUnit unused2) throws PeerDiscoveryException {
                return addresses.toArray(new InetSocketAddress[addresses.size()]);
            }
            public void shutdown() {
            }
        });
        peerGroup.setMaxConnections(3);
        Utils.setMockSleep(true);
        peerGroup.startAndWait();
        handleConnectToPeer(0);
        handleConnectToPeer(1);
        handleConnectToPeer(2);
        connectedPeers.take();
        connectedPeers.take();
        connectedPeers.take();
        addresses.clear();
        addresses.addAll(Lists.newArrayList(new InetSocketAddress("localhost", 2003)));
        stopPeerServer(2);
        assertEquals(2002, disconnectedPeers.take().getAddress().getPort()); // peer died
        // discovers, connects to new peer
        handleConnectToPeer(3);
        assertEquals(2003, connectedPeers.take().getAddress().getPort());
        stopPeerServer(1);
        assertEquals(2001, disconnectedPeers.take().getAddress().getPort()); // peer died
        // Alternates trying two offline peers
        Utils.passMockSleep();
        assertEquals(2001, disconnectedPeers.take().getAddress().getPort());
        Utils.passMockSleep();
        assertEquals(2002, disconnectedPeers.take().getAddress().getPort());
        Utils.passMockSleep();
        assertEquals(2001, disconnectedPeers.take().getAddress().getPort());
        Utils.passMockSleep();
        assertEquals(2002, disconnectedPeers.take().getAddress().getPort());
        Utils.passMockSleep();
        assertEquals(2001, disconnectedPeers.take().getAddress().getPort());
        // Peer 2 comes online
        startPeerServer(2);
        Utils.passMockSleep();
        handleConnectToPeer(2);
        assertEquals(2002, connectedPeers.take().getAddress().getPort());
        stopPeerServer(2);
        assertEquals(2002, disconnectedPeers.take().getAddress().getPort()); // peer died
        // Peer 2 is tried twice before peer 1, since it has a lower backoff due to recent success
        Utils.passMockSleep();
        assertEquals(2002, disconnectedPeers.take().getAddress().getPort());
        Utils.passMockSleep();
        assertEquals(2002, disconnectedPeers.take().getAddress().getPort());
        Utils.passMockSleep();
        assertEquals(2001, disconnectedPeers.take().getAddress().getPort());
    }
    @Test
    public void testBloomOnP2Pubkey() throws Exception {
        // Cover bug 513. When a relevant transaction with a p2pubkey output is found, the Bloom filter should be
        // recalculated to include that transaction hash but not re-broadcast as the remote nodes should have followed
        // the same procedure. However a new node that's connected should get the fresh filter.
        peerGroup.startAndWait();
        final ECKey key = wallet.getKeys().get(0);
        // Create a couple of peers.
        InboundMessageQueuer p1 = connectPeer(1);
        InboundMessageQueuer p2 = connectPeer(2);
        // Create a pay to pubkey tx.
        Transaction tx = TestUtils.createFakeTx(params, Utils.COIN, key);
        Transaction tx2 = new Transaction(params);
        tx2.addInput(tx.getOutput(0));
        TransactionOutPoint outpoint = tx2.getInput(0).getOutpoint();
        assertTrue(p1.lastReceivedFilter.contains(key.getPubKey()));
        assertFalse(p1.lastReceivedFilter.contains(tx.getHash().getBytes()));
        inbound(p1, tx);
        // p1 requests dep resolution, p2 is quiet.
        assertTrue(outbound(p1) instanceof GetDataMessage);
        final Sha256Hash dephash = tx.getInput(0).getOutpoint().getHash();
        final InventoryItem inv = new InventoryItem(InventoryItem.Type.Transaction, dephash);
        inbound(p1, new NotFoundMessage(params, ImmutableList.of(inv)));
        assertNull(outbound(p1));
        assertNull(outbound(p2));
        peerGroup.waitForJobQueue();
        // Now we connect p3 and there is a new bloom filter sent, that DOES match the relevant outpoint.
        InboundMessageQueuer p3 = connectPeer(3);
        assertTrue(p3.lastReceivedFilter.contains(key.getPubKey()));
        assertTrue(p3.lastReceivedFilter.contains(outpoint.bitcoinSerialize()));
    }
    @Test
    public void testBloomResendOnNewKey() throws Exception {
        // Check that when we add a new key to the wallet, the Bloom filter is re-calculated and re-sent.
        peerGroup.startAndWait();
        // Create a couple of peers.
        InboundMessageQueuer p1 = connectPeer(1);
        InboundMessageQueuer p2 = connectPeer(2);
        BloomFilter f1 = p1.lastReceivedFilter;
        BloomFilter f2 = p2.lastReceivedFilter;
        final ECKey key = new ECKey();
        wallet.addKey(key);
        peerGroup.waitForJobQueue();
        BloomFilter f3 = (BloomFilter) outbound(p1);
        BloomFilter f4 = (BloomFilter) outbound(p2);
        assertTrue(outbound(p1) instanceof MemoryPoolMessage);
        assertTrue(outbound(p2) instanceof MemoryPoolMessage);
        assertNotEquals(f1, f3);
        assertNotEquals(f2, f4);
        assertEquals(f3, f4);
        assertTrue(f3.contains(key.getPubKey()));
        assertTrue(f3.contains(key.getPubKeyHash()));
        assertFalse(f1.contains(key.getPubKey()));
        assertFalse(f1.contains(key.getPubKeyHash()));
    }
}
| |
/*
* Copyright (C) 2018 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.internal.codegen;
import static com.google.testing.compile.CompilationSubject.assertThat;
import static dagger.internal.codegen.Compilers.daggerCompiler;
import static dagger.internal.codegen.TestUtils.message;
import com.google.testing.compile.Compilation;
import com.google.testing.compile.JavaFileObjects;
import javax.tools.JavaFileObject;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Tests how Dagger treats a component that inherits same-named entry points from multiple
 * supertypes: compilation must fail when the declarations resolve to different keys
 * (covariant return types or differing qualifiers) and succeed when they resolve to the
 * same, possibly qualified, key.
 */
@RunWith(JUnit4.class)
public final class ConflictingEntryPointsTest {
  @Test
  public void covariantType() {
    // Both supertypes declare foo(), with covariant return types (Long vs Number),
    // so one entry point name would need two distinct bindings.
    JavaFileObject firstBase =
        JavaFileObjects.forSourceLines(
            "test.Base1", //
            "package test;",
            "",
            "interface Base1 {",
            "  Long foo();",
            "}");
    JavaFileObject secondBase =
        JavaFileObjects.forSourceLines(
            "test.Base2", //
            "package test;",
            "",
            "interface Base2 {",
            "  Number foo();",
            "}");
    JavaFileObject componentFile =
        JavaFileObjects.forSourceLines(
            "test.TestComponent",
            "package test;",
            "",
            "import dagger.BindsInstance;",
            "import dagger.Component;",
            "",
            "@Component",
            "interface TestComponent extends Base1, Base2 {",
            "",
            "  @Component.Builder",
            "  interface Builder {",
            "    @BindsInstance Builder foo(Long foo);",
            "    @BindsInstance Builder foo(Number foo);",
            "    TestComponent build();",
            "  }",
            "}");
    Compilation result = daggerCompiler().compile(firstBase, secondBase, componentFile);
    assertThat(result).failed();
    String expectedError =
        message(
            "conflicting entry point declarations:",
            "    Long test.Base1.foo()",
            "    Number test.Base2.foo()");
    assertThat(result)
        .hadErrorContaining(expectedError)
        .inFile(componentFile)
        .onLineContaining("interface TestComponent ");
  }

  @Test
  public void covariantTypeFromGenericSupertypes() {
    // Same conflict as above, but the covariant returns arise from generic supertypes
    // instantiated with different type arguments.
    JavaFileObject firstBase =
        JavaFileObjects.forSourceLines(
            "test.Base1", //
            "package test;",
            "",
            "interface Base1<T> {",
            "  T foo();",
            "}");
    JavaFileObject secondBase =
        JavaFileObjects.forSourceLines(
            "test.Base2", //
            "package test;",
            "",
            "interface Base2<T> {",
            "  T foo();",
            "}");
    JavaFileObject componentFile =
        JavaFileObjects.forSourceLines(
            "test.TestComponent",
            "package test;",
            "",
            "import dagger.BindsInstance;",
            "import dagger.Component;",
            "",
            "@Component",
            "interface TestComponent extends Base1<Long>, Base2<Number> {",
            "",
            "  @Component.Builder",
            "  interface Builder {",
            "    @BindsInstance Builder foo(Long foo);",
            "    @BindsInstance Builder foo(Number foo);",
            "    TestComponent build();",
            "  }",
            "}");
    Compilation result = daggerCompiler().compile(firstBase, secondBase, componentFile);
    assertThat(result).failed();
    String expectedError =
        message(
            "conflicting entry point declarations:",
            "    Long test.Base1.foo()",
            "    Number test.Base2.foo()");
    assertThat(result)
        .hadErrorContaining(expectedError)
        .inFile(componentFile)
        .onLineContaining("interface TestComponent ");
  }

  @Test
  public void differentQualifier() {
    // Same name and return type, but one declaration is @Named — the keys differ,
    // so the inherited entry points conflict.
    JavaFileObject firstBase =
        JavaFileObjects.forSourceLines(
            "test.Base1", //
            "package test;",
            "",
            "interface Base1 {",
            "  Object foo();",
            "}");
    JavaFileObject secondBase =
        JavaFileObjects.forSourceLines(
            "test.Base2", //
            "package test;",
            "",
            "import javax.inject.Named;",
            "",
            "interface Base2 {",
            "  @Named(\"foo\") Object foo();",
            "}");
    JavaFileObject componentFile =
        JavaFileObjects.forSourceLines(
            "test.TestComponent",
            "package test;",
            "",
            "import dagger.BindsInstance;",
            "import dagger.Component;",
            "import javax.inject.Named;",
            "",
            "@Component",
            "interface TestComponent extends Base1, Base2 {",
            "",
            "  @Component.Builder",
            "  interface Builder {",
            "    @BindsInstance Builder foo(Object foo);",
            "    @BindsInstance Builder namedFoo(@Named(\"foo\") Object foo);",
            "    TestComponent build();",
            "  }",
            "}");
    Compilation result = daggerCompiler().compile(firstBase, secondBase, componentFile);
    assertThat(result).failed();
    String expectedError =
        message(
            "conflicting entry point declarations:",
            "    Object test.Base1.foo()",
            "    @Named(\"foo\") Object test.Base2.foo()");
    assertThat(result)
        .hadErrorContaining(expectedError)
        .inFile(componentFile)
        .onLineContaining("interface TestComponent ");
  }

  @Test
  public void sameKey() {
    // Identical declarations from both supertypes resolve to one key: no conflict.
    JavaFileObject firstBase =
        JavaFileObjects.forSourceLines(
            "test.Base1", //
            "package test;",
            "",
            "interface Base1 {",
            "  Object foo();",
            "}");
    JavaFileObject secondBase =
        JavaFileObjects.forSourceLines(
            "test.Base2", //
            "package test;",
            "",
            "interface Base2 {",
            "  Object foo();",
            "}");
    JavaFileObject componentFile =
        JavaFileObjects.forSourceLines(
            "test.TestComponent",
            "package test;",
            "",
            "import dagger.BindsInstance;",
            "import dagger.Component;",
            "",
            "@Component",
            "interface TestComponent extends Base1, Base2 {",
            "",
            "  @Component.Builder",
            "  interface Builder {",
            "    @BindsInstance Builder foo(Object foo);",
            "    TestComponent build();",
            "  }",
            "}");
    Compilation result = daggerCompiler().compile(firstBase, secondBase, componentFile);
    assertThat(result).succeeded();
  }

  @Test
  public void sameQualifiedKey() {
    // Matching qualifiers on both declarations also resolve to one key: no conflict.
    JavaFileObject firstBase =
        JavaFileObjects.forSourceLines(
            "test.Base1", //
            "package test;",
            "",
            "import javax.inject.Named;",
            "",
            "interface Base1 {",
            "  @Named(\"foo\") Object foo();",
            "}");
    JavaFileObject secondBase =
        JavaFileObjects.forSourceLines(
            "test.Base2", //
            "package test;",
            "",
            "import javax.inject.Named;",
            "",
            "interface Base2 {",
            "  @Named(\"foo\") Object foo();",
            "}");
    JavaFileObject componentFile =
        JavaFileObjects.forSourceLines(
            "test.TestComponent",
            "package test;",
            "",
            "import dagger.BindsInstance;",
            "import dagger.Component;",
            "import javax.inject.Named;",
            "",
            "@Component",
            "interface TestComponent extends Base1, Base2 {",
            "",
            "  @Component.Builder",
            "  interface Builder {",
            "    @BindsInstance Builder foo(@Named(\"foo\") Object foo);",
            "    TestComponent build();",
            "  }",
            "}");
    Compilation result = daggerCompiler().compile(firstBase, secondBase, componentFile);
    assertThat(result).succeeded();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.aws2.ecs.ECS2Component;
/**
* Manage AWS ECS cluster instances using AWS SDK version 2.x.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface Aws2EcsComponentBuilderFactory {
    /**
     * AWS 2 Elastic Container Service (ECS) (camel-aws2-ecs)
     * Manage AWS ECS cluster instances using AWS SDK version 2.x.
     *
     * Category: cloud,management
     * Since: 3.1
     * Maven coordinates: org.apache.camel:camel-aws2-ecs
     *
     * Entry point of this factory: returns a fresh builder backed by a new
     * ECS2Component instance. Generated code — do not hand-edit.
     *
     * @return the dsl builder
     */
    static Aws2EcsComponentBuilder aws2Ecs() {
        return new Aws2EcsComponentBuilderImpl();
    }
    /**
     * Builder for the AWS 2 Elastic Container Service (ECS) component.
     *
     * Each setter stores its value by property name via
     * {@code doSetProperty} and returns {@code this} for fluent chaining.
     * Generated by camel-package-maven-plugin — do not hand-edit.
     */
    interface Aws2EcsComponentBuilder extends ComponentBuilder<ECS2Component> {
        /**
         * Component configuration.
         *
         * The option is a:
         * <code>org.apache.camel.component.aws2.ecs.ECS2Configuration</code> type.
         *
         * Group: producer
         *
         * @param configuration the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder configuration(
                org.apache.camel.component.aws2.ecs.ECS2Configuration configuration) {
            doSetProperty("configuration", configuration);
            return this;
        }
        /**
         * To use a existing configured AWS ECS as client.
         *
         * The option is a:
         * <code>software.amazon.awssdk.services.ecs.EcsClient</code> type.
         *
         * Group: producer
         *
         * @param ecsClient the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder ecsClient(
                software.amazon.awssdk.services.ecs.EcsClient ecsClient) {
            doSetProperty("ecsClient", ecsClient);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * The operation to perform.
         *
         * The option is a:
         * <code>org.apache.camel.component.aws2.ecs.ECS2Operations</code> type.
         *
         * Group: producer
         *
         * @param operation the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder operation(
                org.apache.camel.component.aws2.ecs.ECS2Operations operation) {
            doSetProperty("operation", operation);
            return this;
        }
        /**
         * If we want to use a POJO request as body or not.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param pojoRequest the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder pojoRequest(boolean pojoRequest) {
            doSetProperty("pojoRequest", pojoRequest);
            return this;
        }
        /**
         * To define a proxy host when instantiating the ECS client.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         *
         * @param proxyHost the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder proxyHost(java.lang.String proxyHost) {
            doSetProperty("proxyHost", proxyHost);
            return this;
        }
        /**
         * To define a proxy port when instantiating the ECS client.
         *
         * The option is a: <code>java.lang.Integer</code> type.
         *
         * Group: producer
         *
         * @param proxyPort the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder proxyPort(java.lang.Integer proxyPort) {
            doSetProperty("proxyPort", proxyPort);
            return this;
        }
        /**
         * To define a proxy protocol when instantiating the ECS client.
         *
         * The option is a:
         * <code>software.amazon.awssdk.core.Protocol</code> type.
         *
         * Default: HTTPS
         * Group: producer
         *
         * @param proxyProtocol the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder proxyProtocol(
                software.amazon.awssdk.core.Protocol proxyProtocol) {
            doSetProperty("proxyProtocol", proxyProtocol);
            return this;
        }
        /**
         * The region in which ECS client needs to work. When using this
         * parameter, the configuration will expect the lowercase name of the
         * region (for example ap-east-1) You'll need to use the name
         * Region.EU_WEST_1.id().
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         *
         * @param region the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder region(java.lang.String region) {
            doSetProperty("region", region);
            return this;
        }
        /**
         * If we want to trust all certificates in case of overriding the
         * endpoint.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param trustAllCertificates the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder trustAllCertificates(
                boolean trustAllCertificates) {
            doSetProperty("trustAllCertificates", trustAllCertificates);
            return this;
        }
        /**
         * Whether autowiring is enabled. This is used for automatic autowiring
         * options (the option must be marked as autowired) by looking up in the
         * registry to find if there is a single instance of matching type,
         * which then gets configured on the component. This can be used for
         * automatic configuring JDBC data sources, JMS connection factories,
         * AWS Clients, etc.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: true
         * Group: advanced
         *
         * @param autowiredEnabled the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder autowiredEnabled(
                boolean autowiredEnabled) {
            doSetProperty("autowiredEnabled", autowiredEnabled);
            return this;
        }
        /**
         * Amazon AWS Access Key.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param accessKey the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder accessKey(java.lang.String accessKey) {
            doSetProperty("accessKey", accessKey);
            return this;
        }
        /**
         * Amazon AWS Secret Key.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: security
         *
         * @param secretKey the value to set
         * @return the dsl builder
         */
        default Aws2EcsComponentBuilder secretKey(java.lang.String secretKey) {
            doSetProperty("secretKey", secretKey);
            return this;
        }
    }
/**
 * Concrete builder for the AWS 2 ECS component: creates the component
 * instance and maps DSL property names onto the component/configuration
 * setters.
 */
class Aws2EcsComponentBuilderImpl
        extends
            AbstractComponentBuilder<ECS2Component>
        implements
            Aws2EcsComponentBuilder {
    @Override
    protected ECS2Component buildConcreteComponent() {
        // Fresh component; its configuration is attached lazily in
        // setPropertyOnComponent via getOrCreateConfiguration.
        return new ECS2Component();
    }

    // Returns the component's configuration, creating and attaching an
    // empty one on first use so property setters always have a target.
    private org.apache.camel.component.aws2.ecs.ECS2Configuration getOrCreateConfiguration(
            org.apache.camel.component.aws2.ecs.ECS2Component component) {
        if (component.getConfiguration() == null) {
            component.setConfiguration(new org.apache.camel.component.aws2.ecs.ECS2Configuration());
        }
        return component.getConfiguration();
    }

    /**
     * Dispatches a named DSL property either to the component itself or to
     * its (lazily created) configuration; returns false for unknown names.
     */
    @Override
    protected boolean setPropertyOnComponent(
            Component component,
            String name,
            Object value) {
        switch (name) {
        case "configuration": ((ECS2Component) component).setConfiguration((org.apache.camel.component.aws2.ecs.ECS2Configuration) value); return true;
        case "ecsClient": getOrCreateConfiguration((ECS2Component) component).setEcsClient((software.amazon.awssdk.services.ecs.EcsClient) value); return true;
        case "lazyStartProducer": ((ECS2Component) component).setLazyStartProducer((boolean) value); return true;
        case "operation": getOrCreateConfiguration((ECS2Component) component).setOperation((org.apache.camel.component.aws2.ecs.ECS2Operations) value); return true;
        case "pojoRequest": getOrCreateConfiguration((ECS2Component) component).setPojoRequest((boolean) value); return true;
        case "proxyHost": getOrCreateConfiguration((ECS2Component) component).setProxyHost((java.lang.String) value); return true;
        case "proxyPort": getOrCreateConfiguration((ECS2Component) component).setProxyPort((java.lang.Integer) value); return true;
        case "proxyProtocol": getOrCreateConfiguration((ECS2Component) component).setProxyProtocol((software.amazon.awssdk.core.Protocol) value); return true;
        case "region": getOrCreateConfiguration((ECS2Component) component).setRegion((java.lang.String) value); return true;
        case "trustAllCertificates": getOrCreateConfiguration((ECS2Component) component).setTrustAllCertificates((boolean) value); return true;
        case "autowiredEnabled": ((ECS2Component) component).setAutowiredEnabled((boolean) value); return true;
        case "accessKey": getOrCreateConfiguration((ECS2Component) component).setAccessKey((java.lang.String) value); return true;
        case "secretKey": getOrCreateConfiguration((ECS2Component) component).setSecretKey((java.lang.String) value); return true;
        default: return false;
        }
    }
}
}
| |
package org.yeastrc.ms.dao.run;
import java.util.ArrayList;
import java.util.List;
import org.yeastrc.ms.dao.BaseDAOTestCase;
import org.yeastrc.ms.domain.general.MsEnzyme;
import org.yeastrc.ms.domain.general.MsEnzymeIn;
import org.yeastrc.ms.domain.run.MsRun;
import org.yeastrc.ms.domain.run.MsRunIn;
import org.yeastrc.ms.domain.run.MsRunLocation;
import org.yeastrc.ms.domain.run.RunFileFormat;
public class MsRunDAOImplTest extends BaseDAOTestCase {
protected void setUp() throws Exception {
super.setUp();
resetDatabase();
addEnzymes();
}
protected void tearDown() throws Exception {
super.tearDown();
}
public void testSaveLoadAndDelete() {
MsRunIn run = createDefaultRun();
int runId = runDao.saveRun(run, "remoteDirectory");
MsRun runDb = runDao.loadRun(runId);
checkRun(run, runDb);
runDao.delete(runId);
assertNull(runDao.loadRun(runId));
}
public void testSaveAndLoadRunFileFormats() {
MsRunIn run = createRunForFormat(RunFileFormat.MS2);
int id1 = runDao.saveRun(run, "remoteDirectory");
MsRun runDb = runDao.loadRun(id1);
assertEquals(RunFileFormat.MS2, runDb.getRunFileFormat());
run = createRunForFormat(RunFileFormat.UNKNOWN);
int id2 = runDao.saveRun(run, "remoteDirectory");
runDb = runDao.loadRun(id2);
assertEquals(RunFileFormat.UNKNOWN, runDb.getRunFileFormat());
run = createRunForFormat(null);
int id3 = runDao.saveRun(run, "remoteDirectory");
runDb = runDao.loadRun(id3);
assertEquals(RunFileFormat.UNKNOWN, runDb.getRunFileFormat());
runDao.delete(id1);
runDao.delete(id2);
runDao.delete(id3);
assertNull(runDao.loadRun(id1));
assertNull(runDao.loadRun(id2));
assertNull(runDao.loadRun(id3));
}
public void testLoadRunsForFileNameAndSha1Sum() {
MsRunIn run = createDefaultRun();
int id1 = runDao.saveRun(run, "remoteDirectory");
run = createDefaultRun();
int id2 = runDao.saveRun(run, "remoteDirectory");
// List<Integer> runs = runDao.loadRunIdForFileNameAndSha1Sum(run.getFileName(), run.getSha1Sum());
// assertEquals(2, runs.size());
try {
runDao.loadRunIdForFileNameAndSha1Sum(run.getFileName(), run.getSha1Sum());
fail("Multiple entries with same filename and sha1sum -- should not happen in the real application");
}
catch(Exception e) {
assertEquals(e.getMessage(), "Failed to execute select statement: MsRun.selectRunIdsForFileNameAndSha1Sum");
}
runDao.delete(id1);
runDao.delete(id2);
assertNull(runDao.loadRun(id1));
assertNull(runDao.loadRun(id2));
}
public void testSaveAndLoadRunWithNoEnzymes() {
// create a run and save it
int runId = runDao.saveRun(createDefaultRun(), "remoteDirectory");
// read back the run
MsRun dbRun = runDao.loadRun(runId);
assertEquals(0, dbRun.getEnzymeList().size());
runDao.delete(runId);
assertNull(runDao.loadRun(runId));
}
public void testSaveAndLoadRunWithEnzymeInfo() {
// load some enzymes from the database
MsEnzyme enzyme1 = enzymeDao.loadEnzyme(1);
MsEnzyme enzyme2 = enzymeDao.loadEnzyme(2);
MsEnzyme enzyme3 = enzymeDao.loadEnzyme(3);
assertNotNull(enzyme1);
assertNotNull(enzyme2);
assertNotNull(enzyme3);
// create a run with enzyme information
List <MsEnzymeIn> enzymeList1 = new ArrayList<MsEnzymeIn>(2);
enzymeList1.add(enzyme1);
enzymeList1.add(enzyme2);
MsRunIn run1 = createRunWEnzymeInfo(enzymeList1);
// save the run
int runId_1 = runDao.saveRun(run1, "remoteDirectory");
// now read back the run and make sure it has the enzyme information
MsRun runFromDb_1 = runDao.loadRun(runId_1);
List<MsEnzyme> enzymes = runFromDb_1.getEnzymeList();
assertNotNull(enzymes);
assertEquals(2, enzymes.size());
// save another run for this experiment
List <MsEnzymeIn> enzymeList2 = new ArrayList<MsEnzymeIn>(1);
enzymeList2.add(enzyme3);
MsRunIn run2 = createRunWEnzymeInfo(enzymeList2);
// save the run
int runId_2 = runDao.saveRun(run2, "remoteDirectory");
// now read back the run and make sure it has the enzyme information
MsRun runFromDb_2 = runDao.loadRun(runId_2);
enzymes = runFromDb_2.getEnzymeList();
assertNotNull(enzymes);
assertEquals(1, enzymes.size());
checkEnzyme(enzyme3, enzymes.get(0));
runDao.delete(runId_1);
runDao.delete(runId_2);
assertNull(runDao.loadRun(runId_1));
assertNull(runDao.loadRun(runId_2));
}
public void testSaveAndDeleteRunsWithEnzymeInfoAndScans() {
// load some enzymes from the database
MsEnzyme enzyme1 = enzymeDao.loadEnzyme(1);
MsEnzyme enzyme2 = enzymeDao.loadEnzyme(2);
MsEnzyme enzyme3 = enzymeDao.loadEnzyme(3);
assertNotNull(enzyme1);
assertNotNull(enzyme2);
assertNotNull(enzyme3);
// create a run with enzyme information and save it
List <MsEnzymeIn> enzymeList1 = new ArrayList<MsEnzymeIn>(2);
enzymeList1.add(enzyme1);
enzymeList1.add(enzyme2);
MsRunIn run1 = createRunWEnzymeInfo(enzymeList1);
int runId_1 = runDao.saveRun(run1, "remoteDirectory");
// now read back the run and make sure it has the enzyme information
MsRun runFromDb_1 = runDao.loadRun(runId_1);
List<MsEnzyme> enzymes = runFromDb_1.getEnzymeList();
assertNotNull(enzymes);
assertEquals(2, enzymes.size());
// save another run
List <MsEnzymeIn> enzymeList2 = new ArrayList<MsEnzymeIn>(1);
enzymeList2.add(enzyme3);
MsRunIn run2 = createRunWEnzymeInfo(enzymeList2);
int runId_2 = runDao.saveRun(run2, "remoteDirectory");
// now read back the run and make sure it has the enzyme information
MsRun runFromDb_2 = runDao.loadRun(runId_2);
enzymes = runFromDb_1.getEnzymeList();
assertNotNull(enzymes);
assertEquals(2, enzymes.size());
// save some scans for the runs
saveScansForRun(runId_1, 10);
saveScansForRun(runId_2, 5);
// make sure the run and associated enzyme information got saved (RUN 1)
assertEquals(2, enzymeDao.loadEnzymesForRun(runId_1).size());
assertEquals(10, scanDao.loadScanIdsForRun(runId_1).size());
// make sure the run and associated enzyme information got saved (RUN 2)
assertEquals(1, enzymeDao.loadEnzymesForRun(runId_2).size());
assertEquals(5, scanDao.loadScanIdsForRun(runId_2).size());
// now delete the first run
runDao.delete(runId_1);
// make sure the run is deleted ...
assertNull(runDao.loadRun(runId_1));
// ... and the associated enzyme information is deleted ...
assertEquals(0, enzymeDao.loadEnzymesForRun(runId_1).size());
// ... and all scans for the run are deleted.
assertEquals(0, scanDao.loadScanIdsForRun(runId_1).size());
// make sure nothing was delete for Run 2
assertEquals(1, enzymeDao.loadEnzymesForRun(runId_2).size());
assertEquals(5, scanDao.loadScanIdsForRun(runId_2).size());
// now delete the second run
runDao.delete(runId_2);
// make sure the run is deleted ...
assertNull(runDao.loadRun(runId_2));
// ... and the associated enzyme information is deleted ...
assertEquals(0, enzymeDao.loadEnzymesForRun(runId_2).size());
// ... and all scans for the run are deleted.
assertEquals(0, scanDao.loadScanIdsForRun(runId_2).size());
}
public void testRunLocation() {
MsRunIn run1 = createDefaultRun();
String server = "my.host";
String remoteDir = "/my/server/directory";
int runId = runDao.saveRun(run1, remoteDir);
List<MsRunLocation> locDbList = runDao.loadLocationsForRun(runId);
assertEquals(1, locDbList.size());
MsRunLocation locDb = locDbList.get(0);
assertEquals(remoteDir, locDb.getServerDirectory());
assertEquals(runId, locDb.getRunId());
int matchingLocs = runDao.loadMatchingRunLocations(runId, remoteDir);
assertEquals(1, matchingLocs);
// save another location for the run
runDao.saveRunLocation("/my/server/directory/2", runId);
locDbList = runDao.loadLocationsForRun(runId);
assertEquals(2, locDbList.size());
assertEquals(locDbList.get(0).getRunId(), locDbList.get(1).getRunId());
assertNotSame(locDbList.get(0).getServerDirectory(), locDbList.get(1).getServerDirectory());
matchingLocs = runDao.loadMatchingRunLocations(runId, "/my/server/directory/2");
assertEquals(1, matchingLocs);
// try to find a matching location that does not exist
assertEquals(0, runDao.loadMatchingRunLocations(runId, "directory"));
runDao.delete(runId);
assertEquals(0, runDao.loadLocationsForRun(runId).size());
assertNull(runDao.loadRun(runId));
}
public static class MsRunTest implements MsRunIn {
private String sha1Sum;
private RunFileFormat runFileFormat;
private String instrumentVendor;
private String instrumentSN;
private String instrumentModel;
private String fileName;
private List<MsEnzymeIn> enzymeList = new ArrayList<MsEnzymeIn>();
private String dataType;
private String creationDate;
private String conversionSWVersion;
private String conversionSWOptions;
private String conversionSW;
private String comment;
private String aquisitionMethod;
public void setSha1Sum(String sha1Sum) {
this.sha1Sum = sha1Sum;
}
public void setRunFileFormat(RunFileFormat runFileFormat) {
this.runFileFormat = runFileFormat;
}
public void setInstrumentVendor(String instrumentVendor) {
this.instrumentVendor = instrumentVendor;
}
public void setInstrumentSN(String instrumentSN) {
this.instrumentSN = instrumentSN;
}
public void setInstrumentModel(String instrumentModel) {
this.instrumentModel = instrumentModel;
}
public void setFileName(String fileName) {
this.fileName = fileName;
}
public void setEnzymeList(List<MsEnzymeIn> enzymeList) {
this.enzymeList = enzymeList;
}
public void setDataType(String dataType) {
this.dataType = dataType;
}
public void setCreationDate(String creationDate) {
this.creationDate = creationDate;
}
public void setConversionSWVersion(String conversionSWVersion) {
this.conversionSWVersion = conversionSWVersion;
}
public void setConversionSWOptions(String conversionSWOptions) {
this.conversionSWOptions = conversionSWOptions;
}
public void setConversionSW(String conversionSW) {
this.conversionSW = conversionSW;
}
public void setComment(String comment) {
this.comment = comment;
}
public void setAcquisitionMethod(String squisitionMethod) {
this.aquisitionMethod = squisitionMethod;
}
public String getAcquisitionMethod() {
return this.aquisitionMethod;
}
public String getComment() {
return this.comment;
}
public String getConversionSW() {
return this.conversionSW;
}
public String getConversionSWOptions() {
return this.conversionSWOptions;
}
public String getConversionSWVersion() {
return this.conversionSWVersion;
}
public String getCreationDate() {
return this.creationDate;
}
public String getDataType() {
return this.dataType;
}
public List<MsEnzymeIn> getEnzymeList() {
return this.enzymeList;
}
public String getFileName() {
return this.fileName;
}
public String getInstrumentModel() {
return this.instrumentModel;
}
public String getInstrumentSN() {
return this.instrumentSN;
}
public String getInstrumentVendor() {
return this.instrumentVendor;
}
public RunFileFormat getRunFileFormat() {
return this.runFileFormat;
}
public String getSha1Sum() {
return this.sha1Sum;
}
}
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* Structure specifying a sparse buffer memory bind operation.
*
* <h5>Valid Usage (Implicit)</h5>
*
* <ul>
* <li>{@code buffer} <b>must</b> be a valid {@code VkBuffer} handle</li>
* <li>{@code pBinds} <b>must</b> be a valid pointer to an array of {@code bindCount} valid {@link VkSparseMemoryBind} structures</li>
* <li>{@code bindCount} <b>must</b> be greater than 0</li>
* </ul>
*
* <h5>See Also</h5>
*
* <p>{@link VkBindSparseInfo}, {@link VkSparseMemoryBind}</p>
*
* <h3>Layout</h3>
*
* <pre><code>
* struct VkSparseBufferMemoryBindInfo {
* VkBuffer {@link #buffer};
* uint32_t {@link #bindCount};
* {@link VkSparseMemoryBind VkSparseMemoryBind} const * {@link #pBinds};
* }</code></pre>
*/
public class VkSparseBufferMemoryBindInfo extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        BUFFER,
        BINDCOUNT,
        PBINDS;

    static {
        // Member layout: VkBuffer handle (8 bytes), uint32_t bindCount (4 bytes),
        // and a native pointer to the VkSparseMemoryBind array.
        Layout layout = __struct(
            __member(8),
            __member(4),
            __member(POINTER_SIZE)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        BUFFER = layout.offsetof(0);
        BINDCOUNT = layout.offsetof(1);
        PBINDS = layout.offsetof(2);
    }

    /**
     * Creates a {@code VkSparseBufferMemoryBindInfo} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkSparseBufferMemoryBindInfo(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** the {@code VkBuffer} object to be bound. */
    @NativeType("VkBuffer")
    public long buffer() { return nbuffer(address()); }
    /** the number of {@link VkSparseMemoryBind} structures in the {@code pBinds} array. */
    @NativeType("uint32_t")
    public int bindCount() { return nbindCount(address()); }
    /** a pointer to an array of {@link VkSparseMemoryBind} structures. */
    @NativeType("VkSparseMemoryBind const *")
    public VkSparseMemoryBind.Buffer pBinds() { return npBinds(address()); }

    /** Sets the specified value to the {@link #buffer} field. */
    public VkSparseBufferMemoryBindInfo buffer(@NativeType("VkBuffer") long value) { nbuffer(address(), value); return this; }
    /** Sets the address of the specified {@link VkSparseMemoryBind.Buffer} to the {@link #pBinds} field. */
    public VkSparseBufferMemoryBindInfo pBinds(@NativeType("VkSparseMemoryBind const *") VkSparseMemoryBind.Buffer value) { npBinds(address(), value); return this; }

    /** Initializes this struct with the specified values. */
    public VkSparseBufferMemoryBindInfo set(
        long buffer,
        VkSparseMemoryBind.Buffer pBinds
    ) {
        buffer(buffer);
        pBinds(pBinds);

        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VkSparseBufferMemoryBindInfo set(VkSparseBufferMemoryBindInfo src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code VkSparseBufferMemoryBindInfo} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkSparseBufferMemoryBindInfo malloc() {
        return wrap(VkSparseBufferMemoryBindInfo.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VkSparseBufferMemoryBindInfo} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkSparseBufferMemoryBindInfo calloc() {
        return wrap(VkSparseBufferMemoryBindInfo.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VkSparseBufferMemoryBindInfo} instance allocated with {@link BufferUtils}. */
    public static VkSparseBufferMemoryBindInfo create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VkSparseBufferMemoryBindInfo.class, memAddress(container), container);
    }

    /** Returns a new {@code VkSparseBufferMemoryBindInfo} instance for the specified memory address. */
    public static VkSparseBufferMemoryBindInfo create(long address) {
        return wrap(VkSparseBufferMemoryBindInfo.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkSparseBufferMemoryBindInfo createSafe(long address) {
        return address == NULL ? null : wrap(VkSparseBufferMemoryBindInfo.class, address);
    }

    /**
     * Returns a new {@link VkSparseBufferMemoryBindInfo.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkSparseBufferMemoryBindInfo.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link VkSparseBufferMemoryBindInfo.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkSparseBufferMemoryBindInfo.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkSparseBufferMemoryBindInfo.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VkSparseBufferMemoryBindInfo.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link VkSparseBufferMemoryBindInfo.Buffer} instance at the specified memory.
     *
     * @param address the memory address
     * @param capacity the buffer capacity
     */
    public static VkSparseBufferMemoryBindInfo.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkSparseBufferMemoryBindInfo.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    // -----------------------------------

    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VkSparseBufferMemoryBindInfo mallocStack() { return malloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VkSparseBufferMemoryBindInfo callocStack() { return calloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VkSparseBufferMemoryBindInfo mallocStack(MemoryStack stack) { return malloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VkSparseBufferMemoryBindInfo callocStack(MemoryStack stack) { return calloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VkSparseBufferMemoryBindInfo.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VkSparseBufferMemoryBindInfo.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VkSparseBufferMemoryBindInfo.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VkSparseBufferMemoryBindInfo.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }

    /**
     * Returns a new {@code VkSparseBufferMemoryBindInfo} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VkSparseBufferMemoryBindInfo malloc(MemoryStack stack) {
        return wrap(VkSparseBufferMemoryBindInfo.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VkSparseBufferMemoryBindInfo} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VkSparseBufferMemoryBindInfo calloc(MemoryStack stack) {
        return wrap(VkSparseBufferMemoryBindInfo.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VkSparseBufferMemoryBindInfo.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkSparseBufferMemoryBindInfo.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkSparseBufferMemoryBindInfo.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkSparseBufferMemoryBindInfo.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** Unsafe version of {@link #buffer}. */
    public static long nbuffer(long struct) { return UNSAFE.getLong(null, struct + VkSparseBufferMemoryBindInfo.BUFFER); }
    /** Unsafe version of {@link #bindCount}. */
    public static int nbindCount(long struct) { return UNSAFE.getInt(null, struct + VkSparseBufferMemoryBindInfo.BINDCOUNT); }
    /** Unsafe version of {@link #pBinds}. The view's capacity is taken from the struct's current {@code bindCount}. */
    public static VkSparseMemoryBind.Buffer npBinds(long struct) { return VkSparseMemoryBind.create(memGetAddress(struct + VkSparseBufferMemoryBindInfo.PBINDS), nbindCount(struct)); }

    /** Unsafe version of {@link #buffer(long) buffer}. */
    public static void nbuffer(long struct, long value) { UNSAFE.putLong(null, struct + VkSparseBufferMemoryBindInfo.BUFFER, value); }
    /** Sets the specified value to the {@code bindCount} field of the specified {@code struct}. */
    public static void nbindCount(long struct, int value) { UNSAFE.putInt(null, struct + VkSparseBufferMemoryBindInfo.BINDCOUNT, value); }
    /** Unsafe version of {@link #pBinds(VkSparseMemoryBind.Buffer) pBinds}. Also updates {@code bindCount} to the buffer's remaining element count. */
    public static void npBinds(long struct, VkSparseMemoryBind.Buffer value) { memPutAddress(struct + VkSparseBufferMemoryBindInfo.PBINDS, value.address()); nbindCount(struct, value.remaining()); }

    /**
     * Validates pointer members that should not be {@code NULL}.
     *
     * @param struct the struct to validate
     */
    public static void validate(long struct) {
        check(memGetAddress(struct + VkSparseBufferMemoryBindInfo.PBINDS));
    }

    // -----------------------------------

    /** An array of {@link VkSparseBufferMemoryBindInfo} structs. */
    public static class Buffer extends StructBuffer<VkSparseBufferMemoryBindInfo, Buffer> implements NativeResource {

        private static final VkSparseBufferMemoryBindInfo ELEMENT_FACTORY = VkSparseBufferMemoryBindInfo.create(-1L);

        /**
         * Creates a new {@code VkSparseBufferMemoryBindInfo.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkSparseBufferMemoryBindInfo#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        /** Creates a buffer view over {@code cap} structs at {@code address}; no container reference is retained. */
        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        // Package-private constructor exposing the full buffer state (mark/position/limit/capacity).
        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected VkSparseBufferMemoryBindInfo getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return the value of the {@link VkSparseBufferMemoryBindInfo#buffer} field. */
        @NativeType("VkBuffer")
        public long buffer() { return VkSparseBufferMemoryBindInfo.nbuffer(address()); }
        /** @return the value of the {@link VkSparseBufferMemoryBindInfo#bindCount} field. */
        @NativeType("uint32_t")
        public int bindCount() { return VkSparseBufferMemoryBindInfo.nbindCount(address()); }
        /** @return a {@link VkSparseMemoryBind.Buffer} view of the struct array pointed to by the {@link VkSparseBufferMemoryBindInfo#pBinds} field. */
        @NativeType("VkSparseMemoryBind const *")
        public VkSparseMemoryBind.Buffer pBinds() { return VkSparseBufferMemoryBindInfo.npBinds(address()); }

        /** Sets the specified value to the {@link VkSparseBufferMemoryBindInfo#buffer} field. */
        public VkSparseBufferMemoryBindInfo.Buffer buffer(@NativeType("VkBuffer") long value) { VkSparseBufferMemoryBindInfo.nbuffer(address(), value); return this; }
        /** Sets the address of the specified {@link VkSparseMemoryBind.Buffer} to the {@link VkSparseBufferMemoryBindInfo#pBinds} field. */
        public VkSparseBufferMemoryBindInfo.Buffer pBinds(@NativeType("VkSparseMemoryBind const *") VkSparseMemoryBind.Buffer value) { VkSparseBufferMemoryBindInfo.npBinds(address(), value); return this; }

    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.admin.jmx.internal;
import javax.management.ObjectName;
import javax.management.modelmbean.ModelMBean;
import org.apache.geode.admin.AdminException;
import org.apache.geode.admin.GemFireHealth;
import org.apache.geode.admin.GemFireHealthConfig;
import org.apache.geode.admin.internal.GemFireHealthConfigImpl;
/**
* The JMX "managed resource" that represents the configuration for the health of GemFire.
* Basically, it provides the behavior of <code>GemFireHealthConfigImpl</code>, but does some JMX
* stuff like registering beans with the agent.
*
* <P>
*
* Unlike other <code>ManagedResource</code>s this class cannot simply subclass
* <code>GemFireHealthImpl</code> because it instances are serialized and sent to other VMs. This is
* problematic because the other VMs most likely do not have JMX classes like
* <code>ModelMBean</code> on their classpaths. So, instead we delegate all of the
* <code>GemFireHealthConfig</code> behavior to another object which IS serialized.
*
* @see GemFireHealthJmxImpl#createDistributedSystemHealthConfig
*
*
* @since GemFire 3.5
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
justification = "This class is deprecated. Also, any further changes so close to the release is inadvisable.")
public class GemFireHealthConfigJmxImpl
implements GemFireHealthConfig, ManagedResource, java.io.Serializable {
private static final long serialVersionUID = 1482719647163239953L;
/** The <code>GemFireHealth</code> that we help configure */
private GemFireHealth health;
/** The name of the MBean that will manage this resource */
private String mbeanName;
/** The ModelMBean that is configured to manage this resource */
private ModelMBean modelMBean;
/** The delegate that contains the real config state */
private GemFireHealthConfig delegate;
/** The object name of this managed resource */
private ObjectName objectName;
/////////////////////// Constructors ///////////////////////
/**
* Creates a new <code>GemFireHealthConfigJmxImpl</code> that configures the health monitoring of
* components running on the given host.
*/
GemFireHealthConfigJmxImpl(GemFireHealthJmxImpl health, String hostName) throws AdminException {
this.delegate = new GemFireHealthConfigImpl(hostName);
this.health = health;
this.mbeanName = new StringBuffer().append(MBEAN_NAME_PREFIX).append("GemFireHealthConfig,id=")
.append(MBeanUtil.makeCompliantMBeanNameProperty(health.getDistributedSystem().getId()))
.append(",host=")
.append((hostName == null ? "default" : MBeanUtil.makeCompliantMBeanNameProperty(hostName)))
.toString();
this.objectName = MBeanUtil.createMBean(this);
}
////////////////////// Instance Methods //////////////////////
/**
* Applies the changes made to this config back to the health monitor.
*
* @see GemFireHealth#setDistributedSystemHealthConfig
*/
public void applyChanges() {
String hostName = this.getHostName();
if (hostName == null) {
this.health.setDefaultGemFireHealthConfig(this);
} else {
this.health.setGemFireHealthConfig(hostName, this);
}
}
public String getMBeanName() {
return this.mbeanName;
}
public ModelMBean getModelMBean() {
return this.modelMBean;
}
public ObjectName getObjectName() {
return this.objectName;
}
public void setModelMBean(ModelMBean modelMBean) {
this.modelMBean = modelMBean;
}
public ManagedResourceType getManagedResourceType() {
return ManagedResourceType.GEMFIRE_HEALTH_CONFIG;
}
/**
* Replace this object with the delegate that can be properly serialized.
*/
public Object writeReplace() {
return this.delegate;
}
////////////////////// MemberHealthConfig //////////////////////
public long getMaxVMProcessSize() {
return delegate.getMaxVMProcessSize();
}
public void setMaxVMProcessSize(long size) {
delegate.setMaxVMProcessSize(size);
}
public long getMaxMessageQueueSize() {
return delegate.getMaxMessageQueueSize();
}
public void setMaxMessageQueueSize(long maxMessageQueueSize) {
delegate.setMaxMessageQueueSize(maxMessageQueueSize);
}
public long getMaxReplyTimeouts() {
return delegate.getMaxReplyTimeouts();
}
public void setMaxReplyTimeouts(long maxReplyTimeouts) {
delegate.setMaxReplyTimeouts(maxReplyTimeouts);
}
public double getMaxRetransmissionRatio() {
return delegate.getMaxRetransmissionRatio();
}
public void setMaxRetransmissionRatio(double ratio) {
delegate.setMaxRetransmissionRatio(ratio);
}
////////////////////// CacheHealthConfig //////////////////////
/** Delegates to the underlying health config. */
public long getMaxNetSearchTime() {
return delegate.getMaxNetSearchTime();
}
/** Delegates to the underlying health config. */
public void setMaxNetSearchTime(long maxNetSearchTime) {
delegate.setMaxNetSearchTime(maxNetSearchTime);
}
/** Delegates to the underlying health config. */
public long getMaxLoadTime() {
return delegate.getMaxLoadTime();
}
/** Delegates to the underlying health config. */
public void setMaxLoadTime(long maxLoadTime) {
delegate.setMaxLoadTime(maxLoadTime);
}
/** Delegates to the underlying health config. */
public double getMinHitRatio() {
return delegate.getMinHitRatio();
}
/** Delegates to the underlying health config. */
public void setMinHitRatio(double minHitRatio) {
delegate.setMinHitRatio(minHitRatio);
}
/** Delegates to the underlying health config. */
public long getMaxEventQueueSize() {
return delegate.getMaxEventQueueSize();
}
/** Delegates to the underlying health config. */
public void setMaxEventQueueSize(long maxEventQueueSize) {
delegate.setMaxEventQueueSize(maxEventQueueSize);
}
////////////////////// GemFireHealthConfig //////////////////////
/** Returns the host this config applies to; {@code null} means the default config (see applyChanges). */
public String getHostName() {
return delegate.getHostName();
}
/** Delegates to the underlying health config. */
public void setHealthEvaluationInterval(int interval) {
delegate.setHealthEvaluationInterval(interval);
}
/** Delegates to the underlying health config. */
public int getHealthEvaluationInterval() {
return delegate.getHealthEvaluationInterval();
}
// Intentional no-op: nothing visible here to release. NOTE(review): confirm no
// listeners or MBean registrations need tearing down.
public void cleanupResource() {}
}
| |
package io.advantageous.qbit.meta.swagger;
import io.advantageous.boon.core.Maps;
import io.advantageous.boon.core.TypeType;
import io.advantageous.boon.core.reflection.AnnotationData;
import io.advantageous.boon.core.reflection.ClassMeta;
import io.advantageous.boon.core.reflection.fields.FieldAccess;
import io.advantageous.qbit.meta.swagger.builders.DefinitionBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Type;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
 * Collects swagger {@link Definition}s for Java classes, mapping primitives and
 * common built-ins to inline schemas and complex types to definition references.
 */
public class DefinitionClassCollector {

    /** Definitions collected so far, keyed by simple class name. */
    private final Map<String, Definition> definitionMap = new HashMap<>();

    private final Logger logger = LoggerFactory.getLogger(DefinitionClassCollector.class);

    /*
     * Schemas for common primitive and basic types. Keyed by Class<?> rather than
     * the raw Class type so no unchecked warnings leak out of this class.
     */
    private final Map<Class<?>, Schema> mappings = Maps.map(
            String.class, Schema.schema("string"),
            StringBuffer.class, Schema.schema("string"),
            Date.class, Schema.schemaWithFormat("string", "dateTime"),
            Integer.class, Schema.schemaWithFormat("integer", "int32"),
            int.class, Schema.schemaWithFormat("integer", "int32"),
            Long.class, Schema.schemaWithFormat("integer", "int64"),
            long.class, Schema.schemaWithFormat("integer", "int64"),
            Float.class, Schema.schemaWithFormat("number", "float"),
            float.class, Schema.schemaWithFormat("number", "float"),
            Double.class, Schema.schemaWithFormat("number", "double"),
            double.class, Schema.schemaWithFormat("number", "double"),
            Boolean.class, Schema.schemaWithFormat("boolean", ""),
            boolean.class, Schema.schemaWithFormat("boolean", ""),
            byte.class, Schema.schemaWithFormat("string", "byte"),
            Byte.class, Schema.schemaWithFormat("string", "byte")
    );

    {
        /* Array schemas for each of the basic types above. */
        mappings.put(String[].class, Schema.array(mappings.get(String.class)));
        mappings.put(StringBuffer[].class, Schema.array(mappings.get(StringBuffer.class)));
        mappings.put(Date[].class, Schema.array(mappings.get(Date.class)));
        mappings.put(int[].class, Schema.array(mappings.get(int.class)));
        mappings.put(Integer[].class, Schema.array(mappings.get(Integer.class)));
        mappings.put(long[].class, Schema.array(mappings.get(long.class)));
        mappings.put(Long[].class, Schema.array(mappings.get(Long.class)));
        mappings.put(Double[].class, Schema.array(mappings.get(Double.class)));
        mappings.put(double[].class, Schema.array(mappings.get(double.class)));
        mappings.put(Float[].class, Schema.array(mappings.get(Float.class)));
        mappings.put(float[].class, Schema.array(mappings.get(float.class)));
        mappings.put(Boolean[].class, Schema.array(mappings.get(Boolean.class)));
        mappings.put(boolean[].class, Schema.array(mappings.get(boolean.class)));
        mappings.put(byte[].class, Schema.array(mappings.get(byte.class)));
        mappings.put(Byte[].class, Schema.array(mappings.get(Byte.class)));
    }

    /** Returns the schema for {@code cls} with no collection component hint. */
    public Schema getSchema(final Class<?> cls) {
        return getSchemaWithComponentClass(cls, null);
    }

    /**
     * Returns the swagger schema describing {@code cls}.
     *
     * @param cls            the type to describe; {@code null} falls back to a plain string schema
     * @param componentClass element type used when {@code cls} is a collection; may be {@code null}
     */
    public Schema getSchemaWithComponentClass(final Class<?> cls, Class<?> componentClass) {
        final Schema schema = mappings.get(cls);
        if (schema != null) {
            return schema;
        }
        if (cls == null) {
            return Schema.schema("string");
        }
        final TypeType type = TypeType.getType(cls);
        if (type.isArray()) {
            final Schema componentSchema = Schema.definitionRef(cls.getComponentType().getSimpleName(), "");
            return Schema.array(componentSchema, "");
        }
        if (type.isCollection()) {
            if (componentClass == null) {
                logger.info("Component class was null defaulting to string");
                return Schema.array(Schema.definitionRef("string", ""), "");
            }
            return Schema.array(Schema.definitionRef(componentClass.getSimpleName(), ""), "");
        }
        return Schema.definitionRef(cls.getSimpleName(), "");
    }

    /**
     * Registers {@code cls} (and, transitively, its field types) as a definition.
     * {@code void} and the built-in basic types are skipped.
     */
    public void addClass(final Class<?> cls) {
        if (cls == void.class || cls == Void.class) {
            return; // void has no schema
        }
        if (mappings.containsKey(cls)) {
            return; // common built-in type; represented inline, never as a definition
        }
        addClass(ClassMeta.classMeta(cls));
    }

    /** Builds and stores a {@link Definition} for the given class metadata. */
    private void addClass(final ClassMeta<?> classMeta) {
        try {
            if (definitionMap.containsKey(classMeta.name())) {
                return;
            }
            // Insert a placeholder up front so self-referencing / cyclic types
            // terminate instead of recursing forever through field conversion.
            definitionMap.put(classMeta.name(), null);
            final DefinitionBuilder definitionBuilder = new DefinitionBuilder();
            definitionBuilder.setDescription(getDescription(classMeta));
            for (final FieldAccess fieldAccess : classMeta.fieldMap().values()) {
                if (fieldAccess.ignore() || fieldAccess.isStatic()) {
                    continue;
                }
                definitionBuilder.addProperty(fieldAccess.name(), convertFieldToSchema(fieldAccess));
            }
            definitionMap.put(classMeta.name(), definitionBuilder.build());
        } catch (Exception ex) {
            logger.warn("Unable to add class " + classMeta.longName(), ex);
        }
    }

    /** Converts a single field to a schema; logs and returns an error schema on failure. */
    private Schema convertFieldToSchema(final FieldAccess fieldAccess) {
        try {
            final Schema schema = mappings.get(fieldAccess.type());
            if (schema != null) {
                final String description = getDescription(fieldAccess);
                // getDescription() returns "" (never null) when @Description is absent,
                // so the former null-only check was dead and every basic schema was
                // wrapped with an empty description. Treat blank as "no description".
                if (description == null || description.isEmpty()) {
                    return schema;
                }
                return Schema.schemaWithDescription(schema, description);
            }
            return convertFieldToComplexSchema(fieldAccess);
        } catch (Exception ex) {
            logger.warn("unable to convert field " + fieldAccess.name() + " from " + fieldAccess.declaringParent(), ex);
            return Schema.schemaWithFormat("error", "error.see.logs");
        }
    }

    /** Dispatches complex (non-basic) fields to array, map, or definition-ref handling. */
    private Schema convertFieldToComplexSchema(final FieldAccess fieldAccess) {
        if (isArraySchema(fieldAccess)) {
            return convertFieldToArraySchema(fieldAccess);
        }
        if (isMap(fieldAccess)) {
            return convertFieldToMapSchema(fieldAccess);
        }
        return convertFieldToDefinitionRef(fieldAccess);
    }

    /** Converts a Map-typed field; returns {@code null} when the value type is not a plain class. */
    private Schema convertFieldToMapSchema(final FieldAccess fieldAccess) {
        final Type[] actualTypeArguments = fieldAccess.getParameterizedType().getActualTypeArguments();
        final String description = getDescription(fieldAccess);
        if (!(actualTypeArguments[1] instanceof Class)) {
            // Nested generics / wildcard value types are not supported.
            return null;
        }
        Schema componentSchema = mappings.get(actualTypeArguments[1]);
        if (componentSchema == null) {
            // NOTE(review): the mapping lookup uses the map's value type argument but
            // the definition ref uses fieldAccess.getComponentClass() -- presumably
            // Boon reports the value type as the component class for maps; confirm.
            if (!definitionMap.containsKey(fieldAccess.getComponentClass().getSimpleName())) {
                addClass(fieldAccess.getComponentClass());
            }
            componentSchema = Schema.definitionRef(fieldAccess.getComponentClass().getSimpleName(), "");
        }
        return Schema.map(componentSchema, description);
    }

    /** True when the field is a collection-like type rendered as a swagger array. */
    private boolean isArraySchema(final FieldAccess fieldAccess) {
        switch (fieldAccess.typeEnum()) {
            case SET:
            case LIST:
            case COLLECTION:
            case ARRAY:
                return true;
            default:
                return false;
        }
    }

    /** True when the field is a Map. */
    private boolean isMap(final FieldAccess fieldAccess) {
        switch (fieldAccess.typeEnum()) {
            case MAP:
                return true;
            default:
                return false;
        }
    }

    /** References (and if needed, first registers) a definition for the field's own type. */
    private Schema convertFieldToDefinitionRef(final FieldAccess fieldAccess) {
        if (!definitionMap.containsKey(fieldAccess.type().getSimpleName())) {
            addClass(fieldAccess.type());
        }
        return Schema.definitionRef(fieldAccess.type().getSimpleName(), getDescription(fieldAccess));
    }

    /** Reads a field-level {@code @Description} annotation value; "" when absent. */
    private String getDescription(final FieldAccess fieldAccess) {
        final Map<String, Object> descriptionMap = fieldAccess.getAnnotationData("Description");
        if (descriptionMap != null && descriptionMap.containsKey("value")) {
            return descriptionMap.get("value").toString();
        }
        return "";
    }

    /** Reads a class-level {@code @Description} annotation value; "" when absent. */
    private String getDescription(final ClassMeta<?> classMeta) {
        final AnnotationData annotationData = classMeta.annotation("Description");
        if (annotationData == null) {
            return "";
        }
        final Map<String, Object> values = annotationData.getValues();
        if (values != null && values.containsKey("value")) {
            return values.get("value").toString();
        }
        return "";
    }

    /** Converts a collection/array field to an array schema, registering complex element types. */
    private Schema convertFieldToArraySchema(final FieldAccess fieldAccess) {
        final String description = getDescription(fieldAccess);
        Schema componentSchema = mappings.get(fieldAccess.getComponentClass());
        if (componentSchema == null) {
            // Complex element type: ensure it has its own definition, then reference it.
            if (!definitionMap.containsKey(fieldAccess.getComponentClass().getSimpleName())) {
                addClass(fieldAccess.getComponentClass());
            }
            componentSchema = Schema.definitionRef(fieldAccess.getComponentClass().getSimpleName(), description);
        }
        return Schema.array(componentSchema);
    }

    /** Returns the live (mutable) definition map; callers should treat it as read-only. */
    public Map<String, Definition> getDefinitionMap() {
        return definitionMap;
    }
}
| |
/*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.predicate;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.consumer.BooleanConsumer;
import at.gridtec.lambda4j.consumer.LongConsumer2;
import at.gridtec.lambda4j.function.BooleanFunction;
import at.gridtec.lambda4j.function.LongFunction2;
import at.gridtec.lambda4j.function.conversion.BooleanToByteFunction;
import at.gridtec.lambda4j.function.conversion.BooleanToCharFunction;
import at.gridtec.lambda4j.function.conversion.BooleanToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.BooleanToFloatFunction;
import at.gridtec.lambda4j.function.conversion.BooleanToIntFunction;
import at.gridtec.lambda4j.function.conversion.BooleanToLongFunction;
import at.gridtec.lambda4j.function.conversion.BooleanToShortFunction;
import at.gridtec.lambda4j.function.conversion.ByteToLongFunction;
import at.gridtec.lambda4j.function.conversion.CharToLongFunction;
import at.gridtec.lambda4j.function.conversion.FloatToLongFunction;
import at.gridtec.lambda4j.function.conversion.LongToByteFunction;
import at.gridtec.lambda4j.function.conversion.LongToCharFunction;
import at.gridtec.lambda4j.function.conversion.LongToDoubleFunction2;
import at.gridtec.lambda4j.function.conversion.LongToFloatFunction;
import at.gridtec.lambda4j.function.conversion.LongToIntFunction2;
import at.gridtec.lambda4j.function.conversion.LongToShortFunction;
import at.gridtec.lambda4j.function.conversion.ShortToLongFunction;
import at.gridtec.lambda4j.operator.unary.BooleanUnaryOperator;
import at.gridtec.lambda4j.operator.unary.LongUnaryOperator2;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.DoubleToLongFunction;
import java.util.function.IntToLongFunction;
import java.util.function.LongPredicate;
import java.util.function.LongUnaryOperator;
import java.util.function.ToLongFunction;
/**
 * Represents a predicate (boolean-valued function) of one {@code long}-valued input argument.
* This is a primitive specialization of {@link Predicate2}.
* <p>
* This is a {@link FunctionalInterface} whose functional method is {@link #test(long)}.
*
* @apiNote This is a JDK lambda.
* @see Predicate2
*/
@SuppressWarnings("unused")
@FunctionalInterface
public interface LongPredicate2 extends Lambda, LongPredicate {
/**
* Constructs a {@link LongPredicate2} based on a lambda expression or a method reference. Thereby the given lambda
* expression or method reference is returned on an as-is basis to implicitly transform it to the desired type. With
* this method, it is possible to ensure that correct type is used from lambda expression or method reference.
*
* @param expression A lambda expression or (typically) a method reference, e.g. {@code this::method}
* @return A {@code LongPredicate2} from given lambda expression or method reference.
* @implNote This implementation allows the given argument to be {@code null}, but only if {@code null} given,
* {@code null} will be returned.
* @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
* Expression</a>
* @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
*/
static LongPredicate2 of(@Nullable final LongPredicate2 expression) {
// Identity by design: returning the argument as-is lets a lambda or method
// reference be implicitly typed as LongPredicate2 (null in, null out).
return expression;
}
/**
* Calls the given {@link LongPredicate} with the given argument and returns its result.
*
* @param predicate The predicate to be called
* @param value The argument to the predicate
* @return The result from the given {@code LongPredicate2}.
* @throws NullPointerException If given argument is {@code null}
*/
static boolean call(@Nonnull final LongPredicate predicate, long value) {
    // requireNonNull returns its argument, so validation and delegation fuse into one line.
    return Objects.requireNonNull(predicate).test(value);
}
/**
* Creates a {@link LongPredicate2} which always returns a given value.
*
* @param ret The return value for the constant
* @return A {@code LongPredicate2} which always returns a given value.
*/
@Nonnull
static LongPredicate2 constant(boolean ret) {
    // The input is ignored entirely; every evaluation yields the captured value.
    return ignored -> ret;
}
/**
* Returns a {@link LongPredicate2} that always returns {@code true}.
*
* @return A {@link LongPredicate2} that always returns {@code true}.
* @see #alwaysFalse()
*/
@Nonnull
static LongPredicate2 alwaysTrue() {
    // Equivalent to constant(true).
    return ignored -> true;
}
/**
* Returns a {@link LongPredicate2} that always returns {@code false}.
*
* @return A {@link LongPredicate2} that always returns {@code false}.
* @see #alwaysTrue()
*/
@Nonnull
static LongPredicate2 alwaysFalse() {
    // Equivalent to constant(false).
    return ignored -> false;
}
/**
 * Returns a {@link LongPredicate2} that tests if the given argument is <b>equal</b> to the one of this predicate.
*
* @param target The reference with which to compare for equality, which may be {@code null}
 * @return A {@code LongPredicate2} that tests if the given argument is <b>equal</b> to the one of this predicate.
* @implNote This implementation checks equality according to {@link Objects#equals(Object)} operation for {@link
* Object} references and {@code value == target} operation for primitive values.
*/
@Nonnull
static LongPredicate2 isEqual(long target) {
    // Primitive == comparison; no boxing involved.
    return value -> value == target;
}
/**
* Applies this predicate to the given argument.
*
* @param value The argument to the predicate
* @return The return value from the predicate, which is its result.
*/
// Functional method: evaluates this predicate on the given long argument.
boolean test(long value);
/**
* Returns the number of arguments for this predicate.
*
* @return The number of arguments for this predicate.
* @implSpec The default implementation always returns {@code 1}.
*/
@Nonnegative
default int arity() {
// Single long argument, hence arity 1.
return 1;
}
/**
* Returns a composed {@link Predicate2} that first applies the {@code before} function to its input, and
* then applies this predicate to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
*
* @param <A> The type of the argument to the given function, and of composed predicate
* @param before The function to apply before this predicate is applied
* @return A composed {@code Predicate2} that first applies the {@code before} function to its input, and then
* applies this predicate to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle every type.
*/
@Nonnull
default <A> Predicate2<A> compose(@Nonnull final ToLongFunction<? super A> before) {
    Objects.requireNonNull(before);
    // Map the input to a long first, then evaluate this predicate on the result.
    return a -> {
        final long mapped = before.applyAsLong(a);
        return test(mapped);
    };
}
/**
* Returns a composed {@link BooleanUnaryOperator} that first applies the {@code before} function to its input, and
* then applies this predicate to the result. If evaluation of either operation throws an exception, it is relayed
* to the caller of the composed operation. This method is just convenience, to provide the ability to execute an
* operation which accepts {@code boolean} input, before this primitive predicate is executed.
*
* @param before The function to apply before this predicate is applied
* @return A composed {@code BooleanUnaryOperator} that first applies the {@code before} function to its input, and
* then applies this predicate to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* boolean}.
*/
@Nonnull
default BooleanUnaryOperator composeFromBoolean(@Nonnull final BooleanToLongFunction before) {
    Objects.requireNonNull(before);
    // Convert the boolean to a long first, then evaluate this predicate on the result.
    return value -> {
        final long converted = before.applyAsLong(value);
        return test(converted);
    };
}
/**
* Returns a composed {@link BytePredicate} that first applies the {@code before} function to
* its input, and then applies this predicate to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code byte} input,
* before this primitive predicate is executed.
*
* @param before The function to apply before this predicate is applied
* @return A composed {@code BytePredicate} that first applies the {@code before} function to its input, and then
* applies this predicate to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* byte}.
*/
@Nonnull
default BytePredicate composeFromByte(@Nonnull final ByteToLongFunction before) {
    Objects.requireNonNull(before);
    // Convert the byte to a long first, then evaluate this predicate on the result.
    return value -> {
        final long converted = before.applyAsLong(value);
        return test(converted);
    };
}
/**
* Returns a composed {@link CharPredicate} that first applies the {@code before} function to
* its input, and then applies this predicate to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code char} input,
* before this primitive predicate is executed.
*
* @param before The function to apply before this predicate is applied
* @return A composed {@code CharPredicate} that first applies the {@code before} function to its input, and then
* applies this predicate to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* char}.
*/
@Nonnull
default CharPredicate composeFromChar(@Nonnull final CharToLongFunction before) {
    Objects.requireNonNull(before);
    // Convert the char to a long first, then evaluate this predicate on the result.
    return value -> {
        final long converted = before.applyAsLong(value);
        return test(converted);
    };
}
/**
* Returns a composed {@link DoublePredicate2} that first applies the {@code before} function to its input, and then
* applies this predicate to the result. If evaluation of either operation throws an exception, it is relayed to the
* caller of the composed operation. This method is just convenience, to provide the ability to execute an operation
* which accepts {@code double} input, before this primitive predicate is executed.
*
* @param before The function to apply before this predicate is applied
* @return A composed {@code DoublePredicate2} that first applies the {@code before} function to its input, and then
* applies this predicate to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* double}.
*/
@Nonnull
default DoublePredicate2 composeFromDouble(@Nonnull final DoubleToLongFunction before) {
    Objects.requireNonNull(before);
    // Convert the double to a long first, then evaluate this predicate on the result.
    return value -> {
        final long converted = before.applyAsLong(value);
        return test(converted);
    };
}
/**
* Returns a composed {@link FloatPredicate} that first applies the {@code before} function to its input, and then
* applies this predicate to the result. If evaluation of either operation throws an exception, it is relayed to the
* caller of the composed operation. This method is just convenience, to provide the ability to execute an operation
* which accepts {@code float} input, before this primitive predicate is executed.
*
* @param before The function to apply before this predicate is applied
* @return A composed {@code FloatPredicate} that first applies the {@code before} function to its input, and then
* applies this predicate to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* float}.
*/
@Nonnull
default FloatPredicate composeFromFloat(@Nonnull final FloatToLongFunction before) {
    Objects.requireNonNull(before);
    // Convert the float to a long first, then evaluate this predicate on the result.
    return value -> {
        final long converted = before.applyAsLong(value);
        return test(converted);
    };
}
/**
* Returns a composed {@link IntPredicate2} that first applies the {@code before} function to
* its input, and then applies this predicate to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code int} input,
* before this primitive predicate is executed.
*
* @param before The function to apply before this predicate is applied
* @return A composed {@code IntPredicate2} that first applies the {@code before} function to its input, and then
* applies this predicate to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* int}.
*/
@Nonnull
default IntPredicate2 composeFromInt(@Nonnull final IntToLongFunction before) {
    Objects.requireNonNull(before);
    // Convert the int to a long first, then evaluate this predicate on the result.
    return value -> {
        final long converted = before.applyAsLong(value);
        return test(converted);
    };
}
/**
* Returns a composed {@link LongPredicate2} that first applies the {@code before} operator to
* its input, and then applies this predicate to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code long} input,
* before this primitive predicate is executed.
*
* @param before The operator to apply before this predicate is applied
* @return A composed {@code LongPredicate2} that first applies the {@code before} operator to its input, and then
* applies this predicate to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* long}.
*/
@Nonnull
default LongPredicate2 composeFromLong(@Nonnull final LongUnaryOperator before) {
    Objects.requireNonNull(before);
    // Transform the long first, then evaluate this predicate on the result.
    return value -> {
        final long transformed = before.applyAsLong(value);
        return test(transformed);
    };
}
/**
* Returns a composed {@link ShortPredicate} that first applies the {@code before} function to its input, and then
* applies this predicate to the result. If evaluation of either operation throws an exception, it is relayed to the
* caller of the composed operation. This method is just convenience, to provide the ability to execute an operation
* which accepts {@code short} input, before this primitive predicate is executed.
*
* @param before The function to apply before this predicate is applied
* @return A composed {@code ShortPredicate} that first applies the {@code before} function to its input, and then
* applies this predicate to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
* short}.
*/
@Nonnull
default ShortPredicate composeFromShort(@Nonnull final ShortToLongFunction before) {
    Objects.requireNonNull(before);
    // Convert the short to a long first, then evaluate this predicate on the result.
    return value -> {
        final long converted = before.applyAsLong(value);
        return test(converted);
    };
}
/**
* Returns a composed {@link LongFunction2} that first applies this predicate to its input, and then applies the
* {@code after} function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
*
* @param <S> The type of return value from the {@code after} function, and of the composed function
* @param after The function to apply after this predicate is applied
* @return A composed {@code LongFunction2} that first applies this predicate to its input, and then applies the
* {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to return every type.
*/
@Nonnull
default <S> LongFunction2<S> andThen(@Nonnull final BooleanFunction<? extends S> after) {
    Objects.requireNonNull(after);
    // Evaluate this predicate first, then hand the boolean to the after function.
    return value -> {
        final boolean result = test(value);
        return after.apply(result);
    };
}
/**
* Returns a composed {@link LongPredicate2} that first applies this predicate to its input, and then applies the
* {@code after} operator to the result. If evaluation of either operation throws an exception, it is relayed to the
* caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive predicate to an operation returning {@code boolean}.
*
* @param after The operator to apply after this predicate is applied
* @return A composed {@code LongPredicate2} that first applies this predicate to its input, and then applies the
* {@code after} operator to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
* boolean}.
*/
@Nonnull
default LongPredicate2 andThenToBoolean(@Nonnull final BooleanUnaryOperator after) {
    Objects.requireNonNull(after);
    // Evaluate this predicate first, then post-process the boolean result.
    return value -> {
        final boolean result = test(value);
        return after.applyAsBoolean(result);
    };
}
/**
* Returns a composed {@link LongToByteFunction} that first applies this predicate to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive predicate to an operation returning {@code byte}.
*
* @param after The function to apply after this predicate is applied
* @return A composed {@code LongToByteFunction} that first applies this predicate to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
* byte}.
*/
@Nonnull
default LongToByteFunction andThenToByte(@Nonnull final BooleanToByteFunction after) {
    Objects.requireNonNull(after);
    // Evaluate this predicate first, then convert the boolean result to a byte.
    return value -> {
        final boolean result = test(value);
        return after.applyAsByte(result);
    };
}
/**
* Returns a composed {@link LongToCharFunction} that first applies this predicate to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive predicate to an operation returning {@code char}.
*
* @param after The function to apply after this predicate is applied
* @return A composed {@code LongToCharFunction} that first applies this predicate to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
* char}.
*/
@Nonnull
default LongToCharFunction andThenToChar(@Nonnull final BooleanToCharFunction after) {
    Objects.requireNonNull(after);
    // Evaluate this predicate first, then convert the boolean result to a char.
    return value -> {
        final boolean result = test(value);
        return after.applyAsChar(result);
    };
}
/**
* Returns a composed {@link LongToDoubleFunction2} that first applies this predicate to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive predicate to an operation returning {@code double}.
*
* @param after The function to apply after this predicate is applied
* @return A composed {@code LongToDoubleFunction2} that first applies this predicate to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
* double}.
*/
@Nonnull
default LongToDoubleFunction2 andThenToDouble(@Nonnull final BooleanToDoubleFunction after) {
    Objects.requireNonNull(after);
    // Evaluate this predicate first, then convert the boolean result to a double.
    return value -> {
        final boolean result = test(value);
        return after.applyAsDouble(result);
    };
}
/**
* Returns a composed {@link LongToFloatFunction} that first applies this predicate to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive predicate to an operation returning {@code float}.
*
* @param after The function to apply after this predicate is applied
* @return A composed {@code LongToFloatFunction} that first applies this predicate to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
* float}.
*/
@Nonnull
default LongToFloatFunction andThenToFloat(@Nonnull final BooleanToFloatFunction after) {
    Objects.requireNonNull(after);
    // Evaluate this predicate first, then convert the boolean result to a float.
    return value -> {
        final boolean result = test(value);
        return after.applyAsFloat(result);
    };
}
/**
* Returns a composed {@link LongToIntFunction2} that first applies this predicate to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive predicate to an operation returning {@code int}.
*
* @param after The function to apply after this predicate is applied
* @return A composed {@code LongToIntFunction2} that first applies this predicate to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
* int}.
*/
@Nonnull
default LongToIntFunction2 andThenToInt(@Nonnull final BooleanToIntFunction after) {
Objects.requireNonNull(after);
return (value) -> after.applyAsInt(test(value));
}
/**
 * Returns a composed {@link LongUnaryOperator2} that first applies this predicate to its input, and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
 * the caller of the composed operation. This method is just convenience, to provide the ability to transform this
 * primitive predicate to an operation returning {@code long}.
 *
 * @param after The function to apply after this predicate is applied
 * @return A composed {@code LongUnaryOperator2} that first applies this predicate to its input, and then applies
 * the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * long}.
 */
@Nonnull
default LongUnaryOperator2 andThenToLong(@Nonnull final BooleanToLongFunction after) {
    Objects.requireNonNull(after);
    return (value) -> after.applyAsLong(test(value));
}
/**
 * Returns a composed {@link LongToShortFunction} that first applies this predicate to its input, and then applies
 * the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
 * the caller of the composed operation. This method is just convenience, to provide the ability to transform this
 * primitive predicate to an operation returning {@code short}.
 *
 * @param after The function to apply after this predicate is applied
 * @return A composed {@code LongToShortFunction} that first applies this predicate to its input, and then applies
 * the {@code after} function to the result.
 * @throws NullPointerException If given argument is {@code null}
 * @implSpec The input argument of this method is able to return primitive values. In this case this is {@code
 * short}.
 */
@Nonnull
default LongToShortFunction andThenToShort(@Nonnull final BooleanToShortFunction after) {
    Objects.requireNonNull(after);
    return (value) -> after.applyAsShort(test(value));
}
/**
 * Returns a composed {@link LongConsumer2} that first applies this predicate to its input, and then consumes the
 * result using the given {@link BooleanConsumer}. If evaluation of either operation throws an exception, it is
 * relayed to the caller of the composed operation.
 *
 * @param consumer The operation which consumes the result from this operation
 * @return A composed {@code LongConsumer2} that first applies this predicate to its input, and then consumes the
 * result using the given {@code BooleanConsumer}.
 * @throws NullPointerException If given argument is {@code null}
 */
@Nonnull
default LongConsumer2 consume(@Nonnull final BooleanConsumer consumer) {
    Objects.requireNonNull(consumer);
    return (value) -> consumer.accept(test(value));
}
/**
 * Returns a {@link LongPredicate2} that yields the logical complement of this predicate's result.
 *
 * @return A {@code LongPredicate2} that represents the logical negation of this one.
 */
@Nonnull
default LongPredicate2 negate() {
    // invert the outcome of this predicate for every input
    return input -> !test(input);
}
/**
 * Returns a composed {@link LongPredicate2} that represents a short-circuiting logical AND of this predicate and
 * another. When evaluating the composed predicate, if this predicate is {@code false}, then the {@code other}
 * predicate is not evaluated.
 * <p>
 * Any exceptions thrown during evaluation of either predicate is relayed to the caller; if evaluation of this
 * {@code LongPredicate2} throws an exception, the {@code other} predicate will not be evaluated.
 *
 * @param other A {@code LongPredicate2} that will be logically-ANDed with this one
 * @return A composed {@code LongPredicate2} that represents the short-circuiting logical AND of this predicate and
 * the {@code other} predicate.
 * @throws NullPointerException If given argument is {@code null}
 * @see #or(LongPredicate)
 * @see #xor(LongPredicate)
 */
@Nonnull
default LongPredicate2 and(@Nonnull final LongPredicate other) {
    Objects.requireNonNull(other);
    // this predicate is evaluated first; other is only consulted when it holds
    return input -> test(input) ? other.test(input) : false;
}
/**
 * Returns a composed {@link LongPredicate2} that represents a short-circuiting logical OR of this predicate and
 * another. When evaluating the composed predicate, if this predicate is {@code true}, then the {@code other}
 * predicate is not evaluated.
 * <p>
 * Any exceptions thrown during evaluation of either predicate is relayed to the caller; if evaluation of this
 * {@code LongPredicate2} throws an exception, the {@code other} predicate will not be evaluated.
 *
 * @param other A {@code LongPredicate2} that will be logically-ORed with this one
 * @return A composed {@code LongPredicate2} that represents the short-circuiting logical OR of this predicate and
 * the {@code other} predicate.
 * @throws NullPointerException If given argument is {@code null}
 * @see #and(LongPredicate)
 * @see #xor(LongPredicate)
 */
@Nonnull
default LongPredicate2 or(@Nonnull final LongPredicate other) {
    Objects.requireNonNull(other);
    // this predicate is evaluated first; other is only consulted when it fails
    return input -> test(input) ? true : other.test(input);
}
/**
 * Returns a composed {@link LongPredicate2} that represents a logical XOR of this predicate and another. Any
 * exceptions thrown during evaluation of either predicate is relayed to the caller; if evaluation of this
 * {@code LongPredicate2} throws an exception, the {@code other} predicate will not be evaluated.
 *
 * @param other A {@code LongPredicate2} that will be logically-XORed with this one
 * @return A composed {@code LongPredicate2} that represents the logical XOR of this predicate and
 * the {@code other} predicate.
 * @throws NullPointerException If given argument is {@code null}
 * @see #and(LongPredicate)
 * @see #or(LongPredicate)
 */
@Nonnull
default LongPredicate2 xor(@Nonnull final LongPredicate other) {
    Objects.requireNonNull(other);
    // true exactly when the two predicates disagree
    return input -> test(input) != other.test(input);
}
/**
 * Returns a memoized (caching) version of this {@link LongPredicate2}. Whenever it is called, the mapping between
 * the input parameter and the return value is preserved in a cache, making subsequent calls returning the memoized
 * value instead of computing the return value again.
 * <p>
 * Unless the predicate and therefore the used cache will be garbage-collected, it will keep all memoized values
 * forever.
 *
 * @return A memoized (caching) version of this {@code LongPredicate2}.
 * @implSpec This implementation does not allow the input parameter or return value to be {@code null} for the
 * resulting memoized predicate, as the cache used internally does not permit {@code null} keys or values.
 * @implNote The returned memoized predicate can be safely used concurrently from multiple threads which makes it
 * thread-safe.
 */
@Nonnull
default LongPredicate2 memoized() {
if (isMemoized()) {
// already a memoizing predicate; wrapping it again would add no benefit
return this;
} else {
final Map<Long, Boolean> cache = new ConcurrentHashMap<>();
final Object lock = new Object();
// intersection cast marks the lambda as Memoized so isMemoized() detects it
return (LongPredicate2 & Memoized) (value) -> {
final boolean returnValue;
// NOTE(review): the external lock serializes all lookups; presumably it guards
// against reentrant updates inside computeIfAbsent — confirm, otherwise the
// ConcurrentHashMap alone would suffice
synchronized (lock) {
returnValue = cache.computeIfAbsent(value, this::test);
}
return returnValue;
};
}
}
/**
 * Returns a composed {@link Predicate2} which represents this {@link LongPredicate2}. Thereby the primitive
 * input argument for this predicate is autoboxed. This method provides the possibility to use this
 * {@code LongPredicate2} with methods provided by the {@code JDK}.
 *
 * @return A composed {@code Predicate2} which represents this {@code LongPredicate2}.
 */
@Nonnull
default Predicate2<Long> boxed() {
    // explicit lambda; the Long argument is auto-unboxed before delegation
    return value -> test(value);
}
}
| |
package org.apache.lucene.analysis.tr;
import com.google.common.base.Stopwatch;
import zemberek.core.logging.Log;
import zemberek.core.text.TextUtil;
import zemberek.core.turkish.PrimaryPos;
import zemberek.core.turkish.StemAndEnding;
import zemberek.core.turkish.Turkish;
import zemberek.core.turkish.TurkishAlphabet;
import zemberek.morphology.analysis.RuleBasedAnalyzer;
import zemberek.morphology.analysis.SingleAnalysis;
import zemberek.morphology.analysis.WordAnalysis;
import zemberek.morphology.lexicon.RootLexicon;
import zemberek.morphology.morphotactics.InformalTurkishMorphotactics;
import zemberek.morphology.morphotactics.TurkishMorphotactics;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
* A variant of {@link zemberek.morphology.TurkishMorphology} simplified for a pre-tokenized input.
*/
public final class MyTurkishMorphology {

    private final RootLexicon lexicon;
    private final RuleBasedAnalyzer analyzer;
    private final TurkishMorphotactics morphotactics;

    private MyTurkishMorphology(MyTurkishMorphology.Builder builder) {
        this.lexicon = builder.lexicon;
        if (lexicon.isEmpty()) {
            Log.warn("TurkishMorphology class is being instantiated with empty root lexicon.");
        }
        // informal morphotactics additionally accepts colloquial word forms
        this.morphotactics = builder.informalAnalysis
                ? new InformalTurkishMorphotactics(this.lexicon)
                : new TurkishMorphotactics(this.lexicon);
        this.analyzer = builder.ignoreDiacriticsInAnalysis
                ? RuleBasedAnalyzer.ignoreDiacriticsInstance(morphotactics)
                : RuleBasedAnalyzer.instance(morphotactics);
    }

    /**
     * Creates an instance backed by the default root lexicon and logs the initialization time.
     *
     * @return a ready-to-use morphology instance.
     */
    public static MyTurkishMorphology createWithDefaults() {
        Stopwatch sw = Stopwatch.createStarted();
        MyTurkishMorphology instance = new MyTurkishMorphology.Builder().setLexicon(RootLexicon.getDefault()).build();
        Log.info("Initialized in %d ms.", sw.elapsed(TimeUnit.MILLISECONDS));
        return instance;
    }

    /**
     * Creates a stock {@link zemberek.morphology.TurkishMorphology} (note: not a
     * {@code MyTurkishMorphology}) backed by the given lexicon.
     *
     * @param lexicon root lexicon to use.
     * @return a new zemberek TurkishMorphology instance.
     */
    public static zemberek.morphology.TurkishMorphology create(RootLexicon lexicon) {
        return new zemberek.morphology.TurkishMorphology.Builder().setLexicon(lexicon).build();
    }

    public TurkishMorphotactics getMorphotactics() {
        return morphotactics;
    }

    /**
     * Analyzes a single (pre-tokenized) word. No caching is performed.
     *
     * @param word input word.
     * @return the word analysis; {@link WordAnalysis#EMPTY_INPUT_RESULT} if the input
     *     normalizes to an empty string.
     */
    public WordAnalysis analyze(String word) {
        return analyzeWithoutCache(word);
    }

    public RootLexicon getLexicon() {
        return lexicon;
    }

    /**
     * Normalizes a word for analysis: lower-cases using Turkish locale rules, normalizes
     * circumflexed characters, removes dots (unless the input consists only of dots, in which
     * case it is kept as-is) and normalizes apostrophe variants.
     *
     * @param word input word.
     * @return the normalized form of the word.
     */
    public static String normalizeForAnalysis(String word) {
        // TODO: This may cause problems for some foreign words with letter I.
        String s = word.toLowerCase(Turkish.LOCALE);
        s = TurkishAlphabet.INSTANCE.normalizeCircumflex(s);
        String noDot = s.replace(".", "");
        if (noDot.isEmpty()) {
            noDot = s;
        }
        return TextUtil.normalizeApostrophes(noDot);
    }

    /**
     * This should be the entry point to stemming.
     *
     * @param word a word to be analyzed.
     * @return the analyses of the word; empty when the word normalizes to an empty string,
     *     cannot be analyzed, or is only analyzable as an unknown dictionary item.
     */
    List<SingleAnalysis> analyzeList(String word) {
        String s = normalizeForAnalysis(word);
        if (s.isEmpty()) {
            Log.warn("Word is empty after normalization: [%s]", word);
            return Collections.emptyList();
        }
        List<SingleAnalysis> result;
        if (TurkishAlphabet.INSTANCE.containsApostrophe(s)) {
            s = TurkishAlphabet.INSTANCE.normalizeApostrophe(s);
            result = analyzeWordsWithApostrophe(s);
        } else {
            result = analyzer.analyze(s);
        }
        if (result.isEmpty()) {
            Log.warn("Could not analyze word: [%s]", word);
            return Collections.emptyList();
        }
        if (result.size() == 1 && result.get(0).getDictionaryItem().isUnknown()) {
            return Collections.emptyList();
        }
        return result;
    }

    /**
     * Analyzes a word without consulting any cache.
     *
     * @param word input word.
     * @return a WordAnalysis wrapping the (possibly empty) analysis results.
     */
    private WordAnalysis analyzeWithoutCache(String word) {
        String s = normalizeForAnalysis(word);
        if (s.isEmpty()) {
            Log.warn("Word is empty after normalization: [%s]", word);
            return WordAnalysis.EMPTY_INPUT_RESULT;
        }
        List<SingleAnalysis> result;
        if (TurkishAlphabet.INSTANCE.containsApostrophe(s)) {
            s = TurkishAlphabet.INSTANCE.normalizeApostrophe(s);
            result = analyzeWordsWithApostrophe(s);
        } else {
            result = analyzer.analyze(s);
        }
        if (result.isEmpty()) {
            Log.warn("Could not analyze word: [%s]", word);
            result = Collections.emptyList();
        }
        if (result.size() == 1 && result.get(0).getDictionaryItem().isUnknown()) {
            result = Collections.emptyList();
        }
        return new WordAnalysis(word, s, result);
    }

    /**
     * Analyzes a word containing a single apostrophe (e.g. proper-noun suffixation). The part
     * before the apostrophe is treated as the stem candidate; analyses of the apostrophe-free
     * word are kept when they are nouns matching that stem or carrying a P3sg morpheme.
     *
     * @param word word containing an apostrophe.
     * @return matching analyses; empty when the apostrophe position is invalid or nothing parses.
     */
    public List<SingleAnalysis> analyzeWordsWithApostrophe(String word) {
        int index = word.indexOf('\'');
        // apostrophe must be neither the first nor the last character
        if (index <= 0 || index == word.length() - 1) {
            return Collections.emptyList();
        }
        StemAndEnding se = new StemAndEnding(
                word.substring(0, index),
                word.substring(index + 1));
        String stem = TurkishAlphabet.INSTANCE.normalize(se.stem);
        String withoutQuote = word.replace("'", "");
        List<SingleAnalysis> noQuotesParses = analyzer.analyze(withoutQuote);
        if (noQuotesParses.size() == 0) {
            return Collections.emptyList();
        }
        // TODO: this is somewhat a hack. Correct here once we decide what to do about
        // words like "Hastanesi'ne". Should we accept Hastanesi or Hastane?
        return noQuotesParses.stream()
                .filter(
                        a -> a.getDictionaryItem().primaryPos == PrimaryPos.Noun &&
                                (a.containsMorpheme(TurkishMorphotactics.p3sg) || a.getStem().equals(stem)))
                .collect(Collectors.toList());
    }

    public static MyTurkishMorphology.Builder builder() {
        return new MyTurkishMorphology.Builder();
    }

    public static MyTurkishMorphology.Builder builder(RootLexicon lexicon) {
        return new MyTurkishMorphology.Builder().setLexicon(lexicon);
    }

    /** Fluent builder for {@link MyTurkishMorphology}. */
    public static class Builder {
        RootLexicon lexicon = new RootLexicon();
        boolean informalAnalysis = false;
        boolean ignoreDiacriticsInAnalysis = false;

        public MyTurkishMorphology.Builder setLexicon(RootLexicon lexicon) {
            this.lexicon = lexicon;
            return this;
        }

        public MyTurkishMorphology.Builder setLexicon(String... dictionaryLines) {
            this.lexicon = RootLexicon.fromLines(dictionaryLines);
            return this;
        }

        public MyTurkishMorphology.Builder useInformalAnalysis() {
            this.informalAnalysis = true;
            return this;
        }

        public MyTurkishMorphology.Builder ignoreDiacriticsInAnalysis() {
            this.ignoreDiacriticsInAnalysis = true;
            return this;
        }

        public MyTurkishMorphology build() {
            return new MyTurkishMorphology(this);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document;
import static com.google.common.base.StandardSystemProperty.JAVA_SPECIFICATION_VERSION;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeFalse;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Condition;
import org.apache.jackrabbit.oak.plugins.document.UpdateOp.Key;
import org.apache.jackrabbit.oak.plugins.document.util.Utils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
public class BasicDocumentStoreTest extends AbstractDocumentStoreTest {
private static final Logger LOG = LoggerFactory.getLogger(BasicDocumentStoreTest.class);
// Runs the shared document-store test suite against the store supplied by the given fixture.
public BasicDocumentStoreTest(DocumentStoreFixture dsf) {
super(dsf);
}
@Test
public void testAddAndRemove() {
    String id = this.getClass().getName() + ".testAddAndRemove";
    // clean up any leftover document from a previous run
    if (super.ds.find(Collection.NODES, id) != null) {
        super.ds.remove(Collection.NODES, id);
    }
    // creating a fresh document must succeed
    UpdateOp op = new UpdateOp(id, true);
    op.set("_id", id);
    assertTrue(super.ds.create(Collection.NODES, Collections.singletonList(op)));
    removeMe.add(id);
}
@Test
public void testAddAndRemoveJournalEntry() {
    // OAK-4021
    String id = this.getClass().getName() + ".testAddAndRemoveJournalEntry";
    // clean up any leftover journal entry from a previous run
    if (super.ds.find(Collection.JOURNAL, id) != null) {
        super.ds.remove(Collection.JOURNAL, id);
    }
    // creating a fresh journal entry must succeed
    UpdateOp op = new UpdateOp(id, true);
    op.set("_id", id);
    assertTrue(super.ds.create(Collection.JOURNAL, Collections.singletonList(op)));
}
@Test
public void testConditionalUpdate() {
String id = this.getClass().getName() + ".testConditionalUpdate";
// remove if present
NodeDocument nd = super.ds.find(Collection.NODES, id);
if (nd != null) {
super.ds.remove(Collection.NODES, id);
}
// fixture: one plain property and one revision-keyed map entry will exist with
// value "lock"; the "_qux"/"qux" counterparts will not exist
String existingProp = "_recoverylock";
String existingRevisionProp = "recoverylock";
String nonExistingProp = "_qux";
String nonExistingRevisionProp = "qux";
Revision r = new Revision(1, 1, 1);
// add
UpdateOp up = new UpdateOp(id, true);
up.set("_id", id);
up.set(existingProp, "lock");
up.setMapEntry(existingRevisionProp, r, "lock");
assertTrue(super.ds.create(Collection.NODES, Collections.singletonList(up)));
// updates
// conditions that hold (on non-existing properties) -> findAndUpdate succeeds
up = new UpdateOp(id, false);
up.set("_id", id);
up.notEquals(nonExistingProp, "none");
NodeDocument result = super.ds.findAndUpdate(Collection.NODES, up);
assertNotNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.equals(nonExistingProp, null);
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNotNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.notEquals(nonExistingRevisionProp, r, "none");
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNotNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.equals(nonExistingRevisionProp, r, null);
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNotNull(result);
// conditions that do not hold (existing properties have value "lock") ->
// findAndUpdate must return null and leave the document unchanged
up = new UpdateOp(id, false);
up.set("_id", id);
up.equals(existingProp, "none");
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.equals(existingProp, null);
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.equals(existingRevisionProp, r, "none");
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.equals(existingRevisionProp, r, null);
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.notEquals(existingProp, "lock");
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.equals(existingProp, null);
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.notEquals(existingRevisionProp, r, "lock");
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.equals(existingRevisionProp, r, null);
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNull(result);
// conditions that hold on the existing properties -> update is applied
up = new UpdateOp(id, false);
up.set("_id", id);
up.equals(existingProp, "lock");
up.set(existingProp, "none");
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNotNull(result);
up = new UpdateOp(id, false);
up.set("_id", id);
up.equals(existingRevisionProp, r, "lock");
up.setMapEntry(existingRevisionProp, r, "none");
result = super.ds.findAndUpdate(Collection.NODES, up);
assertNotNull(result);
removeMe.add(id);
}
@Test
public void testConditionalUpdateForbidden() {
String id = this.getClass().getName() + ".testConditionalupdateForbidden";
// remove if present
NodeDocument nd = super.ds.find(Collection.NODES, id);
if (nd != null) {
super.ds.remove(Collection.NODES, id);
}
// conditions are not allowed on document creation
try {
UpdateOp up = new UpdateOp(id, true);
up.set("_id", id);
up.equals("foo", "bar");
super.ds.create(Collection.NODES, Collections.singletonList(up));
fail("conditional create should fail");
}
catch (IllegalStateException expected) {
// reported by UpdateOp
}
// create the document unconditionally so the update calls below have a target
UpdateOp cup = new UpdateOp(id, true);
cup.set("_id", id);
assertTrue(super.ds.create(Collection.NODES, Collections.singletonList(cup)));
removeMe.add(id);
// conditions are also not allowed in createOrUpdate (single-op variant) ...
try {
UpdateOp up = new UpdateOp(id, false);
up.set("_id", id);
up.equals("foo", "bar");
super.ds.createOrUpdate(Collection.NODES, up);
fail("conditional createOrUpdate should fail");
}
catch (IllegalArgumentException expected) {
// reported by DocumentStore
}
// ... nor in the list variant of createOrUpdate ...
try {
UpdateOp up = new UpdateOp(id, false);
up.set("_id", id);
up.equals("foo", "bar");
super.ds.createOrUpdate(Collection.NODES, Collections.singletonList(up));
fail("conditional createOrUpdate should fail");
}
catch (IllegalArgumentException expected) {
// reported by DocumentStore
}
// ... nor in update
try {
UpdateOp up = new UpdateOp(id, false);
up.set("_id", id);
up.equals("foo", "bar");
super.ds.update(Collection.NODES, Collections.singletonList(id), up);
fail("conditional update should fail");
}
catch (IllegalArgumentException expected) {
// reported by DocumentStore
}
}
@Test
public void testMaxIdAscii() {
    // stores must support ASCII ids of at least 512 bytes
    int supported = testMaxId(true);
    assertTrue("needs to support keys of 512 bytes length, but only supports " + supported, supported >= 512);
}
@Test
public void testMaxIdNonAscii() {
    // only probes and logs the supported length; no minimum is asserted for non-ASCII ids
    testMaxId(false);
}
@Test
public void testLongId() {
    String id = "0:/" + generateId(2048, true);
    // reading an oversized id must simply report "not found"
    assertNull("find() with ultra-long id needs to return 'null'", super.ds.find(Collection.NODES, id));
    // the in-memory store has no id length limit, so only check persistent stores
    boolean isMemoryFixture = super.dsname.contains("Memory");
    if (!isMemoryFixture) {
        UpdateOp op = new UpdateOp(id, true);
        op.set("_id", id);
        assertFalse("create() with ultra-long id needs to fail", super.ds.create(Collection.NODES, Collections.singletonList(op)));
    }
}
// Binary-searches the maximum id length (in characters) that the store can persist
// and read back. Returns the largest length that round-tripped successfully.
private int testMaxId(boolean ascii) {
int min = 0;
int max = 32768;
int test = 0;
int last = 0;
while (max - min >= 2) {
test = (max + min) / 2;
String id = generateId(test, ascii);
// make sure it's gone before trying to create it
try {
super.ds.remove(Collection.NODES, id);
} catch (DocumentStoreException ignored) {
}
UpdateOp up = new UpdateOp(id, true);
up.set("_id", id);
boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
if (success) {
// check that we really can read it
NodeDocument findme = super.ds.find(Collection.NODES, id, 0);
assertNotNull("failed to retrieve previously stored document", findme);
assertEquals(id, findme.getId());
super.ds.remove(Collection.NODES, id);
// this length worked: continue searching the upper half
min = test;
last = test;
} else {
// this length failed: continue searching the lower half
max = test;
}
}
LOG.info("max " + (ascii ? "ASCII ('0')" : "non-ASCII (U+1F4A9)") + " id length for " + super.dsname + " was " + last);
return last;
}
@Test
public void testMaxProperty() {
// binary-search the maximum property value size the store accepts (result is only logged)
int min = 0;
int max = 1024 * 1024 * 8;
int test = 0;
int last = 0;
while (max - min >= 256) {
if (test == 0) {
test = max; // try largest first
} else {
test = (max + min) / 2;
}
String id = this.getClass().getName() + ".testMaxProperty-" + test;
String pval = generateString(test, true);
UpdateOp up = new UpdateOp(id, true);
up.set("_id", id);
up.set("foo", pval);
boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
if (success) {
// check that we really can read it
NodeDocument findme = super.ds.find(Collection.NODES, id, 0);
assertNotNull("failed to retrieve previously stored document", findme);
super.ds.remove(Collection.NODES, id);
// this size worked: continue searching the upper half
min = test;
last = test;
} else {
// this size failed: continue searching the lower half
max = test;
}
}
LOG.info("max prop length for " + super.dsname + " was " + last);
}
@Test
public void testInterestingPropLengths() throws UnsupportedEncodingException {
// representative property sizes around common storage thresholds
int lengths[] = { 1, 10, 100, 1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000, 10000, 11000, 12000, 13000, 14000,
15000, 16000, 20000 };
// first pass: ASCII-only property values
for (int test : lengths) {
String id = this.getClass().getName() + ".testInterestingPropLengths-" + test;
String pval = generateString(test, true);
UpdateOp up = new UpdateOp(id, true);
up.set("_id", id);
up.set("foo", pval);
super.ds.remove(Collection.NODES, id);
boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
assertTrue("failed to insert a document with property of length " + test + "(ASCII) in " + super.dsname, success);
super.ds.remove(Collection.NODES, id);
}
// second pass: potentially non-ASCII values (UTF-8 octet length exceeds char count)
for (int test : lengths) {
String id = this.getClass().getName() + ".testInterestingPropLengths-" + test;
String pval = generateString(test, false);
UpdateOp up = new UpdateOp(id, true);
up.set("_id", id);
up.set("foo", pval);
super.ds.remove(Collection.NODES, id);
boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
assertTrue("failed to insert a document with property of length " + test
+ " (potentially non-ASCII, actual octet length with UTF-8 encoding: " + pval.getBytes("UTF-8").length + ") in "
+ super.dsname, success);
// check that update works as well
if (success) {
try {
super.ds.findAndUpdate(Collection.NODES, up);
} catch (Exception ex) {
ex.printStackTrace(System.err);
fail("failed to update a document with property of length " + test
+ " (potentially non-ASCII, actual octet length with UTF-8 encoding: " + pval.getBytes("UTF-8").length + ") in "
+ super.dsname);
}
}
super.ds.remove(Collection.NODES, id);
}
}
@Test
public void testRepeatingUpdatesOnSQLServer() {
    // simulates two updates to trigger the off-by-one bug documented in OAK-3670
    String id = this.getClass().getName() + ".testRepeatingUpdatesOnSQLServer";
    // clean up any leftover document from a previous run
    if (super.ds.find(Collection.NODES, id) != null) {
        super.ds.remove(Collection.NODES, id);
    }
    UpdateOp op = new UpdateOp(id, true);
    op.set("_id", id);
    assertTrue(super.ds.create(Collection.NODES, Collections.singletonList(op)));
    removeMe.add(id);
    // first update: property close to the critical size
    op = new UpdateOp(id, false);
    op.set("_id", id);
    op.set("f0", generateConstantString(3000));
    super.ds.update(Collection.NODES, Collections.singletonList(id), op);
    // second update: pushes the document over the boundary
    op = new UpdateOp(id, false);
    op.set("_id", id);
    op.set("f1", generateConstantString(967));
    super.ds.update(Collection.NODES, Collections.singletonList(id), op);
    // the document must still be readable afterwards
    NodeDocument stored = super.ds.find(Collection.NODES, id, 0);
    assertNotNull(stored);
}
@Test
public void testModifiedMaxUpdateQuery() {
String id = this.getClass().getName() + ".testModifiedMaxUpdate";
// create a test node
UpdateOp up = new UpdateOp(id, true);
up.set("_id", id);
up.set("_modified", 1000L);
boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
assertTrue(success);
removeMe.add(id);
// update with smaller _modified
UpdateOp up2 = new UpdateOp(id, true);
up2.set("_id", id);
up2.max("_modified", 100L);
super.ds.findAndUpdate(Collection.NODES, up2);
super.ds.invalidateCache();
// this should find the document; will fail if the MAX operation wasn't applied to the indexed property
// the bound ids end in "...Updatd" / "...Updatf", which lexicographically bracket
// "...Update" — presumably query() uses exclusive bounds, so the range matches
// exactly the document created above (TODO confirm against DocumentStore.query contract)
String startId = this.getClass().getName() + ".testModifiedMaxUpdatd";
String endId = this.getClass().getName() + ".testModifiedMaxUpdatf";
List<NodeDocument> results = super.ds.query(Collection.NODES, startId, endId, "_modified", 1000, 1);
assertEquals("document not found, maybe indexed _modified property not properly updated", 1, results.size());
}
@Test
public void testModifiedMaxUpdateQuery2() {
    // test for https://issues.apache.org/jira/browse/OAK-4388
    String id = this.getClass().getName() + ".testModifiedMaxUpdate2";
    // create a test node with _modified = 1000
    UpdateOp up = new UpdateOp(id, true);
    up.set("_id", id);
    up.set("_modified", 1000L);
    boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
    assertTrue(success);
    removeMe.add(id);
    for (int i = 0; i < 25; i++) {
        // repeatedly attempt to lower _modified via MAX(100); this must be a no-op
        // since 100 < 1000
        UpdateOp up2 = new UpdateOp(id, true);
        up2.set("_id", id);
        up2.max("_modified", 100L);
        super.ds.findAndUpdate(Collection.NODES, up2);
        super.ds.invalidateCache();
        NodeDocument doc = super.ds.find(Collection.NODES, id, 0);
        // fail with a clear message instead of an NPE on getModified() below
        assertNotNull("document " + id + " not found (test iteration " + i + ")", doc);
        assertEquals("modified should not have been set back (test iteration " + i + ")", 1000, (long) doc.getModified());
    }
}
@Test
public void testModifyModified() {
    // https://issues.apache.org/jira/browse/OAK-2940
    String id = this.getClass().getName() + ".testModifyModified";
    // create a test node with _modified = 1000
    UpdateOp up = new UpdateOp(id, true);
    up.set("_id", id);
    up.set("_modified", 1000L);
    boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
    assertTrue(success);
    removeMe.add(id);
    // update with "max" operation: 2000 > 1000, so the value must be raised
    up = new UpdateOp(id, false);
    up.set("_id", id);
    up.max("_modified", 2000L);
    super.ds.update(Collection.NODES, Collections.singletonList(id), up);
    NodeDocument nd = super.ds.find(Collection.NODES, id, 0);
    assertNotNull(nd);
    // JUnit convention: expected value first
    assertEquals(2000L, ((Number) nd.get("_modified")).longValue());
    // update with plain "set" operation: lowers the value unconditionally
    up = new UpdateOp(id, false);
    up.set("_id", id);
    up.set("_modified", 1500L);
    super.ds.update(Collection.NODES, Collections.singletonList(id), up);
    nd = super.ds.find(Collection.NODES, id, 0);
    assertNotNull(nd);
    assertEquals(1500L, ((Number) nd.get("_modified")).longValue());
}
@Test
public void testModifyDeletedOnce() {
    // https://issues.apache.org/jira/browse/OAK-3852
    String id = this.getClass().getName() + ".testModifyDeletedOnce";
    // create a test node with _deletedOnce explicitly set to false
    UpdateOp up = new UpdateOp(id, true);
    up.set("_id", id);
    up.set(NodeDocument.DELETED_ONCE, Boolean.FALSE);
    boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
    assertTrue(success);
    removeMe.add(id);
    NodeDocument nd = super.ds.find(Collection.NODES, id, 0);
    assertNotNull(nd);
    Boolean dovalue = (Boolean) nd.get(NodeDocument.DELETED_ONCE);
    if (dovalue != null) {
        // RDB persistence does not distinguish null and false
        assertFalse(dovalue.booleanValue());
    }
    // flip the flag to true and verify it is persisted
    up = new UpdateOp(id, false);
    up.set("_id", id);
    up.set(NodeDocument.DELETED_ONCE, Boolean.TRUE);
    super.ds.update(Collection.NODES, Collections.singletonList(id), up);
    nd = super.ds.find(Collection.NODES, id, 0);
    assertNotNull(nd);
    assertNotNull(nd.get(NodeDocument.DELETED_ONCE));
    assertTrue(((Boolean) nd.get(NodeDocument.DELETED_ONCE)).booleanValue());
}
@Test
public void testInterestingStrings() {
    // see OAK-3683
    assumeFalse(dsf instanceof DocumentStoreFixture.MongoFixture
            && JAVA_SPECIFICATION_VERSION.value().equals("1.8"));
    // each entry is "testname:payload"; payloads cover control characters, quoting,
    // non-ASCII, supplementary-plane and broken-surrogate data
    String[] tests = new String[] { "simple:foo", "cr:a\n\b", "dquote:a\"b", "bs:a\\b", "euro:a\u201c", "gclef:\uD834\uDD1E",
            "tab:a\tb", "nul:a\u0000b", "brokensurrogate:\ud800" };
    for (String t : tests) {
        int pos = t.indexOf(":");
        String testname = t.substring(0, pos);
        String test = t.substring(pos + 1);
        String id = this.getClass().getName() + ".testInterestingStrings-" + testname;
        super.ds.remove(Collection.NODES, id);
        UpdateOp up = new UpdateOp(id, true);
        up.set("_id", id);
        up.set("foo", test);
        boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
        assertTrue("failed to insert a document with property value of " + test + " (" + testname + ") in " + super.dsname, success);
        // re-read from persistence
        super.ds.invalidateCache();
        NodeDocument nd = super.ds.find(Collection.NODES, id);
        // fail with a clear message instead of an NPE if the document is not found
        assertNotNull("failed to find stored document for " + testname + " in " + super.dsname, nd);
        assertEquals("failure to round-trip " + testname + " through " + super.dsname, test, nd.get("foo"));
        super.ds.remove(Collection.NODES, id);
    }
}
@Test
public void testCreatePartialFailure() {
String bid = this.getClass().getName() + ".testCreatePartialFailure-";
int cnt = 10;
assertTrue(cnt > 8);
// clear repo
for (int i = 0; i < cnt; i++) {
super.ds.remove(Collection.NODES, bid + i);
removeMe.add(bid + i);
}
// create one of the test nodes up front so the batch create below must fail on it
int pre = cnt / 2;
UpdateOp up = new UpdateOp(bid + pre, true);
up.set("_id", bid + pre);
up.set("foo", "bar");
assertTrue(super.ds.create(Collection.NODES, Collections.singletonList(up)));
// batch create
Set<String> toCreate = new HashSet<String>();
Set<String> toCreateFailEarly = new HashSet<String>();
List<UpdateOp> ups = new ArrayList<UpdateOp>();
for (int i = 0; i < cnt; i++) {
UpdateOp op = new UpdateOp(bid + i, true);
op.set("_id", bid + i);
op.set("foo", "qux");
ups.add(op);
if (i != pre) {
toCreate.add(bid + i);
}
if (i < pre) {
toCreateFailEarly.add(bid + i);
}
}
// the batch contains an already-existing id, so create must report failure
assertFalse(super.ds.create(Collection.NODES, ups));
// check how many nodes are there
Set<String> created = new HashSet<String>();
for (int i = 0; i < cnt; i++) {
boolean present = null != super.ds.find(Collection.NODES, bid + i, 0);
if (i == pre && !present) {
fail(super.dsname + ": batch update removed previously existing node " + (bid + i));
} else if (present && i != pre) {
created.add(bid + i);
}
}
// diagnostics: classify the store's partial-failure behavior (atomic,
// best-effort, fail-fast, or something else) — informational only
toCreate.removeAll(created);
if (created.isEmpty()) {
LOG.info(super.dsname + ": create() apparently is atomic");
} else if (created.size() == toCreate.size()) {
LOG.info(super.dsname + ": create() apparently is best-effort");
} else if (created.equals(toCreateFailEarly)) {
LOG.info(super.dsname + ": create() stops at first failure");
} else {
LOG.info(super.dsname + ": create() created: " + created + ", missing: " + toCreate);
}
}
/**
 * remove() is best-effort: deleting a document that does not exist must not
 * throw.
 */
@Test
public void testDeleteNonExisting() {
    final String id = this.getClass().getName() + ".testDeleteNonExisting-" + UUID.randomUUID();
    // no create beforehand on purpose; the call must simply be a no-op
    ds.remove(Collection.NODES, id);
}
/**
 * Batch remove must tolerate ids that do not exist: the existing document is
 * removed while the non-existing id is silently ignored.
 */
@Test
public void testDeleteNonExistingMultiple() {
    String id = this.getClass().getName() + ".testDeleteNonExistingMultiple-" + UUID.randomUUID();
    // create a test node
    UpdateOp up = new UpdateOp(id + "-2", true);
    up.set("_id", id + "-2");
    boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
    assertTrue(success);
    // remove the existing node together with a non-existing id
    List<String> todelete = new ArrayList<String>();
    todelete.add(id + "-2");
    todelete.add(id);
    ds.remove(Collection.NODES, todelete);
    // id-2 should be removed; assertNull (instead of assertTrue(d == null))
    // reports the offending document on failure
    assertNull(ds.find(Collection.NODES, id + "-2"));
}
/**
 * Batch update with a mix of existing and non-existing ids: the existing
 * document must receive the update.
 */
@Test
public void testUpdateMultiple() {
    final String id = this.getClass().getName() + ".testUpdateMultiple";
    // start from a clean slate and create the test document
    super.ds.remove(Collection.NODES, id);
    final UpdateOp createOp = new UpdateOp(id, true);
    createOp.set("_id", id);
    final boolean created = super.ds.create(Collection.NODES, Collections.singletonList(createOp));
    assertTrue(created);
    removeMe.add(id);
    // update a non-existing document together with the existing one
    final List<String> ids = new ArrayList<String>();
    ids.add(id + "-" + UUID.randomUUID());
    ids.add(id);
    final UpdateOp updateOp = new UpdateOp(id, false);
    updateOp.set("foo", "bar");
    ds.update(Collection.NODES, ids, updateOp);
    // the existing document must carry the update, even after a cache flush
    ds.invalidateCache();
    final Document doc = ds.find(Collection.NODES, id);
    assertNotNull(doc);
    assertEquals(id, doc.getId());
    assertEquals("bar", doc.get("foo").toString());
}
// exercises set() vs. max() semantics for the "_modified" property on the
// SETTINGS collection
@Test
public void testUpdateModified() {
String id = this.getClass().getName() + ".testUpdateModified";
// create a test node
super.ds.remove(Collection.SETTINGS, id);
UpdateOp up = new UpdateOp(id, true);
up.set("_id", id);
boolean success = super.ds.create(Collection.SETTINGS, Collections.singletonList(up));
assertTrue(success);
removeMeSettings.add(id);
Document d = super.ds.find(Collection.SETTINGS, id);
Object m = d.get("_modified");
assertNull("_modified should be null until set", m);
// plain set() stores the value
up = new UpdateOp(id, true);
up.set("_id", id);
up.set("_modified", 123L);
super.ds.findAndUpdate(Collection.SETTINGS, up);
d = super.ds.find(Collection.SETTINGS, id);
m = d.get("_modified");
assertNotNull("_modified should now be != null", m);
assertEquals("123", m.toString());
// max() with a smaller value must not lower the stored value
up = new UpdateOp(id, true);
up.set("_id", id);
up.max("_modified", 122L);
super.ds.findAndUpdate(Collection.SETTINGS, up);
d = super.ds.find(Collection.SETTINGS, id);
m = d.get("_modified");
assertNotNull("_modified should now be != null", m);
assertEquals("123", m.toString());
// max() with a larger value must raise the stored value
up = new UpdateOp(id, true);
up.set("_id", id);
up.max("_modified", 124L);
super.ds.findAndUpdate(Collection.SETTINGS, up);
ds.invalidateCache();
d = super.ds.find(Collection.SETTINGS, id);
m = d.get("_modified");
assertNotNull("_modified should now be != null", m);
assertEquals("124", m.toString());
}
/**
 * Range queries must honor the result limit and return documents in key
 * order.
 */
@Test
public void testQuery() {
    final String base = this.getClass().getName() + ".testQuery-";
    // insert ten documents with ids base+0 .. base+9
    for (int i = 0; i < 10; i++) {
        final String id = base + i;
        final UpdateOp op = new UpdateOp(id, true);
        op.set("_id", id);
        final boolean created = super.ds.create(Collection.NODES, Collections.singletonList(op));
        assertTrue("document with " + id + " not created", created);
        removeMe.add(id);
    }
    // a limit of 5 returns exactly the first five documents of the range
    List<String> keys = getKeys(ds.query(Collection.NODES, base, base + "A", 5));
    assertEquals(5, keys.size());
    assertTrue(keys.contains(base + "4"));
    assertFalse(keys.contains(base + "5"));
    // a generous limit returns all ten
    keys = getKeys(ds.query(Collection.NODES, base, base + "A", 20));
    assertEquals(10, keys.size());
    assertTrue(keys.contains(base + "0"));
    assertTrue(keys.contains(base + "9"));
}
/**
 * Indexed queries on the binary flag must only return documents where the
 * flag is set.
 */
@Test
public void testQueryBinary() {
    final String base = this.getClass().getName() + ".testQueryBinary-";
    // insert ten documents; odd indexes get the binary flag set to 1
    for (int i = 0; i < 10; i++) {
        final String id = base + i;
        final UpdateOp op = new UpdateOp(id, true);
        op.set("_id", id);
        op.set(NodeDocument.HAS_BINARY_FLAG, i % 2L);
        final boolean created = super.ds.create(Collection.NODES, Collections.singletonList(op));
        assertTrue("document with " + id + " not created", created);
        removeMe.add(id);
    }
    // only the five flagged documents may be returned
    final List<String> keys = getKeys(ds.query(Collection.NODES, base, base + "Z", NodeDocument.HAS_BINARY_FLAG,
            NodeDocument.HAS_BINARY_VAL, 1000));
    assertEquals(5, keys.size());
    assertTrue(keys.contains(base + "1"));
    assertFalse(keys.contains(base + "0"));
}
/**
 * Indexed queries on the deleted-once flag must only return documents where
 * the flag is {@code true}.
 */
@Test
public void testQueryDeletedOnce() {
    final String base = this.getClass().getName() + ".testQueryDeletedOnce-";
    // insert ten documents; even indexes get DELETED_ONCE == true
    for (int i = 0; i < 10; i++) {
        final String id = base + i;
        final UpdateOp op = new UpdateOp(id, true);
        op.set("_id", id);
        op.set(NodeDocument.DELETED_ONCE, Boolean.valueOf(i % 2 == 0));
        final boolean created = super.ds.create(Collection.NODES, Collections.singletonList(op));
        assertTrue("document with " + id + " not created", created);
        removeMe.add(id);
    }
    // only the five documents with the flag set may be returned
    final List<String> keys = getKeys(ds.query(Collection.NODES, base, base + "Z", NodeDocument.DELETED_ONCE,
            1L, 1000));
    assertEquals(5, keys.size());
    assertTrue(keys.contains(base + "0"));
    assertFalse(keys.contains(base + "1"));
}
// verifies that range queries order ids by their UTF-16 code units: for
// every printable US-ASCII character c, only the "base/c" ids (not the
// sibling "base" + c ids) must fall into the queried range, in exact order
@Test
public void testQueryCollation() {
// create ten documents
String base = "2:/" + this.getClass().getName() + ".testQueryCollation";
List<UpdateOp> creates = new ArrayList<UpdateOp>();
List<String> expected = new ArrayList<String>();
// test US-ASCII except control characters
for (char c : "!\"#$%&'()*+,-./0123456789:;<=>?@AZ[\\]^_`az{|}~".toCharArray()) {
// sibling outside the queried range ("base" + c sorts around "base/")
String id = base + c;
UpdateOp up = new UpdateOp(id, true);
up.set("_id", id);
creates.add(up);
removeMe.add(id);
// document inside the queried range ("base/" + c)
id = base + "/" + c;
up = new UpdateOp(id, true);
up.set("_id", id);
creates.add(up);
expected.add(id);
removeMe.add(id);
}
boolean success = super.ds.create(Collection.NODES, creates);
assertTrue("documents not created", success);
List<String> result = getKeys(ds.query(Collection.NODES, base + "/", base + "0", 1000));
// result must contain nothing beyond the expected ids ...
List<String> diff = new ArrayList<String>();
diff.addAll(result);
diff.removeAll(expected);
if (!diff.isEmpty()) {
fail("unexpected query results (broken collation handling in persistence?): " + diff);
}
// ... and none of the expected ids may be missing ...
diff = new ArrayList<String>();
diff.addAll(expected);
diff.removeAll(result);
if (!diff.isEmpty()) {
fail("missing query results (broken collation handling in persistence?): " + diff);
}
// ... and the ordering must match exactly
assertEquals("incorrect result ordering in query result (broken collation handling in persistence?)", expected, result);
}
/**
 * Extracts the ids of the given documents, preserving their order.
 */
private List<String> getKeys(List<NodeDocument> docs) {
    final List<String> ids = new ArrayList<String>(docs.size());
    for (int i = 0; i < docs.size(); i++) {
        ids.add(docs.get(i).getId());
    }
    return ids;
}
/**
 * Generates a test id of the given length.
 *
 * @param length number of ASCII characters (ascii == true) or of non-BMP
 *            code points (ascii == false) to generate
 * @param ascii when {@code true} the id consists of '0' characters, otherwise
 *            of U+1F4A9 code points, each of which occupies two UTF-16 chars
 * @return the generated id
 */
private static String generateId(int length, boolean ascii) {
    // StringBuilder instead of StringBuffer: no synchronization needed for a
    // method-local buffer; presized to avoid reallocation
    StringBuilder sb = new StringBuilder(ascii ? length : 2 * length);
    for (int i = 0; i < length; i++) {
        if (ascii) {
            sb.append('0');
        } else {
            sb.append(Character.toChars(0x1F4A9));
        }
    }
    return sb.toString();
}
/**
 * Makes sure the _collisionsModCount property, when the store maintains it,
 * is bumped by updates to the collisions map but left alone by unrelated
 * updates.
 */
@Test
public void testCollisionsModCount() {
    final String id = this.getClass().getName() + ".testCollisionsModCount";
    // start clean
    NodeDocument doc = super.ds.find(Collection.NODES, id);
    if (doc != null) {
        super.ds.remove(Collection.NODES, id);
    }
    // create a document carrying one collisions map entry
    final Revision revision = Revision.fromString("r0-0-1");
    final UpdateOp createOp = new UpdateOp(id, true);
    createOp.set("_id", id);
    createOp.setMapEntry("_collisions", revision, "foo");
    assertTrue(super.ds.create(Collection.NODES, Collections.singletonList(createOp)));
    removeMe.add(id);
    doc = super.ds.find(Collection.NODES, id);
    assertNotNull(doc);
    final Number cmc = (Number) doc.get("_collisionsModCount");
    // a null mod count means the store does not maintain it; nothing to check then
    if (cmc != null) {
        // updating the collisions map must bump the mod count
        final Revision revision2 = Revision.fromString("r0-0-2");
        final UpdateOp collisionUpdate = new UpdateOp(id, false);
        collisionUpdate.set("_id", id);
        collisionUpdate.setMapEntry("_collisions", revision2, "foobar");
        assertNotNull(super.ds.findAndUpdate(Collection.NODES, collisionUpdate));
        doc = super.ds.find(Collection.NODES, id, 0);
        assertNotNull(doc);
        final Number cmc2 = (Number) doc.get("_collisionsModCount");
        assertNotNull(cmc2);
        assertTrue(cmc2.longValue() > cmc.longValue());
        // an update not touching the collisions map must leave it unchanged
        final UpdateOp unrelatedUpdate = new UpdateOp(id, false);
        unrelatedUpdate.set("_id", id);
        unrelatedUpdate.set("foo", "bar");
        assertNotNull(super.ds.findAndUpdate(Collection.NODES, unrelatedUpdate));
        doc = super.ds.find(Collection.NODES, id, 0);
        assertNotNull(doc);
        final Number cmc3 = (Number) doc.get("_collisionsModCount");
        assertNotNull(cmc3);
        assertTrue(cmc2.longValue() == cmc3.longValue());
    }
}
/**
 * Every store implementation must report its type in the metadata map.
 */
@Test
public void description() throws Exception {
    final Map<String, String> metadata = ds.getMetadata();
    assertNotNull(metadata.get("type"));
}
/**
 * Determines the clock difference between client and backend and logs it;
 * only checks that the call succeeds.
 */
@Test
public void testServerTimeDiff() throws Exception {
    final String rootId = "0:/";
    // make sure the root document exists; create() may be a no-op if it does
    final UpdateOp createRoot = new UpdateOp(rootId, true);
    createRoot.set("_id", rootId);
    super.ds.create(Collection.NODES, Collections.singletonList(createRoot));
    removeMe.add(rootId);
    final long diffMillis = super.ds.determineServerTimeDifferenceMillis();
    LOG.info("Server time difference on " + super.dsname + ": " + diffMillis + "ms");
}
// batch remove with a per-document condition on _modified: only documents
// whose condition matches get removed
@Test
public void removeWithCondition() throws Exception {
// remember pre-existing documents so they can be excluded from the final check
Set<String> existingDocs = new HashSet<String>();
for (NodeDocument doc : Utils.getAllDocuments(ds)) {
existingDocs.add(doc.getPath());
}
List<UpdateOp> docs = Lists.newArrayList();
docs.add(newDocument("/foo", 100));
removeMe.add(Utils.getIdFromPath("/foo"));
docs.add(newDocument("/bar", 200));
removeMe.add(Utils.getIdFromPath("/bar"));
docs.add(newDocument("/baz", 300));
removeMe.add(Utils.getIdFromPath("/baz"));
ds.create(Collection.NODES, docs);
for (UpdateOp op : docs) {
assertNotNull(ds.find(Collection.NODES, op.getId()));
}
Map<String, Map<Key, Condition>> toRemove = Maps.newHashMap();
removeDocument(toRemove, "/foo", 100); // matches
removeDocument(toRemove, "/bar", 300); // modified differs
removeDocument(toRemove, "/qux", 100); // does not exist
removeDocument(toRemove, "/baz", 300); // matches
int removed = ds.remove(Collection.NODES, toRemove);
// only /foo and /baz satisfy their condition
assertEquals(2, removed);
// /bar did not match its condition and must still exist
assertNotNull(ds.find(Collection.NODES, Utils.getIdFromPath("/bar")));
// no other document created by this test may survive
for (NodeDocument doc : Utils.getAllDocuments(ds)) {
if (!doc.getPath().equals("/bar") && !existingDocs.contains(doc.getPath())) {
fail("document must not exist: " + doc.getId());
}
}
}
/**
 * Removing a document must also evict it from the document cache.
 */
@Test
public void removeInvalidatesCache() throws Exception {
    final String id = Utils.getIdFromPath("/foo");
    removeMe.add(id);
    ds.create(Collection.NODES, Collections.singletonList(newDocument("/foo", 1)));
    // unconditional remove: an empty condition map always matches
    final Map<Key, Condition> noConditions = Collections.emptyMap();
    ds.remove(Collection.NODES, Collections.singletonMap(id, noConditions));
    // the cache entry must be gone after the remove
    assertNull(ds.getIfCached(Collection.NODES, id));
}
// OAK-3932
/**
 * A negative lookup must not leave an entry in the document cache.
 */
@Test
public void getIfCachedNonExistingDocument() throws Exception {
    final String id = Utils.getIdFromPath("/foo");
    // the document does not exist in the store ...
    assertNull(ds.find(Collection.NODES, id));
    // ... so the preceding find() must not have populated the cache
    assertNull(ds.getIfCached(Collection.NODES, id));
}
/**
 * Builds a creating {@link UpdateOp} for the given path with the given
 * {@code _modified} value.
 */
private UpdateOp newDocument(String path, long modified) {
    final String id = Utils.getIdFromPath(path);
    final UpdateOp update = new UpdateOp(id, true);
    update.set(NodeDocument.MODIFIED_IN_SECS, modified);
    update.set(Document.ID, id);
    return update;
}
/**
 * Registers {@code path} in {@code toRemove} with a condition requiring the
 * document's {@code _modified} value to equal the given one.
 */
private void removeDocument(Map<String, Map<Key, Condition>> toRemove,
        String path,
        long modified) {
    final Key modifiedKey = new Key(NodeDocument.MODIFIED_IN_SECS, null);
    final Condition mustEqual = Condition.newEqualsCondition(modified);
    toRemove.put(Utils.getIdFromPath(path), Collections.singletonMap(modifiedKey, mustEqual));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.operators.deduplicate.window;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.runtime.checkpoint.OperatorSubtaskState;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness;
import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.runtime.keyselector.RowDataKeySelector;
import org.apache.flink.table.runtime.operators.window.slicing.SlicingWindowOperator;
import org.apache.flink.table.runtime.typeutils.PagedTypeSerializer;
import org.apache.flink.table.runtime.typeutils.RowDataSerializer;
import org.apache.flink.table.runtime.util.GenericRowRecordSortComparator;
import org.apache.flink.table.runtime.util.RowDataHarnessAssertor;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;
import org.apache.flink.table.utils.HandwrittenSelectorUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.ConcurrentLinkedQueue;
import static org.apache.flink.table.runtime.util.StreamRecordUtils.insertRecord;
import static org.apache.flink.table.runtime.util.TimeWindowUtil.toUtcTimestampMills;
import static org.junit.Assert.assertEquals;
/**
* Tests for window deduplicate operators created by {@link
* RowTimeWindowDeduplicateOperatorBuilder}.
*/
@RunWith(Parameterized.class)
public class RowTimeWindowDeduplicateOperatorTest {

    /** Input schema: f0 = key (string), f1 = rowtime (long), f2 = window end (long). */
    private static final RowType INPUT_ROW_TYPE =
            new RowType(
                    Arrays.asList(
                            new RowType.RowField("f0", new VarCharType(Integer.MAX_VALUE)),
                            new RowType.RowField("f1", new BigIntType()),
                            new RowType.RowField("f2", new BigIntType())));

    private static final RowDataSerializer INPUT_ROW_SER = new RowDataSerializer(INPUT_ROW_TYPE);

    /** Keys records by the first field (f0). */
    private static final RowDataKeySelector KEY_SELECTOR =
            HandwrittenSelectorUtil.getRowDataSelector(
                    new int[] {0}, INPUT_ROW_TYPE.getChildren().toArray(new LogicalType[0]));

    private static final PagedTypeSerializer<RowData> KEY_SER =
            (PagedTypeSerializer<RowData>) KEY_SELECTOR.getProducedType().toSerializer();

    /** Index of the window-end attribute (f2) in the input row. */
    private static final int WINDOW_END_INDEX = 2;

    private static final LogicalType[] OUTPUT_TYPES =
            new LogicalType[] {
                new VarCharType(Integer.MAX_VALUE), new BigIntType(), new BigIntType()
            };

    private static final TypeSerializer<RowData> OUT_SERIALIZER =
            new RowDataSerializer(OUTPUT_TYPES);

    /** Compares harness output sorted by the key column, so cross-key order is irrelevant. */
    private static final RowDataHarnessAssertor ASSERTER =
            new RowDataHarnessAssertor(
                    OUTPUT_TYPES,
                    new GenericRowRecordSortComparator(0, new VarCharType(VarCharType.MAX_LENGTH)));

    private static final ZoneId UTC_ZONE_ID = ZoneId.of("UTC");
    private static final ZoneId SHANGHAI_ZONE_ID = ZoneId.of("Asia/Shanghai");

    /** Session time zone used to shift window boundaries; parameterized per run. */
    private final ZoneId shiftTimeZone;

    public RowTimeWindowDeduplicateOperatorTest(ZoneId shiftTimeZone) {
        this.shiftTimeZone = shiftTimeZone;
    }

    @Parameterized.Parameters(name = "TimeZone = {0}")
    public static Collection<Object[]> runMode() {
        return Arrays.asList(new Object[] {UTC_ZONE_ID}, new Object[] {SHANGHAI_ZONE_ID});
    }

    private static OneInputStreamOperatorTestHarness<RowData, RowData> createTestHarness(
            SlicingWindowOperator<RowData, ?> operator) throws Exception {
        return new KeyedOneInputStreamOperatorTestHarness<>(
                operator, KEY_SELECTOR, KEY_SELECTOR.getProducedType());
    }

    /**
     * Builds a row-time window deduplicate operator; {@code keepLastRow} selects whether the
     * first or the last row per key and window is retained. Shared by both tests to avoid the
     * previously duplicated builder chains.
     */
    private SlicingWindowOperator<RowData, ?> createOperator(boolean keepLastRow) {
        return RowTimeWindowDeduplicateOperatorBuilder.builder()
                .inputSerializer(INPUT_ROW_SER)
                .shiftTimeZone(shiftTimeZone)
                .keySerializer(KEY_SER)
                .keepLastRow(keepLastRow)
                .rowtimeIndex(1)
                .windowEndIndex(WINDOW_END_INDEX)
                .build();
    }

    /**
     * Feeds the out-of-order input shared by both tests: rows for "key1" and "key2" in windows
     * ending at 999, 1999 and 3999 (window ends shifted by {@link #shiftTimeZone}).
     */
    private void processOutOfOrderElements(
            OneInputStreamOperatorTestHarness<RowData, RowData> testHarness) throws Exception {
        testHarness.processElement(
                insertRecord("key2", 1L, toUtcTimestampMills(999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key2", 4L, toUtcTimestampMills(999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key2", 5L, toUtcTimestampMills(999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key2", 3L, toUtcTimestampMills(999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key2", 1002L, toUtcTimestampMills(1999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key2", 3007L, toUtcTimestampMills(3999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key2", 3008L, toUtcTimestampMills(3999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key2", 3001L, toUtcTimestampMills(3999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key1", 2L, toUtcTimestampMills(999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key1", 1L, toUtcTimestampMills(999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key1", 3L, toUtcTimestampMills(999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key1", 3L, toUtcTimestampMills(999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key1", 1004L, toUtcTimestampMills(1999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key1", 1006L, toUtcTimestampMills(1999L, shiftTimeZone)));
        testHarness.processElement(
                insertRecord("key1", 1007L, toUtcTimestampMills(1999L, shiftTimeZone)));
    }

    @Test
    public void testRowTimeWindowDeduplicateKeepFirstRow() throws Exception {
        SlicingWindowOperator<RowData, ?> operator = createOperator(false);
        OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
                createTestHarness(operator);
        testHarness.setup(OUT_SERIALIZER);
        testHarness.open();

        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        // add elements out-of-order
        processOutOfOrderElements(testHarness);

        // first window fires: the row with the smallest rowtime per key wins
        testHarness.processWatermark(new Watermark(999));
        expectedOutput.add(insertRecord("key1", 1L, toUtcTimestampMills(999L, shiftTimeZone)));
        expectedOutput.add(insertRecord("key2", 1L, toUtcTimestampMills(999L, shiftTimeZone)));
        expectedOutput.add(new Watermark(999));
        ASSERTER.assertOutputEqualsSorted(
                "Output was not correct.", expectedOutput, testHarness.getOutput());

        testHarness.processWatermark(new Watermark(1999));
        expectedOutput.add(insertRecord("key1", 1004L, toUtcTimestampMills(1999L, shiftTimeZone)));
        expectedOutput.add(insertRecord("key2", 1002L, toUtcTimestampMills(1999L, shiftTimeZone)));
        expectedOutput.add(new Watermark(1999));
        ASSERTER.assertOutputEqualsSorted(
                "Output was not correct.", expectedOutput, testHarness.getOutput());

        // do a snapshot, close and restore again
        testHarness.prepareSnapshotPreBarrier(0L);
        OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
        testHarness.close();
        expectedOutput.clear();

        testHarness = createTestHarness(operator);
        testHarness.setup(OUT_SERIALIZER);
        testHarness.initializeState(snapshot);
        testHarness.open();

        // the last window fires from restored state
        testHarness.processWatermark(new Watermark(3999));
        expectedOutput.add(insertRecord("key2", 3001L, toUtcTimestampMills(3999L, shiftTimeZone)));
        expectedOutput.add(new Watermark(3999));
        ASSERTER.assertOutputEqualsSorted(
                "Output was not correct.", expectedOutput, testHarness.getOutput());

        // late element, should be dropped
        testHarness.processElement(
                insertRecord("key2", 3001L, toUtcTimestampMills(3500L, shiftTimeZone)));
        testHarness.processWatermark(new Watermark(4999));
        expectedOutput.add(new Watermark(4999));
        ASSERTER.assertOutputEqualsSorted(
                "Output was not correct.", expectedOutput, testHarness.getOutput());
        assertEquals(1, operator.getNumLateRecordsDropped().getCount());

        testHarness.close();
    }

    @Test
    public void testRowTimeWindowDeduplicateKeepLastRow() throws Exception {
        SlicingWindowOperator<RowData, ?> operator = createOperator(true);
        OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
                createTestHarness(operator);
        testHarness.setup(OUT_SERIALIZER);
        testHarness.open();

        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        // add elements out-of-order
        processOutOfOrderElements(testHarness);

        // first window fires: the row with the largest rowtime per key wins
        testHarness.processWatermark(new Watermark(999));
        expectedOutput.add(insertRecord("key1", 3L, toUtcTimestampMills(999L, shiftTimeZone)));
        expectedOutput.add(insertRecord("key2", 5L, toUtcTimestampMills(999L, shiftTimeZone)));
        expectedOutput.add(new Watermark(999));
        ASSERTER.assertOutputEqualsSorted(
                "Output was not correct.", expectedOutput, testHarness.getOutput());

        testHarness.processWatermark(new Watermark(1999));
        expectedOutput.add(insertRecord("key1", 1007L, toUtcTimestampMills(1999L, shiftTimeZone)));
        expectedOutput.add(insertRecord("key2", 1002L, toUtcTimestampMills(1999L, shiftTimeZone)));
        expectedOutput.add(new Watermark(1999));
        ASSERTER.assertOutputEqualsSorted(
                "Output was not correct.", expectedOutput, testHarness.getOutput());

        // do a snapshot, close and restore again
        testHarness.prepareSnapshotPreBarrier(0L);
        OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
        testHarness.close();
        expectedOutput.clear();

        testHarness = createTestHarness(operator);
        testHarness.setup(OUT_SERIALIZER);
        testHarness.initializeState(snapshot);
        testHarness.open();

        // the last window fires from restored state
        testHarness.processWatermark(new Watermark(3999));
        expectedOutput.add(insertRecord("key2", 3008L, toUtcTimestampMills(3999L, shiftTimeZone)));
        expectedOutput.add(new Watermark(3999));
        ASSERTER.assertOutputEqualsSorted(
                "Output was not correct.", expectedOutput, testHarness.getOutput());

        // late element, should be dropped
        testHarness.processElement(
                insertRecord("key2", 3001L, toUtcTimestampMills(3500L, shiftTimeZone)));
        testHarness.processWatermark(new Watermark(4999));
        expectedOutput.add(new Watermark(4999));
        ASSERTER.assertOutputEqualsSorted(
                "Output was not correct.", expectedOutput, testHarness.getOutput());
        assertEquals(1, operator.getNumLateRecordsDropped().getCount());

        testHarness.close();
    }
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.pmml.compiler.commons.utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import javax.xml.bind.JAXBException;
import org.apache.commons.lang3.RandomStringUtils;
import org.dmg.pmml.DataField;
import org.dmg.pmml.DataType;
import org.dmg.pmml.FieldName;
import org.dmg.pmml.MathContext;
import org.dmg.pmml.MiningField;
import org.dmg.pmml.MiningFunction;
import org.dmg.pmml.Model;
import org.dmg.pmml.OpType;
import org.dmg.pmml.OutputField;
import org.dmg.pmml.PMML;
import org.dmg.pmml.ResultFeature;
import org.dmg.pmml.Target;
import org.dmg.pmml.Targets;
import org.dmg.pmml.mining.MiningModel;
import org.dmg.pmml.mining.Segment;
import org.junit.Test;
import org.xml.sax.SAXException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.kie.pmml.compiler.commons.utils.KiePMMLUtil.MODELNAME_TEMPLATE;
import static org.kie.pmml.compiler.commons.utils.KiePMMLUtil.SEGMENTID_TEMPLATE;
import static org.kie.pmml.compiler.commons.utils.KiePMMLUtil.SEGMENTMODELNAME_TEMPLATE;
import static org.kie.pmml.compiler.commons.utils.KiePMMLUtil.TARGETFIELD_TEMPLATE;
import static org.kie.pmml.compiler.commons.utils.KiePMMLUtil.getMiningTargetFields;
import static org.kie.test.util.filesystem.FileUtils.getFileInputStream;
public class KiePMMLUtilTest {
// names of PMML test resources loaded from the test classpath via
// getFileInputStream(); each exercises a different "missing information"
// scenario suggested by its name
private static final String NO_MODELNAME_SAMPLE_NAME = "NoModelNameSample.pmml";
private static final String NO_MODELNAME_NO_SEGMENTID_SAMPLE_NAME = "NoModelNameNoSegmentIdSample.pmml";
private static final String NO_MODELNAME_NO_SEGMENT_ID_NOSEGMENT_TARGET_FIELD_SAMPLE =
"NoModelNameNoSegmentIdNoSegmentTargetFieldSample.pmml";
private static final String NO_TARGET_FIELD_SAMPLE = "NoTargetFieldSample.pmml";
private static final String MINING_WITH_SAME_NESTED_MODEL_NAMES = "MiningWithSameNestedModelNames.pmml";
/**
 * Each sample resource must load successfully from its string representation.
 */
@Test
public void loadString() throws IOException, JAXBException, SAXException {
    for (String sample : new String[]{NO_MODELNAME_SAMPLE_NAME,
            NO_MODELNAME_NO_SEGMENTID_SAMPLE_NAME,
            MINING_WITH_SAME_NESTED_MODEL_NAMES}) {
        commonLoadString(sample);
    }
}
/**
 * Each sample resource must load successfully from file.
 */
@Test
public void loadFile() throws JAXBException, IOException, SAXException {
    for (String sample : new String[]{NO_MODELNAME_SAMPLE_NAME,
            NO_MODELNAME_NO_SEGMENTID_SAMPLE_NAME,
            MINING_WITH_SAME_NESTED_MODEL_NAMES}) {
        commonLoadFile(sample);
    }
}
@Test
public void populateMissingModelName() throws Exception {
final InputStream inputStream = getFileInputStream(NO_MODELNAME_SAMPLE_NAME);
final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(inputStream);
final Model toPopulate = pmml.getModels().get(0);
// precondition: the sample model has no name
assertNull(toPopulate.getModelName());
KiePMMLUtil.populateMissingModelName(toPopulate, NO_MODELNAME_SAMPLE_NAME, 0);
assertNotNull(toPopulate.getModelName());
// the generated name must follow MODELNAME_TEMPLATE with the file name,
// the model's simple class name and the model index
String expected = String.format(MODELNAME_TEMPLATE,
NO_MODELNAME_SAMPLE_NAME,
toPopulate.getClass().getSimpleName(),
0);
assertEquals(expected, toPopulate.getModelName());
}
@Test
public void populateMissingMiningTargetField() throws Exception {
final InputStream inputStream = getFileInputStream(NO_TARGET_FIELD_SAMPLE);
final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(inputStream);
final Model toPopulate = pmml.getModels().get(0);
List<MiningField> miningTargetFields = getMiningTargetFields(toPopulate.getMiningSchema().getMiningFields());
// precondition: no target mining field and an unnamed target
assertTrue(miningTargetFields.isEmpty());
assertNull(toPopulate.getTargets().getTargets().get(0).getField());
KiePMMLUtil.populateMissingMiningTargetField(toPopulate, pmml.getDataDictionary().getDataFields());
// exactly one target mining field must have been added ...
miningTargetFields = getMiningTargetFields(toPopulate.getMiningSchema().getMiningFields());
assertEquals(1, miningTargetFields.size());
final MiningField targetField = miningTargetFields.get(0);
// ... whose name matches an existing data dictionary field ...
assertTrue(pmml.getDataDictionary()
.getDataFields()
.stream()
.anyMatch(dataField -> dataField.getName().equals(targetField.getName())));
// ... and the previously unnamed target now points at it
assertEquals(targetField.getName(), toPopulate.getTargets().getTargets().get(0).getField());
}
/**
 * For a model without a declared target, a synthetic target data field must
 * be created whose name follows {@code TARGETFIELD_TEMPLATE}.
 */
@Test
public void getTargetDataField() throws Exception {
    final InputStream inputStream = getFileInputStream(NO_TARGET_FIELD_SAMPLE);
    final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(inputStream);
    final Optional<DataField> optionalDataField =
            KiePMMLUtil.getTargetDataField(pmml.getModels().get(0));
    assertTrue(optionalDataField.isPresent());
    final String expected = String.format(TARGETFIELD_TEMPLATE, "golfing");
    assertEquals(expected, optionalDataField.get().getName().getValue());
}
@Test
public void getTargetDataType() {
// regression: the target data type follows the model's math context
MiningFunction miningFunction = MiningFunction.REGRESSION;
MathContext mathContext = MathContext.DOUBLE;
DataType retrieved = KiePMMLUtil.getTargetDataType(miningFunction, mathContext);
assertEquals(DataType.DOUBLE, retrieved);
mathContext = MathContext.FLOAT;
retrieved = KiePMMLUtil.getTargetDataType(miningFunction, mathContext);
assertEquals(DataType.FLOAT, retrieved);
// classification and clustering map to STRING regardless of math context
miningFunction = MiningFunction.CLASSIFICATION;
retrieved = KiePMMLUtil.getTargetDataType(miningFunction, mathContext);
assertEquals(DataType.STRING, retrieved);
miningFunction = MiningFunction.CLUSTERING;
retrieved = KiePMMLUtil.getTargetDataType(miningFunction, mathContext);
assertEquals(DataType.STRING, retrieved);
// all remaining mining functions are unmapped and must yield null
List<MiningFunction> notMappedMiningFunctions = Arrays.asList(MiningFunction.ASSOCIATION_RULES,
MiningFunction.MIXED,
MiningFunction.SEQUENCES,
MiningFunction.TIME_SERIES);
notMappedMiningFunctions.forEach(minFun -> assertNull(KiePMMLUtil.getTargetDataType(minFun, MathContext.DOUBLE)));
}
/**
 * Verifies the mining-function to target op-type mapping.
 */
@Test
public void getTargetOpType() {
    // regression maps to a continuous target
    assertEquals(OpType.CONTINUOUS, KiePMMLUtil.getTargetOpType(MiningFunction.REGRESSION));
    // classification and clustering map to a categorical target
    assertEquals(OpType.CATEGORICAL, KiePMMLUtil.getTargetOpType(MiningFunction.CLASSIFICATION));
    assertEquals(OpType.CATEGORICAL, KiePMMLUtil.getTargetOpType(MiningFunction.CLUSTERING));
    // all remaining mining functions are unmapped and must yield null
    for (MiningFunction notMapped : Arrays.asList(MiningFunction.ASSOCIATION_RULES,
            MiningFunction.MIXED,
            MiningFunction.SEQUENCES,
            MiningFunction.TIME_SERIES)) {
        assertNull(KiePMMLUtil.getTargetOpType(notMapped));
    }
}
/**
 * The mining field derived from a data field must mirror its name and be
 * marked as TARGET.
 */
@Test
public void getTargetMiningField() {
    final DataField dataField = new DataField();
    dataField.setName(FieldName.create("FIELD_NAME"));
    final MiningField targetField = KiePMMLUtil.getTargetMiningField(dataField);
    assertEquals(dataField.getName().getValue(), targetField.getName().getValue());
    assertEquals(MiningField.UsageType.TARGET, targetField.getUsageType());
}
@Test
public void correctTargetFields() {
    final MiningField miningField = new MiningField(FieldName.create("FIELD_NAME"));
    final String targetName = "TARGET_NAME";
    // One target carries an explicit field name; the other is left blank.
    final Target namedTarget = new Target();
    namedTarget.setField(FieldName.create(targetName));
    final Target unnamedTarget = new Target();
    final Targets targets = new Targets();
    targets.addTargets(namedTarget, unnamedTarget);
    KiePMMLUtil.correctTargetFields(miningField, targets);
    // The explicitly-named target is left untouched ...
    assertEquals(targetName, namedTarget.getField().getValue());
    // ... while the unnamed one inherits the mining field's name.
    assertEquals(miningField.getName(), unnamedTarget.getField());
}
@Test
public void populateCorrectMiningModel() throws Exception {
    final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(
            getFileInputStream(NO_MODELNAME_NO_SEGMENT_ID_NOSEGMENT_TARGET_FIELD_SAMPLE));
    final Model model = pmml.getModels().get(0);
    assertTrue(model instanceof MiningModel);
    final MiningModel miningModel = (MiningModel) model;
    // Precondition: every segment is missing id, model name and target fields.
    for (Segment segment : miningModel.getSegmentation().getSegments()) {
        assertNull(segment.getId());
        assertNull(segment.getModel().getModelName());
        assertTrue(getMiningTargetFields(segment.getModel().getMiningSchema()).isEmpty());
    }
    KiePMMLUtil.populateCorrectMiningModel(miningModel);
    // After population, every segment carries all three pieces of information.
    for (Segment segment : miningModel.getSegmentation().getSegments()) {
        assertNotNull(segment.getId());
        assertNotNull(segment.getModel().getModelName());
        assertFalse(getMiningTargetFields(segment.getModel().getMiningSchema()).isEmpty());
    }
}
@Test
public void populateCorrectSegmentId() throws Exception {
    final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(getFileInputStream(NO_MODELNAME_NO_SEGMENTID_SAMPLE_NAME));
    final Model model = pmml.getModels().get(0);
    assertTrue(model instanceof MiningModel);
    final Segment segment = ((MiningModel) model).getSegmentation().getSegments().get(0);
    assertNull(segment.getId());
    final String modelName = "MODEL_NAME";
    final int segmentIndex = 0;
    KiePMMLUtil.populateCorrectSegmentId(segment, modelName, segmentIndex);
    assertNotNull(segment.getId());
    // The generated id follows SEGMENTID_TEMPLATE, combining model name and index.
    assertEquals(String.format(SEGMENTID_TEMPLATE, modelName, segmentIndex), segment.getId());
}
@Test
public void populateMissingSegmentModelName() throws Exception {
    final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(getFileInputStream(NO_MODELNAME_NO_SEGMENTID_SAMPLE_NAME));
    final Model model = pmml.getModels().get(0);
    assertTrue(model instanceof MiningModel);
    final Model segmentModel = ((MiningModel) model).getSegmentation().getSegments().get(0).getModel();
    assertNull(segmentModel.getModelName());
    final String segmentId = "SEG_ID";
    KiePMMLUtil.populateMissingSegmentModelName(segmentModel, segmentId);
    assertNotNull(segmentModel.getModelName());
    // The generated name is built from the segment id and the concrete model class.
    assertEquals(String.format(SEGMENTMODELNAME_TEMPLATE, segmentId, segmentModel.getClass().getSimpleName()),
                 segmentModel.getModelName());
}
@Test
public void populateMissingTargetFieldInSegment() throws Exception {
    final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(
            getFileInputStream(NO_MODELNAME_NO_SEGMENT_ID_NOSEGMENT_TARGET_FIELD_SAMPLE));
    final Model parentModel = pmml.getModels().get(0);
    assertTrue(parentModel instanceof MiningModel);
    final MiningModel miningModel = (MiningModel) parentModel;
    final Model segmentModel = miningModel.getSegmentation().getSegments().get(0).getModel();
    // The segment model starts without any target field ...
    assertTrue(getMiningTargetFields(segmentModel.getMiningSchema()).isEmpty());
    KiePMMLUtil.populateMissingTargetFieldInSegment(parentModel.getMiningSchema(), segmentModel);
    // ... and afterwards contains every target field of the parent schema.
    final List<MiningField> childrenTargetFields = getMiningTargetFields(segmentModel.getMiningSchema());
    assertFalse(childrenTargetFields.isEmpty());
    for (MiningField parentTargetField : getMiningTargetFields(miningModel.getMiningSchema())) {
        assertTrue(childrenTargetFields.contains(parentTargetField));
    }
}
@Test
public void populateMissingOutputFieldDataType() {
    Random random = new Random();
    // Six random field names: five back ACTIVE mining fields, the last one the TARGET.
    List<String> fieldNames = IntStream.range(0, 6)
            .mapToObj(i -> RandomStringUtils.random(6, true, false))
            .collect(Collectors.toList());
    // One DataField per name, each with a randomly-chosen DataType.
    List<DataField> dataFields = fieldNames.stream()
            .map(fieldName -> {
                DataField toReturn = new DataField();
                toReturn.setName(FieldName.create(fieldName));
                DataType dataType = DataType.values()[random.nextInt(DataType.values().length)];
                toReturn.setDataType(dataType);
                return toReturn;
            })
            .collect(Collectors.toList());
    // ACTIVE mining fields for every data field except the last one.
    List<MiningField> miningFields = IntStream.range(0, dataFields.size() - 1)
            .mapToObj(dataFields::get)
            .map(dataField -> {
                MiningField toReturn = new MiningField();
                toReturn.setName(FieldName.create(dataField.getName().getValue()));
                toReturn.setUsageType(MiningField.UsageType.ACTIVE);
                return toReturn;
            })
            .collect(Collectors.toList());
    // The last data field becomes the TARGET mining field.
    DataField lastDataField = dataFields.get(dataFields.size() - 1);
    MiningField targetMiningField = new MiningField();
    targetMiningField.setName(FieldName.create(lastDataField.getName().getValue()));
    targetMiningField.setUsageType(MiningField.UsageType.TARGET);
    miningFields.add(targetMiningField);
    // Following OutputFields should be populated based on "ResultFeature.PROBABILITY"
    List<OutputField> outputFields = IntStream.range(0, 3)
            .mapToObj(i -> {
                OutputField toReturn = new OutputField();
                toReturn.setName(FieldName.create(RandomStringUtils.random(6, true, false)));
                toReturn.setResultFeature(ResultFeature.PROBABILITY);
                return toReturn;
            })
            .collect(Collectors.toList());
    // Following OutputField should be populated based on "ResultFeature.PREDICTED_VALUE"
    OutputField targetOutputField = new OutputField();
    targetOutputField.setName(FieldName.create(RandomStringUtils.random(6, true, false)));
    targetOutputField.setResultFeature(ResultFeature.PREDICTED_VALUE);
    outputFields.add(targetOutputField);
    // Following OutputField should be populated based on "TargetField" property
    OutputField targetingOutputField = new OutputField();
    targetingOutputField.setName(FieldName.create(RandomStringUtils.random(6, true, false)));
    targetingOutputField.setTargetField(FieldName.create(targetMiningField.getName().getValue()));
    outputFields.add(targetingOutputField);
    // Precondition: none of the output fields created so far has a DataType.
    outputFields.forEach(outputField -> assertNull(outputField.getDataType()));
    // Two extra output fields that already carry a (random) DataType.
    IntStream.range(0, 2)
            .forEach(i -> {
                OutputField toAdd = new OutputField();
                toAdd.setName(FieldName.create(RandomStringUtils.random(6, true, false)));
                DataType dataType = DataType.values()[random.nextInt(DataType.values().length)];
                toAdd.setDataType(dataType);
                outputFields.add(toAdd);
            });
    KiePMMLUtil.populateMissingOutputFieldDataType(outputFields, miningFields, dataFields);
    // After population, every output field in the list exposes a DataType.
    outputFields.forEach(outputField -> assertNotNull(outputField.getDataType()));
}
@Test
public void getSanitizedId() {
    final String modelName = "MODEL_NAME";
    // Plain, dotted and comma-containing ids must all resolve through SEGMENTID_TEMPLATE.
    for (String id : new String[]{"2", "34.5", "3,45"}) {
        final String expected = String.format(SEGMENTID_TEMPLATE, modelName, id);
        assertEquals(expected, KiePMMLUtil.getSanitizedId(id, modelName));
    }
}
@Test
public void getMiningTargetFieldsFromMiningSchema() throws Exception {
    final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(getFileInputStream(NO_MODELNAME_SAMPLE_NAME));
    final Model model = pmml.getModels().get(0);
    // Overload taking the whole MiningSchema.
    final List<MiningField> targetFields = KiePMMLUtil.getMiningTargetFields(model.getMiningSchema());
    assertNotNull(targetFields);
    assertEquals(1, targetFields.size());
    // The sample model declares exactly one target: "car_location".
    final MiningField targetField = targetFields.get(0);
    assertEquals("car_location", targetField.getName().getValue());
    assertEquals("target", targetField.getUsageType().value());
}
@Test
public void getMiningTargetFieldsFromMiningFields() throws Exception {
    final PMML pmml = org.jpmml.model.PMMLUtil.unmarshal(getFileInputStream(NO_MODELNAME_SAMPLE_NAME));
    final Model model = pmml.getModels().get(0);
    // Overload taking the list of MiningFields directly.
    final List<MiningField> targetFields = KiePMMLUtil.getMiningTargetFields(model.getMiningSchema().getMiningFields());
    assertNotNull(targetFields);
    assertEquals(1, targetFields.size());
    // The sample model declares exactly one target: "car_location".
    final MiningField targetField = targetFields.get(0);
    assertEquals("car_location", targetField.getName().getValue());
    assertEquals("target", targetField.getUsageType().value());
}
/**
 * Loads the given PMML resource as a String (via {@code KiePMMLUtil.load(String)})
 * and runs the shared validations on the result.
 *
 * @param fileName name of the classpath resource to read
 * @throws IOException if the resource cannot be read
 * @throws JAXBException if unmarshalling fails
 * @throws SAXException if XML parsing fails
 */
private void commonLoadString(String fileName) throws IOException, JAXBException, SAXException {
    final StringBuilder textBuilder = new StringBuilder();
    // Bulk-read through a char buffer instead of one character at a time, and pass
    // the UTF-8 Charset directly instead of round-tripping it through its name.
    try (Reader reader = new BufferedReader(
            new InputStreamReader(getFileInputStream(fileName), StandardCharsets.UTF_8))) {
        final char[] buffer = new char[8192];
        int read;
        while ((read = reader.read(buffer)) != -1) {
            textBuilder.append(buffer, 0, read);
        }
    }
    PMML retrieved = KiePMMLUtil.load(textBuilder.toString());
    commonValidatePMML(retrieved);
}
/** Loads the given PMML resource directly from its stream and runs the shared validations. */
private void commonLoadFile(String fileName) throws IOException, JAXBException, SAXException {
    final PMML pmml = KiePMMLUtil.load(getFileInputStream(fileName), fileName);
    commonValidatePMML(pmml);
}
/** Asserts every model in the PMML has a name; mining models get the deeper segment validation. */
private void commonValidatePMML(PMML toValidate) {
    assertNotNull(toValidate);
    toValidate.getModels().forEach(model -> {
        assertNotNull(model.getModelName());
        if (model instanceof MiningModel) {
            commonValidateMiningModel((MiningModel) model);
        }
    });
}
/**
 * Asserts every segment of the mining model has an id and a named model (recursing into
 * nested mining models), and that sibling segment model names are unique.
 */
private void commonValidateMiningModel(MiningModel toValidate) {
    assertNotNull(toValidate);
    for (Segment segment : toValidate.getSegmentation().getSegments()) {
        assertNotNull(segment.getId());
        final Model segmentModel = segment.getModel();
        assertNotNull(segmentModel.getModelName());
        // Recurse into nested mining models.
        if (segmentModel instanceof MiningModel) {
            commonValidateMiningModel((MiningModel) segmentModel);
        }
    }
    // Sibling segment model names must be unique.
    final List<String> modelNames = toValidate.getSegmentation().getSegments()
            .stream()
            .map(segment -> segment.getModel().getModelName())
            .collect(Collectors.toList());
    assertEquals(modelNames.size(), modelNames.stream().distinct().count());
}
}
| |
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2012 MIT, All rights reserved
// Released under the MIT License https://raw.github.com/mit-cml/app-inventor/master/mitlicense.txt
// This work is licensed under a Creative Commons Attribution 3.0 Unported License.
package com.google.appinventor.components.runtime.util;
import com.google.appinventor.components.runtime.ReplForm;
import java.util.Enumeration;
import java.util.Formatter;
import java.util.Properties;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.net.InetAddress;
import java.net.Socket;
import java.net.URL;
import java.net.URLConnection;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import android.os.Build;
import android.util.Log;
import com.google.appinventor.components.common.YaVersion;
import com.google.appinventor.components.runtime.util.AsynchUtil;
import kawa.standard.Scheme;
import gnu.expr.Language;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager.NameNotFoundException;
import android.net.Uri;
public class AppInvHTTPD extends NanoHTTPD {
private File rootDir;
private Language scheme;
private ReplForm form;
private boolean secure; // Should we only accept from 127.0.0.1?
private static final int YAV_SKEW_FORWARD = 1;
private static final int YAV_SKEW_BACKWARD = 4;
private static final String LOG_TAG = "AppInvHTTPD";
private static byte[] hmacKey;
private static int seq;
private static final String MIME_JSON = "application/json"; // Other mime types defined in NanoHTTPD
public AppInvHTTPD( int port, File wwwroot, boolean secure, ReplForm form) throws IOException
{
super(port, wwwroot);
this.rootDir = wwwroot;
this.scheme = Scheme.getInstance("scheme");
this.form = form;
this.secure = secure;
gnu.expr.ModuleExp.mustNeverCompile();
}
/**
*
* @param uri Percent-decoded URI without parameters, for example "/index.cgi"
* @param method "GET", "POST" etc.
* @param parms Parsed, percent decoded parameters from URI and, in case of POST, data.
* @param header Header entries, percent decoded
* @return HTTP response, see class Response for details
*/
public Response serve( String uri, String method, Properties header, Properties parms, Properties files, Socket mySocket )
{
Log.d(LOG_TAG, method + " '" + uri + "' " );
// Check to see where the connection is from. If we are in "secure" mode (aka running
// in the emulator or via the USB Cable, then we should only accept connections from 127.0.0.1
// which is the address that "adb" uses when forwarding the connection from the blocks
// editor to the Companion.
if (secure) {
InetAddress myAddress = mySocket.getInetAddress();
String hostAddress = myAddress.getHostAddress();
if (!hostAddress.equals("127.0.0.1")) {
Log.d(LOG_TAG, "Debug: hostAddress = " + hostAddress + " while in secure mode, closing connection.");
Response res = new Response(HTTP_OK, MIME_JSON, "{\"status\" : \"BAD\", \"message\" : \"Security Error: Invalid Source Location " + hostAddress + "\"}");
// Even though we are blowing this guy off, we return the headers below so the browser
// will deliver the status message above. Otherwise it won't due to browser security
// restrictions
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return (res);
}
}
if (method.equals("OPTIONS")) { // This is a complete hack. OPTIONS requests are used
// by Cross Origin Resource Sharing. We give a response
// that permits connections to us from Javascript
// loaded from other pages (like the App Inventor Blocks Editor)
Enumeration e = header.propertyNames();
while ( e.hasMoreElements())
{
String value = (String)e.nextElement();
Log.d(LOG_TAG, " HDR: '" + value + "' = '" +
header.getProperty( value ) + "'" );
}
Response res = new Response(HTTP_OK, MIME_PLAINTEXT, "OK");
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return (res);
}
if (uri.equals("/_newblocks")) { // Handle AJAX calls from the newblocks code
String inSeq = parms.getProperty("seq", "0");
int iseq = Integer.parseInt(inSeq);
String blockid = parms.getProperty("blockid");
String code = parms.getProperty("code");
String inMac = parms.getProperty("mac", "no key provided");
String compMac = "";
String input_code = code;
if (hmacKey != null) {
try {
Mac hmacSha1 = Mac.getInstance("HmacSHA1");
SecretKeySpec key = new SecretKeySpec(hmacKey, "RAW");
hmacSha1.init(key);
byte [] tmpMac = hmacSha1.doFinal((code + inSeq + blockid).getBytes());
StringBuffer sb = new StringBuffer(tmpMac.length * 2);
Formatter formatter = new Formatter(sb);
for (byte b : tmpMac)
formatter.format("%02x", b);
compMac = sb.toString();
} catch (Exception e) {
Log.e(LOG_TAG, "Error working with hmac", e);
form.dispatchErrorOccurredEvent(form, "AppInvHTTPD",
ErrorMessages.ERROR_REPL_SECURITY_ERROR, "Exception working on HMAC");
Response res = new Response(HTTP_OK, MIME_PLAINTEXT, "NOT");
return(res);
}
Log.d(LOG_TAG, "Incoming Mac = " + inMac);
Log.d(LOG_TAG, "Computed Mac = " + compMac);
Log.d(LOG_TAG, "Incoming seq = " + inSeq);
Log.d(LOG_TAG, "Computed seq = " + seq);
Log.d(LOG_TAG, "blockid = " + blockid);
if (!inMac.equals(compMac)) {
Log.e(LOG_TAG, "Hmac does not match");
form.dispatchErrorOccurredEvent(form, "AppInvHTTPD",
ErrorMessages.ERROR_REPL_SECURITY_ERROR, "Invalid HMAC");
Response res = new Response(HTTP_OK, MIME_JSON, "{\"status\" : \"BAD\", \"message\" : \"Security Error: Invalid MAC\"}");
return(res);
}
if ((seq != iseq) && (seq != (iseq+1))) {
Log.e(LOG_TAG, "Seq does not match");
form.dispatchErrorOccurredEvent(form, "AppInvHTTPD",
ErrorMessages.ERROR_REPL_SECURITY_ERROR, "Invalid Seq");
Response res = new Response(HTTP_OK, MIME_JSON, "{\"status\" : \"BAD\", \"message\" : \"Security Error: Invalid Seq\"}");
return(res);
}
// Seq Fixup: Sometimes the Companion doesn't increment it's seq if it is in the middle of a project switch
// so we tolerate an off-by-one here.
if (seq == (iseq+1))
Log.e(LOG_TAG, "Seq Fixup Invoked");
seq = iseq + 1;
} else { // No hmacKey
Log.e(LOG_TAG, "No HMAC Key");
form.dispatchErrorOccurredEvent(form, "AppInvHTTPD",
ErrorMessages.ERROR_REPL_SECURITY_ERROR, "No HMAC Key");
Response res = new Response(HTTP_OK, MIME_JSON, "{\"status\" : \"BAD\", \"message\" : \"Security Error: No HMAC Key\"}");
return(res);
}
code = "(begin (require <com.google.youngandroid.runtime>) (process-repl-input " + blockid + " (begin " +
code + " )))";
Log.d(LOG_TAG, "To Eval: " + code);
Response res;
try {
// Don't evaluate a simple "#f" which is used by the poller
if (input_code.equals("#f")) {
Log.e(LOG_TAG, "Skipping evaluation of #f");
} else {
scheme.eval(code);
}
res = new Response(HTTP_OK, MIME_JSON, RetValManager.fetch(false));
} catch (Throwable ex) {
Log.e(LOG_TAG, "newblocks: Scheme Failure", ex);
RetValManager.appendReturnValue(blockid, "BAD", ex.toString());
res = new Response(HTTP_OK, MIME_JSON, RetValManager.fetch(false));
}
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return(res);
} else if (uri.equals("/_values")) {
Response res = new Response(HTTP_OK, MIME_JSON, RetValManager.fetch(true)); // Blocking Fetch
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return(res);
} else if (uri.equals("/_getversion")) {
Response res;
try {
PackageInfo pInfo = form.getPackageManager().getPackageInfo(form.getPackageName(), 0);
String installer;
if (SdkLevel.getLevel() >= SdkLevel.LEVEL_ECLAIR) {
installer = EclairUtil.getInstallerPackageName("edu.mit.appinventor.aicompanion3", form);
} else {
installer = "Not Known"; // So we *will* auto-update old phones, no way to find out
// from wence they came!
}
// installer will be "com.android.vending" if installed from the play store.
String versionName = pInfo.versionName;
if (installer == null)
installer = "Not Known";
res = new Response(HTTP_OK, MIME_JSON, "{\"version\" : \"" + versionName +
"\", \"fingerprint\" : \"" + Build.FINGERPRINT + "\"," + " \"installer\" : \"" + installer + "\"}");
} catch (NameNotFoundException n) {
n.printStackTrace();
res = new Response(HTTP_OK, MIME_JSON, "{\"verison\" : \"Unknown\"");
}
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return (res);
} else if (uri.equals("/_update") || uri.equals("/_install")) { // Install a package, including a new companion
String url = parms.getProperty("url", "");
String inMac = parms.getProperty("mac", "");
String compMac;
if (!url.equals("") && (hmacKey != null) && !inMac.equals("")) {
try {
SecretKeySpec key = new SecretKeySpec(hmacKey, "RAW");
Mac hmacSha1 = Mac.getInstance("HmacSHA1");
hmacSha1.init(key);
byte [] tmpMac = hmacSha1.doFinal(url.getBytes());
StringBuffer sb = new StringBuffer(tmpMac.length * 2);
Formatter formatter = new Formatter(sb);
for (byte b : tmpMac)
formatter.format("%02x", b);
compMac = sb.toString();
} catch (Exception e) {
Log.e(LOG_TAG, "Error verifying update", e);
form.dispatchErrorOccurredEvent(form, "AppInvHTTPD",
ErrorMessages.ERROR_REPL_SECURITY_ERROR, "Exception working on HMAC for update");
Response res = new Response(HTTP_OK, MIME_JSON, "{\"status\" : \"BAD\", \"message\" : \"Security Error: Exception processing MAC\"}");
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return(res);
}
Log.d(LOG_TAG, "Incoming Mac (update) = " + inMac);
Log.d(LOG_TAG, "Computed Mac (update) = " + compMac);
if (!inMac.equals(compMac)) {
Log.e(LOG_TAG, "Hmac does not match");
form.dispatchErrorOccurredEvent(form, "AppInvHTTPD",
ErrorMessages.ERROR_REPL_SECURITY_ERROR, "Invalid HMAC (update)");
Response res = new Response(HTTP_OK, MIME_JSON, "{\"status\" : \"BAD\", \"message\" : \"Security Error: Invalid MAC\"}");
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return(res);
}
doPackageUpdate(url);
Response res = new Response(HTTP_OK, MIME_JSON, "{\"status\" : \"OK\", \"message\" : \"Update Should Happen\"}");
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return (res);
} else {
Response res = new Response(HTTP_OK, MIME_JSON, "{\"status\" : \"BAD\", \"message\" : \"Missing Parameters\"}");
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return(res);
}
} else if (uri.equals("/_package")) { // Handle installing a package
Response res;
String packageapk = parms.getProperty("package", null);
if (packageapk == null) {
res = new Response(HTTP_OK, MIME_PLAINTEXT, "NOT OK"); // Should really return an error code, but we don't look at it yet
return (res);
}
Log.d(LOG_TAG, rootDir + "/" + packageapk);
Intent intent = new Intent(Intent.ACTION_VIEW);
Uri packageuri = Uri.fromFile(new File(rootDir + "/" + packageapk));
intent.setDataAndType(packageuri, "application/vnd.android.package-archive");
form.startActivity(intent);
res = new Response(HTTP_OK, MIME_PLAINTEXT, "OK");
return (res);
}
if (method.equals("PUT")) { // Asset File Upload for newblocks
Boolean error = false;
String tmpFileName = (String) files.getProperty("content", null);
if (tmpFileName != null) { // We have content
File fileFrom = new File(tmpFileName);
String filename = parms.getProperty("filename", null);
if (filename != null) {
if (filename.startsWith("..") || filename.endsWith("..")
|| filename.indexOf("../") >= 0) {
Log.d(LOG_TAG, " Ignoring invalid filename: " + filename);
filename = null;
}
}
if (filename != null) { // We have a filename and it has not been declared
// invalid by the code above
File fileTo = new File(rootDir + "/" + filename);
if (!fileFrom.renameTo(fileTo)) { // First try rename
copyFile(fileFrom, fileTo);
fileFrom.delete(); // Remove temp file
}
} else {
fileFrom.delete(); // We have content but no file name
Log.e(LOG_TAG, "Received content without a file name!");
error = true;
}
} else {
Log.e(LOG_TAG, "Received PUT without content.");
error = true;
}
if (error) {
Response res = new Response(HTTP_OK, MIME_PLAINTEXT, "NOTOK");
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return (res);
} else {
Response res = new Response(HTTP_OK, MIME_PLAINTEXT, "OK");
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return (res);
}
}
Enumeration e = header.propertyNames();
while ( e.hasMoreElements())
{
String value = (String)e.nextElement();
Log.d(LOG_TAG, " HDR: '" + value + "' = '" +
header.getProperty( value ) + "'" );
}
e = parms.propertyNames();
while ( e.hasMoreElements())
{
String value = (String)e.nextElement();
Log.d(LOG_TAG, " PRM: '" + value + "' = '" +
parms.getProperty( value ) + "'" );
}
e = files.propertyNames();
while ( e.hasMoreElements())
{
String fieldname = (String)e.nextElement();
String tempLocation = (String) files.getProperty(fieldname);
String filename = (String) parms.getProperty(fieldname);
if (filename.startsWith("..") || filename.endsWith("..")
|| filename.indexOf("../") >= 0) {
Log.d(LOG_TAG, " Ignoring invalid filename: " + filename);
filename = null;
}
File fileFrom = new File(tempLocation);
if (filename == null) {
fileFrom.delete(); // Cleanup our mess (remove temp file).
} else {
File fileTo = new File(rootDir + "/" + filename);
if (!fileFrom.renameTo(fileTo)) { // First try rename, otherwise we have to copy
copyFile(fileFrom, fileTo);
fileFrom.delete(); // Cleanup temp file
}
}
Log.d(LOG_TAG, " UPLOADED: '" + filename + "' was at '" + tempLocation + "'");
Response res = new Response(HTTP_OK, MIME_PLAINTEXT, "OK");
res.addHeader("Access-Control-Allow-Origin", "*");
res.addHeader("Access-Control-Allow-Headers", "origin, content-type");
res.addHeader("Access-Control-Allow-Methods", "POST,OPTIONS,GET,HEAD,PUT");
res.addHeader("Allow", "POST,OPTIONS,GET,HEAD,PUT");
return(res);
}
return serveFile( uri, header, rootDir, true );
}
private void copyFile(File infile, File outfile) {
try {
FileInputStream in = new FileInputStream(infile);
FileOutputStream out = new FileOutputStream(outfile);
byte[] buffer = new byte[32768]; // 32K, probably too small
int len;
while ((len = in.read(buffer)) > 0) {
out.write(buffer, 0, len);
}
in.close();
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* @param inputKey String key to use the HTOP algorithm seed
*
*/
public static void setHmacKey(String inputKey) {
hmacKey = inputKey.getBytes();
seq = 1; // Initialize this now
}
private void doPackageUpdate(final String inurl) {
PackageInstaller.doPackageInstall(form, inurl);
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common.ssl;
import javax.net.ssl.SSLContext;
import javax.net.ssl.X509ExtendedKeyManager;
import javax.net.ssl.X509ExtendedTrustManager;
import java.nio.file.Path;
import java.security.GeneralSecurityException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
/**
* A object encapsulating all necessary configuration for an SSL context (client or server).
* The configuration itself is immutable, but the {@link #getKeyConfig() key config} and
* {@link #getTrustConfig() trust config} may depend on reading key and certificate material
* from files (see {@link #getDependentFiles()}, and the content of those files may change.
*/
public class SslConfiguration {
/**
 * An ordered map of protocol algorithms to SSLContext algorithms. The map is ordered from most
 * secure to least secure. The names in this map are taken from the
 * <a href="https://docs.oracle.com/en/java/javase/11/docs/specs/security/standard-names.html#sslcontext-algorithms">
 * Java Security Standard Algorithm Names Documentation for Java 11</a>.
 */
static final Map<String, String> ORDERED_PROTOCOL_ALGORITHM_MAP;
static {
    LinkedHashMap<String, String> protocolAlgorithmMap = new LinkedHashMap<>();
    // TLSv1.3 is only registered when the running JVM actually supports it.
    try {
        SSLContext.getInstance("TLSv1.3");
        protocolAlgorithmMap.put("TLSv1.3", "TLSv1.3");
    } catch (NoSuchAlgorithmException e) {
        // ignore since we support JVMs using BCJSSE in FIPS mode which doesn't support TLSv1.3
    }
    protocolAlgorithmMap.put("TLSv1.2", "TLSv1.2");
    protocolAlgorithmMap.put("TLSv1.1", "TLSv1.1");
    protocolAlgorithmMap.put("TLSv1", "TLSv1");
    protocolAlgorithmMap.put("SSLv3", "SSLv3");
    // Both SSLv2 entries map to the generic "SSL" context algorithm.
    protocolAlgorithmMap.put("SSLv2", "SSL");
    protocolAlgorithmMap.put("SSLv2Hello", "SSL");
    ORDERED_PROTOCOL_ALGORITHM_MAP = Collections.unmodifiableMap(protocolAlgorithmMap);
}

// True when the user explicitly configured SSL rather than defaults being applied.
private final boolean explicitlyConfigured;
private final SslTrustConfig trustConfig;
private final SslKeyConfig keyConfig;
private final SslVerificationMode verificationMode;
private final SslClientAuthenticationMode clientAuth;
// Wrapped as unmodifiable lists in the constructor.
private final List<String> ciphers;
private final List<String> supportedProtocols;
/**
 * Builds an immutable SSL configuration.
 *
 * @param explicitlyConfigured whether the user explicitly configured SSL (vs. defaults)
 * @param trustConfig the trust material configuration; must not be null
 * @param keyConfig the key/identity material configuration; must not be null
 * @param verificationMode how peer certificates are verified; must not be null
 * @param clientAuth client-authentication requirement; must not be null
 * @param ciphers the enabled cipher suites; must be non-null and non-empty
 * @param supportedProtocols the enabled protocol versions; must be non-null and non-empty
 * @throws SslConfigException if {@code ciphers} or {@code supportedProtocols} is null or empty
 * @throws NullPointerException if any of the other object parameters is null
 */
public SslConfiguration(boolean explicitlyConfigured, SslTrustConfig trustConfig, SslKeyConfig keyConfig,
                        SslVerificationMode verificationMode, SslClientAuthenticationMode clientAuth,
                        List<String> ciphers, List<String> supportedProtocols) {
    this.explicitlyConfigured = explicitlyConfigured;
    if (ciphers == null || ciphers.isEmpty()) {
        throw new SslConfigException("cannot configure SSL/TLS without any supported cipher suites");
    }
    if (supportedProtocols == null || supportedProtocols.isEmpty()) {
        throw new SslConfigException("cannot configure SSL/TLS without any supported protocols");
    }
    this.trustConfig = Objects.requireNonNull(trustConfig, "trust config cannot be null");
    this.keyConfig = Objects.requireNonNull(keyConfig, "key config cannot be null");
    this.verificationMode = Objects.requireNonNull(verificationMode, "verification mode cannot be null");
    this.clientAuth = Objects.requireNonNull(clientAuth, "client authentication cannot be null");
    // BUGFIX: take defensive copies — the original wrapped the caller's lists in
    // unmodifiable *views*, so a caller mutating its list after construction would
    // silently change this "immutable" configuration.
    this.ciphers = Collections.unmodifiableList(new ArrayList<>(ciphers));
    this.supportedProtocols = Collections.unmodifiableList(new ArrayList<>(supportedProtocols));
}
/** @return the trust-material configuration used to build trust managers. */
public SslTrustConfig getTrustConfig() {
    return trustConfig;
}

/** @return the key/identity-material configuration used to build key managers. */
public SslKeyConfig getKeyConfig() {
    return keyConfig;
}

/** @return how peer certificates are verified for this configuration. */
public SslVerificationMode getVerificationMode() {
    return verificationMode;
}

/** @return the client-authentication requirement for this configuration. */
public SslClientAuthenticationMode getClientAuth() {
    return clientAuth;
}

/** @return the enabled cipher suites (unmodifiable, as wrapped by the constructor). */
public List<String> getCipherSuites() {
    return ciphers;
}

/** @return the enabled protocol versions (unmodifiable, as wrapped by the constructor). */
public List<String> getSupportedProtocols() {
    return supportedProtocols;
}
/**
 * @return A collection of files that are used by this SSL configuration. If the contents of these files change, then any
 * subsequent call to {@link #createSslContext()} (or similar methods) may create a context with different behaviour.
 * It is recommended that these files be monitored for changes, and a new ssl-context is created whenever any of the files are modified.
 */
public Collection<Path> getDependentFiles() {
    // Union of the files backing the trust material and the key material.
    final Set<Path> dependentFiles = new HashSet<>(trustConfig.getDependentFiles());
    dependentFiles.addAll(keyConfig.getDependentFiles());
    return dependentFiles;
}
/**
 * @return A collection of {@link StoredCertificate certificates} that are used by this SSL configuration.
 * This includes certificates used for identity (with a private key) and those used for trust, but excludes
 * certificates that are provided by the JRE.
 */
public Collection<? extends StoredCertificate> getConfiguredCertificates() {
    // Identity certificates first, then trust certificates, as in the original ordering.
    final List<StoredCertificate> allCertificates = new ArrayList<>(keyConfig.getConfiguredCertificates());
    allCertificates.addAll(trustConfig.getConfiguredCertificates());
    return allCertificates;
}
    /**
     * Dynamically create a new SSL context based on the current state of the configuration.
     * Because the {@link #getKeyConfig() key config} and {@link #getTrustConfig() trust config} may change based on the
     * contents of their referenced files (see {@link #getDependentFiles()}, consecutive calls to this method may
     * return ssl-contexts with different configurations.
     *
     * @throws SslConfigException if no configured protocol is supported, or the context cannot be initialised
     */
    public SSLContext createSslContext() {
        // Managers are rebuilt from the configs on every call so that file changes are picked up.
        final X509ExtendedKeyManager keyManager = keyConfig.createKeyManager();
        final X509ExtendedTrustManager trustManager = trustConfig.createTrustManager();
        try {
            SSLContext sslContext = SSLContext.getInstance(contextProtocol());
            sslContext.init(new X509ExtendedKeyManager[] { keyManager }, new X509ExtendedTrustManager[] { trustManager }, null);
            return sslContext;
        } catch (GeneralSecurityException e) {
            // Wrap and preserve the cause so configuration errors surface with full context.
            throw new SslConfigException("cannot create ssl context", e);
        }
    }
/**
* Picks the best (highest security / most recent standard) SSL/TLS protocol (/version) that is supported by the
* {@link #getSupportedProtocols() configured protocols}.
*/
private String contextProtocol() {
if (supportedProtocols.isEmpty()) {
throw new SslConfigException("no SSL/TLS protocols have been configured");
}
for (Entry<String, String> entry : ORDERED_PROTOCOL_ALGORITHM_MAP.entrySet()) {
if (supportedProtocols.contains(entry.getKey())) {
return entry.getValue();
}
}
throw new SslConfigException("no supported SSL/TLS protocol was found in the configured supported protocols: "
+ supportedProtocols);
}
@Override
public String toString() {
return getClass().getSimpleName() + '{' +
"trustConfig=" + trustConfig +
", keyConfig=" + keyConfig +
", verificationMode=" + verificationMode +
", clientAuth=" + clientAuth +
", ciphers=" + ciphers +
", supportedProtocols=" + supportedProtocols +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final SslConfiguration that = (SslConfiguration) o;
return Objects.equals(this.trustConfig, that.trustConfig) &&
Objects.equals(this.keyConfig, that.keyConfig) &&
this.verificationMode == that.verificationMode &&
this.clientAuth == that.clientAuth &&
Objects.equals(this.ciphers, that.ciphers) &&
Objects.equals(this.supportedProtocols, that.supportedProtocols);
}
    @Override
    public int hashCode() {
        // Hashes exactly the fields compared in equals(), keeping the equals/hashCode contract.
        return Objects.hash(trustConfig, keyConfig, verificationMode, clientAuth, ciphers, supportedProtocols);
    }
    /**
     * @return whether this configuration was explicitly provided rather than defaulted — presumed
     *     from the field name; where {@code explicitlyConfigured} is assigned is not visible in this
     *     chunk, so confirm against the constructor.
     */
    public boolean isExplicitlyConfigured() {
        return explicitlyConfigured;
    }
}
| |
package me.etki.grac;
import com.google.common.base.Stopwatch;
import com.google.common.net.MediaType;
import me.etki.grac.application.ApplicationClient;
import me.etki.grac.application.ApplicationLevelInterceptor;
import me.etki.grac.common.SharedDefaults;
import me.etki.grac.concurrent.BasicCompletableFutureFactory;
import me.etki.grac.concurrent.CompletableFutureFactory;
import me.etki.grac.concurrent.DefaultDelayService;
import me.etki.grac.concurrent.DefaultScheduledExecutor;
import me.etki.grac.concurrent.DefaultTimeoutService;
import me.etki.grac.concurrent.DelayService;
import me.etki.grac.concurrent.ScheduledExecutor;
import me.etki.grac.concurrent.TimeoutService;
import me.etki.grac.io.CachingInputStreamWrapperFactory;
import me.etki.grac.io.DefaultSerializationManager;
import me.etki.grac.io.MarkResetStreamWrapperFactory;
import me.etki.grac.io.Serializer;
import me.etki.grac.io.SynchronousSerializationManager;
import me.etki.grac.policy.LoadBalancingPolicy;
import me.etki.grac.policy.RetryPolicy;
import me.etki.grac.transport.DefaultTransportManager;
import me.etki.grac.transport.Transport;
import me.etki.grac.transport.TransportInterceptor;
import me.etki.grac.transport.TransportManager;
import me.etki.grac.transport.TransportRegistry;
import me.etki.grac.transport.TransportRequestExecutor;
import me.etki.grac.transport.server.DefaultServerRegistry;
import me.etki.grac.transport.server.Server;
import me.etki.grac.transport.server.ServerProvider;
import me.etki.grac.transport.server.ServerRegistry;
import me.etki.grac.utility.StaticValidator;
import me.etki.grac.utility.TypeSpec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
* @author Etki {@literal <etki@etki.name>}
* @version %I%, %G%
* @since 0.1.0
*/
public class ClientBuilder {

    private static final Logger LOGGER = LoggerFactory.getLogger(ClientBuilder.class);

    // Components are held as suppliers so callers can defer construction; every supplier is
    // resolved exactly once, inside build().
    private List<Supplier<Serializer>> serializers = new ArrayList<>();
    private List<Supplier<Transport>> transports = new ArrayList<>();
    private List<Supplier<ApplicationLevelInterceptor>> interceptors = new ArrayList<>();
    private List<Supplier<TransportInterceptor>> transportInterceptors = new ArrayList<>();
    private Supplier<ScheduledExecutor> scheduler = SharedDefaults::getDefaultScheduler;
    private Supplier<Executor> responseProcessingExecutor = SharedDefaults::getDefaultExecutor;
    private ServerProvider serverProvider;
    // todo: add possibility to set up fixed server provider just by specifying list of servers.
    // NOTE(review): `servers` is never read anywhere in this class — presumably reserved for the
    // todo above; confirm before removing.
    private List<Server> servers = new ArrayList<>();
    private RetryPolicy retryPolicy = SharedDefaults.DEFAULT_RETRY_POLICY;
    private LoadBalancingPolicy loadBalancingPolicy = SharedDefaults.DEFAULT_LOAD_BALANCING_POLICY;
    private MediaType defaultSerializationType = SharedDefaults.JSON_MIME_TYPE;
    // todo: add default content-type so servers wouldn't necessarily have to specify it
    // NOTE(review): `defaultIncomingMimeType` is never read in this class — confirm intended use.
    private MediaType defaultIncomingMimeType = SharedDefaults.JSON_MIME_TYPE;
    private List<MediaType> acceptedMimeTypes = new ArrayList<>();
    private List<TypeSpec> fallbackObjectTypes = new ArrayList<>();
    private List<String> acceptedLocales = new ArrayList<>();
    // Error-handling switches, copied verbatim into ClientOptions by calculateClientOptions().
    private boolean throwOnClientError = true;
    private boolean throwOnServerError = true;
    private boolean throwOnInvalidResponsePayloadType = true;
    private long defaultTimeout = SharedDefaults.DEFAULT_REQUEST_TIMEOUT;
    private String clientIdentifier = SharedDefaults.DEFAULT_CLIENT_IDENTIFIER;
    private Supplier<MarkResetStreamWrapperFactory> markResetStreamWrapperFactory
            = CachingInputStreamWrapperFactory::new;
    private int inputStreamMarkLimit = SharedDefaults.DEFAULT_INPUT_STREAM_MARK_LIMIT;
    // todo add upper bounds on submitted requests and concurrently executed requests
    // NOTE(review): the two fields below are never read in this class — see the todo above.
    private int requestQueueSize = SharedDefaults.DEFAULT_REQUEST_QUEUE_SIZE;
    private int concurrentRequestLimit = SharedDefaults.DEFAULT_CONCURRENT_REQUEST_LIMIT;

    public ClientBuilder() {
        withDefaults();
    }

    /**
     * Currently a no-op: all defaults are applied through field initializers. Presumably kept as an
     * extension point for future default wiring — confirm before removing.
     */
    public ClientBuilder withDefaults() {
        return this;
    }

    /** Registers an already-constructed serializer (wrapped into a constant supplier). */
    public ClientBuilder withSerializer(Serializer serializer) {
        return withSerializer(() -> serializer);
    }

    /** Registers a serializer supplier, resolved once during {@link #build()}. */
    public ClientBuilder withSerializer(Supplier<Serializer> serializer) {
        serializers.add(serializer);
        return this;
    }

    /** Appends each given serializer; the parameter shadows the field of the same name on purpose. */
    public ClientBuilder withSerializers(Iterable<Serializer> serializers) {
        serializers.forEach(this::withSerializer);
        return this;
    }

    /** Replaces all previously registered serializers with the given ones. */
    public ClientBuilder setSerializers(Iterable<Serializer> serializers) {
        this.serializers = StreamSupport.stream(serializers.spliterator(), false)
                .map(serializer -> (Supplier<Serializer>) () -> serializer)
                .collect(Collectors.toList());
        return this;
    }

    /** Drops every registered serializer. */
    public ClientBuilder withoutSerializers() {
        serializers = new ArrayList<>();
        return this;
    }

    /** Registers an already-constructed transport (wrapped into a constant supplier). */
    public ClientBuilder withTransport(Transport transport) {
        return withTransport(() -> transport);
    }

    /** Registers a transport supplier, resolved once during {@link #build()}. */
    public ClientBuilder withTransport(Supplier<Transport> transport) {
        transports.add(transport);
        return this;
    }

    /** Appends each given transport. */
    public ClientBuilder withTransports(Iterable<Transport> transports) {
        transports.forEach(this::withTransport);
        return this;
    }

    /** Replaces all previously registered transports with the given ones. */
    public ClientBuilder setTransports(Iterable<Transport> transports) {
        this.transports = StreamSupport.stream(transports.spliterator(), false)
                .map(transport -> (Supplier<Transport>) () -> transport)
                .collect(Collectors.toList());
        return this;
    }

    /** Drops every registered transport; build() will then fail, since at least one is required. */
    public ClientBuilder withoutTransports() {
        transports = new ArrayList<>();
        return this;
    }

    /** Registers an application-level interceptor (wrapped into a constant supplier). */
    public ClientBuilder withInterceptor(ApplicationLevelInterceptor interceptor) {
        return withInterceptor(() -> interceptor);
    }

    /** Registers an application-level interceptor supplier, resolved once during {@link #build()}. */
    public ClientBuilder withInterceptor(Supplier<ApplicationLevelInterceptor> interceptor) {
        interceptors.add(interceptor);
        return this;
    }

    /** Appends each given application-level interceptor. */
    public ClientBuilder withInterceptors(Iterable<ApplicationLevelInterceptor> interceptors) {
        interceptors.forEach(this::withInterceptor);
        return this;
    }

    /** Replaces all previously registered application-level interceptors with the given ones. */
    public ClientBuilder setInterceptors(Iterable<ApplicationLevelInterceptor> interceptors) {
        this.interceptors = StreamSupport.stream(interceptors.spliterator(), false)
                .map(interceptor -> (Supplier<ApplicationLevelInterceptor>) () -> interceptor)
                .collect(Collectors.toList());
        return this;
    }

    /** Drops every registered application-level interceptor. */
    public ClientBuilder withoutInterceptors() {
        interceptors = new ArrayList<>();
        return this;
    }

    /** Registers a transport-level interceptor (wrapped into a constant supplier). */
    public ClientBuilder withTransportInterceptor(TransportInterceptor interceptor) {
        return withTransportInterceptor(() -> interceptor);
    }

    /** Registers a transport-level interceptor supplier, resolved once during {@link #build()}. */
    public ClientBuilder withTransportInterceptor(Supplier<TransportInterceptor> interceptor) {
        transportInterceptors.add(interceptor);
        return this;
    }

    /** Appends each given transport-level interceptor. */
    public ClientBuilder withTransportInterceptors(Iterable<TransportInterceptor> interceptors) {
        interceptors.forEach(this::withTransportInterceptor);
        return this;
    }

    /** Replaces all previously registered transport-level interceptors with the given ones. */
    public ClientBuilder setTransportInterceptors(Iterable<TransportInterceptor> interceptors) {
        transportInterceptors = StreamSupport
                .stream(interceptors.spliterator(), false)
                .map(interceptor -> (Supplier<TransportInterceptor>) () -> interceptor)
                .collect(Collectors.toList());
        return this;
    }

    /** Drops every registered transport-level interceptor. */
    public ClientBuilder withoutTransportInterceptors() {
        transportInterceptors = new ArrayList<>();
        return this;
    }

    /** Uses the given JDK scheduler, adapted through {@link DefaultScheduledExecutor}. */
    public ClientBuilder withSchedulerService(ScheduledExecutorService scheduler) {
        return withSchedulerService(() -> scheduler);
    }

    /** Uses the supplied JDK scheduler, adapted through {@link DefaultScheduledExecutor}. */
    public ClientBuilder withSchedulerService(Supplier<ScheduledExecutorService> scheduler) {
        return withScheduler(() -> new DefaultScheduledExecutor(scheduler.get()));
    }

    /** Uses the given scheduler for timeouts and retry delays. */
    public ClientBuilder withScheduler(ScheduledExecutor scheduler) {
        return withScheduler(() -> scheduler);
    }

    /** Uses the supplied scheduler, resolved once during {@link #build()}. */
    public ClientBuilder withScheduler(Supplier<ScheduledExecutor> scheduler) {
        this.scheduler = scheduler;
        return this;
    }

    /** Executor on which response futures complete. */
    public ClientBuilder withResponseProcessingExecutor(Executor executor) {
        return withResponseProcessingExecutor(() -> executor);
    }

    /** Supplier variant of {@link #withResponseProcessingExecutor(Executor)}. */
    public ClientBuilder withResponseProcessingExecutor(Supplier<Executor> executor) {
        responseProcessingExecutor = executor;
        return this;
    }

    /** Default retry policy applied to requests that don't specify their own. */
    public ClientBuilder withRetryPolicy(RetryPolicy retryPolicy) {
        this.retryPolicy = retryPolicy;
        return this;
    }

    /** Source of target servers; required — build() fails without one. */
    public ClientBuilder withServerProvider(ServerProvider provider) {
        serverProvider = provider;
        return this;
    }

    /** Adds a mime type advertised as accepted on outgoing requests. */
    public ClientBuilder withDefaultAcceptedType(MediaType acceptedType) {
        acceptedMimeTypes.add(acceptedType);
        return this;
    }

    /** Adds every given accepted mime type. */
    public ClientBuilder withDefaultAcceptedTypes(Iterable<MediaType> acceptedTypes) {
        acceptedTypes.forEach(this.acceptedMimeTypes::add);
        return this;
    }

    /** Clears the accepted mime types (build() will log a warning about the empty list). */
    public ClientBuilder withoutDefaultAcceptedTypes() {
        acceptedMimeTypes = new ArrayList<>();
        return this;
    }

    /** Mime type used to serialize outgoing payloads by default; required by build(). */
    public ClientBuilder withDefaultSerializationType(MediaType serializationType) {
        defaultSerializationType = serializationType;
        return this;
    }

    /** Clears the serialization type — note build() will then fail its non-null validation. */
    public ClientBuilder withoutDefaultSerializationType() {
        return withDefaultSerializationType(null);
    }

    /** Adds a fallback type to try when response payloads don't match the requested type. */
    public ClientBuilder withFallbackType(TypeSpec type) {
        fallbackObjectTypes.add(type);
        return this;
    }

    /** Adds the given fallback types, skipping duplicates already registered. */
    public ClientBuilder withFallbackTypes(Iterable<TypeSpec> types) {
        types.forEach(type -> {
            if (!fallbackObjectTypes.contains(type)) {
                fallbackObjectTypes.add(type);
            }
        });
        return this;
    }

    /** Replaces all fallback types with the given ones (no duplicate filtering here). */
    public ClientBuilder setFallbackObjectTypes(Iterable<TypeSpec> types) {
        fallbackObjectTypes = StreamSupport.stream(types.spliterator(), false).collect(Collectors.toList());
        return this;
    }

    /** Drops every registered fallback type. */
    public ClientBuilder withoutFallbackTypes() {
        fallbackObjectTypes = new ArrayList<>();
        return this;
    }

    /** Adds a locale advertised as accepted on outgoing requests. */
    public ClientBuilder withAcceptedLocale(String locale) {
        acceptedLocales.add(locale);
        return this;
    }

    /** Adds every given accepted locale. */
    public ClientBuilder withAcceptedLocales(Iterable<String> locales) {
        locales.forEach(acceptedLocales::add);
        return this;
    }

    /** Replaces all accepted locales with the given ones. */
    public ClientBuilder setAcceptedLocales(Iterable<String> locales) {
        this.acceptedLocales = StreamSupport.stream(locales.spliterator(), false).collect(Collectors.toList());
        return this;
    }

    /** Clears the accepted locales (build() will log a warning about the empty list). */
    public ClientBuilder withoutAcceptedLocales() {
        this.acceptedLocales = new ArrayList<>();
        return this;
    }

    /** Whether 4xx-style responses surface as exceptions. */
    public ClientBuilder shouldThrowOnClientError(boolean throwOnClientError) {
        this.throwOnClientError = throwOnClientError;
        return this;
    }

    /** Whether 5xx-style responses surface as exceptions. */
    public ClientBuilder shouldThrowOnServerError(boolean throwOnServerError) {
        this.throwOnServerError = throwOnServerError;
        return this;
    }

    /** Whether a payload that can't be read as the requested type surfaces as an exception. */
    public ClientBuilder shouldThrowOnInvalidResponsePayloadType(boolean throwOnInvalidResponsePayloadType) {
        this.throwOnInvalidResponsePayloadType = throwOnInvalidResponsePayloadType;
        return this;
    }

    /** Identifier sent with requests (e.g. as a user-agent-like marker); null disables it. */
    public ClientBuilder withClientIdentifier(String clientIdentifier) {
        this.clientIdentifier = clientIdentifier;
        return this;
    }

    /** Disables the client identifier. */
    public ClientBuilder withoutClientIdentifier() {
        clientIdentifier = null;
        return this;
    }

    /**
     * Default per-request timeout. Units are not stated here — presumably milliseconds
     * (see {@code SharedDefaults.DEFAULT_REQUEST_TIMEOUT}); confirm.
     *
     * @throws IllegalArgumentException if the timeout is not positive
     */
    public ClientBuilder withDefaultTimeout(long defaultTimeout) {
        if (defaultTimeout < 1) {
            throw new IllegalArgumentException("Timeout can't be less than 1");
        }
        this.defaultTimeout = defaultTimeout;
        return this;
    }

    /**
     * Factory for stream wrappers enabling mark/reset (and therefore retries of payload-carrying
     * requests). The supplier itself must be non-null; a supplier that yields null downgrades to a
     * pass-through wrapper in {@link #build()}.
     */
    public ClientBuilder withMarkResetStreamWrapperFactory(Supplier<MarkResetStreamWrapperFactory> factory) {
        Objects.requireNonNull(factory);
        markResetStreamWrapperFactory = factory;
        return this;
    }

    /** Constant-instance variant of {@link #withMarkResetStreamWrapperFactory(Supplier)}. */
    public ClientBuilder withMarkResetStreamWrapperFactory(MarkResetStreamWrapperFactory factory) {
        return withMarkResetStreamWrapperFactory(() -> factory);
    }

    /**
     * Validates the accumulated settings, resolves all suppliers, and assembles the client.
     *
     * @throws IllegalStateException if no transports were specified
     */
    public Client build() {
        LOGGER.debug("Building client");
        Stopwatch timer = Stopwatch.createStarted();
        // poor man's DI and validation
        // todo make sure options are calculated before real work - and don't forget JMM is watching you
        Client client = new DefaultClient(constructApplicationClient(), calculateRequestOptions(),
                calculateClientOptions());
        LOGGER.debug("Built generic rest api client in {}", timer);
        return client;
    }

    /** Snapshot of the error-handling flags. */
    private ClientOptions calculateClientOptions() {
        return new ClientOptions()
                .setThrowOnClientError(throwOnClientError)
                .setThrowOnServerError(throwOnServerError)
                .setThrowOnInvalidResponsePayloadType(throwOnInvalidResponsePayloadType);
    }

    /** Validates and snapshots the per-request defaults; warns about empty locale/mime lists. */
    private RequestOptions calculateRequestOptions() {
        // NOTE(review): defaultTimeout is a primitive long, so this non-null check can never fire
        // (it autoboxes to a non-null Long) — likely a leftover; confirm and consider removing.
        StaticValidator.requireNonNull(defaultTimeout, "Default timeout not set");
        StaticValidator.requireNonNull(retryPolicy, "Default retry policy not set");
        StaticValidator.requireNonNull(defaultSerializationType, "Default serialization type not set");
        List<String> acceptedLocales = Optional.ofNullable(this.acceptedLocales).orElseGet(ArrayList::new);
        List<MediaType> acceptedMimeTypes = Optional.ofNullable(this.acceptedMimeTypes).orElseGet(ArrayList::new);
        List<TypeSpec> fallbackObjectTypes = Optional.ofNullable(this.fallbackObjectTypes).orElseGet(ArrayList::new);
        if (acceptedLocales.isEmpty()) {
            LOGGER.warn("No default accepted locales set, this may not be desired");
        }
        if (acceptedMimeTypes.isEmpty()) {
            LOGGER.warn("No default accepted mime types set, this may leave target server without a hint in which " +
                    "mime type data should be returned");
        }
        return new RequestOptions()
                .setRetryPolicy(retryPolicy)
                .setSerializationType(defaultSerializationType)
                .setAcceptedLocales(acceptedLocales)
                .setAcceptedMimeTypes(acceptedMimeTypes)
                .setFallbackObjectTypes(fallbackObjectTypes)
                .setTimeout(defaultTimeout)
                .setClientIdentifier(clientIdentifier);
    }

    /** Wires serialization, transport and interception layers into the application client. */
    private ApplicationClient constructApplicationClient() {
        CompletableFutureFactory responseProcessingFutureFactory = constructResponseProcessingFutureFactory();
        SynchronousSerializationManager serializationManager = constructSerializationManager();
        TransportManager transportManager = constructTransportManager();
        ApplicationClient applicationClient
                = new ApplicationClient(transportManager, serializationManager, responseProcessingFutureFactory);
        // Interceptor suppliers are resolved here, in registration order.
        interceptors.stream().map(Supplier::get).forEach(applicationClient::addInterceptor);
        return applicationClient;
    }

    /** Resolves the response-processing executor and wraps it into a future factory. */
    private CompletableFutureFactory constructResponseProcessingFutureFactory() {
        StaticValidator.requireNonNull(responseProcessingExecutor,
                "Executor for processing responses is not specified");
        Executor executor = responseProcessingExecutor.get();
        StaticValidator.requireNonNull(executor, "Executor for processing responses is not specified");
        return new BasicCompletableFutureFactory(executor);
    }

    /**
     * Resolves serializer suppliers (dropping nulls with a warning) and the mark/reset stream
     * wrapper factory; a null factory downgrades to an identity wrapper, disabling payload retries.
     */
    private SynchronousSerializationManager constructSerializationManager() {
        MarkResetStreamWrapperFactory factory = markResetStreamWrapperFactory.get();
        if (factory == null) {
            LOGGER.warn("No mark/reset stream wrapper factory specified; that means payload-based requests won't be " +
                    "retried, and multi-variant deserialization won't be possible");
            factory = stream -> stream;
        }
        List<Serializer> serializers = this.serializers.stream()
                .map(Supplier::get)
                .filter(serializer -> {
                    if (serializer == null) {
                        LOGGER.warn("Null supplied instead of serializer");
                        return false;
                    }
                    return true;
                })
                .collect(Collectors.toList());
        if (serializers.isEmpty()) {
            LOGGER.warn("No serializers supplied. That wont necessarily lead to errors (e.g. you can safely use " +
                    "byte arrays and input streams as target type), but highly undesirable and probably " +
                    "indicates an error.");
        }
        return new DefaultSerializationManager(serializers, factory, inputStreamMarkLimit);
    }

    /** Assembles the transport layer: scheduler, delay/timeout services, registries, executor. */
    private TransportManager constructTransportManager() {
        StaticValidator.requireNonNull(scheduler, "Scheduled executor is not set");
        ScheduledExecutor scheduledExecutor = scheduler.get();
        StaticValidator.requireNonNull(scheduledExecutor, "Scheduled executor is not set");
        DelayService delayService = constructDelayService(scheduledExecutor);
        TransportRegistry transports = constructTransportRegistry();
        ServerRegistry servers = constructServerRegistry(transports);
        TransportRequestExecutor requestExecutor = constructTransportRequestExecutor(transports, scheduledExecutor);
        return new DefaultTransportManager(servers, requestExecutor, delayService, inputStreamMarkLimit);
    }

    /** Builds the request executor with a timeout service bound to the shared scheduler. */
    private TransportRequestExecutor constructTransportRequestExecutor(
            TransportRegistry transports,
            ScheduledExecutor scheduledExecutor) {
        TimeoutService timeoutService = constructTimeoutService(scheduledExecutor);
        return new TransportRequestExecutor(transports, timeoutService);
    }

    /**
     * Resolves transport suppliers, dropping nulls with a warning.
     *
     * @throws IllegalStateException if no usable transport remains
     */
    private TransportRegistry constructTransportRegistry() {
        List<Transport> transports = this.transports.stream()
                .map(Supplier::get)
                .filter(transport -> {
                    if (transport == null) {
                        LOGGER.warn("Empty transport supplier found");
                        return false;
                    }
                    return true;
                })
                .collect(Collectors.toList());
        if (transports.isEmpty()) {
            throw new IllegalStateException("No transports specified");
        }
        return new TransportRegistry(transports);
    }

    /** Requires a server provider and load-balancing policy, then builds the server registry. */
    private ServerRegistry constructServerRegistry(TransportRegistry transports) {
        StaticValidator.requireNonNull(serverProvider, "Server address provider is not set");
        StaticValidator.requireNonNull(loadBalancingPolicy, "Load balancing policy is not set");
        return new DefaultServerRegistry(serverProvider, transports, loadBalancingPolicy);
    }

    /** Timeout service backed by the shared scheduler. */
    private TimeoutService constructTimeoutService(ScheduledExecutor executor) {
        return new DefaultTimeoutService(executor);
    }

    /** Delay (retry back-off) service backed by the shared scheduler. */
    private DelayService constructDelayService(ScheduledExecutor executor) {
        return new DefaultDelayService(executor);
    }
}
| |
/*
* Copyright 2010 Research Studios Austria Forschungsgesellschaft mBH
*
* This file is part of easyrec.
*
* easyrec is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* easyrec is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with easyrec. If not, see <http://www.gnu.org/licenses/>.
*/
package org.easyrec.store.dao.plugin.impl;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.easyrec.model.plugin.PluginVO;
import org.easyrec.plugin.model.Version;
import org.easyrec.store.dao.plugin.PluginDAO;
import org.easyrec.utils.spring.store.dao.DaoUtils;
import org.easyrec.utils.spring.store.dao.annotation.DAO;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.support.AbstractLobCreatingPreparedStatementCallback;
import org.springframework.jdbc.core.support.JdbcDaoSupport;
import org.springframework.jdbc.support.lob.DefaultLobHandler;
import org.springframework.jdbc.support.lob.LobCreator;
import java.net.URI;
import java.sql.*;
import java.util.List;
import javax.sql.DataSource;
/**
* @author szavrel
*/
@DAO
public class PluginDAOMysqlImpl extends JdbcDaoSupport implements PluginDAO {

    private final Log logger = LogFactory.getLog(this.getClass());

    // Handles BLOB reads/writes for the plugin file column.
    private DefaultLobHandler lobHandler;
    private final PlugInRowMapper pluginRowMapper = new PlugInRowMapper();
    private final PlugInInfoRowMapper pluginInfoRowMapper = new PlugInInfoRowMapper();

    private static final String SQL_ADD_PLUGIN;
    private static final String SQL_UPDATE_PLUGIN;
    private static final String SQL_LOAD_PLUGIN;
    private static final String SQL_LOAD_PLUGINS;
    private static final String SQL_UPDATE_PLUGIN_STATE;
    private static final String SQL_DELETE_PLUGIN;

    // SQL is assembled from the column-name constants declared on the PluginDAO interface so the
    // statements stay in sync with the schema definition.
    static {
        SQL_ADD_PLUGIN = new StringBuilder().append("INSERT INTO ").append(DEFAULT_TABLE_NAME).append(" (")
                .append(DEFAULT_DISPLAYNAME_COLUMN_NAME).append(",").append(DEFAULT_PLUGINID_COLUMN_NAME).append(",")
                .append(DEFAULT_VERSION_COLUMN_NAME).append(",").append(DEFAULT_ORIG_FILENAME_COLUMN_NAME).append(",")
                .append(DEFAULT_STATE_COLUMN_NAME).append(",").append(DEFAULT_FILE_COLUMN_NAME).append(", ")
                .append(DEFAULT_CHANGEDATE_COLUMN_NAME).append(") ").append("VALUES (?,?,?,?,?,?,?)").toString();
        SQL_UPDATE_PLUGIN = new StringBuilder().append("UPDATE ").append(DEFAULT_TABLE_NAME).append(" SET ")
                .append(DEFAULT_DISPLAYNAME_COLUMN_NAME).append("=?, ").append(DEFAULT_ORIG_FILENAME_COLUMN_NAME)
                .append("=?, ").append(DEFAULT_STATE_COLUMN_NAME).append("=?, ").append(DEFAULT_FILE_COLUMN_NAME)
                .append("=?, ").append(DEFAULT_CHANGEDATE_COLUMN_NAME).append("=? WHERE ")
                .append(DEFAULT_PLUGINID_COLUMN_NAME).append("=? AND ").append(DEFAULT_VERSION_COLUMN_NAME).append("=?")
                .toString();
        SQL_LOAD_PLUGIN = new StringBuilder().append(" SELECT * FROM ").append(DEFAULT_TABLE_NAME).append(" WHERE ")
                .append(DEFAULT_PLUGINID_COLUMN_NAME).append("=? AND ").append(DEFAULT_VERSION_COLUMN_NAME).append("=?")
                .toString();
        SQL_LOAD_PLUGINS = new StringBuilder().append(" SELECT * FROM ").append(DEFAULT_TABLE_NAME).append(" WHERE ")
                .append(DEFAULT_STATE_COLUMN_NAME).append(" LIKE ?").toString();
        SQL_UPDATE_PLUGIN_STATE = new StringBuilder().append(" UPDATE ").append(DEFAULT_TABLE_NAME).append(" SET ")
                .append(DEFAULT_STATE_COLUMN_NAME).append("=? WHERE ").append(DEFAULT_PLUGINID_COLUMN_NAME)
                .append("=? AND ").append(DEFAULT_VERSION_COLUMN_NAME).append("=?").toString();
        SQL_DELETE_PLUGIN = new StringBuilder().append("DELETE FROM ").append(DEFAULT_TABLE_NAME).append(" WHERE ")
                .append(DEFAULT_PLUGINID_COLUMN_NAME).append("=? AND ").append(DEFAULT_VERSION_COLUMN_NAME).append("=?")
                .toString();
    }

    public PluginDAOMysqlImpl(DataSource dataSource) {
        setDataSource(dataSource);
        lobHandler = new DefaultLobHandler();
    }

    /**
     * Inserts the plugin; if a row with the same (pluginId, version) already exists the insert
     * violates the unique key and the plugin is updated in place instead. Any other failure is
     * logged and swallowed (best-effort semantics, preserved from the original contract).
     */
    public void storePlugin(PluginVO plugin) {
        final PluginVO pluginParam = plugin;
        try {
            getJdbcTemplate().execute(SQL_ADD_PLUGIN, new AbstractLobCreatingPreparedStatementCallback(lobHandler) {
                @Override
                protected void setValues(PreparedStatement ps, LobCreator lobCreator)
                        throws SQLException, DataAccessException {
                    ps.setString(1, pluginParam.getDisplayName());
                    ps.setString(2, pluginParam.getPluginId().getUri().toString());
                    ps.setString(3, pluginParam.getPluginId().getVersion().toString());
                    ps.setString(4, pluginParam.getOrigFilename());
                    ps.setString(5, pluginParam.getState());
                    lobCreator.setBlobAsBytes(ps, 6, pluginParam.getFile());
                    ps.setTimestamp(7, new Timestamp(System.currentTimeMillis()));
                }
            });
        } catch (DataIntegrityViolationException e) {
            // Duplicate (pluginId, version) key: fall back to updating the existing row.
            logger.info("Updating plugin!");
            getJdbcTemplate().execute(SQL_UPDATE_PLUGIN, new AbstractLobCreatingPreparedStatementCallback(lobHandler) {
                @Override
                protected void setValues(PreparedStatement ps, LobCreator lobCreator)
                        throws SQLException, DataAccessException {
                    ps.setString(1, pluginParam.getDisplayName());
                    ps.setString(2, pluginParam.getOrigFilename());
                    ps.setString(3, pluginParam.getState());
                    lobCreator.setBlobAsBytes(ps, 4, pluginParam.getFile());
                    ps.setTimestamp(5, new Timestamp(System.currentTimeMillis()));
                    ps.setString(6, pluginParam.getPluginId().getUri().toString());
                    ps.setString(7, pluginParam.getPluginId().getVersion().toString());
                }
            });
        } catch (Exception ex) {
            // Log the throwable itself so the stack trace is preserved (was: string concatenation).
            logger.error("An error occurred storing the plugin!", ex);
        }
    }

    /** Deletes the plugin identified by (pluginId, version); failures are logged and swallowed. */
    public void deletePlugin(URI pluginId, Version version) {
        try {
            Object[] args = {pluginId.toString(), version.toString()};
            int[] argTypes = {Types.VARCHAR, Types.VARCHAR};
            int rowsAffected = getJdbcTemplate().update(SQL_DELETE_PLUGIN, args, argTypes);
            if (logger.isDebugEnabled()) {
                logger.debug("Deleted " + rowsAffected + " plugins");
            }
        } catch (Exception e) {
            // Was logged without the exception at all; include it so failures are diagnosable.
            logger.error("An error occurred deleting a plugin", e);
        }
    }

    /** Sets the state column of the plugin identified by (pluginId, version). */
    public void updatePluginState(URI pluginId, Version version, String state) {
        try {
            Object[] args = {state, pluginId.toString(), version.toString()};
            int[] argTypes = {Types.VARCHAR, Types.VARCHAR, Types.VARCHAR};
            int rowsaffected = getJdbcTemplate().update(SQL_UPDATE_PLUGIN_STATE, args, argTypes);
            if (logger.isDebugEnabled()) {
                logger.debug("Updated " + rowsaffected + " plugin state");
            }
        } catch (Exception e) {
            logger.error("An error occurred updating a plugin state!", e);
        }
    }

    /**
     * Loads a single plugin, including its file BLOB.
     *
     * @return the plugin, or {@code null} if it does not exist or the query fails
     */
    public PluginVO loadPlugin(URI pluginId, Version version) {
        try {
            Object[] args = {pluginId.toString(), version.toString()};
            int[] argTypes = {Types.VARCHAR, Types.VARCHAR};
            // Check emptiness explicitly instead of letting get(0) throw on a missing plugin;
            // "not found" is a normal outcome, not an error worth logging.
            List<PluginVO> plugins = getJdbcTemplate().query(SQL_LOAD_PLUGIN, args, argTypes, pluginRowMapper);
            return plugins.isEmpty() ? null : plugins.get(0);
        } catch (Exception e) {
            logger.error("An error occurred loading a plugin!", e);
        }
        return null;
    }

    /** Loads all plugins regardless of state, including file BLOBs. */
    public List<PluginVO> loadPlugins() {
        return loadPlugins(null);
    }

    /**
     * Loads plugins whose state matches the given SQL LIKE pattern ({@code null} matches all).
     *
     * @return the matching plugins, or {@code null} if the query fails (kept for compatibility)
     */
    public List<PluginVO> loadPlugins(String state) {
        try {
            if (state == null) state = "%";
            Object[] args = {state};
            int[] argTypes = {Types.VARCHAR};
            return getJdbcTemplate().query(SQL_LOAD_PLUGINS, args, argTypes, pluginRowMapper);
        } catch (Exception e) {
            logger.error("An error occurred loading all plugins!", e);
        }
        return null;
    }

    /** Loads metadata for all plugins, skipping file BLOBs. */
    public List<PluginVO> loadPluginInfos() {
        return loadPluginInfos(null);
    }

    /**
     * Like {@link #loadPlugins(String)} but maps rows without reading the file BLOB
     * (the {@code file} field of each returned {@link PluginVO} is {@code null}).
     */
    public List<PluginVO> loadPluginInfos(String state) {
        try {
            if (state == null) state = "%";
            Object[] args = {state};
            int[] argTypes = {Types.VARCHAR};
            return getJdbcTemplate().query(SQL_LOAD_PLUGINS, args, argTypes, pluginInfoRowMapper);
        } catch (Exception e) {
            logger.error("An error occurred loading all plugin Infos!", e);
        }
        return null;
    }

    /******************************************************************************************/
    /************************************** Rowmappers ****************************************/
    /**
     * **************************************************************************************
     */

    /** Maps a full row, including the plugin file BLOB. */
    private class PlugInRowMapper implements RowMapper<PluginVO> {

        public PluginVO mapRow(ResultSet rs, int rowNum) throws SQLException {
            URI pluginId = null;
            try {
                pluginId = new URI(DaoUtils.getStringIfPresent(rs, DEFAULT_PLUGINID_COLUMN_NAME));
            } catch (Exception e) {
                // Malformed/missing URI column: keep a null pluginId rather than failing the row.
                logger.debug(e);
            }
            return new PluginVO(DaoUtils.getIntegerIfPresent(rs, DEFAULT_ID_COLUMN_NAME),
                    DaoUtils.getStringIfPresent(rs, DEFAULT_DISPLAYNAME_COLUMN_NAME), pluginId,
                    new Version(DaoUtils.getStringIfPresent(rs, DEFAULT_VERSION_COLUMN_NAME)),
                    DaoUtils.getStringIfPresent(rs, DEFAULT_STATE_COLUMN_NAME),
                    lobHandler.getBlobAsBytes(rs, DEFAULT_FILE_COLUMN_NAME),
                    DaoUtils.getDateIfPresent(rs, DEFAULT_CHANGEDATE_COLUMN_NAME),
                    DaoUtils.getStringIfPresent(rs, DEFAULT_ORIG_FILENAME_COLUMN_NAME));
        }
    }

    /** Maps a metadata-only row: identical to {@link PlugInRowMapper} but leaves the file null. */
    private class PlugInInfoRowMapper implements RowMapper<PluginVO> {

        public PluginVO mapRow(ResultSet rs, int rowNum) throws SQLException {
            URI pluginId = null;
            try {
                pluginId = new URI(DaoUtils.getStringIfPresent(rs, DEFAULT_PLUGINID_COLUMN_NAME));
            } catch (Exception e) {
                logger.debug(e);
            }
            return new PluginVO(DaoUtils.getIntegerIfPresent(rs, DEFAULT_ID_COLUMN_NAME),
                    DaoUtils.getStringIfPresent(rs, DEFAULT_DISPLAYNAME_COLUMN_NAME), pluginId,
                    new Version(DaoUtils.getStringIfPresent(rs, DEFAULT_VERSION_COLUMN_NAME)),
                    DaoUtils.getStringIfPresent(rs, DEFAULT_STATE_COLUMN_NAME), null,
                    DaoUtils.getDateIfPresent(rs, DEFAULT_CHANGEDATE_COLUMN_NAME),
                    DaoUtils.getStringIfPresent(rs, DEFAULT_ORIG_FILENAME_COLUMN_NAME));
        }
    }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.flink.runtime.source.coordinator;
import org.apache.flink.api.connector.source.SourceEvent;
import org.apache.flink.api.connector.source.mocks.MockSourceSplit;
import org.apache.flink.api.connector.source.mocks.MockSourceSplitSerializer;
import org.apache.flink.api.connector.source.mocks.MockSplitEnumerator;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;
import org.apache.flink.runtime.source.event.AddSplitEvent;
import org.apache.flink.runtime.source.event.ReaderRegistrationEvent;
import org.apache.flink.runtime.source.event.SourceEventWrapper;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import static org.apache.flink.runtime.source.coordinator.CoordinatorTestUtils.verifyAssignment;
import static org.apache.flink.runtime.source.coordinator.CoordinatorTestUtils.verifyException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Unit tests for {@link SourceCoordinator}.
*/
public class SourceCoordinatorTest extends SourceCoordinatorTestBase {
    /** Every coordinator entry point must be rejected with the same error before start(). */
    @Test
    public void testThrowExceptionWhenNotStarted() {
        // The following methods should only be invoked after the source coordinator has started.
        String failureMessage = "Call should fail when source coordinator has not started yet.";
        verifyException(() -> sourceCoordinator.checkpointComplete(100L),
                failureMessage, "The coordinator has not started yet.");
        verifyException(() -> sourceCoordinator.handleEventFromOperator(0, null),
                failureMessage, "The coordinator has not started yet.");
        verifyException(() -> sourceCoordinator.subtaskFailed(0, null),
                failureMessage, "The coordinator has not started yet.");
        verifyException(() -> sourceCoordinator.checkpointCoordinator(100L, new CompletableFuture<>()),
                failureMessage, "The coordinator has not started yet.");
    }
@Test
public void testRestCheckpointAfterCoordinatorStarted() throws Exception {
// The following methods should only be invoked after the source coordinator has started.
sourceCoordinator.start();
verifyException(() -> sourceCoordinator.resetToCheckpoint(null),
"Reset to checkpoint should fail after the coordinator has started",
String.format("The coordinator for source %s has started. The source coordinator state can " +
"only be reset to a checkpoint before it starts.", OPERATOR_NAME));
}
@Test
public void testStart() throws Exception {
assertFalse(enumerator.started());
sourceCoordinator.start();
assertTrue(enumerator.started());
}
@Test
public void testClosed() throws Exception {
assertFalse(enumerator.closed());
sourceCoordinator.start();
sourceCoordinator.close();
assertTrue(enumerator.closed());
}
@Test
public void testReaderRegistration() throws Exception {
sourceCoordinator.start();
sourceCoordinator.handleEventFromOperator(
0, new ReaderRegistrationEvent(0, "location_0"));
check(() -> {
assertEquals("2 splits should have been assigned to reader 0",
4, enumerator.getUnassignedSplits().size());
assertTrue(context.registeredReaders().containsKey(0));
assertTrue(enumerator.getHandledSourceEvent().isEmpty());
verifyAssignment(Arrays.asList("0", "3"), splitSplitAssignmentTracker.uncheckpointedAssignments().get(0));
});
}
@Test
public void testHandleSourceEvent() throws Exception {
sourceCoordinator.start();
SourceEvent sourceEvent = new SourceEvent() {};
sourceCoordinator.handleEventFromOperator(0, new SourceEventWrapper(sourceEvent));
check(() -> {
assertEquals(1, enumerator.getHandledSourceEvent().size());
assertEquals(sourceEvent, enumerator.getHandledSourceEvent().get(0));
});
}
@Test
public void testCheckpointCoordinatorAndRestore() throws Exception {
sourceCoordinator.start();
sourceCoordinator.handleEventFromOperator(
0, new ReaderRegistrationEvent(0, "location_0"));
final CompletableFuture<byte[]> checkpointFuture = new CompletableFuture<>();
sourceCoordinator.checkpointCoordinator(100L, checkpointFuture);
final byte[] bytes = checkpointFuture.get();
// restore from the checkpoints.
SourceCoordinator<?, ?> restoredCoordinator = getNewSourceCoordinator();
restoredCoordinator.resetToCheckpoint(bytes);
MockSplitEnumerator restoredEnumerator = (MockSplitEnumerator) restoredCoordinator.getEnumerator();
SourceCoordinatorContext restoredContext = restoredCoordinator.getContext();
assertEquals("2 splits should have been assigned to reader 0",
4, restoredEnumerator.getUnassignedSplits().size());
assertTrue(restoredEnumerator.getHandledSourceEvent().isEmpty());
assertEquals(1, restoredContext.registeredReaders().size());
assertTrue(restoredContext.registeredReaders().containsKey(0));
}
@Test
@SuppressWarnings("unchecked")
public void testSubtaskFailedAndRevertUncompletedAssignments() throws Exception {
sourceCoordinator.start();
// Assign some splits to reader 0 then take snapshot 100.
sourceCoordinator.handleEventFromOperator(
0, new ReaderRegistrationEvent(0, "location_0"));
final CompletableFuture<byte[]> checkpointFuture1 = new CompletableFuture<>();
sourceCoordinator.checkpointCoordinator(100L, checkpointFuture1);
checkpointFuture1.get();
// Add split 6, assign it to reader 0 and take another snapshot 101.
enumerator.addNewSplits(Collections.singletonList(new MockSourceSplit(6)));
final CompletableFuture<byte[]> checkpointFuture2 = new CompletableFuture<>();
sourceCoordinator.checkpointCoordinator(101L, checkpointFuture2);
checkpointFuture2.get();
// check the state.
check(() -> {
// There should be 4 unassigned splits.
assertEquals(4, enumerator.getUnassignedSplits().size());
verifyAssignment(
Arrays.asList("0", "3"),
splitSplitAssignmentTracker.assignmentsByCheckpointId().get(100L).get(0));
assertTrue(splitSplitAssignmentTracker.uncheckpointedAssignments().isEmpty());
verifyAssignment(Arrays.asList("0", "3"), splitSplitAssignmentTracker.assignmentsByCheckpointId(100L).get(0));
verifyAssignment(Arrays.asList("6"), splitSplitAssignmentTracker.assignmentsByCheckpointId(101L).get(0));
List<OperatorEvent> eventsToReader0 = operatorCoordinatorContext.getEventsToOperator().get(0);
assertEquals(2, eventsToReader0.size());
try {
verifyAssignment(Arrays.asList("0", "3"),
((AddSplitEvent<MockSourceSplit>) eventsToReader0.get(0)).splits(new MockSourceSplitSerializer()));
verifyAssignment(Arrays.asList("6"),
((AddSplitEvent<MockSourceSplit>) eventsToReader0.get(1)).splits(new MockSourceSplitSerializer()));
} catch (IOException e) {
fail("Failed to deserialize splits.");
}
});
// Fail reader 0.
sourceCoordinator.subtaskFailed(0, null);
// check the state again.
check(() -> {
//
assertFalse("Reader 0 should have been unregistered.",
context.registeredReaders().containsKey(0));
// The tracker should have reverted all the splits assignment to reader 0.
for (Map<Integer, ?> assignment : splitSplitAssignmentTracker.assignmentsByCheckpointId().values()) {
assertFalse("Assignment in uncompleted checkpoint should have been reverted.",
assignment.containsKey(0));
}
assertFalse(splitSplitAssignmentTracker.uncheckpointedAssignments().containsKey(0));
// The split enumerator should now contains the splits used to be assigned to reader 0.
assertEquals(7, enumerator.getUnassignedSplits().size());
});
}
@Test
public void testFailedSubtaskDoNotRevertCompletedCheckpoint() throws Exception {
sourceCoordinator.start();
// Assign some splits to reader 0 then take snapshot 100.
sourceCoordinator.handleEventFromOperator(
0, new ReaderRegistrationEvent(0, "location_0"));
final CompletableFuture<byte[]> checkpointFuture = new CompletableFuture<>();
sourceCoordinator.checkpointCoordinator(100L, checkpointFuture);
checkpointFuture.get();
// Complete checkpoint 100.
sourceCoordinator.checkpointComplete(100L);
// Fail reader 0.
sourceCoordinator.subtaskFailed(0, null);
check(() -> {
// Reader 0 hase been unregistered.
assertFalse(context.registeredReaders().containsKey(0));
// The assigned splits are not reverted.
assertEquals(4, enumerator.getUnassignedSplits().size());
assertFalse(splitSplitAssignmentTracker.uncheckpointedAssignments().containsKey(0));
assertTrue(splitSplitAssignmentTracker.assignmentsByCheckpointId().isEmpty());
});
}
// -------------------------------
private void check(Runnable runnable) {
try {
coordinatorExecutor.submit(runnable).get();
} catch (Exception e) {
fail("Test failed due to " + e);
}
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.changes.actions.diff;
import com.intellij.diff.DiffContentFactory;
import com.intellij.diff.DiffContentFactoryEx;
import com.intellij.diff.DiffRequestFactory;
import com.intellij.diff.DiffRequestFactoryImpl;
import com.intellij.diff.chains.DiffRequestProducer;
import com.intellij.diff.chains.DiffRequestProducerException;
import com.intellij.diff.contents.DiffContent;
import com.intellij.diff.impl.DiffViewerWrapper;
import com.intellij.diff.merge.MergeUtil;
import com.intellij.diff.requests.DiffRequest;
import com.intellij.diff.requests.ErrorDiffRequest;
import com.intellij.diff.requests.SimpleDiffRequest;
import com.intellij.diff.util.DiffUserDataKeys;
import com.intellij.diff.util.DiffUserDataKeysEx;
import com.intellij.diff.util.DiffUtil;
import com.intellij.diff.util.Side;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.openapi.vcs.AbstractVcs;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsDataKeys;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.merge.MergeData;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.ThreeState;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class ChangeDiffRequestProducer implements DiffRequestProducer {
  private static final Logger LOG = Logger.getInstance(ChangeDiffRequestProducer.class);

  /** User-data key under which the originating {@link Change} is attached to produced requests. */
  public static final Key<Change> CHANGE_KEY = Key.create("DiffRequestPresentable.Change");

  @Nullable private final Project myProject;
  @NotNull private final Change myChange;
  // Extra user-data entries to copy onto every produced request (e.g. custom content titles).
  @NotNull private final Map<Key, Object> myChangeContext;

  private ChangeDiffRequestProducer(@Nullable Project project, @NotNull Change change, @NotNull Map<Key, Object> changeContext) {
    myChange = change;
    myProject = project;
    myChangeContext = changeContext;
  }

  /** @return the change this producer builds diff requests for */
  @NotNull
  public Change getChange() {
    return myChange;
  }

  /** @return the project context, or {@code null} for a project-less producer */
  @Nullable
  public Project getProject() {
    return myProject;
  }

  @NotNull
  @Override
  public String getName() {
    return ChangesUtil.getFilePath(myChange).getPath();
  }

  /**
   * Equality of two changes for the purpose of diff-request caching.
   * Delegates to registered providers first; falls back to comparing class,
   * file status and revisions.
   */
  public static boolean isEquals(@NotNull Change change1, @NotNull Change change2) {
    if (!Comparing.equal(ChangesUtil.getBeforePath(change1), ChangesUtil.getBeforePath(change2)) ||
        !Comparing.equal(ChangesUtil.getAfterPath(change1), ChangesUtil.getAfterPath(change2))) {
      // we use file paths for hashCode, so removing this check might violate comparison contract
      return false;
    }

    for (ChangeDiffViewerWrapperProvider provider : ChangeDiffViewerWrapperProvider.EP_NAME.getExtensions()) {
      ThreeState equals = provider.isEquals(change1, change2);
      if (equals == ThreeState.NO) return false;
    }
    for (ChangeDiffRequestProvider provider : ChangeDiffRequestProvider.EP_NAME.getExtensions()) {
      ThreeState equals = provider.isEquals(change1, change2);
      if (equals == ThreeState.YES) return true;
      if (equals == ThreeState.NO) return false;
    }

    if (!Comparing.equal(change1.getClass(), change2.getClass())) return false;
    if (!Comparing.equal(change1.getFileStatus(), change2.getFileStatus())) return false;
    if (!isEquals(change1.getBeforeRevision(), change2.getBeforeRevision())) return false;
    if (!isEquals(change1.getAfterRevision(), change2.getAfterRevision())) return false;
    return true;
  }

  private static boolean isEquals(@Nullable ContentRevision revision1, @Nullable ContentRevision revision2) {
    if (Comparing.equal(revision1, revision2)) return true;
    // CurrentContentRevision instances are equal iff they point at the same virtual file.
    if (revision1 instanceof CurrentContentRevision && revision2 instanceof CurrentContentRevision) {
      VirtualFile vFile1 = ((CurrentContentRevision)revision1).getVirtualFile();
      VirtualFile vFile2 = ((CurrentContentRevision)revision2).getVirtualFile();
      return Comparing.equal(vFile1, vFile2);
    }
    return false;
  }

  /** Hash consistent with {@link #isEquals(Change, Change)}: based on file paths only. */
  public static int hashCode(@NotNull Change change) {
    return hashCode(change.getBeforeRevision()) + 31 * hashCode(change.getAfterRevision());
  }

  private static int hashCode(@Nullable ContentRevision revision) {
    return revision != null ? revision.getFile().hashCode() : 0;
  }

  /** @return a producer for the change, or {@code null} if no diff can be shown for it */
  @Nullable
  public static ChangeDiffRequestProducer create(@Nullable Project project, @NotNull Change change) {
    return create(project, change, Collections.emptyMap());
  }

  /**
   * @param changeContext user-data entries to copy onto every produced request
   * @return a producer for the change, or {@code null} if no diff can be shown for it
   */
  @Nullable
  public static ChangeDiffRequestProducer create(@Nullable Project project,
                                                 @NotNull Change change,
                                                 @NotNull Map<Key, Object> changeContext) {
    if (!canCreate(project, change)) return null;
    return new ChangeDiffRequestProducer(project, change, changeContext);
  }

  /**
   * Whether a diff request can be produced for the change: either some extension
   * claims it, or it has at least one non-directory revision.
   */
  public static boolean canCreate(@Nullable Project project, @NotNull Change change) {
    for (ChangeDiffViewerWrapperProvider provider : ChangeDiffViewerWrapperProvider.EP_NAME.getExtensions()) {
      if (provider.canCreate(project, change)) return true;
    }
    for (ChangeDiffRequestProvider provider : ChangeDiffRequestProvider.EP_NAME.getExtensions()) {
      if (provider.canCreate(project, change)) return true;
    }

    ContentRevision bRev = change.getBeforeRevision();
    ContentRevision aRev = change.getAfterRevision();

    if (bRev == null && aRev == null) return false;
    if (bRev != null && bRev.getFile().isDirectory()) return false;
    if (aRev != null && aRev.getFile().isDirectory()) return false;

    return true;
  }

  @NotNull
  @Override
  public DiffRequest process(@NotNull UserDataHolder context,
                             @NotNull ProgressIndicator indicator) throws DiffRequestProducerException, ProcessCanceledException {
    try {
      return loadCurrentContents(context, indicator);
    }
    catch (ProcessCanceledException | DiffRequestProducerException e) {
      throw e;
    }
    catch (Exception e) {
      LOG.warn(e);
      throw new DiffRequestProducerException(e.getMessage());
    }
  }

  /**
   * Builds the diff request, trying extension-provided wrappers and requests first
   * and falling back to {@link #createRequest}. If only the wrapper fails, an error
   * wrapper is logged but a plain request is still shown; if only the request fails,
   * an {@link ErrorDiffRequest} is shown; if both fail, the combined error is thrown.
   */
  @NotNull
  protected DiffRequest loadCurrentContents(@NotNull UserDataHolder context,
                                            @NotNull ProgressIndicator indicator) throws DiffRequestProducerException {
    DiffRequestProducerException wrapperException = null;
    DiffRequestProducerException requestException = null;

    DiffViewerWrapper wrapper = null;
    try {
      for (ChangeDiffViewerWrapperProvider provider : ChangeDiffViewerWrapperProvider.EP_NAME.getExtensions()) {
        if (provider.canCreate(myProject, myChange)) {
          wrapper = provider.process(this, context, indicator);
          break;
        }
      }
    }
    catch (DiffRequestProducerException e) {
      wrapperException = e;
    }

    DiffRequest request = null;
    try {
      for (ChangeDiffRequestProvider provider : ChangeDiffRequestProvider.EP_NAME.getExtensions()) {
        if (provider.canCreate(myProject, myChange)) {
          request = provider.process(this, context, indicator);
          break;
        }
      }
      if (request == null) request = createRequest(myProject, myChange, context, indicator);
    }
    catch (DiffRequestProducerException e) {
      requestException = e;
    }

    if (requestException != null && wrapperException != null) {
      String message = requestException.getMessage() + "\n\n" + wrapperException.getMessage();
      throw new DiffRequestProducerException(message);
    }
    if (requestException != null) {
      request = new ErrorDiffRequest(getRequestTitle(myChange), requestException);
      LOG.info("Request: " + requestException.getMessage());
    }
    if (wrapperException != null) {
      LOG.info("Wrapper: " + wrapperException.getMessage());
    }

    request.putUserData(CHANGE_KEY, myChange);
    request.putUserData(DiffViewerWrapper.KEY, wrapper);

    for (Map.Entry<Key, Object> entry : myChangeContext.entrySet()) {
      request.putUserData(entry.getKey(), entry.getValue());
    }

    DiffUtil.putDataKey(request, VcsDataKeys.CURRENT_CHANGE, myChange);
    return request;
  }

  /**
   * Creates the default request: a three-side merge view for text conflicts,
   * otherwise a plain two-side diff of before/after revision contents.
   */
  @NotNull
  private DiffRequest createRequest(@Nullable Project project,
                                    @NotNull Change change,
                                    @NotNull UserDataHolder context,
                                    @NotNull ProgressIndicator indicator) throws DiffRequestProducerException {
    if (ChangesUtil.isTextConflictingChange(change)) { // three side diff
      // FIXME: This part is ugly as a VCS merge subsystem itself.

      FilePath path = ChangesUtil.getFilePath(change);
      VirtualFile file = path.getVirtualFile();
      if (file == null) {
        file = LocalFileSystem.getInstance().refreshAndFindFileByPath(path.getPath());
      }
      if (file == null) throw new DiffRequestProducerException("Can't show merge conflict - file not found");

      if (project == null) {
        throw new DiffRequestProducerException("Can't show merge conflict - project is unknown");
      }
      final AbstractVcs vcs = ChangesUtil.getVcsForChange(change, project);
      if (vcs == null || vcs.getMergeProvider() == null) {
        // NOTE: fixed message typo ("nos" -> "not").
        throw new DiffRequestProducerException("Can't show merge conflict - operation not supported");
      }
      try {
        // FIXME: loadRevisions() can call runProcessWithProgressSynchronously() inside
        final Ref<Throwable> exceptionRef = new Ref<>();
        final Ref<MergeData> mergeDataRef = new Ref<>();
        final VirtualFile finalFile = file;
        ApplicationManager.getApplication().invokeAndWait(() -> {
          try {
            mergeDataRef.set(vcs.getMergeProvider().loadRevisions(finalFile));
          }
          catch (VcsException e) {
            exceptionRef.set(e);
          }
        });
        if (!exceptionRef.isNull()) {
          Throwable e = exceptionRef.get();
          if (e instanceof VcsException) throw (VcsException)e;
          ExceptionUtil.rethrow(e);
        }
        MergeData mergeData = mergeDataRef.get();

        ContentRevision bRev = change.getBeforeRevision();
        ContentRevision aRev = change.getAfterRevision();
        String beforeRevisionTitle = getRevisionTitle(bRev, "Your version");
        String afterRevisionTitle = getRevisionTitle(aRev, "Server version");

        String title = DiffRequestFactory.getInstance().getTitle(file);

        List<String> titles = ContainerUtil.list(beforeRevisionTitle, "Base Version", afterRevisionTitle);

        DiffContentFactory contentFactory = DiffContentFactory.getInstance();
        List<DiffContent> contents = ContainerUtil.list(
          contentFactory.createFromBytes(project, mergeData.CURRENT, file),
          contentFactory.createFromBytes(project, mergeData.ORIGINAL, file),
          contentFactory.createFromBytes(project, mergeData.LAST, file)
        );

        SimpleDiffRequest request = new SimpleDiffRequest(title, contents, titles);
        MergeUtil.putRevisionInfos(request, mergeData);

        return request;
      }
      catch (VcsException | IOException e) {
        LOG.info(e);
        throw new DiffRequestProducerException(e);
      }
    }
    else {
      ContentRevision bRev = change.getBeforeRevision();
      ContentRevision aRev = change.getAfterRevision();

      if (bRev == null && aRev == null) {
        LOG.warn("Both revision contents are empty");
        throw new DiffRequestProducerException("Bad revisions contents");
      }
      if (bRev != null) checkContentRevision(project, bRev, context, indicator);
      if (aRev != null) checkContentRevision(project, aRev, context, indicator);

      String title = getRequestTitle(change);

      indicator.setIndeterminate(true);
      DiffContent content1 = createContent(project, bRev, context, indicator);
      DiffContent content2 = createContent(project, aRev, context, indicator);

      // Titles from the change context (if any) win over revision-derived defaults.
      final String userLeftRevisionTitle = (String)myChangeContext.get(DiffUserDataKeysEx.VCS_DIFF_LEFT_CONTENT_TITLE);
      String beforeRevisionTitle = userLeftRevisionTitle != null ? userLeftRevisionTitle : getRevisionTitle(bRev, "Base version");

      final String userRightRevisionTitle = (String)myChangeContext.get(DiffUserDataKeysEx.VCS_DIFF_RIGHT_CONTENT_TITLE);
      String afterRevisionTitle = userRightRevisionTitle != null ? userRightRevisionTitle : getRevisionTitle(aRev, "Your version");

      SimpleDiffRequest request = new SimpleDiffRequest(title, content1, content2, beforeRevisionTitle, afterRevisionTitle);

      // Mark the local (editable) side as master, when exactly one side is local.
      boolean bRevCurrent = bRev instanceof CurrentContentRevision;
      boolean aRevCurrent = aRev instanceof CurrentContentRevision;
      if (bRevCurrent && !aRevCurrent) request.putUserData(DiffUserDataKeys.MASTER_SIDE, Side.LEFT);
      if (!bRevCurrent && aRevCurrent) request.putUserData(DiffUserDataKeys.MASTER_SIDE, Side.RIGHT);

      return request;
    }
  }

  /** @return a "before -> after" window title built from the change's file paths */
  @NotNull
  public static String getRequestTitle(@NotNull Change change) {
    ContentRevision bRev = change.getBeforeRevision();
    ContentRevision aRev = change.getAfterRevision();
    FilePath bPath = bRev != null ? bRev.getFile() : null;
    FilePath aPath = aRev != null ? aRev.getFile() : null;
    return DiffRequestFactoryImpl.getTitle(bPath, aPath, " -> ");
  }

  /** @return the revision number as title, or {@code defaultValue} when absent/empty */
  @NotNull
  public static String getRevisionTitle(@Nullable ContentRevision revision, @NotNull String defaultValue) {
    if (revision == null) return defaultValue;
    String title = revision.getRevisionNumber().asString();
    if (title == null || title.isEmpty()) return defaultValue;
    return title;
  }

  /**
   * Loads a {@link DiffContent} for the revision: an editable document for the
   * current (local) revision, otherwise read-only content from bytes or text.
   *
   * @throws DiffRequestProducerException when the content cannot be loaded
   */
  @NotNull
  public static DiffContent createContent(@Nullable Project project,
                                          @Nullable ContentRevision revision,
                                          @NotNull UserDataHolder context,
                                          @NotNull ProgressIndicator indicator) throws DiffRequestProducerException {
    try {
      indicator.checkCanceled();

      if (revision == null) return DiffContentFactory.getInstance().createEmpty();
      FilePath filePath = revision.getFile();
      DiffContentFactoryEx contentFactory = DiffContentFactoryEx.getInstanceEx();

      if (revision instanceof CurrentContentRevision) {
        VirtualFile vFile = ((CurrentContentRevision)revision).getVirtualFile();
        if (vFile == null) throw new DiffRequestProducerException("Can't get current revision content");
        return contentFactory.create(project, vFile);
      }

      if (revision instanceof ByteBackedContentRevision) {
        // Prefer raw bytes when available - avoids a lossy bytes->String->bytes round trip.
        byte[] revisionContent = ((ByteBackedContentRevision)revision).getContentAsBytes();
        if (revisionContent == null) throw new DiffRequestProducerException("Can't get revision content");
        return contentFactory.createFromBytes(project, revisionContent, filePath);
      }
      else {
        String revisionContent = revision.getContent();
        if (revisionContent == null) throw new DiffRequestProducerException("Can't get revision content");
        return contentFactory.create(project, revisionContent, filePath);
      }
    }
    catch (IOException | VcsException e) {
      LOG.info(e);
      throw new DiffRequestProducerException(e);
    }
  }

  /** @throws DiffRequestProducerException when the revision cannot be diffed (e.g. a directory) */
  public static void checkContentRevision(@Nullable Project project,
                                          @NotNull ContentRevision rev,
                                          @NotNull UserDataHolder context,
                                          @NotNull ProgressIndicator indicator) throws DiffRequestProducerException {
    if (rev.getFile().isDirectory()) {
      throw new DiffRequestProducerException("Can't show diff for directory");
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    ChangeDiffRequestProducer that = (ChangeDiffRequestProducer)o;
    return myChange.equals(that.myChange);
  }

  @Override
  public int hashCode() {
    return myChange.hashCode();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.console.agent.rest;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.net.ConnectException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import okhttp3.Dispatcher;
import okhttp3.FormBody;
import okhttp3.HttpUrl;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import org.apache.ignite.console.demo.*;
import org.apache.ignite.internal.processors.rest.protocols.http.jetty.GridJettyObjectMapper;
import org.apache.log4j.Logger;
import static org.apache.ignite.internal.processors.rest.GridRestResponse.STATUS_AUTH_FAILED;
import static org.apache.ignite.internal.processors.rest.GridRestResponse.STATUS_FAILED;
import static org.apache.ignite.internal.processors.rest.GridRestResponse.STATUS_SUCCESS;
/**
* API to translate REST requests to Ignite cluster.
*/
public class RestExecutor {
    /** Logger. */
    private static final Logger log = Logger.getLogger(RestExecutor.class);

    /** JSON object mapper. */
    private static final ObjectMapper mapper = new GridJettyObjectMapper();

    /** Shared HTTP client, reused for all requests. */
    private final OkHttpClient httpClient;

    /** Node URL. */
    private final String nodeUrl;

    /**
     * Default constructor.
     *
     * @param nodeUrl URL of the Ignite node REST endpoint.
     */
    public RestExecutor(String nodeUrl) {
        this.nodeUrl = nodeUrl;

        // Lift OkHttp's default concurrency caps so the dispatcher never queues requests.
        Dispatcher dispatcher = new Dispatcher();

        dispatcher.setMaxRequests(Integer.MAX_VALUE);
        dispatcher.setMaxRequestsPerHost(Integer.MAX_VALUE);

        httpClient = new OkHttpClient.Builder()
            // No read timeout: some REST commands are long-running.
            .readTimeout(0, TimeUnit.MILLISECONDS)
            .dispatcher(dispatcher)
            .build();
    }

    /**
     * Stop HTTP client.
     */
    public void stop() {
        if (httpClient != null) {
            httpClient.dispatcher().executorService().shutdown();

            httpClient.dispatcher().cancelAll();
        }
    }

    /**
     * Sends a REST request to the node and parses the JSON response.
     *
     * @param demo Whether to target the embedded demo node instead of {@link #nodeUrl}.
     * @param path Optional path segment appended to the node URL.
     * @param params Query (GET) or form (POST) parameters; {@code null} values are skipped.
     * @param mtd HTTP method, {@code GET} or {@code POST}.
     * @param headers Optional request headers; {@code null} values are skipped.
     * @param body Optional POST body; when present it is sent as {@code text/plain}.
     * @return Parsed REST result.
     * @throws IOException If the request could not be executed or the response could not be read.
     */
    private RestResult sendRequest(boolean demo, String path, Map<String, Object> params,
        String mtd, Map<String, Object> headers, String body) throws IOException {
        if (demo && AgentClusterDemo.getDemoUrl() == null) {
            try {
                AgentClusterDemo.tryStart().await();
            }
            catch (InterruptedException ignore) {
                // Restore the interrupt status so callers further up the stack can observe it.
                Thread.currentThread().interrupt();

                throw new IllegalStateException("Failed to execute request because of embedded node for demo mode is not started yet.");
            }
        }

        String url = demo ? AgentClusterDemo.getDemoUrl() : nodeUrl;

        HttpUrl parsedUrl = HttpUrl.parse(url);

        // HttpUrl.parse() returns null for a malformed URL - fail with a clear message instead of an NPE.
        if (parsedUrl == null)
            throw new IllegalStateException("Failed to parse node URL: " + url);

        HttpUrl.Builder urlBuilder = parsedUrl.newBuilder();

        if (path != null)
            urlBuilder.addPathSegment(path);

        final Request.Builder reqBuilder = new Request.Builder();

        if (headers != null) {
            for (Map.Entry<String, Object> entry : headers.entrySet())
                if (entry.getValue() != null)
                    reqBuilder.addHeader(entry.getKey(), entry.getValue().toString());
        }

        if ("GET".equalsIgnoreCase(mtd)) {
            if (params != null) {
                for (Map.Entry<String, Object> entry : params.entrySet()) {
                    if (entry.getValue() != null)
                        urlBuilder.addQueryParameter(entry.getKey(), entry.getValue().toString());
                }
            }
        }
        else if ("POST".equalsIgnoreCase(mtd)) {
            if (body != null) {
                MediaType contentType = MediaType.parse("text/plain");

                reqBuilder.post(RequestBody.create(contentType, body));
            }
            else {
                FormBody.Builder formBody = new FormBody.Builder();

                if (params != null) {
                    for (Map.Entry<String, Object> entry : params.entrySet()) {
                        if (entry.getValue() != null)
                            formBody.add(entry.getKey(), entry.getValue().toString());
                    }
                }

                reqBuilder.post(formBody.build());
            }
        }
        else
            throw new IllegalArgumentException("Unknown HTTP-method: " + mtd);

        reqBuilder.url(urlBuilder.build());

        // try-with-resources closes the response body even on parse failures.
        try (Response resp = httpClient.newCall(reqBuilder.build()).execute()) {
            String content = resp.body().string();

            if (resp.isSuccessful()) {
                JsonNode node = mapper.readTree(content);

                int status = node.get("successStatus").asInt();

                switch (status) {
                    case STATUS_SUCCESS:
                        return RestResult.success(node.get("response").toString());

                    default:
                        return RestResult.fail(status, node.get("error").asText());
                }
            }

            if (resp.code() == 401)
                return RestResult.fail(STATUS_AUTH_FAILED, "Failed to authenticate in grid. Please check agent\'s login and password or node port.");

            return RestResult.fail(STATUS_FAILED, "Failed connect to node and execute REST command.");
        }
        catch (ConnectException ignore) {
            throw new ConnectException("Failed connect to node and execute REST command [url=" + urlBuilder + "]");
        }
    }

    /**
     * Executes a REST command, converting any failure into a failed {@link RestResult}.
     *
     * @param demo Is demo node request.
     * @param path Path segment.
     * @param params Params.
     * @param mtd Method.
     * @param headers Headers.
     * @param body Body.
     * @return REST result; a failed result with code 404 when the request threw.
     */
    public RestResult execute(boolean demo, String path, Map<String, Object> params,
        String mtd, Map<String, Object> headers, String body) {
        log.debug("Start execute REST command [method=" + mtd + ", uri=/" + (path == null ? "" : path) +
            ", parameters=" + params + "]");

        try {
            return sendRequest(demo, path, params, mtd, headers, body);
        }
        catch (Exception e) {
            log.info("Failed to execute REST command [method=" + mtd + ", uri=/" + (path == null ? "" : path) +
                ", parameters=" + params + "]", e);

            return RestResult.fail(404, e.getMessage());
        }
    }

    /**
     * Requests cluster topology from the node.
     *
     * @param demo Is demo node request.
     * @param full Whether to include node metrics in the response.
     * @return REST result with topology information.
     * @throws IOException If the request failed.
     */
    public RestResult topology(boolean demo, boolean full) throws IOException {
        Map<String, Object> params = new HashMap<>(3);

        params.put("cmd", "top");
        params.put("attr", true);
        params.put("mtr", full);

        return sendRequest(demo, "ignite", params, "GET", null, null);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package gls.ch06.s05;
import gls.ch06.s05.testClasses.Tt1cgi;
import gls.ch06.s05.testClasses.Tt1cgo;
import gls.ch06.s05.testClasses.Tt1gi;
import gls.ch06.s05.testClasses.Tt1go;
import groovy.lang.Closure;
import junit.framework.TestCase;
public class JName1Test extends TestCase {
public void testObjectSupportNameHandling() {
    // Exercise a direct subclass of GroovyObjectSupport.
    final Tt1go target = new Tt1go();
    final String viaProperty = "new x";
    final String viaAttribute = "new x1";
    final String viaSetter = "new x2";
    final String viaField = "new x3";

    // The dynamic accessors must initially delegate to the plain Java members.
    assertTrue(target.getProperty("x") == target.getX());
    assertTrue(target.getMetaClass().getAttribute(target, "x") == target.x);
    assertTrue(target.invokeMethod("x", new Object[]{}) == target.x());

    // Writes through the dynamic API must be observable through the dynamic API.
    target.setProperty("x", viaProperty);
    target.getMetaClass().setAttribute(target, "x", viaAttribute);
    assertTrue(target.getProperty("x") == viaProperty);
    assertTrue(target.getMetaClass().getAttribute(target, "x") == viaAttribute);

    // Writes through the plain Java setter/field must also be observable dynamically.
    target.setX(viaSetter);
    target.x = viaField;
    assertTrue(target.getProperty("x") == viaSetter);
    assertTrue(target.getMetaClass().getAttribute(target, "x") == viaField);
}
public void testObjectSupportNameHandling1() {
    // Same scenario as testObjectSupportNameHandling, but through an anonymous
    // subclass to make sure subclassing does not disturb the dispatch.
    final Tt1go target = new Tt1go() {
    };
    final String viaProperty = "new x";
    final String viaAttribute = "new x1";
    final String viaSetter = "new x2";
    final String viaField = "new x3";

    // Dynamic accessors delegate to the plain Java members.
    assertTrue(target.getProperty("x") == target.getX());
    assertTrue(target.getMetaClass().getAttribute(target, "x") == target.x);
    assertTrue(target.invokeMethod("x", new Object[]{}) == target.x());

    // Dynamic writes are visible through dynamic reads.
    target.setProperty("x", viaProperty);
    target.getMetaClass().setAttribute(target, "x", viaAttribute);
    assertTrue(target.getProperty("x") == viaProperty);
    assertTrue(target.getMetaClass().getAttribute(target, "x") == viaAttribute);

    // Plain Java writes are visible through dynamic reads.
    target.setX(viaSetter);
    target.x = viaField;
    assertTrue(target.getProperty("x") == viaSetter);
    assertTrue(target.getMetaClass().getAttribute(target, "x") == viaField);
}
public void testObjectSupportNameHandlingWitnClosureValues() {
    // Same dispatch checks as the String variant, but with Closure-typed values
    // on a GroovyObjectSupport subclass; compare identities via call() results.
    final Tt1cgo target = new Tt1cgo();
    final Closure viaProperty = new Closure(null) {
        public Object doCall(final Object args) {
            return "new x";
        }
    };
    final Closure viaAttribute = new Closure(null) {
        public Object doCall(final Object args) {
            return "new x1";
        }
    };
    final Closure viaSetter = new Closure(null) {
        public Object doCall(final Object args) {
            return "new x2";
        }
    };
    final Closure viaField = new Closure(null) {
        public Object doCall(final Object args) {
            return "new x3";
        }
    };

    // Dynamic accessors delegate to the plain Java members.
    assertTrue(((Closure) target.getProperty("x")).call() == target.getX().call());
    assertTrue(((Closure) target.getMetaClass().getAttribute(target, "x")).call() == target.x.call());
    assertTrue(target.invokeMethod("x", new Object[]{}) == target.x());

    // Dynamic writes are visible through dynamic reads.
    target.setProperty("x", viaProperty);
    target.getMetaClass().setAttribute(target, "x", viaAttribute);
    assertTrue(((Closure) target.getProperty("x")).call() == viaProperty.call());
    assertTrue(((Closure) target.getMetaClass().getAttribute(target, "x")).call() == viaAttribute.call());

    // Plain Java writes are visible through dynamic reads.
    target.setX(viaSetter);
    target.x = viaField;
    assertTrue(((Closure) target.getProperty("x")).call() == viaSetter.call());
    assertTrue(((Closure) target.getMetaClass().getAttribute(target, "x")).call() == viaField.call());
}
public void testObjectSupportNameHandlingWitnClosureValuesi() {
    // Closure-valued variant repeated through an anonymous subclass, to make
    // sure subclassing does not disturb property/attribute/method dispatch.
    final Tt1cgo target = new Tt1cgo() {
    };
    final Closure viaProperty = new Closure(null) {
        public Object doCall(final Object args) {
            return "new x";
        }
    };
    final Closure viaAttribute = new Closure(null) {
        public Object doCall(final Object args) {
            return "new x1";
        }
    };
    final Closure viaSetter = new Closure(null) {
        public Object doCall(final Object args) {
            return "new x2";
        }
    };
    final Closure viaField = new Closure(null) {
        public Object doCall(final Object args) {
            return "new x3";
        }
    };

    // Dynamic accessors delegate to the plain Java members.
    assertTrue(((Closure) target.getProperty("x")).call() == target.getX().call());
    assertTrue(((Closure) target.getMetaClass().getAttribute(target, "x")).call() == target.x.call());
    assertTrue(target.invokeMethod("x", new Object[]{}) == target.x());

    // Dynamic writes are visible through dynamic reads.
    target.setProperty("x", viaProperty);
    target.getMetaClass().setAttribute(target, "x", viaAttribute);
    assertTrue(((Closure) target.getProperty("x")).call() == viaProperty.call());
    assertTrue(((Closure) target.getMetaClass().getAttribute(target, "x")).call() == viaAttribute.call());

    // Plain Java writes are visible through dynamic reads.
    target.setX(viaSetter);
    target.x = viaField;
    assertTrue(((Closure) target.getProperty("x")).call() == viaSetter.call());
    assertTrue(((Closure) target.getMetaClass().getAttribute(target, "x")).call() == viaField.call());
}
public void testMetaClassNameHandling() {
final Tt1gi obj = new Tt1gi(); // Test class implementing GroovyObject
final String newX = "new x";
final String newX1 = "new x1";
final String newX2 = "new x2";
final String newX3 = "new x3";
assertTrue("dynamic property".equals(obj.getProperty("x")));
assertTrue(obj.getMetaClass().getAttribute(obj, "x") == obj.x);
assertTrue("dynamic method".equals(obj.invokeMethod("x", new Object[]{})));
obj.setProperty("x", newX);
obj.getMetaClass().setAttribute(obj, "x", newX1);
assertTrue("dynamic property".equals(obj.getProperty("x")));
assertTrue(obj.getMetaClass().getAttribute(obj, "x") == newX1);
obj.setX(newX2);
obj.x = newX3;
assertTrue("dynamic property".equals(obj.getProperty("x")));
assertTrue(obj.getMetaClass().getAttribute(obj, "x") == newX3);
}
public void testMetaClassNameHandling1() {
final Tt1gi obj = new Tt1gi() {
}; // repeat test with subclass
final String newX = "new x";
final String newX1 = "new x1";
final String newX2 = "new x2";
final String newX3 = "new x3";
assertTrue("dynamic property".equals(obj.getProperty("x")));
assertTrue(obj.getMetaClass().getAttribute(obj, "x") == obj.x);
assertTrue("dynamic method".equals(obj.invokeMethod("x", new Object[]{})));
obj.setProperty("x", newX);
obj.getMetaClass().setAttribute(obj, "x", newX1);
assertTrue("dynamic property".equals(obj.getProperty("x")));
assertTrue(obj.getMetaClass().getAttribute(obj, "x") == newX1);
obj.setX(newX2);
obj.x = newX3;
assertTrue("dynamic property".equals(obj.getProperty("x")));
assertTrue(obj.getMetaClass().getAttribute(obj, "x") == newX3);
}
public void testMetaClassNameHandlingWithClosures() {
final Tt1cgi obj = new Tt1cgi(); // Test class implementing GroovyObject
final Closure newX = new Closure(null) {
public Object doCall(final Object params) {
return "new x";
}
};
final Closure newX1 = new Closure(null) {
public Object doCall(final Object params) {
return "new x1";
}
};
final Closure newX2 = new Closure(null) {
public Object doCall(final Object params) {
return "new x2";
}
};
final Closure newX3 = new Closure(null) {
public Object doCall(final Object params) {
return "new x3";
}
};
assertTrue(((Closure) obj.getProperty("x")).call() == obj.getX().call());
assertTrue(((Closure) obj.getMetaClass().getAttribute(obj, "x")).call() == obj.x.call());
assertTrue(obj.invokeMethod("x", new Object[]{}) == obj.x());
obj.setProperty("x", newX);
obj.getMetaClass().setAttribute(obj, "x", newX1);
assertTrue(((Closure) obj.getProperty("x")).call() == newX.call());
assertTrue(((Closure) obj.getMetaClass().getAttribute(obj, "x")).call() == newX1.call());
obj.setX(newX2);
obj.x = newX3;
assertTrue(((Closure) obj.getProperty("x")).call() == newX2.call());
assertTrue(((Closure) obj.getMetaClass().getAttribute(obj, "x")).call() == newX3.call());
}
public void testMetaClassNameHandlingWithClosures1() {
final Tt1cgi obj = new Tt1cgi() {
}; // repeat test with subclass
final Closure newX = new Closure(null) {
public Object doCall(final Object params) {
return "new x";
}
};
final Closure newX1 = new Closure(null) {
public Object doCall(final Object params) {
return "new x1";
}
};
final Closure newX2 = new Closure(null) {
public Object doCall(final Object params) {
return "new x2";
}
};
final Closure newX3 = new Closure(null) {
public Object doCall(final Object params) {
return "new x3";
}
};
assertTrue(((Closure) obj.getProperty("x")).call() == obj.getX().call());
assertTrue(((Closure) obj.getMetaClass().getAttribute(obj, "x")).call() == obj.x.call());
assertTrue(obj.invokeMethod("x", new Object[]{}) == obj.x());
obj.setProperty("x", newX);
obj.getMetaClass().setAttribute(obj, "x", newX1);
assertTrue(((Closure) obj.getProperty("x")).call() == newX.call());
assertTrue(((Closure) obj.getMetaClass().getAttribute(obj, "x")).call() == newX1.call());
obj.setX(newX2);
obj.x = newX3;
assertTrue(((Closure) obj.getProperty("x")).call() == newX2.call());
assertTrue(((Closure) obj.getMetaClass().getAttribute(obj, "x")).call() == newX3.call());
}
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.ascii.rest;
import com.hazelcast.internal.ascii.NoOpCommand;
import com.hazelcast.nio.IOUtil;
import com.hazelcast.nio.ascii.TextDecoder;
import com.hazelcast.util.StringUtil;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import static com.hazelcast.internal.ascii.TextCommandConstants.TextCommandType.HTTP_POST;
import static com.hazelcast.util.StringUtil.stringToBytes;
/**
 * An HTTP POST command that is decoded incrementally from network buffers by the
 * REST text protocol decoder.
 * <p>
 * The request line has already been consumed by the decoder; this class reads the
 * remaining header lines and then the body. Both plain bodies (sized by a
 * {@code Content-Length} header) and the chunked transfer encoding are supported;
 * see {@link #processLine(String)} for the headers that drive the state machine.
 */
public class HttpPostCommand extends HttpCommand {

    // Chunk sizes in the chunked transfer encoding are transmitted in hexadecimal.
    private static final int RADIX = 16;
    @SuppressWarnings("checkstyle:magicnumber")
    private static final int INITIAL_CAPACITY = 1 << 8;
    // 65536, no specific reason, similar to UDP packet size limit
    @SuppressWarnings("checkstyle:magicnumber")
    private static final int MAX_CAPACITY = 1 << 16;
    private static final byte LINE_FEED = 0x0A;
    private static final byte CARRIAGE_RETURN = 0x0D;

    private final TextDecoder decoder;
    // True once a chunked transfer-encoding header has been seen (HEADER_CHUNKED).
    private boolean chunked;
    // True right after a line ending has been consumed; a second consecutive
    // line ending (i.e. an empty line) marks the end of the header section.
    private boolean nextLine;
    // True once the empty line after the headers has been consumed; body bytes follow.
    private boolean readyToReadData;
    // Accumulates the body; null until a Content-Length header or a chunk size is seen.
    private ByteBuffer data;
    private String contentType;
    // Accumulates the bytes of the current header (or chunk-size) line; grown on demand.
    private ByteBuffer lineBuffer = ByteBuffer.allocate(INITIAL_CAPACITY);

    public HttpPostCommand(TextDecoder decoder, String uri) {
        super(HTTP_POST, uri);
        this.decoder = decoder;
    }

    /**
     * POST /path HTTP/1.0
     * User-Agent: HTTPTool/1.0
     * Content-TextCommandType: application/x-www-form-urlencoded
     * Content-Length: 45
     * &lt;next_line&gt;
     * &lt;next_line&gt;
     * byte[45]
     * &lt;next_line&gt;
     *
     * @param src buffer with the bytes received so far (may hold only part of the request)
     * @return true when the complete request (headers and body) has been read
     */
    @Override
    public boolean readFrom(ByteBuffer src) {
        boolean complete = doActualRead(src);
        // For chunked bodies, keep consuming chunks while bytes remain in the buffer.
        while (!complete && readyToReadData && chunked && src.hasRemaining()) {
            complete = doActualRead(src);
        }
        if (complete) {
            if (data != null) {
                // Prepare the accumulated body buffer for reading.
                data.flip();
            }
        }
        return complete;
    }

    /**
     * Performs one parsing step: consumes header lines until the body starts,
     * then copies body bytes (or the next chunk) out of the given buffer.
     *
     * @return true when the request is complete
     */
    private boolean doActualRead(ByteBuffer cb) {
        if (readyToReadData) {
            if (chunked && (data == null || !data.hasRemaining())) {
                // Between chunks: skip the CRLF terminating the previous chunk, if present.
                if (data != null && cb.hasRemaining()) {
                    readCRLFOrPositionChunkSize(cb);
                }
                // A zero-length chunk marks the end of a chunked body.
                boolean done = readChunkSize(cb);
                if (done) {
                    return true;
                }
            }
            IOUtil.copyToHeapBuffer(cb, data);
        }
        setReadyToReadData(cb);
        // A non-chunked request is complete once the sized body buffer is full.
        return !chunked && ((data != null) && !data.hasRemaining());
    }

    /** Consumes header lines until the empty line that precedes the body is found. */
    private void setReadyToReadData(ByteBuffer cb) {
        while (!readyToReadData && cb.hasRemaining()) {
            byte b = cb.get();
            if (b == CARRIAGE_RETURN) {
                readLF(cb);
                processLine(StringUtil.lowerCaseInternal(toStringAndClear(lineBuffer)));
                if (nextLine) {
                    // Two consecutive line endings: headers are done, body follows.
                    readyToReadData = true;
                }
                nextLine = true;
                break;
            }
            nextLine = false;
            appendToBuffer(b);
        }
    }

    /** Returns the backing array of the body buffer, or null if no body was read. */
    public byte[] getData() {
        if (data == null) {
            return null;
        } else {
            return data.array();
        }
    }

    /** Returns the Content-Type header value as bytes, or null if the header was absent. */
    byte[] getContentType() {
        if (contentType == null) {
            return null;
        } else {
            return stringToBytes(contentType);
        }
    }

    /**
     * Consumes a CRLF if the next byte is a carriage return; otherwise rewinds one
     * byte so the chunk-size line can be read from the current position.
     */
    private void readCRLFOrPositionChunkSize(ByteBuffer cb) {
        byte b = cb.get();
        if (b == CARRIAGE_RETURN) {
            readLF(cb);
        } else {
            cb.position(cb.position() - 1);
        }
    }

    /** Consumes the line feed that must follow a carriage return. */
    private void readLF(ByteBuffer cb) {
        assert cb.hasRemaining() : "'\\n' should follow '\\r'";
        byte b = cb.get();
        if (b != LINE_FEED) {
            throw new IllegalStateException("'\\n' should follow '\\r', but got '" + (char) b + "'");
        }
    }

    /** Converts the buffered line bytes to a string and resets the buffer for the next line. */
    private String toStringAndClear(ByteBuffer bb) {
        if (bb == null) {
            return "";
        }
        String result;
        if (bb.position() == 0) {
            result = "";
        } else {
            result = StringUtil.bytesToString(bb.array(), 0, bb.position());
        }
        bb.clear();
        return result;
    }

    /**
     * Reads a chunk-size line and grows the body buffer by that many bytes.
     *
     * @return true if the zero-size (terminating) chunk was read
     */
    private boolean readChunkSize(ByteBuffer cb) {
        boolean hasLine = false;
        while (cb.hasRemaining()) {
            byte b = cb.get();
            if (b == CARRIAGE_RETURN) {
                readLF(cb);
                hasLine = true;
                break;
            }
            appendToBuffer(b);
        }
        if (hasLine) {
            String lineStr = toStringAndClear(lineBuffer).trim();
            // hex string
            int dataSize = lineStr.length() == 0 ? 0 : Integer.parseInt(lineStr, RADIX);
            if (dataSize == 0) {
                return true;
            }
            dataNullCheck(dataSize);
        }
        return false;
    }

    /**
     * Ensures the body buffer has room for the next dataSize bytes: allocates it on
     * first use, otherwise grows it, carrying over the bytes already read.
     */
    private void dataNullCheck(int dataSize) {
        if (data != null) {
            ByteBuffer newData = ByteBuffer.allocate(data.capacity() + dataSize);
            newData.put(data.array());
            data = newData;
        } else {
            data = ByteBuffer.allocate(dataSize);
        }
    }

    /** Appends one byte to the current line, growing the line buffer if needed. */
    private void appendToBuffer(byte b) {
        if (!lineBuffer.hasRemaining()) {
            expandBuffer();
        }
        lineBuffer.put(b);
    }

    /** Doubles the line buffer capacity; refuses to grow past MAX_CAPACITY. */
    private void expandBuffer() {
        if (lineBuffer.capacity() == MAX_CAPACITY) {
            throw new BufferOverflowException();
        }
        int capacity = lineBuffer.capacity() << 1;
        ByteBuffer newBuffer = ByteBuffer.allocate(capacity);
        lineBuffer.flip();
        newBuffer.put(lineBuffer);
        lineBuffer = newBuffer;
    }

    /**
     * Interprets one (lower-cased) header line, capturing the content type, the
     * content length, the chunked transfer encoding and the 100-continue handshake.
     */
    private void processLine(String currentLine) {
        if (contentType == null && currentLine.startsWith(HEADER_CONTENT_TYPE)) {
            contentType = currentLine.substring(currentLine.indexOf(' ') + 1);
        } else if (data == null && currentLine.startsWith(HEADER_CONTENT_LENGTH)) {
            data = ByteBuffer.allocate(Integer.parseInt(currentLine.substring(currentLine.indexOf(' ') + 1)));
        } else if (!chunked && currentLine.startsWith(HEADER_CHUNKED)) {
            chunked = true;
        } else if (currentLine.startsWith(HEADER_EXPECT_100)) {
            // Client expects a "100 Continue" before it sends the body; acknowledge now.
            decoder.sendResponse(new NoOpCommand(RES_100));
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.rel.rules;
import org.apache.calcite.plan.Contexts;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.plan.RelRule;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Join;
import org.apache.calcite.rel.core.JoinRelType;
import org.apache.calcite.rel.core.RelFactories;
import org.apache.calcite.rel.core.RelFactories.ProjectFactory;
import org.apache.calcite.rel.logical.LogicalJoin;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexShuttle;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.tools.RelBuilderFactory;
import org.apache.calcite.util.ImmutableBeans;
import java.util.List;
/**
* Planner rule that permutes the inputs to a
* {@link org.apache.calcite.rel.core.Join}.
*
* <p>Permutation of outer joins can be turned on/off by specifying the
* swapOuter flag in the constructor.
*
* <p>To preserve the order of columns in the output row, the rule adds a
* {@link org.apache.calcite.rel.core.Project}.
*
* @see CoreRules#JOIN_COMMUTE
* @see CoreRules#JOIN_COMMUTE_OUTER
*/
public class JoinCommuteRule
    extends RelRule<JoinCommuteRule.Config>
    implements TransformationRule {

  /** Creates a JoinCommuteRule. */
  protected JoinCommuteRule(Config config) {
    super(config);
  }

  @Deprecated // to be removed before 2.0
  public JoinCommuteRule(Class<? extends Join> clazz,
      RelBuilderFactory relBuilderFactory, boolean swapOuter) {
    this(Config.DEFAULT.withRelBuilderFactory(relBuilderFactory)
        .as(Config.class)
        .withOperandFor(clazz)
        .withSwapOuter(swapOuter));
  }

  @Deprecated // to be removed before 2.0
  public JoinCommuteRule(Class<? extends Join> clazz,
      ProjectFactory projectFactory) {
    this(clazz, RelBuilder.proto(Contexts.of(projectFactory)), false);
  }

  @Deprecated // to be removed before 2.0
  public JoinCommuteRule(Class<? extends Join> clazz,
      ProjectFactory projectFactory, boolean swapOuter) {
    this(clazz, RelBuilder.proto(Contexts.of(projectFactory)), swapOuter);
  }

  //~ Methods ----------------------------------------------------------------

  @Deprecated // to be removed before 2.0
  public static RelNode swap(Join join) {
    return swap(join, false,
        RelFactories.LOGICAL_BUILDER.create(join.getCluster(), null));
  }

  @Deprecated // to be removed before 2.0
  public static RelNode swap(Join join, boolean swapOuterJoins) {
    return swap(join, swapOuterJoins,
        RelFactories.LOGICAL_BUILDER.create(join.getCluster(), null));
  }

  /**
   * Returns a relational expression with the inputs switched round. Does not
   * modify <code>join</code>. Returns null if the join cannot be swapped (for
   * example, because it is an outer join and outer-join swapping is disabled).
   *
   * @param join           join to be swapped
   * @param swapOuterJoins whether outer joins should be swapped
   * @param relBuilder     Builder for relational expressions
   * @return swapped join if swapping possible; else null
   */
  public static RelNode swap(Join join, boolean swapOuterJoins,
      RelBuilder relBuilder) {
    final JoinRelType joinType = join.getJoinType();
    if (!swapOuterJoins && joinType != JoinRelType.INNER) {
      return null;
    }
    final RexBuilder rexBuilder = join.getCluster().getRexBuilder();
    final RelDataType leftRowType = join.getLeft().getRowType();
    final RelDataType rightRowType = join.getRight().getRowType();
    // Rewrite the condition so field references point at the swapped inputs.
    final VariableReplacer variableReplacer =
        new VariableReplacer(rexBuilder, leftRowType, rightRowType);
    final RexNode oldCondition = join.getCondition();
    RexNode condition = variableReplacer.apply(oldCondition);

    // NOTE jvs 14-Mar-2006: We preserve attribute semiJoinDone after the
    // swap. This way, we will generate one semijoin for the original
    // join, and one for the swapped join, and no more. This
    // doesn't prevent us from seeing any new combinations assuming
    // that the planner tries the desired order (semi-joins after swaps).
    Join newJoin =
        join.copy(join.getTraitSet(), condition, join.getRight(),
            join.getLeft(), joinType.swap(), join.isSemiJoinDone());

    // Project the swapped join's columns back into the original column order.
    final List<RexNode> exps =
        RelOptUtil.createSwappedJoinExprs(newJoin, join, true);
    return relBuilder.push(newJoin)
        .project(exps, join.getRowType().getFieldNames())
        .build();
  }

  @Override public boolean matches(RelOptRuleCall call) {
    Join join = call.rel(0);
    // SEMI and ANTI join cannot be swapped.
    return join.getJoinType().projectsRight();
  }

  @Override public void onMatch(final RelOptRuleCall call) {
    Join join = call.rel(0);

    final RelNode swapped = swap(join, config.isSwapOuter(), call.builder());
    if (swapped == null) {
      return;
    }

    // The result is either a Project or, if the project is trivial, a
    // raw Join.
    final Join newJoin =
        swapped instanceof Join
            ? (Join) swapped
            : (Join) swapped.getInput(0);

    call.transformTo(swapped);

    // We have converted join='a join b' into swapped='select
    // a0,a1,a2,b0,b1 from b join a'. Now register that project='select
    // b0,b1,a0,a1,a2 from (select a0,a1,a2,b0,b1 from b join a)' is the
    // same as 'b join a'. If we didn't do this, the swap join rule
    // would fire on the new join, ad infinitum.
    final RelBuilder relBuilder = call.builder();
    final List<RexNode> exps =
        RelOptUtil.createSwappedJoinExprs(newJoin, join, false);
    relBuilder.push(swapped)
        .project(exps, newJoin.getRowType().getFieldNames());

    call.getPlanner().ensureRegistered(relBuilder.build(), newJoin);
  }

  //~ Inner Classes ----------------------------------------------------------

  /**
   * Walks over an expression, replacing references to fields of the left and
   * right inputs.
   *
   * <p>If the field index is less than leftFieldCount, it must be from the
   * left, and so has rightFieldCount added to it; if the field index is
   * greater than leftFieldCount, it must be from the right, so we subtract
   * leftFieldCount from it.</p>
   */
  private static class VariableReplacer extends RexShuttle {
    private final RexBuilder rexBuilder;
    private final List<RelDataTypeField> leftFields;
    private final List<RelDataTypeField> rightFields;

    VariableReplacer(
        RexBuilder rexBuilder,
        RelDataType leftType,
        RelDataType rightType) {
      this.rexBuilder = rexBuilder;
      this.leftFields = leftType.getFieldList();
      this.rightFields = rightType.getFieldList();
    }

    @Override public RexNode visitInputRef(RexInputRef inputRef) {
      int index = inputRef.getIndex();
      if (index < leftFields.size()) {
        // Field came from left side of join. Move it to the right.
        return rexBuilder.makeInputRef(
            leftFields.get(index).getType(),
            rightFields.size() + index);
      }
      index -= leftFields.size();
      if (index < rightFields.size()) {
        // Field came from right side of join. Move it to the left.
        return rexBuilder.makeInputRef(
            rightFields.get(index).getType(),
            index);
      }
      // Index beyond both inputs: the reference is malformed.
      throw new AssertionError("Bad field offset: index=" + inputRef.getIndex()
          + ", leftFieldCount=" + leftFields.size()
          + ", rightFieldCount=" + rightFields.size());
    }
  }

  /** Rule configuration. */
  public interface Config extends RelRule.Config {
    Config DEFAULT = EMPTY.as(Config.class)
        .withOperandFor(LogicalJoin.class)
        .withSwapOuter(false);

    @Override default JoinCommuteRule toRule() {
      return new JoinCommuteRule(this);
    }

    /** Defines an operand tree for the given classes. */
    default Config withOperandFor(Class<? extends Join> joinClass) {
      return withOperandSupplier(b ->
          b.operand(joinClass)
              // FIXME Enable this rule for joins with system fields
              // Also skip self-joins of the very same RelNode (same id),
              // for which a swap would be a no-op.
              .predicate(j ->
                  j.getLeft().getId() != j.getRight().getId()
                      && j.getSystemFieldList().isEmpty())
              .anyInputs())
          .as(Config.class);
    }

    /** Whether to swap outer joins. */
    @ImmutableBeans.Property
    @ImmutableBeans.BooleanDefault(false)
    boolean isSwapOuter();

    /** Sets {@link #isSwapOuter()}. */
    Config withSwapOuter(boolean swapOuter);
  }
}
| |
package org.concord.energy3d.scene;
import java.awt.*;
import java.awt.print.PageFormat;
import java.awt.print.Paper;
import java.awt.print.PrinterException;
import java.awt.print.PrinterJob;
import java.util.ArrayList;
import java.util.List;
import org.concord.energy3d.gui.MainFrame;
import org.concord.energy3d.gui.MainPanel;
import org.concord.energy3d.model.Foundation;
import org.concord.energy3d.model.HousePart;
import org.concord.energy3d.model.Roof;
import org.concord.energy3d.model.UserData;
import org.concord.energy3d.model.Wall;
import org.concord.energy3d.model.Window;
import org.concord.energy3d.scene.SceneManager.ViewMode;
import org.concord.energy3d.shapes.Annotation;
import org.concord.energy3d.shapes.Heliodon;
import org.concord.energy3d.util.ObjectCloner;
import org.concord.energy3d.util.Printout;
import com.ardor3d.bounding.BoundingBox;
import com.ardor3d.bounding.BoundingVolume.Type;
import com.ardor3d.bounding.OrientedBoundingBox;
import com.ardor3d.framework.CanvasRenderer;
import com.ardor3d.framework.Updater;
import com.ardor3d.math.ColorRGBA;
import com.ardor3d.math.MathUtils;
import com.ardor3d.math.Matrix3;
import com.ardor3d.math.Vector3;
import com.ardor3d.math.type.ReadOnlyVector3;
import com.ardor3d.renderer.Camera;
import com.ardor3d.scenegraph.Mesh;
import com.ardor3d.scenegraph.Node;
import com.ardor3d.scenegraph.Spatial;
import com.ardor3d.scenegraph.hint.CullHint;
import com.ardor3d.scenegraph.shape.Box;
import com.ardor3d.ui.text.BMText;
import com.ardor3d.ui.text.BMText.Align;
import com.ardor3d.util.ReadOnlyTimer;
import com.ardor3d.util.Timer;
public class PrintController implements Updater {
private static PrintController instance = new PrintController();
private static final double SPACE_BETWEEN_PAGES = 5.0;
private static final double exactFromPageToWorldCoord = 1.0 / 72.0 / 4.0 / 10.6 * 10.8;
private static double spaceBetweenParts = 0;
private final ArrayList<ReadOnlyVector3> printCenters = new ArrayList<>();
private final Timer timer = new Timer();
private final Node pagesRoot = new Node();
private List<HousePart> printParts;
private PageFormat pageFormat = new PageFormat();
private double labelHeight = 0.0;
private double pageWidth, pageHeight, pageLeft, pageRight, pageTop, pageBottom;
private double angle;
private int cols;
private int rows;
private boolean isPrintPreview = false;
private boolean init = false;
private boolean finish = false;
private boolean finished = true;
private boolean shadingSelected;
private boolean shadowSelected;
private boolean isScaleToFit = true;
private boolean restartFlag = false;
private boolean heliodonSelected;
    /** Returns the singleton instance of the print controller. */
    public static PrintController getInstance() {
        return instance;
    }
    /** Sets up the page format with a quarter-inch margin on all four sides. */
    public PrintController() {
        final Paper paper = new Paper();
        // paper.setSize(13 * 72, 19 * 72);
        final int m = (int) (0.25 * 72); // 72 points per inch
        paper.setImageableArea(m, m, paper.getWidth() - m * 2, paper.getHeight() - m * 2);
        pageFormat.setPaper(paper);
    }
    @Override
    public void init() {
        // No per-updater initialization needed; preview state is armed by
        // setPrintPreview() and processed frame by frame in update().
    }
    /**
     * Per-frame state machine that animates between the normal 3D view and the
     * flattened print-preview layout. Entry is triggered by setPrintPreview()
     * (which sets the init flag); the finish flag marks the end of the flatten
     * animation, after which either the preview pages are shown or the normal
     * scene state is restored.
     */
    @Override
    public void update(final ReadOnlyTimer globalTimer) {
        if (isPrintPreview) {
            rotate();
        }
        if (isFinished()) {
            return;
        }
        final Spatial originalHouseRoot = Scene.getOriginalHouseRoot();
        if (init) {
            init = false;
            finish = false;
            if (!isPrintPreview) {
                // Leaving preview: remove the page boards and restore part visibility and labels.
                Scene.getRoot().detachChild(pagesRoot);
                pagesRoot.detachAllChildren();
                for (final HousePart part : printParts) {
                    if (part instanceof Wall) {
                        ((Wall) part).setBackMeshesVisible(true);
                    }
                }
                for (final HousePart part : printParts) {
                    part.hideLabels();
                    part.getOriginal().hideLabels();
                }
            } else {
                // Entering preview: deep-copy all printable parts and lay the copies out on pages.
                printParts = new ArrayList<>(Scene.getInstance().getParts().size());
                final boolean orgSolarHeatMap = SceneManager.getInstance().getSolarHeatMap();
                // Disable the solar heat map while cloning; restored right after the loop.
                SceneManager.getInstance().setSolarHeatMapWithoutUpdate(false);
                for (final HousePart part : Scene.getInstance().getParts()) {
                    if (part.isPrintable()) {
                        final HousePart printPart = (HousePart) ObjectCloner.deepCopy(part);
                        printParts.add(printPart);
                        Scene.getRoot().attachChild(printPart.getRoot());
                        printPart.setOriginal(part);
                        printPart.flatten(1.0);
                    }
                }
                SceneManager.getInstance().setSolarHeatMapWithoutUpdate(orgSolarHeatMap);
                final ArrayList<ArrayList<Spatial>> pages = new ArrayList<ArrayList<Spatial>>();
                computePageDimension();
                computePrintCenters(pages);
                arrangePrintPages(pages);
                if (!restartFlag) {
                    SceneManager.getInstance().updatePrintPreviewScene(true);
                }
                drawPrintParts(0);
            }
            originalHouseRoot.getSceneHints().setCullHint(CullHint.Always);
            timer.reset();
        }
        final double viewSwitchDelay = 0.5;
        if (!finish && (!isPrintPreview || timer.getTimeInSeconds() > viewSwitchDelay)) {
            // Drive the flatten animation: t runs 0..1 forward into preview, reversed out of it.
            final double t = timer.getTimeInSeconds() - (isPrintPreview ? viewSwitchDelay : 0);
            drawPrintParts(isPrintPreview ? t : 1 - t);
            finish = t > 1;
            if (finish) {
                timer.reset();
            }
        }
        if (finish) {
            if (isPrintPreview) {
                Scene.getRoot().attachChild(pagesRoot);
            }
            if (isPrintPreview && restartFlag) {
                restartFlag = false;
            }
            final boolean doTheEndAnimation = timer.getTimeInSeconds() > viewSwitchDelay; // (time - startTime) > 1.0;
            if (!isPrintPreview && doTheEndAnimation) {
                // End of the animation back to the normal view: restore the scene state.
                originalHouseRoot.setRotation(new Matrix3().fromAngles(0, 0, 0));
                angle = 0;
                for (final HousePart housePart : printParts) {
                    Scene.getRoot().detachChild(housePart.getRoot());
                }
                printParts = null;
                if (!isPrintPreview && restartFlag) {
                    /* to force redraw when animated back to normal scene */
                    Scene.getInstance().redrawAllNow(); // redraw does not stretch the walls of print parts the roof. there is also no need for redraw since nothing has changed
                    setPrintPreview(true);
                    return;
                }
                originalHouseRoot.setScale(1);
                originalHouseRoot.setTranslation(0, 0, 0);
                originalHouseRoot.updateGeometricState(timer.getTimePerFrame(), true);
                final CanvasRenderer renderer = SceneManager.getInstance().getCanvas().getCanvasRenderer();
                renderer.makeCurrentContext();
                renderer.getRenderer().setBackgroundColor(ColorRGBA.BLACK);
                renderer.releaseCurrentContext();
                // Restore the rendering options captured by setPrintPreview().
                SceneManager.getInstance().setShading(shadingSelected);
                SceneManager.getInstance().setShadow(shadowSelected);
                Heliodon.getInstance().setVisible(heliodonSelected);
                SceneManager.getInstance().updatePrintPreviewScene(false);
                if (!doTheEndAnimation) { // to avoid concurrency exception
                    // NOTE(review): unreachable — the enclosing branch requires
                    // doTheEndAnimation to be true, so this setFinished(true) can
                    // never execute; confirm whether the guard is inverted.
                    setFinished(true);
                }
            }
            if (printParts != null) {
                for (final HousePart part : printParts) {
                    if (part instanceof Foundation) {
                        // Foundations are hidden while the preview is shown.
                        part.getRoot().getSceneHints().setCullHint(isPrintPreview ? CullHint.Always : CullHint.Inherit);
                    }
                }
            }
            if (isPrintPreview && printParts != null) {
                for (final HousePart part : printParts) {
                    if (part instanceof Wall) {
                        ((Wall) part).setBackMeshesVisible(false);
                    }
                }
            }
            if (isPrintPreview || doTheEndAnimation) {
                originalHouseRoot.getSceneHints().setCullHint(CullHint.Inherit);
                if (isPrintPreview && printParts != null) {
                    // Number the parts and draw labels on both the copy and the original.
                    int printSequence = 0;
                    for (final HousePart part : printParts) {
                        part.getOriginal().drawLabels(printSequence);
                        printSequence = part.drawLabels(printSequence);
                    }
                    SceneManager.getInstance().refresh();
                }
                setFinished(true);
            }
        }
    }
private void drawPrintParts(double flattenTime) {
if (printParts == null) {
return;
}
if (flattenTime < 0) {
flattenTime = 0;
}
if (flattenTime > 1) {
flattenTime = 1;
}
for (final HousePart part : printParts) {
if (part.isPrintable()) {
part.flatten(flattenTime);
} else if (part instanceof Window) {
((Window) part).hideBars();
}
}
}
    /**
     * Renders the preview pages at print resolution and sends them to the printer.
     * Temporarily resizes the canvas and camera, then restores both afterwards.
     */
    public void print() {
        Scene.getOriginalHouseRoot().getSceneHints().setCullHint(CullHint.Always);
        final Component canvas = (Component) SceneManager.getInstance().getCanvas();
        final int resolutionHeight = 2;
        final Dimension newSize = new Dimension(resolutionHeight * (int) pageFormat.getWidth(), resolutionHeight * (int) pageFormat.getHeight());
        SceneManager.getInstance().resetCamera(ViewMode.PRINT);
        final Dimension orgCanvasSize = canvas.getSize();
        final Dimension canvasSize = (Dimension) orgCanvasSize.clone();
        if (canvasSize.width % 4 != 0) {
            // Shrink the canvas width to a multiple of 4 — presumably a framebuffer
            // row-alignment requirement of the renderer; TODO confirm.
            canvasSize.width -= canvasSize.width % 4;
            canvas.setSize(canvasSize);
            canvas.validate();
        }
        final double ratio = (double) canvasSize.width / canvasSize.height;
        // NOTE(review): the cols/rows/pageWidth/pageHeight locals below shadow
        // the fields of the same names.
        final double cols = newSize.getWidth() / canvasSize.getWidth();
        final double rows = newSize.getHeight() / canvasSize.getHeight();
        final double pageWidth = PrintController.getInstance().getPageWidth() / cols;
        final double pageHeight = PrintController.getInstance().getPageHeight() / rows;
        if (ratio > pageWidth / pageHeight) {
            SceneManager.getInstance().resizeCamera(pageHeight * ratio);
        } else {
            SceneManager.getInstance().resizeCamera(pageWidth);
        }
        SceneManager.getInstance().refresh();
        final Printout printout = new Printout(pageFormat, newSize, pageWidth, pageHeight, printCenters);
        print(0, printout, 0, 0, pageWidth, pageHeight);
        // Restore the scene, canvas and camera to their pre-print state.
        Scene.getOriginalHouseRoot().getSceneHints().setCullHint(CullHint.Inherit);
        canvas.setSize(orgCanvasSize);
        canvas.validate();
        SceneManager.getInstance().resetCamera(ViewMode.PRINT_PREVIEW);
        SceneManager.getInstance().refresh();
    }
    /**
     * Shows the platform print dialog and, if confirmed, prints the given printout.
     * NOTE(review): pageNum, x, y, w and h are currently unused by this method.
     */
    private void print(final int pageNum, final Printout printout, final double x, final double y, final double w, final double h) {
        final PrinterJob job = PrinterJob.getPrinterJob();
        job.setPageable(printout);
        if (job.printDialog()) {
            try {
                job.print();
            } catch (final PrinterException exc) {
                exc.printStackTrace();
            }
        }
    }
    /**
     * Enters or leaves print-preview mode. The actual scene changes happen over
     * the following frames in update(); this method only arms the state machine.
     * On entry, the current shading/shadow/heliodon settings are saved and
     * switched off, so update() can restore them when the preview ends.
     */
    public void setPrintPreview(final boolean printPreview) {
        if (printPreview == isPrintPreview) {
            return;
        }
        isPrintPreview = printPreview;
        init = true;
        setFinished(false);
        if (printPreview) {
            shadingSelected = SceneManager.getInstance().isShadingEnabled();
            shadowSelected = SceneManager.getInstance().isShadowEnabled();
            heliodonSelected = SceneManager.getInstance().isHeliodonVisible();
            if (shadingSelected) {
                SceneManager.getInstance().setShading(false);
            }
            if (shadowSelected) {
                SceneManager.getInstance().setShadow(false);
            }
            if (heliodonSelected) {
                Heliodon.getInstance().setVisible(false);
            }
        }
    }
    /** Returns true while print-preview mode is active (or being entered). */
    public boolean isPrintPreview() {
        return isPrintPreview;
    }
    /** Spins the original house around the z-axis while spin view is enabled. */
    public void rotate() {
        if (SceneManager.getInstance().getSpinView()) {
            angle += 0.01; // radians per frame
            Scene.getOriginalHouseRoot().setRotation(new Matrix3().fromAngles(0, 0, angle));
        }
    }
    /** Returns the cloned print parts, or null when no preview is active. */
    List<HousePart> getPrintParts() {
        return printParts;
    }
    /** Records the animation-finished state and updates the preview UI controls to match. */
    private void setFinished(final boolean finished) {
        this.finished = finished;
        // The preview button/menu item stay disabled while the animation runs.
        MainPanel.getInstance().getPreviewButton().setEnabled(finished);
        MainFrame.getInstance().getPreviewMenuItem().setEnabled(finished);
        if (isPrintPreview() || finished) {
            MainPanel.getInstance().setToolbarEnabledForPreview(!isPrintPreview());
        }
    }
    /** Returns true when no preview transition animation is in progress. */
    public boolean isFinished() {
        return finished;
    }
private void computePageDimension() {
spaceBetweenParts = Scene.getInstance().areAnnotationsVisible() ? 3.0 : 0;
double fromPageToWorldCoord;
if (!isScaleToFit) {
fromPageToWorldCoord = exactFromPageToWorldCoord / (Scene.getInstance().getScale() / 10.0);
} else {
double maxWidth = 0;
double maxHeight = 0;
for (final HousePart printPart : printParts) {
if (printPart.isPrintable()) {
if (printPart instanceof Roof) {
for (final Spatial roofPartNode : ((Roof) printPart).getRoofPartsRoot().getChildren()) {
if (roofPartNode.getSceneHints().getCullHint() != CullHint.Always) {
final OrientedBoundingBox boundingBox = (OrientedBoundingBox) ((Node) roofPartNode).getChild(0).getWorldBound().asType(Type.OBB);
final double width = Math.min(boundingBox.getExtent().getX(), boundingBox.getExtent().getZ());
final double height = Math.max(boundingBox.getExtent().getX(), boundingBox.getExtent().getZ());
if (width > maxWidth) {
maxWidth = width;
}
if (height > maxHeight) {
maxHeight = height;
}
}
}
} else {
final OrientedBoundingBox boundingBox = (OrientedBoundingBox) printPart.getMesh().getWorldBound().asType(Type.OBB);
final double width = Math.min(boundingBox.getExtent().getX(), boundingBox.getExtent().getZ());
final double height = Math.max(boundingBox.getExtent().getX(), boundingBox.getExtent().getZ());
if (width > maxWidth) {
maxWidth = width;
}
if (height > maxHeight) {
maxHeight = height;
}
}
}
}
maxWidth *= 2;
maxHeight *= 2;
maxWidth += 2 * spaceBetweenParts;
maxHeight += 2 * spaceBetweenParts;
final double ratio = pageFormat.getImageableWidth() / pageFormat.getImageableHeight();
if (maxWidth / maxHeight > ratio) {
pageWidth = ratio < 1 ? Math.min(maxWidth, maxHeight) : Math.max(maxWidth, maxHeight);
pageHeight = pageWidth / ratio;
} else {
pageHeight = ratio < 1 ? Math.max(maxWidth, maxHeight) : Math.min(maxWidth, maxHeight);
pageWidth = pageHeight * ratio;
}
fromPageToWorldCoord = pageWidth / pageFormat.getImageableWidth();
}
pageLeft = pageFormat.getImageableX() * fromPageToWorldCoord + spaceBetweenParts / 2.0;
pageRight = (pageFormat.getImageableX() + pageFormat.getImageableWidth()) * fromPageToWorldCoord - spaceBetweenParts / 2.0;
pageTop = pageFormat.getImageableY() * fromPageToWorldCoord + spaceBetweenParts / 2.0;
if (labelHeight == 0.0) {
final BMText label = Annotation.makeNewLabel(1);
label.setFontScale(0.5);
labelHeight = label.getHeight();
}
pageBottom = (pageFormat.getImageableY() + pageFormat.getImageableHeight()) * fromPageToWorldCoord;
pageWidth = pageFormat.getWidth() * fromPageToWorldCoord;
pageHeight = pageFormat.getHeight() * fromPageToWorldCoord;
}
/**
 * Arranges the given print pages on a grid around the original house model and
 * attaches a page-boundary box plus a footnote label for each page.
 *
 * <p>The grid reserves the cells that would overlap the house itself, so the 3D
 * model stays visible between the pages. Updates the {@code cols}/{@code rows}
 * fields and fills {@code printCenters} with one upper-left corner per page.
 *
 * @param pages the pages to lay out; each inner list holds the parts printed on that page
 */
private void arrangePrintPages(final ArrayList<ArrayList<Spatial>> pages) {
    final double ratio = (double) Camera.getCurrentCamera().getWidth() / Camera.getCurrentCamera().getHeight();
    cols = (int) Math.round(Math.sqrt(pages.size() + 4) * ratio);
    if (cols % 2 == 0) {
        cols++; // keep the column count odd so the grid stays centered on the house
    }
    // BUG FIX: both operands of the division were int, so the quotient was truncated
    // before Math.ceil could round it up and the row count could come out one short.
    rows = (int) Math.ceil((pages.size() + 4) / (double) cols);
    int pageNum = 0;
    printCenters.clear();
    for (final ArrayList<Spatial> page : pages) {
        final Vector3 upperLeftCorner = new Vector3();
        double x, z;
        final BoundingBox originalHouseBoundingBox = (BoundingBox) Scene.getOriginalHouseRoot().getWorldBound().asType(Type.AABB);
        final ReadOnlyVector3 originalHouseCenter = Scene.getOriginalHouseRoot().getWorldBound().getCenter();
        final double minXDistance = originalHouseBoundingBox.getXExtent() + pageWidth / 2.0;
        final double minYDistance = originalHouseBoundingBox.getZExtent();
        // Advance through grid positions, skipping any that would overlap the house.
        do {
            x = (pageNum % cols - cols / 2.0) * (pageWidth + SPACE_BETWEEN_PAGES) + originalHouseCenter.getX();
            z = (pageNum / (double) cols) * (pageHeight + SPACE_BETWEEN_PAGES);
            upperLeftCorner.setX(x - pageWidth / 2.0);
            upperLeftCorner.setZ(z + pageHeight);
            pageNum++;
        } while (Math.abs(x - originalHouseCenter.getX()) < minXDistance && Math.abs(z - originalHouseCenter.getZ()) < minYDistance);
        printCenters.add(upperLeftCorner);
        // Shift every part's print center into this page's coordinate frame (X/Z only).
        for (final Spatial printSpatial : page) {
            ((UserData) printSpatial.getUserData()).getPrintCenter().addLocal(upperLeftCorner.multiply(1, 0, 1, null));
        }
        final Box box = new Box("Page Boundary");
        final double y = Scene.getOriginalHouseRoot().getWorldBound().getCenter().getY();
        box.setData(upperLeftCorner.add(0, y + 1, 0, null), upperLeftCorner.add(pageWidth, y + 1.2, -pageHeight, null));
        box.setModelBound(new BoundingBox());
        box.updateModelBound();
        pagesRoot.attachChild(box);
        // Footnote: "<file> - Page i / n - http://energy3d.concord.org/".
        final BMText footNote = Annotation.makeNewLabel(1);
        final String url = Scene.getURL() != null ? Scene.getURL().getFile().substring(Scene.getURL().getFile().lastIndexOf('/') + 1) + " -" : "";
        footNote.setText(url.replaceAll("%20", " ") + " Page " + printCenters.size() + " / " + pages.size() + " - http://energy3d.concord.org/");
        footNote.setFontScale(0.5);
        footNote.setAlign(Align.North);
        footNote.setTranslation(upperLeftCorner.add(pageWidth / 2.0, 0.0, -pageBottom - spaceBetweenParts / 2.0, null));
        pagesRoot.attachChild(footNote);
    }
    pagesRoot.updateGeometricState(0);
}
/**
 * Computes a print center for every printable house part, distributing the parts
 * over the given pages. Roofs are handled per roof section; every other part
 * contributes its single mesh.
 *
 * @param pages accumulator of pages; grown as needed by {@code computePrintCenterOf}
 */
private void computePrintCenters(final ArrayList<ArrayList<Spatial>> pages) {
    for (final HousePart part : printParts) {
        if (!part.isPrintable()) {
            continue;
        }
        part.getRoot().updateWorldTransform(true);
        part.getRoot().updateWorldBound(true);
        if (part instanceof Roof) {
            final Roof roof = (Roof) part;
            for (final Spatial roofSection : roof.getRoofPartsRoot().getChildren()) {
                // Sections culled from rendering are excluded from the printout as well.
                if (roofSection.getSceneHints().getCullHint() == CullHint.Always) {
                    continue;
                }
                final Mesh sectionMesh = (Mesh) ((Node) roofSection).getChild(0);
                roof.setPrintVertical(roofSection, decideVertical(sectionMesh));
                computePrintCenterOf(sectionMesh, pages);
            }
        } else {
            final Mesh partMesh = part.getMesh();
            part.setPrintVertical(decideVertical(partMesh));
            computePrintCenterOf(partMesh, pages);
        }
    }
}
/**
 * Decides whether the given mesh should be printed in vertical orientation.
 * A mesh is considered vertical when its Z extent exceeds its X extent.
 */
private boolean decideVertical(final Mesh mesh) {
    final OrientedBoundingBox bound = (OrientedBoundingBox) mesh.getWorldBound().asType(Type.OBB);
    final boolean isMeshVertical = bound.getExtent().getX() < bound.getExtent().getZ();
    final double imageableWidth = pageRight - pageLeft;
    // Keep a vertical mesh vertical when its full height (extent * 2) fits the imageable
    // width; rotate a horizontal mesh to vertical when its full width does NOT fit.
    // NOTE(review): the asymmetric '<' vs '>' looks intentional (fit vs. overflow), but
    // confirm that a vertical mesh too tall for the page is really meant to be laid flat.
    return isMeshVertical && bound.getExtent().getZ() * 2 < imageableWidth || !isMeshVertical && bound.getExtent().getX() * 2 > imageableWidth;
}
/**
 * Places {@code printPart} on the first page that has room for it; when no existing
 * page accepts the part, a new page is opened with the part in its upper-left corner.
 *
 * @param printPart the part to place; its {@link UserData} print center is updated
 * @param pages     the current pages; a new page may be appended
 */
private void computePrintCenterOf(final Spatial printPart, final ArrayList<ArrayList<Spatial>> pages) {
    for (final ArrayList<Spatial> page : pages) {
        if (fitInPage(printPart, page)) {
            return;
        }
    }
    // No page had room: anchor the part at the upper-left margin of a fresh page.
    printPart.updateWorldBound(true);
    final OrientedBoundingBox obb = (OrientedBoundingBox) printPart.getWorldBound().asType(Type.OBB);
    ((UserData) printPart.getUserData()).setPrintCenter(new Vector3(obb.getExtent().getX() + pageLeft,
            Scene.getOriginalHouseRoot().getWorldBound().getCenter().getY(), -obb.getExtent().getZ() - pageTop));
    final ArrayList<Spatial> newPage = new ArrayList<>();
    newPage.add(printPart);
    pages.add(newPage);
}
/**
 * Tries to place {@code printPart} on the given page next to a part already on it.
 * For each existing neighbor, four candidate positions (neighbor rotated by 0, 90,
 * 180, 270 degrees) are tried; the first candidate that stays inside the page
 * margins and collides with no other part on the page is accepted.
 *
 * @return true if the part was placed (its print center set and the part added to
 *         {@code page}), false if no candidate position fit
 */
private boolean fitInPage(final Spatial printPart, final ArrayList<Spatial> page) {
    for (final Spatial neighborPart : page) {
        final Vector3 neighborPartCenter = ((UserData) neighborPart.getUserData()).getPrintCenter();
        final OrientedBoundingBox neighborBound = (OrientedBoundingBox) neighborPart.getWorldBound().asType(Type.OBB);
        final OrientedBoundingBox printPartBound = (OrientedBoundingBox) printPart.getWorldBound().asType(Type.OBB);
        // Minimum center-to-center distances so the two parts do not overlap.
        final double xExtend = neighborBound.getExtent().getX() + printPartBound.getExtent().getX() + spaceBetweenParts;
        final double zExtend = neighborBound.getExtent().getZ() + printPartBound.getExtent().getZ() + spaceBetweenParts;
        for (double angleQuarter = 0; angleQuarter < 4; angleQuarter++) {
            final boolean isHorizontal = angleQuarter % 2 == 0;
            // Rotate the offset vector around Y by angleQuarter * 90 degrees, then move it next to the neighbor.
            final Vector3 tryCenter = new Matrix3().fromAngles(0, angleQuarter * Math.PI / 2.0, 0).applyPost(new Vector3(isHorizontal ? xExtend : zExtend, 0, 0), null);
            tryCenter.addLocal(neighborPartCenter);
            if (!isHorizontal) {
                tryCenter.setX(pageLeft + printPartBound.getExtent().getX());
            }
            // NOTE(review): for the vertical candidates the setX above already pins X to the
            // left margin, which equals the lower clamp bound below — making this clamp a
            // no-op. Confirm whether the unconditional setX is intentional.
            if (!isHorizontal) {
                tryCenter.setX(MathUtils.clamp(tryCenter.getX(), pageLeft + printPartBound.getExtent().getX(), pageRight - printPartBound.getExtent().getX()));
            } else {
                tryCenter.setZ(MathUtils.clamp(tryCenter.getZ(), -pageBottom + printPartBound.getExtent().getZ(), -pageTop - printPartBound.getExtent().getZ()));
            }
            tryCenter.setY(Scene.getOriginalHouseRoot().getWorldBound().getCenter().getY());
            boolean collision = false;
            // Reject candidates that stick out past the imageable page margins (with tolerance).
            if (tryCenter.getX() - printPartBound.getExtent().getX() < pageLeft - MathUtils.ZERO_TOLERANCE || tryCenter.getX()
                    + printPartBound.getExtent().getX() > pageRight + MathUtils.ZERO_TOLERANCE || tryCenter.getZ()
                    + printPartBound.getExtent().getZ() > -pageTop + MathUtils.ZERO_TOLERANCE || tryCenter.getZ() - printPartBound.getExtent().getZ() < -pageBottom - MathUtils.ZERO_TOLERANCE) {
                collision = true;
            } else {
                // Axis-aligned overlap test (extents plus spacing) against every part already placed.
                for (final Spatial otherPart : page) {
                    printPartBound.setCenter(tryCenter);
                    final OrientedBoundingBox otherPartBound = (OrientedBoundingBox) otherPart.getWorldBound().asType(Type.OBB);
                    otherPartBound.setCenter(((UserData) otherPart.getUserData()).getPrintCenter());
                    if (printPartBound.getExtent().getX() + otherPartBound.getExtent().getX() > Math.abs(printPartBound.getCenter().getX()
                            - otherPartBound.getCenter().getX()) - spaceBetweenParts + MathUtils.ZERO_TOLERANCE
                            && printPartBound.getExtent().getZ() + otherPartBound.getExtent().getZ() > Math.abs(printPartBound.getCenter().getZ()
                            - otherPartBound.getCenter().getZ()) - spaceBetweenParts + MathUtils.ZERO_TOLERANCE) {
                        collision = true;
                        break;
                    }
                }
            }
            if (!collision) {
                ((UserData) printPart.getUserData()).setPrintCenter(tryCenter);
                page.add(printPart);
                return true;
            }
        }
    }
    return false;
}
/** @return the print page width in world coordinates. */
public double getPageWidth() {
    return pageWidth;
}
/** @return the print page height in world coordinates. */
public double getPageHeight() {
    return pageHeight;
}
/**
 * Returns a camera location far enough back (along -Y) to show the whole grid of
 * print pages, centered on the original house when its bound is available.
 */
ReadOnlyVector3 getZoomAllCameraLocation() {
    final double rowPitch = getPageHeight() + SPACE_BETWEEN_PAGES;
    final double totalWidth = cols * (getPageWidth() + SPACE_BETWEEN_PAGES);
    final double totalHeight = rows * rowPitch;
    final double backOff = -Math.max(totalWidth, totalHeight);
    if (Scene.getOriginalHouseRoot().getWorldBound() != null) {
        return Scene.getOriginalHouseRoot().getWorldBound().getCenter().add(0, backOff, totalHeight / 2, null);
    }
    return new Vector3(0, backOff, totalHeight / 2);
}
/**
 * Shows the printer page-setup dialog on the AWT event thread and, when the user
 * changed the page format, restarts the print-preview animation so the new format
 * takes effect.
 */
public void pageSetup() {
    EventQueue.invokeLater(() -> {
        final PageFormat pf = PrinterJob.getPrinterJob().pageDialog(pageFormat);
        // pageDialog returns the original PageFormat instance when the user cancels,
        // so reference inequality means the user actually changed the format.
        if (pf != pageFormat) {
            pageFormat = pf;
            if (isPrintPreview()) {
                restartAnimation();
            }
        }
    });
}
/**
 * Sets whether the printout is scaled to fit the page, restarting the preview
 * animation when one is showing so the change becomes visible immediately.
 */
public void setScaleToFit(final boolean scaleToFit) {
    isScaleToFit = scaleToFit;
    if (isPrintPreview()) {
        restartAnimation();
    }
}
/**
 * Restarts the print-preview animation: leaving preview mode with restartFlag set
 * causes it to be re-entered with the current settings.
 */
public void restartAnimation() {
    restartFlag = true;
    setPrintPreview(false);
}
/** @return the scene-graph node under which the page boundaries and footnotes are attached. */
Node getPagesRoot() {
    return pagesRoot;
}
}
| |
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.os.Looper;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.analytics.AnalyticsCollector;
import com.google.android.exoplayer2.analytics.DefaultAnalyticsCollector;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.ShuffleOrder;
import com.google.android.exoplayer2.testutil.FakeMediaSource;
import com.google.android.exoplayer2.testutil.FakeShuffleOrder;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.Util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit test for {@link MediaSourceList}. */
@RunWith(AndroidJUnit4.class)
public class MediaSourceListTest {
  // Number of holders created by createFakeHolders().
  private static final int MEDIA_SOURCE_LIST_SIZE = 4;
  // Smallest valid MediaItem, returned by the mocked sources' getMediaItem().
  private static final MediaItem MINIMAL_MEDIA_ITEM =
      new MediaItem.Builder().setMediaId("").build();
  private MediaSourceList mediaSourceList;

  @Before
  public void setUp() {
    AnalyticsCollector analyticsCollector = new DefaultAnalyticsCollector(Clock.DEFAULT);
    analyticsCollector.setPlayer(
        new ExoPlayer.Builder(ApplicationProvider.getApplicationContext()).build(),
        Looper.getMainLooper());
    mediaSourceList =
        new MediaSourceList(
            mock(MediaSourceList.MediaSourceListInfoRefreshListener.class),
            analyticsCollector,
            Util.createHandlerForCurrentOrMainLooper(),
            PlayerId.UNSET);
  }

  @Test
  public void emptyMediaSourceList_expectConstantTimelineInstanceEMPTY() {
    ShuffleOrder.DefaultShuffleOrder shuffleOrder =
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 0);
    List<MediaSourceList.MediaSourceHolder> fakeHolders = createFakeHolders();
    Timeline timeline = mediaSourceList.setMediaSources(fakeHolders, shuffleOrder);
    assertNotSame(timeline, Timeline.EMPTY);
    // Remove all media sources.
    timeline =
        mediaSourceList.removeMediaSourceRange(
            /* fromIndex= */ 0, /* toIndex= */ timeline.getWindowCount(), shuffleOrder);
    assertSame(timeline, Timeline.EMPTY);
    timeline = mediaSourceList.setMediaSources(fakeHolders, shuffleOrder);
    assertNotSame(timeline, Timeline.EMPTY);
    // Clear.
    timeline = mediaSourceList.clear(shuffleOrder);
    assertSame(timeline, Timeline.EMPTY);
  }

  @Test
  public void prepareAndReprepareAfterRelease_expectSourcePreparationAfterMediaSourceListPrepare() {
    MediaSource mockMediaSource1 = mock(MediaSource.class);
    when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource2 = mock(MediaSource.class);
    when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    mediaSourceList.setMediaSources(
        createFakeHoldersWithSources(
            /* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2),
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 2));
    // Verify sources are not prepared before the list itself is prepared.
    verify(mockMediaSource1, times(0))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    verify(mockMediaSource2, times(0))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    mediaSourceList.prepare(/* mediaTransferListener= */ null);
    assertThat(mediaSourceList.isPrepared()).isTrue();
    // Verify prepare is called once on prepare.
    verify(mockMediaSource1, times(1))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    verify(mockMediaSource2, times(1))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    mediaSourceList.release();
    mediaSourceList.prepare(/* mediaTransferListener= */ null);
    // Verify prepare is called a second time on re-prepare.
    verify(mockMediaSource1, times(2))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    verify(mockMediaSource2, times(2))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
  }

  @Test
  public void setMediaSources_mediaSourceListUnprepared_notUsingLazyPreparation() {
    ShuffleOrder.DefaultShuffleOrder shuffleOrder =
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 2);
    MediaSource mockMediaSource1 = mock(MediaSource.class);
    when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource2 = mock(MediaSource.class);
    when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    List<MediaSourceList.MediaSourceHolder> mediaSources =
        createFakeHoldersWithSources(
            /* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2);
    Timeline timeline = mediaSourceList.setMediaSources(mediaSources, shuffleOrder);
    assertThat(timeline.getWindowCount()).isEqualTo(2);
    assertThat(mediaSourceList.getSize()).isEqualTo(2);
    // Assert holder offsets have been set properly
    for (int i = 0; i < mediaSources.size(); i++) {
      MediaSourceList.MediaSourceHolder mediaSourceHolder = mediaSources.get(i);
      assertThat(mediaSourceHolder.isRemoved).isFalse();
      assertThat(mediaSourceHolder.firstWindowIndexInChild).isEqualTo(i);
    }
    // Set media items again. The second holder is re-used.
    MediaSource mockMediaSource3 = mock(MediaSource.class);
    when(mockMediaSource3.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    List<MediaSourceList.MediaSourceHolder> moreMediaSources =
        createFakeHoldersWithSources(/* useLazyPreparation= */ false, mockMediaSource3);
    moreMediaSources.add(mediaSources.get(1));
    timeline = mediaSourceList.setMediaSources(moreMediaSources, shuffleOrder);
    assertThat(mediaSourceList.getSize()).isEqualTo(2);
    assertThat(timeline.getWindowCount()).isEqualTo(2);
    for (int i = 0; i < moreMediaSources.size(); i++) {
      MediaSourceList.MediaSourceHolder mediaSourceHolder = moreMediaSources.get(i);
      assertThat(mediaSourceHolder.isRemoved).isFalse();
      assertThat(mediaSourceHolder.firstWindowIndexInChild).isEqualTo(i);
    }
    // Expect removed holders and sources to be removed without releasing.
    verify(mockMediaSource1, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    assertThat(mediaSources.get(0).isRemoved).isTrue();
    // Expect re-used holder and source not to be removed.
    verify(mockMediaSource2, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    assertThat(mediaSources.get(1).isRemoved).isFalse();
  }

  @Test
  public void setMediaSources_mediaSourceListPrepared_notUsingLazyPreparation() {
    ShuffleOrder.DefaultShuffleOrder shuffleOrder =
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 2);
    MediaSource mockMediaSource1 = mock(MediaSource.class);
    when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource2 = mock(MediaSource.class);
    when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    List<MediaSourceList.MediaSourceHolder> mediaSources =
        createFakeHoldersWithSources(
            /* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2);
    mediaSourceList.prepare(/* mediaTransferListener= */ null);
    mediaSourceList.setMediaSources(mediaSources, shuffleOrder);
    // Verify sources are prepared.
    verify(mockMediaSource1, times(1))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    verify(mockMediaSource2, times(1))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    // Set media items again. The second holder is re-used.
    MediaSource mockMediaSource3 = mock(MediaSource.class);
    when(mockMediaSource3.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    List<MediaSourceList.MediaSourceHolder> moreMediaSources =
        createFakeHoldersWithSources(/* useLazyPreparation= */ false, mockMediaSource3);
    moreMediaSources.add(mediaSources.get(1));
    mediaSourceList.setMediaSources(moreMediaSources, shuffleOrder);
    // Expect removed holders and sources to be removed and released.
    verify(mockMediaSource1, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    assertThat(mediaSources.get(0).isRemoved).isTrue();
    // Expect re-used holder and source not to be removed but released.
    verify(mockMediaSource2, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    assertThat(mediaSources.get(1).isRemoved).isFalse();
    verify(mockMediaSource2, times(2))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
  }

  @Test
  public void addMediaSources_mediaSourceListUnprepared_notUsingLazyPreparation_expectUnprepared() {
    MediaSource mockMediaSource1 = mock(MediaSource.class);
    when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource2 = mock(MediaSource.class);
    when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    List<MediaSourceList.MediaSourceHolder> mediaSources =
        createFakeHoldersWithSources(
            /* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2);
    mediaSourceList.addMediaSources(
        /* index= */ 0, mediaSources, new ShuffleOrder.DefaultShuffleOrder(2));
    assertThat(mediaSourceList.getSize()).isEqualTo(2);
    // Verify lazy initialization does not call prepare on sources.
    verify(mockMediaSource1, times(0))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    verify(mockMediaSource2, times(0))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    for (int i = 0; i < mediaSources.size(); i++) {
      assertThat(mediaSources.get(i).firstWindowIndexInChild).isEqualTo(i);
      assertThat(mediaSources.get(i).isRemoved).isFalse();
    }
    // Add four more sources in between.
    List<MediaSourceList.MediaSourceHolder> moreMediaSources = createFakeHolders();
    mediaSourceList.addMediaSources(
        /* index= */ 1, moreMediaSources, new ShuffleOrder.DefaultShuffleOrder(/* length= */ 3));
    assertThat(mediaSources.get(0).firstWindowIndexInChild).isEqualTo(0);
    assertThat(moreMediaSources.get(0).firstWindowIndexInChild).isEqualTo(1);
    assertThat(moreMediaSources.get(3).firstWindowIndexInChild).isEqualTo(4);
    assertThat(mediaSources.get(1).firstWindowIndexInChild).isEqualTo(5);
  }

  @Test
  public void addMediaSources_mediaSourceListPrepared_notUsingLazyPreparation_expectPrepared() {
    MediaSource mockMediaSource1 = mock(MediaSource.class);
    when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource2 = mock(MediaSource.class);
    when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    mediaSourceList.prepare(/* mediaTransferListener= */ null);
    mediaSourceList.addMediaSources(
        /* index= */ 0,
        createFakeHoldersWithSources(
            /* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2),
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 2));
    // Verify prepare is called on sources when added.
    verify(mockMediaSource1, times(1))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    verify(mockMediaSource2, times(1))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
  }

  @Test
  public void moveMediaSources() {
    ShuffleOrder.DefaultShuffleOrder shuffleOrder =
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 4);
    List<MediaSourceList.MediaSourceHolder> holders = createFakeHolders();
    mediaSourceList.addMediaSources(/* index= */ 0, holders, shuffleOrder);
    assertDefaultFirstWindowInChildIndexOrder(holders);
    mediaSourceList.moveMediaSource(/* currentIndex= */ 0, /* newIndex= */ 3, shuffleOrder);
    assertFirstWindowInChildIndices(holders, 3, 0, 1, 2);
    mediaSourceList.moveMediaSource(/* currentIndex= */ 3, /* newIndex= */ 0, shuffleOrder);
    assertDefaultFirstWindowInChildIndexOrder(holders);
    mediaSourceList.moveMediaSourceRange(
        /* fromIndex= */ 0, /* toIndex= */ 2, /* newFromIndex= */ 2, shuffleOrder);
    assertFirstWindowInChildIndices(holders, 2, 3, 0, 1);
    mediaSourceList.moveMediaSourceRange(
        /* fromIndex= */ 2, /* toIndex= */ 4, /* newFromIndex= */ 0, shuffleOrder);
    assertDefaultFirstWindowInChildIndexOrder(holders);
    mediaSourceList.moveMediaSourceRange(
        /* fromIndex= */ 0, /* toIndex= */ 2, /* newFromIndex= */ 2, shuffleOrder);
    assertFirstWindowInChildIndices(holders, 2, 3, 0, 1);
    mediaSourceList.moveMediaSourceRange(
        /* fromIndex= */ 2, /* toIndex= */ 3, /* newFromIndex= */ 0, shuffleOrder);
    assertFirstWindowInChildIndices(holders, 0, 3, 1, 2);
    mediaSourceList.moveMediaSourceRange(
        /* fromIndex= */ 3, /* toIndex= */ 4, /* newFromIndex= */ 1, shuffleOrder);
    assertDefaultFirstWindowInChildIndexOrder(holders);
    // No-ops.
    mediaSourceList.moveMediaSourceRange(
        /* fromIndex= */ 0, /* toIndex= */ 4, /* newFromIndex= */ 0, shuffleOrder);
    assertDefaultFirstWindowInChildIndexOrder(holders);
    mediaSourceList.moveMediaSourceRange(
        /* fromIndex= */ 0, /* toIndex= */ 0, /* newFromIndex= */ 3, shuffleOrder);
    assertDefaultFirstWindowInChildIndexOrder(holders);
  }

  @Test
  public void removeMediaSources_whenUnprepared_expectNoRelease() {
    MediaSource mockMediaSource1 = mock(MediaSource.class);
    when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource2 = mock(MediaSource.class);
    when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource3 = mock(MediaSource.class);
    when(mockMediaSource3.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource4 = mock(MediaSource.class);
    when(mockMediaSource4.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    ShuffleOrder.DefaultShuffleOrder shuffleOrder =
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 4);
    List<MediaSourceList.MediaSourceHolder> holders =
        createFakeHoldersWithSources(
            /* useLazyPreparation= */ false,
            mockMediaSource1,
            mockMediaSource2,
            mockMediaSource3,
            mockMediaSource4);
    mediaSourceList.addMediaSources(/* index= */ 0, holders, shuffleOrder);
    mediaSourceList.removeMediaSourceRange(/* fromIndex= */ 1, /* toIndex= */ 3, shuffleOrder);
    assertThat(mediaSourceList.getSize()).isEqualTo(2);
    MediaSourceList.MediaSourceHolder removedHolder1 = holders.remove(1);
    MediaSourceList.MediaSourceHolder removedHolder2 = holders.remove(1);
    assertDefaultFirstWindowInChildIndexOrder(holders);
    assertThat(removedHolder1.isRemoved).isTrue();
    assertThat(removedHolder2.isRemoved).isTrue();
    // No source may be released while the list is unprepared.
    verify(mockMediaSource1, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    verify(mockMediaSource2, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    verify(mockMediaSource3, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    verify(mockMediaSource4, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
  }

  @Test
  public void removeMediaSources_whenPrepared_expectRelease() {
    MediaSource mockMediaSource1 = mock(MediaSource.class);
    when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource2 = mock(MediaSource.class);
    when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource3 = mock(MediaSource.class);
    when(mockMediaSource3.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource4 = mock(MediaSource.class);
    when(mockMediaSource4.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    ShuffleOrder.DefaultShuffleOrder shuffleOrder =
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 4);
    List<MediaSourceList.MediaSourceHolder> holders =
        createFakeHoldersWithSources(
            /* useLazyPreparation= */ false,
            mockMediaSource1,
            mockMediaSource2,
            mockMediaSource3,
            mockMediaSource4);
    mediaSourceList.prepare(/* mediaTransferListener= */ null);
    mediaSourceList.addMediaSources(/* index= */ 0, holders, shuffleOrder);
    mediaSourceList.removeMediaSourceRange(/* fromIndex= */ 1, /* toIndex= */ 3, shuffleOrder);
    assertThat(mediaSourceList.getSize()).isEqualTo(2);
    holders.remove(2);
    holders.remove(1);
    assertDefaultFirstWindowInChildIndexOrder(holders);
    // Only the two removed sources are released.
    verify(mockMediaSource1, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    verify(mockMediaSource2, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    verify(mockMediaSource3, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    verify(mockMediaSource4, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
  }

  @Test
  public void release_mediaSourceListUnprepared_expectSourcesNotReleased() {
    MediaSource mockMediaSource = mock(MediaSource.class);
    when(mockMediaSource.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSourceList.MediaSourceHolder mediaSourceHolder =
        new MediaSourceList.MediaSourceHolder(mockMediaSource, /* useLazyPreparation= */ false);
    mediaSourceList.setMediaSources(
        Collections.singletonList(mediaSourceHolder),
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 1));
    verify(mockMediaSource, times(0))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    mediaSourceList.release();
    verify(mockMediaSource, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    assertThat(mediaSourceHolder.isRemoved).isFalse();
  }

  @Test
  public void release_mediaSourceListPrepared_expectSourcesReleasedNotRemoved() {
    MediaSource mockMediaSource = mock(MediaSource.class);
    when(mockMediaSource.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSourceList.MediaSourceHolder mediaSourceHolder =
        new MediaSourceList.MediaSourceHolder(mockMediaSource, /* useLazyPreparation= */ false);
    mediaSourceList.prepare(/* mediaTransferListener= */ null);
    mediaSourceList.setMediaSources(
        Collections.singletonList(mediaSourceHolder),
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 1));
    verify(mockMediaSource, times(1))
        .prepareSource(
            any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull(), any());
    mediaSourceList.release();
    verify(mockMediaSource, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
    assertThat(mediaSourceHolder.isRemoved).isFalse();
  }

  @Test
  public void clearMediaSourceList_expectSourcesReleasedAndRemoved() {
    ShuffleOrder.DefaultShuffleOrder shuffleOrder =
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ 4);
    MediaSource mockMediaSource1 = mock(MediaSource.class);
    when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    MediaSource mockMediaSource2 = mock(MediaSource.class);
    when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
    List<MediaSourceList.MediaSourceHolder> holders =
        createFakeHoldersWithSources(
            /* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2);
    mediaSourceList.setMediaSources(holders, shuffleOrder);
    mediaSourceList.prepare(/* mediaTransferListener= */ null);
    Timeline timeline = mediaSourceList.clear(shuffleOrder);
    assertThat(timeline.isEmpty()).isTrue();
    assertThat(holders.get(0).isRemoved).isTrue();
    assertThat(holders.get(1).isRemoved).isTrue();
    verify(mockMediaSource1, times(1)).releaseSource(any());
    verify(mockMediaSource2, times(1)).releaseSource(any());
  }

  @Test
  public void setMediaSources_expectTimelineUsesCustomShuffleOrder() {
    Timeline timeline =
        mediaSourceList.setMediaSources(createFakeHolders(), new FakeShuffleOrder(/* length=*/ 4));
    assertTimelineUsesFakeShuffleOrder(timeline);
  }

  @Test
  public void addMediaSources_expectTimelineUsesCustomShuffleOrder() {
    Timeline timeline =
        mediaSourceList.addMediaSources(
            /* index= */ 0, createFakeHolders(), new FakeShuffleOrder(MEDIA_SOURCE_LIST_SIZE));
    assertTimelineUsesFakeShuffleOrder(timeline);
  }

  @Test
  public void moveMediaSources_expectTimelineUsesCustomShuffleOrder() {
    ShuffleOrder shuffleOrder =
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ MEDIA_SOURCE_LIST_SIZE);
    mediaSourceList.addMediaSources(/* index= */ 0, createFakeHolders(), shuffleOrder);
    Timeline timeline =
        mediaSourceList.moveMediaSource(
            /* currentIndex= */ 0, /* newIndex= */ 1, new FakeShuffleOrder(MEDIA_SOURCE_LIST_SIZE));
    assertTimelineUsesFakeShuffleOrder(timeline);
  }

  @Test
  public void moveMediaSourceRange_expectTimelineUsesCustomShuffleOrder() {
    ShuffleOrder shuffleOrder =
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ MEDIA_SOURCE_LIST_SIZE);
    mediaSourceList.addMediaSources(/* index= */ 0, createFakeHolders(), shuffleOrder);
    Timeline timeline =
        mediaSourceList.moveMediaSourceRange(
            /* fromIndex= */ 0,
            /* toIndex= */ 2,
            /* newFromIndex= */ 2,
            new FakeShuffleOrder(MEDIA_SOURCE_LIST_SIZE));
    assertTimelineUsesFakeShuffleOrder(timeline);
  }

  @Test
  public void removeMediaSourceRange_expectTimelineUsesCustomShuffleOrder() {
    ShuffleOrder shuffleOrder =
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ MEDIA_SOURCE_LIST_SIZE);
    mediaSourceList.addMediaSources(/* index= */ 0, createFakeHolders(), shuffleOrder);
    Timeline timeline =
        mediaSourceList.removeMediaSourceRange(
            /* fromIndex= */ 0, /* toIndex= */ 2, new FakeShuffleOrder(/* length= */ 2));
    assertTimelineUsesFakeShuffleOrder(timeline);
  }

  @Test
  public void setShuffleOrder_expectTimelineUsesCustomShuffleOrder() {
    mediaSourceList.setMediaSources(
        createFakeHolders(),
        new ShuffleOrder.DefaultShuffleOrder(/* length= */ MEDIA_SOURCE_LIST_SIZE));
    assertTimelineUsesFakeShuffleOrder(
        mediaSourceList.setShuffleOrder(new FakeShuffleOrder(MEDIA_SOURCE_LIST_SIZE)));
  }

  // Internal methods.

  /**
   * Asserts the timeline delegates to {@link FakeShuffleOrder}, whose next/previous
   * lookups always return {@code C.INDEX_UNSET} (-1).
   */
  private static void assertTimelineUsesFakeShuffleOrder(Timeline timeline) {
    assertThat(
            timeline.getNextWindowIndex(
                /* windowIndex= */ 0, Player.REPEAT_MODE_OFF, /* shuffleModeEnabled= */ true))
        .isEqualTo(-1);
    assertThat(
        timeline.getPreviousWindowIndex(
            /* windowIndex= */ timeline.getWindowCount() - 1,
            Player.REPEAT_MODE_OFF,
            /* shuffleModeEnabled= */ true))
        .isEqualTo(-1);
  }

  /** Asserts each holder's first window index equals its position in the list (0, 1, 2, ...). */
  private static void assertDefaultFirstWindowInChildIndexOrder(
      List<MediaSourceList.MediaSourceHolder> holders) {
    int[] indices = new int[holders.size()];
    for (int i = 0; i < indices.length; i++) {
      indices[i] = i;
    }
    assertFirstWindowInChildIndices(holders, indices);
  }

  /** Asserts the holders' first window indices match the expected values, in order. */
  private static void assertFirstWindowInChildIndices(
      List<MediaSourceList.MediaSourceHolder> holders, int... firstWindowInChildIndices) {
    assertThat(holders).hasSize(firstWindowInChildIndices.length);
    for (int i = 0; i < holders.size(); i++) {
      assertThat(holders.get(i).firstWindowIndexInChild).isEqualTo(firstWindowInChildIndices[i]);
    }
  }

  /** Creates {@link #MEDIA_SOURCE_LIST_SIZE} holders backed by lazily prepared fake sources. */
  private static List<MediaSourceList.MediaSourceHolder> createFakeHolders() {
    List<MediaSourceList.MediaSourceHolder> holders = new ArrayList<>();
    for (int i = 0; i < MEDIA_SOURCE_LIST_SIZE; i++) {
      holders.add(
          new MediaSourceList.MediaSourceHolder(
              new FakeMediaSource(), /* useLazyPreparation= */ true));
    }
    return holders;
  }

  /** Creates one holder per given source, all with the same lazy-preparation flag. */
  private static List<MediaSourceList.MediaSourceHolder> createFakeHoldersWithSources(
      boolean useLazyPreparation, MediaSource... sources) {
    List<MediaSourceList.MediaSourceHolder> holders = new ArrayList<>();
    for (MediaSource mediaSource : sources) {
      holders.add(
          new MediaSourceList.MediaSourceHolder(
              mediaSource, /* useLazyPreparation= */ useLazyPreparation));
    }
    return holders;
  }
}
| |
/**
* PepXmlGenericFileReader.java
* @author Vagisha Sharma
* Oct 5, 2009
* @version 1.0
*/
package org.yeastrc.ms.parser.pepxml;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.apache.log4j.Logger;
import org.yeastrc.ms.domain.analysis.peptideProphet.GenericPeptideProphetResultIn;
import org.yeastrc.ms.domain.analysis.peptideProphet.PeptideProphetROC;
import org.yeastrc.ms.domain.analysis.peptideProphet.PeptideProphetROCPoint;
import org.yeastrc.ms.domain.analysis.peptideProphet.PeptideProphetResultDataIn;
import org.yeastrc.ms.domain.analysis.peptideProphet.PeptideProphetResultPeptideBuilder;
import org.yeastrc.ms.domain.analysis.peptideProphet.impl.PeptideProphetResultData;
import org.yeastrc.ms.domain.general.MsEnzyme.Sense;
import org.yeastrc.ms.domain.general.impl.Enzyme;
import org.yeastrc.ms.domain.protinfer.proteinProphet.Modification;
import org.yeastrc.ms.domain.search.MsResidueModificationIn;
import org.yeastrc.ms.domain.search.MsRunSearchIn;
import org.yeastrc.ms.domain.search.MsSearchIn;
import org.yeastrc.ms.domain.search.MsSearchResultIn;
import org.yeastrc.ms.domain.search.MsSearchResultPeptide;
import org.yeastrc.ms.domain.search.MsTerminalModificationIn;
import org.yeastrc.ms.domain.search.Param;
import org.yeastrc.ms.domain.search.Program;
import org.yeastrc.ms.domain.search.SearchFileFormat;
import org.yeastrc.ms.domain.search.MsTerminalModification.Terminal;
import org.yeastrc.ms.domain.search.impl.ParamBean;
import org.yeastrc.ms.domain.search.impl.ResidueModification;
import org.yeastrc.ms.domain.search.impl.RunSearchBean;
import org.yeastrc.ms.domain.search.impl.SearchDatabase;
import org.yeastrc.ms.domain.search.impl.TerminalModification;
import org.yeastrc.ms.domain.search.pepxml.PepXmlSearchScanIn;
import org.yeastrc.ms.parser.DataProviderException;
import org.yeastrc.ms.parser.PepxmlDataProvider;
import org.yeastrc.ms.parser.sqtFile.DbLocus;
import org.yeastrc.ms.util.AminoAcidUtilsFactory;
import org.yeastrc.ms.util.BaseAminoAcidUtils;
/**
 * Generic StAX-based reader for pepXML files. Parses file-level headers
 * (Trans-Proteomic Pipeline marker, RefreshParser and PeptideProphet analysis
 * summaries), then exposes each &lt;msms_run_summary&gt; and its
 * &lt;spectrum_query&gt; elements through the {@link PepxmlDataProvider}
 * interface. Search-program-specific parsing is delegated to subclasses.
 *
 * Not thread-safe: a single instance walks a single XML cursor.
 */
public abstract class PepXmlGenericFileReader <T extends PepXmlSearchScanIn<G, R>,
                                               G extends GenericPeptideProphetResultIn<R>,
                                               R extends MsSearchResultIn,
                                               S extends MsSearchIn>
    implements PepxmlDataProvider<T> {
    // Path of the pepXML file currently open; set at the end of open().
    String pepXmlFilePath;
    // Underlying file stream; closed in close().
    private InputStream inputStr = null;
    // StAX cursor over the document; null until open() succeeds.
    XMLStreamReader reader = null;
    // these will be read once for the entire file
    // true if the file contains an <msms_pipeline_analysis> element (TPP output)
    boolean isTppFile = false;
    // true if an <analysis_summary analysis="database_refresh"> was seen
    boolean refreshParserRun = false;
    // true if an <analysis_summary analysis="peptideprophet"> was seen
    boolean peptideProphetRun = false;
    private String peptideProphetVersion;
    private PeptideProphetROC peptideProphetRoc;
    // these will be read for each <msms_run_summary> element
    private Program searchProgram;
    List<MsResidueModificationIn> searchDynamicResidueMods;
    List<MsResidueModificationIn> searchStaticResidueMods;
    List<MsTerminalModificationIn> searchStaticTerminalMods;
    List<MsTerminalModificationIn> searchDynamicTerminalMods;
    List<Param> searchParams;
    Enzyme enzyme;
    SearchDatabase searchDatabase;
    private String currentRunSearchName = null;
    // residue -> massdiff lookup used to classify per-hit modifications
    private Map<Character, BigDecimal> staticModMap;
    private Map<Character, BigDecimal> dynamicModMap;
    private PeptideProphetResultPeptideBuilder peptideResultBuilder;
    // true when the cursor already sits on the first <spectrum_query> of the
    // current run (left there by readRunSummary)
    private boolean atFirstSpectrumQueryElement = false;
    private static final Logger log = Logger.getLogger(PepXmlGenericFileReader.class.getName());
public void open(String filePath) throws DataProviderException {
XMLInputFactory inputFactory = XMLInputFactory.newInstance();
try {
inputStr = new FileInputStream(filePath);
reader = inputFactory.createXMLStreamReader(inputStr);
peptideResultBuilder = PeptideProphetResultPeptideBuilder.getInstance();
readHeadersForPipelineAnalysis(reader);
}
catch (FileNotFoundException e) {
throw new DataProviderException("File not found: "+filePath, e);
}
catch (XMLStreamException e) {
throw new DataProviderException("Error reading file: "+filePath, e);
}
this.pepXmlFilePath = filePath;
}
@Override
public void close() {
if (reader != null) {
try {reader.close();}
catch (XMLStreamException e) {}
}
if(inputStr != null) {
try {inputStr.close();}
catch(IOException e){}
}
}
// -------------------------------------------------------------------------------------
// PIPELINE_ANALYSIS
// -------------------------------------------------------------------------------------
void readHeadersForPipelineAnalysis(XMLStreamReader reader) throws XMLStreamException,
DataProviderException {
// TODO read date here
// <msms_pipeline_analysis date="2009-09-14T16:38:30"
while(reader.hasNext()) {
if(reader.next() == XMLStreamReader.START_ELEMENT) {
if(reader.getLocalName().equalsIgnoreCase("msms_pipeline_analysis")) {
this.isTppFile = true;
}
else if(reader.getLocalName().equalsIgnoreCase("analysis_summary")) {
// refresh parser analysis
if(reader.getAttributeValue(null,"analysis").equalsIgnoreCase("database_refresh")) {
refreshParserRun = true;
}
// peptide prophet analysis
else if(reader.getAttributeValue(null,"analysis").equalsIgnoreCase("peptideprophet")) {
peptideProphetRun = true;
readPeptideProphetAnalysisSummary(reader);
}
}
// we have come too far
else if(reader.getLocalName().equalsIgnoreCase("msms_run_summary")) {
return;
}
}
}
}
// -------------------------------------------------------------------------------------
// PEPTIDE_PROPHET SUMMARY
// -------------------------------------------------------------------------------------
    /**
     * Reads the &lt;peptideprophet_summary&gt; block inside an analysis
     * summary: the PeptideProphet version and all ROC data points.
     * Returns when the summary's end tag is reached.
     */
    private void readPeptideProphetAnalysisSummary(XMLStreamReader reader) throws XMLStreamException {
        boolean inPPAnalysis = false;
        this.peptideProphetRoc = new PeptideProphetROC();
        while(reader.hasNext()) {
            int evtType = reader.next();
            if(evtType == XMLStreamReader.START_ELEMENT) {
                if (reader.getLocalName().equalsIgnoreCase("peptideprophet_summary")) {
                    this.peptideProphetVersion = reader.getAttributeValue(null, "version");
                    inPPAnalysis = true;
                }
                else if (reader.getLocalName().equalsIgnoreCase("roc_data_point") && inPPAnalysis) {
                    // <roc_data_point min_prob="0.99" sensitivity="0.4384" error="0.0024" num_corr="1123" num_incorr="3"/>
                    // NOTE(review): assumes all five attributes are present and
                    // numeric; a missing one would throw at parse time — confirm
                    PeptideProphetROCPoint rocPoint = new PeptideProphetROCPoint();
                    rocPoint.setMinProbability(Double.parseDouble(reader.getAttributeValue(null, "min_prob")));
                    rocPoint.setSensitivity(Double.parseDouble(reader.getAttributeValue(null, "sensitivity")));
                    rocPoint.setError(Double.parseDouble(reader.getAttributeValue(null, "error")));
                    rocPoint.setNumCorrect(Integer.parseInt(reader.getAttributeValue(null, "num_corr")));
                    rocPoint.setNumIncorrect(Integer.parseInt(reader.getAttributeValue(null, "num_incorr")));
                    this.peptideProphetRoc.addRocPoint(rocPoint);
                }
            }
            else if(evtType == XMLStreamReader.END_ELEMENT) {
                // we have come to the end of what we need
                if(reader.getLocalName().equalsIgnoreCase("peptideprophet_summary")) {
                    break;
                }
            }
        }
    }
    /** @return the parent directory of the currently open pepXML file. */
    String getFileDirectory() {
        return new File(this.pepXmlFilePath).getParent();
    }
    /** @return true if a database_refresh analysis summary was found in the file headers. */
    public boolean isRefreshParserRun() {
        return refreshParserRun;
    }
    /** @return true if a peptideprophet analysis summary was found in the file headers. */
    public boolean isPeptideProphetRun() {
        return peptideProphetRun;
    }
    /** @return true if the file contains an &lt;msms_pipeline_analysis&gt; element (TPP output). */
    public boolean isTPPFile() {
        return this.isTppFile;
    }
    /** @return the PeptideProphet version string, or null if no PeptideProphet summary was read. */
    public String getPeptideProphetVersion() {
        return this.peptideProphetVersion;
    }
    /** @return the PeptideProphet ROC curve, or null if no PeptideProphet summary was read. */
    public PeptideProphetROC getPeptideProphetRoc() {
        return peptideProphetRoc;
    }
// -------------------------------------------------------------------------------------
// RUN SEARCH
// -------------------------------------------------------------------------------------
    /**
     * Advances the cursor to the next &lt;msms_run_summary&gt; element, if any.
     * On success, records the run's name, resets per-run state and reads the
     * run's enzyme and search-summary headers.
     *
     * @return true if another run summary was found
     * @throws DataProviderException on any underlying XML read error
     */
    @Override
    public boolean hasNextRunSearch() throws DataProviderException {
        if (reader == null)
            return false;
        try {
            while(reader.hasNext()) {
                int evtType = reader.getEventType();
                if (evtType == XMLStreamReader.START_ELEMENT) {
                    if (reader.getLocalName().equalsIgnoreCase("msms_run_summary")) {
                        // get the name of the input file
                        // NOTE(review): assumes the "base_name" attribute is present;
                        // if missing, new File(null) would throw NPE — confirm
                        currentRunSearchName = new File(reader.getAttributeValue(null, "base_name")).getName();
                        // re-initialize and read all summaries
                        readHeadersForRun(reader);
                        return true;
                    }
                }
                reader.next();
            }
        }
        catch (XMLStreamException e) {
            throw new DataProviderException("Error reading file: "+pepXmlFilePath, e);
        }
        return false;
    }
    /** @return the base name of the input file for the current &lt;msms_run_summary&gt;. */
    @Override
    public String getRunSearchName() {
        return currentRunSearchName;
    }
private void readHeadersForRun(XMLStreamReader reader) throws XMLStreamException,
DataProviderException {
initalizeFields();
readEnzyme(reader);
readRunSummary(reader);
}
private void initalizeFields() {
searchProgram = null;
searchParams = new ArrayList<Param>();
searchDynamicResidueMods = new ArrayList<MsResidueModificationIn>();
searchStaticResidueMods = new ArrayList<MsResidueModificationIn>();
searchStaticTerminalMods = new ArrayList<MsTerminalModificationIn>();
searchDynamicTerminalMods = new ArrayList<MsTerminalModificationIn>();
staticModMap = new HashMap<Character, BigDecimal>();
dynamicModMap = new HashMap<Character, BigDecimal>();
}
private void readEnzyme(XMLStreamReader reader) throws XMLStreamException {
while(reader.hasNext()) {
if(reader.next() == XMLStreamReader.END_ELEMENT) {
if(reader.getLocalName().equalsIgnoreCase("sample_enzyme"))
return;
}
if(reader.next() == XMLStreamReader.START_ELEMENT) {
if(reader.getLocalName().equalsIgnoreCase("sample_enzyme")) {
this.enzyme = new Enzyme();
enzyme.setName(reader.getAttributeValue(null,"name"));
}
else if(reader.getLocalName().equalsIgnoreCase("specificity")) {
enzyme.setCut(reader.getAttributeValue(null, "cut"));
enzyme.setNocut(reader.getAttributeValue(null, "no_cut"));
String sense = reader.getAttributeValue(null, "sense");
if(sense != null) {
if(sense.equals("C"))
enzyme.setSense(Sense.CTERM);
else
enzyme.setSense(Sense.NTERM);
}
}
}
}
}
    /**
     * Reads the remaining headers of the current run: the
     * &lt;search_summary&gt; element (and, ignored, analysis_timestamp).
     * Returns when the first &lt;spectrum_query&gt; is reached, leaving the
     * cursor on it and flagging that fact for hasNextSearchScan().
     */
    private void readRunSummary(XMLStreamReader reader) throws XMLStreamException, DataProviderException {
        while(reader.hasNext()) {
            if(reader.next() == XMLStreamReader.START_ELEMENT) {
                if(reader.getLocalName().equalsIgnoreCase("search_summary")) {
                    readRunSearchSummary(reader);
                }
                else if(reader.getLocalName().equalsIgnoreCase("analysis_timestamp")) {
                    // TODO are we interested in the contents of this element?
                }
                // we have come too far
                else if(reader.getLocalName().equalsIgnoreCase("spectrum_query")) {
                    // remember that the cursor already sits on the first scan
                    atFirstSpectrumQueryElement = true;
                    return;
                }
            }
        }
    }
    /**
     * Reads the &lt;search_summary&gt; element: the search program, mass-type
     * parameters, search database, enzymatic constraints, residue/terminal
     * modifications and generic parameters. Returns at the summary's end tag.
     */
    private void readRunSearchSummary(XMLStreamReader reader) throws XMLStreamException, DataProviderException {
        // first read the attributes that tell us the name of the search program and
        // anything else we are interested in
        // search_engine="MASCOT" precursor_mass_type="monoisotopic" fragment_mass_type="monoisotopic"
        String value = reader.getAttributeValue(null,"search_engine");
        if(value != null) {
            this.searchProgram = PepXmlUtils.parseProgram(value);
        }
        value = reader.getAttributeValue(null,"precursor_mass_type");
        if(value != null) {
            Param param = new ParamBean("precursor_mass_type", value);
            searchParams.add(param);
        }
        value = reader.getAttributeValue(null,"fragment_mass_type");
        if(value != null) {
            Param param = new ParamBean("fragment_mass_type", value);
            searchParams.add(param);
        }
        // read other interesting elements within the search_summary element
        while(reader.hasNext()) {
            int evtType = reader.next();
            if (evtType == XMLStreamReader.END_ELEMENT && reader.getLocalName().equalsIgnoreCase("search_summary")) {
                return;
            }
            else if(evtType == XMLStreamReader.START_ELEMENT) {
                if(reader.getLocalName().equalsIgnoreCase("search_database")) {
                    // NOTE(review): only local_path is read and stored as the
                    // server path; the "database_name" attribute is ignored — confirm intended
                    this.searchDatabase = new SearchDatabase();
                    searchDatabase.setServerPath(reader.getAttributeValue(null, "local_path"));
                }
                else if(reader.getLocalName().equalsIgnoreCase("enzymatic_search_constraint")) {
                    readMaxNumInternalCleavages(reader);
                    readMinEnzymaticTermini(reader);
                }
                else if (reader.getLocalName().equalsIgnoreCase("aminoacid_modification")) {
                    readResidueModification(reader);
                }
                else if (reader.getLocalName().equalsIgnoreCase("terminal_modification")) {
                    readTerminalModification(reader);
                }
                else if (reader.getLocalName().equalsIgnoreCase("parameter")) {
                    readParameters(reader);
                }
            }
        }
    }
private void readMinEnzymaticTermini(XMLStreamReader reader)
throws DataProviderException {
String value;
value = reader.getAttributeValue(null,"min_number_termini");
try {
Integer.parseInt(value);
Param param = new ParamBean("min_number_termini", value);
searchParams.add(param);
}
catch(NumberFormatException e) {
throw new DataProviderException("Invalid value for min_number_termini: "+value, e);
}
}
private void readMaxNumInternalCleavages(XMLStreamReader reader)
throws DataProviderException {
String value;
value = reader.getAttributeValue(null,"max_num_internal_cleavages");
if(value != null) {
try {
Integer.parseInt(value);
Param param = new ParamBean("max_num_internal_cleavages", value);
searchParams.add(param);
}
catch(NumberFormatException e) {
throw new DataProviderException("Invalid value for max_num_internal_cleavages: "+value, e);
}
}
}
    /**
     * Reads one &lt;aminoacid_modification&gt; element and records it as a
     * dynamic (variable="Y") or static (variable="N") residue modification,
     * also populating the residue-&gt;massdiff lookup maps.
     */
    private void readResidueModification(XMLStreamReader reader) throws XMLStreamException {
        // <aminoacid_modification aminoacid="M" massdiff="15.9949" mass="147.0354" variable="Y" symbol="*"/>
        // <aminoacid_modification aminoacid="C" massdiff="57.0215" mass="160.0306" variable="N"/>
        String variable = reader.getAttributeValue(null, "variable");
        // dynamic modifications
        if("Y".equalsIgnoreCase(variable)) {
            // NOTE(review): assumes "aminoacid" and "massdiff" attributes are
            // always present when variable="Y" — confirm against schema
            String aa = reader.getAttributeValue(null, "aminoacid");
            String symbol = reader.getAttributeValue(null, "symbol");
            String massdiff = reader.getAttributeValue(null, "massdiff");
            ResidueModification mod = new ResidueModification();
            mod.setModificationMass(new BigDecimal(massdiff));
            if(symbol != null)
                mod.setModificationSymbol(symbol.charAt(0));
            mod.setModifiedResidue(aa.charAt(0));
            this.searchDynamicResidueMods.add(mod);
            this.dynamicModMap.put(aa.charAt(0), new BigDecimal(massdiff));
        }
        // static modifications
        else if("N".equalsIgnoreCase(variable)) {
            String aa = reader.getAttributeValue(null, "aminoacid");
            String massdiff = reader.getAttributeValue(null, "massdiff");
            String mass = reader.getAttributeValue(null, "mass");
            if(Double.parseDouble(mass) - Double.parseDouble(massdiff) == 0) {
                // If mass == massDiff this is not really a static modification
                // This case happens in pepXML files generated with the Mascot to pepxml converter
                // Non-standard amino acid and their masses are stuck in the
                // <aminoacid_modification> elements
                // Example: <aminoacid_modification aminoacid="X" mass="111.000000" massdiff="111.000000" variable="N"/>
                // We will log a warning but save this as a static modification
                log.warn("mass and massdiff have same value: "+aa+" mass: "+mass+" massdiff: "+massdiff);
                if(AminoAcidUtilsFactory.getAminoAcidUtils().isAminoAcid(aa.charAt(0))) {
                    log.error("!!! mass and modmass same for a STANDARD AMINO ACID!!!");
                }
            }
            ResidueModification mod = new ResidueModification();
            mod.setModificationMass(new BigDecimal(massdiff));
            mod.setModifiedResidue(aa.charAt(0));
            this.searchStaticResidueMods.add(mod);
            this.staticModMap.put(aa.charAt(0), new BigDecimal(massdiff));
        }
    }
private void readTerminalModification(XMLStreamReader reader) throws XMLStreamException {
// <terminal_modification terminus="n" mass="305.213185" massdiff="304.205353" variable="N" protein_terminus="N"/>
String variable = reader.getAttributeValue(null, "variable");
// dynamic modifications
if("Y".equalsIgnoreCase(variable)) {
String terminus = reader.getAttributeValue(null, "terminus");
String massdiff = reader.getAttributeValue(null, "massdiff");
TerminalModification mod = new TerminalModification();
mod.setModificationMass(new BigDecimal(massdiff));
if(terminus != null)
mod.setModifiedTerminal(Terminal.instance(terminus.charAt(0)));
this.searchDynamicTerminalMods.add(mod);
}
// static modifications
else if("N".equalsIgnoreCase(variable)) {
String terminus = reader.getAttributeValue(null, "terminus");
String massdiff = reader.getAttributeValue(null, "massdiff");
TerminalModification mod = new TerminalModification();
mod.setModificationMass(new BigDecimal(massdiff));
if(terminus != null)
mod.setModifiedTerminal(Terminal.instance(terminus.charAt(0)));
this.searchStaticTerminalMods.add(mod);
}
}
private void readParameters(XMLStreamReader reader) throws XMLStreamException {
String name = reader.getAttributeValue(null, "name");
String value = reader.getAttributeValue(null, "value");
Param param = new ParamBean(name, value);
searchParams.add(param);
}
    /** @return the search program parsed from the current run's search_summary (may be null). */
    public Program getSearchProgram() {
        return this.searchProgram;
    }
    /**
     * Builds the run-search header for the current run, mapping the detected
     * search program to the corresponding pepXML file-format constant.
     *
     * @throws DataProviderException if the search program is unknown or was
     *         never read (searchProgram == null falls through to the throw)
     */
    @Override
    public MsRunSearchIn getRunSearchHeader() throws DataProviderException {
        RunSearchBean runSearch = new RunSearchBean();
        if(this.searchProgram == Program.SEQUEST)
            runSearch.setSearchFileFormat(SearchFileFormat.PEPXML_SEQ);
        else if(this.searchProgram == Program.MASCOT)
            runSearch.setSearchFileFormat(SearchFileFormat.PEPXML_MASCOT);
        else if(this.searchProgram == Program.XTANDEM)
            runSearch.setSearchFileFormat(SearchFileFormat.PEPXML_XTANDEM);
        else if(this.searchProgram == Program.COMET)
            runSearch.setSearchFileFormat(SearchFileFormat.PEPXML_COMET);
        else
            throw new DataProviderException("Unknown search program for pepxml file: "+this.pepXmlFilePath);
        return runSearch;
    }
// -------------------------------------------------------------------------------------
// SEARCH SCAN
// -------------------------------------------------------------------------------------
@Override
/**
* Returns true if there is a spectrum_query element to be read
*/
public boolean hasNextSearchScan() throws DataProviderException {
if (reader == null)
return false;
try {
while(reader.hasNext()) {
if(!atFirstSpectrumQueryElement) {
reader.next();
}
atFirstSpectrumQueryElement = false;
int evtId = reader.getEventType();
if (evtId == XMLStreamReader.END_ELEMENT) {
// this is the end of one msms_run_summary
if (reader.getLocalName().equals("msms_run_summary")) {
return false;
}
}
else if (evtId == XMLStreamReader.START_ELEMENT && reader.getLocalName().equalsIgnoreCase("spectrum_query")) {
return true;
}
}
}
catch (XMLStreamException e) {
throw new DataProviderException("Error reading file: "+pepXmlFilePath, e);
}
return false;
}
public T getNextSearchScan() throws DataProviderException {
T scan = initNewSearchScan();
readPepXmlSearchScan(scan);
// read the search hits for this scan
try {
readHitsForScan(scan, searchDynamicResidueMods);
}
catch (XMLStreamException e) {
throw new DataProviderException("Error reading file: "+pepXmlFilePath, e);
}
return (T) scan;
}
    //-------------------------------------------------------------------------------------------
    // To be implemented by subclasses
    //-------------------------------------------------------------------------------------------
    /** Returns the program-specific search header for this file. */
    public abstract S getSearch();
    /** Creates a new, empty scan object of the subclass-specific type. */
    protected abstract T initNewSearchScan();
    /** Creates a new, empty PeptideProphet-annotated result wrapper. */
    protected abstract G initNewPeptideProphetResult();
    /** Creates a new, empty program-specific search result. */
    protected abstract R initNewSearchResult();
    /** Reads program-specific attributes of the current &lt;search_hit&gt; into the result. */
    protected abstract void readProgramSpecificResult(R result);
    /** Handles one program-specific &lt;search_score&gt; name/value pair. */
    protected abstract void readProgramSpecificScore(R result, String name, String value);
    /**
     * Returns the monoisotopic mass of the given amino acid.
     * NOTE(review): getModMassDiff() implies implementations return 0 for
     * unknown residues — confirm in the subclasses.
     */
    protected abstract double getMonoAAMass(char aa);
    //-------------------------------------------------------------------------------------------
    //-------------------------------------------------------------------------------------------
// ---------------------------------------------------------------------------------------
// read attributes for the <spectrum_query> element
// ---------------------------------------------------------------------------------------
    /**
     * Copies the attributes of the &lt;spectrum_query&gt; element the cursor
     * is on (scan number, observed mass, charge, retention time) into the
     * given scan object.
     */
    void readPepXmlSearchScan(T scan) {
        for (int i = 0; i < reader.getAttributeCount(); i++) {
            String attrib = reader.getAttributeLocalName(i);
            String val = reader.getAttributeValue(i);
            if (attrib.equalsIgnoreCase("start_scan"))
                scan.setScanNumber(Integer.parseInt(val));
            else if (attrib.equalsIgnoreCase("precursor_neutral_mass"))
                // NOTE: We store M+H in the database
                // (neutral mass + one proton, as defined by BaseAminoAcidUtils.PROTON)
                scan.setObservedMass(new BigDecimal(val).add(BigDecimal.valueOf(BaseAminoAcidUtils.PROTON)));
            else if (attrib.equalsIgnoreCase("assumed_charge"))
                scan.setCharge(Integer.parseInt(val));
            else if (attrib.equalsIgnoreCase("retention_time_sec"))
                scan.setRetentionTime(new BigDecimal(val));
        }
    }
private void readHitsForScan(T scanResult, List<MsResidueModificationIn> searchDynaResidueMods)
throws XMLStreamException, DataProviderException {
while(reader.hasNext()) {
int evtType = reader.next();
if (evtType == XMLStreamReader.END_ELEMENT && reader.getLocalName().equalsIgnoreCase("spectrum_query"))
break;
if (evtType == XMLStreamReader.START_ELEMENT && reader.getLocalName().equalsIgnoreCase("search_hit")) {
G hit = readSearchHit(scanResult, searchDynaResidueMods);
scanResult.addSearchResult(hit);
}
}
}
// ---------------------------------------------------------------------------------------
// read contents for the <search_hit> element
// ---------------------------------------------------------------------------------------
    /**
     * Reads one &lt;search_hit&gt; element into a PeptideProphet-annotated
     * result: hit attributes (peptide, flanking residues, primary protein),
     * then nested modification_info, alternative_protein, search_score and
     * analysis_result children, until the hit's end tag.
     *
     * @return the populated hit
     * @throws DataProviderException if the result peptide cannot be built
     */
    private G readSearchHit(T scanResult,
            List<MsResidueModificationIn> searchDynaResidueMods)
        throws XMLStreamException, DataProviderException {
        G hit = initNewPeptideProphetResult();
        int numMatchingProteins = 0;
        String peptideSeq = null;
        char preResidue = 0;
        char postResidue = 0;
        String prAcc = null;
        String prDescr = null;
        int numEnzymaticTermini = 0;
        R searchResult = initNewSearchResult();
        // scan-level values are duplicated onto each hit
        searchResult.setScanNumber(scanResult.getScanNumber());
        searchResult.setCharge(scanResult.getCharge());
        searchResult.setObservedMass(scanResult.getObservedMass());
        // read the attributes
        for (int i = 0; i < reader.getAttributeCount(); i++) {
            String attrib = reader.getAttributeLocalName(i);
            String val = reader.getAttributeValue(i);
            if (attrib.equalsIgnoreCase("peptide"))
                peptideSeq = val;
            else if (attrib.equalsIgnoreCase("peptide_prev_aa"))
                preResidue = Character.valueOf(val.charAt(0));
            else if (attrib.equalsIgnoreCase("peptide_next_aa"))
                postResidue = Character.valueOf(val.charAt(0));
            else if (attrib.equalsIgnoreCase("protein"))
                prAcc = val;
            else if (attrib.equalsIgnoreCase("protein_descr"))
                prDescr = val;
            else if (attrib.equalsIgnoreCase("num_tot_proteins"))
                numMatchingProteins = Integer.parseInt(val);
            else if(attrib.equalsIgnoreCase("num_tol_term"))
                numEnzymaticTermini = Integer.parseInt(val);
        }
        readProgramSpecificResult(searchResult); // read in Sequest or Mascot specific scores
        // the hit's primary protein match
        DbLocus locus1 = new DbLocus(prAcc, prDescr);
        locus1.setNtermResidue(preResidue);
        locus1.setCtermResidue(postResidue);
        locus1.setNumEnzymaticTermini(numEnzymaticTermini);
        searchResult.addMatchingProteinMatch(locus1);
        List<Modification> resultModifications = new ArrayList<Modification>();
        // read other interesting nested elements
        while(reader.hasNext()) {
            int evtType = reader.next();
            if (evtType == XMLStreamReader.END_ELEMENT && reader.getLocalName().equalsIgnoreCase("search_hit"))
                break;
            if (evtType == XMLStreamReader.START_ELEMENT) {
                // read the modification information
                if(reader.getLocalName().equalsIgnoreCase("modification_info")) {
                    resultModifications = readModifications(peptideSeq, reader);
                }
                // read the <alternative_protein> elements
                else if (reader.getLocalName().equalsIgnoreCase("alternative_protein")) {
                    DbLocus locus = readAlternativeProtein();
                    searchResult.addMatchingProteinMatch(locus);
                }
                // read the <search_score> elements
                else if (reader.getLocalName().equalsIgnoreCase("search_score")) {
                    String scoreType = reader.getAttributeValue(null, "name");
                    String scoreVal = reader.getAttributeValue(null, "value");
                    readProgramSpecificScore(searchResult, scoreType, scoreVal);
                }
                // read the <analysis_result> elemets
                else if (reader.getLocalName().equalsIgnoreCase("analysis_result")) {
                    String analysisProgram = reader.getAttributeValue(null, "analysis");
                    if(analysisProgram.equalsIgnoreCase("peptideprophet")) {
                        PeptideProphetResultDataIn ppRes = readPeptideProphetHitAnalysis(reader);
                        hit.setPeptideProphetResult(ppRes);
                    }
                }
            }
        } // end of parsing
        // set the result peptide
        MsSearchResultPeptide resultPeptide = peptideResultBuilder.buildResultPeptide(
                peptideSeq, preResidue, postResidue, resultModifications);
        searchResult.setResultPeptide(resultPeptide);
        if (numMatchingProteins != searchResult.getProteinMatchList().size()) {
            // Known mismatch between the num_tot_proteins attribute and the
            // number of protein entries actually present in the file;
            // deliberately ignored (warning/throw variants were tried and
            // disabled).
        }
        hit.setSearchResult(searchResult);
        return hit;
    }
// ---------------------------------------------------------------------------------
// read contents of the <analysis_result analysis="peptideprophet"> element
// ---------------------------------------------------------------------------------
    /**
     * Reads the &lt;analysis_result analysis="peptideprophet"&gt; block of a
     * search hit: the probability, all_ntt_prob string and the fval/ntt/nmc/
     * massd parameters. Returns at the block's end tag.
     *
     * @throws NumberFormatException if a numeric attribute cannot be parsed
     */
    private PeptideProphetResultDataIn readPeptideProphetHitAnalysis(XMLStreamReader reader) throws NumberFormatException, XMLStreamException {
        PeptideProphetResultData ppRes = new PeptideProphetResultData();
        // read all the interesting children elements
        while(reader.hasNext()) {
            int evtType = reader.next();
            if (evtType == XMLStreamReader.END_ELEMENT && reader.getLocalName().equalsIgnoreCase("analysis_result"))
                break;
            else if(evtType == XMLStreamReader.START_ELEMENT) {
                if (reader.getLocalName().equalsIgnoreCase("peptideprophet_result")) {
                    String probability = reader.getAttributeValue(null, "probability");
                    String allNttProb = reader.getAttributeValue(null, "all_ntt_prob");
                    ppRes.setAllNttProb(allNttProb);
                    ppRes.setProbability(Double.parseDouble(probability));
                }
                // read the <parameter> elements (PeptideProphet scores)
                else if (reader.getLocalName().equalsIgnoreCase("parameter")) {
                    String scoreType = reader.getAttributeValue(null, "name");
                    String scoreVal = reader.getAttributeValue(null, "value");
                    if (scoreType.equalsIgnoreCase("fval"))
                        ppRes.setfVal(Double.parseDouble(scoreVal));
                    else if (scoreType.equalsIgnoreCase("ntt"))
                        ppRes.setNumEnzymaticTermini(Integer.parseInt(scoreVal));
                    else if (scoreType.equalsIgnoreCase("nmc"))
                        ppRes.setNumMissedCleavages(Integer.parseInt(scoreVal));
                    else if (scoreType.equalsIgnoreCase("massd"))
                        ppRes.setMassDifference(Double.parseDouble(scoreVal));
                }
            }
        }
        return ppRes;
    }
// ---------------------------------------------------------------------------------
// read contents of <modification_info> element
// ---------------------------------------------------------------------------------
private List<Modification> readModifications(String peptideSeq, XMLStreamReader reader) throws XMLStreamException, DataProviderException {
List<Modification> dynamicMods = new ArrayList<Modification>();
// read any relevant attributes
// String modifiedPeptide = reader.getAttributeValue(null, "modified_peptide");
String modNtermMass = reader.getAttributeValue(null, "mod_nterm_mass");
String modCtermMass = reader.getAttributeValue(null, "mod_ctermMass");
// N-term modification
if(modNtermMass != null) {
double mass = Double.parseDouble(modNtermMass);
mass -= BaseAminoAcidUtils.NTERM_MASS;
// add this only if this is a dynamic terminal modification
boolean isStaticTermMod = false;
for(MsTerminalModificationIn mMod: this.searchStaticTerminalMods) {
if(mMod.getModifiedTerminal() != Terminal.NTERM)
continue;
double diff = mass - mMod.getModificationMass().doubleValue();
if(Math.abs(diff) <= 0.05) {
isStaticTermMod = true;
break;
}
}
if(!isStaticTermMod) {
boolean foundMatch = false;
for(MsTerminalModificationIn mMod: this.searchDynamicTerminalMods) {
if(mMod.getModifiedTerminal() != Terminal.NTERM)
continue;
double diff = mass - mMod.getModificationMass().doubleValue();
if(Math.abs(diff) <= 0.05) {
dynamicMods.add(new Modification(mMod.getModificationMass(), Terminal.NTERM));
foundMatch = true;
}
}
if(!foundMatch) {
throw new DataProviderException("No match found for dynamic N-term mod mass: "+modNtermMass);
}
}
}
// C-term modification
if(modCtermMass != null) {
double mass = Double.parseDouble(modCtermMass);
mass -= BaseAminoAcidUtils.CTERM_MASS;
// add this only if this is a dynamic terminal modification
boolean isStaticTermMod = false;
for(MsTerminalModificationIn mMod: this.searchStaticTerminalMods) {
if(mMod.getModifiedTerminal() != Terminal.CTERM)
continue;
double diff = mass - mMod.getModificationMass().doubleValue();
if(Math.abs(diff) <= 0.05) {
isStaticTermMod = true;
break;
}
}
if(!isStaticTermMod) {
boolean foundMatch = false;
for(MsTerminalModificationIn mMod: this.searchDynamicTerminalMods) {
if(mMod.getModifiedTerminal() != Terminal.CTERM)
continue;
double diff = mass - mMod.getModificationMass().doubleValue();
if(Math.abs(diff) <= 0.05) {
dynamicMods.add(new Modification(mMod.getModificationMass(), Terminal.CTERM));
foundMatch = true;
}
}
if(!foundMatch) {
throw new DataProviderException("No match found for dynamic C-term mod mass: "+modCtermMass);
}
}
}
// read useful nested elements
while(reader.hasNext()) {
int evtType = reader.next();
if(evtType == XMLStreamReader.END_ELEMENT && reader.getLocalName().equalsIgnoreCase("modification_info"))
break;
if(evtType == XMLStreamReader.START_ELEMENT && reader.getLocalName().equalsIgnoreCase("mod_aminoacid_mass")) {
int pos = Integer.parseInt(reader.getAttributeValue(null, "position"));
BigDecimal mass = new BigDecimal(reader.getAttributeValue(null, "mass"));
// Add only if this is a dynamic residue modification
// this will also match it against the dynamic modifications used for the search
Modification mMod = makeModification(peptideSeq.charAt(pos - 1), pos-1, mass);
if(mMod != null) {
dynamicMods.add(mMod);
}
}
}
return dynamicMods;
}
    /**
     * Classifies a per-residue modification mass: returns null for a static
     * modification, a Modification for a recognized dynamic one, and throws
     * if nothing declared in the search summary matches (tolerance 0.5).
     *
     * @param modChar  the modified residue
     * @param position zero-based position of the residue in the peptide
     * @param mass     amino-acid mass + modification mass, as reported in the file
     */
    private Modification makeModification(char modChar, int position, BigDecimal mass) throws DataProviderException {
        // the purpose is to figure out if this represents a dynamic modification (residue)
        // we also want to get the massDiff for this modification since the modification_info
        // element has mass_of_aminoacid + modification_mass.
        double massDiff = getModMassDiff(modChar, mass, false);
        // If this mass difference and modChar match a static modification return null
        BigDecimal modMass = staticModMap.get(modChar);
        if(modMass != null && Math.abs(massDiff - modMass.doubleValue()) < 0.5) {
            return null; // this is a static modification
        }
        // recompute with any static contribution removed before matching dynamic mods
        massDiff = getModMassDiff(modChar, mass, true);
        modMass = dynamicModMap.get(modChar);
        if(modMass != null) {
            if(Math.abs(massDiff - modMass.doubleValue()) < 0.5) {
                return new Modification(position, modMass);
            }
            else {
                throw new DataProviderException("Found a match for modified residue: "+modChar+
                        " but no match for mass: "+mass.doubleValue());
            }
        }
        throw new DataProviderException("No modification match found for : "+modChar+
                " and mass: "+mass.doubleValue());
    }
    /**
     * Computes the bare modification mass from the combined value reported in
     * the pepXML file (amino-acid mass + modification mass), optionally also
     * subtracting the residue's static modification.
     *
     * @param subtractStatic if true, subtract any static modification mass
     *        declared for this residue as well
     */
    protected final double getModMassDiff(char modChar, BigDecimal mass, boolean subtractStatic) {
        // modification mass in pepXml files are: mass_of_aminoacid + modification_mass
        // we need just the modification_mass
        double massDiff = mass.doubleValue() - getMonoAAMass(modChar); // AminoAcidUtils.monoMass(modChar);
        if(massDiff == mass.doubleValue()) {
            // If massDiff is the same as mass it means we did not find mass for this amino acid
            // This can happen for non-standard amino acids
            // We will log a warning
            // (exact double comparison is intentional: an unknown residue
            // leaves the value unchanged, i.e. a mass of exactly 0 was used)
            log.warn("No mass found for amino acid: "+modChar);
        }
        // if this is amino acid has a static modification, subtract that
        // A residue could have both static and dynamic modifications(??)
        if(subtractStatic) {
            BigDecimal staticModMass = staticModMap.get(modChar);
            if(staticModMass != null) {
                massDiff -= staticModMass.doubleValue();
            }
        }
        return massDiff;
    }
// ---------------------------------------------------------------------------------
// read attributes of <alternative_protein> element
// ---------------------------------------------------------------------------------
private DbLocus readAlternativeProtein() {
String prAcc = null;
String prDescr = null;
char preResidue = 0;
char postResidue = 0;
int numEnzymaticTermini = 0;
for (int i = 0; i < reader.getAttributeCount(); i++) {
String attrib = reader.getAttributeLocalName(i);
String val = reader.getAttributeValue(i);
if (attrib.equalsIgnoreCase("protein"))
prAcc = val;
else if (attrib.equalsIgnoreCase("protein_descr"))
prDescr = val;
else if (attrib.equalsIgnoreCase("peptide_prev_aa"))
preResidue = Character.valueOf(val.charAt(0));
else if (attrib.equalsIgnoreCase("peptide_next_aa"))
postResidue = Character.valueOf(val.charAt(0));
else if (attrib.equalsIgnoreCase("num_tol_term"))
numEnzymaticTermini = Integer.parseInt(val);
}
DbLocus locus = new DbLocus(prAcc, prDescr);
locus.setNtermResidue(preResidue);
locus.setCtermResidue(postResidue);
locus.setNumEnzymaticTermini(numEnzymaticTermini);
return locus;
}
}
| |
package com.devexmile.tunetz.choiceradio;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.ActivityInfo;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;
import com.devexmile.tunetz.R;
import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.AdView;
import com.purplebrain.adbuddiz.sdk.AdBuddiz;
public class ChoiceFmActivity extends AppCompatActivity implements View.OnClickListener {
    // Index of the disc image currently displayed; cycled by swapDisk().
    public int diskImage = 1;
    private AdView mAdView;
    private Button mGuideButton;

    /*
     * Return true if the device has a network adapter that is capable of
     * accessing the network.
     */
    protected static boolean networkEnabled(ConnectivityManager connec) {
        if (connec == null) {
            return false;
        }
        try {
            // Query each NetworkInfo once (the old code fetched it twice per
            // type, which could race with a connectivity change) and use the
            // named type constants instead of the bare literals 1 and 0.
            NetworkInfo wifi = connec.getNetworkInfo(ConnectivityManager.TYPE_WIFI);
            if (wifi != null && wifi.getState() == NetworkInfo.State.CONNECTED) {
                return true;
            }
            NetworkInfo mobile = connec.getNetworkInfo(ConnectivityManager.TYPE_MOBILE);
            return mobile != null && mobile.getState() == NetworkInfo.State.CONNECTED;
        } catch (NullPointerException exception) {
            // Defensive: kept from the original code in case a device's
            // ConnectivityManager implementation throws internally.
            return false;
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_choicefm);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        mGuideButton = (Button) findViewById(R.id.schedule);
        mGuideButton.setOnClickListener(this);
        // Gets the ad view defined in layout/ad_fragment.xml with ad unit ID set in
        // values/strings.xml.
        mAdView = (AdView) findViewById(R.id.ad_view);
        AdBuddiz.showAd(this); // this = current Activity
        // Create an ad request. Check your logcat output for the hashed device ID to
        // get test ads on a physical device. e.g.
        // "Use AdRequest.Builder.addTestDevice("ABCDEF012345") to get test ads on this device."
        AdRequest adRequest = new AdRequest.Builder()
                .addTestDevice(AdRequest.DEVICE_ID_EMULATOR)
                .build();
        // Start loading the ad in the background.
        mAdView.loadAd(adRequest);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
        ConnectivityManager cm = (ConnectivityManager) this
                .getSystemService(Context.CONNECTIVITY_SERVICE);
        // check connections before downloading..
        if (!networkEnabled(cm)) {
            Toast.makeText(this, "No Network Connection", Toast.LENGTH_LONG)
                    .show();
        }
        getFragmentManager()
                .beginTransaction()
                .replace(R.id.choicefm_banner_container,
                        new ChoiceFmBannerFragment()).commit();
    }

    /**
     * Shows the station guide chooser when the guide button is clicked.
     */
    @Override
    public void onClick(View v) {
        // TODO(review): the guide entries are empty placeholders — fill in
        // the actual programme names.
        final CharSequence[] items = {"", "", ""
        };
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle("Choice FM guide");
        builder.setItems(items, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int item) {
                // Do something with the selection
                // mGuideButton.setText(items[item]);
            }
        });
        AlertDialog alert = builder.create();
        alert.show();
    }

    // Method to rotate the disc images
    public void swapDisk(View view) {
        final ImageView diskView = (ImageView) findViewById(R.id.choicefmdiskImage);
        // Long-press swaps in an alternative artwork while the first disc is shown.
        diskView.setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                if (diskImage == 0)
                    diskView.setImageResource(R.drawable.medium_kington);
                return true;
            }
        });
        // rotate through disk images: 0 -> 1 -> 2 -> 3 -> 0
        if (diskImage == 0) {
            // swap to disk 1
            diskView.setImageResource(R.drawable.choicefm_radio_disk);
            diskImage = 1;
        } else if (diskImage == 1) {
            // swap to disk 2
            diskView.setImageResource(R.drawable.medium_kdictext);
            diskImage = 2;
        } else if (diskImage == 2) {
            diskView.setImageResource(R.drawable.medium_tribe);
            diskImage = 3;
        } else if (diskImage == 3) {
            diskView.setImageResource(R.drawable.medium_chronic);
            diskImage = 0;
        }
    }

    public void guideMenu(View view) {
        Toast.makeText(this, "CloudsFM Guide", Toast.LENGTH_SHORT)
                .show();
    }

    public void dislikeButton(View view) {
        Toast.makeText(this, "Dislike", Toast.LENGTH_LONG)
                .show();
    }

    public void likeButton(View view) {
        Toast.makeText(this, "Like", Toast.LENGTH_LONG)
                .show();
    }

    // Play/pause handler wired from the layout; playback not implemented yet.
    public void playPause(View view) {
    }

    public void gohome(View view) {
        this.finish();
    }

    /**
     * Called when leaving the activity
     */
    @Override
    public void onPause() {
        if (mAdView != null) {
            mAdView.pause();
        }
        super.onPause();
    }

    /**
     * Called when returning to the activity
     */
    @Override
    public void onResume() {
        super.onResume();
        if (mAdView != null) {
            mAdView.resume();
        }
    }

    /**
     * Called before the activity is destroyed
     */
    @Override
    public void onDestroy() {
        if (mAdView != null) {
            mAdView.destroy();
        }
        super.onDestroy();
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.operator.scalar;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.util.concurrent.UncheckedExecutionException;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.airlift.units.DataSize;
import io.trino.Session;
import io.trino.connector.CatalogName;
import io.trino.execution.Lifespan;
import io.trino.metadata.Metadata;
import io.trino.metadata.Split;
import io.trino.metadata.TableHandle;
import io.trino.operator.DriverContext;
import io.trino.operator.DriverYieldSignal;
import io.trino.operator.FilterAndProjectOperator;
import io.trino.operator.Operator;
import io.trino.operator.OperatorFactory;
import io.trino.operator.ScanFilterAndProjectOperator;
import io.trino.operator.SourceOperator;
import io.trino.operator.SourceOperatorFactory;
import io.trino.operator.project.CursorProcessor;
import io.trino.operator.project.PageProcessor;
import io.trino.operator.project.PageProjection;
import io.trino.spi.ErrorCodeSupplier;
import io.trino.spi.HostAddress;
import io.trino.spi.Page;
import io.trino.spi.PageBuilder;
import io.trino.spi.Plugin;
import io.trino.spi.block.Block;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ConnectorPageSource;
import io.trino.spi.connector.ConnectorSplit;
import io.trino.spi.connector.DynamicFilter;
import io.trino.spi.connector.FixedPageSource;
import io.trino.spi.connector.InMemoryRecordSet;
import io.trino.spi.connector.RecordPageSource;
import io.trino.spi.connector.RecordSet;
import io.trino.spi.predicate.Utils;
import io.trino.spi.type.DecimalType;
import io.trino.spi.type.RowType;
import io.trino.spi.type.TimeZoneKey;
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeOperators;
import io.trino.split.PageSourceProvider;
import io.trino.sql.analyzer.FeaturesConfig;
import io.trino.sql.gen.ExpressionCompiler;
import io.trino.sql.planner.ExpressionInterpreter;
import io.trino.sql.planner.Symbol;
import io.trino.sql.planner.TypeProvider;
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.sql.relational.RowExpression;
import io.trino.sql.tree.DefaultTraversalVisitor;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.NodeRef;
import io.trino.sql.tree.SymbolReference;
import io.trino.testing.LocalQueryRunner;
import io.trino.testing.MaterializedResult;
import io.trino.type.BlockTypeOperators;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.Closeable;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.slice.SizeOf.sizeOf;
import static io.airlift.testing.Assertions.assertInstanceOf;
import static io.trino.SessionTestUtils.TEST_SESSION;
import static io.trino.block.BlockAssertions.createBooleansBlock;
import static io.trino.block.BlockAssertions.createDoublesBlock;
import static io.trino.block.BlockAssertions.createIntsBlock;
import static io.trino.block.BlockAssertions.createLongDecimalsBlock;
import static io.trino.block.BlockAssertions.createLongsBlock;
import static io.trino.block.BlockAssertions.createRowBlock;
import static io.trino.block.BlockAssertions.createShortDecimalsBlock;
import static io.trino.block.BlockAssertions.createSlicesBlock;
import static io.trino.block.BlockAssertions.createStringsBlock;
import static io.trino.block.BlockAssertions.createTimestampsWithTimeZoneBlock;
import static io.trino.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static io.trino.spi.StandardErrorCode.INVALID_CAST_ARGUMENT;
import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static io.trino.spi.StandardErrorCode.NUMERIC_VALUE_OUT_OF_RANGE;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.DateTimeEncoding.packDateTimeWithZone;
import static io.trino.spi.type.DecimalType.createDecimalType;
import static io.trino.spi.type.Decimals.encodeScaledValue;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE;
import static io.trino.spi.type.VarbinaryType.VARBINARY;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.sql.ExpressionTestUtils.createExpression;
import static io.trino.sql.ExpressionTestUtils.getTypes;
import static io.trino.sql.relational.Expressions.constant;
import static io.trino.sql.relational.SqlToRowExpressionTranslator.translate;
import static io.trino.testing.TestingHandles.TEST_TABLE_HANDLE;
import static io.trino.testing.TestingTaskContext.createTaskContext;
import static io.trino.testing.assertions.TrinoExceptionAssert.assertTrinoExceptionThrownBy;
import static io.trino.type.UnknownType.UNKNOWN;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
/**
 * Test harness for scalar SQL expressions: evaluates each expression through
 * every execution path available (full engine, compiled page processor,
 * interpreter, scan operator, record set) and asserts the paths agree.
 */
public final class FunctionAssertions
        implements Closeable
{
    private static final ExecutorService EXECUTOR = newCachedThreadPool(daemonThreadsNamed("FunctionAssertions-%s"));
    private static final ScheduledExecutorService SCHEDULED_EXECUTOR = newScheduledThreadPool(2, daemonThreadsNamed("FunctionAssertions-scheduledExecutor-%s"));
    // Increase the number of fields to generate a wide column
    private static final int TEST_ROW_NUMBER_OF_FIELDS = 2500;
    private static final RowType TEST_ROW_TYPE = createTestRowType(TEST_ROW_NUMBER_OF_FIELDS);
    private static final Block TEST_ROW_DATA = createTestRowData(TEST_ROW_TYPE);
    private static final DecimalType SHORT_DECIMAL_TYPE = createDecimalType(14);
    private static final DecimalType LONG_DECIMAL_TYPE = createDecimalType(28);
    // Single row of bound test values; channel order must line up with
    // INPUT_MAPPING below.
    private static final Page SOURCE_PAGE = new Page(
            createLongsBlock(1234L),
            createStringsBlock("hello"),
            createDoublesBlock(12.34),
            createBooleansBlock(true),
            createLongsBlock(new DateTime(2001, 8, 22, 3, 4, 5, 321, DateTimeZone.UTC).getMillis()),
            createStringsBlock("%el%"),
            createStringsBlock((String) null),
            createTimestampsWithTimeZoneBlock(packDateTimeWithZone(new DateTime(1970, 1, 1, 0, 1, 0, 999, DateTimeZone.UTC).getMillis(), TimeZoneKey.getTimeZoneKey("Z"))),
            createSlicesBlock(Slices.wrappedBuffer((byte) 0xab)),
            createIntsBlock(1234),
            TEST_ROW_DATA,
            createShortDecimalsBlock("1234"),
            createLongDecimalsBlock("1234"));
    // A page with one position but no channels, used to test filters that
    // reference no input columns.
    private static final Page ZERO_CHANNEL_PAGE = new Page(1);
    // SQL type of each bound_* symbol referenced in test expressions.
    private static final TypeProvider INPUT_TYPES = TypeProvider.copyOf(ImmutableMap.<Symbol, Type>builder()
            .put(new Symbol("bound_long"), BIGINT)
            .put(new Symbol("bound_string"), VARCHAR)
            .put(new Symbol("bound_double"), DOUBLE)
            .put(new Symbol("bound_boolean"), BOOLEAN)
            .put(new Symbol("bound_timestamp"), BIGINT)
            .put(new Symbol("bound_pattern"), VARCHAR)
            .put(new Symbol("bound_null_string"), VARCHAR)
            .put(new Symbol("bound_timestamp_with_timezone"), TIMESTAMP_WITH_TIME_ZONE)
            .put(new Symbol("bound_binary_literal"), VARBINARY)
            .put(new Symbol("bound_integer"), INTEGER)
            .put(new Symbol("bound_row"), TEST_ROW_TYPE)
            .put(new Symbol("bound_short_decimal"), SHORT_DECIMAL_TYPE)
            .put(new Symbol("bound_long_decimal"), LONG_DECIMAL_TYPE)
            .build());
    // Channel index of each bound_* symbol within SOURCE_PAGE.
    private static final Map<Symbol, Integer> INPUT_MAPPING = ImmutableMap.<Symbol, Integer>builder()
            .put(new Symbol("bound_long"), 0)
            .put(new Symbol("bound_string"), 1)
            .put(new Symbol("bound_double"), 2)
            .put(new Symbol("bound_boolean"), 3)
            .put(new Symbol("bound_timestamp"), 4)
            .put(new Symbol("bound_pattern"), 5)
            .put(new Symbol("bound_null_string"), 6)
            .put(new Symbol("bound_timestamp_with_timezone"), 7)
            .put(new Symbol("bound_binary_literal"), 8)
            .put(new Symbol("bound_integer"), 9)
            .put(new Symbol("bound_row"), 10)
            .put(new Symbol("bound_short_decimal"), 11)
            .put(new Symbol("bound_long_decimal"), 12)
            .build();
    private static final PageSourceProvider PAGE_SOURCE_PROVIDER = new TestPageSourceProvider();
    private static final PlanNodeId SOURCE_ID = new PlanNodeId("scan");
    private final Session session;
    private final LocalQueryRunner runner;
    private final Metadata metadata;
    private final ExpressionCompiler compiler;
    /** Creates assertions against the default test session and feature config. */
    public FunctionAssertions()
    {
        this(TEST_SESSION);
    }
    /** Creates assertions against the given session with default features. */
    public FunctionAssertions(Session session)
    {
        this(session, new FeaturesConfig());
    }
    /**
     * Creates assertions against the given session and feature configuration.
     * Builds a dedicated LocalQueryRunner for this instance.
     */
    public FunctionAssertions(Session session, FeaturesConfig featuresConfig)
    {
        this.session = requireNonNull(session, "session is null");
        runner = LocalQueryRunner.builder(session)
                .withFeaturesConfig(featuresConfig)
                .build();
        metadata = runner.getMetadata();
        compiler = runner.getExpressionCompiler();
    }
    /** Returns the metadata of the underlying query runner. */
    public Metadata getMetadata()
    {
        return metadata;
    }
    /** Returns the type operators of the underlying query runner. */
    public TypeOperators getTypeOperators()
    {
        return runner.getTypeOperators();
    }
    /** Returns the block type operators of the underlying query runner. */
    public BlockTypeOperators getBlockTypeOperators()
    {
        return runner.getBlockTypeOperators();
    }
    /** Installs a plugin into the underlying query runner. */
    public void installPlugin(Plugin plugin)
    {
        runner.installPlugin(plugin);
    }
    /**
     * Asserts that {@code projection} evaluates to {@code expected} with type
     * {@code expectedType} on every execution path. A Slice expectation is
     * first converted to its UTF-8 string form for comparison.
     */
    public void assertFunction(String projection, Type expectedType, Object expected)
    {
        if (expected instanceof Slice) {
            expected = ((Slice) expected).toStringUtf8();
        }
        Object actual = selectSingleValue(projection, expectedType, compiler);
        assertEquals(actual, expected);
    }
    /**
     * Like {@link #assertFunction}, but compares the result's
     * {@code toString()} representation against {@code expected}.
     */
    public void assertFunctionString(String projection, Type expectedType, String expected)
    {
        Object actual = selectSingleValue(projection, expectedType, compiler);
        assertEquals(actual.toString(), expected);
    }
    /** Evaluates the expression (discarding the result) using the instance session. */
    public void tryEvaluate(String expression, Type expectedType)
    {
        tryEvaluate(expression, expectedType, session);
    }
    /** Evaluates the expression (discarding the result) using the given session. */
    public void tryEvaluate(String expression, Type expectedType, Session session)
    {
        selectUniqueValue(expression, expectedType, session, compiler);
    }
    /** Evaluates the expression on all execution paths using the instance session. */
    public void tryEvaluateWithAll(String expression, Type expectedType)
    {
        tryEvaluateWithAll(expression, expectedType, session);
    }
    /** Evaluates the expression on all execution paths using the given session. */
    public void tryEvaluateWithAll(String expression, Type expectedType, Session session)
    {
        executeProjectionWithAll(expression, expectedType, session, compiler);
    }
    /** Runs the projection through the full query engine only. */
    public void executeProjectionWithFullEngine(String projection)
    {
        runner.execute("SELECT " + projection);
    }
    /** Convenience wrapper that evaluates with the instance session. */
    private Object selectSingleValue(String projection, Type expectedType, ExpressionCompiler compiler)
    {
        return selectUniqueValue(projection, expectedType, session, compiler);
    }
private Object selectUniqueValue(String projection, Type expectedType, Session session, ExpressionCompiler compiler)
{
List<Object> results = executeProjectionWithAll(projection, expectedType, session, compiler);
HashSet<Object> resultSet = new HashSet<>(results);
// we should only have a single result
assertEquals(resultSet.size(), 1, "Expected only one result unique result, but got " + resultSet);
return Iterables.getOnlyElement(resultSet);
}
    // this is not safe as it catches all RuntimeExceptions
    @Deprecated
    public void assertInvalidFunction(String projection)
    {
        try {
            evaluateInvalid(projection);
            fail("Expected to fail");
        }
        catch (RuntimeException e) {
            // Expected
        }
    }
    /** Asserts the projection fails with the given error code and exact message. */
    public void assertInvalidFunction(String projection, ErrorCodeSupplier errorCode, String message)
    {
        assertTrinoExceptionThrownBy(() -> evaluateInvalid(projection))
                .hasErrorCode(errorCode)
                .hasMessage(message);
    }
    /** Asserts the projection fails with INVALID_FUNCTION_ARGUMENT and the given message. */
    public void assertInvalidFunction(String projection, String message)
    {
        assertInvalidFunction(projection, INVALID_FUNCTION_ARGUMENT, message);
    }
    /** Asserts the projection fails with the given error code (message not checked). */
    public void assertInvalidFunction(String projection, ErrorCodeSupplier expectedErrorCode)
    {
        assertTrinoExceptionThrownBy(() -> evaluateInvalid(projection))
                .hasErrorCode(expectedErrorCode);
    }
    /** Asserts the projection fails with NUMERIC_VALUE_OUT_OF_RANGE and the given message. */
    public void assertNumericOverflow(String projection, String message)
    {
        assertTrinoExceptionThrownBy(() -> evaluateInvalid(projection))
                .hasErrorCode(NUMERIC_VALUE_OUT_OF_RANGE)
                .hasMessage(message);
    }
    /** Asserts the projection fails with INVALID_CAST_ARGUMENT (message not checked). */
    public void assertInvalidCast(String projection)
    {
        assertTrinoExceptionThrownBy(() -> evaluateInvalid(projection))
                .hasErrorCode(INVALID_CAST_ARGUMENT);
    }
    /** Asserts the projection fails with INVALID_CAST_ARGUMENT and the given message. */
    public void assertInvalidCast(String projection, String message)
    {
        assertTrinoExceptionThrownBy(() -> evaluateInvalid(projection))
                .hasErrorCode(INVALID_CAST_ARGUMENT)
                .hasMessage(message);
    }
    /** Evaluates a projection that is expected to fail. */
    private void evaluateInvalid(String projection)
    {
        // type isn't necessary as the function is not valid
        selectSingleValue(projection, UNKNOWN, compiler);
    }
    /**
     * Runs the compiled projection repeatedly and asserts that the total
     * retained size of its generated {@code __cachedInstance} fields stays
     * bounded, i.e. the function implementation does not accumulate state
     * across invocations.
     */
    public void assertCachedInstanceHasBoundedRetainedSize(String projection)
    {
        requireNonNull(projection, "projection is null");
        Expression projectionExpression = createExpression(session, projection, metadata, INPUT_TYPES);
        RowExpression projectionRowExpression = toRowExpression(session, projectionExpression);
        PageProcessor processor = compiler.compilePageProcessor(Optional.empty(), ImmutableList.of(projectionRowExpression)).get();
        // This is a heuristic to detect whether the retained size of cachedInstance is bounded.
        // * The test runs at least 1000 iterations.
        // * The test passes if max retained size doesn't refresh after
        //   4x the number of iterations when max was last updated.
        // * The test fails if retained size reaches 1MB.
        // Note that 1MB is arbitrarily chosen and may be increased if a function implementation
        // legitimately needs more.
        long maxRetainedSize = 0;
        int maxIterationCount = 0;
        for (int iterationCount = 0; iterationCount < Math.max(1000, maxIterationCount * 4); iterationCount++) {
            Iterator<Optional<Page>> output = processor.process(
                    session.toConnectorSession(),
                    new DriverYieldSignal(),
                    newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()),
                    SOURCE_PAGE);
            // consume the iterator
            Optional<Page> ignored = Iterators.getOnlyElement(output);
            long retainedSize = processor.getProjections().stream()
                    .mapToLong(this::getRetainedSizeOfCachedInstance)
                    .sum();
            if (retainedSize > maxRetainedSize) {
                maxRetainedSize = retainedSize;
                maxIterationCount = iterationCount;
            }
            // 1 MB hard limit on cached state
            if (maxRetainedSize >= 1048576) {
                fail(format("The retained size of cached instance of function invocation is likely unbounded: %s", projection));
            }
        }
    }
    /**
     * Sums the retained sizes of the projection's generated
     * {@code __cachedInstance*} fields via reflection; all other fields are
     * ignored.
     */
    private long getRetainedSizeOfCachedInstance(PageProjection projection)
    {
        Field[] fields = projection.getClass().getDeclaredFields();
        long retainedSize = 0;
        for (Field field : fields) {
            field.setAccessible(true);
            String fieldName = field.getName();
            // only generated cached-instance fields are of interest
            if (!fieldName.startsWith("__cachedInstance")) {
                continue;
            }
            try {
                retainedSize += getRetainedSizeOf(field.get(projection));
            }
            catch (IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        }
        return retainedSize;
    }
private long getRetainedSizeOf(Object object)
{
if (object instanceof PageBuilder) {
return ((PageBuilder) object).getRetainedSizeInBytes();
}
if (object instanceof Block) {
return ((Block) object).getRetainedSizeInBytes();
}
Class<?> type = object.getClass();
if (type.isArray()) {
if (type == int[].class) {
return sizeOf((int[]) object);
}
else if (type == boolean[].class) {
return sizeOf((boolean[]) object);
}
else if (type == byte[].class) {
return sizeOf((byte[]) object);
}
else if (type == long[].class) {
return sizeOf((long[]) object);
}
else if (type == short[].class) {
return sizeOf((short[]) object);
}
else if (type == Block[].class) {
Object[] objects = (Object[]) object;
return Arrays.stream(objects)
.mapToLong(this::getRetainedSizeOf)
.sum();
}
else {
throw new IllegalArgumentException(format("Unknown type encountered: %s", type));
}
}
long retainedSize = ClassLayout.parseClass(type).instanceSize();
Field[] fields = type.getDeclaredFields();
for (Field field : fields) {
try {
if (field.getType().isPrimitive() || Modifier.isStatic(field.getModifiers())) {
continue;
}
field.setAccessible(true);
retainedSize += getRetainedSizeOf(field.get(object));
}
catch (IllegalAccessException t) {
throw new RuntimeException(t);
}
}
return retainedSize;
}
private List<Object> executeProjectionWithAll(String projection, Type expectedType, Session session, ExpressionCompiler compiler)
{
requireNonNull(projection, "projection is null");
Expression projectionExpression = createExpression(session, projection, metadata, INPUT_TYPES);
RowExpression projectionRowExpression = toRowExpression(session, projectionExpression);
List<Object> results = new ArrayList<>();
// If the projection does not need bound values, execute query using full engine
if (!needsBoundValue(projectionExpression)) {
MaterializedResult result = runner.execute("SELECT " + projection);
assertType(result.getTypes(), expectedType);
assertEquals(result.getTypes().size(), 1);
assertEquals(result.getMaterializedRows().size(), 1);
Object queryResult = Iterables.getOnlyElement(result.getMaterializedRows()).getField(0);
results.add(queryResult);
}
// execute as standalone operator
OperatorFactory operatorFactory = compileFilterProject(Optional.empty(), projectionRowExpression, compiler);
Object directOperatorValue = selectSingleValue(operatorFactory, expectedType, session);
results.add(directOperatorValue);
// interpret
Object interpretedValue = interpret(projectionExpression, expectedType, session);
results.add(interpretedValue);
// execute over normal operator
SourceOperatorFactory scanProjectOperatorFactory = compileScanFilterProject(Optional.empty(), projectionRowExpression, compiler);
Object scanOperatorValue = selectSingleValue(scanProjectOperatorFactory, expectedType, createNormalSplit(), session);
results.add(scanOperatorValue);
// execute over record set
Object recordValue = selectSingleValue(scanProjectOperatorFactory, expectedType, createRecordSetSplit(), session);
results.add(recordValue);
//
// If the projection does not need bound values, execute query using full engine
if (!needsBoundValue(projectionExpression)) {
MaterializedResult result = runner.execute("SELECT " + projection);
assertType(result.getTypes(), expectedType);
assertEquals(result.getTypes().size(), 1);
assertEquals(result.getMaterializedRows().size(), 1);
Object queryResult = Iterables.getOnlyElement(result.getMaterializedRows()).getField(0);
results.add(queryResult);
}
// validate type at end since some tests expect failure and for those UNKNOWN is used instead of actual type
assertEquals(projectionRowExpression.getType(), expectedType);
return results;
}
    /** Translates the analyzed expression into a row expression over the bound-value channels. */
    private RowExpression toRowExpression(Session session, Expression projectionExpression)
    {
        return toRowExpression(projectionExpression, getTypes(session, metadata, INPUT_TYPES, projectionExpression), INPUT_MAPPING);
    }
    /** Runs a standalone operator over SOURCE_PAGE and extracts its single output value. */
    private Object selectSingleValue(OperatorFactory operatorFactory, Type type, Session session)
    {
        Operator operator = operatorFactory.createOperator(createDriverContext(session));
        return selectSingleValue(operator, type);
    }
    /** Runs a source operator over the given split and extracts its single output value. */
    private Object selectSingleValue(SourceOperatorFactory operatorFactory, Type type, Split split, Session session)
    {
        SourceOperator operator = operatorFactory.createOperator(createDriverContext(session));
        operator.addSplit(split);
        operator.noMoreSplits();
        return selectSingleValue(operator, type);
    }
    /**
     * Drives the operator to completion and asserts exactly one row and one
     * channel were produced, returning the value as a Java object.
     */
    private Object selectSingleValue(Operator operator, Type type)
    {
        Page output = getAtMostOnePage(operator, SOURCE_PAGE);
        assertNotNull(output);
        assertEquals(output.getPositionCount(), 1);
        assertEquals(output.getChannelCount(), 1);
        Block block = output.getBlock(0);
        assertEquals(block.getPositionCount(), 1);
        return type.getObjectValue(session.toConnectorSession(), block, 0);
    }
    /** Asserts the filter evaluates to {@code expected} on every execution path. */
    public void assertFilter(String filter, boolean expected, boolean withNoInputColumns)
    {
        assertFilter(filter, expected, withNoInputColumns, compiler);
    }
private void assertFilter(String filter, boolean expected, boolean withNoInputColumns, ExpressionCompiler compiler)
{
List<Boolean> results = executeFilterWithAll(filter, TEST_SESSION, withNoInputColumns, compiler);
HashSet<Boolean> resultSet = new HashSet<>(results);
// we should only have a single result
assertTrue(resultSet.size() == 1, "Expected only [" + expected + "] result unique result, but got " + resultSet);
assertEquals((boolean) Iterables.getOnlyElement(resultSet), expected);
}
    /**
     * Evaluates {@code filter} through every execution path (standalone
     * operator, optional no-input-column operator, interpreter, scan
     * operator, record set, and — when no bound values are referenced — the
     * full engine) and returns the boolean each path produced.
     */
    private List<Boolean> executeFilterWithAll(String filter, Session session, boolean executeWithNoInputColumns, ExpressionCompiler compiler)
    {
        requireNonNull(filter, "filter is null");
        Expression filterExpression = createExpression(session, filter, metadata, INPUT_TYPES);
        RowExpression filterRowExpression = toRowExpression(session, filterExpression);
        List<Boolean> results = new ArrayList<>();
        // execute as standalone operator
        OperatorFactory operatorFactory = compileFilterProject(Optional.of(filterRowExpression), constant(true, BOOLEAN), compiler);
        results.add(executeFilter(operatorFactory, session));
        if (executeWithNoInputColumns) {
            // execute as standalone operator
            operatorFactory = compileFilterWithNoInputColumns(filterRowExpression, compiler);
            results.add(executeFilterWithNoInputColumns(operatorFactory, session));
        }
        // interpret
        Boolean interpretedValue = (Boolean) interpret(filterExpression, BOOLEAN, session);
        // NULL from the interpreter is treated as "row filtered out"
        if (interpretedValue == null) {
            interpretedValue = false;
        }
        results.add(interpretedValue);
        // execute over normal operator
        SourceOperatorFactory scanProjectOperatorFactory = compileScanFilterProject(Optional.of(filterRowExpression), constant(true, BOOLEAN), compiler);
        boolean scanOperatorValue = executeFilter(scanProjectOperatorFactory, createNormalSplit(), session);
        results.add(scanOperatorValue);
        // execute over record set
        boolean recordValue = executeFilter(scanProjectOperatorFactory, createRecordSetSplit(), session);
        results.add(recordValue);
        //
        // If the filter does not need bound values, execute query using full engine
        if (!needsBoundValue(filterExpression)) {
            MaterializedResult result = runner.execute("SELECT TRUE WHERE " + filter);
            assertEquals(result.getTypes().size(), 1);
            Boolean queryResult;
            // an empty result means the filter rejected the row
            if (result.getMaterializedRows().isEmpty()) {
                queryResult = false;
            }
            else {
                assertEquals(result.getMaterializedRows().size(), 1);
                queryResult = (Boolean) Iterables.getOnlyElement(result.getMaterializedRows()).getField(0);
            }
            results.add(queryResult);
        }
        return results;
    }
    /** Creates the operator from the factory and runs the zero-channel filter over it. */
    private static boolean executeFilterWithNoInputColumns(OperatorFactory operatorFactory, Session session)
    {
        return executeFilterWithNoInputColumns(operatorFactory.createOperator(createDriverContext(session)));
    }
    /** Creates the operator from the factory and runs the filter over SOURCE_PAGE. */
    private static boolean executeFilter(OperatorFactory operatorFactory, Session session)
    {
        return executeFilter(operatorFactory.createOperator(createDriverContext(session)));
    }
    /** Creates the source operator, feeds it the given split, and runs the filter. */
    private static boolean executeFilter(SourceOperatorFactory operatorFactory, Split split, Session session)
    {
        SourceOperator operator = operatorFactory.createOperator(createDriverContext(session));
        operator.addSplit(split);
        operator.noMoreSplits();
        return executeFilter(operator);
    }
private static boolean executeFilter(Operator operator)
{
Page page = getAtMostOnePage(operator, SOURCE_PAGE);
boolean value;
if (page != null) {
assertEquals(page.getPositionCount(), 1);
assertEquals(page.getChannelCount(), 1);
assertTrue(BOOLEAN.getBoolean(page.getBlock(0), 0));
value = true;
}
else {
value = false;
}
return value;
}
private static boolean executeFilterWithNoInputColumns(Operator operator)
{
Page page = getAtMostOnePage(operator, ZERO_CHANNEL_PAGE);
boolean value;
if (page != null) {
assertEquals(page.getPositionCount(), 1);
assertEquals(page.getChannelCount(), 0);
value = true;
}
else {
value = false;
}
return value;
}
    /**
     * Returns true if the expression references any symbol (i.e. a bound_*
     * input column) and therefore cannot be evaluated by the full engine as a
     * constant query.
     */
    private static boolean needsBoundValue(Expression projectionExpression)
    {
        AtomicBoolean hasSymbolReferences = new AtomicBoolean();
        // walk the tree; any SymbolReference flips the flag
        new DefaultTraversalVisitor<Void>()
        {
            @Override
            protected Void visitSymbolReference(SymbolReference node, Void context)
            {
                hasSymbolReferences.set(true);
                return null;
            }
        }.process(projectionExpression, null);
        return hasSymbolReferences.get();
    }
    /**
     * Evaluates the expression with the ExpressionInterpreter, resolving each
     * referenced symbol to its value in row 0 of SOURCE_PAGE, and converts
     * the stack-typed result to the object representation of
     * {@code expectedType}.
     */
    private Object interpret(Expression expression, Type expectedType, Session session)
    {
        Map<NodeRef<Expression>, Type> expressionTypes = getTypes(session, metadata, INPUT_TYPES, expression);
        ExpressionInterpreter evaluator = new ExpressionInterpreter(expression, metadata, session, expressionTypes);
        Object result = evaluator.evaluate(symbol -> {
            // SOURCE_PAGE holds exactly one row of bound values
            int position = 0;
            int channel = INPUT_MAPPING.get(symbol);
            Type type = INPUT_TYPES.get(symbol);
            Block block = SOURCE_PAGE.getBlock(channel);
            if (block.isNull(position)) {
                return null;
            }
            // dispatch on the type's Java stack representation
            Class<?> javaType = type.getJavaType();
            if (javaType == boolean.class) {
                return type.getBoolean(block, position);
            }
            else if (javaType == long.class) {
                return type.getLong(block, position);
            }
            else if (javaType == double.class) {
                return type.getDouble(block, position);
            }
            else if (javaType == Slice.class) {
                return type.getSlice(block, position);
            }
            else if (javaType == Block.class) {
                return type.getObject(block, position);
            }
            else {
                throw new UnsupportedOperationException("not yet implemented");
            }
        });
        // convert result from stack type to Type ObjectValue
        Block block = Utils.nativeValueToBlock(expectedType, result);
        return expectedType.getObjectValue(session.toConnectorSession(), block, 0);
    }
/**
 * Compile a page processor that applies only {@code filter} (no projections)
 * and wrap it in a FilterAndProjectOperator factory producing no output columns.
 */
private static OperatorFactory compileFilterWithNoInputColumns(RowExpression filter, ExpressionCompiler compiler)
{
try {
Supplier<PageProcessor> processor = compiler.compilePageProcessor(Optional.of(filter), ImmutableList.of());
return FilterAndProjectOperator.createOperatorFactory(0, new PlanNodeId("test"), processor, ImmutableList.of(), DataSize.ofBytes(0), 0);
}
catch (Throwable e) {
// unwrap compiler-cache wrappers so the real compilation failure is reported
Throwable cause = (e instanceof UncheckedExecutionException) ? e.getCause() : e;
throw new RuntimeException("Error compiling " + filter + ": " + cause.getMessage(), cause);
}
}
/**
 * Compile a page processor for the optional {@code filter} plus a single
 * {@code projection}, wrapped in a FilterAndProjectOperator factory whose only
 * output column is the projection's type.
 */
private static OperatorFactory compileFilterProject(Optional<RowExpression> filter, RowExpression projection, ExpressionCompiler compiler)
{
try {
Supplier<PageProcessor> processor = compiler.compilePageProcessor(filter, ImmutableList.of(projection));
return FilterAndProjectOperator.createOperatorFactory(0, new PlanNodeId("test"), processor, ImmutableList.of(projection.getType()), DataSize.ofBytes(0), 0);
}
catch (Throwable e) {
// unwrap compiler-cache wrappers so the real compilation failure is reported
Throwable cause = (e instanceof UncheckedExecutionException) ? e.getCause() : e;
throw new RuntimeException("Error compiling " + projection + ": " + cause.getMessage(), cause);
}
}
/**
 * Compile both a cursor processor and a page processor for the optional
 * {@code filter} plus a single {@code projection}, and build a
 * ScanFilterAndProjectOperator factory over the test page source provider.
 */
private static SourceOperatorFactory compileScanFilterProject(Optional<RowExpression> filter, RowExpression projection, ExpressionCompiler compiler)
{
try {
Supplier<CursorProcessor> cursorProcessor = compiler.compileCursorProcessor(
filter,
ImmutableList.of(projection),
SOURCE_ID);
Supplier<PageProcessor> pageProcessor = compiler.compilePageProcessor(
filter,
ImmutableList.of(projection));
return new ScanFilterAndProjectOperator.ScanFilterAndProjectOperatorFactory(
0,
new PlanNodeId("test"),
SOURCE_ID,
PAGE_SOURCE_PROVIDER,
cursorProcessor,
pageProcessor,
TEST_TABLE_HANDLE,
ImmutableList.of(),
DynamicFilter.EMPTY,
ImmutableList.of(projection.getType()),
DataSize.ofBytes(0),
0);
}
catch (Throwable e) {
// unwrap compiler-cache wrappers so the real compilation failure is reported
if (e instanceof UncheckedExecutionException) {
e = e.getCause();
}
throw new RuntimeException("Error compiling filter " + filter + ": " + e.getMessage(), e);
}
}
/**
 * Translate an analyzed Expression into a RowExpression using the given type
 * assignments and symbol-to-channel layout.
 */
private RowExpression toRowExpression(Expression projection, Map<NodeRef<Expression>, Type> expressionTypes, Map<Symbol, Integer> layout)
{
return translate(projection, expressionTypes, layout, metadata, session, false);
}
/**
 * Feed {@code sourcePage} to the operator (if it accepts input), finish it,
 * and drain its output, asserting that at most one page is produced.
 * Returns that page, or null when the operator produced no output.
 */
private static Page getAtMostOnePage(Operator operator, Page sourcePage)
{
// add our input page if needed
if (operator.needsInput()) {
operator.addInput(sourcePage);
}
// try to get the output page
Page result = operator.getOutput();
// tell operator to finish
operator.finish();
// try to get output until the operator is finished
while (!operator.isFinished()) {
// operator should never block
assertTrue(operator.isBlocked().isDone());
Page output = operator.getOutput();
if (output != null) {
// a second output page would violate the at-most-one expectation
assertNull(result);
result = output;
}
}
return result;
}
/** Create a fresh DriverContext for the given session on the shared test executors. */
private static DriverContext createDriverContext(Session session)
{
return createTaskContext(EXECUTOR, SCHEDULED_EXECUTOR, session)
.addPipelineContext(0, true, true, false)
.addDriverContext();
}
/** Assert that {@code types} contains exactly one entry equal to {@code expectedType}. */
private static void assertType(List<Type> types, Type expectedType)
{
assertTrue(types.size() == 1, "Expected one type, but got " + types);
Type actualType = types.get(0);
assertEquals(actualType, expectedType);
}
/** Returns the session this helper was created with. */
public Session getSession()
{
return session;
}
@Override
public void close()
{
// release the local query runner and its resources
runner.close();
}
/**
 * Page source provider driven by {@code TestSplit}: a record-set split yields a
 * RecordPageSource with one fixed row (exercising the cursor-processor path);
 * any other split yields a FixedPageSource over SOURCE_PAGE (page-processor path).
 */
private static class TestPageSourceProvider
implements PageSourceProvider
{
@Override
public ConnectorPageSource createPageSource(Session session, Split split, TableHandle table, List<ColumnHandle> columns, DynamicFilter dynamicFilter)
{
assertInstanceOf(split.getConnectorSplit(), FunctionAssertions.TestSplit.class);
FunctionAssertions.TestSplit testSplit = (FunctionAssertions.TestSplit) split.getConnectorSplit();
if (testSplit.isRecordSet()) {
// single fixed row covering each supported column type
RecordSet records = InMemoryRecordSet.builder(ImmutableList.of(BIGINT, VARCHAR, DOUBLE, BOOLEAN, BIGINT, VARCHAR, VARCHAR, TIMESTAMP_WITH_TIME_ZONE, VARBINARY, INTEGER, TEST_ROW_TYPE, SHORT_DECIMAL_TYPE, LONG_DECIMAL_TYPE))
.addRow(
1234L,
"hello",
12.34,
true,
new DateTime(2001, 8, 22, 3, 4, 5, 321, DateTimeZone.UTC).getMillis(),
"%el%",
null,
packDateTimeWithZone(new DateTime(1970, 1, 1, 0, 1, 0, 999, DateTimeZone.UTC).getMillis(), TimeZoneKey.getTimeZoneKey("Z")),
Slices.wrappedBuffer((byte) 0xab),
1234,
TEST_ROW_DATA.getObject(0, Block.class),
new BigDecimal("1234").unscaledValue().longValue(),
encodeScaledValue(new BigDecimal("1234")))
.build();
return new RecordPageSource(records);
}
else {
return new FixedPageSource(ImmutableList.of(SOURCE_PAGE));
}
}
}
/** Split that makes TestPageSourceProvider return a RecordPageSource (cursor path). */
private static Split createRecordSetSplit()
{
return new Split(new CatalogName("test"), new TestSplit(true), Lifespan.taskWide());
}
/** Split that makes TestPageSourceProvider return a FixedPageSource (page path). */
private static Split createNormalSplit()
{
return new Split(new CatalogName("test"), new TestSplit(false), Lifespan.taskWide());
}
/**
 * Build a row type with {@code numberOfFields} fields named
 * {@code nested_column_<i>}, cycling through a fixed set of representative
 * types (including one nested row type).
 */
private static RowType createTestRowType(int numberOfFields)
{
Iterator<Type> typeCycle = Iterables.<Type>cycle(
BIGINT,
INTEGER,
VARCHAR,
DOUBLE,
BOOLEAN,
VARBINARY,
RowType.from(ImmutableList.of(RowType.field("nested_nested_column", VARCHAR)))).iterator();
List<RowType.Field> fields = new ArrayList<>(numberOfFields);
for (int i = 0; i < numberOfFields; i++) {
fields.add(new RowType.Field(Optional.of("nested_column_" + i), typeCycle.next()));
}
return RowType.from(fields);
}
/**
 * Build a single row block matching {@code rowType}, cycling through fixed
 * sample values in the same order as the types produced by createTestRowType.
 */
private static Block createTestRowData(RowType rowType)
{
Iterator<Object> values = Iterables.cycle(
1234L,
34,
"hello",
12.34d,
true,
Slices.wrappedBuffer((byte) 0xab),
createRowBlock(ImmutableList.of(VARCHAR), Collections.singleton("innerFieldValue").toArray()).getObject(0, Block.class)).iterator();
int numFields = rowType.getFields().size();
Object[] rowValues = new Object[numFields];
for (int fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
rowValues[fieldIdx] = values.next();
}
return createRowBlock(rowType.getTypeParameters(), rowValues);
}
/**
 * Minimal connector split whose single flag selects which page source the
 * TestPageSourceProvider returns (record set vs. fixed page).
 */
private static class TestSplit
implements ConnectorSplit
{
// true: provider returns a RecordPageSource; false: a FixedPageSource
private final boolean recordSet;
private TestSplit(boolean recordSet)
{
this.recordSet = recordSet;
}
private boolean isRecordSet()
{
return recordSet;
}
@Override
public boolean isRemotelyAccessible()
{
return false;
}
@Override
public List<HostAddress> getAddresses()
{
return ImmutableList.of();
}
@Override
public Object getInfo()
{
return this;
}
}
}
| |
/*
* Copyright (C) 2006 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.provider;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.provider.BrowserContract.Bookmarks;
import android.provider.BrowserContract.Combined;
import android.provider.BrowserContract.History;
import android.provider.BrowserContract.Searches;
import android.util.Log;
import android.webkit.WebIconDatabase;
public class Browser {
// Log tag for all Browser provider helper methods below.
private static final String LOGTAG = "browser";
/**
 * A table containing both bookmarks and history items. The columns of the table are defined in
 * {@link BookmarkColumns}. Reading this table requires the
 * {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS} permission and writing to it
 * requires the {@link android.Manifest.permission#WRITE_HISTORY_BOOKMARKS} permission.
 */
public static final Uri BOOKMARKS_URI = Uri.parse("content://browser/bookmarks");
/**
 * The name of extra data when starting Browser with ACTION_VIEW or
 * ACTION_SEARCH intent.
 * <p>
 * The value should be an integer between 0 and 1000. If not set or set to
 * 0, the Browser will use default. If set to 100, the Browser will start
 * with 100%.
 */
public static final String INITIAL_ZOOM_LEVEL = "browser.initialZoomLevel";
/**
 * The name of the extra data when starting the Browser from another
 * application.
 * <p>
 * The value is a unique identification string that will be used to
 * identify the calling application. The Browser will attempt to reuse the
 * same window each time the application launches the Browser with the same
 * identifier.
 */
public static final String EXTRA_APPLICATION_ID = "com.android.browser.application_id";
/**
 * The name of the extra data in the VIEW intent. The data are key/value
 * pairs in the format of Bundle. They will be sent in the HTTP request
 * headers for the provided url. The keys can't be the standard HTTP headers
 * as they are set by the WebView. The url's schema must be http(s).
 * <p>
 */
public static final String EXTRA_HEADERS = "com.android.browser.headers";
/* if you change column order you must also change indices
below */
public static final String[] HISTORY_PROJECTION = new String[] {
BookmarkColumns._ID, // 0
BookmarkColumns.URL, // 1
BookmarkColumns.VISITS, // 2
BookmarkColumns.DATE, // 3
BookmarkColumns.BOOKMARK, // 4
BookmarkColumns.TITLE, // 5
BookmarkColumns.FAVICON, // 6
BookmarkColumns.THUMBNAIL, // 7
BookmarkColumns.TOUCH_ICON, // 8
BookmarkColumns.USER_ENTERED, // 9
};
/* these indices dependent on HISTORY_PROJECTION */
public static final int HISTORY_PROJECTION_ID_INDEX = 0;
public static final int HISTORY_PROJECTION_URL_INDEX = 1;
public static final int HISTORY_PROJECTION_VISITS_INDEX = 2;
public static final int HISTORY_PROJECTION_DATE_INDEX = 3;
public static final int HISTORY_PROJECTION_BOOKMARK_INDEX = 4;
public static final int HISTORY_PROJECTION_TITLE_INDEX = 5;
public static final int HISTORY_PROJECTION_FAVICON_INDEX = 6;
/**
 * @hide
 */
public static final int HISTORY_PROJECTION_THUMBNAIL_INDEX = 7;
/**
 * @hide
 */
public static final int HISTORY_PROJECTION_TOUCH_ICON_INDEX = 8;
/* columns needed to determine whether to truncate history */
public static final String[] TRUNCATE_HISTORY_PROJECTION = new String[] {
BookmarkColumns._ID,
BookmarkColumns.DATE,
};
public static final int TRUNCATE_HISTORY_PROJECTION_ID_INDEX = 0;
/* truncate this many history items at a time */
public static final int TRUNCATE_N_OLDEST = 5;
/**
 * A table containing a log of browser searches. The columns of the table are defined in
 * {@link SearchColumns}. Reading this table requires the
 * {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS} permission and writing to it
 * requires the {@link android.Manifest.permission#WRITE_HISTORY_BOOKMARKS} permission.
 */
public static final Uri SEARCHES_URI = Uri.parse("content://browser/searches");
/**
 * A projection of {@link #SEARCHES_URI} that contains {@link SearchColumns#_ID},
 * {@link SearchColumns#SEARCH}, and {@link SearchColumns#DATE}.
 */
public static final String[] SEARCHES_PROJECTION = new String[] {
// if you change column order you must also change indices below
SearchColumns._ID, // 0
SearchColumns.SEARCH, // 1
SearchColumns.DATE, // 2
};
/* these indices dependent on SEARCHES_PROJECTION */
public static final int SEARCHES_PROJECTION_SEARCH_INDEX = 1;
public static final int SEARCHES_PROJECTION_DATE_INDEX = 2;
/* Set a cap on the count of history items in the history/bookmark
table, to prevent db and layout operations from dragging to a
crawl. Revisit this cap when/if db/layout performance
improvements are made. Note: this does not affect bookmark
entries -- if the user wants more bookmarks than the cap, they
get them. */
private static final int MAX_HISTORY_COUNT = 250;
/**
 * Open an activity to save a bookmark. Launch with a title
 * and/or a url, both of which can be edited by the user before saving.
 *
 * @param c Context used to launch the activity to add a bookmark.
 * @param title Title for the bookmark. Can be null or empty string.
 * @param url Url for the bookmark. Can be null or empty string.
 */
public static final void saveBookmark(Context c,
String title,
String url) {
Intent intent = new Intent(Intent.ACTION_INSERT, Browser.BOOKMARKS_URI);
intent.putExtra("title", title);
intent.putExtra("url", url);
c.startActivity(intent);
}
/**
 * Boolean extra passed along with an Intent to a browser, specifying that
 * a new tab be created. Overrides EXTRA_APPLICATION_ID; if both are set,
 * a new tab will be used, rather than using the same one.
 */
public static final String EXTRA_CREATE_NEW_TAB = "create_new_tab";
/**
 * Stores a Bitmap extra in an {@link Intent} representing the screenshot of
 * a page to share. When receiving an {@link Intent#ACTION_SEND} from the
 * Browser, use this to access the screenshot.
 * @hide
 */
public final static String EXTRA_SHARE_SCREENSHOT = "share_screenshot";
/**
 * Stores a Bitmap extra in an {@link Intent} representing the favicon of a
 * page to share. When receiving an {@link Intent#ACTION_SEND} from the
 * Browser, use this to access the favicon.
 * @hide
 */
public final static String EXTRA_SHARE_FAVICON = "share_favicon";
/**
 * Sends the given string using an Intent with {@link Intent#ACTION_SEND} and a mime type
 * of text/plain. The string is put into {@link Intent#EXTRA_TEXT}.
 *
 * @param context the context used to start the activity
 * @param string the string to send
 */
public static final void sendString(Context context, String string) {
// delegate to the three-argument overload using the platform "send text" chooser title
sendString(context, string, context.getString(com.android.internal.R.string.sendText));
}
/**
 * Find an application to handle the given string and, if found, invoke
 * it with the given string as a parameter.
 * @param c Context used to launch the new activity.
 * @param stringToSend The string to be handled.
 * @param chooserDialogTitle The title of the dialog that allows the user
 * to select between multiple applications that are all capable of handling
 * the string.
 * @hide pending API council approval
 */
public static final void sendString(Context c,
String stringToSend,
String chooserDialogTitle) {
Intent sendIntent = new Intent(Intent.ACTION_SEND);
sendIntent.setType("text/plain");
sendIntent.putExtra(Intent.EXTRA_TEXT, stringToSend);
Intent chooser = Intent.createChooser(sendIntent, chooserDialogTitle);
// In case this is called from outside an Activity
chooser.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
try {
c.startActivity(chooser);
} catch (android.content.ActivityNotFoundException ex) {
// if no app handles it, do nothing
}
}
/**
 * Return a cursor pointing to a list of all the bookmarks. The cursor will have a single
 * column, {@link BookmarkColumns#URL}.
 * <p>
 * Requires {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS}
 *
 * @param cr The ContentResolver used to access the database.
 */
public static final Cursor getAllBookmarks(ContentResolver cr) throws
IllegalStateException {
// IS_FOLDER = 0 excludes bookmark folders, leaving only actual bookmark entries
return cr.query(Bookmarks.CONTENT_URI,
new String[] { Bookmarks.URL },
Bookmarks.IS_FOLDER + " = 0", null, null);
}
/**
 * Return a cursor pointing to a list of all visited site urls. The cursor will
 * have a single column, {@link BookmarkColumns#URL}.
 * <p>
 * Requires {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS}
 *
 * @param cr The ContentResolver used to access the database.
 */
public static final Cursor getAllVisitedUrls(ContentResolver cr) throws
IllegalStateException {
// results ordered oldest-first by creation date
return cr.query(Combined.CONTENT_URI,
new String[] { Combined.URL }, null, null,
Combined.DATE_CREATED + " ASC");
}
/** Append an " OR url = " fragment to the partially-built WHERE clause. */
private static final void addOrUrlEquals(StringBuilder sb) {
sb.append(" OR ").append(BookmarkColumns.URL).append(" = ");
}
/**
 * Query the history table for entries matching {@code url} under its common
 * variants: the scheme and an optional leading "www." are stripped, then the
 * WHERE clause matches the https/https-www forms (for secure urls) or the
 * bare/www/http/http-www forms (otherwise). Values are escaped with
 * DatabaseUtils.appendEscapedSQLString before being embedded in the clause.
 * Returns a cursor over {History._ID, History.VISITS}.
 */
private static final Cursor getVisitedLike(ContentResolver cr, String url) {
boolean secure = false;
String compareString = url;
if (compareString.startsWith("http://")) {
compareString = compareString.substring(7);
} else if (compareString.startsWith("https://")) {
compareString = compareString.substring(8);
secure = true;
}
if (compareString.startsWith("www.")) {
compareString = compareString.substring(4);
}
StringBuilder whereClause = null;
if (secure) {
// only match the https variants of the url
whereClause = new StringBuilder(Bookmarks.URL + " = ");
DatabaseUtils.appendEscapedSQLString(whereClause,
"https://" + compareString);
addOrUrlEquals(whereClause);
DatabaseUtils.appendEscapedSQLString(whereClause,
"https://www." + compareString);
} else {
// match the bare, www, http and http+www variants of the url
whereClause = new StringBuilder(Bookmarks.URL + " = ");
DatabaseUtils.appendEscapedSQLString(whereClause,
compareString);
addOrUrlEquals(whereClause);
String wwwString = "www." + compareString;
DatabaseUtils.appendEscapedSQLString(whereClause,
wwwString);
addOrUrlEquals(whereClause);
DatabaseUtils.appendEscapedSQLString(whereClause,
"http://" + compareString);
addOrUrlEquals(whereClause);
DatabaseUtils.appendEscapedSQLString(whereClause,
"http://" + wwwString);
}
return cr.query(History.CONTENT_URI, new String[] { History._ID, History.VISITS },
whereClause.toString(), null, null);
}
/**
 * Update the visited history to acknowledge that a site has been
 * visited.
 * Requires {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS}
 * Requires {@link android.Manifest.permission#WRITE_HISTORY_BOOKMARKS}
 * @param cr The ContentResolver used to access the database.
 * @param url The site being visited.
 * @param real If true, this is an actual visit, and should add to the
 * number of visits. If false, the user entered it manually.
 */
public static final void updateVisitedHistory(ContentResolver cr,
String url, boolean real) {
long now = System.currentTimeMillis();
Cursor c = null;
try {
c = getVisitedLike(cr, url);
/* We should only get one answer that is exactly the same. */
if (c.moveToFirst()) {
// existing entry: bump the visit count or mark user-entered, refresh the date
ContentValues values = new ContentValues();
if (real) {
values.put(History.VISITS, c.getInt(1) + 1);
} else {
values.put(History.USER_ENTERED, 1);
}
values.put(History.DATE_LAST_VISITED, now);
cr.update(ContentUris.withAppendedId(History.CONTENT_URI, c.getLong(0)),
values, null, null);
} else {
// new entry: trim old history first, then insert a fresh row
truncateHistory(cr);
ContentValues values = new ContentValues();
int visits;
int user_entered;
if (real) {
visits = 1;
user_entered = 0;
} else {
visits = 0;
user_entered = 1;
}
values.put(History.URL, url);
values.put(History.VISITS, visits);
values.put(History.DATE_LAST_VISITED, now);
values.put(History.TITLE, url);
values.put(History.DATE_CREATED, 0);
values.put(History.USER_ENTERED, user_entered);
cr.insert(History.CONTENT_URI, values);
}
} catch (IllegalStateException e) {
Log.e(LOGTAG, "updateVisitedHistory", e);
} finally {
if (c != null) c.close();
}
}
/**
 * Returns all the URLs in the history.
 * Requires {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS}
 * @param cr The ContentResolver used to access the database.
 * @hide pending API council approval
 */
public static final String[] getVisitedHistory(ContentResolver cr) {
Cursor c = null;
String[] str = null;
try {
String[] projection = new String[] {
History.URL,
};
// only rows with at least one real visit
c = cr.query(History.CONTENT_URI, projection, History.VISITS + " > 0", null, null);
if (c == null) return new String[0];
str = new String[c.getCount()];
int i = 0;
while (c.moveToNext()) {
str[i] = c.getString(0);
i++;
}
} catch (IllegalStateException e) {
Log.e(LOGTAG, "getVisitedHistory", e);
str = new String[0];
} finally {
if (c != null) c.close();
}
return str;
}
/**
 * If there are more than MAX_HISTORY_COUNT non-bookmark history
 * items in the bookmark/history table, delete TRUNCATE_N_OLDEST
 * of them. This is used to keep our history table to a
 * reasonable size. Note: it does not prune bookmarks. If the
 * user wants 1000 bookmarks, the user gets 1000 bookmarks.
 * Requires {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS}
 * Requires {@link android.Manifest.permission#WRITE_HISTORY_BOOKMARKS}
 *
 * @param cr The ContentResolver used to access the database.
 */
public static final void truncateHistory(ContentResolver cr) {
// TODO make a single request to the provider to do this in a single transaction
Cursor cursor = null;
try {
// Select non-bookmark history, ordered by date (oldest first)
cursor = cr.query(History.CONTENT_URI,
new String[] { History._ID, History.URL, History.DATE_LAST_VISITED },
null, null, History.DATE_LAST_VISITED + " ASC");
// query() may return null if the provider is unavailable; guard against NPE
if (cursor != null && cursor.moveToFirst() && cursor.getCount() >= MAX_HISTORY_COUNT) {
final WebIconDatabase iconDb = WebIconDatabase.getInstance();
/* eliminate oldest history items */
for (int i = 0; i < TRUNCATE_N_OLDEST; i++) {
cr.delete(ContentUris.withAppendedId(History.CONTENT_URI, cursor.getLong(0)),
null, null);
iconDb.releaseIconForPageUrl(cursor.getString(1));
if (!cursor.moveToNext()) break;
}
}
} catch (IllegalStateException e) {
Log.e(LOGTAG, "truncateHistory", e);
} finally {
if (cursor != null) cursor.close();
}
}
/**
 * Returns whether there is any history to clear.
 * Requires {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS}
 * @param cr The ContentResolver used to access the database.
 * @return boolean True if the history can be cleared.
 */
public static final boolean canClearHistory(ContentResolver cr) {
Cursor cursor = null;
boolean ret = false;
try {
cursor = cr.query(History.CONTENT_URI,
new String [] { History._ID, History.VISITS },
null, null, null);
// query() may return null if the provider is unavailable; guard against NPE
ret = cursor != null && cursor.getCount() > 0;
} catch (IllegalStateException e) {
Log.e(LOGTAG, "canClearHistory", e);
} finally {
if (cursor != null) cursor.close();
}
return ret;
}
/**
 * Delete all entries from the bookmarks/history table which are
 * not bookmarks. Also set all visited bookmarks to unvisited.
 * Requires {@link android.Manifest.permission#WRITE_HISTORY_BOOKMARKS}
 * @param cr The ContentResolver used to access the database.
 */
public static final void clearHistory(ContentResolver cr) {
// a null where clause matches every history row
deleteHistoryWhere(cr, null);
}
/**
 * Helper function to delete all history items and release the icons for them in the
 * {@link WebIconDatabase}.
 *
 * Requires {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS}
 * Requires {@link android.Manifest.permission#WRITE_HISTORY_BOOKMARKS}
 *
 * @param cr The ContentResolver used to access the database.
 * @param whereClause String to limit the items affected.
 * null means all items.
 */
private static final void deleteHistoryWhere(ContentResolver cr, String whereClause) {
Cursor cursor = null;
try {
cursor = cr.query(History.CONTENT_URI, new String[] { History.URL }, whereClause,
null, null);
// query() may return null if the provider is unavailable; guard against NPE
if (cursor != null && cursor.moveToFirst()) {
final WebIconDatabase iconDb = WebIconDatabase.getInstance();
do {
// Delete favicons
// TODO don't release if the URL is bookmarked
iconDb.releaseIconForPageUrl(cursor.getString(0));
} while (cursor.moveToNext());
cr.delete(History.CONTENT_URI, whereClause, null);
}
} catch (IllegalStateException e) {
Log.e(LOGTAG, "deleteHistoryWhere", e);
} finally {
if (cursor != null) cursor.close();
}
}
/**
 * Delete all history items from begin to end.
 * Requires {@link android.Manifest.permission#WRITE_HISTORY_BOOKMARKS}
 * @param cr The ContentResolver used to access the database.
 * @param begin First date to remove. If -1, all dates before end.
 * Inclusive.
 * @param end Last date to remove. If -1, all dates after begin.
 * Non-inclusive.
 */
public static final void deleteHistoryTimeFrame(ContentResolver cr,
long begin, long end) {
// unbounded on both sides means "everything"
if (begin == -1 && end == -1) {
clearHistory(cr);
return;
}
String date = BookmarkColumns.DATE;
String whereClause;
if (begin == -1) {
whereClause = date + " < " + Long.toString(end);
} else if (end == -1) {
whereClause = date + " >= " + Long.toString(begin);
} else {
whereClause = date + " >= " + Long.toString(begin) + " AND " + date
+ " < " + Long.toString(end);
}
deleteHistoryWhere(cr, whereClause);
}
/**
 * Remove a specific url from the history database.
 * Requires {@link android.Manifest.permission#WRITE_HISTORY_BOOKMARKS}
 * @param cr The ContentResolver used to access the database.
 * @param url url to remove.
 */
public static final void deleteFromHistory(ContentResolver cr,
String url) {
// parameterized selection avoids SQL-escaping issues with the url
cr.delete(History.CONTENT_URI, History.URL + "=?", new String[] { url });
}
/**
 * Add a search string to the searches database.
 * Requires {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS}
 * Requires {@link android.Manifest.permission#WRITE_HISTORY_BOOKMARKS}
 * @param cr The ContentResolver used to access the database.
 * @param search The string to add to the searches database.
 */
public static final void addSearchUrl(ContentResolver cr, String search) {
// The content provider will take care of updating existing searches instead of duplicating
ContentValues values = new ContentValues();
values.put(Searches.SEARCH, search);
values.put(Searches.DATE, System.currentTimeMillis());
cr.insert(Searches.CONTENT_URI, values);
}
/**
 * Remove all searches from the search database.
 * Requires {@link android.Manifest.permission#WRITE_HISTORY_BOOKMARKS}
 * @param cr The ContentResolver used to access the database.
 */
public static final void clearSearches(ContentResolver cr) {
// FIXME: Should this clear the urls to which these searches lead?
// (i.e. remove google.com/query= blah blah blah)
try {
cr.delete(Searches.CONTENT_URI, null, null);
} catch (IllegalStateException e) {
Log.e(LOGTAG, "clearSearches", e);
}
}
/**
 * Request all icons from the database. This call must either be called
 * in the main thread or have had Looper.prepare() invoked in the calling
 * thread.
 * Requires {@link android.Manifest.permission#READ_HISTORY_BOOKMARKS}
 * @param cr The ContentResolver used to access the database.
 * @param where Clause to be used to limit the query from the database.
 * Must be an allowable string to be passed into a database query.
 * @param listener IconListener that gets the icons once they are
 * retrieved.
 */
public static final void requestAllIcons(ContentResolver cr, String where,
WebIconDatabase.IconListener listener) {
// delegate directly to the singleton icon database
WebIconDatabase.getInstance().bulkRequestIconForPageUrl(cr, where, listener);
}
/**
 * Column definitions for the mixed bookmark and history items available
 * at {@link #BOOKMARKS_URI}.
 */
public static class BookmarkColumns implements BaseColumns {
/**
 * The URL of the bookmark or history item.
 * <p>Type: TEXT (URL)</p>
 */
public static final String URL = "url";
/**
 * The number of time the item has been visited.
 * <p>Type: NUMBER</p>
 */
public static final String VISITS = "visits";
/**
 * The date the item was last visited, in milliseconds since the epoch.
 * <p>Type: NUMBER (date in milliseconds since January 1, 1970)</p>
 */
public static final String DATE = "date";
/**
 * Flag indicating that an item is a bookmark. A value of 1 indicates a bookmark, a value
 * of 0 indicates a history item.
 * <p>Type: INTEGER (boolean)</p>
 */
public static final String BOOKMARK = "bookmark";
/**
 * The user visible title of the bookmark or history item.
 * <p>Type: TEXT</p>
 */
public static final String TITLE = "title";
/**
 * The date the item created, in milliseconds since the epoch.
 * <p>Type: NUMBER (date in milliseconds since January 1, 1970)</p>
 */
public static final String CREATED = "created";
/**
 * The favicon of the bookmark. Must decode via {@link BitmapFactory#decodeByteArray}.
 * <p>Type: BLOB (image)</p>
 */
public static final String FAVICON = "favicon";
/**
 * Thumbnail of the page at this URL.
 * @hide
 */
public static final String THUMBNAIL = "thumbnail";
/**
 * Touch icon of the page at this URL.
 * @hide
 */
public static final String TOUCH_ICON = "touch_icon";
/**
 * Flag set when the entry was typed by the user rather than visited.
 * @hide
 */
public static final String USER_ENTERED = "user_entered";
}
/**
 * Column definitions for the search history table, available at {@link #SEARCHES_URI}.
 */
public static class SearchColumns implements BaseColumns {
/**
 * @deprecated Not used.
 */
@Deprecated
public static final String URL = "url";
/**
 * The user entered search term.
 */
public static final String SEARCH = "search";
/**
 * The date the search was performed, in milliseconds since the epoch.
 * <p>Type: NUMBER (date in milliseconds since January 1, 1970)</p>
 */
public static final String DATE = "date";
}
}
| |
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.verifier.incoherence;
import org.drools.core.base.RuleNameMatchesAgendaFilter;
import org.drools.verifier.DefaultVerifierConfiguration;
import org.drools.verifier.TestBaseOld;
import org.drools.verifier.Verifier;
import org.drools.verifier.builder.ScopesAgendaFilter;
import org.drools.verifier.builder.VerifierBuilder;
import org.drools.verifier.builder.VerifierBuilderFactory;
import org.drools.verifier.components.Pattern;
import org.drools.verifier.data.VerifierReport;
import org.drools.verifier.data.VerifierReportFactory;
import org.drools.verifier.report.components.Severity;
import org.drools.verifier.report.components.VerifierMessage;
import org.drools.verifier.report.components.VerifierMessageBase;
import org.junit.Test;
import org.kie.api.runtime.KieSession;
import org.kie.internal.io.ResourceFactory;
import org.kie.api.io.ResourceType;
import java.io.StringReader;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import static org.junit.Assert.*;
public class IncoherentRestrictionsTest extends TestBaseOld {
@Test
public void testApprovedTrueAndNotTrue() {
VerifierBuilder vBuilder = VerifierBuilderFactory.newVerifierBuilder();
// Check that the builder works.
assertFalse(vBuilder.hasErrors());
assertEquals(0,
vBuilder.getErrors().size());
// Rule with a self-contradictory pattern: approved == "true" AND approved != "true"
String str = "";
str += "package mortgages\n";
str += "rule \"Bankruptcy history\"\n";
str += "salience 10\n";
str += "dialect \"mvel\"\n";
str += "when\n";
str += "Applicant( approved == \"true\" , approved != \"true\" )\n";
str += "then\n";
str += "end";
DefaultVerifierConfiguration conf = new DefaultVerifierConfiguration();
Verifier verifier = VerifierBuilderFactory.newVerifierBuilder().newVerifier(conf);
verifier.addResourcesToVerify(ResourceFactory.newReaderResource(new StringReader(str)),
ResourceType.DRL);
// The DRL itself must parse without errors before analysis runs.
assertFalse(verifier.hasErrors());
assertEquals(0,
verifier.getErrors().size());
boolean works = verifier.fireAnalysis(new ScopesAgendaFilter(true,
ScopesAgendaFilter.VERIFYING_SCOPE_KNOWLEDGE_PACKAGE));
assertTrue(works);
// The incoherent restriction must be reported: 3 errors, 1 warning, no notes.
VerifierReport result = verifier.getResult();
assertNotNull(result);
assertEquals(3,
result.getBySeverity(Severity.ERROR).size());
assertEquals(1,
result.getBySeverity(Severity.WARNING).size());
assertEquals(0,
result.getBySeverity(Severity.NOTE).size());
}
@Test
public void testIncoherentLiteralRestrictionsInSubPattern() throws Exception {
KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("Restrictions.drl"));
VerifierReport result = VerifierReportFactory.newVerifierReport();
Collection<? extends Object> testData = getTestData(getClass().getResourceAsStream("RestrictionsTest.drl"),
result.getVerifierData());
session.setGlobal("result",
result);
for (Object fact : testData) {
session.insert(fact);
}
// Fire only the rule under test.
session.fireAllRules(new RuleNameMatchesAgendaFilter("Incoherent LiteralRestrictions in pattern possibility"));
// Collect the names of every rule flagged with an ERROR-severity verifier message.
Set<String> rulesThatHadErrors = new HashSet<String>();
Iterator<VerifierMessageBase> iter = result.getBySeverity(Severity.ERROR).iterator();
while (iter.hasNext()) {
VerifierMessageBase message = iter.next();
if (message instanceof VerifierMessage) {
Pattern pattern = (Pattern) ((VerifierMessage) message).getFaulty();
rulesThatHadErrors.add(pattern.getRuleName());
}
}
// The two intentionally incoherent rules must be flagged...
assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 1"));
assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 2"));
// ...and nothing else may be.
for (String ruleName : rulesThatHadErrors) {
fail("Rule " + ruleName + " caused an error.");
}
}
@Test
public void testIncoherentLiteralRestrictionsInSubPatternImpossibleRanges() throws Exception {
KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("Restrictions.drl"));
VerifierReport result = VerifierReportFactory.newVerifierReport();
Collection<? extends Object> testData = getTestData(this.getClass().getResourceAsStream("RestrictionsTest.drl"),
result.getVerifierData());
session.setGlobal("result",
result);
for (Object fact : testData) {
session.insert(fact);
}
// Fire only the rule under test.
session.fireAllRules(new RuleNameMatchesAgendaFilter("Incoherent LiteralRestrictions with ranges in pattern possibility, impossible ranges"));
// Collect the names of every rule flagged with an ERROR-severity verifier message.
Set<String> rulesThatHadErrors = new HashSet<String>();
Iterator<VerifierMessageBase> iter = result.getBySeverity(Severity.ERROR).iterator();
while (iter.hasNext()) {
VerifierMessageBase message = iter.next();
if (message instanceof VerifierMessage) {
Pattern pattern = (Pattern) ((VerifierMessage) message).getFaulty();
rulesThatHadErrors.add(pattern.getRuleName());
}
}
// The intentionally impossible-range rule must be flagged...
assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 8"));
// ...and nothing else may be.
for (String ruleName : rulesThatHadErrors) {
fail("Rule " + ruleName + " caused an error.");
}
}
@Test
public void testIncoherentLiteralRestrictionsInSubPatternImpossibleEqualityLess() throws Exception {
    // Verifies that an equality combined with an incompatible less-or-equal
    // restriction in a sub-pattern is reported as an ERROR.
    KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("Restrictions.drl"));
    VerifierReport result = VerifierReportFactory.newVerifierReport();
    Collection<? extends Object> testData = getTestData(this.getClass().getResourceAsStream("RestrictionsTest.drl"),
                                                        result.getVerifierData());
    session.setGlobal("result", result);
    for (Object o : testData) {
        session.insert(o);
    }
    session.fireAllRules(new RuleNameMatchesAgendaFilter("Incoherent LiteralRestrictions with ranges in pattern possibility, impossible equality less or equal"));
    // Collect the names of every rule that produced an ERROR-severity message.
    Set<String> rulesThatHadErrors = new HashSet<String>();
    for (VerifierMessageBase message : result.getBySeverity(Severity.ERROR)) {
        if (message instanceof VerifierMessage) {
            Pattern pattern = (Pattern) ((VerifierMessage) message).getFaulty();
            rulesThatHadErrors.add(pattern.getRuleName());
        }
    }
    // The known-faulty rules must be flagged; anything left over is unexpected.
    assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 9"));
    assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 11"));
    for (String ruleName : rulesThatHadErrors) {
        fail("Rule " + ruleName + " caused an error.");
    }
}
@Test
public void testIncoherentLiteralRestrictionsInSubPatternImpossibleEqualityGreater() throws Exception {
    // Verifies that an equality combined with an incompatible greater-than
    // restriction in a sub-pattern is reported as an ERROR.
    KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("Restrictions.drl"));
    VerifierReport result = VerifierReportFactory.newVerifierReport();
    Collection<? extends Object> testData = getTestData(this.getClass().getResourceAsStream("RestrictionsTest.drl"),
                                                        result.getVerifierData());
    session.setGlobal("result", result);
    for (Object o : testData) {
        session.insert(o);
    }
    session.fireAllRules(new RuleNameMatchesAgendaFilter("Incoherent LiteralRestrictions with ranges in pattern possibility, impossible equality greater"));
    // Collect the names of every rule that produced an ERROR-severity message.
    Set<String> rulesThatHadErrors = new HashSet<String>();
    for (VerifierMessageBase message : result.getBySeverity(Severity.ERROR)) {
        if (message instanceof VerifierMessage) {
            Pattern pattern = (Pattern) ((VerifierMessage) message).getFaulty();
            rulesThatHadErrors.add(pattern.getRuleName());
        }
    }
    // The known-faulty rule must be flagged; anything left over is unexpected.
    assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 10"));
    for (String ruleName : rulesThatHadErrors) {
        fail("Rule " + ruleName + " caused an error.");
    }
}
@Test
public void testIncoherentLiteralRestrictionsInSubPatternImpossibleRange() throws Exception {
    // Verifies that a single impossible range built from literal restrictions
    // in a sub-pattern is reported as an ERROR.
    KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("Restrictions.drl"));
    VerifierReport result = VerifierReportFactory.newVerifierReport();
    Collection<? extends Object> testData = getTestData(this.getClass().getResourceAsStream("RestrictionsTest.drl"),
                                                        result.getVerifierData());
    session.setGlobal("result", result);
    for (Object o : testData) {
        session.insert(o);
    }
    session.fireAllRules(new RuleNameMatchesAgendaFilter("Incoherent LiteralRestrictions with ranges in pattern possibility, impossible range"));
    // Collect the names of every rule that produced an ERROR-severity message.
    Set<String> rulesThatHadErrors = new HashSet<String>();
    for (VerifierMessageBase message : result.getBySeverity(Severity.ERROR)) {
        if (message instanceof VerifierMessage) {
            Pattern pattern = (Pattern) ((VerifierMessage) message).getFaulty();
            rulesThatHadErrors.add(pattern.getRuleName());
        }
    }
    // The known-faulty rule must be flagged; anything left over is unexpected.
    assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 7"));
    for (String ruleName : rulesThatHadErrors) {
        fail("Rule " + ruleName + " caused an error.");
    }
}
@Test
public void testIncoherentVariableRestrictionsInSubPattern() throws Exception {
    // Verifies that incoherent variable restrictions inside a sub-pattern
    // are reported as ERROR-severity messages for the expected rules.
    KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("Restrictions.drl"));
    VerifierReport result = VerifierReportFactory.newVerifierReport();
    Collection<? extends Object> testData = getTestData(this.getClass().getResourceAsStream("RestrictionsTest.drl"),
                                                        result.getVerifierData());
    session.setGlobal("result", result);
    for (Object o : testData) {
        session.insert(o);
    }
    session.fireAllRules(new RuleNameMatchesAgendaFilter("Incoherent VariableRestrictions in pattern possibility"));
    // Collect the names of every rule that produced an ERROR-severity message.
    Set<String> rulesThatHadErrors = new HashSet<String>();
    for (VerifierMessageBase message : result.getBySeverity(Severity.ERROR)) {
        if (message instanceof VerifierMessage) {
            Pattern pattern = (Pattern) ((VerifierMessage) message).getFaulty();
            rulesThatHadErrors.add(pattern.getRuleName());
        }
    }
    // The known-faulty rules must be flagged; anything left over is unexpected.
    assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 3"));
    assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 4"));
    assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 5"));
    for (String ruleName : rulesThatHadErrors) {
        fail("Rule " + ruleName + " caused an error.");
    }
}
@Test
public void testIncoherentVariableRestrictionsInSubPatternImpossibleRange() throws Exception {
    // Verifies that variable restrictions forming an impossible range inside
    // a sub-pattern are reported as ERROR-severity messages.
    KieSession session = getStatelessKieSession(this.getClass().getResourceAsStream("Restrictions.drl"));
    VerifierReport result = VerifierReportFactory.newVerifierReport();
    Collection<? extends Object> testData = getTestData(this.getClass().getResourceAsStream("RestrictionsTest.drl"),
                                                        result.getVerifierData());
    session.setGlobal("result", result);
    for (Object o : testData) {
        session.insert(o);
    }
    session.fireAllRules(new RuleNameMatchesAgendaFilter("Incoherent VariableRestrictions in pattern possibility, impossible range"));
    // Collect the names of every rule that produced an ERROR-severity message.
    Set<String> rulesThatHadErrors = new HashSet<String>();
    for (VerifierMessageBase message : result.getBySeverity(Severity.ERROR)) {
        if (message instanceof VerifierMessage) {
            Pattern pattern = (Pattern) ((VerifierMessage) message).getFaulty();
            rulesThatHadErrors.add(pattern.getRuleName());
        }
    }
    // The known-faulty rule must be flagged; anything left over is unexpected.
    assertTrue(rulesThatHadErrors.remove("Incoherent restrictions 6"));
    for (String ruleName : rulesThatHadErrors) {
        fail("Rule " + ruleName + " caused an error.");
    }
}
}
| |
package br.unicamp.ic.sed.mobilemedia.filesystemmgr.impl;
import java.io.InputStream;
import br.unicamp.ic.sed.mobilemedia.filesystemmgr.spec.dt.ImageData;
import br.unicamp.ic.sed.mobilemedia.filesystemmgr.spec.excep.ImagePathNotValidException;
import br.unicamp.ic.sed.mobilemedia.filesystemmgr.spec.excep.InvalidArrayFormatException;
import br.unicamp.ic.sed.mobilemedia.filesystemmgr.spec.excep.InvalidImageDataException;
import br.unicamp.ic.sed.mobilemedia.filesystemmgr.spec.excep.InvalidImageFormatException;
import br.unicamp.ic.sed.mobilemedia.main.spec.dt.IImageData;
/**
* @author trevor This is a utility class. It performs conversions between Image
* objects and byte arrays, and Image metadata objects and byte arrays.
* Byte arrays are the main format for storing data in RMS, and for
* sending data over the wire.
*/
class ImageUtil {

    /** Delimiter used in record store data to separate fields in a string. */
    protected static final String DELIMITER = "*";

    // End index of the last field parsed by getImageInfoFromBytes(..).
    // Kept as a field (rather than a local) so aspects can observe it via
    // getEndIndex()/setEndIndex() — see the [cosmos] tags below.
    private int endIndex = 0;

    /**
     * Constructor.
     */
    public ImageUtil() {
        super();
    }

    /**
     * Reads an image resource into a byte array, converting it from a standard
     * image file format into raw bytes so that it can be transported over
     * wireless protocols such as SMS.
     *
     * <p>NOTE(review): the returned array is NOT trimmed to the number of
     * bytes actually read — it is returned at its grown capacity, padded with
     * trailing zeros. Preserved as-is; confirm with callers before trimming.</p>
     *
     * @param imageFile classpath resource path of the image to read
     * @return the image bytes (possibly zero-padded at the end, see note)
     * @throws ImagePathNotValidException declared for callers; note that a
     *         missing resource currently surfaces as a NullPointerException
     *         from the read loop — TODO confirm and map to this exception
     * @throws InvalidImageFormatException declared for callers
     */
    public byte[] readImageAsByteArray(String imageFile)
            throws ImagePathNotValidException, InvalidImageFormatException {
        // Read an Image into a byte array; required to transfer images over SMS.
        InputStream is = (InputStream) this.getClass().getResourceAsStream(imageFile);
        byte[] bArray = new byte[1000];
        int len = 0;
        byte[] b = new byte[1];
        while (is.read(b) != -1) {
            if (len + 1 >= bArray.length) {
                // Grow by 500 bytes, copying the data read so far in a single
                // pass (the old code copied the buffer twice per growth).
                byte[] bigger = new byte[bArray.length + 500];
                System.arraycopy(bArray, 0, bigger, 0, len);
                bArray = bigger;
            }
            bArray[len] = b[0];
            len++;
        }
        is.close();
        return bArray;
    }

    /**
     * Converts the byte array from a retrieved RecordStore record into an
     * ImageInfo (renamed ImageData) object. The serialized string layout is:
     * {@code <recordId>*<foreignRecordId>*<albumName>*<type>*<imageLabel>}.
     * Depending on optional features, additional fields may follow
     * (e.g. {@code <phoneNum>}).
     *
     * @param bytes the raw record bytes to parse
     * @return the reconstructed image metadata object
     * @throws InvalidArrayFormatException declared for callers; malformed
     *         input currently surfaces as runtime exceptions from
     *         substring/parse — TODO confirm and map to this exception
     */
    // [cosmos][MD Sce. 2] signature kept so aspects can advise this method.
    public IImageData getImageInfoFromBytes(byte[] bytes)
            throws InvalidArrayFormatException {
        String iiString = new String(bytes);
        // Walk the string delimiter by delimiter; startIndex/endIndex bracket
        // the current field. endIndex is a FIELD so the final parse position
        // remains visible to aspects after this method returns.
        int startIndex = 0;
        endIndex = iiString.indexOf(DELIMITER);
        // <recordId> — everything before the first delimiter.
        String intString = iiString.substring(startIndex, endIndex);
        // <foreignRecordId> — record ID in the corresponding image table.
        startIndex = endIndex + 1;
        endIndex = iiString.indexOf(DELIMITER, startIndex);
        String fidString = iiString.substring(startIndex, endIndex);
        // <albumName> — the album (record store) this image belongs to.
        startIndex = endIndex + 1;
        endIndex = iiString.indexOf(DELIMITER, startIndex);
        String albumLabel = iiString.substring(startIndex, endIndex);
        // <type>
        startIndex = endIndex + 1;
        endIndex = iiString.indexOf(DELIMITER, startIndex);
        String type = iiString.substring(startIndex, endIndex);
        // <imageLabel> — last mandatory field; may or may not be followed by
        // another delimiter, so fall back to the end of the string.
        startIndex = endIndex + 1;
        endIndex = iiString.indexOf(DELIMITER, startIndex);
        if (endIndex == -1) {
            endIndex = iiString.length();
        }
        String imageLabel = iiString.substring(startIndex, endIndex);
        // Build the ImageData through the overridable factory hook so aspects
        // can intercept creation (see createImageData).
        IImageData ii = createImageData(Integer.parseInt(fidString), albumLabel, imageLabel, bytes, endIndex);
        ii.setRecordId(Integer.parseInt(intString));
        ii.setType(type);
        return ii;
    }

    /*****
     * Factory hook added to expose image-data creation to aspects.
     * @author Marcelo
     * Scenario 2 - Sorting by View
     *
     * Tags:[cosmos][add]
     */
    public IImageData createImageData(int foreignRecordId, String parentAlbumName, String imageLabel, byte[] bytes, int endIndex) {
        // bytes/endIndex are unused here; they exist so advice applied to this
        // join point can read the raw record and parse position.
        return new ImageData(foreignRecordId, parentAlbumName, imageLabel);
    }

    /**
     * Converts the ImageInfo (renamed ImageData) object into bytes for storage
     * in RMS. The serialized string layout is:
     * {@code <recordId>*<foreignRecordId>*<albumName>*<type>*<imageLabel>}.
     * Depending on optional features, additional fields may follow
     * (e.g. {@code <phoneNum>}).
     *
     * @param ii the image metadata to serialize
     * @return the serialized record bytes
     * @throws InvalidImageDataException declared for callers
     */
    public byte[] getBytesFromImageInfo(IImageData ii) throws InvalidImageDataException {
        // Join each field with DELIMITER. StringBuffer (CLDC-safe) replaces
        // the previous chain of String.concat calls, which allocated a new
        // String per field.
        StringBuffer record = new StringBuffer();
        record.append(ii.getRecordId()).append(DELIMITER);
        record.append(ii.getForeignRecordId()).append(DELIMITER);
        record.append(ii.getParentAlbumName()).append(DELIMITER);
        record.append(ii.getType()).append(DELIMITER);
        record.append(ii.getImageLabel());
        return record.toString().getBytes();
    }

    /** Setter exposed for aspects; see the endIndex field note. */
    protected void setEndIndex(int endIndex) {
        this.endIndex = endIndex;
    }

    /** Getter exposed for aspects; see the endIndex field note. */
    protected int getEndIndex() {
        return endIndex;
    }
}
| |
/*
* Copyright (c) 2003, PostgreSQL Global Development Group
* See the LICENSE file in the project root for more information.
*/
package org.postgresql.core;
import org.postgresql.PGNotification;
import org.postgresql.PGProperty;
import org.postgresql.jdbc.AutoSave;
import org.postgresql.jdbc.EscapeSyntaxCallMode;
import org.postgresql.jdbc.PreferQueryMode;
import org.postgresql.util.HostSpec;
import org.postgresql.util.LruCache;
import org.postgresql.util.PSQLException;
import org.postgresql.util.PSQLState;
import org.postgresql.util.ServerErrorMessage;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.io.IOException;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Common base class for protocol-specific {@code QueryExecutor} implementations.
 * Holds connection-scoped state shared across protocol versions: the wire
 * stream, backend cancel key data, transaction/warning/notification state,
 * GUC_REPORT parameter statuses and the prepared-statement cache.
 */
public abstract class QueryExecutorBase implements QueryExecutor {

    private static final Logger LOGGER = Logger.getLogger(QueryExecutorBase.class.getName());

    /** Wire-protocol stream to the backend; shared with protocol subclasses. */
    protected final PGStream pgStream;
    private final String user;
    private final String database;
    /** Timeout (ms) for the separate cancel-request connection; 0 disables it. */
    private final int cancelSignalTimeout;

    // Backend key data received at connection startup; needed to build the
    // out-of-band CancelRequest packet (see sendQueryCancel).
    private int cancelPid;
    private int cancelKey;

    private boolean closed = false;
    private @MonotonicNonNull String serverVersion;
    private int serverVersionNum = 0;
    private TransactionState transactionState = TransactionState.IDLE;
    private final boolean reWriteBatchedInserts;
    private final boolean columnSanitiserDisabled;
    private final EscapeSyntaxCallMode escapeSyntaxCallMode;
    private final boolean quoteReturningIdentifiers;
    private final PreferQueryMode preferQueryMode;
    private AutoSave autoSave;
    private boolean flushCacheOnDeallocate = true;
    protected final boolean logServerErrorDetail;

    // default value for server versions that don't report standard_conforming_strings
    private boolean standardConformingStrings = false;

    // Head of the SQLWarning chain; drained by getWarnings().
    private @Nullable SQLWarning warnings;
    // Async notifications received from the backend; drained by getNotifications().
    private final ArrayList<PGNotification> notifications = new ArrayList<PGNotification>();

    // LRU cache of parsed/prepared queries, keyed by SQL string or a
    // structured key (see createQueryKey).
    private final LruCache<Object, CachedQuery> statementCache;
    private final CachedQueryCreateAction cachedQueryCreateAction;

    // For getParameterStatuses(), GUC_REPORT tracking
    private final TreeMap<String,String> parameterStatuses
        = new TreeMap<String,String>(String.CASE_INSENSITIVE_ORDER);

    /**
     * Creates the executor, reading cache sizes and behavioral flags from the
     * connection properties.
     *
     * @param pgStream the established backend stream
     * @param user connected user name
     * @param database connected database name
     * @param cancelSignalTimeout timeout (ms) for cancel-request connections
     * @param info connection properties
     * @throws SQLException if a property value is invalid
     */
    @SuppressWarnings({"assignment.type.incompatible", "argument.type.incompatible"})
    protected QueryExecutorBase(PGStream pgStream, String user,
        String database, int cancelSignalTimeout, Properties info) throws SQLException {
        this.pgStream = pgStream;
        this.user = user;
        this.database = database;
        this.cancelSignalTimeout = cancelSignalTimeout;
        this.reWriteBatchedInserts = PGProperty.REWRITE_BATCHED_INSERTS.getBoolean(info);
        this.columnSanitiserDisabled = PGProperty.DISABLE_COLUMN_SANITISER.getBoolean(info);
        String callMode = PGProperty.ESCAPE_SYNTAX_CALL_MODE.get(info);
        this.escapeSyntaxCallMode = EscapeSyntaxCallMode.of(callMode);
        this.quoteReturningIdentifiers = PGProperty.QUOTE_RETURNING_IDENTIFIERS.getBoolean(info);
        String preferMode = PGProperty.PREFER_QUERY_MODE.get(info);
        this.preferQueryMode = PreferQueryMode.of(preferMode);
        this.autoSave = AutoSave.of(PGProperty.AUTOSAVE.get(info));
        this.logServerErrorDetail = PGProperty.LOG_SERVER_ERROR_DETAIL.getBoolean(info);
        // assignment.type.incompatible, argument.type.incompatible
        this.cachedQueryCreateAction = new CachedQueryCreateAction(this);
        statementCache = new LruCache<Object, CachedQuery>(
            Math.max(0, PGProperty.PREPARED_STATEMENT_CACHE_QUERIES.getInt(info)),
            Math.max(0, PGProperty.PREPARED_STATEMENT_CACHE_SIZE_MIB.getInt(info) * 1024L * 1024L),
            false,
            cachedQueryCreateAction,
            new LruCache.EvictAction<CachedQuery>() {
                @Override
                public void evict(CachedQuery cachedQuery) throws SQLException {
                    // Release server-side resources for queries evicted from the cache.
                    cachedQuery.query.close();
                }
            });
    }

    /** Sends the protocol-specific Terminate message; called from close(). */
    protected abstract void sendCloseMessage() throws IOException;

    @Override
    public void setNetworkTimeout(int milliseconds) throws IOException {
        pgStream.setNetworkTimeout(milliseconds);
    }

    @Override
    public int getNetworkTimeout() throws IOException {
        return pgStream.getNetworkTimeout();
    }

    @Override
    public HostSpec getHostSpec() {
        return pgStream.getHostSpec();
    }

    @Override
    public String getUser() {
        return user;
    }

    @Override
    public String getDatabase() {
        return database;
    }

    /**
     * Records the backend key data (process id and secret key) delivered at
     * connection startup; used later to issue cancel requests.
     */
    public void setBackendKeyData(int cancelPid, int cancelKey) {
        this.cancelPid = cancelPid;
        this.cancelKey = cancelKey;
    }

    @Override
    public int getBackendPID() {
        return cancelPid;
    }

    /**
     * Forcibly closes the underlying socket without sending Terminate.
     * Any IOException from the close is intentionally ignored.
     */
    @Override
    public void abort() {
        try {
            pgStream.getSocket().close();
        } catch (IOException e) {
            // ignore
        }
        closed = true;
    }

    /**
     * Gracefully closes the connection: sends Terminate, flushes and closes
     * the stream. Idempotent; IO errors during close are logged and discarded.
     */
    @Override
    public void close() {
        if (closed) {
            return;
        }
        try {
            LOGGER.log(Level.FINEST, " FE=> Terminate");
            sendCloseMessage();
            pgStream.flush();
            pgStream.close();
        } catch (IOException ioe) {
            LOGGER.log(Level.FINEST, "Discarding IOException on close:", ioe);
        }
        closed = true;
    }

    @Override
    public boolean isClosed() {
        return closed;
    }

    /**
     * Sends an out-of-band CancelRequest on a brand-new connection to the
     * same host. IO failures are ignored: cancellation is best-effort.
     */
    @Override
    public void sendQueryCancel() throws SQLException {
        PGStream cancelStream = null;

        // Now we need to construct and send a cancel packet
        try {
            if (LOGGER.isLoggable(Level.FINEST)) {
                LOGGER.log(Level.FINEST, " FE=> CancelRequest(pid={0},ckey={1})", new Object[]{cancelPid, cancelKey});
            }

            cancelStream =
                new PGStream(pgStream.getSocketFactory(), pgStream.getHostSpec(), cancelSignalTimeout);
            if (cancelSignalTimeout > 0) {
                cancelStream.setNetworkTimeout(cancelSignalTimeout);
            }
            // 16-byte CancelRequest packet: length, the special request code
            // (1234,5678), then the backend pid and secret key from startup.
            cancelStream.sendInteger4(16);
            cancelStream.sendInteger2(1234);
            cancelStream.sendInteger2(5678);
            cancelStream.sendInteger4(cancelPid);
            cancelStream.sendInteger4(cancelKey);
            cancelStream.flush();
            cancelStream.receiveEOF();
        } catch (IOException e) {
            // Safe to ignore.
            LOGGER.log(Level.FINEST, "Ignoring exception on cancel request:", e);
        } finally {
            if (cancelStream != null) {
                try {
                    cancelStream.close();
                } catch (IOException e) {
                    // Ignored.
                }
            }
        }
    }

    /**
     * Appends a warning to the connection's warning chain
     * (SQLWarning.setNextWarning adds to the end of the chain).
     */
    public synchronized void addWarning(SQLWarning newWarning) {
        if (warnings == null) {
            warnings = newWarning;
        } else {
            warnings.setNextWarning(newWarning);
        }
    }

    public synchronized void addNotification(PGNotification notification) {
        notifications.add(notification);
    }

    /** Returns and clears all pending async notifications. */
    @Override
    public synchronized PGNotification[] getNotifications() throws SQLException {
        PGNotification[] array = notifications.toArray(new PGNotification[0]);
        notifications.clear();
        return array;
    }

    /** Returns and clears the accumulated warning chain. */
    @Override
    public synchronized @Nullable SQLWarning getWarnings() {
        SQLWarning chain = warnings;
        warnings = null;
        return chain;
    }

    @Override
    public String getServerVersion() {
        // Local copy so the null check and the return see the same value.
        String serverVersion = this.serverVersion;
        if (serverVersion == null) {
            throw new IllegalStateException("serverVersion must not be null");
        }
        return serverVersion;
    }

    @Override
    public int getServerVersionNum() {
        // Lazily parsed from the version string and cached (0 = not yet parsed).
        if (serverVersionNum != 0) {
            return serverVersionNum;
        }
        return serverVersionNum = Utils.parseServerVersionStr(getServerVersion());
    }

    public void setServerVersion(String serverVersion) {
        this.serverVersion = serverVersion;
    }

    public void setServerVersionNum(int serverVersionNum) {
        this.serverVersionNum = serverVersionNum;
    }

    public synchronized void setTransactionState(TransactionState state) {
        transactionState = state;
    }

    public synchronized void setStandardConformingStrings(boolean value) {
        standardConformingStrings = value;
    }

    @Override
    public synchronized boolean getStandardConformingStrings() {
        return standardConformingStrings;
    }

    @Override
    public boolean getQuoteReturningIdentifiers() {
        return quoteReturningIdentifiers;
    }

    @Override
    public synchronized TransactionState getTransactionState() {
        return transactionState;
    }

    public void setEncoding(Encoding encoding) throws IOException {
        pgStream.setEncoding(encoding);
    }

    @Override
    public Encoding getEncoding() {
        return pgStream.getEncoding();
    }

    @Override
    public boolean isReWriteBatchedInsertsEnabled() {
        return this.reWriteBatchedInserts;
    }

    /** Borrows (or creates) a cached query keyed by its SQL text. */
    @Override
    public final CachedQuery borrowQuery(String sql) throws SQLException {
        return statementCache.borrow(sql);
    }

    /** Borrows a cached query for JDBC {@code {call ...}} escape syntax. */
    @Override
    public final CachedQuery borrowCallableQuery(String sql) throws SQLException {
        return statementCache.borrow(new CallableQueryKey(sql));
    }

    /** Borrows a cached query that returns the given generated columns. */
    @Override
    public final CachedQuery borrowReturningQuery(String sql, String @Nullable [] columnNames)
        throws SQLException {
        return statementCache.borrow(new QueryWithReturningColumnsKey(sql, true, true,
            columnNames
        ));
    }

    @Override
    public CachedQuery borrowQueryByKey(Object key) throws SQLException {
        return statementCache.borrow(key);
    }

    /** Returns a borrowed query to the cache so it can be reused or evicted. */
    @Override
    public void releaseQuery(CachedQuery cachedQuery) {
        statementCache.put(cachedQuery.key, cachedQuery);
    }

    /**
     * Builds a cache key for the given SQL and options. Plain parameterized
     * SQL uses the string itself as key; RETURNING-columns and
     * non-parameterized cases get structured keys.
     */
    @Override
    public final Object createQueryKey(String sql, boolean escapeProcessing,
        boolean isParameterized, String @Nullable ... columnNames) {
        Object key;
        if (columnNames == null || columnNames.length != 0) {
            // Null means "return whatever sensible columns are" (e.g. primary key, or serial, or something like that)
            key = new QueryWithReturningColumnsKey(sql, isParameterized, escapeProcessing, columnNames);
        } else if (isParameterized) {
            // If no generated columns requested, just use the SQL as a cache key
            key = sql;
        } else {
            key = new BaseQueryKey(sql, false, escapeProcessing);
        }
        return key;
    }

    @Override
    public CachedQuery createQueryByKey(Object key) throws SQLException {
        return cachedQueryCreateAction.create(key);
    }

    @Override
    public final CachedQuery createQuery(String sql, boolean escapeProcessing,
        boolean isParameterized, String @Nullable ... columnNames)
        throws SQLException {
        Object key = createQueryKey(sql, escapeProcessing, isParameterized, columnNames);
        // Note: cache is not reused here for two reasons:
        //   1) Simplify initial implementation for simple statements
        //   2) Non-prepared statements are likely to have literals, thus query reuse would not be often
        return createQueryByKey(key);
    }

    @Override
    public boolean isColumnSanitiserDisabled() {
        return columnSanitiserDisabled;
    }

    @Override
    public EscapeSyntaxCallMode getEscapeSyntaxCallMode() {
        return escapeSyntaxCallMode;
    }

    @Override
    public PreferQueryMode getPreferQueryMode() {
        return preferQueryMode;
    }

    public AutoSave getAutoSave() {
        return autoSave;
    }

    public void setAutoSave(AutoSave autoSave) {
        this.autoSave = autoSave;
    }

    /**
     * Returns true if the given failure is known to disappear when the
     * statement is re-parsed (e.g. "prepared statement does not exist" or
     * "cached plan must not change result type").
     */
    protected boolean willHealViaReparse(SQLException e) {
        if (e == null || e.getSQLState() == null) {
            return false;
        }

        // "prepared statement \"S_2\" does not exist"
        if (PSQLState.INVALID_SQL_STATEMENT_NAME.getState().equals(e.getSQLState())) {
            return true;
        }
        if (!PSQLState.NOT_IMPLEMENTED.getState().equals(e.getSQLState())) {
            return false;
        }

        if (!(e instanceof PSQLException)) {
            return false;
        }

        PSQLException pe = (PSQLException) e;

        ServerErrorMessage serverErrorMessage = pe.getServerErrorMessage();
        if (serverErrorMessage == null) {
            return false;
        }
        // "cached plan must not change result type"
        String routine = serverErrorMessage.getRoutine();
        return "RevalidateCachedQuery".equals(routine) // 9.2+
            || "RevalidateCachedPlan".equals(routine); // <= 9.1
    }

    @Override
    public boolean willHealOnRetry(SQLException e) {
        if (autoSave == AutoSave.NEVER && getTransactionState() == TransactionState.FAILED) {
            // If autorollback is not activated, then every statement will fail with
            // 'transaction is aborted', etc, etc
            return false;
        }
        return willHealViaReparse(e);
    }

    public boolean isFlushCacheOnDeallocate() {
        return flushCacheOnDeallocate;
    }

    public void setFlushCacheOnDeallocate(boolean flushCacheOnDeallocate) {
        this.flushCacheOnDeallocate = flushCacheOnDeallocate;
    }

    protected boolean hasNotifications() {
        return notifications.size() > 0;
    }

    /** Read-only view of all GUC_REPORT parameter statuses seen so far. */
    @Override
    public final Map<String,String> getParameterStatuses() {
        return Collections.unmodifiableMap(parameterStatuses);
    }

    @Override
    public final @Nullable String getParameterStatus(String parameterName) {
        return parameterStatuses.get(parameterName);
    }

    /**
     * Update the parameter status map in response to a new ParameterStatus
     * wire protocol message.
     *
     * <p>The server sends ParameterStatus messages when GUC_REPORT settings are
     * initially assigned and whenever they change.</p>
     *
     * <p>A future version may invoke a client-defined listener class at this point,
     * so this should be the only access path.</p>
     *
     * <p>Keys are case-insensitive and case-preserving.</p>
     *
     * <p>The server doesn't provide a way to report deletion of a reportable
     * parameter so we don't expose one here.</p>
     *
     * @param parameterName case-insensitive case-preserving name of parameter to create or update
     * @param parameterStatus new value of parameter
     * @see org.postgresql.PGConnection#getParameterStatuses
     * @see org.postgresql.PGConnection#getParameterStatus
     */
    protected void onParameterStatus(String parameterName, String parameterStatus) {
        if (parameterName == null || parameterName.equals("")) {
            throw new IllegalStateException("attempt to set GUC_REPORT parameter with null or empty-string name");
        }

        parameterStatuses.put(parameterName, parameterStatus);
    }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.flex.forks.batik.bridge.svg12;
import org.apache.flex.forks.batik.bridge.BridgeContext;
import org.apache.flex.forks.batik.bridge.DocumentLoader;
import org.apache.flex.forks.batik.bridge.Messages;
import org.apache.flex.forks.batik.bridge.ScriptingEnvironment;
import org.apache.flex.forks.batik.bridge.SVGUtilities;
import org.apache.flex.forks.batik.dom.AbstractDocument;
import org.apache.flex.forks.batik.dom.AbstractElement;
import org.apache.flex.forks.batik.dom.events.EventSupport;
import org.apache.flex.forks.batik.dom.svg12.SVGGlobal;
import org.apache.flex.forks.batik.dom.svg12.XBLEventSupport;
import org.apache.flex.forks.batik.dom.util.DOMUtilities;
import org.apache.flex.forks.batik.dom.util.TriplyIndexedTable;
import org.apache.flex.forks.batik.script.Interpreter;
import org.apache.flex.forks.batik.util.SVGConstants;
import org.apache.flex.forks.batik.util.SVG12Constants;
import org.apache.flex.forks.batik.util.XMLConstants;
import org.w3c.dom.Element;
import org.w3c.dom.events.Event;
import org.w3c.dom.events.EventTarget;
import org.w3c.dom.events.EventListener;
/**
* Manages scripting handlers for SVG 1.2 'handler' elements.
*
* @author <a href="mailto:cam%40mcc%2eid%2eau">Cameron McCormack</a>
* @version $Id: SVG12ScriptingEnvironment.java 502538 2007-02-02 08:52:56Z dvholten $
*/
public class SVG12ScriptingEnvironment extends ScriptingEnvironment {
/**
* Constant used to describe handler scripts.
* {0} - URL of document containing script.
* {1} - Event type
* {2} - Event namespace
* {3} - line number of element.
*/
public static final String HANDLER_SCRIPT_DESCRIPTION
= "SVG12ScriptingEnvironment.constant.handler.script.description";
/**
* Creates a new SVG12ScriptingEnvironment.
* @param ctx the bridge context
*/
public SVG12ScriptingEnvironment(BridgeContext ctx) {
    // All base scripting setup is handled by ScriptingEnvironment.
    super(ctx);
}
/**
* The listeners for XML Events style handlers.
* Maps (event namespace, event local name, element) to a handler.
*/
protected TriplyIndexedTable handlerScriptingListeners;
/**
* Adds DOM listeners to the document.
*/
protected void addDocumentListeners() {
    // Create the three DOM mutation listeners and register each one at the
    // implementation level of the document's XBL event support.
    domNodeInsertedListener = new DOMNodeInsertedListener();
    domNodeRemovedListener = new DOMNodeRemovedListener();
    domAttrModifiedListener = new DOMAttrModifiedListener();
    XBLEventSupport eventSupport =
        (XBLEventSupport) ((AbstractDocument) document).initializeEventSupport();
    String ns = XMLConstants.XML_EVENTS_NAMESPACE_URI;
    eventSupport.addImplementationEventListenerNS(
        ns, "DOMNodeInserted", domNodeInsertedListener, false);
    eventSupport.addImplementationEventListenerNS(
        ns, "DOMNodeRemoved", domNodeRemovedListener, false);
    eventSupport.addImplementationEventListenerNS(
        ns, "DOMAttrModified", domAttrModifiedListener, false);
}
/**
* Removes DOM listeners from the document.
*/
protected void removeDocumentListeners() {
    // Unregister the three DOM mutation listeners installed by
    // addDocumentListeners from the document's XBL event support.
    XBLEventSupport eventSupport =
        (XBLEventSupport) ((AbstractDocument) document).initializeEventSupport();
    String ns = XMLConstants.XML_EVENTS_NAMESPACE_URI;
    eventSupport.removeImplementationEventListenerNS(
        ns, "DOMNodeInserted", domNodeInsertedListener, false);
    eventSupport.removeImplementationEventListenerNS(
        ns, "DOMNodeRemoved", domNodeRemovedListener, false);
    eventSupport.removeImplementationEventListenerNS(
        ns, "DOMAttrModified", domAttrModifiedListener, false);
}
/**
* The listener class for 'DOMNodeInserted' event.
*/
protected class DOMNodeInsertedListener
    extends ScriptingEnvironment.DOMNodeInsertedListener {
    /** Unwraps the ultimate original event before delegating to the base handler. */
    public void handleEvent(Event evt) {
        Event original = EventSupport.getUltimateOriginalEvent(evt);
        super.handleEvent(original);
    }
}
/**
* The listener class for 'DOMNodeRemoved' event.
*/
protected class DOMNodeRemovedListener
    extends ScriptingEnvironment.DOMNodeRemovedListener {
    /** Unwraps the ultimate original event before delegating to the base handler. */
    public void handleEvent(Event evt) {
        Event original = EventSupport.getUltimateOriginalEvent(evt);
        super.handleEvent(original);
    }
}
/**
 * The listener class for 'DOMAttrModified' event.
 */
protected class DOMAttrModifiedListener
    extends ScriptingEnvironment.DOMAttrModifiedListener {
    /** Unwraps the ultimate original event before delegating to the base handler. */
    public void handleEvent(Event evt) {
        Event original = EventSupport.getUltimateOriginalEvent(evt);
        super.handleEvent(original);
    }
}
/**
* Adds the scripting listeners to the given element.
*/
protected void addScriptingListenersOn(Element elt) {
    String eltNS = elt.getNamespaceURI();
    String eltLN = elt.getLocalName();
    if (SVGConstants.SVG_NAMESPACE_URI.equals(eltNS)
        && SVG12Constants.SVG_HANDLER_TAG.equals(eltLN)) {
        // For this 'handler' element, add a handler for the given
        // event type. The listener is attached to the handler element's
        // PARENT, which is the event target the handler observes.
        AbstractElement tgt = (AbstractElement) elt.getParentNode();
        String eventType = elt.getAttributeNS
            (XMLConstants.XML_EVENTS_NAMESPACE_URI,
             XMLConstants.XML_EVENTS_EVENT_ATTRIBUTE);
        String eventNamespaceURI = XMLConstants.XML_EVENTS_NAMESPACE_URI;
        if (eventType.indexOf(':') != -1) {
            // A prefixed event type (e.g. "foo:click"): resolve the prefix
            // against the handler element's in-scope namespaces.
            String prefix = DOMUtilities.getPrefix(eventType);
            eventType = DOMUtilities.getLocalName(eventType);
            eventNamespaceURI
                = ((AbstractElement) elt).lookupNamespaceURI(prefix);
        }
        EventListener listener = new HandlerScriptingEventListener
            (eventNamespaceURI, eventType, (AbstractElement) elt);
        tgt.addEventListenerNS
            (eventNamespaceURI, eventType, listener, false, null);
        // Remember the listener keyed by (namespace, type, element) so
        // removeScriptingListenersOn can detach it later.
        if (handlerScriptingListeners == null) {
            handlerScriptingListeners = new TriplyIndexedTable();
        }
        handlerScriptingListeners.put
            (eventNamespaceURI, eventType, elt, listener);
    }
    super.addScriptingListenersOn(elt);
}
/**
* Removes the scripting listeners from the given element.
*/
protected void removeScriptingListenersOn(Element elt) {
String eltNS = elt.getNamespaceURI();
String eltLN = elt.getLocalName();
if (SVGConstants.SVG_NAMESPACE_URI.equals(eltNS)
&& SVG12Constants.SVG_HANDLER_TAG.equals(eltLN)) {
// For this 'handler' element, remove the handler for the given
// event type.
AbstractElement tgt = (AbstractElement) elt.getParentNode();
String eventType = elt.getAttributeNS
(XMLConstants.XML_EVENTS_NAMESPACE_URI,
XMLConstants.XML_EVENTS_EVENT_ATTRIBUTE);
String eventNamespaceURI = XMLConstants.XML_EVENTS_NAMESPACE_URI;
if (eventType.indexOf(':') != -1) {
String prefix = DOMUtilities.getPrefix(eventType);
eventType = DOMUtilities.getLocalName(eventType);
eventNamespaceURI
= ((AbstractElement) elt).lookupNamespaceURI(prefix);
}
EventListener listener =
(EventListener) handlerScriptingListeners.put
(eventNamespaceURI, eventType, elt, null);
tgt.removeEventListenerNS
(eventNamespaceURI, eventType, listener, false);
}
super.removeScriptingListenersOn(elt);
}
    /**
     * To handle a scripting event with an XML Events style handler.
     * When the event fires, the text content of the 'handler' element is
     * evaluated as script in the language declared on the handler (or,
     * failing that, on the nearest ancestor 'svg' element).
     */
    protected class HandlerScriptingEventListener implements EventListener {
        /**
         * The namespace URI of the event type.
         */
        protected String eventNamespaceURI;
        /**
         * The event type.
         */
        protected String eventType;
        /**
         * The handler element.
         */
        protected AbstractElement handlerElement;
        /**
         * Creates a new HandlerScriptingEventListener.
         * @param ns Namespace URI of the event type.
         * @param et The event type.
         * @param e The handler element.
         */
        public HandlerScriptingEventListener(String ns,
                                             String et,
                                             AbstractElement e) {
            eventNamespaceURI = ns;
            eventType = et;
            handlerElement = e;
        }
        /**
         * Runs the script.
         */
        public void handleEvent(Event evt) {
            Element elt = (Element)evt.getCurrentTarget();
            // Evaluate the script
            String script = handlerElement.getTextContent();
            if (script.length() == 0)
                return;
            DocumentLoader dl = bridgeContext.getDocumentLoader();
            AbstractDocument d
                = (AbstractDocument) handlerElement.getOwnerDocument();
            int line = dl.getLineNumber(handlerElement);
            // Human-readable description (document URI, event, line number)
            // used for error reporting by the script engine.
            final String desc = Messages.formatMessage
                (HANDLER_SCRIPT_DESCRIPTION,
                 new Object [] {d.getDocumentURI(),
                                eventNamespaceURI,
                                eventType,
                                new Integer(line)});
            // Find the scripting language
            String lang = handlerElement.getAttributeNS
                (null, SVGConstants.SVG_CONTENT_SCRIPT_TYPE_ATTRIBUTE);
            if (lang.length() == 0) {
                // No language on the handler itself: fall back to the
                // contentScriptType of the nearest ancestor 'svg' element
                // of the event's current target.
                Element e = elt;
                while (e != null &&
                       (!SVGConstants.SVG_NAMESPACE_URI.equals
                         (e.getNamespaceURI()) ||
                        !SVGConstants.SVG_SVG_TAG.equals(e.getLocalName()))) {
                    e = SVGUtilities.getParentElement(e);
                }
                if (e == null)
                    return;
                lang = e.getAttributeNS
                    (null, SVGConstants.SVG_CONTENT_SCRIPT_TYPE_ATTRIBUTE);
            }
            runEventHandler(script, evt, lang, desc);
        }
    }
/**
* Creates a new Window object.
*/
public org.apache.flex.forks.batik.script.Window createWindow(Interpreter interp,
String lang) {
return new Global(interp, lang);
}
/**
* The SVGGlobal object.
*/
protected class Global
extends ScriptingEnvironment.Window
implements SVGGlobal {
/**
* Creates a new Global object.
*/
public Global(Interpreter interp, String lang) {
super(interp, lang);
}
/**
* Implements
* {@link org.apache.flex.forks.batik.dom.svg12.SVGGlobal#startMouseCapture(EventTarget,boolean,boolean)}.
*/
public void startMouseCapture(EventTarget target, boolean sendAll,
boolean autoRelease) {
// XXX not sure if it's right to do this on the
// primary bridge context
((SVG12BridgeContext) bridgeContext.getPrimaryBridgeContext())
.startMouseCapture(target, sendAll, autoRelease);
}
/**
* Stops mouse capture.
*/
public void stopMouseCapture() {
// XXX not sure if it's right to do this on the
// primary bridge context
((SVG12BridgeContext) bridgeContext.getPrimaryBridgeContext())
.stopMouseCapture();
}
}
}
| |
package test.com.elezeta.wdbunit.testfile;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.naming.spi.DirectoryManager;
import org.junit.Before;
import org.junit.Test;
import org.modelcc.IModel;
import org.modelcc.io.ModelReader;
import org.modelcc.io.java.JavaModelReader;
import org.modelcc.lexer.recognizer.PatternRecognizer;
import org.modelcc.lexer.recognizer.regexp.RegExpPatternRecognizer;
import org.modelcc.metamodel.Model;
import org.modelcc.parser.Parser;
import org.modelcc.parser.ParserException;
import org.modelcc.parser.ParserFactory;
import org.modelcc.probabilistic.ProbabilityValue;
import org.modelcc.tools.FieldFinder;
import com.elezeta.wdbunit.IndentPrintWriter;
import com.elezeta.wdbunit.WDBUnitCLI;
import com.elezeta.wdbunit.testfile.TestFile;
import com.elezeta.wdbunit.testfile.TestSummary;
import static org.junit.Assert.*;
public class BatteryTests {

    // Sink that discards all test-runner output so batteries run silently.
    PrintStream dummyStream = new PrintStream(new OutputStream() {
        @Override
        public void write(int b) {
            // NO-OP: output is intentionally discarded.
        }
    });

    /**
     * Generates a ModelCC parser for {@link TestFile} models.  Line
     * comments ("#...") and whitespace runs are registered as ignored
     * token patterns.
     *
     * @return the generated parser; fails the current test on error.
     */
    public Parser<TestFile> genParser() {
        // Generate parser using ModelCC.
        Parser<TestFile> parser = null;
        try {
            ModelReader jmr = new JavaModelReader(TestFile.class);
            Model m = jmr.read();
            Set<PatternRecognizer> ignore = new HashSet<PatternRecognizer>();
            ignore.add(new RegExpPatternRecognizer("#.*\n"));
            ignore.add(new RegExpPatternRecognizer("( |\n|\t|\r)+"));
            parser = ParserFactory.create(m,ignore);
        } catch (Exception ex) {
            ex.printStackTrace();
            fail("could not generate TestFile parser: " + ex);
        }
        return parser;
    }

    /**
     * Runs every test file listed in the given classpath directory index
     * and asserts that each file yields exactly the expected number of
     * errors, failures and warnings.
     *
     * @param dir      classpath directory whose index resource lists one
     *                 file name per line
     * @param errors   expected error count per file
     * @param failures expected failure count per file
     * @param warnings expected warning count per file
     */
    private void batteryTest(String dir, int errors, int failures, int warnings) {
        Parser<TestFile> parser = genParser();
        BufferedReader br = null;
        try {
            InputStream is = WDBUnitCLI.class.getResourceAsStream(dir);
            br = new BufferedReader(new InputStreamReader(is));
            // readLine() == null is the reliable end-of-stream signal;
            // ready() only reports whether a read would block and can
            // terminate the loop before the listing is exhausted.
            String name;
            while ((name = br.readLine()) != null) {
                String file = dir + name;
                // Skip editor swap files that may be present in the listing.
                if (!file.endsWith(".swp")) {
                    String text = readText(file);
                    TestSummary summary = new TestSummary();
                    try {
                        Collection<TestFile> testFiles = parser.parseAll(text);
                        if (testFiles.size()>1) {
                            // Ambiguous parses indicate a grammar problem;
                            // dump every interpretation before failing below.
                            System.out.println("AMBIGUOUS "+file);
                            for (TestFile tf : testFiles) {
                                System.out.println("");
                                System.out.println("");
                                System.out.println("");
                                show(tf,text,parser);
                            }
                        }
                        assertEquals(1,testFiles.size());
                        TestFile testFile = testFiles.iterator().next();
                        TestSummary current = testFile.run(new IndentPrintWriter(dummyStream),false);
                        summary.addValues(current);
                    } catch (ParserException e) {
                        // An unparseable test file counts as one error.
                        summary.setErrors(summary.getErrors()+1);
                    }
                    if (errors != summary.getErrors() || failures != summary.getFailures() || warnings != summary.getWarnings())
                        System.out.println("FAIL "+file+" "+errors+" "+failures+" "+warnings);
                    assertEquals(errors,summary.getErrors());
                    assertEquals(failures,summary.getFailures());
                    assertEquals(warnings,summary.getWarnings());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            fail("battery " + dir + " aborted: " + e);
        } finally {
            if (br != null) {
                try {
                    br.close();
                } catch (IOException e) {
                    // best-effort close; nothing useful to do here
                }
            }
        }
    }

    /**
     * Reads a classpath resource as a UTF-8 string.
     *
     * @param resourceName absolute resource name (leading '/')
     * @return the resource contents; "" if the resource does not exist.
     *         Read errors are swallowed deliberately and whatever was read
     *         before the failure is returned (best effort).
     */
    private String readText(String resourceName) {
        InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(resourceName.substring(1));
        if (is != null) {
            Writer writer = new StringWriter();
            char[] buffer = new char[1024];
            try {
                Reader reader = new BufferedReader(
                        new InputStreamReader(is, "UTF-8"));
                int n;
                while ((n = reader.read(buffer)) != -1) {
                    writer.write(buffer, 0, n);
                }
            } catch (Exception e) {
                // Best effort: return whatever was read before the failure.
            } finally {
                try {
                    is.close();
                } catch (Exception ex) {
                    // ignore close failure
                }
            }
            return writer.toString();
        } else {
            return "";
        }
    }

    /** Battery with no expected diagnostics. */
    @Test
    public void AllOkTests() {
        batteryTest("/examples/allok/",0,0,0);
    }

    /** Battery where each file yields exactly one warning. */
    @Test
    public void Warning1Tests() {
        batteryTest("/examples/warning1/",0,0,1);
    }

    /** Battery where each file yields exactly one error. */
    @Test
    public void Error1Tests() {
        batteryTest("/examples/error1/",1,0,0);
    }

    /** Battery where each file yields exactly one failure. */
    @Test
    public void Failures1Tests() {
        batteryTest("/examples/failures1/",0,1,0);
    }

    /** Dumps the parse tree of a model object for ambiguity debugging. */
    private static void show(Object object,String inputString,Parser parser) {
        show(object,0,"",inputString,parser);
    }

    /**
     * Recursively prints a model object: for each IModel node, the matched
     * input span (via the parser's start/end metadata) and then all of its
     * fields; arrays are expanded element by element.
     */
    private static void show(Object object,int indent,String fieldName,String inputString,Parser parser) {
        boolean array = false;
        Class clazz = object.getClass();
        if (clazz.isArray()) {
            array = true;
            clazz = clazz.getComponentType();
        }
        if (IModel.class.isAssignableFrom(clazz)) {
            Map<String,Object> metadata = parser.getParsingMetadata(object);
            if (!array) {
                if (metadata != null) {
                    int startIndex = (Integer)metadata.get("startIndex");
                    // endIndex is inclusive in the metadata; substring needs exclusive.
                    int endIndex = (Integer)metadata.get("endIndex")+1;
                    System.out.println(getIndent(indent)+" - "+object.getClass().getSimpleName()+"("+fieldName+"): "+inputString.substring(startIndex,endIndex).replace("\n","--"));
                    for (Field field : FieldFinder.getAllFields(object.getClass())) {
                        field.setAccessible(true);
                        try {
                            Object content = field.get(object);
                            if (content != null)
                                show(content,indent+2,field.getName(),inputString,parser);
                        } catch (Exception e) {
                            System.err.println("Cannot show field contents.");
                            e.printStackTrace();
                        }
                    }
                }
            }
            else {
                System.out.println(getIndent(indent)+" - "+object.getClass().getSimpleName()+"*("+fieldName+")");
                for (Object content : (Object[])object) {
                    if (content != null)
                        show(content,indent+2,"",inputString,parser);
                }
            }
        }
    }

    /** Returns a string of {@code indent} spaces. */
    private static String getIndent(int indent) {
        // StringBuilder avoids the O(n^2) cost of repeated string concatenation.
        StringBuilder sb = new StringBuilder(Math.max(indent, 0));
        for (int i = 0; i < indent; i++) {
            sb.append(' ');
        }
        return sb.toString();
    }
}
| |
package org.vertexium.elasticsearch7;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper;
import org.elasticsearch.index.search.stats.SearchStats;
import org.junit.*;
import org.junit.rules.TestRule;
import org.junit.runners.model.Statement;
import org.junit.*;
import org.vertexium.*;
import org.vertexium.elasticsearch7.lucene.DefaultQueryStringTransformer;
import org.vertexium.elasticsearch7.scoring.ElasticsearchFieldValueScoringStrategy;
import org.vertexium.elasticsearch7.scoring.ElasticsearchHammingDistanceScoringStrategy;
import org.vertexium.elasticsearch7.sorting.ElasticsearchLengthOfStringSortingStrategy;
import org.vertexium.inmemory.InMemoryAuthorizations;
import org.vertexium.inmemory.InMemoryGraph;
import org.vertexium.inmemory.InMemoryGraphConfiguration;
import org.vertexium.mutation.ExistingElementMutation;
import org.vertexium.query.QueryResultsIterable;
import org.vertexium.query.SortDirection;
import org.vertexium.query.TermsAggregation;
import org.vertexium.query.TermsResult;
import org.vertexium.scoring.ScoringStrategy;
import org.vertexium.sorting.SortingStrategy;
import org.vertexium.test.GraphTestBase;
import org.vertexium.util.CloseableUtils;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.junit.Assert.*;
import static org.junit.Assume.assumeTrue;
import static org.vertexium.test.util.VertexiumAssert.assertResultsCount;
import static org.vertexium.util.CloseableUtils.closeQuietly;
import static org.vertexium.util.IterableUtils.count;
import static org.vertexium.util.IterableUtils.toList;
public class Elasticsearch7SearchIndexTest extends GraphTestBase {
private int expectedTestElasticsearchExceptionHandlerNumberOfTimesCalled = 0;
@ClassRule
public static ElasticsearchResource elasticsearchResource = new ElasticsearchResource(Elasticsearch7SearchIndexTest.class.getName());
@Override
protected Authorizations createAuthorizations(String... auths) {
return new InMemoryAuthorizations(auths);
}
@Override
protected void addAuthorizations(String... authorizations) {
getGraph().createAuthorizations(authorizations);
}
    /**
     * Per-test setup: resets the exception-handler expectation and call
     * counter, drops all Elasticsearch indices, then runs base-class setup.
     */
    @Before
    @Override
    public void before() throws Exception {
        // By default no handler invocations are expected; tests that provoke
        // failures raise this field themselves.
        expectedTestElasticsearchExceptionHandlerNumberOfTimesCalled = 0;
        TestElasticsearch7ExceptionHandler.clearNumberOfTimesCalled();
        elasticsearchResource.dropIndices();
        super.before();
    }
@After
@Override
public void after() throws Exception {
assertEquals(
expectedTestElasticsearchExceptionHandlerNumberOfTimesCalled,
TestElasticsearch7ExceptionHandler.getNumberOfTimesCalled()
);
super.after();
}
    // Guards every test against leaked Elasticsearch scroll contexts: the
    // open-scroll count is sampled before and after the test body; if it
    // grew, the GC is prodded (so finalizers of abandoned iterables can
    // release their scrolls) and the test is failed.
    @Rule
    public TestRule esOrphanScrollCheck = (base, description) -> new Statement() {
        @Override
        public void evaluate() throws Throwable {
            long beforeScrollCount = getCurrentScrolls();
            base.evaluate();
            if ((getCurrentScrolls() - beforeScrollCount) > 0) {
                System.gc();
                System.gc();
                fail("Leaked Elasticsearch scrolls detected.");
            }
        }
    };
@Override
@SuppressWarnings("unchecked")
protected Graph createGraph() {
InMemoryGraphConfiguration configuration = new InMemoryGraphConfiguration(elasticsearchResource.createConfig());
return InMemoryGraph.create(configuration);
}
private Elasticsearch7SearchIndex getSearchIndex() {
return (Elasticsearch7SearchIndex) ((GraphWithSearchIndex) graph).getSearchIndex();
}
protected boolean isFieldNamesInQuerySupported() {
return true;
}
@Override
protected boolean disableEdgeIndexing(Graph graph) {
Elasticsearch7SearchIndex searchIndex = (Elasticsearch7SearchIndex) ((GraphWithSearchIndex) graph).getSearchIndex();
searchIndex.getConfig().getGraphConfiguration().set(GraphConfiguration.SEARCH_INDEX_PROP_PREFIX + "." + ElasticsearchSearchIndexConfiguration.INDEX_EDGES, "false");
return true;
}
@Override
protected boolean isLuceneQueriesSupported() {
return true;
}
    /**
     * After the base-class assertions, clears the search-index field cache
     * and re-runs the sort to check that sorting on a property with no
     * indexed values still returns every vertex once cached mappings are gone.
     */
    @Test
    @Override
    public void testGraphQuerySortOnPropertyThatHasNoValuesInTheIndex() {
        super.testGraphQuerySortOnPropertyThatHasNoValuesInTheIndex();
        getSearchIndex().clearCache();
        QueryResultsIterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A).sort("age", SortDirection.ASCENDING).vertices();
        Assert.assertEquals(2, count(vertices));
    }
@Test
public void testGraphQueryAggregateOnPropertyThatHasNoValuesInTheIndex() {
super.testGraphQueryAggregateOnPropertyThatHasNoValuesInTheIndex();
getSearchIndex().clearCache();
TermsAggregation aliasAggregation = new TermsAggregation("alias-agg", "alias");
aliasAggregation.setIncludeHasNotCount(true);
QueryResultsIterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A)
.addAggregation(aliasAggregation)
.limit(0)
.vertices();
Assert.assertEquals(0, count(vertices));
TermsResult aliasAggResult = vertices.getAggregationResult(aliasAggregation.getAggregationName(), TermsResult.class);
assertEquals(2, aliasAggResult.getHasNotCount());
assertEquals(0, count(aliasAggResult.getBuckets()));
}
@Override
protected boolean isPainlessDateMath() {
return true;
}
    /**
     * Installs a query-string transformer that rewrites the alias field
     * "knownAs" to the real field "name", then checks that queries through
     * either name behave identically while unknown fields match nothing.
     */
    @Test
    public void testCustomQueryStringTransformer() {
        Elasticsearch7SearchIndex searchIndex = (Elasticsearch7SearchIndex) ((GraphWithSearchIndex) graph).getSearchIndex();
        searchIndex.getConfig().setQueryStringTransformer(new DefaultQueryStringTransformer(graph) {
            @Override
            protected String[] expandFieldName(String fieldName, Authorizations authorizations) {
                // Alias: treat "knownAs" as the "name" property.
                if ("knownAs".equals(fieldName)) {
                    fieldName = "name";
                }
                return super.expandFieldName(fieldName, authorizations);
            }
        });
        graph.defineProperty("name").dataType(String.class).textIndexHint(TextIndexHint.ALL).define();
        graph.defineProperty("food").dataType(String.class).textIndexHint(TextIndexHint.ALL).define();
        graph.prepareVertex("v1", VISIBILITY_A)
            .setProperty("name", "Joe Ferner", VISIBILITY_A)
            .setProperty("food", "pizza", VISIBILITY_A)
            .save(AUTHORIZATIONS_A_AND_B);
        graph.prepareVertex("v2", VISIBILITY_A)
            .setProperty("name", "Joe Smith", VISIBILITY_A)
            .setProperty("food", "salad", VISIBILITY_A)
            .save(AUTHORIZATIONS_A_AND_B);
        graph.flush();
        Iterable<Vertex> vertices = graph.query("joe", AUTHORIZATIONS_A).vertices();
        Assert.assertEquals(2, count(vertices));
        vertices = graph.query("\"joe ferner\"", AUTHORIZATIONS_A).vertices();
        Assert.assertEquals(1, count(vertices));
        vertices = graph.query("name:\"joe ferner\"", AUTHORIZATIONS_A).vertices();
        Assert.assertEquals(1, count(vertices));
        // The alias must behave exactly like the real field name.
        vertices = graph.query("knownAs:\"joe ferner\"", AUTHORIZATIONS_A).vertices();
        Assert.assertEquals(1, count(vertices));
        vertices = graph.query("knownAs:joe", AUTHORIZATIONS_A).vertices();
        Assert.assertEquals(2, count(vertices));
        vertices = graph.query("knownAs:joe", AUTHORIZATIONS_A).has("food", "pizza").vertices();
        Assert.assertEquals(1, count(vertices));
        vertices = graph.query("food:pizza", AUTHORIZATIONS_A).vertices();
        Assert.assertEquals(1, count(vertices));
        // A field the transformer does not know about matches nothing.
        vertices = graph.query("eats:pizza", AUTHORIZATIONS_A).vertices();
        Assert.assertEquals(0, count(vertices));
    }
    /**
     * Verifies how many Elasticsearch queries are issued by a paged query:
     * building the iterable is lazy (no queries), and each materialization
     * costs a fixed number of queries regardless of the limit.
     */
    @Test
    public void testQueryExecutionCountWhenPaging() {
        graph.prepareVertex("v1", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.prepareVertex("v2", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.flush();
        long startingNumQueries = getNumQueries();
        QueryResultsIterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A).vertices();
        // Building the iterable alone issues no queries.
        assertEquals(startingNumQueries, getNumQueries());
        assertResultsCount(2, 2, vertices);
        assertEquals(startingNumQueries + 2, getNumQueries());
        vertices = graph.query(AUTHORIZATIONS_A).limit(1).vertices();
        assertEquals(startingNumQueries + 4, getNumQueries());
        assertResultsCount(1, 2, vertices);
        assertEquals(startingNumQueries + 4, getNumQueries());
        vertices = graph.query(AUTHORIZATIONS_A).limit(10).vertices();
        assertEquals(startingNumQueries + 6, getNumQueries());
        assertResultsCount(2, 2, vertices);
        assertEquals(startingNumQueries + 6, getNumQueries());
    }
    /**
     * Applies two concurrent visibility-altering mutations to the same
     * vertex and checks the index still holds exactly one document for it.
     */
    @Test
    public void testQueryCreateAndUpdate() {
        graph.prepareVertex("v1", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.flush();
        QueryResultsIterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A).vertices();
        assertResultsCount(1, 1, vertices);
        Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        // Two mutations prepared from the same snapshot, saved back to back.
        ExistingElementMutation<Vertex> m1 = v1.prepareMutation().alterElementVisibility(VISIBILITY_EMPTY);
        ExistingElementMutation<Vertex> m2 = v1.prepareMutation().alterElementVisibility(VISIBILITY_EMPTY);
        m1.save(AUTHORIZATIONS_A);
        m2.save(AUTHORIZATIONS_A);
        graph.flush();
        vertices = graph.query(AUTHORIZATIONS_A).vertices();
        assertResultsCount(1, 1, vertices);
    }
    /**
     * Forces tiny query page sizes so result iteration uses the scroll API,
     * and checks the total number of Elasticsearch queries issued.
     */
    @Test
    public void testQueryExecutionCountWhenScrollingApi() {
        Elasticsearch7SearchIndex searchIndex = (Elasticsearch7SearchIndex) ((GraphWithSearchIndex) graph).getSearchIndex();
        // Page size 1 -> scrolling is required for even two results.
        searchIndex.getConfig().getGraphConfiguration().set(GraphConfiguration.SEARCH_INDEX_PROP_PREFIX + "." + ElasticsearchSearchIndexConfiguration.QUERY_PAGE_SIZE, 1);
        graph.prepareVertex("v1", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.prepareVertex("v2", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.flush();
        long startingNumQueries = getNumQueries();
        QueryResultsIterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A).vertices();
        assertResultsCount(2, vertices);
        assertEquals(startingNumQueries + 4, getNumQueries());
        searchIndex = (Elasticsearch7SearchIndex) ((GraphWithSearchIndex) graph).getSearchIndex();
        searchIndex.getConfig().getGraphConfiguration().set(GraphConfiguration.SEARCH_INDEX_PROP_PREFIX + "." + ElasticsearchSearchIndexConfiguration.QUERY_PAGE_SIZE, 2);
        graph.prepareVertex("v3", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.flush();
        vertices = graph.query(AUTHORIZATIONS_A).vertices();
        assertResultsCount(3, vertices);
        assertEquals(startingNumQueries + 8, getNumQueries());
    }
    /**
     * A query string beginning with a wildcard must be rejected by the
     * server; the client surfaces the failure wrapped in a
     * NotSerializableExceptionWrapper.
     */
    @Test
    public void testDisallowLeadingWildcardsInQueryString() {
        graph.prepareVertex("v1", VISIBILITY_A).setProperty("prop1", "value1", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.flush();
        try {
            graph.query("*alue1", AUTHORIZATIONS_A).search().getTotalHits();
            fail("Wildcard prefix of query string should have caused an exception");
        } catch (Exception e) {
            // The server-side exception type does not deserialize on the
            // client, hence the wrapper check rather than the concrete type.
            if (!(getRootCause(e) instanceof NotSerializableExceptionWrapper)) {
                fail("Wildcard prefix of query string should have caused a NotSerializableExceptionWrapper exception");
            }
        }
    }
    /**
     * Builds a query string with exactly the configured maximum number of
     * terms (should succeed) and then one more (should throw).
     */
    @Test
    public void testLimitingNumberOfQueryStringTerms() {
        graph.prepareVertex("v1", VISIBILITY_A).setProperty("prop1", "value1", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.flush();
        StringBuilder q = new StringBuilder();
        for (int i = 0; i < getSearchIndex().getConfig().getMaxQueryStringTerms(); i++) {
            q.append("jeff").append(i).append(" ");
        }
        // should succeed
        graph.query(q.toString(), AUTHORIZATIONS_A).limit(0).search().getTotalHits();
        try {
            // One term over the limit.
            q.append("done");
            graph.query(q.toString(), AUTHORIZATIONS_A).search().getTotalHits();
            fail("Exceeding max query terms should have thrown an exception");
        } catch (VertexiumException e) {
            // expected
        }
    }
    /**
     * With FetchHints.NONE the query returns edges materialized purely from
     * the search-index document; label, endpoints and id must still be
     * available.
     */
    @Test
    public void testQueryReturningElasticsearchEdge() {
        graph.prepareVertex("v1", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.prepareVertex("v2", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.prepareEdge("e1", "v1", "v2", LABEL_LABEL1, VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.flush();
        QueryResultsIterable<Edge> edges = graph.query(AUTHORIZATIONS_A)
            .edges(FetchHints.NONE);
        assertResultsCount(1, 1, edges);
        Edge e1 = toList(edges).get(0);
        assertEquals(LABEL_LABEL1, e1.getLabel());
        assertEquals("v1", e1.getVertexId(Direction.OUT));
        assertEquals("v2", e1.getVertexId(Direction.IN));
        assertEquals("e1", e1.getId());
    }
    /**
     * With FetchHints.NONE the query returns vertices materialized from the
     * search-index document; visibility filtering must still apply (only the
     * B-visible vertex is returned for B authorizations).
     */
    @Test
    public void testQueryReturningElasticsearchVertex() {
        graph.prepareVertex("v1", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.prepareVertex("v2", VISIBILITY_B).save(AUTHORIZATIONS_B);
        graph.prepareEdge("e1", "v1", "v2", LABEL_LABEL1, VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.flush();
        QueryResultsIterable<Vertex> vertices = graph.query(AUTHORIZATIONS_B)
            .vertices(FetchHints.NONE);
        assertResultsCount(1, 1, vertices);
        Vertex vertex = toList(vertices).get(0);
        assertEquals("v2", vertex.getId());
    }
    /**
     * Edges materialized from the search index (FetchHints.NONE) cannot
     * resolve their endpoint vertices; doing so must throw
     * VertexiumNotSupportedException.
     */
    @Test(expected = VertexiumNotSupportedException.class)
    public void testRetrievingVerticesFromElasticsearchEdge() {
        graph.prepareVertex("v1", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.prepareVertex("v2", VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.prepareEdge("e1", "v1", "v2", LABEL_LABEL1, VISIBILITY_A).save(AUTHORIZATIONS_A);
        graph.flush();
        QueryResultsIterable<Edge> edges = graph.query(AUTHORIZATIONS_A)
            .edges(FetchHints.NONE);
        assertResultsCount(1, 1, edges);
        // Expected to throw: endpoint resolution is unsupported here.
        toList(edges).get(0).getVertices(AUTHORIZATIONS_A);
    }
    /**
     * Deletes the index document out from under a vertex, then updates the
     * vertex: the exception handler must fire exactly once and the document
     * must be rebuilt so both old and new properties are searchable.
     */
    @Test
    public void testUpdateVertexWithDeletedElasticsearchDocument() {
        // The update against the missing document is expected to trip the
        // exception handler exactly once (verified in after()).
        expectedTestElasticsearchExceptionHandlerNumberOfTimesCalled = 1;
        TestElasticsearch7ExceptionHandler.authorizations = AUTHORIZATIONS_A;
        graph.prepareVertex("v1", VISIBILITY_A)
            .addPropertyValue("k1", "prop1", "joe", VISIBILITY_A)
            .save(AUTHORIZATIONS_A);
        graph.flush();
        Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        getSearchIndex().deleteElement(graph, v1, AUTHORIZATIONS_A);
        graph.flush();
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        v1.prepareMutation()
            .addPropertyValue("k1", "prop2", "bob", VISIBILITY_A)
            .save(AUTHORIZATIONS_A);
        graph.flush();
        // Missing documents are treated as new documents (see BulkUpdateService#handleFailure) and thus are not part
        // of the initial flush.
        graph.flush();
        List<String> results = toList(graph.query("joe", AUTHORIZATIONS_A).vertexIds());
        assertEquals(1, results.size());
        assertEquals("v1", results.get(0));
        results = toList(graph.query("bob", AUTHORIZATIONS_A).vertexIds());
        assertEquals(1, results.size());
        assertEquals("v1", results.get(0));
    }
    /**
     * Creates twice the paging limit of vertices and checks correct result
     * counts below, above and without the limit (the last two paths exercise
     * the scroll API rather than plain paging).
     */
    @Test
    public void testQueryPagingVsScrollApi() {
        for (int i = 0; i < ElasticsearchResource.TEST_QUERY_PAGING_LIMIT * 2; i++) {
            graph.prepareVertex("v" + i, VISIBILITY_A)
                .addPropertyValue("k1", "prop1", "joe", VISIBILITY_A)
                .save(AUTHORIZATIONS_A);
        }
        graph.flush();
        // Below the paging limit: plain paged query.
        int resultCount = count(graph.query(AUTHORIZATIONS_A)
            .limit(ElasticsearchResource.TEST_QUERY_PAGING_LIMIT - 1)
            .vertices());
        assertEquals(ElasticsearchResource.TEST_QUERY_PAGING_LIMIT - 1, resultCount);
        // Above the paging limit: scroll API.
        resultCount = count(graph.query(AUTHORIZATIONS_A)
            .limit(ElasticsearchResource.TEST_QUERY_PAGING_LIMIT + 1)
            .vertices());
        assertEquals(ElasticsearchResource.TEST_QUERY_PAGING_LIMIT + 1, resultCount);
        // No limit: everything must come back.
        resultCount = count(graph.query(AUTHORIZATIONS_A)
            .vertices());
        assertEquals(ElasticsearchResource.TEST_QUERY_PAGING_LIMIT * 2, resultCount);
    }
@Test
public void testMultipleThreadsFlushing() throws InterruptedException {
assumeTrue(benchmarkEnabled());
AtomicBoolean startSignal = new AtomicBoolean();
AtomicBoolean run = new AtomicBoolean(true);
AtomicBoolean writing = new AtomicBoolean(false);
AtomicBoolean writeThenFlushComplete = new AtomicBoolean(false);
CountDownLatch threadsReadyCountdown = new CountDownLatch(2);
Runnable waitForStart = () -> {
try {
while (!startSignal.get()) {
synchronized (startSignal) {
threadsReadyCountdown.countDown();
startSignal.wait();
}
}
} catch (Exception ex) {
throw new VertexiumException("thread failed", ex);
}
};
Thread constantWriteThread = new Thread(() -> {
waitForStart.run();
int i = 0;
while (run.get()) {
graph.prepareVertex("v" + i, new Visibility(""))
.addPropertyValue("k1", "name1", "value1", new Visibility(""))
.save(AUTHORIZATIONS_ALL);
writing.set(true);
i++;
}
});
Thread writeThenFlushThread = new Thread(() -> {
try {
waitForStart.run();
while (!writing.get()) {
Thread.sleep(10); // wait for other thread to start
}
for (int i = 0; i < 5; i++) {
graph.prepareVertex("vWriteTheFlush", new Visibility(""))
.addPropertyValue("k1", "name1", "value1", new Visibility(""))
.save(AUTHORIZATIONS_ALL);
graph.flush();
}
writeThenFlushComplete.set(true);
} catch (Exception ex) {
throw new VertexiumException("thread failed", ex);
}
});
// synchronize thread start
constantWriteThread.start();
writeThenFlushThread.start();
threadsReadyCountdown.await();
Thread.sleep(100);
synchronized (startSignal) {
startSignal.set(true);
startSignal.notifyAll();
}
// wait to finish
int timeout = 5000;
long startTime = System.currentTimeMillis();
while (!writeThenFlushComplete.get() && (System.currentTimeMillis() - startTime < timeout)) {
Thread.sleep(10);
}
long endTime = System.currentTimeMillis();
run.set(false);
constantWriteThread.join();
writeThenFlushThread.join();
// check results
if (endTime - startTime > timeout) {
fail("timeout waiting for flush");
}
}
    /**
     * Ten threads each write (and flush) 100 distinct property values onto
     * the same vertex; afterwards every one of the 1000 values must be
     * present.
     */
    @Test
    public void testManyWritesToSameElement() throws InterruptedException {
        int threadCount = 10;
        int numberOfTimerToWrite = 100;
        Thread[] threads = new Thread[threadCount];
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(() -> {
                for (int write = 0; write < numberOfTimerToWrite; write++) {
                    // Thread id in the key/value makes every write unique.
                    String keyAndValue = Thread.currentThread().getId() + "-" + write;
                    getGraph().prepareVertex("v1", VISIBILITY_EMPTY)
                        .addPropertyValue(keyAndValue, "name", keyAndValue, VISIBILITY_EMPTY)
                        .save(AUTHORIZATIONS_EMPTY);
                    getGraph().flush();
                }
            });
            threads[i].setName("testManyWritesToSameElement-" + threads[i].getId());
        }
        for (Thread thread : threads) {
            thread.start();
        }
        for (Thread thread : threads) {
            thread.join();
        }
        Vertex v1 = getGraph().getVertex("v1", AUTHORIZATIONS_EMPTY);
        assertEquals(threadCount * numberOfTimerToWrite, count(v1.getProperties("name")));
    }
    /**
     * Like testManyWritesToSameElement, but each thread flushes only once
     * after all of its writes, exercising batched/buffered index updates.
     */
    @Test
    public void testManyWritesToSameElementNoFlushTillEnd() throws InterruptedException {
        int threadCount = 5;
        int numberOfTimerToWrite = 20;
        Thread[] threads = new Thread[threadCount];
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(() -> {
                for (int write = 0; write < numberOfTimerToWrite; write++) {
                    String keyAndValue = Thread.currentThread().getId() + "-" + write;
                    getGraph().prepareVertex("v1", VISIBILITY_EMPTY)
                        .addPropertyValue(keyAndValue, "name", keyAndValue, VISIBILITY_EMPTY)
                        .save(AUTHORIZATIONS_EMPTY);
                }
                // Single flush per thread, after all writes.
                getGraph().flush();
            });
            threads[i].setName("testManyWritesToSameElementNoFlushTillEnd-" + threads[i].getId());
        }
        for (Thread thread : threads) {
            thread.start();
        }
        for (Thread thread : threads) {
            thread.join();
        }
        Vertex v1 = getGraph().getVertex("v1", AUTHORIZATIONS_EMPTY);
        assertEquals(threadCount * numberOfTimerToWrite, count(v1.getProperties("name")));
    }
    /**
     * Starts a scroll-backed query, partially iterates it and abandons the
     * iterator/iterable without closing them, then drops the references and
     * prods the GC so the esOrphanScrollCheck rule can observe the scrolls
     * being released by finalization.
     */
    @Test
    public void testUnclosedScrollApi() {
        int verticesToCreate = ElasticsearchResource.TEST_QUERY_PAGE_SIZE * 2;
        for (int i = 0; i < verticesToCreate; i++) {
            getGraph().prepareVertex("v" + i, VISIBILITY_EMPTY)
                .addPropertyValue("k1", "name", "value1", VISIBILITY_EMPTY)
                .save(AUTHORIZATIONS_EMPTY);
        }
        getGraph().flush();
        assertEquals(0, getCurrentScrolls());
        // limit(null) forces the scroll API instead of paged queries.
        QueryResultsIterable<Vertex> vertices = getGraph().query(AUTHORIZATIONS_EMPTY)
            .has("name", "value1")
            .limit((Long) null)
            .vertices();
        assertEquals(0, getCurrentScrolls());
        assertEquals(verticesToCreate, vertices.getTotalHits());
        assertEquals(2, getCurrentScrolls()); // (number of scroll requests) * (number of shards)
        Iterator<Vertex> it = vertices.iterator();
        assertEquals(2, getCurrentScrolls());
        assertTrue(it.hasNext());
        it.next();
        // Abandon both references without closing them.
        it = null;
        vertices = null;
        assertEquals(2, getCurrentScrolls());
        System.gc();
        System.gc();
    }
    /**
     * Closing a scroll-backed iterable before any iterator is taken must
     * release its server-side scroll contexts.
     */
    @Test
    public void testCloseIterableClearsScrollWithNoIterators() throws IOException {
        int verticesToCreate = ElasticsearchResource.TEST_QUERY_PAGE_SIZE * 2;
        for (int i = 0; i < verticesToCreate; i++) {
            getGraph().prepareVertex("v" + i, VISIBILITY_EMPTY)
                .addPropertyValue("k1", "name", "value1", VISIBILITY_EMPTY)
                .save(AUTHORIZATIONS_EMPTY);
        }
        getGraph().flush();
        assertEquals(0, getCurrentScrolls());
        // limit(null) forces the scroll API instead of paged queries.
        QueryResultsIterable<Vertex> vertices = getGraph().query(AUTHORIZATIONS_EMPTY)
            .has("name", "value1")
            .limit((Long) null)
            .vertices();
        assertEquals(0, getCurrentScrolls());
        assertEquals(verticesToCreate, vertices.getTotalHits());
        assertEquals(2, getCurrentScrolls()); // (number of scroll requests) * (number of shards)
        vertices.close();
        assertEquals(0, getCurrentScrolls());
    }
    /**
     * Each iterator over a scroll-backed iterable owns its own scroll
     * contexts; exhausting or closing an iterator releases only its own,
     * and closing the iterable releases whatever remains.
     */
    @Test
    public void testCompleteIteratorsClearsScroll() throws IOException {
        int verticesToCreate = ElasticsearchResource.TEST_QUERY_PAGE_SIZE * 2;
        for (int i = 0; i < verticesToCreate; i++) {
            getGraph().prepareVertex("v" + i, VISIBILITY_EMPTY)
                .addPropertyValue("k1", "name", "value1", VISIBILITY_EMPTY)
                .save(AUTHORIZATIONS_EMPTY);
        }
        getGraph().flush();
        assertEquals(0, getCurrentScrolls());
        QueryResultsIterable<Vertex> vertices = getGraph().query(AUTHORIZATIONS_EMPTY)
            .has("name", "value1")
            .limit((Long) null)
            .vertices();
        assertEquals(0, getCurrentScrolls());
        assertEquals(verticesToCreate, vertices.getTotalHits());
        assertEquals(2, getCurrentScrolls()); // (number of scroll requests) * (number of shards)
        // Open three iterators for a total of 6 scrolls. The first iterator will
        // use the existing scroll id from the iterable. The second and third will
        // begin new scrolls with their own scroll ids.
        Iterator<Vertex> iterator1 = vertices.iterator();
        assertEquals(2, getCurrentScrolls());
        Iterator<Vertex> iterator2 = vertices.iterator();
        assertEquals(4, getCurrentScrolls());
        Iterator<Vertex> iterator3 = vertices.iterator();
        assertEquals(6, getCurrentScrolls());
        // iterating completely should close the scroll for just that iterator
        iterator1.forEachRemaining(vertex -> {});
        assertEquals(4, getCurrentScrolls());
        // closing the iterator should close for just that iterator
        CloseableUtils.closeQuietly(iterator2);
        assertEquals(2, getCurrentScrolls());
        // closing the iterable that produced the iterators should close all remaining iterators
        vertices.close();
        assertEquals(0, getCurrentScrolls());
    }
private long getCurrentScrolls() {
NodesStatsResponse nodeStats = new NodesStatsRequestBuilder(elasticsearchResource.getClient(), NodesStatsAction.INSTANCE).get();
return nodeStats.getNodes().stream()
.mapToLong(node -> node.getIndices().getSearch().getTotal().getScrollCurrent())
.sum();
}
    /**
     * A paged query (limit 0) over more than 10,000 documents must report
     * the exact total hit count, not Elasticsearch's default 10,000 cap.
     */
    @Test
    public void testLargeTotalHitsPaged() throws InterruptedException {
        // Just over Elasticsearch's default track_total_hits cap of 10,000.
        int vertexCount = 10_045;
        for (int write = 0; write < vertexCount; write++) {
            getGraph().prepareVertex("v" + write, VISIBILITY_EMPTY).save(AUTHORIZATIONS_EMPTY);
        }
        getGraph().flush();
        QueryResultsIterable<String> queryResults = getGraph()
            .query("*", AUTHORIZATIONS_EMPTY)
            .limit(0)
            .vertexIds();
        assertEquals(vertexCount, queryResults.getTotalHits());
        closeQuietly(queryResults);
    }
    /**
     * Same as testLargeTotalHitsPaged but via the scroll API
     * (limit(null)): the total hit count must still be exact beyond 10,000.
     */
    @Test
    public void testLargeTotalHitsScroll() throws InterruptedException {
        // Just over Elasticsearch's default track_total_hits cap of 10,000.
        int vertexCount = 10_045;
        for (int write = 0; write < vertexCount; write++) {
            getGraph().prepareVertex("v" + write, VISIBILITY_EMPTY).save(AUTHORIZATIONS_EMPTY);
        }
        getGraph().flush();
        QueryResultsIterable<String> queryResults = getGraph()
            .query("*", AUTHORIZATIONS_EMPTY)
            .limit((Long) null)
            .vertexIds();
        assertEquals(vertexCount, queryResults.getTotalHits());
        closeQuietly(queryResults);
    }
/**
 * Runs concurrent insert and query workloads and verifies that the search
 * index does not refresh once per operation: the refresh count observed over
 * the whole run must stay well below the number of insert iterations.
 */
@Test
public void testNumberOfRefreshes() throws InterruptedException {
    getGraph().prepareVertex("vPRIME", VISIBILITY_EMPTY)
        .addPropertyValue("k1", "name", "value1", VISIBILITY_EMPTY)
        .save(AUTHORIZATIONS_EMPTY);
    getGraph().flush();
    int verticesToCreate = 100;
    int insertIterations = 50;
    int queryIterations = 10;
    long startRefreshes = getRefreshCount();
    // Record the first exception thrown by either worker. Without this, an
    // exception in a worker thread only printed a stack trace and the test
    // passed spuriously.
    java.util.concurrent.atomic.AtomicReference<Throwable> workerFailure =
        new java.util.concurrent.atomic.AtomicReference<>();
    Thread insertThread = new Thread(() -> {
        for (int it = 0; it < insertIterations; it++) {
            System.out.println("update " + it);
            for (int i = 0; i < verticesToCreate; i++) {
                getGraph().prepareVertex("v", VISIBILITY_EMPTY)
                    .addPropertyValue("k" + i, "name", "value" + i, VISIBILITY_EMPTY)
                    .save(AUTHORIZATIONS_EMPTY);
            }
            getGraph().flush();
        }
    });
    Thread queryThread = new Thread(() -> {
        for (int it = 0; it < queryIterations; it++) {
            System.out.println("query " + it);
            for (int i = 0; i < verticesToCreate; i++) {
                toList(getGraph().query(AUTHORIZATIONS_EMPTY)
                    .has("name", "value" + i)
                    .vertices());
            }
        }
    });
    insertThread.setUncaughtExceptionHandler((thread, error) -> workerFailure.compareAndSet(null, error));
    queryThread.setUncaughtExceptionHandler((thread, error) -> workerFailure.compareAndSet(null, error));
    long startTime = System.currentTimeMillis();
    queryThread.start();
    insertThread.start();
    queryThread.join();
    insertThread.join();
    long endTime = System.currentTimeMillis();
    System.out.println("time: " + (endTime - startTime));
    if (workerFailure.get() != null) {
        throw new AssertionError("worker thread failed", workerFailure.get());
    }
    long endRefreshCount = getRefreshCount();
    long totalRefreshes = endRefreshCount - startRefreshes;
    System.out.println("refreshes: " + totalRefreshes);
    // Message fixed to match the asserted condition (it previously referred to
    // "iterations times the number of vertices inserted" while comparing
    // against insertIterations * 2).
    assertTrue(
        "total refreshes should be well below the number of insert iterations",
        totalRefreshes < insertIterations * 2
    );
}
/**
 * Returns the cumulative refresh count across all indices, as reported by
 * the indices-stats API.
 */
private long getRefreshCount() {
    IndicesStatsResponse stats = getSearchIndex().getClient().admin().indices().prepareStats().get();
    return stats.getTotal().getRefresh().getTotal();
}
/**
 * Returns the total number of search queries executed on the single expected
 * Elasticsearch node; fails the test if more than one node is present.
 */
private long getNumQueries() {
    NodesStatsResponse response = new NodesStatsRequestBuilder(elasticsearchResource.getClient(), NodesStatsAction.INSTANCE).get();
    List<NodeStats> allNodes = response.getNodes();
    assertEquals(1, allNodes.size());
    NodeStats onlyNode = allNodes.get(0);
    return onlyNode.getIndices().getSearch().getTotal().getQueryCount();
}
/**
 * Walks the cause chain of {@code e} and returns the deepest (root) cause;
 * returns {@code e} itself when it has no cause.
 */
private Throwable getRootCause(Throwable e) {
    Throwable current = e;
    while (current.getCause() != null) {
        current = current.getCause();
    }
    return current;
}
@Override
protected ScoringStrategy getHammingDistanceScoringStrategy(String field, String hash) {
    // Supplies the Elasticsearch-backed implementation of this strategy so the
    // shared base-class tests run against it.
    return new ElasticsearchHammingDistanceScoringStrategy(field, hash);
}
@Override
protected SortingStrategy getLengthOfStringSortingStrategy(String propertyName) {
    // Supplies the Elasticsearch-backed implementation of this strategy so the
    // shared base-class tests run against it.
    return new ElasticsearchLengthOfStringSortingStrategy(propertyName);
}
@Override
protected ScoringStrategy getFieldValueScoringStrategy(String field) {
    // Supplies the Elasticsearch-backed implementation of this strategy so the
    // shared base-class tests run against it.
    return new ElasticsearchFieldValueScoringStrategy(field);
}
@Override
protected boolean multivalueGeopointQueryWithinMeansAny() {
    // Capability flag consulted by the base test suite; this implementation
    // answers false so the shared tests adjust their expectations for
    // multi-valued geopoint "within" queries.
    return false;
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.rules.coercer.ManifestEntries;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.TestExecutionContext;
import com.facebook.buck.testutil.TestConsole;
import com.facebook.buck.util.Verbosity;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import org.junit.Test;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
/**
 * Test generation of command line flags based on creation parameters.
 */
public class AaptStepTest {
  /** Base directory for all of the fake paths handed to the step under test. */
  private final Path basePath = Paths.get("/java/com/facebook/buck/example");

  /**
   * Build an AaptStep that can be used to generate a shell command. Should only
   * be used for checking the generated command, since it does not refer to useful
   * directories (so it can't be executed).
   */
  private AaptStep buildAaptStep(
      Optional<Path> pathToGeneratedProguardConfig,
      boolean isCrunchFiles,
      boolean includesVectorDrawables,
      ManifestEntries manifestEntries) {
    return new AaptStep(
        /* workingDirectory */ basePath,
        /* manifestDirectory */ basePath.resolve("AndroidManifest.xml"),
        /* resDirectories */ ImmutableList.of(),
        /* assetsDirectories */ ImmutableSortedSet.of(),
        /* pathToOutputApk */ basePath.resolve("build").resolve("out.apk"),
        /* pathToRDotDText */ basePath.resolve("r"),
        pathToGeneratedProguardConfig,
        isCrunchFiles,
        includesVectorDrawables,
        manifestEntries
    );
  }

  /**
   * Create an execution context with the given verbosity level. The execution
   * context will yield fake values relative to the base path for all target
   * queries. The android platform target mock has already been replayed before
   * the context is returned, so callers can use it directly.
   */
  private ExecutionContext createTestExecutionContext(Verbosity verbosity) {
    final AndroidPlatformTarget androidPlatformTarget = createMock(AndroidPlatformTarget.class);
    expect(androidPlatformTarget.getAaptExecutable()).andReturn(basePath.resolve("mock_aapt_bin"));
    expect(androidPlatformTarget.getAndroidJar()).andReturn(basePath.resolve("mock_android.jar"));
    replay(androidPlatformTarget);
    return TestExecutionContext.newBuilder()
        .setConsole(new TestConsole(verbosity))
        .setAndroidPlatformTargetSupplier(Suppliers.ofInstance(androidPlatformTarget))
        .build();
  }

  /**
   * Convenience helper shared by most tests: builds a step with the given
   * parameters and returns the shell command it would run under a
   * Verbosity.ALL execution context.
   */
  private ImmutableList<String> getCommand(
      Optional<Path> pathToGeneratedProguardConfig,
      boolean isCrunchFiles,
      boolean includesVectorDrawables,
      ManifestEntries manifestEntries) {
    AaptStep aaptStep = buildAaptStep(
        pathToGeneratedProguardConfig, isCrunchFiles, includesVectorDrawables, manifestEntries);
    ExecutionContext executionContext = createTestExecutionContext(Verbosity.ALL);
    return aaptStep.getShellCommandInternal(executionContext);
  }

  @Test
  public void shouldEmitVerbosityFlagWithVerboseContext() throws Exception {
    ImmutableList<String> command =
        getCommand(Optional.empty(), false, false, ManifestEntries.empty());
    assertTrue(command.contains("-v"));
  }

  @Test
  public void shouldNotEmitVerbosityFlagWithQuietContext() throws Exception {
    // This test needs a SILENT context, so it cannot use the shared helper.
    AaptStep aaptStep =
        buildAaptStep(Optional.empty(), false, false, ManifestEntries.empty());
    ExecutionContext executionContext = createTestExecutionContext(Verbosity.SILENT);
    ImmutableList<String> command = aaptStep.getShellCommandInternal(executionContext);
    assertFalse(command.contains("-v"));
  }

  @Test
  public void shouldEmitGFlagIfProguardConfigPresent() throws Exception {
    Path proguardConfig = basePath.resolve("mock_proguard.txt");
    ImmutableList<String> command =
        getCommand(Optional.of(proguardConfig), false, false, ManifestEntries.empty());
    assertTrue(command.contains("-G"));
    String proguardConfigPath = MorePaths.pathWithPlatformSeparators(
        "/java/com/facebook/buck/example/mock_proguard.txt"
    );
    assertTrue(command.contains(proguardConfigPath));
  }

  @Test
  public void shouldEmitNoCrunchFlagIfNotCrunch() throws Exception {
    ImmutableList<String> command =
        getCommand(Optional.empty(), false, false, ManifestEntries.empty());
    assertTrue(command.contains("--no-crunch"));
  }

  @Test
  public void shouldNotEmitNoCrunchFlagIfCrunch() throws Exception {
    ImmutableList<String> command =
        getCommand(Optional.empty(), true, false, ManifestEntries.empty());
    assertFalse(command.contains("--no-crunch"));
  }

  @Test
  public void shouldEmitNoVersionVectorsFlagIfRequested() throws Exception {
    ImmutableList<String> command =
        getCommand(Optional.empty(), false, true, ManifestEntries.empty());
    assertTrue(command.contains("--no-version-vectors"));
  }

  @Test
  public void shouldNotEmitNoVersionVectorsFlagIfNotRequested() throws Exception {
    ImmutableList<String> command =
        getCommand(Optional.empty(), false, false, ManifestEntries.empty());
    assertFalse(command.contains("--no-version-vectors"));
  }

  @Test
  public void shouldEmitFlagsForManifestEntries() throws Exception {
    ManifestEntries entries = ManifestEntries.builder()
        .setMinSdkVersion(3)
        .setTargetSdkVersion(5)
        .setVersionCode(7)
        .setVersionName("eleven")
        .setDebugMode(true)
        .build();
    ImmutableList<String> command = getCommand(Optional.empty(), true, false, entries);
    assertTrue(command.contains("--min-sdk-version"));
    assertEquals("3", command.get(command.indexOf("--min-sdk-version") + 1));
    assertTrue(command.contains("--target-sdk-version"));
    assertEquals("5", command.get(command.indexOf("--target-sdk-version") + 1));
    assertTrue(command.contains("--version-code"));
    assertEquals("7", command.get(command.indexOf("--version-code") + 1));
    assertTrue(command.contains("--version-name"));
    assertEquals("eleven", command.get(command.indexOf("--version-name") + 1));
    assertTrue(command.contains("--debug-mode"));
    // This should be present because we've emitted > 0 manifest-changing flags.
    assertTrue(command.contains("--error-on-failed-insert"));
  }

  @Test
  public void shouldNotEmitFailOnInsertWithoutManifestEntries() throws Exception {
    ImmutableList<String> command =
        getCommand(Optional.empty(), true, false, ManifestEntries.empty());
    assertFalse(command.contains("--error-on-failed-insert"));
  }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created by IntelliJ IDEA.
* User: Alexey
* Date: 18.12.2006
* Time: 20:18:31
*/
package com.intellij.util.containers;
import com.intellij.util.IncorrectOperationException;
import gnu.trove.TObjectHashingStrategy;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.TestOnly;
import java.lang.ref.ReferenceQueue;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
/**
 * A {@link ConcurrentMap} whose keys are held through reference objects; the
 * reference flavor (weak, soft, ...) is chosen by subclasses via
 * {@link #createKey}. Cleared keys are purged lazily by {@link #processQueue}.
 * Fully copied from java.util.WeakHashMap except "get" method optimization.
 */
abstract class ConcurrentRefHashMap<K, V> extends AbstractMap<K, V> implements ConcurrentMap<K, V>, TObjectHashingStrategy<K> {
  // The GC enqueues cleared key references here; processQueue() drains it.
  protected final ReferenceQueue<K> myReferenceQueue = new ReferenceQueue<K>();
  private final ConcurrentHashMap<Key<K, V>, V> myMap; // hashing strategy must be canonical, we compute corresponding hash codes using our own myHashingStrategy
  @NotNull
  private final TObjectHashingStrategy<K> myHashingStrategy;

  // Wrapper stored in myMap in place of the raw key.
  public interface Key<K, V> {
    K get();
    V getValue();
    // MUST work even with gced references for the code in processQueue to work
    boolean equals(Object o);
    int hashCode();
  }

  // Subclasses decide the reference strength (weak/soft) of the stored key.
  protected abstract Key<K, V> createKey(@NotNull K key, V value, @NotNull TObjectHashingStrategy<K> hashingStrategy);

  /**
   * A strongly-referenced, mutable lookup key. Reused via the HARD_KEY
   * thread-local so lookups (get/containsKey/remove) do not allocate a
   * reference object per call.
   */
  private static class HardKey<K, V> implements Key<K, V> {
    private K myKey;
    private int myHash;

    private void setKey(K key, final int hash) {
      myKey = key;
      myHash = hash;
    }

    @Override
    public K get() {
      return myKey;
    }

    @Override
    public V getValue() {
      return null;
    }

    public boolean equals(Object o) {
      if (this == o) return true;
      if (!(o instanceof Key)) return false;
      Object t = get();
      Object u = ((Key)o).get();
      // A cleared reference never matches anything (required by processQueue).
      if (t == null || u == null) return false;
      if (t == u) return true;
      return t.equals(u);
    }

    public int hashCode() {
      return myHash;
    }
  }

  // Sentinel used to support null keys (reference objects cannot hold null).
  private static final Key NULL_KEY = new Key() {
    @Override
    public Object get() {
      return null;
    }

    @Override
    public Object getValue() {
      return null;
    }
  };

  // returns true if some keys were processed
  boolean processQueue() {
    Key<K, V> wk;
    boolean processed = false;
    while ((wk = (Key)myReferenceQueue.poll()) != null) {
      V value = wk.getValue();
      // Two-argument remove so a mapping that was concurrently replaced with a
      // new value is not clobbered.
      myMap.remove(wk, value);
      processed = true;
    }
    return processed;
  }

  /** Creates a map pre-sized for the given map's contents and copies them in. */
  public ConcurrentRefHashMap(Map<? extends K, ? extends V> t) {
    this(Math.max(2 * t.size(), 11), ConcurrentHashMap.DEFAULT_LOAD_FACTOR);
    putAll(t);
  }

  public ConcurrentRefHashMap() {
    this(ConcurrentHashMap.DEFAULT_INITIAL_CAPACITY);
  }

  public ConcurrentRefHashMap(int initialCapacity) {
    this(initialCapacity, ConcurrentHashMap.DEFAULT_LOAD_FACTOR);
  }

  // Marker strategy meaning "use this map itself as the hashing strategy"
  // (see the constructor below); its methods must never actually be called.
  private static final TObjectHashingStrategy THIS = new TObjectHashingStrategy() {
    @Override
    public int computeHashCode(Object object) {
      throw new IncorrectOperationException();
    }

    @Override
    public boolean equals(Object o1, Object o2) {
      throw new IncorrectOperationException();
    }
  };

  public ConcurrentRefHashMap(int initialCapacity, float loadFactor) {
    this(initialCapacity, loadFactor, 4, THIS);
  }

  public ConcurrentRefHashMap(@NotNull final TObjectHashingStrategy<K> hashingStrategy) {
    this(ConcurrentHashMap.DEFAULT_INITIAL_CAPACITY, ConcurrentHashMap.DEFAULT_LOAD_FACTOR, 2, hashingStrategy);
  }

  public ConcurrentRefHashMap(int initialCapacity,
                              float loadFactor,
                              int concurrencyLevel,
                              @NotNull TObjectHashingStrategy<K> hashingStrategy) {
    // THIS is replaced by the map itself, whose computeHashCode/equals (below)
    // implement the default strategy.
    myHashingStrategy = hashingStrategy == THIS ? this : hashingStrategy;
    myMap = new ConcurrentHashMap<Key<K, V>, V>(initialCapacity, loadFactor, concurrencyLevel, CANONICAL);
  }

  @Override
  public int size() {
    // Delegates to entrySet() so cleared-but-not-yet-purged keys are skipped.
    return entrySet().size();
  }

  @Override
  public boolean isEmpty() {
    return entrySet().isEmpty();
  }

  @Override
  public boolean containsKey(Object key) {
    // optimization:
    if (key == null) {
      return myMap.containsKey(NULL_KEY);
    }
    HardKey<K, V> hardKey = createHardKey((K)key);

    boolean result = myMap.containsKey(hardKey);

    releaseHardKey(hardKey);
    return result;
  }

  // Per-thread reusable lookup key; safe because each thread has its own.
  private static final ThreadLocal<HardKey> HARD_KEY = new ThreadLocal<HardKey>() {
    @Override
    protected HardKey initialValue() {
      return new HardKey();
    }
  };

  private HardKey<K, V> createHardKey(K key) {
    HardKey hardKey = HARD_KEY.get();
    hardKey.setKey(key, myHashingStrategy.computeHashCode(key));
    return hardKey;
  }

  // Clears the reusable key so it does not pin the last looked-up key in memory.
  private static void releaseHardKey(HardKey key) {
    key.setKey(null, 0);
  }

  @Override
  public V get(Object key) {
    //return myMap.get(WeakKey.create(key));
    // optimization:
    if (key == null) {
      return myMap.get(NULL_KEY);
    }
    HardKey<K, V> hardKey = createHardKey((K)key);

    V result = myMap.get(hardKey);

    releaseHardKey(hardKey);

    return result;
  }

  @Override
  public V put(K key, V value) {
    processQueue();

    Key<K, V> weakKey = key == null ? NULL_KEY : createKey(key, value, myHashingStrategy);
    return myMap.put(weakKey, value);
  }

  @Override
  public V remove(Object key) {
    processQueue();

    // optimization:
    if (key == null) {
      return myMap.remove(NULL_KEY);
    }
    HardKey hardKey = createHardKey((K)key);

    V result = myMap.remove(hardKey);

    releaseHardKey(hardKey);

    return result;
  }

  @Override
  public void clear() {
    processQueue();
    myMap.clear();
  }

  /**
   * Entry view handed out by the iterator; holds a strong reference to the key
   * so the GC cannot clear it while the caller is still using the entry.
   */
  private static class RefEntry<K, V> implements Map.Entry<K, V> {
    private final Map.Entry<?, V> ent;
    private final K key; /* Strong reference to key, so that the GC
                            will leave it alone as long as this Entry
                            exists */

    RefEntry(Map.Entry<?, V> ent, K key) {
      this.ent = ent;
      this.key = key;
    }

    @Override
    public K getKey() {
      return key;
    }

    @Override
    public V getValue() {
      return ent.getValue();
    }

    @Override
    public V setValue(V value) {
      return ent.setValue(value);
    }

    private static boolean valEquals(Object o1, Object o2) {
      return o1 == null ? o2 == null : o1.equals(o2);
    }

    public boolean equals(Object o) {
      if (!(o instanceof Map.Entry)) return false;
      Map.Entry e = (Map.Entry)o;
      return valEquals(key, e.getKey()) && valEquals(getValue(), e.getValue());
    }

    public int hashCode() {
      Object v;
      return (key == null ? 0 : key.hashCode()) ^ ((v = getValue()) == null ? 0 : v.hashCode());
    }
  }

  /* Internal class for entry sets */
  private class EntrySet extends AbstractSet<Map.Entry<K, V>> {
    Set<Map.Entry<Key<K, V>, V>> hashEntrySet = myMap.entrySet();

    @NotNull
    @Override
    public Iterator<Map.Entry<K, V>> iterator() {
      return new Iterator<Map.Entry<K, V>>() {
        Iterator<Map.Entry<Key<K, V>, V>> hashIterator = hashEntrySet.iterator();
        // Next live entry, computed eagerly by hasNext() while skipping
        // entries whose key reference was already cleared by the GC.
        RefEntry<K, V> next = null;

        @Override
        public boolean hasNext() {
          while (hashIterator.hasNext()) {
            Map.Entry<Key<K, V>, V> ent = hashIterator.next();
            Key<K, V> wk = ent.getKey();
            K k = null;
            if (wk != null && (k = wk.get()) == null) {
              /* Weak key has been cleared by GC */
              continue;
            }
            next = new RefEntry<K, V>(ent, k);
            return true;
          }
          return false;
        }

        @Override
        public Map.Entry<K, V> next() {
          if (next == null && !hasNext()) {
            throw new NoSuchElementException();
          }
          RefEntry<K, V> e = next;
          next = null;
          return e;
        }

        @Override
        public void remove() {
          hashIterator.remove();
        }
      };
    }

    @Override
    public boolean isEmpty() {
      return !iterator().hasNext();
    }

    @Override
    public int size() {
      // Counts only entries whose keys are still live (iterator skips cleared
      // references), so this can be smaller than myMap.size().
      int j = 0;
      for (Iterator i = iterator(); i.hasNext(); i.next()) j++;
      return j;
    }

    @Override
    public boolean remove(Object o) {
      processQueue();
      if (!(o instanceof Map.Entry)) return false;
      Map.Entry<K,V> e = (Map.Entry)o;
      V ev = e.getValue();

      HardKey key = createHardKey(e.getKey());

      V hv = myMap.get(key);
      // Remove only when the stored value matches the entry's value (or both
      // are null and the key is actually present).
      boolean toRemove = hv == null ? ev == null && myMap.containsKey(key) : hv.equals(ev);
      if (toRemove) {
        myMap.remove(key);
      }

      releaseHardKey(key);
      return toRemove;
    }

    public int hashCode() {
      int h = 0;
      for (Object aHashEntrySet : hashEntrySet) {
        Map.Entry ent = (Map.Entry)aHashEntrySet;
        Key wk = (Key)ent.getKey();
        if (wk == null) continue;
        Object v;
        h += wk.hashCode() ^ ((v = ent.getValue()) == null ? 0 : v.hashCode());
      }
      return h;
    }
  }

  // Lazily created entry-set view.
  // NOTE(review): field is not volatile, so racing threads may each build their
  // own EntrySet instance; presumably acceptable because the view itself holds
  // no mutable state beyond the delegate set — verify before relying on
  // identity of the returned set.
  private Set<Map.Entry<K, V>> entrySet = null;

  @NotNull
  @Override
  public Set<Map.Entry<K, V>> entrySet() {
    if (entrySet == null) entrySet = new EntrySet();
    return entrySet;
  }

  @Override
  public V putIfAbsent(@NotNull final K key, final V value) {
    processQueue();
    return myMap.putIfAbsent(createKey(key, value, myHashingStrategy), value);
  }

  @Override
  public boolean remove(@NotNull final Object key, final Object value) {
    processQueue();
    return myMap.remove(createKey((K)key, (V)value, myHashingStrategy), value);
  }

  @Override
  public boolean replace(@NotNull final K key, @NotNull final V oldValue, @NotNull final V newValue) {
    processQueue();
    return myMap.replace(createKey(key, oldValue, myHashingStrategy), oldValue, newValue);
  }

  @Override
  public V replace(@NotNull final K key, @NotNull final V value) {
    processQueue();
    return myMap.replace(createKey(key, value, myHashingStrategy), value);
  }

  // MAKE SURE IT CONSISTENT WITH com.intellij.util.containers.ConcurrentHashMap
  @Override
  public int computeHashCode(final K object) {
    // Bit-spreading of the raw hashCode (default strategy when THIS was passed).
    int h = object.hashCode();
    h += ~(h << 9);
    h ^= (h >>> 14);
    h += (h << 4);
    h ^= (h >>> 10);
    return h;
  }

  @Override
  public boolean equals(final K o1, final K o2) {
    return o1.equals(o2);
  }

  @TestOnly
  int underlyingMapSize() {
    // Includes entries whose keys were cleared but not yet purged.
    return myMap.size();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cache.affinity.AffinityFunction;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheAdapter;
import org.apache.ignite.internal.processors.cache.GridCacheConcurrentMap;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheEntryEx;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionMap2;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.P1;
import org.apache.ignite.internal.util.typedef.internal.CU;
import static org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtPartitionState.OWNING;
/**
 * Utility methods for dht preloader testing.
 */
public class GridCacheDhtTestUtils {
    /**
     * Utility class - prevent instantiation.
     */
    private GridCacheDhtTestUtils() {
        // No-op.
    }

    /**
     * Populates the cache with {@code keyCnt} sequential integer keys by
     * inserting entries directly into the cache's private internal map
     * (obtained via reflection) and marking each touched local partition
     * as owned.
     *
     * @param dht Cache.
     * @param keyCnt Number of test keys to put into cache.
     * @throws IgniteCheckedException If failed to prepare.
     */
    @SuppressWarnings({"UnusedAssignment", "unchecked"})
    static void prepareKeys(GridDhtCache<Integer, String> dht, int keyCnt) throws IgniteCheckedException {
        AffinityFunction aff = dht.context().config().getAffinity();

        GridCacheConcurrentMap cacheMap;

        try {
            // "map" is private on GridCacheAdapter; reflection is the only way in.
            Field field = GridCacheAdapter.class.getDeclaredField("map");

            field.setAccessible(true);

            cacheMap = (GridCacheConcurrentMap)field.get(dht);
        }
        catch (Exception e) {
            throw new IgniteCheckedException("Failed to get cache map.", e);
        }

        GridDhtPartitionTopology top = dht.topology();

        GridCacheContext ctx = dht.context();

        for (int i = 0; i < keyCnt; i++) {
            KeyCacheObject cacheKey = ctx.toCacheKeyObject(i);

            // NOTE(review): the value argument is built with toCacheKeyObject("value" + i);
            // confirm this matches the parameter putEntryIfObsoleteOrAbsent expects.
            cacheMap.putEntryIfObsoleteOrAbsent(
                AffinityTopologyVersion.NONE,
                cacheKey,
                ctx.toCacheKeyObject("value" + i),
                false,
                false);

            dht.preloader().request(Collections.singleton(cacheKey), AffinityTopologyVersion.NONE);

            GridDhtLocalPartition part = top.localPartition(aff.partition(i), false);

            assert part != null;

            part.own();
        }
    }

    /**
     * Prints the local node's view of the cache topology (affinity partitions,
     * local partitions with ownership role and keys, and the full node map)
     * to standard output.
     *
     * @param dht Dht cache.
     * @param idx Cache index
     */
    static void printDhtTopology(GridDhtCache<Integer, String> dht, int idx) {
        final Affinity<Integer> aff = dht.affinity();

        Ignite ignite = dht.context().grid();
        ClusterNode locNode = ignite.cluster().localNode();

        GridDhtPartitionTopology top = dht.topology();

        System.out.println("\nTopology of cache #" + idx + " (" + locNode.id() + ")" + ":");
        System.out.println("----------------------------------");

        List<Integer> affParts = new LinkedList<>();

        GridDhtPartitionMap2 map = dht.topology().partitions(locNode.id());

        if (map != null)
            for (int p : map.keySet())
                affParts.add(p);

        Collections.sort(affParts);

        System.out.println("Affinity partitions: " + affParts + "\n");

        List<GridDhtLocalPartition> locals = new ArrayList<GridDhtLocalPartition>(top.localPartitions());

        Collections.sort(locals);

        for (final GridDhtLocalPartition part : locals) {
            Collection<ClusterNode> partNodes = aff.mapKeyToPrimaryAndBackups(part.id());

            String ownStr = !partNodes.contains(dht.context().localNode()) ? "NOT AN OWNER" :
                F.eqNodes(CU.primary(partNodes), locNode) ? "PRIMARY" : "BACKUP";

            // Keys whose affinity partition equals this local partition.
            Collection<Integer> keys = F.viewReadOnly(dht.keySet(), F.<Integer>identity(), new P1<Integer>() {
                @Override public boolean apply(Integer k) {
                    return aff.partition(k) == part.id();
                }
            });

            System.out.println("Local partition: [" + part + "], [owning=" + ownStr + ", keyCnt=" + keys.size() +
                ", keys=" + keys + "]");
        }

        System.out.println("\nNode map:");

        for (Map.Entry<UUID, GridDhtPartitionMap2> e : top.partitionMap(false).entrySet()) {
            List<Integer> list = new ArrayList<>(e.getValue().keySet());

            Collections.sort(list);

            System.out.println("[node=" + e.getKey() + ", parts=" + list + "]");
        }

        System.out.println("");
    }

    /**
     * Checks consistency of partitioned cache.
     * Any preload processes must be finished before this method call().
     *
     * @param dht Dht cache.
     * @param idx Cache index.
     * @param log Logger.
     */
    @SuppressWarnings("unchecked")
    static void checkDhtTopology(GridDhtCache<Integer, String> dht, int idx, IgniteLogger log) {
        assert dht != null;
        assert idx >= 0;
        assert log != null;

        log.info("Checking balanced state of cache #" + idx);

        Affinity<Object> aff = (Affinity)dht.affinity();

        Ignite ignite = dht.context().grid();
        ClusterNode locNode = ignite.cluster().localNode();

        GridDhtPartitionTopology top = dht.topology();

        // Expected partitions calculated with affinity function.
        // They should be in topology in OWNING state.
        Collection<Integer> affParts = new HashSet<>();

        GridDhtPartitionMap2 map = dht.topology().partitions(locNode.id());

        if (map != null)
            for (int p : map.keySet())
                affParts.add(p);

        // Nothing to check when this node holds no partitions.
        if (F.isEmpty(affParts))
            return;

        for (int p : affParts)
            assert top.localPartition(p, false) != null :
                "Partition does not exist in topology: [cache=" + idx + ", part=" + p + "]";

        for (GridDhtLocalPartition p : top.localPartitions()) {
            assert affParts.contains(p.id()) :
                "Invalid local partition: [cache=" + idx + ", part=" + p + ", node partitions=" + affParts + "]";

            assert p.state() == OWNING : "Invalid partition state [cache=" + idx + ", part=" + p + "]";

            Collection<ClusterNode> partNodes = aff.mapPartitionToPrimaryAndBackups(p.id());

            assert partNodes.contains(locNode) :
                "Partition affinity nodes does not contain local node: [cache=" + idx + "]";
        }

        // Check keys.
        for (GridCacheEntryEx e : dht.entries()) {
            GridDhtCacheEntry entry = (GridDhtCacheEntry)e;

            if (!affParts.contains(entry.partition()))
                log.warning("Partition of stored entry is obsolete for node: [cache=" + idx + ", entry=" + entry +
                    ", node partitions=" + affParts + "]");

            int p = aff.partition(entry.key());

            if (!affParts.contains(p))
                log.warning("Calculated entry partition is not in node partitions: [cache=" + idx + ", part=" + p +
                    ", entry=" + entry + ", node partitions=" + affParts + "]");
        }
    }
}
| |
/*
* Copyright (c) 2017 Rod Dunne
* All rights reserved
* This file is subject to the terms and conditions defined in file 'LICENSE', which is part of this source code package
*/
package com.github.roddunne.mandelbrot;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedDeque;
/**
* 2017-04-02 rdunne
*
* This view class manually draws a Mandelbrot set, using basic Canvas bitmap operations, at a fixed expected
* resolution of 1920x1080. This matches the default resolution of the Amazon FireStick 2 on my TV.
*
* The starting image will display the "usual" x-axis bulb and cardioid entire set. The colorization uses the
* simple escape time count approach and does not smooth the colors.
*
* The user can then zoom in, by a factor of two, by clicking the primary button of an attached BlueTooth mouse on the
* FireStick.
*
*/
public class MandelbrotView extends View
{
/******************************************************************************************************************/
// Magic numbers
// Number of colors in the palette for iteration to color mapping
private final static int paletteSize_ = 256;
// Assume only default TV resolution for now.
private final static int screenHeight_ = 1080;
private final static int screenWidth_ = 1920;
// Starting range of the real/imaginary axes on the screen.
private final static double startingMinimumRealRange_ = -4.0;
private final static double startingMaximumRealRange_ = 2.6;
private final static double startingMinimumImaginaryRange_ = -1.8;
private final static double startingMaximumImaginaryRange_ = 1.8;
/******************************************************************************************************************/
// Algorithm tuning
// Configurable variables for the algorithm iteration
private final static int maximumTestIterations_ = 512;
private final static double escapeValueSquared_ = 4.0;
/******************************************************************************************************************/
// Run-time algorithm data
private double minimumRealRange_ = startingMinimumRealRange_;
private double maximumRealRange_ = startingMaximumRealRange_;
// The current visible window on the imaginary axis; halved and re-centered on every user zoom.
private double minimumImaginaryRange_ = startingMinimumImaginaryRange_;
private double maximumImaginaryRange_ = startingMaximumImaginaryRange_;
/******************************************************************************************************************/
// Drawing data
// Color to represent being inside the set
private final Paint paintBlack_ = new Paint();
// Colors used to represent the iterations required to escape the set
private final List<Paint> paintArray_ = new ArrayList<Paint>(paletteSize_);
// The actual bitmap that gets drawn onto the screen.
// NOTE(review): reassigned from the job-runner thread in CreationJob.doPostJob() while onDraw reads it
// on the UI thread, and it is not volatile - confirm this visibility risk is acceptable.
private Bitmap renderBitmap_ = Bitmap.createBitmap(screenWidth_, screenHeight_, Bitmap.Config.ARGB_8888);
// The paint used to draw onto the onscreen canvas, passed to us in onDraw
private final Paint canvasPaint = new Paint(Paint.DITHER_FLAG);
// Bitmaps at various scales, used to improve user experience by simple progressive rendering and displaying (interlacing)
private final Bitmap bitmapFull_ = Bitmap.createBitmap(screenWidth_, screenHeight_, Bitmap.Config.ARGB_8888);
private final Bitmap bitmapHalf_ = Bitmap.createBitmap(screenWidth_ / 2, screenHeight_ / 2, Bitmap.Config.ARGB_8888);
private final Bitmap bitmapQuarter_ = Bitmap.createBitmap(screenWidth_ / 4, screenHeight_ / 4, Bitmap.Config.ARGB_8888);
private final Bitmap bitmapEighth_ = Bitmap.createBitmap(screenWidth_ / 8, screenHeight_ / 8, Bitmap.Config.ARGB_8888);
// A container of the iteration counts calculated for the current "zoom" level.
// -1 means "inside the set" once calculated (and also doubles as "not yet calculated" after a reset).
private final int[][] iterationArray_ = new int[screenWidth_][screenHeight_];
/******************************************************************************************************************/
// Job, threading and state run time data
// The current zoom level of the Mandelbrot set, incremented by the user mouse clicks.
// NOTE(review): written on the UI thread and read from worker threads without volatile/synchronization.
private int currentZoomLevel_ = 0;
// The latest zoom level that we have actually drawn on screen, used to "catch up" when the user clicks rapidly in succession.
private int actualRenderedZoomLevel_ = 0;
// A list of level creation and rendering jobs to perform in FIFO order
private final ConcurrentLinkedDeque<CreationJob> creationJobs_ = new ConcurrentLinkedDeque<CreationJob>();
/**
 * Constructor used when creating the view directly from code.
 *
 * @param context The application Context
 */
public MandelbrotView(Context context)
{
    super(context);
    initialize();
}
/**
 * Constructor used when the view is inflated from an XML layout.
 *
 * @param context The application Context
 * @param attrs Style information for custom views
 */
public MandelbrotView(Context context, AttributeSet attrs)
{
    super(context, attrs);
    initialize();
}
/**
 * Constructor used when the view is inflated from an XML layout with a default style.
 *
 * @param context The application Context
 * @param attrs Style information for custom views
 * @param defStyle Style information for custom views
 */
public MandelbrotView(Context context, AttributeSet attrs, int defStyle)
{
    super(context, attrs, defStyle);
    initialize();
}
/**
 * Common initializer shared by all constructors.
 *
 * Builds the escape-count color palette, invalidates the iteration grid, starts the
 * background job-runner thread, and queues the jobs for the first (unzoomed) level.
 * The order of the last two calls matters: the runner must be spinning before jobs arrive.
 */
private void initialize()
{
    // TODO - Use this to seed the number of worker threads in a ThreadPoolExecutor
    // NOTE(review): currently unused.
    int numberOfProcessors = Runtime.getRuntime().availableProcessors();
    paintBlack_.setColor(Color.BLACK);
    // Even though not hard-coded, assumes palette is 256 colors.
    // NOTE(review): Color.HSVToColor documents saturation/value in [0..1]; the 255s here rely on
    // out-of-range clamping - confirm the resulting palette is the intended one.
    for (int n = 0; n < paletteSize_; ++n)
    {
        Paint paint = new Paint();
        paint.setColor(Color.HSVToColor(new float[]{n % 256, 255, 255}));
        paintArray_.add(paint);
    }
    // Make the grid something nonsensical, so the interpolation algorithm does not match erroneously on first run.
    for (int gridY = 0; gridY < screenHeight_; ++gridY)
    {
        for (int gridX = 0; gridX < screenWidth_; ++gridX)
        {
            iterationArray_[gridX][gridY] = -1;
        }
    }
    // Start "task manager" busy-wait
    runJobs();
    // Create the first level
    updateZoomLevel();
}
/**
 * Uses a busy-wait synchronized queue of job objects to execute off the UI thread in order to
 * calculate each level of zoom into the Mandelbrot set, and then render that new level into various
 * scaled off screen bitmaps. These bitmaps are then copied on screen during on draw.
 *
 * NOTE(review): the loop spins a full core while the queue is empty and the thread is never stopped
 * when the view goes away - both effectively covered by the TODO below. The reads of
 * currentZoomLevel_ here cross threads without synchronization; confirm that stale reads
 * (which only cause redundant work or a skipped catch-up render) are acceptable.
 *
 * TODO - switch all this to use Androids built in ThreadPoolExecutor with FutureTasks for callbacks.
 * The FireStick has 4 cores. One last optimization would be to use more of them and to allow the OS
 * to manage the blocking waits.
 */
private void runJobs()
{
    new Thread(new Runnable()
    {
        public void run()
        {
            // Enter endless busy-wait
            while (true)
            {
                // If we have a job, pull it and run it.
                // Safe with this single consumer: nothing else removes between isEmpty() and removeFirst().
                if ( ! creationJobs_.isEmpty())
                {
                    CreationJob nextJob = creationJobs_.removeFirst();
                    // Only calculate if the user has not clicked the mouse since job creation
                    if (nextJob.zoomLevelAtJobCreation_ >= currentZoomLevel_)
                    {
                        nextJob.doLongJob();
                        // Only render if the user has not clicked the mouse since job creation
                        if (nextJob.zoomLevelAtJobCreation_ >= currentZoomLevel_)
                        {
                            actualRenderedZoomLevel_ = nextJob.zoomLevelAtJobCreation_;
                            nextJob.doPostJob();
                        }
                        // Or render if we haven't rendered this level
                        else if (nextJob.zoomLevelAtJobCreation_ > actualRenderedZoomLevel_)
                        {
                            actualRenderedZoomLevel_ = nextJob.zoomLevelAtJobCreation_;
                            nextJob.doPostJob();
                        }
                    }
                }
            }
        }
    }).start();
}
/**
 * Advances the display toward the zoom level currently held in currentZoomLevel_.
 *
 * Invalidates every cached iteration count, then queues one creation/render job per scale.
 * The queue order (eighth, quarter, half, full) is deliberate: the job runner consumes the
 * deque FIFO, so the coarsest image reaches the screen first and each finer image
 * progressively replaces it.
 *
 * TODO - At first this code was all procedural with many lines of copy/paste
 * Then it was refactored into the table driven code (using the arrays of pairs)
 * Then the job sub-classes were added to wrap those.
 * But the class hierarchy is unnecessary, a single level could be parameterized sufficiently
 * to reduce all this code, without it being unreadable. Even taking into account the special
 * behaviour of the full scale calculation i.e. interpolating if possible.
 */
private void updateZoomLevel()
{
    // Wipe the cached iteration counts column by column, so the interpolation pass
    // cannot match stale values left over from the previous level.
    for (int column = 0; column < screenWidth_; ++column)
    {
        java.util.Arrays.fill(iterationArray_[column], -1);
    }
    // Queue coarsest-first; this FIFO ordering is what drives the progressive rendering.
    creationJobs_.addLast(new EightCreationJob(currentZoomLevel_));
    creationJobs_.addLast(new QuarterCreationJob(currentZoomLevel_));
    creationJobs_.addLast(new HalfScaleCreationJob(currentZoomLevel_));
    creationJobs_.addLast(new FullScaleCreationJob(currentZoomLevel_));
}
/**
 * Inner helper base class to calculate and render a single bitmap for a new level of the Mandelbrot set.
 *
 * Sub-classes supply the calculation (doLongJob); the rendering step (doPostJob) is shared.
 *
 * NOTE these classes make use of the outer class bitmap data.
 */
private abstract class CreationJob
{
    // The zoom level at the time the job was created. may change before execution.
    protected final int zoomLevelAtJobCreation_;
    // The bitmap to draw into
    protected Bitmap renderBitmapForJob_;
    // The progressive rendering/"interlace"/scaling factor to draw at
    protected int factor_;
    /**
     * Constructor
     * @param currentZoomLevel The zoom level at the time the job was created.
     */
    CreationJob(int currentZoomLevel)
    {
        zoomLevelAtJobCreation_ = currentZoomLevel;
    }
    // Defer calculation to sub-classes
    abstract public void doLongJob();
    /**
     * Scales this job's off-screen bitmap up to full screen size and requests a redraw.
     *
     * Runs on the job-runner thread; only the invalidate() call is posted to the UI thread.
     * NOTE(review): renderBitmap_ is reassigned here off the UI thread while onDraw reads it -
     * relies on reference assignment being atomic; confirm this is acceptable.
     */
    public void doPostJob()
    {
        renderBitmap_ = Bitmap.createScaledBitmap(renderBitmapForJob_, screenWidth_, screenHeight_, true);
        // post stuff to UI thread on outer class.
        post(new Runnable()
        {
            public void run()
            {
                // Simply request a redraw
                invalidate();
            }
        });
    }
}
/**
 * Used to calculate the full scale iteration count for a level of the Mandelbrot set
 *
 * NOTE - assumes the other levels have already been calculated as it skips those array coordinates
 * NOTE - will try to interpolate once it has valid values for a grid location's four neighbours
 */
private class FullScaleCreationJob extends CreationJob
{
    // Tables of starting points for calculating subsets of the Mandelbrot set.
    // The first table covers cells no coarser pass touches; the second covers the cells whose
    // four neighbours are known by then, which is what makes interpolation possible.
    private final int[] fullScaleCreationStartingPairs_ = { 1,1, 3,1, 5,1, 7,1, 1,3, 3,3, 5,3, 7,3, 1,5, 3,5, 5,5, 7,5, 1,7, 3,7, 5,7, 7,7 };
    private final int[] fullScaleInterpolatedCreationStartingPairs_ = { 1,0, 3,0, 5,0, 7,0, 0,1, 2,1, 4,1, 6,1, 1,2, 3,2, 5,2, 7,2, 0,3, 2,3, 4,3, 6,3,
    1,4, 3,4, 5,4, 7,4, 0,5, 2,5, 4,5, 6,5, 1,6, 3,6, 5,6, 7,6, 0,7, 2,7, 4,7, 6,7};
    /**
     * Constructor
     *
     * @param currentZoomLevel The zoom level when this job was created.
     */
    FullScaleCreationJob(int currentZoomLevel)
    {
        super(currentZoomLevel);
        renderBitmapForJob_ = bitmapFull_;
        factor_ = 1;
    }
    /**
     * Call the Mandelbrot subset creation function repeatedly until the iteration count array has been
     * completely calculated for the full scale at this zoom level.
     *
     * NOTE these calculations can be interrupted by the user zoom.
     *
     * Intended to be called from a background thread
     */
    @Override
    public void doLongJob()
    {
        // First: cells untouched by the coarser passes, calculated outright.
        for (int index = 0; index < fullScaleCreationStartingPairs_.length && (currentZoomLevel_ == zoomLevelAtJobCreation_); index += 2)
        {
            createMandelbrotSubset(fullScaleCreationStartingPairs_[index], fullScaleCreationStartingPairs_[index + 1], zoomLevelAtJobCreation_, true, false);
        }
        // Second: remaining cells, with all four neighbours now available.
        for (int index = 0; index < fullScaleInterpolatedCreationStartingPairs_.length && (currentZoomLevel_ == zoomLevelAtJobCreation_); index += 2)
        {
            // Permit interpolation.
            createMandelbrotSubset(fullScaleInterpolatedCreationStartingPairs_[index], fullScaleInterpolatedCreationStartingPairs_[index + 1], zoomLevelAtJobCreation_, true, true);
        }
        // Render unless the user has already zoomed past this level.
        if (zoomLevelAtJobCreation_ >= currentZoomLevel_)
        {
            renderLevelByStep(renderBitmapForJob_, factor_);
        }
    }
}
/**
 * Used to calculate the half scale iteration count for a level of the Mandelbrot set
 *
 * NOTE - assumes the lower levels have already been calculated as it skips those array coordinates
 */
private class HalfScaleCreationJob extends CreationJob
{
    // Tables of starting points for calculating subsets of the Mandelbrot set
    private final int[] halfScaleCreationStartingPairs_ = { 2,0, 6,0, 0,2, 2,2, 4,2, 6,2, 2,4, 6,4, 0,6, 2,6, 4,6, 6,6 };
    /**
     * Constructor
     *
     * @param currentZoomLevel The zoom level when this job was created.
     */
    HalfScaleCreationJob(int currentZoomLevel)
    {
        super(currentZoomLevel);
        renderBitmapForJob_ = bitmapHalf_;
        factor_ = 2;
    }
    /**
     * Call the Mandelbrot subset creation function repeatedly until the iteration count array has been
     * completely calculated for the half scale at this zoom level.
     *
     * NOTE these calculations can be interrupted by the user zoom.
     *
     * Intended to be called from a background thread
     */
    @Override
    public void doLongJob()
    {
        for (int index = 0; index < halfScaleCreationStartingPairs_.length && (currentZoomLevel_ == zoomLevelAtJobCreation_); index += 2)
        {
            createMandelbrotSubset(halfScaleCreationStartingPairs_[index], halfScaleCreationStartingPairs_[index + 1], zoomLevelAtJobCreation_, true, false);
        }
        // Render unless the user has already zoomed past this level.
        if (zoomLevelAtJobCreation_ >= currentZoomLevel_)
        {
            renderLevelByStep(renderBitmapForJob_, factor_);
        }
    }
}
/**
 * Used to calculate the quarter scale iteration count for a level of the Mandelbrot set
 *
 * NOTE - assumes the eighth level has already been calculated as it skips those array coordinates
 */
private class QuarterCreationJob extends CreationJob
{
    // Tables of starting points for calculating subsets of the Mandelbrot set
    private final int[] level4CreationStartingPairs_ = { 4,0, 4,4, 0,4};
    /**
     * Constructor
     *
     * @param currentZoomLevel The zoom level when this job was created.
     */
    QuarterCreationJob(int currentZoomLevel)
    {
        super(currentZoomLevel);
        renderBitmapForJob_ = bitmapQuarter_;
        factor_ = 4;
    }
    /**
     * Call the Mandelbrot subset creation function repeatedly until the iteration count array has been
     * completely calculated for the quarter scale at this zoom level.
     *
     * NOTE these calculations can be interrupted by the user zoom.
     *
     * Intended to be called from a background thread
     */
    @Override
    public void doLongJob()
    {
        for (int index = 0; index < level4CreationStartingPairs_.length && (currentZoomLevel_ == zoomLevelAtJobCreation_); index += 2)
        {
            createMandelbrotSubset(level4CreationStartingPairs_[index], level4CreationStartingPairs_[index + 1], zoomLevelAtJobCreation_, true, false);
        }
        // Render unless the user has already zoomed past this level.
        if (zoomLevelAtJobCreation_ >= currentZoomLevel_)
        {
            renderLevelByStep(renderBitmapForJob_, factor_);
        }
    }
}
/**
 * Used to calculate the eighth scale iteration count for a level of the Mandelbrot set
 */
private class EightCreationJob extends CreationJob
{
    // Tables of starting points for calculating a subset of the Mandelbrot set
    private final int[] level8CreationStartingPairs_ = { 0,0 };
    /**
     * Constructor
     *
     * @param currentZoomLevel The zoom level when this job was created.
     */
    EightCreationJob(int currentZoomLevel)
    {
        super(currentZoomLevel);
        renderBitmapForJob_ = bitmapEighth_;
        factor_ = 8;
    }
    /**
     * Call the Mandelbrot subset creation function repeatedly until the iteration count array has been
     * completely calculated for the eighth scale at this zoom level.
     *
     * NOTE these calculations CANNOT be interrupted by the user zoom - this guarantees that at least
     * one (coarse) image is always produced per level.
     *
     * Intended to be called from a background thread
     */
    @Override
    public void doLongJob()
    {
        for (int index = 0; index < level8CreationStartingPairs_.length; index +=2)
        {
            // Always calculate the eighth scale, once you have started, don't check for user interruption
            createMandelbrotSubset(level8CreationStartingPairs_[index], level8CreationStartingPairs_[index + 1], zoomLevelAtJobCreation_, false, false);
        }
        // Always render the eighth scale, once you have started, don't check for user interruption
        renderLevelByStep(renderBitmapForJob_, factor_);
    }
}
/**
 * Used to create a portion of the Mandelbrot set for the current zoom level.
 *
 * It is expected to be called repeatedly to calculate all the values for a single level.
 *
 * It skips across cells in the iteration count array, calculating whether they are in or out
 * of the Mandelbrot set. And then setting their iteration count value to reflect this.
 * These iteration counts can then be used to create bitmap image.
 *
 * The purpose of splitting the calculation into multiple steps is
 * 1) to allow us to display a one eighth, one quarter and one half version of each zoom level to the user
 * as soon as the values have been calculated.
 * 2) to reuse the values from the eighth, quarter, and half levels in the final full scale version. i.e. to
 * not recalculate each scale individually.
 *
 * NOTE with a naive full calculation algorithm, the time from a user zoom to actual display on the Android TV
 * API 25 emulator, with two vcores on a 3GHz Intel i5, with an escape iteration maximum of 1024 and an escape value of 64.0
 * was about 15 seconds.
 * On an actual FireStick 2, it was ~700 seconds! With all the optimizations, progressive rendering, interpolation,
 * periodicity, this has been reduced to a still ridiculous ~100 seconds. The current hard coded values of 512 and 2.0
 * result in a rendering of the final full scale image in about 30 seconds on the FireStick (and 2 seconds on emulator).
 *
 * Interpolation optimization. If a pixel is surrounded by the same color on all four sides, we can fill it with the
 * same color. We did measure that this was a noticeable performance increase. We did not measure the error rate though.
 *
 * NOTE at present these errors are probably OK as we do not reuse the previous level to seed any of the values in
 * the next zoom level. However when we introduce that optimization we will need to actually calculate any interpolated
 * points.
 *
 * The extension of this optimization is the area filling optimization that attempts to subset
 * the image into rectangular areas and if all four sides have the same value, they fill the rectangle with that value.
 *
 * NOTE Interpolating is only possible at the finest level. It also depends on the ordering
 * of the previous calls to this function, to ensure that the cells on all four sides already have values
 * assigned to them. i.e The order of pairs in the table driven arrays for starting coordinates matters.
 *
 * NOTE The choice of "escape" value, typically 2.0, and the maximum number of iterations to
 * try before assuming the point is in the Mandelbrot set both have a determination on the "fineness" of the
 * image. But, increasing them increases both fineness and runtime.
 *
 * Periodicity optimization. If a cell is in the set, the test will iterate to the maximum without the values escaping.
 * But often the values enter a repeating cycle long before they reach the maximum iteration count. Testing for these
 * cycles does actually save iterations.
 *
 * NOTE The check for periodicity usually first tests the immediate neighbour to see whether to bother with
 * a periodicity test at all. This optimization is intended to avoid unnecessary calculations.
 * In our case, as we construct the set in jumps of eight pixels, this test would be harder. At present we simply use
 * the last pixel created, which is off by seven. This will not be as optimal. But runtime metrics showed that we
 * still avoid many calculations.
 *
 * TODO rethink to remove the state parameters and the overall function complexity
 *
 * @param startX The starting x offset into the iteration count array
 * @param startY The starting y offset into the iteration count array
 * @param startingZoomLevel The zoom level at the time when the calculation job was created.
 * @param checkForAbort Whether we check for user interruption to stop calculating.
 * @param useInterpolation Whether a cell whose four neighbours share one value may copy that value instead of calculating.
 */
private void createMandelbrotSubset(final int startX, final int startY, final int startingZoomLevel, final boolean checkForAbort, final boolean useInterpolation)
{
    // The grid stride between calculated cells; the scale tables are all expressed modulo 8.
    final int step = 8;
    final double realPixelIncrement = (maximumRealRange_ - minimumRealRange_) / (screenWidth_ - 1);
    final double imaginaryPixelIncrement = (maximumImaginaryRange_ - minimumImaginaryRange_) / (screenHeight_ - 1);
    // Used to allow early aborting of the algorithm, when the calculations are no longer needed due to user zooming.
    // NOTE only tested in the outer loop condition, so the current row always finishes.
    boolean userInterruption = false;
    for (int gridY = startY; gridY < screenHeight_ && ! userInterruption; gridY += step)
    {
        // The value on the imaginary axis for this pixel
        final double currentImaginary = maximumImaginaryRange_ - gridY * imaginaryPixelIncrement;
        // Used to optimize periodicity tests.
        boolean previousPixelWasInsideTheSet = false;
        for (int gridX = startX; gridX < screenWidth_; gridX += step)
        {
            // Possible value interpolation for this cell in the iteration count array
            boolean skipDueToInterpolating = false;
            // At the finest level, i.e. when all the other iteration count calculations are complete, it is an optimization to
            // interpolate the values for pixels where all four neighbours are the same color
            if (useInterpolation)
            {
                // If not the top or bottom row
                if (gridY > 0 && gridY < screenHeight_ - 1)
                {
                    // If not the left or right column
                    if (gridX > 0 && gridX < screenWidth_ - 1)
                    {
                        final int left = iterationArray_[gridX - 1][gridY];
                        final int right = iterationArray_[gridX + 1][gridY];
                        if (left == right)
                        {
                            final int above = iterationArray_[gridX][gridY - 1];
                            if (above == left)
                            {
                                final int below = iterationArray_[gridX][gridY + 1];
                                if (below == left)
                                {
                                    // If cells on all four sides are the same, assume this one is the same
                                    iterationArray_[gridX][gridY] = left;
                                    skipDueToInterpolating = true;
                                }
                            }
                        }
                    }
                }
            }
            if (!skipDueToInterpolating)
            {
                // The value on the real axis for this pixel
                final double currentReal = minimumRealRange_ + gridX * realPixelIncrement;
                // Starting values for the "Mandelbrot set member" test algorithm
                double realZ = currentReal;
                double imaginaryZ = currentImaginary;
                boolean insideTheSet = true;
                int iterations = 0;
                // Starting values for the periodicity tests for this point
                double realPeriodicityTestValue = 0.0;
                double imaginaryPeriodicityTestValue = 0.0;
                int currentPeriodicityTestCount = 0;
                int maximumAttemptsToFindRepeats = 1;
                // For this pixel/point, iterate over the mapped real and imaginary values, until either the
                // values "escape" or the pixel is assumed to be in the Mandelbrot set.
                for (iterations = 0; iterations < maximumTestIterations_; ++iterations)
                {
                    // Use minimum multiplications per test iteration
                    final double realZSquared = realZ * realZ;
                    final double imaginaryZSquared = imaginaryZ * imaginaryZ;
                    if (realZSquared + imaginaryZSquared > escapeValueSquared_)
                    {
                        insideTheSet = false;
                        break;
                    }
                    // Calculate next test values: z = z^2 + c done component-wise.
                    // NOTE imaginaryZ must be updated before realZ, it reads the old realZ.
                    imaginaryZ = 2 * realZ * imaginaryZ + currentImaginary;
                    realZ = realZSquared - imaginaryZSquared + currentReal;
                    // Only bother to test a pixel/cell for periodicity if the previous one was in the set.
                    if (previousPixelWasInsideTheSet)
                    {
                        // NOTE comparison operator on floating point values actually works adequately here.
                        if (realZ == realPeriodicityTestValue)
                        {
                            if (imaginaryZ == imaginaryPeriodicityTestValue)
                            {
                                // Cycle found: treat the point as inside the set without iterating to the limit.
                                iterations = maximumTestIterations_;
                                break;
                            }
                        }
                        currentPeriodicityTestCount++;
                        // If we believe that this pixel is likely be in the set, and we haven't found values that repeat yet,
                        // then double the number of steps before resetting the test values (Brent-style period doubling).
                        if (currentPeriodicityTestCount > maximumAttemptsToFindRepeats)
                        {
                            currentPeriodicityTestCount = 0;
                            maximumAttemptsToFindRepeats *= 2;
                            realPeriodicityTestValue = realZ;
                            imaginaryPeriodicityTestValue = imaginaryZ;
                        }
                    }
                }
                // Set the iteration count array value for this point.
                if (insideTheSet)
                {
                    previousPixelWasInsideTheSet = true;
                    iterationArray_[gridX][gridY] = -1;
                }
                else
                {
                    previousPixelWasInsideTheSet = false;
                    // Clamp defensively; renderLevelByStep clamps again before palette lookup.
                    iterations = iterations < maximumTestIterations_ ? iterations : maximumTestIterations_;
                    iterationArray_[gridX][gridY] = iterations;
                }
            }
            // If the user has "clicked the mouse" the current zoom will have changed, maybe we no longer
            // need to keep calculating.
            if (checkForAbort && (currentZoomLevel_ > startingZoomLevel))
            {
                userInterruption = true;
            }
        }
    }
}
/**
 * Renders one zoom level into a bitmap from the current iteration count array.
 *
 * The step equals the progressive-rendering factor: a step of eight samples every
 * eighth grid cell, producing a one-eighth-size bitmap that the caller later scales
 * up to screen size.
 *
 * Cells inside the set (count of -1) are drawn black; escaped cells are colored by
 * mapping their iteration count onto the palette.
 *
 * @param offscreenBitmap The bitmap to draw into, needs to match the step size.
 * @param step The step to use when iterating across and down the iteration count array.
 */
private void renderLevelByStep(Bitmap offscreenBitmap, int step)
{
    final Canvas targetCanvas = new Canvas(offscreenBitmap);
    final int width = offscreenBitmap.getWidth();
    final int height = offscreenBitmap.getHeight();
    for (int pixelY = 0; pixelY < height; ++pixelY)
    {
        for (int pixelX = 0; pixelX < width; ++pixelX)
        {
            final int iterationCount = iterationArray_[pixelX * step][pixelY * step];
            final Paint pointPaint;
            if (iterationCount == -1)
            {
                // Inside the set
                pointPaint = paintBlack_;
            }
            else
            {
                // TODO - ensure the iteration count array never stores more than maximum expected iterations.
                final int clampedCount = Math.min(iterationCount, maximumTestIterations_);
                pointPaint = paintArray_.get(clampedCount % paletteSize_);
            }
            targetCanvas.drawPoint(pixelX, pixelY, pointPaint);
        }
    }
}
/**
 * Called from the framework to redraw our view.
 *
 * All calculation and rendering happens off screen; here we only copy the most recently
 * completed (already screen-sized) bitmap onto the canvas.
 *
 * @param canvas The canvas we are to draw into
 */
@Override
protected void onDraw(Canvas canvas)
{
    // Simply copy over the latest bitmap that we have rendered off screen.
    canvas.drawBitmap(renderBitmap_, 0, 0, canvasPaint);
}
/**
 * Called when the user performs a mouse/touch action inside the Mandelbrot view.
 *
 * We respond to a "click" (ACTION_DOWN) by "zooming" the Mandelbrot set by a factor of two,
 * re-centered on the mouse/touch point, and queueing the jobs that calculate and render the
 * new level. All other actions are ignored (but still reported as handled).
 *
 * @param e The event that occurred
 * @return True if we handled the event.
 */
@Override
public boolean onTouchEvent(MotionEvent e)
{
    final float touchX = e.getX();
    final float touchY = e.getY();
    switch (e.getAction())
    {
        case MotionEvent.ACTION_DOWN:
            // Find the real and imaginary value at this pixel
            double realPixelIncrement = (maximumRealRange_ - minimumRealRange_) / (screenWidth_ - 1);
            double imaginaryPixelIncrement = (maximumImaginaryRange_ - minimumImaginaryRange_) / (screenHeight_ - 1);
            double currentImaginary = maximumImaginaryRange_ - touchY * imaginaryPixelIncrement;
            double currentReal = minimumRealRange_ + touchX * realPixelIncrement;
            // Halve the visible real and imaginary ranges, centered on the touched point
            double previousRealRange = maximumRealRange_ - minimumRealRange_;
            double previousImaginaryRange = maximumImaginaryRange_ - minimumImaginaryRange_;
            double newRealRange = previousRealRange / 2.0;
            double newImaginaryRange = previousImaginaryRange / 2.0;
            minimumRealRange_ = currentReal - newRealRange / 2.0;
            maximumRealRange_ = currentReal + newRealRange / 2.0;
            minimumImaginaryRange_ = currentImaginary - newImaginaryRange / 2.0;
            maximumImaginaryRange_ = currentImaginary + newImaginaryRange / 2.0;
            // TODO reuse portions of the previous iteration count values.
            currentZoomLevel_++;
            updateZoomLevel();
            break;
    }
    return true;
}
}
| |
package redrun.model.gameobject;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.vecmath.Quat4f;
import org.lwjgl.util.Timer;
import org.lwjgl.util.vector.Vector3f;
import org.newdawn.slick.opengl.Texture;
import redrun.model.physics.PhysicsBody;
import redrun.model.toolkit.Tools;
import static org.lwjgl.opengl.GL11.*;
/**
 * This abstract class represents a game object. Every object in the 3D scene
 * will extend this class.
 *
 * @author Troy Squillaci, Jake Nichol
 * @version 1.0
 * @since 2014-11-07
 */
public abstract class GameObject
{
  // Identification related fields...
  /** The ID of the game object. */
  public final int id;
  /** The ID counter. NOTE: not thread-safe; construct game objects from a single thread. */
  private static int counter = 0;
  /**
   * All game objects in existence, keyed by ID.
   * NOTE: entries are never removed, so every game object lives for the program's lifetime.
   */
  private static HashMap<Integer, GameObject> gameObjects = new HashMap<Integer, GameObject>();
  // OpenGL related fields...
  /** A timer associated with this game object. */
  protected Timer timer = null;
  /** A texture associated with this game object; null when the object is untextured. */
  protected Texture texture = null;
  /** The display list for the game object. */
  protected int displayListId = -1;
  // Physics related fields...
  /**
   * The variable that holds all of the information needed for the physics
   * calculations.
   */
  protected PhysicsBody body = null;
  /**
   * Creates a new game object at the specified position and registers it in the
   * global game-object table.
   *
   * @param x the x position of the game object
   * @param y the y position of the game object
   * @param z the z position of the game object
   * @param textureName the name of the texture to apply to the game object; may be null for no texture
   */
  public GameObject(float x, float y, float z, String textureName)
  {
    body = new PhysicsBody(0, new Quat4f(0, 0, 0, 1), new Vector3f(x, y, z), null, 0);
    if (textureName != null)
    {
      texture = Tools.loadTexture(textureName, "png");
    }
    // The timer starts paused; sub-classes resume it when the object becomes active.
    timer = new Timer();
    timer.pause();
    id = counter++;
    // A duplicate ID should be impossible while the counter increases monotonically;
    // log loudly (instead of throwing and immediately catching) if it ever happens.
    if (gameObjects.containsKey(id))
    {
      Logger.getLogger(GameObject.class.getName()).log(Level.SEVERE,
          "Duplicate game object ID: " + id,
          new IllegalArgumentException("Duplicate game object ID: " + id));
    }
    gameObjects.put(id, this);
  }
  // OpenGL related methods...
  /**
   * Draws the game object to the OpenGL scene by replaying its display list under the
   * physics body's transform, then updates the object's state.
   */
  public void draw()
  {
    if (texture != null)
    {
      glPushMatrix();
      {
        glEnable(GL_TEXTURE_2D);
        texture.bind();
        glMultMatrix(body.getOpenGLTransformMatrix());
        glCallList(displayListId);
        glDisable(GL_TEXTURE_2D);
      }
      glPopMatrix();
    }
    else
    {
      glPushMatrix();
      {
        glMultMatrix(body.getOpenGLTransformMatrix());
        glCallList(displayListId);
      }
      glPopMatrix();
    }
    update();
  }
  /**
   * Interacts with the game object.
   */
  public abstract void interact();
  /**
   * Updates the game object to reflect the state of the timer.
   */
  public abstract void update();
  /**
   * Reset the game object.
   */
  public abstract void reset();
  // Getter methods...
  /**
   * Gets an active game object with the specified ID. Returns null if no such
   * game object is associated with the specified ID.
   *
   * @param id the ID of the game object
   * @return the game object with the specified ID
   */
  public static GameObject getGameObject(int id)
  {
    return gameObjects.get(id);
  }
  /**
   * Prints all active game objects to standard output.
   */
  public static void printAll()
  {
    for (GameObject gameObject : GameObject.gameObjects.values())
    {
      System.out.println(gameObject);
    }
  }
  /**
   * Gets the X position of the game object.
   *
   * @return the X position of the game object
   */
  public float getX()
  {
    return body.getX();
  }
  /**
   * Gets the Y position of the game object.
   *
   * @return the Y position of the game object
   */
  public float getY()
  {
    return body.getY();
  }
  /**
   * Gets the Z position of the game object.
   *
   * @return the Z position of the game object
   */
  public float getZ()
  {
    return body.getZ();
  }
  /**
   * Gets the physics rigid body.
   *
   * @return the physics rigid body
   */
  public PhysicsBody getBody()
  {
    return body;
  }
  /**
   * Indicates if the game object is active, i.e. its timer has accumulated time.
   *
   * @return true if the game object is active
   */
  public boolean isActive()
  {
    return timer.getTime() > 0;
  }
  // Overridden methods from Object...
  /**
   * Two game objects are equal when they share the same ID. Null-safe and
   * type-safe (the original implementation threw on null or foreign types).
   */
  @Override
  public boolean equals(Object obj)
  {
    if (this == obj) return true;
    if (!(obj instanceof GameObject)) return false;
    return id == ((GameObject) obj).id;
  }
  @Override
  public int hashCode()
  {
    return id;
  }
  @Override
  public String toString()
  {
    // @formatter:off
    return "=== Game Object ===\n" + "ID: " + id + "\n" + "Position: (" + body.getX() + ", " + body.getY() + ", "
        + body.getZ() + ")\n" + "Physics: " + body.toString() + "\n" + "Type: " + this.getClass().getName() + "\n"
        + "===================\n";
    // @formatter:on
  }
}
| |
/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.mllp;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.ExtendedPropertyConfigurerGetter;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.spi.ConfigurerStrategy;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;
/**
* Generated by camel build tools - do NOT edit this file!
*/
@SuppressWarnings("unchecked")
public class MllpComponentConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
/**
 * Lazily creates the component's MllpConfiguration so that option setters always
 * have a configuration instance to write into.
 *
 * @param target the component being configured
 * @return the existing configuration, or a freshly created one now attached to the component
 */
private org.apache.camel.component.mllp.MllpConfiguration getOrCreateConfiguration(MllpComponent target) {
    if (target.getConfiguration() == null) {
        target.setConfiguration(new org.apache.camel.component.mllp.MllpConfiguration());
    }
    return target.getConfiguration();
}
/**
 * Applies a single named option to the MllpComponent or to its (lazily created)
 * configuration. Each option is matched by its camelCase name and, when ignoreCase
 * is set, by its all-lowercase form.
 *
 * @return true when the option was recognised and applied, false otherwise
 */
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
    MllpComponent target = (MllpComponent) obj;
    switch (ignoreCase ? name.toLowerCase() : name) {
    case "accepttimeout":
    case "acceptTimeout": getOrCreateConfiguration(target).setAcceptTimeout(property(camelContext, int.class, value)); return true;
    case "autoack":
    case "autoAck": getOrCreateConfiguration(target).setAutoAck(property(camelContext, boolean.class, value)); return true;
    case "autowiredenabled":
    case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
    case "backlog": getOrCreateConfiguration(target).setBacklog(property(camelContext, java.lang.Integer.class, value)); return true;
    case "bindretryinterval":
    case "bindRetryInterval": getOrCreateConfiguration(target).setBindRetryInterval(property(camelContext, int.class, value)); return true;
    case "bindtimeout":
    case "bindTimeout": getOrCreateConfiguration(target).setBindTimeout(property(camelContext, int.class, value)); return true;
    case "bridgeerrorhandler":
    case "bridgeErrorHandler": getOrCreateConfiguration(target).setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
    case "charsetname":
    case "charsetName": getOrCreateConfiguration(target).setCharsetName(property(camelContext, java.lang.String.class, value)); return true;
    case "configuration": target.setConfiguration(property(camelContext, org.apache.camel.component.mllp.MllpConfiguration.class, value)); return true;
    case "connecttimeout":
    case "connectTimeout": getOrCreateConfiguration(target).setConnectTimeout(property(camelContext, int.class, value)); return true;
    case "defaultcharset":
    case "defaultCharset": target.setDefaultCharset(property(camelContext, java.lang.String.class, value)); return true;
    case "exchangepattern":
    case "exchangePattern": getOrCreateConfiguration(target).setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
    case "hl7headers":
    case "hl7Headers": getOrCreateConfiguration(target).setHl7Headers(property(camelContext, boolean.class, value)); return true;
    case "idletimeout":
    case "idleTimeout": getOrCreateConfiguration(target).setIdleTimeout(property(camelContext, java.lang.Integer.class, value)); return true;
    case "idletimeoutstrategy":
    case "idleTimeoutStrategy": getOrCreateConfiguration(target).setIdleTimeoutStrategy(property(camelContext, org.apache.camel.component.mllp.MllpIdleTimeoutStrategy.class, value)); return true;
    case "keepalive":
    case "keepAlive": getOrCreateConfiguration(target).setKeepAlive(property(camelContext, java.lang.Boolean.class, value)); return true;
    case "lazystartproducer":
    case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
    case "lenientbind":
    case "lenientBind": getOrCreateConfiguration(target).setLenientBind(property(camelContext, boolean.class, value)); return true;
    case "logphi":
    case "logPhi": target.setLogPhi(property(camelContext, java.lang.Boolean.class, value)); return true;
    case "logphimaxbytes":
    case "logPhiMaxBytes": target.setLogPhiMaxBytes(property(camelContext, java.lang.Integer.class, value)); return true;
    case "maxconcurrentconsumers":
    case "maxConcurrentConsumers": getOrCreateConfiguration(target).setMaxConcurrentConsumers(property(camelContext, int.class, value)); return true;
    case "readtimeout":
    case "readTimeout": getOrCreateConfiguration(target).setReadTimeout(property(camelContext, int.class, value)); return true;
    case "receivebuffersize":
    case "receiveBufferSize": getOrCreateConfiguration(target).setReceiveBufferSize(property(camelContext, java.lang.Integer.class, value)); return true;
    case "receivetimeout":
    case "receiveTimeout": getOrCreateConfiguration(target).setReceiveTimeout(property(camelContext, int.class, value)); return true;
    case "requireendofdata":
    case "requireEndOfData": getOrCreateConfiguration(target).setRequireEndOfData(property(camelContext, boolean.class, value)); return true;
    case "reuseaddress":
    case "reuseAddress": getOrCreateConfiguration(target).setReuseAddress(property(camelContext, java.lang.Boolean.class, value)); return true;
    case "sendbuffersize":
    case "sendBufferSize": getOrCreateConfiguration(target).setSendBufferSize(property(camelContext, java.lang.Integer.class, value)); return true;
    case "stringpayload":
    case "stringPayload": getOrCreateConfiguration(target).setStringPayload(property(camelContext, boolean.class, value)); return true;
    case "tcpnodelay":
    case "tcpNoDelay": getOrCreateConfiguration(target).setTcpNoDelay(property(camelContext, java.lang.Boolean.class, value)); return true;
    case "validatepayload":
    case "validatePayload": getOrCreateConfiguration(target).setValidatePayload(property(camelContext, boolean.class, value)); return true;
    default: return false;
    }
}
    /**
     * Returns the declared Java type of the named MLLP component option, or
     * null when the option name is unknown.  Each option is matched either by
     * its exact camelCase name or, when {@code ignoreCase} is true, by its
     * all-lowercase form (hence the paired case labels).
     * NOTE: generated configurer code -- keep edits mechanical.
     */
    @Override
    public Class<?> getOptionType(String name, boolean ignoreCase) {
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "accepttimeout":
        case "acceptTimeout": return int.class;
        case "autoack":
        case "autoAck": return boolean.class;
        case "autowiredenabled":
        case "autowiredEnabled": return boolean.class;
        case "backlog": return java.lang.Integer.class;
        case "bindretryinterval":
        case "bindRetryInterval": return int.class;
        case "bindtimeout":
        case "bindTimeout": return int.class;
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": return boolean.class;
        case "charsetname":
        case "charsetName": return java.lang.String.class;
        case "configuration": return org.apache.camel.component.mllp.MllpConfiguration.class;
        case "connecttimeout":
        case "connectTimeout": return int.class;
        case "defaultcharset":
        case "defaultCharset": return java.lang.String.class;
        case "exchangepattern":
        case "exchangePattern": return org.apache.camel.ExchangePattern.class;
        case "hl7headers":
        case "hl7Headers": return boolean.class;
        case "idletimeout":
        case "idleTimeout": return java.lang.Integer.class;
        case "idletimeoutstrategy":
        case "idleTimeoutStrategy": return org.apache.camel.component.mllp.MllpIdleTimeoutStrategy.class;
        case "keepalive":
        case "keepAlive": return java.lang.Boolean.class;
        case "lazystartproducer":
        case "lazyStartProducer": return boolean.class;
        case "lenientbind":
        case "lenientBind": return boolean.class;
        case "logphi":
        case "logPhi": return java.lang.Boolean.class;
        case "logphimaxbytes":
        case "logPhiMaxBytes": return java.lang.Integer.class;
        case "maxconcurrentconsumers":
        case "maxConcurrentConsumers": return int.class;
        case "readtimeout":
        case "readTimeout": return int.class;
        case "receivebuffersize":
        case "receiveBufferSize": return java.lang.Integer.class;
        case "receivetimeout":
        case "receiveTimeout": return int.class;
        case "requireendofdata":
        case "requireEndOfData": return boolean.class;
        case "reuseaddress":
        case "reuseAddress": return java.lang.Boolean.class;
        case "sendbuffersize":
        case "sendBufferSize": return java.lang.Integer.class;
        case "stringpayload":
        case "stringPayload": return boolean.class;
        case "tcpnodelay":
        case "tcpNoDelay": return java.lang.Boolean.class;
        case "validatepayload":
        case "validatePayload": return boolean.class;
        default: return null;
        }
    }
    /**
     * Returns the current value of the named MLLP component option, or null
     * when the option name is unknown.  Most values are read from the
     * component's configuration via getOrCreateConfiguration(target); a few
     * (autowiredEnabled, configuration, defaultCharset, lazyStartProducer,
     * logPhi, logPhiMaxBytes) live directly on the component.
     * NOTE: generated configurer code -- keep edits mechanical.
     */
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        MllpComponent target = (MllpComponent) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "accepttimeout":
        case "acceptTimeout": return getOrCreateConfiguration(target).getAcceptTimeout();
        case "autoack":
        case "autoAck": return getOrCreateConfiguration(target).isAutoAck();
        case "autowiredenabled":
        case "autowiredEnabled": return target.isAutowiredEnabled();
        case "backlog": return getOrCreateConfiguration(target).getBacklog();
        case "bindretryinterval":
        case "bindRetryInterval": return getOrCreateConfiguration(target).getBindRetryInterval();
        case "bindtimeout":
        case "bindTimeout": return getOrCreateConfiguration(target).getBindTimeout();
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": return getOrCreateConfiguration(target).isBridgeErrorHandler();
        case "charsetname":
        case "charsetName": return getOrCreateConfiguration(target).getCharsetName();
        case "configuration": return target.getConfiguration();
        case "connecttimeout":
        case "connectTimeout": return getOrCreateConfiguration(target).getConnectTimeout();
        case "defaultcharset":
        case "defaultCharset": return target.getDefaultCharset();
        case "exchangepattern":
        case "exchangePattern": return getOrCreateConfiguration(target).getExchangePattern();
        case "hl7headers":
        case "hl7Headers": return getOrCreateConfiguration(target).isHl7Headers();
        case "idletimeout":
        case "idleTimeout": return getOrCreateConfiguration(target).getIdleTimeout();
        case "idletimeoutstrategy":
        case "idleTimeoutStrategy": return getOrCreateConfiguration(target).getIdleTimeoutStrategy();
        case "keepalive":
        case "keepAlive": return getOrCreateConfiguration(target).getKeepAlive();
        case "lazystartproducer":
        case "lazyStartProducer": return target.isLazyStartProducer();
        case "lenientbind":
        case "lenientBind": return getOrCreateConfiguration(target).isLenientBind();
        case "logphi":
        case "logPhi": return target.getLogPhi();
        case "logphimaxbytes":
        case "logPhiMaxBytes": return target.getLogPhiMaxBytes();
        case "maxconcurrentconsumers":
        case "maxConcurrentConsumers": return getOrCreateConfiguration(target).getMaxConcurrentConsumers();
        case "readtimeout":
        case "readTimeout": return getOrCreateConfiguration(target).getReadTimeout();
        case "receivebuffersize":
        case "receiveBufferSize": return getOrCreateConfiguration(target).getReceiveBufferSize();
        case "receivetimeout":
        case "receiveTimeout": return getOrCreateConfiguration(target).getReceiveTimeout();
        case "requireendofdata":
        case "requireEndOfData": return getOrCreateConfiguration(target).isRequireEndOfData();
        case "reuseaddress":
        case "reuseAddress": return getOrCreateConfiguration(target).getReuseAddress();
        case "sendbuffersize":
        case "sendBufferSize": return getOrCreateConfiguration(target).getSendBufferSize();
        case "stringpayload":
        case "stringPayload": return getOrCreateConfiguration(target).isStringPayload();
        case "tcpnodelay":
        case "tcpNoDelay": return getOrCreateConfiguration(target).getTcpNoDelay();
        case "validatepayload":
        case "validatePayload": return getOrCreateConfiguration(target).isValidatePayload();
        default: return null;
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.application;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.SystemProperties;
import com.intellij.util.ui.UIUtil;
import com.intellij.ui.AppUIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.*;
import java.util.PropertyResourceBundle;
/**
 * Imports settings from a previous installation of this product into a freshly
 * created config directory.  Locates the old config directory by inspecting
 * the previous installation's launch files (idea.properties / idea.lax /
 * idea.bat / idea.sh / Info.plist).
 *
 * @author max
 */
public class ConfigImportHelper {
  @NonNls private static final String BUILD_NUMBER_FILE = "build.txt";
  @NonNls private static final String PLUGINS_PATH = "plugins";
  @NonNls private static final String BIN_FOLDER = "bin";

  private ConfigImportHelper() {}

  /**
   * Shows the "import old configs" dialog repeatedly until the user either
   * cancels or selects a valid previous installation, then copies the old
   * settings into {@code newConfigPath}.
   */
  public static void importConfigsTo(String newConfigPath) {
    do {
      ImportOldConfigsPanel dlg;
      if (UIUtil.hasJdk6Dialogs()) {
        dlg = new ImportOldConfigsPanel();
      }
      else {
        dlg = new ImportOldConfigsPanel(JOptionPane.getRootFrame());
      }
      UIUtil.setToolkitModal(dlg);
      AppUIUtil.updateDialogIcon(dlg);
      dlg.setVisible(true);
      if (dlg.isImportEnabled()) {
        File instHome = dlg.getSelectedFile();
        File oldConfigDir = getOldConfigDir(instHome);
        // invalid selection: show the dialog again
        if (!validateOldConfigDir(instHome, oldConfigDir)) continue;
        doImport(newConfigPath, oldConfigDir);
      }
      break;
    }
    while (true);
  }

  /**
   * Copies {@code oldConfigDir} into {@code newConfigPath}, reporting any I/O
   * failure to the user with a warning dialog (never throws).
   */
  public static void doImport(final String newConfigPath, final File oldConfigDir) {
    try {
      xcopy(oldConfigDir, new File(newConfigPath));
    }
    catch (IOException e) {
      JOptionPane.showMessageDialog(JOptionPane.getRootFrame(),
                                    ApplicationBundle.message("error.unable.to.import.settings", e.getMessage()),
                                    ApplicationBundle.message("title.settings.import.failed"), JOptionPane.WARNING_MESSAGE);
    }
  }

  /**
   * Validates the config directory derived from the selected installation
   * home: it must have been found (non-null) and must exist on disk.  Shows a
   * message dialog describing the problem and returns false otherwise.
   */
  public static boolean validateOldConfigDir(final File instHome, final File oldConfigDir) {
    if (oldConfigDir == null) {
      JOptionPane.showMessageDialog(JOptionPane.getRootFrame(),
                                    ApplicationBundle.message("error.invalid.installation.home", instHome.getAbsolutePath(),
                                                              ApplicationNamesInfo.getInstance().getFullProductName()));
      return false;
    }

    if (!oldConfigDir.exists()) {
      JOptionPane.showMessageDialog(JOptionPane.getRootFrame(),
                                    ApplicationBundle.message("error.no.settings.path",
                                                              oldConfigDir.getAbsolutePath()),
                                    ApplicationBundle.message("title.settings.import.failed"), JOptionPane.WARNING_MESSAGE);
      return false;
    }
    return true;
  }

  /**
   * Recursively copies directory {@code src} into directory {@code dest}
   * (both must exist), then deletes any just-imported "plugins" directory --
   * old plugins are most probably incompatible with the newer IDE version.
   *
   * @throws IOException if either argument is not a directory or the copy fails
   */
  public static void xcopy(File src, File dest) throws IOException{
    src = src.getCanonicalFile();
    dest = dest.getCanonicalFile();
    if (!src.isDirectory()){
      throw new IOException(ApplicationBundle.message("config.import.invalid.directory.error", src.getAbsolutePath()));
    }
    if (!dest.isDirectory()){
      throw new IOException(ApplicationBundle.message("config.import.invalid.directory.error", dest.getAbsolutePath()));
    }
    FileUtil.copyDir(src, dest);

    // Delete plugins just imported. They're most probably incompatible with newer idea version.
    File plugins = new File(dest, PLUGINS_PATH);
    if (plugins.exists()) {
      FileUtil.delete(plugins);
    }
  }

  /**
   * Locates the config directory of a previous installation, or null if it
   * cannot be determined.  Very old ("Pandora", build <= 600) installations
   * used a fixed "config" subdirectory; later ones declare the path in one of
   * the launch files.
   */
  @Nullable
  public static File getOldConfigDir(File oldInstallHome) {
    int oldBuildNumber = getBuildNumber(oldInstallHome);

    if (oldBuildNumber != -1 && oldBuildNumber <= 600) { // Pandora
      //noinspection HardCodedStringLiteral
      return new File(oldInstallHome, "config");
    }

    File[] launchFileCandidates = getLaunchFilesCandidates(oldInstallHome);
    for (File file : launchFileCandidates) {
      if (file.exists()) {
        String configDir = PathManager.substituteVars(getConfigFromLaxFile(file), oldInstallHome.getPath());
        if (configDir != null) {
          File probableConfig = new File(configDir);
          if (probableConfig.exists()) return probableConfig;
        }
      }
    }

    return null;
  }

  /**
   * Returns the launch files (properties, lax, scripts, plists) that may
   * declare the config path, in lookup order.  Not all of them will exist.
   */
  @SuppressWarnings({"HardCodedStringLiteral"})
  private static File[] getLaunchFilesCandidates(File instHome) {
    File bin = new File(instHome, BIN_FOLDER);
    return new File[]{
      new File(bin, "idea.properties"),
      new File(bin, "idea.lax"),
      new File(bin, "idea.bat"),
      new File(bin, "idea.sh"),
      new File(new File(instHome, "Contents"), "Info.plist"),
      new File(new File(new File(bin, "idea.app"), "Contents"), "Info.plist"),
      new File(new File(new File(instHome, "idea.app"), "Contents"), "Info.plist")
    };
  }

  /**
   * Extracts the "idea.config.path" value from the specified launch file
   * (.properties bundle, plist XML, or key=value script/lax file), or null
   * if the file is unreadable or does not declare the path.
   */
  @SuppressWarnings({"HardCodedStringLiteral"})
  @Nullable
  public static String getConfigFromLaxFile(File file) {
    if (file.getName().endsWith(".properties")) {
      try {
        InputStream fis = new BufferedInputStream(new FileInputStream(file));
        PropertyResourceBundle bundle;
        try {
          bundle = new PropertyResourceBundle(fis);
        }
        finally {
          fis.close();
        }
        return bundle.getString("idea.config.path");
      } catch (IOException e) {
        return null;
      } catch (java.util.MissingResourceException e) {
        // FIX: the key may simply be absent from this properties file;
        // previously this exception propagated and aborted the candidate scan.
        return null;
      }
    }

    String fileContent = getContent(file);
    if (fileContent == null) {
      // FIX: getContent() is @Nullable (unreadable file); previously this
      // fell through to fileContent.indexOf(...) and threw an NPE.
      return null;
    }

    String configParam = "idea.config.path=";
    int idx = fileContent.indexOf(configParam);
    if (idx == -1) {
      // plist format: <key>idea.config.path</key><string>...</string>
      configParam = "<key>idea.config.path</key>";
      idx = fileContent.indexOf(configParam);
      if (idx == -1) return null;
      idx = fileContent.indexOf("<string>", idx);
      if (idx == -1) return null;
      idx += "<string>".length();
      return fixDirName(fileContent.substring(idx, fileContent.indexOf("</string>", idx)), true);
    } else {
      // key=value format: value runs to the closing quote or end of line
      StringBuilder configDir = new StringBuilder();
      idx += configParam.length();
      if (fileContent.length() > idx) {
        if (fileContent.charAt(idx) == '"') {
          idx++;
          while ((fileContent.length() > idx) && (fileContent.charAt(idx) != '"') && (fileContent.charAt(idx) != '\n') &&
                 (fileContent.charAt(idx) != '\r')) {
            configDir.append(fileContent.charAt(idx));
            idx++;
          }
        } else {
          while ((fileContent.length() > idx) && (!Character.isSpaceChar(fileContent.charAt(idx))) &&
                 (fileContent.charAt(idx) != '\n') &&
                 (fileContent.charAt(idx) != '\r')) {
            configDir.append(fileContent.charAt(idx));
            idx++;
          }
        }
      }
      String result = fixDirName(configDir.toString(), true);
      if (result.length() > 0) {
        result = (new File(result)).getPath();
      }
      return result;
    }
  }

  /**
   * Reads the whole file as newline-joined lines, or null if it cannot be read.
   */
  @Nullable
  private static String getContent(File file) {
    try {
      StringBuilder content = new StringBuilder();
      BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
      try {
        String line;
        while ((line = reader.readLine()) != null) {
          content.append(line);
          content.append('\n');
        }
      }
      finally {
        reader.close();
      }
      return content.toString();
    }
    catch (Exception e) {
      // unreadable file: callers treat null as "no content"
      return null;
    }
  }

  /**
   * Strips surrounding quotes from a directory name and (optionally) expands
   * a leading "~" to the user home directory.
   */
  public static String fixDirName(String dir, boolean replaceUserHome) {
    if (StringUtil.startsWithChar(dir, '\"') && StringUtil.endsWithChar(dir, '\"')) {
      dir = dir.substring(1, dir.length() - 1);
    }
    if (replaceUserHome) {
      // NOTE(review): "~//" looks like a typo for "~/" -- harmless, since the
      // startsWithConcatenationOf("~", File.separator) check covers "~/".
      if (dir.startsWith("~\\") || dir.startsWith("~//") || StringUtil.startsWithConcatenationOf(dir, "~", File.separator)) {
        dir = SystemProperties.getUserHome() + dir.substring(1);
      }
    }
    return dir;
  }

  /**
   * Returns true if the specified directory looks like a product installation
   * home: it contains lib/<product>.jar, a bin folder, and at least one of
   * the known launch files.
   */
  public static boolean isInstallationHome(String installationHome) {
    String mainJarName = StringUtil.toLowerCase(ApplicationNamesInfo.getInstance().getProductName()) + ".jar";
    //noinspection HardCodedStringLiteral
    boolean quickTest = new File(new File(installationHome, "lib"), mainJarName).exists() &&
                        new File(installationHome, BIN_FOLDER).exists();
    if (!quickTest) return false;

    File[] files = getLaunchFilesCandidates(new File(installationHome));
    for (File file : files) {
      if (file.exists()) return true;
    }

    return false;
  }

  /**
   * Reads the build number from build.txt (in the install root or its bin
   * folder), returning -1 if the file is missing or not a parseable integer.
   */
  private static int getBuildNumber(File installDirectory) {
    installDirectory = installDirectory.getAbsoluteFile();

    File buildTxt = new File(installDirectory, BUILD_NUMBER_FILE);
    if ((!buildTxt.exists()) || (buildTxt.isDirectory())){
      buildTxt = new File(new File(installDirectory, BIN_FOLDER), BUILD_NUMBER_FILE);
    }

    if (buildTxt.exists() && !buildTxt.isDirectory()){
      int buildNumber = -1;
      String buildNumberText = getContent(buildTxt);
      if (buildNumberText != null) {
        try{
          if (buildNumberText.length() > 1){
            buildNumberText = buildNumberText.trim();
            buildNumber = Integer.parseInt(buildNumberText);
          }
        }
        catch (Exception e){
          // non-numeric build.txt: fall through with -1
        }
      }
      return buildNumber;
    }

    return -1;
  }
}
| |
// ----------------------------------------------------------------------------
// Copyright 2007-2013, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2010/09/09 Martin D. Flynn
// -Initial release
// 2011/08/21 Martin D. Flynn
//  -Added support for returning a JSON object representing the tree structure
// ----------------------------------------------------------------------------
package org.opengts.util;
import java.util.*;
import java.io.*;
/**
*** Tree Node
**/
public class TreeNode
{
// ------------------------------------------------------------------------
    public static final String SLASH_SEPARATOR      = "/"; // default path-name separator (see getPathName)
    public static final char   SLASH_SEPARATOR_CHAR = '/'; // char form of SLASH_SEPARATOR
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
    /**
     *** Callback interface for "traverseTree": "startNode" is invoked for a
     *** node before its children are visited (return true to stop the
     *** traversal at that node); "endNode" is invoked after a node's subtree
     *** has been fully visited without a match.
     **/
    public static interface TreeNodeHandler
    {
        public boolean startNode(TreeNode tn);
        public void endNode(TreeNode tn);
    }
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
/**
*** Create named TreeNodes under specified parent
**/
public static TreeNode createTreePath(TreeNode parent, String name[])
{
return TreeNode.createTreePath(parent, name, null);
}
/**
*** Create named TreeNodes under specified parent
**/
public static TreeNode createTreePath(TreeNode parent, String name[], Class<? extends TreeNode> treeNodeClass)
{
/* invalid name */
if (ListTools.isEmpty(name)) {
return parent;
}
/* no parent */
if (parent == null) {
Print.logStackTrace("Parent is null!");
return null;
}
/* add children/grandchildren */
TreeNode tn = parent;
try {
for (int n = 0; n < name.length; n++) {
TreeNode sn = tn.getChildByName(name[n]);
if (sn == null) {
sn = (treeNodeClass != null)? (TreeNode)treeNodeClass.newInstance() : new TreeNode();
sn.setName(name[n]);
tn.addChild(sn);
}
tn = sn;
}
} catch (Throwable th) { // MethodInvocationException
return null; // error
}
/* return last node created */
return tn;
}
/**
*** Gets/Returns named TreeNodes under specified parent
**/
public static TreeNode getTreePath(TreeNode parent, String name[])
{
/* invalid name */
if (ListTools.isEmpty(name)) {
return parent;
}
/* no parent */
if (parent == null) {
Print.logStackTrace("Parent is null!");
return null;
}
/* find children/grandchildren */
TreeNode tn = parent;
for (int n = 0; n < name.length; n++) {
TreeNode sn = tn.getChildByName(name[n]);
if (sn == null) {
return null; // not found
}
tn = sn;
}
/* return last node found */
return tn;
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
    /**
     *** Flattens the specified tree into a list of separator-delimited path
     *** strings (trimmed "prefix" + sep + nodeName), one entry per node, in
     *** prefix (parent-before-children) order.  A null "list" is created on
     *** demand; the (possibly new) list is returned.
     *** NOTE(review): each path is also printed via Print.sysPrintln -- looks
     *** like leftover debug output; confirm before relying on quiet operation.
     **/
    public static java.util.List<String> flattenTree(java.util.List<String> list, String prefix, char sep, TreeNode parent)
    {

        /* list (created on first use) */
        if (list == null) {
            list = new Vector<String>();
        }

        /* invalid parent */
        if (parent == null) {
            return list;
        }

        /* this node */
        String name = parent.getName();
        prefix = StringTools.trim(prefix) + sep + name;
        list.add(prefix);
        Print.sysPrintln(prefix);

        /* descend tree */
        if (parent.hasChildren()) {
            for (TreeNode tn : parent.getChildren()) {
                TreeNode.flattenTree(list, prefix, sep, tn);
            }
        }

        /* return collection */
        return list;
    }
// ------------------------------------------------------------------------
/**
*** Prints the specified Tree
**/
public static <TN extends TreeNode> void printTree(TN parent)
{
if (parent != null) {
parent.printChildren(0);
}
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
    private String                    name        = "";   // node name (never null; see setName)
    private String                    description = "";   // optional description (never null; see setDescription)
    private int                       type        = 0;    // caller-defined type code
    private TreeNode                  parent      = null; // parent node (null for a root node)
    private java.util.List<TreeNode > children    = null; // child nodes (lazily created by addChild)
    private RTProperties              rtProp      = null; // node properties (lazily created by getProperties)
    private Object                    value       = null; // arbitrary attached value
    /**
     *** Constructor: creates an empty, unnamed node
     **/
    public TreeNode()
    {
        super();
    }

    /**
     *** Constructor: creates a node with the specified name
     *** @param name  The node name
     **/
    public TreeNode(String name)
    {
        super();
        this.setName(name);
    }

    /**
     *** Constructor: creates a named node and adds the specified children
     *** @param name      The node name
     *** @param children  The initial child nodes (may be null)
     **/
    public TreeNode(String name, java.util.List<TreeNode> children)
    {
        this(name);
        this.addChildren(children);
    }
// ------------------------------------------------------------------------
/**
*** Sets the name of this TreeNode
**/
public void setName(String name)
{
this.name = StringTools.trim(name);
}
/**
*** Gets the name of this TreeNode
**/
public String getName()
{
return this.name;
}
// ------------------------------------------------------------------------
/**
*** Sets the type of this TreeNode
*** (usage defined by caller)
**/
public void setType(int type)
{
this.type = type;
}
/**
*** Gets the type of this TreeNode
**/
public int getType()
{
return this.type;
}
// ------------------------------------------------------------------------
/**
*** Sets the description of this TreeNode
**/
public void setDescription(String desc)
{
this.description = StringTools.trim(desc);
}
/**
*** Gets the description of this TreeNode
**/
public String getDescription()
{
if (!StringTools.isBlank(this.description)) {
return this.description;
} else {
return this.getName();
}
}
// ------------------------------------------------------------------------
/**
*** Returns the collection of children nodes
**/
public java.util.List<TreeNode> getChildren()
{
return this.children;
}
/**
*** Returns true if this node has children
**/
public boolean hasChildren()
{
return !ListTools.isEmpty(this.children);
}
/**
*** Returns number of children in this node
**/
public int size()
{
return ListTools.size(this.children);
}
/**
*** Adds all children in the specified java.util.List
**/
public void addChildren(java.util.List<TreeNode> children)
{
if (children != null) {
for (TreeNode tn : children) {
this.addChild(tn);
}
}
}
/**
*** Adds the specified TreeNode as a child to this node
**/
public TreeNode addChild(TreeNode node)
{
if (node == null) {
// quietly ignore
return null;
} else
if (this.isAncestor(node)) {
Print.logStackTrace("Attempting to add an ancestor to this node");
return null;
} else {
if (this.children == null) {
this.children = new Vector<TreeNode>();
}
this.children.add(node);
node.setParent(this);
return node;
}
}
/**
*** Finds/Returns the named child
*** Does not check grandchildren.
**/
public TreeNode getChildAt(int ndx)
{
java.util.List<TreeNode> chList = this.getChildren();
if ((chList != null) && (ndx >= 0) && (ndx < chList.size())) {
return chList.get(ndx);
} else {
return null;
}
}
/**
*** Finds/Returns the named child
*** Does not check grandchildren.
**/
public TreeNode getChildByName(String name)
{
/* invalid name */
if (StringTools.isBlank(name)) {
return null;
}
/* descend children */
if (this.hasChildren()) {
for (TreeNode tn : this.getChildren()) {
if (tn.getName().equals(name)) {
return tn;
}
}
}
/* not found */
return null;
}
/**
*** Finds/Returns the named child
*** Does not check grandchildren.
**/
public TreeNode getChildByPath(String name[])
{
/* invalid name */
if (ListTools.isEmpty(name)) {
return null;
}
/* find child */
TreeNode tn = this.getChildByName(name[0]);
for (int n = 1; (tn != null) && (n < name.length); n++) {
tn = tn.getChildByName(name[n]);
}
return tn; // may be null;
}
/**
*** Finds/Returns the child matching the specified value
*** Does not check grandchildren.
**/
public TreeNode getChildByValue(Object val)
{
/* invalid name */
if (val == null) {
return null;
}
/* descend children */
if (this.hasChildren()) {
for (TreeNode tn : this.getChildren()) {
Object v = tn.getObject();
if ((v != null) && val.equals(v)) {
return tn;
}
}
}
/* not found */
return null;
}
/**
*** Remove the specified child
**/
public boolean removeChild(TreeNode tn)
{
if ((this.children != null) && this.children.contains(tn)) {
tn.setParent(null);
this.children.remove(tn);
return true;
} else {
return false;
}
}
/**
*** Remove this node from it's parent
**/
public boolean removeFromParent()
{
TreeNode parent = this.getParent();
if (parent != null) {
parent.removeChild(this);
return true;
} else {
return false;
}
}
// ------------------------------------------------------------------------
/**
*** Return true if this node has a 'next' sibling
**/
public boolean hasNextSibling()
{
return (this.getNextSibling() != null);
}
/**
*** Return the 'next' sibling node of this node
**/
public TreeNode getNextSibling()
{
TreeNode parent = this.getParent();
if (parent == null) {
return null; // 'this' is the root node (no siblings)
}
java.util.List<TreeNode> siblings = parent.getChildren();
if (siblings != null) {
int sz = siblings.size();
for (int n = 0; n < sz; n++) {
if (siblings.get(n) == this) {
return ((n + 1) < sz)? siblings.get(n + 1) : null;
}
}
}
return null; // will not occur
}
// ------------------------------------------------------------------------
/**
*** Return true if this node has a 'previous' sibling
**/
public boolean hasPreviousSibling()
{
return (this.getPreviousSibling() != null);
}
/**
*** Return the 'previous' sibling node of this node
**/
public TreeNode getPreviousSibling()
{
TreeNode parent = this.getParent();
if (parent == null) {
return null; // 'this' is the root node (no siblings)
}
java.util.List<TreeNode> siblings = parent.getChildren();
if (siblings != null) {
int sz = siblings.size();
for (int n = 0; n < sz; n++) {
if (siblings.get(n) == this) {
return ((n - 1) >= 0)? siblings.get(n - 1) : null;
}
}
}
return null; // will not occur
}
// ------------------------------------------------------------------------
/**
*** Sets the parent of this TreeNode
**/
protected void setParent(TreeNode node)
{
this.parent = node;
}
/**
*** Gets the parent of this TreeNode
**/
protected TreeNode getParent()
{
return this.parent;
}
/**
*** Returns true if this TreeNode has a parent
**/
public boolean hasParent()
{
return (this.parent != null);
}
/**
*** Returns true if this node does not have a parent
**/
public boolean isRootNode()
{
return (this.parent == null);
}
// ------------------------------------------------------------------------
/**
*** Returns true if the specified node is the same as this node,
*** or is an ancestor
**/
public boolean isAncestor(TreeNode node)
{
/* invalid node */
if (node == null) {
return false;
}
/* ascend tree */
TreeNode tn = this;
while (tn != null) {
if (node == tn) {
return true;
}
tn = tn.getParent();
}
/* not an ancestor */
return false;
}
/**
*** Returns true if the specified node is an offspring of this node
**/
public boolean isOffspring(TreeNode node)
{
/* invalid node */
if (node == null) {
return false;
}
/* descend children */
if (this.hasChildren()) {
for (TreeNode tn : this.getChildren()) {
if (tn.isOffspring(node)) {
return true;
}
}
}
/* not an offspring */
return false;
}
// ------------------------------------------------------------------------
/**
*** Calculates/Returns the level of this node (root node is level '0')
**/
public int getLevel()
{
return this.getLevel(null);
}
/**
*** Calculates/Returns the level of this node (root node is level '0')
**/
public int getLevel(TreeNode parent)
{
int L = 0;
for (TreeNode tn = this.getParent(); (tn != null) && (tn != parent); L++) {
tn = tn.getParent();
}
return L;
}
// ------------------------------------------------------------------------
/**
*** Returns the path of this TreeNode
**/
public TreeNode[] getPath()
{
return this.getPath(null);
}
/**
*** Returns the path of this TreeNode
**/
public TreeNode[] getPath(TreeNode parent)
{
java.util.List<TreeNode> path = new Vector<TreeNode>();
/* ascend tree */
TreeNode tn = this;
while ((tn != null) && (tn != parent)) {
path.add(tn);
tn = tn.getParent();
}
/* reverse */
int pathLen = path.size();
TreeNode pathStr[] = new TreeNode[pathLen];
for (int i = 0; i < pathLen; i++) {
pathStr[i] = path.get((pathLen - 1) - i);
}
return pathStr;
}
// ------------------------------------------------------------------------
/**
*** Returns the path name of this TreeNode
**/
public String[] getPathNames()
{
return this.getPathNames(null);
}
/**
*** Returns the path name of this TreeNode
**/
public String[] getPathNames(TreeNode parent)
{
TreeNode tn[] = this.getPath(parent);
if (ListTools.isEmpty(tn)) {
return new String[0];
} else {
String pn[] = new String[tn.length];
for (int i = 0; i < pn.length; i++) {
pn[i] = tn[i].getName();
}
return pn;
}
}
/**
*** Returns the path name of this TreeNode
**/
public String getPathName(String sep)
{
return this.getPathName(null, sep);
}
/**
*** Returns the path name of this TreeNode
**/
public String getPathName(TreeNode parent, String sep)
{
String s = (sep != null)? sep : SLASH_SEPARATOR;
StringBuffer sb = new StringBuffer();
for (TreeNode tn : this.getPath(parent)) {
sb.append(s).append(tn.getName());
}
return sb.toString();
}
// ------------------------------------------------------------------------
/**
*** Returns the path name of this TreeNode
**/
public String[] getPathDescriptions()
{
return this.getPathDescriptions(null);
}
/**
*** Returns the path name of this TreeNode
**/
public String[] getPathDescriptions(TreeNode parent)
{
TreeNode tn[] = this.getPath(parent);
if (ListTools.isEmpty(tn)) {
return new String[0];
} else {
String pn[] = new String[tn.length];
for (int i = 0; i < pn.length; i++) {
String d = tn[i].getDescription();
pn[i] = !StringTools.isBlank(d)? d : tn[i].getName();
}
return pn;
}
}
/**
*** Returns the path name of this TreeNode
**/
public String getPathDescription(String sep)
{
return this.getPathDescription(null, sep);
}
/**
*** Returns the path name of this TreeNode
**/
public String getPathDescription(TreeNode parent, String sep)
{
String s = (sep != null)? sep : SLASH_SEPARATOR;
StringBuffer sb = new StringBuffer();
for (TreeNode tn : this.getPath(parent)) {
sb.append(s);
String d = tn.getDescription();
sb.append(!StringTools.isBlank(d)? d : tn.getName());
}
return sb.toString();
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
    /**
     *** Prefix (parent-before-children) traversal of this subtree.
     *** "startNode" is invoked for each node before its children; returning
     *** true stops the traversal and that node is returned to the caller.
     *** "endNode" is invoked only after a node's subtree has been fully
     *** visited without a match.  Returns the first node for which
     *** "startNode" returned true, else null.
     **/
    public TreeNode traverseTree(TreeNodeHandler tnh)
    {

        /* null handler */
        if (tnh == null) {
            return null;
        }

        /* call-back */
        if (tnh.startNode(this)) {
            // returned true, done traversing
            return this;
        }

        /* check for children */
        if (this.hasChildren()) {
            for (TreeNode tn : this.getChildren()) {
                TreeNode foundNode = tn.traverseTree(tnh);
                if (foundNode != null) {
                    return foundNode;
                }
            }
        }

        /* done with traversal (for this branch) */
        tnh.endNode(this);
        return null;
    }
/**
*** Return first node with matching name
**/
public TreeNode findChildByName(final String name)
{
return this.traverseTree(new TreeNodeHandler() {
public boolean startNode(TreeNode tn) {
return ((tn != null) && tn.getName().equals(name));
}
public void endNode(TreeNode tn) {}
});
}
/**
*** Return first node with matching property key
**/
public TreeNode findChildByProperty(final String key, final Object value)
{
/* key not specified */
if (StringTools.isBlank(key)) {
return null;
}
/* traverse and return */
return this.traverseTree(new TreeNodeHandler() {
public boolean startNode(TreeNode tn) {
if (tn == null) {
return false;
} else
if (!tn.hasProperty(key)) {
return false;
}
Object v = tn.getProperty(key);
if (v == value) {
return true;
} else
if ((v != null) && v.equals(value)) {
return true;
} else {
return false;
}
}
public void endNode(TreeNode tn) {}
});
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
/**
*** Returns a JSON array of this nodes children (or null if there are no children)
*** @return A JSON array of this nodes children (or null if there are no children)
**/
public JSON._Array getJsonChildrenArray()
{
JSON._Array children = null;
if (this.hasChildren()) {
children = new JSON._Array();
for (TreeNode tn : this.getChildren()) {
children.addValue(tn.getJsonObject());
}
}
return children;
}
/**
*** Returns a JSON object for this TreeNode
*** @return A JSON object for this TreeNode
**/
public JSON._Object getJsonObject()
{
return this._setJsonNodeValues(new JSON._Object(),
this.getName(), this.getDescription(),
this.getJsonChildrenArray());
}
public static final String JSON_Key_name = "name";
public static final String JSON_Key_description = "description";
public static final String JSON_Key_children = "children";
/**
*** Overridable method for setting node JSON values
*** @param jsonObj The JSON object for this not (never null)
*** @param name The default node name
*** @param desc The default node description
*** @param children A JSON array of children JSON nodes (may be null)
*** @return The jsonObj passed to this method
**/
protected JSON._Object _setJsonNodeValues(JSON._Object jsonObj,
String name, String desc,
JSON._Array children)
{
jsonObj.addKeyValue(JSON_Key_name , name);
jsonObj.addKeyValue(JSON_Key_description , desc);
if (children != null) {
jsonObj.addKeyValue(JSON_Key_children, children);
}
return jsonObj;
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
/**
*** Gets the String representation of this TreeNode
**/
public String toString()
{
return this.getPathName(SLASH_SEPARATOR);
}
/**
*** Gets the String representation of this TreeNode
**/
public String toString(String sep)
{
return this.getPathName(sep);
}
/**
*** Prints the children nodes
**/
public void printChildren()
{
this.printChildren(-1);
}
/**
*** Prints this node (one line, indented by level) followed recursively by
*** all of its children nodes.
*** @param lvl The indentation level for this node (if negative, this
***            node's own level is used)
**/
public void printChildren(int lvl)
{
    int level = (lvl >= 0)? lvl : this.getLevel();
    String indent = StringTools.replicateString(" ", level);
    /* description (defaults to the node name if blank) */
    String desc = this.getDescription();
    if (StringTools.isBlank(desc)) { desc = this.getName(); }
    /* print this node (parent nodes are suffixed with ':'), then recurse */
    // FIX: removed unused local 'sep', and merged the duplicated
    // "Print.sysPrintln" calls into a single print path.
    StringBuffer sb = new StringBuffer();
    sb.append(indent).append(desc);
    if (this.hasChildren()) {
        sb.append(":");
    }
    Print.sysPrintln(sb.toString());
    if (this.hasChildren()) {
        for (TreeNode tn : this.getChildren()) {
            tn.printChildren(level + 1);
        }
    }
}
// ------------------------------------------------------------------------
/**
*** Returns true if the 'other' node is the same instance as 'this' node.
*** (Equality is identity-only: "similar" but distinct nodes never compare
*** equal, and any non-TreeNode — including null — returns false)
**/
public boolean equals(Object other)
{
    // identity implies "instanceof TreeNode", so the single identity test
    // covers the original type-check as well; null can never be 'this'
    return (other == this);
}
// ------------------------------------------------------------------------
/**
*** Get node properties (lazily created on first access)
*** @return Node properties (never null)
**/
public RTProperties getProperties()
{
    // lazily create the property container the first time it is requested
    if (this.rtProp == null) { this.rtProp = new RTProperties(); }
    return this.rtProp;
}
/**
*** Returns true if properties have been defined for this node
*** @return True if properties have been defined for this node
**/
public boolean hasProperties()
{
    // never allocated, or allocated but still empty => no properties
    if (this.rtProp == null) {
        return false;
    }
    return !this.rtProp.isEmpty();
}
/**
*** Returns true if the specified property key is defined in this node
*** @param key The property key
*** @return True if the specified property key is defined in this node
**/
public boolean hasProperty(String key)
{
    // short-circuits to false when no property container has been created
    return (this.rtProp != null) && this.rtProp.hasProperty(key);
}
/**
*** Gets the specified node property value
*** @param key The property key
*** @param dft The default return value if the key is not defined
*** @return The property value, or 'dft' if the key is not defined
**/
public Object getProperty(String key, Object dft)
{
    // FIX: previously returned null (ignoring 'dft') when no property
    // container had been created yet, contradicting the documented
    // "default return value" contract.
    return (this.rtProp != null)? this.rtProp.getProperty(key,dft) : dft;
}
/**
*** Gets the specified node property value (or null if not defined)
*** @param key The property key
*** @return The property value
**/
public Object getProperty(String key)
{
    // delegate to the two-argument form with a null default
    return this.getProperty(key, null);
}
/**
*** Sets the specified node property value
*** @param key The property key
*** @param val The property value
**/
public void setProperty(String key, Object val)
{
    // "getProperties()" lazily creates the container if necessary
    RTProperties props = this.getProperties();
    props.setProperty(key, val);
}
// ------------------------------------------------------------------------
/**
*** Sets a node leaf object value
*** @param val The leaf object value
**/
public void setObject(Object val)
{
    this.value = val;
}
/**
*** Gets the node leaf object value (may be null)
**/
public Object getObject()
{
    return this.value;
}
/**
*** Returns true if this node has a (non-null) object value
**/
public boolean hasObject()
{
    return (this.value == null)? false : true;
}
/**
*** Returns true if this node is considered a leaf node.
*** (Note: "leaf" here means "has a non-null object value" — equivalent to
*** "hasObject()"; this method does NOT check for the absence of children)
**/
public boolean isLeaf()
{
    return (this.value != null);
}
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
// ------------------------------------------------------------------------
/**
*** Command-line self-test: builds a sample 5-level tree from "/"-separated
*** paths, prints it, flattens it, and displays its JSON representation.
**/
public static void main(String argv[])
{
    RTConfig.setCommandLineArgs(argv);
    /* sample "/"-separated tree paths */
    String list[] = new String[] {
        "AA/BA/CA/DA/EA",
        "AA/BA/CA/DA/EB",
        "AA/BA/CA/DB/EA",
        "AA/BA/CA/DB/EB",
        "AA/BA/CA/DB/EC",
        "AA/BA/CA/DC/EA",
        "AA/BA/CB/DA/EA",
        "AA/BA/CB/DA/EB",
        "AA/BA/CB/DB/EA",
        "AA/BA/CB/DB/EB",
        "AA/BA/CB/DB/EC",
        "AA/BA/CB/DC/EA",
        "AA/BB/CA/DA/EA",
        "AA/BB/CA/DA/EB",
        "AA/BB/CA/DB/EA",
        "AA/BB/CA/DB/EB",
        "AA/BB/CA/DB/EC",
        "AA/BB/CA/DC/EA",
        "AA/BB/CB/DA/EA",
        "AA/BB/CB/DA/EB",
        "AA/BB/CB/DB/EA",
        "AA/BB/CB/DB/EB",
        "AA/BB/CB/DB/EC",
        "AA/BB/CB/DC/EA",
    };
    /* build the tree: each path creates (or reuses) one node per segment */
    TreeNode root = new TreeNode("root");
    for (String a : list) {
        String aa[] = StringTools.split(a, SLASH_SEPARATOR_CHAR);
        TreeNode.createTreePath(root, aa);
    }
    /* print the tree, flatten it into a list, and render it as JSON */
    printTree(root);
    Vector<String> flatList = new Vector<String>();
    flattenTree(flatList, "", '-', root);
    JSON._Object jsonObj = root.getJsonObject();
    Print.sysPrintln("JSON:\n" + jsonObj);
}
}
| |
/*
* Copyright (c) 2013-2014 Massachusetts Institute of Technology
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package edu.mit.streamjit.impl.common;
import com.google.common.collect.ImmutableList;
import edu.mit.streamjit.api.Filter;
import edu.mit.streamjit.api.Joiner;
import edu.mit.streamjit.api.OneToOneElement;
import edu.mit.streamjit.api.Pipeline;
import edu.mit.streamjit.api.Splitjoin;
import edu.mit.streamjit.api.Splitter;
import edu.mit.streamjit.api.StreamElement;
import edu.mit.streamjit.api.StreamVisitor;
import java.util.ArrayDeque;
import java.util.Deque;
/**
* A StackVisitor is a StreamVisitor that keeps a stack trace of the visitation
* in progress. The top of the stack is the element currently being visited (in
* enter and exit methods, the element being entered or exited); the second
* element is that element's parent, and so on.
* <p/>
* StackVisitor needs to run code around the subclass implementation, so it
* provides final implementations of the StreamVisitor methods with
* corresponding abstract methods with a 0 appended (e.g., visitFilter0). These
* methods have the same contract as the corresponding StreamVisitor methods.
* beginVisit() and endVisit() are not final and may be overridden, though the
* subclass should call the superimplementation.
* @author Jeffrey Bosboom <jbosboom@csail.mit.edu>
* @since 8/8/2013
*/
public abstract class StackVisitor extends StreamVisitor {
    // Visitation stack: the top is the element currently being visited, the
    // next entry is its parent container, and so on up to the graph root.
    private final Deque<GraphTraceElement> stack = new ArrayDeque<>();
    protected StackVisitor() {}
    // Subclass hooks; each has the same contract as the corresponding
    // (final) StreamVisitor method implemented below.
    protected abstract void visitFilter0(Filter<?, ?> filter);
    protected abstract boolean enterPipeline0(Pipeline<?, ?> pipeline);
    protected abstract void exitPipeline0(Pipeline<?, ?> pipeline);
    protected abstract boolean enterSplitjoin0(Splitjoin<?, ?> splitjoin);
    protected abstract void visitSplitter0(Splitter<?, ?> splitter);
    protected abstract boolean enterSplitjoinBranch0(OneToOneElement<?, ?> element);
    protected abstract void exitSplitjoinBranch0(OneToOneElement<?, ?> element);
    protected abstract void visitJoiner0(Joiner<?, ?> joiner);
    protected abstract void exitSplitjoin0(Splitjoin<?, ?> splitjoin);
    /**
     * Returns an immutable snapshot of the current visitation stack, with
     * the deepest (currently-visited) element first.
     */
    protected final ImmutableList<GraphTraceElement> getTrace() {
        return ImmutableList.copyOf(stack);
    }
    @Override
    public void beginVisit() {
        // nothing to initialize; subclasses may override (and should call super)
    }
    @Override
    public void endVisit() {
        // every push during the visitation must have been matched by a pop
        assert stack.isEmpty();
    }
    //<editor-fold defaultstate="collapsed" desc="StreamVisitor method implementations">
    @Override
    public final void visitFilter(Filter<?, ?> filter) {
        maybeIncrementPipelineElement();
        // the filter is on the stack only for the duration of the callback
        stack.push(new LeafGraphTraceElement(filter));
        visitFilter0(filter);
        stack.pop();
    }
    @Override
    public final boolean enterPipeline(Pipeline<?, ?> pipeline) {
        maybeIncrementPipelineElement();
        stack.push(new LeafGraphTraceElement(pipeline));
        boolean enter = enterPipeline0(pipeline);
        stack.pop();
        if (enter) {
            // -1: no subelement visited yet; maybeIncrementPipelineElement
            // bumps this to 0 on the first child visit
            stack.push(new InternalGraphTraceElement(pipeline, -1));
            return true;
        } else
            return false;
    }
    @Override
    public final void exitPipeline(Pipeline<?, ?> pipeline) {
        exitPipeline0(pipeline);
        GraphTraceElement pop = stack.pop();
        assert pop.getElement() == pipeline;
    }
    @Override
    public final boolean enterSplitjoin(Splitjoin<?, ?> splitjoin) {
        maybeIncrementPipelineElement();
        stack.push(new LeafGraphTraceElement(splitjoin));
        boolean enter = enterSplitjoin0(splitjoin);
        if (enter)
            //We leave the splitjoin on the stack; the splitter will fix it up.
            return true;
        else {
            stack.pop();
            return false;
        }
    }
    @Override
    public final void visitSplitter(Splitter<?, ?> splitter) {
        // replace the leaf splitjoin entry (left by enterSplitjoin) with an
        // internal entry marking the splitter subelement
        GraphTraceElement pop = stack.pop();
        assert pop.getElement() instanceof Splitjoin;
        stack.push(new InternalGraphTraceElement(pop.getElement(),
                InternalGraphTraceElement.SPLITTER_SUBELEM));
        stack.push(new LeafGraphTraceElement(splitter));
        visitSplitter0(splitter);
        stack.pop();
    }
    @Override
    public final boolean enterSplitjoinBranch(OneToOneElement<?, ?> element) {
        // advance the splitjoin's branch counter (SPLITTER_SUBELEM == -1, so
        // the first branch becomes branch 0)
        InternalGraphTraceElement pop = (InternalGraphTraceElement)stack.pop();
        assert pop.getElement() instanceof Splitjoin;
        stack.push(new InternalGraphTraceElement(pop.getElement(), pop.subelement + 1));
        stack.push(new LeafGraphTraceElement(element));
        boolean enter = enterSplitjoinBranch0(element);
        //Always pop the stack -- whatever we're about to enter will push its
        //own trace element.
        stack.pop();
        return enter;
    }
    @Override
    public final void exitSplitjoinBranch(OneToOneElement<?, ?> element) {
        exitSplitjoinBranch0(element);
        //Don't pop the splitjoin element -- we'll need it in
        //enterSplitjoinBranch or visitJoiner.
    }
    @Override
    public final void visitJoiner(Joiner<?, ?> joiner) {
        // switch the splitjoin entry from the last branch to the joiner
        InternalGraphTraceElement pop = (InternalGraphTraceElement)stack.pop();
        assert pop.getElement() instanceof Splitjoin;
        stack.push(new InternalGraphTraceElement(pop.getElement(),
                InternalGraphTraceElement.JOINER_SUBELEM));
        stack.push(new LeafGraphTraceElement(joiner));
        visitJoiner0(joiner);
        stack.pop();
    }
    @Override
    public final void exitSplitjoin(Splitjoin<?, ?> splitjoin) {
        exitSplitjoin0(splitjoin);
        GraphTraceElement pop = stack.pop();
        assert pop.getElement() == splitjoin;
    }
    /**
     * There's no enterPipelineElement() method, so whenever we visit a filter
     * or enter a pipeline or splitjoin, we need to check if we should increment
     * a containing pipeline's subelement.
     */
    private void maybeIncrementPipelineElement() {
        GraphTraceElement top = stack.peek();
        if (top == null) return;
        if (top.getElement() instanceof Pipeline) {
            stack.pop();
            stack.push(new InternalGraphTraceElement(top.getElement(),
                    ((InternalGraphTraceElement)top).subelement + 1));
        }
    }
    //</editor-fold>
    /**
     * Represents a location in the stream graph, analogous to
     * StackTraceElement.
     * <p/>
     * TODO: getSubelement()? Would require support from Pipeline and Splitjoin,
     * possibly through a ContainerElement interface.
     * <p/>
     * TODO: Should this be exposed in the API so we can include it directly
     * (rather than as text) in IllegalStreamGraphException and subclasses?
     */
    public interface GraphTraceElement {
        public StreamElement<?, ?> getElement();
        @Override
        public String toString();
    }
    /**
     * Returns a String representation of the given list of GraphTraceElements,
     * assuming the first element is the deepest in the graph.
     * @param elements a graph trace
     * @return a String representation of the trace
     */
    public static String asTrace(Iterable<GraphTraceElement> elements) {
        //TODO Java 8: move this into GraphTraceElement
        return com.google.common.base.Joiner.on("\nin ").join(elements);
    }
    // Trace entry for a container (pipeline/splitjoin) together with which
    // subelement of that container is currently being visited.
    private static final class InternalGraphTraceElement implements GraphTraceElement {
        /**
         * SPLITTER_SUBELEM being -1 is important for correctness in
         * enterSplitjoinBranch.
         */
        private static final int SPLITTER_SUBELEM = -1, JOINER_SUBELEM = -2;
        private final StreamElement<?, ?> element;
        private final int subelement;
        private InternalGraphTraceElement(StreamElement<?, ?> element, int subelement) {
            this.element = element;
            this.subelement = subelement;
        }
        @Override
        public StreamElement<?, ?> getElement() {
            return element;
        }
        @Override
        public String toString() {
            String subelem;
            if (subelement == SPLITTER_SUBELEM)
                subelem = "splitter";
            else if (subelement == JOINER_SUBELEM)
                subelem = "joiner";
            else if (element instanceof Pipeline)
                subelem = "element "+subelement;
            else if (element instanceof Splitjoin)
                subelem = "branch "+subelement;
            else
                throw new AssertionError();
            return String.format("%s, %s", element, subelem);
        }
    }
    // Trace entry for a single element with no subelement position (filters,
    // splitters, joiners, and containers before they are entered).
    private static final class LeafGraphTraceElement implements GraphTraceElement {
        private final StreamElement<?, ?> element;
        private LeafGraphTraceElement(StreamElement<?, ?> element) {
            assert element != null;
            this.element = element;
        }
        @Override
        public StreamElement<?, ?> getElement() {
            return element;
        }
        @Override
        public String toString() {
            return element.toString();
        }
    }
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.nn.conf;
import org.deeplearning4j.BaseDL4JTest;
import org.deeplearning4j.exception.DL4JInvalidConfigException;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.distribution.NormalDistribution;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.*;
import org.deeplearning4j.nn.conf.preprocessor.CnnToFeedForwardPreProcessor;
import org.deeplearning4j.nn.conf.weightnoise.DropConnect;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.Adam;
import org.nd4j.linalg.learning.config.NoOp;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import java.io.*;
import java.util.Arrays;
import java.util.Properties;
import static org.junit.Assert.*;
/**
* Created by agibsonccc on 11/27/14.
*/
// Tests for MultiLayerConfiguration: JSON/YAML round-tripping, cloning,
// weight initialization determinism, listeners, and invalid-config detection.
public class MultiLayerNeuralNetConfigurationTest extends BaseDL4JTest {
    @Rule
    public TemporaryFolder testDir = new TemporaryFolder();
    // Round-trips a configuration through JSON, both directly and via a
    // java.util.Properties file on disk.
    @Test
    public void testJson() throws Exception {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                .layer(0, new DenseLayer.Builder().dist(new NormalDistribution(1, 1e-1)).build())
                .inputPreProcessor(0, new CnnToFeedForwardPreProcessor()).build();
        String json = conf.toJson();
        MultiLayerConfiguration from = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf.getConf(0), from.getConf(0));
        Properties props = new Properties();
        props.put("json", json);
        String key = props.getProperty("json");
        assertEquals(json, key);
        File f = testDir.newFile("props");
        f.deleteOnExit();
        BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(f));
        props.store(bos, "");
        bos.flush();
        bos.close();
        BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));
        Properties props2 = new Properties();
        props2.load(bis);
        bis.close();
        assertEquals(props2.getProperty("json"), props.getProperty("json"));
        String json2 = props2.getProperty("json");
        MultiLayerConfiguration conf3 = MultiLayerConfiguration.fromJson(json2);
        assertEquals(conf.getConf(0), conf3.getConf(0));
    }
    // JSON round-trip for a conv/subsampling/dense/output network.
    @Test
    public void testConvnetJson() {
        final int numRows = 76;
        final int numColumns = 76;
        int nChannels = 3;
        int outputNum = 6;
        int seed = 123;
        //setup the network
        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                .l1(1e-1).l2(2e-4).weightNoise(new DropConnect(0.5)).miniBatch(true)
                .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
                .layer(0, new ConvolutionLayer.Builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
                        .activation(Activation.RELU).build())
                .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                        .build())
                .layer(2, new ConvolutionLayer.Builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
                        .activation(Activation.RELU).build())
                .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                        .build())
                .layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
                .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                        .build())
                .setInputType(InputType.convolutional(numRows, numColumns, nChannels));
        MultiLayerConfiguration conf = builder.build();
        String json = conf.toJson();
        MultiLayerConfiguration conf2 = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, conf2);
    }
    // JSON round-trip for a network containing Upsampling2D layers (mixes
    // indexed and un-indexed .layer(...) builder calls).
    @Test
    public void testUpsamplingConvnetJson() {
        final int numRows = 76;
        final int numColumns = 76;
        int nChannels = 3;
        int outputNum = 6;
        int seed = 123;
        //setup the network
        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                .l1(1e-1).l2(2e-4).dropOut(0.5).miniBatch(true)
                .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
                .layer(new ConvolutionLayer.Builder(5, 5).nOut(5).dropOut(0.5).weightInit(WeightInit.XAVIER)
                        .activation(Activation.RELU).build())
                .layer(new Upsampling2D.Builder().size(2).build())
                .layer(2, new ConvolutionLayer.Builder(3, 3).nOut(10).dropOut(0.5).weightInit(WeightInit.XAVIER)
                        .activation(Activation.RELU).build())
                .layer(new Upsampling2D.Builder().size(2).build())
                .layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
                .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                        .build())
                .setInputType(InputType.convolutional(numRows, numColumns, nChannels));
        MultiLayerConfiguration conf = builder.build();
        String json = conf.toJson();
        MultiLayerConfiguration conf2 = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, conf2);
    }
    // JSON round-trip for a network with a global pooling layer.
    @Test
    public void testGlobalPoolingJson() {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().updater(new NoOp())
                .dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
                .layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(5).build())
                .layer(1, new GlobalPoolingLayer.Builder().poolingType(PoolingType.PNORM).pnorm(3).build())
                .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nOut(3).build())
                .setInputType(InputType.convolutional(32, 32, 1)).build();
        String str = conf.toJson();
        // NOTE(review): fromJson is invoked via an instance here; presumably
        // it is a static factory — confirm against MultiLayerConfiguration.
        MultiLayerConfiguration fromJson = conf.fromJson(str);
        assertEquals(conf, fromJson);
    }
    // Same round-trip as testJson, but through YAML.
    @Test
    public void testYaml() throws Exception {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                .layer(0, new DenseLayer.Builder().dist(new NormalDistribution(1, 1e-1)).build())
                .inputPreProcessor(0, new CnnToFeedForwardPreProcessor()).build();
        String json = conf.toYaml();
        MultiLayerConfiguration from = MultiLayerConfiguration.fromYaml(json);
        assertEquals(conf.getConf(0), from.getConf(0));
        Properties props = new Properties();
        props.put("json", json);
        String key = props.getProperty("json");
        assertEquals(json, key);
        File f = testDir.newFile("props");
        f.deleteOnExit();
        BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(f));
        props.store(bos, "");
        bos.flush();
        bos.close();
        BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));
        Properties props2 = new Properties();
        props2.load(bis);
        bis.close();
        assertEquals(props2.getProperty("json"), props.getProperty("json"));
        String yaml = props2.getProperty("json");
        MultiLayerConfiguration conf3 = MultiLayerConfiguration.fromYaml(yaml);
        assertEquals(conf.getConf(0), conf3.getConf(0));
    }
    // clone() must deep-copy: equal contents, but distinct objects at every
    // level (confs and preprocessors).
    @Test
    public void testClone() {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list().layer(0, new DenseLayer.Builder().build())
                .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).build())
                .inputPreProcessor(1, new CnnToFeedForwardPreProcessor()).build();
        MultiLayerConfiguration conf2 = conf.clone();
        assertEquals(conf, conf2);
        assertNotSame(conf, conf2);
        assertNotSame(conf.getConfs(), conf2.getConfs());
        for (int i = 0; i < conf.getConfs().size(); i++) {
            assertNotSame(conf.getConf(i), conf2.getConf(i));
        }
        assertNotSame(conf.getInputPreProcessors(), conf2.getInputPreProcessors());
        for (Integer layer : conf.getInputPreProcessors().keySet()) {
            assertNotSame(conf.getInputPreProcess(layer), conf2.getInputPreProcess(layer));
        }
    }
    // Two networks built from the same (seeded) config must initialize to
    // identical parameters.
    @Test
    public void testRandomWeightInit() {
        MultiLayerNetwork model1 = new MultiLayerNetwork(getConf());
        model1.init();
        Nd4j.getRandom().setSeed(12345L);
        MultiLayerNetwork model2 = new MultiLayerNetwork(getConf());
        model2.init();
        float[] p1 = model1.params().data().asFloat();
        float[] p2 = model2.params().data().asFloat();
        System.out.println(Arrays.toString(p1));
        System.out.println(Arrays.toString(p2));
        org.junit.Assert.assertArrayEquals(p1, p2, 0.0f);
    }
    // Listeners must propagate to every layer whether added before or after
    // init().
    @Test
    public void testTrainingListener() {
        MultiLayerNetwork model1 = new MultiLayerNetwork(getConf());
        model1.init();
        model1.addListeners( new ScoreIterationListener(1));
        MultiLayerNetwork model2 = new MultiLayerNetwork(getConf());
        model2.addListeners( new ScoreIterationListener(1));
        model2.init();
        Layer[] l1 = model1.getLayers();
        for (int i = 0; i < l1.length; i++)
            assertTrue(l1[i].getListeners() != null && l1[i].getListeners().size() == 1);
        Layer[] l2 = model2.getLayers();
        for (int i = 0; i < l2.length; i++)
            assertTrue(l2[i].getListeners() != null && l2[i].getListeners().size() == 1);
    }
    // Shared fixture: minimal seeded 2-layer configuration.
    private static MultiLayerConfiguration getConf() {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345l).list()
                .layer(0, new DenseLayer.Builder().nIn(2).nOut(2)
                        .dist(new NormalDistribution(0, 1)).build())
                .layer(1, new OutputLayer.Builder().nIn(2).nOut(1)
                        .activation(Activation.TANH)
                        .dist(new NormalDistribution(0, 1)).lossFunction(LossFunctions.LossFunction.MSE).build())
                .build();
        return conf;
    }
    // Invalid configurations (empty list, bad layer indices) must throw
    // IllegalStateException from init().
    @Test
    public void testInvalidConfig() {
        // case 1: no layers at all
        try {
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).list()
                    .build();
            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();
            fail("No exception thrown for invalid configuration");
        } catch (IllegalStateException e) {
            //OK
            e.printStackTrace();
        } catch (Throwable e) {
            e.printStackTrace();
            fail("Unexpected exception thrown for invalid config");
        }
        // case 2: first layer index is 1 (no layer 0)
        try {
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).list()
                    .layer(1, new DenseLayer.Builder().nIn(3).nOut(4).build())
                    .layer(2, new OutputLayer.Builder().nIn(4).nOut(5).build())
                    .build();
            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();
            fail("No exception thrown for invalid configuration");
        } catch (IllegalStateException e) {
            //OK
            e.printStackTrace();
        } catch (Throwable e) {
            e.printStackTrace();
            fail("Unexpected exception thrown for invalid config");
        }
        // case 3: gap in layer indices (0 then 2, no layer 1)
        try {
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).list()
                    .layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
                    .layer(2, new OutputLayer.Builder().nIn(4).nOut(5).build())
                    .build();
            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();
            fail("No exception thrown for invalid configuration");
        } catch (IllegalStateException e) {
            //OK
            e.printStackTrace();
        } catch (Throwable e) {
            e.printStackTrace();
            fail("Unexpected exception thrown for invalid config");
        }
    }
    // The indexed, un-indexed, and varargs list(...) builder overloads must
    // produce equal configurations.
    @Test
    public void testListOverloads() {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).list()
                .layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
                .layer(1, new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        DenseLayer dl = (DenseLayer) conf.getConf(0).getLayer();
        assertEquals(3, dl.getNIn());
        assertEquals(4, dl.getNOut());
        OutputLayer ol = (OutputLayer) conf.getConf(1).getLayer();
        assertEquals(4, ol.getNIn());
        assertEquals(5, ol.getNOut());
        MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345).list()
                .layer(0, new DenseLayer.Builder().nIn(3).nOut(4).build())
                .layer(1, new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
                .build();
        MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
        net2.init();
        MultiLayerConfiguration conf3 = new NeuralNetConfiguration.Builder().seed(12345)
                .list(new DenseLayer.Builder().nIn(3).nOut(4).build(),
                        new OutputLayer.Builder().nIn(4).nOut(5).activation(Activation.SOFTMAX).build())
                .build();
        MultiLayerNetwork net3 = new MultiLayerNetwork(conf3);
        net3.init();
        assertEquals(conf, conf2);
        assertEquals(conf, conf3);
    }
    // A separate biasUpdater must apply to "b" parameters while the regular
    // updater applies to "W", in every layer.
    @Test
    public void testBiasLr() {
        //setup the network
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).updater(new Adam(1e-2))
                .biasUpdater(new Adam(0.5)).list()
                .layer(0, new ConvolutionLayer.Builder(5, 5).nOut(5).weightInit(WeightInit.XAVIER)
                        .activation(Activation.RELU).build())
                .layer(1, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
                .layer(2, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
                .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
                        .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
                .setInputType(InputType.convolutional(28, 28, 1)).build();
        org.deeplearning4j.nn.conf.layers.BaseLayer l0 = (BaseLayer) conf.getConf(0).getLayer();
        org.deeplearning4j.nn.conf.layers.BaseLayer l1 = (BaseLayer) conf.getConf(1).getLayer();
        org.deeplearning4j.nn.conf.layers.BaseLayer l2 = (BaseLayer) conf.getConf(2).getLayer();
        org.deeplearning4j.nn.conf.layers.BaseLayer l3 = (BaseLayer) conf.getConf(3).getLayer();
        assertEquals(0.5, ((Adam)l0.getUpdaterByParam("b")).getLearningRate(), 1e-6);
        assertEquals(1e-2, ((Adam)l0.getUpdaterByParam("W")).getLearningRate(), 1e-6);
        assertEquals(0.5, ((Adam)l1.getUpdaterByParam("b")).getLearningRate(), 1e-6);
        assertEquals(1e-2, ((Adam)l1.getUpdaterByParam("W")).getLearningRate(), 1e-6);
        assertEquals(0.5, ((Adam)l2.getUpdaterByParam("b")).getLearningRate(), 1e-6);
        assertEquals(1e-2, ((Adam)l2.getUpdaterByParam("W")).getLearningRate(), 1e-6);
        assertEquals(0.5, ((Adam)l3.getUpdaterByParam("b")).getLearningRate(), 1e-6);
        assertEquals(1e-2, ((Adam)l3.getUpdaterByParam("W")).getLearningRate(), 1e-6);
    }
    // Invalid output-layer combinations (loss function vs activation/nOut)
    // must be rejected iff validateOutputLayerConfig(true).
    @Test
    public void testInvalidOutputLayer(){
        /*
        Test case (invalid configs)
        1. nOut=1 + softmax
        2. mcxent + tanh
        3. xent + softmax
        4. xent + relu
        5. mcxent + sigmoid
        */
        LossFunctions.LossFunction[] lf = new LossFunctions.LossFunction[]{
                LossFunctions.LossFunction.MCXENT, LossFunctions.LossFunction.MCXENT, LossFunctions.LossFunction.XENT,
                LossFunctions.LossFunction.XENT, LossFunctions.LossFunction.MCXENT};
        int[] nOut = new int[]{1, 3, 3, 3, 3};
        Activation[] activations = new Activation[]{Activation.SOFTMAX, Activation.TANH, Activation.SOFTMAX, Activation.RELU, Activation.SIGMOID};
        for( int i=0; i<lf.length; i++ ){
            for(boolean lossLayer : new boolean[]{false, true}) {
                for (boolean validate : new boolean[]{true, false}) {
                    String s = "nOut=" + nOut[i] + ",lossFn=" + lf[i] + ",lossLayer=" + lossLayer + ",validate=" + validate;
                    if(nOut[i] == 1 && lossLayer)
                        continue; //nOuts are not available in loss layer, can't expect it to detect this case
                    try {
                        new NeuralNetConfiguration.Builder()
                                .list()
                                .layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
                                .layer(!lossLayer ? new OutputLayer.Builder().nIn(10).nOut(nOut[i]).activation(activations[i]).lossFunction(lf[i]).build()
                                        : new LossLayer.Builder().activation(activations[i]).lossFunction(lf[i]).build())
                                .validateOutputLayerConfig(validate)
                                .build();
                        if (validate) {
                            fail("Expected exception: " + s);
                        }
                    } catch (DL4JInvalidConfigException e) {
                        if (validate) {
                            assertTrue(s, e.getMessage().toLowerCase().contains("invalid output"));
                        } else {
                            fail("Validation should not be enabled");
                        }
                    }
                }
            }
        }
    }
}
| |
/**
* Copyright (c) 2014 Technische Universitat Wien (TUW), Distributed Systems Group E184 (http://dsg.tuwien.ac.at)
*
* This work was partially supported by the EU FP7 FET SmartSociety (http://www.smart-society-project.eu/).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.ac.tuwien.dsg.smartcom.demo;
import at.ac.tuwien.dsg.smartcom.Communication;
import at.ac.tuwien.dsg.smartcom.SmartCom;
import at.ac.tuwien.dsg.smartcom.SmartComBuilder;
import at.ac.tuwien.dsg.smartcom.adapter.InputPushAdapter;
import at.ac.tuwien.dsg.smartcom.adapter.PushTask;
import at.ac.tuwien.dsg.smartcom.adapter.annotations.Adapter;
import at.ac.tuwien.dsg.smartcom.callback.NotificationCallback;
import at.ac.tuwien.dsg.smartcom.exception.CommunicationException;
import at.ac.tuwien.dsg.smartcom.model.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
public class PerformanceDemo {
private final static int TOTAL_MESSAGES = 100000;
/**
 * Entry point: reads the peer count and concurrent-producer count from
 * stdin, executes the demo 10 times, then writes the collected throughput
 * statistics to a not-yet-existing CSV file under "performance".
 *
 * @param args unused
 * @throws IOException            if reading stdin or writing the CSV fails
 * @throws CommunicationException if the demo middleware fails
 * @throws BrokenBarrierException propagated from runDemo
 * @throws InterruptedException   propagated from runDemo
 */
public static void main(String[] args) throws IOException, CommunicationException, BrokenBarrierException, InterruptedException {
    BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
    System.out.println("Select the number of peers:");
    // parseInt avoids the needless boxing of Integer.valueOf
    int peersAmount = Integer.parseInt(reader.readLine().trim());
    System.out.println("Select the number of concurrent message producers:");
    int messageProducersAmount = Integer.parseInt(reader.readLine().trim());

    /* execute the demo several times, collecting one Statistic per run
     * (run count was previously the magic number 10 repeated in three places) */
    final int runs = 10;
    List<Statistic> stats = new ArrayList<>(runs);
    for (int i = 0; i < runs; i++) {
        stats.add(runDemo(peersAmount, messageProducersAmount, true));
    }

    /* aggregate: longest sample series, maximum and mean of the averages */
    float max = 0;
    float sum = 0;
    int length = 0;
    for (Statistic stat : stats) {
        length = Math.max(stat.samples.size(), length);
        max = Math.max(stat.average, max);
        sum += stat.average;
    }
    System.out.println("***************************");
    System.out.println("Maximum speed: "+max);
    System.out.println("Average speed: "+sum/runs);

    /* choose an output file name that does not already exist */
    // NOTE(review): the hard-coded "\\" separator is Windows-specific; on
    // other platforms this creates a file literally named
    // "performance\performance_..." — confirm whether that is intended.
    File file = new File("performance\\performance_"+peersAmount+"_"+messageProducersAmount+".csv");
    int j = 1;
    while (file.exists()) {
        file = new File("performance\\performance_"+peersAmount+"_"+messageProducersAmount+"_"+j+".csv");
        j++;
    }

    /* write the CSV; try-with-resources fixes the FileWriter leak that
     * occurred if any append threw before the explicit close() */
    try (FileWriter fw = new FileWriter(file)) {
        /* header row: sample indices */
        fw.append("Timestamp:;");
        for (int i = 0; i < length; i++) {
            fw.append(String.valueOf(i)).append(";");
        }
        fw.append(System.lineSeparator());
        /* one row of raw samples per run */
        int i = 0;
        for (Statistic stat : stats) {
            fw.append("Sample ").append((i++)+";");
            for (Float sample : stat.samples) {
                fw.append(sample+"").append(";");
            }
            fw.append(System.lineSeparator());
        }
        fw.append(System.lineSeparator());
        /* per-run averages */
        fw.append("Sample;Average");
        fw.append(System.lineSeparator());
        i = 0;
        for (Statistic stat : stats) {
            fw.append("Sample ").append((i++)+";");
            fw.append(stat.average+"");
            fw.append(System.lineSeparator());
        }
        fw.append(System.lineSeparator());
        /* overall totals */
        fw.append("Total max:;").append(max+"").append(";");
        fw.append(System.lineSeparator());
        fw.append("Total average:;").append((sum/runs)+"").append(";");
    }
}
/**
 * Value holder for the result of one demo run.
 */
private static class Statistic {
    // overall throughput figure for the run
    // (presumably derived from 'samples' — confirm in runDemo)
    float average;
    // individual throughput samples collected during the run
    List<Float> samples;
}
/**
 * Runs one benchmark round: registers {@code peersAmount} peers, launches
 * {@code messageProducersAmount} producer threads in waves, and measures how
 * many request/response round trips per second the middleware sustains.
 * Blocks until all expected responses arrived or progress stalls.
 *
 * @param peersAmount            number of peers to register
 * @param messageProducersAmount number of concurrent producer threads per wave
 * @param debug                  if true, prints a progress line per sampling interval
 * @return the collected throughput samples and the overall average
 */
private static Statistic runDemo(int peersAmount, int messageProducersAmount, boolean debug) throws CommunicationException, InterruptedException, BrokenBarrierException {
    // Reset the shared counters and queues so each round starts clean.
    sentMessages = new AtomicInteger();
    receivedMessages = new AtomicInteger();
    inputMessages = new AtomicInteger();
    queue = new LinkedBlockingDeque<>();
    counterMap = Collections.synchronizedMap(new HashMap<Identifier, AtomicInteger>());
    // Register every peer with a single channel of the "adapter" channel type.
    DemoPeerManager peerManager = new DemoPeerManager();
    for (int i = 0; i < peersAmount; i++) {
        Identifier id = Identifier.peer("peer"+i);
        List<PeerChannelAddress> addresses = new ArrayList<>();
        List<Serializable> parameters = new ArrayList<>(1);
        PeerChannelAddress address = new PeerChannelAddress(id, Identifier.channelType("adapter"), parameters);
        addresses.add(address);
        peerManager.addPeer(id, new PeerInfo(id, DeliveryPolicy.Peer.TO_ALL_CHANNELS, null, addresses), id.getId());
    }
    // Spread TOTAL_MESSAGES over all producers and peers; if that yields less
    // than one message per worker per peer, workers stride over the peers.
    float messages_per_worker_per_peer = ((float) TOTAL_MESSAGES)/((float) messageProducersAmount)/((float)peersAmount);
    int interval = 1;
    if (messages_per_worker_per_peer < 1) {
        interval = (int) (1 / messages_per_worker_per_peer);
    }
    // Total responses expected over all (up to 10) producer waves.
    final int messages = (int) (messageProducersAmount*peersAmount* messages_per_worker_per_peer*10);
    CountDownLatch counter = new CountDownLatch(messages);
    SmartCom smartCom = new SmartComBuilder(peerManager, peerManager, peerManager).create();
    Communication communication = smartCom.getCommunication();
    communication.registerNotificationCallback(new NotificationHandler(counter));
    communication.registerOutputAdapter(OutputAdapter.class);
    Statistic stat = new Statistic();
    stat.samples = new ArrayList<Float>();
    // Barrier synchronizes the producer threads with this coordinator (+1).
    CyclicBarrier barrier = new CyclicBarrier(messageProducersAmount+1);
    // Ten input adapters consume the peer-bound messages concurrently.
    for (int i = 0; i < 10; i++) {
        communication.addPushAdapter(new InputAdapter());
    }
    // First wave of producer threads; i%interval spreads them over the peers.
    for (int i = 0; i < messageProducersAmount; i++) {
        Message.MessageBuilder builder = new Message.MessageBuilder()
                .setType("COMPUTE")
                .setSubtype("REQUEST")
                .setSenderId(Identifier.component("DEMO"))
                .setConversationId(System.nanoTime() + "")
                .setContent("Do some stuff and respond!");
        new Thread(new WorkerThread(builder, barrier, communication, peersAmount, messages_per_worker_per_peer, interval, i%interval)).start();
    }
    System.out.println("START");
    long start = System.currentTimeMillis();
    barrier.await();
    int oldCount = 0;
    int sameCount = 0; // consecutive samples without progress (abort after 3)
    int nextTreshold = messages - (messages/20); // 95% mark triggers the next wave
    int workerRuns = 0;
    // Sample every 10 seconds until all responses arrived or progress stalls.
    while (!counter.await(10, TimeUnit.SECONDS)) {
        int count = (int) counter.getCount();
        long end = System.currentTimeMillis();
        long diff = end - start;
        // Throughput sample in messages/second. NOTE(review): the *2 factor
        // presumably counts both the request and the response leg — confirm.
        float sample = (((float) (messages - count)) / (((float) diff) / 1000f)) * 2;
        stat.samples.add(sample);
        if (debug) {
            System.out.println("Messages left: " + count + "/" + messages + " (" + sample + ") " + inputMessages.get() + "/" + sentMessages.get());
        }
        if (count == oldCount) {
            sameCount++;
            if (sameCount == 3) {
                break; // no progress for ~30s: give up on the remaining messages
            }
        } else {
            sameCount = 0;
        }
        oldCount = count;
        // Launch the next producer wave once 95% of the current threshold is
        // reached; at most 9 extra waves (10 total, matching `messages`).
        if (count < nextTreshold && workerRuns < 9) {
            System.out.println("Adding more messages!");
            for (int i = 0; i < messageProducersAmount; i++) {
                Message.MessageBuilder builder = new Message.MessageBuilder()
                        .setType("COMPUTE")
                        .setSubtype("REQUEST" + workerRuns)
                        .setSenderId(Identifier.component("DEMO"))
                        .setConversationId(System.nanoTime() + "")
                        .setContent("Do some stuff and respond!");
                new Thread(new WorkerThread(builder, barrier, communication, peersAmount, messages_per_worker_per_peer, interval, i%interval)).start();
            }
            barrier.await();
            workerRuns++;
            nextTreshold-=(messages/10);
        }
    }
    long end = System.currentTimeMillis();
    System.out.println("END");
    long diff = end-start;
    // Overall average over the full round (same *2 factor as the samples).
    stat.average = ((float)messages)/(((float)diff)/1000f)*2;
    System.out.println("Duration: "+diff+" milliseconds");
    System.out.println("Messages: "+messages);
    System.out.println("Messages per seconds: "+stat.average);
    smartCom.tearDownSmartCom();
    return stat;
}
/**
 * Producer thread: waits on the shared barrier, then sends
 * {@code messages_per_worker_per_peer} request messages to each peer assigned
 * to it. Peers are partitioned among workers by {@code offset}/{@code interval}
 * striding so concurrent workers do not all target the same peers.
 */
private static class WorkerThread implements Runnable {
    private final Message msg;                         // template message; receiver set per send
    private final CyclicBarrier barrier;               // start-line shared with the coordinator
    private final Communication communication;
    private final int peers;                           // total number of peers
    private final float messages_per_worker_per_peer;  // rounds of sends per peer
    private final int interval;                        // stride between peers handled here
    private final int offset;                          // first peer index handled here

    private WorkerThread(Message.MessageBuilder builder, CyclicBarrier barrier, Communication communication, int peers, float messages_per_worker_per_peer, int interval, int offset) {
        this.interval = interval;
        this.offset = offset;
        this.msg = builder.create();
        this.barrier = barrier;
        this.communication = communication;
        this.peers = peers;
        this.messages_per_worker_per_peer = messages_per_worker_per_peer;
    }

    @Override
    public void run() {
        // Wait until all producers and the coordinator are ready.
        try {
            barrier.await();
        } catch (InterruptedException e) {
            // FIX: restore the interrupt status instead of silently swallowing it,
            // so later blocking calls can still observe the interruption.
            Thread.currentThread().interrupt();
        } catch (BrokenBarrierException ignored) {
            // Barrier was reset/broken; proceed and send anyway, matching the
            // original best-effort behaviour.
        }
        // FIX: dropped the write-only `sent` counter the original incremented
        // but never read.
        for (int j = 0; j < messages_per_worker_per_peer; j++) {
            for (int i = offset; i < peers; i += interval) {
                try {
                    // The template is reused: set the receiver, then send a clone.
                    msg.setReceiverId(Identifier.peer("peer" + i));
                    communication.send(msg.clone());
                } catch (CommunicationException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
/**
 * Counts each COMPUTE/RESPONSE notification against the round's latch so the
 * coordinator knows when all round trips have completed. Every other subtype
 * (e.g. delivery acknowledgements) is ignored.
 */
private static class NotificationHandler implements NotificationCallback {
    private final CountDownLatch counter;

    public NotificationHandler(CountDownLatch counter) {
        this.counter = counter;
    }

    @Override
    public void notify(Message message) {
        // Guard clause: acknowledgements and other non-response messages are discarded.
        if (!"RESPONSE".equals(message.getSubtype())) {
            return;
        }
        counter.countDown();
        receivedMessages.incrementAndGet();
    }
}
// Shared benchmark state; all five are reinitialized at the start of every
// runDemo round.
// Messages pushed towards peers by the OutputAdapter.
private static AtomicInteger sentMessages = new AtomicInteger();
// RESPONSE notifications observed by the NotificationHandler.
private static AtomicInteger receivedMessages = new AtomicInteger();
// Messages consumed by the InputAdapter's push task.
private static AtomicInteger inputMessages = new AtomicInteger();
// Hand-off queue between OutputAdapter (producer) and InputAdapter (consumer).
private static BlockingDeque<Message> queue = new LinkedBlockingDeque<>();
// Per-sender message counters; synchronized wrapper backs the locked
// re-check performed in OutputAdapter.push().
private static Map<Identifier, AtomicInteger> counterMap = Collections.synchronizedMap(new HashMap<Identifier, AtomicInteger>());
@Adapter(name="adapter", stateful = false)
public static class OutputAdapter implements at.ac.tuwien.dsg.smartcom.adapter.OutputAdapter {
    /**
     * Receives a peer-bound message from the middleware: bumps the per-sender
     * and global counters, then hands the message to the shared queue for the
     * InputAdapter to turn into a RESPONSE.
     */
    @Override
    public void push(Message message, PeerChannelAddress address) {
        try {
            // Double-checked lookup on the synchronized map: the unlocked get is
            // the fast path; the locked re-check prevents two threads from both
            // inserting a counter for the same sender.
            AtomicInteger atomicInteger = counterMap.get(message.getSenderId());
            if (atomicInteger == null) {
                synchronized (counterMap) {
                    atomicInteger = counterMap.get(message.getSenderId());
                    if (atomicInteger == null) {
                        atomicInteger = new AtomicInteger(0);
                        counterMap.put(message.getSenderId(), atomicInteger);
                    }
                }
            }
            atomicInteger.incrementAndGet();
            sentMessages.incrementAndGet();
            queue.push(message);
        } catch (Exception e) {
            // best-effort: a failed push only costs one sample, never the round
            e.printStackTrace();
        }
    }
}
/**
 * Push adapter that drains the shared {@code queue} and publishes a
 * COMPUTE/RESPONSE message (same conversation id) for each entry, simulating
 * the peers' replies.
 */
public static class InputAdapter extends InputPushAdapter {
    // FIX: volatile — cleanUp() clears this flag from a different thread than
    // the one running the push task loop, so it needs a visibility guarantee.
    private volatile boolean run = true;

    @Override
    protected void cleanUp() {
        run = false;
    }

    @Override
    public void init() {
        schedule(new PushTask() {
            @Override
            public void run() {
                while (run) {
                    try {
                        Message message = queue.take();
                        inputMessages.incrementAndGet();
                        publishMessage(new Message.MessageBuilder()
                                .setType("COMPUTE")
                                .setSubtype("RESPONSE")
                                .setSenderId(Identifier.adapter("adapter"))
                                .setConversationId(message.getConversationId())
                                .setContent("Do some stuff and respond!")
                                .create());
                    } catch (InterruptedException e) {
                        // FIX: restore the interrupt status before leaving the loop.
                        Thread.currentThread().interrupt();
                        run = false;
                    }
                }
            }
        });
    }
}
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.sdklib.build;
import com.google.devtools.build.singlejar.ZipCombiner;
import com.google.devtools.build.singlejar.ZipEntryFilter;
import com.android.SdkConstants;
import com.android.sdklib.build.ApkBuilder.FileEntry;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.regex.Pattern;
/**
 * Command line APK builder with signing support.
 */
public final class ApkBuilderMain {

    /** Matches file names with a {@code .jar} extension, case-insensitively. */
    private final static Pattern PATTERN_JAR_EXT = Pattern.compile("^.+\\.jar$",
            Pattern.CASE_INSENSITIVE);

    /**
     * Main method. This is meant to be called from the command line through an exec.
     * <p/>WARNING: this will call {@link System#exit(int)} if anything goes wrong.
     * @param args command line arguments.
     */
    public static void main(String[] args) {
        if (args.length < 1) {
            printUsageAndQuit();
        }
        try {
            File outApk = new File(args[0]);
            File dexFile = null;
            ArrayList<File> zipArchives = new ArrayList<File>();
            ArrayList<File> sourceFolders = new ArrayList<File>();
            ArrayList<File> jarFiles = new ArrayList<File>();
            ArrayList<File> nativeFolders = new ArrayList<File>();
            boolean verbose = false;
            boolean signed = true;
            boolean debug = false;
            String keystorePath = null;
            int index = 1;
            // FIX: a while loop (instead of the original do/while) no longer reads
            // past the end of args when only the output archive is given; the
            // missing-resource check below then reports that case cleanly.
            while (index < args.length) {
                String argument = args[index++];
                if ("-v".equals(argument)) {
                    verbose = true;
                } else if ("-d".equals(argument)) {
                    debug = true;
                } else if ("-u".equals(argument)) {
                    signed = false;
                } else if ("-ks".equals(argument)) {
                    // bazel-specific option
                    if (index == args.length) {
                        printAndExit("Missing value for -ks");
                    }
                    keystorePath = args[index++];
                } else if ("-z".equals(argument)) {
                    // quick check on the next argument.
                    if (index == args.length) {
                        printAndExit("Missing value for -z");
                    }
                    zipArchives.add(new File(args[index++]));
                } else if ("-f".equals(argument)) {
                    if (dexFile != null) {
                        // can't have more than one dex file.
                        printAndExit("Can't have more than one dex file (-f)");
                    }
                    // quick check on the next argument.
                    if (index == args.length) {
                        printAndExit("Missing value for -f");
                    }
                    dexFile = new File(args[index++]);
                } else if ("-rf".equals(argument)) {
                    // quick check on the next argument.
                    if (index == args.length) {
                        printAndExit("Missing value for -rf");
                    }
                    sourceFolders.add(new File(args[index++]));
                } else if ("-rj".equals(argument)) {
                    // quick check on the next argument.
                    if (index == args.length) {
                        printAndExit("Missing value for -rj");
                    }
                    jarFiles.add(new File(args[index++]));
                } else if ("-nf".equals(argument)) {
                    // quick check on the next argument.
                    if (index == args.length) {
                        printAndExit("Missing value for -nf");
                    }
                    nativeFolders.add(new File(args[index++]));
                } else if ("-storetype".equals(argument)) {
                    // quick check on the next argument.
                    if (index == args.length) {
                        printAndExit("Missing value for -storetype");
                    }
                    // FIX: consume the value so the next iteration does not treat it
                    // as an unknown argument. FIXME: the store type itself is still
                    // not forwarded to the signer by this bazel-modified builder.
                    index++;
                } else {
                    printAndExit("Unknown argument: " + argument);
                }
            }
            if (zipArchives.isEmpty()) {
                printAndExit("No zip archive, there must be one for the resources");
            }
            // Signed builds without an explicit keystore fall back to the debug key.
            if (signed && keystorePath == null) {
                keystorePath = ApkBuilder.getDebugKeystore();
            }
            // create the builder with the basic files.
            ApkBuilder builder = new ApkBuilder(outApk, zipArchives.get(0), dexFile,
                    signed ? keystorePath : null,
                    verbose ? System.out : null);
            builder.setDebugMode(debug);
            // add the rest of the files.
            // first zip Archive was used in the constructor.
            for (int i = 1 ; i < zipArchives.size() ; i++) {
                builder.addZipFile(zipArchives.get(i));
            }
            for (File sourceFolder : sourceFolders) {
                builder.addSourceFolder(sourceFolder);
            }
            for (File jarFile : jarFiles) {
                if (jarFile.isDirectory()) {
                    // -rj with a directory: add every *.jar file directly inside it.
                    String[] filenames = jarFile.list(new FilenameFilter() {
                        public boolean accept(File dir, String name) {
                            return PATTERN_JAR_EXT.matcher(name).matches();
                        }
                    });
                    // FIX: File.list() returns null when the directory cannot be
                    // read; guard against the NPE and treat it as "no jars found".
                    if (filenames != null) {
                        for (String filename : filenames) {
                            builder.addResourcesFromJar(new File(jarFile, filename));
                        }
                    }
                } else {
                    builder.addResourcesFromJar(jarFile);
                }
            }
            for (File nativeFolder : nativeFolders) {
                builder.addNativeLibraries(nativeFolder);
            }
            // seal the apk
            builder.sealApk();
            // ensure hermeticity, bazel specific
            clearTimeStamps(outApk);
        } catch (ApkCreationException e) {
            printAndExit(e.getMessage());
        } catch (DuplicateFileException e) {
            printAndExit(String.format(
                    "Found duplicate file for APK: %1$s\nOrigin 1: %2$s\nOrigin 2: %3$s",
                    e.getArchivePath(), e.getFile1(), e.getFile2()));
        } catch (SealedApkException e) {
            printAndExit(e.getMessage());
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Rewrites the sealed APK with every zip entry timestamp forced to the DOS
     * epoch so the output is bit-for-bit reproducible (bazel hermeticity).
     *
     * @param outApk the sealed APK; replaced in place
     * @throws IOException if the APK cannot be renamed or recombined
     */
    private static void clearTimeStamps(File outApk) throws IOException {
        File renamed = new File(outApk.getPath() + ".nonhermetic");
        if (!outApk.renameTo(renamed)) {
            throw new IOException("could not rename: " + outApk);
        }
        OutputStream out = new FileOutputStream(outApk);
        ZipCombiner combiner = new ZipCombiner(
                new ZipEntryFilter() {
                    @Override
                    public void accept(String filename, StrategyCallback callback)
                            throws IOException {
                        // copy every entry unchanged, stamped with the DOS epoch
                        callback.copy(ZipCombiner.DOS_EPOCH);
                    }
                }, out);
        combiner.addZip(renamed);
        combiner.close(); // closes its outstream.
        renamed.deleteOnExit();
    }

    /** Prints the command line usage help and exits with status 1. */
    private static void printUsageAndQuit() {
        // 80 cols marker:  01234567890123456789012345678901234567890123456789012345678901234567890123456789
        System.err.println("A command line tool to package an Android application from various sources.");
        // FIX: usage synopsis now lists -d and no longer repeats [-rj -input-path].
        System.err.println("Usage: apkbuilder <out archive> [-v][-d][-u][-storetype STORE_TYPE] [-z inputzip]");
        System.err.println("                  [-f inputfile] [-rf input-folder] [-rj input-path]");
        System.err.println("                  [-nf native-folder]");
        System.err.println("");
        System.err.println("NOTE: This is a version of the ApkBuilder tool that comes "
                + "with the Android sdk modified for Bazel.");
        System.err.println("");
        System.err.println("    -v      Verbose.");
        System.err.println("    -d      Debug Mode: Includes debug files in the APK file.");
        System.err.println("    -u      Creates an unsigned package.");
        System.err.println("    -storetype Forces the KeyStore type. If omitted the default is used.");
        System.err.println("");
        System.err.println("    -z      Followed by the path to a zip archive.");
        System.err.println("            Adds the content of the application package.");
        System.err.println("");
        System.err.println("    -f      Followed by the path to a file.");
        System.err.println("            Adds the file to the application package.");
        System.err.println("");
        System.err.println("    -rf     Followed by the path to a source folder.");
        System.err.println("            Adds the java resources found in that folder to the application");
        System.err.println("            package, while keeping their path relative to the source folder.");
        System.err.println("");
        System.err.println("    -rj     Followed by the path to a jar file or a folder containing");
        System.err.println("            jar files.");
        System.err.println("            Adds the java resources found in the jar file(s) to the application");
        System.err.println("            package.");
        System.err.println("");
        System.err.println("    -nf     Followed by the root folder containing native libraries to");
        System.err.println("            include in the application package.");
        System.exit(1);
    }

    /** Prints each message on its own line to stderr and exits with status 1. */
    private static void printAndExit(String... messages) {
        for (String message : messages) {
            System.err.println(message);
        }
        System.exit(1);
    }

    /** Utility class: no instances. */
    private ApkBuilderMain() {
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.io.output;
import java.io.File;
import java.io.IOException;
import java.io.Writer;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.testtools.FileBasedTestCase;
/**
 * Tests that files really lock, although no writing is done as
 * the locking is tested only on construction.
 *
 * @version $Revision: 806007 $ $Date: 2009-08-19 19:03:11 -0400 (Wed, 19 Aug 2009) $
 */
public class LockableFileWriterTest extends FileBasedTestCase {
    // File the writers under test write to.
    private File file;
    // Default lock directory (java.io.tmpdir) and the lock file expected there.
    private File lockDir;
    private File lockFile;
    // Alternate lock directory (the test directory) and its expected lock file.
    private File altLockDir;
    private File altLockFile;

    public LockableFileWriterTest(String name) {
        super(name);
    }

    /** Computes the target file and both candidate lock-file locations before each test. */
    @Override
    public void setUp() {
        file = new File(getTestDirectory(), "testlockfile");
        lockDir = new File(System.getProperty("java.io.tmpdir"));
        lockFile = new File(lockDir, file.getName() + ".lck");
        altLockDir = getTestDirectory();
        altLockFile = new File(altLockDir, file.getName() + ".lck");
    }

    /** Removes the target file and any lock files left behind by a test. */
    @Override
    public void tearDown() {
        file.delete();
        lockFile.delete();
        altLockFile.delete();
    }

    //-----------------------------------------------------------------------
    /** A second and third writer must fail while the first still holds the lock. */
    public void testFileLocked() throws IOException {
        LockableFileWriter lfw1 = null;
        LockableFileWriter lfw2 = null;
        LockableFileWriter lfw3 = null;
        try {
            // open a valid lockable writer
            lfw1 = new LockableFileWriter(file);
            assertEquals(true, file.exists());
            assertEquals(true, lockFile.exists());
            // try to open a second writer
            try {
                lfw2 = new LockableFileWriter(file);
                fail("Somehow able to open a locked file. ");
            } catch(IOException ioe) {
                // the message prefix is part of LockableFileWriter's contract here
                String msg = ioe.getMessage();
                assertTrue( "Exception message does not start correctly. ",
                        msg.startsWith("Can't write file, lock ") );
                assertEquals(true, file.exists());
                assertEquals(true, lockFile.exists());
            }
            // try to open a third writer
            try {
                lfw3 = new LockableFileWriter(file);
                fail("Somehow able to open a locked file. ");
            } catch(IOException ioe) {
                String msg = ioe.getMessage();
                assertTrue( "Exception message does not start correctly. ",
                        msg.startsWith("Can't write file, lock ") );
                assertEquals(true, file.exists());
                assertEquals(true, lockFile.exists());
            }
        } finally {
            IOUtils.closeQuietly(lfw1);
            IOUtils.closeQuietly(lfw2);
            IOUtils.closeQuietly(lfw3);
        }
        // closing the first writer must release the lock file
        assertEquals(true, file.exists());
        assertEquals(false, lockFile.exists());
    }

    //-----------------------------------------------------------------------
    /** With an alternate lock directory, the lock file appears (and locks) there. */
    public void testAlternateLockDir() throws IOException {
        LockableFileWriter lfw1 = null;
        LockableFileWriter lfw2 = null;
        try {
            // open a valid lockable writer using the alternate lock directory
            lfw1 = new LockableFileWriter(file, true, altLockDir.getAbsolutePath());
            assertEquals(true, file.exists());
            assertEquals(true, altLockFile.exists());
            // try to open a second writer
            try {
                lfw2 = new LockableFileWriter(file, true, altLockDir.getAbsolutePath());
                fail("Somehow able to open a locked file. ");
            } catch(IOException ioe) {
                String msg = ioe.getMessage();
                assertTrue( "Exception message does not start correctly. ",
                        msg.startsWith("Can't write file, lock ") );
                assertEquals(true, file.exists());
                assertEquals(true, altLockFile.exists());
            }
        } finally {
            IOUtils.closeQuietly(lfw1);
            IOUtils.closeQuietly(lfw2);
        }
        assertEquals(true, file.exists());
        assertEquals(false, altLockFile.exists());
    }

    //-----------------------------------------------------------------------
    /** Sequential writers on the same file succeed once the previous lock is released. */
    public void testFileNotLocked() throws IOException {
        // open a valid lockable writer
        LockableFileWriter lfw1 = null;
        try {
            lfw1 = new LockableFileWriter(file);
            assertEquals(true, file.exists());
            assertEquals(true, lockFile.exists());
        } finally {
            IOUtils.closeQuietly(lfw1);
        }
        assertEquals(true, file.exists());
        assertEquals(false, lockFile.exists());
        // open a second valid writer on the same file
        LockableFileWriter lfw2 = null;
        try {
            lfw2 = new LockableFileWriter(file);
            assertEquals(true, file.exists());
            assertEquals(true, lockFile.exists());
        } finally {
            IOUtils.closeQuietly(lfw2);
        }
        assertEquals(true, file.exists());
        assertEquals(false, lockFile.exists());
    }

    //-----------------------------------------------------------------------
    /** An unsupported encoding must fail without creating the file or the lock. */
    public void testConstructor_File_encoding_badEncoding() {
        Writer writer = null;
        try {
            writer = new LockableFileWriter(file, "BAD-ENCODE");
            fail();
        } catch (IOException ex) {
            // expected
            assertEquals(false, file.exists());
            assertEquals(false, lockFile.exists());
        } finally {
            IOUtils.closeQuietly(writer);
        }
        assertEquals(false, file.exists());
        assertEquals(false, lockFile.exists());
    }

    //-----------------------------------------------------------------------
    /** Passing a directory must fail without creating the file or the lock. */
    public void testConstructor_File_directory() {
        Writer writer = null;
        try {
            writer = new LockableFileWriter(getTestDirectory());
            fail();
        } catch (IOException ex) {
            // expected
            assertEquals(false, file.exists());
            assertEquals(false, lockFile.exists());
        } finally {
            IOUtils.closeQuietly(writer);
        }
        assertEquals(false, file.exists());
        assertEquals(false, lockFile.exists());
    }

    //-----------------------------------------------------------------------
    /** A null File argument must throw NPE without creating anything. */
    public void testConstructor_File_nullFile() throws IOException {
        Writer writer = null;
        try {
            writer = new LockableFileWriter((File) null);
            fail();
        } catch (NullPointerException ex) {
            // expected
            assertEquals(false, file.exists());
            assertEquals(false, lockFile.exists());
        } finally {
            IOUtils.closeQuietly(writer);
        }
        assertEquals(false, file.exists());
        assertEquals(false, lockFile.exists());
    }

    //-----------------------------------------------------------------------
    /** A null file-name argument must throw NPE without creating anything. */
    public void testConstructor_fileName_nullFile() throws IOException {
        Writer writer = null;
        try {
            writer = new LockableFileWriter((String) null);
            fail();
        } catch (NullPointerException ex) {
            // expected
            assertEquals(false, file.exists());
            assertEquals(false, lockFile.exists());
        } finally {
            IOUtils.closeQuietly(writer);
        }
        assertEquals(false, file.exists());
        assertEquals(false, lockFile.exists());
    }
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.BoundType.CLOSED;
import static com.google.common.collect.BoundType.OPEN;
import static com.google.common.collect.DiscreteDomains.integers;
import static com.google.common.collect.testing.features.CollectionFeature.ALLOWS_NULL_QUERIES;
import static com.google.common.collect.testing.features.CollectionFeature.KNOWN_ORDER;
import static com.google.common.collect.testing.features.CollectionFeature.NON_STANDARD_TOSTRING;
import static com.google.common.collect.testing.features.CollectionFeature.RESTRICTS_ELEMENTS;
import static com.google.common.collect.testing.testers.NavigableSetNavigationTester.getHoleMethods;
import static com.google.common.testing.SerializableTester.reserialize;
import static com.google.common.testing.SerializableTester.reserializeAndAssert;
import static org.junit.contrib.truth.Truth.ASSERT;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.testing.NavigableSetTestSuiteBuilder;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.google.SetGenerators.ContiguousSetDescendingGenerator;
import com.google.common.collect.testing.google.SetGenerators.ContiguousSetGenerator;
import com.google.common.collect.testing.google.SetGenerators.ContiguousSetHeadsetGenerator;
import com.google.common.collect.testing.google.SetGenerators.ContiguousSetSubsetGenerator;
import com.google.common.collect.testing.google.SetGenerators.ContiguousSetTailsetGenerator;
import com.google.common.testing.EqualsTester;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import java.util.Set;
/**
* @author Gregory Kick
*/
@GwtCompatible(emulated = true)
public class ContiguousSetTest extends TestCase {
// A DiscreteDomain that delegates every operation to integers() but is a
// distinct instance — used to verify that ContiguousSet equality depends on
// the elements, not on the identity of the domain object.
private static DiscreteDomain<Integer> NOT_EQUAL_TO_INTEGERS = new DiscreteDomain<Integer>() {
    @Override public Integer next(Integer value) {
        return integers().next(value);
    }
    @Override public Integer previous(Integer value) {
        return integers().previous(value);
    }
    @Override public long distance(Integer start, Integer end) {
        return integers().distance(start, end);
    }
    @Override public Integer minValue() {
        return integers().minValue();
    }
    @Override public Integer maxValue() {
        return integers().maxValue();
    }
};
/**
 * Sets over the same elements are equal regardless of how the range bounds
 * were expressed, which domain instance was used, or whether the set is a
 * plain ImmutableSortedSet.
 */
public void testEquals() {
    new EqualsTester()
        .addEqualityGroup(
            ContiguousSet.create(Ranges.closed(1, 3), integers()),
            ContiguousSet.create(Ranges.closedOpen(1, 4), integers()),
            ContiguousSet.create(Ranges.openClosed(0, 3), integers()),
            ContiguousSet.create(Ranges.open(0, 4), integers()),
            ContiguousSet.create(Ranges.closed(1, 3), NOT_EQUAL_TO_INTEGERS),
            ContiguousSet.create(Ranges.closedOpen(1, 4), NOT_EQUAL_TO_INTEGERS),
            ContiguousSet.create(Ranges.openClosed(0, 3), NOT_EQUAL_TO_INTEGERS),
            ContiguousSet.create(Ranges.open(0, 4), NOT_EQUAL_TO_INTEGERS),
            ImmutableSortedSet.of(1, 2, 3))
        .testEquals();
    // not testing hashCode for these because it takes forever to compute
    assertEquals(
        ContiguousSet.create(Ranges.closed(Integer.MIN_VALUE, Integer.MAX_VALUE), integers()),
        ContiguousSet.create(Ranges.<Integer>all(), integers()));
    assertEquals(
        ContiguousSet.create(Ranges.closed(Integer.MIN_VALUE, Integer.MAX_VALUE), integers()),
        ContiguousSet.create(Ranges.atLeast(Integer.MIN_VALUE), integers()));
    assertEquals(
        ContiguousSet.create(Ranges.closed(Integer.MIN_VALUE, Integer.MAX_VALUE), integers()),
        ContiguousSet.create(Ranges.atMost(Integer.MAX_VALUE), integers()));
}
/** Empty, regular, and all-integers sets must all survive a serialization round trip. */
@GwtIncompatible("SerializableTester")
public void testSerialization() {
    ContiguousSet<Integer> empty = ContiguousSet.create(Ranges.closedOpen(1, 1), integers());
    assertTrue(empty instanceof EmptyContiguousSet);
    reserializeAndAssert(empty);
    ContiguousSet<Integer> regular = ContiguousSet.create(Ranges.closed(1, 3), integers());
    assertTrue(regular instanceof RegularContiguousSet);
    reserializeAndAssert(regular);
    /*
     * Make sure that we're using RegularContiguousSet.SerializedForm and not
     * ImmutableSet.SerializedForm, which would be enormous.
     */
    ContiguousSet<Integer> enormous = ContiguousSet.create(Ranges.<Integer>all(), integers());
    assertTrue(enormous instanceof RegularContiguousSet);
    // We can't use reserializeAndAssert because it calls hashCode, which is enormously slow.
    ContiguousSet<Integer> enormousReserialized = reserialize(enormous);
    assertEquals(enormous, enormousReserialized);
}
/** headSet: exclusive by default, inclusive with the boolean overload; clamps at the range ends. */
public void testHeadSet() {
    ImmutableSortedSet<Integer> set = ContiguousSet.create(Ranges.closed(1, 3), integers());
    ASSERT.that(set.headSet(1)).isEmpty();
    ASSERT.that(set.headSet(2)).hasContentsInOrder(1);
    ASSERT.that(set.headSet(3)).hasContentsInOrder(1, 2);
    ASSERT.that(set.headSet(4)).hasContentsInOrder(1, 2, 3);
    ASSERT.that(set.headSet(Integer.MAX_VALUE)).hasContentsInOrder(1, 2, 3);
    ASSERT.that(set.headSet(1, true)).hasContentsInOrder(1);
    ASSERT.that(set.headSet(2, true)).hasContentsInOrder(1, 2);
    ASSERT.that(set.headSet(3, true)).hasContentsInOrder(1, 2, 3);
    ASSERT.that(set.headSet(4, true)).hasContentsInOrder(1, 2, 3);
    ASSERT.that(set.headSet(Integer.MAX_VALUE, true)).hasContentsInOrder(1, 2, 3);
}

/** A headSet bound below the range yields the empty set, not an exception. */
public void testHeadSet_tooSmall() {
    ASSERT.that(ContiguousSet.create(Ranges.closed(1, 3), integers()).headSet(0)).isEmpty();
}

/** tailSet: inclusive by default, exclusive with the boolean overload; clamps at the range ends. */
public void testTailSet() {
    ImmutableSortedSet<Integer> set = ContiguousSet.create(Ranges.closed(1, 3), integers());
    ASSERT.that(set.tailSet(Integer.MIN_VALUE)).hasContentsInOrder(1, 2, 3);
    ASSERT.that(set.tailSet(1)).hasContentsInOrder(1, 2, 3);
    ASSERT.that(set.tailSet(2)).hasContentsInOrder(2, 3);
    ASSERT.that(set.tailSet(3)).hasContentsInOrder(3);
    ASSERT.that(set.tailSet(Integer.MIN_VALUE, false)).hasContentsInOrder(1, 2, 3);
    ASSERT.that(set.tailSet(1, false)).hasContentsInOrder(2, 3);
    ASSERT.that(set.tailSet(2, false)).hasContentsInOrder(3);
    ASSERT.that(set.tailSet(3, false)).isEmpty();
}

/** A tailSet bound above the range yields the empty set, not an exception. */
public void testTailSet_tooLarge() {
    ASSERT.that(ContiguousSet.create(Ranges.closed(1, 3), integers()).tailSet(4)).isEmpty();
}
/** subSet: default form is [from, to); four-arg form controls each bound; clamps at the range ends. */
public void testSubSet() {
    ImmutableSortedSet<Integer> set = ContiguousSet.create(Ranges.closed(1, 3), integers());
    ASSERT.that(set.subSet(1, 4)).hasContentsInOrder(1, 2, 3);
    ASSERT.that(set.subSet(2, 4)).hasContentsInOrder(2, 3);
    ASSERT.that(set.subSet(3, 4)).hasContentsInOrder(3);
    ASSERT.that(set.subSet(3, 3)).isEmpty();
    ASSERT.that(set.subSet(2, 3)).hasContentsInOrder(2);
    ASSERT.that(set.subSet(1, 3)).hasContentsInOrder(1, 2);
    ASSERT.that(set.subSet(1, 2)).hasContentsInOrder(1);
    ASSERT.that(set.subSet(2, 2)).isEmpty();
    ASSERT.that(set.subSet(Integer.MIN_VALUE, Integer.MAX_VALUE)).hasContentsInOrder(1, 2, 3);
    ASSERT.that(set.subSet(1, true, 3, true)).hasContentsInOrder(1, 2, 3);
    ASSERT.that(set.subSet(1, false, 3, true)).hasContentsInOrder(2, 3);
    ASSERT.that(set.subSet(1, true, 3, false)).hasContentsInOrder(1, 2);
    ASSERT.that(set.subSet(1, false, 3, false)).hasContentsInOrder(2);
}

/** from > to must be rejected with IllegalArgumentException. */
public void testSubSet_outOfOrder() {
    ImmutableSortedSet<Integer> set = ContiguousSet.create(Ranges.closed(1, 3), integers());
    try {
        set.subSet(3, 2);
        fail();
    } catch (IllegalArgumentException expected) {}
}

/** A subSet entirely above the range yields the empty set. */
public void testSubSet_tooLarge() {
    ASSERT.that(ContiguousSet.create(Ranges.closed(1, 3), integers()).subSet(4, 6)).isEmpty();
}

/** A subSet entirely below the range yields the empty set. */
public void testSubSet_tooSmall() {
    ASSERT.that(ContiguousSet.create(Ranges.closed(1, 3), integers()).subSet(-1, 0)).isEmpty();
}
/** first() is the smallest element, independent of how the range was expressed. */
public void testFirst() {
    assertEquals(1, ContiguousSet.create(Ranges.closed(1, 3), integers()).first().intValue());
    assertEquals(1, ContiguousSet.create(Ranges.open(0, 4), integers()).first().intValue());
    assertEquals(Integer.MIN_VALUE,
        ContiguousSet.create(Ranges.<Integer>all(), integers()).first().intValue());
}

/** last() is the largest element, independent of how the range was expressed. */
public void testLast() {
    assertEquals(3, ContiguousSet.create(Ranges.closed(1, 3), integers()).last().intValue());
    assertEquals(3, ContiguousSet.create(Ranges.open(0, 4), integers()).last().intValue());
    assertEquals(Integer.MAX_VALUE,
        ContiguousSet.create(Ranges.<Integer>all(), integers()).last().intValue());
}
/** contains() honours the range bounds and rejects objects of the wrong type. */
public void testContains() {
    ImmutableSortedSet<Integer> set = ContiguousSet.create(Ranges.closed(1, 3), integers());
    assertFalse(set.contains(0));
    assertTrue(set.contains(1));
    assertTrue(set.contains(2));
    assertTrue(set.contains(3));
    assertFalse(set.contains(4));
    // same elements via an open range
    set = ContiguousSet.create(Ranges.open(0, 4), integers());
    assertFalse(set.contains(0));
    assertTrue(set.contains(1));
    assertTrue(set.contains(2));
    assertTrue(set.contains(3));
    assertFalse(set.contains(4));
    assertFalse(set.contains("blah"));
}

/** containsAll() over every subset of the elements, with and without an outsider. */
public void testContainsAll() {
    ImmutableSortedSet<Integer> set = ContiguousSet.create(Ranges.closed(1, 3), integers());
    for (Set<Integer> subset : Sets.powerSet(ImmutableSet.of(1, 2, 3))) {
        assertTrue(set.containsAll(subset));
    }
    for (Set<Integer> subset : Sets.powerSet(ImmutableSet.of(1, 2, 3))) {
        assertFalse(set.containsAll(Sets.union(subset, ImmutableSet.of(9))));
    }
    assertFalse(set.containsAll(ImmutableSet.of("blah")));
}
public void testRange() {
assertEquals(Ranges.closed(1, 3),
ContiguousSet.create(Ranges.closed(1, 3), integers()).range());
assertEquals(Ranges.closed(1, 3),
ContiguousSet.create(Ranges.closedOpen(1, 4), integers()).range());
assertEquals(Ranges.closed(1, 3), ContiguousSet.create(Ranges.open(0, 4), integers()).range());
assertEquals(Ranges.closed(1, 3),
ContiguousSet.create(Ranges.openClosed(0, 3), integers()).range());
assertEquals(Ranges.openClosed(0, 3),
ContiguousSet.create(Ranges.closed(1, 3), integers()).range(OPEN, CLOSED));
assertEquals(Ranges.openClosed(0, 3),
ContiguousSet.create(Ranges.closedOpen(1, 4), integers()).range(OPEN, CLOSED));
assertEquals(Ranges.openClosed(0, 3),
ContiguousSet.create(Ranges.open(0, 4), integers()).range(OPEN, CLOSED));
assertEquals(Ranges.openClosed(0, 3),
ContiguousSet.create(Ranges.openClosed(0, 3), integers()).range(OPEN, CLOSED));
assertEquals(Ranges.open(0, 4),
ContiguousSet.create(Ranges.closed(1, 3), integers()).range(OPEN, OPEN));
assertEquals(Ranges.open(0, 4),
ContiguousSet.create(Ranges.closedOpen(1, 4), integers()).range(OPEN, OPEN));
assertEquals(Ranges.open(0, 4),
ContiguousSet.create(Ranges.open(0, 4), integers()).range(OPEN, OPEN));
assertEquals(Ranges.open(0, 4),
ContiguousSet.create(Ranges.openClosed(0, 3), integers()).range(OPEN, OPEN));
assertEquals(Ranges.closedOpen(1, 4),
ContiguousSet.create(Ranges.closed(1, 3), integers()).range(CLOSED, OPEN));
assertEquals(Ranges.closedOpen(1, 4),
ContiguousSet.create(Ranges.closedOpen(1, 4), integers()).range(CLOSED, OPEN));
assertEquals(Ranges.closedOpen(1, 4),
ContiguousSet.create(Ranges.open(0, 4), integers()).range(CLOSED, OPEN));
assertEquals(Ranges.closedOpen(1, 4),
ContiguousSet.create(Ranges.openClosed(0, 3), integers()).range(CLOSED, OPEN));
}
// For a set spanning the whole Integer domain, closed endpoints saturate at
// MIN_VALUE/MAX_VALUE while open endpoints become unbounded.
public void testRange_unboundedRanges() {
ContiguousSet<Integer> allIntegers = ContiguousSet.create(Ranges.<Integer>all(), integers());
assertEquals(Ranges.closed(Integer.MIN_VALUE, Integer.MAX_VALUE), allIntegers.range());
assertEquals(Ranges.atLeast(Integer.MIN_VALUE), allIntegers.range(CLOSED, OPEN));
assertEquals(Ranges.all(), allIntegers.range(OPEN, OPEN));
assertEquals(Ranges.atMost(Integer.MAX_VALUE), allIntegers.range(OPEN, CLOSED));
}
// Intersections with no common elements come back empty, whether one operand
// is itself empty or the two ranges are simply disjoint.
public void testIntersection_empty() {
ContiguousSet<Integer> oneToThree = ContiguousSet.create(Ranges.closed(1, 3), integers());
ContiguousSet<Integer> empty = ContiguousSet.create(Ranges.closedOpen(2, 2), integers());
assertEquals(ImmutableSet.of(), oneToThree.intersection(empty));
assertEquals(ImmutableSet.of(), empty.intersection(oneToThree));
ContiguousSet<Integer> negatives = ContiguousSet.create(Ranges.closed(-5, -1), integers());
ContiguousSet<Integer> positives = ContiguousSet.create(Ranges.open(3, 64), integers());
assertEquals(ImmutableSet.of(), negatives.intersection(positives));
}
// Intersecting with a strict superset yields the original set, from either
// side of the call.
public void testIntersection() {
ContiguousSet<Integer> oneToThree = ContiguousSet.create(Ranges.closed(1, 3), integers());
ContiguousSet<Integer> superset = ContiguousSet.create(Ranges.open(-1, 4), integers());
assertEquals(ImmutableSet.of(1, 2, 3), superset.intersection(oneToThree));
assertEquals(ImmutableSet.of(1, 2, 3), oneToThree.intersection(superset));
}
// Dynamically built suites cannot run under GWT, hence the exclusion.
@GwtIncompatible("suite")
public static class BuiltTests extends TestCase {
// Builds a NavigableSet conformance suite over five views of a ContiguousSet:
// the set itself, a headSet, a tailSet, a subSet and the descendingSet.
// All five runs share the same feature flags and suppress the methods
// returned by getHoleMethods() (defined elsewhere in this test file).
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTest(NavigableSetTestSuiteBuilder.using(
    new ContiguousSetGenerator())
    .named("Range.asSet")
    .withFeatures(CollectionSize.ANY, KNOWN_ORDER, ALLOWS_NULL_QUERIES,
        NON_STANDARD_TOSTRING, RESTRICTS_ELEMENTS)
    .suppressing(getHoleMethods())
    .createTestSuite());
suite.addTest(NavigableSetTestSuiteBuilder.using(
    new ContiguousSetHeadsetGenerator())
    .named("Range.asSet, headset")
    .withFeatures(CollectionSize.ANY, KNOWN_ORDER, ALLOWS_NULL_QUERIES,
        NON_STANDARD_TOSTRING, RESTRICTS_ELEMENTS)
    .suppressing(getHoleMethods())
    .createTestSuite());
suite.addTest(NavigableSetTestSuiteBuilder.using(
    new ContiguousSetTailsetGenerator())
    .named("Range.asSet, tailset")
    .withFeatures(CollectionSize.ANY, KNOWN_ORDER, ALLOWS_NULL_QUERIES,
        NON_STANDARD_TOSTRING, RESTRICTS_ELEMENTS)
    .suppressing(getHoleMethods())
    .createTestSuite());
suite.addTest(NavigableSetTestSuiteBuilder.using(
    new ContiguousSetSubsetGenerator())
    .named("Range.asSet, subset")
    .withFeatures(CollectionSize.ANY, KNOWN_ORDER, ALLOWS_NULL_QUERIES,
        NON_STANDARD_TOSTRING, RESTRICTS_ELEMENTS)
    .suppressing(getHoleMethods())
    .createTestSuite());
suite.addTest(NavigableSetTestSuiteBuilder.using(
    new ContiguousSetDescendingGenerator())
    .named("Range.asSet.descendingSet")
    .withFeatures(CollectionSize.ANY, KNOWN_ORDER, ALLOWS_NULL_QUERIES,
        NON_STANDARD_TOSTRING, RESTRICTS_ELEMENTS)
    .suppressing(getHoleMethods())
    .createTestSuite());
return suite;
}
}
}
| |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vfs;
import com.intellij.mock.MockVirtualFile;
import com.intellij.mock.MockVirtualLink;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.testFramework.fixtures.BareTestFixtureTestCase;
import com.intellij.util.Function;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.Collections;
import static com.intellij.mock.MockVirtualFile.dir;
import static com.intellij.mock.MockVirtualFile.file;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
 * Exercises {@link VirtualFileVisitor}-driven traversal over a mock VFS tree
 * containing files, directories and two links (one pointing into a sibling
 * subtree, one recursive). Each test renders visit/leave events as a trace
 * and compares it with an expected transcript; the bracketed number is the
 * visitor value propagated to children (depth, here).
 */
public class VirtualFileVisitorTest extends BareTestFixtureTestCase {
// Shared mock tree root; built once per class, released in tearDown.
private static VirtualFile myRoot;
@BeforeClass
public static void setUp() {
myRoot =
  dir("/",
    dir("d1",
      dir("d11",
        file("f11.1"),
        file("f11.2")),
      file("f1.1"),
      dir("d12"),
      dir("d13",
        file("f13.1"),
        file("f13.2"))),
    dir("d2",
      file("f2.1"),
      file("f2.2")),
    dir("d3"));
// d11_link aliases /d1/d11; d3_rec_link points at its own parent and thus
// exercises the visitor's protection against symlink cycles.
link("/d1/d11", "/d1/d11_link");
link("/d3", "/d3/d3_rec_link");
}
@AfterClass
public static void tearDown() {
// Drop the static reference so the mock tree cannot leak across test classes.
myRoot = null;
}
// Baseline: every reachable node is entered and left once; the recursive
// link is entered but not descended into.
@Test
public void visitAll() {
doTest(
null, null,
"-> / [0]\n" +
" -> d1 [1]\n" +
" -> d11 [2]\n" +
" -> f11.1 [3]\n" +
" <- f11.1 [4]\n" +
" -> f11.2 [3]\n" +
" <- f11.2 [4]\n" +
" <- d11 [3]\n" +
" -> f1.1 [2]\n" +
" <- f1.1 [3]\n" +
" -> d12 [2]\n" +
" <- d12 [3]\n" +
" -> d13 [2]\n" +
" -> f13.1 [3]\n" +
" <- f13.1 [4]\n" +
" -> f13.2 [3]\n" +
" <- f13.2 [4]\n" +
" <- d13 [3]\n" +
" -> d11_link [2]\n" +
" -> f11.1 [3]\n" +
" <- f11.1 [4]\n" +
" -> f11.2 [3]\n" +
" <- f11.2 [4]\n" +
" <- d11_link [3]\n" +
" <- d1 [2]\n" +
" -> d2 [1]\n" +
" -> f2.1 [2]\n" +
" <- f2.1 [3]\n" +
" -> f2.2 [2]\n" +
" <- f2.2 [3]\n" +
" <- d2 [2]\n" +
" -> d3 [1]\n" +
" -> d3_rec_link [2]\n" +
" <- d3_rec_link [3]\n" +
" <- d3 [2]\n" +
"<- / [1]\n");
}
// SKIP_CHILDREN on a directory: d11 is entered but neither its children nor
// its afterChildrenVisited callback are seen (note: no "<- d11" line).
@Test
public void skipChildrenForDirectory() {
doTest(
file -> "d11".equals(file.getName()) ? VirtualFileVisitor.SKIP_CHILDREN : VirtualFileVisitor.CONTINUE,
null,
"-> / [0]\n" +
" -> d1 [1]\n" +
" -> d11 [2]\n" +
" -> f1.1 [2]\n" +
" <- f1.1 [3]\n" +
" -> d12 [2]\n" +
" <- d12 [3]\n" +
" -> d13 [2]\n" +
" -> f13.1 [3]\n" +
" <- f13.1 [4]\n" +
" -> f13.2 [3]\n" +
" <- f13.2 [4]\n" +
" <- d13 [3]\n" +
" -> d11_link [2]\n" +
" -> f11.1 [3]\n" +
" <- f11.1 [4]\n" +
" -> f11.2 [3]\n" +
" <- f11.2 [4]\n" +
" <- d11_link [3]\n" +
" <- d1 [2]\n" +
" -> d2 [1]\n" +
" -> f2.1 [2]\n" +
" <- f2.1 [3]\n" +
" -> f2.2 [2]\n" +
" <- f2.2 [3]\n" +
" <- d2 [2]\n" +
" -> d3 [1]\n" +
" -> d3_rec_link [2]\n" +
" <- d3_rec_link [3]\n" +
" <- d3 [2]\n" +
" <- / [1]\n");
}
// SKIP_CHILDREN on every file: files are entered but never "left"
// (afterChildrenVisited is not invoked for them).
@Test
public void skipChildrenForFiles() {
doTest(
file -> file.isDirectory() ? VirtualFileVisitor.CONTINUE : VirtualFileVisitor.SKIP_CHILDREN,
null,
"-> / [0]\n" +
" -> d1 [1]\n" +
" -> d11 [2]\n" +
" -> f11.1 [3]\n" +
" -> f11.2 [3]\n" +
" <- d11 [3]\n" +
" -> f1.1 [2]\n" +
" -> d12 [2]\n" +
" <- d12 [3]\n" +
" -> d13 [2]\n" +
" -> f13.1 [3]\n" +
" -> f13.2 [3]\n" +
" <- d13 [3]\n" +
" -> d11_link [2]\n" +
" -> f11.1 [3]\n" +
" -> f11.2 [3]\n" +
" <- d11_link [3]\n" +
" <- d1 [2]\n" +
" -> d2 [1]\n" +
" -> f2.1 [2]\n" +
" -> f2.2 [2]\n" +
" <- d2 [2]\n" +
" -> d3 [1]\n" +
" -> d3_rec_link [2]\n" +
" <- d3_rec_link [3]\n" +
" <- d3 [2]\n" +
" <- / [1]\n");
}
// skipTo(ancestor): on reaching f11.1 the traversal unwinds straight out of
// d1's subtree (no afterChildrenVisited for f11.1/d11/d1) and resumes at d2.
@Test
public void skipToParent() {
Ref<VirtualFile> skip = Ref.create();
doTest(
file -> {
if ("d1".equals(file.getName())) skip.set(file);
return "f11.1".equals(file.getName()) ? skip.get() : VirtualFileVisitor.CONTINUE;
},
null,
"-> / [0]\n" +
" -> d1 [1]\n" +
" -> d11 [2]\n" +
" -> f11.1 [3]\n" +
" -> d2 [1]\n" +
" -> f2.1 [2]\n" +
" <- f2.1 [3]\n" +
" -> f2.2 [2]\n" +
" <- f2.2 [3]\n" +
" <- d2 [2]\n" +
" -> d3 [1]\n" +
" -> d3_rec_link [2]\n" +
" <- d3_rec_link [3]\n" +
" <- d3 [2]\n" +
" <- / [1]\n");
}
// skipTo(root): unwinding to the traversal root terminates the whole visit.
@Test
public void skipToRoot() {
doTest(
file -> "f11.1".equals(file.getName()) ? myRoot : VirtualFileVisitor.CONTINUE,
null,
"-> / [0]\n" +
" -> d1 [1]\n" +
" -> d11 [2]\n" +
" -> f11.1 [3]\n");
}
// An exception thrown from visitFileEx aborts the traversal immediately;
// doTest swallows AbortException and compares the partial trace.
@Test
public void abort() {
doTest(
file -> {
if ("f11.1".equals(file.getName())) throw new AbortException();
return VirtualFileVisitor.CONTINUE;
},
null,
"-> / [0]\n" +
" -> d1 [1]\n" +
" -> d11 [2]\n" +
" -> f11.1 [3]\n");
}
// Depth limits: limit(0) visits only the root, ONE_LEVEL_DEEP stops after
// the root's direct children, SKIP_ROOT starts the trace at those children.
@Test
public void depthLimit() {
doTest(
null, null,
"-> / [0]\n" +
"<- / [1]\n",
VirtualFileVisitor.limit(0)
);
doTest(
null, null,
"-> / [0]\n" +
" -> d1 [1]\n" +
" <- d1 [2]\n" +
" -> d2 [1]\n" +
" <- d2 [2]\n" +
" -> d3 [1]\n" +
" <- d3 [2]\n" +
"<- / [1]\n",
VirtualFileVisitor.ONE_LEVEL_DEEP
);
doTest(
null, null,
"-> d1 [0]\n" +
"<- d1 [1]\n" +
"-> d2 [0]\n" +
"<- d2 [1]\n" +
"-> d3 [0]\n" +
"<- d3 [1]\n",
VirtualFileVisitor.SKIP_ROOT, VirtualFileVisitor.ONE_LEVEL_DEEP);
}
// getChildrenIterable override: d13 exposes only its second child, so f13.1
// disappears from the trace while every other directory iterates normally.
@Test
public void customIterable() {
doTest(
null,
file -> "d13".equals(file.getName()) ? Collections.singletonList(file.getChildren()[1]) : null,
"-> / [0]\n" +
" -> d1 [1]\n" +
" -> d11 [2]\n" +
" -> f11.1 [3]\n" +
" <- f11.1 [4]\n" +
" -> f11.2 [3]\n" +
" <- f11.2 [4]\n" +
" <- d11 [3]\n" +
" -> f1.1 [2]\n" +
" <- f1.1 [3]\n" +
" -> d12 [2]\n" +
" <- d12 [3]\n" +
" -> d13 [2]\n" +
" -> f13.2 [3]\n" +
" <- f13.2 [4]\n" +
" <- d13 [3]\n" +
" -> d11_link [2]\n" +
" -> f11.1 [3]\n" +
" <- f11.1 [4]\n" +
" -> f11.2 [3]\n" +
" <- f11.2 [4]\n" +
" <- d11_link [3]\n" +
" <- d1 [2]\n" +
" -> d2 [1]\n" +
" -> f2.1 [2]\n" +
" <- f2.1 [3]\n" +
" -> f2.2 [2]\n" +
" <- f2.2 [3]\n" +
" <- d2 [2]\n" +
" -> d3 [1]\n" +
" -> d3_rec_link [2]\n" +
" <- d3_rec_link [3]\n" +
" <- d3 [2]\n" +
"<- / [1]\n");
}
// Marker used by abort() to stop the traversal from inside the visitor.
private static class AbortException extends RuntimeException { }
// Runs a traversal from myRoot, recording "-> name [value]" on entry and
// "<- name [value]" on exit, then compares the trace with 'expected'.
// 'condition' may return a Result, a VirtualFile (to skipTo) or throw;
// 'iterable' may replace a directory's child list; both may be null.
private static void doTest(@Nullable Function<VirtualFile, Object> condition,
@Nullable Function<VirtualFile, Iterable<VirtualFile>> iterable,
@NotNull String expected,
@NotNull VirtualFileVisitor.Option... options) {
StringBuilder sb = new StringBuilder();
try {
VfsUtilCore.visitChildrenRecursively(myRoot, new VirtualFileVisitor<Integer>(options) {
{ setValueForChildren(0); }
// Nesting depth of the printed trace; incremented on entry, decremented
// on exit, so it drifts when afterChildrenVisited is skipped.
private int level = 0;
@NotNull
@Override
public Result visitFileEx(@NotNull VirtualFile file) {
sb.append(StringUtil.repeat(" ", level++))
    .append("-> ").append(file.getName()).append(" [").append(getCurrentValue()).append("]\n");
setValueForChildren(getCurrentValue() + 1);
if (condition != null) {
Object result = condition.fun(file);
if (result instanceof Result) return (Result)result;
if (result instanceof VirtualFile) return skipTo((VirtualFile)result);
}
return CONTINUE;
}
@Override
public void afterChildrenVisited(@NotNull VirtualFile file) {
sb.append(StringUtil.repeat(" ", --level))
    .append("<- ").append(file.getName()).append(" [").append(getCurrentValue()).append("]\n");
}
@Nullable
@Override
public Iterable<VirtualFile> getChildrenIterable(@NotNull VirtualFile file) {
return iterable != null ? iterable.fun(file) : super.getChildrenIterable(file);
}
});
}
catch (AbortException ignore) { }
assertEquals(expected, sb.toString());
}
// Adds a MockVirtualLink named after the last path segment of 'linkPath'
// under its parent directory, pointing at 'targetPath'.
private static void link(@NotNull String targetPath, @NotNull String linkPath) {
VirtualFile target = myRoot.findFileByRelativePath(targetPath);
assertNotNull(targetPath, target);
int pos = linkPath.lastIndexOf('/');
VirtualFile linkParent = myRoot.findFileByRelativePath(linkPath.substring(0, pos));
assertNotNull(linkPath, linkParent);
((MockVirtualFile)linkParent).addChild(new MockVirtualLink(linkPath.substring(pos + 1), target));
}
}
| |
package org.testobject.kernel.imaging.segmentation;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.testobject.commons.math.algebra.Rectangle;
/**
* A builder class that constructs the blob hierarchy for a given boolean raster
* using a linear line-by-line scan method. In essence, each line is processed
* from left to right. The current position is marked as dot, we keep track of the local
* neighbourhood (at positions a,b,c,d) and ignore the rest (denoted by x):
*
* x x x x x b c d x x x x x x x x x x x x
* x x x x x a .
*
* Algorithm:
*
* // compute color at current position (.)
* if FG:
* if any of a, b, c, or d are FG (positive), then assign same id there
* else generate new FG blob id
* if BG:
* if any of a or c are BG, then propagate their id
* else generate new BG blob id (negative)
*
* then move one spot to the right
*
* // merge connected blobs
* if FG:
* possible merge between b and d (need to check only if c is BG (negative))
* possible merge between a and d (need to check only if b and c are both BG)
* if BG:
* possible merge between a and c (need to check only if b is FG (positive))
*
*
* Interestingly, it is guaranteed that we have to merge at most two blobs. We denote
* the ids of background blobs as zero and show that the former statement can be easily
* proved by enumerating the following cases:
*
* (1) b != 0, c == 0, d != 0, a != 0 => b == a, c == 0, d != 0 => 2 blobs
* (2) b == 0, c != 0, d != 0, a != 0 => a == c, c == d, d != 0 => 2 blobs
* (3) b == 0, c == 0, d != 0, a != 0 => b == 0, c == 0 => 2 blobs
*
* Thus the construction of the linear scan guarantees that the assumption of merging at most two blobs
* holds and we can exploit it by using a single one-dimensional id merge "relation".
*
* With respect to performance a linear-scan has beneficial side-effects:
 * - memory locality: due to the nature of the scan memory locality is guaranteed (cache lines)
* - avoid multi-dim access: we avoid (expensive) multi-dim array access by "fixing" the current line
* - re-using arrays: we can re-use most of the arrays and avoid allocation
* - reduced memory access: for each pixel we only have to check 2 neighbouring pixels & ids
*
*
*
* @author enijkamp
*
*/
public class LinearBlobBuilder
{
public static final Log log = LogFactory.getLog(LinearBlobBuilder.class);
// Enables the expensive neighbourhood/merge self-checks during the scan.
final static boolean DEBUG = false;
// Mutable blob accumulator: id (negative for background blobs), parent blob
// id, pixel count, bounding box, and child blobs for the final hierarchy.
private final static class B
{
int id;
int parid;
int area;
int minx;
int maxx;
int miny;
int maxy;
final List<B> children = new ArrayList<B>(16);
// Accounts one pixel (x, y) to this blob and grows its bounding box.
void add(int x, int y)
{
area++;
minx = Math.min(minx, x);
miny = Math.min(miny, y);
maxx = Math.max(maxx, x);
maxy = Math.max(maxy, y);
}
// Folds another blob's area and bounding box into this one (ids untouched).
void merge(B other)
{
this.area += other.area;
this.minx = Math.min(this.minx, other.minx);
this.miny = Math.min(this.miny, other.miny);
this.maxx = Math.max(this.maxx, other.maxx);
this.maxy = Math.max(this.maxy, other.maxy);
}
// Starts an empty blob: area 0 and an inverted bounding box so the first
// add() establishes real bounds.
B(int id, int parid)
{
this.id = id;
this.parid = parid;
area = 0;
minx = Integer.MAX_VALUE;
miny = Integer.MAX_VALUE;
maxx = -1;
maxy = -1;
}
}
// Work buffers reused across build() calls while the raster size is stable:
// idmap is the union-find "merged into" relation (0 = representative),
// ids holds the per-pixel blob id, bmap maps positive id -> blob.
private int[] idmap = null;
private int[][] ids = null;
private B[] bmap = null;
// Union of two blob ids in idmap: follow both chains to their
// representatives and link the larger one under the smaller.
private final void merge(int low, int high)
{
int l = low;
int h = high;
while (idmap[l] > 0)
{
l = idmap[l];
}
while (idmap[h] > 0)
{
h = idmap[h];
}
if (l == h)
{
return;
}
else if (l > h)
{
int t = low;
low = high;
high = t;
t = l;
l = h;
h = t;
}
idmap[h] = l;
}
// Minimal assert helper that works regardless of the JVM's -ea setting.
private static final void assertThat(boolean what)
{
if (!what)
{
throw new AssertionError();
}
}
// DEBUG-only: checks that the tracked neighbourhood ids a,b,c,d agree with
// the ids array (-1 stands for "outside the raster").
private static void validate(int x, int y, int[][] ids, int a, int b, int c, int d)
{
int width = ids[0].length;
if (y == 0)
{
assertThat(b == -1);
assertThat(c == -1);
assertThat(d == -1);
if (x == 0)
{
assertThat(a == -1);
}
else
{
assertThat(a == ids[y][x - 1]);
}
}
else
{
if (x == 0)
{
assertThat(a == -1);
assertThat(b == -1);
}
else
{
assertThat(a == ids[y][x - 1]);
assertThat(b == ids[y - 1][x - 1]);
}
assertThat(c == ids[y - 1][x]);
if (x == width - 1)
{
assertThat(d == -1);
}
else
{
assertThat(d == ids[y - 1][x + 1]);
}
}
}
// DEBUG-only: verifies that two ids resolve to the same idmap representative.
private final void assertMerged(int a, int b)
{
if (a == b)
return;
while (idmap[a] > 0)
{
a = idmap[a];
}
while (idmap[b] > 0)
{
b = idmap[b];
}
if (a != b)
{
throw new IllegalStateException();
}
}
//
// dot shows the "current position" with coordinates (x, y)
// x, a, b, c, d show processed (non-zero) ids
// we keep track of neighboring ids, at positions a, b, c, and d
// (do not care about the rest, denoted by x)
//
// x x x x x b c d x x x x x x x x x x x x
// x x x x x a .
//
// algo:
// compute color at current point (.)
// if FG:
// if any of a, b, c, or d are FG (positive), then assign same id there
// else generate new FG blob id
// if BG:
// if any of a or c are BG, then propagate their id
// else generate new BG blob id (negative)
//
// then move one spot to the right.
//
// merging:
// if FG:
// possible merge between b and d (need to check only if c is BG (negative))
// possible merge between a and d (need to check only if b and c are both BG)
//
// if BG:
// possible merge between a and c (need to check only if b is FG (positive))
public Blob build(ArrayRaster r)
{
// idSource 1 is reserved for the artificial "outer" background root blob.
int idSource = 1;
int merges = 0;
final int width = r.getSize().w;
final int height = r.getSize().h;
if (width <= 0 || height <= 0)
{
throw new IllegalArgumentException("can not deal with zero-sized rasters");
}
if (ids == null || ids.length != height || ids[0].length != width)
{
/* System.out.println("(RE)ALLOCATING WORK BUFFERS"); */
this.ids = new int[height][width];
// array length computation:
// number of blobs can not be more than half of the number of pixels.
// but note the initial "outer" BG blob (the root), that is artificial.
// and, finally, remember that slot 0 is not used at all (wasted)
// So the final formula that guarantees no ArrayIndexOutOfBoundsException is:
// (h * w + 2). Just in case i forgot something, will add arbitrary number 32
this.bmap = new B[height * width + 32];
this.idmap = new int[this.bmap.length];
}
final boolean[][] raster = r.fg;
final int[][] ids = this.ids;
final B[] bmap = this.bmap;
final int[] idmap = this.idmap;
// Root blob (id -1, stored in slot 1) spans the whole raster; pixels that
// touch the border inherit its id through the a/c == -1 sentinel.
bmap[1] = new B(-1, 0);
bmap[1].minx = 0;
bmap[1].miny = 0;
bmap[1].maxx = width - 1;
bmap[1].maxy = height - 1;
for (int y = 0; y < height; y++)
{
final boolean[] row = raster[y];
final int[] currentIdRow = ids[y];
final int[] previousIdRow = y > 0 ? ids[y - 1] : null;
// a,b,c,d track the west / north-west / north / north-east neighbour ids;
// -1 means "outside the raster" (and doubles as the root BG id).
int a = -1;
int b = -1;
int c;
int d;
if (y == 0)
{
c = -1;
d = -1;
}
else
{
c = previousIdRow[0];
if (width > 1)
d = previousIdRow[1];
else
d = -1;
}
for (int x = 0; x < width; x++)
{
if (DEBUG)
{
validate(x, y, ids, a, b, c, d);
}
int newa;
if (row[x])
{
if (a > 0)
{
newa = a;
// check for FG blob merge
if (b < 0 && c < 0 && d > 0 && a != d)
{
merge(newa, d);
merges++;
}
}
else if (b > 0)
{
newa = b;
}
else if (c > 0)
{
newa = c;
}
else if (d > 0)
{
newa = d;
}
else
{
// new FG blob
newa = ++idSource;
B blob = new B(newa, c);
bmap[newa] = blob;
}
// check for FG blob merge
if (c < 0 && b > 0 && d > 0 && b != d)
{
merge(newa, d);
merges++;
}
if (DEBUG)
{
if (a > 0)
{
assertMerged(newa, a);
}
if (b > 0)
{
assertMerged(newa, b);
}
if (c > 0)
{
assertMerged(newa, c);
}
if (d > 0)
{
assertMerged(newa, d);
}
}
B blob = bmap[newa];
blob.add(x, y);
}
else
{
if (a < 0)
{
newa = a;
// check for BG blob merge
if (b > 0 && c < 0 && a != c)
{
merge(-newa, -c);
merges++;
}
}
else if (c < 0)
{
newa = c;
}
else
{
// new BG blob
newa = -++idSource;
B blob = new B(newa, c);
bmap[-newa] = blob;
}
if (DEBUG)
{
if (a < 0)
{
assertMerged(-newa, -a);
}
if (c < 0)
{
assertMerged(-newa, -c);
}
}
B blob = bmap[-newa];
blob.add(x, y);
}
// Shift the neighbourhood window one pixel to the right.
currentIdRow[x] = newa;
a = newa;
b = c;
c = d;
if (x + 2 < width && y > 0)
{
d = previousIdRow[x + 2];
}
else
{
d = -1;
}
}
}
// beyond last scanline - possibly merge BG blobs
for (int x = 0; x < width; x++)
{
int id = ids[height - 1][x];
if (id < 0 && id != -1)
{
merge(-id, 1);
merges++;
}
}
// normalize idmap array
for (int y = 0; y < height; y++)
{
int[] row = ids[y];
for (int x = 0; x < width; x++)
{
final int id = row[x];
if (id > 0)
{
int i = idmap[id];
if (i > 0)
{
while (idmap[i] > 0)
{
i = idmap[i];
}
row[x] = i;
}
}
else
{
int i = idmap[-id];
if (i > 0)
{
while (idmap[i] > 0)
{
i = idmap[i];
}
row[x] = -i;
}
}
}
}
// build hierarchy
B root = null;
for (int i = 1; i < idSource + 1; i++)
{
if (idmap[i] == 0)
{
// i is a representative id: attach its blob to its parent (or make
// it the root if it has none).
B b = bmap[i];
if (DEBUG)
{
if (b == null)
throw new AssertionError();
}
if (b.parid != 0)
{
B parent = bmap[Math.abs(b.parid)];
parent.children.add(b);
}
else if (root == null)
{
root = b;
}
else
{
throw new AssertionError();
}
}
else
{
// i was merged away: fold every blob on the chain into the
// representative and path-compress idmap while walking it.
int origin = i;
while (idmap[origin] > 0)
{
origin = idmap[origin];
}
B b = bmap[origin]; // "normal"
int j = i;
while (j != origin)
{
b.merge(bmap[j]);
bmap[j] = b;
int nextj = idmap[j];
idmap[j] = origin;
j = nextj;
}
}
}
/*
for(int i = 1; i < idSource; i++)
{
B b = bmap[i];
boolean found = false;
for(int j = 0; j < blobs.length; j++)
{
if(b == blobs[j])
{
found = true;
break;
}
}
if(!found)
{
throw new AssertionError();
}
}
*/
if (root == null)
{
throw new AssertionError();
}
/*
Arrays.sort(blobs, new Comparator<B>() {
@Override
public int compare(B a, B b)
{
if(a.minx < b.minx)
{
return -1;
}
else if(a.minx == b.minx)
{
return 0;
}
else
{
return 1;
}
}
});
*/
// compute the number of unique blobs
log.debug("idSource=" + idSource + ", merges=" + merges);
// build "standard Blob tree so that I can display it
return buildTree(root, ids);
// Utils.displayHierarchy(sroot);
// Utils.displayIds(ids);
}
// Converts the internal B hierarchy into the public Blob tree, recursing
// over children and deriving each bounding Rectangle from the B bounds.
private static final Blob buildTree(B b, int[][] ids)
{
List<Blob> ch = new LinkedList<Blob>();
for (B c : b.children)
{
ch.add(buildTree(c, ids));
}
Rectangle.Int bbox = new Rectangle.Int(b.minx, b.miny, b.maxx - b.minx + 1, b.maxy - b.miny + 1);
// FIXME abs? what about bg blobs? (en)
// return new Blob(Math.abs(b.id), bbox, b.area, ch, ids);
return new Blob(b.id, bbox, b.area, ch, ids);
}
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.core.dossiermgt.service;
import com.liferay.portal.service.InvokableLocalService;
/**
* @author trungdk
* @generated
*/
public class DossierProcLocalServiceClp implements DossierProcLocalService {
// Generated CLP bridge constructor: records, for every service method, its
// name and parameter-type signature so calls can be relayed reflectively
// through the InvokableLocalService across classloader boundaries.
// NOTE(review): index 18 is absent in this @generated source — presumably
// reserved by the service-builder template (e.g. for invokeMethod itself);
// confirm against the generator before relying on the numbering.
public DossierProcLocalServiceClp(
InvokableLocalService invokableLocalService) {
_invokableLocalService = invokableLocalService;
_methodName0 = "addDossierProc";
_methodParameterTypes0 = new String[] {
"org.oep.core.dossiermgt.model.DossierProc"
};
_methodName1 = "createDossierProc";
_methodParameterTypes1 = new String[] { "long" };
_methodName2 = "deleteDossierProc";
_methodParameterTypes2 = new String[] { "long" };
_methodName3 = "deleteDossierProc";
_methodParameterTypes3 = new String[] {
"org.oep.core.dossiermgt.model.DossierProc"
};
_methodName4 = "dynamicQuery";
_methodParameterTypes4 = new String[] { };
_methodName5 = "dynamicQuery";
_methodParameterTypes5 = new String[] {
"com.liferay.portal.kernel.dao.orm.DynamicQuery"
};
_methodName6 = "dynamicQuery";
_methodParameterTypes6 = new String[] {
"com.liferay.portal.kernel.dao.orm.DynamicQuery", "int", "int"
};
_methodName7 = "dynamicQuery";
_methodParameterTypes7 = new String[] {
"com.liferay.portal.kernel.dao.orm.DynamicQuery", "int", "int",
"com.liferay.portal.kernel.util.OrderByComparator"
};
_methodName8 = "dynamicQueryCount";
_methodParameterTypes8 = new String[] {
"com.liferay.portal.kernel.dao.orm.DynamicQuery"
};
_methodName9 = "dynamicQueryCount";
_methodParameterTypes9 = new String[] {
"com.liferay.portal.kernel.dao.orm.DynamicQuery",
"com.liferay.portal.kernel.dao.orm.Projection"
};
_methodName10 = "fetchDossierProc";
_methodParameterTypes10 = new String[] { "long" };
_methodName11 = "getDossierProc";
_methodParameterTypes11 = new String[] { "long" };
_methodName12 = "getPersistedModel";
_methodParameterTypes12 = new String[] { "java.io.Serializable" };
_methodName13 = "getDossierProcs";
_methodParameterTypes13 = new String[] { "int", "int" };
_methodName14 = "getDossierProcsCount";
_methodParameterTypes14 = new String[] { };
_methodName15 = "updateDossierProc";
_methodParameterTypes15 = new String[] {
"org.oep.core.dossiermgt.model.DossierProc"
};
_methodName16 = "getBeanIdentifier";
_methodParameterTypes16 = new String[] { };
_methodName17 = "setBeanIdentifier";
_methodParameterTypes17 = new String[] { "java.lang.String" };
_methodName19 = "addDossierProc";
_methodParameterTypes19 = new String[] {
"long", "java.lang.String", "java.lang.String",
"java.lang.String", "java.lang.String", "java.lang.String",
"java.lang.String", "java.lang.String", "java.lang.String",
"java.lang.String", "java.lang.String", "java.lang.String",
"java.lang.String", "java.lang.String", "java.lang.String",
"java.lang.String", "java.lang.String", "int", "int",
"java.util.Date", "java.util.Date", "int",
"com.liferay.portal.service.ServiceContext"
};
_methodName20 = "updateDossierProc";
_methodParameterTypes20 = new String[] {
"long", "java.lang.String", "java.lang.String",
"java.lang.String", "java.lang.String", "java.lang.String",
"java.lang.String", "java.lang.String", "java.lang.String",
"java.lang.String", "java.lang.String", "java.lang.String",
"java.lang.String", "java.lang.String", "java.lang.String",
"java.lang.String", "java.lang.String", "int", "int",
"java.util.Date", "java.util.Date", "int",
"com.liferay.portal.service.ServiceContext"
};
_methodName21 = "updateDossierProc";
_methodParameterTypes21 = new String[] {
"org.oep.core.dossiermgt.model.DossierProc",
"com.liferay.portal.service.ServiceContext"
};
_methodName22 = "removeDossierProc";
_methodParameterTypes22 = new String[] {
"org.oep.core.dossiermgt.model.DossierProc"
};
_methodName23 = "removeDossierProc";
_methodParameterTypes23 = new String[] { "long" };
_methodName24 = "deleteDossierProcs";
_methodParameterTypes24 = new String[] { "long" };
_methodName25 = "getByDossierProcNo";
_methodParameterTypes25 = new String[] {
"java.lang.String", "com.liferay.portal.service.ServiceContext"
};
_methodName26 = "findAllByAdministrationNo";
_methodParameterTypes26 = new String[] {
"java.lang.String", "com.liferay.portal.service.ServiceContext"
};
_methodName27 = "findAllByDomainNo";
_methodParameterTypes27 = new String[] {
"java.lang.String", "com.liferay.portal.service.ServiceContext"
};
_methodName28 = "findAllByAdministrationAndDomain";
_methodParameterTypes28 = new String[] {
"java.lang.String", "java.lang.String",
"com.liferay.portal.service.ServiceContext"
};
_methodName29 = "findByGroupAdministrationAndDomain";
_methodParameterTypes29 = new String[] {
"java.lang.String", "java.lang.String",
"com.liferay.portal.service.ServiceContext"
};
_methodName30 = "getCompanyDossierProcs";
_methodParameterTypes30 = new String[] {
"long", "com.liferay.portal.kernel.dao.orm.QueryDefinition"
};
_methodName31 = "getCompanyDossierProcsCount";
_methodParameterTypes31 = new String[] {
"long", "com.liferay.portal.kernel.dao.orm.QueryDefinition"
};
_methodName32 = "findByGroupLikeName";
_methodParameterTypes32 = new String[] {
"java.lang.String", "int", "int",
"com.liferay.portal.service.ServiceContext"
};
_methodName33 = "countByGroupLikeName";
_methodParameterTypes33 = new String[] {
"java.lang.String", "com.liferay.portal.service.ServiceContext"
};
_methodName34 = "findByLikeName";
_methodParameterTypes34 = new String[] {
"java.lang.String", "int", "int",
"com.liferay.portal.service.ServiceContext"
};
_methodName35 = "countByLikeName";
_methodParameterTypes35 = new String[] {
"java.lang.String", "com.liferay.portal.service.ServiceContext"
};
_methodName36 = "findByCustomCondition";
_methodParameterTypes36 = new String[] {
"java.lang.String", "java.util.Date", "java.util.Date", "int",
"int", "int", "com.liferay.portal.service.ServiceContext"
};
_methodName37 = "countByCustomCondition";
_methodParameterTypes37 = new String[] {
"java.lang.String", "java.util.Date", "java.util.Date", "int",
"com.liferay.portal.service.ServiceContext"
};
_methodName38 = "findByGroupCustomCondition";
_methodParameterTypes38 = new String[] {
"java.lang.String", "java.lang.String", "java.lang.String",
"java.util.Date", "java.util.Date", "int", "int", "int",
"com.liferay.portal.service.ServiceContext"
};
_methodName39 = "countByGroupCustomCondition";
_methodParameterTypes39 = new String[] {
"java.lang.String", "java.lang.String", "java.lang.String",
"java.util.Date", "java.util.Date", "int",
"com.liferay.portal.service.ServiceContext"
};
_methodName40 = "findByCompany";
_methodParameterTypes40 = new String[] { "long" };
_methodName41 = "findByCompany";
_methodParameterTypes41 = new String[] {
"com.liferay.portal.service.ServiceContext"
};
}
// Relays addDossierProc through the invokable service, translating the model
// across classloaders and re-throwing only the declared exception types.
@Override
public org.oep.core.dossiermgt.model.DossierProc addDossierProc(
org.oep.core.dossiermgt.model.DossierProc dossierProc)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName0,
_methodParameterTypes0,
new Object[] { ClpSerializer.translateInput(dossierProc) });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
// Relays createDossierProc(long); declares no checked exceptions, so anything
// non-runtime coming back is wrapped in a RuntimeException.
@Override
public org.oep.core.dossiermgt.model.DossierProc createDossierProc(
long dossierProcId) {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName1,
_methodParameterTypes1, new Object[] { dossierProcId });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
// Relays deleteDossierProc(long), unwrapping PortalException/SystemException
// in declaration order before falling back to RuntimeException.
@Override
public org.oep.core.dossiermgt.model.DossierProc deleteDossierProc(
long dossierProcId)
throws com.liferay.portal.kernel.exception.PortalException,
com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName2,
_methodParameterTypes2, new Object[] { dossierProcId });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
throw (com.liferay.portal.kernel.exception.PortalException)t;
}
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
// Relays deleteDossierProc(model); the model argument is translated for the
// target classloader before the reflective call.
@Override
public org.oep.core.dossiermgt.model.DossierProc deleteDossierProc(
org.oep.core.dossiermgt.model.DossierProc dossierProc)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName3,
_methodParameterTypes3,
new Object[] { ClpSerializer.translateInput(dossierProc) });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
// Relays the no-argument dynamicQuery() factory call.
@Override
public com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery() {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName4,
_methodParameterTypes4, new Object[] { });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (com.liferay.portal.kernel.dao.orm.DynamicQuery)ClpSerializer.translateOutput(returnObj);
}
// Relays dynamicQuery(DynamicQuery); raw List return type comes from the
// generated service interface, hence the rawtypes suppression.
@Override
@SuppressWarnings("rawtypes")
public java.util.List dynamicQuery(
com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery)
throws com.liferay.portal.kernel.exception.SystemException {
Object returnObj = null;
try {
returnObj = _invokableLocalService.invokeMethod(_methodName5,
_methodParameterTypes5,
new Object[] { ClpSerializer.translateInput(dynamicQuery) });
}
catch (Throwable t) {
t = ClpSerializer.translateThrowable(t);
if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
throw (com.liferay.portal.kernel.exception.SystemException)t;
}
if (t instanceof RuntimeException) {
throw (RuntimeException)t;
}
else {
throw new RuntimeException(t.getClass().getName() +
" is not a valid exception");
}
}
return (java.util.List)ClpSerializer.translateOutput(returnObj);
}
/**
 * Runs the given dynamic query over the index range {@code [start, end)}.
 *
 * @param dynamicQuery the query to execute
 * @param start lower bound of the range of rows
 * @param end upper bound of the range of rows (exclusive by Liferay convention)
 * @return the matching rows as an untyped list
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
@SuppressWarnings("rawtypes")
public java.util.List dynamicQuery(
    com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery, int start,
    int end) throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName6,
                _methodParameterTypes6,
                new Object[] {
                    ClpSerializer.translateInput(dynamicQuery),

                    start,

                    end
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List)ClpSerializer.translateOutput(returnObj);
}
/**
 * Runs the given dynamic query over {@code [start, end)}, ordered by the
 * supplied comparator.
 *
 * @param dynamicQuery the query to execute
 * @param start lower bound of the range of rows
 * @param end upper bound of the range of rows
 * @param orderByComparator the comparator used to order the results
 * @return the matching rows as an untyped list
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
@SuppressWarnings("rawtypes")
public java.util.List dynamicQuery(
    com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery, int start,
    int end,
    com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName7,
                _methodParameterTypes7,
                new Object[] {
                    ClpSerializer.translateInput(dynamicQuery),

                    start,

                    end,

                    ClpSerializer.translateInput(orderByComparator)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List)ClpSerializer.translateOutput(returnObj);
}
/**
 * Counts the rows matching the given dynamic query.
 *
 * @param dynamicQuery the query to count
 * @return the number of matching rows
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public long dynamicQueryCount(
    com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName8,
                _methodParameterTypes8,
                new Object[] { ClpSerializer.translateInput(dynamicQuery) });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return ((Long)returnObj).longValue();
}
/**
 * Counts the rows matching the given dynamic query under the supplied
 * projection.
 *
 * @param dynamicQuery the query to count
 * @param projection the projection applied to the query
 * @return the number of matching rows
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public long dynamicQueryCount(
    com.liferay.portal.kernel.dao.orm.DynamicQuery dynamicQuery,
    com.liferay.portal.kernel.dao.orm.Projection projection)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName9,
                _methodParameterTypes9,
                new Object[] {
                    ClpSerializer.translateInput(dynamicQuery),

                    ClpSerializer.translateInput(projection)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return ((Long)returnObj).longValue();
}
/**
 * Fetches the dossier proc with the given primary key, or {@code null} if
 * the delegate reports no match.
 *
 * @param dossierProcId the primary key of the dossier proc
 * @return the dossier proc, translated across the class loader boundary
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public org.oep.core.dossiermgt.model.DossierProc fetchDossierProc(
    long dossierProcId)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName10,
                _methodParameterTypes10, new Object[] { dossierProcId });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
/**
 * Returns the dossier proc with the given primary key.
 *
 * @param dossierProcId the primary key of the dossier proc
 * @return the dossier proc, translated across the class loader boundary
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception (e.g. no match)
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public org.oep.core.dossiermgt.model.DossierProc getDossierProc(
    long dossierProcId)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName11,
                _methodParameterTypes11, new Object[] { dossierProcId });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
/**
 * Returns the persisted model with the given (serializable) primary key.
 *
 * @param primaryKeyObj the primary key of the model
 * @return the persisted model, translated across the class loader boundary
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public com.liferay.portal.model.PersistedModel getPersistedModel(
    java.io.Serializable primaryKeyObj)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName12,
                _methodParameterTypes12,
                new Object[] { ClpSerializer.translateInput(primaryKeyObj) });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (com.liferay.portal.model.PersistedModel)ClpSerializer.translateOutput(returnObj);
}
/**
 * Returns a range of dossier procs.
 *
 * @param start lower bound of the range of rows
 * @param end upper bound of the range of rows
 * @return the dossier procs in the range, translated across the class loader boundary
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> getDossierProcs(
    int start, int end)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName13,
                _methodParameterTypes13, new Object[] { start, end });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(returnObj);
}
/**
 * Returns the total number of dossier procs.
 *
 * @return the dossier proc count
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public int getDossierProcsCount()
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName14,
                _methodParameterTypes14, new Object[] {  });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return ((Integer)returnObj).intValue();
}
/**
 * Updates (merges) the given dossier proc in the database.
 *
 * @param dossierProc the dossier proc to update
 * @return the updated dossier proc, translated across the class loader boundary
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public org.oep.core.dossiermgt.model.DossierProc updateDossierProc(
    org.oep.core.dossiermgt.model.DossierProc dossierProc)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName15,
                _methodParameterTypes15,
                new Object[] { ClpSerializer.translateInput(dossierProc) });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
/**
 * Returns the Spring bean ID of the wrapped local service.
 *
 * @return the bean identifier, translated across the class loader boundary
 */
@Override
public java.lang.String getBeanIdentifier() {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName16,
                _methodParameterTypes16, new Object[] {  });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.lang.String)ClpSerializer.translateOutput(returnObj);
}
/**
 * Sets the Spring bean ID of the wrapped local service.
 *
 * @param beanIdentifier the new bean identifier
 */
@Override
public void setBeanIdentifier(java.lang.String beanIdentifier) {
    try {
        _invokableLocalService.invokeMethod(_methodName17,
            _methodParameterTypes17,
            new Object[] { ClpSerializer.translateInput(beanIdentifier) });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }
}
/**
 * Reflective invocation entry point of the invokable-service contract.
 * This client-side wrapper only issues calls through
 * {@code _invokableLocalService}; it never receives them, so this is
 * deliberately unsupported.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public java.lang.Object invokeMethod(java.lang.String name,
java.lang.String[] parameterTypes, java.lang.Object[] arguments)
throws java.lang.Throwable {
throw new UnsupportedOperationException();
}
/**
 * Adds a new dossier proc with the given attributes by delegating to the
 * wrapped local service. All object-typed arguments are translated across
 * the class loader boundary before the call.
 *
 * @param userId the ID of the creating user
 * @param dossierProcNo the business number of the dossier proc
 * @param serviceContext the service context for the create operation
 * @return the newly added dossier proc
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public org.oep.core.dossiermgt.model.DossierProc addDossierProc(
    long userId, java.lang.String dossierProcNo, java.lang.String name,
    java.lang.String enName, java.lang.String shortName,
    java.lang.String processDescription,
    java.lang.String methodDescription,
    java.lang.String dossierDescription,
    java.lang.String conditionDescription,
    java.lang.String durationDescription,
    java.lang.String actorsDescription,
    java.lang.String resultsDescription,
    java.lang.String recordsDescription, java.lang.String feeDescription,
    java.lang.String instructionsDescription,
    java.lang.String administrationNo, java.lang.String domainNo,
    int forCitizen, int forBusiness, java.util.Date effectDate,
    java.util.Date expireDate, int statusActive,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName19,
                _methodParameterTypes19,
                new Object[] {
                    userId,

                    ClpSerializer.translateInput(dossierProcNo),

                    ClpSerializer.translateInput(name),

                    ClpSerializer.translateInput(enName),

                    ClpSerializer.translateInput(shortName),

                    ClpSerializer.translateInput(processDescription),

                    ClpSerializer.translateInput(methodDescription),

                    ClpSerializer.translateInput(dossierDescription),

                    ClpSerializer.translateInput(conditionDescription),

                    ClpSerializer.translateInput(durationDescription),

                    ClpSerializer.translateInput(actorsDescription),

                    ClpSerializer.translateInput(resultsDescription),

                    ClpSerializer.translateInput(recordsDescription),

                    ClpSerializer.translateInput(feeDescription),

                    ClpSerializer.translateInput(instructionsDescription),

                    ClpSerializer.translateInput(administrationNo),

                    ClpSerializer.translateInput(domainNo),

                    forCitizen,

                    forBusiness,

                    ClpSerializer.translateInput(effectDate),

                    ClpSerializer.translateInput(expireDate),

                    statusActive,

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
/**
 * Updates the dossier proc identified by {@code id} with the given
 * attributes by delegating to the wrapped local service. All object-typed
 * arguments are translated across the class loader boundary.
 *
 * @param id the primary key of the dossier proc to update
 * @param serviceContext the service context for the update operation
 * @return the updated dossier proc
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public org.oep.core.dossiermgt.model.DossierProc updateDossierProc(
    long id, java.lang.String dossierProcNo, java.lang.String name,
    java.lang.String enName, java.lang.String shortName,
    java.lang.String processDescription,
    java.lang.String methodDescription,
    java.lang.String dossierDescription,
    java.lang.String conditionDescription,
    java.lang.String durationDescription,
    java.lang.String actorsDescription,
    java.lang.String resultsDescription,
    java.lang.String recordsDescription, java.lang.String feeDescription,
    java.lang.String instructionsDescription,
    java.lang.String administrationNo, java.lang.String domainNo,
    int forCitizen, int forBusiness, java.util.Date effectDate,
    java.util.Date expireDate, int statusActive,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName20,
                _methodParameterTypes20,
                new Object[] {
                    id,

                    ClpSerializer.translateInput(dossierProcNo),

                    ClpSerializer.translateInput(name),

                    ClpSerializer.translateInput(enName),

                    ClpSerializer.translateInput(shortName),

                    ClpSerializer.translateInput(processDescription),

                    ClpSerializer.translateInput(methodDescription),

                    ClpSerializer.translateInput(dossierDescription),

                    ClpSerializer.translateInput(conditionDescription),

                    ClpSerializer.translateInput(durationDescription),

                    ClpSerializer.translateInput(actorsDescription),

                    ClpSerializer.translateInput(resultsDescription),

                    ClpSerializer.translateInput(recordsDescription),

                    ClpSerializer.translateInput(feeDescription),

                    ClpSerializer.translateInput(instructionsDescription),

                    ClpSerializer.translateInput(administrationNo),

                    ClpSerializer.translateInput(domainNo),

                    forCitizen,

                    forBusiness,

                    ClpSerializer.translateInput(effectDate),

                    ClpSerializer.translateInput(expireDate),

                    statusActive,

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
/**
 * Updates the given dossier proc under the supplied service context.
 *
 * @param dossierProc the dossier proc to update
 * @param serviceContext the service context for the update operation
 * @return the updated dossier proc
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public org.oep.core.dossiermgt.model.DossierProc updateDossierProc(
    org.oep.core.dossiermgt.model.DossierProc dossierProc,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName21,
                _methodParameterTypes21,
                new Object[] {
                    ClpSerializer.translateInput(dossierProc),

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
/**
 * Removes the given dossier proc by delegating to the wrapped local
 * service.
 *
 * @param dossierProc the dossier proc to remove
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public void removeDossierProc(
    org.oep.core.dossiermgt.model.DossierProc dossierProc)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    try {
        _invokableLocalService.invokeMethod(_methodName22,
            _methodParameterTypes22,
            new Object[] { ClpSerializer.translateInput(dossierProc) });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }
}
/**
 * Removes the dossier proc with the given primary key.
 *
 * @param id the primary key of the dossier proc to remove
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public void removeDossierProc(long id)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    try {
        _invokableLocalService.invokeMethod(_methodName23,
            _methodParameterTypes23, new Object[] { id });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }
}
/**
 * Deletes the dossier procs belonging to the given group.
 *
 * @param groupId the ID of the group whose dossier procs are deleted
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public void deleteDossierProcs(long groupId)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    try {
        _invokableLocalService.invokeMethod(_methodName24,
            _methodParameterTypes24, new Object[] { groupId });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }
}
/**
 * Returns the dossier proc with the given business number.
 *
 * @param dossierProcNo the dossier proc number to look up
 * @param serviceContext the service context for the lookup
 * @return the matching dossier proc
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public org.oep.core.dossiermgt.model.DossierProc getByDossierProcNo(
    java.lang.String dossierProcNo,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName25,
                _methodParameterTypes25,
                new Object[] {
                    ClpSerializer.translateInput(dossierProcNo),

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (org.oep.core.dossiermgt.model.DossierProc)ClpSerializer.translateOutput(returnObj);
}
/**
 * Returns all dossier procs with the given administration number.
 *
 * @param administrationNo the administration number to match
 * @param serviceContext the service context for the lookup
 * @return the matching dossier procs
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> findAllByAdministrationNo(
    java.lang.String administrationNo,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName26,
                _methodParameterTypes26,
                new Object[] {
                    ClpSerializer.translateInput(administrationNo),

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(returnObj);
}
/**
 * Returns all dossier procs with the given domain number.
 *
 * @param domainNo the domain number to match
 * @param serviceContext the service context for the lookup
 * @return the matching dossier procs
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> findAllByDomainNo(
    java.lang.String domainNo,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName27,
                _methodParameterTypes27,
                new Object[] {
                    ClpSerializer.translateInput(domainNo),

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(returnObj);
}
/**
 * Returns all dossier procs matching both the administration number and
 * the domain number.
 *
 * @param administrationNo the administration number to match
 * @param domainNo the domain number to match
 * @param serviceContext the service context for the lookup
 * @return the matching dossier procs
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> findAllByAdministrationAndDomain(
    java.lang.String administrationNo, java.lang.String domainNo,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName28,
                _methodParameterTypes28,
                new Object[] {
                    ClpSerializer.translateInput(administrationNo),

                    ClpSerializer.translateInput(domainNo),

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(returnObj);
}
/**
 * Returns the dossier procs matching the administration number and the
 * domain number, scoped by the group carried in the service context.
 *
 * @param administrationNo the administration number to match
 * @param domainNo the domain number to match
 * @param serviceContext the service context for the lookup
 * @return the matching dossier procs
 * @throws com.liferay.portal.kernel.exception.PortalException if the delegate raised a portal exception
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> findByGroupAdministrationAndDomain(
    java.lang.String administrationNo, java.lang.String domainNo,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.PortalException,
        com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName29,
                _methodParameterTypes29,
                new Object[] {
                    ClpSerializer.translateInput(administrationNo),

                    ClpSerializer.translateInput(domainNo),

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
            throw (com.liferay.portal.kernel.exception.PortalException)t;
        }

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(returnObj);
}
/**
 * Returns the company's dossier procs under the supplied query definition.
 *
 * @param companyId the ID of the company
 * @param queryDefinition the query definition (status, range, ordering)
 * @return the matching dossier procs
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> getCompanyDossierProcs(
    long companyId,
    com.liferay.portal.kernel.dao.orm.QueryDefinition queryDefinition)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName30,
                _methodParameterTypes30,
                new Object[] {
                    companyId,

                    ClpSerializer.translateInput(queryDefinition)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(returnObj);
}
/**
 * Counts the company's dossier procs under the supplied query definition.
 *
 * @param companyId the ID of the company
 * @param queryDefinition the query definition (status filter)
 * @return the number of matching dossier procs
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public int getCompanyDossierProcsCount(long companyId,
    com.liferay.portal.kernel.dao.orm.QueryDefinition queryDefinition)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName31,
                _methodParameterTypes31,
                new Object[] {
                    companyId,

                    ClpSerializer.translateInput(queryDefinition)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return ((Integer)returnObj).intValue();
}
/**
 * Returns a range of group-scoped dossier procs whose name matches the
 * given pattern.
 *
 * @param name the name pattern to match
 * @param startIndex lower bound of the range of rows
 * @param endIndex upper bound of the range of rows
 * @param serviceContext the service context for the lookup
 * @return the matching dossier procs
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> findByGroupLikeName(
    java.lang.String name, int startIndex, int endIndex,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName32,
                _methodParameterTypes32,
                new Object[] {
                    ClpSerializer.translateInput(name),

                    startIndex,

                    endIndex,

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(returnObj);
}
/**
 * Counts the group-scoped dossier procs whose name matches the given
 * pattern.
 *
 * @param name the name pattern to match
 * @param serviceContext the service context for the lookup
 * @return the number of matching dossier procs
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public int countByGroupLikeName(java.lang.String name,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName33,
                _methodParameterTypes33,
                new Object[] {
                    ClpSerializer.translateInput(name),

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return ((Integer)returnObj).intValue();
}
/**
 * Returns a range of dossier procs whose name matches the given pattern.
 *
 * @param name the name pattern to match
 * @param startIndex lower bound of the range of rows
 * @param endIndex upper bound of the range of rows
 * @param serviceContext the service context for the lookup
 * @return the matching dossier procs
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> findByLikeName(
    java.lang.String name, int startIndex, int endIndex,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName34,
                _methodParameterTypes34,
                new Object[] {
                    ClpSerializer.translateInput(name),

                    startIndex,

                    endIndex,

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(returnObj);
}
/**
 * Counts the dossier procs whose name matches the given pattern.
 *
 * @param name the name pattern to match
 * @param serviceContext the service context for the lookup
 * @return the number of matching dossier procs
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public int countByLikeName(java.lang.String name,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName35,
                _methodParameterTypes35,
                new Object[] {
                    ClpSerializer.translateInput(name),

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return ((Integer)returnObj).intValue();
}
/**
 * Returns a range of dossier procs matching the custom condition (name
 * pattern, effect/expire date window, active status).
 *
 * @param name the name pattern to match
 * @param effectDate the effect date bound
 * @param expireDate the expire date bound
 * @param active the active-status flag to match
 * @param startIndex lower bound of the range of rows
 * @param endIndex upper bound of the range of rows
 * @param serviceContext the service context for the lookup
 * @return the matching dossier procs
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> findByCustomCondition(
    java.lang.String name, java.util.Date effectDate,
    java.util.Date expireDate, int active, int startIndex, int endIndex,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName36,
                _methodParameterTypes36,
                new Object[] {
                    ClpSerializer.translateInput(name),

                    ClpSerializer.translateInput(effectDate),

                    ClpSerializer.translateInput(expireDate),

                    active,

                    startIndex,

                    endIndex,

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(returnObj);
}
/**
 * Counts the dossier procs matching the custom condition (name pattern,
 * effect/expire date window, active status).
 *
 * @param name the name pattern to match
 * @param effectDate the effect date bound
 * @param expireDate the expire date bound
 * @param active the active-status flag to match
 * @param serviceContext the service context for the lookup
 * @return the number of matching dossier procs
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public int countByCustomCondition(java.lang.String name,
    java.util.Date effectDate, java.util.Date expireDate, int active,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName37,
                _methodParameterTypes37,
                new Object[] {
                    ClpSerializer.translateInput(name),

                    ClpSerializer.translateInput(effectDate),

                    ClpSerializer.translateInput(expireDate),

                    active,

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return ((Integer)returnObj).intValue();
}
/**
 * Returns a range of group-scoped dossier procs matching the custom
 * condition (name pattern, administration/domain numbers, effect/expire
 * date window, active status).
 *
 * @param name the name pattern to match
 * @param administrationNo the administration number to match
 * @param domainNo the domain number to match
 * @param effectDate the effect date bound
 * @param expireDate the expire date bound
 * @param active the active-status flag to match
 * @param startIndex lower bound of the range of rows
 * @param endIndex upper bound of the range of rows
 * @param serviceContext the service context for the lookup
 * @return the matching dossier procs
 * @throws com.liferay.portal.kernel.exception.SystemException if a system exception occurred
 */
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> findByGroupCustomCondition(
    java.lang.String name, java.lang.String administrationNo,
    java.lang.String domainNo, java.util.Date effectDate,
    java.util.Date expireDate, int active, int startIndex, int endIndex,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object returnObj = null;

    try {
        returnObj = _invokableLocalService.invokeMethod(_methodName38,
                _methodParameterTypes38,
                new Object[] {
                    ClpSerializer.translateInput(name),

                    ClpSerializer.translateInput(administrationNo),

                    ClpSerializer.translateInput(domainNo),

                    ClpSerializer.translateInput(effectDate),

                    ClpSerializer.translateInput(expireDate),

                    active,

                    startIndex,

                    endIndex,

                    ClpSerializer.translateInput(serviceContext)
                });
    }
    catch (Throwable t) {
        t = ClpSerializer.translateThrowable(t);

        if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)t;
        }

        if (t instanceof RuntimeException) {
            throw (RuntimeException)t;
        }

        // Fix: attach t as the cause so its stack trace is not lost.
        throw new RuntimeException(t.getClass().getName() +
            " is not a valid exception", t);
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(returnObj);
}
@Override
public int countByGroupCustomCondition(java.lang.String name,
    java.lang.String administrationNo, java.lang.String domainNo,
    java.util.Date effectDate, java.util.Date expireDate, int active,
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.SystemException {
    // Marshal the arguments once, then dispatch reflectively.
    Object[] arguments = new Object[] {
            ClpSerializer.translateInput(name),
            ClpSerializer.translateInput(administrationNo),
            ClpSerializer.translateInput(domainNo),
            ClpSerializer.translateInput(effectDate),
            ClpSerializer.translateInput(expireDate),
            active,
            ClpSerializer.translateInput(serviceContext)
        };

    Object result;

    try {
        result = _invokableLocalService.invokeMethod(_methodName39,
                _methodParameterTypes39, arguments);
    }
    catch (Throwable t) {
        Throwable translated = ClpSerializer.translateThrowable(t);

        if (translated instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)translated;
        }

        if (translated instanceof RuntimeException) {
            throw (RuntimeException)translated;
        }

        throw new RuntimeException(translated.getClass().getName() +
            " is not a valid exception");
    }

    return ((Integer)result).intValue();
}
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> findByCompany(
    long companyId)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object result;

    try {
        // Single primitive argument: no ClpSerializer input translation needed.
        result = _invokableLocalService.invokeMethod(_methodName40,
                _methodParameterTypes40, new Object[] { companyId });
    }
    catch (Throwable t) {
        Throwable translated = ClpSerializer.translateThrowable(t);

        if (translated instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)translated;
        }

        if (translated instanceof RuntimeException) {
            throw (RuntimeException)translated;
        }

        throw new RuntimeException(translated.getClass().getName() +
            " is not a valid exception");
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(result);
}
@Override
public java.util.List<org.oep.core.dossiermgt.model.DossierProc> findByCompany(
    com.liferay.portal.service.ServiceContext serviceContext)
    throws com.liferay.portal.kernel.exception.SystemException {
    Object result;

    try {
        Object[] arguments = new Object[] {
                ClpSerializer.translateInput(serviceContext)
            };

        result = _invokableLocalService.invokeMethod(_methodName41,
                _methodParameterTypes41, arguments);
    }
    catch (Throwable t) {
        Throwable translated = ClpSerializer.translateThrowable(t);

        if (translated instanceof com.liferay.portal.kernel.exception.SystemException) {
            throw (com.liferay.portal.kernel.exception.SystemException)translated;
        }

        if (translated instanceof RuntimeException) {
            throw (RuntimeException)translated;
        }

        throw new RuntimeException(translated.getClass().getName() +
            " is not a valid exception");
    }

    return (java.util.List<org.oep.core.dossiermgt.model.DossierProc>)ClpSerializer.translateOutput(result);
}
// Delegate that performs the actual cross-classloader reflective dispatch.
private InvokableLocalService _invokableLocalService;
// Method names and parameter-type signatures, presumably initialized in the
// constructor (not visible in this chunk) and paired with the invokeMethod()
// calls above (_methodName36..41 etc.).
// NOTE(review): the pair for index 18 (_methodName18/_methodParameterTypes18)
// is missing from this generated list — presumably dropped by the code
// generator when a service method was removed; confirm against the constructor.
private String _methodName0;
private String[] _methodParameterTypes0;
private String _methodName1;
private String[] _methodParameterTypes1;
private String _methodName2;
private String[] _methodParameterTypes2;
private String _methodName3;
private String[] _methodParameterTypes3;
private String _methodName4;
private String[] _methodParameterTypes4;
private String _methodName5;
private String[] _methodParameterTypes5;
private String _methodName6;
private String[] _methodParameterTypes6;
private String _methodName7;
private String[] _methodParameterTypes7;
private String _methodName8;
private String[] _methodParameterTypes8;
private String _methodName9;
private String[] _methodParameterTypes9;
private String _methodName10;
private String[] _methodParameterTypes10;
private String _methodName11;
private String[] _methodParameterTypes11;
private String _methodName12;
private String[] _methodParameterTypes12;
private String _methodName13;
private String[] _methodParameterTypes13;
private String _methodName14;
private String[] _methodParameterTypes14;
private String _methodName15;
private String[] _methodParameterTypes15;
private String _methodName16;
private String[] _methodParameterTypes16;
private String _methodName17;
private String[] _methodParameterTypes17;
private String _methodName19;
private String[] _methodParameterTypes19;
private String _methodName20;
private String[] _methodParameterTypes20;
private String _methodName21;
private String[] _methodParameterTypes21;
private String _methodName22;
private String[] _methodParameterTypes22;
private String _methodName23;
private String[] _methodParameterTypes23;
private String _methodName24;
private String[] _methodParameterTypes24;
private String _methodName25;
private String[] _methodParameterTypes25;
private String _methodName26;
private String[] _methodParameterTypes26;
private String _methodName27;
private String[] _methodParameterTypes27;
private String _methodName28;
private String[] _methodParameterTypes28;
private String _methodName29;
private String[] _methodParameterTypes29;
private String _methodName30;
private String[] _methodParameterTypes30;
private String _methodName31;
private String[] _methodParameterTypes31;
private String _methodName32;
private String[] _methodParameterTypes32;
private String _methodName33;
private String[] _methodParameterTypes33;
private String _methodName34;
private String[] _methodParameterTypes34;
private String _methodName35;
private String[] _methodParameterTypes35;
private String _methodName36;
private String[] _methodParameterTypes36;
private String _methodName37;
private String[] _methodParameterTypes37;
private String _methodName38;
private String[] _methodParameterTypes38;
private String _methodName39;
private String[] _methodParameterTypes39;
private String _methodName40;
private String[] _methodParameterTypes40;
private String _methodName41;
private String[] _methodParameterTypes41;
}
| |
package pro.taskana.classification.internal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.ibatis.exceptions.PersistenceException;
import org.apache.ibatis.session.RowBounds;
import pro.taskana.classification.api.ClassificationCustomField;
import pro.taskana.classification.api.ClassificationQuery;
import pro.taskana.classification.api.ClassificationQueryColumnName;
import pro.taskana.classification.api.models.ClassificationSummary;
import pro.taskana.common.api.TimeInterval;
import pro.taskana.common.api.exceptions.InvalidArgumentException;
import pro.taskana.common.api.exceptions.SystemException;
import pro.taskana.common.api.exceptions.TaskanaRuntimeException;
import pro.taskana.common.internal.InternalTaskanaEngine;
/**
 * Implementation of ClassificationQuery interface.
 *
 * <p>Accumulates filter criteria via the fluent {@code ...In}/{@code ...Like}
 * setters, then executes them through the MyBatis mapper statements named in
 * the LINK_TO_* constants. The mapper reads the criteria back through the
 * public getters (e.g. {@code getKey()}, {@code getOrderBy()}), so field and
 * getter names are part of the SQL-mapping contract and must not be renamed.
 *
 * <p>Not thread-safe: the builder state is mutable and unsynchronized.
 */
public class ClassificationQueryImpl implements ClassificationQuery {
// Fully-qualified ids of the MyBatis mapper statements used by
// list()/single(), count() and listValues() respectively.
private static final String LINK_TO_SUMMARYMAPPER =
"pro.taskana.classification.internal.ClassificationQueryMapper.queryClassificationSummaries";
private static final String LINK_TO_COUNTER =
"pro.taskana.classification.internal.ClassificationQueryMapper.countQueryClassifications";
private static final String LINK_TO_VALUEMAPPER =
"pro.taskana.classification.internal.ClassificationQueryMapper."
+ "queryClassificationColumnValues";
private final InternalTaskanaEngine taskanaEngine;
// "COLUMN ASC/DESC" fragments and the bare column names; both are appended
// together by addOrderCriteria() and read by the mapper.
private final List<String> orderBy;
private final List<String> orderColumns;
// Single column selected by listValues(); null for ordinary list()/count().
private ClassificationQueryColumnName columnName;
// Filter criteria. A null array/field means "no restriction on this column".
// *In fields are exact-match sets; *Like fields are SQL LIKE patterns that
// are lower-cased on the way in (see toLowerCopy usage below).
private String[] key;
private String[] idIn;
private String[] parentId;
private String[] parentKey;
private String[] category;
private String[] type;
private String[] domain;
private Boolean validInDomain;
private TimeInterval[] createdIn;
private TimeInterval[] modifiedIn;
private String[] nameIn;
private String[] nameLike;
private String descriptionLike;
private int[] priority;
private String[] serviceLevelIn;
private String[] serviceLevelLike;
private String[] applicationEntryPointIn;
private String[] applicationEntryPointLike;
private String[] custom1In;
private String[] custom1Like;
private String[] custom2In;
private String[] custom2Like;
private String[] custom3In;
private String[] custom3Like;
private String[] custom4In;
private String[] custom4Like;
private String[] custom5In;
private String[] custom5Like;
private String[] custom6In;
private String[] custom6Like;
private String[] custom7In;
private String[] custom7Like;
private String[] custom8In;
private String[] custom8Like;
ClassificationQueryImpl(InternalTaskanaEngine taskanaEngine) {
this.taskanaEngine = taskanaEngine;
this.orderBy = new ArrayList<>();
this.orderColumns = new ArrayList<>();
}
@Override
public ClassificationQuery keyIn(String... key) {
this.key = key;
return this;
}
@Override
public ClassificationQuery idIn(String... id) {
this.idIn = id;
return this;
}
@Override
public ClassificationQuery parentIdIn(String... parentId) {
this.parentId = parentId;
return this;
}
@Override
public ClassificationQuery parentKeyIn(String... parentKey) {
this.parentKey = parentKey;
return this;
}
@Override
public ClassificationQuery categoryIn(String... category) {
this.category = category;
return this;
}
@Override
public ClassificationQuery typeIn(String... type) {
this.type = type;
return this;
}
@Override
public ClassificationQuery domainIn(String... domain) {
this.domain = domain;
return this;
}
@Override
public ClassificationQuery validInDomainEquals(Boolean validInDomain) {
this.validInDomain = validInDomain;
return this;
}
@Override
public ClassificationQuery createdWithin(TimeInterval... createdIn) {
// Rejects invalid intervals eagerly so a bad filter fails at build time,
// not at query-execution time.
validateAllTimeIntervals(createdIn);
this.createdIn = createdIn;
return this;
}
@Override
public ClassificationQuery modifiedWithin(TimeInterval... modifiedIn) {
validateAllTimeIntervals(modifiedIn);
this.modifiedIn = modifiedIn;
return this;
}
@Override
public ClassificationQuery nameIn(String... nameIn) {
this.nameIn = nameIn;
return this;
}
@Override
public ClassificationQuery nameLike(String... nameLike) {
this.nameLike = toLowerCopy(nameLike);
return this;
}
@Override
public ClassificationQuery descriptionLike(String description) {
// NOTE(review): toLowerCase() uses the default locale while the array-based
// Like setters go through toLowerCopy — confirm both lower-case consistently.
this.descriptionLike = description.toLowerCase();
return this;
}
@Override
public ClassificationQuery priorityIn(int... priorities) {
this.priority = priorities;
return this;
}
@Override
public ClassificationQuery serviceLevelIn(String... serviceLevelIn) {
this.serviceLevelIn = serviceLevelIn;
return this;
}
@Override
public ClassificationQuery serviceLevelLike(String... serviceLevelLike) {
this.serviceLevelLike = toLowerCopy(serviceLevelLike);
return this;
}
@Override
public ClassificationQuery applicationEntryPointIn(String... applicationEntryPointIn) {
this.applicationEntryPointIn = applicationEntryPointIn;
return this;
}
@Override
public ClassificationQuery applicationEntryPointLike(String... applicationEntryPointLike) {
this.applicationEntryPointLike = toLowerCopy(applicationEntryPointLike);
return this;
}
/**
 * Adds an exact-match filter for one of the eight custom fields.
 *
 * @param customField which custom column to filter on
 * @param customIn the accepted values; must not be empty
 * @throws InvalidArgumentException if no value is provided
 */
@Override
public ClassificationQuery customAttributeIn(
ClassificationCustomField customField, String... customIn) throws InvalidArgumentException {
if (customIn.length == 0) {
throw new InvalidArgumentException(
"At least one string has to be provided as a search parameter");
}
switch (customField) {
case CUSTOM_1:
this.custom1In = customIn;
break;
case CUSTOM_2:
this.custom2In = customIn;
break;
case CUSTOM_3:
this.custom3In = customIn;
break;
case CUSTOM_4:
this.custom4In = customIn;
break;
case CUSTOM_5:
this.custom5In = customIn;
break;
case CUSTOM_6:
this.custom6In = customIn;
break;
case CUSTOM_7:
this.custom7In = customIn;
break;
case CUSTOM_8:
this.custom8In = customIn;
break;
default:
// Defensive: only reachable if the enum gains a new constant.
throw new SystemException("Unknown customField '" + customField + "'");
}
return this;
}
/**
 * Adds a LIKE filter (lower-cased patterns) for one of the custom fields.
 *
 * @param customField which custom column to filter on
 * @param customLike the LIKE patterns; must not be empty
 * @throws InvalidArgumentException if no pattern is provided
 */
@Override
public ClassificationQuery customAttributeLike(
ClassificationCustomField customField, String... customLike) throws InvalidArgumentException {
if (customLike.length == 0) {
throw new InvalidArgumentException(
"At least one string has to be provided as a search parameter");
}
switch (customField) {
case CUSTOM_1:
this.custom1Like = toLowerCopy(customLike);
break;
case CUSTOM_2:
this.custom2Like = toLowerCopy(customLike);
break;
case CUSTOM_3:
this.custom3Like = toLowerCopy(customLike);
break;
case CUSTOM_4:
this.custom4Like = toLowerCopy(customLike);
break;
case CUSTOM_5:
this.custom5Like = toLowerCopy(customLike);
break;
case CUSTOM_6:
this.custom6Like = toLowerCopy(customLike);
break;
case CUSTOM_7:
this.custom7Like = toLowerCopy(customLike);
break;
case CUSTOM_8:
this.custom8Like = toLowerCopy(customLike);
break;
default:
throw new SystemException("Unknown customField '" + customField + "'");
}
return this;
}
@Override
public ClassificationQuery orderByKey(SortDirection sortDirection) {
return addOrderCriteria("KEY", sortDirection);
}
@Override
public ClassificationQuery orderByParentId(SortDirection sortDirection) {
return addOrderCriteria("PARENT_ID", sortDirection);
}
@Override
public ClassificationQuery orderByParentKey(SortDirection sortDirection) {
return addOrderCriteria("PARENT_KEY", sortDirection);
}
@Override
public ClassificationQuery orderByCategory(SortDirection sortDirection) {
return addOrderCriteria("CATEGORY", sortDirection);
}
@Override
public ClassificationQuery orderByDomain(SortDirection sortDirection) {
return addOrderCriteria("DOMAIN", sortDirection);
}
@Override
public ClassificationQuery orderByName(SortDirection sortDirection) {
return addOrderCriteria("NAME", sortDirection);
}
@Override
public ClassificationQuery orderByServiceLevel(SortDirection sortDirection) {
return addOrderCriteria("SERVICE_LEVEL", sortDirection);
}
@Override
public ClassificationQuery orderByPriority(SortDirection sortDirection) {
return addOrderCriteria("PRIORITY", sortDirection);
}
@Override
public ClassificationQuery orderByApplicationEntryPoint(SortDirection sortDirection) {
return addOrderCriteria("APPLICATION_ENTRY_POINT", sortDirection);
}
@Override
public ClassificationQuery orderByCustomAttribute(
ClassificationCustomField customField, SortDirection sortDirection) {
// Enum constant names (CUSTOM_1..CUSTOM_8) double as the column names here.
return addOrderCriteria(customField.name(), sortDirection);
}
@Override
public List<ClassificationSummary> list() {
return taskanaEngine.executeInDatabaseConnection(
() -> taskanaEngine.getSqlSession().selectList(LINK_TO_SUMMARYMAPPER, this));
}
/**
 * Returns one page of matching summaries using MyBatis RowBounds pagination.
 * A DB2 "offset beyond result set" error (-4470) is rewrapped as a
 * SystemException with the original stack trace preserved.
 */
@Override
public List<ClassificationSummary> list(int offset, int limit) {
List<ClassificationSummary> result = new ArrayList<>();
try {
taskanaEngine.openConnection();
RowBounds rowBounds = new RowBounds(offset, limit);
result = taskanaEngine.getSqlSession().selectList(LINK_TO_SUMMARYMAPPER, this, rowBounds);
return result;
} catch (PersistenceException e) {
if (e.getMessage().contains("ERRORCODE=-4470")) {
TaskanaRuntimeException ex =
new SystemException(
"The offset beginning was set over the amount of result-rows.", e.getCause());
ex.setStackTrace(e.getStackTrace());
throw ex;
}
throw e;
} finally {
taskanaEngine.returnConnection();
}
}
/**
 * Returns the distinct values of a single column, ordered as requested.
 * Mutates this query's columnName and ordering state as a side effect.
 */
@Override
public List<String> listValues(
ClassificationQueryColumnName columnName, SortDirection sortDirection) {
List<String> result = new ArrayList<>();
try {
taskanaEngine.openConnection();
this.columnName = columnName;
// NOTE(review): only orderBy is cleared here, orderColumns keeps
// accumulating across calls — confirm whether that is intentional.
this.orderBy.clear();
this.addOrderCriteria(columnName.toString(), sortDirection);
result = taskanaEngine.getSqlSession().selectList(LINK_TO_VALUEMAPPER, this);
return result;
} finally {
taskanaEngine.returnConnection();
}
}
@Override
public ClassificationSummary single() {
ClassificationSummary result = null;
try {
taskanaEngine.openConnection();
result = taskanaEngine.getSqlSession().selectOne(LINK_TO_SUMMARYMAPPER, this);
return result;
} finally {
taskanaEngine.returnConnection();
}
}
@Override
public long count() {
Long rowCount = null;
try {
taskanaEngine.openConnection();
rowCount = taskanaEngine.getSqlSession().selectOne(LINK_TO_COUNTER, this);
// Guard against a null count from the mapper before unboxing.
return (rowCount == null) ? 0L : rowCount;
} finally {
taskanaEngine.returnConnection();
}
}
// Getters below are read reflectively by the MyBatis mapper; their names
// (including the lowercase 'p' in getparentId/getparentKey) are part of the
// mapping contract and must stay as-is.
public String[] getKey() {
return key;
}
public String[] getIdIn() {
return idIn;
}
public String[] getparentId() {
return parentId;
}
public String[] getparentKey() {
return parentKey;
}
public String[] getCategory() {
return category;
}
public String[] getType() {
return type;
}
public String[] getNameIn() {
return nameIn;
}
public String[] getNameLike() {
return nameLike;
}
public String getDescriptionLike() {
return descriptionLike;
}
public int[] getPriority() {
return priority;
}
public String[] getServiceLevelIn() {
return serviceLevelIn;
}
public String[] getServiceLevelLike() {
return serviceLevelLike;
}
public String[] getDomain() {
return domain;
}
public Boolean getValidInDomain() {
return validInDomain;
}
public TimeInterval[] getCreatedIn() {
return createdIn;
}
public TimeInterval[] getModifiedIn() {
return modifiedIn;
}
public String[] getApplicationEntryPointIn() {
return applicationEntryPointIn;
}
public String[] getApplicationEntryPointLike() {
return applicationEntryPointLike;
}
public String[] getCustom1In() {
return custom1In;
}
public String[] getCustom1Like() {
return custom1Like;
}
public String[] getCustom2In() {
return custom2In;
}
public String[] getCustom2Like() {
return custom2Like;
}
public String[] getCustom3In() {
return custom3In;
}
public String[] getCustom3Like() {
return custom3Like;
}
public String[] getCustom4In() {
return custom4In;
}
public String[] getCustom4Like() {
return custom4Like;
}
public String[] getCustom5In() {
return custom5In;
}
public String[] getCustom5Like() {
return custom5Like;
}
public String[] getCustom6In() {
return custom6In;
}
public String[] getCustom6Like() {
return custom6Like;
}
public String[] getCustom7In() {
return custom7In;
}
public String[] getCustom7Like() {
return custom7Like;
}
public String[] getCustom8In() {
return custom8In;
}
public String[] getCustom8Like() {
return custom8Like;
}
public ClassificationQueryColumnName getColumnName() {
return columnName;
}
public List<String> getOrderBy() {
return orderBy;
}
public List<String> getOrderColumns() {
return orderColumns;
}
// Throws IllegalArgumentException on the first invalid interval.
private void validateAllTimeIntervals(TimeInterval[] createdIn) {
for (TimeInterval ti : createdIn) {
if (!ti.isValid()) {
throw new IllegalArgumentException("TimeInterval " + ti + " is invalid.");
}
}
}
// Appends "COLUMN ASC|DESC" to orderBy (ASCENDING when no direction given)
// and records the bare column name in orderColumns.
private ClassificationQuery addOrderCriteria(String columnName, SortDirection sortDirection) {
String orderByDirection =
" " + (sortDirection == null ? SortDirection.ASCENDING : sortDirection);
orderBy.add(columnName + orderByDirection);
orderColumns.add(columnName);
return this;
}
@Override
public String toString() {
return "ClassificationQueryImpl ["
+ "columnName= "
+ this.columnName
+ ", key= "
+ Arrays.toString(this.key)
+ ", idIn= "
+ Arrays.toString(this.idIn)
+ ", parentId= "
+ Arrays.toString(this.parentId)
+ ", parentKey= "
+ Arrays.toString(this.parentKey)
+ ", category= "
+ Arrays.toString(this.category)
+ ", type= "
+ Arrays.toString(this.type)
+ ", domain= "
+ Arrays.toString(this.domain)
+ ", validInDomain= "
+ this.validInDomain
+ ", createdIn= "
+ Arrays.toString(this.createdIn)
+ ", modifiedIn= "
+ Arrays.toString(this.modifiedIn)
+ ", nameIn= "
+ Arrays.toString(this.nameIn)
+ ", nameLike= "
+ Arrays.toString(this.nameLike)
+ ", descriptionLike= "
+ this.descriptionLike
+ ", priority= "
+ Arrays.toString(this.priority)
+ ", serviceLevelIn= "
+ Arrays.toString(this.serviceLevelIn)
+ ", serviceLevelLike= "
+ Arrays.toString(this.serviceLevelLike)
+ ", applicationEntryPointIn= "
+ Arrays.toString(this.applicationEntryPointIn)
+ ", applicationEntryPointLike= "
+ Arrays.toString(this.applicationEntryPointLike)
+ ", custom1In= "
+ Arrays.toString(this.custom1In)
+ ", custom1Like= "
+ Arrays.toString(this.custom1Like)
+ ", custom2In= "
+ Arrays.toString(this.custom2In)
+ ", custom2Like= "
+ Arrays.toString(this.custom2Like)
+ ", custom3In= "
+ Arrays.toString(this.custom3In)
+ ", custom3Like= "
+ Arrays.toString(this.custom3Like)
+ ", custom4In= "
+ Arrays.toString(this.custom4In)
+ ", custom4Like= "
+ Arrays.toString(this.custom4Like)
+ ", custom5In= "
+ Arrays.toString(this.custom5In)
+ ", custom5Like= "
+ Arrays.toString(this.custom5Like)
+ ", custom6In= "
+ Arrays.toString(this.custom6In)
+ ", custom6Like= "
+ Arrays.toString(this.custom6Like)
+ ", custom7In= "
+ Arrays.toString(this.custom7In)
+ ", custom7Like= "
+ Arrays.toString(this.custom7Like)
+ ", custom8In= "
+ Arrays.toString(this.custom8In)
+ ", custom8Like= "
+ Arrays.toString(this.custom8Like)
+ ", orderBy= "
+ this.orderBy
+ "]";
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tools;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.StringTokenizer;
import junit.framework.TestCase;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.MiniDFSCluster.Builder;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level;
/**
* A JUnit test for copying files recursively.
*/
public class TestCopyFiles extends TestCase {
{
// Instance initializer: silence noisy HDFS state-change, datanode and
// namenode logging during the tests, and enable full DistCpV1 logging so
// copy failures are easy to diagnose.
((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.StateChange")
).getLogger().setLevel(Level.ERROR);
((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ERROR);
((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ERROR);
((Log4JLogger)DistCpV1.LOG).getLogger().setLevel(Level.ALL);
}
// URI of the local file system, used as source/target in the local-FS tests.
static final URI LOCAL_FS = URI.create("file:///");
private static final Random RAN = new Random();
// Number of random files created per test fixture.
private static final int NFILES = 20;
// Scratch root under test.build.data; spaces are replaced with '+' so the
// path can be embedded in file:/// URIs handed to DistCp.
private static String TEST_ROOT_DIR =
new Path(System.getProperty("test.build.data","/tmp"))
.toString().replace(' ', '+');
/** class MyFile contains enough information to recreate the contents of
 * a single file: a random relative path plus a (size, seed) pair from which
 * the byte content is regenerated deterministically.
 */
private static class MyFile {
    private static Random gen = new Random();
    private static final int MAX_LEVELS = 3;
    private static final int MAX_SIZE = 8*1024;
    private static String[] dirNames = {
        "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"
    };

    private final String name;   // relative path: random dir components + random number
    private int size = 0;        // current content length in bytes
    private long seed = 0L;      // PRNG seed that reproduces the content

    /** Create a file at a random directory depth in [0, MAX_LEVELS). */
    MyFile() {
        this(gen.nextInt(MAX_LEVELS));
    }

    /** Create a file nested nLevels directories deep with a random name. */
    MyFile(int nLevels) {
        String xname = "";
        if (nLevels != 0) {
            int[] levels = new int[nLevels];
            for (int idx = 0; idx < nLevels; idx++) {
                levels[idx] = gen.nextInt(10);
            }
            // StringBuilder instead of StringBuffer: built and used on one
            // thread, so the synchronized variant buys nothing.
            StringBuilder sb = new StringBuilder();
            for (int idx = 0; idx < nLevels; idx++) {
                sb.append(dirNames[levels[idx]]);
                sb.append("/");
            }
            xname = sb.toString();
        }
        long fidx = gen.nextLong() & Long.MAX_VALUE;
        name = xname + Long.toString(fidx);
        reset();
    }

    /** Pick a new (size, seed) pair, each guaranteed to differ from the old. */
    void reset() {
        final int oldsize = size;
        do { size = gen.nextInt(MAX_SIZE); } while (oldsize == size);
        final long oldseed = seed;
        do { seed = gen.nextLong() & Long.MAX_VALUE; } while (oldseed == seed);
    }

    String getName() { return name; }
    int getSize() { return size; }
    long getSeed() { return seed; }
}
// Convenience overload: resolve the FileSystem for the given URI, then
// delegate to the FileSystem-based variant.
private static MyFile[] createFiles(URI fsname, String topdir)
    throws IOException {
    FileSystem fs = FileSystem.get(fsname, new Configuration());
    return createFiles(fs, topdir);
}
/** create NFILES with random names and directory hierarchies
 * with random (but reproducible) data in them.
 */
private static MyFile[] createFiles(FileSystem fs, String topdir)
    throws IOException {
    final Path root = new Path(topdir);
    final MyFile[] files = new MyFile[NFILES];
    int i = 0;
    while (i < NFILES) {
        files[i++] = createFile(root, fs);
    }
    return files;
}
/** Create one random file under root; a negative levels picks the depth at
 * random. The stream is closed in a finally block so it is not leaked when
 * write() throws (the original leaked it on failure).
 */
static MyFile createFile(Path root, FileSystem fs, int levels)
    throws IOException {
    MyFile f = levels < 0 ? new MyFile() : new MyFile(levels);
    Path p = new Path(root, f.getName());
    FSDataOutputStream out = fs.create(p);
    try {
        byte[] toWrite = new byte[f.getSize()];
        new Random(f.getSeed()).nextBytes(toWrite);
        out.write(toWrite);
    } finally {
        out.close();
    }
    FileSystem.LOG.info("created: " + p + ", size=" + f.getSize());
    return f;
}
/** Create one random file at a random directory depth. */
static MyFile createFile(Path root, FileSystem fs) throws IOException {
    final int randomDepth = -1; // negative level count = choose depth randomly
    return createFile(root, fs, randomDepth);
}
// Convenience overload: require every file to exist (existingOnly == false).
private static boolean checkFiles(FileSystem fs, String topdir, MyFile[] files
    ) throws IOException {
    final boolean existingOnly = false;
    return checkFiles(fs, topdir, files, existingOnly);
}
/** Verify that every file under topdir has exactly the content reproduced
 * from its (size, seed) pair. Returns false on the first mismatch. When
 * existingOnly is true, missing files are skipped instead of failing.
 * The input stream is now closed in a finally block (it was leaked whenever
 * the read assertion failed or an early return fired).
 */
private static boolean checkFiles(FileSystem fs, String topdir, MyFile[] files,
    boolean existingOnly) throws IOException {
    Path root = new Path(topdir);
    for (int idx = 0; idx < files.length; idx++) {
        Path fPath = new Path(root, files[idx].getName());
        try {
            fs.getFileStatus(fPath);
            FSDataInputStream in = fs.open(fPath);
            try {
                byte[] toRead = new byte[files[idx].getSize()];
                byte[] toCompare = new byte[files[idx].getSize()];
                Random rb = new Random(files[idx].getSeed());
                rb.nextBytes(toCompare);
                // NOTE(review): a single read() is not guaranteed to fill the
                // buffer; readFully() would be more robust — confirm before changing.
                assertEquals("Cannnot read file.", toRead.length, in.read(toRead));
                for (int i = 0; i < toRead.length; i++) {
                    if (toRead[i] != toCompare[i]) {
                        return false;
                    }
                }
            } finally {
                in.close();
            }
        }
        catch(FileNotFoundException fnfe) {
            if (!existingOnly) {
                throw fnfe;
            }
        }
    }
    return true;
}
/** Overwrite the first nupdate files with fresh random content (each file's
 * size and seed are re-rolled via reset()). The output stream is closed in a
 * finally block so it is not leaked if write() throws.
 */
private static void updateFiles(FileSystem fs, String topdir, MyFile[] files,
    int nupdate) throws IOException {
    assert nupdate <= NFILES;
    Path root = new Path(topdir);
    for (int idx = 0; idx < nupdate; ++idx) {
        Path fPath = new Path(root, files[idx].getName());
        // overwrite file
        assertTrue(fPath.toString() + " does not exist", fs.exists(fPath));
        FSDataOutputStream out = fs.create(fPath);
        try {
            files[idx].reset();
            byte[] toWrite = new byte[files[idx].getSize()];
            Random rb = new Random(files[idx].getSeed());
            rb.nextBytes(toWrite);
            out.write(toWrite);
        } finally {
            out.close();
        }
    }
}
// Convenience overload: every file must exist (existingOnly == false).
private static FileStatus[] getFileStatus(FileSystem fs,
    String topdir, MyFile[] files) throws IOException {
    final boolean existingOnly = false;
    return getFileStatus(fs, topdir, files, existingOnly);
}
/** Collect the FileStatus of every file under topdir; when existingOnly is
 * true, missing files are skipped instead of raising FileNotFoundException.
 * Iterates over files.length rather than the NFILES constant (the original
 * would throw ArrayIndexOutOfBoundsException for an array shorter than
 * NFILES; for all current callers the two are equal, so behavior is the same).
 */
private static FileStatus[] getFileStatus(FileSystem fs,
    String topdir, MyFile[] files, boolean existingOnly) throws IOException {
    Path root = new Path(topdir);
    List<FileStatus> statuses = new ArrayList<FileStatus>();
    for (int idx = 0; idx < files.length; ++idx) {
        try {
            statuses.add(fs.getFileStatus(new Path(root, files[idx].getName())));
        } catch(FileNotFoundException fnfe) {
            if (!existingOnly) {
                throw fnfe;
            }
        }
    }
    return statuses.toArray(new FileStatus[statuses.size()]);
}
/** Verify an update-mode copy: the first nupdate files must have a NEWER
 * modification time than before, and the remaining files must be untouched.
 * The second loop is bounded by upd.length instead of the NFILES constant so
 * a shorter fixture cannot index out of bounds (equal for current callers).
 */
private static boolean checkUpdate(FileSystem fs, FileStatus[] old,
    String topdir, MyFile[] upd, final int nupdate) throws IOException {
    Path root = new Path(topdir);
    // overwrote updated files
    for (int idx = 0; idx < nupdate; ++idx) {
        final FileStatus stat =
            fs.getFileStatus(new Path(root, upd[idx].getName()));
        if (stat.getModificationTime() <= old[idx].getModificationTime()) {
            return false;
        }
    }
    // did not overwrite files not updated
    for (int idx = nupdate; idx < upd.length; ++idx) {
        final FileStatus stat =
            fs.getFileStatus(new Path(root, upd[idx].getName()));
        if (stat.getModificationTime() != old[idx].getModificationTime()) {
            return false;
        }
    }
    return true;
}
/** delete directory and everything underneath it.*/
private static void deldir(FileSystem fs, String topdir) throws IOException {
    final boolean recursive = true;
    fs.delete(new Path(topdir), recursive);
}
/** copy files from local file system to local file system */
public void testCopyFromLocalToLocal() throws Exception {
    Configuration conf = new Configuration();
    FileSystem localfs = FileSystem.get(LOCAL_FS, conf);
    final String srcdir = TEST_ROOT_DIR + "/srcdat";
    final String destdir = TEST_ROOT_DIR + "/destdat";
    // Build the random fixture, run DistCp local->local, then compare trees.
    MyFile[] files = createFiles(LOCAL_FS, srcdir);
    ToolRunner.run(new DistCpV1(new Configuration()),
        new String[] {"file:///" + srcdir, "file:///" + destdir});
    assertTrue("Source and destination directories do not match.",
        checkFiles(localfs, destdir, files));
    deldir(localfs, destdir);
    deldir(localfs, srcdir);
}
/** copy files from dfs file system to dfs file system */
public void testCopyFromDfsToDfs() throws Exception {
String namenode = null;
MiniDFSCluster cluster = null;
try {
Configuration conf = new Configuration();
// Spin up a 2-datanode in-process HDFS cluster for the duration of the test.
cluster = new MiniDFSCluster(conf, 2, true, null);
final FileSystem hdfs = cluster.getFileSystem();
namenode = FileSystem.getDefaultUri(conf).toString();
if (namenode.startsWith("hdfs://")) {
// Copy /srcdat to /destdat inside the same cluster, logging to /logs,
// then verify contents and log-dir existence before cleaning up.
MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
ToolRunner.run(new DistCpV1(conf), new String[] {
"-log",
namenode+"/logs",
namenode+"/srcdat",
namenode+"/destdat"});
assertTrue("Source and destination directories do not match.",
checkFiles(hdfs, "/destdat", files));
FileSystem fs = FileSystem.get(URI.create(namenode+"/logs"), conf);
assertTrue("Log directory does not exist.",
fs.exists(new Path(namenode+"/logs")));
deldir(hdfs, "/destdat");
deldir(hdfs, "/srcdat");
deldir(hdfs, "/logs");
}
} finally {
// Always tear the mini-cluster down, even when an assertion fails.
if (cluster != null) { cluster.shutdown(); }
}
}
/** copy empty directory on dfs file system */
public void testEmptyDir() throws Exception {
String namenode = null;
MiniDFSCluster cluster = null;
try {
Configuration conf = new Configuration();
cluster = new MiniDFSCluster(conf, 2, true, null);
final FileSystem hdfs = cluster.getFileSystem();
namenode = FileSystem.getDefaultUri(conf).toString();
if (namenode.startsWith("hdfs://")) {
// Create an empty source directory and copy it; DistCp must still
// create the (empty) destination directory.
FileSystem fs = FileSystem.get(URI.create(namenode), new Configuration());
fs.mkdirs(new Path("/empty"));
ToolRunner.run(new DistCpV1(conf), new String[] {
"-log",
namenode+"/logs",
namenode+"/empty",
namenode+"/dest"});
// NOTE(review): the "/destdat" path segment in this URI looks like a
// copy-paste leftover from other tests (the check below uses "/dest");
// presumably only the URI's scheme/authority matter here — confirm.
fs = FileSystem.get(URI.create(namenode+"/destdat"), conf);
assertTrue("Destination directory does not exist.",
fs.exists(new Path(namenode+"/dest")));
deldir(hdfs, "/dest");
deldir(hdfs, "/empty");
deldir(hdfs, "/logs");
}
} finally {
// Always tear the mini-cluster down, even when an assertion fails.
if (cluster != null) { cluster.shutdown(); }
}
}
/** copy files from local file system to dfs file system */
public void testCopyFromLocalToDfs() throws Exception {
MiniDFSCluster cluster = null;
try {
Configuration conf = new Configuration();
// A single datanode is enough for a local -> HDFS copy.
cluster = new MiniDFSCluster(conf, 1, true, null);
final FileSystem hdfs = cluster.getFileSystem();
final String namenode = hdfs.getUri().toString();
if (namenode.startsWith("hdfs://")) {
// Fixture lives on the local FS; destination and log dir on HDFS.
MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR+"/srcdat");
ToolRunner.run(new DistCpV1(conf), new String[] {
"-log",
namenode+"/logs",
"file:///"+TEST_ROOT_DIR+"/srcdat",
namenode+"/destdat"});
assertTrue("Source and destination directories do not match.",
checkFiles(cluster.getFileSystem(), "/destdat", files));
assertTrue("Log directory does not exist.",
hdfs.exists(new Path(namenode+"/logs")));
deldir(hdfs, "/destdat");
deldir(hdfs, "/logs");
deldir(FileSystem.get(LOCAL_FS, conf), TEST_ROOT_DIR+"/srcdat");
}
} finally {
// Always tear the mini-cluster down, even when an assertion fails.
if (cluster != null) { cluster.shutdown(); }
}
}
/** copy files from dfs file system to local file system */
public void testCopyFromDfsToLocal() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    final FileSystem localfs = FileSystem.get(LOCAL_FS, conf);
    cluster = new MiniDFSCluster(conf, 1, true, null);
    final FileSystem hdfs = cluster.getFileSystem();
    final String namenode = FileSystem.getDefaultUri(conf).toString();
    if (namenode.startsWith("hdfs://")) {
      MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
      // Note: the log path "/logs" has no scheme, so it resolves against
      // the default FS (HDFS), while the destination is local.
      ToolRunner.run(new DistCpV1(conf), new String[] {
        "-log",
        "/logs",
        namenode+"/srcdat",
        "file:///"+TEST_ROOT_DIR+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(localfs, TEST_ROOT_DIR+"/destdat", files));
      assertTrue("Log directory does not exist.",
                 hdfs.exists(new Path("/logs")));
      deldir(localfs, TEST_ROOT_DIR+"/destdat");
      deldir(hdfs, "/logs");
      deldir(hdfs, "/srcdat");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/**
 * HDFS-to-HDFS copy followed by -update and -overwrite runs: after
 * modifying a subset of the sources, -update must re-copy only the
 * changed files and -overwrite must rewrite everything.
 */
public void testCopyDfsToDfsUpdateOverwrite() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    cluster = new MiniDFSCluster(conf, 2, true, null);
    final FileSystem hdfs = cluster.getFileSystem();
    final String namenode = hdfs.getUri().toString();
    if (namenode.startsWith("hdfs://")) {
      MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
      // Phase 1: initial copy with -p (preserve file status).
      ToolRunner.run(new DistCpV1(conf), new String[] {
        "-p",
        "-log",
        namenode+"/logs",
        namenode+"/srcdat",
        namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(hdfs, "/destdat", files));
      FileSystem fs = FileSystem.get(URI.create(namenode+"/logs"), conf);
      assertTrue("Log directory does not exist.",
                 fs.exists(new Path(namenode+"/logs")));
      // Snapshot destination status, then modify a quarter of the sources.
      FileStatus[] dchkpoint = getFileStatus(hdfs, "/destdat", files);
      final int nupdate = NFILES>>2;
      updateFiles(cluster.getFileSystem(), "/srcdat", files, nupdate);
      deldir(hdfs, "/logs");
      // Phase 2: -update should replicate exactly the nupdate changes.
      ToolRunner.run(new DistCpV1(conf), new String[] {
        "-prbugp", // no t to avoid preserving mod. times
        "-update",
        "-log",
        namenode+"/logs",
        namenode+"/srcdat",
        namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(hdfs, "/destdat", files));
      assertTrue("Update failed to replicate all changes in src",
                 checkUpdate(hdfs, dchkpoint, "/destdat", files, nupdate));
      deldir(hdfs, "/logs");
      // Phase 3: -overwrite must rewrite all NFILES files, changed or not.
      ToolRunner.run(new DistCpV1(conf), new String[] {
        "-prbugp", // no t to avoid preserving mod. times
        "-overwrite",
        "-log",
        namenode+"/logs",
        namenode+"/srcdat",
        namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(hdfs, "/destdat", files));
      assertTrue("-overwrite didn't.",
                 checkUpdate(hdfs, dchkpoint, "/destdat", files, NFILES));
      deldir(hdfs, "/destdat");
      deldir(hdfs, "/srcdat");
      deldir(hdfs, "/logs");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/**
 * Verifies -skipcrccheck with -update: two same-length files with
 * different contents must NOT be overwritten when CRC checking is
 * skipped (lengths match), and MUST be overwritten without the option.
 */
public void testCopyDfsToDfsUpdateWithSkipCRC() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    cluster = new MiniDFSCluster(conf, 2, true, null);
    final FileSystem hdfs = cluster.getFileSystem();
    final String namenode = hdfs.getUri().toString();
    FileSystem fs = FileSystem.get(URI.create(namenode), new Configuration());
    // Create two files of the same name, same length but different
    // contents
    final String testfilename = "test";
    final String srcData = "act act act";
    final String destData = "cat cat cat";
    if (namenode.startsWith("hdfs://")) {
      deldir(hdfs,"/logs");
      Path srcPath = new Path("/srcdat", testfilename);
      Path destPath = new Path("/destdat", testfilename);
      FSDataOutputStream out = fs.create(srcPath, true);
      out.writeUTF(srcData);
      out.close();
      out = fs.create(destPath, true);
      out.writeUTF(destData);
      out.close();
      // Run with -skipcrccheck option
      ToolRunner.run(new DistCpV1(conf), new String[] {
        "-p",
        "-update",
        "-skipcrccheck",
        "-log",
        namenode+"/logs",
        namenode+"/srcdat",
        namenode+"/destdat"});
      // File should not be overwritten
      FSDataInputStream in = hdfs.open(destPath);
      String s = in.readUTF();
      System.out.println("Dest had: " + s);
      assertTrue("Dest got over written even with skip crc",
                 s.equalsIgnoreCase(destData));
      in.close();
      deldir(hdfs, "/logs");
      // Run without the option
      ToolRunner.run(new DistCpV1(conf), new String[] {
        "-p",
        "-update",
        "-log",
        namenode+"/logs",
        namenode+"/srcdat",
        namenode+"/destdat"});
      // File should be overwritten
      in = hdfs.open(destPath);
      s = in.readUTF();
      System.out.println("Dest had: " + s);
      assertTrue("Dest did not get overwritten without skip crc",
                 s.equalsIgnoreCase(srcData));
      in.close();
      deldir(hdfs, "/destdat");
      deldir(hdfs, "/srcdat");
      deldir(hdfs, "/logs");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/**
 * A copy whose source list would yield duplicate destination entries
 * must fail with DuplicationException.ERROR_CODE.
 */
public void testCopyDuplication() throws Exception {
  final FileSystem localfs = FileSystem.get(LOCAL_FS, new Configuration());
  try {
    MyFile[] files = createFiles(localfs, TEST_ROOT_DIR+"/srcdat");
    // First make a second tree that duplicates srcdat's file names.
    ToolRunner.run(new DistCpV1(new Configuration()),
                   new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                                 "file:///"+TEST_ROOT_DIR+"/src2/srcdat"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(localfs, TEST_ROOT_DIR+"/src2/srcdat", files));
    // Copying both trees to one destination collides on file names and
    // must return the documented duplication error code.
    assertEquals(DistCpV1.DuplicationException.ERROR_CODE,
                 ToolRunner.run(new DistCpV1(new Configuration()),
                                new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                                              "file:///"+TEST_ROOT_DIR+"/src2/srcdat",
                                              "file:///"+TEST_ROOT_DIR+"/destdat",}));
  }
  finally {
    deldir(localfs, TEST_ROOT_DIR+"/destdat");
    deldir(localfs, TEST_ROOT_DIR+"/srcdat");
    deldir(localfs, TEST_ROOT_DIR+"/src2");
  }
}
/**
 * Single-file copy scenarios on the local FS: dir with one file,
 * file-to-file, -update skip when dest already has the file, and
 * file-into-existing-dir with and without a name conflict.
 */
public void testCopySingleFile() throws Exception {
  FileSystem fs = FileSystem.get(LOCAL_FS, new Configuration());
  Path root = new Path(TEST_ROOT_DIR+"/srcdat");
  try {
    MyFile[] files = {createFile(root, fs)};
    //copy a dir with a single file
    ToolRunner.run(new DistCpV1(new Configuration()),
                   new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                                 "file:///"+TEST_ROOT_DIR+"/destdat"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(fs, TEST_ROOT_DIR+"/destdat", files));
    //copy a single file
    String fname = files[0].getName();
    Path p = new Path(root, fname);
    FileSystem.LOG.info("fname=" + fname + ", exists? " + fs.exists(p));
    ToolRunner.run(new DistCpV1(new Configuration()),
                   new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat/"+fname,
                                 "file:///"+TEST_ROOT_DIR+"/dest2/"+fname});
    assertTrue("Source and destination directories do not match.",
               checkFiles(fs, TEST_ROOT_DIR+"/dest2", files));
    // single file update should skip copy if destination has the file already
    String[] args = {"-update", "file:///"+TEST_ROOT_DIR+"/srcdat/"+fname,
                     "file:///"+TEST_ROOT_DIR+"/dest2/"+fname};
    Configuration conf = new Configuration();
    JobConf job = new JobConf(conf, DistCpV1.class);
    DistCpV1.Arguments distcpArgs = DistCpV1.Arguments.valueOf(args, conf);
    // setup() returning false indicates no copy job is needed.
    assertFalse("Single file update failed to skip copying even though the "
                + "file exists at destination.", DistCpV1.setup(conf, job, distcpArgs));
    //copy single file to existing dir
    deldir(fs, TEST_ROOT_DIR+"/dest2");
    fs.mkdirs(new Path(TEST_ROOT_DIR+"/dest2"));
    // createFile with seed 0 yields a second, distinct file name.
    MyFile[] files2 = {createFile(root, fs, 0)};
    String sname = files2[0].getName();
    ToolRunner.run(new DistCpV1(new Configuration()),
                   new String[] {"-update",
                                 "file:///"+TEST_ROOT_DIR+"/srcdat/"+sname,
                                 "file:///"+TEST_ROOT_DIR+"/dest2/"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(fs, TEST_ROOT_DIR+"/dest2", files2));
    updateFiles(fs, TEST_ROOT_DIR+"/srcdat", files2, 1);
    //copy single file to existing dir w/ dst name conflict
    ToolRunner.run(new DistCpV1(new Configuration()),
                   new String[] {"-update",
                                 "file:///"+TEST_ROOT_DIR+"/srcdat/"+sname,
                                 "file:///"+TEST_ROOT_DIR+"/dest2/"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(fs, TEST_ROOT_DIR+"/dest2", files2));
  }
  finally {
    deldir(fs, TEST_ROOT_DIR+"/destdat");
    deldir(fs, TEST_ROOT_DIR+"/dest2");
    deldir(fs, TEST_ROOT_DIR+"/srcdat");
  }
}
/** tests basedir option copying files from dfs file system to dfs file system */
public void testBasedir() throws Exception {
  String namenode = null;
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    cluster = new MiniDFSCluster(conf, 2, true, null);
    final FileSystem hdfs = cluster.getFileSystem();
    namenode = FileSystem.getDefaultUri(conf).toString();
    if (namenode.startsWith("hdfs://")) {
      MyFile[] files = createFiles(URI.create(namenode), "/basedir/middle/srcdat");
      // With -basedir /basedir, the path below the base ("middle/srcdat")
      // is reproduced under the destination.
      ToolRunner.run(new DistCpV1(conf), new String[] {
        "-basedir",
        "/basedir",
        namenode+"/basedir/middle/srcdat",
        namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(hdfs, "/destdat/middle/srcdat", files));
      deldir(hdfs, "/destdat");
      deldir(hdfs, "/basedir");
      deldir(hdfs, "/logs");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/**
 * Exercises the -p (preserve) flags one at a time: -pu (owner),
 * -pg (group), -pp (permissions) and -pt (modification/access times),
 * checking that the preserved attribute matches on every destination file.
 */
public void testPreserveOption() throws Exception {
  Configuration conf = new Configuration();
  MiniDFSCluster cluster = null;
  try {
    cluster = new MiniDFSCluster(conf, 2, true, null);
    String nnUri = FileSystem.getDefaultUri(conf).toString();
    FileSystem fs = FileSystem.get(URI.create(nnUri), conf);
    {//test preserving user
      MyFile[] files = createFiles(URI.create(nnUri), "/srcdat");
      FileStatus[] srcstat = getFileStatus(fs, "/srcdat", files);
      // Give every source file a distinct owner "u<i>".
      for(int i = 0; i < srcstat.length; i++) {
        fs.setOwner(srcstat[i].getPath(), "u" + i, null);
      }
      ToolRunner.run(new DistCpV1(conf),
                     new String[]{"-pu", nnUri+"/srcdat", nnUri+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(fs, "/destdat", files));
      FileStatus[] dststat = getFileStatus(fs, "/destdat", files);
      for(int i = 0; i < dststat.length; i++) {
        assertEquals("i=" + i, "u" + i, dststat[i].getOwner());
      }
      deldir(fs, "/destdat");
      deldir(fs, "/srcdat");
    }
    {//test preserving group
      MyFile[] files = createFiles(URI.create(nnUri), "/srcdat");
      FileStatus[] srcstat = getFileStatus(fs, "/srcdat", files);
      // Give every source file a distinct group "g<i>".
      for(int i = 0; i < srcstat.length; i++) {
        fs.setOwner(srcstat[i].getPath(), null, "g" + i);
      }
      ToolRunner.run(new DistCpV1(conf),
                     new String[]{"-pg", nnUri+"/srcdat", nnUri+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(fs, "/destdat", files));
      FileStatus[] dststat = getFileStatus(fs, "/destdat", files);
      for(int i = 0; i < dststat.length; i++) {
        assertEquals("i=" + i, "g" + i, dststat[i].getGroup());
      }
      deldir(fs, "/destdat");
      deldir(fs, "/srcdat");
    }
    {//test preserving mode
      MyFile[] files = createFiles(URI.create(nnUri), "/srcdat");
      FileStatus[] srcstat = getFileStatus(fs, "/srcdat", files);
      FsPermission[] permissions = new FsPermission[srcstat.length];
      // Assign a distinct permission per file and remember it for checking.
      for(int i = 0; i < srcstat.length; i++) {
        permissions[i] = new FsPermission((short)(i & 0666));
        fs.setPermission(srcstat[i].getPath(), permissions[i]);
      }
      ToolRunner.run(new DistCpV1(conf),
                     new String[]{"-pp", nnUri+"/srcdat", nnUri+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(fs, "/destdat", files));
      FileStatus[] dststat = getFileStatus(fs, "/destdat", files);
      for(int i = 0; i < dststat.length; i++) {
        assertEquals("i=" + i, permissions[i], dststat[i].getPermission());
      }
      deldir(fs, "/destdat");
      deldir(fs, "/srcdat");
    }
    {//test preserving times
      MyFile[] files = createFiles(URI.create(nnUri), "/srcdat");
      fs.mkdirs(new Path("/srcdat/tmpf1"));
      fs.mkdirs(new Path("/srcdat/tmpf2"));
      FileStatus[] srcstat = getFileStatus(fs, "/srcdat", files);
      // (Fixed: removed an unused FsPermission[] local left over from the
      // preserve-mode section above.)
      // Stamp every source file with fixed mtime=40 / atime=50.
      for(int i = 0; i < srcstat.length; i++) {
        fs.setTimes(srcstat[i].getPath(), 40, 50);
      }
      ToolRunner.run(new DistCpV1(conf),
                     new String[]{"-pt", nnUri+"/srcdat", nnUri+"/destdat"});
      FileStatus[] dststat = getFileStatus(fs, "/destdat", files);
      for(int i = 0; i < dststat.length; i++) {
        assertEquals("Modif. Time i=" + i, 40, dststat[i].getModificationTime());
        assertEquals("Access Time i=" + i+ srcstat[i].getPath() + "-" + dststat[i].getPath(), 50, dststat[i].getAccessTime());
      }
      assertTrue("Source and destination directories do not match.",
                 checkFiles(fs, "/destdat", files));
      deldir(fs, "/destdat");
      deldir(fs, "/srcdat");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/**
 * Verifies that distcp.bytes.per.map controls the number of map tasks:
 * with per-map bytes = totalsize/3 the copy should use 2 maps (one log
 * file per map plus the source listing), while -m 1 forces a single map.
 */
public void testMapCount() throws Exception {
  String namenode = null;
  MiniDFSCluster dfs = null;
  // (Fixed: removed an unused MiniDFSCluster local "mr" that was never
  // started -- only null-checked and shut down -- apparently a leftover
  // from an earlier MiniMRCluster-based version of this test.)
  try {
    Configuration conf = new Configuration();
    dfs = new MiniDFSCluster.Builder(conf).numDataNodes(3).format(true).build();
    FileSystem fs = dfs.getFileSystem();
    final FsShell shell = new FsShell(conf);
    namenode = fs.getUri().toString();
    MyFile[] files = createFiles(fs.getUri(), "/srcdat");
    long totsize = 0;
    for (MyFile f : files) {
      totsize += f.getSize();
    }
    Configuration job = new JobConf(conf);
    // Size each map at a third of the data so the copy splits into maps.
    job.setLong("distcp.bytes.per.map", totsize / 3);
    ToolRunner.run(new DistCpV1(job),
                   new String[] {"-m", "100",
                                 "-log",
                                 namenode+"/logs",
                                 namenode+"/srcdat",
                                 namenode+"/destdat"});
    assertTrue("Source and destination directories do not match.",
               checkFiles(fs, "/destdat", files));
    String logdir = namenode + "/logs";
    System.out.println(execCmd(shell, "-lsr", logdir));
    FileStatus[] logs = fs.listStatus(new Path(logdir));
    // rare case where splits are exact, logs.length can be 4
    // (Fixed: added a diagnostic message, consistent with the assertion
    // for the -m 1 case below.)
    assertTrue("Unexpected map count, logs.length=" + logs.length,
               logs.length == 2);
    deldir(fs, "/destdat");
    deldir(fs, "/logs");
    // -m 1 caps the job at a single map regardless of bytes-per-map.
    ToolRunner.run(new DistCpV1(job),
                   new String[] {"-m", "1",
                                 "-log",
                                 namenode+"/logs",
                                 namenode+"/srcdat",
                                 namenode+"/destdat"});
    System.out.println(execCmd(shell, "-lsr", logdir));
    logs = fs.globStatus(new Path(namenode+"/logs/part*"));
    assertTrue("Unexpected map count, logs.length=" + logs.length,
               logs.length == 1);
  } finally {
    if (dfs != null) { dfs.shutdown(); }
  }
}
/**
 * Exercises the -filelimit and -sizelimit options, individually and
 * combined with -update, checking that each DistCp run copies no more
 * than the configured number of files / bytes.
 */
public void testLimits() throws Exception {
  Configuration conf = new Configuration();
  MiniDFSCluster cluster = null;
  try {
    cluster = new MiniDFSCluster(conf, 2, true, null);
    final String nnUri = FileSystem.getDefaultUri(conf).toString();
    final FileSystem fs = FileSystem.get(URI.create(nnUri), conf);
    final DistCpV1 distcp = new DistCpV1(conf);
    final FsShell shell = new FsShell(conf);
    final String srcrootdir = "/src_root";
    final Path srcrootpath = new Path(srcrootdir);
    final String dstrootdir = "/dst_root";
    final Path dstrootpath = new Path(dstrootdir);
    {//test -filelimit
      MyFile[] files = createFiles(URI.create(nnUri), srcrootdir);
      // Limit to half the files; exactly that many must arrive.
      int filelimit = files.length / 2;
      System.out.println("filelimit=" + filelimit);
      ToolRunner.run(distcp,
                     new String[]{"-filelimit", ""+filelimit, nnUri+srcrootdir, nnUri+dstrootdir});
      String results = execCmd(shell, "-lsr", dstrootdir);
      results = removePrefix(results, dstrootdir);
      System.out.println("results=" + results);
      FileStatus[] dststat = getFileStatus(fs, dstrootdir, files, true);
      assertEquals(filelimit, dststat.length);
      deldir(fs, dstrootdir);
      deldir(fs, srcrootdir);
    }
    {//test -sizelimit
      createFiles(URI.create(nnUri), srcrootdir);
      // Limit to half the total bytes; the copied total must not exceed it.
      long sizelimit = fs.getContentSummary(srcrootpath).getLength()/2;
      System.out.println("sizelimit=" + sizelimit);
      ToolRunner.run(distcp,
                     new String[]{"-sizelimit", ""+sizelimit, nnUri+srcrootdir, nnUri+dstrootdir});
      ContentSummary summary = fs.getContentSummary(dstrootpath);
      System.out.println("summary=" + summary);
      assertTrue(summary.getLength() <= sizelimit);
      deldir(fs, dstrootdir);
      deldir(fs, srcrootdir);
    }
    {//test update
      final MyFile[] srcs = createFiles(URI.create(nnUri), srcrootdir);
      final long totalsize = fs.getContentSummary(srcrootpath).getLength();
      System.out.println("src.length=" + srcs.length);
      System.out.println("totalsize =" + totalsize);
      fs.mkdirs(dstrootpath);
      // Split the copy into a random number of -update passes; each pass
      // may add at most filelimit files / sizelimit bytes.
      final int parts = RAN.nextInt(NFILES/3 - 1) + 2;
      final int filelimit = srcs.length/parts;
      final long sizelimit = totalsize/parts;
      System.out.println("filelimit=" + filelimit);
      System.out.println("sizelimit=" + sizelimit);
      System.out.println("parts =" + parts);
      final String[] args = {"-filelimit", ""+filelimit, "-sizelimit", ""+sizelimit,
                             "-update", nnUri+srcrootdir, nnUri+dstrootdir};
      int dstfilecount = 0;
      long dstsize = 0;
      // parts+1 passes guarantee the whole tree eventually transfers.
      for(int i = 0; i <= parts; i++) {
        ToolRunner.run(distcp, args);
        FileStatus[] dststat = getFileStatus(fs, dstrootdir, srcs, true);
        System.out.println(i + ") dststat.length=" + dststat.length);
        // Per-pass growth stays within both limits.
        assertTrue(dststat.length - dstfilecount <= filelimit);
        ContentSummary summary = fs.getContentSummary(dstrootpath);
        System.out.println(i + ") summary.getLength()=" + summary.getLength());
        assertTrue(summary.getLength() - dstsize <= sizelimit);
        assertTrue(checkFiles(fs, dstrootdir, srcs, true));
        dstfilecount = dststat.length;
        dstsize = summary.getLength();
      }
      deldir(fs, dstrootdir);
      deldir(fs, srcrootdir);
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/** Run timestamp used to make test user names unique across runs. */
static final long now = System.currentTimeMillis();

/**
 * Builds a testing UGI named {@code name + now}; superusers get the
 * "supergroup" group, everyone else a group equal to their own name.
 */
static UserGroupInformation createUGI(String name, boolean issuper) {
  final String user = name + now;
  final String[] groups = { issuper ? "supergroup" : user };
  return UserGroupInformation.createUserForTesting(user, groups);
}
/**
 * Creates /user/&lt;name&gt; for the given UGI, chowns it to the user and
 * the user's primary group, and restricts it to mode 0700.
 *
 * @return the home directory path that was created
 */
static Path createHomeDirectory(FileSystem fs, UserGroupInformation ugi
    ) throws IOException {
  final Path homeDir = new Path("/user/" + ugi.getUserName());
  fs.mkdirs(homeDir);
  fs.setOwner(homeDir, ugi.getUserName(), ugi.getGroupNames()[0]);
  fs.setPermission(homeDir, new FsPermission((short)0700));
  return homeDir;
}
/**
 * Access-control check over hftp: a distcp run as a plain user against a
 * source directory with permission 000 must fail with exit code -3.
 */
public void testHftpAccessControl() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    final UserGroupInformation DFS_UGI = createUGI("dfs", true);
    final UserGroupInformation USER_UGI = createUGI("user", false);
    //start cluster by DFS_UGI
    final Configuration dfsConf = new Configuration();
    cluster = new MiniDFSCluster(dfsConf, 2, true, null);
    cluster.waitActive();
    final String httpAdd = dfsConf.get("dfs.http.address");
    final URI nnURI = FileSystem.getDefaultUri(dfsConf);
    final String nnUri = nnURI.toString();
    // Obtain a superuser FS handle to create the user's home directory.
    FileSystem fs1 = DFS_UGI.doAs(new PrivilegedExceptionAction<FileSystem>() {
      public FileSystem run() throws IOException {
        return FileSystem.get(nnURI, dfsConf);
      }
    });
    final Path home =
      createHomeDirectory(fs1, USER_UGI);
    //now, login as USER_UGI
    final Configuration userConf = new Configuration();
    final FileSystem fs =
      USER_UGI.doAs(new PrivilegedExceptionAction<FileSystem>() {
        public FileSystem run() throws IOException {
          return FileSystem.get(nnURI, userConf);
        }
      });
    final Path srcrootpath = new Path(home, "src_root");
    final String srcrootdir = srcrootpath.toString();
    final Path dstrootpath = new Path(home, "dst_root");
    final String dstrootdir = dstrootpath.toString();
    // The DistCp instance itself is constructed under the user identity.
    final DistCpV1 distcp = USER_UGI.doAs(new PrivilegedExceptionAction<DistCpV1>() {
      public DistCpV1 run() {
        return new DistCpV1(userConf);
      }
    });
    FileSystem.mkdirs(fs, srcrootpath, new FsPermission((short)0700));
    final String[] args = {"hftp://"+httpAdd+srcrootdir, nnUri+dstrootdir};
    { //copy with permission 000, should fail
      fs.setPermission(srcrootpath, new FsPermission((short)0));
      USER_UGI.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
          // -3 is DistCpV1's access-denied exit code here.
          assertEquals(-3, ToolRunner.run(distcp, args));
          return null;
        }
      });
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/** test -delete */
public void testDelete() throws Exception {
  final Configuration conf = new Configuration();
  // Enable trash so we can verify deleted dest files land there.
  conf.setInt("fs.trash.interval", 60);
  MiniDFSCluster cluster = null;
  try {
    cluster = new MiniDFSCluster(conf, 2, true, null);
    final URI nnURI = FileSystem.getDefaultUri(conf);
    final String nnUri = nnURI.toString();
    final FileSystem fs = FileSystem.get(URI.create(nnUri), conf);
    final DistCpV1 distcp = new DistCpV1(conf);
    final FsShell shell = new FsShell(conf);
    final String srcrootdir = "/src_root";
    final String dstrootdir = "/dst_root";
    {
      //create source files
      createFiles(nnURI, srcrootdir);
      String srcresults = execCmd(shell, "-lsr", srcrootdir);
      srcresults = removePrefix(srcresults, srcrootdir);
      System.out.println("srcresults=" + srcresults);
      //create some files in dst
      createFiles(nnURI, dstrootdir);
      System.out.println("dstrootdir=" + dstrootdir);
      shell.run(new String[]{"-lsr", dstrootdir});
      //run distcp
      ToolRunner.run(distcp,
                     new String[]{"-delete", "-update", "-log", "/log",
                                  nnUri+srcrootdir, nnUri+dstrootdir});
      //make sure src and dst contains the same files
      // Listings are compared with the root prefix stripped so only the
      // relative trees need to match.
      String dstresults = execCmd(shell, "-lsr", dstrootdir);
      dstresults = removePrefix(dstresults, dstrootdir);
      System.out.println("first dstresults=" + dstresults);
      assertEquals(srcresults, dstresults);
      //create additional file in dst
      create(fs, new Path(dstrootdir, "foo"));
      create(fs, new Path(dstrootdir, "foobar"));
      //run distcp again
      ToolRunner.run(distcp,
                     new String[]{"-delete", "-update", "-log", "/log2",
                                  nnUri+srcrootdir, nnUri+dstrootdir});
      //make sure src and dst contains the same files
      dstresults = execCmd(shell, "-lsr", dstrootdir);
      dstresults = removePrefix(dstresults, dstrootdir);
      System.out.println("second dstresults=" + dstresults);
      assertEquals(srcresults, dstresults);
      // verify that files removed in -delete were moved to the trash
      // regrettably, this test will break if Trash changes incompatibly
      assertTrue(fs.exists(new Path(fs.getHomeDirectory(),
                                    ".Trash/Current" + dstrootdir + "/foo")));
      assertTrue(fs.exists(new Path(fs.getHomeDirectory(),
                                    ".Trash/Current" + dstrootdir + "/foobar")));
      //cleanup
      deldir(fs, dstrootdir);
      deldir(fs, srcrootdir);
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/**
 * verify that -delete option works for other {@link FileSystem}
 * implementations. See MAPREDUCE-1285 */
public void testDeleteLocal() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    final FileSystem localfs = FileSystem.get(LOCAL_FS, conf);
    cluster = new MiniDFSCluster(conf, 1, true, null);
    final FileSystem hdfs = cluster.getFileSystem();
    final String namenode = FileSystem.getDefaultUri(conf).toString();
    if (namenode.startsWith("hdfs://")) {
      MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
      String destdir = TEST_ROOT_DIR + "/destdat";
      // Pre-populate the LOCAL destination with files that -delete must
      // remove (the regression from MAPREDUCE-1285 was HDFS-only delete).
      MyFile[] localFiles = createFiles(localfs, destdir);
      ToolRunner.run(new DistCpV1(conf), new String[] {
        "-delete",
        "-update",
        "-log",
        "/logs",
        namenode+"/srcdat",
        "file:///"+TEST_ROOT_DIR+"/destdat"});
      // After the copy the local dest must match the HDFS source exactly.
      assertTrue("Source and destination directories do not match.",
                 checkFiles(localfs, destdir, files));
      assertTrue("Log directory does not exist.",
                 hdfs.exists(new Path("/logs")));
      deldir(localfs, destdir);
      deldir(hdfs, "/logs");
      deldir(hdfs, "/srcdat");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/** test globbing */
public void testGlobbing() throws Exception {
  String namenode = null;
  MiniDFSCluster cluster = null;
  try {
    Configuration conf = new Configuration();
    cluster = new MiniDFSCluster(conf, 2, true, null);
    final FileSystem hdfs = cluster.getFileSystem();
    namenode = FileSystem.getDefaultUri(conf).toString();
    if (namenode.startsWith("hdfs://")) {
      MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
      // Source is the glob "/srcdat/*" rather than the directory itself;
      // the expanded children must all land under /destdat.
      ToolRunner.run(new DistCpV1(conf), new String[] {
        "-log",
        namenode+"/logs",
        namenode+"/srcdat/*",
        namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(hdfs, "/destdat", files));
      FileSystem fs = FileSystem.get(URI.create(namenode+"/logs"), conf);
      assertTrue("Log directory does not exist.",
                 fs.exists(new Path(namenode+"/logs")));
      deldir(hdfs, "/destdat");
      deldir(hdfs, "/srcdat");
      deldir(hdfs, "/logs");
    }
  } finally {
    if (cluster != null) { cluster.shutdown(); }
  }
}
/**
 * Creates file {@code f} and fills it with 1024..2047 random bytes.
 *
 * @param fs file system to create the file on
 * @param f  path of the file to create
 * @throws IOException if creation or writing fails
 */
static void create(FileSystem fs, Path f) throws IOException {
  FSDataOutputStream out = fs.create(f);
  try {
    byte[] b = new byte[1024 + RAN.nextInt(1024)];
    RAN.nextBytes(b);
    out.write(b);
  } finally {
    // Fixed: dropped the redundant null check -- "out" is assigned before
    // the try block, so it can never be null here (fs.create either
    // returns a stream or throws).
    out.close();
  }
}
/**
 * Runs an FsShell command and returns everything it printed to stdout.
 *
 * Temporarily redirects System.out into a buffer for the duration of the
 * command.
 *
 * @param shell the shell to run the command with
 * @param args  command name and arguments, e.g. {@code "-lsr", "/dir"}
 * @return the captured stdout of the command
 */
static String execCmd(FsShell shell, String... args) throws Exception {
  ByteArrayOutputStream baout = new ByteArrayOutputStream();
  PrintStream out = new PrintStream(baout, true);
  PrintStream old = System.out;
  System.setOut(out);
  try {
    shell.run(args);
  } finally {
    out.close();
    // Fixed: restore System.out in a finally block; previously an
    // exception from shell.run left stdout redirected into the dead
    // buffer for every subsequent test.
    System.setOut(old);
  }
  return baout.toString();
}
/**
 * Strips {@code prefix} (and anything before its first occurrence) from
 * every line of {@code lines}, re-joining the results with '\n'.
 *
 * Empty lines are dropped by the tokenizer; every emitted line is
 * terminated with a newline.
 *
 * @param lines  newline-separated input text
 * @param prefix prefix to remove from each line
 * @return the prefix-stripped, newline-terminated lines
 */
private static String removePrefix(String lines, String prefix) {
  final int prefixlen = prefix.length();
  final StringTokenizer t = new StringTokenizer(lines, "\n");
  // Fixed: StringBuilder instead of StringBuffer -- no synchronization is
  // needed in this single-threaded helper.
  final StringBuilder results = new StringBuilder();
  while (t.hasMoreTokens()) {
    String s = t.nextToken();
    results.append(s.substring(s.indexOf(prefix) + prefixlen)).append('\n');
  }
  return results.toString();
}
}
| |
/*
* JasperReports - Free Java Reporting Library.
* Copyright (C) 2001 - 2014 TIBCO Software Inc. All rights reserved.
* http://www.jaspersoft.com
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is part of JasperReports.
*
* JasperReports is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JasperReports is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
*/
package net.sf.jasperreports.engine.base;
import java.awt.Color;
import java.io.IOException;
import java.io.ObjectInputStream;
import net.sf.jasperreports.charts.JRAreaPlot;
import net.sf.jasperreports.charts.JRBar3DPlot;
import net.sf.jasperreports.charts.JRBarPlot;
import net.sf.jasperreports.charts.JRBubblePlot;
import net.sf.jasperreports.charts.JRCandlestickPlot;
import net.sf.jasperreports.charts.JRCategoryDataset;
import net.sf.jasperreports.charts.JRGanttDataset;
import net.sf.jasperreports.charts.JRHighLowDataset;
import net.sf.jasperreports.charts.JRHighLowPlot;
import net.sf.jasperreports.charts.JRLinePlot;
import net.sf.jasperreports.charts.JRMeterPlot;
import net.sf.jasperreports.charts.JRMultiAxisPlot;
import net.sf.jasperreports.charts.JRPie3DPlot;
import net.sf.jasperreports.charts.JRPieDataset;
import net.sf.jasperreports.charts.JRPiePlot;
import net.sf.jasperreports.charts.JRScatterPlot;
import net.sf.jasperreports.charts.JRThermometerPlot;
import net.sf.jasperreports.charts.JRTimePeriodDataset;
import net.sf.jasperreports.charts.JRTimeSeriesDataset;
import net.sf.jasperreports.charts.JRTimeSeriesPlot;
import net.sf.jasperreports.charts.JRValueDataset;
import net.sf.jasperreports.charts.JRXyDataset;
import net.sf.jasperreports.charts.JRXyzDataset;
import net.sf.jasperreports.charts.type.EdgeEnum;
import net.sf.jasperreports.engine.JRAnchor;
import net.sf.jasperreports.engine.JRChart;
import net.sf.jasperreports.engine.JRChartDataset;
import net.sf.jasperreports.engine.JRChartPlot;
import net.sf.jasperreports.engine.JRConstants;
import net.sf.jasperreports.engine.JRExpression;
import net.sf.jasperreports.engine.JRExpressionCollector;
import net.sf.jasperreports.engine.JRFont;
import net.sf.jasperreports.engine.JRGroup;
import net.sf.jasperreports.engine.JRHyperlinkHelper;
import net.sf.jasperreports.engine.JRHyperlinkParameter;
import net.sf.jasperreports.engine.JRLineBox;
import net.sf.jasperreports.engine.JRRuntimeException;
import net.sf.jasperreports.engine.JRVisitor;
import net.sf.jasperreports.engine.type.EvaluationTimeEnum;
import net.sf.jasperreports.engine.type.HyperlinkTargetEnum;
import net.sf.jasperreports.engine.type.HyperlinkTypeEnum;
import net.sf.jasperreports.engine.type.ModeEnum;
import net.sf.jasperreports.engine.util.JRBoxUtil;
import net.sf.jasperreports.engine.util.JRCloneUtils;
import net.sf.jasperreports.engine.util.JRStyleResolver;
/**
* @author Teodor Danciu (teodord@users.sourceforge.net)
* @version $Id: JRBaseChart.java 7199 2014-08-27 13:58:10Z teodord $
*/
public class JRBaseChart extends JRBaseElement implements JRChart
{
	/**
	 * Serialization version; tied to the JasperReports release constant.
	 */
	private static final long serialVersionUID = JRConstants.SERIAL_VERSION_UID;

	/*
	 * Chart property names (used as change-event / property keys).
	 */
	public static final String PROPERTY_LEGEND_BACKGROUND_COLOR = "legendBackgroundColor";
	public static final String PROPERTY_LEGEND_COLOR = "legendColor";
	public static final String PROPERTY_LEGEND_POSITION = "legendPosition";
	public static final String PROPERTY_SHOW_LEGEND = "isShowLegend";
	public static final String PROPERTY_SUBTITLE_COLOR = "subtitleColor";
	public static final String PROPERTY_TITLE_COLOR = "titleColor";
	public static final String PROPERTY_TITLE_POSITION = "titlePosition";
	public static final String PROPERTY_RENDER_TYPE = "renderType";
	public static final String PROPERTY_THEME = "theme";

	/**
	 * Chart type discriminator; one of the JRChart.CHART_TYPE_* constants
	 * (drives the dataset/plot selection in the constructor below).
	 */
	protected byte chartType;

	/**
	 * Whether the legend is shown; a null value presumably means
	 * "not set / use default" -- confirm against JRStyleResolver usage.
	 */
	protected Boolean showLegend;
	// When the chart expressions are evaluated (defaults to NOW).
	protected EvaluationTimeEnum evaluationTimeValue = EvaluationTimeEnum.NOW;
	// Hyperlink type/target names (see JRHyperlinkHelper).
	protected String linkType;
	protected String linkTarget;
	private JRHyperlinkParameter[] hyperlinkParameters;
	// Title/subtitle/legend colors; legendBackgroundColor fills the legend box.
	protected Color titleColor;
	protected Color subtitleColor;
	protected Color legendColor;
	protected Color legendBackgroundColor;
	// Edges at which the legend and title are rendered.
	protected EdgeEnum legendPositionValue;
	protected EdgeEnum titlePositionValue;
	// Renderer implementation name and chart theme name.
	protected String renderType;
	protected String theme;

	/**
	 * Border/padding box of the chart element.
	 */
	protected JRLineBox lineBox;
	// Fonts for the chart title, subtitle and legend.
	protected JRFont titleFont;
	protected JRFont subtitleFont;
	protected JRFont legendFont;
	// Fully-qualified name of the chart customizer class, if any.
	protected String customizerClass;

	/**
	 * Group used when evaluation time is GROUP.
	 */
	protected JRGroup evaluationGroup;
	// Expressions producing the title/subtitle texts and anchor name.
	protected JRExpression titleExpression;
	protected JRExpression subtitleExpression;
	protected JRExpression anchorNameExpression;
	// Hyperlink-related expressions (reference/when/anchor/page/tooltip).
	protected JRExpression hyperlinkReferenceExpression;
	protected JRExpression hyperlinkWhenExpression;
	protected JRExpression hyperlinkAnchorExpression;
	protected JRExpression hyperlinkPageExpression;
	private JRExpression hyperlinkTooltipExpression;
	// Dataset and plot; both are chosen per chartType in the constructor
	// (dataset is null for multi-axis charts).
	protected JRChartDataset dataset;
	protected JRChartPlot plot;

	/**
	 * The bookmark level for the anchor associated with this chart.
	 * @see JRAnchor#getBookmarkLevel()
	 */
	protected int bookmarkLevel = JRAnchor.NO_BOOKMARK;
/**
*
*/
/**
 * Copy constructor: builds a base (read-only) chart from an arbitrary
 * {@link JRChart} implementation via the supplied object factory.
 * The chart type dictates which concrete dataset/plot pair is copied.
 *
 * @param chart   the source chart to copy
 * @param factory the factory used to obtain base copies of nested objects
 * @throws JRRuntimeException if the source chart type is not one of the
 *         supported CHART_TYPE_* values
 */
protected JRBaseChart(JRChart chart, JRBaseObjectFactory factory)
{
super(chart, factory);
chartType = chart.getChartType();
// Dispatch on chart type: each case casts the source dataset/plot to the
// concrete interface expected for that type and copies it via the factory.
switch(chartType) {
case CHART_TYPE_AREA:
dataset = factory.getCategoryDataset((JRCategoryDataset) chart.getDataset());
plot = factory.getAreaPlot((JRAreaPlot) chart.getPlot());
break;
case CHART_TYPE_BAR:
dataset = factory.getCategoryDataset((JRCategoryDataset) chart.getDataset());
plot = factory.getBarPlot((JRBarPlot) chart.getPlot());
break;
case CHART_TYPE_BAR3D:
dataset = factory.getCategoryDataset((JRCategoryDataset) chart.getDataset());
plot = factory.getBar3DPlot((JRBar3DPlot) chart.getPlot());
break;
case CHART_TYPE_BUBBLE:
dataset = factory.getXyzDataset((JRXyzDataset) chart.getDataset());
plot = factory.getBubblePlot((JRBubblePlot) chart.getPlot());
break;
case CHART_TYPE_CANDLESTICK:
dataset = factory.getHighLowDataset((JRHighLowDataset) chart.getDataset());
plot = factory.getCandlestickPlot((JRCandlestickPlot) chart.getPlot());
break;
case CHART_TYPE_HIGHLOW:
dataset = factory.getHighLowDataset((JRHighLowDataset) chart.getDataset());
plot = factory.getHighLowPlot((JRHighLowPlot) chart.getPlot());
break;
case CHART_TYPE_LINE:
dataset = factory.getCategoryDataset((JRCategoryDataset) chart.getDataset());
plot = factory.getLinePlot((JRLinePlot) chart.getPlot());
break;
case CHART_TYPE_METER:
dataset = factory.getValueDataset((JRValueDataset) chart.getDataset());
plot = factory.getMeterPlot((JRMeterPlot) chart.getPlot());
break;
case CHART_TYPE_MULTI_AXIS:
// Multi-axis charts carry no top-level dataset of their own; the data
// lives in the per-axis charts inside the multi-axis plot.
dataset = null;
plot = factory.getMultiAxisPlot((JRMultiAxisPlot) chart.getPlot());
break;
case CHART_TYPE_PIE:
dataset = factory.getPieDataset((JRPieDataset) chart.getDataset());
plot = factory.getPiePlot((JRPiePlot) chart.getPlot());
break;
case CHART_TYPE_PIE3D:
dataset = factory.getPieDataset((JRPieDataset) chart.getDataset());
plot = factory.getPie3DPlot((JRPie3DPlot) chart.getPlot());
break;
case CHART_TYPE_SCATTER:
dataset = factory.getXyDataset((JRXyDataset) chart.getDataset());
plot = factory.getScatterPlot((JRScatterPlot) chart.getPlot());
break;
case CHART_TYPE_STACKEDBAR:
dataset = factory.getCategoryDataset((JRCategoryDataset) chart.getDataset());
plot = factory.getBarPlot((JRBarPlot) chart.getPlot());
break;
case CHART_TYPE_STACKEDBAR3D:
dataset = factory.getCategoryDataset((JRCategoryDataset) chart.getDataset());
plot = factory.getBar3DPlot((JRBar3DPlot) chart.getPlot());
break;
case CHART_TYPE_THERMOMETER:
dataset = factory.getValueDataset((JRValueDataset) chart.getDataset());
plot = factory.getThermometerPlot((JRThermometerPlot) chart.getPlot());
break;
case CHART_TYPE_TIMESERIES:
dataset = factory.getTimeSeriesDataset((JRTimeSeriesDataset)chart.getDataset());
plot = factory.getTimeSeriesPlot( (JRTimeSeriesPlot)chart.getPlot() );
break;
case CHART_TYPE_XYAREA:
dataset = factory.getXyDataset((JRXyDataset) chart.getDataset());
plot = factory.getAreaPlot((JRAreaPlot) chart.getPlot());
break;
case CHART_TYPE_XYBAR:
// XY bar charts accept several dataset flavors; dispatch again on the
// dataset's own declared type. No default: an unknown dataset type
// leaves dataset null — TODO confirm this is intentional upstream.
switch (chart.getDataset().getDatasetType()){
case JRChartDataset.TIMESERIES_DATASET:
dataset = factory.getTimeSeriesDataset((JRTimeSeriesDataset) chart.getDataset());
break;
case JRChartDataset.TIMEPERIOD_DATASET:
dataset = factory.getTimePeriodDataset((JRTimePeriodDataset) chart.getDataset() );
break;
case JRChartDataset.XY_DATASET:
dataset = factory.getXyDataset( (JRXyDataset)chart.getDataset() );
break;
}
plot = factory.getBarPlot((JRBarPlot)chart.getPlot());
break;
case CHART_TYPE_XYLINE:
dataset = factory.getXyDataset((JRXyDataset) chart.getDataset());
plot = factory.getLinePlot((JRLinePlot) chart.getPlot());
break;
case CHART_TYPE_STACKEDAREA:
dataset = factory.getCategoryDataset((JRCategoryDataset) chart.getDataset());
plot = factory.getAreaPlot((JRAreaPlot) chart.getPlot());
break;
case CHART_TYPE_GANTT:
dataset = factory.getGanttDataset((JRGanttDataset) chart.getDataset());
plot = factory.getBarPlot((JRBarPlot) chart.getPlot());
break;
default:
throw new JRRuntimeException("Chart type not supported.");
}
// Copy scalar/“own” attributes directly; nested objects go through the
// factory so they are converted to their base implementations.
showLegend = chart.getShowLegend();
evaluationTimeValue = chart.getEvaluationTimeValue();
linkType = chart.getLinkType();
linkTarget = chart.getLinkTarget();
titlePositionValue = chart.getTitlePositionValue();
titleColor = chart.getOwnTitleColor();
subtitleColor = chart.getOwnSubtitleColor();
legendColor = chart.getOwnLegendColor();
legendBackgroundColor = chart.getOwnLegendBackgroundColor();
legendPositionValue = chart.getLegendPositionValue();
renderType = chart.getRenderType();
theme = chart.getTheme();
titleFont = factory.getFont(this, chart.getTitleFont());
subtitleFont = factory.getFont(this, chart.getSubtitleFont());
legendFont = factory.getFont(this, chart.getLegendFont());
evaluationGroup = factory.getGroup(chart.getEvaluationGroup());
titleExpression = factory.getExpression(chart.getTitleExpression());
subtitleExpression = factory.getExpression(chart.getSubtitleExpression());
anchorNameExpression = factory.getExpression(chart.getAnchorNameExpression());
hyperlinkReferenceExpression = factory.getExpression(chart.getHyperlinkReferenceExpression());
hyperlinkWhenExpression = factory.getExpression(chart.getHyperlinkWhenExpression());
hyperlinkAnchorExpression = factory.getExpression(chart.getHyperlinkAnchorExpression());
hyperlinkPageExpression = factory.getExpression(chart.getHyperlinkPageExpression());
hyperlinkTooltipExpression = factory.getExpression(chart.getHyperlinkTooltipExpression());
bookmarkLevel = chart.getBookmarkLevel();
hyperlinkParameters = JRBaseHyperlink.copyHyperlinkParameters(chart, factory);
customizerClass = chart.getCustomizerClass();
// The line box clone is re-parented to this chart instance.
lineBox = chart.getLineBox().clone(this);
}
/**
*
*/
/**
 * Returns the legend visibility flag, or {@code null} when it was never
 * explicitly set on this chart.
 */
public Boolean getShowLegend()
{
    return showLegend;
}
/**
*
*/
/**
 * Sets the legend visibility flag and notifies listeners of the
 * {@link #PROPERTY_SHOW_LEGEND} change.
 *
 * @param isShowLegend the new flag value; may be {@code null} to unset
 */
public void setShowLegend(Boolean isShowLegend)
{
    Boolean previous = this.showLegend;
    this.showLegend = isShowLegend;
    getEventSupport().firePropertyChange(PROPERTY_SHOW_LEGEND, previous, isShowLegend);
}
/**
*
*/
/**
 * Returns the time at which the chart is evaluated during report filling.
 */
public EvaluationTimeEnum getEvaluationTimeValue()
{
return evaluationTimeValue;
}
/**
 * Returns the report group against which the chart is evaluated, if any.
 */
public JRGroup getEvaluationGroup()
{
return evaluationGroup;
}
/**
 * Returns the border/padding box of this chart element.
 */
public JRLineBox getLineBox()
{
return lineBox;
}
/**
 * Returns the font used for the chart title.
 */
public JRFont getTitleFont()
{
return titleFont;
}
/**
 * Returns the edge on which the title is placed, or {@code null} if unset.
 */
public EdgeEnum getTitlePositionValue()
{
return titlePositionValue;
}
/**
 * Sets the title edge and fires a {@link #PROPERTY_TITLE_POSITION} event.
 */
public void setTitlePosition(EdgeEnum titlePositionValue)
{
EdgeEnum old = this.titlePositionValue;
this.titlePositionValue = titlePositionValue;
getEventSupport().firePropertyChange(PROPERTY_TITLE_POSITION, old, this.titlePositionValue);
}
/**
*
*/
/**
 * Returns the effective title color, resolved through the style hierarchy.
 */
public Color getTitleColor()
{
return JRStyleResolver.getTitleColor(this);
}
/**
 * Returns the title color explicitly set on this chart, or {@code null}.
 */
public Color getOwnTitleColor()
{
return titleColor;
}
/**
 * Sets the title color and fires a {@link #PROPERTY_TITLE_COLOR} event.
 */
public void setTitleColor(Color titleColor)
{
Object old = this.titleColor;
this.titleColor = titleColor;
getEventSupport().firePropertyChange(PROPERTY_TITLE_COLOR, old, this.titleColor);
}
/**
 * Returns the font used for the chart subtitle.
 */
public JRFont getSubtitleFont()
{
return subtitleFont;
}
/**
 * Returns the subtitle color explicitly set on this chart, or {@code null}.
 */
public Color getOwnSubtitleColor()
{
return subtitleColor;
}
/**
 * Returns the effective subtitle color, resolved through the style hierarchy.
 */
public Color getSubtitleColor()
{
return JRStyleResolver.getSubtitleColor(this);
}
/**
 * Sets the subtitle color and fires a {@link #PROPERTY_SUBTITLE_COLOR} event.
 */
public void setSubtitleColor(Color subtitleColor)
{
Object old = this.subtitleColor;
this.subtitleColor = subtitleColor;
getEventSupport().firePropertyChange(PROPERTY_SUBTITLE_COLOR, old, this.subtitleColor);
}
/** Returns the effective legend background color, resolved via the style hierarchy. */
public Color getLegendBackgroundColor() {
return JRStyleResolver.getLegendBackgroundColor(this);
}
/** Returns the legend background color explicitly set here, or {@code null}. */
public Color getOwnLegendBackgroundColor() {
return legendBackgroundColor;
}
/** Returns the legend text color explicitly set here, or {@code null}. */
public Color getOwnLegendColor() {
return legendColor;
}
/** Returns the effective legend text color, resolved via the style hierarchy. */
public Color getLegendColor() {
return JRStyleResolver.getLegendColor(this);
}
/** Returns the font used for legend entries. */
public JRFont getLegendFont() {
return legendFont;
}
/** Sets the legend background color and fires a {@link #PROPERTY_LEGEND_BACKGROUND_COLOR} event. */
public void setLegendBackgroundColor(Color legendBackgroundColor) {
Object old = this.legendBackgroundColor;
this.legendBackgroundColor = legendBackgroundColor;
getEventSupport().firePropertyChange(PROPERTY_LEGEND_BACKGROUND_COLOR, old, this.legendBackgroundColor);
}
/** Sets the legend text color and fires a {@link #PROPERTY_LEGEND_COLOR} event. */
public void setLegendColor(Color legendColor) {
Object old = this.legendColor;
this.legendColor = legendColor;
getEventSupport().firePropertyChange(PROPERTY_LEGEND_COLOR, old, this.legendColor);
}
/**
 * Returns the edge on which the legend is placed, or {@code null} if unset.
 */
public EdgeEnum getLegendPositionValue()
{
return legendPositionValue;
}
/**
 * Sets the legend edge and fires a {@link #PROPERTY_LEGEND_POSITION} event.
 */
public void setLegendPosition(EdgeEnum legendPositionValue)
{
EdgeEnum old = this.legendPositionValue;
this.legendPositionValue = legendPositionValue;
getEventSupport().firePropertyChange(PROPERTY_LEGEND_POSITION, old, this.legendPositionValue);
}
/**
* @deprecated Replaced by {@link #getHyperlinkTypeValue()}.
*/
/**
 * @deprecated Replaced by {@link #getHyperlinkTypeValue()}.
 */
public byte getHyperlinkType()
{
return getHyperlinkTypeValue().getValue();
}
/**
 * Returns the hyperlink type derived from {@link #getLinkType()}.
 */
public HyperlinkTypeEnum getHyperlinkTypeValue()
{
return JRHyperlinkHelper.getHyperlinkTypeValue(this);
}
/**
 * Returns the hyperlink target derived from {@link #getLinkTarget()}.
 */
public byte getHyperlinkTarget()
{
return JRHyperlinkHelper.getHyperlinkTarget(this);
}
/**
*
*/
/**
 * Returns the expression producing the chart title text.
 */
public JRExpression getTitleExpression()
{
return titleExpression;
}
/**
 * Returns the expression producing the chart subtitle text.
 */
public JRExpression getSubtitleExpression()
{
return subtitleExpression;
}
/**
 * Returns the expression producing the anchor name for this chart.
 */
public JRExpression getAnchorNameExpression()
{
return anchorNameExpression;
}
/**
 * Returns the expression producing the hyperlink reference.
 */
public JRExpression getHyperlinkReferenceExpression()
{
return hyperlinkReferenceExpression;
}
/**
 * Returns the expression that conditions whether the hyperlink is active.
 */
public JRExpression getHyperlinkWhenExpression()
{
return hyperlinkWhenExpression;
}
/**
 * Returns the expression producing the hyperlink anchor.
 */
public JRExpression getHyperlinkAnchorExpression()
{
return hyperlinkAnchorExpression;
}
/**
 * Returns the expression producing the hyperlink page number.
 */
public JRExpression getHyperlinkPageExpression()
{
return hyperlinkPageExpression;
}
/**
 * Returns the chart dataset; {@code null} for multi-axis charts.
 */
public JRChartDataset getDataset()
{
return dataset;
}
/**
 * Returns the chart plot, never {@code null} after construction.
 */
public JRChartPlot getPlot()
{
return plot;
}
/** Returns the CHART_TYPE_* discriminator for this chart. */
public byte getChartType()
{
return chartType;
}
/**
*
*/
/**
 * Returns the renderer type name used to produce the chart image.
 */
public String getRenderType()
{
return renderType;
}
/**
 * Sets the renderer type and fires a {@link #PROPERTY_RENDER_TYPE} event.
 */
public void setRenderType(String renderType)
{
String old = this.renderType;
this.renderType = renderType;
getEventSupport().firePropertyChange(PROPERTY_RENDER_TYPE, old, this.renderType);
}
/**
 * Returns the name of the chart theme, or {@code null} for the default.
 */
public String getTheme()
{
return theme;
}
/**
 * Sets the chart theme name and fires a {@link #PROPERTY_THEME} event.
 */
public void setTheme(String theme)
{
String old = this.theme;
this.theme = theme;
getEventSupport().firePropertyChange(PROPERTY_THEME, old, this.theme);
}
/** Registers this chart's expressions with the given collector. */
public void collectExpressions(JRExpressionCollector collector)
{
collector.collect(this);
}
/** Accepts a report visitor (visitor pattern dispatch). */
public void visit(JRVisitor visitor)
{
visitor.visitChart(this);
}
/** Returns the bookmark level of this chart's anchor, or {@code JRAnchor.NO_BOOKMARK}. */
public int getBookmarkLevel()
{
return bookmarkLevel;
}
/**
 * Returns the fully qualified name of the chart customizer class, if any.
 */
public String getCustomizerClass()
{
return customizerClass;
}
/**
*
*/
/**
 * Returns the effective element mode; charts default to TRANSPARENT.
 */
public ModeEnum getModeValue()
{
return JRStyleResolver.getMode(this, ModeEnum.TRANSPARENT);
}
/** Returns the hyperlink type name, or {@code null} for none. */
public String getLinkType()
{
return linkType;
}
/** Returns the hyperlink target name, or {@code null} for the default. */
public String getLinkTarget()
{
return linkTarget;
}
/** Returns the custom hyperlink parameters, or {@code null} if none were defined. */
public JRHyperlinkParameter[] getHyperlinkParameters()
{
return hyperlinkParameters;
}
/** Returns the expression producing the hyperlink tooltip text. */
public JRExpression getHyperlinkTooltipExpression()
{
return hyperlinkTooltipExpression;
}
/**
 * Returns the default color for the line box borders: the chart forecolor.
 */
public Color getDefaultLineColor()
{
return getForecolor();
}
/**
*
*/
/**
 * Deep-clones this chart: the line box, fonts, hyperlink parameters,
 * all expressions, the dataset and the plot are cloned; the plot and
 * line box clones are re-parented to the new chart instance.
 * NOTE(review): evaluationGroup is shared, not cloned — presumably groups
 * are owned by the report rather than the element; confirm upstream.
 */
public Object clone()
{
JRBaseChart clone = (JRBaseChart)super.clone();
clone.lineBox = lineBox.clone(clone);
clone.hyperlinkParameters = JRCloneUtils.cloneArray(hyperlinkParameters);
clone.titleFont = JRCloneUtils.nullSafeClone((JRBaseFont)titleFont);
clone.subtitleFont = JRCloneUtils.nullSafeClone((JRBaseFont)subtitleFont);
clone.legendFont = JRCloneUtils.nullSafeClone((JRBaseFont)legendFont);
clone.titleExpression = JRCloneUtils.nullSafeClone(titleExpression);
clone.subtitleExpression = JRCloneUtils.nullSafeClone(subtitleExpression);
clone.anchorNameExpression = JRCloneUtils.nullSafeClone(anchorNameExpression);
clone.hyperlinkReferenceExpression = JRCloneUtils.nullSafeClone(hyperlinkReferenceExpression);
clone.hyperlinkWhenExpression = JRCloneUtils.nullSafeClone(hyperlinkWhenExpression);
clone.hyperlinkAnchorExpression = JRCloneUtils.nullSafeClone(hyperlinkAnchorExpression);
clone.hyperlinkPageExpression = JRCloneUtils.nullSafeClone(hyperlinkPageExpression);
clone.hyperlinkTooltipExpression = JRCloneUtils.nullSafeClone(hyperlinkTooltipExpression);
clone.dataset = JRCloneUtils.nullSafeClone(dataset);
// plot may be null only in theory; dataset IS null for multi-axis charts.
clone.plot = plot == null ? null : (JRChartPlot) plot.clone(clone);
return clone;
}
/*
* These fields are only for serialization backward compatibility.
*/
// Version stamp written with each serialized instance; readObject() compares
// it against the migration thresholds below. //NOPMD
private int PSEUDO_SERIAL_VERSION_UID = JRConstants.PSEUDO_SERIAL_VERSION_UID; //NOPMD
/**
 * @deprecated Migrated into {@link #lineBox} by readObject(); kept only to
 * deserialize charts saved by older releases.
 */
private Byte border;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Byte topBorder;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Byte leftBorder;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Byte bottomBorder;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Byte rightBorder;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Color borderColor;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Color topBorderColor;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Color leftBorderColor;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Color bottomBorderColor;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Color rightBorderColor;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Integer padding;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Integer topPadding;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Integer leftPadding;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Integer bottomPadding;
/**
 * @deprecated Migrated into {@link #lineBox}; kept for old-format deserialization.
 */
private Integer rightPadding;
/**
 * @deprecated Replaced by the {@link #showLegend} Boolean; migrated in readObject().
 */
private boolean isShowLegend;
/**
 * @deprecated Replaced by {@link #legendPositionValue}; migrated in readObject().
 */
private byte legendPosition;
/**
 * @deprecated Replaced by {@link #titlePositionValue}; migrated in readObject().
 */
private byte titlePosition;
/**
 * @deprecated Replaced by {@link #linkType}; migrated in readObject().
 */
private byte hyperlinkType;
/**
 * @deprecated Replaced by {@link #linkTarget}; migrated in readObject().
 */
private byte hyperlinkTarget;
/**
 * @deprecated Replaced by {@link #evaluationTimeValue}; migrated in readObject().
 */
private byte evaluationTime;
/**
 * @deprecated Replaced by {@link #legendPositionValue}; migrated in readObject().
 */
private Byte legendPositionByte;
/**
 * @deprecated Replaced by {@link #titlePositionValue}; migrated in readObject().
 */
private Byte titlePositionByte;
/**
 * Custom deserialization hook that migrates data written by older library
 * versions into the current field layout.
 *
 * @throws IOException            on stream errors
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 */
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException
{
in.defaultReadObject();
// Old streams had individual border/padding fields instead of a line box:
// rebuild the box from them, then null them out so they are not re-saved.
if (lineBox == null)
{
lineBox = new JRBaseLineBox(this);
JRBoxUtil.setToBox(
border,
topBorder,
leftBorder,
bottomBorder,
rightBorder,
borderColor,
topBorderColor,
leftBorderColor,
bottomBorderColor,
rightBorderColor,
padding,
topPadding,
leftPadding,
bottomPadding,
rightPadding,
lineBox
);
border = null;
topBorder = null;
leftBorder = null;
bottomBorder = null;
rightBorder = null;
borderColor = null;
topBorderColor = null;
leftBorderColor = null;
bottomBorderColor = null;
rightBorderColor = null;
padding = null;
topPadding = null;
leftPadding = null;
bottomPadding = null;
rightPadding = null;
}
// Legacy byte-coded hyperlink type/target are converted to their string forms.
if (linkType == null)
{
linkType = JRHyperlinkHelper.getLinkType(HyperlinkTypeEnum.getByValue(hyperlinkType));
}
if (linkTarget == null)
{
linkTarget = JRHyperlinkHelper.getLinkTarget(HyperlinkTargetEnum.getByValue(hyperlinkTarget));
}
// Pre-3.7.2 streams stored evaluation time as a byte; pre-3.1.3 streams
// additionally stored positions as primitive bytes and the legend flag as
// a primitive boolean. Streams between 3.1.3 and 3.7.2 used the boxed
// Byte variants — presumably showLegend was already a Boolean by then.
if (PSEUDO_SERIAL_VERSION_UID < JRConstants.PSEUDO_SERIAL_VERSION_UID_3_7_2)
{
evaluationTimeValue = EvaluationTimeEnum.getByValue(evaluationTime);
if (PSEUDO_SERIAL_VERSION_UID < JRConstants.PSEUDO_SERIAL_VERSION_UID_3_1_3)
{
legendPositionValue = EdgeEnum.getByValue(legendPosition);
titlePositionValue = EdgeEnum.getByValue(titlePosition);
showLegend = Boolean.valueOf(isShowLegend);
}
else
{
legendPositionValue = EdgeEnum.getByValue(legendPositionByte);
titlePositionValue = EdgeEnum.getByValue(titlePositionByte);
legendPositionByte = null;
titlePositionByte = null;
}
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Contains a list of IP owners.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
// NOTE(review): machine-generated data model — field names are bound to JSON
// keys reflectively via @Key, so the code below is kept byte-identical.
public final class IpOwnerList extends com.google.api.client.json.GenericJson {
/**
 * [Output Only] Unique identifier for the resource; defined by the server.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String id;
/**
 * A list of InternalIpOwner resources.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<InternalIpOwner> items;
static {
// hack to force ProGuard to consider InternalIpOwner used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(InternalIpOwner.class);
}
/**
 * [Output Only] Type of resource. Always compute#ipOwnerList for lists of IP owners.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String kind;
/**
 * [Output Only] This token allows you to get the next page of results for list requests. If the
 * number of results is larger than maxResults, use the nextPageToken as a value for the query
 * parameter pageToken in the next list request. Subsequent list requests will have their own
 * nextPageToken to continue paging through the results.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String nextPageToken;
/**
 * [Output Only] Server-defined URL for this resource.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String selfLink;
/**
 * [Output Only] Informational warning message.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private Warning warning;
/**
 * [Output Only] Unique identifier for the resource; defined by the server.
 * @return value or {@code null} for none
 */
public java.lang.String getId() {
return id;
}
/**
 * [Output Only] Unique identifier for the resource; defined by the server.
 * @param id id or {@code null} for none
 * @return this object, for call chaining
 */
public IpOwnerList setId(java.lang.String id) {
this.id = id;
return this;
}
/**
 * A list of InternalIpOwner resources.
 * @return value or {@code null} for none
 */
public java.util.List<InternalIpOwner> getItems() {
return items;
}
/**
 * A list of InternalIpOwner resources.
 * @param items items or {@code null} for none
 * @return this object, for call chaining
 */
public IpOwnerList setItems(java.util.List<InternalIpOwner> items) {
this.items = items;
return this;
}
/**
 * [Output Only] Type of resource. Always compute#ipOwnerList for lists of IP owners.
 * @return value or {@code null} for none
 */
public java.lang.String getKind() {
return kind;
}
/**
 * [Output Only] Type of resource. Always compute#ipOwnerList for lists of IP owners.
 * @param kind kind or {@code null} for none
 * @return this object, for call chaining
 */
public IpOwnerList setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
 * [Output Only] This token allows you to get the next page of results for list requests. If the
 * number of results is larger than maxResults, use the nextPageToken as a value for the query
 * parameter pageToken in the next list request. Subsequent list requests will have their own
 * nextPageToken to continue paging through the results.
 * @return value or {@code null} for none
 */
public java.lang.String getNextPageToken() {
return nextPageToken;
}
/**
 * [Output Only] This token allows you to get the next page of results for list requests. If the
 * number of results is larger than maxResults, use the nextPageToken as a value for the query
 * parameter pageToken in the next list request. Subsequent list requests will have their own
 * nextPageToken to continue paging through the results.
 * @param nextPageToken nextPageToken or {@code null} for none
 * @return this object, for call chaining
 */
public IpOwnerList setNextPageToken(java.lang.String nextPageToken) {
this.nextPageToken = nextPageToken;
return this;
}
/**
 * [Output Only] Server-defined URL for this resource.
 * @return value or {@code null} for none
 */
public java.lang.String getSelfLink() {
return selfLink;
}
/**
 * [Output Only] Server-defined URL for this resource.
 * @param selfLink selfLink or {@code null} for none
 * @return this object, for call chaining
 */
public IpOwnerList setSelfLink(java.lang.String selfLink) {
this.selfLink = selfLink;
return this;
}
/**
 * [Output Only] Informational warning message.
 * @return value or {@code null} for none
 */
public Warning getWarning() {
return warning;
}
/**
 * [Output Only] Informational warning message.
 * @param warning warning or {@code null} for none
 * @return this object, for call chaining
 */
public IpOwnerList setWarning(Warning warning) {
this.warning = warning;
return this;
}
@Override
public IpOwnerList set(String fieldName, Object value) {
return (IpOwnerList) super.set(fieldName, value);
}
@Override
public IpOwnerList clone() {
return (IpOwnerList) super.clone();
}
/**
 * [Output Only] Informational warning message.
 */
public static final class Warning extends com.google.api.client.json.GenericJson {
/**
 * [Output Only] A warning code, if applicable. For example, Compute Engine returns
 * NO_RESULTS_ON_PAGE if there are no results in the response.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String code;
/**
 * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key":
 * "scope", "value": "zones/us-east1-d" }
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<Data> data;
static {
// hack to force ProGuard to consider Data used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(Data.class);
}
/**
 * [Output Only] A human-readable description of the warning code.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String message;
/**
 * [Output Only] A warning code, if applicable. For example, Compute Engine returns
 * NO_RESULTS_ON_PAGE if there are no results in the response.
 * @return value or {@code null} for none
 */
public java.lang.String getCode() {
return code;
}
/**
 * [Output Only] A warning code, if applicable. For example, Compute Engine returns
 * NO_RESULTS_ON_PAGE if there are no results in the response.
 * @param code code or {@code null} for none
 * @return this object, for call chaining
 */
public Warning setCode(java.lang.String code) {
this.code = code;
return this;
}
/**
 * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key":
 * "scope", "value": "zones/us-east1-d" }
 * @return value or {@code null} for none
 */
public java.util.List<Data> getData() {
return data;
}
/**
 * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key":
 * "scope", "value": "zones/us-east1-d" }
 * @param data data or {@code null} for none
 * @return this object, for call chaining
 */
public Warning setData(java.util.List<Data> data) {
this.data = data;
return this;
}
/**
 * [Output Only] A human-readable description of the warning code.
 * @return value or {@code null} for none
 */
public java.lang.String getMessage() {
return message;
}
/**
 * [Output Only] A human-readable description of the warning code.
 * @param message message or {@code null} for none
 * @return this object, for call chaining
 */
public Warning setMessage(java.lang.String message) {
this.message = message;
return this;
}
@Override
public Warning set(String fieldName, Object value) {
return (Warning) super.set(fieldName, value);
}
@Override
public Warning clone() {
return (Warning) super.clone();
}
/**
 * Model definition for IpOwnerListWarningData.
 */
public static final class Data extends com.google.api.client.json.GenericJson {
/**
 * [Output Only] A key that provides more detail on the warning being returned. For example, for
 * warnings where there are no results in a list request for a particular zone, this key might be
 * scope and the key value might be the zone name. Other examples might be a key indicating a
 * deprecated resource and a suggested replacement, or a warning about invalid network settings
 * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP
 * forwarding).
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String key;
/**
 * [Output Only] A warning data value corresponding to the key.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String value;
/**
 * [Output Only] A key that provides more detail on the warning being returned. For example, for
 * warnings where there are no results in a list request for a particular zone, this key might be
 * scope and the key value might be the zone name. Other examples might be a key indicating a
 * deprecated resource and a suggested replacement, or a warning about invalid network settings
 * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP
 * forwarding).
 * @return value or {@code null} for none
 */
public java.lang.String getKey() {
return key;
}
/**
 * [Output Only] A key that provides more detail on the warning being returned. For example, for
 * warnings where there are no results in a list request for a particular zone, this key might be
 * scope and the key value might be the zone name. Other examples might be a key indicating a
 * deprecated resource and a suggested replacement, or a warning about invalid network settings
 * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP
 * forwarding).
 * @param key key or {@code null} for none
 * @return this object, for call chaining
 */
public Data setKey(java.lang.String key) {
this.key = key;
return this;
}
/**
 * [Output Only] A warning data value corresponding to the key.
 * @return value or {@code null} for none
 */
public java.lang.String getValue() {
return value;
}
/**
 * [Output Only] A warning data value corresponding to the key.
 * @param value value or {@code null} for none
 * @return this object, for call chaining
 */
public Data setValue(java.lang.String value) {
this.value = value;
return this;
}
@Override
public Data set(String fieldName, Object value) {
return (Data) super.set(fieldName, value);
}
@Override
public Data clone() {
return (Data) super.clone();
}
}
}
}
| |
package org.whaka.util.reflection.comparison;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import com.google.common.base.MoreObjects;
import org.whaka.util.reflection.UberClasses;
import org.whaka.util.reflection.comparison.ComparisonPerformer;
import org.whaka.util.reflection.comparison.ComparisonPerformers;
import org.whaka.util.reflection.comparison.ComparisonResult;
import org.whaka.util.reflection.comparison.ComplexComparisonResult;
import org.whaka.util.reflection.comparison.ComplexComparisonResultBuilder;
import org.whaka.util.reflection.comparison.performers.AbstractComparisonPerformer;
import org.whaka.util.reflection.properties.ClassPropertyKey;
/**
 * Class provides test examples of using the "comparison" package manually.
 * Just examples, nothing more.
 */
@SuppressWarnings("unused")
public class PerformerBuildersTestExample {

    public static void main(String[] args) {
        Child c1 = new ChildImpl(17, "qw", true);
        Child c2 = new ChildImpl(12, "qwe", false);
        Parent p1 = new ParentImpl(42L, "pop", 36, Arrays.asList(c1, c2), new int[]{1, 2, 3});
        Parent p2 = new ParentImpl(42L, "pop", 36, Arrays.asList(c1, c2), new int[]{1, 2, 3});
        ComparisonResult result = PARENT_COMPARISON_3.apply(p1, p2);
        printResult(result);
    }

    /**
     * Prints a comparison result tree to stdout, starting at nesting level 0.
     */
    public static void printResult(ComparisonResult result) {
        printResult(null, result, 0);
    }

    /**
     * Recursively prints a result, indenting two spaces per nesting level and
     * prefixing the property key when one is known.
     */
    private static void printResult(ClassPropertyKey key, ComparisonResult result, int level) {
        for (int i = 0; i < level; i++)
            System.out.print("  ");
        if (key != null)
            System.out.print(key + ": ");
        System.out.println(result);
        if (result instanceof ComplexComparisonResult) {
            Map<ClassPropertyKey, ComparisonResult> props =
                    ((ComplexComparisonResult) result).getPropertyResults();
            for (Map.Entry<ClassPropertyKey, ComparisonResult> e : props.entrySet())
                printResult(e.getKey(), e.getValue(), level + 1);
        }
    }

    public static interface Child {
        long getId();
        String getName();
        boolean isBoy();
    }

    /** Immutable {@link Child} implementation used by the examples. */
    public static class ChildImpl implements Child {

        private final long id;
        private final String name;
        private final boolean boy;

        public ChildImpl(long id, String name, boolean boy) {
            this.id = id;
            this.name = name;
            this.boy = boy;
        }

        @Override
        public long getId() {
            return id;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public boolean isBoy() {
            return boy;
        }

        @Override
        public String toString() {
            return MoreObjects.toStringHelper(this)
                .add("id", getId())
                .add("name", getName())
                .add("boy", isBoy())
                .toString();
        }
    }

    public static interface Parent {
        long getId();
        String getName();
        int getAge();
        List<Child> getChild();
        int[] getArr();
    }

    /** Immutable {@link Parent} implementation used by the examples. */
    public static class ParentImpl implements Parent {

        private final int[] arr;
        private final long id;
        private final String name;
        private final int age;
        private final List<Child> child;

        public ParentImpl(long id, String name, int age, List<Child> child, int[] arr) {
            this.id = id;
            this.name = name;
            this.age = age;
            this.child = child;
            this.arr = arr;
        }

        @Override
        public long getId() {
            return id;
        }

        @Override
        public String getName() {
            return name;
        }

        @Override
        public int getAge() {
            return age;
        }

        @Override
        public List<Child> getChild() {
            return child;
        }

        @Override
        public int[] getArr() {
            return arr;
        }

        @Override
        public String toString() {
            return MoreObjects.toStringHelper(this)
                .add("id", getId())
                .add("name", getName())
                .add("age", getAge())
                .add("arr", getArr())
                .add("child", getChild())
                .toString();
        }
    }

    // Example 1: fully manual performers — every property comparison is spelled out.
    // NOTE: these fields are constants and must stay `final`; declaration order
    // matters because later performers reference earlier ones during class init.
    private static final ComparisonPerformer<Child> CHILD_COMPARISON =
        new AbstractComparisonPerformer<PerformerBuildersTestExample.Child>("ChildComparison") {
            @Override
            public ComparisonResult apply(Child actual, Child expected) {
                return new ComplexComparisonResultBuilder<>(Child.class)
                    .apply("getId()", actual.getId(), expected.getId())
                    .apply("getName()", actual.getName(), expected.getName())
                    .apply("isBoy()", actual.isBoy(), expected.isBoy())
                    .build(actual, expected, this);
            }
        };

    private static final ComparisonPerformer<Parent> PARENT_COMPARISON =
        new AbstractComparisonPerformer<PerformerBuildersTestExample.Parent>("ParentComparison") {
            @Override
            public ComparisonResult apply(Parent actual, Parent expected) {
                return new ComplexComparisonResultBuilder<>(Parent.class)
                    .apply("getId()", actual.getId(), expected.getId())
                    .apply("getName()", actual.getName(), expected.getName())
                    .apply("getAge()", actual.getAge(), expected.getAge())
                    .apply("getArr()", actual.getArr(), expected.getArr())
                    .apply("getChild()", actual.getChild(), expected.getChild(), ComparisonPerformers.list(CHILD_COMPARISON))
                    .build(actual, expected, this);
            }
        };

    // Example 2: property-builder API — properties registered by method reference.
    private static final ComparisonPerformer<Child> CHILD_COMPARISON_2 =
        ComparisonPerformers.buildProperties(Child.class)
            .addProperty("getId()", Child::getId)
            .addProperty("getName()", Child::getName)
            .addProperty("isBoy()", Child::isBoy)
            .build("ChildComparison2");

    private static final ComparisonPerformer<Parent> PARENT_COMPARISON_2 =
        ComparisonPerformers.buildProperties(Parent.class)
            .addProperty("getId()", Parent::getId)
            .addProperty("getName()", Parent::getName)
            .addProperty("getAge()", Parent::getAge)
            .addProperty("getArr()", Parent::getArr)
            .addProperty("getChild()", Parent::getChild)
            .configureDynamicPerformer(p -> p.registerDelegate(Child.class, CHILD_COMPARISON_2))
            .build("ParentComparison2");

    // Example 3: getter-discovery API — properties found reflectively from getters.
    private static final ComparisonPerformer<Child> CHILD_COMPARISON_3 =
        ComparisonPerformers.buildGetters(Child.class)
            .build("ChildComparison3");

    private static final ComparisonPerformer<Parent> PARENT_COMPARISON_3 =
        ComparisonPerformers.buildGetters(Parent.class)
            .configureDynamicPerformer(p -> {
                p.setDefaultDelegate(ComparisonPerformers.REFLECTIVE_EQUALS);
                p.registerDelegate(Child.class, CHILD_COMPARISON_3);
                p.registerCollectionDelegateProvider(UberClasses.cast(List.class), Child.class, ComparisonPerformers::list);
            })
            .build("ParentComparison3");
}
| |
/**
* Copyright 2015 StreamSets Inc.
*
* Licensed under the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.runner;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Supplier;
import com.streamsets.datacollector.config.StageType;
import com.streamsets.datacollector.memory.MemoryMonitor;
import com.streamsets.datacollector.memory.MemoryUsageCollector;
import com.streamsets.datacollector.memory.MemoryUsageCollectorResourceBundle;
import com.streamsets.datacollector.metrics.MetricsConfigurator;
import com.streamsets.datacollector.restapi.bean.CounterJson;
import com.streamsets.datacollector.restapi.bean.HistogramJson;
import com.streamsets.datacollector.restapi.bean.MeterJson;
import com.streamsets.datacollector.restapi.bean.MetricRegistryJson;
import com.streamsets.datacollector.util.AggregatorUtil;
import com.streamsets.datacollector.util.Configuration;
import com.streamsets.datacollector.validation.Issue;
import com.streamsets.pipeline.api.Batch;
import com.streamsets.pipeline.api.StageException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
 * Pipe that wraps a single stage of the pipeline: it initializes the stage, runs it for every
 * batch while maintaining the per-stage metrics (timers, counters, meters, histograms and
 * per-lane record counts), and updates the pipeline's runtime statistics around each batch.
 */
public class StagePipe extends Pipe<StagePipe.Context> {
  private static final Logger LOG = LoggerFactory.getLogger(StagePipe.class);

  /** Name of the gauge under which the pipeline runtime statistics are published. */
  public static final String RUNTIME_STATS_GAUGE = "RuntimeStatsGauge";

  // Per-stage metrics; created in init() once the stage itself initializes without issues.
  private Timer processingTimer;
  private Counter memoryConsumedCounter;
  private Meter inputRecordsMeter;
  private Meter outputRecordsMeter;
  private Meter errorRecordsMeter;
  private Meter stageErrorMeter;
  private Counter inputRecordsCounter;
  private Counter outputRecordsCounter;
  private Counter errorRecordsCounter;
  private Counter stageErrorCounter;
  private Histogram inputRecordsHistogram;
  private Histogram outputRecordsHistogram;
  private Histogram errorRecordsHistogram;
  private Histogram stageErrorsHistogram;
  // Per output/event lane record metrics; only populated when the stage has output/event lanes.
  private Map<String, Counter> outputRecordsPerLaneCounter;
  private Map<String, Meter> outputRecordsPerLaneMeter;
  private StagePipe.Context context;
  private final ResourceControlledScheduledExecutor scheduledExecutorService;
  private final MemoryUsageCollectorResourceBundle memoryUsageCollectorResourceBundle;
  private final String name;
  private final String rev;
  private final Configuration configuration;
  // Previously persisted metrics used to seed the freshly created metrics; may be null.
  private final MetricRegistryJson metricRegistryJson;
  // Snapshot of the metrics for the most recent batch, keyed by AggregatorUtil constants.
  private Map<String, Object> batchMetrics;

  @VisibleForTesting
  StagePipe(StageRuntime stage, List<String> inputLanes, List<String> outputLanes, List<String> eventLanes) {
    this("myPipeline", "0", new Configuration(), stage, inputLanes, outputLanes, eventLanes,
        new ResourceControlledScheduledExecutor(0.02f), new MemoryUsageCollectorResourceBundle(), null);
  }

  /**
   * Creates a pipe for the given stage.
   *
   * @param name pipeline name
   * @param rev pipeline revision
   * @param configuration data collector configuration (e.g. to enable memory monitoring)
   * @param stage runtime wrapper of the stage this pipe executes
   * @param metricRegistryJson previously persisted metrics to seed the new metrics from, or null
   */
  public StagePipe(String name, String rev, Configuration configuration, StageRuntime stage, List<String> inputLanes,
      List<String> outputLanes, List<String> eventLanes, ResourceControlledScheduledExecutor scheduledExecutorService,
      MemoryUsageCollectorResourceBundle memoryUsageCollectorResourceBundle, MetricRegistryJson metricRegistryJson) {
    super(stage, inputLanes, outputLanes, eventLanes);
    this.name = name;
    this.rev = rev;
    this.configuration = configuration;
    this.scheduledExecutorService = scheduledExecutorService;
    this.memoryUsageCollectorResourceBundle = memoryUsageCollectorResourceBundle;
    this.metricRegistryJson = metricRegistryJson;
    this.batchMetrics = new HashMap<>();
  }

  /**
   * Initializes the stage and, when successful, creates all per-stage metrics (seeding them from
   * {@link #metricRegistryJson} if available), optionally starts the memory monitor, and
   * registers the runtime-stats gauge.
   *
   * @return the issues reported by the stage's own init; metrics are only created when empty
   */
  @Override
  public List<Issue> init(StagePipe.Context pipeContext) throws StageException {
    List<Issue> issues = getStage().init();
    if (issues.isEmpty()) {
      MetricRegistry metrics = getStage().getContext().getMetrics();
      String metricsKey = "stage." + getStage().getConfiguration().getInstanceName();
      processingTimer = MetricsConfigurator.createTimer(metrics, metricsKey + ".batchProcessing", name, rev);
      memoryConsumedCounter = MetricsConfigurator.createCounter(metrics, metricsKey + ".memoryConsumed", name, rev);
      inputRecordsMeter = MetricsConfigurator.createMeter(metrics, metricsKey + ".inputRecords", name, rev);
      outputRecordsMeter = MetricsConfigurator.createMeter(metrics, metricsKey + ".outputRecords", name, rev);
      errorRecordsMeter = MetricsConfigurator.createMeter(metrics, metricsKey + ".errorRecords", name, rev);
      stageErrorMeter = MetricsConfigurator.createMeter(metrics, metricsKey + ".stageErrors", name, rev);
      inputRecordsCounter = MetricsConfigurator.createCounter(metrics, metricsKey + ".inputRecords", name, rev);
      outputRecordsCounter = MetricsConfigurator.createCounter(metrics, metricsKey + ".outputRecords", name, rev);
      errorRecordsCounter = MetricsConfigurator.createCounter(metrics, metricsKey + ".errorRecords", name, rev);
      stageErrorCounter = MetricsConfigurator.createCounter(metrics, metricsKey + ".stageErrors", name, rev);
      inputRecordsHistogram = MetricsConfigurator.createHistogram5Min(metrics, metricsKey + ".inputRecords", name, rev);
      outputRecordsHistogram = MetricsConfigurator.createHistogram5Min(metrics, metricsKey + ".outputRecords", name, rev);
      errorRecordsHistogram = MetricsConfigurator.createHistogram5Min(metrics, metricsKey + ".errorRecords", name, rev);
      stageErrorsHistogram = MetricsConfigurator.createHistogram5Min(metrics, metricsKey + ".stageErrors", name, rev);
      if (metricRegistryJson != null) {
        restoreStageMetrics(metricsKey);
      }
      if (getStage().getConfiguration().getOutputAndEventLanes().size() > 0) {
        initPerLaneMetrics(metrics, metricsKey);
      }
      this.context = pipeContext;
      if (configuration.get("monitor.memory", false)) {
        LOG.info("Starting memory collector for {}", getStage().getInfo().getInstanceName());
        scheduledExecutorService.submit(
            new MemoryMonitor(memoryConsumedCounter,
                new Supplier<MemoryUsageCollector>() {
                  @Override
                  public MemoryUsageCollector get() {
                    return new MemoryUsageCollector.Builder()
                        .setMemoryUsageCollectorResourceBundle(memoryUsageCollectorResourceBundle)
                        .setStageRuntime(getStage()).build();
                  }
                }));
      }
      createRuntimeStatsGauge(metrics);
    }
    return issues;
  }

  // Seeds the freshly created stage metrics with the values persisted in metricRegistryJson
  // (used when a pipeline is restarted so metrics continue from their previous values).
  private void restoreStageMetrics(String metricsKey) {
    MeterJson inputRecordsMeterJson =
        metricRegistryJson.getMeters().get(metricsKey + ".inputRecords" + MetricsConfigurator.METER_SUFFIX);
    inputRecordsMeter.mark(inputRecordsMeterJson.getCount());
    MeterJson outputRecordsMeterJson =
        metricRegistryJson.getMeters().get(metricsKey + ".outputRecords" + MetricsConfigurator.METER_SUFFIX);
    outputRecordsMeter.mark(outputRecordsMeterJson.getCount());
    MeterJson errorRecordsMeterJson =
        metricRegistryJson.getMeters().get(metricsKey + ".errorRecords" + MetricsConfigurator.METER_SUFFIX);
    errorRecordsMeter.mark(errorRecordsMeterJson.getCount());
    MeterJson stageErrorMeterJson =
        metricRegistryJson.getMeters().get(metricsKey + ".stageErrors" + MetricsConfigurator.METER_SUFFIX);
    stageErrorMeter.mark(stageErrorMeterJson.getCount());
    CounterJson inputRecordsCounterJson =
        metricRegistryJson.getCounters().get(metricsKey + ".inputRecords" + MetricsConfigurator.COUNTER_SUFFIX);
    inputRecordsCounter.inc(inputRecordsCounterJson.getCount());
    CounterJson outputRecordsCounterJson =
        metricRegistryJson.getCounters().get(metricsKey + ".outputRecords" + MetricsConfigurator.COUNTER_SUFFIX);
    outputRecordsCounter.inc(outputRecordsCounterJson.getCount());
    CounterJson errorRecordsCounterJson =
        metricRegistryJson.getCounters().get(metricsKey + ".errorRecords" + MetricsConfigurator.COUNTER_SUFFIX);
    errorRecordsCounter.inc(errorRecordsCounterJson.getCount());
    CounterJson stageErrorCounterJson =
        metricRegistryJson.getCounters().get(metricsKey + ".stageErrors" + MetricsConfigurator.COUNTER_SUFFIX);
    stageErrorCounter.inc(stageErrorCounterJson.getCount());
    HistogramJson inputRecordsHistogramJson =
        metricRegistryJson.getHistograms()
            .get(metricsKey + ".inputRecords" + MetricsConfigurator.HISTOGRAM_M5_SUFFIX);
    inputRecordsHistogram.update(inputRecordsHistogramJson.getCount());
    HistogramJson outputRecordsHistogramJson =
        metricRegistryJson.getHistograms().get(
            metricsKey + ".outputRecords" + MetricsConfigurator.HISTOGRAM_M5_SUFFIX);
    outputRecordsHistogram.update(outputRecordsHistogramJson.getCount());
    HistogramJson errorRecordsHistogramJson =
        metricRegistryJson.getHistograms()
            .get(metricsKey + ".errorRecords" + MetricsConfigurator.HISTOGRAM_M5_SUFFIX);
    errorRecordsHistogram.update(errorRecordsHistogramJson.getCount());
    HistogramJson stageErrorsHistogramJson =
        metricRegistryJson.getHistograms().get(metricsKey + ".stageErrors" + MetricsConfigurator.HISTOGRAM_M5_SUFFIX);
    stageErrorsHistogram.update(stageErrorsHistogramJson.getCount());
  }

  // Creates the per output/event lane counters and meters, seeding them from the persisted
  // metrics when available.
  private void initPerLaneMetrics(MetricRegistry metrics, String metricsKey) {
    outputRecordsPerLaneCounter = new HashMap<>();
    outputRecordsPerLaneMeter = new HashMap<>();
    for (String lane : getStage().getConfiguration().getOutputAndEventLanes()) {
      Counter outputRecordsCounter =
          MetricsConfigurator.createCounter(metrics, metricsKey + ":" + lane + ".outputRecords", name, rev);
      if (metricRegistryJson != null) {
        CounterJson counterJson =
            metricRegistryJson.getCounters().get(
                metricsKey + ":" + lane + ".outputRecords" + MetricsConfigurator.COUNTER_SUFFIX);
        outputRecordsCounter.inc(counterJson.getCount());
      }
      outputRecordsPerLaneCounter.put(lane, outputRecordsCounter);
      Meter outputRecordsMeter = MetricsConfigurator.createMeter(
          metrics, metricsKey + ":" + lane + ".outputRecords", name, rev);
      if (metricRegistryJson != null) {
        MeterJson meterJson =
            metricRegistryJson.getMeters().get(
                metricsKey + ":" + lane + ".outputRecords" + MetricsConfigurator.METER_SUFFIX);
        outputRecordsMeter.mark(meterJson.getCount());
      }
      outputRecordsPerLaneMeter.put(lane, outputRecordsMeter);
    }
  }

  /**
   * Runs the stage for one batch: filters records through the required-fields and preconditions
   * predicates, executes the stage, records all per-batch metrics and updates the runtime stats.
   */
  @Override
  @SuppressWarnings("unchecked")
  public void process(PipeBatch pipeBatch) throws StageException, PipelineRuntimeException {
    //note down time when this stage was entered
    long startTimeInStage = System.currentTimeMillis();
    //update stats
    updateStatsAtStart(startTimeInStage);
    BatchMakerImpl batchMaker = pipeBatch.startStage(this);
    BatchImpl batchImpl = pipeBatch.getBatch(this);
    ErrorSink errorSink = pipeBatch.getErrorSink();
    EventSink eventSink = new EventSink();
    String previousOffset = pipeBatch.getPreviousOffset();
    InstanceErrorSink instanceErrorSink = new InstanceErrorSink(getStage().getInfo().getInstanceName(), errorSink);
    // records failing either predicate are routed to the error sink instead of the stage
    FilterRecordBatch.Predicate[] predicates = new FilterRecordBatch.Predicate[2];
    predicates[0] = new RequiredFieldsPredicate(getStage().getRequiredFields());
    predicates[1] = new PreconditionsPredicate(getStage().getContext(), getStage().getPreconditions());
    Batch batch = new FilterRecordBatch(batchImpl, predicates, instanceErrorSink);
    long start = System.currentTimeMillis();
    String newOffset = getStage().execute(previousOffset, pipeBatch.getBatchSize(), batch, batchMaker, errorSink, eventSink);
    if (isSource()) {
      pipeBatch.setNewOffset(newOffset);
    }
    long processingTime = System.currentTimeMillis() - start;
    processingTimer.update(processingTime, TimeUnit.MILLISECONDS);
    int batchSize = batchImpl.getSize();
    inputRecordsCounter.inc(batchSize);
    inputRecordsMeter.mark(batchSize);
    inputRecordsHistogram.update(batchSize);
    int stageErrorRecordCount = errorSink.getErrorRecords(getStage().getInfo().getInstanceName()).size();
    errorRecordsCounter.inc(stageErrorRecordCount);
    errorRecordsMeter.mark(stageErrorRecordCount);
    errorRecordsHistogram.update(stageErrorRecordCount);
    int outputRecordsCount = batchMaker.getSize();
    if (isTargetOrExecutor()) {
      //Assumption is that the target will not drop any record.
      //Records are sent to destination or to the error sink.
      outputRecordsCount = batchSize - stageErrorRecordCount;
    }
    outputRecordsCounter.inc(outputRecordsCount);
    outputRecordsMeter.mark(outputRecordsCount);
    outputRecordsHistogram.update(outputRecordsCount);
    int stageErrorsCount = errorSink.getStageErrors(getStage().getInfo().getInstanceName()).size();
    stageErrorCounter.inc(stageErrorsCount);
    stageErrorMeter.mark(stageErrorsCount);
    stageErrorsHistogram.update(stageErrorsCount);
    Map<String, Integer> outputRecordsPerLane = new HashMap<>();
    if (getStage().getConfiguration().getOutputLanes().size() > 0) {
      for (String lane : getStage().getConfiguration().getOutputLanes()) {
        int outputRecords = batchMaker.getSize(lane);
        outputRecordsPerLane.put(lane, outputRecords);
        outputRecordsPerLaneCounter.get(lane).inc(outputRecords);
        outputRecordsPerLaneMeter.get(lane).mark(outputRecords);
      }
    }
    if (getStage().getConfiguration().getEventLanes().size() > 0) {
      // a stage has at most one event lane; all event records go to it
      String lane = getStage().getConfiguration().getEventLanes().get(0);
      int eventRecords = eventSink.getEventRecords().size();
      outputRecordsPerLane.put(lane, eventRecords);
      outputRecordsPerLaneCounter.get(lane).inc(eventRecords);
      outputRecordsPerLaneMeter.get(lane).mark(eventRecords);
    }
    // capture stage metrics for this batch
    batchMetrics.clear();
    batchMetrics.put(AggregatorUtil.PROCESSING_TIME, processingTime);
    batchMetrics.put(AggregatorUtil.INPUT_RECORDS, batchSize);
    batchMetrics.put(AggregatorUtil.ERROR_RECORDS, stageErrorRecordCount);
    batchMetrics.put(AggregatorUtil.OUTPUT_RECORDS, outputRecordsCount);
    batchMetrics.put(AggregatorUtil.STAGE_ERROR, stageErrorsCount);
    batchMetrics.put(AggregatorUtil.OUTPUT_RECORDS_PER_LANE, outputRecordsPerLane);
    pipeBatch.completeStage(batchMaker, eventSink);
    //get records count to determine if this stage saw any record in this batch
    int recordsCount = batchSize;
    if (isSource()) {
      //source does not have input records
      recordsCount = outputRecordsCount;
    }
    //update stats
    updateStatsAtEnd(startTimeInStage, newOffset, recordsCount);
  }

  /** Destroys the stage, routing any records/events produced during destroy to the sinks. */
  @Override
  public void destroy(PipeBatch pipeBatch) {
    EventSink eventSink = new EventSink();
    ErrorSink errorSink = pipeBatch.getErrorSink();
    getStage().destroy(errorSink, eventSink);
    pipeBatch.completeStage(this, eventSink);
  }

  /** @return total memory consumed by this stage as tracked by the memory monitor counter */
  public long getMemoryConsumed() {
    return memoryConsumedCounter.getCount();
  }

  /** @return the metrics snapshot of the most recently processed batch */
  public Map<String, Object> getBatchMetrics() {
    return batchMetrics;
  }

  // Registers (once per metric registry) a gauge exposing the pipeline runtime stats.
  @SuppressWarnings("unchecked")
  private Gauge<Object> createRuntimeStatsGauge(MetricRegistry metricRegistry) {
    Gauge<Object> runtimeStatsGauge = MetricsConfigurator.getGauge(metricRegistry, RUNTIME_STATS_GAUGE);
    if (runtimeStatsGauge == null) {
      runtimeStatsGauge = new Gauge<Object>() {
        @Override
        public Object getValue() {
          return context.getRuntimeStats();
        }
      };
      try {
        MetricsConfigurator.createGauge(metricRegistry, RUNTIME_STATS_GAUGE, runtimeStatsGauge, name, rev);
      } catch (Exception e) {
        // log the full stack before rethrowing so the failure location is visible in the logs
        for (StackTraceElement se : e.getStackTrace()) {
          LOG.error(se.toString());
        }
        throw e;
      }
    }
    return runtimeStatsGauge;
  }

  private void updateStatsAtStart(long startTimeInStage) {
    //update the runtime stats
    //The following needs to be done at the beginning of a stage per batch
    //1. set name of current stage
    //2. update current batch age, [if source then update the batch age]
    //3. update time in current stage [near zero]
    context.getRuntimeStats().setCurrentStage(getStage().getInfo().getInstanceName());
    //update batch age if the stage is Source
    if (isSource()) {
      context.getRuntimeStats().setBatchStartTime(System.currentTimeMillis());
    }
    context.getRuntimeStats().setCurrentBatchAge(
        System.currentTimeMillis() - context.getRuntimeStats().getBatchStartTime());
    context.getRuntimeStats().setTimeInCurrentStage(System.currentTimeMillis() - startTimeInStage);
  }

  private void updateStatsAtEnd(long startTimeInStage, String offset, int outputRecordsCount) {
    //update the runtime stats
    //The following needs to be done at the end of a stage per batch
    //1. If source, update batch counter, current offset, if there was at least one record in this batch then
    //   update time of last record
    //2. update current batch age
    //3. update time in current stage
    if (isSource()) {
      context.getRuntimeStats().setBatchCount(context.getRuntimeStats().getBatchCount() + 1);
      context.getRuntimeStats().setCurrentSourceOffset(offset);
      if (outputRecordsCount > 0) {
        context.getRuntimeStats().setTimeOfLastReceivedRecord(System.currentTimeMillis());
      }
    }
    context.getRuntimeStats().setCurrentBatchAge(
        System.currentTimeMillis() - context.getRuntimeStats().getBatchStartTime());
    context.getRuntimeStats().setTimeInCurrentStage(System.currentTimeMillis() - startTimeInStage);
  }

  /** @return true when this stage is an origin (source). */
  private boolean isSource() {
    return getStage().getDefinition().getType() == StageType.SOURCE;
  }

  /**
   * @return true when this stage is a destination or an executor.
   *     Fixed: the original compared against {@link StageType#TARGET} twice, so executors were
   *     never recognized and their output-record count was taken from the (empty) batch maker.
   */
  private boolean isTargetOrExecutor() {
    return getStage().getDefinition().getType().isOneOf(StageType.TARGET, StageType.EXECUTOR);
  }

  /** Pipe context giving access to the pipeline runtime statistics. */
  public interface Context extends Pipe.Context {
    public RuntimeStats getRuntimeStats();
  }
}
| |
package org.batfish.datamodel;
import com.google.common.collect.ImmutableSet;
import java.util.Set;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNullableByDefault;
/** Represents the attributes of the session established between two {@link IpsecPeerConfig}s */
@ParametersAreNullableByDefault
public class IpsecSession {
public static final Set<ConfigurationFormat> CLOUD_CONFIGURATION_FORMATS =
ImmutableSet.of(ConfigurationFormat.AWS);
/** Port on which IKE (Phase 1) and IPsec(Phase 2) parameters are exchanged through UDP */
public static final int IPSEC_UDP_PORT = 500;
@Nullable private final IkePhase1Policy _initiatorIkeP1Policy;
@Nullable private final IpsecPhase2Policy _initiatorIpsecP2Policy;
@Nullable private final IkePhase1Proposal _negotiatedIkeP1Proposal;
@Nullable private final IkePhase1Key _negotiatedIkeP1Key;
@Nullable private final IpsecPhase2Proposal _negotiatedIpsecP2Proposal;
@Nullable private final IkePhase1Policy _responderIkeP1Policy;
@Nullable private final IpsecPhase2Policy _responderIpsecP2Policy;
/**
* Is true when at least one of the peers for this IPsec session is a cloud type configuration
* (like AWS)
*/
private final boolean _cloud;
private IpsecSession(
boolean cloud,
IkePhase1Policy initiatorIkeP1Policy,
IpsecPhase2Policy initiatorIpsecP2Policy,
IkePhase1Proposal negotiatedIkeP1Proposal,
IkePhase1Key negotiatedIkeP1Key,
IpsecPhase2Proposal negotiatedIpsecP2Proposal,
IkePhase1Policy responderIkeP1Policy,
IpsecPhase2Policy responderIpsecP2Policy) {
_cloud = cloud;
_initiatorIkeP1Policy = initiatorIkeP1Policy;
_initiatorIpsecP2Policy = initiatorIpsecP2Policy;
_negotiatedIkeP1Proposal = negotiatedIkeP1Proposal;
_negotiatedIkeP1Key = negotiatedIkeP1Key;
_negotiatedIpsecP2Proposal = negotiatedIpsecP2Proposal;
_responderIkeP1Policy = responderIkeP1Policy;
_responderIpsecP2Policy = responderIpsecP2Policy;
}
/**
* Is true when at least one of the peers for this IPsec session is a cloud type configuration
* (like AWS)
*
* @return true for a cloud type {@link IpsecSession}
*/
public boolean isCloud() {
return _cloud;
}
@Nullable
public IkePhase1Policy getInitiatorIkeP1Policy() {
return _initiatorIkeP1Policy;
}
@Nullable
public IpsecPhase2Policy getInitiatorIpsecP2Policy() {
return _initiatorIpsecP2Policy;
}
@Nullable
public IkePhase1Policy getResponderIkeP1Policy() {
return _responderIkeP1Policy;
}
@Nullable
public IpsecPhase2Policy getResponderIpsecP2Policy() {
return _responderIpsecP2Policy;
}
@Nullable
public IkePhase1Proposal getNegotiatedIkeP1Proposal() {
return _negotiatedIkeP1Proposal;
}
@Nullable
public IkePhase1Key getNegotiatedIkeP1Key() {
return _negotiatedIkeP1Key;
}
@Nullable
public IpsecPhase2Proposal getNegotiatedIpsecP2Proposal() {
return _negotiatedIpsecP2Proposal;
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private boolean _cloud;
private IkePhase1Policy _initiatorIkeP1Policy;
private IpsecPhase2Policy _initiatorIpsecP2Policy;
private IkePhase1Proposal _negotiatedIkeP1Proposal;
private IkePhase1Key _negotiatedIkeP1Key;
private IpsecPhase2Proposal _negotiatedIpsecP2Proposal;
private IkePhase1Policy _responderIkeP1Policy;
private IpsecPhase2Policy _responderIpsecP2Policy;
public IpsecSession build() {
return new IpsecSession(
_cloud,
_initiatorIkeP1Policy,
_initiatorIpsecP2Policy,
_negotiatedIkeP1Proposal,
_negotiatedIkeP1Key,
_negotiatedIpsecP2Proposal,
_responderIkeP1Policy,
_responderIpsecP2Policy);
}
public boolean isCloud() {
return _cloud;
}
@Nullable
public IkePhase1Policy getInitiatorIkeP1Policy() {
return _initiatorIkeP1Policy;
}
@Nullable
public IpsecPhase2Policy getInitiatorIpsecP2Policy() {
return _initiatorIpsecP2Policy;
}
@Nullable
public IkePhase1Policy getResponderIkeP1Policy() {
return _responderIkeP1Policy;
}
@Nullable
public IpsecPhase2Policy getResponderIpsecP2Policy() {
return _responderIpsecP2Policy;
}
@Nullable
public IkePhase1Proposal getNegotiatedIkeP1Proposal() {
return _negotiatedIkeP1Proposal;
}
@Nullable
public IkePhase1Key getNegotiatedIkeP1Key() {
return _negotiatedIkeP1Key;
}
@Nullable
public IpsecPhase2Proposal getNegotiatedIpsecP2Proposal() {
return _negotiatedIpsecP2Proposal;
}
public Builder setCloud(boolean cloud) {
_cloud = cloud;
return this;
}
public Builder setInitiatorIkeP1Policy(IkePhase1Policy initiatorIkeP1Policy) {
_initiatorIkeP1Policy = initiatorIkeP1Policy;
return this;
}
public Builder setInitiatorIpsecP2Policy(IpsecPhase2Policy initiatorIpsecP2Policy) {
_initiatorIpsecP2Policy = initiatorIpsecP2Policy;
return this;
}
public Builder setResponderIkeP1Policy(IkePhase1Policy responderIkeP1Policy) {
_responderIkeP1Policy = responderIkeP1Policy;
return this;
}
public Builder setResponderIpsecP2Policy(IpsecPhase2Policy responderIpsecP2Policy) {
_responderIpsecP2Policy = responderIpsecP2Policy;
return this;
}
public Builder setNegotiatedIkeP1Proposal(IkePhase1Proposal ikePhase1Proposal) {
_negotiatedIkeP1Proposal = ikePhase1Proposal;
return this;
}
public Builder setNegotiatedIkeP1Key(IkePhase1Key negotiatedIkePhase1Key) {
_negotiatedIkeP1Key = negotiatedIkePhase1Key;
return this;
}
public Builder setNegotiatedIpsecP2Proposal(IpsecPhase2Proposal ipsecPhase2Proposal) {
_negotiatedIpsecP2Proposal = ipsecPhase2Proposal;
return this;
}
}
public enum IpsecSessionType {
STATIC,
DYNAMIC
}
/**
* Returns true if the given {@link Configuration} has a cloud type {@link ConfigurationFormat}
*
* @param configuration {@link Configuration}
* @return true if {@link Configuration} is a cloud type node
*/
public static boolean isCloudConfig(Configuration configuration) {
return CLOUD_CONFIGURATION_FORMATS.contains(configuration.getConfigurationFormat());
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.matrix;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.conf.DMLConfig;
import org.apache.sysml.runtime.instructions.MRJobInstruction;
import org.apache.sysml.runtime.instructions.mr.CombineBinaryInstruction;
import org.apache.sysml.runtime.instructions.mr.CombineTernaryInstruction;
import org.apache.sysml.runtime.instructions.mr.MRInstruction;
import org.apache.sysml.runtime.matrix.data.InputInfo;
import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
import org.apache.sysml.runtime.matrix.data.OutputInfo;
import org.apache.sysml.runtime.matrix.data.Pair;
import org.apache.sysml.runtime.matrix.data.TaggedMatrixBlock;
import org.apache.sysml.runtime.matrix.data.TaggedMatrixCell;
import org.apache.sysml.runtime.matrix.data.TaggedMatrixValue;
import org.apache.sysml.runtime.matrix.data.WeightedPair;
import org.apache.sysml.runtime.matrix.mapred.GMRMapper;
import org.apache.sysml.runtime.matrix.mapred.IndexedMatrixValue;
import org.apache.sysml.runtime.matrix.mapred.MRConfigurationNames;
import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration;
import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration.ConvertTarget;
import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration.MatrixChar_N_ReducerGroups;
import org.apache.sysml.runtime.matrix.mapred.ReduceBase;
import org.apache.sysml.runtime.util.UtilFunctions;
public class CombineMR
{
private static final Log LOG = LogFactory.getLog(CombineMR.class.getName());
	// Utility class holding the combine MR job and its reducer; not meant to be instantiated.
	private CombineMR() {
		//prevent instantiation via private constructor
	}
public static class InnerReducer extends ReduceBase
implements Reducer<MatrixIndexes, TaggedMatrixValue, MatrixIndexes, WeightedPair>
{
protected MRInstruction[] comb_instructions=null;
private MatrixIndexes keyBuff=new MatrixIndexes();
private WeightedPair valueBuff=new WeightedPair();
private HashMap<Byte, Pair<Integer, Integer>> outputBlockSizes=new HashMap<>();
private HashMap<Byte, ArrayList<Integer>> outputIndexesMapping=new HashMap<>();
@Override
public void reduce(MatrixIndexes indexes,
Iterator<TaggedMatrixValue> values,
OutputCollector<MatrixIndexes, WeightedPair> out, Reporter reporter)
throws IOException {
long start=System.currentTimeMillis();
if(firsttime)
{
cachedReporter=reporter;
firsttime=false;
}
cachedValues.reset();
while(values.hasNext())
{
TaggedMatrixValue taggedValue=values.next();
cachedValues.set(taggedValue.getTag(), indexes, taggedValue.getBaseObject(), true);
}
//LOG.info("before aggregation: \n"+cachedValues);
//perform aggregate operations first
//processAggregateInstructions(indexes, values);
//LOG.info("after aggregation: \n"+cachedValues);
//perform mixed operations
//processReducerInstructions();
processCombineInstructionsAndOutput(reporter);
reporter.incrCounter(Counters.COMBINE_OR_REDUCE_TIME, System.currentTimeMillis()-start);
}
@Override
public void configure(JobConf job)
{
super.configure(job);
try {
comb_instructions = MRJobConfiguration.getCombineInstruction(job);
} catch (Exception e) {
throw new RuntimeException(e);
}
for(int i=0; i<resultIndexes.length; i++)
{
MatrixCharacteristics stat=MRJobConfiguration.getMatrixCharacteristicsForOutput(job, resultIndexes[i]);
outputBlockSizes.put(resultIndexes[i], new Pair<>(stat.getRowsPerBlock(), stat.getColsPerBlock()));
}
for(MRInstruction ins: comb_instructions)
{
outputIndexesMapping.put(ins.output, getOutputIndexes(ins.output));
}
}
void processCombineInstructionsAndOutput(Reporter reporter)
throws IOException
{
for(MRInstruction ins: comb_instructions)
{
if(ins instanceof CombineBinaryInstruction)
processBinaryCombineInstruction((CombineBinaryInstruction)ins, reporter);
else if(ins instanceof CombineTernaryInstruction)
processTernaryCombineInstruction((CombineTernaryInstruction)ins, reporter);
else
throw new IOException("unsupported instruction: "+ins);
}
}
private void processTernaryCombineInstruction(
CombineTernaryInstruction ins, Reporter reporter) throws IOException{
IndexedMatrixValue in1=cachedValues.getFirst(ins.input1);
IndexedMatrixValue in2=cachedValues.getFirst(ins.input2);
IndexedMatrixValue in3=cachedValues.getFirst(ins.input3);
if(in1==null && in2==null && in3==null)
return;
int nr=0, nc=0;
if(in1!=null)
{
nr=in1.getValue().getNumRows();
nc=in1.getValue().getNumColumns();
}else if(in2!=null)
{
nr=in2.getValue().getNumRows();
nc=in2.getValue().getNumColumns();
}else
{
nr=in3.getValue().getNumRows();
nc=in3.getValue().getNumColumns();
}
//if one of the inputs is null, then it is a all zero block
if(in1==null)
{
in1=zeroInput;
in1.getValue().reset(nr, nc);
}
if(in2==null)
{
in2=zeroInput;
in2.getValue().reset(nr, nc);
}
if(in3==null)
{
in3=zeroInput;
in3.getValue().reset(nr, nc);
}
//process instruction
try {
ArrayList<Integer> outputIndexes = outputIndexesMapping.get(ins.output);
for(int r=0; r<nr; r++)
for(int c=0; c<nc; c++)
{
Pair<Integer, Integer> blockSize=outputBlockSizes.get(ins.output);
keyBuff.setIndexes(
UtilFunctions.computeCellIndex(in1.getIndexes().getRowIndex(), blockSize.getKey(), r),
UtilFunctions.computeCellIndex(in1.getIndexes().getColumnIndex(), blockSize.getValue(), c)
);
valueBuff.setValue(in1.getValue().getValue(r, c));
valueBuff.setOtherValue(in2.getValue().getValue(r, c));
valueBuff.setWeight(in3.getValue().getValue(r, c));
for(int i: outputIndexes)
{
collectFinalMultipleOutputs.collectOutput(keyBuff, valueBuff, i, reporter);
//System.out.println("output: "+keyBuff+" -- "+valueBuff);
}
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private void processBinaryCombineInstruction(CombineBinaryInstruction ins, Reporter reporter)
throws IOException
{
IndexedMatrixValue in1=cachedValues.getFirst(ins.input1);
IndexedMatrixValue in2=cachedValues.getFirst(ins.input2);
if(in1==null && in2==null)
return;
MatrixIndexes indexes;
if(in1!=null)
indexes=in1.getIndexes();
else
indexes=in2.getIndexes();
//if one of the inputs is null, then it is a all zero block
if(in1==null)
{
in1=zeroInput;
in1.getValue().reset(in2.getValue().getNumRows(),
in2.getValue().getNumColumns());
}
if(in2==null)
{
in2=zeroInput;
in2.getValue().reset(in1.getValue().getNumRows(),
in1.getValue().getNumColumns());
}
//System.out.println("in1:"+in1);
//System.out.println("in2:"+in2);
//process instruction
try {
/*in1.getValue().combineOperations(in2.getValue(), collectFinalMultipleOutputs,
reporter, keyBuff, valueBuff, getOutputIndexes(ins.output));*/
ArrayList<Integer> outputIndexes = outputIndexesMapping.get(ins.output);
for(int r=0; r<in1.getValue().getNumRows(); r++)
for(int c=0; c<in1.getValue().getNumColumns(); c++)
{
Pair<Integer, Integer> blockSize=outputBlockSizes.get(ins.output);
keyBuff.setIndexes(
UtilFunctions.computeCellIndex(indexes.getRowIndex(), blockSize.getKey(), r),
UtilFunctions.computeCellIndex(indexes.getColumnIndex(), blockSize.getValue(), c)
);
valueBuff.setValue(in1.getValue().getValue(r, c));
double temp=in2.getValue().getValue(r, c);
if(ins.isSecondInputWeight())
{
valueBuff.setWeight(temp);
valueBuff.setOtherValue(0);
}
else
{
valueBuff.setWeight(1);
valueBuff.setOtherValue(temp);
}
for(int i: outputIndexes)
{
collectFinalMultipleOutputs.collectOutput(keyBuff, valueBuff, i, reporter);
//System.out.println("output: "+keyBuff+" -- "+valueBuff);
}
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
/**
 * Configures and submits the standalone CombineMR Hadoop job: registers the
 * multiple inputs/outputs, pushes the combine instructions into the job
 * configuration, wires up GMRMapper/InnerReducer, and blocks until the job
 * finishes.
 *
 * @return a JobReturn holding the computed matrix characteristics and the
 *         job's success flag
 * @throws Exception if job configuration or submission fails
 */
public static JobReturn runJob(MRJobInstruction inst, String[] inputs, InputInfo[] inputInfos,
        long[] rlens, long[] clens, int[] brlens, int[] bclens, String combineInstructions,
        int numReducers, int replication, byte[] resultIndexes, String[] outputs, OutputInfo[] outputInfos)
        throws Exception
{
    JobConf job;
    job = new JobConf(CombineMR.class);
    job.setJobName("Standalone-MR");
    // Decide whether the inputs use block or cell representation and
    // propagate the matching matrix value class to the job.
    boolean inBlockRepresentation=MRJobConfiguration.deriveRepresentation(inputInfos);
    MRJobConfiguration.setMatrixValueClass(job, inBlockRepresentation);
    // Inputs are addressed by their position 0..n-1.
    byte[] inputIndexes=new byte[inputs.length];
    for(byte b=0; b<inputs.length; b++)
        inputIndexes[b]=b;
    //set up the input files and their format information
    MRJobConfiguration.setUpMultipleInputs(job, inputIndexes, inputs, inputInfos, brlens, bclens,
            true, inBlockRepresentation? ConvertTarget.BLOCK: ConvertTarget.CELL);
    //set up the dimensions of input matrices
    MRJobConfiguration.setMatricesDimensions(job, inputIndexes, rlens, clens);
    //set up the block size
    MRJobConfiguration.setBlocksSizes(job, inputIndexes, brlens, bclens);
    // No extra mapper/combiner/reducer instructions besides the combine
    // instructions themselves.
    MRJobConfiguration.setInstructionsInMapper(job, "");
    MRJobConfiguration.setAggregateInstructions(job, "");
    MRJobConfiguration.setInstructionsInReducer(job, "");
    MRJobConfiguration.setCombineInstructions(job, combineInstructions);
    //set up the replication factor for the results
    job.setInt(MRConfigurationNames.DFS_REPLICATION, replication);
    //set up custom map/reduce configurations
    DMLConfig config = ConfigurationManager.getDMLConfig();
    MRJobConfiguration.setupCustomMRConfigurations(job, config);
    //set up what matrices are needed to pass from the mapper to reducer
    HashSet<Byte> mapoutputIndexes=MRJobConfiguration.setUpOutputIndexesForMapper(job, inputIndexes, null, null, combineInstructions,
            resultIndexes);
    //set up the multiple output files, and their format information
    MRJobConfiguration.setUpMultipleOutputs(job, resultIndexes, null, outputs, outputInfos, inBlockRepresentation);
    // configure mapper and the mapper output key value pairs
    job.setMapperClass(GMRMapper.class);
    job.setMapOutputKeyClass(MatrixIndexes.class);
    if(inBlockRepresentation)
        job.setMapOutputValueClass(TaggedMatrixBlock.class);
    else
        job.setMapOutputValueClass(TaggedMatrixCell.class);
    //configure reducer
    job.setReducerClass(InnerReducer.class);
    //job.setReducerClass(PassThroughReducer.class);
    MatrixChar_N_ReducerGroups ret=MRJobConfiguration.computeMatrixCharacteristics(job, inputIndexes,
            null, null, null, combineInstructions, resultIndexes, mapoutputIndexes, false);
    MatrixCharacteristics[] stats=ret.stats;
    //set up the number of reducers
    MRJobConfiguration.setNumReducers(job, ret.numReducerGroups, numReducers);
    // Print the complete instruction
    if (LOG.isTraceEnabled())
        inst.printCompleteMRJobInstruction(stats);
    // NOTE(review): inputStats is computed but never used below — presumably
    // a leftover from a "run in local mode" optimization that was removed;
    // confirm before deleting.
    MatrixCharacteristics[] inputStats = new MatrixCharacteristics[inputs.length];
    for ( int i=0; i < inputs.length; i++ ) {
        inputStats[i] = new MatrixCharacteristics(rlens[i], clens[i], brlens[i], bclens[i]);
    }
    //set unique working dir
    MRJobConfiguration.setUniqueWorkingDir(job);
    RunningJob runjob=JobClient.runJob(job);
    return new JobReturn(stats, runjob.isSuccessful());
}
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.fmradio.views;
import android.animation.Animator;
import android.animation.Animator.AnimatorListener;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.database.Cursor;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Typeface;
import android.hardware.display.DisplayManagerGlobal;
import android.os.Handler;
import android.os.Looper;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.Display;
import android.view.DisplayInfo;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.VelocityTracker;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.ViewTreeObserver.OnPreDrawListener;
import android.view.animation.Interpolator;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.BaseAdapter;
import android.widget.EdgeEffect;
import android.widget.FrameLayout;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.PopupMenu;
import android.widget.PopupMenu.OnMenuItemClickListener;
import android.widget.ScrollView;
import android.widget.Scroller;
import android.widget.TextView;
import com.android.fmradio.FmStation;
import com.android.fmradio.FmUtils;
import com.android.fmradio.R;
import com.android.fmradio.FmStation.Station;
/**
* Modified from Contact MultiShrinkScroll Handle the touch event and change
* header size and scroll
*/
public class FmScroller extends FrameLayout {
// Log tag.
private static final String TAG = "FmScroller";
/**
 * Velocity unit passed to VelocityTracker.computeCurrentVelocity():
 * 1000 means velocities are reported in pixels per second.
 */
private static final int PIXELS_PER_SECOND = 1000;
// Delay (ms) before the header re-expands after a favorite is tapped to play.
private static final int ON_PLAY_ANIMATION_DELAY = 1000;
// Favorites grid column count in portrait / landscape.
private static final int PORT_COLUMN_NUM = 3;
private static final int LAND_COLUMN_NUM = 5;
// Header sizing states: no favorites saved vs. at least one favorite.
private static final int STATE_NO_FAVORITE = 0;
private static final int STATE_HAS_FAVORITE = 1;
// Last observed touch position: [0] = x, [1] = y.
private float[] mLastEventPosition = {
        0, 0
};
// Tracks pointer velocity during a gesture; obtained on demand, recycled in stopDrag().
private VelocityTracker mVelocityTracker;
// True while this scroller is consuming a drag gesture.
private boolean mIsBeingDragged = false;
// Set on ACTION_DOWN; an ACTION_UP while this is still set is treated as a click.
private boolean mReceivedDown = false;
// The first onResume() skips the relayout work performed on later resumes.
private boolean mFirstOnResume = true;
// Station query: only rows whose IS_FAVORITE column equals "1".
private String mSelection = "IS_FAVORITE=?";
private String[] mSelectionArgs = {
        "1"
};
// Callback for grid item events (play / rename / remove).
private EventListener mEventListener;
// Currently displayed item overflow menu, if any.
private PopupMenu mPopupMenu;
// Main-thread handler used to post the delayed header expansion.
private Handler mMainHandler;
// Scrolling content below the header.
private ScrollView mScrollView;
private View mScrollViewChild;
private GridView mGridView;
private TextView mFavoriteText;
// The resizable header above the content.
private View mHeader;
// Header height bounds for the current state; see refreshStateHeight().
private int mMaximumHeaderHeight;
private int mMinimumHeaderHeight;
// Adjusts header text size/padding as the header resizes.
private Adjuster mAdjuster;
// Currently tuned station and play state, used to render the play indicator.
private int mCurrentStation;
private boolean mIsFmPlaying;
private FavoriteAdapter mAdapter;
// Computes fling positions; drives computeScroll().
private final Scroller mScroller;
// Overscroll glow drawn at the bottom edge.
private final EdgeEffect mEdgeGlowBottom;
// ViewConfiguration-derived gesture thresholds.
private final int mTouchSlop;
private final int mMaximumVelocity;
private final int mMinimumVelocity;
// Themed action bar height, resolved in the constructor.
private final int mActionBarSize;
// Re-syncs the header height bounds once an expand/collapse animation ends.
private final AnimatorListener mHeaderExpandAnimationListener = new AnimatorListenerAdapter() {
    @Override
    public void onAnimationEnd(Animator animation) {
        refreshStateHeight();
    }
};
/**
 * Interpolator from android.support.v4.view.ViewPager. Snappier and more
 * elastic feeling than the default interpolator.
 */
private static final Interpolator INTERPOLATOR = new Interpolator() {
    /**
     * {@inheritDoc}
     */
    @Override
    public float getInterpolation(float t) {
        // Ease-out quintic: (t - 1)^5 + 1.
        t -= 1.0f;
        return t * t * t * t * t + 1.0f;
    }
};
/**
 * Constructor.
 *
 * @param context The context
 */
public FmScroller(Context context) {
    this(context, null);
}

/**
 * Constructor.
 *
 * @param context The context
 * @param attrs The attrs
 */
public FmScroller(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
}

/**
 * Constructor. Resolves gesture thresholds, the fling scroller, the bottom
 * edge glow and the themed action bar size.
 *
 * @param context The context
 * @param attrs The attrs
 * @param defStyleAttr The default style attribute
 */
public FmScroller(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    final ViewConfiguration configuration = ViewConfiguration.get(context);
    setFocusable(false);
    // Drawing must be enabled in order to support EdgeEffect
    setWillNotDraw(/* willNotDraw = */false);
    mEdgeGlowBottom = new EdgeEffect(context);
    mScroller = new Scroller(context, INTERPOLATOR);
    mTouchSlop = configuration.getScaledTouchSlop();
    mMinimumVelocity = configuration.getScaledMinimumFlingVelocity();
    mMaximumVelocity = configuration.getScaledMaximumFlingVelocity();
    // Resolve android.R.attr.actionBarSize from the current theme.
    final TypedArray attributeArray = context.obtainStyledAttributes(new int[] {
            android.R.attr.actionBarSize
    });
    mActionBarSize = attributeArray.getDimensionPixelSize(0, 0);
    attributeArray.recycle();
}
/**
 * Binds child views and wires up the favorites grid. Must be called from
 * the Activity's onCreate(), after this view has been inflated.
 */
public void initialize() {
    mScrollView = (ScrollView) findViewById(R.id.content_scroller);
    mScrollViewChild = findViewById(R.id.favorite_container);
    mHeader = findViewById(R.id.main_header_parent);
    mMainHandler = new Handler(Looper.getMainLooper());
    mFavoriteText = (TextView) findViewById(R.id.favorite_text);
    mGridView = (GridView) findViewById(R.id.gridview);
    mAdapter = new FavoriteAdapter(getContext());
    mAdjuster = new Adjuster(getContext());
    mGridView.setAdapter(mAdapter);
    Cursor c = getData();
    mAdapter.swipResult(c);
    mGridView.setFocusable(false);
    mGridView.setFocusableInTouchMode(false);
    mGridView.setOnItemClickListener(new OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            if (mEventListener != null && mAdapter != null) {
                mEventListener.onPlay(mAdapter.getFrequency(position));
            }
            // Cancel any previously posted expand runnable before posting a
            // new one. Fix: removeCallbacks(null) only matches messages with
            // a null callback, so it never removed the posted Runnable;
            // removeCallbacksAndMessages(null) clears everything pending.
            mMainHandler.removeCallbacksAndMessages(null);
            mMainHandler.postDelayed(new Runnable() {
                @Override
                public void run() {
                    mMaximumHeaderHeight = getMaxHeight(STATE_HAS_FAVORITE);
                    expandHeader();
                }
            }, ON_PLAY_ANIMATION_DELAY);
        }
    });
    // Runs once, right before the first draw after the initial layout.
    doOnPreDraw(this, /* drawNextFrame = */false, new Runnable() {
        @Override
        public void run() {
            refreshStateHeight();
            setHeaderHeight(getMaximumScrollableHeaderHeight());
            updateHeaderTextAndButton();
            refreshFavoriteLayout();
        }
    });
}
/**
 * Runs a piece of code just before the next draw, after layout and
 * measurement have completed.
 *
 * @param view The view whose draw pass to hook
 * @param drawNextFrame Whether the pending draw frame should still happen
 * @param runnable The code to execute right before drawing
 */
private void doOnPreDraw(final View view, final boolean drawNextFrame,
        final Runnable runnable) {
    final OnPreDrawListener listener = new OnPreDrawListener() {
        @Override
        public boolean onPreDraw() {
            // One-shot: unregister before running so the runnable fires once.
            view.getViewTreeObserver().removeOnPreDrawListener(this);
            runnable.run();
            return drawNextFrame;
        }
    };
    view.getViewTreeObserver().addOnPreDrawListener(listener);
}

// Re-derives the favorites caption visibility and the grid height from the
// current adapter contents.
private void refreshFavoriteLayout() {
    setFavoriteTextHeight(mAdapter.getCount() == 0);
    setGridViewHeight(computeGridViewHeight());
}
/**
 * Shows or hides the favorites caption.
 *
 * @param empty true when the favorites grid is empty (caption hidden),
 *              false when at least one favorite exists (caption shown)
 */
private void setFavoriteTextHeight(boolean empty) {
    // Fix: the original ignored its parameter and re-queried the adapter.
    // Callers pass mAdapter.getCount() == 0, so using the flag directly
    // preserves behavior while making the method honest about its input.
    mFavoriteText.setVisibility(empty ? View.GONE : View.VISIBLE);
}
// Applies an explicit height to the favorites grid; the grid lives inside a
// ScrollView, so it cannot size itself to its content.
private void setGridViewHeight(int height) {
    final ViewGroup.LayoutParams params = mGridView.getLayoutParams();
    params.height = height;
    mGridView.setLayoutParams(params);
}

/**
 * Computes the pixel height needed to show every favorite item, derived
 * from the row count for the current orientation's column count.
 */
private int computeGridViewHeight() {
    int itemcount = mAdapter.getCount();
    if (itemcount == 0) {
        return 0;
    }
    int curOrientation = getResources().getConfiguration().orientation;
    final boolean isLandscape = curOrientation == Configuration.ORIENTATION_LANDSCAPE;
    int columnNum = isLandscape ? LAND_COLUMN_NUM : PORT_COLUMN_NUM;
    int itemHeight = (int) getResources().getDimension(R.dimen.fm_gridview_item_height);
    int itemPadding = (int) getResources().getDimension(R.dimen.fm_gridview_item_padding);
    int rownum = (int) Math.ceil(itemcount / (float) columnNum);
    int totalHeight = rownum * itemHeight + rownum * itemPadding;
    if (rownum == 2) {
        // NOTE(review): 72 is a raw pixel value, not density-scaled —
        // presumably a bottom margin; confirm whether it should come from
        // a dimension resource.
        int minGridViewHeight = getHeight() - getMinHeight(STATE_HAS_FAVORITE) - 72;
        totalHeight = Math.max(totalHeight, minGridViewHeight);
    }
    return totalHeight;
}
@Override
public boolean onInterceptTouchEvent(MotionEvent event) {
    // Steal touch events from children only once the gesture qualifies as a
    // drag of this scroller.
    return shouldStartDrag(event);
}

/**
 * Decides whether the current motion event should begin a drag; also clears
 * a previously active drag flag on re-entry.
 */
private boolean shouldStartDrag(MotionEvent event) {
    if (mIsBeingDragged) {
        mIsBeingDragged = false;
        return false;
    }
    switch (event.getAction()) {
        // A down event arriving during an ongoing fling is stolen and
        // immediately starts a drag.
        case MotionEvent.ACTION_DOWN:
            updateLastEventPosition(event);
            if (!mScroller.isFinished()) {
                startDrag();
                return true;
            } else {
                mReceivedDown = true;
            }
            break;
        // Otherwise start a drag once there is enough vertical motion.
        case MotionEvent.ACTION_MOVE:
            if (motionShouldStartDrag(event)) {
                updateLastEventPosition(event);
                startDrag();
                return true;
            }
            break;
        default:
            break;
    }
    return false;
}
/**
 * Handles the drag gesture: MOVE events scroll by the pointer delta and
 * feed overscroll into the bottom EdgeEffect; UP/CANCEL ends the drag
 * (possibly starting a fling). An UP with no drag in between is a click.
 */
@Override
public boolean onTouchEvent(MotionEvent event) {
    final int action = event.getAction();
    if (mVelocityTracker == null) {
        mVelocityTracker = VelocityTracker.obtain();
    }
    mVelocityTracker.addMovement(event);
    if (!mIsBeingDragged) {
        if (shouldStartDrag(event)) {
            return true;
        }
        // Down followed by up without dragging: treat as a click.
        if (action == MotionEvent.ACTION_UP && mReceivedDown) {
            mReceivedDown = false;
            return performClick();
        }
        return true;
    }
    switch (action) {
        case MotionEvent.ACTION_MOVE:
            final float delta = updatePositionAndComputeDelta(event);
            scrollTo(0, getScroll() + (int) delta);
            mReceivedDown = false;
            if (mIsBeingDragged) {
                final int distanceFromMaxScrolling = getMaximumScrollUpwards() - getScroll();
                if (delta > distanceFromMaxScrolling) {
                    // Pulling upwards with no content left offscreen and the
                    // view port fully expanded: show the overscroll glow.
                    mEdgeGlowBottom.onPull(delta / getHeight(), 1 - event.getX() / getWidth());
                }
                if (!mEdgeGlowBottom.isFinished()) {
                    postInvalidateOnAnimation();
                }
            }
            break;
        case MotionEvent.ACTION_UP:
        case MotionEvent.ACTION_CANCEL:
            stopDrag(action == MotionEvent.ACTION_CANCEL);
            mReceivedDown = false;
            break;
        default:
            break;
    }
    return true;
}
/**
 * Animates the header to its maximum height and scrolls the nested
 * ScrollView back to its top.
 */
private void expandHeader() {
    if (getHeaderHeight() != mMaximumHeaderHeight) {
        // Animates the reflective "headerHeight" property (setHeaderHeight).
        final ObjectAnimator animator = ObjectAnimator.ofInt(this, "headerHeight",
                mMaximumHeaderHeight);
        animator.addListener(mHeaderExpandAnimationListener);
        animator.setDuration(300);
        animator.start();
        // Scroll nested scroll view to its top
        if (mScrollView.getScrollY() != 0) {
            ObjectAnimator.ofInt(mScrollView, "scrollY", 0).setDuration(300).start();
        }
    }
}

// Animates the header down to its minimum height.
private void collapseHeader() {
    if (getHeaderHeight() != mMinimumHeaderHeight) {
        final ObjectAnimator animator = ObjectAnimator.ofInt(this, "headerHeight",
                mMinimumHeaderHeight);
        animator.addListener(mHeaderExpandAnimationListener);
        animator.start();
    }
}

// Marks the gesture as a drag and stops any in-flight fling.
private void startDrag() {
    mIsBeingDragged = true;
    mScroller.abortAnimation();
}

/**
 * Ends the current drag. Unless cancelled, starts a fling when the release
 * velocity exceeds the minimum fling velocity; always recycles the velocity
 * tracker and releases the edge glow.
 */
private void stopDrag(boolean cancelled) {
    mIsBeingDragged = false;
    if (!cancelled && getChildCount() > 0) {
        final float velocity = getCurrentVelocity();
        if (velocity > mMinimumVelocity || velocity < -mMinimumVelocity) {
            fling(-velocity);
        }
    }
    if (mVelocityTracker != null) {
        mVelocityTracker.recycle();
        mVelocityTracker = null;
    }
    mEdgeGlowBottom.onRelease();
}

/**
 * Custom scroll: positive deltas shrink the header first, then scroll the
 * content; negative deltas un-scroll the content. Note this intentionally
 * does not call View.scrollTo().
 */
@Override
public void scrollTo(int x, int y) {
    final int delta = y - getScroll();
    if (delta > 0) {
        scrollUp(delta);
    } else {
        scrollDown(delta);
    }
    updateHeaderTextAndButton();
}
// Current header ("toolbar" in the original Contacts code) height.
private int getToolbarHeight() {
    return mHeader.getLayoutParams().height;
}

/**
 * Sets the header height. Invoked reflectively by ObjectAnimator via the
 * "headerHeight" property name.
 */
@FmReflection
public void setHeaderHeight(int height) {
    final ViewGroup.LayoutParams toolbarLayoutParams = mHeader.getLayoutParams();
    toolbarLayoutParams.height = height;
    mHeader.setLayoutParams(toolbarLayoutParams);
    updateHeaderTextAndButton();
}

/**
 * Gets the header height. Invoked reflectively by ObjectAnimator.
 *
 * @return The header height
 */
@FmReflection
public int getHeaderHeight() {
    return mHeader.getLayoutParams().height;
}

/**
 * Sets the total scroll. Invoked reflectively by ObjectAnimator.
 */
@FmReflection
public void setScroll(int scroll) {
    scrollTo(0, scroll);
}

/**
 * Returns the total amount scrolled inside the nested ScrollView plus the
 * amount of shrinking performed on the header. This is the value inspected
 * by animators.
 */
@FmReflection
public int getScroll() {
    return getMaximumScrollableHeaderHeight() - getToolbarHeight() + mScrollView.getScrollY();
}

// Upper bound of the header height in the current state.
private int getMaximumScrollableHeaderHeight() {
    return mMaximumHeaderHeight;
}

/**
 * A variant of {@link #getScroll} that clamps the header contribution at 0,
 * used for snapping decisions that will not change the header size. Should
 * never be mixed with {@link #getScroll} values.
 */
private int getScrollIgnoreOversizedHeaderForSnapping() {
    return Math.max(getMaximumScrollableHeaderHeight() - getToolbarHeight(), 0)
            + mScrollView.getScrollY();
}

/**
 * Amount of scrolling needed for all the visible subviews to scroll off
 * the bottom.
 */
private int getScrollUntilOffBottom() {
    return getHeight() + getScrollIgnoreOversizedHeaderForSnapping();
}
/**
 * Applies the current fling position from the Scroller and lights up the
 * bottom EdgeEffect when the fling runs past the maximum scroll.
 */
@Override
public void computeScroll() {
    if (mScroller.computeScrollOffset()) {
        final int oldScroll = getScroll();
        scrollTo(0, mScroller.getCurrY());
        final int delta = mScroller.getCurrY() - oldScroll;
        final int distanceFromMaxScrolling = getMaximumScrollUpwards() - getScroll();
        if (delta > distanceFromMaxScrolling && distanceFromMaxScrolling > 0) {
            // Fling hit the end of the content: absorb the remaining velocity.
            mEdgeGlowBottom.onAbsorb((int) mScroller.getCurrVelocity());
        }
        if (!awakenScrollBars()) {
            // Keep on drawing until the animation has finished.
            postInvalidateOnAnimation();
        }
        if (mScroller.getCurrY() >= getMaximumScrollUpwards()) {
            mScroller.abortAnimation();
        }
    }
}

/**
 * Draws the view, then overlays the bottom EdgeEffect rotated 180 degrees
 * so the glow points upward from the bottom edge of the window. Assumes the
 * nested ScrollView uses fillViewport so this view fills the window.
 */
@Override
public void draw(Canvas canvas) {
    super.draw(canvas);
    if (!mEdgeGlowBottom.isFinished()) {
        final int restoreCount = canvas.save();
        final int width = getWidth() - getPaddingLeft() - getPaddingRight();
        final int height = getHeight();
        // Position at (or slightly below, while over-scrolled) the bottom of
        // the window, then flip so the glow faces up.
        canvas.translate(-width + getPaddingLeft(), height + getMaximumScrollUpwards()
                - getScroll());
        canvas.rotate(180, width, 0);
        mEdgeGlowBottom.setSize(width, height);
        if (mEdgeGlowBottom.draw(canvas)) {
            postInvalidateOnAnimation();
        }
        canvas.restoreToCount(restoreCount);
    }
}

// Current vertical pointer velocity in px/s, or 0 when not tracking.
private float getCurrentVelocity() {
    if (mVelocityTracker == null) {
        return 0;
    }
    mVelocityTracker.computeCurrentVelocity(PIXELS_PER_SECOND, mMaximumVelocity);
    return mVelocityTracker.getYVelocity();
}

// Starts a fling from the current scroll position with the given velocity.
private void fling(float velocity) {
    // Per the original author: scrolling is less janky with an unbounded
    // maxY than with the actual maximum scroll value.
    mScroller.fling(0, getScroll(), 0, (int) velocity, 0, 0, -Integer.MAX_VALUE,
            Integer.MAX_VALUE);
    invalidate();
}

/**
 * Maximum upward scroll: how far the header can compress plus how much the
 * ScrollView content overflows its viewport (0 when the child fits).
 */
private int getMaximumScrollUpwards() {
    return // How much the Header view can compress
    getMaximumScrollableHeaderHeight() - getFullyCompressedHeaderHeight()
            // How much the ScrollView can scroll. 0, if child is
            // smaller than ScrollView.
            + Math.max(0, mScrollViewChild.getHeight() - getHeight()
                    + getFullyCompressedHeaderHeight());
}
/**
 * Consumes an upward scroll: shrinks the header first (down to its fully
 * compressed height), then forwards the unconsumed remainder to the nested
 * ScrollView.
 */
private void scrollUp(int delta) {
    final ViewGroup.LayoutParams toolbarLayoutParams = mHeader.getLayoutParams();
    if (toolbarLayoutParams.height > getFullyCompressedHeaderHeight()) {
        final int originalValue = toolbarLayoutParams.height;
        toolbarLayoutParams.height -= delta;
        toolbarLayoutParams.height = Math.max(toolbarLayoutParams.height,
                getFullyCompressedHeaderHeight());
        mHeader.setLayoutParams(toolbarLayoutParams);
        // Only the part of delta not absorbed by the header goes on.
        delta -= originalValue - toolbarLayoutParams.height;
    }
    mScrollView.scrollBy(0, delta);
}

/**
 * Returns the minimum size that we want to compress the header to, given
 * that we don't want to allow the ScrollView to scroll unless there is new
 * content off of the edge of the ScrollView.
 */
private int getFullyCompressedHeaderHeight() {
    int height = Math.min(Math.max(mHeader.getLayoutParams().height
            - getOverflowingChildViewSize(), mMinimumHeaderHeight),
            getMaximumScrollableHeaderHeight());
    return height;
}

/**
 * Returns the amount of mScrollViewChild that doesn't fit inside its
 * parent (may be negative when everything fits).
 */
private int getOverflowingChildViewSize() {
    final int usedScrollViewSpace = mScrollViewChild.getHeight();
    return -getHeight() + usedScrollViewSpace + mHeader.getLayoutParams().height;
}
/**
 * Consumes a downward scroll (delta <= 0) by un-scrolling the nested
 * ScrollView while it has any scroll left.
 */
private void scrollDown(int delta) {
    if (mScrollView.getScrollY() > 0) {
        // Fix: removed an unused local that captured getScrollY() here.
        mScrollView.scrollBy(0, delta);
    }
}
// Delegates per-scroll text/padding adjustments to the Adjuster.
private void updateHeaderTextAndButton() {
    mAdjuster.handleScroll();
}

// Records the event position used by motionShouldStartDrag() and the
// per-move delta computation.
private void updateLastEventPosition(MotionEvent event) {
    mLastEventPosition[0] = event.getX();
    mLastEventPosition[1] = event.getY();
}

// True when the pointer moved beyond the touch slop vertically but not
// horizontally since the last recorded position.
private boolean motionShouldStartDrag(MotionEvent event) {
    final float deltaX = event.getX() - mLastEventPosition[0];
    final float deltaY = event.getY() - mLastEventPosition[1];
    final boolean draggedX = (deltaX > mTouchSlop || deltaX < -mTouchSlop);
    final boolean draggedY = (deltaY > mTouchSlop || deltaY < -mTouchSlop);
    return draggedY && !draggedX;
}

// Updates the stored position and returns how far the pointer moved
// vertically since the previous event (positive = content scrolls up).
private float updatePositionAndComputeDelta(MotionEvent event) {
    final int vertical = 1;
    final float position = mLastEventPosition[vertical];
    updateLastEventPosition(event);
    return position - mLastEventPosition[vertical];
}
/**
 * Interpolator that enforces a specific starting velocity. This is useful
 * to avoid a discontinuity between dragging speed and flinging speed.
 * Similar to an {@link android.view.animation.AccelerateInterpolator} in
 * the sense that getInterpolation() is a quadratic function.
 */
private static class AcceleratingFlingInterpolator implements Interpolator {
    // Initial speed converted from px/s to px/frame at the display refresh rate.
    private final float mStartingSpeedPixelsPerFrame;
    private final float mDurationMs;
    private final int mPixelsDelta;
    private final float mNumberFrames;

    public AcceleratingFlingInterpolator(int durationMs, float startingSpeedPixelsPerSecond,
            int pixelsDelta) {
        mStartingSpeedPixelsPerFrame = startingSpeedPixelsPerSecond / getRefreshRate();
        mDurationMs = durationMs;
        mPixelsDelta = pixelsDelta;
        mNumberFrames = mDurationMs / getFrameIntervalMs();
    }

    @Override
    public float getInterpolation(float input) {
        final float animationIntervalNumber = mNumberFrames * input;
        final float linearDelta = (animationIntervalNumber * mStartingSpeedPixelsPerFrame)
                / mPixelsDelta;
        // Sum of a linear term (initial speed) and an accelerating quadratic
        // term, clamped to 1.
        if (mStartingSpeedPixelsPerFrame > 0) {
            return Math.min(input * input + linearDelta, 1);
        } else {
            // Initial fling was in the wrong direction; make the quadratic
            // component grow faster to make up for it.
            return Math.min(input * (input - linearDelta) + linearDelta, 1);
        }
    }

    // Display refresh rate in Hz. NOTE(review): DisplayManagerGlobal is a
    // hidden platform API — this only compiles against framework sources.
    private float getRefreshRate() {
        DisplayInfo di = DisplayManagerGlobal.getInstance().getDisplayInfo(
                Display.DEFAULT_DISPLAY);
        return di.getMode().getRefreshRate();
    }

    // Frame interval in milliseconds derived from the refresh rate.
    public long getFrameIntervalMs() {
        return (long) (1000 / getRefreshRate());
    }
}
/**
 * Maximum header height for a state: the full view height when no
 * favorites exist (header fills the screen), otherwise the "big" header
 * dimension.
 */
private int getMaxHeight(int state) {
    int height = 0;
    switch (state) {
        case STATE_NO_FAVORITE:
            height = getHeight();
            break;
        case STATE_HAS_FAVORITE:
            height = (int) getResources().getDimension(R.dimen.fm_main_header_big);
            break;
        default:
            break;
    }
    return height;
}

/**
 * Minimum header height for a state: the "big" header when no favorites
 * exist, otherwise the "small" header dimension.
 */
private int getMinHeight(int state) {
    int height = 0;
    switch (state) {
        case STATE_NO_FAVORITE:
            height = (int) getResources().getDimension(R.dimen.fm_main_header_big);
            break;
        case STATE_HAS_FAVORITE:
            height = (int) getResources().getDimension(R.dimen.fm_main_header_small);
            break;
        default:
            break;
    }
    return height;
}

// NOTE(review): no callers visible in this part of the file — verify usage
// before removing.
private void setMinHeight(int height) {
    mMinimumHeaderHeight = height;
}
class FavoriteAdapter extends BaseAdapter {
private Cursor mCursor;
private LayoutInflater mInflater;
public FavoriteAdapter(Context context) {
mInflater = LayoutInflater.from(context);
}
public int getFrequency(int position) {
if (mCursor != null && mCursor.moveToFirst()) {
mCursor.moveToPosition(position);
return mCursor.getInt(mCursor.getColumnIndex(FmStation.Station.FREQUENCY));
}
return 0;
}
public void swipResult(Cursor cursor) {
if (null != mCursor) {
mCursor.close();
}
mCursor = cursor;
notifyDataSetChanged();
}
@Override
public int getCount() {
if (null != mCursor) {
return mCursor.getCount();
}
return 0;
}
@Override
public Object getItem(int position) {
return null;
}
@Override
public long getItemId(int position) {
return 0;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
ViewHolder viewHolder = null;
if (null == convertView) {
viewHolder = new ViewHolder();
convertView = mInflater.inflate(R.layout.favorite_gridview_item, null);
viewHolder.mStationFreq = (TextView) convertView.findViewById(R.id.station_freq);
viewHolder.mPlayIndicator = (FmVisualizerView) convertView
.findViewById(R.id.fm_play_indicator);
viewHolder.mStationName = (TextView) convertView.findViewById(R.id.station_name);
viewHolder.mMoreButton = (ImageView) convertView.findViewById(R.id.station_more);
viewHolder.mPopupMenuAnchor = convertView.findViewById(R.id.popupmenu_anchor);
convertView.setTag(viewHolder);
} else {
viewHolder = (ViewHolder) convertView.getTag();
}
if (mCursor != null && mCursor.moveToPosition(position)) {
final int stationFreq = mCursor.getInt(mCursor
.getColumnIndex(FmStation.Station.FREQUENCY));
String name = mCursor.getString(mCursor
.getColumnIndex(FmStation.Station.STATION_NAME));
String rds = mCursor.getString(mCursor
.getColumnIndex(FmStation.Station.RADIO_TEXT));
final int isFavorite = mCursor.getInt(mCursor
.getColumnIndex(FmStation.Station.IS_FAVORITE));
if (null == name || "".equals(name)) {
name = mCursor.getString(mCursor
.getColumnIndex(FmStation.Station.PROGRAM_SERVICE));
}
if (null == name || "".equals(name)) {
name = "";
}
viewHolder.mStationFreq.setText(FmUtils.formatStation(stationFreq));
viewHolder.mStationName.setText(name);
if (mCurrentStation == stationFreq) {
viewHolder.mPlayIndicator.setVisibility(View.VISIBLE);
if (mIsFmPlaying) {
viewHolder.mPlayIndicator.startAnimation();
} else {
viewHolder.mPlayIndicator.stopAnimation();
}
viewHolder.mStationFreq.setTextColor(Color.parseColor("#607D8B"));
viewHolder.mStationFreq.setAlpha(1f);
viewHolder.mStationName.setMaxLines(1);
} else {
viewHolder.mPlayIndicator.setVisibility(View.GONE);
viewHolder.mPlayIndicator.stopAnimation();
viewHolder.mStationFreq.setTextColor(Color.parseColor("#000000"));
viewHolder.mStationFreq.setAlpha(0.87f);
viewHolder.mStationName.setMaxLines(2);
}
viewHolder.mMoreButton.setTag(viewHolder.mPopupMenuAnchor);
viewHolder.mMoreButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// Use anchor view to fix PopupMenu postion and cover more button
View anchor = v;
if (v.getTag() != null) {
anchor = (View) v.getTag();
}
showPopupMenu(anchor, stationFreq);
}
});
}
return convertView;
}
}
/**
 * Queries the favorite stations (rows whose IS_FAVORITE is "1"), ordered by
 * frequency. May return null if the provider query fails.
 */
private Cursor getData() {
    return getContext().getContentResolver().query(Station.CONTENT_URI, FmStation.COLUMNS,
            mSelection, mSelectionArgs, FmStation.Station.FREQUENCY);
}
/**
 * Called from FmRadioActivity.onResume(): reload the favorites and, except
 * on the very first resume, refresh the header/grid layout. Resets the
 * header to its maximum height when the "has favorites" state changed while
 * paused or when the grid fits in a single row.
 */
public void onResume() {
    Cursor c = getData();
    mAdapter.swipResult(c);
    if (mFirstOnResume) {
        mFirstOnResume = false;
    } else {
        refreshStateHeight();
        updateHeaderTextAndButton();
        refreshFavoriteLayout();
        int curOrientation = getResources().getConfiguration().orientation;
        final boolean isLandscape = curOrientation == Configuration.ORIENTATION_LANDSCAPE;
        int columnNum = isLandscape ? LAND_COLUMN_NUM : PORT_COLUMN_NUM;
        // Fix: ContentResolver.query() may return null; treat that as "no
        // favorites" instead of crashing (matches the null handling in
        // onRemoveFavorite()).
        int favoriteCount = (c == null) ? 0 : c.getCount();
        boolean isOneRow = favoriteCount <= columnNum;
        boolean hasFavoriteCurrent = favoriteCount > 0;
        if (mHasFavoriteWhenOnPause != hasFavoriteCurrent || isOneRow) {
            setHeaderHeight(getMaximumScrollableHeaderHeight());
        }
    }
}
// Whether at least one favorite existed when the activity last paused;
// compared against the current state in onResume().
private boolean mHasFavoriteWhenOnPause = false;

/**
 * Called from FmRadioActivity.onPause(): snapshots whether any favorites
 * exist right now.
 */
public void onPause() {
    if (mAdapter != null && mAdapter.getCount() > 0) {
        mHasFavoriteWhenOnPause = true;
    } else {
        mHasFavoriteWhenOnPause = false;
    }
}
/**
 * Re-queries the favorites and refreshes the adapter after a data change.
 * NOTE(review): method name contains a typo ("Adatper") but is kept for
 * compatibility with external callers.
 */
public void notifyAdatperChange() {
    Cursor c = getData();
    mAdapter.swipResult(c);
}

// Recomputes the header's min/max heights from whether any favorites exist.
private void refreshStateHeight() {
    if (mAdapter != null && mAdapter.getCount() > 0) {
        mMaximumHeaderHeight = getMaxHeight(STATE_HAS_FAVORITE);
        mMinimumHeaderHeight = getMinHeight(STATE_HAS_FAVORITE);
    } else {
        mMaximumHeaderHeight = getMaxHeight(STATE_NO_FAVORITE);
        mMinimumHeaderHeight = getMinHeight(STATE_NO_FAVORITE);
    }
}
/**
 * Called after a favorite is added: reloads the grid and, when this is the
 * very first favorite (count went 0 -> 1), collapses the header from its
 * full-screen STATE_NO_FAVORITE size down to the normal header.
 */
public void onAddFavorite() {
    Cursor c = getData();
    mAdapter.swipResult(c);
    refreshFavoriteLayout();
    // Fix: null-guard the cursor for consistency with onRemoveFavorite();
    // ContentResolver.query() may return null.
    if (c != null && c.getCount() == 1) {
        // Last time count is 0, so need set STATE_NO_FAVORITE then collapse header
        mMinimumHeaderHeight = getMinHeight(STATE_NO_FAVORITE);
        mMaximumHeaderHeight = getMaxHeight(STATE_NO_FAVORITE);
        collapseHeader();
    }
}
/**
 * Called after a favorite is removed: reloads the grid and, when the last
 * favorite disappeared (count went 1 -> 0), expands the header back to its
 * full-screen STATE_NO_FAVORITE size.
 */
public void onRemoveFavorite() {
    Cursor c = getData();
    mAdapter.swipResult(c);
    refreshFavoriteLayout();
    if (c != null && c.getCount() == 0) {
        // Stop the pending "expand after play" runnable posted in
        // initialize(). Fix: removeCallbacks(null) never matches a posted
        // Runnable; removeCallbacksAndMessages(null) clears the queue.
        mMainHandler.removeCallbacksAndMessages(null);
        // Last time count is 1, so need set STATE_NO_FAVORITE then expand header
        mMinimumHeaderHeight = getMinHeight(STATE_NO_FAVORITE);
        mMaximumHeaderHeight = getMaxHeight(STATE_NO_FAVORITE);
        expandHeader();
    }
}
/**
 * Shows the remove/rename popup menu for a station, anchored on the given
 * view. Any popup already showing is dismissed first.
 */
private void showPopupMenu(View anchor, final int frequency) {
    dismissPopupMenu();
    mPopupMenu = new PopupMenu(getContext(), anchor);
    Menu menu = mPopupMenu.getMenu();
    mPopupMenu.getMenuInflater().inflate(R.menu.gridview_item_more_menu, menu);
    mPopupMenu.setOnMenuItemClickListener(new OnMenuItemClickListener() {
        @Override
        public boolean onMenuItemClick(MenuItem item) {
            // Forward the chosen action to the registered EventListener.
            switch (item.getItemId()) {
                case R.id.remove_favorite:
                    if (mEventListener != null) {
                        mEventListener.onRemoveFavorite(frequency);
                    }
                    break;
                case R.id.rename:
                    if (mEventListener != null) {
                        mEventListener.onRename(frequency);
                    }
                    break;
                default:
                    break;
            }
            return false;
        }
    });
    mPopupMenu.show();
}

// Dismisses and clears the popup menu if one is showing.
private void dismissPopupMenu() {
    if (mPopupMenu != null) {
        mPopupMenu.dismiss();
        mPopupMenu = null;
    }
}

/**
 * Called from FmRadioActivity.onDestroy(): releases the adapter's cursor by
 * swapping in null.
 */
public void closeAdapterCursor() {
    mAdapter.swipResult(null);
}
/**
 * Registers the listener for GridView item events.
 *
 * @param listener The event listener
 */
public void registerListener(EventListener listener) {
    mEventListener = listener;
}

/**
 * Unregisters the GridView item event listener.
 * NOTE(review): the argument is ignored — whatever listener is registered
 * gets cleared regardless of which one is passed; confirm this is intended.
 *
 * @param listener The event listener
 */
public void unregisterListener(EventListener listener) {
    mEventListener = null;
}

/**
 * Listener for GridView item events: remove, rename, click-to-play.
 */
public interface EventListener {
    /**
     * Called when the "remove favorite" menu item is clicked.
     *
     * @param frequency The frequency to remove
     */
    void onRemoveFavorite(int frequency);
    /**
     * Called when the "rename" menu item is clicked.
     *
     * @param frequency The frequency to rename
     */
    void onRename(int frequency);
    /**
     * Called when a grid item is clicked to play.
     *
     * @param frequency The frequency to play
     */
    void onPlay(int frequency);
}
/**
 * Refreshes the play indicator in the grid when the tuned station or the
 * play state changes.
 *
 * @param currentStation the currently tuned station
 * @param isFmPlaying whether FM is currently playing
 */
public void refreshPlayIndicator(int currentStation, boolean isFmPlaying) {
    mCurrentStation = currentStation;
    mIsFmPlaying = isFmPlaying;
    if (mAdapter == null) {
        return;
    }
    mAdapter.notifyDataSetChanged();
}
/**
 * Adjust view padding and text size when scroll.
 *
 * <p>The main header collapses as the content scrolls. This helper moves and
 * resizes the header's child views (FM label, frequency, station name, RDS
 * text, control buttons, play button) so the transition is continuous. The
 * collapse is handled in two ranges: {@link FirstRangeAdjuster} covers full
 * height down to {@code mFirstTargetHeight}; {@link SecondRangeAdjuster}
 * covers {@code mFirstTargetHeight} down to {@code mSecondTargetHeight}.
 * All motion is linear: for each view a per-pixel "rate" is precomputed from
 * (start - target) / scrollRange and applied in {@link #getNewSize}.
 */
private class Adjuster {
    private final DisplayMetrics mDisplayMetrics;
    // Header heights (px) marking the boundaries of the two adjustment ranges.
    private final int mFirstTargetHeight;
    private final int mSecondTargetHeight;
    private final int mActionBarHeight = mActionBarSize;
    private final int mStatusBarHeight;
    private final int mFullHeight;// display height without status bar
    private final float mDensity;
    // Original typeface of the frequency text; restored when it is not docked
    // at the action bar (see setNewTypefaceForFrequencyText).
    private final Typeface mDefaultFrequencyTypeface;
    // Text view
    private TextView mFrequencyText;
    private TextView mFmDescriptionText;
    private TextView mStationNameText;
    private TextView mStationRdsText;
    /*
     * The five control buttons view(previous, next, increase,
     * decrease, favorite) and stop button
     */
    private View mControlView;
    private View mPlayButtonView;
    private final Context mContext;
    private final boolean mIsLandscape;
    private FirstRangeAdjuster mFirstRangeAdjuster;
    // NOTE(review): trailing double 'r' is a typo in the field name; kept as-is
    // because this is a comment-only pass.
    private SecondRangeAdjuster mSecondRangeAdjusterr;

    public Adjuster(Context context) {
        mContext = context;
        mDisplayMetrics = mContext.getResources().getDisplayMetrics();
        mDensity = mDisplayMetrics.density;
        int curOrientation = getResources().getConfiguration().orientation;
        mIsLandscape = curOrientation == Configuration.ORIENTATION_LANDSCAPE;
        Resources res = mContext.getResources();
        mFirstTargetHeight = res.getDimensionPixelSize(R.dimen.fm_main_header_big);
        mSecondTargetHeight = res.getDimensionPixelSize(R.dimen.fm_main_header_small);
        // Internal framework dimension; gives the real status bar height in px.
        mStatusBarHeight = res
                .getDimensionPixelSize(com.android.internal.R.dimen.status_bar_height);
        mFullHeight = mDisplayMetrics.heightPixels - mStatusBarHeight;
        mFrequencyText = (TextView) findViewById(R.id.station_value);
        mFmDescriptionText = (TextView) findViewById(R.id.text_fm);
        mStationNameText = (TextView) findViewById(R.id.station_name);
        mStationRdsText = (TextView) findViewById(R.id.station_rds);
        mControlView = findViewById(R.id.rl_imgbtnpart);
        mPlayButtonView = findViewById(R.id.play_button_container);
        mFirstRangeAdjuster = new FirstRangeAdjuster();
        mSecondRangeAdjusterr = new SecondRangeAdjuster();
        // In landscape the control strip spans the screen's long edge, which
        // DisplayMetrics reports as heightPixels.
        mControlView.setMinimumWidth(mIsLandscape ? mDisplayMetrics.heightPixels
                : mDisplayMetrics.widthPixels);
        mDefaultFrequencyTypeface = mFrequencyText.getTypeface();
    }

    /**
     * Dispatches to the adjuster for the range the current header height is
     * in. Below mSecondTargetHeight no adjustment is applied. In landscape
     * the first-range adjuster always handles the scroll.
     */
    public void handleScroll() {
        int height = getHeaderHeight();
        if (mIsLandscape || height > mFirstTargetHeight) {
            mFirstRangeAdjuster.handleScroll();
        } else if (height >= mSecondTargetHeight) {
            mSecondRangeAdjusterr.handleScroll();
        }
    }

    /**
     * Adjusts the header children while the header height is between the full
     * display height and the first target height (and always in landscape).
     */
    private class FirstRangeAdjuster {
        protected int mTargetHeight;
        // start text size and margin
        protected float mFmDescriptionTextSizeStart;
        protected float mFrequencyStartTextSize;
        protected float mStationNameTextSizeStart;
        protected float mFmDescriptionMarginTopStart;
        protected float mFmDescriptionStartPaddingLeft;
        protected float mFrequencyMarginTopStart;
        protected float mStationNameMarginTopStart;
        protected float mStationRdsMarginTopStart;
        protected float mControlViewMarginTopStart;
        // target text size and margin
        protected float mFmDescriptionTextSizeTarget;
        protected float mFrequencyTextSizeTarget;
        protected float mStationNameTextSizeTarget;
        protected float mFmDescriptionMarginTopTarget;
        protected float mFrequencyMarginTopTarget;
        protected float mStationNameMarginTopTarget;
        protected float mStationRdsMarginTopTarget;
        protected float mControlViewMarginTopTarget;
        protected float mPlayButtonMarginTopStart;
        protected float mPlayButtonMarginTopTarget;
        protected float mPlayButtonHeight;
        // Padding adjust rate as linear
        protected float mFmDescriptionPaddingRate;
        protected float mFrequencyPaddingRate;
        protected float mStationNamePaddingRate;
        protected float mStationRdsPaddingRate;
        protected float mControlViewPaddingRate;
        // init it with display height
        protected float mPlayButtonPaddingRate;
        // Text size adjust rate as linear
        // adjust from first to target critical height
        protected float mFmDescriptionTextSizeRate;
        protected float mFrequencyTextSizeRate;
        // adjust before first critical height
        protected float mStationNameTextSizeRate;

        public FirstRangeAdjuster() {
            Resources res = mContext.getResources();
            mTargetHeight = mFirstTargetHeight;
            // init start
            mFmDescriptionTextSizeStart = res.getDimension(R.dimen.fm_description_text_size);
            mFrequencyStartTextSize = res.getDimension(R.dimen.fm_frequency_text_size_start);
            mStationNameTextSizeStart = res
                    .getDimension(R.dimen.fm_station_name_text_size_start);
            // first view, margin refer to parent
            mFmDescriptionMarginTopStart = res
                    .getDimension(R.dimen.fm_description_margin_top_start) + mActionBarHeight;
            mFrequencyMarginTopStart = res.getDimension(R.dimen.fm_frequency_margin_top_start);
            mStationNameMarginTopStart = res
                    .getDimension(R.dimen.fm_station_name_margin_top_start);
            mStationRdsMarginTopStart = res
                    .getDimension(R.dimen.fm_station_rds_margin_top_start);
            mControlViewMarginTopStart = res
                    .getDimension(R.dimen.fm_control_buttons_margin_top_start);
            // init target
            mFrequencyTextSizeTarget = res
                    .getDimension(R.dimen.fm_frequency_text_size_first_target);
            mFmDescriptionTextSizeTarget = mFrequencyTextSizeTarget;
            mStationNameTextSizeTarget = res
                    .getDimension(R.dimen.fm_station_name_text_size_first_target);
            mFmDescriptionMarginTopTarget = res
                    .getDimension(R.dimen.fm_description_margin_top_first_target);
            mFmDescriptionStartPaddingLeft = mFrequencyText.getPaddingLeft();
            // first view, margin refer to parent if not in landscape
            if (!mIsLandscape) {
                mFmDescriptionMarginTopTarget += mActionBarHeight;
            } else {
                mFrequencyMarginTopStart += mActionBarHeight + mFmDescriptionTextSizeStart;
            }
            mFrequencyMarginTopTarget = res
                    .getDimension(R.dimen.fm_frequency_margin_top_first_target);
            mStationNameMarginTopTarget = res
                    .getDimension(R.dimen.fm_station_name_margin_top_first_target);
            mStationRdsMarginTopTarget = res
                    .getDimension(R.dimen.fm_station_rds_margin_top_first_target);
            mControlViewMarginTopTarget = res
                    .getDimension(R.dimen.fm_control_buttons_margin_top_first_target);
            // init text size and margin adjust rate
            // Each rate is the per-pixel linear change over the scrollable range.
            int scrollHeight = mFullHeight - mTargetHeight;
            mFmDescriptionTextSizeRate =
                    (mFmDescriptionTextSizeStart - mFmDescriptionTextSizeTarget) / scrollHeight;
            mFrequencyTextSizeRate = (mFrequencyStartTextSize - mFrequencyTextSizeTarget)
                    / scrollHeight;
            mStationNameTextSizeRate = (mStationNameTextSizeStart - mStationNameTextSizeTarget)
                    / scrollHeight;
            mFmDescriptionPaddingRate =
                    (mFmDescriptionMarginTopStart - mFmDescriptionMarginTopTarget)
                    / scrollHeight;
            mFrequencyPaddingRate = (mFrequencyMarginTopStart - mFrequencyMarginTopTarget)
                    / scrollHeight;
            mStationNamePaddingRate = (mStationNameMarginTopStart - mStationNameMarginTopTarget)
                    / scrollHeight;
            mStationRdsPaddingRate = (mStationRdsMarginTopStart - mStationRdsMarginTopTarget)
                    / scrollHeight;
            mControlViewPaddingRate = (mControlViewMarginTopStart - mControlViewMarginTopTarget)
                    / scrollHeight;
            // init play button padding, it different to others, padding top refer to parent
            mPlayButtonHeight = res.getDimension(R.dimen.play_button_height);
            // 16dp bottom inset converted to px via density.
            mPlayButtonMarginTopStart = mFullHeight - mPlayButtonHeight - 16 * mDensity;
            mPlayButtonMarginTopTarget = mFirstTargetHeight - mPlayButtonHeight / 2;
            mPlayButtonPaddingRate = (mPlayButtonMarginTopStart - mPlayButtonMarginTopTarget)
                    / scrollHeight;
        }

        /**
         * Applies the linear interpolation for the current header height to
         * each header child, top to bottom. Each view's padding is stacked on
         * the accumulated height of the views above it (lastHeight).
         */
        public void handleScroll() {
            if (mIsLandscape) {
                handleScrollLandscapeMode();
                return;
            }
            int currentHeight = getHeaderHeight();
            float newMargin = 0;
            float lastHeight = 0;
            float newTextSize;
            // 1.FM description (margin)
            newMargin = getNewSize(currentHeight, mTargetHeight, mFmDescriptionMarginTopTarget,
                    mFmDescriptionPaddingRate);
            lastHeight = setNewPadding(mFmDescriptionText, newMargin);
            // 2. frequency text (text size and margin)
            newTextSize = getNewSize(currentHeight, mTargetHeight, mFrequencyTextSizeTarget,
                    mFrequencyTextSizeRate);
            // setTextSize expects sp/dip units, so divide the px value by density.
            mFrequencyText.setTextSize(newTextSize / mDensity);
            newMargin = getNewSize(currentHeight, mTargetHeight, mFrequencyMarginTopTarget,
                    mFrequencyPaddingRate);
            lastHeight = setNewPadding(mFrequencyText, newMargin + lastHeight);
            // 3. station name (margin and text size)
            newMargin = getNewSize(currentHeight, mTargetHeight, mStationNameMarginTopTarget,
                    mStationNamePaddingRate);
            lastHeight = setNewPadding(mStationNameText, newMargin + lastHeight);
            newTextSize = getNewSize(currentHeight, mTargetHeight, mStationNameTextSizeTarget,
                    mStationNameTextSizeRate);
            mStationNameText.setTextSize(newTextSize / mDensity);
            // 4. station rds (margin)
            newMargin = getNewSize(currentHeight, mTargetHeight, mStationRdsMarginTopTarget,
                    mStationRdsPaddingRate);
            lastHeight = setNewPadding(mStationRdsText, newMargin + lastHeight);
            // 5. control buttons (margin)
            newMargin = getNewSize(currentHeight, mTargetHeight, mControlViewMarginTopTarget,
                    mControlViewPaddingRate);
            setNewPadding(mControlView, newMargin + lastHeight);
            // 6. stop button (padding), it different to others, padding top refer to parent
            newMargin = getNewSize(currentHeight, mTargetHeight, mPlayButtonMarginTopTarget,
                    mPlayButtonPaddingRate);
            setNewPadding(mPlayButtonView, newMargin);
        }

        /**
         * Landscape variant: additionally slides the frequency text from the
         * middle of the header up beside the "FM" label in the action bar,
         * adjusting its left padding and typeface as it docks.
         */
        private void handleScrollLandscapeMode() {
            int currentHeight = getHeaderHeight();
            float newMargin = 0;
            float lastHeight = 0;
            float newTextSize;
            // 1. FM description (color, alpha and margin)
            newMargin = getNewSize(currentHeight, mTargetHeight, mFmDescriptionMarginTopTarget,
                    mFmDescriptionPaddingRate);
            setNewPadding(mFmDescriptionText, newMargin);
            newTextSize = getNewSize(currentHeight, mTargetHeight, mFmDescriptionTextSizeTarget,
                    mFmDescriptionTextSizeRate);
            mFmDescriptionText.setTextSize(newTextSize / mDensity);
            // Fully collapsed header: switch the label to white at 87% alpha.
            boolean reachTop = (mSecondTargetHeight == getHeaderHeight());
            mFmDescriptionText.setTextColor(reachTop ? Color.WHITE
                    : getResources().getColor(R.color.text_fm_color));
            mFmDescriptionText.setAlpha(reachTop ? 0.87f : 1.0f);
            // 2. frequency text (text size, padding and margin)
            newTextSize = getNewSize(currentHeight, mTargetHeight, mFrequencyTextSizeTarget,
                    mFrequencyTextSizeRate);
            mFrequencyText.setTextSize(newTextSize / mDensity);
            newMargin = getNewSize(currentHeight, mTargetHeight, mFrequencyMarginTopTarget,
                    mFrequencyPaddingRate);
            // Move frequency text like "103.7" from middle to action bar in landscape,
            // or opposite direction. For example:
            // ************************* *************************
            // * * * FM 103.7 *
            // * FM * <--> * *
            // * 103.7 * * *
            // ************************* *************************
            // "FM", "103.7" and other subviews are in a RelativeLayout (id actionbar_parent)
            // in main_header.xml. The position is controlled by the padding of each subview.
            // Because "FM" and "103.7" move up, we need to change the padding top and change
            // the padding left of "103.7".
            // The padding between "FM" and "103.7" is 0.2 (e.g. paddingRate) times
            // the length of "FM" string length.
            float paddingRate = 0.2f;
            float addPadding = (((1 + paddingRate) * computeFmDescriptionWidth())
                    * (mFullHeight - currentHeight)) / (mFullHeight - mTargetHeight);
            mFrequencyText.setPadding((int) (addPadding + mFmDescriptionStartPaddingLeft),
                    (int) (newMargin), mFrequencyText.getPaddingRight(),
                    mFrequencyText.getPaddingBottom());
            lastHeight = newMargin + lastHeight + mFrequencyText.getTextSize();
            // If frequency text move to action bar, change it to bold
            setNewTypefaceForFrequencyText();
            // 3. station name (text size and margin)
            newTextSize = getNewSize(currentHeight, mTargetHeight, mStationNameTextSizeTarget,
                    mStationNameTextSizeRate);
            mStationNameText.setTextSize(newTextSize / mDensity);
            newMargin = getNewSize(currentHeight, mTargetHeight, mStationNameMarginTopTarget,
                    mStationNamePaddingRate);
            // if move to target position, need not move over the edge of actionbar
            if (lastHeight <= mActionBarHeight) {
                lastHeight = mActionBarHeight;
            }
            lastHeight = setNewPadding(mStationNameText, newMargin + lastHeight);
            /*
             * 4. station rds (margin), in landscape with favorite
             * it need parallel to station name
             */
            newMargin = getNewSize(currentHeight, mTargetHeight, mStationRdsMarginTopTarget,
                    mStationRdsPaddingRate);
            // Halfway point of the collapse; below it the RDS text is laid out
            // to the right of the station name instead of under it.
            int targetHeight = mFullHeight - (mFullHeight - mTargetHeight) / 2;
            if (currentHeight <= targetHeight) {
                String stationName = "" + mStationNameText.getText();
                int stationNameTextWidth = mStationNameText.getPaddingLeft();
                if (!stationName.equals("")) {
                    Paint paint = mStationNameText.getPaint();
                    // +8px gap between station name and RDS text.
                    stationNameTextWidth += (int) paint.measureText(stationName) + 8;
                }
                mStationRdsText.setPadding((int) stationNameTextWidth,
                        (int) (newMargin + lastHeight), mStationRdsText.getPaddingRight(),
                        mStationRdsText.getPaddingBottom());
            } else {
                mStationRdsText.setPadding((int) (16 * mDensity),
                        (int) (newMargin + lastHeight), mStationRdsText.getPaddingRight(),
                        mStationRdsText.getPaddingBottom());
            }
            // 5. control buttons (margin)
            newMargin = getNewSize(currentHeight, mTargetHeight, mControlViewMarginTopTarget,
                    mControlViewPaddingRate);
            setNewPadding(mControlView, newMargin + lastHeight);
            // 6. stop button (padding), it different to others, padding top refer to parent
            newMargin = getNewSize(currentHeight, mTargetHeight, mPlayButtonMarginTopTarget,
                    mPlayButtonPaddingRate);
            setNewPadding(mPlayButtonView, newMargin);
        }

        // Compute the text "FM" width
        private float computeFmDescriptionWidth() {
            Paint paint = mFmDescriptionText.getPaint();
            return (float) paint.measureText(mFmDescriptionText.getText().toString());
        }
    }

    /**
     * Adjusts the header children while the header height is between the
     * first and second target heights. Inherits the interpolation fields from
     * {@link FirstRangeAdjuster} and re-seeds them so this range starts where
     * the first range ended.
     */
    private class SecondRangeAdjuster extends FirstRangeAdjuster {
        public SecondRangeAdjuster() {
            Resources res = mContext.getResources();
            mTargetHeight = mSecondTargetHeight;
            // init start
            // Start values are the first range's target values, so the two
            // ranges join without a visual jump.
            mFrequencyStartTextSize = res
                    .getDimension(R.dimen.fm_frequency_text_size_first_target);
            mStationNameTextSizeStart = res
                    .getDimension(R.dimen.fm_station_name_text_size_first_target);
            mFmDescriptionMarginTopStart = res
                    .getDimension(R.dimen.fm_description_margin_top_first_target)
                    + mActionBarHeight;// first view, margin refer to parent
            mFrequencyMarginTopStart = res
                    .getDimension(R.dimen.fm_frequency_margin_top_first_target);
            mStationNameMarginTopStart = res
                    .getDimension(R.dimen.fm_station_name_margin_top_first_target);
            mStationRdsMarginTopStart = res
                    .getDimension(R.dimen.fm_station_rds_margin_top_first_target);
            mControlViewMarginTopStart = res
                    .getDimension(R.dimen.fm_control_buttons_margin_top_first_target);
            // init target
            mFrequencyTextSizeTarget = res
                    .getDimension(R.dimen.fm_frequency_text_size_second_target);
            mStationNameTextSizeTarget = res
                    .getDimension(R.dimen.fm_station_name_text_size_second_target);
            mFmDescriptionMarginTopTarget = res
                    .getDimension(R.dimen.fm_description_margin_top_second_target);
            mFrequencyMarginTopTarget = res
                    .getDimension(R.dimen.fm_frequency_margin_top_second_target);
            mStationNameMarginTopTarget = res
                    .getDimension(R.dimen.fm_station_name_margin_top_second_target);
            mStationRdsMarginTopTarget = res
                    .getDimension(R.dimen.fm_station_rds_margin_top_second_target);
            mControlViewMarginTopTarget = res
                    .getDimension(R.dimen.fm_control_buttons_margin_top_second_target);
            // init text size and margin adjust rate
            float scrollHeight = mFirstTargetHeight - mTargetHeight;
            mFrequencyTextSizeRate =
                    (mFrequencyStartTextSize - mFrequencyTextSizeTarget)
                    / scrollHeight;
            mStationNameTextSizeRate =
                    (mStationNameTextSizeStart - mStationNameTextSizeTarget)
                    / scrollHeight;
            mFmDescriptionPaddingRate =
                    (mFmDescriptionMarginTopStart - mFmDescriptionMarginTopTarget)
                    / scrollHeight;
            mFrequencyPaddingRate = (mFrequencyMarginTopStart - mFrequencyMarginTopTarget)
                    / scrollHeight;
            mStationNamePaddingRate = (mStationNameMarginTopStart - mStationNameMarginTopTarget)
                    / scrollHeight;
            mStationRdsPaddingRate = (mStationRdsMarginTopStart - mStationRdsMarginTopTarget)
                    / scrollHeight;
            mControlViewPaddingRate = (mControlViewMarginTopStart - mControlViewMarginTopTarget)
                    / scrollHeight;
            // init play button padding, it different to others, padding top refer to parent
            // NOTE(review): the play-button start/target below are the FIRST
            // range's endpoints, divided by the SECOND range's scrollHeight.
            // handleScroll() below ignores this rate and positions the button
            // directly from currentHeight, so it appears unused here — confirm
            // whether this initialization is intentional or leftover.
            mPlayButtonHeight = res.getDimension(R.dimen.play_button_height);
            mPlayButtonMarginTopStart = mFullHeight - mPlayButtonHeight - 16 * mDensity;
            mPlayButtonMarginTopTarget = mFirstTargetHeight - mPlayButtonHeight / 2;
            mPlayButtonPaddingRate = (mPlayButtonMarginTopStart - mPlayButtonMarginTopTarget)
                    / scrollHeight;
        }

        /** Same stacking scheme as the first range, plus FM-label fade-out. */
        @Override
        public void handleScroll() {
            int currentHeight = getHeaderHeight();
            float newMargin = 0;
            float lastHeight = 0;
            float newTextSize;
            // 1. FM description (alpha and margin)
            // Fade the label out over the first 16 dip of this range.
            float alpha = 0f;
            int offset = (int) ((mFirstTargetHeight - currentHeight) / mDensity);// dip
            if (offset <= 0) {
                alpha = 1f;
            } else if (offset <= 16) {
                alpha = 1 - offset / 16f;
            }
            mFmDescriptionText.setAlpha(alpha);
            newMargin = getNewSize(currentHeight, mTargetHeight, mFmDescriptionMarginTopTarget,
                    mFmDescriptionPaddingRate);
            lastHeight = setNewPadding(mFmDescriptionText, newMargin);
            // 2. frequency text (text size and margin)
            newTextSize = getNewSize(currentHeight, mTargetHeight, mFrequencyTextSizeTarget,
                    mFrequencyTextSizeRate);
            mFrequencyText.setTextSize(newTextSize / mDensity);
            newMargin = getNewSize(currentHeight, mTargetHeight, mFrequencyMarginTopTarget,
                    mFrequencyPaddingRate);
            lastHeight = setNewPadding(mFrequencyText, newMargin + lastHeight);
            // If frequency text move to action bar, change it to bold
            setNewTypefaceForFrequencyText();
            // 3. station name (text size and margin)
            newTextSize = getNewSize(currentHeight, mTargetHeight, mStationNameTextSizeTarget,
                    mStationNameTextSizeRate);
            mStationNameText.setTextSize(newTextSize / mDensity);
            newMargin = getNewSize(currentHeight, mTargetHeight, mStationNameMarginTopTarget,
                    mStationNamePaddingRate);
            // if move to target position, need not move over the edge of actionbar
            if (lastHeight <= mActionBarHeight) {
                lastHeight = mActionBarHeight;
            }
            lastHeight = setNewPadding(mStationNameText, newMargin + lastHeight);
            // 4. station rds (margin)
            newMargin = getNewSize(currentHeight, mTargetHeight, mStationRdsMarginTopTarget,
                    mStationRdsPaddingRate);
            lastHeight = setNewPadding(mStationRdsText, newMargin + lastHeight);
            // 5. control buttons (margin)
            newMargin = getNewSize(currentHeight, mTargetHeight, mControlViewMarginTopTarget,
                    mControlViewPaddingRate);
            setNewPadding(mControlView, newMargin + lastHeight);
            // 6. stop button (padding), it different to others, padding top refer to parent
            // Keep the button vertically centered on the header's bottom edge.
            newMargin = currentHeight - mPlayButtonHeight / 2;
            setNewPadding(mPlayButtonView, newMargin);
        }
    }

    /**
     * Makes the frequency text bold (SANS_SERIF) when the header is fully
     * collapsed (docked at the action bar), otherwise restores its original
     * typeface.
     */
    private void setNewTypefaceForFrequencyText() {
        boolean needBold = (mSecondTargetHeight == getHeaderHeight());
        mFrequencyText.setTypeface(needBold ? Typeface.SANS_SERIF : mDefaultFrequencyTypeface);
    }

    /**
     * Sets the view's top padding and returns the padding the NEXT view below
     * should stack on (this view's top padding plus its text size).
     */
    private float setNewPadding(TextView current, float newMargin) {
        current.setPadding(current.getPaddingLeft(), (int) (newMargin),
                current.getPaddingRight(), current.getPaddingBottom());
        float nextLayoutPadding = newMargin + current.getTextSize();
        return nextLayoutPadding;
    }

    /** Sets only the top padding of a plain (non-text) view. */
    private void setNewPadding(View current, float newMargin) {
        float newPadding = newMargin;
        current.setPadding(current.getPaddingLeft(), (int) (newPadding),
                current.getPaddingRight(), current.getPaddingBottom());
    }

    /**
     * Linear interpolation anchored at the target: value at targetHeight is
     * exactly targetSize, and it grows by {@code rate} per pixel of extra
     * header height.
     */
    private float getNewSize(int currentHeight, int targetHeight,
            float targetSize, float rate) {
        if (currentHeight == targetHeight) {
            return targetSize;
        }
        return targetSize + (currentHeight - targetHeight) * rate;
    }
}
/**
 * Per-item view cache for the GridView (view-holder pattern), avoiding
 * repeated findViewById calls during binding.
 */
private final class ViewHolder {
    // "More" overflow button of the item.
    ImageView mMoreButton;
    // Visualizer widget — presumably shown for the currently playing station.
    FmVisualizerView mPlayIndicator;
    // Station frequency label.
    TextView mStationFreq;
    // Station name label.
    TextView mStationName;
    // Anchor view the item's popup menu is attached to.
    View mPopupMenuAnchor;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.deployment.ResultPartitionDeploymentDescriptor;
import org.apache.flink.runtime.deployment.TaskDeploymentDescriptor;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.utils.SimpleAckingTaskManagerGateway;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.jobgraph.IntermediateDataSetID;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.ScheduleMode;
import org.apache.flink.runtime.jobmaster.LogicalSlot;
import org.apache.flink.runtime.jobmaster.SlotContext;
import org.apache.flink.runtime.jobmaster.TestingLogicalSlot;
import org.apache.flink.runtime.messages.Acknowledge;
import org.apache.flink.runtime.testutils.DirectScheduledExecutorService;
import org.apache.flink.util.TestLogger;
import org.junit.Test;
import java.util.Collection;
import java.util.concurrent.CompletableFuture;
import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.ERROR_MESSAGE;
import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.getExecutionVertex;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for deploying an {@link ExecutionVertex} to a slot and the resulting
 * {@link ExecutionState} transitions, including synchronous/asynchronous
 * deployment failures and external failure during deployment.
 *
 * <p>Review note: the test bodies previously wrapped everything in
 * {@code catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); }},
 * which discards the stack trace from the JUnit report. The tests now declare
 * {@code throws Exception} and let JUnit report unexpected failures directly.
 */
public class ExecutionVertexDeploymentTest extends TestLogger {

	@Test
	public void testDeployCall() throws Exception {
		final JobVertexID jid = new JobVertexID();
		final ExecutionJobVertex ejv = getExecutionVertex(jid);
		final LogicalSlot slot = new TestingLogicalSlot();
		final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
			AkkaUtils.getDefaultTimeout());

		assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
		vertex.deployToSlot(slot);
		assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());

		// no repeated scheduling
		try {
			vertex.deployToSlot(slot);
			fail("Scheduled from wrong state");
		} catch (IllegalStateException e) {
			// as expected
		}

		assertNull(vertex.getFailureCause());
		assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
		assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
	}

	@Test
	public void testDeployWithSynchronousAnswer() throws Exception {
		final JobVertexID jid = new JobVertexID();
		// Direct executor makes the deployment (and its ack) happen synchronously.
		final ExecutionJobVertex ejv = getExecutionVertex(jid, new DirectScheduledExecutorService());
		final LogicalSlot slot = new TestingLogicalSlot();
		final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
			AkkaUtils.getDefaultTimeout());

		assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
		vertex.deployToSlot(slot);
		assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());

		// no repeated scheduling
		try {
			vertex.deployToSlot(slot);
			fail("Scheduled from wrong state");
		} catch (IllegalStateException e) {
			// as expected
		}

		assertNull(vertex.getFailureCause());
		assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
		assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
		// the state must not have advanced to RUNNING yet
		assertTrue(vertex.getStateTimestamp(ExecutionState.RUNNING) == 0);
	}

	@Test
	public void testDeployWithAsynchronousAnswer() throws Exception {
		final JobVertexID jid = new JobVertexID();
		final ExecutionJobVertex ejv = getExecutionVertex(jid);
		final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
			AkkaUtils.getDefaultTimeout());
		final LogicalSlot slot = new TestingLogicalSlot();

		assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
		vertex.deployToSlot(slot);

		// no repeated scheduling
		try {
			vertex.deployToSlot(slot);
			fail("Scheduled from wrong state");
		} catch (IllegalStateException e) {
			// as expected
		}

		assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());

		// no repeated scheduling (still rejected after the DEPLOYING check)
		try {
			vertex.deployToSlot(slot);
			fail("Scheduled from wrong state");
		} catch (IllegalStateException e) {
			// as expected
		}

		assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
		assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
		assertTrue(vertex.getStateTimestamp(ExecutionState.RUNNING) == 0);
	}

	@Test
	public void testDeployFailedSynchronous() throws Exception {
		final JobVertexID jid = new JobVertexID();
		final ExecutionJobVertex ejv = getExecutionVertex(jid, new DirectScheduledExecutorService());
		final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
			AkkaUtils.getDefaultTimeout());
		// Gateway fails every submitTask call immediately.
		final LogicalSlot slot = new TestingLogicalSlot(new SubmitFailingSimpleAckingTaskManagerGateway());

		assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
		vertex.deployToSlot(slot);

		assertEquals(ExecutionState.FAILED, vertex.getExecutionState());
		assertNotNull(vertex.getFailureCause());
		assertTrue(vertex.getFailureCause().getMessage().contains(ERROR_MESSAGE));

		assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
		assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
		assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0);
	}

	@Test
	public void testDeployFailedAsynchronously() throws Exception {
		final JobVertexID jid = new JobVertexID();
		final ExecutionJobVertex ejv = getExecutionVertex(jid);
		final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
			AkkaUtils.getDefaultTimeout());
		final LogicalSlot slot = new TestingLogicalSlot(new SubmitFailingSimpleAckingTaskManagerGateway());

		assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
		vertex.deployToSlot(slot);

		// wait until the state transition must be done (at most ~1s)
		for (int i = 0; i < 100; i++) {
			if (vertex.getExecutionState() == ExecutionState.FAILED && vertex.getFailureCause() != null) {
				break;
			} else {
				Thread.sleep(10);
			}
		}

		assertEquals(ExecutionState.FAILED, vertex.getExecutionState());
		assertNotNull(vertex.getFailureCause());
		assertTrue(vertex.getFailureCause().getMessage().contains(ERROR_MESSAGE));

		assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
		assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
		assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0);
	}

	@Test
	public void testFailExternallyDuringDeploy() throws Exception {
		final JobVertexID jid = new JobVertexID();
		final ExecutionJobVertex ejv = getExecutionVertex(jid, new DirectScheduledExecutorService());
		final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0],
			AkkaUtils.getDefaultTimeout());
		// Gateway never completes submitTask, keeping the vertex in DEPLOYING.
		TestingLogicalSlot testingLogicalSlot = new TestingLogicalSlot(new SubmitBlockingSimpleAckingTaskManagerGateway());

		assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
		vertex.deployToSlot(testingLogicalSlot);
		assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());

		Exception testError = new Exception("test error");
		vertex.fail(testError);

		assertEquals(ExecutionState.FAILED, vertex.getExecutionState());
		assertEquals(testError, vertex.getFailureCause());

		assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0);
		assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0);
		assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0);
	}

	/** Gateway whose {@code submitTask} always completes exceptionally with {@code ERROR_MESSAGE}. */
	private static class SubmitFailingSimpleAckingTaskManagerGateway extends SimpleAckingTaskManagerGateway {
		@Override
		public CompletableFuture<Acknowledge> submitTask(TaskDeploymentDescriptor tdd, Time timeout) {
			CompletableFuture<Acknowledge> future = new CompletableFuture<>();
			future.completeExceptionally(new Exception(ERROR_MESSAGE));
			return future;
		}
	}

	/** Gateway whose {@code submitTask} never completes. */
	private static class SubmitBlockingSimpleAckingTaskManagerGateway extends SimpleAckingTaskManagerGateway {
		@Override
		public CompletableFuture<Acknowledge> submitTask(TaskDeploymentDescriptor tdd, Time timeout) {
			return new CompletableFuture<>();
		}
	}

	/**
	 * Tests that the lazy scheduling flag is correctly forwarded to the produced partition descriptors.
	 */
	@Test
	public void testTddProducedPartitionsLazyScheduling() throws Exception {
		ExecutionJobVertex jobVertex = getExecutionVertex(new JobVertexID(), new DirectScheduledExecutorService());
		IntermediateResult result =
			new IntermediateResult(new IntermediateDataSetID(), jobVertex, 1, ResultPartitionType.PIPELINED);
		ExecutionVertex vertex =
			new ExecutionVertex(jobVertex, 0, new IntermediateResult[]{result}, Time.minutes(1));

		ExecutionEdge mockEdge = createMockExecutionEdge(1);

		result.getPartitions()[0].addConsumerGroup();
		result.getPartitions()[0].addConsumer(mockEdge, 0);

		SlotContext slotContext = mock(SlotContext.class);
		when(slotContext.getAllocationId()).thenReturn(new AllocationID());

		LogicalSlot slot = mock(LogicalSlot.class);
		when(slot.getAllocationId()).thenReturn(new AllocationID());

		for (ScheduleMode mode : ScheduleMode.values()) {
			vertex.getExecutionGraph().setScheduleMode(mode);

			TaskDeploymentDescriptor tdd = vertex.createDeploymentDescriptor(new ExecutionAttemptID(), slot, null, 1);

			Collection<ResultPartitionDeploymentDescriptor> producedPartitions = tdd.getProducedPartitions();

			assertEquals(1, producedPartitions.size());
			ResultPartitionDeploymentDescriptor desc = producedPartitions.iterator().next();
			assertEquals(mode.allowLazyDeployment(), desc.sendScheduleOrUpdateConsumersMessage());
		}
	}

	/** Builds a mocked {@link ExecutionEdge} whose target job vertex reports the given max parallelism. */
	private ExecutionEdge createMockExecutionEdge(int maxParallelism) {
		ExecutionVertex targetVertex = mock(ExecutionVertex.class);
		ExecutionJobVertex targetJobVertex = mock(ExecutionJobVertex.class);

		when(targetVertex.getJobVertex()).thenReturn(targetJobVertex);
		when(targetJobVertex.getMaxParallelism()).thenReturn(maxParallelism);

		ExecutionEdge edge = mock(ExecutionEdge.class);
		when(edge.getTarget()).thenReturn(targetVertex);
		return edge;
	}
}
| |
/*
Copyright (c) 2014,2015 Ahome' Innovation Technologies. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.ait.lienzo.client.core.shape;
import com.ait.lienzo.client.core.Attribute;
import com.ait.lienzo.client.core.Context2D;
import com.ait.lienzo.client.core.shape.json.IFactory;
import com.ait.lienzo.client.core.shape.json.validators.ValidationContext;
import com.ait.lienzo.client.core.shape.json.validators.ValidationException;
import com.ait.lienzo.client.core.types.BoundingBox;
import com.ait.lienzo.client.core.types.NFastArrayList;
import com.ait.lienzo.client.core.types.PathPartList;
import com.ait.lienzo.client.core.types.Point2D;
import com.ait.lienzo.client.core.types.Point2DArray;
import com.ait.lienzo.shared.core.types.ShapeType;
import com.google.gwt.json.client.JSONObject;
public class Spline extends AbstractMultiPointShape<Spline>
{
// Set by prepare() from context.path(m_list); fill() uses it to decide
// whether the traced path can actually be filled.
private boolean m_fill = false;

// Lazily-built cache of the spline's path segments; cleared by refresh()
// and rebuilt by parse() on the next draw or bounding-box query.
private final PathPartList m_list = new PathPartList();
/**
 * Constructor. Creates an instance of a spline through the given control
 * points.
 *
 * @param points the control points the spline passes through
 */
public Spline(final Point2DArray points)
{
    super(ShapeType.SPLINE);

    setControlPoints(points);
}
/**
 * Constructor used by the JSON deserialization factory.
 *
 * @param node the serialized shape definition
 * @param ctx validation context used while parsing the node
 * @throws ValidationException if the node fails validation
 */
protected Spline(final JSONObject node, final ValidationContext ctx) throws ValidationException
{
    super(ShapeType.SPLINE, node, ctx);
}
/**
 * Returns the bounding box of this spline, lazily (re)building the cached
 * path from the control point attributes when it is empty.
 */
@Override
public BoundingBox getBoundingBox()
{
    if (m_list.size() == 0)
    {
        parse(getAttributes());
    }
    return m_list.getBoundingBox();
}
/**
 * Draws this Spline.
 *
 * <p>Rebuilds the cached path if needed, then traces it into the context.
 * Returns false (nothing to draw) when no path could be built.
 *
 * @param context the {@link Context2D} used to draw this spline.
 */
@Override
protected boolean prepare(final Context2D context, final Attributes attr, final double alpha)
{
    if (m_list.size() < 1)
    {
        parse(attr);

        // Still empty after parsing (e.g. fewer than two points) - skip drawing.
        if (m_list.size() < 1)
        {
            return false;
        }
    }
    m_fill = context.path(m_list);

    return true;
}
/**
 * Fills the spline, but only when the last {@code context.path()} call in
 * {@link #prepare} reported a fillable path.
 */
@Override
protected void fill(Context2D context, Attributes attr, double alpha)
{
    if (false == m_fill)
    {
        return;
    }
    super.fill(context, attr, alpha);
}
/**
 * Invalidates the cached path so it is rebuilt from the current attributes
 * on the next draw or bounding-box query.
 *
 * @return this Spline, for call chaining
 */
@Override
public Spline refresh()
{
    m_list.clear();

    return this;
}
private final void parse(Attributes attr)
{
final PathPoint[] points = getPathPoints(attr.getControlPoints());
final int size = points.length;
if (size < 3)
{
if (size > 1)
{
m_list.M(points[0].x, points[0].y).L(points[1].x, points[1].y);
}
return;
}
final double curveFactor = attr.getCurveFactor();
final double angleFactor = attr.getAngleFactor();
boolean closed = false;
int begindex = 1;
int endindex = size - 1;
if ((points[0].x == points[size - 1].x) && (points[0].y == points[size - 1].y))
{
begindex = 0;
endindex = size;
closed = true;
}
else
{
closed = false;
}
final NFastArrayList<PathPoint[]> carray = new NFastArrayList<PathPoint[]>();
for (int i = begindex; i < endindex; i++)
{
final PathPoint p0 = ((i - 1) < 0) ? points[size - 2] : points[i - 1];
final PathPoint p1 = points[i];
final PathPoint p2 = ((i + 1) == size) ? points[1] : points[i + 1];
final double a = Math.max(PathPoint.distance(p0, p1), 0.001);
final double b = Math.max(PathPoint.distance(p1, p2), 0.001);
final PathPoint apt = new PathPoint(p0.x - p1.x, p0.y - p1.y);
final PathPoint bpt = new PathPoint(p1.x, p1.y);
final PathPoint cpt = new PathPoint(p2.x - p1.x, p2.y - p1.y);
if (a > b)
{
apt.normalize(b);
}
else if (b > a)
{
cpt.normalize(a);
}
apt.offset(p1.x, p1.y);
cpt.offset(p1.x, p1.y);
final double ax = bpt.x - apt.x;
final double ay = bpt.y - apt.y;
final double bx = bpt.x - cpt.x;
final double by = bpt.y - cpt.y;
double rx = ax + bx;
double ry = ay + by;
if ((rx == 0) && (ry == 0))
{
rx = -bx;
ry = by;
}
if ((ay == 0) && (by == 0))
{
rx = 0;
ry = 1;
}
else if ((ax == 0) && (bx == 0))
{
rx = 1;
ry = 0;
}
double cdist = Math.min(a, b) * curveFactor;
if (angleFactor != 0)
{
final double c = Math.max(PathPoint.distance(p0, p2), 0.001);
cdist *= ((1 - angleFactor) + angleFactor * (Math.acos(Math.min(Math.max((b * b + a * a - c * c) / (2 * b * a), -1), 1)) / Math.PI));
}
final double cangl = Math.atan2(ry, rx) + Math.PI / 2;
final PathPoint cp2 = PathPoint.polar(cdist, cangl);
final PathPoint cp1 = PathPoint.polar(cdist, cangl + Math.PI);
cp1.offset(p1.x, p1.y);
cp2.offset(p1.x, p1.y);
if (PathPoint.distance(cp2, p2) > PathPoint.distance(cp1, p2))
{
carray.add(i, PathPoint.toArray(cp2, cp1));
}
else
{
carray.add(i, PathPoint.toArray(cp1, cp2));
}
}
final boolean lineFlatten = attr.getLineFlatten();
m_list.M(points[0].x, points[0].y);
if (begindex == 1)
{
final PathPoint point = carray.get(1)[0];
m_list.Q(point.x, point.y, points[1].x, points[1].y);
}
int i;
for (i = begindex; i < (endindex - 1); i++)
{
boolean line = lineFlatten && ((i > 0 && Math.atan2(points[i].y - points[i - 1].y, points[i].x - points[i - 1].x) == Math.atan2(points[i + 1].y - points[i].y, points[i + 1].x - points[i].x)) || (i < size - 2 && Math.atan2(points[i + 2].y - points[i + 1].y, points[i + 2].x - points[i + 1].x) == Math.atan2(points[i + 1].y - points[i].y, points[i + 1].x - points[i].x)));
if (line)
{
m_list.L(points[i + 1].x, points[i + 1].y);
}
else
{
final PathPoint p1 = carray.get(i)[1];
final PathPoint p2 = carray.get(i + 1)[0];
m_list.C(p1.x, p1.y, p2.x, p2.y, points[i + 1].x, points[i + 1].y);
}
}
if (endindex == (size - 1))
{
final PathPoint point = carray.get(i)[1];
m_list.Q(point.x, point.y, points[i + 1].x, points[i + 1].y);
}
if (closed)
{
m_list.Z();
}
}
private final static PathPoint[] getPathPoints(final Point2DArray array)
{
if ((null == array) || (array.size() < 2))
{
return new PathPoint[0];
}
final Point2DArray unique = array.noAdjacentPoints();
final int size = unique.size();
if (size < 2)
{
return new PathPoint[0];
}
final PathPoint[] points = new PathPoint[size];
for (int i = 0; i < size; i++)
{
final Point2D point = unique.get(i);
points[i] = new PathPoint(point.getX(), point.getY());
}
return points;
}
/**
* Gets this spline's control points.
*
* @return {@link Point2DArray}
*/
public Point2DArray getControlPoints()
{
return getAttributes().getControlPoints();
}
/**
* Sets the control points for this curve.
*
* @param points
* A {@link Point2DArray} containing the control points
*
* @return this Spline
*/
public Spline setControlPoints(final Point2DArray points)
{
getAttributes().setControlPoints(points);
m_list.clear();
return this;
}
@Override
public Spline setPoint2DArray(Point2DArray points)
{
return setControlPoints(points);
}
@Override
public Point2DArray getPoint2DArray()
{
return getControlPoints();
}
public double getCurveFactor()
{
return getAttributes().getCurveFactor();
}
public Spline setCurveFactor(final double factor)
{
getAttributes().setCurveFactor(factor);
m_list.clear();
return this;
}
public double getAngleFactor()
{
return getAttributes().getAngleFactor();
}
public Spline setAngleFactor(final double factor)
{
getAttributes().setAngleFactor(factor);
m_list.clear();
return this;
}
public boolean getLineFlatten()
{
return getAttributes().getLineFlatten();
}
public Spline setLineFlatten(final boolean flat)
{
getAttributes().setLineFlatten(flat);
m_list.clear();
return this;
}
@Override
public IFactory<Spline> getFactory()
{
return new SplineFactory();
}
public static class SplineFactory extends ShapeFactory<Spline>
{
public SplineFactory()
{
super(ShapeType.SPLINE);
addAttribute(Attribute.CURVE_FACTOR);
addAttribute(Attribute.ANGLE_FACTOR);
addAttribute(Attribute.LINE_FLATTEN);
addAttribute(Attribute.CONTROL_POINTS, true);
}
@Override
public Spline create(final JSONObject node, final ValidationContext ctx) throws ValidationException
{
return new Spline(node, ctx);
}
}
private static final class PathPoint
{
public double x;
public double y;
PathPoint(double x, double y)
{
this.x = x;
this.y = y;
}
final void normalize(final double length)
{
if (((x == 0) && (y == 0)) || (length == 0))
{
return;
}
final double scale = length / Math.sqrt((x * x) + (y * y));
x *= scale;
y *= scale;
}
final void offset(final double dx, final double dy)
{
x += dx;
y += dy;
}
static final double distance(final PathPoint a, final PathPoint b)
{
final double dx = b.x - a.x;
final double dy = b.y - a.y;
return Math.sqrt((dx * dx) + (dy * dy));
}
static final PathPoint polar(final double length, final double angle)
{
return new PathPoint(length * Math.cos(angle), length * Math.sin(angle));
}
static final PathPoint[] toArray(final PathPoint... points)
{
return points;
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.