gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.popup.AbstractPopup;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionListener;
/**
* User: anna
* Date: 13-Mar-2006
*/
public class ResizeComponentListener extends MouseAdapter implements MouseMotionListener {
  // Pixel distance from a window edge within which a point is treated as a
  // resize gesture.
  private static final int SENSITIVITY = 4;
  private final AbstractPopup myPopup;
  private final AbstractPopup.MyContentPanel myComponent;
  // Screen location of the last processed press/drag point; null while no
  // resize operation is in progress.
  private Point myStartPoint = null;
  // Active resize direction: one of the Cursor.*_RESIZE_CURSOR constants,
  // or -1 when idle.
  private int myDirection = -1;

  public ResizeComponentListener(final AbstractPopup popup) {
    myPopup = popup;
    myComponent = (AbstractPopup.MyContentPanel)popup.getContent();
  }

  // Starts a resize operation if the press landed near a window edge/corner.
  public void mousePressed(MouseEvent e) {
    final Window popupWindow = SwingUtilities.windowForComponent(myComponent);
    if (popupWindow != null) {
      myStartPoint = new RelativePoint(e).getScreenPoint();
      myDirection = getDirection(myStartPoint, popupWindow.getBounds());
      if (myDirection == Cursor.DEFAULT_CURSOR){
        // Not on a resize edge — ignore this gesture.
        myStartPoint = null;
      } else {
        if (isToShowBorder()) {
          myComponent.setBorder(BorderFactory.createMatteBorder(2, 2, 2, 2, Color.black.brighter()));
        }
      }
    }
  }

  public void mouseClicked(MouseEvent e) {
    endOperation();
  }

  public void mouseReleased(MouseEvent e) {
    endOperation();
  }

  public void mouseExited(MouseEvent e) {
    final Window popupWindow = SwingUtilities.windowForComponent(myComponent);
    if (popupWindow == null) return;
    clearBorder(popupWindow);
  }

  // Finishes the current resize: restores border/cursor, clamps the window to
  // its minimum size and persists the resulting dimensions.
  private void endOperation() {
    final Window popupWindow = SwingUtilities.windowForComponent(myComponent);
    if (popupWindow != null) {
      if (isToShowBorder()) {
        myComponent.setBorder(BorderFactory.createEmptyBorder(2,2,2,2));
      }
      // Never leave the popup smaller than its minimum size.
      Dimension size = popupWindow.getSize();
      Dimension minSize = popupWindow.getMinimumSize();
      if (size.width < minSize.width) {
        size.width = minSize.width;
      }
      if (size.height < minSize.height) {
        size.height = minSize.height;
      }
      popupWindow.setSize(size);
      popupWindow.validate();
      popupWindow.repaint();
      setWindowCursor(popupWindow, Cursor.DEFAULT_CURSOR);
      // Remember the final size so the popup can reopen with it.
      myPopup.storeDimensionSize(popupWindow.getSize());
    }
    myStartPoint = null;
    myDirection = -1;
  }

  // Resize border feedback is currently disabled everywhere.
  private boolean isToShowBorder() {
    return false;
  }

  // Applies the delta between the drag point and the previous point to the
  // window bounds. The edge/corner opposite to myDirection stays anchored:
  // N/W directions also move the origin, S/E only grow/shrink the size.
  private void doResize(final Point point) {
    final Window popupWindow = SwingUtilities.windowForComponent(myComponent);
    final Rectangle bounds = popupWindow.getBounds();
    final Point location = popupWindow.getLocation();
    switch (myDirection){
      case Cursor.NW_RESIZE_CURSOR :
        popupWindow.setBounds(location.x + point.x - myStartPoint.x,
                              location.y + point.y - myStartPoint.y,
                              bounds.width + myStartPoint.x - point.x,
                              bounds.height + myStartPoint.y - point.y );
        break;
      case Cursor.N_RESIZE_CURSOR :
        popupWindow.setBounds(location.x,
                              location.y + point.y - myStartPoint.y,
                              bounds.width,
                              bounds.height + myStartPoint.y - point.y);
        break;
      case Cursor.NE_RESIZE_CURSOR :
        popupWindow.setBounds(location.x,
                              location.y + point.y - myStartPoint.y,
                              bounds.width + point.x - myStartPoint.x,
                              bounds.height + myStartPoint.y - point.y);
        break;
      case Cursor.E_RESIZE_CURSOR :
        popupWindow.setBounds(location.x ,
                              location.y,
                              bounds.width + point.x - myStartPoint.x,
                              bounds.height);
        break;
      case Cursor.SE_RESIZE_CURSOR :
        popupWindow.setBounds(location.x,
                              location.y,
                              bounds.width + point.x - myStartPoint.x,
                              bounds.height + point.y - myStartPoint.y);
        break;
      case Cursor.S_RESIZE_CURSOR :
        popupWindow.setBounds(location.x,
                              location.y,
                              bounds.width ,
                              bounds.height + point.y - myStartPoint.y);
        break;
      case Cursor.SW_RESIZE_CURSOR :
        popupWindow.setBounds(location.x + point.x - myStartPoint.x,
                              location.y,
                              bounds.width + myStartPoint.x - point.x,
                              bounds.height + point.y - myStartPoint.y);
        break;
      case Cursor.W_RESIZE_CURSOR :
        popupWindow.setBounds(location.x + point.x - myStartPoint.x,
                              location.y,
                              bounds.width + myStartPoint.x - point.x,
                              bounds.height);
        break;
    }
    popupWindow.validate();
  }

  // Updates the cursor (and optional border) as the mouse roams over the
  // popup with no button pressed.
  public void mouseMoved(MouseEvent e) {
    Point point = new RelativePoint(e).getScreenPoint();
    final Window popupWindow = SwingUtilities.windowForComponent(myComponent);
    if (popupWindow == null) return;
    final int cursor = getDirection(point, popupWindow.getBounds());
    if (cursor != Cursor.DEFAULT_CURSOR){
      if (isToShowBorder()) {
        if (myStartPoint == null) {
          myComponent.setBorder(BorderFactory.createMatteBorder(2, 2, 2, 2, Color.black.brighter()));
        }
      }
      setWindowCursor(popupWindow, cursor);
      e.consume();
    } else {
      clearBorder(popupWindow);
    }
  }

  private void setWindowCursor(final Window popupWindow, final int cursor) {
    // When the Mac resize corner is drawn the default cursor is kept; the
    // corner itself is the affordance.
    popupWindow.setCursor(myPopup.isToDrawMacCorner()? Cursor.getDefaultCursor() : Cursor.getPredefinedCursor(cursor));
  }

  private void clearBorder(final Window popupWindow) {
    if (isToShowBorder()){
      myComponent.setBorder(BorderFactory.createEmptyBorder(2, 2, 2, 2));
    }
    setWindowCursor(popupWindow, Cursor.DEFAULT_CURSOR);
  }

  // Performs the incremental resize while dragging; myStartPoint is advanced
  // to the current point so each drag event applies only its own delta.
  public void mouseDragged(MouseEvent e) {
    if (e.isConsumed()) return;
    final Point point = new RelativePoint(e).getScreenPoint();
    final Window popupWindow = SwingUtilities.windowForComponent(myComponent);
    if (popupWindow == null) return;
    if (myStartPoint != null) {
      if (isToShowBorder()) {
        setWindowCursor(popupWindow, myDirection);
      }
      doResize(point);
      myStartPoint = point;
      e.consume();
    } else {
      if (isToShowBorder()) {
        final int cursor = getDirection(point, popupWindow.getBounds());
        setWindowCursor(popupWindow, cursor);
      }
    }
  }

  // Maps a screen point to a resize direction for the given window bounds:
  // a Cursor.*_RESIZE_CURSOR constant, or Cursor.DEFAULT_CURSOR when the
  // point is not within SENSITIVITY of an edge. When the Mac resize corner
  // is drawn, only the 16x16 bottom-right corner area triggers SE-resizing.
  private int getDirection(Point startPoint, Rectangle bounds){
    if (myPopup.isToDrawMacCorner()){
      if (bounds.x + bounds.width - startPoint.x < 16 && //inside icon
          bounds.y + bounds.height - startPoint.y < 16 &&
          bounds.y + bounds.height - startPoint.y > 0 &&
          bounds.x + bounds.width - startPoint.x > 0){
        return Cursor.SE_RESIZE_CURSOR;
      }
      return Cursor.DEFAULT_CURSOR;
    }
    // Shrink the rectangle by the 2px border so edge math lines up with the
    // painted border.
    bounds = new Rectangle(bounds.x + 2, bounds.y + 2, bounds.width - 2, bounds.height - 2);
    if (!bounds.contains(startPoint)){
      return Cursor.DEFAULT_CURSOR;
    }
    if (Math.abs(startPoint.x - bounds.x ) < SENSITIVITY){ //left bound
      if (Math.abs(startPoint.y - bounds.y) < SENSITIVITY){ //top
        return Cursor.NW_RESIZE_CURSOR;
      } else if (Math.abs(bounds.y + bounds.height - startPoint.y) < SENSITIVITY) { //bottom
        return Cursor.SW_RESIZE_CURSOR;
      } else { //edge
        return Cursor.W_RESIZE_CURSOR;
      }
    } else if (Math.abs(bounds.x + bounds.width - startPoint.x) < SENSITIVITY){ //right
      if (Math.abs(startPoint.y - bounds.y) < SENSITIVITY){ //top
        return Cursor.NE_RESIZE_CURSOR;
      } else if (Math.abs(bounds.y + bounds.height - startPoint.y) < SENSITIVITY) { //bottom
        return Cursor.SE_RESIZE_CURSOR;
      } else { //edge
        return Cursor.E_RESIZE_CURSOR;
      }
    } else { //other
      if (Math.abs(startPoint.y - bounds.y) < SENSITIVITY){ //top
        return Cursor.N_RESIZE_CURSOR;
      } else if (Math.abs(bounds.y + bounds.height - startPoint.y) < SENSITIVITY) { //bottom
        return Cursor.S_RESIZE_CURSOR;
      } else { //edge
        return Cursor.DEFAULT_CURSOR;
      }
    }
  }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import com.facebook.buck.core.build.buildable.context.FakeBuildableContext;
import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.build.context.FakeBuildContext;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.rulekey.RuleKey;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder;
import com.facebook.buck.core.sourcepath.FakeSourcePath;
import com.facebook.buck.core.sourcepath.PathSourcePath;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolver;
import com.facebook.buck.core.sourcepath.resolver.impl.DefaultSourcePathResolver;
import com.facebook.buck.core.toolchain.tool.Tool;
import com.facebook.buck.core.toolchain.tool.impl.HashedFileTool;
import com.facebook.buck.cxx.toolchain.ArchiveContents;
import com.facebook.buck.cxx.toolchain.Archiver;
import com.facebook.buck.cxx.toolchain.BsdArchiver;
import com.facebook.buck.cxx.toolchain.GnuArchiver;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.rules.keys.TestDefaultRuleKeyFactory;
import com.facebook.buck.shell.Genrule;
import com.facebook.buck.shell.GenruleBuilder;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.TestExecutionContext;
import com.facebook.buck.testutil.FakeFileHashCache;
import com.google.common.base.Strings;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import org.junit.Test;
/**
 * Unit tests for {@code Archive}: rule-key sensitivity to each of its inputs,
 * propagation of archiver/ranlib flags into the generated build steps, and
 * build-dependency wiring when inputs are produced by other rules.
 */
public class ArchiveTest {
  // Fake tool paths; their hashes are stubbed via FakeFileHashCache below.
  private static final Path AR = Paths.get("ar");
  private static final Path RANLIB = Paths.get("ranlib");
  private static final Path DEFAULT_OUTPUT = Paths.get("foo/libblah.a");
  private static final ImmutableList<SourcePath> DEFAULT_INPUTS =
      ImmutableList.of(
          FakeSourcePath.of("a.o"), FakeSourcePath.of("b.o"), FakeSourcePath.of("c.o"));
  private final ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
  private final Archiver DEFAULT_ARCHIVER =
      new GnuArchiver(new HashedFileTool(PathSourcePath.of(projectFilesystem, AR)));
  private final Optional<Tool> DEFAULT_RANLIB =
      Optional.of(new HashedFileTool(PathSourcePath.of(projectFilesystem, RANLIB)));

  /**
   * Varies the archiver, output path, inputs and archiver type one at a time
   * and asserts each variation changes the rule key relative to a baseline.
   */
  @Test
  public void testThatInputChangesCauseRuleKeyChanges() {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestActionGraphBuilder());
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
    // Deterministic content hashes for every file the rule key can observe.
    FakeFileHashCache hashCache =
        FakeFileHashCache.createFromStrings(
            ImmutableMap.<String, String>builder()
                .put(AR.toString(), Strings.repeat("0", 40))
                .put(RANLIB.toString(), Strings.repeat("1", 40))
                .put("a.o", Strings.repeat("a", 40))
                .put("b.o", Strings.repeat("b", 40))
                .put("c.o", Strings.repeat("c", 40))
                .put(Paths.get("different").toString(), Strings.repeat("d", 40))
                .build());

    // Generate a rule key for the defaults.
    RuleKey defaultRuleKey =
        new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder)
            .build(
                Archive.from(
                    target,
                    projectFilesystem,
                    ruleFinder,
                    DEFAULT_ARCHIVER,
                    ImmutableList.of(),
                    DEFAULT_RANLIB,
                    ImmutableList.of(),
                    ArchiveContents.NORMAL,
                    DEFAULT_OUTPUT,
                    DEFAULT_INPUTS,
                    /* cacheable */ true));

    // Verify that changing the archiver causes a rulekey change.
    RuleKey archiverChange =
        new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder)
            .build(
                Archive.from(
                    target,
                    projectFilesystem,
                    ruleFinder,
                    new GnuArchiver(
                        new HashedFileTool(
                            PathSourcePath.of(projectFilesystem, Paths.get("different")))),
                    ImmutableList.of(),
                    DEFAULT_RANLIB,
                    ImmutableList.of(),
                    ArchiveContents.NORMAL,
                    DEFAULT_OUTPUT,
                    DEFAULT_INPUTS,
                    /* cacheable */ true));
    assertNotEquals(defaultRuleKey, archiverChange);

    // Verify that changing the output path causes a rulekey change.
    RuleKey outputChange =
        new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder)
            .build(
                Archive.from(
                    target,
                    projectFilesystem,
                    ruleFinder,
                    DEFAULT_ARCHIVER,
                    ImmutableList.of(),
                    DEFAULT_RANLIB,
                    ImmutableList.of(),
                    ArchiveContents.NORMAL,
                    Paths.get("different"),
                    DEFAULT_INPUTS,
                    /* cacheable */ true));
    assertNotEquals(defaultRuleKey, outputChange);

    // Verify that changing the inputs causes a rulekey change.
    RuleKey inputChange =
        new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder)
            .build(
                Archive.from(
                    target,
                    projectFilesystem,
                    ruleFinder,
                    DEFAULT_ARCHIVER,
                    ImmutableList.of(),
                    DEFAULT_RANLIB,
                    ImmutableList.of(),
                    ArchiveContents.NORMAL,
                    DEFAULT_OUTPUT,
                    ImmutableList.of(FakeSourcePath.of("different")),
                    /* cacheable */ true));
    assertNotEquals(defaultRuleKey, inputChange);

    // Verify that changing the type of archiver causes a rulekey change.
    RuleKey archiverTypeChange =
        new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder)
            .build(
                Archive.from(
                    target,
                    projectFilesystem,
                    ruleFinder,
                    new BsdArchiver(new HashedFileTool(PathSourcePath.of(projectFilesystem, AR))),
                    ImmutableList.of(),
                    DEFAULT_RANLIB,
                    ImmutableList.of(),
                    ArchiveContents.NORMAL,
                    DEFAULT_OUTPUT,
                    DEFAULT_INPUTS,
                    /* cacheable */ true));
    assertNotEquals(defaultRuleKey, archiverTypeChange);
  }

  /**
   * Asserts that archiver flags ("-foo") and ranlib flags ("-bar") passed to
   * Archive.from appear in the descriptions of the corresponding steps.
   */
  @Test
  public void flagsArePropagated() {
    BuildRuleResolver resolver = new TestActionGraphBuilder();
    BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    Archive archive =
        Archive.from(
            target,
            projectFilesystem,
            ruleFinder,
            DEFAULT_ARCHIVER,
            ImmutableList.of("-foo"),
            DEFAULT_RANLIB,
            ImmutableList.of("-bar"),
            ArchiveContents.NORMAL,
            DEFAULT_OUTPUT,
            ImmutableList.of(FakeSourcePath.of("simple.o")),
            /* cacheable */ true);
    BuildContext buildContext =
        BuildContext.builder()
            .from(FakeBuildContext.NOOP_CONTEXT)
            .setSourcePathResolver(pathResolver)
            .build();
    ImmutableList<Step> steps = archive.getBuildSteps(buildContext, new FakeBuildableContext());
    Step archiveStep = FluentIterable.from(steps).filter(ArchiveStep.class).first().get();
    assertThat(
        archiveStep.getDescription(TestExecutionContext.newInstance()), containsString("-foo"));
    Step ranlibStep = FluentIterable.from(steps).filter(RanlibStep.class).first().get();
    assertThat(
        ranlibStep.getDescription(TestExecutionContext.newInstance()), containsString("-bar"));
  }

  /**
   * Asserts that rules producing an archive's inputs (here two genrules)
   * become build dependencies of the archive rule.
   */
  @Test
  public void testThatBuildTargetSourcePathDepsAndPathsArePropagated() {
    ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
    BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();

    // Create a couple of genrules to generate inputs for an archive rule.
    Genrule genrule1 =
        GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:genrule"))
            .setOut("foo/bar.o")
            .build(graphBuilder);
    Genrule genrule2 =
        GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:genrule2"))
            .setOut("foo/test.o")
            .build(graphBuilder);

    // Build the archive using a normal input the outputs of the genrules above.
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(graphBuilder);
    Archive archive =
        Archive.from(
            target,
            projectFilesystem,
            ruleFinder,
            DEFAULT_ARCHIVER,
            ImmutableList.of(),
            DEFAULT_RANLIB,
            ImmutableList.of(),
            ArchiveContents.NORMAL,
            DEFAULT_OUTPUT,
            ImmutableList.of(
                FakeSourcePath.of("simple.o"),
                genrule1.getSourcePathToOutput(),
                genrule2.getSourcePathToOutput()),
            /* cacheable */ true);

    // Verify that the archive dependencies include the genrules providing the
    // SourcePath inputs.
    assertEquals(ImmutableSortedSet.<BuildRule>of(genrule1, genrule2), archive.getBuildDeps());
  }
}
| |
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright (c) 2013-2016 Chukong Technologies Inc.
Copyright (c) 2017-2018 Xiamen Yaji Software Co., Ltd.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
package org.cocos2dx.lib;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.media.AudioManager;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.os.IBinder;
import android.os.ParcelFileDescriptor;
import android.os.Vibrator;
import android.preference.PreferenceManager.OnActivityResultListener;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Display;
import android.view.WindowManager;

import com.android.vending.expansion.zipfile.APKExpansionSupport;
import com.android.vending.expansion.zipfile.ZipResourceFile;
import com.enhance.gameservice.IGameTuningService;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
public class Cocos2dxHelper {
    // ===========================================================
    // Constants
    // ===========================================================
    private static final String PREFS_NAME = "Cocos2dxPrefsFile";
    private static final int RUNNABLES_PER_FRAME = 5;
    private static final String TAG = Cocos2dxHelper.class.getSimpleName();

    // ===========================================================
    // Fields
    // ===========================================================
    private static Cocos2dxMusic sCocos2dMusic;
    private static Cocos2dxSound sCocos2dSound = null;
    private static AssetManager sAssetManager;
    private static Cocos2dxAccelerometer sCocos2dxAccelerometer = null;
    private static boolean sAccelerometerEnabled;
    private static boolean sCompassEnabled;
    // True while the activity is between onResume() and onPause().
    private static boolean sActivityVisible;
    private static String sPackageName;
    private static Activity sActivity = null;
    private static Cocos2dxHelperListener sCocos2dxHelperListener;
    // Insertion-ordered so listeners are notified in registration order.
    private static Set<OnActivityResultListener> onActivityResultListeners = new LinkedHashSet<OnActivityResultListener>();
    private static Vibrator sVibrateService = null;
    //Enhance API modification begin
    private static IGameTuningService mGameServiceBinder = null;
    // Boost duration passed to fastLoading() — presumably seconds; TODO confirm.
    private static final int BOOST_TIME = 7;
    //Enhance API modification end
    // The absolute path to the OBB if it exists, else the absolute path to the APK.
    private static String sAssetsPath = "";
    // The OBB file
    private static ZipResourceFile sOBBFile = null;
    // ===========================================================
    // Constructors
    // ===========================================================
    /** Schedules {@code r} to run on the GL thread of the hosting Cocos2dxActivity. */
    public static void runOnGLThread(final Runnable r) {
        ((Cocos2dxActivity)sActivity).runOnGLThread(r);
    }
private static boolean sInited = false;
public static void init(final Activity activity) {
sActivity = activity;
Cocos2dxHelper.sCocos2dxHelperListener = (Cocos2dxHelperListener)activity;
if (!sInited) {
PackageManager pm = activity.getPackageManager();
boolean isSupportLowLatency = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
Log.d(TAG, "isSupportLowLatency:" + isSupportLowLatency);
int sampleRate = 44100;
int bufferSizeInFrames = 192;
if (Build.VERSION.SDK_INT >= 17) {
AudioManager am = (AudioManager) activity.getSystemService(Context.AUDIO_SERVICE);
// use reflection to remove dependence of API 17 when compiling
// AudioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
final Class audioManagerClass = AudioManager.class;
Object[] parameters = new Object[]{Cocos2dxReflectionHelper.<String>getConstantValue(audioManagerClass, "PROPERTY_OUTPUT_SAMPLE_RATE")};
final String strSampleRate = Cocos2dxReflectionHelper.<String>invokeInstanceMethod(am, "getProperty", new Class[]{String.class}, parameters);
// AudioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
parameters = new Object[]{Cocos2dxReflectionHelper.<String>getConstantValue(audioManagerClass, "PROPERTY_OUTPUT_FRAMES_PER_BUFFER")};
final String strBufferSizeInFrames = Cocos2dxReflectionHelper.<String>invokeInstanceMethod(am, "getProperty", new Class[]{String.class}, parameters);
try {
sampleRate = Integer.parseInt(strSampleRate);
bufferSizeInFrames = Integer.parseInt(strBufferSizeInFrames);
} catch (NumberFormatException e) {
Log.e(TAG, "parseInt failed", e);
}
Log.d(TAG, "sampleRate: " + sampleRate + ", framesPerBuffer: " + bufferSizeInFrames);
} else {
Log.d(TAG, "android version is lower than 17");
}
nativeSetAudioDeviceInfo(isSupportLowLatency, sampleRate, bufferSizeInFrames);
final ApplicationInfo applicationInfo = activity.getApplicationInfo();
Cocos2dxHelper.sPackageName = applicationInfo.packageName;
Cocos2dxHelper.sCocos2dMusic = new Cocos2dxMusic(activity);
Cocos2dxHelper.sAssetManager = activity.getAssets();
Cocos2dxHelper.nativeSetContext((Context)activity, Cocos2dxHelper.sAssetManager);
Cocos2dxBitmap.setContext(activity);
Cocos2dxHelper.sVibrateService = (Vibrator)activity.getSystemService(Context.VIBRATOR_SERVICE);
sInited = true;
//Enhance API modification begin
Intent serviceIntent = new Intent(IGameTuningService.class.getName());
serviceIntent.setPackage("com.enhance.gameservice");
boolean suc = activity.getApplicationContext().bindService(serviceIntent, connection, Context.BIND_AUTO_CREATE);
//Enhance API modification end
}
}
// This function returns the absolute path to the OBB if it exists,
// else it returns the absolute path to the APK.
public static String getAssetsPath()
{
if (Cocos2dxHelper.sAssetsPath.equals("")) {
String pathToOBB = Environment.getExternalStorageDirectory().getAbsolutePath() + "/Android/obb/" + Cocos2dxHelper.sPackageName;
// Listing all files inside the folder (pathToOBB) where OBB files are expected to be found.
String[] fileNames = new File(pathToOBB).list(new FilenameFilter() { // Using filter to pick up only main OBB file name.
public boolean accept(File dir, String name) {
return name.startsWith("main.") && name.endsWith(".obb"); // It's possible to filter only by extension here to get path to patch OBB file also.
}
});
String fullPathToOBB = "";
if (fileNames != null && fileNames.length > 0) // If there is at least 1 element inside the array with OBB file names, then we may think fileNames[0] will have desired main OBB file name.
fullPathToOBB = pathToOBB + "/" + fileNames[0]; // Composing full file name for main OBB file.
File obbFile = new File(fullPathToOBB);
if (obbFile.exists())
Cocos2dxHelper.sAssetsPath = fullPathToOBB;
else
Cocos2dxHelper.sAssetsPath = Cocos2dxHelper.sActivity.getApplicationInfo().sourceDir;
}
return Cocos2dxHelper.sAssetsPath;
}
    /**
     * Lazily opens and caches the APK expansion (OBB) zip for this package.
     * Assumes the OBB version matches the package versionCode (falls back to
     * version 1 when the package info cannot be read) — TODO confirm.
     */
    public static ZipResourceFile getObbFile() {
        if (null == sOBBFile) {
            int versionCode = 1;
            try {
                versionCode = Cocos2dxActivity.getContext().getPackageManager().getPackageInfo(Cocos2dxHelper.getCocos2dxPackageName(), 0).versionCode;
            } catch (NameNotFoundException e) {
                e.printStackTrace();
            }
            try {
                sOBBFile = APKExpansionSupport.getAPKExpansionZipFile(Cocos2dxActivity.getContext(), versionCode, 0);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return sOBBFile;
    }
    //Enhance API modification begin
    // Connection to the optional game-tuning service. On connect the binder is
    // cached and fastLoading(BOOST_TIME) is invoked (presumably requesting a
    // temporary performance boost — confirm against the service contract).
    private static ServiceConnection connection = new ServiceConnection() {
        public void onServiceConnected(ComponentName name, IBinder service) {
            mGameServiceBinder = IGameTuningService.Stub.asInterface(service);
            fastLoading(BOOST_TIME);
        }

        public void onServiceDisconnected(ComponentName name) {
            sActivity.getApplicationContext().unbindService(connection);
        }
    };
    //Enhance API modification end
    public static Activity getActivity() {
        return sActivity;
    }

    /** Registers a listener to receive forwarded onActivityResult() callbacks. */
    public static void addOnActivityResultListener(OnActivityResultListener listener) {
        onActivityResultListeners.add(listener);
    }

    /** @return the live (mutable) set of registered activity-result listeners. */
    public static Set<OnActivityResultListener> getOnActivityResultListeners() {
        return onActivityResultListeners;
    }

    /** @return true while the activity is resumed (between onResume and onPause). */
    public static boolean isActivityVisible(){
        return sActivityVisible;
    }
    // ===========================================================
    // Getter & Setter
    // ===========================================================
    // ===========================================================
    // Methods for/from SuperClass/Interfaces
    // ===========================================================
    // ===========================================================
    // Methods
    // ===========================================================
    // Native bridge into the C++ engine runtime.
    private static native void nativeSetEditTextDialogResult(final byte[] pBytes);
    private static native void nativeSetContext(final Context pContext, final AssetManager pAssetManager);
    private static native void nativeSetAudioDeviceInfo(boolean isSupportLowLatency, int deviceSampleRate, int audioBufferSizeInFames);
    public static String getCocos2dxPackageName() {
        return Cocos2dxHelper.sPackageName;
    }

    /** @return the app-private files directory used as the engine's writable path. */
    public static String getCocos2dxWritablePath() {
        return sActivity.getFilesDir().getAbsolutePath();
    }

    /** @return the ISO 639 language code of the default locale, e.g. "en". */
    public static String getCurrentLanguage() {
        return Locale.getDefault().getLanguage();
    }

    public static String getDeviceModel(){
        return Build.MODEL;
    }

    public static AssetManager getAssetManager() {
        return Cocos2dxHelper.sAssetManager;
    }
    // The sAccelerometerEnabled/sCompassEnabled flags are remembered so the
    // sensors can be re-enabled in onResume() after being stopped in onPause().
    public static void enableAccelerometer() {
        Cocos2dxHelper.sAccelerometerEnabled = true;
        Cocos2dxHelper.getAccelerometer().enableAccel();
    }

    public static void enableCompass() {
        Cocos2dxHelper.sCompassEnabled = true;
        Cocos2dxHelper.getAccelerometer().enableCompass();
    }

    /** @param interval accelerometer sampling interval — presumably seconds; confirm with Cocos2dxAccelerometer. */
    public static void setAccelerometerInterval(float interval) {
        Cocos2dxHelper.getAccelerometer().setInterval(interval);
    }

    public static void disableAccelerometer() {
        Cocos2dxHelper.sAccelerometerEnabled = false;
        Cocos2dxHelper.getAccelerometer().disable();
    }

    public static void setKeepScreenOn(boolean value) {
        ((Cocos2dxActivity)sActivity).setKeepScreenOn(value);
    }

    /** @param duration vibration time in seconds (converted to ms for the service). */
    public static void vibrate(float duration) {
        sVibrateService.vibrate((long)(duration * 1000));
    }
public static String getVersion() {
try {
String version = Cocos2dxActivity.getContext().getPackageManager().getPackageInfo(Cocos2dxActivity.getContext().getPackageName(), 0).versionName;
return version;
} catch(Exception e) {
return "";
}
}
public static boolean openURL(String url) {
boolean ret = false;
try {
Intent i = new Intent(Intent.ACTION_VIEW);
i.setData(Uri.parse(url));
sActivity.startActivity(i);
ret = true;
} catch (Exception e) {
}
return ret;
}
    /**
     * Looks up {@code path} inside the OBB and returns {fd, startOffset, length}
     * for the native side; all three stay 0 when the OBB, the entry, or the raw
     * descriptor is unavailable. The fd is fetched reflectively because
     * ParcelFileDescriptor.getFd() only exists from API level 12 on.
     */
    public static long[] getObbAssetFileDescriptor(final String path) {
        long[] array = new long[3];
        if (Cocos2dxHelper.getObbFile() != null) {
            AssetFileDescriptor descriptor = Cocos2dxHelper.getObbFile().getAssetFileDescriptor(path);
            if (descriptor != null) {
                try {
                    ParcelFileDescriptor parcel = descriptor.getParcelFileDescriptor();
                    Method method = parcel.getClass().getMethod("getFd", new Class[] {});
                    array[0] = (Integer)method.invoke(parcel);
                    array[1] = descriptor.getStartOffset();
                    array[2] = descriptor.getLength();
                } catch (NoSuchMethodException e) {
                    Log.e(Cocos2dxHelper.TAG, "Accessing file descriptor directly from the OBB is only supported from Android 3.1 (API level 12) and above.");
                } catch (IllegalAccessException e) {
                    Log.e(Cocos2dxHelper.TAG, e.toString());
                } catch (InvocationTargetException e) {
                    Log.e(Cocos2dxHelper.TAG, e.toString());
                }
            }
        }
        return array;
    }
    // ---- Background music: thin delegations to the Cocos2dxMusic instance. ----
    public static void preloadBackgroundMusic(final String pPath) {
        Cocos2dxHelper.sCocos2dMusic.preloadBackgroundMusic(pPath);
    }

    public static void playBackgroundMusic(final String pPath, final boolean isLoop) {
        Cocos2dxHelper.sCocos2dMusic.playBackgroundMusic(pPath, isLoop);
    }

    public static void resumeBackgroundMusic() {
        Cocos2dxHelper.sCocos2dMusic.resumeBackgroundMusic();
    }

    public static void pauseBackgroundMusic() {
        Cocos2dxHelper.sCocos2dMusic.pauseBackgroundMusic();
    }

    public static void stopBackgroundMusic() {
        Cocos2dxHelper.sCocos2dMusic.stopBackgroundMusic();
    }

    public static void rewindBackgroundMusic() {
        Cocos2dxHelper.sCocos2dMusic.rewindBackgroundMusic();
    }

    public static boolean willPlayBackgroundMusic() {
        return Cocos2dxHelper.sCocos2dMusic.willPlayBackgroundMusic();
    }

    public static boolean isBackgroundMusicPlaying() {
        return Cocos2dxHelper.sCocos2dMusic.isBackgroundMusicPlaying();
    }

    public static float getBackgroundMusicVolume() {
        return Cocos2dxHelper.sCocos2dMusic.getBackgroundVolume();
    }

    public static void setBackgroundMusicVolume(final float volume) {
        Cocos2dxHelper.sCocos2dMusic.setBackgroundVolume(volume);
    }

    // ---- Sound effects: thin delegations to the Cocos2dxSound instance
    // obtained via getSound() (defined elsewhere in this class). ----
    public static void preloadEffect(final String path) {
        Cocos2dxHelper.getSound().preloadEffect(path);
    }

    public static int playEffect(final String path, final boolean isLoop, final float pitch, final float pan, final float gain) {
        return Cocos2dxHelper.getSound().playEffect(path, isLoop, pitch, pan, gain);
    }

    public static void resumeEffect(final int soundId) {
        Cocos2dxHelper.getSound().resumeEffect(soundId);
    }

    public static void pauseEffect(final int soundId) {
        Cocos2dxHelper.getSound().pauseEffect(soundId);
    }

    public static void stopEffect(final int soundId) {
        Cocos2dxHelper.getSound().stopEffect(soundId);
    }

    public static float getEffectsVolume() {
        return Cocos2dxHelper.getSound().getEffectsVolume();
    }

    public static void setEffectsVolume(final float volume) {
        Cocos2dxHelper.getSound().setEffectsVolume(volume);
    }

    public static void unloadEffect(final String path) {
        Cocos2dxHelper.getSound().unloadEffect(path);
    }

    public static void pauseAllEffects() {
        Cocos2dxHelper.getSound().pauseAllEffects();
    }

    public static void resumeAllEffects() {
        Cocos2dxHelper.getSound().resumeAllEffects();
    }

    public static void stopAllEffects() {
        Cocos2dxHelper.getSound().stopAllEffects();
    }
static void setAudioFocus(boolean isAudioFocus) {
sCocos2dMusic.setAudioFocus(isAudioFocus);
getSound().setAudioFocus(isAudioFocus);
}
public static void end() {
Cocos2dxHelper.sCocos2dMusic.end();
Cocos2dxHelper.getSound().end();
}
/** Marks the activity visible and re-enables any sensors that were active. */
public static void onResume() {
    sActivityVisible = true;
    if (sAccelerometerEnabled) {
        getAccelerometer().enableAccel();
    }
    if (sCompassEnabled) {
        getAccelerometer().enableCompass();
    }
}

/** Marks the activity hidden and stops sensor updates. */
public static void onPause() {
    sActivityVisible = false;
    // NOTE(review): only the accelerometer flag is checked here; a compass
    // enabled without the accelerometer appears to stay registered — confirm
    // that disable() covers both sensors.
    if (sAccelerometerEnabled) {
        getAccelerometer().disable();
    }
}

/** Notifies both audio backends that the app moved to the background. */
public static void onEnterBackground() {
    getSound().onEnterBackground();
    sCocos2dMusic.onEnterBackground();
}

/** Notifies both audio backends that the app returned to the foreground. */
public static void onEnterForeground() {
    getSound().onEnterForeground();
    sCocos2dMusic.onEnterForeground();
}
/** Immediately kills the hosting OS process. */
public static void terminateProcess() {
    final int pid = android.os.Process.myPid();
    android.os.Process.killProcess(pid);
}

/** Forwards a dialog request to the registered helper listener. */
private static void showDialog(final String pTitle, final String pMessage) {
    sCocos2dxHelperListener.showDialog(pTitle, pMessage);
}
/**
 * Hands the text entered in the native edit dialog to the native layer,
 * encoded as UTF-8 bytes, on the GL thread.
 */
public static void setEditTextDialogResult(final String pResult) {
    try {
        // "UTF8" is a standard alias for UTF-8, which every JVM must support.
        final byte[] utf8Bytes = pResult.getBytes("UTF8");
        sCocos2dxHelperListener.runOnGLThread(new Runnable() {
            @Override
            public void run() {
                nativeSetEditTextDialogResult(utf8Bytes);
            }
        });
    } catch (UnsupportedEncodingException ignored) {
        // UTF-8 support is mandatory, so this branch is effectively unreachable.
    }
}
/**
 * Computes an approximate screen density in dots-per-inch.
 *
 * @return the display density scaled by 160, or -1 when no activity,
 *         window manager, or display is available.
 */
public static int getDPI()
{
    if (sActivity == null) {
        return -1;
    }
    WindowManager wm = sActivity.getWindowManager();
    if (wm == null) {
        return -1;
    }
    Display display = wm.getDefaultDisplay();
    if (display == null) {
        return -1;
    }
    DisplayMetrics metrics = new DisplayMetrics();
    display.getMetrics(metrics);
    return (int) (metrics.density * 160.0f);
}
// ===========================================================
// Functions for CCUserDefault
// ===========================================================
/**
 * Reads a boolean preference, coercing values that were stored under a
 * different type (String/Integer/Float) instead of failing.
 *
 * @param key          preference key
 * @param defaultValue returned when the key is absent or cannot be coerced
 */
public static boolean getBoolForKey(String key, boolean defaultValue) {
    SharedPreferences settings = sActivity.getSharedPreferences(Cocos2dxHelper.PREFS_NAME, 0);
    try {
        return settings.getBoolean(key, defaultValue);
    } catch (Exception ex) {
        // The value exists but was stored under another type; coerce it.
        ex.printStackTrace();
        Object value = settings.getAll().get(key);
        if (value instanceof String) {
            return Boolean.parseBoolean(value.toString());
        } else if (value instanceof Integer) {
            return ((Integer) value).intValue() != 0;
        } else if (value instanceof Float) {
            return ((Float) value).floatValue() != 0.0f;
        }
    }
    return defaultValue;
}
/**
 * Reads an integer preference, coercing values that were stored under a
 * different type (String/Float/Boolean) instead of failing.
 *
 * @param key          preference key
 * @param defaultValue returned when the key is absent or cannot be coerced
 */
public static int getIntegerForKey(String key, int defaultValue) {
    SharedPreferences settings = sActivity.getSharedPreferences(Cocos2dxHelper.PREFS_NAME, 0);
    try {
        return settings.getInt(key, defaultValue);
    } catch (Exception ex) {
        // The value exists but was stored under another type; coerce it.
        ex.printStackTrace();
        Object value = settings.getAll().get(key);
        if (value instanceof String) {
            return Integer.parseInt(value.toString());
        } else if (value instanceof Float) {
            return ((Float) value).intValue();
        } else if (value instanceof Boolean) {
            // Fix: a stored `false` used to fall through and return defaultValue;
            // booleans now map symmetrically onto 1/0 (matching getBoolForKey).
            return ((Boolean) value).booleanValue() ? 1 : 0;
        }
    }
    return defaultValue;
}
/**
 * Reads a float preference, coercing values that were stored under a
 * different type (String/Integer/Boolean) instead of failing.
 *
 * @param key          preference key
 * @param defaultValue returned when the key is absent or cannot be coerced
 */
public static float getFloatForKey(String key, float defaultValue) {
    SharedPreferences settings = sActivity.getSharedPreferences(Cocos2dxHelper.PREFS_NAME, 0);
    try {
        return settings.getFloat(key, defaultValue);
    } catch (Exception ex) {
        // The value exists but was stored under another type; coerce it.
        ex.printStackTrace();
        Object value = settings.getAll().get(key);
        if (value instanceof String) {
            return Float.parseFloat(value.toString());
        } else if (value instanceof Integer) {
            return ((Integer) value).floatValue();
        } else if (value instanceof Boolean) {
            // Fix: a stored `false` used to fall through and return defaultValue;
            // booleans now map symmetrically onto 1.0f/0.0f (matching getBoolForKey).
            return ((Boolean) value).booleanValue() ? 1.0f : 0.0f;
        }
    }
    return defaultValue;
}
/**
 * Reads a double-valued preference.
 * <p>SharedPreferences has no double type, so the value is stored and read
 * back as a float; precision beyond float is lost.</p>
 */
public static double getDoubleForKey(String key, double defaultValue) {
    // SharedPreferences doesn't support saving double value
    return getFloatForKey(key, (float) defaultValue);
}
/**
 * Reads a string preference, falling back to the string form of a value
 * stored under another type.
 *
 * @param key          preference key
 * @param defaultValue returned when the key is absent
 */
public static String getStringForKey(String key, String defaultValue) {
    SharedPreferences settings = sActivity.getSharedPreferences(Cocos2dxHelper.PREFS_NAME, 0);
    try {
        return settings.getString(key, defaultValue);
    } catch (Exception ex) {
        // The value exists but was stored under another type.
        ex.printStackTrace();
        Object value = settings.getAll().get(key);
        // Fix: previously `getAll().get(key).toString()` threw a NullPointerException
        // when the key was missing; now the default is returned instead.
        return value != null ? value.toString() : defaultValue;
    }
}
/** Stores a boolean preference (persisted asynchronously via apply()). */
public static void setBoolForKey(String key, boolean value) {
    sActivity.getSharedPreferences(PREFS_NAME, 0).edit().putBoolean(key, value).apply();
}

/** Stores an integer preference (persisted asynchronously via apply()). */
public static void setIntegerForKey(String key, int value) {
    sActivity.getSharedPreferences(PREFS_NAME, 0).edit().putInt(key, value).apply();
}

/** Stores a float preference (persisted asynchronously via apply()). */
public static void setFloatForKey(String key, float value) {
    sActivity.getSharedPreferences(PREFS_NAME, 0).edit().putFloat(key, value).apply();
}

/**
 * Stores a double preference. SharedPreferences cannot record doubles, so the
 * value is narrowed to float before being written.
 */
public static void setDoubleForKey(String key, double value) {
    sActivity.getSharedPreferences(PREFS_NAME, 0).edit().putFloat(key, (float) value).apply();
}

/** Stores a string preference (persisted asynchronously via apply()). */
public static void setStringForKey(String key, String value) {
    sActivity.getSharedPreferences(PREFS_NAME, 0).edit().putString(key, value).apply();
}

/** Removes a stored preference value. */
public static void deleteValueForKey(String key) {
    sActivity.getSharedPreferences(PREFS_NAME, 0).edit().remove(key).apply();
}
/**
 * Re-encodes a byte buffer from one charset to another.
 *
 * @return the re-encoded bytes, or null when either charset name is unsupported.
 */
public static byte[] conversionEncoding(byte[] text, String fromCharset, String newCharset)
{
    try {
        return new String(text, fromCharset).getBytes(newCharset);
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
        return null;
    }
}
// ===========================================================
// Inner and Anonymous Classes
// ===========================================================
/**
 * Callback surface implemented by the embedding component (presumably the
 * hosting activity — confirm) so helper code can show native dialogs and
 * schedule work on the GL thread.
 */
public static interface Cocos2dxHelperListener {
    // Shows a native dialog with the given title and message.
    public void showDialog(final String pTitle, final String pMessage);
    // Runs the given task on the GL (rendering) thread.
    public void runOnGLThread(final Runnable pRunnable);
}
//Enhance API modification begin

/** Asks the game service to render at the given resolution percentage; -1 when unavailable or on error. */
public static int setResolutionPercent(int per) {
    if (mGameServiceBinder == null) {
        return -1;
    }
    try {
        return mGameServiceBinder.setPreferredResolution(per);
    } catch (Exception e) {
        e.printStackTrace();
        return -1;
    }
}

/** Asks the game service to target the given frame rate; -1 when unavailable or on error. */
public static int setFPS(int fps) {
    if (mGameServiceBinder == null) {
        return -1;
    }
    try {
        return mGameServiceBinder.setFramePerSecond(fps);
    } catch (Exception e) {
        e.printStackTrace();
        return -1;
    }
}

/** Requests a loading boost (boostUp) for the given number of seconds; -1 when unavailable or on error. */
public static int fastLoading(int sec) {
    if (mGameServiceBinder == null) {
        return -1;
    }
    try {
        return mGameServiceBinder.boostUp(sec);
    } catch (Exception e) {
        e.printStackTrace();
        return -1;
    }
}

/** @return the service's abstract temperature reading, or -1 when unavailable or on error. */
public static int getTemperature() {
    if (mGameServiceBinder == null) {
        return -1;
    }
    try {
        return mGameServiceBinder.getAbstractTemperature();
    } catch (Exception e) {
        e.printStackTrace();
        return -1;
    }
}

/** Toggles the game power-saving mode; -1 when unavailable or on error. */
public static int setLowPowerMode(boolean enable) {
    if (mGameServiceBinder == null) {
        return -1;
    }
    try {
        return mGameServiceBinder.setGamePowerSaving(enable);
    } catch (Exception e) {
        e.printStackTrace();
        return -1;
    }
}
//Enhance API modification end
/** @return the accelerometer sample array; the internal array is returned directly, not copied. */
public static float[] getAccelValue() {
    return getAccelerometer().accelerometerValues;
}

/** @return the compass (magnetic field) sample array; returned directly, not copied. */
public static float[] getCompassValue() {
    return getAccelerometer().compassFieldValues;
}

/** @return the Android SDK version of the running device. */
public static int getSDKVersion() {
    return Build.VERSION.SDK_INT;
}
/**
 * Lazily creates the shared accelerometer wrapper.
 * NOTE(review): initialization is not synchronized — assumed to be reached
 * from a single thread; confirm.
 */
private static Cocos2dxAccelerometer getAccelerometer() {
    if (sCocos2dxAccelerometer == null) {
        sCocos2dxAccelerometer = new Cocos2dxAccelerometer(sActivity);
    }
    return sCocos2dxAccelerometer;
}

/** Lazily creates the shared sound backend (same single-thread assumption as above). */
private static Cocos2dxSound getSound() {
    if (sCocos2dSound == null) {
        sCocos2dSound = new Cocos2dxSound(sActivity);
    }
    return sCocos2dSound;
}
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cache.impl;
import com.hazelcast.cache.CacheEventType;
import com.hazelcast.cache.ICache;
import com.hazelcast.core.ManagedContext;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
import com.hazelcast.spi.EventRegistration;
import com.hazelcast.spi.ListenerWrapperEventFilter;
import com.hazelcast.spi.NotifiableEventListener;
import com.hazelcast.spi.serialization.SerializationService;
import javax.cache.configuration.CacheEntryListenerConfiguration;
import javax.cache.configuration.Factory;
import javax.cache.event.CacheEntryCreatedListener;
import javax.cache.event.CacheEntryEvent;
import javax.cache.event.CacheEntryEventFilter;
import javax.cache.event.CacheEntryExpiredListener;
import javax.cache.event.CacheEntryListener;
import javax.cache.event.CacheEntryRemovedListener;
import javax.cache.event.CacheEntryUpdatedListener;
import javax.cache.event.EventType;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
/**
* This implementation of {@link CacheEventListener} uses the adapter pattern for wrapping all cache event listener
* types into a single listener.
* <p>JCache has multiple {@link CacheEntryListener} sub-interfaces for each event type. This adapter
* implementation delegates to the correct subtype using the event type.</p>
 *
* <p>Another responsibility of this implementation is filtering events by using the already configured
* event filters.</p>
*
* @param <K> the type of key.
* @param <V> the type of value.
* @see javax.cache.event.CacheEntryCreatedListener
* @see javax.cache.event.CacheEntryUpdatedListener
* @see javax.cache.event.CacheEntryRemovedListener
* @see javax.cache.event.CacheEntryExpiredListener
* @see javax.cache.event.CacheEntryEventFilter
*/
public class CacheEventListenerAdaptor<K, V>
        implements CacheEventListener,
        CacheEntryListenerProvider<K, V>,
        NotifiableEventListener<CacheService>,
        ListenerWrapperEventFilter,
        IdentifiedDataSerializable {

    // all fields are effectively final

    // The user-supplied JCache listener wrapped by this adapter.
    private transient CacheEntryListener<K, V> cacheEntryListener;
    // Sub-interface views of cacheEntryListener, resolved once in the constructor;
    // a field stays null when the wrapped listener does not implement that
    // event-type interface, which turns dispatch into a simple null check.
    private transient CacheEntryCreatedListener cacheEntryCreatedListener;
    private transient CacheEntryRemovedListener cacheEntryRemovedListener;
    private transient CacheEntryUpdatedListener cacheEntryUpdatedListener;
    private transient CacheEntryExpiredListener cacheEntryExpiredListener;
    // Optional user-configured event filter; null means every event is delivered.
    private transient CacheEntryEventFilter<? super K, ? super V> filter;
    // Mirrors CacheEntryListenerConfiguration.isOldValueRequired().
    private boolean isOldValueRequired;
    private transient SerializationService serializationService;
    // The cache the events originate from; handleEvent/handle cast it to
    // CacheSyncListenerCompleter to count down completion latches.
    private transient ICache<K, V> source;

    // No-arg constructor required by the IdentifiedDataSerializable machinery.
    public CacheEventListenerAdaptor() {
    }

    /**
     * Creates the adapter: instantiates the user listener and filter through
     * their configured factories, runs them through the ManagedContext, and
     * pre-resolves the per-event-type listener views.
     */
    public CacheEventListenerAdaptor(ICache<K, V> source,
                                     CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration,
                                     SerializationService serializationService) {
        this.source = source;
        this.serializationService = serializationService;
        this.cacheEntryListener = createCacheEntryListener(cacheEntryListenerConfiguration);
        if (cacheEntryListener instanceof CacheEntryCreatedListener) {
            this.cacheEntryCreatedListener = (CacheEntryCreatedListener) cacheEntryListener;
        } else {
            this.cacheEntryCreatedListener = null;
        }
        if (cacheEntryListener instanceof CacheEntryRemovedListener) {
            this.cacheEntryRemovedListener = (CacheEntryRemovedListener) cacheEntryListener;
        } else {
            this.cacheEntryRemovedListener = null;
        }
        if (cacheEntryListener instanceof CacheEntryUpdatedListener) {
            this.cacheEntryUpdatedListener = (CacheEntryUpdatedListener) cacheEntryListener;
        } else {
            this.cacheEntryUpdatedListener = null;
        }
        if (cacheEntryListener instanceof CacheEntryExpiredListener) {
            this.cacheEntryExpiredListener = (CacheEntryExpiredListener) cacheEntryListener;
        } else {
            this.cacheEntryExpiredListener = null;
        }
        injectDependencies(cacheEntryListener);
        Factory<CacheEntryEventFilter<? super K, ? super V>> filterFactory =
                cacheEntryListenerConfiguration.getCacheEntryEventFilterFactory();
        if (filterFactory != null) {
            this.filter = filterFactory.create();
        } else {
            this.filter = null;
        }
        // NOTE(review): invoked even when filter is null — assumes
        // ManagedContext.initialize tolerates null arguments; confirm.
        injectDependencies(filter);
        this.isOldValueRequired = cacheEntryListenerConfiguration.isOldValueRequired();
    }

    // Instantiates the user listener via its configured factory
    // (the factory itself is initialized through the ManagedContext first).
    private CacheEntryListener<K, V> createCacheEntryListener(
            CacheEntryListenerConfiguration<K, V> cacheEntryListenerConfiguration) {
        Factory<CacheEntryListener<? super K, ? super V>> cacheEntryListenerFactory =
                cacheEntryListenerConfiguration.getCacheEntryListenerFactory();
        injectDependencies(cacheEntryListenerFactory);
        return (CacheEntryListener<K, V>) cacheEntryListenerFactory.create();
    }

    // Hands the object to the serialization service's ManagedContext for initialization.
    private void injectDependencies(Object obj) {
        ManagedContext managedContext = serializationService.getManagedContext();
        managedContext.initialize(obj);
    }

    @Override
    public CacheEntryListener<K, V> getCacheEntryListener() {
        return cacheEntryListener;
    }

    /**
     * Entry point invoked with a batch of cache events. COMPLETED batches carry
     * no entry events to dispatch; in every case the source cache's completion
     * latch is counted down in a finally block so synchronous-listener callers
     * can proceed even if a user listener throws.
     */
    @Override
    public void handleEvent(Object eventObject) {
        if (eventObject instanceof CacheEventSet) {
            CacheEventSet cacheEventSet = (CacheEventSet) eventObject;
            try {
                if (cacheEventSet.getEventType() != CacheEventType.COMPLETED) {
                    handleEvent(cacheEventSet.getEventType().getType(), cacheEventSet.getEvents());
                }
            } finally {
                ((CacheSyncListenerCompleter) source).countDownCompletionLatch(cacheEventSet.getCompletionId());
            }
        }
    }

    // Materializes the events and dispatches them to the matching
    // per-event-type listener view, if the user listener implements it.
    private void handleEvent(int type, Collection<CacheEventData> keys) {
        final Iterable<CacheEntryEvent<? extends K, ? extends V>> cacheEntryEvent = createCacheEntryEvent(keys);
        CacheEventType eventType = CacheEventType.getByType(type);
        switch (eventType) {
            case CREATED:
                if (this.cacheEntryCreatedListener != null) {
                    this.cacheEntryCreatedListener.onCreated(cacheEntryEvent);
                }
                break;
            case UPDATED:
                if (this.cacheEntryUpdatedListener != null) {
                    this.cacheEntryUpdatedListener.onUpdated(cacheEntryEvent);
                }
                break;
            case REMOVED:
                if (this.cacheEntryRemovedListener != null) {
                    this.cacheEntryRemovedListener.onRemoved(cacheEntryEvent);
                }
                break;
            case EXPIRED:
                if (this.cacheEntryExpiredListener != null) {
                    this.cacheEntryExpiredListener.onExpired(cacheEntryEvent);
                }
                break;
            default:
                throw new IllegalArgumentException("Invalid event type: " + eventType.name());
        }
    }

    /**
     * Deserializes the raw event data into CacheEntryEventImpl instances,
     * applying the configured filter (if any). Old/new values are populated
     * according to isOldValueRequired and the event type.
     */
    private Iterable<CacheEntryEvent<? extends K, ? extends V>> createCacheEntryEvent(Collection<CacheEventData> keys) {
        HashSet<CacheEntryEvent<? extends K, ? extends V>> evt = new HashSet<CacheEntryEvent<? extends K, ? extends V>>();
        for (CacheEventData cacheEventData : keys) {
            EventType eventType = CacheEventType.convertToEventType(cacheEventData.getCacheEventType());
            K key = toObject(cacheEventData.getDataKey());
            // REMOVED/EXPIRED events have no "new" value.
            boolean hasNewValue = !(eventType == EventType.REMOVED || eventType == EventType.EXPIRED);
            final V newValue;
            final V oldValue;
            if (isOldValueRequired) {
                if (hasNewValue) {
                    newValue = toObject(cacheEventData.getDataValue());
                    oldValue = toObject(cacheEventData.getDataOldValue());
                } else {
                    // according to contract of CacheEntryEvent#getValue
                    oldValue = toObject(cacheEventData.getDataValue());
                    newValue = oldValue;
                }
            } else {
                if (hasNewValue) {
                    newValue = toObject(cacheEventData.getDataValue());
                    oldValue = null;
                } else {
                    newValue = null;
                    oldValue = null;
                }
            }
            final CacheEntryEventImpl<K, V> event =
                    new CacheEntryEventImpl<K, V>(source, eventType, key, newValue, oldValue);
            // User-level filtering happens here, per event.
            if (filter == null || filter.evaluate(event)) {
                evt.add(event);
            }
        }
        return evt;
    }

    private <T> T toObject(Data data) {
        return serializationService.toObject(data);
    }

    /**
     * Variant used when the event type, keys and completion id arrive
     * separately; mirrors {@link #handleEvent(Object)}'s completion-latch
     * semantics (count down in finally, skip COMPLETED batches).
     */
    public void handle(int type, Collection<CacheEventData> keys, int completionId) {
        try {
            if (CacheEventType.getByType(type) != CacheEventType.COMPLETED) {
                handleEvent(type, keys);
            }
        } finally {
            ((CacheSyncListenerCompleter) source).countDownCompletionLatch(completionId);
        }
    }

    // Keeps the per-cache listener count in sync with registrations.
    @Override
    public void onRegister(CacheService cacheService, String serviceName,
                           String topic, EventRegistration registration) {
        CacheContext cacheContext = cacheService.getOrCreateCacheContext(topic);
        cacheContext.increaseCacheEntryListenerCount();
    }

    @Override
    public void onDeregister(CacheService cacheService, String serviceName,
                             String topic, EventRegistration registration) {
        CacheContext cacheContext = cacheService.getOrCreateCacheContext(topic);
        cacheContext.decreaseCacheEntryListenerCount();
    }

    // As an event filter this adapter accepts everything; the real filtering
    // is applied per event in createCacheEntryEvent.
    @Override
    public boolean eval(Object event) {
        return true;
    }

    @Override
    public Object getListener() {
        return this;
    }

    @Override
    public int getFactoryId() {
        return CacheDataSerializerHook.F_ID;
    }

    @Override
    public int getId() {
        return CacheDataSerializerHook.CACHE_EVENT_LISTENER_ADAPTOR;
    }

    // Intentionally empty: no fields are written to the wire.
    // NOTE(review): isOldValueRequired is not transient yet is not serialized
    // here — confirm this is intended.
    @Override
    public void writeData(ObjectDataOutput out)
            throws IOException {
    }

    @Override
    public void readData(ObjectDataInput in)
            throws IOException {
    }
}
| |
package net.nitrado.api.common.http;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.JsonSyntaxException;
import net.nitrado.api.common.exceptions.*;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
/**
* HttpClient that actually connects to the internet and gets the data.
*/
public class ProductionHttpClient implements HttpClient {

    // Rate-limit numbers last reported by the API via X-RateLimit-* response headers.
    private int rateLimit;
    private int rateLimitRemaining;
    private long rateLimitReset;

    /** Locale appended to every request URL; defaults to English. */
    private String locale = "en";

    /** Optional headers added to every data* request; may be null. */
    private Parameter[] additionalHeaders;

    public ProductionHttpClient() {
    }

    public ProductionHttpClient(Parameter[] additionalHeaders) {
        this.additionalHeaders = additionalHeaders;
    }

    /**
     * Performs an authenticated GET request.
     *
     * @param url         absolute request URL without query string
     * @param accessToken OAuth bearer token
     * @param parameters  query parameters; entries with a null value are skipped
     * @return the "data" sub-object of the JSON response when present, the full
     *         object otherwise, or null for an empty 2xx response
     */
    public JsonObject dataGet(String url, String accessToken, Parameter[] parameters) throws NitrapiException {
        StringBuilder fullUrl = new StringBuilder(url);
        boolean first = true;
        if (parameters != null) {
            for (Parameter parameter : parameters) {
                if (parameter.getValue() == null) {
                    continue;
                }
                fullUrl.append(first ? "?" : "&");
                fullUrl.append(parameter.getKey()).append("=");
                try {
                    fullUrl.append(URLEncoder.encode(parameter.getValue(), "UTF-8"));
                } catch (UnsupportedEncodingException e) {
                    // UTF-8 is mandatory for every JVM; unreachable.
                    e.printStackTrace();
                }
                first = false;
            }
        }
        fullUrl.append(first ? "?" : "&").append("locale=").append(locale);
        try {
            HttpURLConnection connection = (HttpURLConnection) new URL(fullUrl.toString()).openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Authorization", "Bearer " + accessToken);
            applyAdditionalHeaders(connection);
            return parseResult(readResponse(connection), connection);
        } catch (IOException e) {
            throw new NitrapiHttpException(e);
        }
    }

    public JsonObject dataPost(String url, String accessToken, Parameter[] parameters) throws NitrapiException {
        return sendWithBody("POST", url, accessToken, parameters);
    }

    public JsonObject dataPut(String url, String accessToken, Parameter[] parameters) throws NitrapiException {
        return sendWithBody("PUT", url, accessToken, parameters);
    }

    public JsonObject dataDelete(String url, String accessToken, Parameter[] parameters) throws NitrapiException {
        return sendWithBody("DELETE", url, accessToken, parameters);
    }

    /**
     * Shared implementation of POST/PUT/DELETE (previously triplicated):
     * form-encoded parameters in the body, bearer authentication, JSON result.
     */
    private JsonObject sendWithBody(String method, String url, String accessToken, Parameter[] parameters)
            throws NitrapiException {
        String params = prepareParameterString(parameters);
        url += "?locale=" + locale;
        try {
            HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
            connection.setDoOutput(true);
            connection.setRequestMethod(method);
            connection.setRequestProperty("Authorization", "Bearer " + accessToken);
            applyAdditionalHeaders(connection);
            // write body parameters
            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(connection.getOutputStream(), "UTF-8"));
            writer.write(params);
            writer.flush();
            writer.close();
            return parseResult(readResponse(connection), connection);
        } catch (IOException e) {
            throw new NitrapiHttpException(e);
        }
    }

    /** Opens an unauthenticated raw stream; the caller is responsible for closing it. */
    public InputStream rawGet(String url) throws NitrapiException {
        try {
            HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
            connection.setRequestMethod("GET");
            return connection.getInputStream();
        } catch (IOException e) {
            throw new NitrapiHttpException(e);
        }
    }

    /** POSTs a binary body authenticated with a plain "Token" header. */
    public void rawPost(String url, String token, byte[] body) throws NitrapiException {
        try {
            HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
            connection.setDoOutput(true);
            connection.setRequestMethod("POST");
            connection.setRequestProperty("Token", token);
            connection.setRequestProperty("Content-Type", "application/binary");
            OutputStream out = connection.getOutputStream();
            out.write(body);
            out.close();
            // Parse purely for its error-mapping side effect; the payload is discarded.
            parseResult(readResponse(connection), connection);
        } catch (IOException e) {
            throw new NitrapiHttpException(e);
        }
    }

    /** Copies the configured additional headers (if any) onto the connection. */
    private void applyAdditionalHeaders(HttpURLConnection connection) {
        if (additionalHeaders != null) {
            for (Parameter parameter : additionalHeaders) {
                connection.setRequestProperty(parameter.getKey(), parameter.getValue());
            }
        }
    }

    /**
     * Drains the response body into a buffer, reading the error stream for
     * non-success codes. Fix: the GET path previously used the error stream —
     * which is null on success — for any status other than exactly 200.
     */
    private StringBuffer readResponse(HttpURLConnection connection) throws IOException {
        int code = connection.getResponseCode();
        BufferedReader reader;
        if (code == 200 || code == 201) {
            reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        } else {
            reader = new BufferedReader(new InputStreamReader(connection.getErrorStream()));
        }
        StringBuffer response = new StringBuffer();
        String line;
        while ((line = reader.readLine()) != null) {
            response.append(line);
        }
        reader.close();
        return response;
    }

    /** Builds the form-encoded body string from the given parameters. */
    private String prepareParameterString(Parameter[] parameters) {
        StringBuilder params = new StringBuilder();
        if (parameters != null) {
            for (Parameter parameter : parameters) {
                addParameter(params, parameter);
            }
        }
        return params.toString();
    }

    /** Appends one parameter or, for key-less container parameters, its sub-parameters. */
    private void addParameter(StringBuilder params, Parameter parameter) {
        if (parameter.getKey() == null) {
            // Add subParameters
            for (Parameter subParameter : parameter.getSubParameters()) {
                addParameter(params, subParameter);
            }
            // Fix: previously execution fell through and, for a key-less
            // parameter carrying a value, appended a literal "null" key.
            return;
        }
        if (parameter.getValue() != null) {
            params.append("&");
            params.append(parameter.getKey());
            params.append("=");
            try {
                params.append(URLEncoder.encode(parameter.getValue(), "UTF-8"));
            } catch (UnsupportedEncodingException e) {
                // UTF-8 is mandatory for every JVM; unreachable.
                e.printStackTrace();
            }
        }
    }

    /**
     * Interprets an API response: records the rate-limit headers, unwraps the
     * "data" object on success and maps error responses onto Nitrapi exceptions.
     */
    protected JsonObject parseResult(StringBuffer response, HttpURLConnection connection) throws IOException, NitrapiException {
        // Fix: the guard used to probe "X-Rate-Limit", a header name distinct from
        // the "X-RateLimit-*" fields read below, so the limits were never recorded.
        if (connection.getHeaderField("X-RateLimit-Limit") != null) {
            rateLimit = Integer.parseInt(connection.getHeaderField("X-RateLimit-Limit"));
            rateLimitRemaining = Integer.parseInt(connection.getHeaderField("X-RateLimit-Remaining"));
            rateLimitReset = Long.parseLong(connection.getHeaderField("X-RateLimit-Reset"));
        }
        String errorId = connection.getHeaderField("X-Raven-Event-ID");
        if (response.length() == 0) {
            if (connection.getResponseCode() < 300) { // OK
                return null;
            }
            throw new NitrapiHttpException(new NitrapiErrorException("Empty result. (HTTP " + connection.getResponseCode() + ")", errorId));
        }
        JsonParser parser = new JsonParser();
        JsonObject result;
        try {
            result = (JsonObject) parser.parse(response.toString());
        } catch (JsonSyntaxException e) {
            // Surface invalid JSON as an error message instead of propagating the parse failure.
            result = new JsonObject();
            result.addProperty("message", "Invalid json: " + response.toString());
        }
        if (connection.getResponseCode() < 300) { // OK
            // Return the interesting sub-object when present.
            if (result.get("data") != null) {
                return result.get("data").getAsJsonObject();
            }
            return result;
        }
        // Throw the appropriate exception for the error code.
        String message = null;
        if (result.has("message")) {
            message = result.get("message").getAsString();
        }
        switch (connection.getResponseCode()) {
            case 401:
                if (result.has("data")) {
                    JsonObject data = result.get("data").getAsJsonObject();
                    if (data.has("error_code")
                            && data.get("error_code").getAsString().startsWith("access_token_")) {
                        throw new NitrapiAccessTokenInvalidException(message);
                    }
                }
                throw new NitrapiErrorException(message, errorId);
            case 428:
                throw new NitrapiConcurrencyException(message);
            case 503:
                throw new NitrapiMaintenanceException(message);
            default:
                throw new NitrapiErrorException(message, errorId);
        }
    }

    /** @return the request quota last reported by the API. */
    public int getRateLimit() {
        return rateLimit;
    }

    /** @return the remaining requests last reported by the API. */
    public int getRateLimitRemaining() {
        return rateLimitRemaining;
    }

    /** @return the rate-limit reset value as reported by the API (units defined by the API). */
    public long getRateLimitReset() {
        return rateLimitReset;
    }

    /** Sets the locale sent with every request. */
    public void setLanguage(String lang) {
        locale = lang;
    }

    /** @return the locale sent with every request. */
    public String getLanguage() {
        return locale;
    }
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB)
// Reference Implementation, v2.2.4
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source
// schema.
// Generated on: 2011.06.16 at 09:21:15 AM EDT
//
package x3d.model;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlIDREF;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
* <p>
* Java class for X3DNode complex type.
*
* <p>
* The following schema fragment specifies the expected content contained within
* this class.
*
* <pre>
* <complexType name="X3DNode">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{}IS" minOccurs="0"/>
* <group ref="{}ChildContentModelCore" minOccurs="0"/>
* </sequence>
* <attGroup ref="{}globalAttributes"/>
* <attGroup ref="{}DEF_USE"/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "X3DNode", propOrder = { "is", "metadataObject", "def", "use" })
@XmlSeeAlso({
// X3DFogObject.class,
// X3DPrototypeInstanceNode.class,
// WildcardNodeType.class,
// X3DParticlePhysicsModelNode.class,
// LayoutGroup.class,
// X3DLayerNode.class,
// LayerSet.class,
// DISEntityTypeMapping.class,
// X3DRigidJointNode.class,
// RigidBody.class,
// X3DNBodyCollisionSpaceNode.class,
// ViewpointGroup.class,
// ShaderPart.class,
// Contour2D.class,
X3DFontStyleNode.class,
// GeoOrigin.class,
X3DAppearanceNode.class, X3DAppearanceChildNode.class,
// X3DParticleEmitterNode.class,
X3DChildNode.class,
// X3DGeometricPropertyNode.class,
X3DGeometryNode.class,
X3DMetadataObject.class })
public abstract class X3DNode {

    // Optional "IS" child element.
    @XmlElement(name = "IS")
    private IS is;

    // Single optional metadata child; the concrete element name selects the
    // Metadata* subtype instantiated by JAXB.
    @XmlElements({
            @XmlElement(name = "MetadataDouble", type = MetadataDouble.class),
            @XmlElement(name = "MetadataFloat", type = MetadataFloat.class),
            @XmlElement(name = "MetadataInteger", type = MetadataInteger.class),
            @XmlElement(name = "MetadataSet", type = MetadataSet.class),
            @XmlElement(name = "MetadataString", type = MetadataString.class) })
    private X3DMetadataObject metadataObject;

    // The XML "class" attribute (NMTOKENS); renamed because `class` is a Java keyword.
    @XmlAttribute(name = "class")
    @XmlSchemaType(name = "NMTOKENS")
    private List<String> clazz;

    // DEF name: a document-unique XML ID other nodes may reference via USE.
    @XmlAttribute(name = "DEF")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    private String def;

    // USE reference: resolved by JAXB to the node whose DEF matches.
    @XmlAttribute(name = "USE")
    @XmlIDREF
    @XmlSchemaType(name = "IDREF")
    private X3DNode use;

    /**
     * Gets the value of the is property.
     *
     * @return possible object is {@link IS }
     *
     */
    public IS getIS() {
        return is;
    }

    /**
     * Sets the value of the is property.
     *
     * @param value
     *            allowed object is {@link IS }
     *
     */
    public void setIS(IS value) {
        this.is = value;
    }

    /**
     * Gets the value of the metadataObject property.
     *
     * @return possible object is {@link X3DMetadataObject }
     *
     */
    public X3DMetadataObject getMetadataObject() {
        return metadataObject;
    }

    /**
     * Sets the value of the metadataObject property.
     *
     * @param value
     *            allowed object is {@link X3DMetadataObject }
     *
     */
    public void setMetadataObject(X3DMetadataObject value) {
        this.metadataObject = value;
    }

    /**
     * Gets the value of the clazz property.
     *
     * <p>
     * This accessor method returns a reference to the live list, not a
     * snapshot. Therefore any modification you make to the returned list will
     * be present inside the JAXB object. This is why there is not a
     * <CODE>set</CODE> method for the clazz property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     *
     * <pre>
     * getClazz().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list {@link String }
     *
     *
     */
    public List<String> getClazz() {
        if (clazz == null) {
            clazz = new ArrayList<>();
        }
        return this.clazz;
    }

    /**
     * Gets the value of the def property.
     *
     * @return possible object is {@link String }
     *
     */
    public String getDEF() {
        return def;
    }

    /**
     * Sets the value of the def property.
     *
     * @param value
     *            allowed object is {@link String }
     *
     */
    public void setDEF(String value) {
        this.def = value;
    }

    /**
     * Gets the value of the use property.
     *
     * @return possible object is {@link X3DNode }
     *
     */
    public X3DNode getUSE() {
        return use;
    }

    /**
     * Sets the value of the use property.
     *
     * @param value
     *            allowed object is {@link X3DNode }
     *
     */
    public void setUSE(X3DNode value) {
        this.use = value;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.piggybank.storage;
import java.io.IOException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.pig.Expression;
import org.apache.pig.LoadFunc;
import org.apache.pig.LoadMetadata;
import org.apache.pig.LoadPushDown;
import org.apache.pig.PigWarning;
import org.apache.pig.ResourceSchema;
import org.apache.pig.ResourceSchema.ResourceFieldSchema;
import org.apache.pig.ResourceStatistics;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.util.ObjectSerializer;
import org.apache.pig.impl.util.UDFContext;
import org.apache.pig.impl.util.Utils;
import org.apache.pig.parser.ParserException;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
/**
* A fixed-width file loader.
*
* Takes a string argument specifying the ranges of each column in a unix 'cut'-like format.
* Ex: '-5, 10-12, 14, 20-'
* Ranges are comma-separated, 1-indexed (for ease of use with 1-indexed text editors), and inclusive.
* A single-column field at position n may be specified as either 'n-n' or simply 'n'.
*
* A second optional argument specifies whether to skip the first row of the input file,
* assuming it to be a header. As Pig may combine multiple input files each with their own header
 * into a single split, FixedWidthLoader makes sure to skip any duplicate headers as well.
* 'SKIP_HEADER' skips the row; anything else and the default behavior ('USE_HEADER') is not to skip it.
*
* A third optional argument specifies a Pig schema to load the data with. Automatically
* trims whitespace from numeric fields. Note that if fewer fields are specified in the
* schema than are specified in the column spec, only the fields in the schema will
* be used.
*
* Warning: fields loaded as char/byte arrays will trim all leading and trailing whitespace
 * from the field value as it is indistinguishable from the spaces that separate different fields.
*
* All datetimes are converted to UTC when loaded.
*
* Column spec idea and syntax parser borrowed from Russ Lankenau's implementation
* at https://github.com/rlankenau/fixed-width-pig-loader
*/
public class FixedWidthLoader extends LoadFunc implements LoadMetadata, LoadPushDown {

    /**
     * Half-open, 0-indexed character range [start, end) of one fixed-width column.
     * The user-facing column spec is 1-indexed and inclusive; {@link #parseColumnSpec}
     * performs the conversion.
     */
    public static class FixedWidthField {
        int start, end;

        FixedWidthField(int start, int end) {
            this.start = start;
            this.end = end;
        }
    }

    private TupleFactory tupleFactory = TupleFactory.getInstance();
    private RecordReader reader = null;

    private ArrayList<FixedWidthField> columns;   // parsed column ranges, in spec order
    private ResourceSchema schema = null;         // schema the fields are loaded with
    private ResourceFieldSchema[] fields;         // cached schema.getFields()

    private boolean loadingFirstRecord = true;    // true until the first record has been consumed
    private boolean skipHeader = false;           // set when 'SKIP_HEADER' was passed to the constructor
    private String header = null;                 // remembered header line, used to drop duplicate headers
    private int splitIndex;                       // 0 or -1 identifies the first split of the input

    private boolean[] requiredFields = null;      // projection push-down mask; null means load all fields
    private int numRequiredFields;

    private String udfContextSignature = null;

    private static final String SCHEMA_SIGNATURE = "pig.fixedwidthloader.schema";
    private static final String REQUIRED_FIELDS_SIGNATURE = "pig.fixedwidthloader.required_fields";

    private static final Log log = LogFactory.getLog(FixedWidthLoader.class);

    /*
     * Constructors and helper methods
     */

    /** The no-argument form is invalid: a column spec is mandatory. */
    public FixedWidthLoader() {
        throw new IllegalArgumentException(
            "Usage: org.apache.pig.piggybank.storage.FixedWidthLoader(" +
            "'<column spec>'[, { 'USE_HEADER' | 'SKIP_HEADER' }[, '<schema>']]" +
            ")"
        );
    }

    /**
     * Loads the given columns, typing each field as a bytearray named f0, f1, ...
     *
     * @param columnSpec cut-style ranges, e.g. "-5, 10-12, 14, 20-"
     */
    public FixedWidthLoader(String columnSpec) {
        try {
            columns = parseColumnSpec(columnSpec);
            String schemaStr = generateDefaultSchemaString();
            schema = new ResourceSchema(Utils.getSchemaFromString(schemaStr));
            fields = schema.getFields();
        } catch (ParserException e) {
            throw new IllegalArgumentException("Invalid schema format: " + e.getMessage());
        }
    }

    /**
     * @param columnSpec    cut-style ranges, e.g. "-5, 10-12, 14, 20-"
     * @param skipHeaderStr 'SKIP_HEADER' drops the first row; anything else keeps it
     */
    public FixedWidthLoader(String columnSpec, String skipHeaderStr) {
        this(columnSpec);
        if (skipHeaderStr.equalsIgnoreCase("SKIP_HEADER"))
            skipHeader = true;
    }

    /**
     * @param columnSpec    cut-style ranges, e.g. "-5, 10-12, 14, 20-"
     * @param skipHeaderStr 'SKIP_HEADER' drops the first row; anything else keeps it
     * @param schemaStr     Pig schema used to type the fields; object types
     *                      (map, tuple, bag) are rejected
     */
    public FixedWidthLoader(String columnSpec, String skipHeaderStr, String schemaStr) {
        try {
            columns = parseColumnSpec(columnSpec);
            schemaStr = schemaStr.replaceAll("[\\s\\r\\n]", "");
            schema = new ResourceSchema(Utils.getSchemaFromString(schemaStr));
            fields = schema.getFields();

            for (int i = 0; i < fields.length; i++) {
                byte fieldType = fields[i].getType();
                if (fieldType == DataType.MAP || fieldType == DataType.TUPLE || fieldType == DataType.BAG) {
                    throw new IllegalArgumentException(
                        "Field \"" + fields[i].getName() + "\" is an object type (map, tuple, or bag). " +
                        "Object types are not supported by FixedWidthLoader."
                    );
                }
            }

            // Fewer schema fields than columns is tolerated (extra columns are ignored);
            // more schema fields than columns cannot be satisfied.
            if (fields.length < columns.size())
                warn("More columns specified in column spec than fields specified in schema. Only loading fields specified in schema.",
                     PigWarning.UDF_WARNING_2);
            else if (fields.length > columns.size())
                throw new IllegalArgumentException("More fields specified in schema than columns specified in column spec.");
        } catch (ParserException e) {
            throw new IllegalArgumentException("Invalid schema format: " + e.getMessage());
        }

        if (skipHeaderStr.equalsIgnoreCase("SKIP_HEADER"))
            skipHeader = true;
    }

    /**
     * Parses a unix cut-style column spec ("-5, 10-12, 14, 20-") into a list of
     * 0-indexed, half-open [start, end) ranges.
     *
     * @throws IllegalArgumentException when a range starts before column 1 or
     *                                  ends before it starts
     */
    public static ArrayList<FixedWidthField> parseColumnSpec(String spec) {
        ArrayList<FixedWidthField> columns = new ArrayList<FixedWidthField>();
        String[] ranges = spec.split(",");

        for (String range : ranges) {
            // Ranges are 1-indexed and inclusive-inclusive [] in spec,
            // but we convert to 0-indexing and inclusive-exclusive [) internally
            if (range.indexOf("-") != -1) {
                int start, end;
                String offsets[] = range.split("-", 2);
                offsets[0] = offsets[0].trim();
                offsets[1] = offsets[1].trim();

                // An open start ("-5") means "from the first column";
                // an open end ("20-") means "to the end of the line".
                if (offsets[0].equals(""))
                    start = 0;
                else
                    start = Integer.parseInt(offsets[0]) - 1;

                if (offsets[1].equals(""))
                    end = Integer.MAX_VALUE;
                else
                    end = Integer.parseInt(offsets[1]);

                if (start + 1 < 1)
                    throw new IllegalArgumentException("Illegal column spec '" + range + "': start value must be at least 1");
                if (start + 1 > end)
                    throw new IllegalArgumentException("Illegal column spec '" + range + "': start value must be less than end value");

                columns.add(new FixedWidthField(start, end));
            } else {
                // A bare number n is shorthand for the single-column range n-n.
                int offset = Integer.parseInt(range.trim()) - 1;
                columns.add(new FixedWidthField(offset, offset + 1));
            }
        }

        return columns;
    }

    /** Builds the default all-bytearray schema string "f0: bytearray, f1: bytearray, ...". */
    private String generateDefaultSchemaString() {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < columns.size(); i++) {
            sb.append((i == 0? "" : ", ") + "f" + i + ": bytearray");
        }
        return sb.toString();
    }

    /*
     * Methods called on the frontend
     */

    @Override
    public InputFormat getInputFormat() throws IOException {
        return new TextInputFormat();
    }

    @Override
    public void setLocation(String location, Job job) throws IOException {
        FileInputFormat.setInputPaths(job, location);
    }

    @Override
    public void setUDFContextSignature(String signature) {
        udfContextSignature = signature;
    }

    /**
     * Returns the schema and serializes it into the UDF context so that the
     * backend ({@link #prepareToRead}) can restore it.
     */
    @Override
    public ResourceSchema getSchema(String location, Job job)
            throws IOException {
        if (schema != null) {
            // Send schema to backend.
            // Schema should have been passed as a constructor argument
            // or generated by the single-argument constructor.
            UDFContext udfc = UDFContext.getUDFContext();
            Properties p = udfc.getUDFProperties(this.getClass(), new String[]{ udfContextSignature });
            p.setProperty(SCHEMA_SIGNATURE, schema.toString());
            return schema;
        } else {
            // Should never get here: every valid constructor sets a schema.
            throw new IllegalArgumentException(
                "No schema found: default schema was never created and no user-specified schema was found."
            );
        }
    }

    /*
     * Methods called on the backend
     */

    /**
     * Saves the record reader and restores the schema and projection mask that
     * the frontend stored in the UDF context.
     */
    @Override
    public void prepareToRead(RecordReader reader, PigSplit split) throws IOException {
        // Save reader to use in getNext()
        this.reader = reader;

        splitIndex = split.getSplitIndex();

        // Get schema from front-end
        UDFContext udfc = UDFContext.getUDFContext();
        Properties p = udfc.getUDFProperties(this.getClass(), new String[] { udfContextSignature });

        String strSchema = p.getProperty(SCHEMA_SIGNATURE);
        if (strSchema == null) {
            throw new IOException("Could not find schema in UDF context");
        }
        schema = new ResourceSchema(Utils.getSchemaFromString(strSchema));

        requiredFields = (boolean[]) ObjectSerializer.deserialize(p.getProperty(REQUIRED_FIELDS_SIGNATURE));
        if (requiredFields != null) {
            numRequiredFields = 0;
            for (int i = 0; i < requiredFields.length; i++) {
                if (requiredFields[i])
                    numRequiredFields++;
            }
        }
    }

    /**
     * Reads the next line of the split and slices it into a tuple, one entry per
     * (required) field. When 'SKIP_HEADER' is set, the first line of the first
     * split is skipped, as are duplicate header lines that appear when several
     * files are combined into one split. A field that fails to parse produces a
     * UDF warning and a null tuple entry.
     */
    @Override
    public Tuple getNext() throws IOException {
        // The very first line of the first split is the header: read and remember it.
        if (loadingFirstRecord && skipHeader && (splitIndex == 0 || splitIndex == -1)) {
            try {
                if (!reader.nextKeyValue())
                    return null;
                header = ((Text) reader.getCurrentValue()).toString();
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
        loadingFirstRecord = false;

        String line;
        try {
            if (!reader.nextKeyValue()) return null;
            line = ((Text) reader.getCurrentValue()).toString();

            // If the line is a duplicate header and 'SKIP_HEADER' is set, ignore it
            // (this might happen if multiple files each with a header are combined
            // into a single split).
            if (line.equals(header)) {
                if (!reader.nextKeyValue()) return null;
                line = ((Text) reader.getCurrentValue()).toString();
            }
        } catch (Exception e) {
            throw new IOException(e);
        }

        Tuple t;
        if (requiredFields != null) {
            // Projection was pushed down: emit only the required fields, in order.
            t = tupleFactory.newTuple(numRequiredFields);
            int count = 0;
            for (int i = 0; i < fields.length; i++) {
                if (requiredFields[i]) {
                    try {
                        t.set(count, readField(line, fields[i], columns.get(i)));
                    } catch (Exception e) {
                        warnFieldParseError(line, fields[i], e);
                    }
                    count++;
                }
            }
        } else {
            t = tupleFactory.newTuple(fields.length);
            for (int i = 0; i < fields.length; i++) {
                try {
                    t.set(i, readField(line, fields[i], columns.get(i)));
                } catch (Exception e) {
                    warnFieldParseError(line, fields[i], e);
                }
            }
        }

        return t;
    }

    /** Emits a UDF warning for a field that could not be parsed; the tuple entry stays null. */
    private void warnFieldParseError(String line, ResourceFieldSchema field, Exception e) {
        warn("Exception when parsing field \"" + field.getName() + "\" " +
             "in record " + line + ": " + e.toString(),
             PigWarning.UDF_WARNING_1);
    }

    /**
     * Extracts one column from the line and converts it to the field's declared
     * type. Returns null when the column lies wholly beyond the end of a short
     * line, or when a text field is entirely whitespace.
     *
     * @throws IllegalArgumentException for object types and unknown schema types
     */
    private Object readField(String line, ResourceFieldSchema field, FixedWidthField column)
            throws IOException, IllegalArgumentException {

        int start = column.start;
        int end = Math.min(column.end, line.length());

        // Short line: the column is missing entirely.
        if (start > line.length())
            return null;
        if (end <= start)
            return null;

        String s = line.substring(start, end);
        String sTrim = s.trim();

        switch (field.getType()) {
            case DataType.UNKNOWN:
            case DataType.BYTEARRAY:
            case DataType.CHARARRAY:
                // Whitespace padding is indistinguishable from inter-field spacing,
                // so text fields are always trimmed.
                if (sTrim.length() == 0)
                    return null;
                return sTrim;
            case DataType.BOOLEAN:
                return Boolean.parseBoolean(sTrim);
            case DataType.INTEGER:
                return Integer.parseInt(sTrim);
            case DataType.LONG:
                return Long.parseLong(sTrim);
            case DataType.FLOAT:
                return Float.parseFloat(sTrim);
            case DataType.DOUBLE:
                return Double.parseDouble(sTrim);
            case DataType.DATETIME:
                // All datetimes are normalized to UTC on load.
                return (new DateTime(sTrim)).toDateTime(DateTimeZone.UTC);
            case DataType.MAP:
            case DataType.TUPLE:
            case DataType.BAG:
                throw new IllegalArgumentException("Object types (map, tuple, bag) are not supported by FixedWidthLoader");
            default:
                throw new IllegalArgumentException(
                    "Unknown type in input schema: " + field.getType());
        }
    }

    /**
     * Records which fields the script actually uses and serializes that mask
     * into the UDF context for {@link #prepareToRead}.
     */
    @Override
    public RequiredFieldResponse pushProjection(RequiredFieldList requiredFieldList) throws FrontendException {
        if (requiredFieldList == null)
            return null;

        if (fields != null && requiredFieldList.getFields() != null) {
            requiredFields = new boolean[fields.length];

            for (RequiredField f : requiredFieldList.getFields()) {
                requiredFields[f.getIndex()] = true;
            }

            UDFContext udfc = UDFContext.getUDFContext();
            Properties p = udfc.getUDFProperties(this.getClass(), new String[]{ udfContextSignature });
            try {
                p.setProperty(REQUIRED_FIELDS_SIGNATURE, ObjectSerializer.serialize(requiredFields));
            } catch (Exception e) {
                // Keep the cause so the original serialization failure is not lost.
                throw new RuntimeException("Cannot serialize requiredFields for pushProjection", e);
            }
        }

        return new RequiredFieldResponse(true);
    }

    @Override
    public List<OperatorSet> getFeatures() {
        return Arrays.asList(LoadPushDown.OperatorSet.PROJECTION);
    }

    /** Statistics are not tracked. */
    @Override
    public ResourceStatistics getStatistics(String location, Job job)
            throws IOException {
        return null;
    }

    /** Partitioning is not supported. */
    @Override
    public String[] getPartitionKeys(String location, Job job)
            throws IOException {
        return null;
    }

    /** Partitioning is not supported. */
    @Override
    public void setPartitionFilter(Expression partitionFilter)
            throws IOException {
        // Not implemented
    }
}
| |
/**
* Copyright (c) 2010-2017 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab2.lwm2m;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.ErrorListener;
import javax.xml.transform.Source;
import javax.xml.transform.TransformerException;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import net.sf.saxon.s9api.Processor;
import net.sf.saxon.s9api.SaxonApiException;
import net.sf.saxon.s9api.Serializer;
import net.sf.saxon.s9api.XsltExecutable;
import net.sf.saxon.s9api.XsltTransformer;
/**
 * Command-line tool that downloads the OMA LwM2M object registry files and
 * transforms them into openHAB2 Thing/Channel XML via XSLT (Saxon).
 */
public class Main {
    private static final String DEFAULT_OMAP_URL = "http://www.openmobilealliance.org/wp/OMNA/LwM2M/DDF.xml";

    // Files downloaded in addition to whatever the registry index references.
    private static final String[] ADDITIONAL_DOWNLOAD_RESOURCES = {
            "http://www.openmobilealliance.org/wp/OMNA/LwM2M/Common.xml" };

    /**
     * Check readme.md file in the root directory. This application downloads OMA LWM2M Registry object files,
     * transforms those files to Openhab2 Things and Channels and stores those in an out directory. You need a
     * res directory with schema/thing-description-1.0.0.xsd, schema/LWM2M.xsd, transform/transform.xsl.
     *
     * @param args command-line arguments; see the options registered below
     * @throws MalformedURLException if the destination path cannot be expressed as a URL
     * @throws InterruptedException if a parallel transform step is interrupted
     * @throws SaxonApiException if an XSLT template fails to compile
     * @throws SAXException if the LwM2M schema cannot be parsed
     */
    public static void main(String[] args)
            throws MalformedURLException, InterruptedException, SaxonApiException, SAXException {
        System.out.println("Checking directories, schema files and load transformation file");

        Options options = new Options();
        Option input = new Option("r", "res", true,
                "resource file path, with schema and transform subdirectory (default is 'res')");
        input.setRequired(false);
        options.addOption(input);

        Option output = new Option("o", "output", true, "output file path (default is 'out')");
        output.setRequired(false);
        options.addOption(output);

        Option updateOption = new Option("u", "update", false, "update OMA registry files (default is false)");
        updateOption.setRequired(false);
        options.addOption(updateOption);

        Option disableValidationOption = new Option("dv", "disable-validation", false,
                "Disable the validation of downloaded files");
        disableValidationOption.setRequired(false);
        options.addOption(disableValidationOption);

        Option updateURLOption = new Option("url", "update-url", true,
                "OMA registry url (default is " + DEFAULT_OMAP_URL + ")");
        updateURLOption.setRequired(false);
        options.addOption(updateURLOption);

        CommandLineParser parser = new DefaultParser();
        HelpFormatter formatter = new HelpFormatter();
        CommandLine cmd;

        try {
            cmd = parser.parse(options, args);
        } catch (ParseException e) {
            System.out.println(e.getMessage());
            formatter.printHelp("utility-name", options);
            System.exit(1);
            return;
        }

        File basePath = Paths.get(val(cmd, "res", "res")).toAbsolutePath().toFile();
        //File openhabSchemaFile = new File(basePath, "schema/thing-description-1.0.0.xsd");
        File lwm2mSchemaFile = new File(basePath, "schema/LWM2M.xsd");
        File transformFile = new File(basePath, "transform/transform.xsl");
        File transformDDF = new File(basePath, "transform/transform_ddf.xsl");
        File transformPostFile = new File(basePath, "transform/post_transform.xsl");
        File inputPath = new File(basePath, "lwm2m_object_registry");
        File destPath = Paths.get(val(cmd, "output", "out")).toAbsolutePath().toFile();

        if (!basePath.exists() || !inputPath.exists() || !transformFile.exists() || !transformPostFile.exists()
                || !lwm2mSchemaFile.exists()) {
            System.err.println(
                    "Res directory or subdirectories does not exist in your working directory: " + basePath.toString());
            System.exit(-1);
        }
        if (!destPath.exists()) {
            destPath.mkdirs();
        }

        Processor processor = new Processor(false);
        XsltExecutable templateTransform = processor.newXsltCompiler().compile(new StreamSource(transformFile));
        Processor processorPost = new Processor(false);
        XsltExecutable templateTransformPost = processorPost.newXsltCompiler()
                .compile(new StreamSource(transformPostFile));
        Processor processorIndex = new Processor(false);
        XsltExecutable templateTransformIndex = processorIndex.newXsltCompiler()
                .compile(new StreamSource(transformDDF));
        if (templateTransform == null || templateTransformPost == null || templateTransformIndex == null) {
            System.err.println("Failed to load transform.xsl");
            System.exit(-1);
            return;
        }

        // Setup input validator
        SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        Schema lwm2mSchema = schemaFactory.newSchema(lwm2mSchemaFile);

        if (cmd.hasOption("update")) {
            System.out.println("Download OMA LWM2M Registry data");
            // Bug fix: the option is registered as "update-url"; the old lookup key
            // "updateurl" never matched, so a user-supplied registry URL was ignored.
            String updateURL = val(cmd, "update-url", DEFAULT_OMAP_URL);
            if (!updateFiles(inputPath, updateURL)) {
                System.err.println("Download failed");
                return;
            }
        } else {
            System.out.println("Use cached OMA LWM2M Registry data");
        }

        transformInputfile(new File(inputPath, "DDF.xml"), destPath.toURI().toURL().toString(), processorIndex,
                templateTransformIndex);
        transformInputfiles(inputPath.listFiles(xmlfilenameFilter), destPath.toURI().toURL().toString(), lwm2mSchema,
                processor, templateTransform, cmd.hasOption("disable-validation"));
        transformOutputfiles(destPath, processorPost, templateTransformPost);
        //validateOutput(openhabSchemaFile, destPath);
        System.out.println("Done");
    }

    /**
     * Returns the string value of the given command-line option, or the default
     * when the option is absent or its value cannot be parsed.
     */
    private static String val(CommandLine cmd, String optionName, String defaultValue) {
        // Bug fix: this previously tested hasOption("input") — an option that is
        // never registered — so every lookup fell through to the default value and
        // the -r/-o/-url arguments were silently ignored.
        if (cmd.hasOption(optionName)) {
            try {
                return cmd.getParsedOptionValue(optionName).toString();
            } catch (ParseException e) {
                return defaultValue;
            }
        }
        return defaultValue;
    }

    /** Accepts only files whose name ends in ".xml". */
    static FilenameFilter xmlfilenameFilter = new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.endsWith(".xml");
        }
    };

    /**
     * Runs the post-processing XSLT over every generated XML file in destPath,
     * in parallel. Each file is read fully into memory, deleted, and rewritten
     * as the transformation output.
     */
    private static void transformOutputfiles(File destPath, Processor processor, XsltExecutable template)
            throws MalformedURLException, InterruptedException {
        ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        File[] files = destPath.listFiles(xmlfilenameFilter);
        List<Callable<Object>> todo = new ArrayList<Callable<Object>>(files.length);
        for (File xmlFile : files) {
            todo.add(Executors.callable(() -> {
                System.out.println("Post Transform " + xmlFile.getName().toString());
                try {
                    // Slurp the file so it can be deleted and re-created as the
                    // transformation's own output file.
                    StringBuilder xmlFileContent = new StringBuilder();
                    // try-with-resources: the reader previously leaked on exceptions.
                    try (BufferedReader in = new BufferedReader(new FileReader(xmlFile))) {
                        String inputLine;
                        while ((inputLine = in.readLine()) != null) {
                            xmlFileContent.append(inputLine);
                        }
                    }
                    xmlFile.delete();

                    // Setup transformer
                    Serializer serializer = processor.newSerializer();
                    serializer.setOutputFile(xmlFile);
                    XsltTransformer transformer = template.load();
                    transformer.setDestination(serializer);
                    transformer.setInitialContextNode(processor.newDocumentBuilder()
                            .build(new StreamSource(new StringReader(xmlFileContent.toString()))));
                    transformer.transform();
                } catch (SaxonApiException | IOException e) {
                    e.printStackTrace();
                }
            }));
        }
        exec.invokeAll(todo);
        exec.shutdown();
    }

    /**
     * Transforms a single registry XML file with the given XSLT template. The
     * error listener rethrows warnings as well, so any XSLT diagnostic aborts
     * the transformation of that file.
     */
    private static void transformInputfile(File xmlFile, String destPath, Processor processor, XsltExecutable template) {
        System.out.println("Transform " + xmlFile.getName().toString());
        StreamSource streamSource = new StreamSource(xmlFile);
        try {
            // Setup transformer
            Serializer serializer = processor.newSerializer();
            serializer.setOutputWriter(new StringWriter());
            XsltTransformer transformer = template.load();
            transformer.setErrorListener(new ErrorListener() {
                @Override
                public void warning(TransformerException exception) throws TransformerException {
                    throw exception;
                }

                @Override
                public void fatalError(TransformerException exception) throws TransformerException {
                    throw exception;
                }

                @Override
                public void error(TransformerException exception) throws TransformerException {
                    throw exception;
                }
            });
            transformer.setDestination(serializer);
            transformer.setBaseOutputURI(destPath);
            transformer.setInitialContextNode(processor.newDocumentBuilder().build(streamSource));
            transformer.setSource(streamSource);
            transformer.transform();
        } catch (SaxonApiException e) {
            System.err.println("Error in file " + xmlFile.getName().toString());
            System.err.println(e.getMessage());
        }
    }

    /**
     * Validates (unless disabled) and transforms all given input files in parallel.
     */
    private static void transformInputfiles(File[] files, String destPath, Schema lwm2mSchema, Processor processor,
            XsltExecutable template, boolean disableValidation) throws InterruptedException {
        ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        List<Callable<Object>> todo = new ArrayList<Callable<Object>>(files.length);
        for (File xmlFile : files) {
            todo.add(Executors.callable(() -> {
                if (!disableValidation) {
                    // A fresh Validator per task: Validator instances are not thread-safe.
                    Validator validator = lwm2mSchema.newValidator();
                    StreamSource streamSource = new StreamSource(xmlFile);
                    try {
                        validator.validate(streamSource);
                    } catch (SAXException e) {
                        System.err.println("\tInput of " + xmlFile.getName().toString() + " NOT valid");
                        System.err.println("\tReason: " + e.getLocalizedMessage());
                        return;
                    } catch (IOException e) {
                        e.printStackTrace();
                        return;
                    }
                }
                transformInputfile(xmlFile, destPath, processor, template);
            }));
        }
        exec.invokeAll(todo);
        exec.shutdown();
    }

    /**
     * Download web page, extract xml file links, download them in parallel, store them in the
     * given directory.
     *
     * @param destPath The dest dir to store files.
     * @param registryURLs Registry URLs to download the single object files from.
     *            Might be multiple urls separated by ";".
     * @return true when every index download/parse step succeeded, false otherwise
     */
    private static boolean updateFiles(File destPath, String registryURLs) {
        Set<String> links = new TreeSet<String>();
        String[] urls = registryURLs.split(";");
        for (String url : urls) {
            String fileName = url.substring(url.lastIndexOf('/') + 1, url.length());
            String data;
            try {
                data = downloadFile(url);
                if (data.length() > 0) {
                    File destFile = new File(destPath, fileName);
                    System.out.println("Downloaded " + url + " to " + destFile.getAbsolutePath());
                    try (PrintWriter out = new PrintWriter(destFile)) {
                        out.println(data);
                    } catch (FileNotFoundException e) {
                        System.err.println(
                                "Failed to store " + destFile.getAbsolutePath() + " " + e.getLocalizedMessage());
                    }
                } else {
                    System.err.println("Failed to download " + url);
                }
            } catch (IOException e1) {
                e1.printStackTrace();
                return false;
            }

            // Extracting the links from an XML document
            if (url.endsWith(".xml")) {
                try {
                    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                    DocumentBuilder builder = factory.newDocumentBuilder();
                    Document doc = builder.parse(new ByteArrayInputStream(data.getBytes()));
                    XPathFactory xPathfactory = XPathFactory.newInstance();
                    XPath xpath = xPathfactory.newXPath();
                    XPathExpression expr = xpath.compile("/DDFList/Item/DDF");
                    NodeList nl = (NodeList) expr.evaluate(doc, XPathConstants.NODESET);
                    for (int i = 0; i < nl.getLength(); ++i) {
                        links.add(nl.item(i).getTextContent());
                    }
                } catch (ParserConfigurationException | SAXException | IOException | XPathExpressionException e1) {
                    e1.printStackTrace();
                    return false;
                }
            } else {
                // The following code is for extracting the xml links from a static html page
                Pattern linkPattern = Pattern.compile("<a[^>]+href=[\"']?([^\"'>]*\\.xml)[\"']?[^>]*>(.+?)</a>",
                        Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
                Matcher pageMatcher = linkPattern.matcher(data);
                while (pageMatcher.find()) {
                    links.add(pageMatcher.group(1));
                }
            }
            if (links.size() == 0) {
                System.err.println("No XML files found on page: " + data);
                return false;
            }
        }

        for (String url : ADDITIONAL_DOWNLOAD_RESOURCES) {
            links.add(url);
        }

        // Download every discovered object file in parallel.
        ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        List<Callable<Object>> todo = new ArrayList<Callable<Object>>(links.size());
        for (String urlName : links) {
            todo.add(Executors.callable(new Runnable() {
                @Override
                public void run() {
                    String fileName = new File(urlName).getName();
                    String xmlFile;
                    try {
                        xmlFile = downloadFile(urlName);
                    } catch (IOException e) {
                        System.err.println("Failed to download " + urlName);
                        return;
                    }
                    if (xmlFile.length() > 0) {
                        System.out.println("Downloaded " + fileName);
                        File destFile = new File(destPath, fileName);
                        try (PrintWriter out = new PrintWriter(destFile)) {
                            out.println(xmlFile);
                        } catch (FileNotFoundException e) {
                            System.err.println(
                                    "Failed to store " + destFile.getAbsolutePath() + " " + e.getLocalizedMessage());
                        }
                    } else {
                        System.err.println("Failed to download " + urlName);
                    }
                }
            }));
        }
        try {
            exec.invokeAll(todo);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
            return false;
        }
        exec.shutdown();
        return true;
    }

    /**
     * Downloads the given URL into a string (line breaks are dropped).
     *
     * @param urlName the URL to fetch
     * @return the response body, empty when the server sent no data
     * @throws IOException on connection or read failure
     */
    private static String downloadFile(String urlName) throws IOException {
        URL url = new URL(urlName);
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setReadTimeout(2500);
        // NOTE(review): legacy desktop user agent — presumably required by the
        // registry server; confirm before changing.
        connection.setRequestProperty("User-Agent",
                "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; .NET CLR 1.0.3705; .NET CLR 1.1.4322; .NET CLR 1.2.30703)");

        StringBuilder response = new StringBuilder();
        // try-with-resources: the reader previously leaked when readLine() threw.
        try (BufferedReader in = new BufferedReader(new InputStreamReader(connection.getInputStream()))) {
            String inputLine;
            while ((inputLine = in.readLine()) != null) {
                response.append(inputLine);
            }
        }
        return response.toString();
    }

    /**
     * Validate output files in parallel.
     * NOTE(review): currently unreferenced — the call in main() is commented out.
     *
     * @param openhabSchemaFile schema to validate against
     * @param destPath directory containing the generated XML files
     * @throws InterruptedException if the parallel validation is interrupted
     */
    private static void validateOutput(File openhabSchemaFile, File destPath) throws InterruptedException {
        // Validate
        SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        Schema schema;
        try {
            schema = schemaFactory.newSchema(openhabSchemaFile);
        } catch (SAXException e) {
            e.printStackTrace();
            return;
        }
        File[] files = destPath.listFiles(xmlfilenameFilter);
        ExecutorService exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        List<Callable<Object>> todo = new ArrayList<Callable<Object>>(files.length);
        for (File xmlFile : files) {
            todo.add(Executors.callable(new Runnable() {
                @Override
                public void run() {
                    System.out.println("Validate " + xmlFile.getName().toString());
                    Source xmlSource = new StreamSource(xmlFile);
                    try {
                        Validator validator = schema.newValidator();
                        validator.validate(xmlSource);
                    } catch (SAXException e) {
                        System.out.println(xmlFile.getName().toString() + " NOT valid");
                        System.out.println("Reason: " + e.getLocalizedMessage());
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }));
        }
        exec.invokeAll(todo);
        exec.shutdown();
    }
}
| |
package org.innovateuk.ifs.management.competition.setup.application.controller;
import lombok.extern.slf4j.Slf4j;
import org.innovateuk.ifs.application.service.QuestionSetupRestService;
import org.innovateuk.ifs.commons.security.SecuredBySpring;
import org.innovateuk.ifs.commons.service.ServiceResult;
import org.innovateuk.ifs.competition.resource.CompetitionResource;
import org.innovateuk.ifs.competition.resource.CompetitionSetupQuestionResource;
import org.innovateuk.ifs.competition.resource.CompetitionSetupSection;
import org.innovateuk.ifs.competition.resource.CompetitionSetupSubsection;
import org.innovateuk.ifs.competition.service.CompetitionRestService;
import org.innovateuk.ifs.competition.service.CompetitionSetupRestService;
import org.innovateuk.ifs.controller.ValidationHandler;
import org.innovateuk.ifs.form.service.FormInputRestService;
import org.innovateuk.ifs.management.competition.setup.application.form.*;
import org.innovateuk.ifs.management.competition.setup.application.validator.CompetitionSetupApplicationQuestionValidator;
import org.innovateuk.ifs.management.competition.setup.core.form.CompetitionSetupForm;
import org.innovateuk.ifs.management.competition.setup.core.populator.CompetitionSetupPopulator;
import org.innovateuk.ifs.management.competition.setup.core.service.CompetitionSetupQuestionService;
import org.innovateuk.ifs.management.competition.setup.core.service.CompetitionSetupService;
import org.innovateuk.ifs.management.competition.setup.core.viewmodel.CompetitionSetupSubsectionViewModel;
import org.innovateuk.ifs.management.competition.setup.core.viewmodel.GeneralSetupViewModel;
import org.innovateuk.ifs.management.competition.setup.core.viewmodel.QuestionSetupViewModel;
import org.innovateuk.ifs.question.resource.QuestionSetupType;
import org.innovateuk.ifs.question.service.QuestionSetupCompetitionRestService;
import org.innovateuk.ifs.user.resource.UserResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.validation.Valid;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.innovateuk.ifs.commons.service.ServiceResult.serviceSuccess;
import static org.innovateuk.ifs.competition.resource.CompetitionSetupSection.APPLICATION_FORM;
import static org.innovateuk.ifs.competition.resource.CompetitionSetupSubsection.*;
import static org.innovateuk.ifs.controller.ErrorToObjectErrorConverterFactory.asGlobalErrors;
import static org.innovateuk.ifs.controller.ErrorToObjectErrorConverterFactory.fieldErrorsToFieldErrors;
import static org.innovateuk.ifs.controller.FileUploadControllerUtils.getMultipartFileBytes;
import static org.innovateuk.ifs.file.controller.FileDownloadControllerUtils.getFileResponseEntity;
import static org.innovateuk.ifs.management.competition.setup.CompetitionSetupController.COMPETITION_SETUP_FORM_KEY;
/**
 * Controller to manage the Application Questions and its sub-sections in the
 * competition setup process
 */
@Slf4j
@Controller
@RequestMapping("/competition/setup/{competitionId}/section/application")
@SecuredBySpring(value = "Controller", description = "TODO", securedType = CompetitionSetupApplicationController.class)
@PreAuthorize("hasAnyAuthority('comp_admin')")
public class CompetitionSetupApplicationController {
    // Redirect to the application section landing page; %d is the competition id.
    public static final String APPLICATION_LANDING_REDIRECT = "redirect:/competition/setup/%d/section/application/landing-page";
    // Redirect to a single question's edit page; the two %d are competition id and question id.
    public static final String QUESTION_REDIRECT = "redirect:/competition/setup/%d/section/application/question/%d/edit";
    // View name for the single-question page.
    private static final String QUESTION_VIEW = "competition/setup/question";
    // Model attribute key every view in this controller reads its view model from.
    private static final String MODEL = "model";
    @Autowired
    private CompetitionSetupService competitionSetupService;
    @Autowired
    private CompetitionRestService competitionRestService;
    @Autowired
    private CompetitionSetupRestService competitionSetupRestService;
    @Autowired
    private CompetitionSetupQuestionService competitionSetupQuestionService;
    @Autowired
    private QuestionSetupCompetitionRestService questionSetupCompetitionRestService;
    @Autowired
    private QuestionSetupRestService questionSetupRestService;
    @Autowired
    private CompetitionSetupPopulator competitionSetupPopulator;
    @Autowired
    private FormInputRestService formInputRestService;
    @Autowired
    private CompetitionSetupApplicationQuestionValidator competitionSetupApplicationQuestionValidator;
    // Configuration toggle passed straight through to QuestionSetupViewModel.
    @Value("${ifs.loan.partb.enabled}")
    private boolean ifsLoanPartBEnabled;
    /**
     * Adds a new default question to the competition and redirects to that
     * question's edit page.
     */
    @PostMapping(value = "/landing-page", params = "createQuestion")
    public String createQuestion(@PathVariable long competitionId) {
        ServiceResult<CompetitionSetupQuestionResource> result = questionSetupCompetitionRestService
                .addDefaultToCompetition(competitionId).toServiceResult();
        Function<CompetitionSetupQuestionResource, String> successViewFunction =
                (question) -> String.format("redirect:/competition/setup/%d/section/application/question/%d/edit", competitionId, question.getQuestionId());
        return successViewFunction.apply(result.getSuccess());
    }
    /**
     * Deletes the question identified by the submitted form and returns to the
     * application landing page.
     */
    @PostMapping(value = "/landing-page", params = "deleteQuestion")
    public String deleteQuestion(@ModelAttribute("deleteQuestion") DeleteQuestionForm deleteQuestionForm,
                                 @PathVariable long competitionId) {
        questionSetupCompetitionRestService.deleteById(deleteQuestionForm.getDeleteQuestion());
        Supplier<String> view = () -> String.format(APPLICATION_LANDING_REDIRECT, competitionId);
        return view.get();
    }
    /**
     * Shows the application process landing page. Non-IFS competitions and
     * competitions without submitted initial details are redirected away.
     */
    @GetMapping("/landing-page")
    public String applicationProcessLandingPage(Model model, @PathVariable long competitionId, UserResource loggedInUser) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (competitionResource.isNonIfs()) {
            return "redirect:/non-ifs-competition/setup/" + competitionId;
        }
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        model.addAttribute(MODEL, competitionSetupService.populateCompetitionSectionModelAttributes(competitionResource, loggedInUser, APPLICATION_FORM));
        model.addAttribute(COMPETITION_SETUP_FORM_KEY, new LandingPageForm());
        return "competition/setup";
    }
    /**
     * Validates all application questions and, if valid, marks the application
     * process section as complete; otherwise re-renders the landing page with errors.
     */
    @PostMapping("/landing-page")
    public String setApplicationProcessAsComplete(Model model,
                                                  @PathVariable long competitionId,
                                                  @ModelAttribute(COMPETITION_SETUP_FORM_KEY) LandingPageForm form,
                                                  BindingResult bindingResult,
                                                  ValidationHandler validationHandler,
                                                  UserResource loggedInUser) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionId;
        }
        Supplier<String> failureView = () -> {
            model.addAttribute(MODEL, competitionSetupService.populateCompetitionSectionModelAttributes(competitionResource, loggedInUser, APPLICATION_FORM));
            model.addAttribute(COMPETITION_SETUP_FORM_KEY, form);
            return "competition/setup";
        };
        Supplier<String> successView = () -> String.format(APPLICATION_LANDING_REDIRECT, competitionId);
        return validationHandler.performActionOrBindErrorsToField("", failureView, successView, () ->
                competitionSetupQuestionService.validateApplicationQuestions(competitionResource, form, bindingResult));
    }
    /** Read-only view of the application finances subsection. */
    @GetMapping("/question/finance")
    public String seeApplicationFinances(@PathVariable long competitionId,
                                         UserResource loggedInUser,
                                         Model model) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (competitionResource.isNonIfs()) {
            return "redirect:/non-ifs-competition/setup/" + competitionId;
        }
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        return getFinancePage(model, competitionResource, loggedInUser, false, null);
    }
    /**
     * Editable view of the finances subsection. Opening the editor marks the
     * subsection (and the whole section) incomplete until it is re-saved.
     */
    @GetMapping("/question/finance/edit")
    public String editApplicationFinances(@PathVariable long competitionId,
                                          UserResource loggedInUser,
                                          Model model) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (competitionResource.isNonIfs()) {
            return "redirect:/non-ifs-competition/setup/" + competitionId;
        }
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        return ifUserCanAccessEditPageMarkSectionAsIncomplete(competitionResource,
                () -> getFinancePage(model, competitionResource, loggedInUser, true, null),
                Optional.of(FINANCES), Optional.empty(), loggedInUser);
    }
    /** Saves the finances subsection form. */
    @PostMapping("/question/finance/edit")
    public String submitApplicationFinances(@Valid @ModelAttribute(COMPETITION_SETUP_FORM_KEY) FinanceForm form,
                                            BindingResult bindingResult,
                                            ValidationHandler validationHandler,
                                            @PathVariable long competitionId,
                                            UserResource loggedInUser,
                                            Model model) {
        return handleFinanceSaving(competitionId, loggedInUser, model, form, validationHandler);
    }
    // Shared save path for the finances form: re-render the edit page on failure,
    // go back to the landing page on success.
    private String handleFinanceSaving(long competitionId, UserResource loggedInUser, Model model, FinanceForm form, ValidationHandler validationHandler) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        Supplier<String> failureView = () -> getFinancePage(model, competitionResource, loggedInUser, true, form);
        Supplier<String> successView = () -> String.format(APPLICATION_LANDING_REDIRECT, competitionId);
        return validationHandler.performActionOrBindErrorsToField("", failureView, successView,
                () -> competitionSetupService.saveCompetitionSetupSubsection(form, competitionResource, APPLICATION_FORM, FINANCES, loggedInUser));
    }
    /** Read-only view of a single application question. */
    @GetMapping("/question/{questionId}")
    public String seeQuestionInCompSetup(@PathVariable long competitionId,
                                         @PathVariable("questionId") Long questionId,
                                         UserResource loggedInUser,
                                         Model model) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (competitionResource.isNonIfs()) {
            return "redirect:/non-ifs-competition/setup/" + competitionId;
        }
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        return getQuestionPage(model, competitionResource, loggedInUser, questionId, false, null);
    }
    /**
     * Editable view of a single question; marks that question's setup incomplete
     * before showing the editor.
     */
    @GetMapping("/question/{questionId}/edit")
    public String editQuestionInCompSetup(@PathVariable long competitionId,
                                          @PathVariable long questionId,
                                          UserResource loggedInUser,
                                          Model model) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (competitionResource.isNonIfs()) {
            return "redirect:/non-ifs-competition/setup/" + competitionId;
        }
        return ifUserCanAccessEditPageMarkSectionAsIncomplete(competitionResource,
                () -> getQuestionPage(model, competitionResource, loggedInUser, questionId, true, null),
                Optional.empty(),
                Optional.ofNullable(questionId),
                loggedInUser);
    }
    /**
     * Saves an assessed question. The "question.type=ASSESSED_QUESTION" params entry
     * makes Spring route only assessed-question submissions here (more specific than
     * the project-details fallback mapping on the same path).
     */
    @PostMapping(value = "/question/{questionId}/edit", params = {"!uploadTemplateDocumentFile", "!removeTemplateDocumentFile", "question.type=ASSESSED_QUESTION"})
    public String submitAssessedQuestion(@Valid @ModelAttribute(COMPETITION_SETUP_FORM_KEY) QuestionForm competitionSetupForm,
                                         BindingResult bindingResult,
                                         ValidationHandler validationHandler,
                                         @PathVariable long competitionId,
                                         @PathVariable long questionId,
                                         UserResource loggedInUser,
                                         Model model) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        competitionSetupApplicationQuestionValidator.validate(competitionSetupForm, bindingResult, questionId, competitionResource);
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        Supplier<String> failureView = () -> getQuestionPage(model, competitionResource, loggedInUser, competitionSetupForm.getQuestion().getQuestionId(), true, competitionSetupForm);
        Supplier<String> successView = () -> String.format(APPLICATION_LANDING_REDIRECT, competitionId);
        return validationHandler.performActionOrBindErrorsToField("", failureView, successView,
                () -> competitionSetupService.saveCompetitionSetupSubsection(competitionSetupForm, competitionResource, APPLICATION_FORM, QUESTIONS, loggedInUser));
    }
    /** Saves a KTP assessment question (routed by "question.type=KTP_ASSESSMENT"). */
    @PostMapping(value = "/question/{questionId}/edit", params = {"!uploadTemplateDocumentFile", "!removeTemplateDocumentFile","question.type=KTP_ASSESSMENT"})
    public String submitKtpAssessedQuestion(@Valid @ModelAttribute(COMPETITION_SETUP_FORM_KEY) KtpAssessmentForm competitionSetupForm,
                                            BindingResult bindingResult,
                                            ValidationHandler validationHandler,
                                            @PathVariable long competitionId,
                                            @PathVariable long questionId,
                                            UserResource loggedInUser,
                                            Model model) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        competitionSetupApplicationQuestionValidator.validateKtpAssessmentQuestion(competitionSetupForm, bindingResult);
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        Supplier<String> failureView = () -> getQuestionPage(model, competitionResource, loggedInUser, competitionSetupForm.getQuestion().getQuestionId(), true, competitionSetupForm);
        Supplier<String> successView = () -> String.format(APPLICATION_LANDING_REDIRECT, competitionId);
        return validationHandler.performActionOrBindErrorsToField("", failureView, successView,
                () -> competitionSetupService.saveCompetitionSetupSubsection(competitionSetupForm, competitionResource, APPLICATION_FORM, KTP_ASSESSMENT, loggedInUser));
    }
    /**
     * Uploads a template document for a question. Upload errors are bound to the
     * "templateDocumentFile" field; the question edit page is re-rendered either way.
     */
    @PostMapping(value = "/question/{questionId}/edit", params = {"uploadTemplateDocumentFile"})
    public String uploadTemplateDocumentFile(@ModelAttribute(COMPETITION_SETUP_FORM_KEY) QuestionForm competitionSetupForm,
                                             BindingResult bindingResult,
                                             ValidationHandler validationHandler,
                                             @PathVariable long competitionId,
                                             @PathVariable long questionId,
                                             UserResource loggedInUser,
                                             Model model) {
        MultipartFile file = competitionSetupForm.getTemplateDocumentFile();
        // Same view for success and failure: stay on the edit page.
        Supplier<String> view = () -> getQuestionPage(model, competitionRestService.getCompetitionById(competitionId).getSuccess(),
                loggedInUser, questionId, true, competitionSetupForm);
        return validationHandler.performActionOrBindErrorsToField("templateDocumentFile", view, view,
                () -> questionSetupCompetitionRestService.uploadTemplateDocument(questionId,
                        file.getContentType(), file.getSize(), file.getOriginalFilename(), getMultipartFileBytes(file)));
    }
    /** Removes a question's previously uploaded template document. */
    @PostMapping(value = "/question/{questionId}/edit", params = {"removeTemplateDocumentFile"})
    public String removeTemplateDocumentFile(@ModelAttribute(COMPETITION_SETUP_FORM_KEY) QuestionForm competitionSetupForm,
                                             BindingResult bindingResult,
                                             ValidationHandler validationHandler,
                                             @PathVariable long competitionId,
                                             @PathVariable long questionId,
                                             UserResource loggedInUser,
                                             Model model) {
        Supplier<String> view = () -> getQuestionPage(model, competitionRestService.getCompetitionById(competitionId).getSuccess(),
                loggedInUser, questionId, true, competitionSetupForm);
        return validationHandler.performActionOrBindErrorsToField("templateDocumentFile", view, view,
                () -> questionSetupCompetitionRestService.deleteTemplateDocument(questionId));
    }
    /** Streams the question's template file back to the browser. */
    @GetMapping("/question/{questionId}/download-template-file")
    public @ResponseBody
    ResponseEntity<ByteArrayResource> downloadFile(Model model,
                                                   @PathVariable long questionId) {
        CompetitionSetupQuestionResource question = questionSetupCompetitionRestService.getByQuestionId(questionId).getSuccess();
        return getFileResponseEntity(formInputRestService.downloadFile(question.getTemplateFormInput()).getSuccess(),
                formInputRestService.findFile(question.getTemplateFormInput()).getSuccess());
    }
    /**
     * Fallback save mapping for the question edit page: handles project-details
     * questions (any submission without an upload/remove action that did not match
     * the more specific assessed/KTP mappings above).
     */
    @PostMapping(value = "/question/{questionId}/edit", params = {"!uploadTemplateDocumentFile", "!removeTemplateDocumentFile"})
    public String submitProjectDetailsQuestion(@Valid @ModelAttribute(COMPETITION_SETUP_FORM_KEY) ProjectForm competitionSetupForm,
                                               BindingResult bindingResult,
                                               ValidationHandler validationHandler,
                                               @PathVariable long competitionId,
                                               @PathVariable long questionId,
                                               UserResource loggedInUser,
                                               Model model) {
        competitionSetupApplicationQuestionValidator.validate(competitionSetupForm, bindingResult, questionId);
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (competitionResource.isNonIfs()) {
            return "redirect:/non-ifs-competition/setup/" + competitionId;
        }
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        Supplier<String> failureView = () -> getQuestionPage(model, competitionResource, loggedInUser, competitionSetupForm.getQuestion().getQuestionId(), true, competitionSetupForm);
        Supplier<String> successView = () -> String.format(APPLICATION_LANDING_REDIRECT, competitionId);
        return validationHandler.performActionOrBindErrorsToField("", failureView, successView,
                () -> competitionSetupService.saveCompetitionSetupSubsection(competitionSetupForm, competitionResource, APPLICATION_FORM, PROJECT_DETAILS, loggedInUser));
    }
    /** Read-only view of the application details subsection. */
    @GetMapping(value = "/detail")
    public String viewApplicationDetails(@PathVariable long competitionId,
                                         UserResource loggedInUser,
                                         Model model) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (competitionResource.isNonIfs()) {
            return "redirect:/non-ifs-competition/setup/" + competitionId;
        }
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        return getDetailsPage(model, competitionResource, loggedInUser, false, null);
    }
    /**
     * Editable view of the application details subsection; marks it incomplete
     * before showing the editor.
     */
    @GetMapping(value = "/detail/edit")
    public String getEditApplicationDetails(@PathVariable long competitionId,
                                            UserResource loggedInUser,
                                            Model model) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (competitionResource.isNonIfs()) {
            return "redirect:/non-ifs-competition/setup/" + competitionId;
        }
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        return ifUserCanAccessEditPageMarkSectionAsIncomplete(competitionResource,
                () -> getDetailsPage(model, competitionResource, loggedInUser, true, null),
                Optional.of(APPLICATION_DETAILS),
                Optional.empty(),
                loggedInUser);
    }
    /** Saves the application details form, binding any service errors to the page. */
    @PostMapping("/detail/edit")
    public String submitApplicationDetails(@ModelAttribute(COMPETITION_SETUP_FORM_KEY) DetailsForm form,
                                           BindingResult bindingResult,
                                           ValidationHandler validationHandler,
                                           @PathVariable long competitionId,
                                           UserResource loggedInUser,
                                           Model model) {
        CompetitionResource competitionResource = competitionRestService.getCompetitionById(competitionId).getSuccess();
        if (!competitionSetupService.hasInitialDetailsBeenPreviouslySubmitted(competitionId)) {
            return "redirect:/competition/setup/" + competitionResource.getId();
        }
        Supplier<String> failureView = () -> getDetailsPage(model, competitionResource, loggedInUser, true, form);
        Supplier<String> successView = () -> String.format(APPLICATION_LANDING_REDIRECT, competitionId);
        return validationHandler.addAnyErrors(
                competitionSetupService.saveCompetitionSetupSubsection(form,
                        competitionResource,
                        APPLICATION_FORM,
                        APPLICATION_DETAILS,
                        loggedInUser),
                fieldErrorsToFieldErrors(),
                asGlobalErrors())
                .failNowOrSucceedWith(
                        failureView,
                        successView
                );
    }
    // Populates model attributes for the finances page and returns its view name.
    private String getFinancePage(Model model, CompetitionResource competitionResource, UserResource loggedInUser, boolean isEditable, CompetitionSetupForm form) {
        model.addAttribute(MODEL, setupQuestionViewModel(competitionResource, loggedInUser, Optional.empty(), FINANCES, isEditable, null));
        model.addAttribute(COMPETITION_SETUP_FORM_KEY, setupQuestionForm(competitionResource, Optional.empty(), FINANCES, form));
        return "competition/finances";
    }
    // Populates model attributes for the application-details page and returns its view name.
    private String getDetailsPage(Model model, CompetitionResource competitionResource, UserResource loggedInUser, boolean isEditable, CompetitionSetupForm form) {
        model.addAttribute(MODEL, setupQuestionViewModel(competitionResource, loggedInUser, Optional.empty(), APPLICATION_DETAILS, isEditable, null));
        model.addAttribute(COMPETITION_SETUP_FORM_KEY, setupQuestionForm(competitionResource, Optional.empty(), APPLICATION_DETAILS, form));
        return "competition/application-details";
    }
    // Looks up the question, maps its type to the matching setup subsection,
    // populates the model and returns the question view (or a redirect on failure).
    private String getQuestionPage(Model model, CompetitionResource competitionResource, UserResource loggedInUser, Long questionId, boolean isEditable, CompetitionSetupForm form) {
        ServiceResult<String> view = questionSetupCompetitionRestService.getByQuestionId(questionId).toServiceResult()
                .andOnSuccessReturn(
                        questionResource -> {
                            QuestionSetupType type = questionResource.getType();
                            CompetitionSetupSubsection setupSubsection;
                            if (type.equals(QuestionSetupType.ASSESSED_QUESTION)) {
                                setupSubsection = CompetitionSetupSubsection.QUESTIONS;
                            } else if (type.equals(QuestionSetupType.KTP_ASSESSMENT)) {
                                setupSubsection = CompetitionSetupSubsection.KTP_ASSESSMENT;
                            } else {
                                setupSubsection = CompetitionSetupSubsection.PROJECT_DETAILS;
                            }
                            model.addAttribute(MODEL, setupQuestionViewModel(competitionResource, loggedInUser, Optional.of(questionId), setupSubsection, isEditable, questionResource.getTemplateFilename()));
                            model.addAttribute(COMPETITION_SETUP_FORM_KEY, setupQuestionForm(competitionResource, Optional.of(questionId), setupSubsection, form));
                            return QUESTION_VIEW;
                        // NOTE(review): this failure redirect builds the URL from questionId, while every
                        // other non-IFS redirect in this controller uses the competition id — confirm intended.
                        }).andOnFailure(() -> serviceSuccess("redirect:/non-ifs-competition/setup/" + questionId));
        return view.getSuccess();
    }
    // Builds the view model for the question page, combining the general setup view
    // model with the subsection-specific one.
    private QuestionSetupViewModel setupQuestionViewModel(final CompetitionResource competition, final UserResource loggedInUser, final Optional<Long> questionId, CompetitionSetupSubsection subsection, boolean isEditable, String filename) {
        CompetitionSetupSection section = APPLICATION_FORM;
        CompetitionSetupSubsectionViewModel subsectionViewModel = competitionSetupService.populateCompetitionSubsectionModelAttributes(competition, section,
                subsection, questionId);
        GeneralSetupViewModel generalViewModel = competitionSetupPopulator.populateGeneralModelAttributes(competition, loggedInUser, section);
        CompetitionSetupQuestionResource questionResource = questionId.isPresent() ? questionSetupCompetitionRestService.getByQuestionId(
                (questionId.get())).getSuccess() : null;
        return new QuestionSetupViewModel(generalViewModel, subsectionViewModel, competition.getName(), isEditable, filename, displayAssessmentOptions(competition, questionResource), ifsLoanPartBEnabled);
    }
    // Assessment options are shown for assessed questions on non-KTP competitions,
    // and always for KTP assessment questions.
    private boolean displayAssessmentOptions(CompetitionResource competitionResource, CompetitionSetupQuestionResource questionResource) {
        return (!competitionResource.isKtp() && isAssessedQuestion(questionResource)) || isKtpAssessmentQuestion(questionResource);
    }
    // A question counts as "assessed" when any of its assessment-related fields is set.
    private boolean isAssessedQuestion(CompetitionSetupQuestionResource questionResource) {
        return questionResource != null && (questionResource.getScored() != null || questionResource.getResearchCategoryQuestion() != null || questionResource.getScope() != null);
    }
    private boolean isKtpAssessmentQuestion(CompetitionSetupQuestionResource questionResource) {
        return questionResource != null && questionResource.getType().equals(QuestionSetupType.KTP_ASSESSMENT);
    }
    // Returns the supplied form if present (re-render after validation failure),
    // otherwise loads the subsection's current data into a fresh form.
    private CompetitionSetupForm setupQuestionForm(final CompetitionResource competition, final Optional<Long> questionId, CompetitionSetupSubsection subsection, CompetitionSetupForm competitionSetupForm) {
        CompetitionSetupSection section = APPLICATION_FORM;
        if (competitionSetupForm == null) {
            competitionSetupForm = competitionSetupService.getSubsectionFormData(
                    competition,
                    section,
                    subsection,
                    questionId);
        }
        return competitionSetupForm;
    }
    /**
     * Guard for all edit pages: refuses access (redirect to dashboard) when the
     * APPLICATION_FORM section may not be edited; otherwise marks the given
     * question and/or subsection, plus the whole section, incomplete and runs the
     * success action.
     */
    private String ifUserCanAccessEditPageMarkSectionAsIncomplete(CompetitionResource competition, Supplier<String> successAction,
                                                                  Optional<CompetitionSetupSubsection> subsectionOpt,
                                                                  Optional<Long> questionIdOpt,
                                                                  UserResource loggedInUser) {
        if (CompetitionSetupSection.APPLICATION_FORM.preventEdit(competition, loggedInUser)) {
            log.error(String.format("Competition with id %1$d cannot edit section %2$s: ", competition.getId(), CompetitionSetupSection.APPLICATION_FORM));
            return "redirect:/dashboard";
        } else {
            questionIdOpt.ifPresent(questionId -> questionSetupRestService.markQuestionSetupIncomplete(competition.getId(), CompetitionSetupSection.APPLICATION_FORM, questionId));
            subsectionOpt.ifPresent(competitionSetupSubsection -> competitionSetupRestService.markSubSectionIncomplete(competition.getId(), CompetitionSetupSection.APPLICATION_FORM, competitionSetupSubsection));
            competitionSetupRestService.markSectionIncomplete(competition.getId(), CompetitionSetupSection.APPLICATION_FORM);
            return successAction.get();
        }
    }
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.source.hls.playlist;
import static com.google.common.truth.Truth.assertThat;
import android.net.Uri;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Segment;
import com.google.android.exoplayer2.util.Util;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
/** Test for {@link HlsPlaylistParser}. */
@RunWith(RobolectricTestRunner.class)
public class HlsMediaPlaylistParserTest {
@Test
public void testParseMediaPlaylist() throws Exception {
Uri playlistUri = Uri.parse("https://example.com/test.m3u8");
String playlistString =
"#EXTM3U\n"
+ "#EXT-X-VERSION:3\n"
+ "#EXT-X-PLAYLIST-TYPE:VOD\n"
+ "#EXT-X-START:TIME-OFFSET=-25"
+ "#EXT-X-TARGETDURATION:8\n"
+ "#EXT-X-MEDIA-SEQUENCE:2679\n"
+ "#EXT-X-DISCONTINUITY-SEQUENCE:4\n"
+ "#EXT-X-ALLOW-CACHE:YES\n"
+ "\n"
+ "#EXTINF:7.975,\n"
+ "#EXT-X-BYTERANGE:51370@0\n"
+ "https://priv.example.com/fileSequence2679.ts\n"
+ "\n"
+ "#EXT-X-KEY:METHOD=AES-128,"
+ "URI=\"https://priv.example.com/key.php?r=2680\",IV=0x1566B\n"
+ "#EXTINF:7.975,\n"
+ "#EXT-X-BYTERANGE:51501@2147483648\n"
+ "https://priv.example.com/fileSequence2680.ts\n"
+ "\n"
+ "#EXT-X-KEY:METHOD=NONE\n"
+ "#EXTINF:7.941,\n"
+ "#EXT-X-BYTERANGE:51501\n" // @2147535149
+ "https://priv.example.com/fileSequence2681.ts\n"
+ "\n"
+ "#EXT-X-DISCONTINUITY\n"
+ "#EXT-X-KEY:METHOD=AES-128,URI=\"https://priv.example.com/key.php?r=2682\"\n"
+ "#EXTINF:7.975,\n"
+ "#EXT-X-BYTERANGE:51740\n" // @2147586650
+ "https://priv.example.com/fileSequence2682.ts\n"
+ "\n"
+ "#EXTINF:7.975,\n"
+ "https://priv.example.com/fileSequence2683.ts\n"
+ "#EXT-X-ENDLIST";
InputStream inputStream =
new ByteArrayInputStream(playlistString.getBytes(Charset.forName(C.UTF8_NAME)));
HlsPlaylist playlist = new HlsPlaylistParser().parse(playlistUri, inputStream);
HlsMediaPlaylist mediaPlaylist = (HlsMediaPlaylist) playlist;
assertThat(mediaPlaylist.playlistType).isEqualTo(HlsMediaPlaylist.PLAYLIST_TYPE_VOD);
assertThat(mediaPlaylist.startOffsetUs).isEqualTo(mediaPlaylist.durationUs - 25000000);
assertThat(mediaPlaylist.mediaSequence).isEqualTo(2679);
assertThat(mediaPlaylist.version).isEqualTo(3);
assertThat(mediaPlaylist.hasEndTag).isTrue();
List<Segment> segments = mediaPlaylist.segments;
assertThat(segments).isNotNull();
assertThat(segments).hasSize(5);
Segment segment = segments.get(0);
assertThat(mediaPlaylist.discontinuitySequence + segment.relativeDiscontinuitySequence)
.isEqualTo(4);
assertThat(segment.durationUs).isEqualTo(7975000);
assertThat(segment.fullSegmentEncryptionKeyUri).isNull();
assertThat(segment.encryptionIV).isNull();
assertThat(segment.byterangeLength).isEqualTo(51370);
assertThat(segment.byterangeOffset).isEqualTo(0);
assertThat(segment.url).isEqualTo("https://priv.example.com/fileSequence2679.ts");
segment = segments.get(1);
assertThat(segment.relativeDiscontinuitySequence).isEqualTo(0);
assertThat(segment.durationUs).isEqualTo(7975000);
assertThat(segment.fullSegmentEncryptionKeyUri)
.isEqualTo("https://priv.example.com/key.php?r=2680");
assertThat(segment.encryptionIV).isEqualTo("0x1566B");
assertThat(segment.byterangeLength).isEqualTo(51501);
assertThat(segment.byterangeOffset).isEqualTo(2147483648L);
assertThat(segment.url).isEqualTo("https://priv.example.com/fileSequence2680.ts");
segment = segments.get(2);
assertThat(segment.relativeDiscontinuitySequence).isEqualTo(0);
assertThat(segment.durationUs).isEqualTo(7941000);
assertThat(segment.fullSegmentEncryptionKeyUri).isNull();
assertThat(segment.encryptionIV).isEqualTo(null);
assertThat(segment.byterangeLength).isEqualTo(51501);
assertThat(segment.byterangeOffset).isEqualTo(2147535149L);
assertThat(segment.url).isEqualTo("https://priv.example.com/fileSequence2681.ts");
segment = segments.get(3);
assertThat(segment.relativeDiscontinuitySequence).isEqualTo(1);
assertThat(segment.durationUs).isEqualTo(7975000);
assertThat(segment.fullSegmentEncryptionKeyUri)
.isEqualTo("https://priv.example.com/key.php?r=2682");
// 0xA7A == 2682.
assertThat(segment.encryptionIV).isNotNull();
assertThat(Util.toUpperInvariant(segment.encryptionIV)).isEqualTo("A7A");
assertThat(segment.byterangeLength).isEqualTo(51740);
assertThat(segment.byterangeOffset).isEqualTo(2147586650L);
assertThat(segment.url).isEqualTo("https://priv.example.com/fileSequence2682.ts");
segment = segments.get(4);
assertThat(segment.relativeDiscontinuitySequence).isEqualTo(1);
assertThat(segment.durationUs).isEqualTo(7975000);
assertThat(segment.fullSegmentEncryptionKeyUri)
.isEqualTo("https://priv.example.com/key.php?r=2682");
// 0xA7B == 2683.
assertThat(segment.encryptionIV).isNotNull();
assertThat(Util.toUpperInvariant(segment.encryptionIV)).isEqualTo("A7B");
assertThat(segment.byterangeLength).isEqualTo(C.LENGTH_UNSET);
assertThat(segment.byterangeOffset).isEqualTo(0);
assertThat(segment.url).isEqualTo("https://priv.example.com/fileSequence2683.ts");
}
@Test
public void testParseSampleAesMethod() throws Exception {
Uri playlistUri = Uri.parse("https://example.com/test.m3u8");
String playlistString =
"#EXTM3U\n"
+ "#EXT-X-MEDIA-SEQUENCE:0\n"
+ "#EXTINF:8,\n"
+ "https://priv.example.com/1.ts\n"
+ "\n"
+ "#EXT-X-KEY:METHOD=SAMPLE-AES,URI="
+ "\"data:text/plain;base64,VGhpcyBpcyBhbiBlYXN0ZXIgZWdn\","
+ "IV=0x9358382AEB449EE23C3D809DA0B9CCD3,KEYFORMATVERSIONS=\"1\","
+ "KEYFORMAT=\"com.widevine\",IV=0x1566B\n"
+ "#EXTINF:8,\n"
+ "https://priv.example.com/2.ts\n"
+ "#EXT-X-ENDLIST\n";
InputStream inputStream =
new ByteArrayInputStream(playlistString.getBytes(Charset.forName(C.UTF8_NAME)));
HlsMediaPlaylist playlist =
(HlsMediaPlaylist) new HlsPlaylistParser().parse(playlistUri, inputStream);
assertThat(playlist.drmInitData.schemeType).isEqualTo(C.CENC_TYPE_cbcs);
assertThat(playlist.drmInitData.get(0).matches(C.WIDEVINE_UUID)).isTrue();
}
@Test
public void testParseSampleAesCencMethod() throws Exception {
Uri playlistUri = Uri.parse("https://example.com/test.m3u8");
String playlistString =
"#EXTM3U\n"
+ "#EXT-X-MEDIA-SEQUENCE:0\n"
+ "#EXTINF:8,\n"
+ "https://priv.example.com/1.ts\n"
+ "\n"
+ "#EXT-X-KEY:URI=\"data:text/plain;base64,VGhpcyBpcyBhbiBlYXN0ZXIgZWdn\","
+ "IV=0x9358382AEB449EE23C3D809DA0B9CCD3,KEYFORMATVERSIONS=\"1\","
+ "KEYFORMAT=\"urn:uuid:edef8ba9-79d6-4ace-a3c8-27dcd51d21ed\","
+ "IV=0x1566B,METHOD=SAMPLE-AES-CENC \n"
+ "#EXTINF:8,\n"
+ "https://priv.example.com/2.ts\n"
+ "#EXT-X-ENDLIST\n";
InputStream inputStream =
new ByteArrayInputStream(playlistString.getBytes(Charset.forName(C.UTF8_NAME)));
HlsMediaPlaylist playlist =
(HlsMediaPlaylist) new HlsPlaylistParser().parse(playlistUri, inputStream);
assertThat(playlist.drmInitData.schemeType).isEqualTo(C.CENC_TYPE_cenc);
assertThat(playlist.drmInitData.get(0).matches(C.WIDEVINE_UUID)).isTrue();
}
@Test
public void testParseSampleAesCtrMethod() throws Exception {
Uri playlistUri = Uri.parse("https://example.com/test.m3u8");
String playlistString =
"#EXTM3U\n"
+ "#EXT-X-MEDIA-SEQUENCE:0\n"
+ "#EXTINF:8,\n"
+ "https://priv.example.com/1.ts\n"
+ "\n"
+ "#EXT-X-KEY:METHOD=SAMPLE-AES-CTR,URI="
+ "\"data:text/plain;base64,VGhpcyBpcyBhbiBlYXN0ZXIgZWdn\","
+ "IV=0x9358382AEB449EE23C3D809DA0B9CCD3,KEYFORMATVERSIONS=\"1\","
+ "KEYFORMAT=\"com.widevine\",IV=0x1566B\n"
+ "#EXTINF:8,\n"
+ "https://priv.example.com/2.ts\n"
+ "#EXT-X-ENDLIST\n";
InputStream inputStream =
new ByteArrayInputStream(playlistString.getBytes(Charset.forName(C.UTF8_NAME)));
HlsMediaPlaylist playlist =
(HlsMediaPlaylist) new HlsPlaylistParser().parse(playlistUri, inputStream);
assertThat(playlist.drmInitData.schemeType).isEqualTo(C.CENC_TYPE_cenc);
assertThat(playlist.drmInitData.get(0).matches(C.WIDEVINE_UUID)).isTrue();
}
@Test
public void testGapTag() throws IOException {
Uri playlistUri = Uri.parse("https://example.com/test2.m3u8");
String playlistString =
"#EXTM3U\n"
+ "#EXT-X-VERSION:3\n"
+ "#EXT-X-TARGETDURATION:5\n"
+ "#EXT-X-PLAYLIST-TYPE:VOD\n"
+ "#EXT-X-MEDIA-SEQUENCE:0\n"
+ "#EXT-X-PROGRAM-DATE-TIME:2016-09-22T02:00:01+00:00\n"
+ "#EXT-X-KEY:METHOD=AES-128,URI=\"https://example.com/key?value=something\"\n"
+ "#EXTINF:5.005,\n"
+ "02/00/27.ts\n"
+ "#EXTINF:5.005,\n"
+ "02/00/32.ts\n"
+ "#EXT-X-KEY:METHOD=NONE\n"
+ "#EXTINF:5.005,\n"
+ "#EXT-X-GAP \n"
+ "../dummy.ts\n"
+ "#EXT-X-KEY:METHOD=AES-128,URI=\"https://key-service.bamgrid.com/1.0/key?"
+ "hex-value=9FB8989D15EEAAF8B21B860D7ED3072A\",IV=0x410C8AC18AA42EFA18B5155484F5FC34\n"
+ "#EXTINF:5.005,\n"
+ "02/00/42.ts\n"
+ "#EXTINF:5.005,\n"
+ "02/00/47.ts\n";
InputStream inputStream =
new ByteArrayInputStream(playlistString.getBytes(Charset.forName(C.UTF8_NAME)));
HlsMediaPlaylist playlist =
(HlsMediaPlaylist) new HlsPlaylistParser().parse(playlistUri, inputStream);
assertThat(playlist.hasEndTag).isFalse();
assertThat(playlist.segments.get(1).hasGapTag).isFalse();
assertThat(playlist.segments.get(2).hasGapTag).isTrue();
assertThat(playlist.segments.get(3).hasGapTag).isFalse();
}
@Test
public void testMapTag() throws IOException {
Uri playlistUri = Uri.parse("https://example.com/test3.m3u8");
String playlistString =
"#EXTM3U\n"
+ "#EXT-X-VERSION:3\n"
+ "#EXT-X-TARGETDURATION:5\n"
+ "#EXT-X-MEDIA-SEQUENCE:10\n"
+ "#EXTINF:5.005,\n"
+ "02/00/27.ts\n"
+ "#EXT-X-MAP:URI=\"init1.ts\""
+ "#EXTINF:5.005,\n"
+ "02/00/32.ts\n"
+ "#EXTINF:5.005,\n"
+ "02/00/42.ts\n"
+ "#EXT-X-MAP:URI=\"init2.ts\""
+ "#EXTINF:5.005,\n"
+ "02/00/47.ts\n";
InputStream inputStream =
new ByteArrayInputStream(playlistString.getBytes(Charset.forName(C.UTF8_NAME)));
HlsMediaPlaylist playlist =
(HlsMediaPlaylist) new HlsPlaylistParser().parse(playlistUri, inputStream);
List<Segment> segments = playlist.segments;
assertThat(segments.get(0).initializationSegment).isNull();
assertThat(segments.get(1).initializationSegment)
.isSameAs(segments.get(2).initializationSegment);
assertThat(segments.get(1).initializationSegment.url).isEqualTo("init1.ts");
assertThat(segments.get(3).initializationSegment.url).isEqualTo("init2.ts");
}
}
| |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.yms.app.yob;
import org.junit.Test;
import org.onosproject.yang.gen.v1.urn.topo.rev20140101.YmsTopologyOpParam;
import org.onosproject.yang.gen.v1.urn.topo.rev20140101.ymstopology.DefaultNode;
import org.onosproject.yang.gen.v1.urn.topo.rev20140101.ymstopology.Node;
import org.onosproject.yang.gen.v1.urn.topo.rev20140101.ymstopology.node.choice1.Case1a;
import org.onosproject.yang.gen.v1.urn.topo.rev20140101.ymstopology.node.choice1.Case1b;
import org.onosproject.yang.gen.v1.urn.topo.rev20140101.ymstopology.node.choice1.DefaultCase1a;
import org.onosproject.yang.gen.v1.urn.topo.rev20140101.ymstopology.node.choice1.DefaultCase1b;
import org.onosproject.yang.gen.v1.urn.topo.rev20140101.ymstopology.node.choice1.case1b.choice1b.Case1Bi;
import org.onosproject.yang.gen.v1.urn.topo.rev20140101.ymstopology.node.choice1.case1b.choice1b.DefaultCase1Bi;
import org.onosproject.yms.app.ydt.YangRequestWorkBench;
import org.onosproject.yms.app.ydt.YdtExtendedContext;
import org.onosproject.yms.ydt.YdtContext;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.onosproject.yms.app.yob.YobTestUtils.LEAF_1A1;
import static org.onosproject.yms.app.yob.YobTestUtils.LEAF_1A2;
import static org.onosproject.yms.app.yob.YobTestUtils.LEAF_1BIA;
import static org.onosproject.yms.app.yob.YobTestUtils.LEAF_1BIB;
import static org.onosproject.yms.app.yob.YobTestUtils.NODE;
import static org.onosproject.yms.app.yob.YobTestUtils.ROOT_DATA_RESOURCE;
import static org.onosproject.yms.app.yob.YobTestUtils.STR_LEAF_VALUE;
import static org.onosproject.yms.app.yob.YobTestUtils.TOPOLOGY;
import static org.onosproject.yms.ydt.YdtContextOperationType.NONE;
/**
* Test the YANG object building for the YANG data tree based on the non
* schema choice and case nodes.
*/
public class YobChoiceTest {

    private YobTestUtils utils = YobTestUtils.instance();

    /**
     * Verifies that a leaf belonging to a case node is built into the
     * corresponding case object (DefaultCase1a) of the parent node's choice.
     */
    @Test
    public void caseInChoice() throws IOException {
        YangRequestWorkBench ydtBuilder = new YangRequestWorkBench(
                ROOT_DATA_RESOURCE, null, null, utils.schemaRegistry(), true);
        ydtBuilder.addChild(TOPOLOGY, null, NONE);
        ydtBuilder.addChild(NODE, null);
        ydtBuilder.addLeaf(LEAF_1A1, null, STR_LEAF_VALUE);
        YdtContext logicalRoot = ydtBuilder.getRootNode();
        YdtExtendedContext appRoot =
                (YdtExtendedContext) logicalRoot.getFirstChild();
        DefaultYobBuilder yobBuilder = new DefaultYobBuilder();
        Object yangObject = yobBuilder.getYangObject(appRoot,
                                                     utils.schemaRegistry());
        assertNotNull(yangObject);
        assertEquals("YANG object created is not topology object",
                     YmsTopologyOpParam.class, yangObject.getClass());
        YmsTopologyOpParam topology = (YmsTopologyOpParam) yangObject;
        assertNotNull("Failed to build the object", topology.node());
        assertEquals("Single node entry is expected", 1,
                     topology.node().size());
        assertEquals("Node type is not DefaultNode", DefaultNode.class,
                     topology.node().get(0).getClass());
        Node node = topology.node().get(0);
        assertNotNull("choice1 is not set in node", node.choice1());
        // Fixed truncated assertion message (was "choice 1 type is not ").
        assertEquals("choice 1 type is not DefaultCase1a", DefaultCase1a.class,
                     node.choice1().getClass());
        Case1a case1a = (Case1a) node.choice1();
        assertNotNull("leaf1a1 is not set in case", case1a.leaf1A1());
        assertEquals("leaf1a1 type is not correct", String.class,
                     case1a.leaf1A1().getClass());
        assertEquals("leaf1a1 value is not correct", STR_LEAF_VALUE,
                     case1a.leaf1A1());
    }

    /**
     * Verifies that multiple leaves under the same case node are all built
     * into the same case object (DefaultCase1a).
     */
    @Test
    public void caseWithMultiAttribute() throws IOException {
        YangRequestWorkBench ydtBuilder = new YangRequestWorkBench(
                ROOT_DATA_RESOURCE, null, null, utils.schemaRegistry(), true);
        ydtBuilder.addChild(TOPOLOGY, null, NONE);
        ydtBuilder.addChild(NODE, null);
        ydtBuilder.addLeaf(LEAF_1A1, null, STR_LEAF_VALUE);
        ydtBuilder.traverseToParent();
        ydtBuilder.addLeaf(LEAF_1A2, null, STR_LEAF_VALUE);
        YdtContext logicalRoot = ydtBuilder.getRootNode();
        YdtExtendedContext appRoot =
                (YdtExtendedContext) logicalRoot.getFirstChild();
        DefaultYobBuilder yobBuilder = new DefaultYobBuilder();
        Object yangObject = yobBuilder.getYangObject(appRoot,
                                                     utils.schemaRegistry());
        assertNotNull(yangObject);
        assertEquals("YANG object created is not topology object",
                     YmsTopologyOpParam.class, yangObject.getClass());
        YmsTopologyOpParam topology = (YmsTopologyOpParam) yangObject;
        assertNotNull("Failed to build the object", topology.node());
        assertEquals("Single node entry is expected", 1,
                     topology.node().size());
        assertEquals("Node type is not DefaultNode", DefaultNode.class,
                     topology.node().get(0).getClass());
        Node node = topology.node().get(0);
        assertNotNull("choice1 is not set in node", node.choice1());
        // Fixed truncated assertion message (was "choice 1 type is not ").
        assertEquals("choice 1 type is not DefaultCase1a", DefaultCase1a.class,
                     node.choice1().getClass());
        Case1a case1a = (Case1a) node.choice1();
        assertNotNull("leaf1a1 is not set in case", case1a.leaf1A1());
        assertEquals("leaf1a1 type is not correct", String.class,
                     case1a.leaf1A1().getClass());
        assertEquals("leaf1a1 value is not correct", STR_LEAF_VALUE,
                     case1a.leaf1A1());
        assertNotNull("leaf1a2 is not set in case", case1a.leaf1A2());
        assertEquals("leaf1a2 type is not correct", String.class,
                     case1a.leaf1A2().getClass());
        // Fixed copy-paste bug: this previously re-asserted leaf1A1's value
        // instead of verifying leaf1A2.
        assertEquals("leaf1a2 value is not correct", STR_LEAF_VALUE,
                     case1a.leaf1A2());
    }

    /**
     * Verifies building of a leaf under a choice nested inside a case of an
     * outer choice (choice1 -> case1b -> choice1b -> case1bi).
     */
    @Test
    public void recursiveChoice() throws IOException {
        YangRequestWorkBench ydtBuilder = new YangRequestWorkBench(
                ROOT_DATA_RESOURCE, null, null, utils.schemaRegistry(), true);
        ydtBuilder.addChild(TOPOLOGY, null, NONE);
        ydtBuilder.addChild(NODE, null);
        ydtBuilder.addLeaf(LEAF_1BIA, null, STR_LEAF_VALUE);
        YdtContext logicalRoot = ydtBuilder.getRootNode();
        YdtExtendedContext appRoot =
                (YdtExtendedContext) logicalRoot.getFirstChild();
        DefaultYobBuilder yobBuilder = new DefaultYobBuilder();
        Object yangObject = yobBuilder.getYangObject(appRoot,
                                                     utils.schemaRegistry());
        assertNotNull(yangObject);
        assertEquals("YANG object created is not topology object",
                     YmsTopologyOpParam.class, yangObject.getClass());
        YmsTopologyOpParam topology = (YmsTopologyOpParam) yangObject;
        assertNotNull("Failed to build the object", topology.node());
        assertEquals("Single node entry is expected", 1,
                     topology.node().size());
        assertEquals("Node type is not DefaultNode", DefaultNode.class,
                     topology.node().get(0).getClass());
        Node node = topology.node().get(0);
        assertNotNull("Choice 1 is not set in Node", node.choice1());
        assertEquals("Choice 1 is not of type DefaultCase1b",
                     DefaultCase1b.class, node.choice1().getClass());
        Case1b case1b = (Case1b) node.choice1();
        assertNotNull("Case1b does not have child choice1b ",
                      case1b.choice1b());
        assertEquals("choice1b is not of type DefaultCase1Bi",
                     DefaultCase1Bi.class, case1b.choice1b().getClass());
        Case1Bi case1Bi = (Case1Bi) case1b.choice1b();
        assertNotNull("leaf1bia is not set", case1Bi.leaf1Bia());
        assertEquals("leaf1bia type is not string", String.class,
                     case1Bi.leaf1Bia().getClass());
        assertEquals("leaf1bia value is wrong", STR_LEAF_VALUE,
                     case1Bi.leaf1Bia());
    }

    /**
     * Verifies that multiple leaves under a recursively nested case are all
     * built into the same innermost case object (DefaultCase1Bi).
     */
    @Test
    public void recursiveChoiceWithMultipleAttribute() throws IOException {
        YangRequestWorkBench ydtBuilder = new YangRequestWorkBench(
                ROOT_DATA_RESOURCE, null, null, utils.schemaRegistry(), true);
        ydtBuilder.addChild(TOPOLOGY, null, NONE);
        ydtBuilder.addChild(NODE, null);
        ydtBuilder.addLeaf(LEAF_1BIA, null, STR_LEAF_VALUE);
        ydtBuilder.traverseToParent();
        ydtBuilder.addLeaf(LEAF_1BIB, null, STR_LEAF_VALUE);
        YdtContext logicalRoot = ydtBuilder.getRootNode();
        YdtExtendedContext appRoot =
                (YdtExtendedContext) logicalRoot.getFirstChild();
        DefaultYobBuilder yobBuilder = new DefaultYobBuilder();
        Object yangObject = yobBuilder.getYangObject(appRoot,
                                                     utils.schemaRegistry());
        assertNotNull(yangObject);
        assertEquals("YANG object created is not topology object",
                     YmsTopologyOpParam.class, yangObject.getClass());
        YmsTopologyOpParam topology = (YmsTopologyOpParam) yangObject;
        assertNotNull("Failed to build the object", topology.node());
        assertEquals("Single node entry is expected", 1,
                     topology.node().size());
        assertEquals("Node type is not DefaultNode", DefaultNode.class,
                     topology.node().get(0).getClass());
        Node node = topology.node().get(0);
        assertNotNull("Choice 1 is not set in Node", node.choice1());
        assertEquals("Choice 1 is not of type DefaultCase1b",
                     DefaultCase1b.class,
                     node.choice1().getClass());
        Case1b case1b = (Case1b) node.choice1();
        assertNotNull("Case1b does not have child choice1b ",
                      case1b.choice1b());
        assertEquals("choice1b is not of type DefaultCase1Bi",
                     DefaultCase1Bi.class,
                     case1b.choice1b().getClass());
        Case1Bi case1Bi = (Case1Bi) case1b.choice1b();
        assertNotNull("leaf1bia is not set", case1Bi.leaf1Bia());
        assertEquals("leaf1bia type is not string", String.class,
                     case1Bi.leaf1Bia().getClass());
        assertEquals("leaf1bia value is wrong", STR_LEAF_VALUE,
                     case1Bi.leaf1Bia());
        assertNotNull("leaf1bib is not set", case1Bi.leaf1Bib());
        // Fixed copy-paste assertion messages: these checks target leaf1bib,
        // but the messages previously said "leaf1bia".
        assertEquals("leaf1bib type is not string", String.class,
                     case1Bi.leaf1Bib().getClass());
        assertEquals("leaf1bib value is wrong", STR_LEAF_VALUE,
                     case1Bi.leaf1Bib());
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.indices.analysis;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.NamedRegistry;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalyzerProvider;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.HunspellTokenFilterFactory;
import org.elasticsearch.index.analysis.KeywordAnalyzerProvider;
import org.elasticsearch.index.analysis.LowercaseNormalizerProvider;
import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory;
import org.elasticsearch.index.analysis.PreConfiguredCharFilter;
import org.elasticsearch.index.analysis.PreConfiguredTokenFilter;
import org.elasticsearch.index.analysis.PreConfiguredTokenizer;
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
import org.elasticsearch.index.analysis.SimpleAnalyzerProvider;
import org.elasticsearch.index.analysis.StandardAnalyzerProvider;
import org.elasticsearch.index.analysis.StandardTokenizerFactory;
import org.elasticsearch.index.analysis.StopAnalyzerProvider;
import org.elasticsearch.index.analysis.StopTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.index.analysis.WhitespaceAnalyzerProvider;
import org.elasticsearch.plugins.AnalysisPlugin;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings;
/**
* Sets up {@link AnalysisRegistry}.
*/
public final class AnalysisModule {
    static {
        // Minimal synthetic index settings used by AnalysisProvider#get(Environment, String)
        // when a component is instantiated without a real index context.
        Settings build = Settings.builder()
            .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetadata metadata = IndexMetadata.builder("_na_").settings(build).build();
        NA_INDEX_SETTINGS = new IndexSettings(metadata, Settings.EMPTY);
    }

    private static final IndexSettings NA_INDEX_SETTINGS;

    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(AnalysisModule.class);

    private final HunspellService hunspellService;
    private final AnalysisRegistry analysisRegistry;

    /**
     * Builds the analysis registry by collecting the built-in analysis components and
     * everything contributed by the given plugins.
     *
     * @param environment the node environment used to load dictionaries and settings
     * @param plugins     analysis plugins contributing components
     * @throws IOException if loading hunspell dictionaries fails
     */
    public AnalysisModule(Environment environment, List<AnalysisPlugin> plugins) throws IOException {
        NamedRegistry<AnalysisProvider<CharFilterFactory>> charFilters = setupCharFilters(plugins);
        NamedRegistry<org.apache.lucene.analysis.hunspell.Dictionary> hunspellDictionaries = setupHunspellDictionaries(plugins);
        hunspellService = new HunspellService(environment.settings(), environment, hunspellDictionaries.getRegistry());
        NamedRegistry<AnalysisProvider<TokenFilterFactory>> tokenFilters = setupTokenFilters(plugins, hunspellService);
        NamedRegistry<AnalysisProvider<TokenizerFactory>> tokenizers = setupTokenizers(plugins);
        NamedRegistry<AnalysisProvider<AnalyzerProvider<?>>> analyzers = setupAnalyzers(plugins);
        NamedRegistry<AnalysisProvider<AnalyzerProvider<?>>> normalizers = setupNormalizers(plugins);

        Map<String, PreConfiguredCharFilter> preConfiguredCharFilters = setupPreConfiguredCharFilters(plugins);
        Map<String, PreConfiguredTokenFilter> preConfiguredTokenFilters = setupPreConfiguredTokenFilters(plugins);
        Map<String, PreConfiguredTokenizer> preConfiguredTokenizers = setupPreConfiguredTokenizers(plugins);
        Map<String, PreBuiltAnalyzerProviderFactory> preConfiguredAnalyzers = setupPreBuiltAnalyzerProviderFactories(plugins);

        analysisRegistry = new AnalysisRegistry(
            environment,
            charFilters.getRegistry(),
            tokenFilters.getRegistry(),
            tokenizers.getRegistry(),
            analyzers.getRegistry(),
            normalizers.getRegistry(),
            preConfiguredCharFilters,
            preConfiguredTokenFilters,
            preConfiguredTokenizers,
            preConfiguredAnalyzers
        );
    }

    HunspellService getHunspellService() {
        return hunspellService;
    }

    public AnalysisRegistry getAnalysisRegistry() {
        return analysisRegistry;
    }

    /** Collects char filter providers contributed by plugins (none are built in). */
    private NamedRegistry<AnalysisProvider<CharFilterFactory>> setupCharFilters(List<AnalysisPlugin> plugins) {
        NamedRegistry<AnalysisProvider<CharFilterFactory>> charFilters = new NamedRegistry<>("char_filter");
        charFilters.extractAndRegister(plugins, AnalysisPlugin::getCharFilters);
        return charFilters;
    }

    /** Collects hunspell dictionaries contributed by plugins. */
    public NamedRegistry<org.apache.lucene.analysis.hunspell.Dictionary> setupHunspellDictionaries(List<AnalysisPlugin> plugins) {
        NamedRegistry<org.apache.lucene.analysis.hunspell.Dictionary> hunspellDictionaries = new NamedRegistry<>("dictionary");
        hunspellDictionaries.extractAndRegister(plugins, AnalysisPlugin::getHunspellDictionaries);
        return hunspellDictionaries;
    }

    /**
     * Registers the built-in token filters ("stop", the bwc-only "standard" shim,
     * "shingle", "hunspell") plus everything contributed by plugins.
     */
    private NamedRegistry<AnalysisProvider<TokenFilterFactory>> setupTokenFilters(
        List<AnalysisPlugin> plugins,
        HunspellService hunspellService
    ) {
        NamedRegistry<AnalysisProvider<TokenFilterFactory>> tokenFilters = new NamedRegistry<>("token_filter");
        tokenFilters.register("stop", StopTokenFilterFactory::new);
        // Add "standard" for old indices (bwc)
        tokenFilters.register("standard", new AnalysisProvider<TokenFilterFactory>() {
            @Override
            public TokenFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
                // Pre-7.0 indices still reference this filter; warn and return a no-op
                // factory. Newer indices must not use it at all.
                if (indexSettings.getIndexVersionCreated().before(Version.V_7_0_0)) {
                    deprecationLogger.warn(
                        DeprecationCategory.ANALYSIS,
                        "standard_deprecation",
                        "The [standard] token filter name is deprecated and will be removed in a future version."
                    );
                } else {
                    throw new IllegalArgumentException("The [standard] token filter has been removed.");
                }
                return new AbstractTokenFilterFactory(indexSettings, name, settings) {
                    @Override
                    public TokenStream create(TokenStream tokenStream) {
                        return tokenStream;
                    }
                };
            }

            @Override
            public boolean requiresAnalysisSettings() {
                return false;
            }
        });
        tokenFilters.register("shingle", ShingleTokenFilterFactory::new);
        tokenFilters.register(
            "hunspell",
            requiresAnalysisSettings(
                (indexSettings, env, name, settings) -> new HunspellTokenFilterFactory(indexSettings, name, settings, hunspellService)
            )
        );
        tokenFilters.extractAndRegister(plugins, AnalysisPlugin::getTokenFilters);
        return tokenFilters;
    }

    /** Collects pre-built analyzer provider factories contributed by plugins. */
    static Map<String, PreBuiltAnalyzerProviderFactory> setupPreBuiltAnalyzerProviderFactories(List<AnalysisPlugin> plugins) {
        // Renamed from the copy-pasted "preConfiguredCharFilters": this registry holds
        // pre-built analyzer factories, not char filters.
        NamedRegistry<PreBuiltAnalyzerProviderFactory> preBuiltAnalyzerProviders = new NamedRegistry<>("pre-built analyzer");
        for (AnalysisPlugin plugin : plugins) {
            for (PreBuiltAnalyzerProviderFactory factory : plugin.getPreBuiltAnalyzerProviderFactories()) {
                preBuiltAnalyzerProviders.register(factory.getName(), factory);
            }
        }
        return unmodifiableMap(preBuiltAnalyzerProviders.getRegistry());
    }

    /** Collects pre-configured char filters contributed by plugins. */
    static Map<String, PreConfiguredCharFilter> setupPreConfiguredCharFilters(List<AnalysisPlugin> plugins) {
        NamedRegistry<PreConfiguredCharFilter> preConfiguredCharFilters = new NamedRegistry<>("pre-configured char_filter");

        // No char filter are available in lucene-core so none are built in to Elasticsearch core

        for (AnalysisPlugin plugin : plugins) {
            for (PreConfiguredCharFilter filter : plugin.getPreConfiguredCharFilters()) {
                preConfiguredCharFilters.register(filter.getName(), filter);
            }
        }
        return unmodifiableMap(preConfiguredCharFilters.getRegistry());
    }

    /**
     * Registers pre-configured token filters: "lowercase" from lucene-core, the
     * bwc-only "standard" shim, and anything contributed by plugins.
     */
    static Map<String, PreConfiguredTokenFilter> setupPreConfiguredTokenFilters(List<AnalysisPlugin> plugins) {
        NamedRegistry<PreConfiguredTokenFilter> preConfiguredTokenFilters = new NamedRegistry<>("pre-configured token_filter");

        // Add filters available in lucene-core
        preConfiguredTokenFilters.register("lowercase", PreConfiguredTokenFilter.singleton("lowercase", true, LowerCaseFilter::new));
        // Add "standard" for old indices (bwc)
        preConfiguredTokenFilters.register(
            "standard",
            PreConfiguredTokenFilter.elasticsearchVersion("standard", true, (reader, version) -> {
                // This was originally removed in 7_0_0 but due to a cacheing bug it was still possible
                // in certain circumstances to create a new index referencing the standard token filter
                // until version 7_5_2
                if (version.before(Version.V_7_6_0)) {
                    deprecationLogger.warn(
                        DeprecationCategory.ANALYSIS,
                        "standard_deprecation",
                        "The [standard] token filter is deprecated and will be removed in a future version."
                    );
                } else {
                    throw new IllegalArgumentException("The [standard] token filter has been removed.");
                }
                return reader;
            })
        );
        /* Note that "stop" is available in lucene-core but it's pre-built
         * version uses a set of English stop words that are in
         * lucene-analyzers-common so "stop" is defined in the analysis-common
         * module. */

        for (AnalysisPlugin plugin : plugins) {
            for (PreConfiguredTokenFilter filter : plugin.getPreConfiguredTokenFilters()) {
                preConfiguredTokenFilters.register(filter.getName(), filter);
            }
        }
        return unmodifiableMap(preConfiguredTokenFilters.getRegistry());
    }

    /**
     * Registers pre-configured tokenizers from the legacy {@code PreBuiltTokenizers}
     * enum shim plus anything contributed by plugins.
     */
    static Map<String, PreConfiguredTokenizer> setupPreConfiguredTokenizers(List<AnalysisPlugin> plugins) {
        NamedRegistry<PreConfiguredTokenizer> preConfiguredTokenizers = new NamedRegistry<>("pre-configured tokenizer");

        // Temporary shim to register old style pre-configured tokenizers
        for (PreBuiltTokenizers tokenizer : PreBuiltTokenizers.values()) {
            String name = tokenizer.name().toLowerCase(Locale.ROOT);
            PreConfiguredTokenizer preConfigured;
            switch (tokenizer.getCachingStrategy()) {
                case ONE:
                    preConfigured = PreConfiguredTokenizer.singleton(name, () -> tokenizer.create(Version.CURRENT));
                    break;
                default:
                    throw new UnsupportedOperationException("Caching strategy unsupported by temporary shim [" + tokenizer + "]");
            }
            preConfiguredTokenizers.register(name, preConfigured);
        }
        for (AnalysisPlugin plugin : plugins) {
            for (PreConfiguredTokenizer tokenizer : plugin.getPreConfiguredTokenizers()) {
                preConfiguredTokenizers.register(tokenizer.getName(), tokenizer);
            }
        }
        return unmodifiableMap(preConfiguredTokenizers.getRegistry());
    }

    /** Registers the built-in "standard" tokenizer plus plugin-contributed tokenizers. */
    private NamedRegistry<AnalysisProvider<TokenizerFactory>> setupTokenizers(List<AnalysisPlugin> plugins) {
        NamedRegistry<AnalysisProvider<TokenizerFactory>> tokenizers = new NamedRegistry<>("tokenizer");
        tokenizers.register("standard", StandardTokenizerFactory::new);
        tokenizers.extractAndRegister(plugins, AnalysisPlugin::getTokenizers);
        return tokenizers;
    }

    /** Registers the built-in analyzers plus plugin-contributed analyzers. */
    private NamedRegistry<AnalysisProvider<AnalyzerProvider<?>>> setupAnalyzers(List<AnalysisPlugin> plugins) {
        NamedRegistry<AnalysisProvider<AnalyzerProvider<?>>> analyzers = new NamedRegistry<>("analyzer");
        analyzers.register("default", StandardAnalyzerProvider::new);
        analyzers.register("standard", StandardAnalyzerProvider::new);
        analyzers.register("simple", SimpleAnalyzerProvider::new);
        analyzers.register("stop", StopAnalyzerProvider::new);
        analyzers.register("whitespace", WhitespaceAnalyzerProvider::new);
        analyzers.register("keyword", KeywordAnalyzerProvider::new);
        analyzers.extractAndRegister(plugins, AnalysisPlugin::getAnalyzers);
        return analyzers;
    }

    /** Registers the built-in "lowercase" normalizer; normalizers are not pluggable. */
    private NamedRegistry<AnalysisProvider<AnalyzerProvider<?>>> setupNormalizers(List<AnalysisPlugin> plugins) {
        NamedRegistry<AnalysisProvider<AnalyzerProvider<?>>> normalizers = new NamedRegistry<>("normalizer");
        normalizers.register("lowercase", LowercaseNormalizerProvider::new);
        // TODO: pluggability?
        return normalizers;
    }

    /**
     * The basic factory interface for analysis components.
     */
    public interface AnalysisProvider<T> {

        /**
         * Creates a new analysis provider.
         *
         * @param indexSettings the index settings for the index this provider is created for
         * @param environment   the nodes environment to load resources from persistent storage
         * @param name          the name of the analysis component
         * @param settings      the component specific settings without context prefixes
         * @return a new provider instance
         * @throws IOException if an {@link IOException} occurs
         */
        T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException;

        /**
         * Creates a new global scope analysis provider without index specific settings not settings for the provider itself.
         * This can be used to get a default instance of an analysis factory without binding to an index.
         *
         * @param environment the nodes environment to load resources from persistent storage
         * @param name        the name of the analysis component
         * @return a new provider instance
         * @throws IOException if an {@link IOException} occurs
         * @throws IllegalArgumentException if the provider requires analysis settings ie. if {@link #requiresAnalysisSettings()} returns
         *                                  <code>true</code>
         */
        default T get(Environment environment, String name) throws IOException {
            if (requiresAnalysisSettings()) {
                throw new IllegalArgumentException("Analysis settings required - can't instantiate analysis factory");
            }
            return get(NA_INDEX_SETTINGS, environment, name, NA_INDEX_SETTINGS.getSettings());
        }

        /**
         * If <code>true</code> the analysis component created by this provider requires certain settings to be instantiated.
         * it can't be created with defaults. The default is <code>false</code>.
         */
        default boolean requiresAnalysisSettings() {
            return false;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.replication;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.Arrays;
/**
* Base class for write action responses.
*/
public class ReplicationResponse extends ActionResponse {
public static final ReplicationResponse.ShardInfo.Failure[] EMPTY = new ReplicationResponse.ShardInfo.Failure[0];
private ShardInfo shardInfo;
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
shardInfo = ReplicationResponse.ShardInfo.readShardInfo(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
shardInfo.writeTo(out);
}
public ShardInfo getShardInfo() {
return shardInfo;
}
public void setShardInfo(ShardInfo shardInfo) {
this.shardInfo = shardInfo;
}
public static class ShardInfo implements Streamable, ToXContent {
private int total;
private int successful;
private Failure[] failures = EMPTY;
public ShardInfo() {
}
public ShardInfo(int total, int successful, Failure... failures) {
assert total >= 0 && successful >= 0;
this.total = total;
this.successful = successful;
this.failures = failures;
}
/**
* @return the total number of shards the write should go to (replicas and primaries). This includes relocating shards, so this
* number can be higher than the number of shards.
*/
public int getTotal() {
return total;
}
/**
* @return the total number of shards the write succeeded on (replicas and primaries). This includes relocating shards, so this
* number can be higher than the number of shards.
*/
public int getSuccessful() {
return successful;
}
/**
* @return The total number of replication failures.
*/
public int getFailed() {
return failures.length;
}
/**
* @return The replication failures that have been captured in the case writes have failed on replica shards.
*/
public Failure[] getFailures() {
return failures;
}
public RestStatus status() {
RestStatus status = RestStatus.OK;
for (Failure failure : failures) {
if (failure.primary() && failure.status().getStatus() > status.getStatus()) {
status = failure.status();
}
}
return status;
}
@Override
public void readFrom(StreamInput in) throws IOException {
total = in.readVInt();
successful = in.readVInt();
int size = in.readVInt();
failures = new Failure[size];
for (int i = 0; i < size; i++) {
Failure failure = new Failure();
failure.readFrom(in);
failures[i] = failure;
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(total);
out.writeVInt(successful);
out.writeVInt(failures.length);
for (Failure failure : failures) {
failure.writeTo(out);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields._SHARDS);
builder.field(Fields.TOTAL, total);
builder.field(Fields.SUCCESSFUL, successful);
builder.field(Fields.FAILED, getFailed());
if (failures.length > 0) {
builder.startArray(Fields.FAILURES);
for (Failure failure : failures) {
failure.toXContent(builder, params);
}
builder.endArray();
}
builder.endObject();
return builder;
}
@Override
public String toString() {
return "ShardInfo{" +
"total=" + total +
", successful=" + successful +
", failures=" + Arrays.toString(failures) +
'}';
}
public static ShardInfo readShardInfo(StreamInput in) throws IOException {
ShardInfo shardInfo = new ShardInfo();
shardInfo.readFrom(in);
return shardInfo;
}
/**
 * A failure of a single shard-level operation, reporting where it happened
 * (index, shard id, node) and why (cause plus REST status).
 * <p>
 * Instances travel over the transport layer via {@link #readFrom}/{@link #writeTo};
 * the field order in those two methods must stay in sync.
 */
public static class Failure implements ShardOperationFailedException, ToXContent {
    // Shard the operation failed on.
    private ShardId shardId;
    // Node the failure occurred on; may be null when unknown.
    private String nodeId;
    // Underlying exception that caused the failure.
    private Exception cause;
    // Status to report if this failure was a primary failure.
    private RestStatus status;
    // Whether the failure occurred on a primary shard.
    private boolean primary;
    public Failure(ShardId shardId, @Nullable String nodeId, Exception cause, RestStatus status, boolean primary) {
        this.shardId = shardId;
        this.nodeId = nodeId;
        this.cause = cause;
        this.status = status;
        this.primary = primary;
    }
    // Used only for deserialization via readFrom(StreamInput).
    Failure() {
    }
    /**
     * @return On what index the failure occurred.
     */
    @Override
    public String index() {
        return shardId.getIndexName();
    }
    /**
     * @return On what shard id the failure occurred.
     */
    @Override
    public int shardId() {
        return shardId.id();
    }
    /**
     * @return the full shard identifier (index plus shard number), unlike
     *         {@link #shardId()} which returns only the numeric id.
     */
    public ShardId fullShardId() {
        return shardId;
    }
    /**
     * @return On what node the failure occurred.
     */
    @Nullable
    public String nodeId() {
        return nodeId;
    }
    /**
     * @return A text description of the failure
     */
    @Override
    public String reason() {
        return ExceptionsHelper.detailedMessage(cause);
    }
    /**
     * @return The status to report if this failure was a primary failure.
     */
    @Override
    public RestStatus status() {
        return status;
    }
    @Override
    public Throwable getCause() {
        return cause;
    }
    /**
     * @return Whether this failure occurred on a primary shard.
     * (this only reports true for delete by query)
     */
    public boolean primary() {
        return primary;
    }
    // Deserializes the failure; field order must mirror writeTo exactly.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        shardId = ShardId.readShardId(in);
        nodeId = in.readOptionalString();
        cause = in.readException();
        status = RestStatus.readFrom(in);
        primary = in.readBoolean();
    }
    // Serializes the failure; field order must mirror readFrom exactly.
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        shardId.writeTo(out);
        out.writeOptionalString(nodeId);
        out.writeException(cause);
        RestStatus.writeTo(out, status);
        out.writeBoolean(primary);
    }
    // Renders the failure as an object: index, shard, node, a nested "reason"
    // object describing the cause, then status and primary flag.
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Fields._INDEX, shardId.getIndexName());
        builder.field(Fields._SHARD, shardId.id());
        builder.field(Fields._NODE, nodeId);
        builder.field(Fields.REASON);
        builder.startObject();
        ElasticsearchException.toXContent(builder, params, cause);
        builder.endObject();
        builder.field(Fields.STATUS, status);
        builder.field(Fields.PRIMARY, primary);
        builder.endObject();
        return builder;
    }
    // XContent field names used by toXContent above.
    private static class Fields {
        private static final String _INDEX = "_index";
        private static final String _SHARD = "_shard";
        private static final String _NODE = "_node";
        private static final String REASON = "reason";
        private static final String STATUS = "status";
        private static final String PRIMARY = "primary";
    }
}
/**
 * XContent field names used when rendering the shard-level summary
 * (total/successful/failed counts and the failures array).
 */
private static class Fields {
    private static final String _SHARDS = "_shards";
    private static final String TOTAL = "total";
    private static final String SUCCESSFUL = "successful";
    private static final String FAILED = "failed";
    private static final String FAILURES = "failures";

    // Constants holder - prevent instantiation.
    private Fields() {
    }
}
}
}
| |
package com.perimeterx.api.proxy;
import com.perimeterx.api.providers.IPProvider;
import com.perimeterx.models.configuration.PXConfiguration;
import com.perimeterx.models.proxy.PredefinedResponse;
import com.perimeterx.utils.PXLogger;
import org.apache.http.*;
import org.apache.http.client.HttpClient;
import org.apache.http.client.utils.URIUtils;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicHttpEntityEnclosingRequest;
import org.apache.http.message.BasicHttpRequest;
import org.apache.http.message.HeaderGroup;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpCookie;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.BitSet;
import java.util.Enumeration;
import java.util.Formatter;
/**
* Created by nitzangoldfeder on 14/05/2018.
*/
/**
 * Reverse proxy used by the PerimeterX first-party integration: forwards the
 * incoming servlet request to a configured remote server and copies the remote
 * response (status, headers, cookies and body) back to the servlet client.
 * <p>
 * Created by nitzangoldfeder on 14/05/2018.
 */
public class RemoteServer {
    private final PXLogger logger = PXLogger.getLogger(RemoteServer.class);

    /** Name of the Content-Length header, parsed as a long in {@link #getContentLength}. */
    private static final String CONTENT_LENGTH_HEADER = "Content-Length";

    /** Initial capacity hint for the rewritten proxy URL buffer. */
    private static final int MAX_URL_LENGTH = 1000;

    private final HttpServletResponse res;
    private final HttpServletRequest req;
    private final HttpClient proxyClient;
    private final IPProvider ipProvider;
    private final PredefinedResponse predefinedResponse;
    private final PredefinedResponseHelper predefinedResponseHelper;
    private final PXConfiguration pxConfiguration;

    /** Absolute URL of the remote resource this instance proxies to. */
    protected String targetUri;
    protected URI targetUriObj;
    protected HttpHost targetHost;

    /**
     * These are the "hop-by-hop" headers that should not be copied.
     * http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html
     * I use an HttpClient HeaderGroup class instead of Set<String> because this
     * approach does case insensitive lookup faster.
     */
    protected static final HeaderGroup hopByHopHeaders;

    static {
        hopByHopHeaders = new HeaderGroup();
        String[] headers = new String[]{
                "Connection", "Keep-Alive", "Proxy-Authenticate", "Proxy-Authorization",
                "TE", "Trailers", "Transfer-Encoding", "Upgrade"};
        for (String header : headers) {
            hopByHopHeaders.addHeader(new BasicHeader(header, null));
        }
    }

    /**
     * @param serverUrl  base URL of the remote server
     * @param uri        request path appended to {@code serverUrl}
     * @param req        incoming servlet request being proxied
     * @param res        servlet response the remote answer is written to
     * @param ipProvider resolves the true client IP for the PX headers
     * @param httpClient client used to execute the proxied request
     * @throws URISyntaxException if {@code serverUrl + uri} is not a valid URI
     */
    public RemoteServer(String serverUrl, String uri, HttpServletRequest req, HttpServletResponse res,
                        IPProvider ipProvider, HttpClient httpClient, PredefinedResponse predefinedResponse,
                        PredefinedResponseHelper predefinedResponseHelper, PXConfiguration pxConfiguration) throws URISyntaxException {
        this.req = req;
        this.res = res;
        this.targetUri = serverUrl.concat(uri);
        this.proxyClient = httpClient;
        this.targetUriObj = new URI(targetUri);
        this.targetHost = URIUtils.extractHost(targetUriObj);
        this.ipProvider = ipProvider;
        this.predefinedResponse = predefinedResponse;
        this.predefinedResponseHelper = predefinedResponseHelper;
        this.pxConfiguration = pxConfiguration;
    }

    /**
     * Builds the outgoing proxy request from the current servlet request:
     * method, rewritten URI, body (when one is present), client headers,
     * X-Forwarded-* headers and the PerimeterX enforcement headers.
     */
    public HttpRequest prepareProxyRequest() throws IOException {
        logger.debug("Preparing proxy request");
        String method = req.getMethod();
        String proxyRequestUri = rewriteUrlFromRequest(req);
        HttpRequest proxyRequest;
        // Copy the body if content-length exists or transfer encoding
        if (req.getHeader(HttpHeaders.CONTENT_LENGTH) != null || req.getHeader(HttpHeaders.TRANSFER_ENCODING) != null) {
            proxyRequest = newProxyRequestWithEntity(method, proxyRequestUri, req);
        } else {
            // case not, BasicHttpRequest
            proxyRequest = new BasicHttpRequest(method, proxyRequestUri);
        }
        // Reverse proxy
        copyRequestHeaders(req, proxyRequest);
        handleXForwardedForHeader(req, proxyRequest);
        // PX Logic
        handlePXHeaders(proxyRequest);
        return proxyRequest;
    }

    /**
     * Executes the proxy request and streams the remote response back to the
     * servlet client. When the remote call throws, or returns a status of
     * 400 or above while {@code allowPredefinedHandler} is set, the configured
     * predefined response is served instead.
     *
     * @return the remote response, or {@code null} if execution threw
     */
    public HttpResponse handleResponse(HttpRequest proxyRequest, boolean allowPredefinedHandler) {
        HttpResponse proxyResponse = null;
        try {
            // Execute the request
            proxyResponse = doExecute(proxyRequest);
            int statusCode = proxyResponse.getStatusLine().getStatusCode();
            // In failure we can check if we enable predefined request or proxy the original response
            if (allowPredefinedHandler && statusCode >= HttpStatus.SC_BAD_REQUEST) {
                predefinedResponseHelper.handlePredefinedResponse(res, predefinedResponse);
                return proxyResponse;
            }
            res.setStatus(statusCode);
            // Copying response headers to make sure SESSIONID or other Cookie which comes from the remote
            // server will be saved in client when the proxied url was redirected to another one.
            // See issue [#51](https://github.com/mitre/HTTP-Proxy-Servlet/issues/51)
            copyResponseHeaders(proxyResponse, req, res);
            if (statusCode == HttpServletResponse.SC_NOT_MODIFIED) {
                // 304 needs special handling. See:
                // http://www.ics.uci.edu/pub/ietf/http/rfc1945.html#Code304
                // Don't send body entity/content!
                res.setIntHeader(HttpHeaders.CONTENT_LENGTH, 0);
            } else {
                // Send the content to the client
                copyResponseEntity(proxyResponse);
            }
        } catch (Exception e) {
            // Don't swallow the failure silently - record it before (optionally)
            // falling back to the predefined response.
            logger.error("Failed to handle proxy response: {}", e.getMessage());
            if (allowPredefinedHandler) {
                predefinedResponseHelper.handlePredefinedResponse(res, predefinedResponse);
            }
        }
        return proxyResponse;
    }

    /**
     * Copy response body data (the entity) from the proxy to the servlet client.
     */
    protected void copyResponseEntity(HttpResponse proxyResponse) throws IOException {
        HttpEntity entity = proxyResponse.getEntity();
        if (entity != null) {
            OutputStream servletOutputStream = res.getOutputStream();
            entity.writeTo(servletOutputStream);
        }
    }

    /**
     * Copy proxied response headers back to the servlet client.
     */
    protected void copyResponseHeaders(HttpResponse proxyResponse, HttpServletRequest servletRequest,
                                       HttpServletResponse servletResponse) {
        for (Header header : proxyResponse.getAllHeaders()) {
            copyResponseHeader(servletRequest, servletResponse, header);
        }
    }

    /**
     * Copy a proxied response header back to the servlet client.
     * This is easily overwritten to filter out certain headers if desired.
     * Hop-by-hop headers are dropped; Set-Cookie headers are rewritten to the
     * proxy's path; Location headers are rewritten back to the proxy's URL.
     */
    protected void copyResponseHeader(HttpServletRequest servletRequest,
                                      HttpServletResponse servletResponse, Header header) {
        String headerName = header.getName();
        if (hopByHopHeaders.containsHeader(headerName))
            return;
        String headerValue = header.getValue();
        if (headerName.equalsIgnoreCase(org.apache.http.cookie.SM.SET_COOKIE) ||
                headerName.equalsIgnoreCase(org.apache.http.cookie.SM.SET_COOKIE2)) {
            copyProxyCookie(servletRequest, servletResponse, headerValue);
        } else if (headerName.equalsIgnoreCase(HttpHeaders.LOCATION)) {
            // LOCATION Header may have to be rewritten.
            servletResponse.addHeader(headerName, rewriteUrlFromResponse(servletRequest, headerValue));
        } else {
            servletResponse.addHeader(headerName, headerValue);
        }
    }

    /**
     * For a redirect response from the target server, this translates {@code theUrl} to redirect to
     * and translates it to one the original client can use.
     */
    protected String rewriteUrlFromResponse(HttpServletRequest servletRequest, String theUrl) {
        final String targetUri = this.targetUri;
        if (theUrl.startsWith(targetUri)) {
            /*-
             * The URL points back to the back-end server.
             * Instead of returning it verbatim we replace the target path with our
             * source path in a way that should instruct the original client to
             * request the URL pointed through this Proxy.
             * We do this by taking the current request and rewriting the path part
             * using this servlet's absolute path and the path from the returned URL
             * after the base target URL.
             */
            StringBuffer curUrl = servletRequest.getRequestURL();//no query
            int pos;
            // Skip the protocol part
            if ((pos = curUrl.indexOf("://")) >= 0) {
                // Skip the authority part
                // + 3 to skip the separator between protocol and authority
                if ((pos = curUrl.indexOf("/", pos + 3)) >= 0) {
                    // Trim everything after the authority part.
                    curUrl.setLength(pos);
                }
            }
            // Context path starts with a / if it is not blank
            curUrl.append(servletRequest.getContextPath());
            // Servlet path starts with a / if it is not blank
            curUrl.append(servletRequest.getServletPath());
            curUrl.append(theUrl, targetUri.length(), theUrl.length());
            return curUrl.toString();
        }
        return theUrl;
    }

    /**
     * Copy cookie from the proxy to the servlet client.
     * Replaces cookie path to local path and renames cookie to avoid collisions.
     */
    protected void copyProxyCookie(HttpServletRequest servletRequest,
                                   HttpServletResponse servletResponse, String headerValue) {
        //build path for resulting cookie
        String path = servletRequest.getContextPath(); // path starts with / or is empty string
        path += servletRequest.getServletPath(); // servlet path starts with / or is empty string
        if (path.isEmpty()) {
            path = "/";
        }
        for (HttpCookie cookie : HttpCookie.parse(headerValue)) {
            //set cookie name prefixed w/ a proxy value so it won't collide w/ other cookies
            String proxyCookieName = cookie.getName();
            Cookie servletCookie = new Cookie(proxyCookieName, cookie.getValue());
            servletCookie.setComment(cookie.getComment());
            servletCookie.setMaxAge((int) cookie.getMaxAge());
            servletCookie.setPath(path); //set to the path of the proxy servlet
            // don't set cookie domain
            servletCookie.setSecure(cookie.getSecure());
            servletCookie.setVersion(cookie.getVersion());
            servletResponse.addCookie(servletCookie);
        }
    }

    /**
     * Copy request headers from the servlet client to the proxy request.
     * This is easily overridden to add your own.
     */
    protected void copyRequestHeaders(HttpServletRequest servletRequest, HttpRequest proxyRequest) {
        // Get an Enumeration of all of the header names sent by the client
        @SuppressWarnings("unchecked")
        Enumeration<String> enumerationOfHeaderNames = servletRequest.getHeaderNames();
        while (enumerationOfHeaderNames.hasMoreElements()) {
            String headerName = enumerationOfHeaderNames.nextElement();
            copyRequestHeader(servletRequest, proxyRequest, headerName);
        }
    }

    /**
     * Append request headers related to PerimeterX
     */
    protected void handlePXHeaders(HttpRequest proxyRequest) {
        proxyRequest.addHeader("X-PX-ENFORCER-TRUE-IP", this.ipProvider.getRequestIP(this.req));
        proxyRequest.addHeader("X-PX-FIRST-PARTY", "1");
    }

    // Appends this hop to X-Forwarded-For and records the original scheme in
    // X-Forwarded-Proto so the remote server can reconstruct the client request.
    private void handleXForwardedForHeader(HttpServletRequest servletRequest, HttpRequest proxyRequest) {
        String forHeaderName = "X-Forwarded-For";
        String forHeader = servletRequest.getRemoteAddr();
        String existingForHeader = servletRequest.getHeader(forHeaderName);
        if (existingForHeader != null) {
            forHeader = existingForHeader + ", " + forHeader;
        }
        proxyRequest.setHeader(forHeaderName, forHeader);
        String protoHeaderName = "X-Forwarded-Proto";
        String protoHeader = servletRequest.getScheme();
        proxyRequest.setHeader(protoHeaderName, protoHeader);
    }

    /**
     * Copy a request header from the servlet client to the proxy request.
     * This is easily overridden to filter out certain headers if desired.
     * Skips Content-Length (set via the entity), hop-by-hop headers and the
     * configured IP headers; rewrites Host to point at the target.
     */
    protected void copyRequestHeader(HttpServletRequest servletRequest, HttpRequest proxyRequest,
                                     String headerName) {
        //Instead the content-length is effectively set via InputStreamEntity
        if (headerName.equalsIgnoreCase(HttpHeaders.CONTENT_LENGTH)) {
            return;
        }
        if (hopByHopHeaders.containsHeader(headerName)) {
            return;
        }
        if (pxConfiguration.getIpHeaders().contains(headerName)) {
            return;
        }
        @SuppressWarnings("unchecked")
        Enumeration<String> headers = servletRequest.getHeaders(headerName);
        while (headers.hasMoreElements()) {//sometimes more than one value
            String headerValue = headers.nextElement();
            if (headerName.equalsIgnoreCase(HttpHeaders.HOST)) {
                HttpHost host = this.targetHost;
                headerValue = host.getHostName();
                if (host.getPort() != -1) {
                    headerValue += ":" + host.getPort();
                }
            }
            proxyRequest.addHeader(headerName, headerValue);
        }
    }

    /**
     * Builds an entity-enclosing proxy request that streams the servlet
     * request body through to the remote server.
     */
    protected HttpRequest newProxyRequestWithEntity(String method, String proxyRequestUri, HttpServletRequest servletRequest) throws IOException {
        HttpEntityEnclosingRequest eProxyRequest = new BasicHttpEntityEnclosingRequest(method, proxyRequestUri);
        // Add the input entity (streamed)
        // note: we don't bother ensuring we close the servletInputStream since the container handles it
        eProxyRequest.setEntity(new InputStreamEntity(servletRequest.getInputStream(), getContentLength(servletRequest)));
        return eProxyRequest;
    }

    // Get the header value as a long in order to more correctly proxy very large requests.
    // Returns -1 (unknown length, streamed) when the header is absent or malformed.
    private long getContentLength(HttpServletRequest request) {
        String contentLengthHeader = request.getHeader(CONTENT_LENGTH_HEADER);
        if (contentLengthHeader != null) {
            try {
                return Long.parseLong(contentLengthHeader);
            } catch (NumberFormatException e) {
                // A malformed Content-Length must not abort the proxy request.
                logger.debug("Malformed Content-Length header: {}", contentLengthHeader);
            }
        }
        return -1L;
    }

    /**
     * Rewrites the servlet request URL to point at the target server,
     * re-attaching the query string and fragment (re-escaping any characters
     * that would make the outgoing URI invalid).
     */
    protected String rewriteUrlFromRequest(HttpServletRequest servletRequest) {
        logger.debug("Rewriting url from request");
        StringBuilder uri = new StringBuilder(MAX_URL_LENGTH);
        uri.append(this.targetUri);
        logger.debug("Setting uri to reverse {}", uri);
        // Handle the query string & fragment
        String queryString = servletRequest.getQueryString();//ex:(following '?'): name=value&foo=bar#fragment
        String fragment = null;
        //split off fragment from queryString, updating queryString if found
        if (queryString != null) {
            int fragIdx = queryString.indexOf('#');
            if (fragIdx >= 0) {
                fragment = queryString.substring(fragIdx + 1);
                queryString = queryString.substring(0, fragIdx);
            }
        }
        if (queryString != null && queryString.length() > 0) {
            uri.append('?');
            // queryString is not decoded, so we need encodeUriQuery not to encode "%" characters, to avoid double-encoding
            uri.append(encodeUriQuery(queryString, false));
        }
        if (fragment != null) {
            uri.append('#');
            // fragment is not decoded, so we need encodeUriQuery not to encode "%" characters, to avoid double-encoding
            uri.append(encodeUriQuery(fragment, false));
        }
        logger.debug("Final uri to proxy: {}", uri);
        return uri.toString();
    }

    /**
     * Encodes characters in the query or fragment part of the URI.
     *
     * <p>Unfortunately, an incoming URI sometimes has characters disallowed by the spec. HttpClient
     * insists that the outgoing proxied request has a valid URI because it uses Java's {@link URI}.
     * To be more forgiving, we must escape the problematic characters. See the URI class for the
     * spec.
     *
     * @param in example: name=value&foo=bar#fragment
     * @param encodePercent determine whether percent characters need to be encoded
     */
    private static CharSequence encodeUriQuery(CharSequence in, boolean encodePercent) {
        //Note that I can't simply use URI.java to encode because it will escape pre-existing escaped things.
        StringBuilder outBuf = null;
        Formatter formatter = null;
        for (int i = 0; i < in.length(); i++) {
            char c = in.charAt(i);
            boolean escape = true;
            if (c < 128) {
                if (asciiQueryChars.get((int) c) && !(encodePercent && c == '%')) {
                    escape = false;
                }
            } else if (!Character.isISOControl(c) && !Character.isSpaceChar(c)) {//not-ascii
                escape = false;
            }
            if (!escape) {
                if (outBuf != null)
                    outBuf.append(c);
            } else {
                //escape
                if (outBuf == null) {
                    // Lazily allocate: most inputs need no escaping at all.
                    outBuf = new StringBuilder(in.length() + 5 * 3);
                    outBuf.append(in, 0, i);
                    formatter = new Formatter(outBuf);
                }
                //leading %, 0 padded, width 2, capital hex
                formatter.format("%%%02X", (int) c);
            }
        }
        return outBuf != null ? outBuf : in;
    }

    // Characters allowed unescaped in a query/fragment, per the URI spec.
    private static final BitSet asciiQueryChars;

    static {
        char[] c_unreserved = "_-!.~'()*".toCharArray();//plus alphanum
        char[] c_punct = ",;:$&+=".toCharArray();
        char[] c_reserved = "?/[]@".toCharArray();//plus punct
        asciiQueryChars = new BitSet(128);
        for (char c = 'a'; c <= 'z'; c++) asciiQueryChars.set((int) c);
        for (char c = 'A'; c <= 'Z'; c++) asciiQueryChars.set((int) c);
        for (char c = '0'; c <= '9'; c++) asciiQueryChars.set((int) c);
        for (char c : c_unreserved) asciiQueryChars.set((int) c);
        for (char c : c_punct) asciiQueryChars.set((int) c);
        for (char c : c_reserved) asciiQueryChars.set((int) c);
        asciiQueryChars.set((int) '%');//leave existing percent escapes in place
    }

    // Executes the proxied request against the resolved target host.
    protected HttpResponse doExecute(HttpRequest proxyRequest) throws IOException {
        return proxyClient.execute(targetHost, proxyRequest);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.allocation;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.cluster.ClusterInfo;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus;
import org.elasticsearch.cluster.routing.UnassignedInfo.Reason;
import org.elasticsearch.cluster.routing.allocation.AllocateUnassignedDecision;
import org.elasticsearch.cluster.routing.allocation.AllocationDecision;
import org.elasticsearch.cluster.routing.allocation.MoveDecision;
import org.elasticsearch.cluster.routing.allocation.NodeAllocationResult;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalTestCluster;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.oneOf;
import static org.hamcrest.Matchers.startsWith;
/**
* Tests for the cluster allocation explanation
*/
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
public final class ClusterAllocationExplainIT extends ESIntegTestCase {
/**
 * Explains an unassigned primary whose only copy lived on a node that was
 * stopped: the explain API must report either NO_VALID_SHARD_COPY or
 * AWAITING_INFO (while shard data is still being fetched) and never point
 * at a target node. Also verifies the JSON rendering of the explanation.
 */
public void testUnassignedPrimaryWithExistingIndex() throws Exception {
    logger.info("--> starting 2 nodes");
    internalCluster().startNodes(2);
    prepareIndex(1, 0);
    logger.info("--> stopping the node with the primary");
    internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName()));
    ensureStableCluster(1);
    // Randomize the optional request flags so both variants stay covered.
    boolean includeYesDecisions = randomBoolean();
    boolean includeDiskInfo = randomBoolean();
    ClusterAllocationExplanation explanation = runExplain(true, includeYesDecisions, includeDiskInfo);
    ShardId shardId = explanation.getShard();
    boolean isPrimary = explanation.isPrimary();
    ShardRoutingState shardState = explanation.getShardState();
    DiscoveryNode currentNode = explanation.getCurrentNode();
    UnassignedInfo unassignedInfo = explanation.getUnassignedInfo();
    ClusterInfo clusterInfo = explanation.getClusterInfo();
    AllocateUnassignedDecision allocateDecision = explanation.getShardAllocationDecision().getAllocateDecision();
    MoveDecision moveDecision = explanation.getShardAllocationDecision().getMoveDecision();
    // verify shard info
    assertEquals("idx", shardId.getIndexName());
    assertEquals(0, shardId.getId());
    assertTrue(isPrimary);
    // verify current node info
    assertNotEquals(ShardRoutingState.STARTED, shardState);
    assertNull(currentNode);
    // verify unassigned info
    assertNotNull(unassignedInfo);
    assertEquals(Reason.NODE_LEFT, unassignedInfo.getReason());
    assertTrue(unassignedInfo.getLastAllocationStatus() == AllocationStatus.FETCHING_SHARD_DATA
        || unassignedInfo.getLastAllocationStatus() == AllocationStatus.NO_VALID_SHARD_COPY);
    // verify cluster info
    verifyClusterInfo(clusterInfo, includeDiskInfo, 1);
    // verify decision objects
    assertTrue(allocateDecision.isDecisionTaken());
    assertFalse(moveDecision.isDecisionTaken());
    // Either the fetch is still in flight (AWAITING_INFO) or it completed and
    // found no usable copy (NO_VALID_SHARD_COPY); both are valid outcomes here.
    assertTrue(allocateDecision.getAllocationDecision() == AllocationDecision.NO_VALID_SHARD_COPY
        || allocateDecision.getAllocationDecision() == AllocationDecision.AWAITING_INFO);
    if (allocateDecision.getAllocationDecision() == AllocationDecision.NO_VALID_SHARD_COPY) {
        assertEquals("cannot allocate because a previous copy of the primary shard existed but can no longer be " +
            "found on the nodes in the cluster", allocateDecision.getExplanation());
    } else {
        assertEquals("cannot allocate because information about existing shard data is still being retrieved from some of the nodes",
            allocateDecision.getExplanation());
    }
    assertNull(allocateDecision.getAllocationId());
    assertNull(allocateDecision.getTargetNode());
    assertEquals(0L, allocateDecision.getConfiguredDelayInMillis());
    assertEquals(0L, allocateDecision.getRemainingDelayInMillis());
    if (allocateDecision.getAllocationDecision() == AllocationDecision.NO_VALID_SHARD_COPY) {
        assertEquals(1, allocateDecision.getNodeDecisions().size());
        // verify JSON output
        // NOTE: token order below mirrors the response's serialization order exactly.
        try (XContentParser parser = getParser(explanation)) {
            verifyShardInfo(parser, true, includeDiskInfo, ShardRoutingState.UNASSIGNED);
            parser.nextToken();
            assertEquals("can_allocate", parser.currentName());
            parser.nextToken();
            assertEquals(AllocationDecision.NO_VALID_SHARD_COPY.toString(), parser.text());
            parser.nextToken();
            assertEquals("allocate_explanation", parser.currentName());
            parser.nextToken();
            assertEquals("cannot allocate because a previous copy of the primary shard existed but can no longer be found " +
                "on the nodes in the cluster", parser.text());
            verifyStaleShardCopyNodeDecisions(parser, 1, Collections.emptySet());
        }
    }
}
/**
 * Explains an unassigned replica while the cluster is waiting out the
 * delayed-allocation window after its node left: the decision must be
 * ALLOCATION_DELAYED, with a remaining delay and per-node decisions that
 * say NO only for the node holding the primary. Also verifies the JSON
 * rendering of the explanation.
 */
public void testUnassignedReplicaDelayedAllocation() throws Exception {
    logger.info("--> starting 3 nodes");
    internalCluster().startNodes(3);
    prepareIndex(1, 1);
    logger.info("--> stopping the node with the replica");
    internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNode().getName()));
    ensureStableCluster(2);
    assertBusy(() ->
        // wait till we have passed any pending shard data fetching
        assertEquals(AllocationDecision.ALLOCATION_DELAYED, client().admin().cluster().prepareAllocationExplain()
            .setIndex("idx").setShard(0).setPrimary(false).get().getExplanation()
            .getShardAllocationDecision().getAllocateDecision().getAllocationDecision())
    );
    logger.info("--> observing delayed allocation...");
    // Randomize the optional request flags so both variants stay covered.
    boolean includeYesDecisions = randomBoolean();
    boolean includeDiskInfo = randomBoolean();
    ClusterAllocationExplanation explanation = runExplain(false, includeYesDecisions, includeDiskInfo);
    ShardId shardId = explanation.getShard();
    boolean isPrimary = explanation.isPrimary();
    ShardRoutingState shardRoutingState = explanation.getShardState();
    DiscoveryNode currentNode = explanation.getCurrentNode();
    UnassignedInfo unassignedInfo = explanation.getUnassignedInfo();
    ClusterInfo clusterInfo = explanation.getClusterInfo();
    AllocateUnassignedDecision allocateDecision = explanation.getShardAllocationDecision().getAllocateDecision();
    MoveDecision moveDecision = explanation.getShardAllocationDecision().getMoveDecision();
    // verify shard info
    assertEquals("idx", shardId.getIndexName());
    assertEquals(0, shardId.getId());
    assertFalse(isPrimary);
    // verify current node info
    assertNotEquals(ShardRoutingState.STARTED, shardRoutingState);
    assertNull(currentNode);
    // verify unassigned info
    assertNotNull(unassignedInfo);
    assertEquals(Reason.NODE_LEFT, unassignedInfo.getReason());
    assertEquals(AllocationStatus.NO_ATTEMPT, unassignedInfo.getLastAllocationStatus());
    // verify cluster info
    verifyClusterInfo(clusterInfo, includeDiskInfo, 2);
    // verify decision objects
    assertTrue(allocateDecision.isDecisionTaken());
    assertFalse(moveDecision.isDecisionTaken());
    assertEquals(AllocationDecision.ALLOCATION_DELAYED, allocateDecision.getAllocationDecision());
    assertThat(allocateDecision.getExplanation(), startsWith("cannot allocate because the cluster is still waiting"));
    assertThat(allocateDecision.getExplanation(), containsString(
        "despite being allowed to allocate the shard to at least one other node"));
    assertNull(allocateDecision.getAllocationId());
    assertNull(allocateDecision.getTargetNode());
    // 60000L is the index.unassigned.node_left.delayed_timeout set by prepareIndex
    // (configured outside this method) -- TODO confirm against prepareIndex.
    assertEquals(60000L, allocateDecision.getConfiguredDelayInMillis());
    assertThat(allocateDecision.getRemainingDelayInMillis(), greaterThan(0L));
    assertEquals(2, allocateDecision.getNodeDecisions().size());
    String primaryNodeName = primaryNodeName();
    for (NodeAllocationResult result : allocateDecision.getNodeDecisions()) {
        assertNotNull(result.getNode());
        boolean nodeHoldingPrimary = result.getNode().getName().equals(primaryNodeName);
        if (nodeHoldingPrimary) {
            // shouldn't be able to allocate to the same node as the primary, the same shard decider should say no
            assertEquals(AllocationDecision.NO, result.getNodeDecision());
            assertThat(result.getShardStoreInfo().getMatchingBytes(), greaterThan(0L));
        } else {
            assertEquals(AllocationDecision.YES, result.getNodeDecision());
            assertNull(result.getShardStoreInfo());
        }
        if (includeYesDecisions) {
            assertThat(result.getCanAllocateDecision().getDecisions().size(), greaterThan(1));
        } else {
            // if we are not including YES decisions, then the node holding the primary should have 1 NO decision,
            // the other node should have zero NO decisions
            assertEquals(nodeHoldingPrimary ? 1 : 0, result.getCanAllocateDecision().getDecisions().size());
        }
        for (Decision d : result.getCanAllocateDecision().getDecisions()) {
            if (d.label().equals("same_shard") && nodeHoldingPrimary) {
                assertEquals(Decision.Type.NO, d.type());
                assertThat(d.getExplanation(), startsWith(
                    "the shard cannot be allocated to the same node on which a copy of the shard already exists"));
            } else {
                assertEquals(Decision.Type.YES, d.type());
                assertNotNull(d.getExplanation());
            }
        }
    }
    // verify JSON output
    // NOTE: token order below mirrors the response's serialization order exactly.
    try (XContentParser parser = getParser(explanation)) {
        verifyShardInfo(parser, false, includeDiskInfo, ShardRoutingState.UNASSIGNED);
        parser.nextToken();
        assertEquals("can_allocate", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.ALLOCATION_DELAYED.toString(), parser.text());
        parser.nextToken();
        assertEquals("allocate_explanation", parser.currentName());
        parser.nextToken();
        assertThat(parser.text(), startsWith("cannot allocate because the cluster is still waiting"));
        parser.nextToken();
        assertEquals("configured_delay_in_millis", parser.currentName());
        parser.nextToken();
        assertEquals(60000L, parser.longValue());
        parser.nextToken();
        assertEquals("remaining_delay_in_millis", parser.currentName());
        parser.nextToken();
        assertThat(parser.longValue(), greaterThan(0L));
        // Expected per-node decisions: NO for the primary's node, YES for the other.
        Map<String, AllocationDecision> nodes = new HashMap<>();
        nodes.put(primaryNodeName, AllocationDecision.NO);
        String[] currentNodes = internalCluster().getNodeNames();
        nodes.put(currentNodes[0].equals(primaryNodeName) ? currentNodes[1] : currentNodes[0], AllocationDecision.YES);
        verifyNodeDecisions(parser, nodes, includeYesDecisions, true);
        assertEquals(Token.END_OBJECT, parser.nextToken());
    }
}
public void testUnassignedReplicaWithPriorCopy() throws Exception {
logger.info("--> starting 3 nodes");
List<String> nodes = internalCluster().startNodes(3);
prepareIndex(1, 1);
String primaryNodeName = primaryNodeName();
nodes.remove(primaryNodeName);
logger.info("--> shutting down all nodes except the one that holds the primary");
Settings node0DataPathSettings = internalCluster().dataPathSettings(nodes.get(0));
Settings node1DataPathSettings = internalCluster().dataPathSettings(nodes.get(1));
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodes.get(0)));
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(nodes.get(1)));
ensureStableCluster(1);
logger.info("--> setting allocation filtering to only allow allocation on the currently running node");
client().admin().indices().prepareUpdateSettings("idx").setSettings(
Settings.builder().put("index.routing.allocation.include._name", primaryNodeName)).get();
logger.info("--> restarting the stopped nodes");
internalCluster().startNode(Settings.builder().put("node.name", nodes.get(0)).put(node0DataPathSettings).build());
internalCluster().startNode(Settings.builder().put("node.name", nodes.get(1)).put(node1DataPathSettings).build());
ensureStableCluster(3);
boolean includeYesDecisions = randomBoolean();
boolean includeDiskInfo = randomBoolean();
ClusterAllocationExplanation explanation = runExplain(false, includeYesDecisions, includeDiskInfo);
ShardId shardId = explanation.getShard();
boolean isPrimary = explanation.isPrimary();
ShardRoutingState shardRoutingState = explanation.getShardState();
DiscoveryNode currentNode = explanation.getCurrentNode();
UnassignedInfo unassignedInfo = explanation.getUnassignedInfo();
ClusterInfo clusterInfo = explanation.getClusterInfo();
AllocateUnassignedDecision allocateDecision = explanation.getShardAllocationDecision().getAllocateDecision();
MoveDecision moveDecision = explanation.getShardAllocationDecision().getMoveDecision();
// verify shard info
assertEquals("idx", shardId.getIndexName());
assertEquals(0, shardId.getId());
assertFalse(isPrimary);
// verify current node info
assertNotEquals(ShardRoutingState.STARTED, shardRoutingState);
assertNull(currentNode);
// verify unassigned info
assertNotNull(unassignedInfo);
assertEquals(Reason.NODE_LEFT, unassignedInfo.getReason());
assertEquals(AllocationStatus.NO_ATTEMPT, unassignedInfo.getLastAllocationStatus());
// verify cluster info
verifyClusterInfo(clusterInfo, includeDiskInfo, 3);
// verify decision objects
assertTrue(allocateDecision.isDecisionTaken());
assertFalse(moveDecision.isDecisionTaken());
AllocationDecision decisionToAllocate = allocateDecision.getAllocationDecision();
assertTrue(decisionToAllocate == AllocationDecision.AWAITING_INFO || decisionToAllocate == AllocationDecision.NO);
if (decisionToAllocate == AllocationDecision.AWAITING_INFO) {
assertEquals("cannot allocate because information about existing shard data is still being retrieved from some of the nodes",
allocateDecision.getExplanation());
} else {
assertEquals("cannot allocate because allocation is not permitted to any of the nodes", allocateDecision.getExplanation());
}
assertNull(allocateDecision.getAllocationId());
assertNull(allocateDecision.getTargetNode());
assertEquals(0L, allocateDecision.getConfiguredDelayInMillis());
assertEquals(0L, allocateDecision.getRemainingDelayInMillis());
assertEquals(3, allocateDecision.getNodeDecisions().size());
for (NodeAllocationResult result : allocateDecision.getNodeDecisions()) {
assertNotNull(result.getNode());
boolean nodeHoldingPrimary = result.getNode().getName().equals(primaryNodeName);
assertEquals(AllocationDecision.NO, result.getNodeDecision());
if (includeYesDecisions) {
assertThat(result.getCanAllocateDecision().getDecisions().size(), greaterThan(1));
} else {
assertEquals(1, result.getCanAllocateDecision().getDecisions().size());
}
for (Decision d : result.getCanAllocateDecision().getDecisions()) {
if (d.label().equals("same_shard") && nodeHoldingPrimary) {
assertEquals(Decision.Type.NO, d.type());
assertThat(d.getExplanation(), startsWith(
"the shard cannot be allocated to the same node on which a copy of the shard already exists"));
} else if (d.label().equals("filter") && nodeHoldingPrimary == false) {
assertEquals(Decision.Type.NO, d.type());
assertEquals("node does not match index setting [index.routing.allocation.include] " +
"filters [_name:\"" + primaryNodeName + "\"]", d.getExplanation());
} else {
assertEquals(Decision.Type.YES, d.type());
assertNotNull(d.getExplanation());
}
}
}
// verify JSON output
try (XContentParser parser = getParser(explanation)) {
verifyShardInfo(parser, false, includeDiskInfo, ShardRoutingState.UNASSIGNED);
parser.nextToken();
assertEquals("can_allocate", parser.currentName());
parser.nextToken();
String allocationDecision = parser.text();
assertTrue(allocationDecision.equals(AllocationDecision.NO.toString())
|| allocationDecision.equals(AllocationDecision.AWAITING_INFO.toString()));
parser.nextToken();
assertEquals("allocate_explanation", parser.currentName());
parser.nextToken();
if (allocationDecision.equals("awaiting_info")) {
assertEquals("cannot allocate because information about existing shard data is still being retrieved " +
"from some of the nodes", parser.text());
} else {
assertEquals("cannot allocate because allocation is not permitted to any of the nodes", parser.text());
}
Map<String, AllocationDecision> nodeDecisions = new HashMap<>();
for (String nodeName : internalCluster().getNodeNames()) {
nodeDecisions.put(nodeName, AllocationDecision.NO);
}
verifyNodeDecisions(parser, nodeDecisions, includeYesDecisions, true);
assertEquals(Token.END_OBJECT, parser.nextToken());
}
}
/**
 * Verifies the allocation-explain output for a primary that can never be assigned because the
 * index was created with an allocation filter matching no node. Checks both the Java response
 * object and its JSON (XContent) rendering.
 */
public void testAllocationFilteringOnIndexCreation() throws Exception {
    logger.info("--> starting 2 nodes");
    internalCluster().startNodes(2);
    logger.info("--> creating an index with 1 primary, 0 replicas, with allocation filtering so the primary can't be assigned");
    // ActiveShardCount.NONE: don't wait for any shard to become active — the primary will stay unassigned by design
    prepareIndex(IndexMetadata.State.OPEN, 1, 0,
        Settings.builder().put("index.routing.allocation.include._name", "non_existent_node").build(),
        ActiveShardCount.NONE);
    boolean includeYesDecisions = randomBoolean();
    boolean includeDiskInfo = randomBoolean();
    ClusterAllocationExplanation explanation = runExplain(true, includeYesDecisions, includeDiskInfo);

    ShardId shardId = explanation.getShard();
    boolean isPrimary = explanation.isPrimary();
    ShardRoutingState shardRoutingState = explanation.getShardState();
    DiscoveryNode currentNode = explanation.getCurrentNode();
    UnassignedInfo unassignedInfo = explanation.getUnassignedInfo();
    ClusterInfo clusterInfo = explanation.getClusterInfo();
    AllocateUnassignedDecision allocateDecision = explanation.getShardAllocationDecision().getAllocateDecision();
    MoveDecision moveDecision = explanation.getShardAllocationDecision().getMoveDecision();

    // verify shard info
    assertEquals("idx", shardId.getIndexName());
    assertEquals(0, shardId.getId());
    assertTrue(isPrimary);

    // verify current node info: the shard was never assigned, so there is no current node
    assertNotEquals(ShardRoutingState.STARTED, shardRoutingState);
    assertNull(currentNode);

    // verify unassigned info: created and immediately rejected by the deciders
    assertNotNull(unassignedInfo);
    assertEquals(Reason.INDEX_CREATED, unassignedInfo.getReason());
    assertEquals(AllocationStatus.DECIDERS_NO, unassignedInfo.getLastAllocationStatus());

    // verify cluster info
    verifyClusterInfo(clusterInfo, includeDiskInfo, 2);

    // verify decision objects: an allocate decision was taken (shard is unassigned), no move decision
    assertTrue(allocateDecision.isDecisionTaken());
    assertFalse(moveDecision.isDecisionTaken());
    assertEquals(AllocationDecision.NO, allocateDecision.getAllocationDecision());
    assertEquals("cannot allocate because allocation is not permitted to any of the nodes", allocateDecision.getExplanation());
    assertNull(allocateDecision.getAllocationId());
    assertNull(allocateDecision.getTargetNode());
    assertEquals(0L, allocateDecision.getConfiguredDelayInMillis());
    assertEquals(0L, allocateDecision.getRemainingDelayInMillis());
    assertEquals(2, allocateDecision.getNodeDecisions().size());
    for (NodeAllocationResult result : allocateDecision.getNodeDecisions()) {
        assertNotNull(result.getNode());
        assertEquals(AllocationDecision.NO, result.getNodeDecision());
        // with YES decisions included, the single NO (filter) decider is joined by the YES deciders
        if (includeYesDecisions) {
            assertThat(result.getCanAllocateDecision().getDecisions().size(), greaterThan(1));
        } else {
            assertEquals(1, result.getCanAllocateDecision().getDecisions().size());
        }
        for (Decision d : result.getCanAllocateDecision().getDecisions()) {
            if (d.label().equals("filter")) {
                assertEquals(Decision.Type.NO, d.type());
                assertEquals("node does not match index setting [index.routing.allocation.include] filters " +
                             "[_name:\"non_existent_node\"]", d.getExplanation());
            }
        }
    }

    // verify JSON output; field order is fixed, so the parser is advanced token-by-token
    try (XContentParser parser = getParser(explanation)) {
        verifyShardInfo(parser, true, includeDiskInfo, ShardRoutingState.UNASSIGNED);
        parser.nextToken();
        assertEquals("can_allocate", parser.currentName());
        parser.nextToken();
        String allocationDecision = parser.text();
        // NOTE(review): the object-level assert above requires exactly NO, while the JSON check also
        // tolerates AWAITING_INFO — presumably to absorb a shard-fetch race; confirm this is intended
        assertTrue(allocationDecision.equals(AllocationDecision.NO.toString())
                       || allocationDecision.equals(AllocationDecision.AWAITING_INFO.toString()));
        parser.nextToken();
        assertEquals("allocate_explanation", parser.currentName());
        parser.nextToken();
        if (allocationDecision.equals("awaiting_info")) {
            assertEquals("cannot allocate because information about existing shard data is still being retrieved " +
                         "from some of the nodes", parser.text());
        } else {
            assertEquals("cannot allocate because allocation is not permitted to any of the nodes", parser.text());
        }
        Map<String, AllocationDecision> nodeDecisions = new HashMap<>();
        for (String nodeName : internalCluster().getNodeNames()) {
            nodeDecisions.put(nodeName, AllocationDecision.NO);
        }
        verifyNodeDecisions(parser, nodeDecisions, includeYesDecisions, false);
        assertEquals(Token.END_OBJECT, parser.nextToken());
    }
}
/**
 * Verifies the explain output for a started primary whose index filter excludes every node:
 * the shard is not allowed to remain where it is, but there is also no node it can move to,
 * so the move decision is NO with {@code canRemain() == false}.
 */
public void testAllocationFilteringPreventsShardMove() throws Exception {
    logger.info("--> starting 2 nodes");
    internalCluster().startNodes(2);
    prepareIndex(1, 0);
    logger.info("--> setting up allocation filtering to prevent allocation to both nodes");
    // the filter is applied AFTER the shard started, so it affects the move decision, not allocation
    client().admin().indices().prepareUpdateSettings("idx").setSettings(
        Settings.builder().put("index.routing.allocation.include._name", "non_existent_node")).get();

    boolean includeYesDecisions = randomBoolean();
    boolean includeDiskInfo = randomBoolean();
    ClusterAllocationExplanation explanation = runExplain(true, includeYesDecisions, includeDiskInfo);

    ShardId shardId = explanation.getShard();
    boolean isPrimary = explanation.isPrimary();
    ShardRoutingState shardRoutingState = explanation.getShardState();
    DiscoveryNode currentNode = explanation.getCurrentNode();
    UnassignedInfo unassignedInfo = explanation.getUnassignedInfo();
    ClusterInfo clusterInfo = explanation.getClusterInfo();
    AllocateUnassignedDecision allocateDecision = explanation.getShardAllocationDecision().getAllocateDecision();
    MoveDecision moveDecision = explanation.getShardAllocationDecision().getMoveDecision();

    // verify shard info
    assertEquals("idx", shardId.getIndexName());
    assertEquals(0, shardId.getId());
    assertTrue(isPrimary);

    // verify current node info: shard is started, so a current node must be reported
    assertEquals(ShardRoutingState.STARTED, shardRoutingState);
    assertNotNull(currentNode);

    // verify unassigned info: none, since the shard is assigned
    assertNull(unassignedInfo);

    // verify cluster info
    verifyClusterInfo(clusterInfo, includeDiskInfo, 2);

    // verify decision object: only a move decision is taken for an assigned shard
    assertFalse(allocateDecision.isDecisionTaken());
    assertTrue(moveDecision.isDecisionTaken());
    assertEquals(AllocationDecision.NO, moveDecision.getAllocationDecision());
    assertEquals("cannot move shard to another node, even though it is not allowed to remain on its current node",
        moveDecision.getExplanation());
    assertFalse(moveDecision.canRemain());
    assertFalse(moveDecision.forceMove());
    assertFalse(moveDecision.canRebalanceCluster());
    assertNull(moveDecision.getClusterRebalanceDecision());
    assertNull(moveDecision.getTargetNode());
    assertEquals(0, moveDecision.getCurrentNodeRanking());

    // verifying can remain decision object: the filter decider vetoes remaining on the current node
    assertNotNull(moveDecision.getCanRemainDecision());
    assertEquals(Decision.Type.NO, moveDecision.getCanRemainDecision().type());
    for (Decision d : moveDecision.getCanRemainDecision().getDecisions()) {
        if (d.label().equals("filter")) {
            assertEquals(Decision.Type.NO, d.type());
            assertEquals("node does not match index setting [index.routing.allocation.include] filters [_name:\"non_existent_node\"]",
                d.getExplanation());
        } else {
            assertEquals(Decision.Type.YES, d.type());
            assertNotNull(d.getExplanation());
        }
    }

    // verify node decisions: only the one other node is a move candidate, and it is also filtered out
    assertEquals(1, moveDecision.getNodeDecisions().size());
    NodeAllocationResult result = moveDecision.getNodeDecisions().get(0);
    assertNotNull(result.getNode());
    assertEquals(1, result.getWeightRanking());
    assertEquals(AllocationDecision.NO, result.getNodeDecision());
    if (includeYesDecisions) {
        assertThat(result.getCanAllocateDecision().getDecisions().size(), greaterThan(1));
    } else {
        assertEquals(1, result.getCanAllocateDecision().getDecisions().size());
    }
    for (Decision d : result.getCanAllocateDecision().getDecisions()) {
        if (d.label().equals("filter")) {
            assertEquals(Decision.Type.NO, d.type());
            assertEquals("node does not match index setting [index.routing.allocation.include] filters [_name:\"non_existent_node\"]",
                d.getExplanation());
        } else {
            assertEquals(Decision.Type.YES, d.type());
            assertNotNull(d.getExplanation());
        }
    }

    // verify JSON output; field order is fixed, so the parser is advanced token-by-token
    try (XContentParser parser = getParser(explanation)) {
        verifyShardInfo(parser, true, includeDiskInfo, ShardRoutingState.STARTED);
        parser.nextToken();
        assertEquals("can_remain_on_current_node", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.NO.toString(), parser.text());
        parser.nextToken();
        assertEquals("can_remain_decisions", parser.currentName());
        verifyDeciders(parser, AllocationDecision.NO);
        parser.nextToken();
        assertEquals("can_move_to_other_node", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.NO.toString(), parser.text());
        parser.nextToken();
        assertEquals("move_explanation", parser.currentName());
        parser.nextToken();
        assertEquals("cannot move shard to another node, even though it is not allowed to remain on its current node", parser.text());
        verifyNodeDecisions(parser, allNodeDecisions(AllocationDecision.NO, true), includeYesDecisions, false);
        assertEquals(Token.END_OBJECT, parser.nextToken());
    }
}
/**
 * Verifies the explain output when the index disables rebalancing entirely
 * ({@code index.routing.rebalance.enable=none}): the shard may remain on its node, but the
 * cluster-level rebalance decision is NO, so nothing moves to the newly added (empty) node.
 */
public void testRebalancingNotAllowed() throws Exception {
    logger.info("--> starting a single node");
    internalCluster().startNode();
    ensureStableCluster(1);
    prepareIndex(5, 0);
    logger.info("--> disabling rebalancing on the index");
    client().admin().indices().prepareUpdateSettings("idx").setSettings(
        Settings.builder().put("index.routing.rebalance.enable", "none")).get();
    logger.info("--> starting another node, with rebalancing disabled, it should get no shards");
    internalCluster().startNode();
    ensureStableCluster(2);

    boolean includeYesDecisions = randomBoolean();
    boolean includeDiskInfo = randomBoolean();
    ClusterAllocationExplanation explanation = runExplain(true, includeYesDecisions, includeDiskInfo);

    ShardId shardId = explanation.getShard();
    boolean isPrimary = explanation.isPrimary();
    ShardRoutingState shardRoutingState = explanation.getShardState();
    DiscoveryNode currentNode = explanation.getCurrentNode();
    UnassignedInfo unassignedInfo = explanation.getUnassignedInfo();
    ClusterInfo clusterInfo = explanation.getClusterInfo();
    AllocateUnassignedDecision allocateDecision = explanation.getShardAllocationDecision().getAllocateDecision();
    MoveDecision moveDecision = explanation.getShardAllocationDecision().getMoveDecision();

    // verify shard info
    assertEquals("idx", shardId.getIndexName());
    assertEquals(0, shardId.getId());
    assertTrue(isPrimary);

    // verify current node info: the shard is started on the first node
    assertEquals(ShardRoutingState.STARTED, shardRoutingState);
    assertNotNull(currentNode);

    // verify unassigned info: none, since the shard is assigned
    assertNull(unassignedInfo);

    // verify cluster info
    verifyClusterInfo(clusterInfo, includeDiskInfo, 2);

    // verify decision object: shard can remain, but cluster-wide rebalancing is disallowed
    assertFalse(allocateDecision.isDecisionTaken());
    assertTrue(moveDecision.isDecisionTaken());
    assertEquals(AllocationDecision.NO, moveDecision.getAllocationDecision());
    assertEquals("rebalancing is not allowed, even though there is at least one node on which the shard can be allocated",
        moveDecision.getExplanation());
    assertTrue(moveDecision.canRemain());
    assertFalse(moveDecision.forceMove());
    assertFalse(moveDecision.canRebalanceCluster());
    assertNotNull(moveDecision.getCanRemainDecision());
    assertNull(moveDecision.getTargetNode());
    assertEquals(2, moveDecision.getCurrentNodeRanking());

    // verifying cluster rebalance decision object: the enable decider supplies the NO
    assertNotNull(moveDecision.getClusterRebalanceDecision());
    assertEquals(Decision.Type.NO, moveDecision.getClusterRebalanceDecision().type());
    for (Decision d : moveDecision.getClusterRebalanceDecision().getDecisions()) {
        if (d.label().equals("enable")) {
            assertEquals(Decision.Type.NO, d.type());
            assertEquals("no rebalancing is allowed due to index setting [index.routing.rebalance.enable=none]",
                d.getExplanation());
        } else {
            assertEquals(Decision.Type.YES, d.type());
            assertNotNull(d.getExplanation());
        }
    }

    // verify node decisions: the other node could accept the shard (YES), rebalancing is just disabled
    assertEquals(1, moveDecision.getNodeDecisions().size());
    NodeAllocationResult result = moveDecision.getNodeDecisions().get(0);
    assertNotNull(result.getNode());
    assertEquals(1, result.getWeightRanking());
    assertEquals(AllocationDecision.YES, result.getNodeDecision());
    // all per-node decisions are YES here, so without includeYesDecisions the list is empty
    if (includeYesDecisions) {
        assertThat(result.getCanAllocateDecision().getDecisions().size(), greaterThan(0));
    } else {
        assertEquals(0, result.getCanAllocateDecision().getDecisions().size());
    }
    for (Decision d : result.getCanAllocateDecision().getDecisions()) {
        assertEquals(Decision.Type.YES, d.type());
        assertNotNull(d.getExplanation());
    }

    // verify JSON output; field order is fixed, so the parser is advanced token-by-token
    try (XContentParser parser = getParser(explanation)) {
        verifyShardInfo(parser, true, includeDiskInfo, ShardRoutingState.STARTED);
        parser.nextToken();
        assertEquals("can_remain_on_current_node", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.YES.toString(), parser.text());
        parser.nextToken();
        assertEquals("can_rebalance_cluster", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.NO.toString(), parser.text());
        parser.nextToken();
        assertEquals("can_rebalance_cluster_decisions", parser.currentName());
        verifyDeciders(parser, AllocationDecision.NO);
        parser.nextToken();
        assertEquals("can_rebalance_to_other_node", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.NO.toString(), parser.text());
        parser.nextToken();
        assertEquals("rebalance_explanation", parser.currentName());
        parser.nextToken();
        assertEquals("rebalancing is not allowed, even though there is at least one node on which the shard can be allocated",
            parser.text());
        verifyNodeDecisions(parser, allNodeDecisions(AllocationDecision.YES, true), includeYesDecisions, false);
        assertEquals(Token.END_OBJECT, parser.nextToken());
    }
}
/**
 * Verifies the explain output when rebalancing is allowed but the balance threshold is set so
 * high that moving the shard would not improve cluster balance: {@code canRebalanceCluster()}
 * is true, yet no target node exists and the per-node decision is WORSE_BALANCE.
 */
public void testWorseBalance() throws Exception {
    logger.info("--> starting a single node");
    internalCluster().startNode();
    ensureStableCluster(1);
    prepareIndex(5, 0);
    logger.info("--> setting balancing threshold really high, so it won't be met");
    client().admin().cluster().prepareUpdateSettings().setTransientSettings(
        Settings.builder().put("cluster.routing.allocation.balance.threshold", 1000.0f)).get();
    logger.info("--> starting another node, with the rebalance threshold so high, it should not get any shards");
    internalCluster().startNode();
    ensureStableCluster(2);

    boolean includeYesDecisions = randomBoolean();
    boolean includeDiskInfo = randomBoolean();
    ClusterAllocationExplanation explanation = runExplain(true, includeYesDecisions, includeDiskInfo);

    ShardId shardId = explanation.getShard();
    boolean isPrimary = explanation.isPrimary();
    ShardRoutingState shardRoutingState = explanation.getShardState();
    DiscoveryNode currentNode = explanation.getCurrentNode();
    UnassignedInfo unassignedInfo = explanation.getUnassignedInfo();
    ClusterInfo clusterInfo = explanation.getClusterInfo();
    AllocateUnassignedDecision allocateDecision = explanation.getShardAllocationDecision().getAllocateDecision();
    MoveDecision moveDecision = explanation.getShardAllocationDecision().getMoveDecision();

    // verify shard info
    assertEquals("idx", shardId.getIndexName());
    assertEquals(0, shardId.getId());
    assertTrue(isPrimary);

    // verify current node info: the shard is started on the first node
    assertEquals(ShardRoutingState.STARTED, shardRoutingState);
    assertNotNull(currentNode);

    // verify unassigned info: none, since the shard is assigned
    assertNull(unassignedInfo);

    // verify cluster info
    verifyClusterInfo(clusterInfo, includeDiskInfo, 2);

    // verify decision object: rebalancing is permitted, but no move would improve the balance
    assertFalse(allocateDecision.isDecisionTaken());
    assertTrue(moveDecision.isDecisionTaken());
    assertEquals(AllocationDecision.NO, moveDecision.getAllocationDecision());
    assertEquals("cannot rebalance as no target node exists that can both allocate this shard and improve the cluster balance",
        moveDecision.getExplanation());
    assertTrue(moveDecision.canRemain());
    assertFalse(moveDecision.forceMove());
    assertTrue(moveDecision.canRebalanceCluster());
    assertNotNull(moveDecision.getCanRemainDecision());
    assertNull(moveDecision.getTargetNode());
    assertEquals(1, moveDecision.getCurrentNodeRanking());

    // verifying cluster rebalance decision object: all deciders say YES at the cluster level
    assertNotNull(moveDecision.getClusterRebalanceDecision());
    assertEquals(Decision.Type.YES, moveDecision.getClusterRebalanceDecision().type());
    for (Decision d : moveDecision.getClusterRebalanceDecision().getDecisions()) {
        assertEquals(Decision.Type.YES, d.type());
        assertNotNull(d.getExplanation());
    }

    // verify node decisions: the candidate node is rejected for weight reasons, not by a decider
    assertEquals(1, moveDecision.getNodeDecisions().size());
    NodeAllocationResult result = moveDecision.getNodeDecisions().get(0);
    assertNotNull(result.getNode());
    assertEquals(1, result.getWeightRanking());
    assertEquals(AllocationDecision.WORSE_BALANCE, result.getNodeDecision());
    // all per-node decider decisions are YES here, so without includeYesDecisions the list is empty
    if (includeYesDecisions) {
        assertThat(result.getCanAllocateDecision().getDecisions().size(), greaterThan(0));
    } else {
        assertEquals(0, result.getCanAllocateDecision().getDecisions().size());
    }
    for (Decision d : result.getCanAllocateDecision().getDecisions()) {
        assertEquals(Decision.Type.YES, d.type());
        assertNotNull(d.getExplanation());
    }

    // verify JSON output; field order is fixed, so the parser is advanced token-by-token
    try (XContentParser parser = getParser(explanation)) {
        verifyShardInfo(parser, true, includeDiskInfo, ShardRoutingState.STARTED);
        parser.nextToken();
        assertEquals("can_remain_on_current_node", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.YES.toString(), parser.text());
        parser.nextToken();
        assertEquals("can_rebalance_cluster", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.YES.toString(), parser.text());
        parser.nextToken();
        assertEquals("can_rebalance_to_other_node", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.NO.toString(), parser.text());
        parser.nextToken();
        assertEquals("rebalance_explanation", parser.currentName());
        parser.nextToken();
        assertEquals("cannot rebalance as no target node exists that can both allocate this shard and improve the cluster balance",
            parser.text());
        verifyNodeDecisions(parser, allNodeDecisions(AllocationDecision.WORSE_BALANCE, true), includeYesDecisions, false);
        assertEquals(Token.END_OBJECT, parser.nextToken());
    }
}
/**
 * Verifies the explain output when moving the shard WOULD improve cluster balance, but an
 * allocation filter pins the shard to its current node: the cluster rebalance decision is YES,
 * while the only candidate node is rejected by the filter decider (per-node decision NO).
 */
public void testBetterBalanceButCannotAllocate() throws Exception {
    logger.info("--> starting a single node");
    String firstNode = internalCluster().startNode();
    ensureStableCluster(1);
    prepareIndex(5, 0);
    logger.info("--> setting up allocation filtering to only allow allocation to the current node");
    client().admin().indices().prepareUpdateSettings("idx").setSettings(
        Settings.builder().put("index.routing.allocation.include._name", firstNode)).get();
    logger.info("--> starting another node, with filtering not allowing allocation to the new node, it should not get any shards");
    internalCluster().startNode();
    ensureStableCluster(2);

    boolean includeYesDecisions = randomBoolean();
    boolean includeDiskInfo = randomBoolean();
    ClusterAllocationExplanation explanation = runExplain(true, includeYesDecisions, includeDiskInfo);

    ShardId shardId = explanation.getShard();
    boolean isPrimary = explanation.isPrimary();
    ShardRoutingState shardRoutingState = explanation.getShardState();
    DiscoveryNode currentNode = explanation.getCurrentNode();
    UnassignedInfo unassignedInfo = explanation.getUnassignedInfo();
    ClusterInfo clusterInfo = explanation.getClusterInfo();
    AllocateUnassignedDecision allocateDecision = explanation.getShardAllocationDecision().getAllocateDecision();
    MoveDecision moveDecision = explanation.getShardAllocationDecision().getMoveDecision();

    // verify shard info
    assertEquals("idx", shardId.getIndexName());
    assertEquals(0, shardId.getId());
    assertTrue(isPrimary);

    // verify current node info: the shard is started on the first node
    assertEquals(ShardRoutingState.STARTED, shardRoutingState);
    assertNotNull(currentNode);

    // verify unassigned info: none, since the shard is assigned
    assertNull(unassignedInfo);

    // verify cluster info
    verifyClusterInfo(clusterInfo, includeDiskInfo, 2);

    // verify decision object: rebalancing is allowed cluster-wide, but no valid target node exists
    assertFalse(allocateDecision.isDecisionTaken());
    assertTrue(moveDecision.isDecisionTaken());
    assertEquals(AllocationDecision.NO, moveDecision.getAllocationDecision());
    assertEquals("cannot rebalance as no target node exists that can both allocate this shard and improve the cluster balance",
        moveDecision.getExplanation());
    assertTrue(moveDecision.canRemain());
    assertFalse(moveDecision.forceMove());
    assertTrue(moveDecision.canRebalanceCluster());
    assertNotNull(moveDecision.getCanRemainDecision());
    assertNull(moveDecision.getTargetNode());
    assertEquals(2, moveDecision.getCurrentNodeRanking());

    // verifying cluster rebalance decision object: all deciders say YES at the cluster level
    assertNotNull(moveDecision.getClusterRebalanceDecision());
    assertEquals(Decision.Type.YES, moveDecision.getClusterRebalanceDecision().type());
    for (Decision d : moveDecision.getClusterRebalanceDecision().getDecisions()) {
        assertEquals(Decision.Type.YES, d.type());
        assertNotNull(d.getExplanation());
    }

    // verify node decisions: the only candidate node is vetoed by the filter decider
    assertEquals(1, moveDecision.getNodeDecisions().size());
    NodeAllocationResult result = moveDecision.getNodeDecisions().get(0);
    assertNotNull(result.getNode());
    assertEquals(1, result.getWeightRanking());
    assertEquals(AllocationDecision.NO, result.getNodeDecision());
    if (includeYesDecisions) {
        assertThat(result.getCanAllocateDecision().getDecisions().size(), greaterThan(1));
    } else {
        assertEquals(1, result.getCanAllocateDecision().getDecisions().size());
    }
    // firstNode hosts the primary, so the filter message quotes the primary node's name
    String primaryNodeName = primaryNodeName();
    for (Decision d : result.getCanAllocateDecision().getDecisions()) {
        if (d.label().equals("filter")) {
            assertEquals(Decision.Type.NO, d.type());
            assertEquals("node does not match index setting [index.routing.allocation.include] filters [_name:\"" +
                         primaryNodeName + "\"]", d.getExplanation());
        } else {
            assertEquals(Decision.Type.YES, d.type());
            assertNotNull(d.getExplanation());
        }
    }

    // verify JSON output; field order is fixed, so the parser is advanced token-by-token
    try (XContentParser parser = getParser(explanation)) {
        verifyShardInfo(parser, true, includeDiskInfo, ShardRoutingState.STARTED);
        parser.nextToken();
        assertEquals("can_remain_on_current_node", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.YES.toString(), parser.text());
        parser.nextToken();
        assertEquals("can_rebalance_cluster", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.YES.toString(), parser.text());
        parser.nextToken();
        assertEquals("can_rebalance_to_other_node", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.NO.toString(), parser.text());
        parser.nextToken();
        assertEquals("rebalance_explanation", parser.currentName());
        parser.nextToken();
        assertEquals("cannot rebalance as no target node exists that can both allocate this shard and improve the cluster balance",
            parser.text());
        verifyNodeDecisions(parser, allNodeDecisions(AllocationDecision.NO, true), includeYesDecisions, false);
        assertEquals(Token.END_OBJECT, parser.nextToken());
    }
}
/**
 * Verifies the explain output for a started replica when the explain request targets the
 * replica's node explicitly (via {@code runExplain(..., nodeId, ...)}): the replica can remain,
 * but cluster-level rebalancing is not allowed, so the move decision is NO.
 */
public void testAssignedReplicaOnSpecificNode() throws Exception {
    logger.info("--> starting 3 nodes");
    List<String> nodes = internalCluster().startNodes(3);

    // one of the three nodes is excluded, leaving exactly the primary + one replica assigned
    String excludedNode = nodes.get(randomIntBetween(0, 2));
    prepareIndex(randomIndexState(), 1, 2,
        Settings.builder().put("index.routing.allocation.exclude._name", excludedNode).build(),
        ActiveShardCount.from(2));

    boolean includeYesDecisions = randomBoolean();
    boolean includeDiskInfo = randomBoolean();
    // explain the replica shard on its specific node
    ClusterAllocationExplanation explanation = runExplain(false, replicaNode().getId(), includeYesDecisions, includeDiskInfo);

    ShardId shardId = explanation.getShard();
    boolean isPrimary = explanation.isPrimary();
    ShardRoutingState shardRoutingState = explanation.getShardState();
    DiscoveryNode currentNode = explanation.getCurrentNode();
    UnassignedInfo unassignedInfo = explanation.getUnassignedInfo();
    ClusterInfo clusterInfo = explanation.getClusterInfo();
    AllocateUnassignedDecision allocateDecision = explanation.getShardAllocationDecision().getAllocateDecision();
    MoveDecision moveDecision = explanation.getShardAllocationDecision().getMoveDecision();

    // verify shard info
    assertEquals("idx", shardId.getIndexName());
    assertEquals(0, shardId.getId());
    assertFalse(isPrimary);

    // verify current node info: must be the replica's node, since that is what was requested
    assertEquals(ShardRoutingState.STARTED, shardRoutingState);
    assertNotNull(currentNode);
    assertEquals(replicaNode().getName(), currentNode.getName());

    // verify unassigned info: none, since the shard is assigned
    assertNull(unassignedInfo);

    // verify cluster info
    verifyClusterInfo(clusterInfo, includeDiskInfo, 3);

    // verify decision objects: assigned shard, so only the move decision is taken
    assertFalse(allocateDecision.isDecisionTaken());
    assertTrue(moveDecision.isDecisionTaken());
    assertEquals(AllocationDecision.NO, moveDecision.getAllocationDecision());
    assertEquals("rebalancing is not allowed", moveDecision.getExplanation());
    assertTrue(moveDecision.canRemain());
    assertFalse(moveDecision.forceMove());
    assertFalse(moveDecision.canRebalanceCluster());
    assertNotNull(moveDecision.getCanRemainDecision());
    assertNull(moveDecision.getTargetNode());

    // verifying cluster rebalance decision object
    assertNotNull(moveDecision.getClusterRebalanceDecision());
    assertEquals(Decision.Type.NO, moveDecision.getClusterRebalanceDecision().type());

    // verify node decisions: the two other nodes are rejected (filter excludes one,
    // same_shard blocks the node already holding a copy)
    assertEquals(2, moveDecision.getNodeDecisions().size());
    for (NodeAllocationResult result : moveDecision.getNodeDecisions()) {
        assertNotNull(result.getNode());
        assertEquals(1, result.getWeightRanking());
        assertEquals(AllocationDecision.NO, result.getNodeDecision());
        if (includeYesDecisions) {
            assertThat(result.getCanAllocateDecision().getDecisions().size(), greaterThan(1));
        } else {
            assertEquals(1, result.getCanAllocateDecision().getDecisions().size());
        }
        for (Decision d : result.getCanAllocateDecision().getDecisions()) {
            if (d.type() == Decision.Type.NO) {
                assertThat(d.label(), is(oneOf("filter", "same_shard")));
            }
            assertNotNull(d.getExplanation());
        }
    }

    // verify JSON output; field order is fixed, so the parser is advanced token-by-token
    try (XContentParser parser = getParser(explanation)) {
        verifyShardInfo(parser, false, includeDiskInfo, ShardRoutingState.STARTED);
        parser.nextToken();
        assertEquals("can_remain_on_current_node", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.YES.toString(), parser.text());
        parser.nextToken();
        assertEquals("can_rebalance_cluster", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.NO.toString(), parser.text());
        parser.nextToken();
        assertEquals("can_rebalance_cluster_decisions", parser.currentName());
        verifyDeciders(parser, AllocationDecision.NO);
        parser.nextToken();
        assertEquals("can_rebalance_to_other_node", parser.currentName());
        parser.nextToken();
        assertEquals(AllocationDecision.NO.toString(), parser.text());
        parser.nextToken();
        assertEquals("rebalance_explanation", parser.currentName());
        parser.nextToken();
        assertEquals("rebalancing is not allowed", parser.text());
        verifyNodeDecisions(parser, allNodeDecisions(AllocationDecision.NO, false), includeYesDecisions, false);
        assertEquals(Token.END_OBJECT, parser.nextToken());
    }
}
public void testCannotAllocateStaleReplicaExplanation() throws Exception {
logger.info("--> starting 3 nodes");
final String masterNode = internalCluster().startNode();
// start replica node first, so it's path will be used first when we start a node after
// stopping all of them at end of test.
final String replicaNode = internalCluster().startNode();
Settings replicaDataPathSettings = internalCluster().dataPathSettings(replicaNode);
final String primaryNode = internalCluster().startNode();
prepareIndex(IndexMetadata.State.OPEN, 1, 1,
Settings.builder()
.put("index.routing.allocation.include._name", primaryNode)
.put("index.routing.allocation.exclude._name", masterNode)
.build(),
ActiveShardCount.ONE);
client().admin().indices().prepareUpdateSettings("idx").setSettings(
Settings.builder().put("index.routing.allocation.include._name", (String) null)).get();
ensureGreen();
assertThat(replicaNode().getName(), equalTo(replicaNode));
assertThat(primaryNodeName(), equalTo(primaryNode));
logger.info("--> stop node with the replica shard");
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNode));
final IndexMetadata.State indexState = randomIndexState();
if (indexState == IndexMetadata.State.OPEN) {
logger.info("--> index more data, now the replica is stale");
indexData();
} else {
logger.info("--> close the index, now the replica is stale");
assertAcked(client().admin().indices().prepareClose("idx"));
final ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth("idx")
.setTimeout(TimeValue.timeValueSeconds(30))
.setWaitForActiveShards(ActiveShardCount.ONE)
.setWaitForNoInitializingShards(true)
.setWaitForEvents(Priority.LANGUID)
.get();
assertThat(clusterHealthResponse.getStatus().value(), lessThanOrEqualTo(ClusterHealthStatus.YELLOW.value()));
}
logger.info("--> stop the node with the primary");
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNode));
logger.info("--> restart the node with the stale replica");
String restartedNode = internalCluster().startDataOnlyNode(replicaDataPathSettings);
ensureClusterSizeConsistency(); // wait for the master to finish processing join.
// wait until the system has fetched shard data and we know there is no valid shard copy
assertBusy(() -> {
ClusterAllocationExplanation explanation = client().admin().cluster().prepareAllocationExplain()
.setIndex("idx").setShard(0).setPrimary(true).get().getExplanation();
assertTrue(explanation.getShardAllocationDecision().getAllocateDecision().isDecisionTaken());
assertEquals(AllocationDecision.NO_VALID_SHARD_COPY,
explanation.getShardAllocationDecision().getAllocateDecision().getAllocationDecision());
});
boolean includeYesDecisions = randomBoolean();
boolean includeDiskInfo = randomBoolean();
ClusterAllocationExplanation explanation = runExplain(true, includeYesDecisions, includeDiskInfo);
ShardId shardId = explanation.getShard();
boolean isPrimary = explanation.isPrimary();
ShardRoutingState shardRoutingState = explanation.getShardState();
DiscoveryNode currentNode = explanation.getCurrentNode();
UnassignedInfo unassignedInfo = explanation.getUnassignedInfo();
AllocateUnassignedDecision allocateDecision = explanation.getShardAllocationDecision().getAllocateDecision();
MoveDecision moveDecision = explanation.getShardAllocationDecision().getMoveDecision();
// verify shard info
assertEquals("idx", shardId.getIndexName());
assertEquals(0, shardId.getId());
assertTrue(isPrimary);
// verify current node info
assertEquals(ShardRoutingState.UNASSIGNED, shardRoutingState);
assertNull(currentNode);
// verify unassigned info
assertNotNull(unassignedInfo);
// verify decision object
assertTrue(allocateDecision.isDecisionTaken());
assertFalse(moveDecision.isDecisionTaken());
assertEquals(AllocationDecision.NO_VALID_SHARD_COPY, allocateDecision.getAllocationDecision());
assertEquals(2, allocateDecision.getNodeDecisions().size());
for (NodeAllocationResult nodeAllocationResult : allocateDecision.getNodeDecisions()) {
if (nodeAllocationResult.getNode().getName().equals(restartedNode)) {
assertNotNull(nodeAllocationResult.getShardStoreInfo());
assertNotNull(nodeAllocationResult.getShardStoreInfo().getAllocationId());
assertFalse(nodeAllocationResult.getShardStoreInfo().isInSync());
assertNull(nodeAllocationResult.getShardStoreInfo().getStoreException());
} else {
assertNotNull(nodeAllocationResult.getShardStoreInfo());
assertNull(nodeAllocationResult.getShardStoreInfo().getAllocationId());
assertFalse(nodeAllocationResult.getShardStoreInfo().isInSync());
assertNull(nodeAllocationResult.getShardStoreInfo().getStoreException());
}
}
// verify JSON output
try (XContentParser parser = getParser(explanation)) {
verifyShardInfo(parser, true, includeDiskInfo, ShardRoutingState.UNASSIGNED);
parser.nextToken();
assertEquals("can_allocate", parser.currentName());
parser.nextToken();
assertEquals(AllocationDecision.NO_VALID_SHARD_COPY.toString(), parser.text());
parser.nextToken();
assertEquals("allocate_explanation", parser.currentName());
parser.nextToken();
assertEquals("cannot allocate because all found copies of the shard are either stale or corrupt", parser.text());
verifyStaleShardCopyNodeDecisions(parser, 2, Collections.singleton(restartedNode));
}
}
private void verifyClusterInfo(ClusterInfo clusterInfo, boolean includeDiskInfo, int numNodes) {
if (includeDiskInfo) {
assertThat(clusterInfo.getNodeMostAvailableDiskUsages().size(), greaterThanOrEqualTo(0));
assertThat(clusterInfo.getNodeLeastAvailableDiskUsages().size(), greaterThanOrEqualTo(0));
assertThat(clusterInfo.getNodeMostAvailableDiskUsages().size(), lessThanOrEqualTo(numNodes));
assertThat(clusterInfo.getNodeLeastAvailableDiskUsages().size(), lessThanOrEqualTo(numNodes));
} else {
assertNull(clusterInfo);
}
}
    /**
     * Runs the allocation explain API for shard 0 of index "idx" without pinning
     * the explanation to a particular node (node id is passed as null).
     */
    private ClusterAllocationExplanation runExplain(boolean primary, boolean includeYesDecisions, boolean includeDiskInfo)
        throws Exception {
        return runExplain(primary, null, includeYesDecisions, includeDiskInfo);
    }
private ClusterAllocationExplanation runExplain(boolean primary, String nodeId, boolean includeYesDecisions, boolean includeDiskInfo)
throws Exception {
ClusterAllocationExplanation explanation = client().admin().cluster().prepareAllocationExplain()
.setIndex("idx").setShard(0).setPrimary(primary)
.setIncludeYesDecisions(includeYesDecisions)
.setIncludeDiskInfo(includeDiskInfo)
.setCurrentNode(nodeId)
.get().getExplanation();
if (logger.isDebugEnabled()) {
XContentBuilder builder = JsonXContent.contentBuilder();
builder.prettyPrint();
builder.humanReadable(true);
logger.debug("--> explain json output: \n{}", Strings.toString(explanation.toXContent(builder, ToXContent.EMPTY_PARAMS)));
}
return explanation;
}
    /**
     * Creates the test index "idx" in a randomly chosen state (OPEN or CLOSE)
     * with default settings, waiting for all shards to become active.
     */
    private void prepareIndex(final int numPrimaries, final int numReplicas) {
        prepareIndex(randomIndexState(), numPrimaries, numReplicas, Settings.EMPTY, ActiveShardCount.ALL);
    }
private void prepareIndex(final IndexMetadata.State state, final int numPrimaries, final int numReplicas,
final Settings settings, final ActiveShardCount activeShardCount) {
logger.info("--> creating a {} index with {} primary, {} replicas", state, numPrimaries, numReplicas);
assertAcked(client().admin().indices().prepareCreate("idx")
.setSettings(Settings.builder()
.put("index.number_of_shards", numPrimaries)
.put("index.number_of_replicas", numReplicas)
.put(settings))
.setWaitForActiveShards(activeShardCount)
.get());
if (activeShardCount != ActiveShardCount.NONE) {
indexData();
}
if (state == IndexMetadata.State.CLOSE) {
assertAcked(client().admin().indices().prepareClose("idx"));
final ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth("idx")
.setTimeout(TimeValue.timeValueSeconds(30))
.setWaitForActiveShards(activeShardCount)
.setWaitForEvents(Priority.LANGUID)
.get();
assertThat(clusterHealthResponse.getStatus().value(), lessThanOrEqualTo(ClusterHealthStatus.YELLOW.value()));
}
}
private static IndexMetadata.State randomIndexState() {
return randomFrom(IndexMetadata.State.values());
}
private void indexData() {
for (int i = 0; i < 10; i++) {
index("idx", Integer.toString(i), Collections.singletonMap("f1", Integer.toString(i)));
}
flushAndRefresh("idx");
}
private String primaryNodeName() {
ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
String nodeId = clusterState.getRoutingTable().index("idx").shard(0).primaryShard().currentNodeId();
return clusterState.getRoutingNodes().node(nodeId).node().getName();
}
private DiscoveryNode replicaNode() {
ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
String nodeId = clusterState.getRoutingTable().index("idx").shard(0).replicaShards().get(0).currentNodeId();
return clusterState.getRoutingNodes().node(nodeId).node();
}
private XContentParser getParser(ClusterAllocationExplanation explanation) throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder();
return createParser(explanation.toXContent(builder, ToXContent.EMPTY_PARAMS));
}
    /**
     * Verifies the header fields of the explain API JSON output: index, shard,
     * primary flag, and current_state, followed by either the unassigned_info
     * object (for UNASSIGNED shards) or the current_node object (for STARTED
     * shards), and finally skips over the cluster_info object when disk info
     * was requested. The parser must be positioned before the top-level
     * START_OBJECT; the token-by-token walk relies on the exact field order the
     * explain API emits.
     */
    private void verifyShardInfo(XContentParser parser, boolean primary, boolean includeDiskInfo, ShardRoutingState state)
        throws IOException {
        parser.nextToken();
        assertEquals(Token.START_OBJECT, parser.currentToken());
        parser.nextToken();
        assertEquals("index", parser.currentName());
        parser.nextToken();
        assertEquals("idx", parser.text());
        parser.nextToken();
        assertEquals("shard", parser.currentName());
        parser.nextToken();
        assertEquals(0, parser.intValue());
        parser.nextToken();
        assertEquals("primary", parser.currentName());
        parser.nextToken();
        assertEquals(primary, parser.booleanValue());
        parser.nextToken();
        assertEquals("current_state", parser.currentName());
        parser.nextToken();
        assertEquals(state.toString().toLowerCase(Locale.ROOT), parser.text());
        if (state == ShardRoutingState.UNASSIGNED) {
            parser.nextToken();
            assertEquals("unassigned_info", parser.currentName());
            assertEquals(Token.START_OBJECT, parser.nextToken());
            Token token;
            while ((token = parser.nextToken()) != Token.END_OBJECT) { // until we reach end of unassigned_info
                if (token == XContentParser.Token.FIELD_NAME) {
                    assertNotEquals("delayed", parser.currentName()); // we should never display "delayed" from unassigned info
                    if (parser.currentName().equals("last_allocation_status")) {
                        parser.nextToken();
                        assertThat(parser.text(), is(oneOf(AllocationDecision.NO.toString(),
                            AllocationDecision.NO_VALID_SHARD_COPY.toString(),
                            AllocationDecision.AWAITING_INFO.toString(),
                            AllocationDecision.NO_ATTEMPT.toString())));
                    }
                }
            }
        } else {
            // a shard that is not UNASSIGNED must be STARTED in these tests
            assertEquals(ShardRoutingState.STARTED, state);
            parser.nextToken();
            assertEquals("current_node", parser.currentName());
            assertEquals(Token.START_OBJECT, parser.nextToken());
            Token token;
            while ((token = parser.nextToken()) != Token.END_OBJECT) { // until we reach end of current_node
                if (token == Token.FIELD_NAME) {
                    assertTrue(parser.currentName().equals("id")
                        || parser.currentName().equals("name")
                        || parser.currentName().equals("transport_address")
                        || parser.currentName().equals("weight_ranking"));
                } else {
                    assertTrue(token.isValue());
                    assertNotNull(parser.text());
                }
            }
        }
        if (includeDiskInfo) {
            // disk info is included, just verify the object is there
            parser.nextToken();
            assertEquals("cluster_info", parser.currentName());
            assertEquals(Token.START_OBJECT, parser.nextToken());
            // skip the entire cluster_info object by tracking nesting depth
            int numObjects = 1;
            while (numObjects > 0) {
                Token token = parser.nextToken();
                if (token == Token.START_OBJECT) {
                    ++numObjects;
                } else if (token == Token.END_OBJECT) {
                    --numObjects;
                }
            }
        }
    }
    /**
     * Verifies the node_allocation_decisions array when no valid (in-sync) shard
     * copy exists: each node decision must be NO; nodes listed in foundStores
     * must expose a stale store (in_sync=false but with an allocation id), and
     * every other node must report that no store data was found at all.
     */
    private void verifyStaleShardCopyNodeDecisions(XContentParser parser, int numNodes, Set<String> foundStores) throws IOException {
        parser.nextToken();
        assertEquals("node_allocation_decisions", parser.currentName());
        assertEquals(Token.START_ARRAY, parser.nextToken());
        for (int i = 0; i < numNodes; i++) {
            String nodeName = verifyNodeDecisionPrologue(parser);
            assertEquals(AllocationDecision.NO.toString(), parser.text());
            parser.nextToken();
            assertEquals("store", parser.currentName());
            assertEquals(Token.START_OBJECT, parser.nextToken());
            parser.nextToken();
            if (foundStores.contains(nodeName)) {
                // shard data was found on the node, but it is stale
                assertEquals("in_sync", parser.currentName());
                parser.nextToken();
                assertFalse(parser.booleanValue());
                parser.nextToken();
                assertEquals("allocation_id", parser.currentName());
                parser.nextToken();
                assertNotNull(parser.text());
            } else {
                // no shard data was found on the node
                assertEquals("found", parser.currentName());
                parser.nextToken();
                assertFalse(parser.booleanValue());
            }
            assertEquals(Token.END_OBJECT, parser.nextToken());
            parser.nextToken();
            assertEquals(Token.END_OBJECT, parser.currentToken());
        }
        assertEquals(Token.END_ARRAY, parser.nextToken());
    }
    /**
     * Verifies the node_allocation_decisions array against the expected decision
     * per node: YES decisions must sort before non-YES ones, store info appears
     * only when reuseStore is set, weight rankings are present otherwise, and
     * decider details appear for NO/THROTTLED decisions (or always, when
     * includeYesDecisions was requested).
     */
    private void verifyNodeDecisions(XContentParser parser, Map<String, AllocationDecision> expectedNodeDecisions,
                                     boolean includeYesDecisions, boolean reuseStore) throws IOException {
        parser.nextToken();
        assertEquals("node_allocation_decisions", parser.currentName());
        assertEquals(Token.START_ARRAY, parser.nextToken());
        boolean encounteredNo = false;
        final int numNodes = expectedNodeDecisions.size();
        for (int i = 0; i < numNodes; i++) {
            String nodeName = verifyNodeDecisionPrologue(parser);
            AllocationDecision allocationDecision = expectedNodeDecisions.get(nodeName);
            assertEquals(allocationDecision.toString(), parser.text());
            if (allocationDecision != AllocationDecision.YES) {
                encounteredNo = true;
            } else {
                assertFalse("encountered a YES node decision after a NO node decision - sort order is wrong", encounteredNo);
            }
            parser.nextToken();
            if ("store".equals(parser.currentName())) {
                assertTrue("store info should not be present", reuseStore);
                assertEquals(Token.START_OBJECT, parser.nextToken());
                parser.nextToken();
                assertEquals("matching_size_in_bytes", parser.currentName());
                parser.nextToken();
                assertThat(parser.longValue(), greaterThan(0L));
                assertEquals(Token.END_OBJECT, parser.nextToken());
                parser.nextToken();
            }
            if (reuseStore == false) {
                assertEquals("weight_ranking", parser.currentName());
                parser.nextToken();
                assertThat(parser.intValue(), greaterThan(0));
                parser.nextToken();
            }
            if (allocationDecision == AllocationDecision.NO || allocationDecision == AllocationDecision.THROTTLED || includeYesDecisions) {
                assertEquals("deciders", parser.currentName());
                boolean atLeastOneMatchingDecisionFound = verifyDeciders(parser, allocationDecision);
                parser.nextToken();
                if (allocationDecision == AllocationDecision.NO || allocationDecision == AllocationDecision.THROTTLED) {
                    assertTrue("decision was " + allocationDecision + " but found no node's with that decision",
                        atLeastOneMatchingDecisionFound);
                }
            }
            assertEquals(Token.END_OBJECT, parser.currentToken());
        }
        assertEquals(Token.END_ARRAY, parser.nextToken());
    }
    /**
     * Consumes the common prefix of a node decision entry (node_id, node_name,
     * transport_address, node_decision), leaving the parser positioned on the
     * node_decision value, and returns the node's name.
     */
    private String verifyNodeDecisionPrologue(XContentParser parser) throws IOException {
        assertEquals(Token.START_OBJECT, parser.nextToken());
        parser.nextToken();
        assertEquals("node_id", parser.currentName());
        parser.nextToken();
        assertNotNull(parser.text());
        parser.nextToken();
        assertEquals("node_name", parser.currentName());
        parser.nextToken();
        String nodeName = parser.text();
        assertNotNull(nodeName);
        parser.nextToken();
        assertEquals("transport_address", parser.currentName());
        parser.nextToken();
        assertNotNull(parser.text());
        parser.nextToken();
        assertEquals("node_decision", parser.currentName());
        parser.nextToken();
        return nodeName;
    }
private boolean verifyDeciders(XContentParser parser, AllocationDecision allocationDecision) throws IOException {
assertEquals(Token.START_ARRAY, parser.nextToken());
boolean atLeastOneMatchingDecisionFound = false;
while (parser.nextToken() != Token.END_ARRAY) {
assertEquals(Token.START_OBJECT, parser.currentToken());
parser.nextToken();
assertEquals("decider", parser.currentName());
parser.nextToken();
assertNotNull(parser.text());
parser.nextToken();
assertEquals("decision", parser.currentName());
parser.nextToken();
String decisionText = parser.text();
if ((allocationDecision == AllocationDecision.NO && decisionText.equals("NO")
|| (allocationDecision == AllocationDecision.THROTTLED && decisionText.equals("THROTTLE")))) {
atLeastOneMatchingDecisionFound = true;
}
assertNotNull(decisionText);
parser.nextToken();
assertEquals("explanation", parser.currentName());
parser.nextToken();
assertNotNull(parser.text());
assertEquals(Token.END_OBJECT, parser.nextToken());
}
return atLeastOneMatchingDecisionFound;
}
private Map<String, AllocationDecision> allNodeDecisions(AllocationDecision allocationDecision, boolean removePrimary) {
Map<String, AllocationDecision> nodeDecisions = new HashMap<>();
Set<String> allNodes = Sets.newHashSet(internalCluster().getNodeNames());
allNodes.remove(removePrimary ? primaryNodeName() : replicaNode().getName());
for (String nodeName : allNodes) {
nodeDecisions.put(nodeName, allocationDecision);
}
return nodeDecisions;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.filemgr.ingest;
//JDK imports
import java.io.File;
import java.io.FileInputStream;
import java.net.URL;
import java.util.Properties;
//OODT imports
import org.apache.oodt.cas.filemgr.metadata.CoreMetKeys;
import org.apache.oodt.cas.filemgr.structs.Product;
import org.apache.oodt.cas.filemgr.system.XmlRpcFileManager;
import org.apache.oodt.cas.filemgr.system.XmlRpcFileManagerClient;
import org.apache.oodt.cas.metadata.Metadata;
import org.apache.oodt.cas.metadata.SerializableMetadata;
// JUnit imports
import junit.framework.TestCase;
/**
 * @author mattmann
 * @version $Revision$
 *
 * <p>
 * Test harness for {@link StdIngester}: starts a local XML-RPC file manager,
 * ingests a sample product, and verifies it was cataloged.
 * </p>.
 */
public class TestStdIngester extends TestCase {
private static final int FM_PORT = 50010;
private XmlRpcFileManager fm;
private String luceneCatLoc;
private StdIngester ingester;
private static final String transferServiceFacClass = "org.apache.oodt.cas."
+ "filemgr.datatransfer.LocalDataTransferFactory";
private Properties initialProperties = new Properties(
System.getProperties());
public TestStdIngester() {
ingester = new StdIngester(transferServiceFacClass);
}
public void testIngest() {
Metadata prodMet = null;
try {
URL ingestUrl = this.getClass().getResource("/ingest");
URL refUrl = this.getClass().getResource("/ingest/test.txt");
URL metUrl = this.getClass().getResource("/ingest/test.txt.met");
prodMet = new SerializableMetadata(new FileInputStream(
new File(metUrl.getFile())));
// now add the right file location
prodMet.addMetadata(CoreMetKeys.FILE_LOCATION, new File(
ingestUrl.getFile()).getCanonicalPath());
ingester.ingest(new URL("http://localhost:" + FM_PORT), new File(
refUrl.getFile()), prodMet);
} catch (Exception e) {
fail(e.getMessage());
}
// now make sure that the file is ingested
try {
XmlRpcFileManagerClient fmClient = new XmlRpcFileManagerClient(new URL("http://localhost:"+FM_PORT));
Product p = fmClient.getProductByName("test.txt");
assertNotNull(p);
assertEquals(Product.STATUS_RECEIVED, p.getTransferStatus());
assertTrue(fmClient.hasProduct("test.txt"));
fmClient = null;
} catch (Exception e){
fail(e.getMessage());
}
}
/*
* (non-Javadoc)
*
* @see junit.framework.TestCase#setUp()
*/
protected void setUp() throws Exception {
startXmlRpcFileManager();
}
/*
* (non-Javadoc)
*
* @see junit.framework.TestCase#tearDown()
*/
protected void tearDown() throws Exception {
fm.shutdown();
fm = null;
// blow away lucene cat
deleteAllFiles(luceneCatLoc);
// blow away test file
deleteAllFiles("/tmp/test.txt");
// Reset the System properties to initial values.
System.setProperties(initialProperties);
}
private void deleteAllFiles(String startDir) {
File startDirFile = new File(startDir);
File[] delFiles = startDirFile.listFiles();
if (delFiles != null && delFiles.length > 0) {
for (int i = 0; i < delFiles.length; i++) {
delFiles[i].delete();
}
}
startDirFile.delete();
}
private void startXmlRpcFileManager() {
Properties properties = new Properties(System.getProperties());
// first make sure to load properties for the file manager
// and make sure to load logging properties as well
// set the log levels
URL loggingPropertiesUrl = this.getClass().getResource(
"/test.logging.properties");
properties.setProperty("java.util.logging.config.file", new File(
loggingPropertiesUrl.getFile()).getAbsolutePath());
// first load the example configuration
try {
URL filemgrPropertiesUrl = this.getClass().getResource(
"/filemgr.properties");
properties.load(
new FileInputStream(new File(filemgrPropertiesUrl.getFile())));
} catch (Exception e) {
fail(e.getMessage());
}
// override the catalog to use: we'll use lucene
try {
URL ingestUrl = this.getClass().getResource("/ingest");
luceneCatLoc = new File(ingestUrl.getFile()).getCanonicalPath()
+ "/cat";
} catch (Exception e) {
fail(e.getMessage());
}
properties.setProperty("filemgr.catalog.factory",
"org.apache.oodt.cas.filemgr.catalog.LuceneCatalogFactory");
properties.setProperty(
"org.apache.oodt.cas.filemgr.catalog.lucene.idxPath",
luceneCatLoc);
// now override the repo mgr policy
try {
URL fmpolicyUrl = this.getClass().getResource("/ingest/fmpolicy");
properties.setProperty(
"org.apache.oodt.cas.filemgr.repositorymgr.dirs",
"file://"
+ new File(fmpolicyUrl.getFile()).getCanonicalPath());
} catch (Exception e) {
fail(e.getMessage());
}
// now override the val layer ones
URL examplesCoreUrl = this.getClass().getResource("/examples/core");
properties.setProperty("org.apache.oodt.cas.filemgr.validation.dirs",
"file://"
+ new File(examplesCoreUrl.getFile()).getAbsolutePath());
// set up mime repo path
URL mimeTypesUrl = this.getClass().getResource("/mime-types.xml");
properties.setProperty(
"org.apache.oodt.cas.filemgr.mime.type.repository", new File(
mimeTypesUrl.getFile()).getAbsolutePath());
System.setProperties(properties);
try {
fm = new XmlRpcFileManager(FM_PORT);
} catch (Exception e) {
fail(e.getMessage());
}
}
}
| |
/*
* Copyright (C) 2014-2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.util;
import gobblin.configuration.ConfigurationKeys;
import java.io.File;
import java.io.FileFilter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.configuration.ConfigurationConverter;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.io.monitor.FileAlterationListener;
import org.apache.commons.io.monitor.FileAlterationMonitor;
import org.apache.commons.io.monitor.FileAlterationObserver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
/**
* A utility class used by the scheduler.
*
* @author Yinan Li
*/
public class SchedulerUtils {
  private static final Logger LOGGER = LoggerFactory.getLogger(SchedulerUtils.class);
  // Extension of properties files
  public static final String JOB_PROPS_FILE_EXTENSION = "properties";
  // A filter for properties files
  private static final FilenameFilter PROPERTIES_FILE_FILTER = new FilenameFilter() {
    @Override
    public boolean accept(File file, String name) {
      return Files.getFileExtension(name).equalsIgnoreCase(JOB_PROPS_FILE_EXTENSION);
    }
  };
  // A filter for non-properties files
  private static final FilenameFilter NON_PROPERTIES_FILE_FILTER = new FilenameFilter() {
    @Override
    public boolean accept(File dir, String name) {
      return !Files.getFileExtension(name).equalsIgnoreCase(JOB_PROPS_FILE_EXTENSION);
    }
  };
  /**
   * Load job configurations from job configuration files stored under the
   * root job configuration file directory.
   *
   * @param properties Gobblin framework configuration properties
   * @return a list of job configurations in the form of {@link java.util.Properties}
   * @throws ConfigurationException if a properties file cannot be parsed
   */
  public static List<Properties> loadJobConfigs(Properties properties)
      throws ConfigurationException {
    Preconditions.checkArgument(properties.containsKey(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY),
        "Missing configuration property: " + ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY);
    List<Properties> jobConfigs = Lists.newArrayList();
    loadJobConfigsRecursive(jobConfigs, properties, getJobConfigurationFileExtensions(properties),
        new File(properties.getProperty(ConfigurationKeys.JOB_CONFIG_FILE_DIR_KEY)));
    return jobConfigs;
  }
  /**
   * Load job configurations from job configuration files affected by changes to the given common properties file.
   *
   * @param properties Gobblin framework configuration properties
   * @param commonPropsFile the common properties file with changes
   * @param jobConfigFileDir root job configuration file directory
   * @return a list of job configurations in the form of {@link java.util.Properties}
   */
  public static List<Properties> loadJobConfigs(Properties properties, File commonPropsFile, File jobConfigFileDir)
      throws ConfigurationException, IOException {
    List<Properties> commonPropsList = Lists.newArrayList();
    // Start from the parent of parent of the changed common properties file to avoid
    // loading the common properties file here since it will be loaded below anyway
    getCommonProperties(commonPropsList, jobConfigFileDir, commonPropsFile.getParentFile().getParentFile());
    // Add the framework configuration properties to the end
    commonPropsList.add(properties);
    Properties commonProps = new Properties();
    // Include common properties in reverse order so that a directory closer to
    // the job configuration file overrides its ancestors
    for (Properties pros : Lists.reverse(commonPropsList)) {
      commonProps.putAll(pros);
    }
    List<Properties> jobConfigs = Lists.newArrayList();
    // The common properties file will be loaded here
    loadJobConfigsRecursive(jobConfigs, commonProps, getJobConfigurationFileExtensions(properties),
        commonPropsFile.getParentFile());
    return jobConfigs;
  }
  /**
   * Load a given job configuration file.
   *
   * @param properties Gobblin framework configuration properties
   * @param jobConfigFile job configuration file to be loaded
   * @param jobConfigFileDir root job configuration file directory
   * @return a job configuration in the form of {@link java.util.Properties}
   */
  public static Properties loadJobConfig(Properties properties, File jobConfigFile, File jobConfigFileDir)
      throws ConfigurationException, IOException {
    List<Properties> commonPropsList = Lists.newArrayList();
    getCommonProperties(commonPropsList, jobConfigFileDir, jobConfigFile.getParentFile());
    // Add the framework configuration properties to the end
    commonPropsList.add(properties);
    Properties jobProps = new Properties();
    // Include common properties in reverse order so that deeper directories win
    for (Properties commonProps : Lists.reverse(commonPropsList)) {
      jobProps.putAll(commonProps);
    }
    // Then load the job configuration properties defined in the job configuration file
    jobProps.putAll(ConfigurationConverter.getProperties(new PropertiesConfiguration(jobConfigFile)));
    jobProps.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY, jobConfigFile.getAbsolutePath());
    return jobProps;
  }
  /**
   * Add {@link org.apache.commons.io.monitor.FileAlterationMonitor}s for the given
   * root directory and any nested subdirectories under the root directory to the given
   * {@link org.apache.commons.io.monitor.FileAlterationMonitor}.
   *
   * @param monitor a {@link org.apache.commons.io.monitor.FileAlterationMonitor}
   * @param listener a {@link org.apache.commons.io.monitor.FileAlterationListener}
   * @param rootDir root directory
   */
  public static void addFileAlterationObserver(FileAlterationMonitor monitor, FileAlterationListener listener,
      File rootDir) {
    // Add an observer for the current root directory
    FileAlterationObserver observer = new FileAlterationObserver(rootDir);
    observer.addListener(listener);
    monitor.addObserver(observer);
    // List subdirectories under the current root directory
    File[] subDirs = rootDir.listFiles(new FileFilter() {
      @Override
      public boolean accept(File file) {
        return file.isDirectory();
      }
    });
    if (subDirs == null || subDirs.length == 0) {
      return;
    }
    // Recursively add an observer for each subdirectory
    for (File subDir : subDirs) {
      addFileAlterationObserver(monitor, listener, subDir);
    }
  }
  /**
   * Recursively load job configuration files under the given directory.
   * Note: {@code rootProps} is mutated — the directory's own .properties file
   * (at most one is allowed per directory) is merged into it before recursing.
   */
  private static void loadJobConfigsRecursive(List<Properties> jobConfigs, Properties rootProps,
      Set<String> jobConfigFileExtensions, File jobConfigDir)
      throws ConfigurationException {
    // Get the properties file that ends with .properties if any
    String[] propertiesFiles = jobConfigDir.list(PROPERTIES_FILE_FILTER);
    if (propertiesFiles != null && propertiesFiles.length > 0) {
      // There should be a single properties file in each directory (or sub directory)
      if (propertiesFiles.length != 1) {
        throw new RuntimeException("Found more than one .properties file in directory: " + jobConfigDir);
      }
      // Load the properties, which may overwrite the same properties defined in the parent or ancestor directories.
      rootProps.putAll(ConfigurationConverter
          .getProperties(new PropertiesConfiguration(new File(jobConfigDir, propertiesFiles[0]))));
    }
    // Get all non-properties files
    String[] names = jobConfigDir.list(NON_PROPERTIES_FILE_FILTER);
    if (names == null || names.length == 0) {
      return;
    }
    for (String name : names) {
      File file = new File(jobConfigDir, name);
      if (file.isDirectory()) {
        // copy so that sibling directories do not see each other's overrides
        Properties rootPropsCopy = new Properties();
        rootPropsCopy.putAll(rootProps);
        loadJobConfigsRecursive(jobConfigs, rootPropsCopy, jobConfigFileExtensions, file);
      } else {
        if (!jobConfigFileExtensions.contains(Files.getFileExtension(file.getName()).toLowerCase())) {
          LOGGER.warn("Skipped file " + file + " that has an unsupported extension");
          continue;
        }
        File doneFile = new File(file + ".done");
        if (doneFile.exists()) {
          // Skip the job configuration file when a .done file with the same name exists,
          // which means the job configuration file is for a one-time job and the job has
          // already run and finished.
          LOGGER.info("Skipped job configuration file " + file + " for which a .done file exists");
          continue;
        }
        Properties jobProps = new Properties();
        // Put all parent/ancestor properties first
        jobProps.putAll(rootProps);
        // Then load the job configuration properties defined in the job configuration file
        jobProps.putAll(ConfigurationConverter.getProperties(new PropertiesConfiguration(file)));
        jobProps.setProperty(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY, file.getAbsolutePath());
        jobConfigs.add(jobProps);
      }
    }
  }
  /**
   * Parses the comma-separated list of supported job configuration file
   * extensions from the framework properties (falling back to the default),
   * lower-cased for case-insensitive matching.
   */
  private static Set<String> getJobConfigurationFileExtensions(Properties properties) {
    Iterable<String> jobConfigFileExtensionsIterable = Splitter.on(",").omitEmptyStrings().trimResults()
        .split(properties.getProperty(ConfigurationKeys.JOB_CONFIG_FILE_EXTENSIONS_KEY,
            ConfigurationKeys.DEFAULT_JOB_CONFIG_FILE_EXTENSIONS));
    return ImmutableSet.copyOf(Iterables.transform(jobConfigFileExtensionsIterable, new Function<String, String>() {
      @Override
      public String apply(String input) {
        return null != input ? input.toLowerCase() : "";
      }
    }));
  }
  /**
   * Collects the common .properties files found while walking upward from
   * {@code dir} to the root job configuration directory (inclusive), in
   * bottom-up order. {@code dir} must live under {@code jobConfigFileDir}.
   */
  private static void getCommonProperties(List<Properties> commonPropsList, File jobConfigFileDir, File dir)
      throws ConfigurationException, IOException {
    // Make sure the given starting directory is under the job configuration file directory
    Preconditions.checkArgument(dir.getCanonicalPath().startsWith(jobConfigFileDir.getCanonicalPath()),
        String.format("%s is not an ancestor directory of %s", jobConfigFileDir, dir));
    // Traverse backward until the parent of the root job configuration file directory is reached
    while (!dir.equals(jobConfigFileDir.getParentFile())) {
      // Get the properties file that ends with .properties if any
      String[] propertiesFiles = dir.list(PROPERTIES_FILE_FILTER);
      if (propertiesFiles != null && propertiesFiles.length > 0) {
        // There should be a single properties file in each directory (or sub directory)
        if (propertiesFiles.length != 1) {
          throw new RuntimeException("Found more than one .properties file in directory: " + dir);
        }
        commonPropsList.add(
            ConfigurationConverter.getProperties(new PropertiesConfiguration(new File(dir, propertiesFiles[0]))));
      }
      dir = dir.getParentFile();
    }
  }
}
| |
/* ************************************************************************
#
# designCraft.io
#
# http://designcraft.io/
#
# Copyright:
# Copyright 2014 eTimeline, LLC. All rights reserved.
#
# License:
# See the license.txt file in the project's top-level directory for details.
#
# Authors:
# * Andy White
#
************************************************************************ */
package dcraft.bus;
import dcraft.bus.Message;
import dcraft.bus.net.StreamMessage;
import dcraft.lang.op.OperationResult;
import dcraft.struct.ListStruct;
import dcraft.struct.RecordStruct;
import dcraft.struct.Struct;
import dcraft.work.TaskRun;
/**
 * Helper routines for constructing and inspecting bus Messages and
 * StreamMessages: success/error replies, reply addressing, and extraction
 * of bodies, result codes and error state from message logs.
 */
public class MessageUtil {
	/**
	 * Build a success reply whose Body record is assembled from alternating
	 * name/value pairs. Null entries are skipped; with no arguments no Body
	 * field is set at all.
	 */
	static public Message success(String... flds) {
		Message reply = new Message();

		if (flds.length == 0)
			return reply;

		RecordStruct body = new RecordStruct();
		String pendingName = null;

		for (String part : flds) {
			if (part == null)
				continue;

			if (pendingName == null) {
				pendingName = part;
			}
			else {
				body.setField(pendingName, part);
				pendingName = null;
			}
		}

		reply.setField("Body", body);
		return reply;
	}

	/** Build a success reply carrying the given structure as its Body. */
	public static Message success(Struct body) {
		Message reply = new Message();
		reply.setField("Body", body);
		return reply;
	}

	// TODO discourage use of these following - use OC .toLogMessage(); instead

	/** Build an error reply for the given code and message text. */
	static public Message error(int code, String msg) {
		OperationResult result = new OperationResult();
		result.error(code, msg);
		return result.toLogMessage();
	}

	/** Build a translated error reply for the given code and parameters. */
	static public Message errorTr(int code, Object... params) {
		OperationResult result = new OperationResult();
		result.errorTr(code, params);
		return result.toLogMessage();
	}

	/** Wrap an arbitrary record's fields in a Message. */
	static public Message fromRecord(RecordStruct msg) {
		Message wrapped = new Message();
		wrapped.copyFields(msg);
		return wrapped;
	}

	/**
	 * Address a reply message back to the sender recorded in the original
	 * request. Returns false when there is nothing to address (no reply
	 * message, or no RespondTo service on the original).
	 */
	static boolean addressReply(Message msg, RecordStruct original) {
		if (msg == null)
			return false;

		if (original.isFieldEmpty("RespondTo"))
			return false;

		msg.setField("ToHub", original.getFieldAsString("FromHub"));
		msg.setField("Service", original.getFieldAsString("RespondTo"));
		msg.setField("Feature", "Reply");
		msg.setField("Op", "Deliver");
		msg.setField("Tag", original.getFieldAsString("RespondTag"));

		return true;
	}

	/** Build a stream error message for the given code and message text. */
	static public StreamMessage streamError(int code, String msg) {
		OperationResult result = new OperationResult();
		result.error(code, msg);
		return MessageUtil.streamMessages(result);
	}

	/** Build a translated stream error message for the given code and parameters. */
	static public StreamMessage streamErrorTr(int code, Object... params) {
		OperationResult result = new OperationResult();
		result.errorTr(code, params);
		return MessageUtil.streamMessages(result);
	}

	/** Copy the log messages of an operation result into a StreamMessage. */
	static public StreamMessage streamMessages(OperationResult ri) {
		StreamMessage stream = new StreamMessage();

		if (ri != null)
			stream.setField("Messages", ri.getMessages());

		return stream;
	}

	/** Wrap an arbitrary record's fields in a StreamMessage. */
	static public StreamMessage streamFromRecord(RecordStruct msg) {
		StreamMessage stream = new StreamMessage();
		stream.copyFields(msg);
		return stream;
	}

	/** Address a stream reply back to the hub/session/channel of the original message. */
	static public void streamAddressReply(RecordStruct msg, RecordStruct original) {
		if (msg == null)
			return;

		msg.setField("ToHub", original.getFieldAsString("Hub"));
		msg.setField("ToSession", original.getFieldAsString("Session"));
		msg.setField("ToChannel", original.getFieldAsString("Channel"));
	}

	// assumes body will be in StreamMessage format
	static public void streamAddressReply(Message msg, RecordStruct original) {
		if (msg == null)
			return;

		msg.withToHub(original.getFieldAsString("Hub"));

		RecordStruct body = msg.getFieldAsRecord("Body");

		// create the Body on demand so the addressing fields have a home
		if (body == null) {
			body = new RecordStruct();
			msg.setField("Body", body);
		}

		body.setField("ToHub", original.getFieldAsString("Hub"));
		body.setField("ToSession", original.getFieldAsString("Session"));
		body.setField("ToChannel", original.getFieldAsString("Channel"));
	}

	/** The terminating message of a stream. */
	public static StreamMessage streamFinal() {
		return new StreamMessage("Final");
	}

	/** The task parameters of a request, viewed as a Message. */
	public static Message message(TaskRun request) {
		return (Message) request.getTask().getParams();
	}

	/** The Body of the request's parameter message, as a record. */
	public static RecordStruct bodyAsRecord(TaskRun request) {
		Message params = (Message) request.getTask().getParams();
		return params.getFieldAsRecord("Body");
	}

	/** The Body of the request's parameter message, as a list. */
	public static ListStruct bodyAsList(TaskRun request) {
		Message params = (Message) request.getTask().getParams();
		return params.getFieldAsList("Body");
	}

	/** Replace the Body of the request's parameter message. */
	public static void setBody(TaskRun request, Struct body) {
		((Message) request.getTask().getParams()).setField("Body", body);
	}

	// search backward through log to find an error, if we hit a message with an Exit tag then
	// stop, as Exit resets Error (unless it is an error itself)
	// similar to findExitEntry but stops after last Error as we don't need to loop through all
	static public boolean hasErrors(RecordStruct rec) {
		ListStruct msgs = rec.getFieldAsList("Messages");

		if (msgs == null)
			return false;

		for (int idx = msgs.getSize() - 1; idx >= 0; idx--) {
			RecordStruct entry = (RecordStruct) msgs.getItem(idx);

			if ("Error".equals(entry.getFieldAsString("Level")))
				return true;

			if (entry.hasField("Tags")) {
				ListStruct tags = entry.getFieldAsList("Tags");

				if (tags.stringStream().anyMatch(tag -> tag.equals("Exit")))
					break;
			}
		}

		return false;
	}

	/** The code of the effective exit entry of the message log, or 0 when there is none. */
	static public long getCode(RecordStruct rec) {
		RecordStruct exit = MessageUtil.findExitEntry(rec);

		if (exit == null)
			return 0;

		return exit.getFieldAsInteger("Code", 0);
	}

	/** The message text of the effective exit entry of the message log, or null when there is none. */
	static public String getMessage(RecordStruct rec) {
		RecordStruct exit = MessageUtil.findExitEntry(rec);

		if (exit == null)
			return null;

		return exit.getFieldAsString("Message");
	}

	/** Find the effective exit entry across the whole message log. */
	static public RecordStruct findExitEntry(RecordStruct rec) {
		return MessageUtil.findExitEntry(rec, 0, -1);
	}

	// search backward through log to find an exit, if we hit a message with an Exit tag then
	// stop, as Exit resets Error. now return the first error after Exit. if no errors after
	// then return Exit
	static public RecordStruct findExitEntry(RecordStruct rec, int msgStart, int msgEnd) {
		ListStruct msgs = rec.getFieldAsList("Messages");

		if (msgs == null)
			return null;

		// -1 means "through the end of the log"
		int end = (msgEnd == -1) ? msgs.getSize() : msgEnd;

		RecordStruct candidateError = null;

		for (int idx = end - 1; idx >= msgStart; idx--) {
			RecordStruct entry = (RecordStruct) msgs.getItem(idx);

			if ("Error".equals(entry.getFieldAsString("Level")))
				candidateError = entry;

			if (entry.hasField("Tags")) {
				ListStruct tags = entry.getFieldAsList("Tags");

				if (tags.stringStream().anyMatch(tag -> tag.equals("Exit")))
					return (candidateError != null) ? candidateError : entry;
			}
		}

		return candidateError;
	}
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.server.controller;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.Map;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.thoughtworks.go.config.AgentConfig;
import com.thoughtworks.go.config.GoConfigFileDao;
import com.thoughtworks.go.config.update.ApproveAgentCommand;
import com.thoughtworks.go.config.update.UpdateEnvironmentsCommand;
import com.thoughtworks.go.config.update.UpdateResourceCommand;
import com.thoughtworks.go.security.Registration;
import com.thoughtworks.go.server.controller.actions.JsonAction;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.server.service.AgentService;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.result.HttpOperationResult;
import com.thoughtworks.go.server.util.UserHelper;
import com.thoughtworks.go.server.web.JsonView;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.json.JsonMap;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.View;
import static com.thoughtworks.go.util.GoConstants.ERROR_FOR_JSON;
import static com.thoughtworks.go.util.FileDigester.copyAndDigest;
import static com.thoughtworks.go.util.FileDigester.md5DigestOfStream;
/**
 * Spring MVC controller that serves the agent installer artifacts
 * (agent.jar, agent-launcher.jar, agent-plugins.zip) with MD5 checksum
 * headers, and handles agent registration, approval and denial requests.
 */
@Controller
public class AgentRegistrationController {
private static final Log LOG = LogFactory.getLog(AgentRegistrationController.class);
private final AgentService agentService;
private final GoConfigService goConfigService;
private final SystemEnvironment systemEnvironment;
// Lazily computed MD5 checksums of the three downloadable artifacts.
// volatile so a value computed by one request thread is visible to others;
// the unsynchronized lazy init can at worst recompute the same value twice.
private volatile String agentChecksum;
private volatile String agentLauncherChecksum;
private volatile String agentPluginsChecksum;
@Autowired
public AgentRegistrationController(AgentService agentService, GoConfigService goConfigService, SystemEnvironment systemEnvironment) {
this.agentService = agentService;
this.goConfigService = goConfigService;
this.systemEnvironment = systemEnvironment;
}
// HEAD probe used to detect artifact changes: responds with all three MD5
// headers plus the SSL-port header, no body.
@RequestMapping(value = "/latest-agent.status", method = RequestMethod.HEAD)
public void checkAgentStatus(HttpServletRequest request, HttpServletResponse response) throws IOException {
populateAgentChecksum();
response.setHeader(SystemEnvironment.AGENT_CONTENT_MD5_HEADER, agentChecksum);
populateLauncherChecksum();
response.setHeader(SystemEnvironment.AGENT_LAUNCHER_CONTENT_MD5_HEADER, agentLauncherChecksum);
populateAgentPluginsChecksum();
response.setHeader(SystemEnvironment.AGENT_PLUGINS_ZIP_MD5_HEADER, agentPluginsChecksum);
setOtherHeaders(response);
}
// GET variant of the status probe; delegates to the HEAD handler (headers only).
@RequestMapping(value = "/latest-agent.status", method = RequestMethod.GET)
public void latestAgentStatus(HttpServletRequest request, HttpServletResponse response) throws IOException {
checkAgentStatus(request, response);
}
// HEAD for the agent jar: only its Content-MD5 plus the common headers.
@RequestMapping(value = "/agent", method = RequestMethod.HEAD)
public void checkAgentVersion(HttpServletRequest request, HttpServletResponse response) throws IOException {
populateAgentChecksum();
response.setHeader("Content-MD5", agentChecksum);
setOtherHeaders(response);
}
// HEAD for the agent launcher jar.
@RequestMapping(value = "/agent-launcher.jar", method = RequestMethod.HEAD)
public void checkAgentLauncherVersion(HttpServletRequest request, HttpServletResponse response) throws IOException {
populateLauncherChecksum();
response.setHeader("Content-MD5", agentLauncherChecksum);
setOtherHeaders(response);
}
// HEAD for the plugins zip.
@RequestMapping(value = "/agent-plugins.zip", method = RequestMethod.HEAD)
public void checkAgentPluginsZipStatus(HttpServletRequest request, HttpServletResponse response) throws IOException {
populateAgentPluginsChecksum();
response.setHeader("Content-MD5", agentPluginsChecksum);
setOtherHeaders(response);
}
// Compute the launcher checksum once and cache it for later requests.
private void populateLauncherChecksum() throws IOException {
if (agentLauncherChecksum == null) {
agentLauncherChecksum = getChecksumFor(new AgentLauncherSrc());
}
}
// Compute the agent jar checksum once and cache it for later requests.
private void populateAgentChecksum() throws IOException {
if (agentChecksum == null) {
agentChecksum = getChecksumFor(new AgentJarSrc());
}
}
// MD5-digest the full contents of the given source stream.
private String getChecksumFor(final InputStreamSrc src) throws IOException {
InputStream inputStream = null;
String checksum = null;
try {
inputStream = src.invoke();
checksum = md5DigestOfStream(inputStream);
} finally {
IOUtils.closeQuietly(inputStream);
}
// NOTE(review): this assert is a no-op unless the JVM runs with -ea;
// consider an explicit null check if the guarantee matters in production.
assert (checksum != null);
return checksum;
}
// Headers common to every artifact response; tells agents which SSL port to use.
private void setOtherHeaders(HttpServletResponse response) {
response.setHeader("Cruise-Server-Ssl-Port", Integer.toString(systemEnvironment.getSslServerPort()));
}
@RequestMapping(value = "/agent", method = RequestMethod.GET)
public ModelAndView downloadAgent(HttpServletRequest request, HttpServletResponse response) throws IOException {
return getDownload(new AgentJarSrc());
}
@RequestMapping(value = "/agent-launcher.jar", method = RequestMethod.GET)
public ModelAndView downloadAgentLauncher(HttpServletRequest request,
HttpServletResponse response) throws IOException {
return getDownload(new AgentLauncherSrc());
}
@RequestMapping(value = "/agent-plugins.zip", method = RequestMethod.GET)
public ModelAndView downloadPluginsZip(HttpServletRequest request,
HttpServletResponse response) throws IOException {
return getDownload(new AgentPluginsZipSrc());
}
// Stream the artifact to the client as an octet-stream, computing its MD5
// on the fly and reporting it in the Content-MD5 header.
private ModelAndView getDownload(final InputStreamSrc inStreamSrc) throws FileNotFoundException {
return new ModelAndView(new View() {
public String getContentType() {
return "application/octet-stream";
}
public void render(Map model, HttpServletRequest request, HttpServletResponse response) throws IOException {
InputStream rawIS = null;
BufferedInputStream is = null;
BufferedOutputStream os = null;
try {
rawIS = inStreamSrc.invoke();
is = new BufferedInputStream(rawIS);
os = new BufferedOutputStream(response.getOutputStream());
String md5 = copyAndDigest(is, os);
response.setHeader("Content-MD5", md5);
setOtherHeaders(response);
os.flush();
} finally {
// NOTE(review): closeQuietly also swallows any close failure of the
// response stream, so a truncated download would not be reported here.
IOUtils.closeQuietly(is);
IOUtils.closeQuietly(os);
IOUtils.closeQuietly(rawIS);
}
}
});
}
// Agent registration endpoint. Optionally auto-approves the agent when the
// supplied auto-register key matches the server config, then returns the
// (possibly null) registration key material as a serialized Java object.
@RequestMapping(value = "/agent", method = RequestMethod.POST)
public ModelAndView agentRequest(@RequestParam("hostname") String hostname,
@RequestParam("uuid") String uuid,
@RequestParam("location") String location,
@RequestParam("usablespace") String usablespace,
@RequestParam("operating_system") String operatingSystem,
@RequestParam("agentAutoRegisterKey") String agentAutoRegisterKey,
@RequestParam("agentAutoRegisterResources") String agentAutoRegisterResources,
@RequestParam("agentAutoRegisterEnvironments") String agentAutoRegisterEnvironments,
HttpServletRequest request,
HttpServletResponse response) throws IOException {
final String ipAddress = request.getRemoteAddr();
if (LOG.isDebugEnabled()) {
LOG.debug(String.format("Processing registration request from agent [%s/%s]", hostname, ipAddress));
}
Registration keyEntry;
try {
// Auto-registration: approve the agent and attach its requested
// resources/environments in a single composite config update.
if (goConfigService.serverConfig().shouldAutoRegisterAgentWith(agentAutoRegisterKey)) {
LOG.info(String.format("[Agent Auto Registration] Auto registering agent with uuid %s ", uuid));
GoConfigFileDao.CompositeConfigCommand compositeConfigCommand = new GoConfigFileDao.CompositeConfigCommand(
new ApproveAgentCommand(uuid, ipAddress, hostname),
new UpdateResourceCommand(uuid, agentAutoRegisterResources),
new UpdateEnvironmentsCommand(uuid, agentAutoRegisterEnvironments)
);
goConfigService.updateConfig(compositeConfigCommand);
}
keyEntry = agentService.requestRegistration(
AgentRuntimeInfo.fromServer(new AgentConfig(uuid, hostname, ipAddress), goConfigService.hasAgent(uuid), location,
Long.parseLong(usablespace), operatingSystem));
} catch (Exception e) {
// Any failure degrades to a null key entry rather than an error response.
// NOTE(review): "occured" is a typo ("occurred") in this log message.
keyEntry = Registration.createNullPrivateKeyEntry();
LOG.error("Error occured during agent registration process: ", e);
}
final Registration anotherCopy = keyEntry;
return new ModelAndView(new View() {
public String getContentType() {
return "application/x-java-serialized-object";
}
public void render(Map model, HttpServletRequest request, HttpServletResponse response) throws IOException {
ServletOutputStream servletOutputStream = null;
ObjectOutputStream objectOutputStream = null;
try {
servletOutputStream = response.getOutputStream();
objectOutputStream = new ObjectOutputStream(servletOutputStream);
objectOutputStream.writeObject(anotherCopy);
} finally {
IOUtils.closeQuietly(servletOutputStream);
IOUtils.closeQuietly(objectOutputStream);
}
}
});
}
// JSON endpoint: approve (register) a pending agent by uuid.
@RequestMapping(value = "/**/registerAgent.json", method = RequestMethod.POST)
public ModelAndView registerAgent(HttpServletResponse response,
@RequestParam("uuid") String uuid) {
try {
agentService.approve(uuid);
JsonMap result = JsonView.getSimpleAjaxResult("result", "success");
return JsonAction.jsonCreated(result).respond(response);
} catch (Exception ex) {
String message = ex.getMessage();
LOG.error(String.format("Error approving agent [%s]", uuid), ex);
JsonMap result = JsonView.getSimpleAjaxResult("result", "failed");
result.put(ERROR_FOR_JSON, message);
return JsonAction.jsonNotAcceptable(result).respond(response);
}
}
// JSON endpoint: deny (disable) an agent by uuid.
@RequestMapping(value = "/**/denyAgent.json", method = RequestMethod.POST)
public ModelAndView denyAgent(HttpServletResponse response, @RequestParam("uuid") String uuid) {
try {
agentService.disableAgents(UserHelper.getUserName(), new HttpOperationResult(), Arrays.asList(uuid));
JsonMap result = JsonView.getSimpleAjaxResult("result", "success");
return JsonAction.jsonCreated(result).respond(response);
} catch (Exception ex) {
String message = ex.getMessage();
JsonMap result = JsonView.getSimpleAjaxResult("result", "failed");
result.put(ERROR_FOR_JSON, message);
return JsonAction.jsonNotAcceptable(result).respond(response);
}
}
// Compute the plugins-zip checksum once and cache it for later requests.
private void populateAgentPluginsChecksum() throws IOException {
if (agentPluginsChecksum == null) {
agentPluginsChecksum = getChecksumFor(new AgentPluginsZipSrc());
}
}
// Abstraction over "where the artifact's bytes come from", so checksum and
// download logic can be shared across the three artifacts.
public static interface InputStreamSrc {
InputStream invoke() throws FileNotFoundException;
}
private class AgentJarSrc implements InputStreamSrc {
public InputStream invoke() throws FileNotFoundException {
return agentService.agentJarInputStream();
}
}
private class AgentLauncherSrc implements InputStreamSrc {
public InputStream invoke() throws FileNotFoundException {
return agentService.agentLauncherJarInputStream();
}
}
private class AgentPluginsZipSrc implements InputStreamSrc {
public InputStream invoke() throws FileNotFoundException {
return new FileInputStream(systemEnvironment.get(SystemEnvironment.ALL_PLUGINS_ZIP_PATH));
}
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source.resolve.reference.impl;
import com.intellij.codeInsight.completion.InsertHandler;
import com.intellij.codeInsight.completion.InsertionContext;
import com.intellij.codeInsight.completion.JavaLookupElementBuilder;
import com.intellij.codeInsight.completion.PrioritizedLookupElement;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Iconable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.RecursionGuard;
import com.intellij.openapi.util.RecursionManager;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.impl.JavaConstantExpressionEvaluator;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiTypesUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.PlatformIcons;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.ig.psiutils.*;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.*;
import java.util.function.Function;
/**
* @author Pavel.Dolgov
*/
public class JavaReflectionReferenceUtil {
// MethodHandle (Java 7) and VarHandle (Java 9) infrastructure
public static final String JAVA_LANG_INVOKE_METHOD_HANDLES_LOOKUP = "java.lang.invoke.MethodHandles.Lookup";
public static final String JAVA_LANG_INVOKE_METHOD_TYPE = "java.lang.invoke.MethodType";
public static final String METHOD_TYPE = "methodType";
public static final String GENERIC_METHOD_TYPE = "genericMethodType";
public static final String FIND_VIRTUAL = "findVirtual";
public static final String FIND_STATIC = "findStatic";
public static final String FIND_SPECIAL = "findSpecial";
public static final String FIND_GETTER = "findGetter";
public static final String FIND_SETTER = "findSetter";
public static final String FIND_STATIC_GETTER = "findStaticGetter";
public static final String FIND_STATIC_SETTER = "findStaticSetter";
public static final String FIND_VAR_HANDLE = "findVarHandle";
public static final String FIND_STATIC_VAR_HANDLE = "findStaticVarHandle";
public static final String FIND_CONSTRUCTOR = "findConstructor";
public static final String FIND_CLASS = "findClass";
// The MethodHandles.Lookup factory methods that locate a named class member
public static final String[] HANDLE_FACTORY_METHOD_NAMES = {
FIND_VIRTUAL, FIND_STATIC, FIND_SPECIAL,
FIND_GETTER, FIND_SETTER,
FIND_STATIC_GETTER, FIND_STATIC_SETTER,
FIND_VAR_HANDLE, FIND_STATIC_VAR_HANDLE};
// Classic reflection infrastructure
public static final String GET_FIELD = "getField";
public static final String GET_DECLARED_FIELD = "getDeclaredField";
public static final String GET_METHOD = "getMethod";
public static final String GET_DECLARED_METHOD = "getDeclaredMethod";
public static final String GET_CONSTRUCTOR = "getConstructor";
public static final String GET_DECLARED_CONSTRUCTOR = "getDeclaredConstructor";
public static final String JAVA_LANG_CLASS_LOADER = "java.lang.ClassLoader";
public static final String FOR_NAME = "forName";
public static final String LOAD_CLASS = "loadClass";
public static final String GET_CLASS = "getClass";
public static final String NEW_INSTANCE = "newInstance";
// Name of the primitive-type constants on wrapper classes, e.g. Integer.TYPE
public static final String TYPE = "TYPE";
// Atomic field updaters
public static final String NEW_UPDATER = "newUpdater";
public static final String ATOMIC_LONG_FIELD_UPDATER = "java.util.concurrent.atomic.AtomicLongFieldUpdater";
public static final String ATOMIC_INTEGER_FIELD_UPDATER = "java.util.concurrent.atomic.AtomicIntegerFieldUpdater";
public static final String ATOMIC_REFERENCE_FIELD_UPDATER = "java.util.concurrent.atomic.AtomicReferenceFieldUpdater";
// Guards the recursive definition-chasing in getReflectiveType /
// getClassInstanceType against reference cycles
private static final RecursionGuard ourGuard = RecursionManager.createGuard("JavaLangClassMemberReference");
// Infer which class the given expression denotes as a Class object.
// Handles: Foo.class, Class.forName(<constant>), qualifier.getClass(),
// wrapper TYPE constants (e.g. Integer.TYPE), final Class-typed variables
// holding any of the above, and finally the static type Class<T> itself.
// Returns null when nothing can be inferred.
@Contract("null -> null")
public static ReflectiveType getReflectiveType(@Nullable PsiExpression context) {
context = ParenthesesUtils.stripParentheses(context);
if (context == null) {
return null;
}
// Foo.class -> the exact type of the operand
if (context instanceof PsiClassObjectAccessExpression) {
final PsiTypeElement operand = ((PsiClassObjectAccessExpression)context).getOperand();
return ReflectiveType.create(operand.getType(), true);
}
if (context instanceof PsiMethodCallExpression) {
final PsiMethodCallExpression methodCall = (PsiMethodCallExpression)context;
final String methodReferenceName = methodCall.getMethodExpression().getReferenceName();
// Class.forName(name) where the single argument evaluates to a constant string
if (FOR_NAME.equals(methodReferenceName)) {
final PsiMethod method = methodCall.resolveMethod();
if (method != null && isJavaLangClass(method.getContainingClass())) {
final PsiExpression[] expressions = methodCall.getArgumentList().getExpressions();
if (expressions.length == 1) {
final PsiExpression argument = findDefinition(ParenthesesUtils.stripParentheses(expressions[0]));
final String className = computeConstantExpression(argument, String.class);
if (className != null) {
return ReflectiveType.create(findClass(className, context), true);
}
}
}
}
// qualifier.getClass() -> best-effort type of the qualifier's value
else if (GET_CLASS.equals(methodReferenceName) && methodCall.getArgumentList().isEmpty()) {
final PsiMethod method = methodCall.resolveMethod();
if (method != null && isJavaLangObject(method.getContainingClass())) {
final PsiExpression qualifier = ParenthesesUtils.stripParentheses(methodCall.getMethodExpression().getQualifierExpression());
if (qualifier instanceof PsiReferenceExpression) {
final PsiExpression definition = findVariableDefinition((PsiReferenceExpression)qualifier);
if (definition != null) {
return getClassInstanceType(definition);
}
}
//TODO type of the qualifier may be a supertype of the actual value - need to compute the type of the actual value
// otherwise getDeclaredField and getDeclaredMethod may work not reliably
if (qualifier != null) {
return getClassInstanceType(qualifier);
}
}
}
}
// A variable of type Class: chase its definition, guarding against
// self-referential definitions via the recursion guard
if (context instanceof PsiReferenceExpression) {
PsiReferenceExpression reference = (PsiReferenceExpression)context;
final PsiElement resolved = reference.resolve();
if (resolved instanceof PsiVariable) {
PsiVariable variable = (PsiVariable)resolved;
if (isJavaLangClass(PsiTypesUtil.getPsiClass(variable.getType()))) {
final PsiExpression definition = findVariableDefinition(reference, variable);
if (definition != null) {
ReflectiveType result = ourGuard.doPreventingRecursion(variable, false, () -> getReflectiveType(definition));
if (result != null) {
return result;
}
}
}
}
}
// Fall back to the static type Class<T>: use T when it is a concrete class
// other than Object; static final TYPE fields unbox to the primitive type.
final PsiType type = context.getType();
if (type instanceof PsiClassType) {
final PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)type).resolveGenerics();
final PsiClass resolvedElement = resolveResult.getElement();
if (!isJavaLangClass(resolvedElement)) return null;
if (context instanceof PsiReferenceExpression && TYPE.equals(((PsiReferenceExpression)context).getReferenceName())) {
final PsiElement resolved = ((PsiReferenceExpression)context).resolve();
if (resolved instanceof PsiField) {
final PsiField field = (PsiField)resolved;
if (field.hasModifierProperty(PsiModifier.FINAL) && field.hasModifierProperty(PsiModifier.STATIC)) {
final PsiPrimitiveType unboxedType = tryUnbox(field.getContainingClass(), (PsiClassType)type);
if (unboxedType != null) {
return ReflectiveType.create(unboxedType, true);
}
}
}
}
final PsiTypeParameter[] parameters = resolvedElement.getTypeParameters();
if (parameters.length == 1) {
final PsiType typeArgument = resolveResult.getSubstitutor().substitute(parameters[0]);
final PsiType erasure = TypeConversionUtil.erasure(typeArgument);
final PsiClass argumentClass = PsiTypesUtil.getPsiClass(erasure);
if (argumentClass != null && !isJavaLangObject(argumentClass)) {
return ReflectiveType.create(argumentClass, false);
}
}
}
return null;
}
// Infer the runtime type of the object produced by the given expression.
// Recognizes clazz.newInstance() (type taken from the qualifier's Class) and
// java.lang.reflect.Array.newInstance(componentClass, dims...) (array of the
// component's type); otherwise falls back to the expression's static type.
@Nullable
private static ReflectiveType getClassInstanceType(@Nullable PsiExpression expression) {
expression = ParenthesesUtils.stripParentheses(expression);
if (expression == null) {
return null;
}
if (expression instanceof PsiMethodCallExpression) {
final PsiMethodCallExpression methodCall = (PsiMethodCallExpression)expression;
final String methodReferenceName = methodCall.getMethodExpression().getReferenceName();
if (NEW_INSTANCE.equals(methodReferenceName)) {
final PsiMethod method = methodCall.resolveMethod();
if (method != null) {
final PsiExpression[] arguments = methodCall.getArgumentList().getExpressions();
// clazz.newInstance(): the created object's type is clazz's reflective type
if (arguments.length == 0 && isClassWithName(method.getContainingClass(), CommonClassNames.JAVA_LANG_CLASS)) {
final PsiExpression qualifier = methodCall.getMethodExpression().getQualifierExpression();
if (qualifier != null) {
// recursion guard: the qualifier may (indirectly) refer back to this expression
return ourGuard.doPreventingRecursion(qualifier, false, () -> getReflectiveType(qualifier));
}
}
// Array.newInstance(componentClass, d1, d2, ...): an array of the component type
else if (arguments.length > 1 && isClassWithName(method.getContainingClass(), CommonClassNames.JAVA_LANG_REFLECT_ARRAY)) {
final PsiExpression typeExpression = arguments[0];
if (typeExpression != null) {
final ReflectiveType itemType =
ourGuard.doPreventingRecursion(typeExpression, false, () -> getReflectiveType(typeExpression));
return ReflectiveType.arrayOf(itemType);
}
}
}
}
}
// Fallback: the static (declared) type of the expression, marked as inexact
return ReflectiveType.create(expression.getType(), false);
}
/**
 * Evaluate the (unparenthesized) expression as a compile-time constant and
 * return it only when the computed value is of the requested type.
 */
@Contract("null,_->null")
@Nullable
public static <T> T computeConstantExpression(@Nullable PsiExpression expression, @NotNull Class<T> expectedType) {
  final Object constant =
    JavaConstantExpressionEvaluator.computeConstantExpression(ParenthesesUtils.stripParentheses(expression), false);
  return ObjectUtils.tryCast(constant, expectedType);
}
/** Resolve the expression to a reflective type and unwrap its class, if any. */
@Nullable
public static ReflectiveClass getReflectiveClass(PsiExpression context) {
  final ReflectiveType type = getReflectiveType(context);
  if (type == null) {
    return null;
  }
  return type.getReflectiveClass();
}
/**
 * Chase a reference expression back to the expression it was assigned from,
 * giving up after a small fixed number of hops to avoid reference cycles.
 */
@Nullable
public static PsiExpression findDefinition(@Nullable PsiExpression expression) {
  for (int hops = 0; expression instanceof PsiReferenceExpression; hops++) {
    // same 4-hop budget as before: bail out instead of looping forever
    if (hops == 4) return null;
    expression = findVariableDefinition((PsiReferenceExpression)expression);
  }
  return expression;
}
/** Chase one reference hop: only variables have a definition we can follow. */
@Nullable
private static PsiExpression findVariableDefinition(@NotNull PsiReferenceExpression referenceExpression) {
  final PsiElement target = referenceExpression.resolve();
  if (!(target instanceof PsiVariable)) {
    return null;
  }
  return findVariableDefinition(referenceExpression, (PsiVariable)target);
}
/**
 * Find the expression that defines the variable at this use site. A final
 * variable is defined by its initializer (or, for fields, by an assignment in
 * an initializer block or constructor); otherwise fall back to a def-use search.
 */
@Nullable
private static PsiExpression findVariableDefinition(@NotNull PsiReferenceExpression referenceExpression, @NotNull PsiVariable variable) {
  if (!variable.hasModifierProperty(PsiModifier.FINAL)) {
    return DeclarationSearchUtils.findDefinition(referenceExpression, variable);
  }
  final PsiExpression initializer = variable.getInitializer();
  if (initializer != null) {
    return initializer;
  }
  if (variable instanceof PsiField) {
    return findFinalFieldDefinition(referenceExpression, (PsiField)variable);
  }
  return DeclarationSearchUtils.findDefinition(referenceExpression, variable);
}
/**
 * Find the expression assigned to a final field: first in the matching
 * (static vs. instance) initializer blocks, then in the constructors.
 */
@Nullable
private static PsiExpression findFinalFieldDefinition(@NotNull PsiReferenceExpression referenceExpression, @NotNull PsiField field) {
  if (!field.hasModifierProperty(PsiModifier.FINAL)) return null;
  final PsiClass containingClass = ObjectUtils.tryCast(field.getParent(), PsiClass.class);
  if (containingClass == null) {
    return null;
  }

  // Only initializer blocks of the same staticness can assign this field.
  final boolean isStatic = field.hasModifierProperty(PsiModifier.STATIC);
  for (PsiClassInitializer initializer : containingClass.getInitializers()) {
    if (initializer.hasModifierProperty(PsiModifier.STATIC) != isStatic) continue;
    final PsiExpression assigned = getAssignedExpression(initializer, field);
    if (assigned != null) {
      return assigned;
    }
  }

  if (isStatic) {
    return null;
  }

  // With a single constructor the assignment must live there; with several,
  // only trust the constructor that encloses the reference itself.
  final PsiMethod[] constructors = containingClass.getConstructors();
  if (constructors.length == 1) {
    return getAssignedExpression(constructors[0], field);
  }
  for (PsiMethod constructor : constructors) {
    if (PsiTreeUtil.isAncestor(constructor, referenceExpression, true)) {
      return getAssignedExpression(constructor, field);
    }
  }
  return null;
}
/**
 * Find the first assignment to {@code field} inside the given member and
 * return its right-hand side, or null when no such assignment exists.
 */
@Nullable
private static PsiExpression getAssignedExpression(@NotNull PsiMember maybeContainsAssignment, @NotNull PsiField field) {
  for (PsiAssignmentExpression assignment :
       SyntaxTraverser.psiTraverser(maybeContainsAssignment).filter(PsiAssignmentExpression.class)) {
    if (VariableAccessUtils.evaluatesToVariable(assignment.getLExpression(), field)) {
      return assignment.getRExpression();
    }
  }
  return null;
}
/**
 * Map a primitive-wrapper class (e.g. java.lang.Integer) to the corresponding
 * primitive type; null when the class is not a wrapper or cannot be unboxed.
 */
@Nullable
private static PsiPrimitiveType tryUnbox(@Nullable PsiClass psiClass, @NotNull PsiClassType originalType) {
  if (psiClass == null || !TypeConversionUtil.isPrimitiveWrapper(psiClass.getQualifiedName())) {
    return null;
  }
  final PsiElementFactory factory = JavaPsiFacade.getInstance(psiClass.getProject()).getElementFactory();
  // Rebuild the wrapper type at the original language level before unboxing.
  final PsiClassType wrapperType = factory.createType(psiClass, PsiSubstitutor.EMPTY, originalType.getLanguageLevel());
  return PsiPrimitiveType.getUnboxedType(wrapperType);
}
/** Look the class up in the full (project plus libraries) scope of the context's project. */
private static PsiClass findClass(@NotNull String qualifiedName, @NotNull PsiElement context) {
  final Project project = context.getProject();
  final GlobalSearchScope scope = GlobalSearchScope.allScope(project);
  return JavaPsiFacade.getInstance(project).findClass(qualifiedName, scope);
}
/** True when the class is exactly java.lang.Class. */
@Contract("null -> false")
static boolean isJavaLangClass(@Nullable PsiClass aClass) {
  return aClass != null && CommonClassNames.JAVA_LANG_CLASS.equals(aClass.getQualifiedName());
}
/** True when the class is exactly java.lang.Object. */
@Contract("null -> false")
static boolean isJavaLangObject(@Nullable PsiClass aClass) {
  return aClass != null && CommonClassNames.JAVA_LANG_OBJECT.equals(aClass.getQualifiedName());
}
/** True when the (non-null) class has exactly the given fully qualified name. */
@Contract("null, _ -> false")
public static boolean isClassWithName(@Nullable PsiClass aClass, @NotNull String name) {
  if (aClass == null) {
    return false;
  }
  return name.equals(aClass.getQualifiedName());
}
/** "Regular" means an actual (non-constructor) method. */
@Contract("null -> false")
static boolean isRegularMethod(@Nullable PsiMethod method) {
  if (method == null) {
    return false;
  }
  return !method.isConstructor();
}
// True when the member carries the 'public' modifier.
static boolean isPublic(@NotNull PsiMember member) {
return member.hasModifierProperty(PsiModifier.PUBLIC);
}
/**
 * True when the field can be handled by an atomic field updater:
 * a non-static volatile field whose type is a reference type, int, or long.
 */
static boolean isAtomicallyUpdateable(@NotNull PsiField field) {
  if (field.hasModifierProperty(PsiModifier.STATIC)) {
    return false;
  }
  if (!field.hasModifierProperty(PsiModifier.VOLATILE)) {
    return false;
  }
  final PsiType type = field.getType();
  if (!(type instanceof PsiPrimitiveType)) {
    return true; // any reference type is fine
  }
  // Of the primitives, only int and long have atomic field updaters.
  return PsiType.INT.equals(type) || PsiType.LONG.equals(type);
}
/**
 * Renders the method's parameter types as a comma-separated list of class literals,
 * e.g. {@code "java.lang.String.class, int.class"}; empty string for a no-arg method.
 * The original was annotated {@code @Nullable}, but the StringJoiner result is never
 * null, so the annotation is corrected to {@code @NotNull}.
 */
@NotNull
static String getParameterTypesText(@NotNull PsiMethod method) {
  final StringJoiner joiner = new StringJoiner(", ");
  for (PsiParameter parameter : method.getParameterList().getParameters()) {
    final String typeText = getTypeText(parameter.getType());
    joiner.add(typeText + ".class");
  }
  return joiner.toString();
}
/**
 * After insertion, shortens fully-qualified class references inside the argument
 * list that encloses the insertion point (only for method-call argument lists).
 */
static void shortenArgumentsClassReferences(@NotNull InsertionContext context) {
  final PsiElement leaf = PsiUtilCore.getElementAtOffset(context.getFile(), context.getStartOffset());
  final PsiExpressionList argumentList = PsiTreeUtil.getParentOfType(leaf, PsiExpressionList.class);
  if (argumentList == null) {
    return;
  }
  // Only rewrite when the list is the argument list of a method call.
  if (argumentList.getParent() instanceof PsiMethodCallExpression) {
    JavaCodeStyleManager.getInstance(context.getProject()).shortenClassReferences(argumentList);
  }
}
/** Demotes the element below the default lookup priority (-1) when it has no priority. */
@NotNull
static LookupElement withPriority(@NotNull LookupElement lookupElement, boolean hasPriority) {
  if (hasPriority) {
    return lookupElement;
  }
  return PrioritizedLookupElement.withPriority(lookupElement, -1);
}
/** Wraps the element with the given priority; a null element or priority 0 is passed through. */
@Nullable
static LookupElement withPriority(@Nullable LookupElement lookupElement, int priority) {
  if (lookupElement == null || priority == 0) {
    return lookupElement;
  }
  return PrioritizedLookupElement.withPriority(lookupElement, priority);
}
/**
 * Sort key for method completion: java.lang.Object methods last (1),
 * public methods first (-1), everything else in between (0).
 */
static int getMethodSortOrder(@NotNull PsiMethod method) {
  if (isJavaLangObject(method.getContainingClass())) {
    return 1;
  }
  return isPublic(method) ? -1 : 0;
}
/**
 * Returns the name of the method invoked by the call expression enclosing
 * {@code element} (e.g. "getField", "getMethod"), or null when there is none.
 */
@Nullable
static String getMemberType(@Nullable PsiElement element) {
  final PsiMethodCallExpression call = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class);
  if (call == null) {
    return null;
  }
  return call.getMethodExpression().getReferenceName();
}
/** Builds a plain completion element for a field, with default presentation. */
@NotNull
static LookupElement lookupField(@NotNull PsiField field) {
  return JavaLookupElementBuilder.forField(field);
}
/**
 * Builds a completion element for a method, showing its argument types as tail text.
 * Returns null when the method's signature cannot be computed.
 */
@Nullable
static LookupElement lookupMethod(@NotNull PsiMethod method, @Nullable InsertHandler<LookupElement> insertHandler) {
  final ReflectiveSignature signature = getMethodSignature(method);
  if (signature == null) {
    return null;
  }
  return LookupElementBuilder.create(signature, method.getName())
    .withIcon(signature.getIcon())
    .withTailText(signature.getShortArgumentTypes())
    .withInsertHandler(insertHandler);
}
/**
 * Replaces the document text between the freshly inserted element and the end of its
 * grand-parent with {@code text}, then shortens class references in the result.
 * NOTE(review): assumes newElement.getParent().getParent() spans the region to rewrite
 * and that its final character is a closing token to be preserved — confirm with callers.
 */
static void replaceText(@NotNull InsertionContext context, @NotNull String text) {
final PsiElement newElement = PsiUtilCore.getElementAtOffset(context.getFile(), context.getStartOffset());
final PsiElement params = newElement.getParent().getParent();
// Stop one character before the grand-parent's end, keeping its closing token.
final int end = params.getTextRange().getEndOffset() - 1;
// Clamp so the replacement range is never inverted.
final int start = Math.min(newElement.getTextRange().getEndOffset(), end);
context.getDocument().replaceString(start, end, text);
context.commitDocument();
shortenArgumentsClassReferences(context);
}
/** Canonical (erased) text of a PSI type, e.g. "java.util.List". */
@NotNull
public static String getTypeText(@NotNull PsiType type) {
  // isExact=false: only the erased qualified name is needed here.
  return ReflectiveType.create(type, false).getQualifiedName();
}
/** Qualified type name denoted by a reflective expression, or null if unresolvable. */
@Nullable
public static String getTypeText(@Nullable PsiExpression argument) {
  final ReflectiveType reflectiveType = getReflectiveType(argument);
  if (reflectiveType == null) {
    return null;
  }
  return reflectiveType.getQualifiedName();
}
/**
 * Builds a {@link ReflectiveSignature} (return type followed by parameter types)
 * for the given method. A constructor — whose PSI return type is null — is
 * modeled with a 'void' return type.
 */
@Contract("null -> null")
@Nullable
public static ReflectiveSignature getMethodSignature(@Nullable PsiMethod method) {
  if (method == null) {
    return null;
  }
  final List<String> typeTexts = new ArrayList<>();
  final PsiType returnType = method.getReturnType();
  typeTexts.add(getTypeText(returnType == null ? PsiType.VOID : returnType));
  for (PsiParameter parameter : method.getParameterList().getParameters()) {
    typeTexts.add(getTypeText(parameter.getType()));
  }
  return ReflectiveSignature.create(method.getIcon(Iconable.ICON_FLAG_VISIBILITY), typeTexts);
}
/**
 * Renders the signature as a MethodType factory call, e.g.
 * {@code java.lang.invoke.MethodType.methodType(Ret.class, Arg.class)}.
 */
@NotNull
public static String getMethodTypeExpressionText(@NotNull ReflectiveSignature signature) {
  final String argumentList = signature.getText(true, type -> type + ".class");
  return JAVA_LANG_INVOKE_METHOD_TYPE + "." + METHOD_TYPE + argumentList;
}
/** True when the call invokes {@code className.methodName}; argument types are not constrained. */
public static boolean isCallToMethod(@NotNull PsiMethodCallExpression methodCall, @NotNull String className, @NotNull String methodName) {
  return MethodCallUtils.isCallToMethod(methodCall, className, null, methodName, (PsiType[])null);
}
/**
 * Unwraps an expression passed in place of a vararg into the individual element
 * expressions, when that is statically possible:
 * a null literal or a provable 'new T[0]' yields an empty array, and an array
 * initializer yields its initializer expressions. Returns null when the elements
 * cannot be determined.
 */
@Nullable
public static PsiExpression[] getVarargAsArray(@Nullable PsiExpression maybeArray) {
// Passing a null literal as the vararg array means "no arguments".
if (ExpressionUtils.isNullLiteral(maybeArray)) {
return PsiExpression.EMPTY_ARRAY;
}
if (isVarargAsArray(maybeArray)) {
// Follow the reference back to where the array was actually defined.
final PsiExpression argumentsDefinition = findDefinition(maybeArray);
if (argumentsDefinition instanceof PsiArrayInitializerExpression) {
return ((PsiArrayInitializerExpression)argumentsDefinition).getInitializers();
}
if (argumentsDefinition instanceof PsiNewExpression) {
final PsiArrayInitializerExpression arrayInitializer = ((PsiNewExpression)argumentsDefinition).getArrayInitializer();
if (arrayInitializer != null) {
return arrayInitializer.getInitializers();
}
final PsiExpression[] dimensions = ((PsiNewExpression)argumentsDefinition).getArrayDimensions();
if (dimensions.length == 1) { // special case: new Object[0]
final Integer itemCount = computeConstantExpression(findDefinition(dimensions[0]), Integer.class);
if (itemCount != null && itemCount == 0) {
return PsiExpression.EMPTY_ARRAY;
}
}
}
}
// Unknown shape or non-constant size: elements cannot be enumerated.
return null;
}
/**
 * True when the expression's type is a one-dimensional array of a class type
 * (e.g. Object[]) — the shape used when a vararg is supplied as a single array.
 */
@Contract("null -> false")
public static boolean isVarargAsArray(@Nullable PsiExpression maybeArray) {
  if (maybeArray == null) {
    return false;
  }
  final PsiType type = maybeArray.getType();
  return type instanceof PsiArrayType
         && type.getArrayDimensions() == 1
         && type.getDeepComponentType() instanceof PsiClassType;
}
/**
 * Derives a method signature from an expression that evaluates to a MethodType,
 * i.e. a call to MethodType.methodType(Class...) or
 * MethodType.genericMethodType(int, boolean?). Returns null when the expression
 * does not resolve to one of these factory calls.
 */
@Nullable
public static ReflectiveSignature composeMethodSignature(@Nullable PsiExpression methodTypeExpression) {
// Follow references back to the expression's definition before inspecting it.
final PsiExpression typeDefinition = findDefinition(methodTypeExpression);
if (typeDefinition instanceof PsiMethodCallExpression) {
final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)typeDefinition;
final String referenceName = methodCallExpression.getMethodExpression().getReferenceName();
// Pick a composer based on which factory method is being called.
Function<PsiExpression[], ReflectiveSignature> composer = null;
if (METHOD_TYPE.equals(referenceName)) {
composer = JavaReflectionReferenceUtil::composeMethodSignatureFromTypes;
}
else if (GENERIC_METHOD_TYPE.equals(referenceName)) {
composer = JavaReflectionReferenceUtil::composeGenericMethodSignature;
}
if (composer != null) {
// Verify the call actually resolves to java.lang.invoke.MethodType,
// not a same-named method on some other class.
final PsiMethod method = methodCallExpression.resolveMethod();
if (method != null) {
final PsiClass psiClass = method.getContainingClass();
if (psiClass != null && JAVA_LANG_INVOKE_METHOD_TYPE.equals(psiClass.getQualifiedName())) {
final PsiExpression[] arguments = methodCallExpression.getArgumentList().getExpressions();
return composer.apply(arguments);
}
}
}
}
return null;
}
/**
 * Maps each Class-literal argument of MethodType.methodType(Class...) to its type
 * text (return type first). ReflectiveSignature.create returns null when any
 * argument could not be resolved (a null type text).
 */
@Nullable
private static ReflectiveSignature composeMethodSignatureFromTypes(@NotNull PsiExpression[] returnAndParameterTypes) {
  final List<String> typeTexts = new ArrayList<>();
  for (PsiExpression typeExpression : returnAndParameterTypes) {
    typeTexts.add(getTypeText(typeExpression)); // may be null; create() then yields null
  }
  return ReflectiveSignature.create(typeTexts);
}
/**
 * Parses the arguments of MethodType.genericMethodType(int objectArgCount[, boolean finalArray])
 * into a (count, hasTrailingArray) pair. Returns null when the arguments are not
 * compile-time constants or are out of range.
 * The 255/254 bounds presumably mirror the JVM's 255-slot method parameter limit
 * (the trailing array consumes one slot) — TODO confirm against MethodType's spec.
 */
@Nullable
public static Pair.NonNull<Integer, Boolean> getGenericSignature(@NotNull PsiExpression[] genericSignatureShape) {
// genericMethodType takes one or two arguments.
if (genericSignatureShape.length == 0 || genericSignatureShape.length > 2) {
return null;
}
final Integer objectArgCount = computeConstantExpression(genericSignatureShape[0], Integer.class);
final Boolean finalArray = // there's an additional parameter which is an ellipsis or an array
genericSignatureShape.length > 1 ? computeConstantExpression(genericSignatureShape[1], Boolean.class) : false;
if (objectArgCount == null || objectArgCount < 0 || objectArgCount > 255) {
return null;
}
// With a trailing array the count may be at most 254.
if (finalArray == null || finalArray && objectArgCount > 254) {
return null;
}
return Pair.createNonNull(objectArgCount, finalArray);
}
/**
 * Builds the signature of a generic MethodType: the return type and every
 * parameter are java.lang.Object, optionally followed by a trailing Object[].
 */
@Nullable
private static ReflectiveSignature composeGenericMethodSignature(@NotNull PsiExpression[] genericSignatureShape) {
  final Pair.NonNull<Integer, Boolean> shape = getGenericSignature(genericSignatureShape);
  if (shape == null) {
    return null;
  }
  final List<String> typeNames = new ArrayList<>();
  // One entry for the return type plus shape.getFirst() parameter entries.
  final int objectArgCount = shape.getFirst();
  for (int i = 0; i <= objectArgCount; i++) {
    typeNames.add(CommonClassNames.JAVA_LANG_OBJECT);
  }
  if (shape.getSecond()) {
    typeNames.add(CommonClassNames.JAVA_LANG_OBJECT + "[]");
  }
  return ReflectiveSignature.create(typeNames);
}
/**
 * An erased PSI type paired with an "exact" flag: when exact, the reflective
 * expression is known to denote precisely this type, not a subtype.
 * Instances are always built through the static factories, which erase generics
 * (and convert varargs to arrays) up front.
 */
public static class ReflectiveType {
final PsiType myType;
final boolean myIsExact;
// Private: callers must go through create()/arrayOf() so the type is always erased.
private ReflectiveType(@NotNull PsiType erasedType, boolean isExact) {
myType = erasedType;
myIsExact = isExact;
}
@NotNull
public String getQualifiedName() {
return myType.getCanonicalText();
}
@Override
public String toString() {
return myType.getCanonicalText();
}
// Compares against the erasure of the other type, matching our own stored erasure.
public boolean isEqualTo(@Nullable PsiType otherType) {
return otherType != null && myType.equals(erasure(otherType));
}
public boolean isAssignableFrom(@NotNull PsiType type) {
return myType.isAssignableFrom(type);
}
public boolean isPrimitive() {
return myType instanceof PsiPrimitiveType;
}
@NotNull
public PsiType getType() {
return myType;
}
public boolean isExact() {
return myIsExact;
}
// Resolves to a ReflectiveClass, propagating the exactness flag; null for
// primitives/arrays or unresolved types.
@Nullable
public ReflectiveClass getReflectiveClass() {
PsiClass psiClass = getPsiClass();
if (psiClass != null) {
return new ReflectiveClass(psiClass, myIsExact);
}
return null;
}
// Component type of an array type, keeping the exactness flag; null otherwise.
@Nullable
public ReflectiveType getArrayComponentType() {
if (myType instanceof PsiArrayType) {
PsiType componentType = ((PsiArrayType)myType).getComponentType();
return new ReflectiveType(componentType, myIsExact);
}
return null;
}
@Nullable
public PsiClass getPsiClass() {
return PsiTypesUtil.getPsiClass(myType);
}
// Factory from a PSI type; erases generics before storing.
@Contract("!null,_ -> !null; null,_ -> null")
@Nullable
public static ReflectiveType create(@Nullable PsiType originalType, boolean isExact) {
if (originalType != null) {
return new ReflectiveType(erasure(originalType), isExact);
}
return null;
}
// Factory from a PSI class; builds a raw class type for it.
@Contract("!null,_ -> !null; null,_ -> null")
@Nullable
public static ReflectiveType create(@Nullable PsiClass psiClass, boolean isExact) {
if (psiClass != null) {
final PsiElementFactory factory = JavaPsiFacade.getInstance(psiClass.getProject()).getElementFactory();
return new ReflectiveType(factory.createType(psiClass), isExact);
}
return null;
}
// Wraps an item type into its array type, keeping the item's exactness.
@Contract("!null -> !null; null -> null")
@Nullable
public static ReflectiveType arrayOf(@Nullable ReflectiveType itemType) {
if (itemType != null) {
return new ReflectiveType(itemType.myType.createArrayType(), itemType.myIsExact);
}
return null;
}
// Erasure helper; a vararg (ellipsis) type erases to its array form.
@NotNull
private static PsiType erasure(@NotNull PsiType type) {
final PsiType erasure = TypeConversionUtil.erasure(type);
if (erasure instanceof PsiEllipsisType) {
return ((PsiEllipsisType)erasure).toArrayType();
}
return erasure;
}
}
/**
 * A resolved class paired with a flag telling whether the reflective expression
 * is known to denote exactly this class rather than a possible subclass.
 */
public static class ReflectiveClass {
  final PsiClass myPsiClass;
  final boolean myIsExact;

  public ReflectiveClass(@NotNull PsiClass psiClass, boolean isExact) {
    myPsiClass = psiClass;
    myIsExact = isExact;
  }

  @NotNull
  public PsiClass getPsiClass() {
    return myPsiClass;
  }

  /** A final class is always exact, regardless of how the reference was obtained. */
  public boolean isExact() {
    if (myIsExact) {
      return true;
    }
    return myPsiClass.hasModifierProperty(PsiModifier.FINAL);
  }
}
/**
 * Immutable description of a method signature in reflective form: a return type
 * plus argument types, all as fully-qualified type texts. Ordered by argument
 * count, then argument types, then return type.
 * Fix: the original hashCode() used Objects.hash(myReturnType, myArgumentTypes),
 * which folds in the ARRAY'S IDENTITY hash — inconsistent with equals(), which
 * compares the array by content via Arrays.equals. Equal signatures could land in
 * different hash buckets. hashCode() now uses Arrays.hashCode for the array.
 */
public static class ReflectiveSignature implements Comparable<ReflectiveSignature> {
  public static final ReflectiveSignature NO_ARGUMENT_CONSTRUCTOR_SIGNATURE =
    new ReflectiveSignature(null, PsiKeyword.VOID, ArrayUtil.EMPTY_STRING_ARRAY);

  private final Icon myIcon;
  @NotNull private final String myReturnType;
  @NotNull private final String[] myArgumentTypes;

  /** Creates a signature without an icon; see {@link #create(Icon, List)}. */
  @Nullable
  public static ReflectiveSignature create(@NotNull List<String> typeTexts) {
    return create(null, typeTexts);
  }

  /**
   * Creates a signature from [returnType, argType...]. Returns null when the list
   * is empty or any type text is null (i.e. some type could not be resolved).
   */
  @Nullable
  public static ReflectiveSignature create(@Nullable Icon icon, @NotNull List<String> typeTexts) {
    if (!typeTexts.isEmpty() && !typeTexts.contains(null)) {
      final String[] argumentTypes = ArrayUtil.toStringArray(typeTexts.subList(1, typeTexts.size()));
      return new ReflectiveSignature(icon, typeTexts.get(0), argumentTypes);
    }
    return null;
  }

  private ReflectiveSignature(@Nullable Icon icon, @NotNull String returnType, @NotNull String[] argumentTypes) {
    myIcon = icon;
    myReturnType = returnType;
    myArgumentTypes = argumentTypes;
  }

  public String getText(boolean withReturnType, @NotNull Function<? super String, String> transformation) {
    return getText(withReturnType, true, transformation);
  }

  /**
   * Renders the signature as a comma-separated list, each type passed through
   * {@code transformation}, optionally wrapped in parentheses and optionally
   * including the return type first.
   */
  public String getText(boolean withReturnType, boolean withParentheses, @NotNull Function<? super String, String> transformation) {
    final StringJoiner joiner = new StringJoiner(", ", withParentheses ? "(" : "", withParentheses ? ")" : "");
    if (withReturnType) {
      joiner.add(transformation.apply(myReturnType));
    }
    for (String argumentType : myArgumentTypes) {
      joiner.add(transformation.apply(argumentType));
    }
    return joiner.toString();
  }

  @NotNull
  public String getShortReturnType() {
    return PsiNameHelper.getShortClassName(myReturnType);
  }

  @NotNull
  public String getShortArgumentTypes() {
    return getText(false, PsiNameHelper::getShortClassName);
  }

  @Nullable
  public Icon getIcon() {
    return myIcon != null ? myIcon : PlatformIcons.METHOD_ICON;
  }

  @Override
  public int compareTo(@NotNull ReflectiveSignature other) {
    int c = myArgumentTypes.length - other.myArgumentTypes.length;
    if (c != 0) return c;
    c = ArrayUtil.lexicographicCompare(myArgumentTypes, other.myArgumentTypes);
    if (c != 0) return c;
    return myReturnType.compareTo(other.myReturnType);
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof ReflectiveSignature)) return false;
    final ReflectiveSignature other = (ReflectiveSignature)o;
    return Objects.equals(myReturnType, other.myReturnType) &&
           Arrays.equals(myArgumentTypes, other.myArgumentTypes);
  }

  @Override
  public int hashCode() {
    // Must agree with equals(): hash the argument array by CONTENT.
    // (Objects.hash would use the array's identity hash.)
    int result = myReturnType.hashCode();
    result = 31 * result + Arrays.hashCode(myArgumentTypes);
    return result;
  }

  @Override
  public String toString() {
    return myReturnType + " " + Arrays.toString(myArgumentTypes);
  }
}
}
| |
package com.dianping.cat.report.page.app.processor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.codehaus.plexus.util.StringUtils;
import org.unidal.helper.Splitters;
import org.unidal.lookup.annotation.Inject;
import com.dianping.cat.consumer.problem.ProblemAnalyzer;
import com.dianping.cat.consumer.problem.model.entity.ProblemReport;
import com.dianping.cat.report.page.PayloadNormalizer;
import com.dianping.cat.report.page.app.Action;
import com.dianping.cat.report.page.app.Model;
import com.dianping.cat.report.page.app.Payload;
import com.dianping.cat.report.page.app.ProblemStatistics;
import com.dianping.cat.report.page.model.spi.ModelService;
import com.dianping.cat.report.service.ReportServiceManager;
import com.dianping.cat.service.ModelRequest;
import com.dianping.cat.service.ModelResponse;
/**
 * Handles crash-log report pages: fetches hourly or historical problem reports for
 * the known crash-log domains and derives the filter facets (app versions, platform
 * versions, modules, levels) plus the problem statistics shown on the page.
 * Improvements over the original: the key/domain constants are now {@code static final}
 * (they were mutable instance fields), the private method name typo
 * {@code buildFeildsInfo} is corrected, and malformed "ip" entries with fewer than
 * four colon-separated parts are skipped instead of throwing
 * ArrayIndexOutOfBoundsException.
 */
public class CrashLogProcessor {

  @Inject
  private ReportServiceManager m_reportService;

  @Inject(type = ModelService.class, value = ProblemAnalyzer.ID)
  private ModelService<ProblemReport> m_service;

  @Inject
  private PayloadNormalizer m_normalizer;

  // Keys used to bucket the colon-separated "ip" fields of a crash-log report.
  private static final String APP_VERSIONS = "appVersions";

  private static final String LEVELS = "levels";

  private static final String MODULES = "modules";

  private static final String PLATFORM_VERSIONS = "platformVersions";

  // Domains treated as crash-log domains; the first entry is the default.
  private static final List<String> CRASH_LOG_DOMAINS = Collections.unmodifiableList(Arrays.asList(
        "AndroidCrashLog", "iOSCrashLog", "MerchantAndroidCrashLog", "MerchantIOSCrashLog"));

  /** Returns the set stored under {@code key}, creating and registering an empty one if absent. */
  private Set<String> findOrCreate(String key, Map<String, Set<String>> map) {
    Set<String> value = map.get(key);
    if (value == null) {
      value = new HashSet<String>();
      map.put(key, value);
    }
    return value;
  }

  /** Sorts the collected facets into fieldsInfo: versions descending, modules/levels ascending. */
  private void sortFields(Map<String, Set<String>> fieldsMap, FieldsInfo fieldsInfo) {
    // Versions are sorted in reverse lexicographic order so later versions come first.
    Comparator<String> reverseOrder = new Comparator<String>() {
      public int compare(String s1, String s2) {
        return s2.compareTo(s1);
      }
    };
    List<String> appVersions = new ArrayList<String>(fieldsMap.get(APP_VERSIONS));
    List<String> levels = new ArrayList<String>(fieldsMap.get(LEVELS));
    List<String> modules = new ArrayList<String>(fieldsMap.get(MODULES));
    List<String> platformVersions = new ArrayList<String>(fieldsMap.get(PLATFORM_VERSIONS));
    Collections.sort(appVersions, reverseOrder);
    Collections.sort(platformVersions, reverseOrder);
    Collections.sort(modules);
    Collections.sort(levels);
    fieldsInfo.setAppVersions(appVersions).setPlatVersions(platformVersions).setModules(modules).setLevels(levels);
  }

  /**
   * Extracts the filter facets from the report's "ip" entries, which are encoded as
   * "appVersion:platformVersion:module:level". Malformed entries are skipped.
   */
  private FieldsInfo buildFieldsInfo(ProblemReport report) {
    FieldsInfo fieldsInfo = new FieldsInfo();
    Set<String> fields = report.getIps();
    Map<String, Set<String>> fieldsMap = new HashMap<String, Set<String>>();
    for (String field : fields) {
      String[] parts = field.split(":");
      if (parts.length < 4) {
        continue; // skip malformed entries instead of failing the whole page
      }
      findOrCreate(APP_VERSIONS, fieldsMap).add(parts[0]);
      findOrCreate(PLATFORM_VERSIONS, fieldsMap).add(parts[1]);
      findOrCreate(MODULES, fieldsMap).add(parts[2]);
      findOrCreate(LEVELS, fieldsMap).add(parts[3]);
    }
    if (!fieldsMap.isEmpty()) {
      sortFields(fieldsMap, fieldsInfo);
    }
    return fieldsInfo;
  }

  /**
   * Resolves the crash-log domain from the first ';'-separated segment of query1.
   * Falls back to the first known domain when empty; rejects unknown domains.
   */
  private String queryDomain(Payload payload) {
    String domain = "";
    if (StringUtils.isNotEmpty(payload.getQuery1())) {
      domain = Splitters.by(";").split(payload.getQuery1()).get(0);
    }
    if (StringUtils.isEmpty(domain)) {
      return CRASH_LOG_DOMAINS.get(0);
    } else if (CRASH_LOG_DOMAINS.contains(domain)) {
      return domain;
    } else {
      throw new RuntimeException("Unknown crash log domain: " + domain);
    }
  }

  /** Fetches the hourly problem report for the domain via the model service. */
  private ProblemReport getHourlyReport(Payload payload, String queryType, String domain) {
    ModelRequest request = new ModelRequest(domain, payload.getDate()).//
          setProperty("queryType", queryType);
    if (!StringUtils.isEmpty(payload.getType())) {
      // NOTE(review): the value is hard-coded to "error" even though payload.getType()
      // gates this branch — confirm whether payload.getType() should be passed instead.
      request.setProperty("type", "error");
    }
    if (!StringUtils.isEmpty(payload.getStatus())) {
      request.setProperty("name", payload.getStatus());
    }
    if (m_service.isEligable(request)) {
      ModelResponse<ProblemReport> response = m_service.invoke(request);
      return response.getModel();
    } else {
      throw new RuntimeException("Internal error: no eligible problem service registered for " + request + "!");
    }
  }

  /** Entry point: populates the model with the report, its facets, and its statistics. */
  public void process(Action action, Payload payload, Model model) {
    m_normalizer.normalize(model, payload);
    ProblemReport report;
    switch (action) {
    case HOURLY_CRASH_LOG:
      report = getHourlyReport(payload, "view", queryDomain(payload));
      break;
    case HISTORY_CRASH_LOG:
      report = showSummarizeReport(model, payload, queryDomain(payload));
      break;
    default:
      throw new RuntimeException("Error action name " + action.getName());
    }
    ProblemStatistics problemStatistics = buildProblemStatistics(payload.getQuery1(), report);
    model.setFieldsInfo(buildFieldsInfo(report));
    model.setProblemStatistics(problemStatistics);
    model.setProblemReport(report);
  }

  /** Loads the merged historical problem report for the payload's date range. */
  private ProblemReport showSummarizeReport(Model model, Payload payload, String domain) {
    Date start = payload.getHistoryStartDate();
    Date end = payload.getHistoryEndDate();
    return m_reportService.queryProblemReport(domain, start, end);
  }

  /**
   * Builds problem statistics, applying the facet filters encoded in {@code query}
   * as five ';'-separated segments (domain; appVersions; platformVersions; modules; levels,
   * each ':'-separated).
   */
  private ProblemStatistics buildProblemStatistics(String query, ProblemReport report) {
    ProblemStatistics problemStatistics = new ProblemStatistics();
    if (StringUtils.isNotEmpty(query)) {
      List<String> querys = Splitters.by(";").split(query);
      if (querys.size() == 5) {
        problemStatistics.setAppVersions(Splitters.by(":").noEmptyItem().split(querys.get(1)));
        problemStatistics.setPlatformVersions(Splitters.by(":").noEmptyItem().split(querys.get(2)));
        problemStatistics.setModules(Splitters.by(":").noEmptyItem().split(querys.get(3)));
        problemStatistics.setLevels(Splitters.by(":").noEmptyItem().split(querys.get(4)));
      }
    }
    problemStatistics.visitProblemReport(report);
    return problemStatistics;
  }

  /** Holder for the sorted facet lists rendered as page filters. */
  public class FieldsInfo {
    private List<String> m_platVersions;

    private List<String> m_appVersions;

    private List<String> m_modules;

    private List<String> m_levels;

    public FieldsInfo setPlatVersions(List<String> platVersions) {
      m_platVersions = platVersions;
      return this;
    }

    public FieldsInfo setAppVersions(List<String> appVersions) {
      m_appVersions = appVersions;
      return this;
    }

    public FieldsInfo setModules(List<String> modules) {
      m_modules = modules;
      return this;
    }

    public FieldsInfo setLevels(List<String> levels) {
      m_levels = levels;
      return this;
    }

    public List<String> getAppVersions() {
      return m_appVersions;
    }

    public List<String> getLevels() {
      return m_levels;
    }

    public List<String> getModules() {
      return m_modules;
    }

    public List<String> getPlatVersions() {
      return m_platVersions;
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler;
import static org.apache.hadoop.yarn.exceptions
.InvalidResourceRequestException.InvalidResourceType
.GREATER_THEN_MAX_ALLOCATION;
import static org.apache.hadoop.yarn.exceptions
.InvalidResourceRequestException.InvalidResourceType.LESS_THAN_ZERO;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.ImmutableMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.LocalConfigurationProvider;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.impl.pb.ResourceRequestPBImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.exceptions.InvalidLabelResourceRequestException;
import org.apache.hadoop.yarn.exceptions.InvalidResourceBlacklistRequestException;
import org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException;
import org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException
.InvalidResourceType;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
import org.apache.hadoop.yarn.resourcetypes.ResourceTypesTestHelper;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.TestAMAuthorization.MockRMWithAMS;
import org.apache.hadoop.yarn.server.resourcemanager.TestAMAuthorization.MyContainerManager;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NullRMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.apache.hadoop.yarn.util.resource.DominantResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceUtils;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
public class TestSchedulerUtils {
private static final Log LOG = LogFactory.getLog(TestSchedulerUtils.class);
private static Resource configuredMaxAllocation;
/**
 * Configuration provider that serves an in-memory resource-types definition:
 * three custom resource types (custom-resource-1/2/3), the first two with unit "G".
 * Any other configuration file is delegated to the local provider.
 */
private static class CustomResourceTypesConfigurationProvider
extends LocalConfigurationProvider {
@Override
public InputStream getConfigurationInputStream(Configuration bootstrapConf,
String name) throws YarnException, IOException {
if (YarnConfiguration.RESOURCE_TYPES_CONFIGURATION_FILE.equals(name)) {
// NOTE(review): getBytes() uses the platform default charset; the content is
// ASCII so this is harmless here, but StandardCharsets.UTF_8 would be safer.
return new ByteArrayInputStream(
("<configuration>\n" +
" <property>\n" +
" <name>yarn.resource-types</name>\n" +
" <value>custom-resource-1," +
"custom-resource-2,custom-resource-3</value>\n" +
" </property>\n" +
" <property>\n" +
" <name>yarn.resource-types" +
".custom-resource-1.units</name>\n" +
" <value>G</value>\n" +
" </property>\n" +
" <property>\n" +
" <name>yarn.resource-types" +
".custom-resource-2.units</name>\n" +
" <value>G</value>\n" +
" </property>\n" +
"</configuration>\n").getBytes());
} else {
// Anything other than the resource-types file: normal local lookup.
return super.getConfigurationInputStream(bootstrapConf, name);
}
}
}
private RMContext rmContext = getMockRMContext();
private static YarnConfiguration conf = new YarnConfiguration();
@Rule
public ExpectedException exception = ExpectedException.none();
/** Installs the custom provider and reloads resource types so the three custom types exist. */
private void initResourceTypes() {
  Configuration configuration = new Configuration();
  configuration.set(YarnConfiguration.RM_CONFIGURATION_PROVIDER_CLASS,
      CustomResourceTypesConfigurationProvider.class.getName());
  ResourceUtils.resetResourceTypes(configuration);
}
@Before
public void setUp() {
  initResourceTypes();
  // Must run after initResourceTypes() so the custom resource types are registered.
  configuredMaxAllocation = Resource.newInstance(8192, 4,
      ImmutableMap.of(
          "custom-resource-1", Long.MAX_VALUE,
          "custom-resource-2", Long.MAX_VALUE,
          "custom-resource-3", Long.MAX_VALUE));
}
/**
 * Verifies DefaultResourceCalculator normalization: memory is rounded UP to a
 * multiple of the minimum allocation and clamped into [minMemory, maxMemory].
 * Note: 'ask' and 'maxResource' are mutated sequentially — the cases below are
 * order-dependent.
 */
@Test(timeout = 30000)
public void testNormalizeRequest() {
ResourceCalculator resourceCalculator = new DefaultResourceCalculator();
final int minMemory = 1024;
final int maxMemory = 8192;
Resource minResource = Resources.createResource(minMemory, 0);
Resource maxResource = Resources.createResource(maxMemory, 0);
ResourceRequest ask = new ResourceRequestPBImpl();
// case negative memory
ask.setCapability(Resources.createResource(-1024));
SchedulerUtils.normalizeRequest(ask, resourceCalculator, minResource,
maxResource);
assertEquals(minMemory, ask.getCapability().getMemorySize());
// case zero memory
ask.setCapability(Resources.createResource(0));
SchedulerUtils.normalizeRequest(ask, resourceCalculator, minResource,
maxResource);
assertEquals(minMemory, ask.getCapability().getMemorySize());
// case memory is a multiple of minMemory
ask.setCapability(Resources.createResource(2 * minMemory));
SchedulerUtils.normalizeRequest(ask, resourceCalculator, minResource,
maxResource);
assertEquals(2 * minMemory, ask.getCapability().getMemorySize());
// case memory is not a multiple of minMemory
ask.setCapability(Resources.createResource(minMemory + 10));
SchedulerUtils.normalizeRequest(ask, resourceCalculator, minResource,
maxResource);
// rounded up to the next multiple of minMemory
assertEquals(2 * minMemory, ask.getCapability().getMemorySize());
// case memory is equal to max allowed
ask.setCapability(Resources.createResource(maxMemory));
SchedulerUtils.normalizeRequest(ask, resourceCalculator, minResource,
maxResource);
assertEquals(maxMemory, ask.getCapability().getMemorySize());
// case memory is just less than max
ask.setCapability(Resources.createResource(maxMemory - 10));
SchedulerUtils.normalizeRequest(ask, resourceCalculator, minResource,
maxResource);
// rounds up to maxMemory, which is still within the limit
assertEquals(maxMemory, ask.getCapability().getMemorySize());
// max is not a multiple of min
maxResource = Resources.createResource(maxMemory - 10, 0);
ask.setCapability(Resources.createResource(maxMemory - 100));
// multiple of minMemory > maxMemory, then reduce to maxMemory
SchedulerUtils.normalizeRequest(ask, resourceCalculator, minResource,
maxResource);
assertEquals(maxResource.getMemorySize(),
ask.getCapability().getMemorySize());
// ask is more than max
maxResource = Resources.createResource(maxMemory, 0);
ask.setCapability(Resources.createResource(maxMemory + 100));
SchedulerUtils.normalizeRequest(ask, resourceCalculator, minResource,
maxResource);
// clamped down to the maximum allocation
assertEquals(maxResource.getMemorySize(),
ask.getCapability().getMemorySize());
}
/**
 * Verifies DominantResourceCalculator normalization: each dimension (memory and
 * vcores) is independently rounded up to a multiple of the minimum allocation and
 * floored at the minimum.
 */
@Test(timeout = 30000)
public void testNormalizeRequestWithDominantResourceCalculator() {
ResourceCalculator resourceCalculator = new DominantResourceCalculator();
Resource minResource = Resources.createResource(1024, 1);
Resource maxResource = Resources.createResource(10240, 10);
Resource clusterResource = Resources.createResource(10 * 1024, 10);
ResourceRequest ask = new ResourceRequestPBImpl();
// case negative memory/vcores
ask.setCapability(Resources.createResource(-1024, -1));
SchedulerUtils.normalizeRequest(
ask, resourceCalculator, minResource, maxResource);
// both dimensions floored at the minimum allocation
assertEquals(minResource, ask.getCapability());
// case zero memory/vcores
ask.setCapability(Resources.createResource(0, 0));
SchedulerUtils.normalizeRequest(
ask, resourceCalculator, minResource, maxResource);
assertEquals(minResource, ask.getCapability());
assertEquals(1, ask.getCapability().getVirtualCores());
assertEquals(1024, ask.getCapability().getMemorySize());
// case non-zero memory & zero cores
ask.setCapability(Resources.createResource(1536, 0));
SchedulerUtils.normalizeRequest(
ask, resourceCalculator, minResource, maxResource);
// memory rounded up to the next multiple of 1024; vcores floored at 1
assertEquals(Resources.createResource(2048, 1), ask.getCapability());
assertEquals(1, ask.getCapability().getVirtualCores());
assertEquals(2048, ask.getCapability().getMemorySize());
}
/**
 * Validates node-label handling in request normalization/validation:
 * requests whose label expression is among the queue's accessible labels
 * (and registered in the cluster) pass; unknown labels, multi-label
 * expressions, labels on non-ANY resource names, and label expressions when
 * node labels are disabled are rejected.
 */
@Test(timeout = 30000)
public void testValidateResourceRequestWithErrorLabelsPermission()
    throws IOException {
  // mock queue and scheduler
  ResourceScheduler scheduler = mock(ResourceScheduler.class);
  Set<String> queueAccessibleNodeLabels = Sets.newHashSet();
  QueueInfo queueInfo = mock(QueueInfo.class);
  when(queueInfo.getQueueName()).thenReturn("queue");
  when(queueInfo.getAccessibleNodeLabels())
      .thenReturn(queueAccessibleNodeLabels);
  when(scheduler.getQueueInfo(any(String.class), anyBoolean(), anyBoolean()))
      .thenReturn(queueInfo);
  when(rmContext.getScheduler()).thenReturn(scheduler);
  Resource maxResource = Resources.createResource(
      YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
      YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
  // queue has labels, success cases
  try {
    // set queue accessible node labels to [x, y]
    queueAccessibleNodeLabels.clear();
    queueAccessibleNodeLabels.addAll(Arrays.asList("x", "y"));
    rmContext.getNodeLabelManager().addToCluserNodeLabels(
        ImmutableSet.of(NodeLabel.newInstance("x"),
            NodeLabel.newInstance("y")));
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    resReq.setNodeLabelExpression("x");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    resReq.setNodeLabelExpression("y");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    // empty and blank expressions are also accepted
    resReq.setNodeLabelExpression("");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    resReq.setNodeLabelExpression(" ");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
  } catch (InvalidResourceRequestException e) {
    e.printStackTrace();
    fail("Should be valid when request labels is a subset of queue labels");
  } finally {
    rmContext.getNodeLabelManager().removeFromClusterNodeLabels(
        Arrays.asList("x", "y"));
  }
  // same as above, but cluster node labels don't contain the label being
  // requested. should fail
  try {
    // set queue accessible node labels to [x, y]
    queueAccessibleNodeLabels.clear();
    queueAccessibleNodeLabels.addAll(Arrays.asList("x", "y"));
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    resReq.setNodeLabelExpression("x");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    fail("Should fail");
  } catch (InvalidResourceRequestException e) {
    // expected
  }
  // queue has labels, failed cases (when ask a label not included by queue)
  try {
    // set queue accessible node labels to [x, y]
    queueAccessibleNodeLabels.clear();
    queueAccessibleNodeLabels.addAll(Arrays.asList("x", "y"));
    rmContext.getNodeLabelManager().addToCluserNodeLabels(
        ImmutableSet.of(NodeLabel.newInstance("x"),
            NodeLabel.newInstance("y")));
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    resReq.setNodeLabelExpression("z");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    fail("Should fail");
  } catch (InvalidResourceRequestException e) {
    // expected
  } finally {
    rmContext.getNodeLabelManager().removeFromClusterNodeLabels(
        Arrays.asList("x", "y"));
  }
  // we don't allow specify more than two node labels in a single expression
  // now
  try {
    // set queue accessible node labels to [x, y]
    queueAccessibleNodeLabels.clear();
    queueAccessibleNodeLabels.addAll(Arrays.asList("x", "y"));
    rmContext.getNodeLabelManager().addToCluserNodeLabels(
        ImmutableSet.of(NodeLabel.newInstance("x"),
            NodeLabel.newInstance("y")));
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    resReq.setNodeLabelExpression("x && y");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    fail("Should fail");
  } catch (InvalidResourceRequestException e) {
    // expected
  } finally {
    rmContext.getNodeLabelManager().removeFromClusterNodeLabels(
        Arrays.asList("x", "y"));
  }
  // queue doesn't have label, succeed (when request no label)
  queueAccessibleNodeLabels.clear();
  try {
    // set queue accessible node labels to empty
    queueAccessibleNodeLabels.clear();
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    resReq.setNodeLabelExpression("");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    resReq.setNodeLabelExpression(" ");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
  } catch (InvalidResourceRequestException e) {
    e.printStackTrace();
    fail("Should be valid when request labels is empty");
  }
  boolean invalidlabelexception = false;
  // queue doesn't have label, failed (when request any label)
  try {
    // set queue accessible node labels to empty
    queueAccessibleNodeLabels.clear();
    rmContext.getNodeLabelManager().addToCluserNodeLabels(
        ImmutableSet.of(NodeLabel.newInstance("x")));
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    resReq.setNodeLabelExpression("x");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    fail("Should fail");
  } catch (InvalidLabelResourceRequestException e) {
    // the more specific subtype is expected here
    invalidlabelexception = true;
  } catch (InvalidResourceRequestException e) {
    // a plain InvalidResourceRequestException would leave the flag false
    // and trip the assertion below
  } finally {
    rmContext.getNodeLabelManager().removeFromClusterNodeLabels(
        Arrays.asList("x"));
  }
  Assert.assertTrue("InvalidLabelResourceRequestException expected",
      invalidlabelexception);
  // queue is "*", always succeeded
  try {
    // set queue accessible node labels to empty
    queueAccessibleNodeLabels.clear();
    queueAccessibleNodeLabels.add(RMNodeLabelsManager.ANY);
    rmContext.getNodeLabelManager().addToCluserNodeLabels(
        ImmutableSet.of(NodeLabel.newInstance("x"),
            NodeLabel.newInstance("y"), NodeLabel.newInstance("z")));
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    resReq.setNodeLabelExpression("x");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    resReq.setNodeLabelExpression("y");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    resReq.setNodeLabelExpression("z");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
  } catch (InvalidResourceRequestException e) {
    e.printStackTrace();
    fail("Should be valid when queue can access any labels");
  } finally {
    rmContext.getNodeLabelManager().removeFromClusterNodeLabels(
        Arrays.asList("x", "y", "z"));
  }
  // same as above, but cluster node labels don't contain the label,
  // should fail
  try {
    // set queue accessible node labels to empty
    queueAccessibleNodeLabels.clear();
    queueAccessibleNodeLabels.add(RMNodeLabelsManager.ANY);
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    resReq.setNodeLabelExpression("x");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    fail("Should fail");
  } catch (InvalidResourceRequestException e) {
    // expected
  }
  // we don't allow resource name other than ANY and specify label
  try {
    // set queue accessible node labels to [x, y]
    queueAccessibleNodeLabels.clear();
    queueAccessibleNodeLabels.addAll(Arrays.asList("x", "y"));
    rmContext.getNodeLabelManager().addToCluserNodeLabels(
        ImmutableSet.of(NodeLabel.newInstance("x"),
            NodeLabel.newInstance("y")));
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), "rack", resource, 1);
    resReq.setNodeLabelExpression("x");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    fail("Should fail");
  } catch (InvalidResourceRequestException e) {
    // expected
  } finally {
    rmContext.getNodeLabelManager().removeFromClusterNodeLabels(
        Arrays.asList("x", "y"));
  }
  // we don't allow resource name other than ANY and specify label even if
  // queue has accessible label = *
  try {
    // set queue accessible node labels to *
    queueAccessibleNodeLabels.clear();
    queueAccessibleNodeLabels.addAll(Arrays
        .asList(CommonNodeLabelsManager.ANY));
    rmContext.getNodeLabelManager().addToCluserNodeLabels(
        ImmutableSet.of(NodeLabel.newInstance("x")));
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), "rack", resource, 1);
    resReq.setNodeLabelExpression("x");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    fail("Should fail");
  } catch (InvalidResourceRequestException e) {
    // expected
  } finally {
    rmContext.getNodeLabelManager().removeFromClusterNodeLabels(
        Arrays.asList("x"));
  }
  // label "x" is no longer registered in the cluster at this point, so the
  // request must be rejected with the exact message below
  try {
    Resource resource = Resources.createResource(0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq1 = BuilderUtils
        .newResourceRequest(mock(Priority.class), "*", resource, 1, "x");
    normalizeAndvalidateRequest(resReq1, "queue",
        scheduler, rmContext, maxResource);
    fail("Should fail");
  } catch (InvalidResourceRequestException e) {
    assertEquals("Invalid label resource request, cluster do not contain , "
        + "label= x", e.getMessage());
  }
  // with node labels disabled, a label expression is silently normalized to
  // NO_LABEL rather than rejected
  try {
    rmContext.getYarnConfiguration()
        .set(YarnConfiguration.NODE_LABELS_ENABLED, "false");
    Resource resource = Resources.createResource(0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq1 = BuilderUtils
        .newResourceRequest(mock(Priority.class), "*", resource, 1, "x");
    normalizeAndvalidateRequest(resReq1, "queue",
        scheduler, rmContext, maxResource);
    Assert.assertEquals(RMNodeLabelsManager.NO_LABEL,
        resReq1.getNodeLabelExpression());
  } catch (InvalidResourceRequestException e) {
    assertEquals("Invalid resource request, node label not enabled but "
        + "request contains label expression", e.getMessage());
  }
}
/**
 * Boundary checks for request validation: zero and maximum memory/vcores
 * are accepted; negative values fail with {@code LESS_THAN_ZERO} and values
 * above the maximum allocation fail with
 * {@code GREATER_THEN_MAX_ALLOCATION}.
 */
@Test(timeout = 30000)
public void testValidateResourceRequest() throws IOException {
  ResourceScheduler mockScheduler = mock(ResourceScheduler.class);
  QueueInfo queueInfo = mock(QueueInfo.class);
  when(queueInfo.getQueueName()).thenReturn("queue");
  Resource maxResource =
      Resources.createResource(
          YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
          YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
  when(rmContext.getScheduler()).thenReturn(mockScheduler);
  when(mockScheduler.getQueueInfo(Mockito.anyString(), Mockito.anyBoolean(),
      Mockito.anyBoolean())).thenReturn(queueInfo);
  // zero memory
  try {
    Resource resource =
        Resources.createResource(0,
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq =
        BuilderUtils.newResourceRequest(mock(Priority.class),
            ResourceRequest.ANY, resource, 1);
    normalizeAndvalidateRequest(resReq, null,
        mockScheduler, rmContext, maxResource);
  } catch (InvalidResourceRequestException e) {
    fail("Zero memory should be accepted");
  }
  // zero vcores
  try {
    Resource resource =
        Resources.createResource(
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 0);
    ResourceRequest resReq =
        BuilderUtils.newResourceRequest(mock(Priority.class),
            ResourceRequest.ANY, resource, 1);
    normalizeAndvalidateRequest(resReq, null,
        mockScheduler, rmContext, maxResource);
  } catch (InvalidResourceRequestException e) {
    fail("Zero vcores should be accepted");
  }
  // max memory
  try {
    Resource resource =
        Resources.createResource(
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq =
        BuilderUtils.newResourceRequest(mock(Priority.class),
            ResourceRequest.ANY, resource, 1);
    normalizeAndvalidateRequest(resReq, null,
        mockScheduler, rmContext, maxResource);
  } catch (InvalidResourceRequestException e) {
    fail("Max memory should be accepted");
  }
  // max vcores
  try {
    Resource resource =
        Resources.createResource(
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq =
        BuilderUtils.newResourceRequest(mock(Priority.class),
            ResourceRequest.ANY, resource, 1);
    normalizeAndvalidateRequest(resReq, null,
        mockScheduler, rmContext, maxResource);
  } catch (InvalidResourceRequestException e) {
    // fixed: this case mirrors "max memory" above — a request exactly at
    // the maximum IS valid, so rejecting it is the failure condition
    fail("Max vcores should be accepted");
  }
  // negative memory
  try {
    Resource resource =
        Resources.createResource(-1,
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq =
        BuilderUtils.newResourceRequest(mock(Priority.class),
            ResourceRequest.ANY, resource, 1);
    normalizeAndvalidateRequest(resReq, null,
        mockScheduler, rmContext, maxResource);
    fail("Negative memory should not be accepted");
  } catch (InvalidResourceRequestException e) {
    assertEquals(LESS_THAN_ZERO, e.getInvalidResourceType());
  }
  // negative vcores
  try {
    Resource resource =
        Resources.createResource(
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB, -1);
    ResourceRequest resReq =
        BuilderUtils.newResourceRequest(mock(Priority.class),
            ResourceRequest.ANY, resource, 1);
    normalizeAndvalidateRequest(resReq, null,
        mockScheduler, rmContext, maxResource);
    fail("Negative vcores should not be accepted");
  } catch (InvalidResourceRequestException e) {
    assertEquals(LESS_THAN_ZERO, e.getInvalidResourceType());
  }
  // more than max memory
  try {
    Resource resource =
        Resources.createResource(
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB + 1,
            YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq =
        BuilderUtils.newResourceRequest(mock(Priority.class),
            ResourceRequest.ANY, resource, 1);
    normalizeAndvalidateRequest(resReq, null,
        mockScheduler, rmContext, maxResource);
    fail("More than max memory should not be accepted");
  } catch (InvalidResourceRequestException e) {
    assertEquals(GREATER_THEN_MAX_ALLOCATION, e.getInvalidResourceType());
  }
  // more than max vcores
  try {
    Resource resource = Resources.createResource(
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES + 1);
    ResourceRequest resReq =
        BuilderUtils.newResourceRequest(mock(Priority.class),
            ResourceRequest.ANY, resource, 1);
    normalizeAndvalidateRequest(resReq, null,
        mockScheduler, rmContext, maxResource);
    fail("More than max vcores should not be accepted");
  } catch (InvalidResourceRequestException e) {
    assertEquals(GREATER_THEN_MAX_ALLOCATION, e.getInvalidResourceType());
  }
}
/**
 * Verifies that an {@code AllocateRequest} which blacklists
 * {@code ResourceRequest.ANY} is rejected by the RM with an
 * {@code InvalidResourceBlacklistRequestException}.
 */
@Test
public void testValidateResourceBlacklistRequest() throws Exception {
  MyContainerManager containerManager = new MyContainerManager();
  final MockRMWithAMS rm =
      new MockRMWithAMS(new YarnConfiguration(), containerManager);
  rm.start();
  boolean error = false;
  try {
    MockNM nm1 = rm.registerNode("localhost:1234", 5120);
    Map<ApplicationAccessType, String> acls =
        new HashMap<ApplicationAccessType, String>(2);
    acls.put(ApplicationAccessType.VIEW_APP, "*");
    RMApp app = rm.submitApp(1024, "appname", "appuser", acls);
    nm1.nodeHeartbeat(true);
    RMAppAttempt attempt = app.getCurrentAppAttempt();
    ApplicationAttemptId applicationAttemptId = attempt.getAppAttemptId();
    waitForLaunchedState(attempt);
    // Create a client to the RM.
    final Configuration yarnConf = rm.getConfig();
    final YarnRPC rpc = YarnRPC.create(yarnConf);
    UserGroupInformation currentUser = UserGroupInformation
        .createRemoteUser(applicationAttemptId.toString());
    Credentials credentials = containerManager.getContainerCredentials();
    final InetSocketAddress rmBindAddress =
        rm.getApplicationMasterService().getBindAddress();
    Token<? extends TokenIdentifier> amRMToken =
        MockRMWithAMS.setupAndReturnAMRMToken(rmBindAddress,
            credentials.getAllTokens());
    currentUser.addToken(amRMToken);
    ApplicationMasterProtocol client =
        currentUser.doAs(new PrivilegedAction<ApplicationMasterProtocol>() {
          @Override
          public ApplicationMasterProtocol run() {
            return (ApplicationMasterProtocol) rpc.getProxy(
                ApplicationMasterProtocol.class, rmBindAddress, yarnConf);
          }
        });
    RegisterApplicationMasterRequest request = Records
        .newRecord(RegisterApplicationMasterRequest.class);
    client.registerApplicationMaster(request);
    // Blacklisting the ANY resource name is invalid by definition.
    ResourceBlacklistRequest blacklistRequest =
        ResourceBlacklistRequest.newInstance(
            Collections.singletonList(ResourceRequest.ANY), null);
    AllocateRequest allocateRequest =
        AllocateRequest.newInstance(0, 0.0f, null, null, blacklistRequest);
    try {
      client.allocate(allocateRequest);
    } catch (InvalidResourceBlacklistRequestException e) {
      error = true;
    }
  } finally {
    // Stop the RM even if an unexpected exception escapes, so later tests
    // do not inherit a running mini-cluster.
    rm.stop();
  }
  Assert.assertTrue(
      "Did not catch InvalidResourceBlacklistRequestException", error);
}
/**
 * Polls for up to ~20 seconds until the given attempt reaches the LAUNCHED
 * state, then asserts that it got there.
 *
 * @param attempt the app attempt to watch
 * @throws InterruptedException if the polling sleep is interrupted
 */
private void waitForLaunchedState(RMAppAttempt attempt)
    throws InterruptedException {
  int waitCount = 0;
  while (attempt.getAppAttemptState() != RMAppAttemptState.LAUNCHED
      && waitCount++ < 20) {
    LOG.info("Waiting for AppAttempt to reach LAUNCHED state. "
        + "Current state is " + attempt.getAppAttemptState());
    Thread.sleep(1000);
  }
  // JUnit's assertEquals takes (expected, actual); the original had them
  // swapped, which produces a misleading failure message.
  Assert.assertEquals(RMAppAttemptState.LAUNCHED,
      attempt.getAppAttemptState());
}
/**
 * A numerically smaller priority value is the higher priority, so
 * {@code compareTo} orders it after the numerically larger one.
 */
@Test
public void testComparePriorities() {
  Priority higherPriority = Priority.newInstance(1);
  Priority lowerPriority = Priority.newInstance(2);
  assertTrue(higherPriority.compareTo(lowerPriority) > 0);
}
/**
 * A status built by {@code createAbnormalContainerStatus} must carry the
 * ABORTED exit status.
 */
@Test
public void testCreateAbnormalContainerStatus() {
  ApplicationId appId =
      ApplicationId.newInstance(System.currentTimeMillis(), 1);
  ContainerId containerId =
      ContainerId.newContainerId(ApplicationAttemptId.newInstance(appId, 1), 1);
  ContainerStatus status =
      SchedulerUtils.createAbnormalContainerStatus(containerId, "x");
  Assert.assertEquals(ContainerExitStatus.ABORTED, status.getExitStatus());
}
/**
 * A status built by {@code createPreemptedContainerStatus} must carry the
 * PREEMPTED exit status.
 */
@Test
public void testCreatePreemptedContainerStatus() {
  ApplicationId appId =
      ApplicationId.newInstance(System.currentTimeMillis(), 1);
  ContainerId containerId =
      ContainerId.newContainerId(ApplicationAttemptId.newInstance(appId, 1), 1);
  ContainerStatus status =
      SchedulerUtils.createPreemptedContainerStatus(containerId, "x");
  Assert.assertEquals(ContainerExitStatus.PREEMPTED, status.getExitStatus());
}
/**
 * Verifies that label expressions are trimmed during normalization: a
 * request with no expression picks up the queue's default (" x " -> "x"),
 * and an explicitly set " y " is trimmed to "y".
 */
@Test(timeout = 30000)
public void testNormalizeNodeLabelExpression()
    throws IOException {
  // mock queue and scheduler
  ResourceScheduler scheduler = mock(ResourceScheduler.class);
  Set<String> queueAccessibleNodeLabels = Sets.newHashSet();
  QueueInfo queueInfo = mock(QueueInfo.class);
  when(queueInfo.getQueueName()).thenReturn("queue");
  when(queueInfo.getAccessibleNodeLabels()).thenReturn(queueAccessibleNodeLabels);
  // queue default expression deliberately has surrounding whitespace
  when(queueInfo.getDefaultNodeLabelExpression()).thenReturn(" x ");
  when(scheduler.getQueueInfo(any(String.class), anyBoolean(), anyBoolean()))
      .thenReturn(queueInfo);
  Resource maxResource = Resources.createResource(
      YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
      YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
  when(rmContext.getScheduler()).thenReturn(scheduler);
  // queue has labels, success cases
  try {
    // set queue accessible node labels to [x, y]
    queueAccessibleNodeLabels.clear();
    queueAccessibleNodeLabels.addAll(Arrays.asList("x", "y"));
    rmContext.getNodeLabelManager().addToCluserNodeLabels(
        ImmutableSet.of(NodeLabel.newInstance("x"),
            NodeLabel.newInstance("y")));
    Resource resource = Resources.createResource(
        0,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    ResourceRequest resReq = BuilderUtils.newResourceRequest(
        mock(Priority.class), ResourceRequest.ANY, resource, 1);
    // no expression set: the queue default is applied and trimmed
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    Assert.assertEquals("x", resReq.getNodeLabelExpression());
    // explicit expression with whitespace is trimmed as well
    resReq.setNodeLabelExpression(" y ");
    normalizeAndvalidateRequest(resReq, "queue",
        scheduler, rmContext, maxResource);
    Assert.assertEquals("y", resReq.getNodeLabelExpression());
  } catch (InvalidResourceRequestException e) {
    e.printStackTrace();
    fail("Should be valid when request labels is a subset of queue labels");
  } finally {
    rmContext.getNodeLabelManager().removeFromClusterNodeLabels(
        Arrays.asList("x", "y"));
  }
}
/**
 * Requesting 11 (unitless) of custom-resource-1 against an available "0G"
 * must be rejected as exceeding the maximum allocation.
 */
@Test
public void testCustomResourceRequestedUnitIsSmallerThanAvailableUnit()
    throws InvalidResourceRequestException {
  Resource requested = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "11"));
  Resource available = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "0G"));
  exception.expect(InvalidResourceRequestException.class);
  exception.expectMessage(InvalidResourceRequestExceptionMessageGenerator
      .create()
      .withRequestedResourceType("custom-resource-1")
      .withRequestedResource(requested)
      .withAvailableAllocation(available)
      .withInvalidResourceType(GREATER_THEN_MAX_ALLOCATION)
      .withMaxAllocation(configuredMaxAllocation)
      .build());
  SchedulerUtils.checkResourceRequestAgainstAvailableResource(requested,
      available);
}
/**
 * 11 (unitless) of custom-resource-1 fits within an available "1G", so the
 * check must pass without throwing.
 */
@Test
public void testCustomResourceRequestedUnitIsSmallerThanAvailableUnit2() {
  Resource requested = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "11"));
  Resource available = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "1G"));
  try {
    SchedulerUtils.checkResourceRequestAgainstAvailableResource(requested,
        available);
  } catch (InvalidResourceRequestException e) {
    fail(String.format(
        "Resource request should be accepted. Requested: %s, available: %s",
        requested, available));
  }
}
/**
 * Requesting "1M" of custom-resource-1 exceeds the available "120k", so the
 * check must fail with GREATER_THEN_MAX_ALLOCATION.
 */
@Test
public void testCustomResourceRequestedUnitIsGreaterThanAvailableUnit()
    throws InvalidResourceRequestException {
  Resource requested = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "1M"));
  Resource available = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "120k"));
  exception.expect(InvalidResourceRequestException.class);
  exception.expectMessage(InvalidResourceRequestExceptionMessageGenerator
      .create()
      .withRequestedResourceType("custom-resource-1")
      .withRequestedResource(requested)
      .withAvailableAllocation(available)
      .withInvalidResourceType(GREATER_THEN_MAX_ALLOCATION)
      .withMaxAllocation(configuredMaxAllocation)
      .build());
  SchedulerUtils.checkResourceRequestAgainstAvailableResource(requested,
      available);
}
/**
 * "11M" of custom-resource-1 fits within an available "1G", so the check
 * must pass without throwing.
 */
@Test
public void testCustomResourceRequestedUnitIsGreaterThanAvailableUnit2() {
  Resource requested = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "11M"));
  Resource available = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "1G"));
  try {
    SchedulerUtils.checkResourceRequestAgainstAvailableResource(requested,
        available);
  } catch (InvalidResourceRequestException e) {
    fail(String.format(
        "Resource request should be accepted. Requested: %s, available: %s",
        requested, available));
  }
}
/**
 * With matching units, "11M" requested against "100M" available must be
 * accepted.
 */
@Test
public void testCustomResourceRequestedUnitIsSameAsAvailableUnit() {
  Resource requested = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "11M"));
  Resource available = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "100M"));
  try {
    SchedulerUtils.checkResourceRequestAgainstAvailableResource(requested,
        available);
  } catch (InvalidResourceRequestException e) {
    fail(String.format(
        "Resource request should be accepted. Requested: %s, available: %s",
        requested, available));
  }
}
/**
 * With matching units, "110M" requested against "100M" available must be
 * rejected with GREATER_THEN_MAX_ALLOCATION.
 */
@Test
public void testCustomResourceRequestedUnitIsSameAsAvailableUnit2()
    throws InvalidResourceRequestException {
  Resource requested = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "110M"));
  Resource available = ResourceTypesTestHelper.newResource(1, 1,
      ImmutableMap.of("custom-resource-1", "100M"));
  exception.expect(InvalidResourceRequestException.class);
  exception.expectMessage(InvalidResourceRequestExceptionMessageGenerator
      .create()
      .withRequestedResourceType("custom-resource-1")
      .withRequestedResource(requested)
      .withAvailableAllocation(available)
      .withInvalidResourceType(GREATER_THEN_MAX_ALLOCATION)
      .withMaxAllocation(configuredMaxAllocation)
      .build());
  SchedulerUtils.checkResourceRequestAgainstAvailableResource(requested,
      available);
}
/**
 * Polls until the scheduler-side attempt for {@code attemptId} reports
 * stopped, or gives up after roughly five seconds (100 polls of 50ms).
 * On timeout this only logs to stderr; it never throws.
 *
 * @param ys scheduler to look the attempt up in
 * @param attemptId attempt to wait for; a missing attempt returns at once
 * @throws InterruptedException if the polling sleep is interrupted
 */
public static void waitSchedulerApplicationAttemptStopped(
    AbstractYarnScheduler ys,
    ApplicationAttemptId attemptId) throws InterruptedException {
  SchedulerApplicationAttempt attempt = ys.getApplicationAttempt(attemptId);
  if (attempt == null) {
    return;
  }
  // Wait at most 5 secs to make sure SchedulerApplicationAttempt stopped
  for (int tick = 0; tick < 100; tick++) {
    if (attempt.isStopped()) {
      return;
    }
    Thread.sleep(50);
  }
  // Only print, don't throw exception
  System.err.println("Failed to wait scheduler application attempt stopped.");
}
/**
 * Fires an APP_ADDED event for a fresh application id, checks the app
 * appears in the scheduler's application map under user "user", then fires
 * APP_REMOVED and checks it is gone again.
 *
 * @return the (now removed) scheduler application
 */
public static SchedulerApplication<SchedulerApplicationAttempt>
    verifyAppAddedAndRemovedFromScheduler(
        Map<ApplicationId, SchedulerApplication<SchedulerApplicationAttempt>> applications,
        EventHandler<SchedulerEvent> handler, String queueName) {
  ApplicationId appId =
      ApplicationId.newInstance(System.currentTimeMillis(), 1);
  handler.handle(new AppAddedSchedulerEvent(appId, queueName, "user"));
  SchedulerApplication<SchedulerApplicationAttempt> application =
      applications.get(appId);
  // verify application is added.
  Assert.assertNotNull(application);
  Assert.assertEquals("user", application.getUser());
  handler.handle(new AppRemovedSchedulerEvent(appId, RMAppState.FINISHED));
  Assert.assertNull(applications.get(appId));
  return application;
}
/**
 * Builds a mocked RMContext wired to a {@link NullRMNodeLabelsManager} and
 * the shared test configuration, with node labels switched on.
 */
private static RMContext getMockRMContext() {
  RMNodeLabelsManager labelManager = new NullRMNodeLabelsManager();
  labelManager.init(new Configuration(false));
  RMContext context = mock(RMContext.class);
  when(context.getYarnConfiguration()).thenReturn(conf);
  when(context.getNodeLabelManager()).thenReturn(labelManager);
  // enable node labels on the shared configuration returned above
  context.getYarnConfiguration().set(YarnConfiguration.NODE_LABELS_ENABLED,
      "true");
  return context;
}
/**
 * Convenience wrapper around
 * {@code SchedulerUtils.normalizeAndValidateRequest} used throughout this
 * test class; forwards all arguments and passes null for the final
 * parameter (presumably an optional queue-info/label argument — confirm
 * against SchedulerUtils).
 *
 * @throws InvalidResourceRequestException if the request fails validation
 */
private static void normalizeAndvalidateRequest(ResourceRequest resReq,
    String queueName, YarnScheduler scheduler, RMContext rmContext,
    Resource maxAllocation)
    throws InvalidResourceRequestException {
  SchedulerUtils.normalizeAndValidateRequest(resReq, maxAllocation, queueName,
      scheduler, rmContext, null);
}
/**
 * Test-only builder that reconstructs the exact message text produced for
 * invalid resource requests, so tests can assert on the full message.
 * NOTE(review): build() appends into the shared StringBuilder, so each
 * generator instance should build at most one message.
 */
private static class InvalidResourceRequestExceptionMessageGenerator {
  // accumulator for the final message; mutated by build()
  private StringBuilder sb;
  private Resource requestedResource;
  private Resource availableAllocation;
  private Resource configuredMaxAllowedAllocation;
  private String resourceType;
  private InvalidResourceType invalidResourceType;
  InvalidResourceRequestExceptionMessageGenerator(StringBuilder sb) {
    this.sb = sb;
  }
  /** Factory: starts a generator with an empty buffer. */
  public static InvalidResourceRequestExceptionMessageGenerator create() {
    return new InvalidResourceRequestExceptionMessageGenerator(
        new StringBuilder());
  }
  InvalidResourceRequestExceptionMessageGenerator withRequestedResource(
      Resource r) {
    this.requestedResource = r;
    return this;
  }
  InvalidResourceRequestExceptionMessageGenerator withRequestedResourceType(
      String rt) {
    this.resourceType = rt;
    return this;
  }
  InvalidResourceRequestExceptionMessageGenerator withAvailableAllocation(
      Resource r) {
    this.availableAllocation = r;
    return this;
  }
  InvalidResourceRequestExceptionMessageGenerator withMaxAllocation(
      Resource r) {
    this.configuredMaxAllowedAllocation = r;
    return this;
  }
  InvalidResourceRequestExceptionMessageGenerator
      withInvalidResourceType(InvalidResourceType invalidResourceType) {
    this.invalidResourceType = invalidResourceType;
    return this;
  }
  /**
   * Renders the message for the configured InvalidResourceType; throws
   * IllegalStateException for any type other than LESS_THAN_ZERO or
   * GREATER_THEN_MAX_ALLOCATION.
   */
  public String build() {
    if (invalidResourceType == LESS_THAN_ZERO) {
      return sb.append("Invalid resource request! " +
          "Cannot allocate containers as " +
          "requested resource is less than 0! ")
          .append("Requested resource type=[")
          .append(resourceType).append("]")
          .append(", Requested resource=")
          .append(requestedResource).toString();
    } else if (invalidResourceType == GREATER_THEN_MAX_ALLOCATION) {
      return sb.append("Invalid resource request! " +
          "Cannot allocate containers as "
          + "requested resource is greater than " +
          "maximum allowed allocation. ")
          .append("Requested resource type=[").append(resourceType)
          .append("], ")
          .append("Requested resource=").append(requestedResource)
          .append(", maximum allowed allocation=")
          .append(availableAllocation)
          .append(", please note that maximum allowed allocation is " +
              "calculated by scheduler based on maximum resource " +
              "of registered NodeManagers, which might be less " +
              "than configured maximum allocation=")
          .append(configuredMaxAllowedAllocation)
          .toString();
    }
    throw new IllegalStateException("Wrong type of InvalidResourceType is " +
        "detected!");
  }
}
}
| |
/**
* This example gets the DPU associated with the stream given on the command
* line. It presents it in a nice ASCII table.
*/
package org.datasift.examples;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.HashMap;
import org.datasift.Config;
import org.datasift.DPU;
import org.datasift.DPUItem;
import org.datasift.Definition;
import org.datasift.EAPIError;
import org.datasift.EAccessDenied;
import org.datasift.ECompileFailed;
import org.datasift.EInvalidData;
import org.datasift.User;
/**
* @author MediaSift
* @version 0.1
*/
public class DPUBreakdown {

    /**
     * Entry point: reads a CSDL definition from {@code csdl.txt}, compiles it
     * through the DataSift API and prints the resulting DPU (Data Processing
     * Unit) cost breakdown as an ASCII table, one row per target plus a total
     * row. API and I/O errors are reported on stdout instead of being thrown.
     *
     * @param args unused; the CSDL is always read from "csdl.txt"
     */
    public static void main(String[] args) {
        try {
            String csdl = DPUBreakdown.readFileAsString("csdl.txt");

            // Authenticate
            System.out.println("Creating user...");
            User user = new User(Config.username, Config.api_key);

            // Create the definition
            System.out.println("Creating definition...");
            System.out.println(" " + csdl);
            Definition def = user.createDefinition(csdl);

            // Get the cost
            DPU c = def.getDPUBreakdown();
            HashMap<String, DPUItem> dpus = c.getDPU();

            // Rows of the table, in display order; column widths grow as rows
            // are collected so every cell lines up.
            ArrayList<TableRow> dputable = new ArrayList<TableRow>();
            int maxlenTarget = "Target".length();
            int maxlenTimesUsed = "Times used".length();
            int maxlenComplexity = "Complexity".length();

            // The total row is rendered separately after the item rows.
            TableRow totalRow = new TableRow("Total", 0, c.getTotal());
            maxlenTarget = Math.max(maxlenTarget, totalRow.getTargetLength());
            maxlenComplexity = Math.max(maxlenComplexity, totalRow.getComplexityLength());

            // One row per DPU item, plus an indented row per nested target.
            for (String key : dpus.keySet()) {
                DPUItem item = dpus.get(key);
                TableRow row = new TableRow(key, item.getCount(), item.getDPU());
                maxlenTarget = Math.max(maxlenTarget, row.getTargetLength());
                maxlenTimesUsed = Math.max(maxlenTimesUsed, row.getTimesUsedLength());
                maxlenComplexity = Math.max(maxlenComplexity, row.getComplexityLength());
                dputable.add(row);
                if (item.hasTargets()) {
                    HashMap<String, DPUItem> targets = item.getTargets();
                    for (String target : targets.keySet()) {
                        DPUItem sub = targets.get(target);
                        TableRow subRow = new TableRow(" " + target,
                                sub.getCount(), sub.getDPU());
                        maxlenTarget = Math.max(maxlenTarget, subRow.getTargetLength());
                        maxlenTimesUsed = Math.max(maxlenTimesUsed, subRow.getTimesUsedLength());
                        maxlenComplexity = Math.max(maxlenComplexity, subRow.getComplexityLength());
                        dputable.add(subRow);
                    }
                }
            }

            System.out.println();

            // Top border
            printBorder("/-", "---", "-\\", maxlenTarget, maxlenTimesUsed, maxlenComplexity);

            // Header row
            System.out.print("| ");
            System.out.print(String.format("%1$-" + maxlenTarget + "s",
                    "Target"));
            System.out.print(" | ");
            System.out.print(String.format("%1$-" + maxlenTimesUsed + "s",
                    "Times used"));
            System.out.print(" | ");
            System.out.print(String.format("%1$-" + maxlenComplexity + "s",
                    "Complexity"));
            System.out.println(" |");

            // Header bottom border
            printBorder("|-", "-+-", "-|", maxlenTarget, maxlenTimesUsed, maxlenComplexity);

            // Data rows
            for (TableRow row : dputable) {
                System.out.print("| ");
                System.out.print(row.getTarget(maxlenTarget));
                System.out.print(" | ");
                System.out.print(row.getTimesUsed(maxlenTimesUsed));
                System.out.print(" | ");
                System.out.print(row.getComplexity(maxlenComplexity));
                System.out.println(" |");
            }

            // Total top border
            printBorder("|-", "-+-", "-|", maxlenTarget, maxlenTimesUsed, maxlenComplexity);

            // Total row: the label is right-aligned across the first two columns.
            System.out.print("| ");
            System.out.print(String.format("%1$"
                    + (maxlenTarget + 3 + maxlenTimesUsed) + "s",
                    totalRow.getTarget(totalRow.getTargetLength())));
            System.out.print(" = ");
            System.out.print(totalRow.getComplexity(maxlenComplexity));
            System.out.println(" |");

            // Bottom border
            printBorder("\\-", "---", "-/", maxlenTarget, maxlenTimesUsed, maxlenComplexity);
            System.out.println();
        } catch (EInvalidData e) {
            System.out.print("InvalidData: ");
            System.out.println(e.getMessage());
        } catch (ECompileFailed e) {
            System.out.print("CompileFailed: ");
            System.out.println(e.getMessage());
        } catch (EAccessDenied e) {
            System.out.print("AccessDenied: ");
            System.out.println(e.getMessage());
        } catch (EAPIError e) {
            System.out.print("APIError: ");
            System.out.println(e.getMessage());
            e.printStackTrace();
        } catch (IOException e) {
            System.out.print("IOException: ");
            System.out.println(e.getMessage());
        }
    }

    /**
     * Prints one horizontal border line of the table: {@code left}, a run of
     * dashes as wide as each column, {@code mid} between columns, then
     * {@code right} followed by a newline.
     */
    private static void printBorder(String left, String mid, String right,
                                    int targetWidth, int timesUsedWidth, int complexityWidth) {
        System.out.print(left);
        System.out.print(repeatString("-", targetWidth));
        System.out.print(mid);
        System.out.print(repeatString("-", timesUsedWidth));
        System.out.print(mid);
        System.out.print(repeatString("-", complexityWidth));
        System.out.println(right);
    }

    /**
     * Reads the whole file into a String using the platform default charset.
     *
     * @param filePath path of the file to read
     * @return the file content
     * @throws java.io.IOException if the file cannot be opened or read
     */
    private static String readFileAsString(String filePath)
            throws java.io.IOException {
        byte[] buffer = new byte[(int) new File(filePath).length()];
        BufferedInputStream in = null;
        try {
            in = new BufferedInputStream(new FileInputStream(filePath));
            // Fix: a single read() call is not guaranteed to fill the buffer,
            // so loop until the buffer is full or EOF is reached.
            int offset = 0;
            while (offset < buffer.length) {
                int read = in.read(buffer, offset, buffer.length - offset);
                if (read < 0) {
                    break; // EOF earlier than expected (file shrank since length())
                }
                offset += read;
            }
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // best-effort close; the content has already been read
                }
            }
        }
        return new String(buffer);
    }

    /**
     * Returns {@code s} repeated {@code reps} times; an empty string when
     * {@code reps} is negative, or {@code null} when {@code s} is null.
     */
    private static String repeatString(String s, int reps) {
        if (reps < 0) {
            return "";
        }
        if (s == null) {
            return null;
        }
        StringBuilder stringBuilder = new StringBuilder(s.length() * reps);
        for (int i = 0; i < reps; i++) {
            stringBuilder.append(s);
        }
        return stringBuilder.toString();
    }

    /**
     * One row of the DPU table: a target name, how many times it is used and
     * its DPU complexity, with helpers to format each cell at a given width.
     */
    public static class TableRow {
        private String _target = "";
        private int _timesused = 0;
        private double _complexity = 0;
        // Formatters: integer grouping for the count, two decimals for the complexity.
        private NumberFormat _f = null;
        private NumberFormat _fc = null;

        public TableRow(String target, int timesused, double complexity) {
            _target = target;
            _timesused = timesused;
            _complexity = complexity;
            _f = new DecimalFormat("#,###,###");
            _fc = new DecimalFormat("#,###,###.##");
        }

        /** Display width of the target cell. */
        public int getTargetLength() {
            return _target.length();
        }

        /** Target name left-padded to {@code width} characters. */
        public String getTarget(int width) {
            return String.format("%1$-" + width + "s", _target);
        }

        /** Display width of the formatted times-used cell. */
        public int getTimesUsedLength() {
            return _f.format(_timesused).length();
        }

        /** Times-used count right-aligned to {@code width} characters. */
        public String getTimesUsed(int width) {
            return String.format("%1$" + width + "s", _f.format(_timesused));
        }

        /** Display width of the formatted complexity cell. */
        public int getComplexityLength() {
            return _fc.format(_complexity).length();
        }

        /** Complexity right-aligned to {@code width} characters. */
        public String getComplexity(int width) {
            return String.format("%1$" + width + "s", _fc.format(_complexity));
        }
    }
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2018 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.pscanrulesBeta;
import java.util.ArrayList;
import java.util.List;
import net.htmlparser.jericho.Element;
import net.htmlparser.jericho.HTMLElementName;
import net.htmlparser.jericho.Source;
import org.apache.commons.httpclient.URI;
import org.apache.commons.httpclient.URIException;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.core.scanner.Alert;
import org.parosproxy.paros.core.scanner.Plugin;
import org.parosproxy.paros.core.scanner.Plugin.AlertThreshold;
import org.parosproxy.paros.model.Model;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.zap.extension.pscan.PassiveScanThread;
import org.zaproxy.zap.extension.pscan.PluginPassiveScanner;
import org.zaproxy.zap.model.Context;
/**
 * Passive scan rule that reports links to other domains which specify a
 * {@code target} attribute (typically {@code target="_blank"}) without
 * {@code rel="noopener noreferrer"}. Such links hand the opened page a
 * {@code window.opener} reference back to this page (reverse tabnabbing).
 */
public class LinkTargetScanner extends PluginPassiveScanner {

    // TODO Replace "rules.domains.trusted" with RuleConfigParam.RULE_DOMAINS_TRUSTED once
    // available.
    public static final String TRUSTED_DOMAINS_PROPERTY = "rules.domains.trusted";

    private static final String MESSAGE_PREFIX = "pscanbeta.linktarget.";
    private static final String REL_ATTRIBUTE = "rel";
    private static final String TARGET_ATTRIBUTE = "target";
    private static final String _BLANK = "_blank";
    private static final String NOOPENER = "noopener";
    private static final String NOREFERRER = "noreferrer";

    // Raw value of TRUSTED_DOMAINS_PROPERTY last parsed; used to detect config changes.
    private String trustedConfig = "";
    // Regexes parsed from the trusted-domains config; links matching any are ignored.
    private List<String> trustedDomainRegexes = new ArrayList<String>();
    private PassiveScanThread parent = null;
    private Model model = null;

    // Fix: was Logger.getLogger(PluginPassiveScanner.class), which misattributed this
    // rule's log messages to the base class.
    private static final Logger LOG = Logger.getLogger(LinkTargetScanner.class);

    @Override
    public void setParent(PassiveScanThread parent) {
        this.parent = parent;
    }

    /** Request-side scanning is not needed for this rule. */
    @Override
    public void scanHttpRequestSend(HttpMessage msg, int id) {}

    @Override
    public int getPluginId() {
        return 10108;
    }

    private Model getModel() {
        if (this.model == null) {
            this.model = Model.getSingleton();
        }
        return this.model;
    }

    /*
     * Just for use in the unit tests
     */
    protected void setModel(Model model) {
        this.model = model;
    }

    /**
     * Returns true if {@code link} points at a different domain than {@code host}
     * and is neither inside one of the given contexts (skipped at LOW threshold)
     * nor matched by a trusted-domain regex.
     */
    private boolean isLinkFromOtherDomain(String host, String link, List<Context> contextList) {
        // Relative links (but not protocol-relative "//host/..." ones) stay on this domain.
        if (link == null
                || !link.startsWith("//")
                        && (link.startsWith("/")
                                || link.startsWith("./")
                                || link.startsWith("../"))) {
            return false;
        }
        boolean otherDomain = false;
        try {
            URI linkURI = new URI(link, true);
            String linkURIStr = linkURI.toString();
            String linkHost = linkURI.getHost();
            if (linkHost != null && !linkHost.toLowerCase().equals(host.toLowerCase())) {
                otherDomain = true;
            }
            if (otherDomain && !Plugin.AlertThreshold.LOW.equals(this.getAlertThreshold())) {
                // Get a list of contexts that contain the original URL
                for (Context context : contextList) {
                    if (context.isInContext(linkURIStr)) {
                        // The linkURI is in a context that the original URI is in
                        return false; // No need to loop further
                    }
                }
            }
        } catch (URIException e) {
            // Ignore: an unparsable link cannot be classified, treat it as same-domain.
        }
        if (otherDomain) {
            // check the trusted domains
            for (String regex : this.trustedDomainRegexes) {
                try {
                    if (link.matches(regex)) {
                        return false;
                    }
                } catch (Exception e) {
                    LOG.warn("Invalid regex in rule " + TRUSTED_DOMAINS_PROPERTY + ": " + regex, e);
                }
            }
        }
        return otherDomain;
    }

    /** Re-parses the trusted-domain regexes if the config property has changed. */
    private void checkIgnoreList() {
        String trustedConf = getConfig().getString(TRUSTED_DOMAINS_PROPERTY, "");
        if (!trustedConf.equals(this.trustedConfig)) {
            // Its changed
            trustedDomainRegexes.clear();
            this.trustedConfig = trustedConf;
            for (String regex : trustedConf.split(",")) {
                String regexTrim = regex.trim();
                if (regexTrim.length() > 0) {
                    trustedDomainRegexes.add(regexTrim);
                }
            }
        }
    }

    /**
     * Raises an alert for the given element if it has a target attribute without
     * rel="noopener noreferrer". At HIGH threshold only target="_blank" is
     * considered. Returns true if an alert was raised.
     */
    private boolean checkElement(Element link, HttpMessage msg, int id) {
        // get target, check if its _blank
        String target = link.getAttributeValue(TARGET_ATTRIBUTE);
        if (target != null) {
            if (AlertThreshold.HIGH.equals(this.getAlertThreshold())
                    && !_BLANK.equalsIgnoreCase(target)) {
                // Only report _blank link targets at a high threshold
                return false;
            }
            // Not looking good,
            String relAtt = link.getAttributeValue(REL_ATTRIBUTE);
            if (relAtt != null) {
                relAtt = relAtt.toLowerCase();
                if (relAtt.contains(NOOPENER) && relAtt.contains(NOREFERRER)) {
                    // Its ok
                    return false;
                }
            }
            // Its bad
            Alert alert =
                    new Alert(getPluginId(), Alert.RISK_MEDIUM, Alert.CONFIDENCE_MEDIUM, getName());
            alert.setDetail(
                    getDescription(),
                    msg.getRequestHeader().getURI().toString(),
                    "", // Param
                    "", // Attack
                    "", // Other info
                    getSolution(),
                    getReference(),
                    link.toString(), // Evidence
                    0, // CWE Id
                    0, // WASC Id
                    msg);
            parent.raiseAlert(id, alert);
            return true;
        }
        return false;
    }

    @Override
    public void scanHttpResponseReceive(HttpMessage msg, int id, Source source) {
        if (msg.getResponseBody().length() == 0 || !msg.getResponseHeader().isHtml()) {
            // No point attempting to parse non-HTML content, it will not be correctly interpreted.
            return;
        }
        // Check to see if the configs have changed
        checkIgnoreList();
        String host = msg.getRequestHeader().getHostName();
        List<Context> contextList =
                getModel()
                        .getSession()
                        .getContextsForUrl(msg.getRequestHeader().getURI().toString());
        // Both <a> and <area> elements can carry href/target; stop at the first alert.
        if (!scanElements(source.getAllElements(HTMLElementName.A), host, contextList, msg, id)) {
            scanElements(source.getAllElements(HTMLElementName.AREA), host, contextList, msg, id);
        }
    }

    /**
     * Scans the given elements for cross-domain links with an unsafe target
     * attribute; returns true as soon as one alert has been raised.
     */
    private boolean scanElements(
            List<Element> elements, String host, List<Context> contextList, HttpMessage msg, int id) {
        for (Element link : elements) {
            if (this.isLinkFromOtherDomain(host, link.getAttributeValue("href"), contextList)
                    && this.checkElement(link, msg, id)) {
                return true;
            }
        }
        return false;
    }

    @Override
    public String getName() {
        return Constant.messages.getString(MESSAGE_PREFIX + "name");
    }

    private String getDescription() {
        return Constant.messages.getString(MESSAGE_PREFIX + "desc");
    }

    private String getSolution() {
        return Constant.messages.getString(MESSAGE_PREFIX + "soln");
    }

    private String getReference() {
        return Constant.messages.getString(MESSAGE_PREFIX + "refs");
    }
}
| |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.jps.incremental;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.LowMemoryWatcher;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.Function;
import com.intellij.util.SmartList;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.containers.Predicate;
import com.intellij.util.io.MappingFailedException;
import com.intellij.util.io.PersistentEnumerator;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.ModuleChunk;
import org.jetbrains.jps.TimingLog;
import org.jetbrains.jps.api.CanceledStatus;
import org.jetbrains.jps.api.GlobalOptions;
import org.jetbrains.jps.builders.*;
import org.jetbrains.jps.builders.impl.BuildOutputConsumerImpl;
import org.jetbrains.jps.builders.impl.BuildTargetChunk;
import org.jetbrains.jps.builders.impl.DirtyFilesHolderBase;
import org.jetbrains.jps.builders.java.JavaBuilderExtension;
import org.jetbrains.jps.builders.java.JavaBuilderUtil;
import org.jetbrains.jps.builders.java.JavaSourceRootDescriptor;
import org.jetbrains.jps.builders.java.dependencyView.Callbacks;
import org.jetbrains.jps.builders.logging.ProjectBuilderLogger;
import org.jetbrains.jps.builders.storage.BuildDataCorruptedException;
import org.jetbrains.jps.builders.storage.SourceToOutputMapping;
import org.jetbrains.jps.cmdline.BuildRunner;
import org.jetbrains.jps.cmdline.ProjectDescriptor;
import org.jetbrains.jps.incremental.fs.BuildFSState;
import org.jetbrains.jps.incremental.fs.CompilationRound;
import org.jetbrains.jps.incremental.fs.FilesDelta;
import org.jetbrains.jps.incremental.messages.*;
import org.jetbrains.jps.incremental.storage.BuildTargetConfiguration;
import org.jetbrains.jps.incremental.storage.OneToManyPathsMapping;
import org.jetbrains.jps.incremental.storage.OutputToTargetRegistry;
import org.jetbrains.jps.incremental.storage.SourceToOutputMappingImpl;
import org.jetbrains.jps.indices.ModuleExcludeIndex;
import org.jetbrains.jps.javac.ExternalJavacManager;
import org.jetbrains.jps.javac.JavacMain;
import org.jetbrains.jps.model.java.JpsJavaExtensionService;
import org.jetbrains.jps.model.java.compiler.JpsJavaCompilerConfiguration;
import org.jetbrains.jps.model.module.JpsModule;
import org.jetbrains.jps.service.JpsServiceManager;
import org.jetbrains.jps.service.SharedThreadPool;
import org.jetbrains.jps.util.JpsPathUtil;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
/**
* @author Eugene Zhuravlev
*/
public class IncProjectBuilder {
  private static final Logger LOG = Logger.getInstance("#org.jetbrains.jps.incremental.IncProjectBuilder");

  // Class-loader classpath index files with this name are deleted from changed output roots
  // (see the BuildListener registered in runBuild()).
  private static final String CLASSPATH_INDEX_FILE_NAME = "classpath.index";
  //private static final boolean GENERATE_CLASSPATH_INDEX = Boolean.parseBoolean(System.getProperty(GlobalOptions.GENERATE_CLASSPATH_INDEX_OPTION, "false"));
  // Controlled by the "jps.sync.delete" system property; presumably switches output deletion
  // to a synchronous mode — TODO confirm where this flag is consumed.
  private static final boolean SYNC_DELETE = Boolean.parseBoolean(System.getProperty("jps.sync.delete", "false"));
  // Context key holding the set of targets whose output has already been cleared in this build.
  private static final GlobalContextKey<Set<BuildTarget<?>>> TARGET_WITH_CLEARED_OUTPUT = GlobalContextKey.create("_targets_with_cleared_output_");

  // Upper bound on the number of builder threads used for parallel compilation.
  public static final int MAX_BUILDER_THREADS;
  static {
    // Default: at most 6 threads while leaving one core free; can be overridden via the
    // COMPILE_PARALLEL_MAX_THREADS_OPTION system property. Always at least 1.
    int maxThreads = Math.min(6, Runtime.getRuntime().availableProcessors() - 1);
    try {
      maxThreads = Math.max(1, Integer.parseInt(System.getProperty(GlobalOptions.COMPILE_PARALLEL_MAX_THREADS_OPTION, Integer.toString(maxThreads))));
    }
    catch (NumberFormatException ignored) {
      // malformed override: fall back to the computed default
      maxThreads = Math.max(1, maxThreads);
    }
    MAX_BUILDER_THREADS = maxThreads;
  }

  private final ProjectDescriptor myProjectDescriptor;
  private final BuilderRegistry myBuilderRegistry;
  private final Map<String, String> myBuilderParams;
  private final CanceledStatus myCancelStatus;
  @Nullable private final Callbacks.ConstantAffectionResolver myJavaConstantResolver;
  private final List<MessageHandler> myMessageHandlers = new ArrayList<>();
  // Fans every build message out to all registered handlers.
  private final MessageHandler myMessageDispatcher = new MessageHandler() {
    public void processMessage(BuildMessage msg) {
      for (MessageHandler h : myMessageHandlers) {
        h.processMessage(msg);
      }
    }
  };
  private final boolean myIsTestMode;
  // Progress accounting: amount of target work processed so far vs. the total.
  private volatile float myTargetsProcessed = 0.0f;
  private volatile float myTotalTargetsWork;
  private final int myTotalModuleLevelBuilderCount;
  // Background tasks started during the build; awaited at the end of build().
  private final List<Future> myAsyncTasks = Collections.synchronizedList(new ArrayList<Future>());
  // Per-builder statistics, reported via BuilderStatisticsMessage in sendElapsedTimeMessages().
  private final ConcurrentMap<Builder, AtomicLong> myElapsedTimeNanosByBuilder = ContainerUtil.newConcurrentMap();
  private final ConcurrentMap<Builder, AtomicInteger> myNumberOfSourcesProcessedByBuilder = ContainerUtil.newConcurrentMap();
  /**
   * Creates a builder for one project.
   *
   * @param pd                   project model, FS state and data storages for this build
   * @param builderRegistry      registry of all target-level and module-level builders
   * @param builderParams        free-form parameters passed through to builders
   * @param cs                   cooperative cancellation flag polled during the build
   * @param javaConstantResolver optional resolver used for the Java constant search service; may be null
   * @param isTestMode           true when the builder is driven from tests
   */
  public IncProjectBuilder(ProjectDescriptor pd, BuilderRegistry builderRegistry, Map<String, String> builderParams, CanceledStatus cs,
                           @Nullable Callbacks.ConstantAffectionResolver javaConstantResolver, final boolean isTestMode) {
    myProjectDescriptor = pd;
    myBuilderRegistry = builderRegistry;
    myBuilderParams = builderParams;
    myCancelStatus = cs;
    myJavaConstantResolver = javaConstantResolver;
    // total work is measured in build targets
    myTotalTargetsWork = pd.getBuildTargetIndex().getAllTargets().size();
    myTotalModuleLevelBuilderCount = builderRegistry.getModuleLevelBuilderCount();
    myIsTestMode = isTestMode;
  }
  /** Registers a handler that will receive every build message dispatched by this builder. */
  public void addMessageHandler(MessageHandler handler) {
    myMessageHandlers.add(handler);
  }
  /**
   * Checks whether anything in the given scope needs recompilation without running a build.
   * If at least one affected file is pending recompilation (or the check itself fails),
   * a {@link DoneSomethingNotification} is dispatched as a marker that the compiler has
   * work to do; otherwise no message is sent.
   */
  public void checkUpToDate(CompileScope scope) {
    CompileContextImpl context = null;
    try {
      context = createContext(scope);
      final BuildFSState fsState = myProjectDescriptor.fsState;
      for (BuildTarget<?> target : myProjectDescriptor.getBuildTargetIndex().getAllTargets()) {
        if (scope.isAffected(target)) {
          BuildOperations.ensureFSStateInitialized(context, target);
          final FilesDelta delta = fsState.getEffectiveFilesDelta(context, target);
          // the delta must stay locked while its recompile set is iterated
          delta.lockData();
          try {
            for (Set<File> files : delta.getSourcesToRecompile().values()) {
              for (File file : files) {
                if (scope.isAffected(target, file)) {
                  // this will serve as a marker that compiler has work to do
                  myMessageDispatcher.processMessage(DoneSomethingNotification.INSTANCE);
                  return;
                }
              }
            }
          }
          finally {
            delta.unlockData();
          }
        }
      }
    }
    catch (Exception e) {
      LOG.info(e);
      // this will serve as a marker that compiler has work to do
      myMessageDispatcher.processMessage(DoneSomethingNotification.INSTANCE);
    }
    finally {
      if (context != null) {
        flushContext(context);
      }
    }
  }
  /**
   * Runs the build for the given scope. Storage-corruption failures are converted into
   * {@link RebuildRequestedException} so the caller can restart with a full rebuild;
   * other build failures are reported as compiler messages. On exit, caches are flushed
   * and all asynchronous tasks started during the build are awaited (unless canceled).
   *
   * @param forceCleanCaches when true, output roots and caches are cleaned as for a rebuild
   * @throws RebuildRequestedException if internal caches are corrupted or outdated
   */
  public void build(CompileScope scope, boolean forceCleanCaches) throws RebuildRequestedException {
    // Under memory pressure: drop the FS cache and javac zip cache, flush storages.
    final LowMemoryWatcher memWatcher = LowMemoryWatcher.register(() -> {
      myProjectDescriptor.getFSCache().clear();
      JavacMain.clearCompilerZipFileCache();
      myProjectDescriptor.dataManager.flush(false);
      myProjectDescriptor.timestamps.getStorage().force();
    });
    startTempDirectoryCleanupTask();
    CompileContextImpl context = null;
    try {
      context = createContext(scope);
      runBuild(context, forceCleanCaches);
      myProjectDescriptor.dataManager.saveVersion();
      reportRebuiltModules(context);
      reportUnprocessedChanges(context);
    }
    catch (StopBuildException e) {
      reportRebuiltModules(context);
      reportUnprocessedChanges(context);
      // some builder decided to stop the build
      // report optional progress message if any
      final String msg = e.getMessage();
      if (!StringUtil.isEmptyOrSpaces(msg)) {
        myMessageDispatcher.processMessage(new ProgressMessage(msg));
      }
    }
    catch (BuildDataCorruptedException e) {
      LOG.info(e);
      requestRebuild(e, e);
    }
    catch (ProjectBuildException e) {
      LOG.info(e);
      final Throwable cause = e.getCause();
      // Causes that indicate broken storages mean the caches cannot be trusted:
      // request a full rebuild instead of reporting an error.
      if (cause instanceof PersistentEnumerator.CorruptedException ||
          cause instanceof MappingFailedException ||
          cause instanceof IOException ||
          cause instanceof BuildDataCorruptedException ||
          (cause instanceof RuntimeException && cause.getCause() instanceof IOException)) {
        requestRebuild(e, cause);
      }
      else {
        // should stop the build with error
        final String errMessage = e.getMessage();
        final CompilerMessage msg;
        if (StringUtil.isEmptyOrSpaces(errMessage)) {
          msg = new CompilerMessage("", cause != null ? cause : e);
        }
        else {
          // avoid repeating the cause text when it is already part of the error message
          final String causeMessage = cause != null ? cause.getMessage() : "";
          msg = new CompilerMessage("", BuildMessage.Kind.ERROR, StringUtil.isEmptyOrSpaces(causeMessage) || errMessage.trim().endsWith(causeMessage)
                                        ? errMessage
                                        : errMessage + ": " + causeMessage);
        }
        myMessageDispatcher.processMessage(msg);
      }
    }
    finally {
      memWatcher.stop();
      flushContext(context);
      // wait for async tasks
      final CanceledStatus status = context == null ? CanceledStatus.NULL : context.getCancelStatus();
      synchronized (myAsyncTasks) {
        for (Future task : myAsyncTasks) {
          if (status.isCanceled()) {
            break;
          }
          waitForTask(status, task);
        }
      }
    }
  }
private void requestRebuild(Exception e, Throwable cause) throws RebuildRequestedException {
myMessageDispatcher.processMessage(new CompilerMessage("", BuildMessage.Kind.INFO,
"Internal caches are corrupted or have outdated format, forcing project rebuild: " +
e.getMessage()));
throw new RebuildRequestedException(cause);
}
private static void waitForTask(@NotNull CanceledStatus status, Future task) {
try {
while (true) {
try {
task.get(500L, TimeUnit.MILLISECONDS);
break;
}
catch (TimeoutException ignored) {
if (status.isCanceled()) {
break;
}
}
}
}
catch (Throwable th) {
LOG.info(th);
}
}
  /**
   * Emits an INFO message listing the modules that were fully rebuilt because their
   * target configuration (or dependencies) changed. At most five module names are
   * listed explicitly; the remainder is summarized as "and N others".
   */
  private static void reportRebuiltModules(CompileContextImpl context) {
    final Set<JpsModule> modules = BuildTargetConfiguration.MODULES_WITH_TARGET_CONFIG_CHANGED_KEY.get(context);
    if (modules == null || modules.isEmpty()) {
      return;
    }
    final StringBuilder message = new StringBuilder();
    if (modules.size() > 1) {
      message.append("Modules ");
      final int namesLimit = 5;
      int idx = 0;
      for (Iterator<JpsModule> iterator = modules.iterator(); iterator.hasNext(); ) {
        final JpsModule module = iterator.next();
        if (idx == namesLimit && iterator.hasNext()) {
          // limit reached with more modules remaining: truncate the listing
          message.append(" and ").append(modules.size() - namesLimit).append(" others");
          break;
        }
        if (idx > 0) {
          message.append(", ");
        }
        message.append("\"").append(module.getName()).append("\"");
        idx += 1;
      }
      message.append(" were");
    }
    else {
      message.append("Module \"").append(modules.iterator().next().getName()).append("\" was");
    }
    message.append(" fully rebuilt due to project configuration");
    if (ModuleBuildTarget.REBUILD_ON_DEPENDENCY_CHANGE) {
      message.append("/dependencies");
    }
    message.append(" changes");
    context.processMessage(new CompilerMessage("", BuildMessage.Kind.INFO, message.toString()));
  }
private static void reportUnprocessedChanges(CompileContextImpl context) {
final ProjectDescriptor pd = context.getProjectDescriptor();
final BuildFSState fsState = pd.fsState;
for (BuildTarget<?> target : pd.getBuildTargetIndex().getAllTargets()) {
if (fsState.hasUnprocessedChanges(context, target)) {
context.processMessage(new UnprocessedFSChangesNotification());
break;
}
}
}
private static void flushContext(CompileContext context) {
if (context != null) {
final ProjectDescriptor pd = context.getProjectDescriptor();
pd.timestamps.getStorage().force();
pd.dataManager.flush(false);
}
final ExternalJavacManager server = ExternalJavacManager.KEY.get(context);
if (server != null) {
server.stop();
ExternalJavacManager.KEY.set(context, null);
}
}
  /**
   * Executes the build pipeline: cleans output roots, runs 'before' tasks, builds all
   * target chunks, runs 'after' tasks and reports per-builder statistics. All registered
   * builders are notified of build start before and of build finish after the pipeline,
   * even when the build fails.
   */
  private void runBuild(final CompileContextImpl context, boolean forceCleanCaches) throws ProjectBuildException {
    context.setDone(0.0f);
    LOG.info("Building project; isRebuild:" +
             context.isProjectRebuild() +
             "; isMake:" +
             context.isMake() +
             " parallel compilation:" +
             BuildRunner.PARALLEL_BUILD_ENABLED);
    context.addBuildListener(new ChainedTargetsBuildListener(context));
    //Deletes class loader classpath index files for changed output roots
    context.addBuildListener(new BuildListener() {
      @Override
      public void filesGenerated(FileGeneratedEvent event) {
        // collect the distinct output roots touched by this event
        final Set<File> outputs = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
        for (Pair<String, String> pair : event.getPaths()) {
          outputs.add(new File(pair.getFirst()));
        }
        for (File root : outputs) {
          //noinspection ResultOfMethodCallIgnored
          new File(root, CLASSPATH_INDEX_FILE_NAME).delete();
        }
      }
      @Override
      public void filesDeleted(FileDeletedEvent event) {
        // nothing to do: only generated files invalidate the classpath index
      }
    });
    for (TargetBuilder builder : myBuilderRegistry.getTargetBuilders()) {
      builder.buildStarted(context);
    }
    for (ModuleLevelBuilder builder : myBuilderRegistry.getModuleLevelBuilders()) {
      builder.buildStarted(context);
    }
    try {
      // clean roots for targets for which rebuild is forced
      cleanOutputRoots(context, context.isProjectRebuild() || forceCleanCaches);
      context.processMessage(new ProgressMessage("Running 'before' tasks"));
      runTasks(context, myBuilderRegistry.getBeforeTasks());
      TimingLog.LOG.debug("'before' tasks finished");
      context.processMessage(new ProgressMessage("Checking sources"));
      buildChunks(context);
      TimingLog.LOG.debug("Building targets finished");
      context.processMessage(new ProgressMessage("Running 'after' tasks"));
      runTasks(context, myBuilderRegistry.getAfterTasks());
      TimingLog.LOG.debug("'after' tasks finished");
      sendElapsedTimeMessages(context);
    }
    finally {
      // builders must always observe buildFinished, even when the build failed
      for (TargetBuilder builder : myBuilderRegistry.getTargetBuilders()) {
        builder.buildFinished(context);
      }
      for (ModuleLevelBuilder builder : myBuilderRegistry.getModuleLevelBuilders()) {
        builder.buildFinished(context);
      }
      context.processMessage(new ProgressMessage("Finished, saving caches..."));
    }
  }
private void sendElapsedTimeMessages(CompileContext context) {
for (Map.Entry<Builder, AtomicLong> entry : myElapsedTimeNanosByBuilder.entrySet()) {
AtomicInteger processedSourcesRef = myNumberOfSourcesProcessedByBuilder.get(entry.getKey());
int processedSources = processedSourcesRef != null ? processedSourcesRef.get() : 0;
context.processMessage(new BuilderStatisticsMessage(entry.getKey().getPresentableName(), processedSources, entry.getValue().get()/1000000));
}
}
private void startTempDirectoryCleanupTask() {
final String tempPath = System.getProperty("java.io.tmpdir", null);
if (StringUtil.isEmptyOrSpaces(tempPath)) {
return;
}
final File tempDir = new File(tempPath);
final File dataRoot = myProjectDescriptor.dataManager.getDataPaths().getDataStorageRoot();
if (!FileUtil.isAncestor(dataRoot, tempDir, true)) {
// cleanup only 'local' temp
return;
}
final File[] files = tempDir.listFiles();
if (files != null && files.length != 0) {
final RunnableFuture<Void> task = new FutureTask<>(() -> {
for (File tempFile : files) {
FileUtil.delete(tempFile);
}
}, null);
final Thread thread = new Thread(task, "Temp directory cleanup");
thread.setPriority(Thread.MIN_PRIORITY);
thread.setDaemon(true);
thread.start();
myAsyncTasks.add(task);
}
}
  /**
   * Creates the compile context for the given scope, chooses the FS caching strategy and
   * wires up the Java constant search service from the project-level resolver plus any
   * resolvers contributed by {@link JavaBuilderExtension}s.
   */
  private CompileContextImpl createContext(CompileScope scope) throws ProjectBuildException {
    final CompileContextImpl context = new CompileContextImpl(scope, myProjectDescriptor, myMessageDispatcher, myBuilderParams, myCancelStatus);
    // in project rebuild mode performance gain is hard to observe, so it is better to save memory
    // in make mode it is critical to traverse file system as fast as possible, so we choose speed over memory savings
    myProjectDescriptor.setFSCache(context.isProjectRebuild() ? FSCache.NO_CACHE : new FSCache());
    final Callbacks.ConstantAffectionResolver javaResolver = myJavaConstantResolver;
    if (javaResolver == null) {
      // no base resolver: the constant search service is disabled entirely
      JavaBuilderUtil.CONSTANT_SEARCH_SERVICE.set(context, null);
    }
    else {
      final List<Callbacks.ConstantAffectionResolver> resolvers = getResolvers();
      resolvers.add(javaResolver);
      for (JavaBuilderExtension provider : JpsServiceManager.getInstance().getExtensions(JavaBuilderExtension.class)) {
        final Callbacks.ConstantAffectionResolver extResolver = provider.getConstantSearch(context);
        if (extResolver != null) {
          resolvers.add(extResolver);
        }
      }
      // wrap in a composite only when more than one resolver is present
      JavaBuilderUtil.CONSTANT_SEARCH_SERVICE.set(context, resolvers.size() == 1? resolvers.get(0) : new CompositeConstantResolver(resolvers));
    }
    return context;
  }
  /** Creates a fresh mutable list used to collect constant search resolvers. */
  @NotNull
  private SmartList<Callbacks.ConstantAffectionResolver> getResolvers() {
    return new SmartList<>();
  }
  /**
   * Cleans output directories before the build: either all outputs (when the compiler
   * configuration allows clearing on rebuild) or only those of targets with a forced
   * build, plus outputs of stale targets. When {@code cleanCaches} is true, the
   * timestamp and data storages are cleaned as well; cleanup errors are collected so
   * that every step is attempted, and the first error is rethrown at the end.
   */
  private void cleanOutputRoots(CompileContext context, boolean cleanCaches) throws ProjectBuildException {
    final ProjectDescriptor projectDescriptor = context.getProjectDescriptor();
    // first failure encountered; subsequent failures are only logged
    ProjectBuildException ex = null;
    try {
      final JpsJavaCompilerConfiguration configuration = JpsJavaExtensionService.getInstance().getOrCreateCompilerConfiguration(projectDescriptor.getProject());
      final boolean shouldClear = configuration.isClearOutputDirectoryOnRebuild();
      if (shouldClear) {
        clearOutputs(context);
      }
      else {
        for (BuildTarget<?> target : projectDescriptor.getBuildTargetIndex().getAllTargets()) {
          context.checkCanceled();
          if (context.getScope().isBuildForced(target)) {
            clearOutputFilesUninterruptibly(context, target);
          }
        }
      }
      for (BuildTargetType<?> type : TargetTypeRegistry.getInstance().getTargetTypes()) {
        if (context.getScope().isAllTargetsOfTypeAffected(type)) {
          cleanOutputOfStaleTargets(type, context);
        }
      }
    }
    catch (ProjectBuildException e) {
      ex = e;
    }
    finally {
      // NOTE(review): when cleanCaches is false, a ProjectBuildException captured in 'ex'
      // above is never rethrown — confirm this is intended and not a dropped error.
      if (cleanCaches) {
        try {
          projectDescriptor.timestamps.getStorage().clean();
        }
        catch (IOException e) {
          if (ex == null) {
            ex = new ProjectBuildException("Error cleaning timestamps storage", e);
          }
          else {
            LOG.info("Error cleaning timestamps storage", e);
          }
        }
        finally {
          try {
            projectDescriptor.dataManager.clean();
          }
          catch (IOException e) {
            if (ex == null) {
              ex = new ProjectBuildException("Error cleaning compiler storages", e);
            }
            else {
              LOG.info("Error cleaning compiler storages", e);
            }
          }
          finally {
            // FS state is always reset; the first recorded failure wins
            projectDescriptor.fsState.clearAll();
            if (ex != null) {
              throw ex;
            }
          }
        }
      }
    }
  }
  /**
   * Deletes the output files and the data directories of "stale" targets (targets of the
   * given type that no longer exist in the project). Failures are reported as build
   * warnings and do not abort the build.
   */
  private void cleanOutputOfStaleTargets(BuildTargetType<?> type, CompileContext context) {
    List<Pair<String, Integer>> targetIds = myProjectDescriptor.dataManager.getTargetsState().getStaleTargetIds(type);
    if (targetIds.isEmpty()) return;
    context.processMessage(new ProgressMessage("Cleaning old output directories..."));
    for (Pair<String, Integer> ids : targetIds) {
      String stringId = ids.first;
      try {
        SourceToOutputMappingImpl mapping = null;
        try {
          // the mapping for a stale target is opened on demand and must be closed here
          mapping = myProjectDescriptor.dataManager.createSourceToOutputMapForStaleTarget(type, stringId);
          clearOutputFiles(context, mapping, type, ids.second);
        }
        finally {
          if (mapping != null) {
            mapping.close();
          }
        }
        FileUtil.delete(myProjectDescriptor.dataManager.getDataPaths().getTargetDataRoot(type, stringId));
        myProjectDescriptor.dataManager.getTargetsState().cleanStaleTarget(type, stringId);
      }
      catch (IOException e) {
        LOG.warn(e);
        myMessageDispatcher.processMessage(new CompilerMessage("", BuildMessage.Kind.WARNING, "Failed to delete output files from obsolete '" + stringId + "' target: " + e.toString()));
      }
    }
  }
public static void clearOutputFiles(CompileContext context, BuildTarget<?> target) throws IOException {
final SourceToOutputMapping map = context.getProjectDescriptor().dataManager.getSourceToOutputMap(target);
BuildTargetType<?> targetType = target.getTargetType();
clearOutputFiles(context, map, targetType, context.getProjectDescriptor().dataManager.getTargetsState().getBuildTargetId(target));
registerTargetsWithClearedOutput(context, Collections.singletonList(target));
}
  /**
   * Deletes every output file recorded in the given source-to-output mapping, removes
   * the corresponding entries from the output-to-target registry and reports the
   * deletions. For module-based target types, directories left empty by the deletion
   * are pruned afterwards.
   */
  private static void clearOutputFiles(CompileContext context,
                                       SourceToOutputMapping mapping,
                                       BuildTargetType<?> targetType,
                                       int targetId) throws IOException {
    // directories are tracked for pruning only for module-based targets
    final THashSet<File> dirsToDelete = targetType instanceof ModuleBasedBuildTargetType<?>
                                        ? new THashSet<>(FileUtil.FILE_HASHING_STRATEGY) : null;
    OutputToTargetRegistry outputToTargetRegistry = context.getProjectDescriptor().dataManager.getOutputToTargetRegistry();
    for (String srcPath : mapping.getSources()) {
      final Collection<String> outs = mapping.getOutputs(srcPath);
      if (outs != null && !outs.isEmpty()) {
        List<String> deletedPaths = new ArrayList<>();
        for (String out : outs) {
          BuildOperations.deleteRecursively(out, deletedPaths, dirsToDelete);
        }
        outputToTargetRegistry.removeMapping(outs, targetId);
        if (!deletedPaths.isEmpty()) {
          context.processMessage(new FileDeletedEvent(deletedPaths));
        }
      }
    }
    if (dirsToDelete != null) {
      FSOperations.pruneEmptyDirs(context, dirsToDelete);
    }
  }
/**
 * Records in the compile context that the outputs of the given targets were fully cleared,
 * so later per-file cleanup can be skipped for them (see {@code isTargetOutputCleared}).
 */
private static void registerTargetsWithClearedOutput(CompileContext context, Collection<? extends BuildTarget<?>> targets) {
  synchronized (TARGET_WITH_CLEARED_OUTPUT) {
    Set<BuildTarget<?>> clearedTargets = context.getUserData(TARGET_WITH_CLEARED_OUTPUT);
    if (clearedTargets == null) {
      clearedTargets = new THashSet<>();
      context.putUserData(TARGET_WITH_CLEARED_OUTPUT, clearedTargets);
    }
    clearedTargets.addAll(targets);
  }
}
/**
 * @return {@code true} if the whole output of {@code target} was already cleared earlier in this
 * build session (registered via {@code registerTargetsWithClearedOutput})
 */
private static boolean isTargetOutputCleared(CompileContext context, BuildTarget<?> target) {
  synchronized (TARGET_WITH_CLEARED_OUTPUT) {
    final Set<BuildTarget<?>> clearedTargets = context.getUserData(TARGET_WITH_CLEARED_OUTPUT);
    if (clearedTargets == null) {
      return false;
    }
    return clearedTargets.contains(target);
  }
}
/**
 * Describes how a predicate relates to a collection: it holds for none of the elements,
 * only some of them, or all of them.
 */
private enum Applicability {
  NONE, PARTIAL, ALL;

  /**
   * Evaluates {@code p} over {@code collection}. Returns {@link #PARTIAL} as soon as both a
   * matching and a non-matching element have been seen; otherwise {@link #ALL} or {@link #NONE}
   * (an empty collection yields {@link #NONE}).
   */
  static <T> Applicability calculate(Predicate<T> p, Collection<T> collection) {
    boolean anyMatched = false;
    boolean anyMissed = false;
    for (T element : collection) {
      if (p.apply(element)) {
        anyMatched = true;
      }
      else {
        anyMissed = true;
      }
      if (anyMatched && anyMissed) {
        return PARTIAL;
      }
    }
    return anyMatched ? ALL : NONE;
  }
}
/**
 * Clears output roots for targets whose build was forced in the current scope.
 * For module-based targets whole output roots may be wiped, but only when it is provably safe:
 * a root overlapping a (non-generated, in-content) source root is never bulk-deleted; instead
 * only files known to the build are removed via the stored source-to-output mappings.
 * Actual file deletion may be performed asynchronously (see SYNC_DELETE).
 *
 * @throws ProjectBuildException never thrown directly here, declared for callee compatibility
 */
private void clearOutputs(CompileContext context) throws ProjectBuildException {
final long cleanStart = System.currentTimeMillis();
final MultiMap<File, BuildTarget<?>> rootsToDelete = MultiMap.createSet();
final Set<File> allSourceRoots = ContainerUtil.newTroveSet(FileUtil.FILE_HASHING_STRATEGY);
final ProjectDescriptor projectDescriptor = context.getProjectDescriptor();
final List<? extends BuildTarget<?>> allTargets = projectDescriptor.getBuildTargetIndex().getAllTargets();
// phase 1: group module-based output roots for the overlap analysis below;
// non-module targets are cleaned right away via their source-to-output mappings
for (BuildTarget<?> target : allTargets) {
if (target instanceof ModuleBasedTarget) {
for (File file : target.getOutputRoots(context)) {
rootsToDelete.putValue(file, target);
}
}
else {
if (context.getScope().isBuildForced(target)) {
clearOutputFilesUninterruptibly(context, target);
}
}
}
// phase 2: collect all source roots that may contain user-managed files
final ModuleExcludeIndex moduleIndex = projectDescriptor.getModuleExcludeIndex();
for (BuildTarget<?> target : allTargets) {
for (BuildRootDescriptor descriptor : projectDescriptor.getBuildRootIndex().getTargetRoots(target, context)) {
// excluding from checks roots with generated sources; because it is safe to delete generated stuff
if (!descriptor.isGenerated()) {
File rootFile = descriptor.getRootFile();
//some roots aren't marked by as generated but in fact they are produced by some builder and it's safe to remove them.
//However if a root isn't excluded it means that its content will be shown in 'Project View' and a user can create new files under it so it would be dangerous to clean such roots
if (moduleIndex.isInContent(rootFile)) {
allSourceRoots.add(rootFile);
}
}
}
}
// phase 3: check that output and source roots are not overlapping; bulk-delete where safe
final CompileScope compileScope = context.getScope();
final List<File> filesToDelete = new ArrayList<>();
final Predicate<BuildTarget<?>> forcedBuild = input -> compileScope.isBuildForced(input);
for (Map.Entry<File, Collection<BuildTarget<?>>> entry : rootsToDelete.entrySet()) {
context.checkCanceled();
final File outputRoot = entry.getKey();
final Collection<BuildTarget<?>> rootTargets = entry.getValue();
final Applicability applicability = Applicability.calculate(forcedBuild, rootTargets);
if (applicability == Applicability.NONE) {
// no target sharing this root is force-built: leave the root alone
continue;
}
// bulk deletion requires that ALL targets sharing this root are force-built
boolean okToDelete = applicability == Applicability.ALL;
if (okToDelete && !moduleIndex.isExcluded(outputRoot)) {
// if output root itself is directly or indirectly excluded,
// there cannot be any manageable sources under it, even if the output root is located under some source root
// so in this case it is safe to delete such root
if (JpsPathUtil.isUnder(allSourceRoots, outputRoot)) {
okToDelete = false;
}
else {
// also refuse bulk deletion if some source root lives under the output root
final Set<File> _outRoot = ContainerUtil.newTroveSet(FileUtil.FILE_HASHING_STRATEGY, outputRoot);
for (File srcRoot : allSourceRoots) {
if (JpsPathUtil.isUnder(_outRoot, srcRoot)) {
okToDelete = false;
break;
}
}
}
}
if (okToDelete) {
// do not delete output root itself to avoid lots of unnecessary "roots_changed" events in IDEA
final File[] children = outputRoot.listFiles();
if (children != null) {
for (File child : children) {
// attempt an immediate delete; failures are queued for (possibly async) deletion below
if (!child.delete()) {
filesToDelete.add(child);
}
}
}
else { // the output root must be a file
if (!outputRoot.delete()) {
filesToDelete.add(outputRoot);
}
}
registerTargetsWithClearedOutput(context, rootTargets);
}
else {
if (applicability == Applicability.ALL) {
// only warn if unable to delete because of roots intersection
context.processMessage(new CompilerMessage(
"", BuildMessage.Kind.WARNING, "Output path " + outputRoot.getPath() + " intersects with a source root. Only files that were created by build will be cleaned.")
);
}
context.processMessage(new ProgressMessage("Cleaning output directories..."));
// clean only those files we are aware of
for (BuildTarget<?> target : rootTargets) {
if (compileScope.isBuildForced(target)) {
clearOutputFilesUninterruptibly(context, target);
}
}
}
}
if (!filesToDelete.isEmpty()) {
context.processMessage(new ProgressMessage("Cleaning output directories..."));
if (SYNC_DELETE) {
for (File file : filesToDelete) {
context.checkCanceled();
FileUtil.delete(file);
}
}
else {
// defer deletion to a background task; completion is awaited elsewhere via myAsyncTasks
myAsyncTasks.add(FileUtil.asyncDelete(filesToDelete));
}
}
LOG.info("Cleaned output directories in " + (System.currentTimeMillis() - cleanStart) + " ms");
}
/**
 * Same as {@code clearOutputFiles(context, target)} but never propagates failures:
 * any error is logged and surfaced to the user as a compiler warning instead,
 * so a single broken target cannot abort the whole clean phase.
 */
private static void clearOutputFilesUninterruptibly(CompileContext context, BuildTarget<?> target) {
  try {
    clearOutputFiles(context, target);
  }
  catch (Throwable e) {
    LOG.info(e);
    final String message = e.getMessage();
    final String reason = message != null ? message : e.getClass().getName();
    context.processMessage(new CompilerMessage("", BuildMessage.Kind.WARNING, "Problems clearing output files for target \"" + target.getPresentableName() + "\": " + reason));
  }
}
/** Runs the given build tasks sequentially against the same compile context. */
private static void runTasks(CompileContext context, final List<BuildTask> tasks) throws ProjectBuildException {
  for (BuildTask buildTask : tasks) {
    buildTask.build(context);
  }
}
/**
 * Builds all affected target chunks, either in parallel (bounded thread pool) or sequentially,
 * depending on the PARALLEL_BUILD_ENABLED setting and the configured thread count.
 * Also pre-computes the total amount of work (number of targets in affected chunks) so that
 * progress reporting reflects only chunks that will actually be built.
 *
 * @throws ProjectBuildException on I/O failures while flushing build data storages
 */
private void buildChunks(final CompileContextImpl context) throws ProjectBuildException {
try {
final CompileScope scope = context.getScope();
final ProjectDescriptor pd = context.getProjectDescriptor();
final BuildTargetIndex targetIndex = pd.getBuildTargetIndex();
// for better progress dynamics consider only actually affected chunks
int totalAffected = 0;
for (BuildTargetChunk chunk : targetIndex.getSortedTargetChunks(context)) {
if (isAffected(context.getScope(), chunk)) {
totalAffected += chunk.getTargets().size();
}
}
myTotalTargetsWork = totalAffected;
boolean compileInParallel = BuildRunner.PARALLEL_BUILD_ENABLED;
if (compileInParallel && MAX_BUILDER_THREADS <= 1) {
// parallel mode makes no sense with a single worker thread; fall back to sequential
LOG.info("Switched off parallel compilation because maximum number of builder threads is less than 2. Set '"
+ GlobalOptions.COMPILE_PARALLEL_MAX_THREADS_OPTION + "' system property to a value greater than 1 to really enable parallel compilation.");
compileInParallel = false;
}
if (compileInParallel) {
new BuildParallelizer(context).buildInParallel();
}
else {
// non-parallel build
for (BuildTargetChunk chunk : targetIndex.getSortedTargetChunks(context)) {
try {
buildChunkIfAffected(context, scope, chunk);
}
finally {
// release per-chunk storages and persist accumulated data after every chunk
pd.dataManager.closeSourceToOutputStorages(Collections.singleton(chunk));
pd.dataManager.flush(true);
}
}
}
}
catch (IOException e) {
throw new ProjectBuildException(e);
}
}
/**
 * A node of the chunk-dependency graph used by the parallel build. Tracks which dependency
 * chunks have not finished building yet and which other tasks are waiting on this one,
 * so that finished tasks can release their dependents for scheduling.
 */
private static class BuildChunkTask {
  private final BuildTargetChunk myChunk;
  // dependencies of this chunk that have not finished building yet
  private final Set<BuildChunkTask> myPendingDependencies = new THashSet<>();
  // tasks that list this chunk among their dependencies
  private final List<BuildChunkTask> myDependentTasks = new ArrayList<>();

  private BuildChunkTask(BuildTargetChunk chunk) {
    myChunk = chunk;
  }

  public BuildTargetChunk getChunk() {
    return myChunk;
  }

  /** @return {@code true} when all dependencies have finished and this chunk may be built */
  public boolean isReady() {
    return myPendingDependencies.isEmpty();
  }

  /** Registers {@code dependency} as a prerequisite of this task (idempotent). */
  public void addDependency(BuildChunkTask dependency) {
    if (myPendingDependencies.add(dependency)) {
      dependency.myDependentTasks.add(this);
    }
  }

  /**
   * Marks this task as finished and returns the dependent tasks that became ready as a result.
   */
  public List<BuildChunkTask> markAsFinishedAndGetNextReadyTasks() {
    final List<BuildChunkTask> readyTasks = new SmartList<>();
    for (BuildChunkTask dependent : myDependentTasks) {
      final boolean removed = dependent.myPendingDependencies.remove(this);
      LOG.assertTrue(removed, dependent.getChunk().toString() + " didn't have " + getChunk().toString());
      if (dependent.isReady()) {
        readyTasks.add(dependent);
      }
    }
    return readyTasks;
  }
}
/**
 * Schedules target chunks on a bounded executor respecting inter-chunk dependencies:
 * a chunk is queued only after all chunks it depends on have finished. Completion is tracked
 * with a {@link CountDownLatch} counting all tasks; the first failure is remembered and
 * prevents queuing of further chunk builds (already-queued ones still drain the latch).
 */
private class BuildParallelizer {
private final ExecutorService myParallelBuildExecutor = AppExecutorUtil.createBoundedApplicationPoolExecutor(
"IncProjectBuilder Executor Pool", SharedThreadPool.getInstance(), MAX_BUILDER_THREADS);
private final CompileContext myContext;
// first failure observed by any worker; checked before starting new chunks
private final AtomicReference<Throwable> myException = new AtomicReference<>();
// guards markAsFinishedAndGetNextReadyTasks() which mutates the shared dependency graph
private final Object myQueueLock = new Object();
private final CountDownLatch myTasksCountDown;
private final List<BuildChunkTask> myTasks;
private BuildParallelizer(CompileContext context) {
myContext = context;
final ProjectDescriptor pd = myContext.getProjectDescriptor();
final BuildTargetIndex targetIndex = pd.getBuildTargetIndex();
List<BuildTargetChunk> chunks = targetIndex.getSortedTargetChunks(myContext);
myTasks = new ArrayList<>(chunks.size());
// map every target to its owning task so dependencies can be resolved to tasks below
Map<BuildTarget<?>, BuildChunkTask> targetToTask = new THashMap<>();
for (BuildTargetChunk chunk : chunks) {
BuildChunkTask task = new BuildChunkTask(chunk);
myTasks.add(task);
for (BuildTarget<?> target : chunk.getTargets()) {
targetToTask.put(target, task);
}
}
// wire the dependency graph; intra-chunk dependencies (depTask == task) are ignored
for (BuildChunkTask task : myTasks) {
for (BuildTarget<?> target : task.getChunk().getTargets()) {
for (BuildTarget<?> dependency : targetIndex.getDependencies(target, myContext)) {
BuildChunkTask depTask = targetToTask.get(dependency);
if (depTask != null && depTask != task) {
task.addDependency(depTask);
}
}
}
}
myTasksCountDown = new CountDownLatch(myTasks.size());
}
/**
 * Queues all initially-ready chunks, waits for every task to finish, then rethrows the
 * first recorded failure (wrapped in ProjectBuildException unless it already is one).
 */
public void buildInParallel() throws IOException, ProjectBuildException {
List<BuildChunkTask> initialTasks = new ArrayList<>();
for (BuildChunkTask task : myTasks) {
if (task.isReady()) {
initialTasks.add(task);
}
}
queueTasks(initialTasks);
try {
myTasksCountDown.await();
}
catch (InterruptedException e) {
LOG.info(e);
}
final Throwable throwable = myException.get();
if (throwable instanceof ProjectBuildException) {
throw (ProjectBuildException)throwable;
}
else if (throwable != null) {
throw new ProjectBuildException(throwable);
}
}
// Submits each task to the executor; optionally logs the queued chunk names (sorted for stable logs).
private void queueTasks(List<BuildChunkTask> tasks) {
if (LOG.isDebugEnabled() && !tasks.isEmpty()) {
final List<BuildTargetChunk> chunksToLog = new ArrayList<>();
for (BuildChunkTask task : tasks) {
chunksToLog.add(task.getChunk());
}
final StringBuilder logBuilder = new StringBuilder("Queuing " + chunksToLog.size() + " chunks in parallel: ");
chunksToLog.sort(Comparator.comparing(BuildTargetChunk::toString));
for (BuildTargetChunk chunk : chunksToLog) {
logBuilder.append(chunk.toString()).append("; ");
}
LOG.debug(logBuilder.toString());
}
for (BuildChunkTask task : tasks) {
queueTask(task);
}
}
// Builds one chunk on a pool thread; on completion releases dependents and queues the newly-ready ones.
private void queueTask(final BuildChunkTask task) {
// each chunk gets its own context wrapper with thread-local user data (see createContextWrapper)
final CompileContext chunkLocalContext = createContextWrapper(myContext);
myParallelBuildExecutor.execute(() -> {
try {
try {
// skip the build if some other chunk already failed
if (myException.get() == null) {
buildChunkIfAffected(chunkLocalContext, myContext.getScope(), task.getChunk());
}
}
finally {
// release per-chunk storages and persist data even when the chunk build failed
myProjectDescriptor.dataManager.closeSourceToOutputStorages(Collections.singletonList(task.getChunk()));
myProjectDescriptor.dataManager.flush(true);
}
}
catch (Throwable e) {
// remember only the first failure; subsequent ones are just logged
myException.compareAndSet(null, e);
LOG.info(e);
}
finally {
LOG.debug("Finished compilation of " + task.getChunk().toString());
myTasksCountDown.countDown();
List<BuildChunkTask> nextTasks;
synchronized (myQueueLock) {
nextTasks = task.markAsFinishedAndGetNextReadyTasks();
}
if (!nextTasks.isEmpty()) {
queueTasks(nextTasks);
}
}
});
}
}
/** Builds the chunk only when at least one of its targets is affected by the given scope. */
private void buildChunkIfAffected(CompileContext context, CompileScope scope, BuildTargetChunk chunk) throws ProjectBuildException {
  if (!isAffected(scope, chunk)) {
    return;
  }
  buildTargetsChunk(context, chunk);
}
/** @return {@code true} if the scope affects at least one target of the chunk */
private static boolean isAffected(CompileScope scope, BuildTargetChunk chunk) {
  for (BuildTarget<?> candidate : chunk.getTargets()) {
    if (scope.isAffected(candidate)) {
      return true;
    }
  }
  return false;
}
/**
 * Runs the appropriate builders for one target chunk.
 * A multi-target chunk must consist solely of module build targets (a circular dependency);
 * mixing in any other target type is reported as an error and aborts the chunk.
 * Single non-module targets are handled by the registered {@link TargetBuilder}s.
 *
 * @return {@code true} if the chunk was processed (possibly producing outputs), {@code false}
 *         when the chunk was rejected because of an illegal target mix
 */
private boolean runBuildersForChunk(final CompileContext context, final BuildTargetChunk chunk) throws ProjectBuildException, IOException {
Set<? extends BuildTarget<?>> targets = chunk.getTargets();
if (targets.size() > 1) {
// a chunk with several targets means a dependency cycle: only module targets may form one
Set<ModuleBuildTarget> moduleTargets = new LinkedHashSet<>();
for (BuildTarget<?> target : targets) {
if (target instanceof ModuleBuildTarget) {
moduleTargets.add((ModuleBuildTarget)target);
}
else {
String targetsString = StringUtil.join(targets,
(Function<BuildTarget<?>, String>)target1 -> StringUtil.decapitalize(target1.getPresentableName()), ", ");
context.processMessage(new CompilerMessage(
"", BuildMessage.Kind.ERROR, "Cannot build " + StringUtil.decapitalize(target.getPresentableName()) + " because it is included into a circular dependency (" +
targetsString + ")")
);
return false;
}
}
return runModuleLevelBuilders(context, new ModuleChunk(moduleTargets));
}
final BuildTarget<?> target = targets.iterator().next();
if (target instanceof ModuleBuildTarget) {
return runModuleLevelBuilders(context, new ModuleChunk(Collections.singleton((ModuleBuildTarget)target)));
}
// In general the set of files corresponding to changed source file may be different
// Need this for example, to keep up with case changes in file names for case-insensitive OSes:
// deleting the output before copying is the only way to ensure the case of the output file's name is exactly the same as source file's case
cleanOldOutputs(context, target);
final List<TargetBuilder<?, ?>> builders = BuilderRegistry.getInstance().getTargetBuilders();
// each target builder contributes an equal share of this chunk's progress
final float builderProgressDelta = 1.0f / builders.size();
for (TargetBuilder<?, ?> builder : builders) {
buildTarget(target, context, builder);
updateDoneFraction(context, builderProgressDelta);
}
return true;
}
/**
 * Runs a single {@link TargetBuilder} against one target, if the builder supports the target's
 * type. Collects per-builder timing/throughput statistics and fires file-generated events
 * produced during the build.
 *
 * @param <R> root descriptor type of the target
 * @param <T> concrete target type
 */
private <R extends BuildRootDescriptor, T extends BuildTarget<R>>
void buildTarget(final T target, final CompileContext context, TargetBuilder<?, ?> builder) throws ProjectBuildException, IOException {
if (builder.getTargetTypes().contains(target.getTargetType())) {
// lazily exposes the set of files marked for recompilation to the builder
DirtyFilesHolder<R, T> holder = new DirtyFilesHolderBase<R, T>(context) {
@Override
public void processDirtyFiles(@NotNull FileProcessor<R, T> processor) throws IOException {
context.getProjectDescriptor().fsState.processFilesToRecompile(context, target, processor);
}
};
//noinspection unchecked
BuildOutputConsumerImpl outputConsumer = new BuildOutputConsumerImpl(target, context);
long start = System.nanoTime();
// the cast is safe: the type check above guarantees the builder handles this target type
((TargetBuilder<R, T>)builder).build(target, holder, outputConsumer, context);
storeBuilderStatistics(builder, System.nanoTime() - start, outputConsumer.getNumberOfProcessedSources());
outputConsumer.fireFileGeneratedEvent();
context.checkCanceled();
}
}
/**
 * For an incremental (non-forced) build, deletes the outputs that correspond to changed source
 * files so that stale artifacts never survive a rebuild of their sources.
 */
private static <T extends BuildRootDescriptor>
void cleanOldOutputs(final CompileContext context, final BuildTarget<T> target) throws ProjectBuildException, IOException {
  if (context.getScope().isBuildForced(target)) {
    // a forced build clears the whole output up-front, nothing to clean incrementally
    return;
  }
  BuildOperations.cleanOutputsCorrespondingToChangedFiles(context, new DirtyFilesHolderBase<T, BuildTarget<T>>(context) {
    @Override
    public void processDirtyFiles(@NotNull FileProcessor<T, BuildTarget<T>> processor) throws IOException {
      context.getProjectDescriptor().fsState.processFilesToRecompile(context, target, processor);
    }
  });
}
/** Advances the overall progress by {@code delta} processed targets and reports the new fraction. */
private void updateDoneFraction(CompileContext context, final float delta) {
  myTargetsProcessed += delta;
  context.setDone(myTargetsProcessed / myTotalTargetsWork);
}
/**
 * Builds one target chunk end-to-end: sends STARTED/FINISHED progress events, initializes FS
 * state, processes deleted paths, runs the builders, and marks the targets up-to-date.
 * On failure, deleted-path information that was not consumed is restored into the FS state so
 * the next build can retry; unexpected exceptions are wrapped into ProjectBuildException with
 * the chunk name for context.
 */
private void buildTargetsChunk(CompileContext context, final BuildTargetChunk chunk) throws ProjectBuildException {
final BuildFSState fsState = myProjectDescriptor.fsState;
boolean doneSomething;
try {
context.setCompilationStartStamp(chunk.getTargets(), System.currentTimeMillis());
sendBuildingTargetMessages(chunk.getTargets(), BuildingTargetProgressMessage.Event.STARTED);
Utils.ERRORS_DETECTED_KEY.set(context, Boolean.FALSE);
for (BuildTarget<?> target : chunk.getTargets()) {
BuildOperations.ensureFSStateInitialized(context, target);
}
// first remove outputs of sources deleted since the previous build
doneSomething = processDeletedPaths(context, chunk.getTargets());
fsState.beforeChunkBuildStart(context, chunk);
doneSomething |= runBuildersForChunk(context, chunk);
fsState.clearContextRoundData(context);
fsState.clearContextChunk(context);
BuildOperations.markTargetsUpToDate(context, chunk);
//if (doneSomething && GENERATE_CLASSPATH_INDEX) {
// myAsyncTasks.add(SharedThreadPool.getInstance().executeOnPooledThread(new Runnable() {
// @Override
// public void run() {
// createClasspathIndex(chunk);
// }
// }));
//}
}
catch (BuildDataCorruptedException | ProjectBuildException e) {
// already meaningful to callers; rethrow unchanged
throw e;
}
catch (Throwable e) {
// wrap anything else, prefixing the chunk name so the failure is attributable
final StringBuilder message = new StringBuilder();
message.append(chunk.getPresentableName()).append(": ").append(e.getClass().getName());
final String exceptionMessage = e.getMessage();
if (exceptionMessage != null) {
message.append(": ").append(exceptionMessage);
}
throw new ProjectBuildException(message.toString(), e);
}
finally {
for (BuildRootDescriptor rd : context.getProjectDescriptor().getBuildRootIndex().clearTempRoots(context)) {
context.getProjectDescriptor().fsState.clearRecompile(rd);
}
try {
// restore deleted paths that were not processed by 'integrate'
final Map<BuildTarget<?>, Collection<String>> map = Utils.REMOVED_SOURCES_KEY.get(context);
if (map != null) {
for (Map.Entry<BuildTarget<?>, Collection<String>> entry : map.entrySet()) {
final BuildTarget<?> target = entry.getKey();
final Collection<String> paths = entry.getValue();
if (paths != null) {
for (String path : paths) {
fsState.registerDeleted(context, target, new File(path), null);
}
}
}
}
}
catch (IOException e) {
//noinspection ThrowFromFinallyBlock
throw new ProjectBuildException(e);
}
finally {
Utils.REMOVED_SOURCES_KEY.set(context, null);
sendBuildingTargetMessages(chunk.getTargets(), BuildingTargetProgressMessage.Event.FINISHED);
}
}
}
/** Notifies the message dispatcher that the given targets entered the specified build phase. */
private void sendBuildingTargetMessages(@NotNull Set<? extends BuildTarget<?>> targets, @NotNull BuildingTargetProgressMessage.Event event) {
  final BuildingTargetProgressMessage progressMessage = new BuildingTargetProgressMessage(targets, event);
  myMessageDispatcher.processMessage(progressMessage);
}
//private static void createClasspathIndex(final BuildTargetChunk chunk) {
// final Set<File> outputDirs = new THashSet<File>(FileUtil.FILE_HASHING_STRATEGY);
// for (BuildTarget<?> target : chunk.getTargets()) {
// if (target instanceof ModuleBuildTarget) {
// File outputDir = ((ModuleBuildTarget)target).getOutputDir();
// if (outputDir != null && outputDirs.add(outputDir)) {
// try {
// BufferedWriter writer = new BufferedWriter(new FileWriter(new File(outputDir, CLASSPATH_INDEX_FILE_NAME)));
// try {
// writeIndex(writer, outputDir, "");
// }
// finally {
// writer.close();
// }
// }
// catch (IOException e) {
// // Ignore. Failed to create optional classpath index
// }
// }
// }
// }
//}
//private static void writeIndex(final BufferedWriter writer, final File file, final String path) throws IOException {
// writer.write(path);
// writer.write('\n');
// final File[] files = file.listFiles();
// if (files != null) {
// for (File child : files) {
// final String _path = path.isEmpty() ? child.getName() : path + "/" + child.getName();
// writeIndex(writer, child, _path);
// }
// }
//}
/**
 * Processes source files deleted since the previous build for the given targets:
 * removes their recorded outputs from disk (unless the target's whole output was already
 * cleared), updates the output-to-target registry, dirties UI forms bound to deleted sources,
 * and stashes the removed-sources map in the context for later 'integrate' processing.
 *
 * @return {@code true} if at least one output file was actually deleted
 */
private boolean processDeletedPaths(CompileContext context, final Set<? extends BuildTarget<?>> targets) throws ProjectBuildException {
boolean doneSomething = false;
try {
// cleanup outputs
final Map<BuildTarget<?>, Collection<String>> targetToRemovedSources = new HashMap<>();
final THashSet<File> dirsToDelete = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
for (BuildTarget<?> target : targets) {
final Collection<String> deletedPaths = myProjectDescriptor.fsState.getAndClearDeletedPaths(target);
if (deletedPaths.isEmpty()) {
continue;
}
targetToRemovedSources.put(target, deletedPaths);
if (isTargetOutputCleared(context, target)) {
// outputs were wiped wholesale earlier; only the removed-sources record is needed
continue;
}
final int buildTargetId = context.getProjectDescriptor().getTargetsState().getBuildTargetId(target);
// empty parent directories are pruned only for module-based targets
final boolean shouldPruneEmptyDirs = target instanceof ModuleBasedTarget;
final SourceToOutputMapping sourceToOutputStorage = context.getProjectDescriptor().dataManager.getSourceToOutputMap(target);
final ProjectBuilderLogger logger = context.getLoggingManager().getProjectBuilderLogger();
// actually delete outputs associated with removed paths
final Collection<String> pathsForIteration;
if (myIsTestMode) {
// ensure predictable order in test logs
pathsForIteration = new ArrayList<>(deletedPaths);
Collections.sort((List<String>)pathsForIteration);
}
else {
pathsForIteration = deletedPaths;
}
for (String deletedSource : pathsForIteration) {
// deleting outputs corresponding to non-existing source
final Collection<String> outputs = sourceToOutputStorage.getOutputs(deletedSource);
if (outputs != null && !outputs.isEmpty()) {
List<String> deletedOutputPaths = new ArrayList<>();
final OutputToTargetRegistry outputToSourceRegistry = context.getProjectDescriptor().dataManager.getOutputToTargetRegistry();
// delete only outputs not claimed by other targets (shared outputs stay on disk)
for (String output : outputToSourceRegistry.getSafeToDeleteOutputs(outputs, buildTargetId)) {
final boolean deleted = BuildOperations.deleteRecursively(output, deletedOutputPaths, shouldPruneEmptyDirs ? dirsToDelete : null);
if (deleted) {
doneSomething = true;
}
}
for (String outputPath : outputs) {
outputToSourceRegistry.removeMapping(outputPath, buildTargetId);
}
if (!deletedOutputPaths.isEmpty()) {
if (logger.isEnabled()) {
logger.logDeletedFiles(deletedOutputPaths);
}
context.processMessage(new FileDeletedEvent(deletedOutputPaths));
}
}
if (target instanceof ModuleBuildTarget) {
// check if deleted source was associated with a form
final OneToManyPathsMapping sourceToFormMap = context.getProjectDescriptor().dataManager.getSourceToFormMap();
final Collection<String> boundForms = sourceToFormMap.getState(deletedSource);
if (boundForms != null) {
for (String formPath : boundForms) {
final File formFile = new File(formPath);
if (formFile.exists()) {
// recompile the form so it is re-bound to a remaining source
FSOperations.markDirty(context, CompilationRound.CURRENT, formFile);
}
}
sourceToFormMap.remove(deletedSource);
}
}
}
}
if (!targetToRemovedSources.isEmpty()) {
// merge with removed-sources info accumulated by earlier rounds for the same context
final Map<BuildTarget<?>, Collection<String>> existing = Utils.REMOVED_SOURCES_KEY.get(context);
if (existing != null) {
for (Map.Entry<BuildTarget<?>, Collection<String>> entry : existing.entrySet()) {
final Collection<String> paths = targetToRemovedSources.get(entry.getKey());
if (paths != null) {
paths.addAll(entry.getValue());
}
else {
targetToRemovedSources.put(entry.getKey(), entry.getValue());
}
}
}
Utils.REMOVED_SOURCES_KEY.set(context, targetToRemovedSources);
}
FSOperations.pruneEmptyDirs(context, dirsToDelete);
}
catch (IOException e) {
throw new ProjectBuildException(e);
}
return doneSomething;
}
/**
 * Runs all module-level builders over a module chunk, possibly in several passes
 * (a builder may request an ADDITIONAL_PASS or a CHUNK_REBUILD; the latter is honored at most
 * once per chunk). Progress accounting is recalculated whenever extra passes are scheduled so
 * the overall fraction stays monotonic.
 *
 * @return {@code true} if at least one builder reported doing something, {@code false} otherwise
 */
private boolean runModuleLevelBuilders(final CompileContext context, final ModuleChunk chunk) throws ProjectBuildException, IOException {
for (BuilderCategory category : BuilderCategory.values()) {
for (ModuleLevelBuilder builder : myBuilderRegistry.getBuilders(category)) {
builder.chunkBuildStarted(context, chunk);
}
}
boolean doneSomething = false;
// a builder may request a full chunk rebuild at most once; tracked here
boolean rebuildFromScratchRequested = false;
float stageCount = myTotalModuleLevelBuilderCount;
final int modulesInChunk = chunk.getModules().size();
int buildersPassed = 0;
boolean nextPassRequired;
ChunkBuildOutputConsumerImpl outputConsumer = new ChunkBuildOutputConsumerImpl(context);
try {
do {
nextPassRequired = false;
myProjectDescriptor.fsState.beforeNextRoundStart(context, chunk);
DirtyFilesHolder<JavaSourceRootDescriptor, ModuleBuildTarget> dirtyFilesHolder =
new DirtyFilesHolderBase<JavaSourceRootDescriptor, ModuleBuildTarget>(context) {
@Override
public void processDirtyFiles(@NotNull FileProcessor<JavaSourceRootDescriptor, ModuleBuildTarget> processor)
throws IOException {
FSOperations.processFilesToRecompile(context, chunk, processor);
}
};
if (!JavaBuilderUtil.isForcedRecompilationAllJavaModules(context)) {
// incremental build: drop outputs of changed sources and clear their mapping entries
final Map<ModuleBuildTarget, Set<File>> cleanedSources = BuildOperations
.cleanOutputsCorrespondingToChangedFiles(context, dirtyFilesHolder);
for (Map.Entry<ModuleBuildTarget, Set<File>> entry : cleanedSources.entrySet()) {
final ModuleBuildTarget target = entry.getKey();
final Set<File> files = entry.getValue();
if (!files.isEmpty()) {
final SourceToOutputMapping mapping = context.getProjectDescriptor().dataManager.getSourceToOutputMap(target);
for (File srcFile : files) {
mapping.setOutputs(srcFile.getPath(), Collections.emptyList());
}
}
}
}
try {
BUILDER_CATEGORY_LOOP:
for (BuilderCategory category : BuilderCategory.values()) {
final List<ModuleLevelBuilder> builders = myBuilderRegistry.getBuilders(category);
if (category == BuilderCategory.CLASS_POST_PROCESSOR) {
// ensure changes from instrumenters are visible to class post-processors
saveInstrumentedClasses(outputConsumer);
}
if (builders.isEmpty()) {
continue;
}
try {
for (ModuleLevelBuilder builder : builders) {
processDeletedPaths(context, chunk.getTargets());
long start = System.nanoTime();
int processedSourcesBefore = outputConsumer.getNumberOfProcessedSources();
final ModuleLevelBuilder.ExitCode buildResult = builder.build(context, chunk, dirtyFilesHolder, outputConsumer);
storeBuilderStatistics(builder, System.nanoTime() - start,
outputConsumer.getNumberOfProcessedSources() - processedSourcesBefore);
doneSomething |= (buildResult != ModuleLevelBuilder.ExitCode.NOTHING_DONE);
if (buildResult == ModuleLevelBuilder.ExitCode.ABORT) {
throw new StopBuildException("Builder " + builder.getPresentableName() + " requested build stop");
}
context.checkCanceled();
if (buildResult == ModuleLevelBuilder.ExitCode.ADDITIONAL_PASS_REQUIRED) {
nextPassRequired = true;
}
else if (buildResult == ModuleLevelBuilder.ExitCode.CHUNK_REBUILD_REQUIRED) {
if (!rebuildFromScratchRequested && !JavaBuilderUtil.isForcedRecompilationAllJavaModules(context)) {
notifyChunkRebuildRequested(context, chunk, builder);
// allow rebuild from scratch only once per chunk
rebuildFromScratchRequested = true;
try {
// forcibly mark all files in the chunk dirty
context.getProjectDescriptor().fsState.clearContextRoundData(context);
FSOperations.markDirty(context, CompilationRound.NEXT, chunk, null);
// reverting to the beginning
myTargetsProcessed -= (buildersPassed * modulesInChunk) / stageCount;
stageCount = myTotalModuleLevelBuilderCount;
buildersPassed = 0;
nextPassRequired = true;
outputConsumer.clear();
break BUILDER_CATEGORY_LOOP;
}
catch (Exception e) {
throw new ProjectBuildException(e);
}
}
else {
LOG.debug("Builder " + builder.getPresentableName() + " requested second chunk rebuild");
}
}
buildersPassed++;
updateDoneFraction(context, modulesInChunk / (stageCount));
}
}
finally {
final boolean moreToCompile = JavaBuilderUtil.updateMappingsOnRoundCompletion(context, dirtyFilesHolder, chunk);
if (moreToCompile) {
nextPassRequired = true;
}
if (nextPassRequired && !rebuildFromScratchRequested) {
// recalculate basis: redistribute already-earned progress over the enlarged stage count
myTargetsProcessed -= (buildersPassed * modulesInChunk) / stageCount;
stageCount += myTotalModuleLevelBuilderCount;
myTargetsProcessed += (buildersPassed * modulesInChunk) / stageCount;
}
}
}
}
finally {
JavaBuilderUtil.clearDataOnRoundCompletion(context);
}
}
while (nextPassRequired);
}
finally {
// flush instrumented classes, publish generated-file events, and notify builders regardless of outcome
saveInstrumentedClasses(outputConsumer);
outputConsumer.fireFileGeneratedEvents();
outputConsumer.clear();
for (BuilderCategory category : BuilderCategory.values()) {
for (ModuleLevelBuilder builder : myBuilderRegistry.getBuilders(category)) {
builder.chunkBuildFinished(context, chunk);
}
}
}
return doneSomething;
}
/**
 * Logs and reports to the user that {@code builder} requested a full rebuild of {@code chunk}.
 * When some target of the chunk is only partially covered by the compile scope, the message is
 * downgraded from JPS_INFO to INFO and extended with a hint to build the whole project.
 */
private static void notifyChunkRebuildRequested(CompileContext context, ModuleChunk chunk, ModuleLevelBuilder builder) {
  final String baseMessage = "Builder \"" + builder.getPresentableName() + "\" requested rebuild of module chunk \"" + chunk.getName() + "\"";
  LOG.info(baseMessage);
  final CompileScope scope = context.getScope();
  boolean wholeChunkAffected = true;
  for (ModuleBuildTarget target : chunk.getTargets()) {
    if (!scope.isWholeTargetAffected(target)) {
      wholeChunkAffected = false;
      break;
    }
  }
  if (wholeChunkAffected) {
    context.processMessage(new CompilerMessage("", BuildMessage.Kind.JPS_INFO, baseMessage));
  }
  else {
    context.processMessage(new CompilerMessage("", BuildMessage.Kind.INFO,
        baseMessage + ".\nConsider building whole project or rebuilding the module."));
  }
}
/** Accumulates per-builder elapsed time (nanoseconds) and processed-sources counters for build statistics. */
private void storeBuilderStatistics(Builder builder, long elapsedTime, int processedFiles) {
  final AtomicLong elapsed = myElapsedTimeNanosByBuilder.computeIfAbsent(builder, b -> new AtomicLong());
  elapsed.addAndGet(elapsedTime);
  final AtomicInteger sourceCount = myNumberOfSourcesProcessedByBuilder.computeIfAbsent(builder, b -> new AtomicInteger());
  sourceCount.addAndGet(processedFiles);
}
/** Persists every compiled class whose in-memory bytecode was modified (e.g. by instrumentation). */
private static void saveInstrumentedClasses(ChunkBuildOutputConsumerImpl outputConsumer) throws IOException {
  for (CompiledClass compiledClass : outputConsumer.getCompiledClasses().values()) {
    if (!compiledClass.isDirty()) {
      continue;
    }
    compiledClass.save();
  }
}
/**
 * Wraps a shared {@link CompileContext} in a dynamic proxy that gives each parallel chunk its
 * own user-data overlay: non-global keys are read from / written to a local holder first and
 * fall back to the delegate on read miss, while {@code GlobalContextKey}s pass straight through.
 * ERROR messages are additionally mirrored into the local holder's ERRORS_DETECTED flag.
 * {@link ProjectBuildException}s thrown by the delegate are unwrapped from the reflective call.
 */
private static CompileContext createContextWrapper(final CompileContext delegate) {
final ClassLoader loader = delegate.getClass().getClassLoader();
final UserDataHolderBase localDataHolder = new UserDataHolderBase();
// keys explicitly set to null locally; reads of these must NOT fall through to the delegate
final Set<Object> deletedKeysSet = ContainerUtil.newConcurrentSet();
final Class<UserDataHolder> dataHolderInterface = UserDataHolder.class;
final Class<MessageHandler> messageHandlerInterface = MessageHandler.class;
return (CompileContext)Proxy.newProxyInstance(loader, new Class[]{CompileContext.class}, new InvocationHandler() {
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
final Class<?> declaringClass = method.getDeclaringClass();
if (dataHolderInterface.equals(declaringClass)) {
final Object firstArgument = args[0];
if (!(firstArgument instanceof GlobalContextKey)) {
// putUserData(key, value) has two args; getUserData(key) has one
final boolean isWriteOperation = args.length == 2 /*&& void.class.equals(method.getReturnType())*/;
if (isWriteOperation) {
if (args[1] == null) {
deletedKeysSet.add(firstArgument);
}
else {
deletedKeysSet.remove(firstArgument);
}
}
else {
if (deletedKeysSet.contains(firstArgument)) {
// the key was locally cleared: report absence instead of the delegate's value
return null;
}
}
final Object result = method.invoke(localDataHolder, args);
if (isWriteOperation || result != null) {
// writes stay local; reads fall through to the delegate only on local miss
return result;
}
}
}
else if (messageHandlerInterface.equals(declaringClass)) {
final BuildMessage msg = (BuildMessage)args[0];
if (msg.getKind() == BuildMessage.Kind.ERROR) {
Utils.ERRORS_DETECTED_KEY.set(localDataHolder, Boolean.TRUE);
}
}
try {
return method.invoke(delegate, args);
}
catch (InvocationTargetException e) {
// unwrap build failures so callers see the original exception type
final Throwable targetEx = e.getTargetException();
if (targetEx instanceof ProjectBuildException) {
throw targetEx;
}
throw e;
}
}
});
}
}
| |
package com.anders.reride.data;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.HashMap;
import java.util.Map;
/**
* Helper class for creating ReRide data JSON representations
*
* JSON schema:
* {
"title": "State",
"type": "object",
"properties": {
"recorded": {
"type": "object",
"properties": {
"id": { "type" : "string" },
"time": { "type" : "string" },
"longitude": { "type" : "string" },
"latitude": { "type" : "string" },
"sensors": {
"type": "array",
"items": {
"type": "object",
"properties": {
"name": { "type" : "string" },
"characteristic": { "type" : "string" },
"value": { "type" : "string" },
"unit": { "type" : "string" }
},
"required": ["name", "value", "unit"]
},
"minItems": 1,
"uniqueItems": true
}
},
"required": ["id", "time", "sensors"]
}
}
}
example:
{
"state": {
"recorded": {
"id": "10",
"time": "20170426123500",
"sensors": [
{
"name": "flex sensor",
"characteristic": "Apparent Wind Direction",
"value": "45",
"unit": "degrees"
}
],
"longitude": "12.324534",
"latitude": "55.123124"
}
}
}
*/
public class ReRideJSON {
    private static final String TAG = ReRideJSON.class.getSimpleName();

    /** Root object: {"state": {...}}. */
    private JSONObject mState;
    /** Wrapper object: {"recorded": {...}}. */
    private JSONObject mRecorded;
    /** Rider payload: id, time, position and the sensor array. */
    private JSONObject mRiderProperties;
    /** Array of sensor descriptor objects. */
    private JSONArray mSensors;
    /** Maps a sensor's characteristic UUID to its index in {@link #mSensors}. */
    private Map<String, Integer> mSensorIndex;
    /** Next free slot in {@link #mSensors}; kept equal to mSensors.length(). */
    private int mCurrentIndex;

    static final String LATITUDE = "latitude";
    static final String LONGITUDE = "longitude";
    static final String TIME = "time";
    static final String SENSORS = "sensors";
    static final String SENSOR_NAME = "name";
    static final String CHARACTERISTIC = "characteristic";
    static final String SENSOR_UNIT = "unit";
    static final String ID = "id";
    static final String RECORDED = "recorded";
    static final String STATE = "state";
    static final String VALUE = "value";

    private static ReRideJSON mReRideJSON;

    /**
     * Builds the fixed JSON skeleton {"state":{"recorded":{"id":..., "sensors":[]}}}.
     *
     * @param id rider id stored under the "id" key
     */
    private ReRideJSON(String id) {
        mCurrentIndex = 0;
        mState = new JSONObject();
        mRecorded = new JSONObject();
        mRiderProperties = new JSONObject();
        mSensors = new JSONArray();
        mSensorIndex = new HashMap<>();
        try {
            mRiderProperties.put(SENSORS, mSensors);
            mRiderProperties.put(ID, id);
            mRecorded.put(RECORDED, mRiderProperties);
            mState.put(STATE, mRecorded);
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }

    /**
     * Appends a sensor descriptor to the sensors array and indexes it by its
     * characteristic UUID.
     *
     * @param sensorName         human readable sensor name
     * @param unit               measurement unit
     * @param characteristicUuid BLE characteristic UUID used as lookup key
     * @return true on success, false if a JSON error occurred
     */
    public boolean addSensor(String sensorName, String unit, String characteristicUuid) {
        try {
            JSONObject sensor = new JSONObject();
            sensor.put(SENSOR_NAME, sensorName);
            sensor.put(SENSOR_UNIT, unit);
            sensor.put(CHARACTERISTIC, characteristicUuid);
            mSensorIndex.put(characteristicUuid, mCurrentIndex);
            mSensors.put(mCurrentIndex, sensor);
            mCurrentIndex++;
            return true;
        } catch (JSONException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Singleton getter.
     * NOTE(review): the id parameter is only honored on the first call; later calls
     * return the existing instance with the original id — confirm this is intended.
     *
     * @return Singleton reference for this builder class
     */
    public static ReRideJSON getInstance(String id) {
        if (mReRideJSON == null) {
            mReRideJSON = new ReRideJSON(id);
        }
        return mReRideJSON;
    }

    /**
     * Stores time and position on the rider object.
     * NOTE(review): the schema documented above declares longitude/latitude as strings,
     * but doubles are stored here — confirm the consumer tolerates numeric values.
     *
     * @return true on success, false if a JSON error occurred
     */
    public boolean putRiderProperties(String time, double lon, double lat) {
        try {
            mRiderProperties.put(TIME, time);
            mRiderProperties.put(LONGITUDE, lon);
            mRiderProperties.put(LATITUDE, lat);
            return true;
        } catch (JSONException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Updates the "value" field of the sensor registered for the given UUID.
     *
     * @return true on success, false for an unknown UUID or a JSON error
     */
    public boolean putSensorValue(String characteristicUuid, String value) {
        // Guard against unknown UUIDs: the original unboxed a null Integer and crashed
        // with an NPE instead of reporting failure through the return value.
        Integer index = mSensorIndex.get(characteristicUuid);
        if (index == null) {
            Log.d(TAG, "Unknown characteristic: " + characteristicUuid);
            return false;
        }
        try {
            JSONObject sensor = mSensors.getJSONObject(index);
            sensor.put(VALUE, value);
            return true;
        } catch (JSONException e) {
            e.printStackTrace();
            return false;
        }
    }

    public JSONObject getState() {
        return mState;
    }

    public JSONObject getRiderProperties() {
        return mRiderProperties;
    }

    /**
     * Removes every sensor whose "name" matches the given name.
     */
    public void removeSensor(String sensorName) {
        try {
            // Iterate backwards: removing while walking forward shifts the remaining
            // elements down and skips the element right after each removal.
            for (int i = mSensors.length() - 1; i >= 0; i--) {
                JSONObject sensor = mSensors.getJSONObject(i);
                if (sensor.getString(SENSOR_NAME).equals(sensorName)) {
                    removeSensor(i, sensor.getString(CHARACTERISTIC));
                }
            }
        } catch (JSONException e) {
            Log.d(TAG, e.getMessage());
        }
    }

    /**
     * Removes the sensor at the given array index and keeps the UUID index map
     * consistent with the shifted array positions.
     */
    private void removeSensor(int index, String characteristic) {
        mSensors.remove(index);
        mSensorIndex.remove(characteristic);
        // Removal shifts every later element down by one; re-point their index
        // entries, otherwise putSensorValue() would update the wrong sensor.
        for (Map.Entry<String, Integer> entry : mSensorIndex.entrySet()) {
            if (entry.getValue() > index) {
                entry.setValue(entry.getValue() - 1);
            }
        }
        mCurrentIndex--;
    }

    /**
     * Removes all sensors and strips the rider/state objects of their keys.
     */
    public void clear() {
        // Drain from the front until empty; the original forward for-loop skipped
        // every other element because removal shifts indices.
        while (mSensors.length() > 0) {
            try {
                removeSensor(0, mSensors.getJSONObject(0).getString(CHARACTERISTIC));
            } catch (JSONException e) {
                Log.d(TAG, e.getMessage());
                // Malformed entry: drop it directly so the loop is guaranteed to terminate.
                mSensors.remove(0);
                mCurrentIndex--;
            }
        }
        mState.remove(STATE);
        mRecorded.remove(RECORDED);
        mRiderProperties.remove(ID);
        mRiderProperties.remove(SENSORS);
        mRiderProperties.remove(TIME);
        mRiderProperties.remove(LONGITUDE);
        mRiderProperties.remove(LATITUDE);
    }
}
| |
/**
* NovaForge(TM) is a web-based forge offering a Collaborative Development and
* Project Management Environment.
*
* Copyright (C) 2007-2012 BULL SAS
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*/
package org.vaadin.addon.itemlayout.demo.client.ui;
import org.vaadin.addon.itemlayout.event.ItemClickEvent;
import org.vaadin.addon.itemlayout.event.ItemClickListener;
import org.vaadin.addon.itemlayout.grid.ItemGrid;
import org.vaadin.addon.itemlayout.horizontal.ItemHorizontal;
import org.vaadin.addon.itemlayout.layout.AbstractItemLayout;
import org.vaadin.addon.itemlayout.vertical.ItemVertical;
import com.vaadin.annotations.Theme;
import com.vaadin.data.Property.ValueChangeEvent;
import com.vaadin.data.Property.ValueChangeListener;
import com.vaadin.data.util.IndexedContainer;
import com.vaadin.server.VaadinRequest;
import com.vaadin.ui.Button;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.Component;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.Notification;
import com.vaadin.ui.Notification.Type;
import com.vaadin.ui.OptionGroup;
import com.vaadin.ui.UI;
import com.vaadin.ui.VerticalLayout;
/**
 * Demo UI for the ItemLayout add-on, showcasing the ItemGrid, ItemHorizontal
 * and ItemVertical components backed by a shared container.
 *
 * @author Guillaume Lamirand
 */
@Theme("itemlayoutdemo")
public class ItemLayoutDemoUI extends UI
{
  /**
   * Serial version id
   */
  private static final long serialVersionUID = -7764280046700991233L;

  /** Shared data source backing all three example layouts. */
  private final IndexedContainer container = new IndexedContainer();

  /**
   * {@inheritDoc}
   */
  @Override
  protected void init(final VaadinRequest request)
  {
    // Demo data
    buildDefaultContainer();
    // Main layout
    final VerticalLayout layout = new VerticalLayout();
    layout.setMargin(true);
    layout.addComponent(new Label("Demo for ItemLayout addon"));
    final Component demoButtons = initActionButtons();
    layout.addComponent(demoButtons);
    final Component itemGrid = initItemGridExamples();
    layout.addComponent(itemGrid);
    final Component itemHorizontal = initItemHorizontalExamples();
    layout.addComponent(itemHorizontal);
    final Component itemVertical = initItemVerticalExamples();
    layout.addComponent(itemVertical);
    setContent(layout);
  }

  /**
   * Builds the "add item"/"remove last item" buttons that mutate the shared container.
   *
   * @return the buttons layout
   */
  private Component initActionButtons()
  {
    final HorizontalLayout buttonsLayout = new HorizontalLayout();
    final Button addItemButton = new Button("Add an item");
    final Button removeItemButton = new Button("Remove last item");
    addItemButton.addClickListener(new Button.ClickListener()
    {
      @Override
      public void buttonClick(final ClickEvent event)
      {
        // Item ids are the container indices, so the next id is the current size.
        final int currentIndex = container.size();
        container.addItem(currentIndex);
        container.getContainerProperty(currentIndex, "caption").setValue("Item " + currentIndex);
        container.getContainerProperty(currentIndex, "description").setValue("Item at index " + currentIndex);
      }
    });
    removeItemButton.addClickListener(new Button.ClickListener()
    {
      @Override
      public void buttonClick(final ClickEvent event)
      {
        final int lastIndex = container.size() - 1;
        // Guard against clicking the button when the container is already empty.
        if (lastIndex >= 0)
        {
          container.removeItem(lastIndex);
        }
      }
    });
    buttonsLayout.addComponent(addItemButton);
    buttonsLayout.addComponent(removeItemButton);
    return buttonsLayout;
  }

  /**
   * Builds the ItemGrid example with its selection-mode option group.
   *
   * @return the example layout
   */
  private Component initItemGridExamples()
  {
    // Layout to show examples
    final HorizontalLayout example = new HorizontalLayout();
    example.addComponent(new Label("Demo for ItemGrid"));
    final HorizontalLayout horizontalLayout = new HorizontalLayout();
    horizontalLayout.setMargin(true);
    example.addComponent(horizontalLayout);
    final ItemGrid itemGrid = buildDefaultItemGrid();
    itemGrid.addItemClickListener(buildClickListener());
    final OptionGroup sample = buildSelectableOption();
    sample.addValueChangeListener(buildValueChangeListener(itemGrid));
    horizontalLayout.addComponent(sample);
    horizontalLayout.addComponent(itemGrid);
    return example;
  }

  /**
   * Builds the ItemVertical example with its selection-mode option group.
   *
   * @return the example layout
   */
  private Component initItemVerticalExamples()
  {
    // Layout to show examples
    final VerticalLayout example = new VerticalLayout();
    example.setHeight(100, Unit.PERCENTAGE);
    example.addComponent(new Label("Demo for ItemVertical"));
    final HorizontalLayout horizontalLayout = new HorizontalLayout();
    horizontalLayout.setHeight(100, Unit.PERCENTAGE);
    horizontalLayout.setMargin(true);
    example.addComponent(horizontalLayout);
    final ItemVertical itemVertical = buildDefaultItemVertical();
    itemVertical.addItemClickListener(buildClickListener());
    final OptionGroup sample = buildSelectableOption();
    sample.addValueChangeListener(buildValueChangeListener(itemVertical));
    horizontalLayout.addComponent(sample);
    horizontalLayout.addComponent(itemVertical);
    return example;
  }

  /**
   * Builds the ItemHorizontal example with its selection-mode option group.
   *
   * @return the example layout
   */
  private Component initItemHorizontalExamples()
  {
    // Layout to show examples
    final VerticalLayout example = new VerticalLayout();
    example.addComponent(new Label("Demo for ItemHorizontal"));
    final HorizontalLayout horizontalLayout = new HorizontalLayout();
    horizontalLayout.setWidth(100, Unit.PERCENTAGE);
    horizontalLayout.setMargin(true);
    example.addComponent(horizontalLayout);
    final ItemHorizontal itemHorizontal = buildDefaultItemHorizontal();
    itemHorizontal.addItemClickListener(buildClickListener());
    final OptionGroup sample = buildSelectableOption();
    sample.addValueChangeListener(buildValueChangeListener(itemHorizontal));
    horizontalLayout.addComponent(sample);
    horizontalLayout.addComponent(itemHorizontal);
    horizontalLayout.setExpandRatio(itemHorizontal, 1f);
    return example;
  }

  /** @return a 5-column ItemGrid bound to the shared container */
  private ItemGrid buildDefaultItemGrid()
  {
    final ItemGrid item = new ItemGrid();
    item.setColumns(5);
    item.setContainerDataSource(container);
    return item;
  }

  /** @return an ItemHorizontal bound to the shared container */
  private ItemHorizontal buildDefaultItemHorizontal()
  {
    final ItemHorizontal item = new ItemHorizontal();
    item.setContainerDataSource(container);
    return item;
  }

  /** @return an ItemVertical bound to the shared container */
  private ItemVertical buildDefaultItemVertical()
  {
    final ItemVertical item = new ItemVertical();
    item.setContainerDataSource(container);
    return item;
  }

  /**
   * Fills the shared container with 25 demo items having "caption" and
   * "description" properties.
   *
   * @return the populated container
   */
  private IndexedContainer buildDefaultContainer()
  {
    container.addContainerProperty("caption", String.class, null);
    container.addContainerProperty("description", String.class, null);
    for (int i = 0; i < 25; i++)
    {
      container.addItem(i);
      container.getContainerProperty(i, "caption").setValue("Item " + i);
      container.getContainerProperty(i, "description").setValue("Item at index " + i);
    }
    return container;
  }

  /**
   * Builds the option group offering the three selection modes
   * (1 = disabled, 2 = single, 3 = multi).
   *
   * @return the option group, with mode 1 preselected
   */
  private OptionGroup buildSelectableOption()
  {
    final OptionGroup sample = new OptionGroup("Select a selectable mode");
    sample.addItem(1);
    sample.setItemCaption(1, "Disable");
    sample.addItem(2);
    sample.setItemCaption(2, "Single selection");
    sample.addItem(3);
    sample.setItemCaption(3, "Multi selection");
    sample.select(1);
    sample.setNullSelectionAllowed(false);
    sample.setHtmlContentAllowed(true);
    sample.setImmediate(true);
    return sample;
  }

  /** @return a listener that shows the clicked item id in a tray notification */
  private ItemClickListener buildClickListener()
  {
    return new ItemClickListener()
    {
      @Override
      public void onItemClick(final ItemClickEvent pEvent)
      {
        Notification.show("Item clicked:", "" + pEvent.getItemId(), Type.TRAY_NOTIFICATION);
      }
    };
  }

  /**
   * Builds a listener that applies the selection mode chosen in the option group
   * to the given layout.
   *
   * @param pLayout the layout to reconfigure
   * @return the value-change listener
   */
  private ValueChangeListener buildValueChangeListener(final AbstractItemLayout pLayout)
  {
    return new ValueChangeListener()
    {
      @Override
      public void valueChange(final ValueChangeEvent event)
      {
        // parseInt avoids the pointless Integer boxing of Integer.valueOf().
        final int selectedMode = Integer.parseInt(event.getProperty().getValue().toString());
        switch (selectedMode)
        {
          case 1:
            pLayout.setSelectable(false);
            break;
          case 2:
            pLayout.setSelectable(true);
            pLayout.setMultiSelect(false);
            break;
          case 3:
            pLayout.setSelectable(true);
            pLayout.setMultiSelect(true);
            break;
          default:
            // Unknown mode: leave the layout unchanged (matches original behavior).
            break;
        }
      }
    };
  }
}
| |
package com.mikepenz.materialdrawer;
import android.content.Context;
import android.os.Build;
import android.support.v4.widget.DrawerLayout;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import com.mikepenz.materialdrawer.adapter.BaseDrawerAdapter;
import com.mikepenz.materialdrawer.holder.ColorHolder;
import com.mikepenz.materialdrawer.model.ContainerDrawerItem;
import com.mikepenz.materialdrawer.model.PrimaryDrawerItem;
import com.mikepenz.materialdrawer.model.SecondaryDrawerItem;
import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem;
import com.mikepenz.materialdrawer.model.interfaces.Selectable;
import com.mikepenz.materialdrawer.util.DrawerUIUtils;
import com.mikepenz.materialize.util.UIUtils;
/**
* Created by mikepenz on 23.05.15.
*/
class DrawerUtils {
    /**
     * helper method to handle the onClick of the footer
     *
     * @param drawer      the drawer builder holding state and views
     * @param drawerItem  the clicked sticky footer item (may be null)
     * @param v           the clicked footer child view
     * @param fireOnClick true if we should call the listener, false if not, null to not call the listener and not close the drawer
     */
    public static void onFooterDrawerItemClick(DrawerBuilder drawer, IDrawerItem drawerItem, View v, Boolean fireOnClick) {
        // instanceof already rejects null, so no separate null check is needed.
        boolean checkable = !(drawerItem instanceof Selectable && !((Selectable) drawerItem).isSelectable());
        if (checkable) {
            drawer.resetStickyFooterSelection();
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                v.setActivated(true);
            }
            v.setSelected(true);
            //remove the selection in the list
            drawer.getAdapter().handleSelection(null, -1);
            //set currentSelection to -1 because we selected a stickyFooter element
            drawer.mCurrentSelection = -1;
            //find the position of the clicked footer item
            if (drawer.mStickyFooterView != null && drawer.mStickyFooterView instanceof LinearLayout) {
                LinearLayout footer = (LinearLayout) drawer.mStickyFooterView;
                for (int i = 0; i < footer.getChildCount(); i++) {
                    if (footer.getChildAt(i) == v) {
                        drawer.mCurrentStickyFooterSelection = i;
                        break;
                    }
                }
            }
        }
        if (fireOnClick != null) {
            boolean consumed = false;
            if (fireOnClick && drawer.mOnDrawerItemClickListener != null) {
                consumed = drawer.mOnDrawerItemClickListener.onItemClick(v, -1, drawerItem);
            }
            if (!consumed) {
                //close the drawer after click
                drawer.closeDrawerDelayed();
            }
        }
    }

    /**
     * helper method to set the selection in the list
     *
     * @param drawer      the drawer builder
     * @param position    adapter position to select (-1 clears)
     * @param fireOnClick whether to notify the click listener
     * @return true if the click listener consumed the event
     */
    public static boolean setRecyclerViewSelection(DrawerBuilder drawer, int position, boolean fireOnClick) {
        return setRecyclerViewSelection(drawer, position, fireOnClick, null);
    }

    /**
     * helper method to set the selection in the list
     *
     * @param drawer      the drawer builder
     * @param position    adapter position to select (-1 clears)
     * @param fireOnClick whether to notify the click listener
     * @param drawerItem  item passed on to the click listener (may be null)
     * @return true if the click listener consumed the event
     */
    public static boolean setRecyclerViewSelection(DrawerBuilder drawer, int position, boolean fireOnClick, IDrawerItem drawerItem) {
        if (position >= -1) {
            //predefine selection (should be the first element
            if (drawer.mAdapter != null) {
                drawer.resetStickyFooterSelection();
                drawer.mAdapter.handleSelection(null, position);
                drawer.mCurrentSelection = position;
                // selecting a list element always clears the sticky footer selection
                drawer.mCurrentStickyFooterSelection = -1;
            }
            if (fireOnClick && drawer.mOnDrawerItemClickListener != null) {
                return drawer.mOnDrawerItemClickListener.onItemClick(null, position, drawerItem);
            }
        }
        return false;
    }

    /**
     * helper method to set the selection of the footer
     *
     * @param drawer      the drawer builder
     * @param position    child index inside the sticky footer
     * @param fireOnClick forwarded to {@link #onFooterDrawerItemClick}
     */
    public static void setStickyFooterSelection(DrawerBuilder drawer, int position, Boolean fireOnClick) {
        if (position > -1) {
            if (drawer.mStickyFooterView != null && drawer.mStickyFooterView instanceof LinearLayout) {
                LinearLayout footer = (LinearLayout) drawer.mStickyFooterView;
                // position > -1 already guarantees position >= 0; only the upper bound needs checking.
                if (footer.getChildCount() > position) {
                    IDrawerItem drawerItem = (IDrawerItem) footer.getChildAt(position).getTag();
                    onFooterDrawerItemClick(drawer, drawerItem, footer.getChildAt(position), fireOnClick);
                }
            }
        }
    }

    /**
     * calculates the position of an drawerItem. searching by it's identifier
     *
     * @param drawer     the drawer builder
     * @param identifier the identifier to search for (negative identifiers are never found)
     * @return the adapter position, or -1 if not found
     */
    public static int getPositionByIdentifier(DrawerBuilder drawer, int identifier) {
        if (identifier >= 0) {
            BaseDrawerAdapter adapter = drawer.getAdapter();
            for (int i = 0; i < adapter.getItemCount(); i++) {
                if (adapter.getItem(i).getIdentifier() == identifier) {
                    return i;
                }
            }
        }
        return -1;
    }

    /**
     * calculates the position of an drawerItem inside the footer. searching by it's identifier
     *
     * @param drawer     the drawer builder
     * @param identifier the identifier to search for (negative identifiers are never found)
     * @return the footer child index, or -1 if not found
     */
    public static int getStickyFooterPositionByIdentifier(DrawerBuilder drawer, int identifier) {
        if (identifier >= 0) {
            if (drawer.mStickyFooterView != null && drawer.mStickyFooterView instanceof LinearLayout) {
                LinearLayout footer = (LinearLayout) drawer.mStickyFooterView;
                for (int i = 0; i < footer.getChildCount(); i++) {
                    // footer children are tagged with their IDrawerItem in fillStickyDrawerItemFooter
                    Object o = footer.getChildAt(i).getTag();
                    if (o != null && o instanceof IDrawerItem && ((IDrawerItem) o).getIdentifier() == identifier) {
                        return i;
                    }
                }
            }
        }
        return -1;
    }

    /**
     * helper method to handle the headerView
     *
     * @param drawer the drawer builder whose header views get wired up
     */
    public static void handleHeaderView(DrawerBuilder drawer) {
        //use the AccountHeader if set
        if (drawer.mAccountHeader != null) {
            if (drawer.mAccountHeaderSticky) {
                drawer.mStickyHeaderView = drawer.mAccountHeader.getView();
            } else {
                drawer.mHeaderView = drawer.mAccountHeader.getView();
                drawer.mHeaderDivider = drawer.mAccountHeader.mAccountHeaderBuilder.mDividerBelowHeader;
            }
        }
        //sticky header view
        if (drawer.mStickyHeaderView != null) {
            //add the sticky header view and align it to the top
            RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.WRAP_CONTENT);
            layoutParams.addRule(RelativeLayout.ALIGN_PARENT_TOP, 1);
            drawer.mStickyHeaderView.setId(R.id.material_drawer_sticky_header);
            drawer.mSliderLayout.addView(drawer.mStickyHeaderView, 0, layoutParams);
            //now align the recyclerView below the stickyFooterView ;)
            RelativeLayout.LayoutParams layoutParamsListView = (RelativeLayout.LayoutParams) drawer.mRecyclerView.getLayoutParams();
            layoutParamsListView.addRule(RelativeLayout.BELOW, R.id.material_drawer_sticky_header);
            drawer.mRecyclerView.setLayoutParams(layoutParamsListView);
            //set a background color or the elevation will not work
            drawer.mStickyHeaderView.setBackgroundColor(UIUtils.getThemeColorFromAttrOrRes(drawer.mActivity, R.attr.material_drawer_background, R.color.material_drawer_background));
            //add a shadow
            if (Build.VERSION.SDK_INT >= 21) {
                drawer.mStickyHeaderView.setElevation(UIUtils.convertDpToPixel(4, drawer.mActivity));
            } else {
                // pre-Lollipop has no elevation: emulate the shadow with a drawable strip
                View view = new View(drawer.mActivity);
                view.setBackgroundResource(R.drawable.material_drawer_shadow_bottom);
                drawer.mSliderLayout.addView(view, RelativeLayout.LayoutParams.MATCH_PARENT, (int) UIUtils.convertDpToPixel(4, drawer.mActivity));
                //now align the shadow below the stickyHeader ;)
                RelativeLayout.LayoutParams lps = (RelativeLayout.LayoutParams) view.getLayoutParams();
                lps.addRule(RelativeLayout.BELOW, R.id.material_drawer_sticky_header);
                view.setLayoutParams(lps);
            }
            //remove the padding of the recyclerView again we have the header on top of it
            drawer.mRecyclerView.setPadding(0, 0, 0, 0);
        }
        // set the header (do this before the setAdapter because some devices will crash else
        if (drawer.mHeaderView != null) {
            if (drawer.mRecyclerView == null) {
                throw new RuntimeException("can't use a headerView without a recyclerView");
            }
            if (drawer.mHeaderDivider) {
                drawer.getAdapter().addHeaderDrawerItems(new ContainerDrawerItem().withView(drawer.mHeaderView).withViewPosition(ContainerDrawerItem.Position.TOP));
            } else {
                drawer.getAdapter().addHeaderDrawerItems(new ContainerDrawerItem().withView(drawer.mHeaderView).withViewPosition(ContainerDrawerItem.Position.NONE));
            }
            //set the padding on the top to 0
            drawer.mRecyclerView.setPadding(drawer.mRecyclerView.getPaddingLeft(), 0, drawer.mRecyclerView.getPaddingRight(), drawer.mRecyclerView.getPaddingBottom());
        }
    }

    /**
     * small helper to rebuild the FooterView
     *
     * @param drawer the drawer builder whose sticky footer is refilled or created
     */
    public static void rebuildStickyFooterView(final DrawerBuilder drawer) {
        if (drawer.mSliderLayout != null) {
            if (drawer.mStickyFooterView != null) {
                drawer.mStickyFooterView.removeAllViews();
                //fill the footer with items
                com.mikepenz.materialdrawer.DrawerUtils.fillStickyDrawerItemFooter(drawer, drawer.mStickyFooterView, new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        IDrawerItem drawerItem = (IDrawerItem) v.getTag();
                        com.mikepenz.materialdrawer.DrawerUtils.onFooterDrawerItemClick(drawer, drawerItem, v, true);
                    }
                });
                drawer.mStickyFooterView.setVisibility(View.VISIBLE);
            } else {
                //there was no footer yet. now just create one
                DrawerUtils.handleFooterView(drawer, new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        IDrawerItem drawerItem = (IDrawerItem) v.getTag();
                        DrawerUtils.onFooterDrawerItemClick(drawer, drawerItem, v, true);
                    }
                });
            }
            // restore the previously selected footer item without notifying the listener
            setStickyFooterSelection(drawer, drawer.mCurrentStickyFooterSelection, false);
        }
    }

    /**
     * helper method to handle the footerView
     *
     * @param drawer          the drawer builder
     * @param onClickListener click handler attached to every sticky footer item
     */
    public static void handleFooterView(DrawerBuilder drawer, View.OnClickListener onClickListener) {
        Context ctx = drawer.mSliderLayout.getContext();
        //use the StickyDrawerItems if set
        if (drawer.mStickyDrawerItems != null && drawer.mStickyDrawerItems.size() > 0) {
            drawer.mStickyFooterView = DrawerUtils.buildStickyDrawerItemFooter(ctx, drawer, onClickListener);
        }
        //sticky footer view
        if (drawer.mStickyFooterView != null) {
            //add the sticky footer view and align it to the bottom
            RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.WRAP_CONTENT);
            layoutParams.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM, 1);
            drawer.mStickyFooterView.setId(R.id.material_drawer_sticky_footer);
            drawer.mSliderLayout.addView(drawer.mStickyFooterView, layoutParams);
            // keep the footer above a translucent navigation bar
            if ((drawer.mTranslucentNavigationBar || drawer.mFullscreen) && Build.VERSION.SDK_INT >= 19) {
                drawer.mStickyFooterView.setPadding(0, 0, 0, UIUtils.getNavigationBarHeight(ctx));
            }
            //now align the recyclerView above the stickyFooterView ;)
            RelativeLayout.LayoutParams layoutParamsListView = (RelativeLayout.LayoutParams) drawer.mRecyclerView.getLayoutParams();
            layoutParamsListView.addRule(RelativeLayout.ABOVE, R.id.material_drawer_sticky_footer);
            drawer.mRecyclerView.setLayoutParams(layoutParamsListView);
            //handle shadow on top of the sticky footer
            if (drawer.mStickyFooterShadow) {
                drawer.mStickyFooterShadowView = new View(ctx);
                drawer.mStickyFooterShadowView.setBackgroundResource(R.drawable.material_drawer_shadow_top);
                drawer.mSliderLayout.addView(drawer.mStickyFooterShadowView, RelativeLayout.LayoutParams.MATCH_PARENT, (int) UIUtils.convertDpToPixel(4, ctx));
                //now align the shadow above the sticky footer ;)
                RelativeLayout.LayoutParams lps = (RelativeLayout.LayoutParams) drawer.mStickyFooterShadowView.getLayoutParams();
                lps.addRule(RelativeLayout.ABOVE, R.id.material_drawer_sticky_footer);
                drawer.mStickyFooterShadowView.setLayoutParams(lps);
            }
            //set the padding of the recyclerView again as we have the footer below it
            drawer.mRecyclerView.setPadding(drawer.mRecyclerView.getPaddingLeft(), drawer.mRecyclerView.getPaddingTop(), drawer.mRecyclerView.getPaddingRight(), ctx.getResources().getDimensionPixelSize(R.dimen.material_drawer_padding));
        }
        // set the footer (do this before the setAdapter because some devices will crash else
        if (drawer.mFooterView != null) {
            if (drawer.mRecyclerView == null) {
                throw new RuntimeException("can't use a footerView without a recyclerView");
            }
            if (drawer.mFooterDivider) {
                drawer.getAdapter().addFooterDrawerItems(new ContainerDrawerItem().withView(drawer.mFooterView).withViewPosition(ContainerDrawerItem.Position.BOTTOM));
            } else {
                drawer.getAdapter().addFooterDrawerItems(new ContainerDrawerItem().withView(drawer.mFooterView).withViewPosition(ContainerDrawerItem.Position.NONE));
            }
        }
    }

    /**
     * build the sticky footer item view
     *
     * @param ctx             context used to create the views
     * @param drawer          the drawer builder
     * @param onClickListener click handler attached to every footer item
     * @return the filled footer container
     */
    public static ViewGroup buildStickyDrawerItemFooter(Context ctx, DrawerBuilder drawer, View.OnClickListener onClickListener) {
        //create the container view
        final LinearLayout linearLayout = new LinearLayout(ctx);
        linearLayout.setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        linearLayout.setOrientation(LinearLayout.VERTICAL);
        //set the background color to the drawer background color (if it has alpha the shadow won't be visible)
        linearLayout.setBackgroundColor(UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_background, R.color.material_drawer_background));
        //create the divider
        if (drawer.mStickyFooterDivider) {
            LinearLayout divider = new LinearLayout(ctx);
            LinearLayout.LayoutParams dividerParams = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
            //remove bottomMargin --> See inbox it also has no margin here
            //dividerParams.bottomMargin = mActivity.getResources().getDimensionPixelSize(R.dimen.material_drawer_padding);
            divider.setMinimumHeight((int) UIUtils.convertDpToPixel(1, ctx));
            divider.setOrientation(LinearLayout.VERTICAL);
            divider.setBackgroundColor(UIUtils.getThemeColorFromAttrOrRes(ctx, R.attr.material_drawer_divider, R.color.material_drawer_divider));
            linearLayout.addView(divider, dividerParams);
        }
        fillStickyDrawerItemFooter(drawer, linearLayout, onClickListener);
        return linearLayout;
    }

    /**
     * helper method to fill the sticky footer with it's elements
     *
     * @param drawer          the drawer builder providing the sticky items
     * @param container       the footer container to fill
     * @param onClickListener click handler attached to each enabled item
     */
    public static void fillStickyDrawerItemFooter(DrawerBuilder drawer, ViewGroup container, View.OnClickListener onClickListener) {
        //add all drawer items
        for (IDrawerItem drawerItem : drawer.mStickyDrawerItems) {
            //get the selected_color
            int selected_color = UIUtils.getThemeColorFromAttrOrRes(container.getContext(), R.attr.material_drawer_selected, R.color.material_drawer_selected);
            if (drawerItem instanceof PrimaryDrawerItem) {
                selected_color = ColorHolder.color(((PrimaryDrawerItem) drawerItem).getSelectedColor(), container.getContext(), R.attr.material_drawer_selected, R.color.material_drawer_selected);
            } else if (drawerItem instanceof SecondaryDrawerItem) {
                selected_color = ColorHolder.color(((SecondaryDrawerItem) drawerItem).getSelectedColor(), container.getContext(), R.attr.material_drawer_selected, R.color.material_drawer_selected);
            }
            View view = drawerItem.generateView(container.getContext(), container);
            // the tag is read back in the click handlers and the identifier lookups
            view.setTag(drawerItem);
            if (drawerItem.isEnabled()) {
                UIUtils.setBackground(view, DrawerUIUtils.getSelectableBackground(container.getContext(), selected_color));
                view.setOnClickListener(onClickListener);
            }
            container.addView(view);
            //for android API 17 --> Padding not applied via xml
            DrawerUIUtils.setDrawerVerticalPadding(view);
        }
        //and really. don't ask about this. it won't set the padding if i don't set the padding for the container
        container.setPadding(0, 0, 0, 0);
    }

    /**
     * helper to extend the layoutParams of the drawer
     *
     * @param drawer the drawer builder (gravity, width, status bar settings)
     * @param params the params to adjust; may be null
     * @return the adjusted params (same instance as passed in)
     */
    public static DrawerLayout.LayoutParams processDrawerLayoutParams(DrawerBuilder drawer, DrawerLayout.LayoutParams params) {
        if (params != null) {
            if (drawer.mDrawerGravity != null && (drawer.mDrawerGravity == Gravity.RIGHT || drawer.mDrawerGravity == Gravity.END)) {
                // end-gravity drawer: the gap belongs on the start (left in LTR) side
                params.rightMargin = 0;
                if (Build.VERSION.SDK_INT >= 17) {
                    params.setMarginEnd(0);
                }
                params.leftMargin = drawer.mActivity.getResources().getDimensionPixelSize(R.dimen.material_drawer_margin);
                if (Build.VERSION.SDK_INT >= 17) {
                    // BUGFIX: was setMarginEnd(margin), which overwrote the setMarginEnd(0)
                    // above and left the RTL start margin unset; the mirror of leftMargin
                    // is the start margin.
                    params.setMarginStart(drawer.mActivity.getResources().getDimensionPixelSize(R.dimen.material_drawer_margin));
                }
            }
            if (drawer.mDisplayBelowStatusBar != null && drawer.mDisplayBelowStatusBar && Build.VERSION.SDK_INT >= 19) {
                params.topMargin = UIUtils.getStatusBarHeight(drawer.mActivity, true);
            }
            // explicit width wins; otherwise fall back to the material-spec optimal width
            if (drawer.mDrawerWidth > -1) {
                params.width = drawer.mDrawerWidth;
            } else {
                params.width = DrawerUIUtils.getOptimalDrawerWidth(drawer.mActivity);
            }
        }
        return params;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.view;
import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
import org.apache.ambari.view.PersistenceException;
import org.apache.ambari.view.ViewInstanceDefinition;
import org.apache.ambari.view.migration.ViewDataMigrationContext;
import org.apache.ambari.view.migration.ViewDataMigrationException;
import org.apache.ambari.view.migration.ViewDataMigrator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
/**
* Helper class for view data migration.
*/
public class ViewDataMigrationUtility {
/**
* The logger.
*/
protected final static Logger LOG = LoggerFactory.getLogger(ViewDataMigrationUtility.class);
/**
* The View Registry.
*/
private ViewRegistry viewRegistry;
/**
 * Constructor.
 *
 * @param viewRegistry the view registry; used later to copy user privileges
 *                     from the source to the target instance during migration
 */
public ViewDataMigrationUtility(ViewRegistry viewRegistry) {
  this.viewRegistry = viewRegistry;
}
/**
 * Migrates data from source to target instance: runs the migrator's before-hook,
 * migrates each persistence entity, migrates instance data, runs the after-hook,
 * and finally copies user privileges and records the origin version.
 *
 * @param targetInstanceDefinition target instance entity
 * @param sourceInstanceDefinition source instance entity
 * @param migrateOnce cancel if the target instance already contains data
 *
 * @throws ViewDataMigrationException when view does not support migration or an error during migration occurs.
 */
public void migrateData(ViewInstanceEntity targetInstanceDefinition, ViewInstanceEntity sourceInstanceDefinition,
                        boolean migrateOnce)
    throws ViewDataMigrationException {
  ViewDataMigrationContextImpl migrationContext = getViewDataMigrationContext(targetInstanceDefinition, sourceInstanceDefinition);
  if (migrateOnce) {
    // Silently abort (no exception) when the target already holds data.
    if (!isTargetEmpty(migrationContext)) {
      LOG.error("Migration canceled because target instance is not empty");
      return;
    }
  }
  ViewDataMigrator dataMigrator = getViewDataMigrator(targetInstanceDefinition, migrationContext);
  LOG.debug("Running before-migration hook");
  // The migrator can veto the whole migration from its before-hook.
  if (!dataMigrator.beforeMigration()) {
    String msg = "View " + targetInstanceDefinition.getInstanceName() + " canceled the migration process";
    LOG.error(msg);
    throw new ViewDataMigrationException(msg);
  }
  Map<String, Class> originClasses = migrationContext.getOriginEntityClasses();
  Map<String, Class> currentClasses = migrationContext.getCurrentEntityClasses();
  // Migrate every origin entity; entities missing in the target are still offered
  // to the migrator with a null target class so it can decide what to do.
  for (Map.Entry<String, Class> originEntity : originClasses.entrySet()) {
    LOG.debug("Migrating persistence entity " + originEntity.getKey());
    if (currentClasses.containsKey(originEntity.getKey())) {
      Class entity = currentClasses.get(originEntity.getKey());
      dataMigrator.migrateEntity(originEntity.getValue(), entity);
    } else {
      LOG.debug("Entity " + originEntity.getKey() + " not found in target view");
      dataMigrator.migrateEntity(originEntity.getValue(), null);
    }
  }
  LOG.debug("Migrating instance data");
  dataMigrator.migrateInstanceData();
  LOG.debug("Running after-migration hook");
  dataMigrator.afterMigration();
  LOG.debug("Copying user permissions");
  viewRegistry.copyPrivileges(sourceInstanceDefinition, targetInstanceDefinition);
  // Record which version the target was upgraded from, then release the context.
  migrationContext.putCurrentInstanceData("upgrade", "upgradedFrom", sourceInstanceDefinition.getViewEntity().getVersion());
  migrationContext.closeMigration();
}
private boolean isTargetEmpty(ViewDataMigrationContext migrationContext) {
if (migrationContext.getCurrentInstanceDataByUser().size() > 0) {
return false;
}
try {
for (Class entity : migrationContext.getCurrentEntityClasses().values()) {
if (migrationContext.getCurrentDataStore().findAll(entity, null).size() > 0) {
return false;
}
}
} catch (PersistenceException e) {
ViewInstanceDefinition current = migrationContext.getCurrentInstanceDefinition();
LOG.error("Persistence exception while check if instance is empty: " +
current.getViewDefinition().getViewName() + "{" + current.getViewDefinition().getVersion() + "}/" +
current.getInstanceName(), e);
}
return true;
}
/**
* Create the data migration context for DataMigrator to access data of current
* and origin instances.
* @param targetInstanceDefinition target instance definition
* @param sourceInstanceDefinition source instance definition
* @return data migration context
*/
protected ViewDataMigrationContextImpl getViewDataMigrationContext(ViewInstanceEntity targetInstanceDefinition,
ViewInstanceEntity sourceInstanceDefinition) {
return new ViewDataMigrationContextImpl(sourceInstanceDefinition, targetInstanceDefinition);
}
/**
* Get the migrator instance for view instance with injected migration context.
* If versions of instances are same returns copy-all-data migrator.
* If versions are different, loads the migrator from the current view (view should
* contain ViewDataMigrator implementation, otherwise exception will be raised).
*
* @param currentInstanceDefinition the current view instance definition
* @param migrationContext the migration context to inject into migrator
* @throws ViewDataMigrationException if view does not support migration
* @return the data migration instance
*/
protected ViewDataMigrator getViewDataMigrator(ViewInstanceEntity currentInstanceDefinition,
ViewDataMigrationContextImpl migrationContext)
throws ViewDataMigrationException {
ViewDataMigrator dataMigrator;
LOG.info("Migrating " + currentInstanceDefinition.getInstanceName() +
" data from " + migrationContext.getOriginDataVersion() + " to " +
migrationContext.getCurrentDataVersion() + " data version");
if (migrationContext.getOriginDataVersion() == migrationContext.getCurrentDataVersion()) {
LOG.info("Instances of same version, copying all data.");
dataMigrator = new CopyAllDataMigrator(migrationContext);
} else {
try {
dataMigrator = currentInstanceDefinition.getDataMigrator(migrationContext);
if (dataMigrator == null) {
throw new ViewDataMigrationException("A view instance " +
currentInstanceDefinition.getInstanceName() + " does not support migration.");
}
LOG.debug("Data migrator loaded");
} catch (ClassNotFoundException e) {
String msg = "Caught exception loading data migrator of " + currentInstanceDefinition.getInstanceName();
LOG.error(msg, e);
throw new RuntimeException(msg);
}
}
return dataMigrator;
}
/**
* The data migrator implementation that copies all data without modification.
* Used to copy data between instances of same version.
*/
public static class CopyAllDataMigrator implements ViewDataMigrator {
private ViewDataMigrationContext migrationContext;
public CopyAllDataMigrator(ViewDataMigrationContext migrationContext) {
this.migrationContext = migrationContext;
}
@Override
public boolean beforeMigration() {
return true;
}
@Override
public void afterMigration() {
}
@Override
public void migrateEntity(Class originEntityClass, Class currentEntityClass)
throws ViewDataMigrationException {
if (currentEntityClass == null) {
return;
}
migrationContext.copyAllObjects(originEntityClass, currentEntityClass);
}
@Override
public void migrateInstanceData() {
migrationContext.copyAllInstanceData();
}
}
}
| |
/*
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.crazydog.util.spring;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
 * Miscellaneous utility methods for number conversion and parsing.
 * Mainly for internal use within the framework; consider Apache's
 * Commons Lang for a more comprehensive suite of string utilities.
 *
 * @author Juergen Hoeller
 * @author Rob Harrop
 * @since 1.1.2
 */
public abstract class NumberUtils {

    private static final BigInteger LONG_MIN = BigInteger.valueOf(Long.MIN_VALUE);

    private static final BigInteger LONG_MAX = BigInteger.valueOf(Long.MAX_VALUE);

    /**
     * Standard number types (all immutable):
     * Byte, Short, Integer, Long, BigInteger, Float, Double, BigDecimal.
     */
    public static final Set<Class<?>> STANDARD_NUMBER_TYPES;

    static {
        Set<Class<?>> numberTypes = new HashSet<Class<?>>(8);
        numberTypes.add(Byte.class);
        numberTypes.add(Short.class);
        numberTypes.add(Integer.class);
        numberTypes.add(Long.class);
        numberTypes.add(BigInteger.class);
        numberTypes.add(Float.class);
        numberTypes.add(Double.class);
        numberTypes.add(BigDecimal.class);
        STANDARD_NUMBER_TYPES = Collections.unmodifiableSet(numberTypes);
    }

    /**
     * Convert the given number into an instance of the given target class.
     *
     * @param number the number to convert
     * @param targetClass the target class to convert to
     * @return the converted number
     * @throws IllegalArgumentException if the target class is not supported
     * (i.e. not a standard Number subclass as included in the JDK), or if the
     * value overflows the target type's range
     * @see Byte
     * @see Short
     * @see Integer
     * @see Long
     * @see BigInteger
     * @see Float
     * @see Double
     * @see BigDecimal
     */
    @SuppressWarnings("unchecked")
    public static <T extends Number> T convertNumberToTargetClass(Number number, Class<T> targetClass)
            throws IllegalArgumentException {
        org.springframework.util.Assert.notNull(number, "Number must not be null");
        org.springframework.util.Assert.notNull(targetClass, "Target class must not be null");
        if (targetClass.isInstance(number)) {
            return (T) number;
        } else if (Byte.class == targetClass) {
            long value = number.longValue();
            if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
                raiseOverflowException(number, targetClass);
            }
            // valueOf over the deprecated boxed constructor (allows caching)
            return (T) Byte.valueOf(number.byteValue());
        } else if (Short.class == targetClass) {
            long value = number.longValue();
            if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
                raiseOverflowException(number, targetClass);
            }
            return (T) Short.valueOf(number.shortValue());
        } else if (Integer.class == targetClass) {
            long value = number.longValue();
            if (value < Integer.MIN_VALUE || value > Integer.MAX_VALUE) {
                raiseOverflowException(number, targetClass);
            }
            return (T) Integer.valueOf(number.intValue());
        } else if (Long.class == targetClass) {
            BigInteger bigInt = null;
            if (number instanceof BigInteger) {
                bigInt = (BigInteger) number;
            } else if (number instanceof BigDecimal) {
                bigInt = ((BigDecimal) number).toBigInteger();
            }
            // Effectively analogous to JDK 8's BigInteger.longValueExact()
            if (bigInt != null && (bigInt.compareTo(LONG_MIN) < 0 || bigInt.compareTo(LONG_MAX) > 0)) {
                raiseOverflowException(number, targetClass);
            }
            return (T) Long.valueOf(number.longValue());
        } else if (BigInteger.class == targetClass) {
            if (number instanceof BigDecimal) {
                // do not lose precision - use BigDecimal's own conversion
                return (T) ((BigDecimal) number).toBigInteger();
            } else {
                // original value is not a Big* number - use standard long conversion
                return (T) BigInteger.valueOf(number.longValue());
            }
        } else if (Float.class == targetClass) {
            return (T) Float.valueOf(number.floatValue());
        } else if (Double.class == targetClass) {
            return (T) Double.valueOf(number.doubleValue());
        } else if (BigDecimal.class == targetClass) {
            // always use BigDecimal(String) here to avoid unpredictability of BigDecimal(double)
            // (see BigDecimal javadoc for details)
            return (T) new BigDecimal(number.toString());
        } else {
            throw new IllegalArgumentException("Could not convert number [" + number + "] of type [" +
                    number.getClass().getName() + "] to unknown target class [" + targetClass.getName() + "]");
        }
    }

    /**
     * Raise an overflow exception for the given number and target class.
     *
     * @param number the number we tried to convert
     * @param targetClass the target class we tried to convert to
     */
    private static void raiseOverflowException(Number number, Class<?> targetClass) {
        throw new IllegalArgumentException("Could not convert number [" + number + "] of type [" +
                number.getClass().getName() + "] to target class [" + targetClass.getName() + "]: overflow");
    }

    /**
     * Parse the given text into a number instance of the given target class,
     * using the corresponding {@code decode} / {@code valueOf} methods.
     * <p>Trims the input {@code String} before attempting to parse the number.
     * Supports numbers in hex format (with leading "0x", "0X" or "#") as well.
     *
     * @param text the text to convert
     * @param targetClass the target class to parse into
     * @return the parsed number
     * @throws IllegalArgumentException if the target class is not supported
     * (i.e. not a standard Number subclass as included in the JDK)
     * @see Byte#decode
     * @see Short#decode
     * @see Integer#decode
     * @see Long#decode
     * @see #decodeBigInteger(String)
     * @see Float#valueOf
     * @see Double#valueOf
     * @see BigDecimal#BigDecimal(String)
     */
    @SuppressWarnings("unchecked")
    public static <T extends Number> T parseNumber(String text, Class<T> targetClass) {
        org.springframework.util.Assert.notNull(text, "Text must not be null");
        org.springframework.util.Assert.notNull(targetClass, "Target class must not be null");
        String trimmed = StringUtils.trimAllWhitespace(text);
        if (Byte.class == targetClass) {
            return (T) (isHexNumber(trimmed) ? Byte.decode(trimmed) : Byte.valueOf(trimmed));
        } else if (Short.class == targetClass) {
            return (T) (isHexNumber(trimmed) ? Short.decode(trimmed) : Short.valueOf(trimmed));
        } else if (Integer.class == targetClass) {
            return (T) (isHexNumber(trimmed) ? Integer.decode(trimmed) : Integer.valueOf(trimmed));
        } else if (Long.class == targetClass) {
            return (T) (isHexNumber(trimmed) ? Long.decode(trimmed) : Long.valueOf(trimmed));
        } else if (BigInteger.class == targetClass) {
            return (T) (isHexNumber(trimmed) ? decodeBigInteger(trimmed) : new BigInteger(trimmed));
        } else if (Float.class == targetClass) {
            return (T) Float.valueOf(trimmed);
        } else if (Double.class == targetClass) {
            return (T) Double.valueOf(trimmed);
        } else if (BigDecimal.class == targetClass || Number.class == targetClass) {
            return (T) new BigDecimal(trimmed);
        } else {
            throw new IllegalArgumentException(
                    "Cannot convert String [" + text + "] to target class [" + targetClass.getName() + "]");
        }
    }

    /**
     * Parse the given text into a number instance of the given target class,
     * using the given NumberFormat. Trims the input {@code String}
     * before attempting to parse the number.
     *
     * @param text the text to convert
     * @param targetClass the target class to parse into
     * @param numberFormat the NumberFormat to use for parsing (if {@code null},
     * this method falls back to {@code parseNumber(String, Class)})
     * @return the parsed number
     * @throws IllegalArgumentException if the target class is not supported
     * (i.e. not a standard Number subclass as included in the JDK)
     * @see NumberFormat#parse
     * @see #convertNumberToTargetClass
     * @see #parseNumber(String, Class)
     */
    public static <T extends Number> T parseNumber(String text, Class<T> targetClass, NumberFormat numberFormat) {
        if (numberFormat != null) {
            org.springframework.util.Assert.notNull(text, "Text must not be null");
            org.springframework.util.Assert.notNull(targetClass, "Target class must not be null");
            DecimalFormat decimalFormat = null;
            boolean resetBigDecimal = false;
            if (numberFormat instanceof DecimalFormat) {
                decimalFormat = (DecimalFormat) numberFormat;
                // Temporarily enable BigDecimal parsing for lossless results;
                // restored in the finally block below.
                if (BigDecimal.class == targetClass && !decimalFormat.isParseBigDecimal()) {
                    decimalFormat.setParseBigDecimal(true);
                    resetBigDecimal = true;
                }
            }
            try {
                Number number = numberFormat.parse(StringUtils.trimAllWhitespace(text));
                return convertNumberToTargetClass(number, targetClass);
            } catch (ParseException ex) {
                // Chain the cause so callers retain the parse position/diagnostics.
                throw new IllegalArgumentException("Could not parse number: " + ex.getMessage(), ex);
            } finally {
                if (resetBigDecimal) {
                    decimalFormat.setParseBigDecimal(false);
                }
            }
        } else {
            return parseNumber(text, targetClass);
        }
    }

    /**
     * Determine whether the given value String indicates a hex number, i.e. needs to be
     * passed into {@code Integer.decode} instead of {@code Integer.valueOf} (etc).
     */
    private static boolean isHexNumber(String value) {
        int index = (value.startsWith("-") ? 1 : 0);
        return (value.startsWith("0x", index) || value.startsWith("0X", index) || value.startsWith("#", index));
    }

    /**
     * Decode a {@link BigInteger} from a {@link String} value.
     * Supports decimal, hex and octal notation.
     *
     * @see BigInteger#BigInteger(String, int)
     */
    private static BigInteger decodeBigInteger(String value) {
        int radix = 10;
        int index = 0;
        boolean negative = false;
        // Handle minus sign, if present.
        if (value.startsWith("-")) {
            negative = true;
            index++;
        }
        // Handle radix specifier, if present.
        if (value.startsWith("0x", index) || value.startsWith("0X", index)) {
            index += 2;
            radix = 16;
        } else if (value.startsWith("#", index)) {
            index++;
            radix = 16;
        } else if (value.startsWith("0", index) && value.length() > 1 + index) {
            index++;
            radix = 8;
        }
        BigInteger result = new BigInteger(value.substring(index), radix);
        return (negative ? result.negate() : result);
    }
}
| |
// Copyright 2010 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.enterprise.adaptor.secmgr.modules;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.enterprise.adaptor.secmgr.common.AuthzStatus;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import javax.annotation.concurrent.Immutable;
import javax.annotation.concurrent.NotThreadSafe;
/**
 * Instances of this class are the return value from an authorization process.
 * At the moment this is just some fancy constructors that create a delegated
 * map.
 */
@Immutable
public final class AuthzResult implements Map<String, AuthzStatus> {
  // Backing immutable map of resource -> authorization status.
  private final ImmutableMap<String, AuthzStatus> statusMap;
  // Resources whose PERMIT/DENY determination came from the user cache.
  private final ImmutableSet<String> cacheResults;
  private AuthzResult(ImmutableMap<String, AuthzStatus> statusMap,
      ImmutableSet<String> determinedByCache) {
    this.statusMap = statusMap;
    this.cacheResults = determinedByCache;
  }
  /**
   * Get an {@link AuthzResult} builder that has a minimum set of resources.
   * This builder generates an AuthzResult instance that's guaranteed to have a
   * status for every resource in {@code resources}; those that aren't
   * explicitly set in the builder will map to {@link
   * AuthzStatus#INDETERMINATE}.
   *
   * @param resources The minimum set of resources to use for the returned
   *     builder.
   * @return An {@link AuthzResult} builder.
   */
  public static Builder builder(Iterable<String> resources) {
    Preconditions.checkNotNull(resources);
    return new Builder(resources);
  }
  /**
   * Get an {@link AuthzResult} builder that expands on a previous result.
   *
   * @param map The previous result.
   * @return An {@link AuthzResult} builder.
   */
  public static Builder builder(Map<String, AuthzStatus> map) {
    Preconditions.checkNotNull(map);
    return new Builder(map);
  }
  /**
   * Get an {@link AuthzResult} builder that has no minimum set of resources;
   * this is equivalent to passing an empty set of resources as an argument.
   *
   * @return An {@link AuthzResult} builder.
   */
  public static Builder builder() {
    ImmutableList<String> resources = ImmutableList.of();
    return new Builder(resources);
  }
  /**
   * Find out if a resource's determination (PERMIT or DENY) came from
   * UserCacheConnector.
   *
   * @return True if resource's determination came from UserCacheConnector.
   */
  public boolean wasDeterminedByCache(String resource) {
    return cacheResults.contains(resource);
  }
  /**
   * A builder class for AuthzResult instances.
   */
  @NotThreadSafe
  public static final class Builder {
    private final Map<String, AuthzStatus> map;
    private final ImmutableSet.Builder<String> determinedByCache; // PERMIT or DENY.
    private Builder(Iterable<String> resources) {
      map = Maps.newHashMap();
      // Pre-seed every requested resource so the built result always has a
      // status for it, even when never explicitly set.
      for (String resource : resources) {
        map.put(resource, AuthzStatus.INDETERMINATE);
      }
      determinedByCache = ImmutableSet.builder();
    }
    private Builder(Map<String, AuthzStatus> map) {
      this.map = Maps.newHashMap(map);
      determinedByCache = ImmutableSet.builder();
    }
    /**
     * Add an entry to the builder.
     *
     * @param resource The resource for the new entry.
     * @param status The status for the new entry.
     * @return The builder, for convenience.
     */
    public Builder put(String resource, AuthzStatus status) {
      Preconditions.checkNotNull(resource);
      Preconditions.checkNotNull(status);
      map.put(resource, status);
      return this;
    }
    /**
     * Add PERMIT or DENY from cache to the builder.
     *
     * @param resource The resource for the new entry.
     * @param status The status for the new entry.
     * @return The builder, for convenience.
     */
    public Builder putStatusFromCache(String resource, AuthzStatus status) {
      Preconditions.checkNotNull(resource);
      Preconditions.checkNotNull(status);
      map.put(resource, status);
      // Only definitive statuses (PERMIT/DENY) are attributed to the cache.
      if (AuthzStatus.INDETERMINATE != status) {
        determinedByCache.add(resource);
      }
      return this;
    }
    /**
     * Add an entry to the builder.
     *
     * @param entry The entry to add.
     * @return The builder, for convenience.
     */
    public Builder add(Map.Entry<String, AuthzStatus> entry) {
      Preconditions.checkNotNull(entry);
      return put(entry.getKey(), entry.getValue());
    }
    /**
     * Add a map of entries to the builder.
     *
     * @param map The map to add.
     * @return The builder, for convenience.
     */
    public Builder addAll(Map<String, AuthzStatus> map) {
      Preconditions.checkNotNull(map);
      for (Map.Entry<String, AuthzStatus> entry : map.entrySet()) {
        put(entry.getKey(), entry.getValue());
      }
      return this;
    }
    /**
     * @return The {@link AuthzResult} instance that's been constructed by this
     *     builder. Calling this method more than once on the same builder
     *     instance is not guaranteed to work.
     */
    public AuthzResult build() {
      return new AuthzResult(ImmutableMap.copyOf(map), determinedByCache.build());
    }
  }
  /**
   * Make an {@link AuthzResult} in which all of the resources have
   * indeterminate status.
   *
   * @param resources The resources in the result.
   * @return An AuthzResult instance.
   */
  public static AuthzResult makeIndeterminate(Iterable<String> resources) {
    return builder(resources).build();
  }
  /**
   * @return An empty {@link AuthzResult}.
   */
  public static AuthzResult of() {
    return builder().build();
  }
  /**
   * Get an {@link AuthzResult} with a single entry.
   *
   * @param r1 The single resource.
   * @param s1 The status for that resource.
   * @return The result instance.
   */
  public static AuthzResult of(String r1, AuthzStatus s1) {
    return AuthzResult.builder()
        .put(r1, s1)
        .build();
  }
  /**
   * Get an {@link AuthzResult} with two entries.
   *
   * @param r1 The first resource.
   * @param s1 The status for the first resource.
   * @param r2 The second resource.
   * @param s2 The status for the second resource.
   * @return The result instance.
   */
  public static AuthzResult of(String r1, AuthzStatus s1, String r2, AuthzStatus s2) {
    return AuthzResult.builder()
        .put(r1, s1)
        .put(r2, s2)
        .build();
  }
  /**
   * Get an {@link AuthzResult} with three entries.
   *
   * @param r1 The first resource.
   * @param s1 The status for the first resource.
   * @param r2 The second resource.
   * @param s2 The status for the second resource.
   * @param r3 The third resource.
   * @param s3 The status for the third resource.
   * @return The result instance.
   */
  public static AuthzResult of(String r1, AuthzStatus s1, String r2, AuthzStatus s2,
      String r3, AuthzStatus s3) {
    return AuthzResult.builder()
        .put(r1, s1)
        .put(r2, s2)
        .put(r3, s3)
        .build();
  }
  /**
   * Get an {@link AuthzResult} with four entries.
   *
   * @param r1 The first resource.
   * @param s1 The status for the first resource.
   * @param r2 The second resource.
   * @param s2 The status for the second resource.
   * @param r3 The third resource.
   * @param s3 The status for the third resource.
   * @param r4 The fourth resource.
   * @param s4 The status for the fourth resource.
   * @return The result instance.
   */
  public static AuthzResult of(String r1, AuthzStatus s1, String r2, AuthzStatus s2,
      String r3, AuthzStatus s3, String r4, AuthzStatus s4) {
    return AuthzResult.builder()
        .put(r1, s1)
        .put(r2, s2)
        .put(r3, s3)
        .put(r4, s4)
        .build();
  }
  // ---- Map interface: read operations delegate to the backing ImmutableMap.
  @Override
  public Set<String> keySet() {
    return statusMap.keySet();
  }
  @Override
  public Set<Map.Entry<String, AuthzStatus>> entrySet() {
    return statusMap.entrySet();
  }
  @Override
  public int size() {
    return statusMap.size();
  }
  @Override
  public AuthzStatus get(Object resource) {
    return statusMap.get(resource);
  }
  @Override
  public boolean containsKey(Object key) {
    return statusMap.containsKey(key);
  }
  @Override
  public boolean containsValue(Object value) {
    return statusMap.containsValue(value);
  }
  @Override
  public boolean isEmpty() {
    return statusMap.isEmpty();
  }
  @Override
  public Collection<AuthzStatus> values() {
    return statusMap.values();
  }
  // Equality delegates to the backing map: equal to any Map with the same
  // entries, per the Map.equals contract.
  @Override
  public boolean equals(Object object) {
    return statusMap.equals(object);
  }
  @Override
  public int hashCode() {
    return statusMap.hashCode();
  }
  // ---- Map interface: mutators are unsupported; this class is immutable.
  @Override
  public void clear() {
    throw new UnsupportedOperationException();
  }
  @Override
  public AuthzStatus put(String key, AuthzStatus value) {
    throw new UnsupportedOperationException();
  }
  @Override
  public void putAll(Map<? extends String, ? extends AuthzStatus> m) {
    throw new UnsupportedOperationException();
  }
  @Override
  public AuthzStatus remove(Object key) {
    throw new UnsupportedOperationException();
  }
  // Multi-line rendering with resources sorted for deterministic output.
  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder();
    boolean firstElt = true;
    builder.append("{");
    for (String resource : Ordering.natural().sortedCopy(statusMap.keySet())) {
      if (!firstElt) {
        builder.append(',');
      } else {
        firstElt = false;
      }
      builder.append("\n  \"");
      builder.append(resource);
      builder.append("\", ");
      builder.append(statusMap.get(resource));
    }
    builder.append("\n}");
    return builder.toString();
  }
}
| |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest.history;
import static io.restassured.RestAssured.given;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_BATCH_ID;
import static org.camunda.bpm.engine.rest.util.JsonPathUtil.from;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import javax.ws.rs.core.Response.Status;
import io.restassured.http.ContentType;
import org.camunda.bpm.engine.BadUserRequestException;
import org.camunda.bpm.engine.HistoryService;
import org.camunda.bpm.engine.batch.Batch;
import org.camunda.bpm.engine.batch.history.HistoricBatch;
import org.camunda.bpm.engine.batch.history.HistoricBatchQuery;
import org.camunda.bpm.engine.history.SetRemovalTimeSelectModeForHistoricBatchesBuilder;
import org.camunda.bpm.engine.history.SetRemovalTimeToHistoricBatchesBuilder;
import org.camunda.bpm.engine.impl.calendar.DateTimeUtil;
import org.camunda.bpm.engine.rest.AbstractRestServiceTest;
import org.camunda.bpm.engine.rest.dto.batch.BatchDto;
import org.camunda.bpm.engine.rest.dto.history.batch.HistoricBatchDto;
import org.camunda.bpm.engine.rest.exception.InvalidRequestException;
import org.camunda.bpm.engine.rest.helper.MockProvider;
import org.camunda.bpm.engine.rest.util.JsonPathUtil;
import org.camunda.bpm.engine.rest.util.container.TestContainerRule;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.InOrder;
import io.restassured.response.Response;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
public class HistoricBatchRestServiceInteractionTest extends AbstractRestServiceTest {
@ClassRule
public static TestContainerRule rule = new TestContainerRule();
protected static final String HISTORIC_BATCH_RESOURCE_URL = TEST_RESOURCE_ROOT_PATH + "/history/batch";
protected static final String HISTORIC_SINGLE_BATCH_RESOURCE_URL = HISTORIC_BATCH_RESOURCE_URL + "/{id}";
protected static final String SET_REMOVAL_TIME_HISTORIC_BATCHES_ASYNC_URL = HISTORIC_BATCH_RESOURCE_URL + "/set-removal-time";
protected HistoryService historyServiceMock;
protected HistoricBatchQuery queryMock;
@Before
public void setUpHistoricBatchQueryMock() {
HistoricBatch historicBatchMock = MockProvider.createMockHistoricBatch();
queryMock = mock(HistoricBatchQuery.class);
when(queryMock.batchId(eq(MockProvider.EXAMPLE_BATCH_ID))).thenReturn(queryMock);
when(queryMock.singleResult()).thenReturn(historicBatchMock);
historyServiceMock = mock(HistoryService.class);
when(historyServiceMock.createHistoricBatchQuery()).thenReturn(queryMock);
when(processEngine.getHistoryService()).thenReturn(historyServiceMock);
}
@Test
public void testGetHistoricBatch() {
Response response = given()
.pathParam("id", MockProvider.EXAMPLE_BATCH_ID)
.then().expect()
.statusCode(Status.OK.getStatusCode())
.when()
.get(HISTORIC_SINGLE_BATCH_RESOURCE_URL);
InOrder inOrder = inOrder(queryMock);
inOrder.verify(queryMock).batchId(MockProvider.EXAMPLE_BATCH_ID);
inOrder.verify(queryMock).singleResult();
inOrder.verifyNoMoreInteractions();
verifyHistoricBatchJson(response.asString());
}
@Test
public void testGetNonExistingHistoricBatch() {
String nonExistingId = MockProvider.NON_EXISTING_ID;
HistoricBatchQuery historicBatchQuery = mock(HistoricBatchQuery.class);
when(historicBatchQuery.batchId(nonExistingId)).thenReturn(historicBatchQuery);
when(historicBatchQuery.singleResult()).thenReturn(null);
when(historyServiceMock.createHistoricBatchQuery()).thenReturn(historicBatchQuery);
given()
.pathParam("id", nonExistingId)
.then().expect()
.statusCode(Status.NOT_FOUND.getStatusCode())
.body("type", equalTo(InvalidRequestException.class.getSimpleName()))
.body("message", equalTo("Historic batch with id '" + nonExistingId + "' does not exist"))
.when()
.get(HISTORIC_SINGLE_BATCH_RESOURCE_URL);
}
@Test
public void deleteHistoricBatch() {
given()
.pathParam("id", MockProvider.EXAMPLE_BATCH_ID)
.then().expect()
.statusCode(Status.NO_CONTENT.getStatusCode())
.when()
.delete(HISTORIC_SINGLE_BATCH_RESOURCE_URL);
verify(historyServiceMock).deleteHistoricBatch(eq(MockProvider.EXAMPLE_BATCH_ID));
verifyNoMoreInteractions(historyServiceMock);
}
@Test
public void deleteNonExistingHistoricBatch() {
String nonExistingId = MockProvider.NON_EXISTING_ID;
doThrow(new BadUserRequestException("Historic batch for id '" + nonExistingId + "' cannot be found"))
.when(historyServiceMock).deleteHistoricBatch(eq(nonExistingId));
given()
.pathParam("id", nonExistingId)
.then().expect()
.statusCode(Status.BAD_REQUEST.getStatusCode())
.body("type", equalTo(InvalidRequestException.class.getSimpleName()))
.body("message", equalTo("Unable to delete historic batch with id '" + nonExistingId + "'"))
.when()
.delete(HISTORIC_SINGLE_BATCH_RESOURCE_URL);
}
@Test
public void shouldSetRemovalTime_ByIds() {
SetRemovalTimeSelectModeForHistoricBatchesBuilder builderMock =
mock(SetRemovalTimeSelectModeForHistoricBatchesBuilder.class, RETURNS_DEEP_STUBS);
when(historyServiceMock.setRemovalTimeToHistoricBatches()).thenReturn(builderMock);
Map<String, Object> payload = new HashMap<>();
payload.put("historicBatchIds", Collections.singletonList(EXAMPLE_BATCH_ID));
payload.put("calculatedRemovalTime", true);
given()
.contentType(ContentType.JSON)
.body(payload)
.then()
.expect().statusCode(Status.OK.getStatusCode())
.when()
.post(SET_REMOVAL_TIME_HISTORIC_BATCHES_ASYNC_URL);
SetRemovalTimeSelectModeForHistoricBatchesBuilder builder =
historyServiceMock.setRemovalTimeToHistoricBatches();
verify(builder).calculatedRemovalTime();
verify(builder).byIds(EXAMPLE_BATCH_ID);
verify(builder).byQuery(null);
verify(builder).executeAsync();
verifyNoMoreInteractions(builder);
}
@Test
public void shouldSetRemovalTime_ByQuery() {
SetRemovalTimeSelectModeForHistoricBatchesBuilder builderMock =
mock(SetRemovalTimeSelectModeForHistoricBatchesBuilder.class, RETURNS_DEEP_STUBS);
when(historyServiceMock.setRemovalTimeToHistoricBatches()).thenReturn(builderMock);
Map<String, Object> payload = new HashMap<>();
payload.put("calculatedRemovalTime", true);
payload.put("historicBatchQuery", Collections.singletonMap("batchId", EXAMPLE_BATCH_ID));
given()
.contentType(ContentType.JSON)
.body(payload)
.then()
.expect().statusCode(Status.OK.getStatusCode())
.when()
.post(SET_REMOVAL_TIME_HISTORIC_BATCHES_ASYNC_URL);
SetRemovalTimeSelectModeForHistoricBatchesBuilder builder =
historyServiceMock.setRemovalTimeToHistoricBatches();
verify(queryMock).batchId(EXAMPLE_BATCH_ID);
verify(builder).calculatedRemovalTime();
verify(builder).byIds(null);
verify(builder).byQuery(queryMock);
verify(builder).executeAsync();
verifyNoMoreInteractions(builder);
}
@Test
public void shouldSetRemovalTime_Absolute() {
Date removalTime = new Date();
SetRemovalTimeSelectModeForHistoricBatchesBuilder builderMock =
mock(SetRemovalTimeSelectModeForHistoricBatchesBuilder.class, RETURNS_DEEP_STUBS);
when(historyServiceMock.setRemovalTimeToHistoricBatches()).thenReturn(builderMock);
Map<String, Object> payload = new HashMap<>();
payload.put("historicBatchIds", Collections.singletonList(EXAMPLE_BATCH_ID));
payload.put("absoluteRemovalTime", removalTime);
given()
.contentType(ContentType.JSON)
.body(payload)
.then()
.expect().statusCode(Status.OK.getStatusCode())
.when()
.post(SET_REMOVAL_TIME_HISTORIC_BATCHES_ASYNC_URL);
SetRemovalTimeSelectModeForHistoricBatchesBuilder builder =
historyServiceMock.setRemovalTimeToHistoricBatches();
verify(builder).absoluteRemovalTime(removalTime);
verify(builder).byIds(EXAMPLE_BATCH_ID);
verify(builder).byQuery(null);
verify(builder).executeAsync();
verifyNoMoreInteractions(builder);
}
@Test
public void shouldNotSetRemovalTime_Absolute() {
  // Stub the fluent removal-time builder returned by the history service.
  SetRemovalTimeSelectModeForHistoricBatchesBuilder builderMock =
      mock(SetRemovalTimeSelectModeForHistoricBatchesBuilder.class, RETURNS_DEEP_STUBS);
  when(historyServiceMock.setRemovalTimeToHistoricBatches()).thenReturn(builderMock);

  // An explicit null removal time must NOT be forwarded to the builder
  // (verifyNoMoreInteractions below asserts absoluteRemovalTime() is never called).
  Map<String, Object> payload = new HashMap<>();
  payload.put("historicBatchIds", Collections.singletonList(EXAMPLE_BATCH_ID));
  payload.put("absoluteRemovalTime", null);

  given()
      .contentType(ContentType.JSON)
      .body(payload)
  .then()
      .expect().statusCode(Status.OK.getStatusCode())
  .when()
      .post(SET_REMOVAL_TIME_HISTORIC_BATCHES_ASYNC_URL);

  // Consistency fix: declare the same builder type as every sibling test
  // (was the SetRemovalTimeToHistoricBatchesBuilder supertype).
  SetRemovalTimeSelectModeForHistoricBatchesBuilder builder =
      historyServiceMock.setRemovalTimeToHistoricBatches();

  verify(builder).byIds(EXAMPLE_BATCH_ID);
  verify(builder).byQuery(null);
  verify(builder).executeAsync();
  verifyNoMoreInteractions(builder);
}
@Test
public void shouldClearRemovalTime() {
  // Stub the fluent removal-time builder returned by the history service.
  SetRemovalTimeSelectModeForHistoricBatchesBuilder removalTimeBuilder =
      mock(SetRemovalTimeSelectModeForHistoricBatchesBuilder.class, RETURNS_DEEP_STUBS);
  when(historyServiceMock.setRemovalTimeToHistoricBatches()).thenReturn(removalTimeBuilder);

  // Request that the removal time of the listed batches be cleared.
  Map<String, Object> requestBody = new HashMap<>();
  requestBody.put("historicBatchIds", Collections.singletonList(EXAMPLE_BATCH_ID));
  requestBody.put("clearedRemovalTime", true);

  given()
      .contentType(ContentType.JSON)
      .body(requestBody)
  .then()
      .expect().statusCode(Status.OK.getStatusCode())
  .when()
      .post(SET_REMOVAL_TIME_HISTORIC_BATCHES_ASYNC_URL);

  SetRemovalTimeSelectModeForHistoricBatchesBuilder builder =
      historyServiceMock.setRemovalTimeToHistoricBatches();

  verify(builder).clearedRemovalTime();
  verify(builder).byIds(EXAMPLE_BATCH_ID);
  verify(builder).byQuery(null);
  verify(builder).executeAsync();
  verifyNoMoreInteractions(builder);
}
@Test
public void shouldSetRemovalTime_Response() {
  // Stub the fluent builder and let executeAsync() return a mocked batch.
  SetRemovalTimeSelectModeForHistoricBatchesBuilder removalTimeBuilder =
      mock(SetRemovalTimeSelectModeForHistoricBatchesBuilder.class, RETURNS_DEEP_STUBS);
  when(historyServiceMock.setRemovalTimeToHistoricBatches()).thenReturn(removalTimeBuilder);
  Batch batchEntity = MockProvider.createMockBatch();
  when(removalTimeBuilder.executeAsync()).thenReturn(batchEntity);

  Response response = given()
      .contentType(ContentType.JSON)
      .body(Collections.emptyMap())
  .then()
      .expect().statusCode(Status.OK.getStatusCode())
  .when()
      .post(SET_REMOVAL_TIME_HISTORIC_BATCHES_ASYNC_URL);

  // The serialized response must reflect the mocked batch field by field.
  verifyBatchJson(response.asString());
}
@Test
public void shouldSetRemovalTime_ThrowBadUserException() {
  SetRemovalTimeSelectModeForHistoricBatchesBuilder removalTimeBuilder =
      mock(SetRemovalTimeSelectModeForHistoricBatchesBuilder.class, RETURNS_DEEP_STUBS);
  when(historyServiceMock.setRemovalTimeToHistoricBatches()).thenReturn(removalTimeBuilder);

  // Let the async execution fail ...
  doThrow(BadUserRequestException.class).when(removalTimeBuilder).executeAsync();

  // ... which the endpoint must map to HTTP 400 (Bad Request).
  given()
      .contentType(ContentType.JSON)
      .body(Collections.emptyMap())
  .then()
      .expect().statusCode(Status.BAD_REQUEST.getStatusCode())
  .when()
      .post(SET_REMOVAL_TIME_HISTORIC_BATCHES_ASYNC_URL);
}
/**
 * Deserializes {@code batchJson} into a {@link BatchDto} and asserts that every
 * field carries the corresponding {@link MockProvider} example value.
 */
protected void verifyBatchJson(String batchJson) {
  BatchDto dto = JsonPathUtil.from(batchJson).getObject("", BatchDto.class);
  assertNotNull("The returned batch should not be null.", dto);
  assertEquals(MockProvider.EXAMPLE_BATCH_ID, dto.getId());
  assertEquals(MockProvider.EXAMPLE_BATCH_TYPE, dto.getType());
  assertEquals(MockProvider.EXAMPLE_BATCH_TOTAL_JOBS, dto.getTotalJobs());
  assertEquals(MockProvider.EXAMPLE_BATCH_JOBS_PER_SEED, dto.getBatchJobsPerSeed());
  assertEquals(MockProvider.EXAMPLE_INVOCATIONS_PER_BATCH_JOB, dto.getInvocationsPerBatchJob());
  assertEquals(MockProvider.EXAMPLE_SEED_JOB_DEFINITION_ID, dto.getSeedJobDefinitionId());
  assertEquals(MockProvider.EXAMPLE_MONITOR_JOB_DEFINITION_ID, dto.getMonitorJobDefinitionId());
  assertEquals(MockProvider.EXAMPLE_BATCH_JOB_DEFINITION_ID, dto.getBatchJobDefinitionId());
  assertEquals(MockProvider.EXAMPLE_TENANT_ID, dto.getTenantId());
}
/**
 * Deserializes {@code historicBatchJson} into a {@link HistoricBatchDto} and asserts
 * that every field carries the corresponding {@link MockProvider} example value,
 * including the history-only fields (create user, start/end/removal time).
 */
protected void verifyHistoricBatchJson(String historicBatchJson) {
  HistoricBatchDto dto = from(historicBatchJson).getObject("", HistoricBatchDto.class);
  assertNotNull("The returned historic batch should not be null.", dto);
  assertEquals(MockProvider.EXAMPLE_BATCH_ID, dto.getId());
  assertEquals(MockProvider.EXAMPLE_BATCH_TYPE, dto.getType());
  assertEquals(MockProvider.EXAMPLE_BATCH_TOTAL_JOBS, dto.getTotalJobs());
  assertEquals(MockProvider.EXAMPLE_BATCH_JOBS_PER_SEED, dto.getBatchJobsPerSeed());
  assertEquals(MockProvider.EXAMPLE_INVOCATIONS_PER_BATCH_JOB, dto.getInvocationsPerBatchJob());
  assertEquals(MockProvider.EXAMPLE_SEED_JOB_DEFINITION_ID, dto.getSeedJobDefinitionId());
  assertEquals(MockProvider.EXAMPLE_MONITOR_JOB_DEFINITION_ID, dto.getMonitorJobDefinitionId());
  assertEquals(MockProvider.EXAMPLE_BATCH_JOB_DEFINITION_ID, dto.getBatchJobDefinitionId());
  assertEquals(MockProvider.EXAMPLE_TENANT_ID, dto.getTenantId());
  assertEquals(MockProvider.EXAMPLE_USER_ID, dto.getCreateUserId());
  assertEquals(DateTimeUtil.parseDate(MockProvider.EXAMPLE_HISTORIC_BATCH_START_TIME), dto.getStartTime());
  assertEquals(DateTimeUtil.parseDate(MockProvider.EXAMPLE_HISTORIC_BATCH_END_TIME), dto.getEndTime());
  assertEquals(DateTimeUtil.parseDate(MockProvider.EXAMPLE_HISTORIC_BATCH_REMOVAL_TIME), dto.getRemovalTime());
}
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.introduceField;
import com.intellij.codeInsight.TestFrameworks;
import com.intellij.ui.ListCellRendererWrapper;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.JavaRefactoringSettings;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.introduce.inplace.KeyboardComboSwitcher;
import com.intellij.refactoring.ui.TypeSelectorManager;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemListener;
/**
* User: anna
* Date: 4/8/11
*/
/**
 * "Introduce Field" settings panel shown in the lightweight inplace-refactoring popup
 * (as opposed to the full modal dialog). It only exposes the "initialize in" combo box;
 * field visibility is read from {@link JavaRefactoringSettings} and the checkboxes come
 * from the {@link IntroduceFieldCentralPanel} base class.
 */
public class IntroduceFieldPopupPanel extends IntroduceFieldCentralPanel {
// Combo for choosing where the field gets initialized; stays null when the
// model offers at most one choice (see createInitializerPlacePanel()).
private @Nullable JComboBox myInitializerCombo;
// NOTE(review): never assigned anywhere in this class, so setVisibility() would
// throw an NPE if invoked -- confirm whether callers ever use it on the popup panel.
private JComboBox myVisibilityCombo;
// Model holding the InitializationPlace choices currently applicable.
private DefaultComboBoxModel myInitialisersPlaceModel;
public IntroduceFieldPopupPanel(PsiClass parentClass,
PsiExpression initializerExpression,
PsiLocalVariable localVariable,
boolean isCurrentMethodConstructor,
boolean isInvokedOnDeclaration,
boolean willBeDeclaredStatic,
PsiExpression[] occurrences,
boolean allowInitInMethod,
boolean allowInitInMethodIfAll,
TypeSelectorManager typeSelectorManager) {
super(parentClass, initializerExpression, localVariable, isCurrentMethodConstructor, isInvokedOnDeclaration, willBeDeclaredStatic,
occurrences, allowInitInMethod, allowInitInMethodIfAll, typeSelectorManager);
}
/**
 * Prunes {@link #myInitialisersPlaceModel} to the places applicable for the given
 * initializer and pre-selects the place used last time (when still valid), falling
 * back to {@link #selectInCurrentMethod()}.
 */
protected void initializeInitializerPlace(PsiExpression initializerExpression,
BaseExpressionToFieldHandler.InitializationPlace ourLastInitializerPlace) {
if (initializerExpression != null) {
setEnabledInitializationPlaces(initializerExpression);
if (!myAllowInitInMethod) {
myInitialisersPlaceModel.removeElement(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD);
}
} else {
// No initializer expression: no initialization place makes sense.
myInitialisersPlaceModel.removeAllElements();
}
final PsiMethod setUpMethod = TestFrameworks.getInstance().findSetUpMethod(myParentClass);
final boolean setupEnabled = hasSetUpChoice();
// setUp() is only restored when the initializer already lives in setUp()
// or the containing class is a test class.
if (ourLastInitializerPlace == BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD &&
setupEnabled && (myInitializerExpression != null && PsiTreeUtil.isAncestor(setUpMethod, myInitializerExpression, false) ||
TestFrameworks.getInstance().isTestClass(myParentClass))) {
myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD);
}
else if (ourLastInitializerPlace == BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR &&
myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR) > -1 && myParentClass.getConstructors().length > 0) {
myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR);
}
else if (ourLastInitializerPlace == BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION &&
myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION) > -1) {
myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION);
}
else {
selectInCurrentMethod();
}
}
// Intentionally empty: the popup has no extra controls to initialize here.
@Override
protected void initializeControls(PsiExpression initializerExpression,
BaseExpressionToFieldHandler.InitializationPlace ourLastInitializerPlace) {
}
@Override
public boolean isDeclareFinal() {
// Reuse the last "declare final" choice, but only where final is actually allowed.
return ourLastCbFinalState && allowFinal();
}
/**
 * Selects "current method" when available, otherwise "field declaration";
 * the final else branch intentionally re-selects IN_CURRENT_METHOD as a last resort.
 */
private void selectInCurrentMethod() {
if (myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD) > -1) {
myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD);
}
else if (myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION) > -1) {
myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION);
}
else {
myInitialisersPlaceModel.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD);
}
}
/**
 * Returns the user-selected initialization place; when no combo was built
 * (single choice) the first model element is returned instead.
 */
public BaseExpressionToFieldHandler.InitializationPlace getInitializerPlace() {
if (myInitializerCombo != null) {
return (BaseExpressionToFieldHandler.InitializationPlace)myInitializerCombo.getSelectedItem();
}
return (BaseExpressionToFieldHandler.InitializationPlace)myInitialisersPlaceModel.getElementAt(0);
}
/**
 * Visibility comes from the global refactoring settings (the popup shows no
 * visibility combo); defaults to {@code private} when nothing is stored.
 */
public String getFieldVisibility() {
String visibility = JavaRefactoringSettings.getInstance().INTRODUCE_FIELD_VISIBILITY;
if (visibility == null) {
visibility = PsiModifier.PRIVATE;
}
return visibility;
}
/**
 * Builds the "Initialize in:" row. The combo box is only created when more than
 * one initialization place remains after {@link #initializeInitializerPlace}.
 */
protected JComponent createInitializerPlacePanel(final ItemListener itemListener, final ItemListener finalUpdater) {
JPanel groupPanel = new JPanel(new GridBagLayout());
final GridBagConstraints gridBagConstraints =
new GridBagConstraints(0, 0, 1, 1, 1, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL,
new Insets(0, 0, 0, 0), 0, 0);
myInitialisersPlaceModel = new DefaultComboBoxModel();
myInitialisersPlaceModel.addElement(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD);
myInitialisersPlaceModel.addElement(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION);
myInitialisersPlaceModel.addElement(BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR);
if (TestFrameworks.getInstance().isTestClass(myParentClass)) {
// setUp() is only an option inside recognized test classes.
myInitialisersPlaceModel.addElement(BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD);
}
initializeInitializerPlace(myInitializerExpression, InplaceIntroduceFieldPopup.ourLastInitializerPlace);
if (myInitialisersPlaceModel.getSize() > 1) {
final JLabel initLabel = new JLabel(RefactoringBundle.message("initialize.in.border.title") + ":");
initLabel.setDisplayedMnemonic('i');
gridBagConstraints.insets.left = 5;
gridBagConstraints.anchor = GridBagConstraints.WEST;
groupPanel.add(initLabel, gridBagConstraints);
JComboBox initializersCombo = new JComboBox(myInitialisersPlaceModel);
KeyboardComboSwitcher.setupActions(initializersCombo, myParentClass.getProject());
initLabel.setLabelFor(initializersCombo);
initializersCombo.setRenderer(new ListCellRendererWrapper<BaseExpressionToFieldHandler.InitializationPlace>() {
@Override
public void customize(JList list,
BaseExpressionToFieldHandler.InitializationPlace value,
int index,
boolean selected,
boolean hasFocus) {
setText(getPresentableText(value));
}
});
initializersCombo.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
// Forward combo changes to the listeners that refresh occurrences and the
// "final" checkbox; the listeners are known not to inspect the event itself.
itemListener.itemStateChanged(null);
finalUpdater.itemStateChanged(null);
}
});
gridBagConstraints.gridx = 1;
gridBagConstraints.insets.top = 0;
gridBagConstraints.insets.left = 0;
groupPanel.add(initializersCombo, gridBagConstraints);
myInitializerCombo = initializersCombo;
}
return groupPanel;
}
/**
 * Human-readable label for an initialization place; null for unknown values.
 */
@Nullable
private static String getPresentableText(BaseExpressionToFieldHandler.InitializationPlace value) {
if (value == BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD) {
return "current method";
} else if (value == BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR) {
return "constructor";
} else if (value == BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION) {
return "field declaration";
} else if (value == BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD){
return "setUp";
}
return null;
}
/**
 * Removes no-longer-valid places from the model.
 * Returns true when the setUp() choice must be kept (initializedInSetup).
 */
@Override
protected boolean updateInitializationPlaceModel(boolean initializedInSetup, boolean initializedInConstructor) {
myInitialisersPlaceModel.removeElement(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION);
if (!initializedInConstructor) {
myInitialisersPlaceModel.removeElement(BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR);
}
if (!initializedInSetup) {
myInitialisersPlaceModel.removeElement(BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD);
} else {
return true;
}
return false;
}
@Override
protected boolean hasSetUpChoice() {
return myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_SETUP_METHOD) > -1;
}
// Precondition: the combo exists, i.e. the model offered more than one choice.
public void setInitializeInFieldDeclaration() {
LOG.assertTrue(myInitializerCombo != null);
myInitializerCombo.setSelectedItem(BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION);
}
// NOTE(review): myVisibilityCombo is never initialized in this class; calling this
// method would raise an NPE. Verify whether any caller reaches it for the popup.
public void setVisibility(String visibility) {
myVisibilityCombo.setSelectedItem(visibility);
}
// Intentionally empty: the popup owns no "final" checkbox to update.
@Override
protected void updateCbFinal() {
}
/**
 * A final field is only possible when initialized in its declaration, or in the
 * constructor of a non-static field; initialization in the current method is
 * acceptable only when that method is the sole constructor.
 */
@Override
protected boolean allowFinal() {
final Object selectedItem = getInitializerPlace();
boolean allowFinal = selectedItem == BaseExpressionToFieldHandler.InitializationPlace.IN_FIELD_DECLARATION ||
(selectedItem == BaseExpressionToFieldHandler.InitializationPlace.IN_CONSTRUCTOR && !myWillBeDeclaredStatic);
if (selectedItem == BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD && myIsCurrentMethodConstructor) {
final PsiMethod[] constructors = myParentClass.getConstructors();
allowFinal = constructors.length <= 1;
}
return super.allowFinal() && allowFinal;
}
/**
 * Adds or removes the "current method" choice depending on whether all
 * occurrences are being replaced and whether that is allowed.
 */
@Override
protected void updateInitializerSelection() {
if (myAllowInitInMethodIfAll || !isReplaceAllOccurrences()) {
if (myInitialisersPlaceModel.getIndexOf(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD) == -1) {
myInitialisersPlaceModel.insertElementAt(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD, 0);
}
} else {
myInitialisersPlaceModel.removeElement(BaseExpressionToFieldHandler.InitializationPlace.IN_CURRENT_METHOD);
}
}
@Override
protected boolean shouldUpdateTypeSelector() {
return false;
}
// The popup only shows the "replace all occurrences" checkbox; no others are added.
@Override
protected JPanel appendCheckboxes(ItemListener itemListener) {
final JPanel panel = new JPanel(new GridBagLayout());
appendOccurrences(itemListener, new GridBagConstraints(0,0,1,1,0,0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(0,0,0,0), 0,0), panel);
return panel;
}
// Stacks the "initialize in" row above the checkbox row.
protected JPanel composeWholePanel(JComponent initializerPlacePanel, JPanel checkboxPanel) {
final JPanel panel = new JPanel(new GridBagLayout());
final GridBagConstraints constraints =
new GridBagConstraints(0, 0, 1, 1, 1, 0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE,
new Insets(0, 0, 0, 0), 0, 0);
panel.add(initializerPlacePanel, constraints);
constraints.gridy++;
panel.add(checkboxPanel, constraints);
return panel;
}
}
| |
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.agent.mvn;
import java.net.MalformedURLException;
/**
* Parser for mvn: protocol.<br/>
*
* @author Alin Dreghiciu
* @since August 10, 2007
*/
public class Parser {
/**
 * Syntax for the url; to be shown on exception messages.
 */
// NOTE(review): the message omits the optional 5th segment (classifier) that
// parseArtifactPart() actually accepts -- consider extending the message.
private static final String SYNTAX = "mvn:[repository_url!]groupId/artifactId[/[version]/[type]]";
/**
 * Separator between repository and artifact definition.
 */
private static final String REPOSITORY_SEPARATOR = "!";
/**
 * Artifact definition segments separator.
 */
private static final String ARTIFACT_SEPARATOR = "/";
/**
 * Default version if none present in the url.
 */
private static final String VERSION_LATEST = "LATEST";
/**
 * Snapshot version
 */
private static final String VERSION_SNAPSHOT = "SNAPSHOT";
/**
 * Default type if not present in the url.
 */
private static final String TYPE_JAR = "jar";
/**
 * Final artifact path separator.
 */
public static final String FILE_SEPARATOR = "/";
/**
 * Group id path separator. (A regex: used with String.replaceAll to turn
 * dotted group ids into directory paths.)
 */
private static final String GROUP_SEPARATOR = "\\.";
/**
 * Separator used to constructs the artifact file name.
 */
private static final String VERSION_SEPARATOR = "-";
/**
 * Artifact extension(type) separator.
 */
private static final String TYPE_SEPARATOR = ".";
/**
 * Separator used to separate classifier in artifact name.
 */
private static final String CLASSIFIER_SEPARATOR = "-";
/**
 * Maven metadata file.
 */
private static final String METADATA_FILE = "maven-metadata.xml";
/**
 * Maven local metadata file.
 */
private static final String METADATA_FILE_LOCAL = "maven-metadata-local.xml";
/**
 * Repository URL. Null if not present.
 */
private MavenRepositoryURL m_repositoryURL;
/**
 * Artifact group id.
 */
private String m_group;
/**
 * Artifact id.
 */
private String m_artifact;
/**
 * Artifact version.
 */
private String m_version;
/**
 * Artifact type.
 */
private String m_type;
/**
 * Artifact classifier. Null when the url carries no classifier segment.
 */
private String m_classifier;
/**
 * Artifact classifier to use to build artifact name (empty string or
 * "-&lt;classifier&gt;", so it can be appended unconditionally).
 */
private String m_fullClassifier;
/**
 * Creates a new protocol parser.
 *
 * @param path the path part of the url (without starting mvn:)
 * @throws MalformedURLException if provided path does not comply to expected syntax or an malformed repository URL
 */
public Parser(final String path)
throws MalformedURLException {
if (path == null) {
throw new MalformedURLException("Path cannot be null. Syntax " + SYNTAX);
}
if (path.startsWith(REPOSITORY_SEPARATOR) || path.endsWith(REPOSITORY_SEPARATOR)) {
throw new MalformedURLException(
"Path cannot start or end with " + REPOSITORY_SEPARATOR + ". Syntax " + SYNTAX
);
}
if (path.contains(REPOSITORY_SEPARATOR)) {
int pos = path.lastIndexOf(REPOSITORY_SEPARATOR);
parseArtifactPart(path.substring(pos + 1));
// NOTE(review): "@snapshots" is appended unconditionally so explicitly given
// repositories also resolve snapshot versions -- confirm this is intended here.
m_repositoryURL = new MavenRepositoryURL(path.substring(0, pos) + "@snapshots");
} else {
parseArtifactPart(path);
}
}
/**
 * Parses the artifact part of the url ( without the repository).
 *
 * @param part url part without protocol and repository.
 * @throws MalformedURLException if provided path does not comply to syntax.
 */
private void parseArtifactPart(final String part)
throws MalformedURLException {
String[] segments = part.split(ARTIFACT_SEPARATOR);
if (segments.length < 2) {
throw new MalformedURLException("Invalid path. Syntax " + SYNTAX);
}
// we must have a valid (non-blank) group
m_group = segments[0];
if (m_group.trim().length() == 0) {
throw new MalformedURLException("Invalid groupId. Syntax " + SYNTAX);
}
// valid (non-blank) artifact
m_artifact = segments[1];
if (m_artifact.trim().length() == 0) {
throw new MalformedURLException("Invalid artifactId. Syntax " + SYNTAX);
}
// version is optional but we have a default value
m_version = VERSION_LATEST;
if (segments.length >= 3 && segments[2].trim().length() > 0) {
m_version = segments[2];
}
// type is optional but we have a default value
m_type = TYPE_JAR;
if (segments.length >= 4 && segments[3].trim().length() > 0) {
m_type = segments[3];
}
// classifier is optional (if not present or empty we will have a null classifier
// and an empty m_fullClassifier, so path building can append it unconditionally)
m_fullClassifier = "";
if (segments.length >= 5 && segments[4].trim().length() > 0) {
m_classifier = segments[4];
m_fullClassifier = CLASSIFIER_SEPARATOR + m_classifier;
}
}
/**
 * Returns the repository URL if present, null otherwise
 *
 * @return repository URL
 */
public MavenRepositoryURL getRepositoryURL() {
return m_repositoryURL;
}
/**
 * Returns the group id of the artifact.
 *
 * @return group Id
 */
public String getGroup() {
return m_group;
}
/**
 * Returns the artifact id.
 *
 * @return artifact id
 */
public String getArtifact() {
return m_artifact;
}
/**
 * Returns the artifact version.
 *
 * @return version
 */
public String getVersion() {
return m_version;
}
/**
 * Returns the artifact type.
 *
 * @return type
 */
public String getType() {
return m_type;
}
/**
 * Returns the artifact classifier.
 *
 * @return classifier, or null when the url carried none
 */
public String getClassifier() {
return m_classifier;
}
/**
 * Returns the complete path to artifact as stated by Maven 2 repository layout.
 *
 * @return artifact path
 */
public String getArtifactPath() {
return getArtifactPath(m_version);
}
/**
 * Returns the complete path to artifact as stated by Maven 2 repository layout:
 * group/as/dirs/artifact/version/artifact-version[-classifier].type
 *
 * @param version The version of the artifact.
 * @return artifact path
 */
public String getArtifactPath(final String version) {
return new StringBuilder()
.append(m_group.replaceAll(GROUP_SEPARATOR, FILE_SEPARATOR))
.append(FILE_SEPARATOR)
.append(m_artifact)
.append(FILE_SEPARATOR)
.append(version)
.append(FILE_SEPARATOR)
.append(m_artifact)
.append(VERSION_SEPARATOR)
.append(version)
.append(m_fullClassifier)
.append(TYPE_SEPARATOR)
.append(m_type)
.toString();
}
/**
 * Returns the version for an artifact for a snapshot version: the literal
 * "SNAPSHOT" part is replaced by the timestamp and the build number is appended.
 *
 * @param version The version of the snapshot.
 * @param timestamp The timestamp of the snapshot.
 * @param buildnumber The buildnumber of the snapshot.
 * @return artifact path
 */
public String getSnapshotVersion(final String version, final String timestamp, final String buildnumber) {
return version.replace(VERSION_SNAPSHOT, timestamp) + VERSION_SEPARATOR + buildnumber;
}
/**
 * Returns the complete path to artifact for a snapshot file.
 *
 * @param version The version of the snapshot.
 * @param timestamp The timestamp of the snapshot.
 * @param buildnumber The buildnumber of the snapshot.
 * @return artifact path
 */
public String getSnapshotPath(final String version, final String timestamp, final String buildnumber) {
return new StringBuilder()
.append(m_group.replaceAll(GROUP_SEPARATOR, FILE_SEPARATOR))
.append(FILE_SEPARATOR)
.append(m_artifact)
.append(FILE_SEPARATOR)
.append(version)
.append(FILE_SEPARATOR)
.append(m_artifact)
.append(VERSION_SEPARATOR)
.append(getSnapshotVersion(version, timestamp, buildnumber))
.append(m_fullClassifier)
.append(TYPE_SEPARATOR)
.append(m_type)
.toString();
}
/**
 * Returns the path to metadata file corresponding to this artifact version.
 *
 * @param version The version of the metadata.
 * @return metadata file path
 */
public String getVersionMetadataPath(final String version) {
return new StringBuilder()
.append(m_group.replaceAll(GROUP_SEPARATOR, FILE_SEPARATOR))
.append(FILE_SEPARATOR)
.append(m_artifact)
.append(FILE_SEPARATOR)
.append(version)
.append(FILE_SEPARATOR)
.append(METADATA_FILE)
.toString();
}
/**
 * Returns the path to local metadata file corresponding to this artifact version.
 *
 * @param version The version of the metadata.
 * @return metadata file path
 */
public String getVersionLocalMetadataPath(final String version) {
return new StringBuilder()
.append(m_group.replaceAll(GROUP_SEPARATOR, FILE_SEPARATOR))
.append(FILE_SEPARATOR)
.append(m_artifact)
.append(FILE_SEPARATOR)
.append(version)
.append(FILE_SEPARATOR)
.append(METADATA_FILE_LOCAL)
.toString();
}
/**
 * Returns the complete path to artifact local metadata file.
 * (Method name keeps the historical "Metdata" typo; renaming would break callers.)
 *
 * @return artifact path
 */
public String getArtifactLocalMetdataPath() {
return new StringBuilder()
.append(m_group.replaceAll(GROUP_SEPARATOR, FILE_SEPARATOR))
.append(FILE_SEPARATOR)
.append(m_artifact)
.append(FILE_SEPARATOR)
.append(METADATA_FILE_LOCAL)
.toString();
}
/**
 * Returns the complete path to artifact metadata file.
 * (Method name keeps the historical "Metdata" typo; renaming would break callers.)
 *
 * @return artifact path
 */
public String getArtifactMetdataPath() {
return new StringBuilder()
.append(m_group.replaceAll(GROUP_SEPARATOR, FILE_SEPARATOR))
.append(FILE_SEPARATOR)
.append(m_artifact)
.append(FILE_SEPARATOR)
.append(METADATA_FILE)
.toString();
}
}
| |
/*
* Copyright 2015 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Future;
import io.realm.internal.IdentitySet;
import io.realm.internal.RealmObjectProxy;
import io.realm.internal.Row;
import io.realm.internal.SharedGroup;
import io.realm.internal.async.BadVersionException;
import io.realm.internal.async.QueryUpdateTask;
import io.realm.internal.log.RealmLog;
/**
* Centralises all Handler callbacks, including updating async queries and refreshing the Realm.
*/
final class HandlerController implements Handler.Callback {
// Message ids handled by handleMessage(). The values are arbitrary large constants
// (consecutive Fibonacci numbers) chosen to reduce the chance of clashing with
// message ids used by other Handler.Callback implementations on the same Looper.
static final int REALM_CHANGED = 14930352; // Hopefully it won't clash with other message IDs.
static final int COMPLETED_UPDATE_ASYNC_QUERIES = 24157817;
static final int COMPLETED_ASYNC_REALM_RESULTS = 39088169;
static final int COMPLETED_ASYNC_REALM_OBJECT = 63245986;
static final int REALM_ASYNC_BACKGROUND_EXCEPTION = 102334155;
// Dummy map value standing in for "no RealmQuery" (see realmObjects below).
private final static Boolean NO_REALM_QUERY = Boolean.TRUE;
// Keep a strong reference to the registered RealmChangeListener
// user should unregister those listeners
final CopyOnWriteArrayList<RealmChangeListener<? extends BaseRealm>> changeListeners = new CopyOnWriteArrayList<RealmChangeListener<? extends BaseRealm>>();
// Keep a weak reference to the registered RealmChangeListener those are Weak since
// for some UC (ex: RealmBaseAdapter) we don't know when it's the best time to unregister the listener
final List<WeakReference<RealmChangeListener<? extends BaseRealm>>> weakChangeListeners =
new CopyOnWriteArrayList<WeakReference<RealmChangeListener<? extends BaseRealm>>>();
final BaseRealm realm;
private boolean autoRefresh; // Requires a Looper thread to be true.
// pending update of async queries
private Future updateAsyncQueriesTask;
// Reference queues used to detect GC'ed results/objects so their bookkeeping
// entries can be pruned.
private final ReferenceQueue<RealmResults<? extends RealmModel>> referenceQueueAsyncRealmResults =
new ReferenceQueue<RealmResults<? extends RealmModel>>();
private final ReferenceQueue<RealmResults<? extends RealmModel>> referenceQueueSyncRealmResults =
new ReferenceQueue<RealmResults<? extends RealmModel>>();
final ReferenceQueue<RealmModel> referenceQueueRealmObject = new ReferenceQueue<RealmModel>();
// keep a WeakReference list to RealmResults obtained asynchronously in order to update them
// RealmQuery is not WeakReferenced to prevent it from being GC'd. RealmQuery should be
// cleaned if RealmResults is cleaned. we need to keep RealmQuery because it contains the query
// pointer (to handover for each update) + all the arguments necessary to rerun the query:
// sorting orders, sorting columns, type (findAll, findFirst, findAllSorted etc.)
final Map<WeakReference<RealmResults<? extends RealmModel>>, RealmQuery<? extends RealmModel>> asyncRealmResults =
new IdentityHashMap<WeakReference<RealmResults<? extends RealmModel>>, RealmQuery<? extends RealmModel>>();
// Keep a WeakReference to the currently empty RealmObjects obtained asynchronously. We need to keep re-running
// the query in the background for each commit, until we got a valid Row (pointer)
final Map<WeakReference<RealmObjectProxy>, RealmQuery<? extends RealmModel>> emptyAsyncRealmObject =
new ConcurrentHashMap<WeakReference<RealmObjectProxy>, RealmQuery<? extends RealmModel>>();
// Keep a reference to the list of sync RealmResults, we'll use it
// to deliver type based notification once the shared_group advance
final IdentitySet<WeakReference<RealmResults<? extends RealmModel>>> syncRealmResults =
new IdentitySet<WeakReference<RealmResults<? extends RealmModel>>>();
// Since ConcurrentHashMap doesn't support null value, and since java.util.Optional are not
// yet an option (using Java 6) we use an Object with the dummy value Boolean.TRUE to indicate
// a null value (no RealmQuery<? extends RealmModel>) this is the same approach used in the JDK
// ex here https://android.googlesource.com/platform/libcore/+/refs/heads/master/luni/src/main/java/java/util/concurrent/ConcurrentSkipListSet.java#214
final ConcurrentHashMap<WeakReference<RealmObjectProxy>, Object> realmObjects =
new ConcurrentHashMap<WeakReference<RealmObjectProxy>, Object>();
/**
 * Creates a controller bound to the given Realm instance; all callbacks
 * delivered via this controller operate on that Realm.
 */
public HandlerController(BaseRealm realm) {
this.realm = realm;
}
/**
 * Dispatches Handler messages: Realm change notifications and completion
 * callbacks from background async-query tasks. Returns true (message consumed).
 * Messages arriving after the Realm was fully closed are silently ignored.
 */
@Override
public boolean handleMessage(Message message) {
// Due to how a ConcurrentHashMap iterator is created we cannot be sure that other threads are
// aware when this threads handler is removed before they send messages to it. We don't wish to synchronize
// access to the handlers as they are the prime mean of notifying about updates. Instead we make sure
// that if a message does slip though (however unlikely), it will not try to update a SharedGroup that no
// longer exists. `sharedGroupManager` will only be null if a Realm is really closed.
if (realm.sharedGroupManager != null) {
QueryUpdateTask.Result result;
switch (message.what) {
case REALM_CHANGED:
realmChanged();
break;
case COMPLETED_ASYNC_REALM_RESULTS:
result = (QueryUpdateTask.Result) message.obj;
completedAsyncRealmResults(result);
break;
case COMPLETED_ASYNC_REALM_OBJECT:
result = (QueryUpdateTask.Result) message.obj;
completedAsyncRealmObject(result);
break;
case COMPLETED_UPDATE_ASYNC_QUERIES:
// this is called once the background thread completed the update of the async queries
result = (QueryUpdateTask.Result) message.obj;
completedAsyncQueriesUpdate(result);
break;
case REALM_ASYNC_BACKGROUND_EXCEPTION:
// Don't fail silently in the background in case of Core exception
throw (Error) message.obj;
default:
throw new IllegalArgumentException("Unknown message: " + message.what);
}
}
return true;
}
/**
 * Registers a strongly-referenced change listener. Duplicate registrations are
 * ignored (addIfAbsent); callers are responsible for unregistering.
 */
void addChangeListener(RealmChangeListener<? extends BaseRealm> listener) {
changeListeners.addIfAbsent(listener);
}
/**
 * For internal use only.
 * <p>
 * Registers the listener through a {@link WeakReference} for callers that have no
 * natural unregistration point (e.g., {@code RealmBaseAdapter}): once the listener
 * is GC'ed its stale reference is pruned on the next registration pass.
 *
 * @param listener the change listener.
 */
void addChangeListenerAsWeakReference(RealmChangeListener<? extends BaseRealm> listener) {
List<WeakReference<RealmChangeListener<? extends BaseRealm>>> staleRefs = null;
boolean shouldAdd = true;
for (WeakReference<RealmChangeListener<? extends BaseRealm>> ref : weakChangeListeners) {
RealmChangeListener<? extends BaseRealm> registered = ref.get();
// Collect references whose listeners have been garbage collected.
if (registered == null) {
if (staleRefs == null) {
staleRefs = new ArrayList<WeakReference<RealmChangeListener<? extends BaseRealm>>>(weakChangeListeners.size());
}
staleRefs.add(ref);
}
// Skip the add when this exact listener is already registered.
if (registered == listener) {
shouldAdd = false;
}
}
if (staleRefs != null) {
weakChangeListeners.removeAll(staleRefs);
}
if (shouldAdd) {
weakChangeListeners.add(new WeakReference<RealmChangeListener<? extends BaseRealm>>(listener));
}
}
/**
 * Removes the given weakly referenced listener, pruning along the way any references
 * whose listeners have already been garbage collected.
 *
 * @param listener the change listener to remove.
 */
void removeWeakChangeListener(RealmChangeListener<? extends BaseRealm> listener) {
    List<WeakReference<RealmChangeListener<? extends BaseRealm>>> toRemoveList = null;
    for (int i = 0; i < weakChangeListeners.size(); i++) {
        WeakReference<RealmChangeListener<? extends BaseRealm>> weakRef = weakChangeListeners.get(i);
        RealmChangeListener<? extends BaseRealm> weakListener = weakRef.get();
        // Collect all listeners that are GC'ed or we need to remove
        if (weakListener == null || weakListener == listener) {
            if (toRemoveList == null) {
                toRemoveList = new ArrayList<WeakReference<RealmChangeListener<? extends BaseRealm>>>(weakChangeListeners.size());
            }
            toRemoveList.add(weakRef);
        }
    }
    // Bug fix: removeAll(null) throws a NullPointerException, so only remove when something
    // was actually collected (the listener may not be registered, or the list may be empty).
    // This matches the null guard used in addChangeListenerAsWeakReference/notifyGlobalListeners.
    if (toRemoveList != null) {
        weakChangeListeners.removeAll(toRemoveList);
    }
}
/**
 * Unregisters a strongly referenced change listener. No-op if it was never registered.
 *
 * @param listener the change listener to remove.
 */
void removeChangeListener(RealmChangeListener<? extends BaseRealm> listener) {
    changeListeners.remove(listener);
}
/**
 * Unregisters every strongly referenced change listener. Weakly referenced listeners
 * are not touched here.
 */
void removeAllChangeListeners() {
    changeListeners.clear();
}
/**
 * Notifies all Realm-level change listeners: first the strongly referenced ones, then
 * the weakly referenced ones, pruning weak references whose listeners were GC'ed.
 * Iteration stops as soon as a callback closes the Realm.
 */
private void notifyGlobalListeners() {
    // notify strong reference listener
    Iterator<RealmChangeListener<? extends BaseRealm>> iteratorStrongListeners = changeListeners.iterator();
    while (iteratorStrongListeners.hasNext() && !realm.isClosed()) { // every callback could close the realm
        RealmChangeListener listener = iteratorStrongListeners.next(); // raw type so onChange(realm) compiles despite the wildcard
        listener.onChange(realm);
    }
    // notify weak reference listener (internals)
    Iterator<WeakReference<RealmChangeListener<? extends BaseRealm>>> iteratorWeakListeners = weakChangeListeners.iterator();
    List<WeakReference<RealmChangeListener<? extends BaseRealm>>> toRemoveList = null;
    while (iteratorWeakListeners.hasNext() && !realm.isClosed()) {
        WeakReference<RealmChangeListener<? extends BaseRealm>> weakRef = iteratorWeakListeners.next();
        RealmChangeListener listener = weakRef.get();
        if (listener == null) {
            // Listener was GC'ed: collect the stale reference for removal after iteration.
            if (toRemoveList == null) {
                toRemoveList = new ArrayList<WeakReference<RealmChangeListener<? extends BaseRealm>>>(weakChangeListeners.size());
            }
            toRemoveList.add(weakRef);
        } else {
            listener.onChange(realm);
        }
    }
    if (toRemoveList != null) {
        weakChangeListeners.removeAll(toRemoveList);
    }
}
/**
 * Re-runs the pending query for every still-referenced empty async RealmObject by
 * submitting a QueryUpdateTask to the shared executor; the result is posted back to
 * this thread's handler. Entries whose proxy was GC'ed are removed instead.
 */
private void updateAsyncEmptyRealmObject() {
    Iterator<Map.Entry<WeakReference<RealmObjectProxy>, RealmQuery<?>>> iterator = emptyAsyncRealmObject.entrySet().iterator();
    while (iterator.hasNext()) {
        Map.Entry<WeakReference<RealmObjectProxy>, RealmQuery<?>> next = iterator.next();
        if (next.getKey().get() != null) {
            // Proxy still alive: hand the query pointer over to a worker thread.
            Realm.asyncTaskExecutor
                    .submit(QueryUpdateTask.newBuilder()
                            .realmConfiguration(realm.getConfiguration())
                            .addObject(next.getKey(),
                                    next.getValue().handoverQueryPointer(),
                                    next.getValue().getArgument())
                            .sendToHandler(realm.handler, COMPLETED_ASYNC_REALM_OBJECT)
                            .build());
        } else {
            // Proxy was GC'ed; drop the pending query.
            iterator.remove();
        }
    }
}
/**
 * Notifies every kind of listener tracked by this controller (global listeners plus
 * type-based RealmResults/RealmObject listeners), then re-runs queries for still-empty
 * async RealmObjects.
 */
void notifyAllListeners() {
    notifyGlobalListeners();
    notifyTypeBasedListeners();
    // empty async RealmObject shouldn't block the realm to advance
    // they're empty so no risk for running into a corrupt state
    // where the pointer (Row) is using one version of a Realm, whereas the
    // current Realm is advancing to a newer version (they're empty anyway)
    if (!realm.isClosed() && threadContainsAsyncEmptyRealmObject()) {
        updateAsyncEmptyRealmObject();
    }
}
// Notifies all RealmResults (async then sync) and RealmObject listeners, in that order.
private void notifyTypeBasedListeners() {
    notifyAsyncRealmResultsCallbacks();
    notifySyncRealmResultsCallbacks();
    notifyRealmObjectCallbacks();
}
// Notifies listeners of RealmResults obtained through async queries.
private void notifyAsyncRealmResultsCallbacks() {
    notifyRealmResultsCallbacks(asyncRealmResults.keySet().iterator());
}
// Notifies listeners of RealmResults obtained through synchronous queries.
private void notifySyncRealmResultsCallbacks() {
    notifyRealmResultsCallbacks(syncRealmResults.keySet().iterator());
}
/**
 * Notifies the live RealmResults reachable from the given iterator, removing entries
 * whose results were GC'ed. Live results are collected in a first pass so listener
 * callbacks may safely modify asyncRealmResults/syncRealmResults; notification stops
 * early if a callback closes the Realm.
 *
 * @param iterator weak references to the RealmResults to notify.
 */
private void notifyRealmResultsCallbacks(Iterator<WeakReference<RealmResults<? extends RealmModel>>> iterator) {
    List<RealmResults<? extends RealmModel>> resultsToBeNotified =
            new ArrayList<RealmResults<? extends RealmModel>>();
    while (iterator.hasNext()) {
        WeakReference<RealmResults<? extends RealmModel>> weakRealmResults = iterator.next();
        RealmResults<? extends RealmModel> realmResults = weakRealmResults.get();
        if (realmResults == null) {
            iterator.remove();
        } else {
            // It should be legal to modify asyncRealmResults and syncRealmResults in the listener
            resultsToBeNotified.add(realmResults);
        }
    }
    for (Iterator<RealmResults<? extends RealmModel>> it = resultsToBeNotified.iterator(); it.hasNext() && !realm.isClosed(); ) {
        RealmResults<? extends RealmModel> realmResults = it.next();
        realmResults.notifyChangeListeners();
    }
}
/**
 * Notifies the tracked RealmObjects whose rows are still attached, removing entries
 * whose proxies were GC'ed. Live objects are collected first so listener callbacks may
 * safely modify realmObjects; notification stops early if a callback closes the Realm.
 */
private void notifyRealmObjectCallbacks() {
    List<RealmObjectProxy> objectsToBeNotified = new ArrayList<RealmObjectProxy>();
    Iterator<WeakReference<RealmObjectProxy>> iterator = realmObjects.keySet().iterator();
    while (iterator.hasNext()) {
        WeakReference<RealmObjectProxy> weakRealmObject = iterator.next();
        RealmObjectProxy realmObject = weakRealmObject.get();
        if (realmObject == null) {
            iterator.remove();
        } else {
            if (realmObject.realmGet$proxyState().getRow$realm().isAttached()) {
                // It should be legal to modify realmObjects in the listener
                objectsToBeNotified.add(realmObject);
            } else if (realmObject.realmGet$proxyState().getRow$realm() != Row.EMPTY_ROW) {
                // Row is detached but not the EMPTY_ROW placeholder — presumably the
                // underlying object was deleted, so stop tracking it. (EMPTY_ROW entries
                // are kept: they belong to not-yet-loaded async objects.)
                iterator.remove();
            }
        }
    }
    for (Iterator<RealmObjectProxy> it = objectsToBeNotified.iterator(); it.hasNext() && !realm.isClosed(); ) {
        RealmObjectProxy realmObject = it.next();
        realmObject.realmGet$proxyState().notifyChangeListeners$realm();
    }
}
/**
 * Batches all live async queries of this thread into a single QueryUpdateTask and
 * submits it to the shared executor, first cancelling any still-pending previous
 * update. GC'ed RealmResults entries are pruned along the way. The worker posts
 * COMPLETED_UPDATE_ASYNC_QUERIES back to this thread's handler when done.
 */
private void updateAsyncQueries() {
    if (updateAsyncQueriesTask != null && !updateAsyncQueriesTask.isDone()) {
        // try to cancel any pending update since we're submitting a new one anyway
        updateAsyncQueriesTask.cancel(true);
        Realm.asyncTaskExecutor.getQueue().remove(updateAsyncQueriesTask);
        RealmLog.d("REALM_CHANGED realm:" + HandlerController.this + " cancelling pending COMPLETED_UPDATE_ASYNC_QUERIES updates");
    }
    RealmLog.d("REALM_CHANGED realm:"+ HandlerController.this + " updating async queries, total: " + asyncRealmResults.size());
    // prepare a QueryUpdateTask to current async queries in this thread
    QueryUpdateTask.Builder.UpdateQueryStep updateQueryStep = QueryUpdateTask.newBuilder()
            .realmConfiguration(realm.getConfiguration());
    QueryUpdateTask.Builder.RealmResultsQueryStep realmResultsQueryStep = null;
    // we iterate over non GC'd async RealmResults then add them to the list to be updated (in a batch)
    Iterator<Map.Entry<WeakReference<RealmResults<? extends RealmModel>>, RealmQuery<?>>> iterator = asyncRealmResults.entrySet().iterator();
    while (iterator.hasNext()) {
        Map.Entry<WeakReference<RealmResults<? extends RealmModel>>, RealmQuery<?>> entry = iterator.next();
        WeakReference<RealmResults<? extends RealmModel>> weakReference = entry.getKey();
        RealmResults<? extends RealmModel> realmResults = weakReference.get();
        if (realmResults == null) {
            // GC'd instance remove from the list
            iterator.remove();
        } else {
            realmResultsQueryStep = updateQueryStep.add(weakReference,
                    entry.getValue().handoverQueryPointer(),
                    entry.getValue().getArgument());
        }
        // Note: we're passing an WeakRef of a RealmResults to another thread
        // this is safe as long as we don't invoke any of the RealmResults methods.
        // we're just using it as a Key in an IdentityHashMap (i.e doesn't call
        // AbstractList's hashCode, that require accessing objects from another thread)
        //
        // watch out when you debug, as you're IDE try to evaluate RealmResults
        // which break the Thread confinement constraints.
    }
    if (realmResultsQueryStep != null) {
        QueryUpdateTask queryUpdateTask = realmResultsQueryStep
                .sendToHandler(realm.handler, COMPLETED_UPDATE_ASYNC_QUERIES)
                .build();
        updateAsyncQueriesTask = Realm.asyncTaskExecutor.submit(queryUpdateTask);
    }
}
/**
 * Handles a REALM_CHANGED message: prunes GC'ed weak references, then either defers
 * advancing until async queries have been batch-updated (to avoid re-running them on
 * this thread), or advances the read transaction immediately and notifies listeners.
 */
private void realmChanged() {
    deleteWeakReferences();
    if (threadContainsAsyncQueries()) {
        updateAsyncQueries();
    } else {
        RealmLog.d("REALM_CHANGED realm:" + HandlerController.this + " no async queries, advance_read");
        realm.sharedGroupManager.advanceRead();
        notifyAllListeners();
    }
}
/**
 * Handles the result of a single async find-all query (COMPLETED_ASYNC_REALM_RESULTS).
 * The worker's handed-over TableView is only imported when this thread and the worker
 * are on the same SharedGroup version; otherwise the query is re-run or the result
 * ignored, as detailed by the per-branch comments below.
 *
 * @param result the worker's result, keyed by the originating RealmResults reference.
 */
private void completedAsyncRealmResults(QueryUpdateTask.Result result) {
    Set<WeakReference<RealmResults<? extends RealmModel>>> updatedTableViewsKeys = result.updatedTableViews.keySet();
    if (updatedTableViewsKeys.size() > 0) {
        // A COMPLETED_ASYNC_REALM_RESULTS message carries exactly one query's result,
        // so only the first key is consulted.
        WeakReference<RealmResults<? extends RealmModel>> weakRealmResults = updatedTableViewsKeys.iterator().next();
        RealmResults<? extends RealmModel> realmResults = weakRealmResults.get();
        if (realmResults == null) {
            asyncRealmResults.remove(weakRealmResults);
            RealmLog.d("[COMPLETED_ASYNC_REALM_RESULTS "+ weakRealmResults + "] realm:"+ HandlerController.this + " RealmResults GC'd ignore results");
        } else {
            SharedGroup.VersionID callerVersionID = realm.sharedGroupManager.getVersion();
            int compare = callerVersionID.compareTo(result.versionID);
            if (compare == 0) {
                // if the RealmResults is empty (has not completed yet) then use the value
                // otherwise a task (grouped update) has already updated this RealmResults
                if (!realmResults.isLoaded()) {
                    RealmLog.d("[COMPLETED_ASYNC_REALM_RESULTS "+ weakRealmResults + "] , realm:"+ HandlerController.this + " same versions, using results (RealmResults is not loaded)");
                    // swap pointer
                    realmResults.swapTableViewPointer(result.updatedTableViews.get(weakRealmResults));
                    // notify callbacks
                    realmResults.notifyChangeListeners();
                } else {
                    RealmLog.d("[COMPLETED_ASYNC_REALM_RESULTS "+ weakRealmResults + "] , realm:"+ HandlerController.this + " ignoring result the RealmResults (is already loaded)");
                }
            } else if (compare > 0) {
                // we have two use cases:
                // 1- this RealmResults is not empty, this means that after we started the async
                // query, we received a REALM_CHANGE that triggered an update of all async queries
                // including the last async submitted, so no need to use the provided TableView pointer
                // (or the user forced the sync behaviour .load())
                // 2- This RealmResults is still empty but this caller thread is advanced than the worker thread
                // this could happen if the current thread advanced the shared_group (via a write or refresh)
                // this means that we need to rerun the query against a newer worker thread.
                if (!realmResults.isLoaded()) { // UC2
                    // UC covered by this test: RealmAsyncQueryTests#testFindAllAsyncRetry
                    RealmLog.d("[COMPLETED_ASYNC_REALM_RESULTS " + weakRealmResults + "] , realm:"+ HandlerController.this + " caller is more advanced & RealmResults is not loaded, rerunning the query against the latest version");
                    RealmQuery<?> query = asyncRealmResults.get(weakRealmResults);
                    QueryUpdateTask queryUpdateTask = QueryUpdateTask.newBuilder()
                            .realmConfiguration(realm.getConfiguration())
                            .add(weakRealmResults,
                                    query.handoverQueryPointer(),
                                    query.getArgument())
                            .sendToHandler(realm.handler, COMPLETED_ASYNC_REALM_RESULTS)
                            .build();
                    Realm.asyncTaskExecutor.submit(queryUpdateTask);
                } else {
                    // UC covered by this test: RealmAsyncQueryTests#testFindAllCallerIsAdvanced
                    RealmLog.d("[COMPLETED_ASYNC_REALM_RESULTS "+ weakRealmResults + "] , realm:"+ HandlerController.this + " caller is more advanced & RealmResults is loaded ignore the outdated result");
                }
            } else {
                // the caller thread is behind the worker thread,
                // no need to rerun the query, since we're going to receive the update signal
                // & batch update all async queries including this one
                // UC covered by this test: RealmAsyncQueryTests#testFindAllCallerThreadBehind
                RealmLog.d("[COMPLETED_ASYNC_REALM_RESULTS "+ weakRealmResults + "] , realm:"+ HandlerController.this + " caller thread behind worker thread, ignore results (a batch update will update everything including this query)");
            }
        }
    }
}
/**
 * Handles the batched async-query update (COMPLETED_UPDATE_ASYNC_QUERIES): advances
 * this thread's read transaction to the worker's version if needed, swaps in the new
 * TableView pointers for every live RealmResults, and only then notifies listeners
 * (results callbacks first, then the global/sync/object listeners that were deferred
 * when the original REALM_CHANGED was swallowed).
 *
 * @param result the worker's batched result with one TableView pointer per query.
 */
private void completedAsyncQueriesUpdate(QueryUpdateTask.Result result) {
    SharedGroup.VersionID callerVersionID = realm.sharedGroupManager.getVersion();
    int compare = callerVersionID.compareTo(result.versionID);
    if (compare > 0) {
        // if the caller thread is advanced i.e it already sent a REALM_CHANGE that will update the queries
        RealmLog.d("COMPLETED_UPDATE_ASYNC_QUERIES realm:" + HandlerController.this + " caller is more advanced, Looper will updates queries");
    } else {
        // We're behind or on the same version as the worker thread
        // only advance if we're behind
        if (compare != 0) {
            // no need to remove old pointers from TableView, since they're
            // imperative TV, they will not rerun if the SharedGroup advance
            // UC covered by this test: RealmAsyncQueryTests#testFindAllCallerThreadBehind
            RealmLog.d("COMPLETED_UPDATE_ASYNC_QUERIES realm:"+ HandlerController.this + " caller is behind advance_read");
            // refresh the Realm to the version provided by the worker thread
            // (advanceRead to the latest version may cause a version mismatch error) preventing us
            // from importing correctly the handover table view
            try {
                realm.sharedGroupManager.advanceRead(result.versionID);
            } catch (BadVersionException e) {
                // The version comparison above should have ensured that that the Caller version is less than the
                // Worker version. In that case it should always be safe to advance_read.
                throw new IllegalStateException("Failed to advance Caller Realm to Worker Realm version", e);
            }
        }
        ArrayList<RealmResults<? extends RealmModel>> callbacksToNotify = new ArrayList<RealmResults<? extends RealmModel>>(result.updatedTableViews.size());
        // use updated TableViews pointers for the existing async RealmResults
        for (Map.Entry<WeakReference<RealmResults<? extends RealmModel>>, Long> query : result.updatedTableViews.entrySet()) {
            WeakReference<RealmResults<? extends RealmModel>> weakRealmResults = query.getKey();
            RealmResults<? extends RealmModel> realmResults = weakRealmResults.get();
            if (realmResults == null) {
                // don't update GC'd instance
                asyncRealmResults.remove(weakRealmResults);
            } else {
                // update the instance with the new pointer
                realmResults.swapTableViewPointer(query.getValue());
                // it's dangerous to notify the callback about new results before updating
                // the pointers, because the callback may use another RealmResults not updated yet
                // this is why we defer the notification until we're done updating all pointers
                callbacksToNotify.add(realmResults);
                RealmLog.d("COMPLETED_UPDATE_ASYNC_QUERIES realm:"+ HandlerController.this + " updating RealmResults " + weakRealmResults);
            }
        }
        for (RealmResults<? extends RealmModel> query : callbacksToNotify) {
            query.notifyChangeListeners();
        }
        // We need to notify the rest of listeners, since the original REALM_CHANGE
        // was delayed/swallowed in order to be able to update async queries
        notifyGlobalListeners();
        notifySyncRealmResultsCallbacks();
        notifyRealmObjectCallbacks();
        updateAsyncQueriesTask = null;
    }
}
/**
 * Handles the result of an async find-first query (COMPLETED_ASYNC_REALM_OBJECT).
 * On matching SharedGroup versions the handed-over row pointer is imported into the
 * proxy; if this thread has advanced in the meantime, the query is either re-run or —
 * when the proxy is already valid — the listeners are simply notified. The caller
 * being behind the worker is considered impossible and raises IllegalStateException.
 *
 * @param result the worker's result, keyed by the originating proxy reference.
 */
private void completedAsyncRealmObject(QueryUpdateTask.Result result) {
    Set<WeakReference<RealmObjectProxy>> updatedRowKey = result.updatedRow.keySet();
    if (updatedRowKey.size() > 0) {
        // A COMPLETED_ASYNC_REALM_OBJECT message carries exactly one object's result.
        WeakReference<RealmObjectProxy> realmObjectWeakReference = updatedRowKey.iterator().next();
        RealmObjectProxy proxy = realmObjectWeakReference.get();
        if (proxy != null) {
            SharedGroup.VersionID callerVersionID = realm.sharedGroupManager.getVersion();
            int compare = callerVersionID.compareTo(result.versionID);
            // we always query on the same version
            // only two use cases could happen 1. we're on the same version or 2. the caller has advanced in the meanwhile
            if (compare == 0) { //same version import the handover
                long rowPointer = result.updatedRow.get(realmObjectWeakReference);
                if (rowPointer != 0 && emptyAsyncRealmObject.containsKey(realmObjectWeakReference)) {
                    // cleanup a previously empty async RealmObject
                    emptyAsyncRealmObject.remove(realmObjectWeakReference);
                    realmObjects.put(realmObjectWeakReference, NO_REALM_QUERY);
                }
                proxy.realmGet$proxyState().onCompleted$realm(rowPointer);
                proxy.realmGet$proxyState().notifyChangeListeners$realm();
            } else if (compare > 0) {
                // the caller has advanced we need to
                // retry against the current version of the caller if it's still empty
                if (RealmObject.isValid(proxy)) { // already completed & has a valid pointer no need to re-run
                    RealmLog.d("[COMPLETED_ASYNC_REALM_OBJECT "+ proxy + "] , realm:" + HandlerController.this
                            + " RealmObject is already loaded, just notify it.");
                    proxy.realmGet$proxyState().notifyChangeListeners$realm();
                } else {
                    RealmLog.d("[COMPLETED_ASYNC_REALM_OBJECT " + proxy + "] , realm:" + HandlerController.this
                            + " RealmObject is not loaded yet. Rerun the query.");
                    Object value = realmObjects.get(realmObjectWeakReference);
                    RealmQuery<? extends RealmModel> realmQuery;
                    if (value == null || value == NO_REALM_QUERY) { // this is a retry of an empty RealmObject
                        realmQuery = emptyAsyncRealmObject.get(realmObjectWeakReference);
                    } else {
                        // unchecked cast: realmObjects stores either NO_REALM_QUERY or a RealmQuery
                        realmQuery = (RealmQuery<? extends RealmModel>) value;
                    }
                    QueryUpdateTask queryUpdateTask = QueryUpdateTask.newBuilder()
                            .realmConfiguration(realm.getConfiguration())
                            .addObject(realmObjectWeakReference,
                                    realmQuery.handoverQueryPointer(),
                                    realmQuery.getArgument())
                            .sendToHandler(realm.handler, COMPLETED_ASYNC_REALM_OBJECT)
                            .build();
                    Realm.asyncTaskExecutor.submit(queryUpdateTask);
                }
            } else {
                // should not happen, since the the background thread position itself against the provided version
                // and the caller thread can only go forward (advance_read)
                throw new IllegalStateException("Caller thread behind the Worker thread");
            }
        } // else: element GC'd in the meanwhile
    }
}
/**
 * Indicates whether this thread still owns {@code RealmResults} obtained asynchronously.
 * Their presence prevents advancing the Realm before the {@code RealmResults} have been
 * updated, which would otherwise re-run the queries on this thread. GC'ed entries are
 * pruned as a side effect.
 *
 * @return {@code true} if at least one non-GC'ed {@link RealmResults} instance exists,
 * {@code false} otherwise.
 */
private boolean threadContainsAsyncQueries() {
    boolean found = false;
    Iterator<Map.Entry<WeakReference<RealmResults<? extends RealmModel>>, RealmQuery<?>>> entries = asyncRealmResults.entrySet().iterator();
    while (entries.hasNext()) {
        Map.Entry<WeakReference<RealmResults<? extends RealmModel>>, RealmQuery<?>> entry = entries.next();
        if (entry.getKey().get() == null) {
            // The RealmResults was garbage collected; drop its entry.
            entries.remove();
        } else {
            found = true;
        }
    }
    return found;
}
/**
 * Indicates the presence of empty {@code RealmObject} obtained asynchronously using {@link RealmQuery#findFirstAsync()}.
 * Empty means no pointer to a valid Row. This will help caller to decide when to rerun the query.
 * GC'ed entries are pruned as a side effect.
 *
 * @return {@code true} if there is at least one (non GC'ed) instance of {@link RealmObject}, {@code false} otherwise.
 */
boolean threadContainsAsyncEmptyRealmObject() {
    boolean isEmpty = true;
    Iterator<Map.Entry<WeakReference<RealmObjectProxy>, RealmQuery<?>>> iterator = emptyAsyncRealmObject.entrySet().iterator();
    while (iterator.hasNext()) {
        Map.Entry<WeakReference<RealmObjectProxy>, RealmQuery<?>> next = iterator.next();
        if (next.getKey().get() == null) {
            // Proxy was GC'ed; drop the entry.
            iterator.remove();
        } else {
            isEmpty = false;
        }
    }
    return !isEmpty;
}
/**
 * Drains the three reference queues and removes map/set entries whose referents have
 * been garbage collected. {@code poll()} never blocks, so this is cheap to call on
 * every REALM_CHANGED.
 */
private void deleteWeakReferences() {
    Reference<? extends RealmResults<? extends RealmModel>> weakReferenceResults;
    Reference<? extends RealmModel> weakReferenceObject;
    while ((weakReferenceResults = referenceQueueAsyncRealmResults.poll()) != null ) { // Does not wait for a reference to become available.
        asyncRealmResults.remove(weakReferenceResults);
    }
    while ((weakReferenceResults = referenceQueueSyncRealmResults.poll()) != null ) {
        syncRealmResults.remove(weakReferenceResults);
    }
    while ((weakReferenceObject = referenceQueueRealmObject.poll()) != null ) {
        realmObjects.remove(weakReferenceObject);
    }
}
/**
 * Registers a RealmResults produced by an async query, keyed by a weak reference
 * enqueued on referenceQueueAsyncRealmResults for later cleanup.
 *
 * @param realmResults the async RealmResults to track.
 * @param realmQuery the query that produced it (kept so it can be re-run/updated).
 * @return the weak reference used as the tracking key.
 */
WeakReference<RealmResults<? extends RealmModel>> addToAsyncRealmResults(RealmResults<? extends RealmModel> realmResults, RealmQuery<? extends RealmModel> realmQuery) {
    WeakReference<RealmResults<? extends RealmModel>> weakRealmResults = new WeakReference<RealmResults<? extends RealmModel>>(realmResults,
            referenceQueueAsyncRealmResults);
    asyncRealmResults.put(weakRealmResults, realmQuery);
    return weakRealmResults;
}
/**
 * Registers a synchronously obtained RealmResults for change notifications, weakly
 * referenced via referenceQueueSyncRealmResults for later cleanup.
 *
 * @param realmResults the RealmResults to track.
 */
void addToRealmResults(RealmResults<? extends RealmModel> realmResults) {
    WeakReference<RealmResults<? extends RealmModel>> realmResultsWeakReference
            = new WeakReference<RealmResults<? extends RealmModel>>(realmResults, referenceQueueSyncRealmResults);
    syncRealmResults.add(realmResultsWeakReference);
}
/**
 * Adds a RealmObject to the set notified after a commit. The existing entries are
 * scanned first so the same object is never registered twice.
 *
 * @param realmObject the proxy to track; stored with the NO_REALM_QUERY marker value.
 */
<E extends RealmObjectProxy> void addToRealmObjects(E realmObject) {
    // Linear scan: bail out if this exact instance is already tracked.
    for (WeakReference<RealmObjectProxy> ref : realmObjects.keySet()) {
        if (ref.get() == realmObject) {
            return;
        }
    }
    final WeakReference<RealmObjectProxy> realmObjectWeakReference =
            new WeakReference<RealmObjectProxy>(realmObject, referenceQueueRealmObject);
    realmObjects.put(realmObjectWeakReference, NO_REALM_QUERY);
}
/**
 * Registers a proxy produced by an async find-first query together with the query that
 * produced it, so the query can be re-run if needed.
 *
 * @param realmObject the async proxy to track.
 * @param realmQuery the originating query.
 * @return the weak reference used as the tracking key.
 */
<E extends RealmObjectProxy> WeakReference<RealmObjectProxy> addToAsyncRealmObject(E realmObject, RealmQuery<? extends RealmModel> realmQuery) {
    final WeakReference<RealmObjectProxy> realmObjectWeakReference = new WeakReference<RealmObjectProxy>(realmObject, referenceQueueRealmObject);
    realmObjects.put(realmObjectWeakReference, realmQuery);
    return realmObjectWeakReference;
}
/**
 * Stops tracking an async RealmObject previously added via addToAsyncRealmObject.
 *
 * @param realmObjectWeakReference the tracking key returned at registration time.
 */
void removeFromAsyncRealmObject(WeakReference<RealmObjectProxy> realmObjectWeakReference) {
    realmObjects.remove(realmObjectWeakReference);
}
/**
 * Records an async RealmObject that completed without a valid row, together with its
 * query so it can be re-run later (see updateAsyncEmptyRealmObject).
 *
 * @param realmObjectWeakReference the proxy's tracking key.
 * @param realmQuery the originating query.
 */
void addToEmptyAsyncRealmObject(WeakReference<RealmObjectProxy> realmObjectWeakReference, RealmQuery<? extends RealmModel> realmQuery) {
    emptyAsyncRealmObject.put(realmObjectWeakReference, realmQuery);
}
/**
 * Refreshes all synchronous RealmResults by calling {@code sync_if_needed} on them. This will cause any backing queries
 * to rerun and any deleted objects will be removed from the TableView.
 * <p>
 * WARNING: This will _NOT_ refresh TableViews created from async queries.
 * <p>
 * Note this will _not_ notify any registered listeners. GC'ed entries are pruned as a
 * side effect.
 */
public void refreshSynchronousTableViews() {
    Iterator<WeakReference<RealmResults<? extends RealmModel>>> iterator = syncRealmResults.keySet().iterator();
    while (iterator.hasNext()) {
        WeakReference<RealmResults<? extends RealmModel>> weakRealmResults = iterator.next();
        RealmResults<? extends RealmModel> realmResults = weakRealmResults.get();
        if (realmResults == null) {
            // RealmResults was GC'ed; drop the entry.
            iterator.remove();
        } else {
            realmResults.syncIfNeeded();
        }
    }
}
/**
 * Enables or disables auto-refresh for this thread's Realm.
 *
 * @param autoRefresh {@code true} to enable auto-refresh, {@code false} to disable it.
 * @throws IllegalStateException if enabling auto-refresh on a thread without a Looper.
 */
public void setAutoRefresh(boolean autoRefresh) {
    if (autoRefresh && Looper.myLooper() == null) {
        // Fixed grammar in the user-facing message ("Cannot enabled" -> "Cannot enable").
        throw new IllegalStateException("Cannot enable autorefresh on a non-looper thread.");
    }
    this.autoRefresh = autoRefresh;
}
/**
 * @return {@code true} if auto-refresh is currently enabled, {@code false} otherwise.
 */
public boolean isAutoRefreshEnabled() {
    return autoRefresh;
}
/**
 * Notifies the current thread that the Realm has changed. This will also trigger change listener asynchronously,
 * by posting a REALM_CHANGED message to this Realm's handler. No-op if no Realm is bound.
 */
public void notifyCurrentThreadRealmChanged() {
    if (realm != null) {
        realm.handler.sendEmptyMessage(HandlerController.REALM_CHANGED);
    }
}
}
| |
package com.sequenceiq.cloudbreak.cm.client.retry;
import java.util.function.Function;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;
import com.cloudera.api.swagger.AllHostsResourceApi;
import com.cloudera.api.swagger.AuthRolesResourceApi;
import com.cloudera.api.swagger.BatchResourceApi;
import com.cloudera.api.swagger.CdpResourceApi;
import com.cloudera.api.swagger.ClouderaManagerResourceApi;
import com.cloudera.api.swagger.ClustersResourceApi;
import com.cloudera.api.swagger.CommandsResourceApi;
import com.cloudera.api.swagger.ExternalAccountsResourceApi;
import com.cloudera.api.swagger.ExternalUserMappingsResourceApi;
import com.cloudera.api.swagger.HostTemplatesResourceApi;
import com.cloudera.api.swagger.HostsResourceApi;
import com.cloudera.api.swagger.MgmtRoleConfigGroupsResourceApi;
import com.cloudera.api.swagger.MgmtRolesResourceApi;
import com.cloudera.api.swagger.MgmtServiceResourceApi;
import com.cloudera.api.swagger.ParcelResourceApi;
import com.cloudera.api.swagger.ParcelsResourceApi;
import com.cloudera.api.swagger.RoleCommandsResourceApi;
import com.cloudera.api.swagger.RoleConfigGroupsResourceApi;
import com.cloudera.api.swagger.RolesResourceApi;
import com.cloudera.api.swagger.ServicesResourceApi;
import com.cloudera.api.swagger.ToolsResourceApi;
import com.cloudera.api.swagger.UsersResourceApi;
import com.cloudera.api.swagger.client.ApiClient;
@Configuration
public class CmClientConfig {
// factory functions:
@Bean
public Function<ApiClient, ClouderaManagerResourceApi> clouderaManagerResourceApiFactory() {
return this::clouderaManagerResourceApi;
}
@Bean
public Function<ApiClient, MgmtServiceResourceApi> mgmtServiceResourceApiFactory() {
return this::mgmtServiceResourceApi;
}
@Bean
public Function<ApiClient, ExternalUserMappingsResourceApi> externalUserMappingsResourceApiFactory() {
return this::externalUserMappingsResourceApi;
}
@Bean
public Function<ApiClient, AuthRolesResourceApi> authRolesResourceApiFactory() {
return this::authRolesResourceApi;
}
@Bean
public Function<ApiClient, ClustersResourceApi> clustersResourceApiFactory() {
return this::clustersResourceApi;
}
@Bean
public Function<ApiClient, HostsResourceApi> hostsResourceApiFactory() {
return this::hostsResourceApi;
}
@Bean
public Function<ApiClient, ServicesResourceApi> servicesResourceApiFactory() {
return this::servicesResourceApi;
}
@Bean
public Function<ApiClient, RolesResourceApi> rolesResourceApiFactory() {
return this::rolesResourceApi;
}
@Bean
public Function<ApiClient, RoleConfigGroupsResourceApi> roleConfigGroupsResourceApiFactory() {
return this::roleConfigGroupsResourceApi;
}
@Bean
public Function<ApiClient, RoleCommandsResourceApi> roleCommandsResourceApiFactory() {
return this::roleCommandsResourceApi;
}
@Bean
public Function<ApiClient, HostTemplatesResourceApi> hostTemplatesResourceApiFactory() {
return this::hostTemplatesResourceApi;
}
@Bean
public Function<ApiClient, ParcelResourceApi> parcelResourceApiFactory() {
return this::parcelResourceApi;
}
@Bean
public Function<ApiClient, ParcelsResourceApi> parcelsResourceApiFactory() {
return this::parcelsResourceApi;
}
@Bean
public Function<ApiClient, CommandsResourceApi> commandsResourceApiFactory() {
return this::commandsResourceApi;
}
@Bean
public Function<ApiClient, UsersResourceApi> usersResourceApiFactory() {
return this::usersResourceApi;
}
@Bean
public Function<ApiClient, CdpResourceApi> cdpResourceApiFactory() {
return this::cdpResourceApi;
}
@Bean
public Function<ApiClient, MgmtRolesResourceApi> mgmtRolesResourceApiFactory() {
return this::mgmtRolesResourceApi;
}
@Bean
public Function<ApiClient, MgmtRoleConfigGroupsResourceApi> mgmtRoleConfigGroupsResourceApiFactory() {
return this::mgmtRoleConfigGroupsResourceApi;
}
@Bean
public Function<ApiClient, AllHostsResourceApi> allHostsResourceApiFactory() {
return this::allHostsResourceApi;
}
@Bean
public Function<ApiClient, ToolsResourceApi> toolsResourceApiFactory() {
return this::toolsResourceApi;
}
@Bean
public Function<ApiClient, ExternalAccountsResourceApi> externalAccountsResourceApiFactory() {
return this::externalAccountsResourceApi;
}
@Bean
public Function<ApiClient, BatchResourceApi> batchResourceApiFactory() {
return this::batchResourceApi;
}
// prototype bean declarations:
// CHECKSTYLE:OFF
@Bean
@Scope(value = "prototype")
public ClouderaManagerResourceApi clouderaManagerResourceApi(ApiClient apiClient) {
return new ClouderaManagerResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public MgmtServiceResourceApi mgmtServiceResourceApi(ApiClient apiClient) {
return new MgmtServiceResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public ExternalUserMappingsResourceApi externalUserMappingsResourceApi(ApiClient apiClient) {
return new ExternalUserMappingsResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public AuthRolesResourceApi authRolesResourceApi(ApiClient apiClient) {
return new AuthRolesResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public ClustersResourceApi clustersResourceApi(ApiClient apiClient) {
return new ClustersResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public HostsResourceApi hostsResourceApi(ApiClient apiClient) {
return new HostsResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public ServicesResourceApi servicesResourceApi(ApiClient apiClient) {
return new ServicesResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public RolesResourceApi rolesResourceApi(ApiClient apiClient) {
return new RolesResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public RoleConfigGroupsResourceApi roleConfigGroupsResourceApi(ApiClient apiClient) {
return new RoleConfigGroupsResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public RoleCommandsResourceApi roleCommandsResourceApi(ApiClient apiClient) {
return new RoleCommandsResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public HostTemplatesResourceApi hostTemplatesResourceApi(ApiClient apiClient) {
return new HostTemplatesResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public ParcelResourceApi parcelResourceApi(ApiClient apiClient) {
return new ParcelResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public ParcelsResourceApi parcelsResourceApi(ApiClient apiClient) {
return new ParcelsResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public CommandsResourceApi commandsResourceApi(ApiClient apiClient) {
return new CommandsResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public UsersResourceApi usersResourceApi(ApiClient apiClient) {
return new UsersResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public CdpResourceApi cdpResourceApi(ApiClient apiClient) {
return new CdpResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public MgmtRolesResourceApi mgmtRolesResourceApi(ApiClient apiClient) {
return new MgmtRolesResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public MgmtRoleConfigGroupsResourceApi mgmtRoleConfigGroupsResourceApi(ApiClient apiClient) {
return new MgmtRoleConfigGroupsResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public AllHostsResourceApi allHostsResourceApi(ApiClient apiClient) {
return new AllHostsResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public ToolsResourceApi toolsResourceApi(ApiClient apiClient) {
return new ToolsResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public ExternalAccountsResourceApi externalAccountsResourceApi(ApiClient apiClient) {
return new ExternalAccountsResourceApi(apiClient);
}
@Bean
@Scope(value = "prototype")
public BatchResourceApi batchResourceApi(ApiClient apiClient) {
return new BatchResourceApi(apiClient);
}
// CHECKSTYLE:ON
}
| |
/*
* Copyright Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the authors tag. All rights reserved.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU General Public License version 2.
*
* This particular file is subject to the "Classpath" exception as provided in the
* LICENSE file that accompanied this code.
*
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License,
* along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package com.redhat.ceylon.compiler.java.test.cargeneration;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import org.junit.Test;
import com.redhat.ceylon.compiler.java.test.CompilerError;
import com.redhat.ceylon.compiler.java.test.CompilerTests;
import com.redhat.ceylon.compiler.java.test.ErrorCollector;
import com.redhat.ceylon.compiler.java.tools.CeyloncTaskImpl;
import com.redhat.ceylon.javax.tools.Diagnostic;
/**
 * Tests that resources and manifests end up correctly packed inside the
 * generated .car module archives.
 *
 * Fixes over the previous revision:
 * - every {@link JarFile} is opened in try-with-resources so the archive is
 *   closed even when an assertion fails (it previously leaked on failure);
 * - {@code testCarResourceMultipleSub} asserted non-null AFTER dereferencing
 *   the entry, risking an NPE instead of a clean assertion failure;
 * - {@code assertEquals} arguments follow the JUnit (expected, actual) order;
 * - the duplicated "manifest comes first" verification is a shared helper;
 * - a leftover debug dump of the manifest to stderr was removed.
 */
public class CarGenerationTests extends CompilerTests {

    /** Compiles a module with a resource directory and checks the .car contents. */
    @Test
    public void testCarResourceSimple() throws IOException {
        List<String> options = new LinkedList<String>();
        options.add("-src");
        options.add(getPackagePath() + "resmodules/simple/source");
        options.add("-res");
        options.add(getPackagePath() + "resmodules/simple/resource");
        options.addAll(defaultOptions);
        CeyloncTaskImpl task = getCompilerTask(options,
                null,
                Arrays.asList("test.simple"));
        Boolean ret = task.call();
        assertTrue(ret);

        File carFile = getModuleArchive("test.simple", "1.0");
        assertTrue(carFile.exists());
        try (JarFile car = new JarFile(carFile)) {
            assertNotNull(car.getEntry("test/simple/README.txt"));
            assertNotNull(car.getEntry("test/simple/subdir/SUBDIR.txt"));
            assertNotNull(car.getEntry("test/simple/$module_.class"));
        }
    }

    /** Runs the explicit-file resource test in both modes. */
    @Test
    public void testCarResourceFiles() throws IOException {
        testCarResourceFilesSub(false);
        testCarResourceFilesSub(true);
    }

    /**
     * Compiles the "files" module either fully (descriptor + README) or
     * incrementally with only an extra resource file, then checks which
     * resources are present in the archive.
     */
    private void testCarResourceFilesSub(boolean alternative) throws IOException {
        List<String> options = new LinkedList<String>();
        options.add("-src");
        options.add(getPackagePath() + "resmodules/files/source");
        options.add("-res");
        options.add(getPackagePath() + "resmodules/files/resource");
        options.addAll(defaultOptions);
        CeyloncTaskImpl task;
        if (alternative) {
            // incremental pass: only the extra resource file is compiled
            task = getCompilerTask(options,
                    "resmodules/files/resource/test/files/extrafile");
        } else {
            task = getCompilerTask(options,
                    "resmodules/files/source/test/files/module.ceylon",
                    "resmodules/files/resource/test/files/README.txt");
        }
        Boolean ret = task.call();
        assertTrue(ret);

        File carFile = getModuleArchive("test.files", "1.0");
        assertTrue(carFile.exists());
        try (JarFile car = new JarFile(carFile)) {
            assertNotNull(car.getEntry("test/files/README.txt"));
            ZipEntry extra = car.getEntry("test/files/extrafile");
            if (alternative) {
                assertNotNull(extra);
            } else {
                assertNull(extra);
            }
            assertNotNull(car.getEntry("test/files/$module_.class"));
        }
    }

    /**
     * The packed entries must be the same regardless of -res directory order,
     * but the winning copy of the duplicated README (and hence its size)
     * depends on which directory is listed first.
     */
    @Test
    public void testCarResourceMultiple() throws IOException {
        // JUnit convention: expected value first (the previous revision had
        // the arguments reversed, which garbles failure messages).
        assertEquals(40, testCarResourceMultipleSub(false));
        assertEquals(108, testCarResourceMultipleSub(true));
    }

    /** Compiles with two -res directories and returns the packed README's size. */
    private long testCarResourceMultipleSub(boolean reverse) throws IOException {
        List<String> options = new LinkedList<String>();
        options.add("-src");
        options.add(getPackagePath() + "resmodules/multiple/source");
        if (reverse) {
            options.add("-res");
            options.add(getPackagePath() + "resmodules/multiple/resource2");
            options.add("-res");
            options.add(getPackagePath() + "resmodules/multiple/resource");
        } else {
            options.add("-res");
            options.add(getPackagePath() + "resmodules/multiple/resource");
            options.add("-res");
            options.add(getPackagePath() + "resmodules/multiple/resource2");
        }
        options.addAll(defaultOptions);
        CeyloncTaskImpl task = getCompilerTask(options,
                null,
                Arrays.asList("test.multiple"));
        Boolean ret = task.call();
        assertTrue(ret);

        File carFile = getModuleArchive("test.multiple", "1.0");
        assertTrue(carFile.exists());
        try (JarFile car = new JarFile(carFile)) {
            ZipEntry readme = car.getEntry("test/multiple/README.txt");
            // Assert presence BEFORE dereferencing; the previous revision
            // called getSize() first and would have thrown an NPE.
            assertNotNull(readme);
            long size = readme.getSize();
            assertNotNull(car.getEntry("test/multiple/README2.txt"));
            assertNotNull(car.getEntry("test/multiple/$module_.class"));
            return size;
        }
    }

    /** Resources of the default module land at the archive root, unprefixed. */
    @Test
    public void testCarResourceDefault() throws IOException {
        List<String> options = new LinkedList<String>();
        options.add("-src");
        options.add(getPackagePath() + "resmodules/default/source");
        options.add("-res");
        options.add(getPackagePath() + "resmodules/default/resource");
        options.addAll(defaultOptions);
        CeyloncTaskImpl task = getCompilerTask(options,
                "resmodules/default/resource/README.txt",
                "resmodules/default/resource/subdir/SUBDIR.txt");
        Boolean ret = task.call();
        assertTrue(ret);

        File carFile = getModuleArchive("default", null);
        assertTrue(carFile.exists());
        try (JarFile car = new JarFile(carFile)) {
            assertNotNull(car.getEntry("README.txt"));
            assertNotNull(car.getEntry("subdir/SUBDIR.txt"));
        }
    }

    /** Files under the default resource-root directory are packed at the archive root. */
    @Test
    public void testCarResourceRoot() throws IOException {
        List<String> options = new LinkedList<String>();
        options.add("-src");
        options.add(getPackagePath() + "resmodules/rootdir/source");
        options.add("-res");
        options.add(getPackagePath() + "resmodules/rootdir/resource");
        options.addAll(defaultOptions);
        CeyloncTaskImpl task = getCompilerTask(options,
                null,
                Arrays.asList("test.rootdir"));
        Boolean ret = task.call();
        assertTrue(ret);

        File carFile = getModuleArchive("test.rootdir", "1.0");
        assertTrue(carFile.exists());
        try (JarFile car = new JarFile(carFile)) {
            assertNotNull(car.getEntry("test/rootdir/README.txt"));
            assertNotNull(car.getEntry("rootfile"));
            assertNotNull(car.getEntry("rootdir/rootsubdirfile"));
            assertNotNull(car.getEntry("test/rootdir/$module_.class"));
        }
    }

    /** Same as above but with an explicitly configured -resroot directory name. */
    @Test
    public void testCarResourceAlternativeRoot() throws IOException {
        List<String> options = new LinkedList<String>();
        options.add("-src");
        options.add(getPackagePath() + "resmodules/altrootdir/source");
        options.add("-res");
        options.add(getPackagePath() + "resmodules/altrootdir/resource");
        options.add("-resroot");
        options.add("ALTROOT");
        options.addAll(defaultOptions);
        CeyloncTaskImpl task = getCompilerTask(options,
                null,
                Arrays.asList("test.altrootdir"));
        Boolean ret = task.call();
        assertTrue(ret);

        File carFile = getModuleArchive("test.altrootdir", "1.0");
        assertTrue(carFile.exists());
        try (JarFile car = new JarFile(carFile)) {
            assertNotNull(car.getEntry("test/altrootdir/README.txt"));
            assertNotNull(car.getEntry("rootfile"));
            assertNotNull(car.getEntry("test/altrootdir/$module_.class"));
        }
    }

    /**
     * Asserts that the archive's manifest exists, carries the expected
     * Bundle-SymbolicName, and that META-INF/ followed by META-INF/MANIFEST.MF
     * are the first two entries of the archive.
     */
    private void assertManifestComesFirst(File carFile, String symbolicName) throws IOException {
        try (JarFile car = new JarFile(carFile)) {
            Manifest manifest = car.getManifest();
            assertTrue(manifest != null);
            assertEquals(symbolicName, manifest.getMainAttributes().getValue("Bundle-SymbolicName"));
            Enumeration<JarEntry> entries = car.entries();
            assertTrue(entries.hasMoreElements());
            JarEntry entry = entries.nextElement();
            assertEquals("META-INF/", entry.getName());
            assertTrue(entry.isDirectory());
            entry = entries.nextElement();
            assertEquals("META-INF/MANIFEST.MF", entry.getName());
            assertTrue(!entry.isDirectory());
        }
    }

    /*
     * Although not in the JAR specification, there are tools and APIs
     * which rely on the MANIFEST.MF being the first file entry in a Jar file.
     *
     * Test for the case of a compiler-generated MANIFEST.MF
     */
    @Test
    public void testCarGeneratedManifestComesFirst() throws IOException {
        ErrorCollector ec = new ErrorCollector();
        List<String> options = new LinkedList<String>();
        options.add("-src");
        options.add(getPackagePath() + "meta/generatedmanifest/source");
        options.addAll(defaultOptions);
        CeyloncTaskImpl task = getCompilerTask(options,
                ec,
                Arrays.asList("test.generatedmanifest"));
        assertTrue(task.call());
        assertTrue(ec.get(Diagnostic.Kind.ERROR, Diagnostic.Kind.WARNING).isEmpty());
        File carFile = getModuleArchive("test.generatedmanifest", "1.0");
        assertTrue(carFile.exists());
        assertManifestComesFirst(carFile, "test.generatedmanifest");

        // Incremental compilation of a single source file must preserve the
        // manifest-first layout.
        task = getCompilerTask(options,
                ec,
                "meta/generatedmanifest/source/test/generatedmanifest/run.ceylon");
        assertTrue(task.call());
        assertTrue(carFile.exists());
        assertTrue(ec.get(Diagnostic.Kind.ERROR, Diagnostic.Kind.WARNING).isEmpty());
        assertManifestComesFirst(carFile, "test.generatedmanifest");
    }

    /*
     * Although not in the JAR specification, there are tools and APIs
     * which rely on the MANIFEST.MF being the first file entry in a Jar file.
     *
     * Test for the case of an existing MANIFEST.MF in resources
     */
    @Test
    public void testCarMergedManifestComesFirst() throws IOException {
        ErrorCollector ec = new ErrorCollector();
        List<String> options = new LinkedList<String>();
        options.add("-src");
        options.add(getPackagePath() + "meta/mergedmanifest/source");
        options.add("-res");
        options.add(getPackagePath() + "meta/mergedmanifest/resource");
        options.addAll(defaultOptions);
        CeyloncTaskImpl task = getCompilerTask(options,
                ec,
                Arrays.asList("test.mergedmanifest"));
        assertTrue(task.call());
        File carFile = getModuleArchive("test.mergedmanifest", "1.0");
        assertTrue(carFile.exists());
        assertTrue(ec.get(Diagnostic.Kind.ERROR).isEmpty());
        // When the compiler value overrides a user value, we expect a warning
        assertTrue(ec.get(Diagnostic.Kind.WARNING).size() == 1);
        assertTrue(ec.get(Diagnostic.Kind.WARNING).iterator().next().equals(new CompilerError(Diagnostic.Kind.WARNING, null, -1,
                "manifest attribute provided by compiler: ignoring value from 'Bundle-ActivationPolicy' for module 'test.mergedmanifest'")));
        assertManifestComesFirst(carFile, "test.mergedmanifest");
        // User attributes the compiler does not own must survive the merge.
        try (JarFile car = new JarFile(carFile)) {
            Manifest manifest = car.getManifest();
            assertEquals("Baz", manifest.getMainAttributes().getValue("Foo-Bar"));
            assertEquals("whatever", manifest.getMainAttributes().getValue("LastLineWithoutNewline"));
        }
    }
}
| |
package org.osmdroid.diag;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.hardware.Sensor;
import android.hardware.SensorManager;
import android.location.GpsSatellite;
import android.location.GpsStatus;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.location.LocationProvider;
import android.os.Bundle;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.TextView;
import org.osmdroid.R;
import org.osmdroid.tileprovider.util.StorageUtils;
import java.util.Iterator;
import java.util.List;
/**
* created on 2/6/2018.
*
* @author Alex O'Ree
*/
/**
 * Diagnostics screen that dumps location, sensor, GPS, permission and storage
 * information into a single TextView on button press.
 *
 * Fixes over the previous revision: {@code onResume}/{@code onPause} now carry
 * {@code @Override} (a signature typo would previously have silently detached
 * them from the lifecycle), and the intentionally-swallowed exceptions are
 * named {@code ignored} with an explanation.
 *
 * created on 2/6/2018.
 *
 * @author Alex O'Ree
 */
public class DiagnosticsActivity extends AppCompatActivity
        implements View.OnClickListener, LocationListener, GpsStatus.Listener {

    // Shared output area every probe writes its report into.
    TextView output = null;
    LocationManager lm = null;
    // Most recent fix delivered via onLocationChanged(), if any.
    Location currentLocation = null;
    // Most recent satellite status delivered via onGpsStatusChanged(), if any.
    GpsStatus gpsStatus = null;

    @Override
    public void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_diag);
        Toolbar toolbar = findViewById(R.id.my_toolbar);
        setSupportActionBar(toolbar);
        //noinspection ConstantConditions
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setDisplayShowHomeEnabled(true);
        findViewById(R.id.diag_location).setOnClickListener(this);
        findViewById(R.id.diag_orientation).setOnClickListener(this);
        findViewById(R.id.diag_gps).setOnClickListener(this);
        findViewById(R.id.diag_permissions).setOnClickListener(this);
        findViewById(R.id.diag_storage).setOnClickListener(this);
        output = findViewById(R.id.diag_output);
    }

    @Override
    public boolean onSupportNavigateUp() {
        onBackPressed();
        return true;
    }

    /** Dispatches each diagnostics button to its probe. */
    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.diag_location:
                probeLocation();
                break;
            case R.id.diag_orientation:
                probeOrientation();
                break;
            case R.id.diag_permissions:
                checkPermissions();
                break;
            case R.id.diag_storage:
                probeStorage();
                break;
            case R.id.diag_gps:
                probeGps();
                break;
        }
    }

    /** Starts listening for GPS status and location updates while visible. */
    @Override
    public void onResume() {
        super.onResume();
        lm = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
        try {
            lm.addGpsStatusListener(this);
            lm.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, this);
        } catch (SecurityException ignored) {
            // Location permission not granted; the other probes still work.
        } catch (RuntimeException ignored) {
            // Provider unavailable on this device; best-effort diagnostics only.
        }
    }

    /** Stops listening when the activity goes to the background. */
    @Override
    public void onPause() {
        super.onPause();
        lm = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
        try {
            lm.removeUpdates(this);
            lm.removeGpsStatusListener(this);
        } catch (SecurityException ignored) {
            // Registration may never have succeeded; nothing to undo.
        } catch (RuntimeException ignored) {
            // Best-effort cleanup only.
        }
    }

    /** Lists every storage path known to StorageUtils. */
    private void probeStorage() {
        StringBuilder sb = new StringBuilder();
        List<StorageUtils.StorageInfo> storageInfos = StorageUtils.getStorageList(this);
        for (StorageUtils.StorageInfo storageInfo : storageInfos) {
            sb.append(storageInfo.path).append("\n");
        }
        output.setText(sb.toString());
    }

    /** Dumps the last known fix and per-satellite GPS status. */
    private void probeGps() {
        StringBuilder sb = new StringBuilder();
        if (currentLocation != null) {
            sb.append("Current Location:\n");
            sb.append(currentLocation.getLatitude()).append(",").append(currentLocation.getLongitude()).append("\n");
            sb.append("Alt ").append(currentLocation.getAltitude()).append("\n");
            sb.append("Accuracy ").append(currentLocation.getAccuracy()).append("\n");
            sb.append("Bearing ").append(currentLocation.getBearing()).append("\n");
            sb.append("Speed ").append(currentLocation.getSpeed()).append("\n\n");
        }
        try {
            if (gpsStatus != null) {
                Iterator<GpsSatellite> iterator = gpsStatus.getSatellites().iterator();
                while (iterator.hasNext()) {
                    GpsSatellite next = iterator.next();
                    sb.append("Sat PRN " + next.getPrn() + " Elevation " + next.getElevation() + " Azimuth " + next.getAzimuth() + "SNR " + next.getSnr()).append("\n");
                }
            }
        } catch (Exception e) {
            // Diagnostics screen: show the failure rather than crash.
            sb.append(e.toString());
        }
        output.setText(sb.toString());
    }

    /** Reports whether the two runtime permissions this app cares about are granted. */
    private void checkPermissions() {
        StringBuilder sb = new StringBuilder();
        sb.append("Fine Location Granted: ");
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) == PackageManager.PERMISSION_GRANTED) {
            sb.append("yes\n");
        } else {
            sb.append("no\n");
        }
        sb.append("Write External Storage: ");
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED) {
            sb.append("yes\n");
        } else {
            sb.append("no\n");
        }
        output.setText(sb.toString());
    }

    /** Lists the orientation sensors available on this device. */
    private void probeOrientation() {
        StringBuilder sb = new StringBuilder();
        SensorManager mSensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
        List<Sensor> sensorList = mSensorManager.getSensorList(Sensor.TYPE_ORIENTATION);
        for (Sensor s : sensorList) {
            sb.append(s.getName() + ":" + s.toString() + "\n");
        }
        output.setText(sb.toString());
    }

    /** Dumps the capabilities of every registered location provider. */
    private void probeLocation() {
        StringBuilder sb = new StringBuilder();
        List<String> allProviders = lm.getAllProviders();
        for (String s : allProviders) {
            sb.append(s).append("\n");
            LocationProvider provider = lm.getProvider(s);
            sb.append("Name " + provider.getName()).append("\n");
            sb.append("Cell " + provider.requiresCell()).append("\n");
            sb.append("Network " + provider.requiresNetwork()).append("\n");
            sb.append("Satellite " + provider.requiresSatellite()).append("\n");
            sb.append("Altitude " + provider.supportsAltitude()).append("\n");
            sb.append("Bearing " + provider.supportsBearing()).append("\n");
            sb.append("Speed " + provider.supportsSpeed()).append("\n\n");
        }
        output.setText(sb.toString());
    }

    @Override
    public void onLocationChanged(Location location) {
        this.currentLocation = location;
    }

    @Override
    public void onStatusChanged(String provider, int status, Bundle extras) {
    }

    @Override
    public void onProviderEnabled(String provider) {
    }

    @Override
    public void onProviderDisabled(String provider) {
    }

    /** Caches the latest satellite status so probeGps() can report it. */
    @Override
    public void onGpsStatusChanged(int event) {
        switch (event) {
            case GpsStatus.GPS_EVENT_SATELLITE_STATUS:
                try {
                    gpsStatus = lm.getGpsStatus(gpsStatus);
                } catch (SecurityException e) {
                    e.printStackTrace();
                }
                break;
            case GpsStatus.GPS_EVENT_FIRST_FIX:
                // Do something.
                break;
        }
    }
}
| |
/*
* Copyright (c) 2018. Open Text Corporation. All Rights Reserved.
*/
package com.emc.documentum.rest.client.sample.model.xml.jaxb;
import java.util.List;
import java.util.Objects;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import com.emc.documentum.rest.client.sample.client.util.Equals;
import com.emc.documentum.rest.client.sample.model.Lifecycle.EntryCriteria;
import com.emc.documentum.rest.client.sample.model.Lifecycle.LifecycleState;
import com.emc.documentum.rest.client.sample.model.Lifecycle.Module;
import com.emc.documentum.rest.client.sample.model.Lifecycle.Procedure;
@XmlRootElement(name = "state")
/**
 * JAXB binding of a lifecycle state.
 *
 * Fix over the previous revision: every {@code equals(Object)} here (outer
 * class and the three nested bindings) blindly cast its argument, so a
 * {@code null} or foreign-type argument threw ClassCastException/NPE instead
 * of returning {@code false} as the {@link Object#equals} contract requires.
 * Identity and {@code instanceof} guards are now in place; behavior for
 * same-type comparisons is unchanged.
 */
@XmlRootElement(name = "state")
public class JaxbLifecycleState implements LifecycleState {
    private String name;
    private String type;
    private String description;
    private boolean exceptional;
    private String exceptionState;
    private boolean allowAttach;
    private boolean allowSchedule;
    private boolean allowReturnToBase;
    private boolean allowDemote;
    private int no;
    private int index;
    private List<String> returnConditions;
    private EntryCriteria entryCriteria;
    private Procedure userCriteria;
    private Procedure action;
    private Procedure userAction;
    private Procedure userPostAction;
    private Module userCriteriaService;
    private Module userActionService;
    private Module userPostService;
    private Module systemAction;
    private String typeOverrideId;

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }

    public String getType() { return type; }
    public void setType(String type) { this.type = type; }

    public String getDescription() { return description; }
    public void setDescription(String description) { this.description = description; }

    public boolean isExceptional() { return exceptional; }
    public void setExceptional(boolean exceptional) { this.exceptional = exceptional; }

    @XmlElement(name="exception-state")
    public String getExceptionState() { return exceptionState; }
    public void setExceptionState(String exceptionState) { this.exceptionState = exceptionState; }

    @XmlElement(name="allow-attach")
    public boolean isAllowAttach() { return allowAttach; }
    public void setAllowAttach(boolean allowAttach) { this.allowAttach = allowAttach; }

    @XmlElement(name="allow-schedule")
    public boolean isAllowSchedule() { return allowSchedule; }
    public void setAllowSchedule(boolean allowSchedule) { this.allowSchedule = allowSchedule; }

    @XmlElement(name="allow-return-to-base")
    public boolean isAllowReturnToBase() { return allowReturnToBase; }
    public void setAllowReturnToBase(boolean allowReturnToBase) { this.allowReturnToBase = allowReturnToBase; }

    @XmlElement(name="allow-demote")
    public boolean isAllowDemote() { return allowDemote; }
    public void setAllowDemote(boolean allowDemote) { this.allowDemote = allowDemote; }

    public int getNo() { return no; }
    public void setNo(int no) { this.no = no; }

    public int getIndex() { return index; }
    public void setIndex(int index) { this.index = index; }

    @XmlElementWrapper(name="return-conditions")
    @XmlElement(name="return-condition")
    public List<String> getReturnConditions() { return returnConditions; }
    public void setReturnConditions(List<String> returnConditions) { this.returnConditions = returnConditions; }

    @XmlElement(name="entry-criteria", type=JaxbEntryCriteria.class)
    public EntryCriteria getEntryCriteria() { return entryCriteria; }
    public void setEntryCriteria(EntryCriteria entryCriteria) { this.entryCriteria = entryCriteria; }

    @XmlElement(name="user-criteria", type=JaxbProcedure.class)
    public Procedure getUserCriteria() { return userCriteria; }
    public void setUserCriteria(Procedure userCriteria) { this.userCriteria = userCriteria; }

    @XmlElement(type=JaxbProcedure.class)
    public Procedure getAction() { return action; }
    public void setAction(Procedure action) { this.action = action; }

    @XmlElement(name="user-action", type=JaxbProcedure.class)
    public Procedure getUserAction() { return userAction; }
    public void setUserAction(Procedure userAction) { this.userAction = userAction; }

    @XmlElement(name="user-post-action", type=JaxbProcedure.class)
    public Procedure getUserPostAction() { return userPostAction; }
    public void setUserPostAction(Procedure userPostAction) { this.userPostAction = userPostAction; }

    @XmlElement(name="user-criteria-service", type=JaxbModule.class)
    public Module getUserCriteriaService() { return userCriteriaService; }
    public void setUserCriteriaService(Module userCriteriaService) { this.userCriteriaService = userCriteriaService; }

    @XmlElement(name="user-action-service", type=JaxbModule.class)
    public Module getUserActionService() { return userActionService; }
    public void setUserActionService(Module userActionService) { this.userActionService = userActionService; }

    @XmlElement(name="user-post-service", type=JaxbModule.class)
    public Module getUserPostService() { return userPostService; }
    public void setUserPostService(Module userPostService) { this.userPostService = userPostService; }

    @XmlElement(name="system-action", type=JaxbModule.class)
    public Module getSystemAction() { return systemAction; }
    public void setSystemAction(Module systemAction) { this.systemAction = systemAction; }

    @XmlElement(name="type-override-id")
    public String getTypeOverrideId() { return typeOverrideId; }
    public void setTypeOverrideId(String typeOverrideId) { this.typeOverrideId = typeOverrideId; }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof JaxbLifecycleState)) {
            return false;
        }
        JaxbLifecycleState that = (JaxbLifecycleState) obj;
        return Equals.equal(name, that.name)
            && Equals.equal(description, that.description)
            && Equals.equal(type, that.type)
            && Equals.equal(exceptional, that.exceptional)
            && Equals.equal(exceptionState, that.exceptionState)
            && Equals.equal(allowAttach, that.allowAttach)
            && Equals.equal(allowSchedule, that.allowSchedule)
            && Equals.equal(allowReturnToBase, that.allowReturnToBase)
            && Equals.equal(allowDemote, that.allowDemote)
            && Equals.equal(no, that.no)
            && Equals.equal(index, that.index)
            && Equals.equal(returnConditions, that.returnConditions)
            && Equals.equal(entryCriteria, that.entryCriteria)
            && Equals.equal(userCriteria, that.userCriteria)
            && Equals.equal(action, that.action)
            && Equals.equal(userAction, that.userAction)
            && Equals.equal(userPostAction, that.userPostAction)
            && Equals.equal(userCriteriaService, that.userCriteriaService)
            && Equals.equal(userActionService, that.userActionService)
            && Equals.equal(userPostService, that.userPostService)
            && Equals.equal(systemAction, that.systemAction)
            && Equals.equal(typeOverrideId, that.typeOverrideId);
    }

    @Override
    public int hashCode() {
        // Hashes a subset of the fields compared in equals(); legal, since
        // equal objects agree on these fields too.
        return Objects.hash(name, exceptional, exceptionState, no, entryCriteria);
    }

    /** JAXB binding of an entry criterion (id + expression). */
    public static class JaxbEntryCriteria implements EntryCriteria {
        private String expression;
        private String id;

        public String getId() { return id; }
        public void setId(String id) { this.id = id; }

        public String getExpression() { return expression; }
        public void setExpression(String expression) { this.expression = expression; }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof JaxbEntryCriteria)) {
                return false;
            }
            JaxbEntryCriteria that = (JaxbEntryCriteria) obj;
            return Equals.equal(id, that.id)
                && Equals.equal(expression, that.expression);
        }

        @Override
        public int hashCode() {
            return Objects.hash(id, expression);
        }
    }

    /** JAXB binding of a lifecycle procedure (id + name + version). */
    public static class JaxbProcedure implements Procedure {
        private String name;
        private String version;
        private String id;

        public String getId() { return id; }
        public void setId(String id) { this.id = id; }

        public String getName() { return name; }
        public void setName(String name) { this.name = name; }

        public String getVersion() { return version; }
        public void setVersion(String version) { this.version = version; }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof JaxbProcedure)) {
                return false;
            }
            JaxbProcedure that = (JaxbProcedure) obj;
            return Equals.equal(id, that.id)
                && Equals.equal(name, that.name)
                && Equals.equal(version, that.version);
        }

        @Override
        public int hashCode() {
            return Objects.hash(id, name, version);
        }
    }

    /** JAXB binding of a lifecycle module (id + name + primary class). */
    public static class JaxbModule implements Module {
        private String id;
        private String name;
        private String primaryClass;

        public String getName() { return name; }
        public void setName(String name) { this.name = name; }

        @XmlElement(name="primary-class")
        public String getPrimaryClass() { return primaryClass; }
        public void setPrimaryClass(String primaryClass) { this.primaryClass = primaryClass; }

        public String getId() { return id; }
        public void setId(String id) { this.id = id; }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof JaxbModule)) {
                return false;
            }
            JaxbModule that = (JaxbModule) obj;
            return Equals.equal(id, that.id)
                && Equals.equal(name, that.name)
                && Equals.equal(primaryClass, that.primaryClass);
        }

        @Override
        public int hashCode() {
            return Objects.hash(id, name, primaryClass);
        }
    }
}
| |
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.client.lienzo.canvas.export;
import java.util.List;
import java.util.Optional;
import com.ait.lienzo.client.core.Context2D;
import com.ait.lienzo.client.core.shape.Layer;
import com.ait.lienzo.client.core.shape.MultiPath;
import com.ait.lienzo.client.core.shape.Viewport;
import com.ait.lienzo.client.core.shape.wires.WiresManager;
import com.ait.lienzo.client.core.shape.wires.WiresShape;
import com.ait.lienzo.client.core.types.BoundingBox;
import com.ait.lienzo.client.core.types.Point2D;
import com.ait.lienzo.client.core.types.Transform;
import com.ait.lienzo.client.core.util.ScratchPad;
import com.ait.lienzo.shared.core.types.DataURLType;
import com.ait.lienzo.test.LienzoMockitoTestRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.stunner.client.lienzo.canvas.wires.WiresCanvas;
import org.kie.workbench.common.stunner.client.lienzo.canvas.wires.WiresCanvasView;
import org.kie.workbench.common.stunner.client.lienzo.canvas.wires.WiresLayer;
import org.kie.workbench.common.stunner.core.api.DefinitionManager;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.canvas.export.CanvasExportSettings;
import org.kie.workbench.common.stunner.core.client.canvas.export.CanvasURLExportSettings;
import org.kie.workbench.common.stunner.core.definition.adapter.AdapterManager;
import org.kie.workbench.common.stunner.core.definition.adapter.DefinitionSetAdapter;
import org.kie.workbench.common.stunner.core.diagram.Diagram;
import org.kie.workbench.common.stunner.core.diagram.Metadata;
import org.kie.workbench.common.stunner.core.registry.definition.TypeDefinitionSetRegistry;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.uberfire.ext.editor.commons.client.file.exports.svg.IContext2D;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.kie.workbench.common.stunner.core.client.canvas.export.CanvasExport.URLDataType.JPG;
import static org.kie.workbench.common.stunner.core.client.canvas.export.CanvasExport.URLDataType.PNG;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(LienzoMockitoTestRunner.class)
public class LienzoCanvasExportTest {
// --- Mocked canvas/layer wiring (chained together in setup()) ---
@Mock
private AbstractCanvasHandler canvasHandler;
@Mock
private WiresCanvas canvas;
@Mock
private WiresCanvasView canvasView;
@Mock
private WiresLayer lienzoLayer;
@Mock
private Layer layer;
@Mock
private Viewport viewport;
// --- Drawing surface used by the image-export paths ---
@Mock
private ScratchPad scratchPad;
@Mock
private Context2D context2D;
// Unit under test (constructed in setup() with the mocked bounds provider).
private LienzoCanvasExport tested;
@Mock
private LienzoCanvasExport.BoundsProvider boundsProvider;
// --- Diagram/definition metadata consulted during export ---
@Mock
private Diagram diagram;
@Mock
private Metadata metadata;
private final String DEF_SET_ID = "DEF_SET_ID";
@Mock
private DefinitionManager definitionManager;
@Mock
private TypeDefinitionSetRegistry definitionSets;
@Mock
private Object defSet;
@Mock
private AdapterManager adapters;
@Mock
private DefinitionSetAdapter<Object> definitionSetAdapter;
/**
 * Wires the mock graph: handler -> canvas -> view -> layer, a 100x200 Lienzo
 * layer backed by a mocked ScratchPad, and diagram metadata resolving to a
 * definition set whose SVG node id is "id". The bounds provider is stubbed to
 * report a 100x200 export area.
 */
@Before
public void setup() {
    when(canvasHandler.getCanvas()).thenReturn(canvas);
    when(canvas.getView()).thenReturn(canvasView);
    when(canvasView.getLayer()).thenReturn(lienzoLayer);
    when(lienzoLayer.getLienzoLayer()).thenReturn(layer);
    when(layer.getViewport()).thenReturn(viewport);
    when(layer.uuid()).thenReturn("someLayer");
    when(layer.getScratchPad()).thenReturn(scratchPad);
    when(layer.getWidth()).thenReturn(100);
    when(layer.getHeight()).thenReturn(200);
    // [x, y, width, height] reported by the bounds provider for any settings.
    when(boundsProvider.compute(eq(lienzoLayer), any(CanvasExportSettings.class))).thenReturn(new int[]{0, 0, 100, 200});
    when(scratchPad.getContext()).thenReturn(context2D);
    when(canvasHandler.getDiagram()).thenReturn(diagram);
    when(diagram.getMetadata()).thenReturn(metadata);
    when(metadata.getDefinitionSetId()).thenReturn(DEF_SET_ID);
    when(canvasHandler.getDefinitionManager()).thenReturn(definitionManager);
    when(definitionManager.definitionSets()).thenReturn(definitionSets);
    when(definitionSets.getDefinitionSetById(DEF_SET_ID)).thenReturn(defSet);
    when(definitionManager.adapters()).thenReturn(adapters);
    when(adapters.forDefinitionSet()).thenReturn(definitionSetAdapter);
    when(definitionSetAdapter.getSvgNodeId(defSet)).thenReturn(Optional.of("id"));
    this.tested = new LienzoCanvasExport(boundsProvider);
}
/**
 * JPG export must fill the background, draw the layer onto the scratch pad's
 * context, emit a JPG data-URL, and clear the pad afterwards.
 */
@Test
public void testToJpgImageData() {
    tested.toImageData(canvasHandler, CanvasURLExportSettings.build(JPG));
    verify(context2D, times(1)).setFillColor(eq(LienzoCanvasExport.BG_COLOR));
    verify(context2D, times(1)).fillRect(eq(0d), eq(0d), eq(100d), eq(200d));
    verify(layer, times(1)).drawWithTransforms(eq(context2D), eq(1d), any(BoundingBox.class));
    verify(scratchPad, times(1)).toDataURL(eq(DataURLType.JPG), eq(1d));
    verify(scratchPad, times(1)).clear();
}
/**
 * PNG export must fill the background, draw the layer onto the scratch pad's
 * context, emit a PNG data-URL, and clear the pad afterwards.
 */
@Test
public void testToPngImageData() {
    tested.toImageData(canvasHandler, CanvasURLExportSettings.build(PNG));
    verify(context2D, times(1)).setFillColor(eq(LienzoCanvasExport.BG_COLOR));
    verify(context2D, times(1)).fillRect(eq(0d), eq(0d), eq(100d), eq(200d));
    verify(layer, times(1)).drawWithTransforms(eq(context2D), eq(1d), any(BoundingBox.class));
    verify(scratchPad, times(1)).toDataURL(eq(DataURLType.PNG), eq(1d));
    verify(scratchPad, times(1)).clear();
}
/**
 * An empty wires layer still yields a non-zero export area.
 */
@Test
public void testWiresLayerBoundsProviderEmpty() {
    // Use a real (non-mock) layer so the provider walks actual wires state.
    layer = new Layer();
    when(lienzoLayer.getLienzoLayer()).thenReturn(layer);
    WiresManager.get(layer);
    LienzoCanvasExport.WiresLayerBoundsProvider provider = new LienzoCanvasExport.WiresLayerBoundsProvider();
    int[] size0 = provider.compute(lienzoLayer, CanvasExportSettings.build());
    assertEquals(0, size0[0]);
    assertEquals(0, size0[1]);
    // 25x25 — presumably the provider's minimum/padding size for an empty
    // layer; TODO confirm against WiresLayerBoundsProvider.
    assertEquals(25, size0[2]);
    assertEquals(25, size0[3]);
}
@Test
public void testWiresLayerBoundsProvider() {
layer = new Layer();
when(lienzoLayer.getLienzoLayer()).thenReturn(layer);
WiresManager wiresManager = WiresManager.get(layer);
com.ait.lienzo.client.core.shape.wires.WiresLayer wiresLayer = wiresManager.getLayer();
wiresLayer.add(new WiresShape(new MultiPath().rect(0, 0, 50, 50)).setLocation(new Point2D(12, 44)));
wiresLayer.add(new WiresShape(new MultiPath().rect(0, 0, 100, 150)).setLocation(new Point2D(1, 3)));
LienzoCanvasExport.WiresLayerBoundsProvider provider = new LienzoCanvasExport.WiresLayerBoundsProvider();
int[] size0 = provider.compute(lienzoLayer, CanvasExportSettings.build());
assertEquals(0, size0[0]);
assertEquals(0, size0[1]);
assertEquals(151, size0[2]);
assertEquals(203, size0[3]);
}
@Test
public void testWiresLayerBoundsProviderWithSize() {
LienzoCanvasExport.WiresLayerBoundsProvider provider = new LienzoCanvasExport.WiresLayerBoundsProvider();
int[] size0 = provider.compute(lienzoLayer, CanvasExportSettings.build(11, 33));
assertEquals(0, size0[0]);
assertEquals(0, size0[1]);
assertEquals(36, size0[2]);
assertEquals(58, size0[3]);
}
@Test
public void testToContext2D() {
Transform transform = new Transform().translate(11, 33).scale(0.1, 0.3);
when(viewport.getTransform()).thenReturn(transform);
IContext2D iContext2D = tested.toContext2D(canvasHandler, CanvasExportSettings.build());
assertNotNull(iContext2D);
verify(layer, times(1)).draw(any(Context2D.class));
ArgumentCaptor<Transform> transformArgumentCaptor = ArgumentCaptor.forClass(Transform.class);
verify(viewport, times(2)).setTransform(transformArgumentCaptor.capture());
verify(layer).draw();
List<Transform> transforms = transformArgumentCaptor.getAllValues();
Transform t0 = transforms.get(0);
Transform t1 = transforms.get(1);
assertEquals(0d, t0.getTranslateX(), 0d);
assertEquals(0d, t0.getTranslateY(), 0d);
assertEquals(1d, t0.getScaleX(), 0d);
assertEquals(1d, t0.getScaleY(), 0d);
assertEquals(11d, t1.getTranslateX(), 0d);
assertEquals(33d, t1.getTranslateY(), 0d);
assertEquals(0.1d, t1.getScaleX(), 0d);
assertEquals(0.3d, t1.getScaleY(), 0d);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution.buffer;
import com.facebook.presto.OutputBuffers;
import com.facebook.presto.OutputBuffers.OutputBufferId;
import com.facebook.presto.block.BlockAssertions;
import com.facebook.presto.operator.PageAssertions;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.type.Type;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
import static com.facebook.presto.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde;
import static com.google.common.base.Preconditions.checkArgument;
import static io.airlift.concurrent.MoreFutures.tryGetFutureValue;
import static io.airlift.units.DataSize.Unit.BYTE;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
public final class BufferTestUtils
{
    private BufferTestUtils() {}

    static final PagesSerde PAGES_SERDE = testingPagesSerde();
    static final Duration NO_WAIT = new Duration(0, MILLISECONDS);
    static final Duration MAX_WAIT = new Duration(1, SECONDS);
    // Retained size of one serialized single-value test page; lets tests size
    // buffers in units of "pages" rather than raw bytes.
    private static final DataSize BUFFERED_PAGE_SIZE = new DataSize(PAGES_SERDE.serialize(createPage(42)).getRetainedSizeInBytes(), BYTE);

    /**
     * Blocks for up to {@code maxWait} and returns the future's value.
     *
     * @throws IllegalArgumentException if the future does not complete in time
     */
    static BufferResult getFuture(ListenableFuture<BufferResult> future, Duration maxWait)
    {
        return tryGetFutureValue(future, (int) maxWait.toMillis(), MILLISECONDS)
                .orElseThrow(() -> new IllegalArgumentException("bufferResult is empty"));
    }

    /**
     * Asserts that two buffer results carry the same token, completion flag
     * and logically-equal pages.
     */
    static void assertBufferResultEquals(List<? extends Type> types, BufferResult actual, BufferResult expected)
    {
        assertEquals(actual.getSerializedPages().size(), expected.getSerializedPages().size(), "page count");
        assertEquals(actual.getToken(), expected.getToken(), "token");
        for (int pageIndex = 0; pageIndex < actual.getSerializedPages().size(); pageIndex++) {
            // Compare pages by deserialized content, not serialized bytes.
            Page actualPage = PAGES_SERDE.deserialize(actual.getSerializedPages().get(pageIndex));
            Page expectedPage = PAGES_SERDE.deserialize(expected.getSerializedPages().get(pageIndex));
            assertEquals(actualPage.getChannelCount(), expectedPage.getChannelCount());
            PageAssertions.assertPageEquals(types, actualPage, expectedPage);
        }
        assertEquals(actual.isBufferComplete(), expected.isBufferComplete(), "buffer complete");
    }

    /**
     * Builds a non-finished {@link BufferResult} starting at {@code token}
     * containing the given pages in serialized form.
     */
    static BufferResult createBufferResult(String bufferId, long token, List<Page> pages)
    {
        checkArgument(!pages.isEmpty(), "pages is empty");
        return new BufferResult(
                bufferId,
                token,
                token + pages.size(),
                false,
                pages.stream()
                        .map(PAGES_SERDE::serialize)
                        .collect(Collectors.toList()));
    }

    /**
     * Creates a single-column page holding the long value {@code i}.
     */
    public static Page createPage(int i)
    {
        return new Page(BlockAssertions.createLongsBlock(i));
    }

    /**
     * Returns the byte size occupied by {@code count} standard test pages.
     */
    static DataSize sizeOfPages(int count)
    {
        return new DataSize(BUFFERED_PAGE_SIZE.toBytes() * count, BYTE);
    }

    static BufferResult getBufferResult(OutputBuffer buffer, OutputBufferId bufferId, long sequenceId, DataSize maxSize, Duration maxWait)
    {
        return getFuture(buffer.get(bufferId, sequenceId, maxSize), maxWait);
    }

    // TODO: remove this after PR #7987 is landed
    static void acknowledgeBufferResult(OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId, long sequenceId)
    {
        buffer.acknowledge(bufferId, sequenceId);
    }

    /**
     * Enqueues a page and returns the buffer's "is full" future, asserting
     * that the buffer has not completed (i.e. it is now full/blocked).
     */
    static ListenableFuture<?> enqueuePage(OutputBuffer buffer, Page page)
    {
        buffer.enqueue(ImmutableList.of(PAGES_SERDE.serialize(page)));
        ListenableFuture<?> isFull = buffer.isFull();
        assertFalse(isFull.isDone());
        return isFull;
    }

    static ListenableFuture<?> enqueuePage(OutputBuffer buffer, Page page, int partition)
    {
        buffer.enqueue(partition, ImmutableList.of(PAGES_SERDE.serialize(page)));
        ListenableFuture<?> isFull = buffer.isFull();
        assertFalse(isFull.isDone());
        return isFull;
    }

    /**
     * Enqueues a page, asserting that the buffer is still accepting pages.
     */
    public static void addPage(OutputBuffer buffer, Page page)
    {
        buffer.enqueue(ImmutableList.of(PAGES_SERDE.serialize(page)));
        assertTrue(buffer.isFull().isDone(), "Expected add page to not block");
    }

    public static void addPage(OutputBuffer buffer, Page page, int partition)
    {
        buffer.enqueue(partition, ImmutableList.of(PAGES_SERDE.serialize(page)));
        assertTrue(buffer.isFull().isDone(), "Expected add page to not block");
    }

    static void assertQueueState(
            OutputBuffer buffer,
            OutputBuffers.OutputBufferId bufferId,
            int bufferedPages,
            int pagesSent)
    {
        // Every test page holds exactly one row, so page and row counts match.
        PageBufferInfo pageBufferInfo = new PageBufferInfo(
                bufferId.getId(),
                bufferedPages,
                sizeOfPages(bufferedPages).toBytes(),
                bufferedPages + pagesSent,
                bufferedPages + pagesSent);
        assertEquals(
                getBufferInfo(buffer, bufferId),
                new BufferInfo(bufferId, false, bufferedPages, pagesSent, pageBufferInfo));
    }

    static void assertQueueState(
            OutputBuffer buffer,
            int unassignedPages,
            OutputBuffers.OutputBufferId bufferId,
            int bufferedPages,
            int pagesSent)
    {
        OutputBufferInfo outputBufferInfo = buffer.getInfo();
        // Total buffered pages minus pages already routed to named buffers
        // gives the unassigned count.
        long assignedPages = outputBufferInfo.getBuffers().stream().mapToInt(BufferInfo::getBufferedPages).sum();
        assertEquals(
                outputBufferInfo.getTotalBufferedPages() - assignedPages,
                unassignedPages,
                "unassignedPages");
        BufferInfo bufferInfo = outputBufferInfo.getBuffers().stream()
                .filter(info -> info.getBufferId().equals(bufferId))
                .findAny()
                .orElse(null);
        // Every test page holds exactly one row, so page and row counts match.
        PageBufferInfo pageBufferInfo = new PageBufferInfo(
                bufferId.getId(),
                bufferedPages,
                sizeOfPages(bufferedPages).toBytes(),
                bufferedPages + pagesSent,
                bufferedPages + pagesSent);
        assertEquals(
                bufferInfo,
                new BufferInfo(bufferId, false, bufferedPages, pagesSent, pageBufferInfo));
    }

    @SuppressWarnings("ConstantConditions")
    static void assertQueueClosed(OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId, int pagesSent)
    {
        BufferInfo bufferInfo = getBufferInfo(buffer, bufferId);
        assertEquals(bufferInfo.getBufferedPages(), 0);
        assertEquals(bufferInfo.getPagesSent(), pagesSent);
        assertEquals(bufferInfo.isFinished(), true);
    }

    @SuppressWarnings("ConstantConditions")
    static void assertQueueClosed(OutputBuffer buffer, int unassignedPages, OutputBuffers.OutputBufferId bufferId, int pagesSent)
    {
        OutputBufferInfo outputBufferInfo = buffer.getInfo();
        long assignedPages = outputBufferInfo.getBuffers().stream().mapToInt(BufferInfo::getBufferedPages).sum();
        assertEquals(
                outputBufferInfo.getTotalBufferedPages() - assignedPages,
                unassignedPages,
                "unassignedPages");
        BufferInfo bufferInfo = outputBufferInfo.getBuffers().stream()
                .filter(info -> info.getBufferId().equals(bufferId))
                .findAny()
                .orElse(null);
        assertEquals(bufferInfo.getBufferedPages(), 0);
        assertEquals(bufferInfo.getPagesSent(), pagesSent);
        assertEquals(bufferInfo.isFinished(), true);
    }

    static void assertFinished(OutputBuffer buffer)
    {
        assertTrue(buffer.isFinished());
        // A finished output buffer implies every named buffer is drained too.
        for (BufferInfo info : buffer.getInfo().getBuffers()) {
            assertTrue(info.isFinished());
            assertEquals(info.getBufferedPages(), 0);
        }
    }

    static void assertFutureIsDone(Future<?> future)
    {
        tryGetFutureValue(future, 5, SECONDS);
        assertTrue(future.isDone());
    }

    private static BufferInfo getBufferInfo(OutputBuffer buffer, OutputBuffers.OutputBufferId bufferId)
    {
        return buffer.getInfo().getBuffers().stream()
                .filter(info -> info.getBufferId().equals(bufferId))
                .findFirst()
                .orElse(null);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.io.sarg;
import parquet.filter2.predicate.FilterPredicate;
import java.util.List;
/**
* Primary interface for <a href="http://en.wikipedia.org/wiki/Sargable">
* SearchArgument</a>, which are the subset of predicates
* that can be pushed down to the RecordReader. Each SearchArgument consists
* of a series of SearchClauses that must each be true for the row to be
* accepted by the filter.
*
* This requires that the filter be normalized into conjunctive normal form
* (<a href="http://en.wikipedia.org/wiki/Conjunctive_normal_form">CNF</a>).
*/
public interface SearchArgument {

  /**
   * The potential result sets of logical operations. Each value describes the
   * set of row-level outcomes ({@code true}, {@code false}, SQL {@code null})
   * that the predicate may produce; e.g. {@code YES_NULL} means some rows are
   * true and some are null.
   */
  public static enum TruthValue {
    YES, NO, NULL, YES_NULL, NO_NULL, YES_NO, YES_NO_NULL;

    /**
     * Compute logical or between the two values.
     * @param right the other argument or null
     * @return the result
     */
    public TruthValue or(TruthValue right) {
      if (right == null || right == this) {
        return this;
      }
      // A definite true on either side dominates the OR.
      if (this == YES || right == YES) {
        return YES;
      }
      if (this == YES_NULL || right == YES_NULL) {
        return YES_NULL;
      }
      // A definite false is the identity element for OR.
      if (this == NO) {
        return right;
      }
      if (right == NO) {
        return this;
      }
      // At most one side can be NULL here; the other is NO_NULL, YES_NO,
      // or YES_NO_NULL.
      if (this == NULL) {
        return right == NO_NULL ? NULL : YES_NULL;
      }
      if (right == NULL) {
        return this == NO_NULL ? NULL : YES_NULL;
      }
      // Remaining combinations can produce any of the three outcomes.
      return YES_NO_NULL;
    }

    /**
     * Compute logical AND between the two values.
     * @param right the other argument or null
     * @return the result
     */
    public TruthValue and(TruthValue right) {
      if (right == null || right == this) {
        return this;
      }
      // A definite false on either side dominates the AND.
      if (this == NO || right == NO) {
        return NO;
      }
      if (this == NO_NULL || right == NO_NULL) {
        return NO_NULL;
      }
      // A definite true is the identity element for AND.
      if (this == YES) {
        return right;
      }
      if (right == YES) {
        return this;
      }
      // At most one side can be NULL here; the other is YES_NULL, YES_NO,
      // or YES_NO_NULL.
      if (this == NULL) {
        return right == YES_NULL ? NULL : NO_NULL;
      }
      if (right == NULL) {
        return this == YES_NULL ? NULL : NO_NULL;
      }
      // Remaining combinations can produce any of the three outcomes.
      return YES_NO_NULL;
    }

    /**
     * Compute the logical negation of this value. Per SQL three-valued
     * logic, NOT null is still null.
     */
    public TruthValue not() {
      switch (this) {
        case YES:
          return NO;
        case NO:
          return YES;
        case YES_NULL:
          return NO_NULL;
        case NO_NULL:
          return YES_NULL;
        case NULL:
        case YES_NO:
        case YES_NO_NULL:
          // These outcome sets are symmetric under negation.
          return this;
        default:
          throw new IllegalArgumentException("Unknown value: " + this);
      }
    }

    /**
     * Does the RecordReader need to include this set of records?
     * @return true unless none of the rows qualify
     */
    public boolean isNeeded() {
      // Only sets that cannot contain a true row may be skipped.
      return this != NO && this != NULL && this != NO_NULL;
    }
  }

  /**
   * Get the leaf predicates that are required to evaluate the predicate. The
   * list will have the duplicates removed.
   * @return the list of leaf predicates
   */
  public List<PredicateLeaf> getLeaves();

  /**
   * Evaluate the entire predicate based on the values for the leaf predicates.
   * @param leaves the value of each leaf predicate
   * @return the value of the entire predicate
   */
  public TruthValue evaluate(TruthValue[] leaves);

  /**
   * Serialize the SARG as a Kryo object and return the base64 string.
   *
   * Hive should replace the current XML-based AST serialization for predicate
   * pushdown with the Kryo serialization of the SARG because the
   * representation is much more compact and focused on what is needed for
   * predicate pushdown.
   *
   * @return the serialized SARG
   */
  public String toKryo();

  /**
   * Translate the search argument into the filter predicate form used by
   * Parquet.
   * @return the equivalent Parquet {@code FilterPredicate}
   */
  public FilterPredicate toFilterPredicate();

  /**
   * A builder object for contexts outside of Hive where it isn't easy to
   * get an ExprNodeDesc. The user must call startOr, startAnd, or startNot
   * before adding any leaves.
   */
  public interface Builder {
    /**
     * Start building an or operation and push it on the stack.
     * @return this
     */
    public Builder startOr();

    /**
     * Start building an and operation and push it on the stack.
     * @return this
     */
    public Builder startAnd();

    /**
     * Start building a not operation and push it on the stack.
     * @return this
     */
    public Builder startNot();

    /**
     * Finish the current operation and pop it off of the stack. Each start
     * call must have a matching end.
     * @return this
     */
    public Builder end();

    /**
     * Add a less than leaf to the current item on the stack.
     * @param column the name of the column
     * @param literal the literal
     * @return this
     */
    public Builder lessThan(String column, Object literal);

    /**
     * Add a less than equals leaf to the current item on the stack.
     * @param column the name of the column
     * @param literal the literal
     * @return this
     */
    public Builder lessThanEquals(String column, Object literal);

    /**
     * Add an equals leaf to the current item on the stack.
     * @param column the name of the column
     * @param literal the literal
     * @return this
     */
    public Builder equals(String column, Object literal);

    /**
     * Add a null safe equals leaf to the current item on the stack.
     * @param column the name of the column
     * @param literal the literal
     * @return this
     */
    public Builder nullSafeEquals(String column, Object literal);

    /**
     * Add an in leaf to the current item on the stack.
     * @param column the name of the column
     * @param literal the literal
     * @return this
     */
    public Builder in(String column, Object... literal);

    /**
     * Add an is null leaf to the current item on the stack.
     * @param column the name of the column
     * @return this
     */
    public Builder isNull(String column);

    /**
     * Add a between leaf to the current item on the stack.
     * @param column the name of the column
     * @param lower the literal
     * @param upper the literal
     * @return this
     */
    public Builder between(String column, Object lower, Object upper);

    /**
     * Build and return the SearchArgument that has been defined. All of the
     * starts must have been ended before this call.
     * @return the new SearchArgument
     */
    public SearchArgument build();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.util.zip;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.nio.charset.Charsets;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
/**
* An instance of {@code ZipEntry} represents an entry within a <i>ZIP-archive</i>.
* An entry has attributes such as name (= path) or the size of its data. While
* an entry identifies data stored in an archive, it does not hold the data
* itself. For example when reading a <i>ZIP-file</i> you will first retrieve
* all its entries in a collection and then read the data for a specific entry
* through an input stream.
*
* @see ZipFile
* @see ZipOutputStream
*/
public class ZipEntry implements ZipConstants, Cloneable {
    // Entry attributes; -1 (or null for the reference fields) means "not set".
    String name, comment;
    long compressedSize = -1, crc = -1, size = -1;
    // 'time' and 'modDate' hold the raw MS-DOS encoded time and date words
    // exactly as stored in the ZIP headers; see getTime()/setTime().
    int compressionMethod = -1, time = -1, modDate = -1;
    byte[] extra;
    int nameLen = -1;
    // Offset of this entry's local file header, relative to the archive start.
    long mLocalHeaderRelOffset = -1;
    /**
     * Zip entry state: Deflated.
     */
    public static final int DEFLATED = 8;
    /**
     * Zip entry state: Stored.
     */
    public static final int STORED = 0;
    /**
     * Constructs a new {@code ZipEntry} with the specified name.
     *
     * @param name
     *            the name of the ZIP entry.
     * @throws IllegalArgumentException
     *             if the name length is outside the range (> 0xFFFF).
     */
    public ZipEntry(String name) {
        if (name == null) {
            throw new NullPointerException();
        }
        // The name length is stored in a two-byte header field, so it cannot
        // exceed 0xFFFF.
        if (name.length() > 0xFFFF) {
            throw new IllegalArgumentException();
        }
        this.name = name;
    }
    /**
     * Gets the comment for this {@code ZipEntry}.
     *
     * @return the comment for this {@code ZipEntry}, or {@code null} if there
     *         is no comment. If we're reading an archive with
     *         {@code ZipInputStream} the comment is not available.
     */
    public String getComment() {
        return comment;
    }
    /**
     * Gets the compressed size of this {@code ZipEntry}.
     *
     * @return the compressed size, or -1 if the compressed size has not been
     *         set.
     */
    public long getCompressedSize() {
        return compressedSize;
    }
    /**
     * Gets the checksum for this {@code ZipEntry}.
     *
     * @return the checksum, or -1 if the checksum has not been set.
     */
    public long getCrc() {
        return crc;
    }
    /**
     * Gets the extra information for this {@code ZipEntry}.
     *
     * @return a byte array containing the extra information, or {@code null} if
     *         there is none.
     */
    public byte[] getExtra() {
        return extra;
    }
    /**
     * Gets the compression method for this {@code ZipEntry}.
     *
     * @return the compression method, either {@code DEFLATED}, {@code STORED}
     *         or -1 if the compression method has not been set.
     */
    public int getMethod() {
        return compressionMethod;
    }
    /**
     * Gets the name of this {@code ZipEntry}.
     *
     * @return the entry name.
     */
    public String getName() {
        return name;
    }
    /**
     * Gets the uncompressed size of this {@code ZipEntry}.
     *
     * @return the uncompressed size, or {@code -1} if the size has not been
     *         set.
     */
    public long getSize() {
        return size;
    }
    /**
     * Gets the last modification time of this {@code ZipEntry}.
     *
     * @return the last modification time as the number of milliseconds since
     *         Jan. 1, 1970.
     */
    public long getTime() {
        if (time != -1) {
            GregorianCalendar cal = new GregorianCalendar();
            cal.set(Calendar.MILLISECOND, 0);
            // Decode the MS-DOS date/time format:
            //   modDate bits: YYYYYYYM MMMDDDDD (years since 1980, month 1-12)
            //   time bits:    HHHHHMMM MMMSSSSS (seconds stored in 2s units)
            cal.set(1980 + ((modDate >> 9) & 0x7f), ((modDate >> 5) & 0xf) - 1,
                    modDate & 0x1f, (time >> 11) & 0x1f, (time >> 5) & 0x3f,
                    (time & 0x1f) << 1);
            return cal.getTime().getTime();
        }
        return -1;
    }
    /**
     * Determine whether or not this {@code ZipEntry} is a directory.
     *
     * @return {@code true} when this {@code ZipEntry} is a directory, {@code
     *         false} otherwise.
     */
    public boolean isDirectory() {
        // By ZIP convention, directory entries are those whose name ends in '/'.
        return name.charAt(name.length() - 1) == '/';
    }
    /**
     * Sets the comment for this {@code ZipEntry}.
     *
     * @param string
     *            the comment for this entry.
     */
    public void setComment(String string) {
        // The comment length is stored in a two-byte header field.
        if (string == null || string.length() <= 0xFFFF) {
            comment = string;
        } else {
            throw new IllegalArgumentException();
        }
    }
    /**
     * Sets the compressed size for this {@code ZipEntry}.
     *
     * @param value
     *            the compressed size (in bytes).
     */
    public void setCompressedSize(long value) {
        compressedSize = value;
    }
    /**
     * Sets the checksum for this {@code ZipEntry}.
     *
     * @param value
     *            the checksum for this entry.
     * @throws IllegalArgumentException
     *             if {@code value} is < 0 or > 0xFFFFFFFFL.
     */
    public void setCrc(long value) {
        // CRC-32 values must fit in an unsigned 32-bit field.
        if (value >= 0 && value <= 0xFFFFFFFFL) {
            crc = value;
        } else {
            throw new IllegalArgumentException();
        }
    }
    /**
     * Sets the extra information for this {@code ZipEntry}.
     *
     * @param data
     *            a byte array containing the extra information.
     * @throws IllegalArgumentException
     *             when the length of data is greater than 0xFFFF bytes.
     */
    public void setExtra(byte[] data) {
        if (data == null || data.length <= 0xFFFF) {
            extra = data;
        } else {
            throw new IllegalArgumentException();
        }
    }
    /**
     * Sets the compression method for this {@code ZipEntry}.
     *
     * @param value
     *            the compression method, either {@code DEFLATED} or {@code
     *            STORED}.
     * @throws IllegalArgumentException
     *             when value is not {@code DEFLATED} or {@code STORED}.
     */
    public void setMethod(int value) {
        if (value != STORED && value != DEFLATED) {
            throw new IllegalArgumentException();
        }
        compressionMethod = value;
    }
    /**
     * Sets the uncompressed size of this {@code ZipEntry}.
     *
     * @param value
     *            the uncompressed size for this entry.
     * @throws IllegalArgumentException
     *             if {@code value} < 0 or {@code value} > 0xFFFFFFFFL.
     */
    public void setSize(long value) {
        // Sizes must fit in an unsigned 32-bit field (no ZIP64 support here).
        if (value >= 0 && value <= 0xFFFFFFFFL) {
            size = value;
        } else {
            throw new IllegalArgumentException();
        }
    }
    /**
     * Sets the modification time of this {@code ZipEntry}.
     *
     * @param value
     *            the modification time as the number of milliseconds since Jan.
     *            1, 1970.
     */
    public void setTime(long value) {
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(new Date(value));
        int year = cal.get(Calendar.YEAR);
        if (year < 1980) {
            // DOS timestamps cannot represent dates before 1980; clamp to
            // 0x21 == January 1st, 1980, 00:00:00.
            modDate = 0x21;
            time = 0;
        } else {
            // Pack into the MS-DOS format (see getTime() for the bit layout).
            modDate = cal.get(Calendar.DATE);
            modDate = (cal.get(Calendar.MONTH) + 1 << 5) | modDate;
            modDate = ((cal.get(Calendar.YEAR) - 1980) << 9) | modDate;
            // Seconds are stored with two-second resolution.
            time = cal.get(Calendar.SECOND) >> 1;
            time = (cal.get(Calendar.MINUTE) << 5) | time;
            time = (cal.get(Calendar.HOUR_OF_DAY) << 11) | time;
        }
    }
    /**
     * Returns the string representation of this {@code ZipEntry}.
     *
     * @return the string representation of this {@code ZipEntry}.
     */
    @Override
    public String toString() {
        return name;
    }
    /**
     * Constructs a new {@code ZipEntry} using the values obtained from {@code
     * ze}.
     *
     * @param ze
     *            the {@code ZipEntry} from which to obtain values.
     */
    public ZipEntry(ZipEntry ze) {
        name = ze.name;
        comment = ze.comment;
        time = ze.time;
        size = ze.size;
        compressedSize = ze.compressedSize;
        crc = ze.crc;
        compressionMethod = ze.compressionMethod;
        modDate = ze.modDate;
        extra = ze.extra;
        nameLen = ze.nameLen;
        mLocalHeaderRelOffset = ze.mLocalHeaderRelOffset;
    }
    /**
     * Returns a shallow copy of this entry.
     *
     * @return a copy of this entry.
     */
    @Override
    public Object clone() {
        return new ZipEntry(this);
    }
    /**
     * Returns the hash code for this {@code ZipEntry}.
     *
     * @return the hash code of the entry.
     */
    @Override
    public int hashCode() {
        return name.hashCode();
    }
    /*
     * Internal constructor. Creates a new ZipEntry by reading the
     * Central Directory Entry from "in", which must be positioned at
     * the CDE signature.
     *
     * On exit, "in" will be positioned at the start of the next entry.
     */
    ZipEntry(LittleEndianReader ler, InputStream in) throws IOException {
        /*
         * We're seeing performance issues when we call readShortLE and
         * readIntLE, so we're going to read the entire header at once
         * and then parse the results out without using any function calls.
         * Uglier, but should be much faster.
         *
         * Note that some lines look a bit different, because the corresponding
         * fields or locals are long and so we need to do & 0xffffffffl to avoid
         * problems induced by sign extension.
         */
        byte[] hdrBuf = ler.hdrBuf;
        myReadFully(in, hdrBuf);
        // Bytes 0-3: central directory entry signature; must equal CENSIG.
        long sig = (hdrBuf[0] & 0xff) | ((hdrBuf[1] & 0xff) << 8) |
                ((hdrBuf[2] & 0xff) << 16) | ((hdrBuf[3] << 24) & 0xffffffffL);
        if (sig != CENSIG) {
            throw new ZipException("Central Directory Entry not found");
        }
        // Bytes 10-15: compression method, mod time, mod date (LE shorts).
        compressionMethod = (hdrBuf[10] & 0xff) | ((hdrBuf[11] & 0xff) << 8);
        time = (hdrBuf[12] & 0xff) | ((hdrBuf[13] & 0xff) << 8);
        modDate = (hdrBuf[14] & 0xff) | ((hdrBuf[15] & 0xff) << 8);
        // Bytes 16-27: CRC, compressed size, uncompressed size (LE ints).
        crc = (hdrBuf[16] & 0xff) | ((hdrBuf[17] & 0xff) << 8)
                | ((hdrBuf[18] & 0xff) << 16)
                | ((hdrBuf[19] << 24) & 0xffffffffL);
        compressedSize = (hdrBuf[20] & 0xff) | ((hdrBuf[21] & 0xff) << 8)
                | ((hdrBuf[22] & 0xff) << 16)
                | ((hdrBuf[23] << 24) & 0xffffffffL);
        size = (hdrBuf[24] & 0xff) | ((hdrBuf[25] & 0xff) << 8)
                | ((hdrBuf[26] & 0xff) << 16)
                | ((hdrBuf[27] << 24) & 0xffffffffL);
        // Bytes 28-33: lengths of the name, extra and comment fields.
        nameLen = (hdrBuf[28] & 0xff) | ((hdrBuf[29] & 0xff) << 8);
        int extraLen = (hdrBuf[30] & 0xff) | ((hdrBuf[31] & 0xff) << 8);
        int commentLen = (hdrBuf[32] & 0xff) | ((hdrBuf[33] & 0xff) << 8);
        // Bytes 42-45: offset of this entry's local file header (LE int).
        mLocalHeaderRelOffset = (hdrBuf[42] & 0xff) | ((hdrBuf[43] & 0xff) << 8)
                | ((hdrBuf[44] & 0xff) << 16)
                | ((hdrBuf[45] << 24) & 0xffffffffL);
        // The variable-length fields follow the fixed-size header in order:
        // name, then extra, then comment.
        byte[] nameBytes = new byte[nameLen];
        myReadFully(in, nameBytes);
        byte[] commentBytes = null;
        if (commentLen > 0) {
            commentBytes = new byte[commentLen];
            myReadFully(in, commentBytes);
        }
        if (extraLen > 0) {
            extra = new byte[extraLen];
            myReadFully(in, extra);
        }
        // The RI has always assumed UTF-8. (If GPBF_UTF8_FLAG isn't set, the encoding is
        // actually IBM-437.)
        name = new String(nameBytes, 0, nameBytes.length, Charsets.UTF_8);
        if (commentBytes != null) {
            comment = new String(commentBytes, 0, commentBytes.length, Charsets.UTF_8);
        } else {
            comment = null;
        }
    }
    // Reads exactly b.length bytes from 'in', throwing EOFException if the
    // stream ends early (InputStream.read may return short counts).
    private void myReadFully(InputStream in, byte[] b) throws IOException {
        int len = b.length;
        int off = 0;
        while (len > 0) {
            int count = in.read(b, off, len);
            if (count <= 0) {
                throw new EOFException();
            }
            off += count;
            len -= count;
        }
    }
    /*
     * Read a four-byte int in little-endian order.
     */
    static long readIntLE(RandomAccessFile raf) throws IOException {
        int b0 = raf.read();
        int b1 = raf.read();
        int b2 = raf.read();
        int b3 = raf.read();
        // Only the last read needs checking: read() returns -1 at EOF, and if
        // b3 succeeded the earlier reads did too.
        if (b3 < 0) {
            throw new EOFException("in ZipEntry.readIntLE(RandomAccessFile)");
        }
        return b0 | (b1 << 8) | (b2 << 16) | (b3 << 24); // ATTENTION: DOES SIGN EXTENSION: IS THIS WANTED?
    }
    // Helper that owns the scratch buffers used when parsing entry headers,
    // so they can be reused across entries.
    static class LittleEndianReader {
        private byte[] b = new byte[4];
        byte[] hdrBuf = new byte[CENHDR];
        /*
         * Read a two-byte short in little-endian order.
         */
        int readShortLE(InputStream in) throws IOException {
            if (in.read(b, 0, 2) == 2) {
                return (b[0] & 0XFF) | ((b[1] & 0XFF) << 8);
            } else {
                throw new EOFException("in ZipEntry.readShortLE(InputStream)");
            }
        }
        /*
         * Read a four-byte int in little-endian order.
         */
        long readIntLE(InputStream in) throws IOException {
            if (in.read(b, 0, 4) == 4) {
                return ( ((b[0] & 0XFF))
                        | ((b[1] & 0XFF) << 8)
                        | ((b[2] & 0XFF) << 16)
                        | ((b[3] & 0XFF) << 24))
                        & 0XFFFFFFFFL; // Here for sure NO sign extension is wanted.
            } else {
                throw new EOFException("in ZipEntry.readIntLE(InputStream)");
            }
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.hamcrest.Matchers.equalTo;
public class ShardSizeTermsIT extends ShardSizeTestCase {
public void testNoShardSizeString() throws Exception {
createIdx("type=keyword");
indexData();
SearchResponse response = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
.collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
.get();
Terms terms = response.getAggregations().get("keys");
List<? extends Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<String, Long> expected = new HashMap<>();
expected.put("1", 8L);
expected.put("3", 8L);
expected.put("2", 5L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
}
public void testShardSizeEqualsSizeString() throws Exception {
createIdx("type=keyword");
indexData();
SearchResponse response = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3).shardSize(3)
.collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
.get();
Terms terms = response.getAggregations().get("keys");
List<? extends Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3));
Map<String, Long> expected = new HashMap<>();
expected.put("1", 8L);
expected.put("3", 8L);
expected.put("2", 4L);
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
}
public void testWithShardSizeString() throws Exception {
createIdx("type=keyword");
indexData();
SearchResponse response = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
.collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
.get();
Terms terms = response.getAggregations().get("keys");
List<? extends Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(3)); // we still only return 3 entries (based on the 'size' param)
Map<String, Long> expected = new HashMap<>();
expected.put("1", 8L);
expected.put("3", 8L);
expected.put("2", 5L); // <-- count is now fixed
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
}
/**
 * Same as {@link #testWithShardSizeString()} but routed to a single shard,
 * so all counts are exact regardless of shard_size.
 */
public void testWithShardSizeStringSingleShard() throws Exception {
    createIdx("type=keyword");
    indexData();

    SearchResponse response = client().prepareSearch("idx").setRouting(routing1)
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
            .get();

    Terms terms = response.getAggregations().get("keys");
    List<? extends Terms.Bucket> buckets = terms.getBuckets();
    assertThat(buckets.size(), equalTo(3)); // we still only return 3 entries (based on the 'size' param)

    Map<String, Long> expected = new HashMap<>();
    expected.put("1", 5L);
    expected.put("2", 4L);
    expected.put("3", 3L);
    for (Terms.Bucket bucket : buckets) {
        // Fixed: use getKeyAsString() (not getKey(), which returns Object) so the
        // lookup type matches the map's String keys, consistent with the other
        // string-keyed tests in this class.
        assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
    }
}
/**
 * Terms aggregation on a keyword field ordered by term (ascending key)
 * with no explicit shard_size; counts are exact under key ordering.
 */
public void testNoShardSizeTermOrderString() throws Exception {
    createIdx("type=keyword");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    assertThat(bucketList.size(), equalTo(3));

    Map<String, Long> expectedCounts = new HashMap<>();
    expectedCounts.put("1", 8L);
    expectedCounts.put("2", 5L);
    expectedCounts.put("3", 8L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsString())));
    }
}
/**
 * Terms aggregation on a long field, count-descending, no explicit
 * shard_size; buckets are matched by their numeric key.
 */
public void testNoShardSizeLong() throws Exception {
    createIdx("type=long");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    assertThat(bucketList.size(), equalTo(3));

    Map<Integer, Long> expectedCounts = new HashMap<>();
    expectedCounts.put(1, 8L);
    expectedCounts.put(2, 5L);
    expectedCounts.put(3, 8L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsNumber().intValue())));
    }
}
/**
 * Terms aggregation on a long field with {@code shard_size == size}; the
 * trailing bucket's count is expected to be approximate (2 -> 4 rather
 * than the true total).
 */
public void testShardSizeEqualsSizeLong() throws Exception {
    createIdx("type=long");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3).shardSize(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    assertThat(bucketList.size(), equalTo(3));

    Map<Integer, Long> expectedCounts = new HashMap<>();
    expectedCounts.put(1, 8L);
    expectedCounts.put(2, 4L);
    expectedCounts.put(3, 8L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsNumber().intValue())));
    }
}
/**
 * Terms aggregation on a long field with a shard_size larger than size:
 * still only 3 buckets, but counts are exact (2 -> 5).
 */
public void testWithShardSizeLong() throws Exception {
    createIdx("type=long");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    // Still only 3 entries, driven by the 'size' param.
    assertThat(bucketList.size(), equalTo(3));

    Map<Integer, Long> expectedCounts = new HashMap<>();
    expectedCounts.put(1, 8L);
    expectedCounts.put(2, 5L); // exact count thanks to the larger shard_size
    expectedCounts.put(3, 8L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsNumber().intValue())));
    }
}
/**
 * Same as {@link #testWithShardSizeLong()} but routed to a single shard,
 * so all counts are exact regardless of shard_size.
 */
public void testWithShardSizeLongSingleShard() throws Exception {
    createIdx("type=long");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx").setRouting(routing1)
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    // Still only 3 entries, driven by the 'size' param.
    assertThat(bucketList.size(), equalTo(3));

    Map<Integer, Long> expectedCounts = new HashMap<>();
    expectedCounts.put(1, 5L);
    expectedCounts.put(2, 4L);
    expectedCounts.put(3, 3L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsNumber().intValue())));
    }
}
/**
 * Terms aggregation on a long field ordered by term (ascending key) with
 * no explicit shard_size; counts are exact under key ordering.
 */
public void testNoShardSizeTermOrderLong() throws Exception {
    createIdx("type=long");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    assertThat(bucketList.size(), equalTo(3));

    Map<Integer, Long> expectedCounts = new HashMap<>();
    expectedCounts.put(1, 8L);
    expectedCounts.put(2, 5L);
    expectedCounts.put(3, 8L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsNumber().intValue())));
    }
}
/**
 * Terms aggregation on a double field, count-descending, no explicit
 * shard_size; buckets are matched by their numeric key.
 */
public void testNoShardSizeDouble() throws Exception {
    createIdx("type=double");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    assertThat(bucketList.size(), equalTo(3));

    Map<Integer, Long> expectedCounts = new HashMap<>();
    expectedCounts.put(1, 8L);
    expectedCounts.put(2, 5L);
    expectedCounts.put(3, 8L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsNumber().intValue())));
    }
}
/**
 * Terms aggregation on a double field with {@code shard_size == size}; the
 * trailing bucket's count is expected to be approximate (2 -> 4 rather
 * than the true total).
 */
public void testShardSizeEqualsSizeDouble() throws Exception {
    createIdx("type=double");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3).shardSize(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    assertThat(bucketList.size(), equalTo(3));

    Map<Integer, Long> expectedCounts = new HashMap<>();
    expectedCounts.put(1, 8L);
    expectedCounts.put(2, 4L);
    expectedCounts.put(3, 8L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsNumber().intValue())));
    }
}
/**
 * Terms aggregation on a double field with a shard_size larger than size:
 * still only 3 buckets, but counts are exact (2 -> 5).
 */
public void testWithShardSizeDouble() throws Exception {
    createIdx("type=double");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    assertThat(bucketList.size(), equalTo(3));

    Map<Integer, Long> expectedCounts = new HashMap<>();
    expectedCounts.put(1, 8L);
    expectedCounts.put(2, 5L); // exact count thanks to the larger shard_size
    expectedCounts.put(3, 8L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsNumber().intValue())));
    }
}
/**
 * Same as {@link #testWithShardSizeDouble()} but routed to a single shard,
 * so all counts are exact regardless of shard_size.
 */
public void testWithShardSizeDoubleSingleShard() throws Exception {
    createIdx("type=double");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx").setRouting(routing1)
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    assertThat(bucketList.size(), equalTo(3));

    Map<Integer, Long> expectedCounts = new HashMap<>();
    expectedCounts.put(1, 5L);
    expectedCounts.put(2, 4L);
    expectedCounts.put(3, 3L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsNumber().intValue())));
    }
}
/**
 * Terms aggregation on a double field ordered by term (ascending key)
 * with no explicit shard_size; counts are exact under key ordering.
 */
public void testNoShardSizeTermOrderDouble() throws Exception {
    createIdx("type=double");
    indexData();

    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(terms("keys").field("key").size(3)
                    .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)))
            .get();

    Terms keysAgg = searchResponse.getAggregations().get("keys");
    List<? extends Terms.Bucket> bucketList = keysAgg.getBuckets();
    assertThat(bucketList.size(), equalTo(3));

    Map<Integer, Long> expectedCounts = new HashMap<>();
    expectedCounts.put(1, 8L);
    expectedCounts.put(2, 5L);
    expectedCounts.put(3, 8L);
    for (Terms.Bucket b : bucketList) {
        assertThat(b.getDocCount(), equalTo(expectedCounts.get(b.getKeyAsNumber().intValue())));
    }
}
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package org.apache.olingo.odata2.core.uri.expression;
import java.io.IOException;
import java.io.StringWriter;
import java.util.List;
import org.apache.olingo.odata2.api.edm.Edm;
import org.apache.olingo.odata2.api.edm.EdmException;
import org.apache.olingo.odata2.api.edm.EdmLiteral;
import org.apache.olingo.odata2.api.edm.EdmType;
import org.apache.olingo.odata2.api.edm.EdmTyped;
import org.apache.olingo.odata2.api.uri.expression.BinaryExpression;
import org.apache.olingo.odata2.api.uri.expression.BinaryOperator;
import org.apache.olingo.odata2.api.uri.expression.CommonExpression;
import org.apache.olingo.odata2.api.uri.expression.ExpressionVisitor;
import org.apache.olingo.odata2.api.uri.expression.FilterExpression;
import org.apache.olingo.odata2.api.uri.expression.LiteralExpression;
import org.apache.olingo.odata2.api.uri.expression.MemberExpression;
import org.apache.olingo.odata2.api.uri.expression.MethodExpression;
import org.apache.olingo.odata2.api.uri.expression.MethodOperator;
import org.apache.olingo.odata2.api.uri.expression.OrderByExpression;
import org.apache.olingo.odata2.api.uri.expression.OrderExpression;
import org.apache.olingo.odata2.api.uri.expression.PropertyExpression;
import org.apache.olingo.odata2.api.uri.expression.SortOrder;
import org.apache.olingo.odata2.api.uri.expression.UnaryExpression;
import org.apache.olingo.odata2.api.uri.expression.UnaryOperator;
import org.apache.olingo.odata2.core.ep.util.JsonStreamWriter;
/**
*
*/
/**
 * {@link ExpressionVisitor} implementation that serializes a parsed OData
 * expression tree into a JSON string. Each visit method renders one node as a
 * JSON object (with its "nodeType", operator/name, EDM "type" and the
 * already-serialized child results) and returns the JSON text as a
 * {@code String}.
 *
 * NOTE(review): every visit method swallows {@link IOException} from the
 * writer and returns {@code null} instead — callers must tolerate a null
 * partial result.
 */
public class JsonVisitor implements ExpressionVisitor {

  /** The filter node adds no wrapper of its own; the serialized inner expression is passed through. */
  @Override
  public Object visitFilterExpression(final FilterExpression filterExpression, final String expressionString,
      final Object expression) {
    return expression;
  }

  /**
   * Serializes a binary expression node: nodeType, operator, EDM type, and the
   * already-serialized left/right operands embedded as raw (unquoted) JSON.
   */
  @Override
  public Object visitBinary(final BinaryExpression binaryExpression, final BinaryOperator operator,
      final Object leftSide, final Object rightSide) {
    try {
      StringWriter writer = new StringWriter();
      JsonStreamWriter jsonStreamWriter = new JsonStreamWriter(writer);
      jsonStreamWriter.beginObject().namedStringValueRaw("nodeType", binaryExpression.getKind().toString()).separator()
          .namedStringValue("operator", operator.toUriLiteral()).separator().namedStringValueRaw("type",
              getType(binaryExpression)).separator().name("left").unquotedValue(leftSide.toString()).separator().name(
              "right").unquotedValue(rightSide.toString()).endObject();
      writer.flush();
      return writer.toString();
    } catch (final IOException e) {
      // Swallowed by design: serialization failure yields null.
      return null;
    }
  }

  /**
   * Serializes an order-by node as an "order collection" object containing a
   * JSON array of the already-serialized order expressions.
   */
  @Override
  public Object visitOrderByExpression(final OrderByExpression orderByExpression, final String expressionString,
      final List<Object> orders) {
    try {
      StringWriter writer = new StringWriter();
      JsonStreamWriter jsonStreamWriter = new JsonStreamWriter(writer);
      jsonStreamWriter.beginObject().namedStringValueRaw("nodeType", "order collection").separator().name("orders")
          .beginArray();
      boolean first = true;
      for (final Object order : orders) {
        // Comma-separate all array elements after the first.
        if (first) {
          first = false;
        } else {
          jsonStreamWriter.separator();
        }
        jsonStreamWriter.unquotedValue(order.toString());
      }
      jsonStreamWriter.endArray().endObject();
      writer.flush();
      return writer.toString();
    } catch (final IOException e) {
      // Swallowed by design: serialization failure yields null.
      return null;
    }
  }

  /** Serializes a single order node: nodeType, sort order, and the serialized inner expression. */
  @Override
  public Object visitOrder(final OrderExpression orderExpression, final Object filterResult,
      final SortOrder sortOrder) {
    try {
      StringWriter writer = new StringWriter();
      JsonStreamWriter jsonStreamWriter = new JsonStreamWriter(writer);
      jsonStreamWriter.beginObject().namedStringValueRaw("nodeType", orderExpression.getKind().toString()).separator()
          .namedStringValueRaw("sortorder", sortOrder.toString()).separator().name("expression").unquotedValue(
              filterResult.toString()).endObject();
      writer.flush();
      return writer.toString();
    } catch (final IOException e) {
      // Swallowed by design: serialization failure yields null.
      return null;
    }
  }

  /** Serializes a literal node: nodeType, EDM type, and the (quoted, escaped) literal value. */
  @Override
  public Object visitLiteral(final LiteralExpression literal, final EdmLiteral edmLiteral) {
    try {
      StringWriter writer = new StringWriter();
      JsonStreamWriter jsonStreamWriter = new JsonStreamWriter(writer);
      jsonStreamWriter.beginObject().namedStringValueRaw("nodeType", literal.getKind().toString()).separator()
          .namedStringValueRaw("type", getType(literal)).separator().namedStringValue("value", edmLiteral.getLiteral())
          .endObject();
      writer.flush();
      return writer.toString();
    } catch (final IOException e) {
      // Swallowed by design: serialization failure yields null.
      return null;
    }
  }

  /**
   * Serializes a method-call node: nodeType, operator, EDM type, and a JSON
   * array of the already-serialized parameter expressions.
   */
  @Override
  public Object visitMethod(final MethodExpression methodExpression, final MethodOperator method,
      final List<Object> parameters) {
    try {
      StringWriter writer = new StringWriter();
      JsonStreamWriter jsonStreamWriter = new JsonStreamWriter(writer);
      jsonStreamWriter.beginObject().namedStringValueRaw("nodeType", methodExpression.getKind().toString()).separator()
          .namedStringValueRaw("operator", method.toUriLiteral()).separator().namedStringValueRaw("type",
              getType(methodExpression)).separator().name("parameters").beginArray();
      boolean first = true;
      for (Object parameter : parameters) {
        // Comma-separate all array elements after the first.
        if (first) {
          first = false;
        } else {
          jsonStreamWriter.separator();
        }
        jsonStreamWriter.unquotedValue(parameter.toString());
      }
      jsonStreamWriter.endArray().endObject();
      writer.flush();
      return writer.toString();
    } catch (final IOException e) {
      // Swallowed by design: serialization failure yields null.
      return null;
    }
  }

  /** Serializes a member-access node: nodeType, EDM type, serialized source ("path") and member ("property"). */
  @Override
  public Object visitMember(final MemberExpression memberExpression, final Object path, final Object property) {
    try {
      StringWriter writer = new StringWriter();
      JsonStreamWriter jsonStreamWriter = new JsonStreamWriter(writer);
      jsonStreamWriter.beginObject().namedStringValueRaw("nodeType", memberExpression.getKind().toString()).separator()
          .namedStringValueRaw("type", getType(memberExpression)).separator().name("source").unquotedValue(
              path.toString()).separator().name("path").unquotedValue(property.toString()).endObject();
      writer.flush();
      return writer.toString();
    } catch (final IOException e) {
      // Swallowed by design: serialization failure yields null.
      return null;
    }
  }

  /** Serializes a property node: nodeType, the property's URI literal as "name", and its EDM type. */
  @Override
  public Object visitProperty(final PropertyExpression propertyExpression, final String uriLiteral,
      final EdmTyped edmProperty) {
    try {
      StringWriter writer = new StringWriter();
      JsonStreamWriter jsonStreamWriter = new JsonStreamWriter(writer);
      jsonStreamWriter.beginObject().namedStringValueRaw("nodeType", propertyExpression.getKind().toString())
          .separator().namedStringValue("name", uriLiteral).separator().namedStringValueRaw("type",
              getType(propertyExpression)).endObject();
      writer.flush();
      return writer.toString();
    } catch (final IOException e) {
      // Swallowed by design: serialization failure yields null.
      return null;
    }
  }

  /** Serializes a unary expression node: nodeType, operator, EDM type, and the serialized operand. */
  @Override
  public Object visitUnary(final UnaryExpression unaryExpression, final UnaryOperator operator, final Object operand) {
    try {
      StringWriter writer = new StringWriter();
      JsonStreamWriter jsonStreamWriter = new JsonStreamWriter(writer);
      jsonStreamWriter.beginObject().namedStringValueRaw("nodeType", unaryExpression.getKind().toString()).separator()
          .namedStringValueRaw("operator", operator.toUriLiteral()).separator().namedStringValueRaw("type",
              getType(unaryExpression)).separator().name("operand").unquotedValue(operand.toString()).endObject();
      writer.flush();
      return writer.toString();
    } catch (final IOException e) {
      // Swallowed by design: serialization failure yields null.
      return null;
    }
  }

  /**
   * Returns the fully-qualified EDM type name ("namespace.name") of the given
   * expression, {@code null} if the expression has no EDM type, or an error
   * text if resolving the type throws an {@link EdmException}.
   */
  private static String getType(final CommonExpression expression) {
    try {
      final EdmType type = expression.getEdmType();
      return type == null ? null : type.getNamespace() + Edm.DELIMITER + type.getName();
    } catch (final EdmException e) {
      return "EdmException occurred: " + e.getMessage();
    }
  }
}
| |
/*
* Copyright (c) 2016, SRCH2
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the SRCH2 nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL SRCH2 BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.srch2.android.sdk;
import android.os.Bundle;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import static junit.framework.Assert.*;
public class CrudTestActivity extends TestableActivity {
public static final String TAG = "srch2:: MyActivity";
public TestSearchResultsListener mResultListener = new TestSearchResultsListener();
public TestableIndex mIndex1 = new TestIndex();
public TestableIndex mIndex2 = new TestIndex2();
public TestGeoIndex mIndexGeo = new TestGeoIndex();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setTitle(TAG);
Log.d(TAG, "onCreate");
}
@Override
protected void onResume() {
super.onResume();
Log.d(TAG, "onResume");
}
@Override
protected void onPause() {
super.onPause();
Log.d(TAG, "onPause");
}
@Override
protected void onStart() {
super.onStart();
Log.d(TAG, "onResume");
}
@Override
protected void onStop() {
super.onStop();
Log.d(TAG, "onPause");
}
@Override
protected void onRestart() {
super.onRestart();
Log.d(TAG, "onRestart");
}
@Override
protected void onDestroy() {
super.onDestroy();
Log.d(TAG, "onDestroy");
}
public void initializeSRCH2EngineAndCallStart() {
initializeSRCH2Engine();
try {
Thread.currentThread().sleep(300);
} catch (InterruptedException e) {
}
callSRCH2EngineStart();
}
public void initializeSRCH2Engine() {
SRCH2Service.clearServerLogEntriesForTest(getApplicationContext());
deleteSrch2Files();
SRCH2Engine.setIndexables(mIndex1, mIndex2, mIndexGeo);
SRCH2Engine.setSearchResultsListener(mResultListener);
SRCH2Engine.setAutomatedTestingMode(true);
SRCH2Engine.onResume(getApplicationContext());
}
public void callSRCH2EngineStart() {
SRCH2Engine.onResume(this);
}
public void callSRCH2EngineStop() {
SRCH2Engine.onPause(this);
}
private void reset() {
mResultListener.reset();
}
public String getInsertResponse(TestableIndex index) {
Util.waitForResponse(Util.ResponseType.Insert, index);
return index.insertResponse;
}
public String getDeleteResponse(TestableIndex index) {
Util.waitForResponse(Util.ResponseType.Delete, index);
return index.deleteResponse;
}
public String getUpdateResponse(TestableIndex index) {
Util.waitForResponse(Util.ResponseType.Update, index);
return index.updateResponse;
}
public String getRecordResponse(TestableIndex index) {
Util.waitForResponse(Util.ResponseType.GetRecord, index);
return index.getRecordResponse;
}
public SearchResultsListener getSearchResult() {
Util.waitForResultResponse(mResultListener);
return mResultListener;
}
public void waitForEngineReady() {
Util.waitSRCH2EngineIsReady();
}
public void testAll() {
try {
for (TestableIndex index : new TestableIndex[]{ mIndex1, mIndexGeo}) {
testOneRecordCRUD(index);
testBatchRecordCRUD(index);
}
} catch (JSONException e) {
e.printStackTrace();
//Assert.fail();
}
}
public void testStartEngine() {
TestableIndex[] indexes = {mIndex1, mIndex2, mIndexGeo};
for (TestableIndex index : indexes) {
assertTrue(SRCH2Engine.getIndex(index.getIndexName()).getRecordCount() == Indexable.INDEX_RECORD_COUNT_NOT_SET);
}
Log.i(TAG, "testStartEngine");
waitForEngineReady();
for (TestableIndex index : indexes) {
assertTrue(SRCH2Engine.getIndex(index.getIndexName()).getRecordCount() == 0);
assertTrue(index.indexIsReadyCalled);
index.indexIsReadyCalled = false;
}
}
public void testOneRecordCRUD(TestableIndex index) throws JSONException {
Log.i(TAG, "testIndexableWithNoRecordsHasZeroRecordCount");
testIndexableGetRecordCountMatches(index, 0);
Log.i(TAG, "testOneRecordCRUD");
JSONObject record = index.getSucceedToInsertRecord();
Log.i(TAG, "testInsertShouldSuccess");
testInsertShouldSuccess(index, record);
Log.i(TAG, "testIndexableWithOneRecordedInsertedHasOneRecordCount");
testIndexableGetRecordCountMatches(index, 1);
Log.i(TAG, "testInsertShouldFail");
testInsertShouldFail(index, index.getFailToInsertRecord());
Log.i(TAG, "testIndexableWithOneRecordedInsertedHasOneRecordCountAndOneFailedInsert");
testIndexableGetRecordCountMatches(index, 1);
Log.i(TAG, "testGetRecordIdShouldSuccess");
testGetRecordIdShouldSuccess(index, new JSONArray(Arrays.asList(record)));
Log.i(TAG, "testSearchStringShouldSuccess");
testSearchStringShouldSuccess(index, index.getSucceedToSearchString(new JSONArray(Arrays.asList(record))));
Log.i(TAG, "testSearchStringShouldFail");
testSearchStringShouldFail(index, index.getFailToSearchString(new JSONArray(Arrays.asList(record))));
Log.i(TAG, "testSearchQueryShouldSuccess");
testSearchQueryShouldSuccess(index, index.getSucceedToSearchQuery(new JSONArray(Arrays.asList(record))));
Log.i(TAG, "testSearchQueryShouldFail");
testSearchQueryShouldFail(index, index.getFailToSearchQuery(new JSONArray(Arrays.asList(record))));
Log.i(TAG, "testUpdateExistShouldSuccess");
testUpdateExistShouldSuccess(index, index.getSucceedToUpdateExistRecord());
Log.i(TAG, "testUpdateNewShouldSuccess");
testUpdateNewShouldSuccess(index, index.getSucceedToUpsertRecord());
Log.i(TAG, "testIndexableWithOneRecordedInsertedAndOneRecordUpsertedGetRecordCount");
testIndexableGetRecordCountMatches(index, 2);
Log.i(TAG, "testUpdateShouldFail");
// TODO too much problem inside the http error responds inside th engine, need time to clean up
// testUpdateShouldFail(index, index.getFailToUpdateRecord());
Log.i(TAG, "testDeleteShouldSuccess");
testDeleteShouldSuccess(index, Arrays.asList(record.getString(index.getPrimaryKeyFieldName())));
Log.i(TAG, "testIndexableWithTwoRecordsAddedThenOneDeletedGetRecordCount");
testIndexableGetRecordCountMatches(index, 1);
testDeleteShouldSuccess(index, Arrays.asList(index.getSucceedToUpsertRecord().getString(index.getPrimaryKeyFieldName())));
Log.i(TAG, "testIndexableWithTwoRecordsAddedThenBothDeleted");
testIndexableGetRecordCountMatches(index, 0);
Log.i(TAG, "testDeleteShouldFail");
testDeleteShouldFail(index, index.getFailToDeleteRecord());
}
public void testBatchRecordCRUD(TestableIndex index) throws JSONException {
JSONArray records = index.getSucceedToInsertBatchRecords();
Log.i(TAG, "testIndexableGetRecordBeforeBatchInsert");
// testIndexableGetRecordCountMatches(index, 0);
Log.i(TAG, "testBatchInsertShouldSuccess");
testBatchInsertShouldSuccess(index, records);
Log.i(TAG, "testIndexableWith200BatchInsertsGetRecordShouldMatch");
testIndexableGetRecordCountMatches(index, records.length());
Log.i(TAG, "testGetRecordIdShouldSuccess");
testGetRecordIdShouldSuccess(index, records);
Log.i(TAG, "testBatchInsertShouldFail");
testBatchInsertShouldFail(index, index.getFailToInsertBatchRecord());
Log.i(TAG, "testSearchStringShouldSuccess");
testSearchStringShouldSuccess(index, index.getSucceedToSearchString(records));
Log.i(TAG, "testSearchStringShouldFail");
testSearchStringShouldFail(index, index.getFailToSearchString(records));
Log.i(TAG, "testSearchQueryShouldSuccess");
testSearchQueryShouldSuccess(index, index.getSucceedToSearchQuery(records));
Log.i(TAG, "testSearchQueryShouldFail");
testSearchQueryShouldFail(index, index.getFailToSearchQuery(records));
Log.i(TAG, "testBatchUpdateShouldSuccess");
testBatchUpdateShouldSuccess(index, index.getSucceedToUpdateBatchRecords());
Log.i(TAG, "testBatchUpdateShouldFail");
//TODO recover this test after fix the engine response
//testBatchUpdateShouldFail(index, index.getFailToUpdateBatchRecords());
ArrayList<String> ids = new ArrayList<String>();
for (int i = 0; i < records.length(); ++i) {
ids.add(records.getJSONObject(i).getString(index.getPrimaryKeyFieldName()));
}
Log.i(TAG, "testDeleteShouldSuccess");
testDeleteShouldSuccess(index, ids);
Log.i(TAG, "testIndexableWithAllRecordsDeleted");
testIndexableGetRecordCountMatches(index, 0);
Log.i(TAG, "testDeleteShouldFail");
testDeleteShouldFail(index, ids);
}
private void testGetRecordIdShouldSuccess(TestableIndex index, JSONArray records) throws JSONException {
for (int i = 0; i < records.length(); i++) {
index.getRecordbyID(records.getJSONObject(i).getString(index.getPrimaryKeyFieldName()));
getRecordResponse(index);
// Log.i(TAG, "expected record::tostring():" + records.getJSONObject(i).toString());
// Log.i(TAG, "actual response::tostring():" + mControlListener.recordResponse.record.toString());
// TODO wait engine to fix the all string type record
//assertTrue(mControlListener.recordResponse.record.toString().equals(records.getJSONObject(i).toString()));
JSONObject resultRecord = index.recordRetreived;
JSONObject record = resultRecord.getJSONObject(Indexable.SEARCH_RESULT_JSON_KEY_RECORD);
assertTrue(record.getString(
index.getPrimaryKeyFieldName()).equals(records.getJSONObject(i).getString(index.getPrimaryKeyFieldName())));
index.resetGetRecordResponseFields();
}
}
public void testMultiCoreSearch() {
// simplify the test cases, the mIndex1 and mIndex2 are of the same
TestableIndex [] testIndexes= {mIndex1, mIndex2, mIndexGeo};
JSONArray records = mIndex1.getSucceedToInsertBatchRecords();
Log.d(TAG, records.toString());
for(TestableIndex index : testIndexes) {
Log.i(TAG, "testBatchInsertShouldSuccess");
testBatchInsertShouldSuccess(index, records);
try {
Thread.currentThread().sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
for (String query : mIndex1.getSucceedToSearchString(records)) {
mResultListener.reset();
SRCH2Engine.searchAllIndexes(query);
getSearchResult();
assertTrue(mResultListener.resultRecordMap.size() == testIndexes.length);
for(TestableIndex index : testIndexes) {
assertTrue(index.verifyResult(query, mResultListener.resultRecordMap.get(index.getIndexName())));
}
}
for (Query query : mIndex1.getSucceedToSearchQuery(records)) {
mResultListener.reset();
SRCH2Engine.advancedSearchOnAllIndexes(query);
getSearchResult();
assertTrue(mResultListener.resultRecordMap.size() == testIndexes.length);
for(TestableIndex index : testIndexes) {
assertTrue(index.verifyResult(query, mResultListener.resultRecordMap.get(index.getIndexName())));
}
}
}
public void testIndexableGetRecordCountMatches(TestableIndex index, int expectedNumberOfRecords) {
assertEquals(index.getRecordCount(), expectedNumberOfRecords);
}
public void testInsertShouldSuccess(TestableIndex index, JSONObject record) {
index.resetInsertResponseFields();
index.insert(record);
getInsertResponse(index);
assertTrue(index.insertSuccessCount == 1);
}
public void testInsertShouldFail(TestableIndex index, JSONObject record) {
index.resetInsertResponseFields();
index.insert(record);
getInsertResponse(index);
assertTrue(index.insertSuccessCount == 0);
assertTrue(index.insertFailedCount == 1);
}
public void testBatchInsertShouldSuccess(TestableIndex index, JSONArray array) {
index.resetInsertResponseFields();
index.insert(array);
getInsertResponse(index);
assertTrue(index.insertSuccessCount == array.length());
assertTrue(index.insertFailedCount == 0);
}
public void testBatchInsertShouldFail(TestableIndex index, JSONArray array) {
index.resetInsertResponseFields();
index.insert(array);
getInsertResponse(index);
assertTrue(index.insertSuccessCount == 0);
assertTrue(index.insertFailedCount == array.length());
}
public void testSearchStringShouldSuccess(TestableIndex index, List<String> queries) {
for (String query : queries) {
mResultListener.reset();
index.search(query);
getSearchResult();
HashMap<String, ArrayList<JSONObject>> recordMap = mResultListener.resultRecordMap;
assertTrue(recordMap.size() == 1);
ArrayList<JSONObject> records = recordMap.get(index.getIndexName());
assertNotNull(records);
assertTrue(index.verifyResult(query, records));
}
}
public void testSearchStringShouldFail(TestableIndex index, List<String> queries) {
for (String query : queries) {
mResultListener.reset();
index.search(query);
getSearchResult();
assertTrue(mResultListener.resultRecordMap.size() == 1);
assertTrue(mResultListener.resultRecordMap.get(index.getIndexName()).size() == 0);
}
}
public void testSearchQueryShouldSuccess(TestableIndex index, List<Query> queries) {
for (Query query : queries) {
mResultListener.reset();
index.advancedSearch(query);
getSearchResult();
assertTrue(mResultListener.resultRecordMap.size() == 1);
assertTrue(mResultListener.resultRecordMap.get(index.getIndexName()) != null);
assertTrue(index.verifyResult(query, mResultListener.resultRecordMap.get(index.getIndexName())));
}
}
public void testSearchQueryShouldFail(TestableIndex index, List<Query> queries) {
for (Query query : queries) {
mResultListener.reset();
index.advancedSearch(query);
getSearchResult();
assertTrue(mResultListener.resultRecordMap.size() == 1);
Cat.d("testSearchQueryShouldFail::Query:", query.toString());
assertTrue(mResultListener.resultRecordMap.get(index.getIndexName()).size() == 0);
}
}
public void testUpdateExistShouldSuccess(TestableIndex index, JSONObject record) {
index.resetUpdateResponseFields();
index.update(record);
getUpdateResponse(index);
Cat.d("testUpdateExistShouldSuccess:", index.updateResponse);
assertTrue(index.updateSuccessCount == 1);
assertTrue(index.upsertSuccessCount == 0);
assertTrue(index.updateFailedCount == 0);
}
/**
 * Updating a record whose id does NOT yet exist must be counted as an upsert
 * (insert-on-update) rather than an in-place update, and must not fail.
 */
public void testUpdateNewShouldSuccess(TestableIndex index, JSONObject record) {
    index.resetUpdateResponseFields();
    index.update(record);
    getUpdateResponse(index);
    assertTrue(index.updateSuccessCount == 0);
    assertTrue(index.upsertSuccessCount == 1);
    assertTrue(index.updateFailedCount == 0);
}
/**
 * Updating with an invalid record must be reported as exactly one failure and
 * zero successful updates.
 */
public void testUpdateShouldFail(TestableIndex index, JSONObject record) {
    index.resetUpdateResponseFields();
    index.update(record);
    getUpdateResponse(index);
    assertTrue(index.updateSuccessCount == 0);
    assertTrue(index.updateFailedCount == 1);
}
/**
 * A batch update with all-valid records must report one success per array
 * element and no failures.
 */
public void testBatchUpdateShouldSuccess(TestableIndex index, JSONArray array) {
    index.resetUpdateResponseFields();
    index.update(array);
    getUpdateResponse(index);
    assertTrue(index.updateSuccessCount == array.length());
    assertTrue(index.updateFailedCount == 0);
}
/**
 * A batch update with all-invalid records must report one failure per array
 * element and no successes.
 */
public void testBatchUpdateShouldFail(TestableIndex index, JSONArray array) {
    index.resetUpdateResponseFields();
    index.update(array);
    getUpdateResponse(index);
    assertTrue(index.updateSuccessCount == 0);
    assertTrue(index.updateFailedCount == array.length());
}
/**
 * Deleting each existing id must report exactly one success and no failures
 * per deletion.
 */
public void testDeleteShouldSuccess(TestableIndex index, List<String> ids) {
    for (String recordId : ids) {
        index.resetDeleteResponseFields();
        index.delete(recordId);
        getDeleteResponse(index);
        assertTrue(index.deleteSuccessCount == 1);
        assertTrue(index.deleteFailedCount == 0);
    }
}
/**
 * Deleting each non-existent id must report exactly one failure and no
 * successes per deletion.
 */
public void testDeleteShouldFail(TestableIndex index, List<String> ids) {
    for (String recordId : ids) {
        index.resetDeleteResponseFields();
        index.delete(recordId);
        getDeleteResponse(index);
        assertTrue(index.deleteSuccessCount == 0);
        assertTrue(index.deleteFailedCount == 1);
    }
}
/**
 * Returns the test methods to run, in order. The engine must be started
 * ({@code testStartEngine}) before the main suite ({@code testAll}) runs.
 */
@Override
public List<String> getTestMethodNameListWithOrder() {
    // Arrays.asList varargs form — no need for an explicit String[] allocation.
    return Arrays.asList(
            "testStartEngine",
            "testAll"
            // "testMultiCoreSearch" is currently disabled
    );
}
@Override
public void beforeAll() {
    // One-time setup: bring up the SRCH2 engine before any test method runs.
    initializeSRCH2EngineAndCallStart();
}
@Override
public void afterAll() {
    // One-time teardown: stop the SRCH2 engine after the whole suite finishes.
    callSRCH2EngineStop();
}
@Override
public void beforeEach() {
    // No per-test setup required; the engine is started once in beforeAll().
}
@Override
public void afterEach() {
    // No per-test teardown required; the engine is stopped once in afterAll().
}
}
| |
/*-
* * Copyright 2016 Skymind, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*/
package org.datavec.api.records.writer.impl.misc;
import lombok.extern.slf4j.Slf4j;
import org.datavec.api.conf.Configuration;
import org.datavec.api.records.reader.impl.misc.SVMLightRecordReader;
import org.datavec.api.records.writer.impl.FileRecordWriter;
import org.datavec.api.split.partition.PartitionMetaData;
import org.datavec.api.writable.ArrayWritable;
import org.datavec.api.writable.Writable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Record writer for SVMLight format, which can generally
* be described as
*
* LABEL INDEX:VALUE INDEX:VALUE ...
*
* SVMLight format is well-suited to sparse data (e.g.,
* bag-of-words) because it omits all features with value
* zero.
*
* We support an "extended" version that allows for multiple
* targets (or labels) separated by a comma, as follows:
*
* LABEL1,LABEL2,... INDEX:VALUE INDEX:VALUE ...
*
* This can be used to represent either multitask problems or
* multilabel problems with sparse binary labels (controlled
* via the "MULTILABEL" configuration option).
*
* Like scikit-learn, we support both zero-based and one-based indexing.
*
* Further details on the format can be found at
* - http://svmlight.joachims.org/
* - http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html
* - http://scikit-learn.org/stable/modules/generated/sklearn.datasets.load_svmlight_file.html
*
* @author Adam Gibson (original)
* @author Josh Patterson
* @author dave@skymind.io
*/
@Slf4j
public class SVMLightRecordWriter extends FileRecordWriter {
    /* Configuration option keys, namespaced by this class's fully-qualified name. */
    public static final String NAME_SPACE = SVMLightRecordWriter.class.getName();
    public static final String FEATURE_FIRST_COLUMN = NAME_SPACE + ".featureStartColumn";
    public static final String FEATURE_LAST_COLUMN = NAME_SPACE + ".featureEndColumn";
    public static final String ZERO_BASED_INDEXING = NAME_SPACE + ".zeroBasedIndexing";
    public static final String ZERO_BASED_LABEL_INDEXING = NAME_SPACE + ".zeroBasedLabelIndexing";
    public static final String HAS_LABELS = NAME_SPACE + ".hasLabel";
    public static final String MULTILABEL = NAME_SPACE + ".multilabel";
    public static final String LABEL_FIRST_COLUMN = NAME_SPACE + ".labelStartColumn";
    public static final String LABEL_LAST_COLUMN = NAME_SPACE + ".labelEndColumn";
    /* Constants. */
    public static final String UNLABELED = "";
    protected int featureFirstColumn = 0; // First column with feature
    protected int featureLastColumn = -1; // Last column with feature (-1: infer on first write)
    protected boolean zeroBasedIndexing = false; // whether to use zero-based feature indexing; false (one-based) is standard SVMLight
    protected boolean zeroBasedLabelIndexing = false; // whether to use zero-based label indexing (NONSTANDARD!)
    protected boolean hasLabel = true; // Whether record has label
    protected boolean multilabel = false; // Whether labels are for multilabel binary classification
    protected int labelFirstColumn = -1; // First column with label (-1: infer on first write)
    protected int labelLastColumn = -1; // Last column with label (-1: infer on first write)

    public SVMLightRecordWriter() {}

    /**
     * Set DataVec configuration
     *
     * @param conf configuration holding the NAME_SPACE-prefixed options above
     */
    @Override
    public void setConf(Configuration conf) {
        super.setConf(conf);
        featureFirstColumn = conf.getInt(FEATURE_FIRST_COLUMN, 0);
        hasLabel = conf.getBoolean(HAS_LABELS, true);
        multilabel = conf.getBoolean(MULTILABEL, false);
        labelFirstColumn = conf.getInt(LABEL_FIRST_COLUMN, -1);
        labelLastColumn = conf.getInt(LABEL_LAST_COLUMN, -1);
        // Default last feature column to the column just before the first label column.
        featureLastColumn = conf.getInt(FEATURE_LAST_COLUMN, labelFirstColumn > 0 ? labelFirstColumn-1 : -1);
        zeroBasedIndexing = conf.getBoolean(ZERO_BASED_INDEXING, false);
        zeroBasedLabelIndexing = conf.getBoolean(ZERO_BASED_LABEL_INDEXING, false);
    }

    /**
     * Write next record in SVMLight format: LABEL INDEX:VALUE INDEX:VALUE ...
     *
     * NOTE: column-range fields (labelFirstColumn etc.) that were not explicitly
     * configured are inferred from the FIRST non-empty record written and then
     * stay fixed for this writer instance — so this method is stateful and not
     * safe for concurrent use.
     *
     * @param record record to serialize; empty records are skipped
     * @throws IOException if writing to the underlying stream fails
     * @throws NumberFormatException if a multilabel target is not -1, 0, or 1
     */
    @Override
    public PartitionMetaData write(List<Writable> record) throws IOException {
        if (!record.isEmpty()) {
            /* Infer label columns, if necessary. The default is
             * to assume that last column is a label and that the
             * first label column immediately follows the
             * last feature column.
             */
            if (hasLabel) {
                if (labelLastColumn < 0)
                    labelLastColumn = record.size() - 1;
                if (labelFirstColumn < 0) {
                    if (featureLastColumn > 0)
                        labelFirstColumn = featureLastColumn + 1;
                    else
                        labelFirstColumn = record.size() - 1;
                }
            }
            /* Infer feature columns, if necessary. The default is
             * to assume that the first column is a feature and that
             * the last feature column immediately precedes the first
             * label column, if there are any.
             */
            if (featureLastColumn < 0) {
                if (labelFirstColumn > 0)
                    featureLastColumn = labelFirstColumn - 1;
                else
                    featureLastColumn = record.size() - 1;
            }
            StringBuilder result = new StringBuilder();
            // Process labels
            if (hasLabel) {
                // Track label indices (zero- or one-based per configuration)
                int labelIndex = zeroBasedLabelIndexing ? 0 : 1;
                for (int i = labelFirstColumn; i <= labelLastColumn; i++) {
                    Writable w = record.get(i);
                    // Handle array-structured Writables, which themselves have multiple columns
                    if (w instanceof ArrayWritable) {
                        ArrayWritable arr = (ArrayWritable) w;
                        for (int j = 0; j < arr.length(); j++) {
                            double val = arr.getDouble(j);
                            // If multilabel, only store indices of non-zero labels
                            if (multilabel) {
                                if (val == 1.0) {
                                    result.append(SVMLightRecordReader.LABEL_DELIMITER + labelIndex);
                                } else if (val != 0.0 && val != -1.0)
                                    throw new NumberFormatException("Expect value -1, 0, or 1 for multilabel targets (found " + val + ")");
                            } else { // Store value of standard label
                                result.append(SVMLightRecordReader.LABEL_DELIMITER + val);
                            }
                            labelIndex++; // Increment label index for each entry in array
                        }
                    } else { // Handle scalar Writables
                        // If multilabel, only store indices of non-zero labels
                        if (multilabel) {
                            double val = Double.valueOf(w.toString());
                            if (val == 1.0) {
                                result.append(SVMLightRecordReader.LABEL_DELIMITER + labelIndex);
                            } else if (val != 0.0 && val != -1.0)
                                throw new NumberFormatException("Expect value -1, 0, or 1 for multilabel targets (found " + val + ")");
                        } else { // Store value of standard label
                            try { // Encode label as integer, if possible
                                int val = Integer.valueOf(w.toString());
                                result.append(SVMLightRecordReader.LABEL_DELIMITER + val);
                            } catch (Exception e) { // Not an integer — fall back to double formatting
                                double val = Double.valueOf(w.toString());
                                result.append(SVMLightRecordReader.LABEL_DELIMITER + val);
                            }
                        }
                        labelIndex++; // Increment label index once per scalar Writable
                    }
                }
            }
            if (result.toString().equals("")) { // Add "unlabeled" label if no labels found
                result.append(SVMLightRecordReader.LABEL_DELIMITER + UNLABELED);
            }
            // Track feature indices (zero- or one-based per configuration)
            int featureIndex = zeroBasedIndexing ? 0 : 1;
            for (int i = featureFirstColumn; i <= featureLastColumn; i++) {
                Writable w = record.get(i);
                // Handle array-structured Writables, which themselves have multiple columns
                if (w instanceof ArrayWritable) {
                    ArrayWritable arr = (ArrayWritable) w;
                    for (int j = 0; j < arr.length(); j++) {
                        double val = arr.getDouble(j);
                        if (val != 0) { // SVMLight omits zero-valued features (sparse format)
                            result.append(SVMLightRecordReader.PREFERRED_DELIMITER + featureIndex);
                            result.append(SVMLightRecordReader.FEATURE_DELIMITER + val);
                        }
                        featureIndex++; // Increment feature index for each entry in array
                    }
                } else {
                    double val = w.toDouble();
                    if (val != 0) { // SVMLight omits zero-valued features (sparse format)
                        result.append(SVMLightRecordReader.PREFERRED_DELIMITER + featureIndex);
                        result.append(SVMLightRecordReader.FEATURE_DELIMITER + val);
                    }
                    featureIndex++; // Increment feature index once per scalar Writable
                }
            }
            // Remove extra label delimiter at beginning (substring already returns String)
            String line = result.substring(1);
            // NOTE(review): getBytes() uses the platform default charset — confirm
            // this matches what SVMLightRecordReader expects when reading back.
            out.write(line.getBytes());
            out.write(NEW_LINE.getBytes());
        }
        return PartitionMetaData.builder().numRecordsUpdated(1).build();
    }
}
| |
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
package com.sun.org.apache.bcel.internal.generic;
/* ====================================================================
* The Apache Software License, Version 1.1
*
* Copyright (c) 2001 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Apache" and "Apache Software Foundation" and
* "Apache BCEL" must not be used to endorse or promote products
* derived from this software without prior written permission. For
* written permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache",
* "Apache BCEL", nor may "Apache" appear in their name, without
* prior written permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
import com.sun.org.apache.bcel.internal.Constants;
import com.sun.org.apache.bcel.internal.classfile.*;
import java.util.ArrayList;
/**
* Abstract super class for all possible java types, namely basic types
* such as int, object types like String and array types, e.g. int[]
*
* @author <A HREF="mailto:markus.dahm@berlin.de">M. Dahm</A>
*/
public abstract class Type implements java.io.Serializable {
  protected byte type;          // type tag, one of Constants.T_*
  protected String signature;   // JVM signature string for the type

  /** Predefined constants
   */
  public static final BasicType VOID = new BasicType(Constants.T_VOID);
  public static final BasicType BOOLEAN = new BasicType(Constants.T_BOOLEAN);
  public static final BasicType INT = new BasicType(Constants.T_INT);
  public static final BasicType SHORT = new BasicType(Constants.T_SHORT);
  public static final BasicType BYTE = new BasicType(Constants.T_BYTE);
  public static final BasicType LONG = new BasicType(Constants.T_LONG);
  public static final BasicType DOUBLE = new BasicType(Constants.T_DOUBLE);
  public static final BasicType FLOAT = new BasicType(Constants.T_FLOAT);
  public static final BasicType CHAR = new BasicType(Constants.T_CHAR);
  public static final ObjectType OBJECT = new ObjectType("java.lang.Object");
  public static final ObjectType STRING = new ObjectType("java.lang.String");
  public static final ObjectType STRINGBUFFER = new ObjectType("java.lang.StringBuffer");
  public static final ObjectType THROWABLE = new ObjectType("java.lang.Throwable");
  public static final Type[] NO_ARGS = new Type[0];
  public static final ReferenceType NULL = new ReferenceType(){};
  public static final Type UNKNOWN = new Type(Constants.T_UNKNOWN,
                                              "<unknown object>"){};

  protected Type(byte t, String s) {
    type = t;
    signature = s;
  }

  /**
   * @return signature for given type.
   */
  public String getSignature() { return signature; }

  /**
   * @return type as defined in Constants
   */
  public byte getType() { return type; }

  /**
   * @return stack size of this type (2 for long and double, 0 for void, 1 otherwise)
   */
  public int getSize() {
    switch(type) {
    case Constants.T_DOUBLE:
    case Constants.T_LONG: return 2;
    case Constants.T_VOID: return 0;
    default: return 1;
    }
  }

  /**
   * @return Type string, e.g. `int[]'
   */
  public String toString() {
    return ((this.equals(Type.NULL) || (type >= Constants.T_UNKNOWN)))? signature :
      Utility.signatureToString(signature, false);
  }

  /**
   * Convert type to Java method signature, e.g. int[] f(java.lang.String x)
   * becomes (Ljava/lang/String;)[I
   *
   * @param return_type what the method returns
   * @param arg_types what are the argument types
   * @return method signature for given type(s).
   */
  public static String getMethodSignature(Type return_type, Type[] arg_types) {
    StringBuffer buf = new StringBuffer("(");
    int length = (arg_types == null)? 0 : arg_types.length;
    for(int i=0; i < length; i++)
      buf.append(arg_types[i].getSignature());
    buf.append(')');
    buf.append(return_type.getSignature());
    return buf.toString();
  }

  // Remember position in string, see getArgumentTypes.
  // NOTE(review): static mutable state makes getType(String)/getArgumentTypes
  // non-thread-safe — confirm callers never parse signatures concurrently.
  private static int consumed_chars=0;

  /**
   * Convert signature to a Type object.
   * @param signature signature string such as Ljava/lang/String;
   * @return type object
   * @throws ClassFormatException if a reference signature has no closing `;'
   */
  public static final Type getType(String signature)
    throws StringIndexOutOfBoundsException
  {
    byte type = Utility.typeOfSignature(signature);

    if(type <= Constants.T_VOID) { // primitive (or void): one signature char
      consumed_chars = 1;
      return BasicType.getType(type);
    } else if(type == Constants.T_ARRAY) {
      int dim=0;
      do { // Count dimensions (leading `[' characters)
        dim++;
      } while(signature.charAt(dim) == '[');

      // Recurse, but just once, if the signature is ok
      Type t = getType(signature.substring(dim));
      consumed_chars += dim; // update counter: `[' chars plus element type

      return new ArrayType(t, dim);
    } else { // type == T_REFERENCE
      int index = signature.indexOf(';'); // Look for closing `;'

      if(index < 0)
        throw new ClassFormatException("Invalid signature: " + signature);

      consumed_chars = index + 1; // "Lblabla;" `L' and `;' are removed

      return new ObjectType(signature.substring(1, index).replace('/', '.'));
    }
  }

  /**
   * Convert return value of a method (signature) to a Type object.
   *
   * @param signature signature string such as (Ljava/lang/String;)V
   * @return return type
   */
  public static Type getReturnType(String signature) {
    try {
      // Read return type after `)'
      int index = signature.lastIndexOf(')') + 1;
      return getType(signature.substring(index));
    } catch(StringIndexOutOfBoundsException e) { // Should never occur
      throw new ClassFormatException("Invalid method signature: " + signature);
    }
  }

  /**
   * Convert arguments of a method (signature) to an array of Type objects.
   * @param signature signature string such as (Ljava/lang/String;)V
   * @return array of argument types
   */
  public static Type[] getArgumentTypes(String signature) {
    ArrayList<Type> vec = new ArrayList<Type>();
    int index;
    Type[] types;

    try { // Read all declarations between `(' and `)'
      if(signature.charAt(0) != '(')
        throw new ClassFormatException("Invalid method signature: " + signature);

      index = 1; // current string position

      while(signature.charAt(index) != ')') {
        vec.add(getType(signature.substring(index)));
        index += consumed_chars; // update position past the type just parsed
      }
    } catch(StringIndexOutOfBoundsException e) { // Should never occur
      throw new ClassFormatException("Invalid method signature: " + signature);
    }

    types = new Type[vec.size()];
    vec.toArray(types);
    return types;
  }

  /** Convert runtime java.lang.Class to BCEL Type object.
   * @param cl Java class
   * @return corresponding Type object
   * @throws IllegalArgumentException if cl is null
   */
  public static Type getType(java.lang.Class cl) {
    if(cl == null) {
      throw new IllegalArgumentException("Class must not be null");
    }

    /* That's an amazingly easy case, because getName() returns
     * the signature. That's what we would have liked anyway.
     */
    if(cl.isArray()) {
      return getType(cl.getName());
    } else if(cl.isPrimitive()) {
      // Fixed: the original chain tested Byte.TYPE twice; the duplicate
      // (unreachable) branch has been removed.
      if(cl == Integer.TYPE) {
        return INT;
      } else if(cl == Void.TYPE) {
        return VOID;
      } else if(cl == Double.TYPE) {
        return DOUBLE;
      } else if(cl == Float.TYPE) {
        return FLOAT;
      } else if(cl == Boolean.TYPE) {
        return BOOLEAN;
      } else if(cl == Byte.TYPE) {
        return BYTE;
      } else if(cl == Short.TYPE) {
        return SHORT;
      } else if(cl == Long.TYPE) {
        return LONG;
      } else if(cl == Character.TYPE) {
        return CHAR;
      } else {
        throw new IllegalStateException("Ooops, what primitive type is " + cl);
      }
    } else { // "Real" class
      return new ObjectType(cl.getName());
    }
  }

  /**
   * Build a JVM method signature from a reflected method, e.g.
   * int f(String s) becomes (Ljava/lang/String;)I
   *
   * @param meth reflected method
   * @return JVM signature string for the method
   */
  public static String getSignature(java.lang.reflect.Method meth) {
    StringBuffer sb = new StringBuffer("(");
    Class[] params = meth.getParameterTypes(); // avoid clone

    for(int j = 0; j < params.length; j++) {
      sb.append(getType(params[j]).getSignature());
    }

    sb.append(")");
    sb.append(getType(meth.getReturnType()).getSignature());
    return sb.toString();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import static java.lang.String.format;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
/**
* Validate ImportTsv + LoadIncrementalHFiles on a distributed cluster.
*/
@Category(IntegrationTests.class)
public class IntegrationTestImportTsv extends Configured implements Tool {

  private static final String NAME = IntegrationTestImportTsv.class.getSimpleName();
  private static final Log LOG = LogFactory.getLog(IntegrationTestImportTsv.class);

  // Ten rows, timestamp 1, two columns each — the fixture loaded via ImportTsv.
  protected static final String simple_tsv =
      "row1\t1\tc1\tc2\n" +
      "row2\t1\tc1\tc2\n" +
      "row3\t1\tc1\tc2\n" +
      "row4\t1\tc1\tc2\n" +
      "row5\t1\tc1\tc2\n" +
      "row6\t1\tc1\tc2\n" +
      "row7\t1\tc1\tc2\n" +
      "row8\t1\tc1\tc2\n" +
      "row9\t1\tc1\tc2\n" +
      "row10\t1\tc1\tc2\n";

  @Rule
  public TestName name = new TestName();

  // KeyValues expected back from a scan after the fixture above is loaded,
  // kept sorted with the same comparator the scan results use.
  protected static final Set<KeyValue> simple_expected =
      new TreeSet<KeyValue>(CellComparatorImpl.COMPARATOR) {
        private static final long serialVersionUID = 1L;
        {
          byte[] family = Bytes.toBytes("d");
          for (String line : simple_tsv.split("\n")) {
            String[] row = line.split("\t");
            byte[] key = Bytes.toBytes(row[0]);
            long ts = Long.parseLong(row[1]);
            byte[][] fields = { Bytes.toBytes(row[2]), Bytes.toBytes(row[3]) };
            add(new KeyValue(key, family, fields[0], ts, Type.Put, fields[0]));
            add(new KeyValue(key, family, fields[1], ts, Type.Put, fields[1]));
          }
        }
      };

  // this instance is initialized on first access when the test is run from
  // JUnit/Maven or by main when run from the CLI.
  protected static IntegrationTestingUtility util = null;

  public Configuration getConf() {
    return util.getConfiguration();
  }

  public void setConf(Configuration conf) {
    // Configuration always comes from the shared testing utility.
    LOG.debug("Ignoring setConf call.");
  }

  @BeforeClass
  public static void provisionCluster() throws Exception {
    if (null == util) {
      util = new IntegrationTestingUtility();
    }
    util.initializeCluster(1);
    if (!util.isDistributedCluster()) {
      // also need MR when running without a real cluster
      util.startMiniMapReduceCluster();
    }
  }

  @AfterClass
  public static void releaseCluster() throws Exception {
    util.restoreCluster();
    if (!util.isDistributedCluster()) {
      util.shutdownMiniMapReduceCluster();
    }
    util = null;
  }

  /**
   * Bulk-loads the generated HFiles and verifies the scanned table content
   * matches <code>simple_expected</code>.
   *
   * @param hfiles directory containing the HFiles produced by ImportTsv
   * @param tableName table to load into and verify
   */
  protected void doLoadIncrementalHFiles(Path hfiles, TableName tableName)
      throws Exception {

    String[] args = { hfiles.toString(), tableName.getNameAsString() };
    // Fixed typo in log message: "LoadIncrememntalHFiles" -> "LoadIncrementalHFiles".
    LOG.info(format("Running LoadIncrementalHFiles with args: %s", Arrays.asList(args)));
    assertEquals("Loading HFiles failed.",
      0, ToolRunner.run(new LoadIncrementalHFiles(new Configuration(getConf())), args));

    // Plain construction instead of the double-brace (anonymous subclass) idiom.
    Scan scan = new Scan();
    scan.setCacheBlocks(false);
    scan.setCaching(1000);

    // try-with-resources replaces the manual try/finally close.
    try (Table table = util.getConnection().getTable(tableName)) {
      Iterator<Result> resultsIt = table.getScanner(scan).iterator();
      Iterator<KeyValue> expectedIt = simple_expected.iterator();
      while (resultsIt.hasNext() && expectedIt.hasNext()) {
        Result r = resultsIt.next();
        for (Cell actual : r.rawCells()) {
          assertTrue(
            "Ran out of expected values prematurely!",
            expectedIt.hasNext());
          KeyValue expected = expectedIt.next();
          assertTrue(
            format("Scan produced surprising result. expected: <%s>, actual: %s",
              expected, actual),
            CellComparatorImpl.COMPARATOR.compare(expected, actual) == 0);
        }
      }
      assertFalse("Did not consume all expected values.", expectedIt.hasNext());
      assertFalse("Did not consume all scan results.", resultsIt.hasNext());
    }
  }

  /**
   * Confirm the absence of the {@link TotalOrderPartitioner} partitions file.
   */
  protected static void validateDeletedPartitionsFile(Configuration conf) throws IOException {
    if (!conf.getBoolean(IntegrationTestingUtility.IS_DISTRIBUTED_CLUSTER, false))
      return; // partitions file is only produced on a real distributed cluster

    FileSystem fs = FileSystem.get(conf);
    Path partitionsFile = new Path(TotalOrderPartitioner.getPartitionFile(conf));
    assertFalse("Failed to clean up partitions file.", fs.exists(partitionsFile));
  }

  @Test
  public void testGenerateAndLoad() throws Exception {
    LOG.info("Running test testGenerateAndLoad.");
    final TableName table = TableName.valueOf(name.getMethodName());
    String cf = "d";
    Path hfiles = new Path(
        util.getDataTestDirOnTestFS(table.getNameAsString()), "hfiles");

    Map<String, String> args = new HashMap<>();
    args.put(ImportTsv.BULK_OUTPUT_CONF_KEY, hfiles.toString());
    args.put(ImportTsv.COLUMNS_CONF_KEY,
        format("HBASE_ROW_KEY,HBASE_TS_KEY,%s:c1,%s:c2", cf, cf));
    // configure the test harness to NOT delete the HFiles after they're
    // generated. We need those for doLoadIncrementalHFiles
    args.put(TestImportTsv.DELETE_AFTER_LOAD_CONF, "false");

    // run the job, complete the load.
    util.createTable(table, new String[]{cf});
    Tool t = TestImportTsv.doMROnTableTest(util, table, cf, simple_tsv, args);
    doLoadIncrementalHFiles(hfiles, table);

    // validate post-conditions
    validateDeletedPartitionsFile(t.getConf());

    // clean up after ourselves.
    util.deleteTable(table);
    util.cleanupDataTestDirOnTestFS(table.getNameAsString());
    LOG.info("testGenerateAndLoad completed successfully.");
  }

  public int run(String[] args) throws Exception {
    if (args.length != 0) {
      System.err.println(format("%s [genericOptions]", NAME));
      System.err.println("  Runs ImportTsv integration tests against a distributed cluster.");
      System.err.println();
      ToolRunner.printGenericCommandUsage(System.err);
      return 1;
    }

    // adding more test methods? Don't forget to add them here... or consider doing what
    // IntegrationTestsDriver does.
    provisionCluster();
    testGenerateAndLoad();
    releaseCluster();

    return 0;
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    IntegrationTestingUtility.setUseDistributedCluster(conf);
    util = new IntegrationTestingUtility(conf);
    int status = ToolRunner.run(conf, new IntegrationTestImportTsv(), args);
    System.exit(status);
  }
}
| |
package org.terifan.ocr;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.Polygon;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Collections;
class CurvatureClassifier
{
private final static int WHITE_THRESHOLD = 160;
public final static String DEFAULT_ALPHABET =
"ABCDEFGHIJKLM" + "NOPQRSTUVWXYZ"
+ "abcdefghijklm" + "nopqrstuvwxyz"
+ "0123456789@+'" + "/\\\"*.-:,&()=*";
private ArrayList<Symbol> mSymbols;
private Page mPage;
private int mMatrixSize;
private double mOneThirdMatrix;
/**
 * Creates a classifier whose internal feature matrices are
 * aMatrixSize x aMatrixSize cells, and clears any learned symbols.
 */
public CurvatureClassifier(int aMatrixSize)
{
    mMatrixSize = aMatrixSize;
    mOneThirdMatrix = aMatrixSize / 3.0; // cached for thirds-based measurements
    reset();
}
/**
 * Returns the live list of learned symbols (not a copy).
 */
public ArrayList<Symbol> getSymbols()
{
    return mSymbols;
}
/**
 * Learns symbols from a template sheet laid out as a 6-row x 13-column grid of
 * glyph cells (78 cells, matching DEFAULT_ALPHABET's length).
 *
 * @param aFontName name recorded for the learned symbols
 * @param aBitmap   template sheet to cut glyph cells from
 * @param aAlphabet characters in grid order, or null to use DEFAULT_ALPHABET;
 *                  must have exactly DEFAULT_ALPHABET.length() characters
 * @throws IllegalArgumentException if aAlphabet has the wrong length
 */
public void learn(String aFontName, Bitmap aBitmap, String aAlphabet)
{
    if (aAlphabet == null)
    {
        aAlphabet = DEFAULT_ALPHABET;
    }
    if (aAlphabet.length() != DEFAULT_ALPHABET.length())
    {
        // Fixed: report the REQUIRED length; the original message printed the
        // provided alphabet's own length, which is trivially always "correct".
        throw new IllegalArgumentException("Alphabet must contain " + DEFAULT_ALPHABET.length() + " characters");
    }
    // Cell dimensions of the template grid, in pixels.
    int GW = 71;
    int GH = 69;
    ArrayList<TextBox> textBoxes = new ArrayList<>();
    for (int y = 0; y < 6; y++)
    {
        for (int x = 0; x < 13; x++)
        {
            // 1-pixel inset on every side to avoid the grid lines.
            textBoxes.add(new TextBox(new Rectangle(GW * x + 1, GH * y + 1, GW - 2, GH - 2)));
        }
    }
    for (TextBox box : textBoxes)
    {
        Symbol symbol = learnSymbol(aBitmap, aFontName, box, DEFAULT_ALPHABET, aAlphabet);
        if (symbol != null)
        {
            generateCurvatureBitmap(symbol, box);
            symbol.setSymbolBitmap(box.getSymbolBitmap());
        }
    }
}
/**
 * Discards all learned symbols, returning the classifier to its initial state.
 */
public void reset()
{
    mSymbols = new ArrayList<>();
}
/**
 * Cuts the symbol's glyph out of the page bitmap, trims its surrounding white
 * border, scales it to the classifier matrix size, and binarizes it
 * (gray > WHITE_THRESHOLD becomes white, else black). Stores the trimmed
 * region on the TextBox and the binarized matrix on the Symbol.
 */
private void extractBitmap(Bitmap aBitmap, Symbol aSymbol)
{
    TextBox box = aSymbol.mTextBox;
    Insets trim = aBitmap.getBorders(box.x, box.y, box.width, box.height);
    aSymbol.mBorders = trim;
    BufferedImage region = aBitmap.getRegion(box.x + trim.left, box.y + trim.top, box.x + box.width - trim.right + 1, box.y + box.height - trim.bottom + 1);
    box.setBitmap(region);
    BufferedImage scaled = ImageTools.resize(region, mMatrixSize, mMatrixSize, RenderingHints.VALUE_INTERPOLATION_BILINEAR, BufferedImage.TYPE_INT_RGB);
    for (int row = 0; row < mMatrixSize; row++)
    {
        for (int col = 0; col < mMatrixSize; col++)
        {
            int rgb = scaled.getRGB(col, row);
            // Average the R, G, B channels to a single gray value.
            int gray = ((255 & (rgb >> 16)) + (255 & (rgb >> 8)) + (255 & rgb)) / 3;
            scaled.setRGB(col, row, gray > WHITE_THRESHOLD ? 0xffffff : 0x000000);
        }
    }
    aSymbol.setBitmap(new Bitmap(scaled));
}
/**
 * Fills the symbol's closest-pixel distance matrix: for every cell of the
 * mMatrixSize x mMatrixSize grid, records the result of findClosestPixel.
 */
private void extractTemplateDistance(Symbol aSymbol)
{
    aSymbol.mClosestPixel = new int[mMatrixSize][mMatrixSize];
    for (int row = 0; row < mMatrixSize; row++)
    {
        for (int col = 0; col < mMatrixSize; col++)
        {
            aSymbol.mClosestPixel[row][col] = findClosestPixel(aSymbol, col, row);
        }
    }
}
/**
 * Extracts 8 contour profiles from the symbol's binarized bitmap, one per scan
 * direction, each resampled to mMatrixSize entries and stored in
 * aSymbol.mContour.
 *
 * Orientations 0-3 scan the full image (left->right, right->left, top->bottom,
 * bottom->top); orientations 4-7 repeat the same scans but start from the image
 * midline (ori == 1 shifts the scan start/end by half the width/height).
 * Each scan records the first black pixel's position, scaled into matrix
 * coordinates, accumulated and then averaged per resampled bin.
 */
private void extractContour(Symbol aSymbol)
{
    Bitmap image = aSymbol.getBitmap();
    double[][] contour = new double[8][mMatrixSize];
    int[][] count = new int[8][mMatrixSize];
    int w = image.getWidth();
    int h = image.getHeight();
    // Scale factors from image coordinates to matrix coordinates.
    double fx = mMatrixSize / (double)w;
    double fy = mMatrixSize / (double)h;
    for (int ori = 0; ori < 2; ori++)
    {
        // Scan each row left-to-right for the first black pixel.
        for (int y = 0; y < h; y++)
        {
            int x = w * ori / 2; // ori==1: start at the horizontal midline
            for (; x < w; x++)
            {
                if (image.isBlack(x, y))
                {
                    break;
                }
            }
            // NOTE(review): z = round(y * fy) can reach mMatrixSize when
            // h > 2 * mMatrixSize, and bins never written keep count 0 and
            // divide to NaN below — confirm inputs rule these cases out.
            int z = (int)Math.round(y * fy);
            contour[4 * ori + 0][z] += fx * x;
            count[4 * ori + 0][z] += 1;
        }
        // Scan each row right-to-left for the first black pixel.
        for (int y = 0; y < h; y++)
        {
            int x = w - 1 - w * ori / 2; // ori==1: start at the horizontal midline
            for (; x >= 0; x--)
            {
                if (image.isBlack(x, y))
                {
                    break;
                }
            }
            int z = (int)Math.round(y * fy);
            contour[4 * ori + 1][z] += fx * x;
            count[4 * ori + 1][z] += 1;
        }
        // Scan each column top-to-bottom for the first black pixel.
        for (int x = 0; x < w; x++)
        {
            int y = h * ori / 2; // ori==1: start at the vertical midline
            for (; y < h; y++)
            {
                if (image.isBlack(x, y))
                {
                    break;
                }
            }
            int z = (int)Math.round(x * fx);
            contour[4 * ori + 2][z] += fy * y;
            count[4 * ori + 2][z] += 1;
        }
        // Scan each column bottom-to-top for the first black pixel.
        for (int x = 0; x < w; x++)
        {
            int y = h - 1 - h * ori / 2; // ori==1: start at the vertical midline
            for (; y >= 0; y--)
            {
                if (image.isBlack(x, y))
                {
                    break;
                }
            }
            int z = (int)Math.round(x * fx);
            contour[4 * ori + 3][z] += fy * y;
            count[4 * ori + 3][z] += 1;
        }
    }
    // Average the accumulated positions per bin.
    for (int ori = 0; ori < 8; ori++)
    {
        for (int i = 0; i < mMatrixSize; i++)
        {
            contour[ori][i] /= count[ori][i];
        }
    }
    aSymbol.mContour = contour;
}
/**
 * Classifies the local slope of every contour point for all 8 orientations.
 *
 * For each point, the previous (a), current (b), and next (c) contour values
 * are compared and reduced to a coarse slope class stored in mSlopes:
 *   -1 = undefined/no contour, 0 = flat, 1 = falling, 2 = rising, 3 = edge-up.
 * The finer 16-way case id (branch number) is kept in mSlopes2 and the raw
 * "a:b:c" triple in mSlopesX — presumably for debugging; verify against users
 * of those fields.
 *
 * A contour value of -1 (no black pixel found) is mapped to mMatrixSize so it
 * compares as "beyond the far edge". Branch order below matters: earlier cases
 * shadow later ones.
 */
private void extractSlopes(Symbol aSymbol)
{
    int[][] slopes = new int[8][mMatrixSize];
    int[][] slopes2 = new int[8][mMatrixSize];
    String[][] slopesX = new String[8][mMatrixSize];
    double[][] contour = aSymbol.mContour;
    for (int orientation = 0; orientation < 8; orientation++)
    {
        for (int index = 0; index < mMatrixSize; index++)
        {
            // b = current value; a/c = neighbors, clamped at the ends.
            int b = (int)(contour[orientation][index ]);
            int a = index == 0 ? b : (int)(contour[orientation][index - 1]);
            int c = index == mMatrixSize - 1 ? b : (int)(contour[orientation][index + 1]);
            // Map "no contour" (-1) to the sentinel mMatrixSize.
            if (a == -1) a = mMatrixSize;
            if (b == -1) b = mMatrixSize;
            if (c == -1) c = mMatrixSize;
            slopesX[orientation][index] = a+":"+b+":"+c;
            int s;
            int t;
            if (b == mMatrixSize)
            {
                // No contour at this point.
                s = -1;
                t = 0;
            }
            else if (a == b && b == c)
            {
                // Perfectly flat.
                s = 0;
                t = 1;
            }
            else if (a == mMatrixSize && b == c)
            {
                s = 0;
                t = 2;
            }
            else if (a == b && c == mMatrixSize)
            {
                s = 0;
                t = 3;
            }
            else if (a < b && c < b)
            {
                // Local maximum.
                s = 2;
                t = 4;
            }
            else if (a > b && c > b)
            {
                // Local minimum.
                s = 0;
                t = 5;
            }
            else if (a > b && c <= b)
            {
                // Falling.
                s = 1;
                t = 6;
            }
            else if (a >= b && c < b)
            {
                s = 1;
                t = 7;
            }
            else if (a == mMatrixSize && c < b)
            {
                s = 1;
                t = 8;
            }
            else if (a > b && c == mMatrixSize)
            {
                s = 1;
                t = 9;
            }
            else if (a < b && c >= b)
            {
                // Rising.
                s = 2;
                t = 10;
            }
            else if (a <= b && c > b)
            {
                s = 2;
                t = 11;
            }
            else if (a == mMatrixSize && c > b)
            {
                s = 2;
                t = 12;
            }
            else if (a < b && c == mMatrixSize)
            {
                s = 3;
                t = 13;
            }
            else if (a == mMatrixSize && b == mMatrixSize && c == mMatrixSize)
            {
                s = -1;
                t = 14;
            }
            else
            {
                // Unclassified combination.
                s = -1;
                t = 15;
            }
            slopes[orientation][index] = s;
            slopes2[orientation][index] = t;
        }
    }
    aSymbol.mSlopes = slopes;
    aSymbol.mSlopes2 = slopes2;
    aSymbol.mSlopesX = slopesX;
}
/**
 * Converts the slope runs of each contour profile into triangular polygons
 * describing the glyph's curvature.
 *
 * For each of the 8 orientations, consecutive contour samples sharing a
 * slope class (0 = flat never terminates a run) are merged into one line
 * segment; every segment is stored as a right triangle (the segment plus
 * its axis-aligned legs) in mCurvature, with a +/-1 slope sign per triangle
 * in mCurvatureSlopes for later zoning.
 *
 * Fix: removed a leftover debug System.out.println that fired only for the
 * hard-coded page coordinates (476, 2405).
 */
private void extractCurvature(Symbol aSymbol)
{
    double[][] contour = aSymbol.mContour;
    int[][] slopes = aSymbol.mSlopes;
    aSymbol.mCurvature = new Polygon[8][];
    aSymbol.mCurvatureSlopes = new int[8][];
    ArrayList<Polygon> polygons = new ArrayList<>();
    ArrayList<Integer> polygonSlopes = new ArrayList<>();
    for (int orientation = 0; orientation < 8; orientation++)
    {
        // Orientations 2,3,6,7 are horizontal profiles (index runs along x).
        boolean hor = (orientation == 2 || orientation == 3 || orientation == 6 || orientation == 7);
        int tx = 0;
        int fromX = 0;
        int fromY = 0;
        boolean first = true;
        for (int i = 0; i < mMatrixSize; i++)
        {
            if (first)
            {
                tx = (int)contour[orientation][i];
            }
            else
            {
                // Extend the current segment while the slope class stays
                // compatible with the run's starting slope.
                for (int startSlope = slopes[orientation][i]; i < mMatrixSize; i++)
                {
                    if (contour[orientation][i] == -1)
                    {
                        break;
                    }
                    if (startSlope != slopes[orientation][i] && slopes[orientation][i] != 0)
                    {
                        break;
                    }
                    tx = (int)contour[orientation][i];
                }
            }
            if (tx == -1 || tx == mMatrixSize)
            {
                // No contour data: restart segment tracking.
                first = true;
                continue;
            }
            int toX, toY;
            if (hor)
            {
                toX = i - (first ? 0 : 1);
                toY = tx;
            }
            else
            {
                toX = tx;
                toY = i - (first ? 0 : 1);
            }
            if (!first && tx > -1 && tx < mMatrixSize && (fromX != toX || fromY != toY))
            {
                // Endpoints shifted into the orientation's tile of a 4x2
                // layout; only used to derive the slope sign below.
                int x1 = fromX + (orientation % 4) * mMatrixSize;
                int y1 = fromY + (orientation / 4) * mMatrixSize;
                int x2 = toX + (orientation % 4) * mMatrixSize;
                int y2 = toY + (orientation / 4) * mMatrixSize;
                int slope;
                if (orientation == 0 || orientation == 4)
                {
                    if (x2 < x1)
                    {
                        slope = 1;
                    }
                    else
                    {
                        slope = -1;
                    }
                }
                else if (orientation == 1 || orientation == 5)
                {
                    if (x2 < x1)
                    {
                        slope = -1;
                    }
                    else
                    {
                        slope = 1;
                    }
                }
                else if (orientation == 2 || orientation == 6)
                {
                    if (y2 < y1)
                    {
                        slope = -1;
                    }
                    else
                    {
                        slope = 1;
                    }
                }
                else
                {
                    if (y2 < y1)
                    {
                        slope = 1;
                    }
                    else
                    {
                        slope = -1;
                    }
                }
                // Two candidate right triangles over the segment; the slope
                // sign picks which side of the segment is filled.
                Polygon polyA = new Polygon(new int[]
                {
                    fromX, toX, fromX
                }, new int[]
                {
                    fromY, toY, toY
                }, 3);
                Polygon polyB = new Polygon(new int[]
                {
                    fromX, toX, toX
                }, new int[]
                {
                    fromY, toY, fromY
                }, 3);
                polygons.add(slope == 1 ? polyA : polyB);
                polygonSlopes.add(hor ? (slope == 1 ? -1 : 1) : slope);
            }
            fromX = toX;
            fromY = toY;
            first = false;
            if (i < mMatrixSize && contour[orientation][i] == -1)
            {
                first = true;
            }
        }
        aSymbol.mCurvature[orientation] = polygons.toArray(new Polygon[polygons.size()]);
        aSymbol.mCurvatureSlopes[orientation] = new int[polygonSlopes.size()];
        for (int i = 0; i < polygonSlopes.size(); i++)
        {
            aSymbol.mCurvatureSlopes[orientation][i] = polygonSlopes.get(i);
        }
        polygons.clear();
        polygonSlopes.clear();
    }
}
/**
 * Renders a debug image visualising the symbol's contour segments,
 * curvature triangles and slope data, one row per orientation, and stores
 * it on the text box via setSymbolBitmap.
 *
 * The segment-building loop below intentionally mirrors extractCurvature;
 * keep the two in sync when changing either.
 */
private void generateCurvatureBitmap(Symbol aSymbol, TextBox aTextBox)
{
    // Layout constants: each orientation gets a size x size tile stacked
    // vertically, with padding for the slope/area annotations.
    int scale = 8;
    int padLeft = 40;
    int padRight = 10;
    int padBottom = 10;
    int padX = 32;
    int padY = 64;
    int size = mMatrixSize * scale;
    BufferedImage output = new BufferedImage(padLeft + 1 * (size + padX) + padRight, 8 * (size + padY) + padBottom, BufferedImage.TYPE_INT_RGB);
    Graphics2D g = output.createGraphics();
    g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    g.setColor(Color.WHITE);
    g.fillRect(0, 0, output.getWidth(), output.getHeight());
    g.setFont(new Font("arial",Font.PLAIN,8));
    // Draw the glyph bitmap into each tile, whiting out the half that the
    // inner-contour orientations (4..7) do not scan, plus a faint grid.
    for (int orientation = 0; orientation < 8; orientation++)
    {
        int x = orientation / 8;
        int y = orientation % 8;
        int ox = padX + x * (padX + size) + padLeft;
        int oy = padY + y * (padY + size);
        g.drawImage(aSymbol.getBitmap().getImage(), ox, oy, size, size, null);
        g.setColor(new Color(255, 255, 255, 240));
        switch (orientation)
        {
            case 4:
                g.fillRect(ox, oy, size / 2, size);
                break;
            case 5:
                g.fillRect(ox + size / 2, oy, size / 2, size);
                break;
            case 6:
                g.fillRect(ox, oy, size, size / 2);
                break;
            case 7:
                g.fillRect(ox, oy + size / 2, size, size / 2);
                break;
        }
        g.setColor(new Color(255, 255, 255, 64));
        for (int i = 0; i <= mMatrixSize; i++)
        {
            g.drawLine(ox + i * scale, oy, ox + i * scale, oy + size);
            g.drawLine(ox, oy + i * scale, ox + size, oy + i * scale);
        }
    }
    // Faint third-lines marking the three curvature zones per tile.
    g.setStroke(new BasicStroke(3));
    g.setColor(new Color(0, 0, 0, 16));
    for (int orientation = 0; orientation < 8; orientation++)
    {
        int x = orientation / 8;
        int y = orientation % 8;
        for (int i = 1; i < 3; i++)
        {
            int ox = padX + x * padX + x * size + padLeft;
            int oy = padY + y * padY + y * size;
            if (orientation > 3)
            {
                g.drawLine(ox + i * size / 3, oy, ox + i * size / 3, oy + size);
            }
            else
            {
                g.drawLine(ox, oy + i * size / 3, ox + size, oy + i * size / 3);
            }
        }
    }
    // Re-run the segment merge from extractCurvature, drawing each segment
    // and its curvature triangle instead of storing it.
    double[][] contour = aSymbol.mContour;
    int[][] slopes = aSymbol.mSlopes;
    for (int orientation = 0; orientation < 8; orientation++)
    {
        int x = orientation / 8;
        int y = orientation % 8;
        boolean hor = (orientation == 2 || orientation == 3 || orientation == 6 || orientation == 7);
        int tmp = 0;
        int fromX = 0;
        int fromY = 0;
        boolean first = true;
        for (int i = 0; i < mMatrixSize; i++)
        {
            if (first)
            {
                tmp = (int)contour[orientation][i];
            }
            else
            {
                // Extend the segment while the slope class stays the same
                // (0 = flat never terminates a run).
                for (int startSlope = slopes[orientation][i]; i < mMatrixSize; i++)
                {
                    if (contour[orientation][i] == -1)
                    {
                        break;
                    }
                    if (startSlope != slopes[orientation][i] && slopes[orientation][i] != 0)
                    {
                        break;
                    }
                    tmp = (int)contour[orientation][i];
                }
            }
            if (tmp == -1 || tmp == mMatrixSize)
            {
                first = true;
                continue;
            }
            int toX, toY;
            if (hor)
            {
                toX = i - (first ? 0 : 1);
                toY = tmp;
            }
            else
            {
                toX = tmp;
                toY = i - (first ? 0 : 1);
            }
            if (!first && tmp > -1 && tmp < mMatrixSize && (fromX != toX || fromY != toY))
            {
                // Endpoints in output-image pixel coordinates.
                int x1 = (int)(padX + scale * fromX) + x * (padX + scale * mMatrixSize) + scale / 2 + padLeft;
                int y1 = (int)(padY + scale * fromY) + y * (padY + scale * mMatrixSize) + scale / 2;
                int x2 = (int)(padX + scale * toX) + x * (padX + scale * mMatrixSize) + scale / 2 + padLeft;
                int y2 = (int)(padY + scale * toY) + y * (padY + scale * mMatrixSize) + scale / 2;
                int slope;
                if (orientation == 0 || orientation == 4)
                {
                    if (x2 < x1)
                    {
                        slope = 1;
                    }
                    else
                    {
                        slope = -1;
                    }
                }
                else if (orientation == 1 || orientation == 5)
                {
                    if (x2 < x1)
                    {
                        slope = -1;
                    }
                    else
                    {
                        slope = 1;
                    }
                }
                else if (orientation == 2 || orientation == 6)
                {
                    if (y2 < y1)
                    {
                        slope = -1;
                    }
                    else
                    {
                        slope = 1;
                    }
                }
                else
                {
                    if (y2 < y1)
                    {
                        slope = 1;
                    }
                    else
                    {
                        slope = -1;
                    }
                }
                // Red = one slope sign, cyan = the other; triangle matches
                // the polygon extractCurvature would store.
                int[][] points = slope == 1 ? new int[][]{{x1, x2, x1},{y1, y2, y2}} : new int[][]{{x1, x2, x2},{y1, y2, y1}};
                Color c = Color.getHSBColor((hor ? (slope == 1 ? -1 : 1) : slope) == 1 ? 0f : 0.5f, 1, 1);
                g.setColor(new Color(c.getRed(), c.getGreen(), c.getBlue(), 128));
                g.fillPolygon(points[0], points[1], 3);
                g.setColor(c);
                g.drawLine(x1, y1, x2, y2);
            }
            fromX = toX;
            fromY = toY;
            first = false;
            if (i < mMatrixSize && contour[orientation][i] == -1)
            {
                first = true;
            }
        }
        // Print the per-zone curvature areas above each tile.
        g.setColor(Color.BLACK);
        for (int zone = 0; zone < 3; zone++)
        {
            for (int type = 0; type < 2; type++)
            {
                String text = "" + (int)aSymbol.mCurvatureVector[orientation][type][zone];
                int tx = padX + x * (padX + size) + 25 * type + padLeft;
                int ty = padY + y * (padY + size) + 11 * zone - 40;
                g.drawString(text, tx, ty);
            }
        }
    }
    g.drawString("" + aTextBox.x + ", " + aTextBox.y, 0, 10);
    // Per-row slope annotations down the left margin of each tile.
    g.setColor(Color.GREEN);
    for (int orientation = 0; orientation < 8; orientation++)
    {
        int x = orientation / 8;
        int y = orientation % 8;
        for (int i = 0; i < mMatrixSize; i++)
        {
            String text = aSymbol.mSlopesX[orientation][i]+" "+slopes[orientation][i]+" "+aSymbol.mSlopes2[orientation][i]+" "+(int)aSymbol.mContour[orientation][i];
            int tx = padX + x * (padX + size) - 30;
            int ty = padY + y * (padY + size) + i * scale + scale;
            g.drawString(text, tx, ty);
        }
    }
    aTextBox.setSymbolBitmap(output);
}
/**
 * Aggregates the curvature triangles into an 8 x 2 x 3 feature vector:
 * for every orientation, slope type (0 = falling, 1 = rising) and third of
 * the matrix, the total lattice area of the triangles in that zone.
 */
private void extractCurvatureVector(Symbol aSymbol)
{
    double[][][] vector = new double[8][2][3];
    aSymbol.mCurvatureVector = vector;
    // Zone boundaries at 0, 1/3, 2/3 and 3/3 of the matrix size.
    int b0 = (int)(0 * mOneThirdMatrix);
    int b1 = (int)(1 * mOneThirdMatrix);
    int b2 = (int)(2 * mOneThirdMatrix);
    int b3 = (int)(3 * mOneThirdMatrix);
    int[] bounds = {b0, b1, b2, b3};
    for (int ori = 0; ori < 8; ori++)
    {
        // These orientations hold vertical profiles, so their zones are
        // horizontal bands; the others use vertical bands.
        boolean bands = (ori == 0 || ori == 1 || ori == 4 || ori == 5);
        Polygon[] shapes = aSymbol.mCurvature[ori];
        for (int idx = 0; idx < shapes.length; idx++)
        {
            Polygon shape = shapes[idx];
            int slot = (aSymbol.mCurvatureSlopes[ori][idx] == -1) ? 0 : 1;
            for (int zone = 0; zone < 3; zone++)
            {
                int lo = bounds[zone];
                int hi = bounds[zone + 1];
                double area = bands
                    ? intersect(shape, 0, lo, mMatrixSize, hi)
                    : intersect(shape, lo, 0, hi, mMatrixSize);
                vector[ori][slot][zone] += area;
            }
        }
    }
}
/**
 * Counts the integer lattice points of the axis-aligned rectangle
 * (rx0, ry0)-(rx1, ry1) that fall inside polygon p — a pixel-area estimate
 * of the polygon clipped to the rectangle. Corner pairs may be given in
 * either order; both upper bounds are exclusive.
 */
private int intersect(Polygon p, int rx0, int ry0, int rx1, int ry1)
{
    int left = Math.min(rx0, rx1);
    int right = Math.max(rx0, rx1);
    int top = Math.min(ry0, ry1);
    int bottom = Math.max(ry0, ry1);
    int hits = 0;
    for (int y = top; y < bottom; y++)
    {
        for (int x = left; x < right; x++)
        {
            if (p.contains(x, y))
            {
                hits++;
            }
        }
    }
    return hits;
}
/**
 * Learns one training symbol cut from a font-sheet bitmap and adds it to
 * mSymbols.
 *
 * The sheet is assumed to be a 13-column grid of 71 x 69 pixel cells; the
 * box position is mapped to an index into the alphabet strings.
 *
 * Fix: aDefaultAlphabet was indexed under a guard that only checked
 * aAlphabet's length, throwing StringIndexOutOfBoundsException when the two
 * strings differ in length; each string is now guarded by its own length.
 *
 * @return the learned symbol, or null when the glyph cell is empty
 */
private Symbol learnSymbol(Bitmap aBitmap, String aFontName, TextBox aTextBox, String aDefaultAlphabet, String aAlphabet)
{
    Symbol symbol = new Symbol(aTextBox);
    int charIndex = 13 * (aTextBox.y / 69) + (aTextBox.x / 71);
    String character = " ";
    String defCharacter = " ";
    if (charIndex < aAlphabet.length())
    {
        character = "" + aAlphabet.charAt(charIndex);
    }
    if (charIndex < aDefaultAlphabet.length())
    {
        defCharacter = "" + aDefaultAlphabet.charAt(charIndex);
    }
    symbol.mFontName = aFontName;
    symbol.mCharacter = character;
    symbol.mDefCharacter = defCharacter;
    extractBitmap(aBitmap, symbol);
    // An all-white cell carries no glyph — skip it.
    if (symbol.getBitmap().getRectFillFactor(0, 0, mMatrixSize, mMatrixSize) == 0)
    {
        return null;
    }
    extractContour(symbol);
    extractSlopes(symbol);
    extractCurvature(symbol);
    extractCurvatureVector(symbol);
    extractTemplateDistance(symbol);
    mSymbols.add(symbol);
    return symbol;
}
/**
 * Classifies one text box on a page against the learned symbol set.
 *
 * Runs the same feature-extraction pipeline used during learning, renders
 * the debug curvature bitmap onto the text box, and returns the best
 * curvature match.
 *
 * NOTE(review): classifySymbolByCurvature currently always returns exactly
 * one result, so results.get(0) is safe, but it would throw if that method
 * ever returned an empty list — verify if its contract changes.
 */
public Result classifySymbol(Page aPage, TextBox aTextBox, Resolver aResolver)
{
    // Remembered so the resolver callbacks inside classification can see
    // the page being processed.
    mPage = aPage;
    Symbol symbol = new Symbol(aTextBox);
    extractBitmap(aPage.getBitmap(), symbol);
    extractContour(symbol);
    extractSlopes(symbol);
    extractCurvature(symbol);
    extractCurvatureVector(symbol);
    extractTemplateDistance(symbol);
    generateCurvatureBitmap(symbol, aTextBox);
    ArrayList<Result> results = classifySymbolByCurvature(symbol, aTextBox, aResolver);
    Collections.sort(results);
    return results.get(0);
}
// private ArrayList<Result> classifySymbolByContour(Symbol aSymbol, TextBox aTextBox, Resolver aResolver)
// {
// ArrayList<Result> results = new ArrayList<>();
//
// for (Symbol cmpSymbol : mSymbols)
// {
// if (!aResolver.acceptSymbol(mPage, aTextBox, cmpSymbol))
// {
// continue;
// }
//
// double cmpDiff = 0;
//
// for (int orientation = 0; orientation < 8; orientation++)
// {
// double[] symCont = aSymbol.mContour[orientation];
// double[] cmpCont = cmpSymbol.mContour[orientation];
// for (int i = 0; i < MATRIX_SIZE; i++)
// {
// cmpDiff += Math.abs(symCont[i] - cmpCont[i]);
// }
// }
//
// cmpDiff /= 8 * MATRIX_SIZE * MATRIX_SIZE;
// cmpDiff = 1 - cmpDiff;
//
//// if (debug)
//// {
//// System.out.println(cmpSymbol.mCharacter + " = " + cmpDiff);
//// }
//
// Result result = new Result(cmpDiff, cmpSymbol);
//
// results.add(result);
// }
//
// return results;
// }
//
//
// private ArrayList<Result> classifySymbolByTemplate(Symbol aSymbol, TextBox aTextBox, Resolver aResolver)
// {
// ArrayList<Result> results = new ArrayList<>();
//
// for (Symbol symbol : mSymbols)
// {
// if (!aResolver.acceptSymbol(mPage, aTextBox, symbol))
// {
// continue;
// }
//
// double score = 0;
//
// for (int y = 0; y < MATRIX_SIZE; y++)
// {
// for (int x = 0; x < MATRIX_SIZE; x++)
// {
// score += Math.abs(aSymbol.mClosestPixel[y][x] - symbol.mClosestPixel[y][x]);
// }
// }
//
// score /= MATRIX_SIZE * MATRIX_SIZE * MATRIX_SIZE;
// score = 1 - score;
//
//// if (debug)
//// {
//// System.out.println(symbol.mCharacter + " = " + score);
//// }
//
// results.add(new Result(score, symbol));
// }
//
// return results;
// }
/**
 * Returns the distance from (x, y) to the nearest black pixel of the
 * symbol's bitmap, measured in expanding square rings, or mMatrixSize when
 * no black pixel lies within that radius. Out-of-bounds probes return
 * white (the isBlack overload's false default).
 */
private int findClosestPixel(Symbol aSymbol, int x, int y)
{
    Bitmap bitmap = aSymbol.getBitmap();
    for (int ring = 0; ring < mMatrixSize; ring++)
    {
        for (int offset = -ring; offset <= ring; offset++)
        {
            // Probe the top, bottom, left and right edges of the ring.
            boolean hit = bitmap.isBlack(x + offset, y - ring, false)
                       || bitmap.isBlack(x + offset, y + ring, false)
                       || bitmap.isBlack(x - ring, y + offset, false)
                       || bitmap.isBlack(x + ring, y + offset, false);
            if (hit)
            {
                return ring;
            }
        }
    }
    return mMatrixSize;
}
/**
 * Scores the extracted symbol against every learned symbol using the
 * curvature feature vector (8 orientations x 2 slope types x 3 zones) and
 * returns a single-element list holding the closest match.
 *
 * NOTE(review): the accumulator is an int[], so the fractional part of
 * Math.abs(s1 - s2) is truncated by the compound assignment. The curvature
 * areas are whole lattice counts today, so nothing is lost — switch to
 * double[] if the vectors ever become fractional.
 *
 * NOTE(review): if the resolver rejects every learned symbol, cmpSymbol
 * stays null and the returned Result wraps a null symbol; confirm callers
 * tolerate that.
 */
private ArrayList<Result> classifySymbolByCurvature(Symbol aSymbol, TextBox aTextBox, Resolver aResolver)
{
    ArrayList<Result> results = new ArrayList<>();
    // points[sym] accumulates the L1 distance between curvature vectors;
    // lower is better. Rejected symbols keep 0 but can never win below
    // only if another symbol scores lower — see the null note above.
    int[] points = new int[mSymbols.size()];
    for (int orientation = 0; orientation < 8; orientation++)
    {
        for (int zone = 0; zone < 3; zone++)
        {
            for (int type = 0; type < 2; type++)
            {
                for (int sym = 0; sym < mSymbols.size(); sym++)
                {
                    Symbol cmpSymbol = mSymbols.get(sym);
                    if (!aResolver.acceptSymbol(mPage, aTextBox, cmpSymbol))
                    {
                        continue;
                    }
                    double s1 = cmpSymbol.mCurvatureVector[orientation][type][zone];
                    double s2 = aSymbol.mCurvatureVector[orientation][type][zone];
                    points[sym] += Math.abs(s1 - s2);
                }
            }
        }
    }
    // Pick the symbol with the smallest accumulated distance.
    Symbol cmpSymbol = null;
    int smallest = Integer.MAX_VALUE;
    for (int sym = 0; sym < mSymbols.size(); sym++)
    {
        if (points[sym] < smallest)
        {
            cmpSymbol = mSymbols.get(sym);
            smallest = points[sym];
        }
    }
    // NOTE(review): the winning distance is discarded — the Result is built
    // with a fixed score of 0.
    Result result = new Result(0, cmpSymbol);
    results.add(result);
    // for (Symbol cmpSymbol : mSymbols)
    // {
    // if (!aResolver.acceptSymbol(mPage, aTextBox, cmpSymbol))
    // {
    // continue;
    // }
    //
    // double cmpTotal = 0;
    //
    // for (int orientation = 0; orientation < 8; orientation++)
    // {
    // for (int zone = 0; zone < 3; zone++)
    // {
    // for (int type = 0; type < 2; type++)
    // {
    // double s1 = cmpSymbol.mCurvatureVector[orientation][type][zone];
    // double s2 = aSymbol.mCurvatureVector[orientation][type][zone];
    //
    // double d = Math.pow(Math.abs(s1 - s2), 2);
    //
    // cmpTotal += d;
    // }
    // }
    // }
    //
    // Result result = new Result(cmpTotal, cmpSymbol);
    //
    // results.add(result);
    // }
    return results;
}
}
| |
package appInspector;
import java.awt.MenuBar;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.event.Event;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Node;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.Button;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.ContextMenuBuilder;
import javafx.scene.control.MenuItem;
import javafx.scene.control.MenuItemBuilder;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.control.TreeCell;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeView;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.VBoxBuilder;
import javafx.scene.text.Text;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.stage.StageStyle;
import javafx.stage.WindowEvent;
import javafx.util.Callback;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
/**
 * FXML controller for the application-inspector window: wires the menu
 * actions and the editable project tree (a TreeView of Nodes).
 *
 * NOTE(review): utilsTools and dctmPrefs are never assigned anywhere in
 * this class, yet nearly every handler dereferences them — confirm they
 * are injected externally, otherwise these handlers throw
 * NullPointerException.
 */
public class ControllerInspector {
    // Reference to the main application
    private Main mainApp;
    private Utils utilsTools;
    private DctmPreferences dctmPrefs;
    // Currently selected tree node, updated by mouse clicks and the
    // context-menu handlers below.
    private Nodes nodeTree;
    // Value injected by FXMLLoader
    // public MenuBar mnuBar;
    public MenuItem mnuFileExit;
    public MenuItem mnuToolDiscovery;
    public MenuItem mnuPopupAdd;
    public TreeView<String> treeViewProject;
    public MenuItem treeMenuCxtAdd;
    public MenuItem treeMenuCxtDel;
    public TextArea txtComment;
    // Depth of the currently selected node and its human-readable label.
    public int depth;
    public String label = "";
    /**
     * Called by the FXMLLoader after injection: builds the initial
     * Root/Projects/Env tree and installs the editable cell factory.
     *
     * NOTE(review): initialize() runs before setMainApp(), so mainApp is
     * still null here; mainApp.NODE_TYPE_* only works if those are static
     * constants — confirm.
     */
    public void initialize()
    {
        Nodes root= new Nodes("Root", mainApp.NODE_TYPE_ROOT);
        root.setExpanded(true);
        Nodes rootProject= new Nodes("Projects",mainApp.NODE_TYPE_PROJECT);
        Nodes rootEnv= new Nodes("Env", mainApp.NODE_TYPE_ENV);
        root.getChildren().add(rootProject);
        rootProject.getChildren().add(rootEnv);
        treeViewProject.setRoot(root);
        treeViewProject.setEditable(true);
        treeViewProject.setCellFactory(new Callback<TreeView<String>,TreeCell<String>>(){
            @Override
            public TreeCell<String> call(TreeView<String> p) {
                return new TextFieldTreeCellImpl();
            }
        });
    }
    /**
     * Is called by the main application to give a reference back to itself.
     *
     * @param mainApp
     */
    public void setMainApp(Main mainApp) {
        this.mainApp = mainApp;
    }
    /**
     * Menu Actions.
     * @throws Exception
     * @throws IOException
     */
    @FXML public void handleExit() throws Exception {
        // Persist preferences before terminating the JVM.
        utilsTools.savePreferences("prefs.xml");
        utilsTools.printDebug("Exit");
        dctmPrefs.savePref();
        System.exit(0);
    }
    @FXML public void handleShowTree() {
        utilsTools.printDebug("handleShowTree");
        printTree((Nodes)treeViewProject.getRoot());
    }
    // Recursively dumps the tree (depth + value per node) to stdout.
    private void printTree(Nodes sub) {
        ObservableList<Nodes> list = sub.getChildren();
        System.out.println(sub.getDepth()+" "+sub.getValue());
        for(Nodes child: list){
            if(child.getChildren().isEmpty()){
                System.out.println(child.getDepth()+" "+child.getValue());
            } else {
                printTree(child);
            }
        }
    }
    @FXML public void handleAbout() {
        utilsTools.printDebug("About Dialog");
        Alert alert = new Alert(AlertType.INFORMATION);
        alert.setTitle("Information Dialog");
        alert.setHeaderText(mainApp.codeName+" "+mainApp.codeVersion);
        alert.setContentText("Inspector for ECM Documentum");
        alert.showAndWait();
    }
    /**
     * Records the clicked node and its depth/label for the other handlers.
     *
     * NOTE(review): getSelectedItem() can be null when the click lands on
     * empty space — confirm, otherwise this throws NullPointerException.
     */
    @FXML public void treeViewMouseClick(MouseEvent mouseEvent)
    {
        // System.out.println("treeViewMouseClick:"+mouseEvent.toString());
        TreeView jtree = (TreeView)mouseEvent.getSource();
        nodeTree = (Nodes)jtree.getSelectionModel().getSelectedItem();
        // System.out.println("nodeTree:"+nodeTree.getDepth());
        depth = nodeTree.getDepth();
        label = utilsTools.getDepth(depth);
        utilsTools.printDebug("Tree->treeViewMouseClick Depth:"+depth+" Label:"+label+" Button:"+mouseEvent.getButton());
    }
    @FXML void onMouseMove(MouseEvent event) {
        // System.out.println("Mouse move X:"+event.getX()+" Y:"+event.getY());
    }
    @FXML void mnuPopupAddProjectAction()
    {
        utilsTools.printDebug("Tree->Add Project Action");
        System.out.println(nodeTree.toString());
        // boolean okClicked = mainApp.showDiscoveryBroker();
    }
    @FXML void handleMenuDocbroker() throws Throwable {
        utilsTools.printDebug("Menu->Show Docbroker");
        showWindow("Hello");
    }
    /**
     * Adds a child node below the selection when editing starts, as long as
     * the selection is above the docbase level.
     *
     * NOTE(review): the local 'label' shadows the field of the same name —
     * confirm the field is not expected to be updated here.
     */
    @FXML void handleTreeEditStart()
    {
        utilsTools.printDebug("Tree->Edit Start | Depth:"+depth+" Label:"+label);
        int level = nodeTree.getDepth();
        if (level < mainApp.NODE_TYPE_DOCBASE)
        {
            String label = utilsTools.getDepth(depth+1);
            Nodes newItem = new Nodes(label, depth+1);
            nodeTree.getChildren().add(newItem);
            if (!nodeTree.isExpanded())
                nodeTree.setExpanded(true);
        }
    }
    @FXML void handleTreeEditCommit()
    {
        utilsTools.printDebug("Tree->Edit Commit");
    }
    @FXML void handleTreeEditCancel()
    {
        utilsTools.printDebug("Tree->Edit Cancel");
        if (nodeTree.isLeaf())
            System.out.println("Ultimo");
        else
            System.out.println("Sottonodi");
    }
    // -----------------------------------------------------------------------------
    /**
     * Editable tree cell with an add/remove context menu; commits on ENTER
     * and cancels on ESCAPE.
     */
    private final class TextFieldTreeCellImpl extends TreeCell<String> {
        private TextField textField;
        private ContextMenu contextMenu = new ContextMenu();
        public TextFieldTreeCellImpl() {
            MenuItem treeMenuCxtAdd = new MenuItem("Add Item");
            MenuItem treeMenuCxtDel = new MenuItem("Remove Item");
            contextMenu.getItems().add(treeMenuCxtAdd);
            contextMenu.getItems().add(treeMenuCxtDel);
            treeMenuCxtAdd.setOnAction(new EventHandler<ActionEvent>() {
                public void handle(ActionEvent e) {
                    // NOTE(review): label is computed from the stale depth
                    // before depth is refreshed from the clicked item —
                    // confirm this ordering is intentional.
                    label = utilsTools.getDepth(depth+1);
                    System.out.println("Add Item->Depth:"+depth+" Label:"+label);
                    nodeTree = (Nodes)getTreeItem();
                    depth = nodeTree.getDepth()+1;
                    Nodes newItem = new Nodes(label, depth);
                    nodeTree.getChildren().add(newItem);
                }
            });
            treeMenuCxtDel.setOnAction(new EventHandler<ActionEvent>() {
                public void handle(ActionEvent e) {
                    nodeTree = (Nodes)getTreeItem();
                    boolean remove = nodeTree.getParent().getChildren().remove(nodeTree);
                    System.out.println("Remove Item->Depth:"+depth+" Label:"+utilsTools.getDepth(depth));
                    System.out.println(nodeTree.toString());
                }
            });
            contextMenu.setOnShown(new EventHandler<WindowEvent>() {
                public void handle(WindowEvent e) {
                    // Refresh the menu texts to match the selected depth.
                    treeMenuCxtAdd.setText("Add "+utilsTools.getDepth(depth+1));
                    treeMenuCxtDel.setText("Remove "+utilsTools.getDepth(depth));
                    // System.out.println("shown");
                }
            });
        }
        @Override
        public void startEdit() {
            super.startEdit();
            if (textField == null) {
                createTextField();
            }
            setText(null);
            setGraphic(textField);
            textField.selectAll();
            utilsTools.printDebug("TextFieldTreeCellImpl->startEdit:"+this.getId());
        }
        @Override
        public void cancelEdit() {
            super.cancelEdit();
            setText((String) getItem());
            setGraphic(getTreeItem().getGraphic());
            utilsTools.printDebug("TextFieldTreeCellImpl->cancelEdit");
        }
        @Override
        public void updateItem(String item, boolean empty) {
            super.updateItem(item, empty);
            if (empty) {
                setText(null);
                setGraphic(null);
            } else {
                if (isEditing()) {
                    if (textField != null) {
                        textField.setText(getString());
                    }
                    setText(null);
                    setGraphic(textField);
                } else {
                    setText(getString());
                    setGraphic(getTreeItem().getGraphic());
                    setContextMenu(contextMenu);
                    /*
                    if ( getTreeItem().getParent()!= null)
                    {
                    setContextMenu(addMenu);
                    }
                    */
                }
            }
            // utilsTools.printDebug("TextFieldTreeCellImpl->updateItem");
        }
        // Lazily builds the in-place editor; ENTER commits, ESCAPE cancels.
        private void createTextField() {
            textField = new TextField(getString());
            textField.setOnKeyReleased(new EventHandler<KeyEvent>() {
                @Override
                public void handle(KeyEvent t) {
                    if (t.getCode() == KeyCode.ENTER) {
                        commitEdit(textField.getText());
                    } else if (t.getCode() == KeyCode.ESCAPE) {
                        cancelEdit();
                    }
                }
            });
            utilsTools.printDebug("TextFieldTreeCellImpl->createTextField");
        }
        private String getString() {
            return getItem() == null ? "" : getItem().toString();
        }
    }
    // -----------------------------------------------------------------------------
    // Opens ui/Dialog.fxml as an undecorated modal dialog.
    private void showWindow(String message) throws IOException {
        final FXMLLoader loader = new FXMLLoader(getClass().getResource("ui/Dialog.fxml"));
        loader.setController(new ControllerDialog(message));
        final Parent root = loader.load();
        final Scene scene = new Scene(root, 250, 150);
        Stage stage = new Stage();
        stage.initModality(Modality.APPLICATION_MODAL);
        stage.initStyle(StageStyle.UNDECORATED);
        stage.setScene(scene);
        stage.show();
    }
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.hash;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Ints;
import com.google.common.primitives.UnsignedInts;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.io.Serializable;
import javax.annotation.CheckForNull;
/**
* An immutable hash code of arbitrary bit length.
*
* @author Dimitris Andreou
* @author Kurt Alfred Kluever
* @since 11.0
*/
@ElementTypesAreNonnullByDefault
public abstract class HashCode {
HashCode() {}
/** Returns the number of bits in this hash code; a positive multiple of 8. */
public abstract int bits();
/**
* Returns the first four bytes of {@linkplain #asBytes() this hashcode's bytes}, converted to an
* {@code int} value in little-endian order.
*
* @throws IllegalStateException if {@code bits() < 32}
*/
public abstract int asInt();
/**
* Returns the first eight bytes of {@linkplain #asBytes() this hashcode's bytes}, converted to a
* {@code long} value in little-endian order.
*
* @throws IllegalStateException if {@code bits() < 64}
*/
public abstract long asLong();
/**
* If this hashcode has enough bits, returns {@code asLong()}, otherwise returns a {@code long}
* value with {@code asBytes()} as the least-significant bytes and {@code 0x00} as the remaining
* most-significant bytes.
*
* @since 14.0 (since 11.0 as {@code Hashing.padToLong(HashCode)})
*/
public abstract long padToLong();
/**
* Returns the value of this hash code as a byte array. The caller may modify the byte array;
* changes to it will <i>not</i> be reflected in this {@code HashCode} object or any other arrays
* returned by this method.
*/
// TODO(user): consider ByteString here, when that is available
public abstract byte[] asBytes();
/**
 * Copies bytes from this hash code into {@code dest}, starting at {@code offset}.
 * At most {@code min(maxLength, bits() / 8)} bytes are copied.
 *
 * @param dest the byte array into which the hash code will be written
 * @param offset the start offset in the data
 * @param maxLength the maximum number of bytes to write
 * @return the number of bytes written to {@code dest}
 * @throws IndexOutOfBoundsException if there is not enough room in {@code dest}
 */
@CanIgnoreReturnValue
public int writeBytesTo(byte[] dest, int offset, int maxLength) {
  int count = Math.min(maxLength, bits() / 8);
  Preconditions.checkPositionIndexes(offset, offset + count, dest.length);
  writeBytesToImpl(dest, offset, count);
  return count;
}
abstract void writeBytesToImpl(byte[] dest, int offset, int maxLength);
/**
* Returns a mutable view of the underlying bytes for the given {@code HashCode} if it is a
* byte-based hashcode. Otherwise it returns {@link HashCode#asBytes}. Do <i>not</i> mutate this
* array or else you will break the immutability contract of {@code HashCode}.
*/
byte[] getBytesInternal() {
return asBytes();
}
/**
* Returns whether this {@code HashCode} and that {@code HashCode} have the same value, given that
* they have the same number of bits.
*/
abstract boolean equalsSameBits(HashCode that);
/**
 * Creates a 32-bit {@code HashCode} representation of the given int value. The underlying bytes
 * are interpreted in little endian order (least-significant byte first).
 *
 * @since 15.0 (since 12.0 in HashCodes)
 */
public static HashCode fromInt(int hash) {
  return new IntHashCode(hash);
}
/** A 32-bit hash code backed by a single int, serialized little-endian. */
private static final class IntHashCode extends HashCode implements Serializable {
  final int hash;

  IntHashCode(int hash) {
    this.hash = hash;
  }

  @Override
  public int bits() {
    return 32;
  }

  @Override
  public byte[] asBytes() {
    // Little-endian: byte i carries bits [8*i, 8*i+8).
    byte[] bytes = new byte[4];
    for (int i = 0; i < bytes.length; i++) {
      bytes[i] = (byte) (hash >> (i * 8));
    }
    return bytes;
  }

  @Override
  public int asInt() {
    return hash;
  }

  @Override
  public long asLong() {
    throw new IllegalStateException("this HashCode only has 32 bits; cannot create a long");
  }

  @Override
  public long padToLong() {
    // Zero-extend rather than sign-extend.
    return UnsignedInts.toLong(hash);
  }

  @Override
  void writeBytesToImpl(byte[] dest, int offset, int maxLength) {
    int i = 0;
    while (i < maxLength) {
      dest[offset + i] = (byte) (hash >> (i * 8));
      i++;
    }
  }

  @Override
  boolean equalsSameBits(HashCode that) {
    return that.asInt() == hash;
  }

  private static final long serialVersionUID = 0;
}
/**
 * Creates a 64-bit {@code HashCode} representation of the given long value. The underlying bytes
 * are interpreted in little endian order (least-significant byte first).
 *
 * @since 15.0 (since 12.0 in HashCodes)
 */
public static HashCode fromLong(long hash) {
  return new LongHashCode(hash);
}
/** A 64-bit hash code backed by a single long, serialized little-endian. */
private static final class LongHashCode extends HashCode implements Serializable {
  final long hash;

  LongHashCode(long hash) {
    this.hash = hash;
  }

  @Override
  public int bits() {
    return 64;
  }

  @Override
  public byte[] asBytes() {
    // Little-endian: byte i carries bits [8*i, 8*i+8).
    return new byte[] {
      (byte) hash,
      (byte) (hash >> 8),
      (byte) (hash >> 16),
      (byte) (hash >> 24),
      (byte) (hash >> 32),
      (byte) (hash >> 40),
      (byte) (hash >> 48),
      (byte) (hash >> 56)
    };
  }

  @Override
  public int asInt() {
    // Truncates to the low 32 bits.
    return (int) hash;
  }

  @Override
  public long asLong() {
    return hash;
  }

  @Override
  public long padToLong() {
    // Already 64 bits wide; no padding needed.
    return hash;
  }

  @Override
  void writeBytesToImpl(byte[] dest, int offset, int maxLength) {
    for (int i = 0; i < maxLength; i++) {
      dest[offset + i] = (byte) (hash >> (i * 8));
    }
  }

  @Override
  boolean equalsSameBits(HashCode that) {
    return hash == that.asLong();
  }

  private static final long serialVersionUID = 0;
}
/**
 * Creates a {@code HashCode} from a byte array. The array is defensively copied to preserve the
 * immutability contract of {@code HashCode}. The array cannot be empty.
 *
 * @throws IllegalArgumentException if {@code bytes} is empty
 * @since 15.0 (since 12.0 in HashCodes)
 */
public static HashCode fromBytes(byte[] bytes) {
  checkArgument(bytes.length >= 1, "A HashCode must contain at least 1 byte.");
  return fromBytesNoCopy(bytes.clone());
}
/**
 * Creates a {@code HashCode} from a byte array. The array is <i>not</i> copied defensively, so it
 * must be handed-off so as to preserve the immutability contract of {@code HashCode}.
 */
static HashCode fromBytesNoCopy(byte[] bytes) {
  return new BytesHashCode(bytes);
}
/**
 * A hash code of arbitrary length backed by a byte array (little-endian).
 * The array is owned by this instance and must never be mutated.
 */
private static final class BytesHashCode extends HashCode implements Serializable {
  final byte[] bytes;

  BytesHashCode(byte[] bytes) {
    this.bytes = checkNotNull(bytes);
  }

  @Override
  public int bits() {
    return bytes.length * 8;
  }

  @Override
  public byte[] asBytes() {
    // Defensive copy so callers cannot mutate our backing array.
    return bytes.clone();
  }

  @Override
  public int asInt() {
    checkState(
        bytes.length >= 4,
        "HashCode#asInt() requires >= 4 bytes (it only has %s bytes).",
        bytes.length);
    // Assemble the first four bytes in little-endian order.
    return (bytes[0] & 0xFF)
        | ((bytes[1] & 0xFF) << 8)
        | ((bytes[2] & 0xFF) << 16)
        | ((bytes[3] & 0xFF) << 24);
  }

  @Override
  public long asLong() {
    checkState(
        bytes.length >= 8,
        "HashCode#asLong() requires >= 8 bytes (it only has %s bytes).",
        bytes.length);
    return padToLong();
  }

  @Override
  public long padToLong() {
    // Up to the first eight bytes, little-endian; missing high bytes are 0.
    long retVal = (bytes[0] & 0xFF);
    for (int i = 1; i < Math.min(bytes.length, 8); i++) {
      retVal |= (bytes[i] & 0xFFL) << (i * 8);
    }
    return retVal;
  }

  @Override
  void writeBytesToImpl(byte[] dest, int offset, int maxLength) {
    System.arraycopy(bytes, 0, dest, offset, maxLength);
  }

  @Override
  byte[] getBytesInternal() {
    // Exposes the backing array without copying; callers must not mutate.
    return bytes;
  }

  @Override
  boolean equalsSameBits(HashCode that) {
    // We don't use MessageDigest.isEqual() here because its contract does not guarantee
    // constant-time evaluation (no short-circuiting).
    if (this.bytes.length != that.getBytesInternal().length) {
      return false;
    }
    boolean areEqual = true;
    for (int i = 0; i < this.bytes.length; i++) {
      areEqual &= (this.bytes[i] == that.getBytesInternal()[i]);
    }
    return areEqual;
  }

  private static final long serialVersionUID = 0;
}
/**
* Creates a {@code HashCode} from a hexadecimal ({@code base 16}) encoded string. The string must
* be at least 2 characters long, and contain only valid, lower-cased hexadecimal characters.
*
* <p>This method accepts the exact format generated by {@link #toString}. If you require more
* lenient {@code base 16} decoding, please use {@link com.google.common.io.BaseEncoding#decode}
* (and pass the result to {@link #fromBytes}).
*
* @since 15.0
*/
public static HashCode fromString(String string) {
checkArgument(
string.length() >= 2, "input string (%s) must have at least 2 characters", string);
checkArgument(
string.length() % 2 == 0,
"input string (%s) must have an even number of characters",
string);
byte[] bytes = new byte[string.length() / 2];
for (int i = 0; i < string.length(); i += 2) {
int ch1 = decode(string.charAt(i)) << 4;
int ch2 = decode(string.charAt(i + 1));
bytes[i / 2] = (byte) (ch1 + ch2);
}
return fromBytesNoCopy(bytes);
}
private static int decode(char ch) {
if (ch >= '0' && ch <= '9') {
return ch - '0';
}
if (ch >= 'a' && ch <= 'f') {
return ch - 'a' + 10;
}
throw new IllegalArgumentException("Illegal hexadecimal character: " + ch);
}
/**
* Returns {@code true} if {@code object} is a {@link HashCode} instance with the identical byte
* representation to this hash code.
*
* <p><b>Security note:</b> this method uses a constant-time (not short-circuiting) implementation
* to protect against <a href="http://en.wikipedia.org/wiki/Timing_attack">timing attacks</a>.
*/
@Override
public final boolean equals(@CheckForNull Object object) {
if (object instanceof HashCode) {
HashCode that = (HashCode) object;
return bits() == that.bits() && equalsSameBits(that);
}
return false;
}
/**
* Returns a "Java hash code" for this {@code HashCode} instance; this is well-defined (so, for
* example, you can safely put {@code HashCode} instances into a {@code HashSet}) but is otherwise
* probably not what you want to use.
*/
@Override
public final int hashCode() {
// If we have at least 4 bytes (32 bits), just take the first 4 bytes. Since this is
// already a (presumably) high-quality hash code, any four bytes of it will do.
if (bits() >= 32) {
return asInt();
}
// If we have less than 4 bytes, use them all.
byte[] bytes = getBytesInternal();
int val = (bytes[0] & 0xFF);
for (int i = 1; i < bytes.length; i++) {
val |= ((bytes[i] & 0xFF) << (i * 8));
}
return val;
}
/**
* Returns a string containing each byte of {@link #asBytes}, in order, as a two-digit unsigned
* hexadecimal number in lower case.
*
* <p>Note that if the output is considered to be a single hexadecimal number, whether this string
* is big-endian or little-endian depends on the byte order of {@link #asBytes}. This may be
* surprising for implementations of {@code HashCode} that represent the number in big-endian
* since everything else in the hashing API uniformly treats multibyte values as little-endian.
*
* <p>To create a {@code HashCode} from its string representation, see {@link #fromString}.
*/
@Override
public final String toString() {
byte[] bytes = getBytesInternal();
StringBuilder sb = new StringBuilder(2 * bytes.length);
for (byte b : bytes) {
sb.append(hexDigits[(b >> 4) & 0xf]).append(hexDigits[b & 0xf]);
}
return sb.toString();
}
private static final char[] hexDigits = "0123456789abcdef".toCharArray();
}
| |
/* $Id$
*
* Copyright 2000, 2001, Compaq Computer Corporation;
* 2006 dsrg.org.
*/
package javafe.tc;
import java.io.ByteArrayOutputStream;
import javafe.ast.*;
import javafe.tc.TagConstants;
import javafe.util.Assert;
import javafe.util.Location;
public class Types
{
/**
* Types uses the inst pattern to allow subclasses to provide alternative
* implementations of some of the static methods here.
*/
static public /*@ non_null */ Types inst;
static {
inst = new Types();
}
/**
* Factory method for TypeSig structures
*/
//@ requires !(enclosingEnv instanceof EnvForCU);
//@ ensures \result != null;
public static TypeSig makeTypeSig(String simpleName,
/*@ non_null */ Env enclosingEnv,
/*@ non_null */ TypeDecl decl) {
return inst.makeTypeSigInstance(simpleName,
enclosingEnv,
decl);
}
//@ requires !(enclosingEnv instanceof EnvForCU);
//@ ensures \result != null;
protected TypeSig makeTypeSigInstance(String simpleName,
/*@ non_null */ Env enclosingEnv,
/*@ non_null */ TypeDecl decl) {
return new javafe.tc.TypeSig(simpleName,
enclosingEnv,
decl);
}
/**
* Factory method for TypeSig structures
*/
//@ requires \nonnullelements(packageName);
//@ requires (enclosingType != null) ==> (decl != null);
//@ requires (decl==null) == (CU==null);
//@ ensures \result != null;
protected static TypeSig makeTypeSig(String[] packageName,
/*@ non_null */ String simpleName,
TypeSig enclosingType,
TypeDecl decl,
CompilationUnit CU) {
return inst.makeTypeSigInstance(packageName,
simpleName,
enclosingType,
decl,
CU);
}
//@ requires \nonnullelements(packageName);
//@ requires (enclosingType != null) ==> (decl != null);
//@ requires (decl==null) == (CU==null);
//@ ensures \result != null;
protected TypeSig makeTypeSigInstance(String[] packageName,
/*@ non_null */ String simpleName,
TypeSig enclosingType,
TypeDecl decl,
CompilationUnit CU) {
return new javafe.tc.TypeSig(packageName,
simpleName,
enclosingType,
decl,
CU);
}
// ----------------------------------------------------------------------
// Hidden constructor
public Types() {}
/** Used to indicate the type of an illegal operation, so that error messages do
* not unnecessarily propagate; should only be used if the error has already been
* reported.
*/
//@ invariant errorType != null;
public static Type errorType = ErrorType.make();
// ----------------------------------------------------------------------
// Fields for primitive types
/*@ requires (tag == TagConstants.BOOLEANTYPE || tag == TagConstants.INTTYPE
|| tag == TagConstants.LONGTYPE || tag == TagConstants.CHARTYPE
|| tag == TagConstants.FLOATTYPE || tag == TagConstants.DOUBLETYPE
|| tag == TagConstants.VOIDTYPE || tag == TagConstants.NULLTYPE
|| tag == TagConstants.BYTETYPE || tag == TagConstants.SHORTTYPE); */
//@ ensures \result != null;
    private static final /*@ non_null */ PrimitiveType makePrimitiveType(int tag) {
        // Non-syntax variant: the resulting node carries no source location.
        return JavafePrimitiveType.makeNonSyntax(tag);
    }
//@ invariant voidType != null;
public static /*@ non_null */ PrimitiveType
voidType = makePrimitiveType( TagConstants.VOIDTYPE );
//@ invariant booleanType != null;
public static /*@ non_null */ PrimitiveType
booleanType = makePrimitiveType( TagConstants.BOOLEANTYPE );
//@ invariant intType != null;
public static /*@ non_null */ PrimitiveType
intType = makePrimitiveType( TagConstants.INTTYPE );
//@ invariant doubleType != null;
public static /*@ non_null */ PrimitiveType
doubleType = makePrimitiveType( TagConstants.DOUBLETYPE );
//@ invariant floatType != null;
public static /*@ non_null */ PrimitiveType
floatType = makePrimitiveType( TagConstants.FLOATTYPE );
//@ invariant longType != null;
public static /*@ non_null */ PrimitiveType
longType = makePrimitiveType( TagConstants.LONGTYPE );
//@ invariant charType != null;
public static /*@ non_null */ PrimitiveType
charType = makePrimitiveType( TagConstants.CHARTYPE );
//@ invariant nullType != null;
public static /*@ non_null */ PrimitiveType
nullType = makePrimitiveType( TagConstants.NULLTYPE );
//@ invariant byteType != null;
public static /*@ non_null */ PrimitiveType
byteType = makePrimitiveType( TagConstants.BYTETYPE );
//@ invariant shortType != null;
public static /*@ non_null */ PrimitiveType
shortType = makePrimitiveType( TagConstants.SHORTTYPE );
    /**
     * Re-creates every cached primitive/error type object and clears the
     * cached java.lang TypeSigs.  NOTE(review): presumably called when the
     * front end is re-initialized so stale AST nodes are not reused --
     * confirm with callers.
     */
    public static void remakeTypes() {
        errorType = ErrorType.make();
        voidType = makePrimitiveType( TagConstants.VOIDTYPE );
        booleanType = makePrimitiveType( TagConstants.BOOLEANTYPE );
        intType = makePrimitiveType( TagConstants.INTTYPE );
        doubleType = makePrimitiveType( TagConstants.DOUBLETYPE );
        floatType = makePrimitiveType( TagConstants.FLOATTYPE );
        longType = makePrimitiveType( TagConstants.LONGTYPE );
        charType = makePrimitiveType( TagConstants.CHARTYPE );
        nullType = makePrimitiveType( TagConstants.NULLTYPE );
        byteType = makePrimitiveType( TagConstants.BYTETYPE );
        shortType = makePrimitiveType( TagConstants.SHORTTYPE );
        // Drop cached java.lang signatures; they are re-looked-up lazily.
        s_javaLangPackage = null;
        s_javaLangObject = null;
        s_javaLangError = null;
        s_javaLangException = null;
        s_javaLangThrowable = null;
        s_javaLangString = null;
        s_javaLangCloneable = null;
        s_javaLangRuntimeException = null;
        s_javaLangClass = null;
        s_javaLangSystem = null;
    }
/***************************************************
* *
* Fields for java.lang types: *
* *
**************************************************/
/**
* Return the package java.lang as a String[] for use in calling
* OutsideEnv.lookup[deferred].
*/
//@ ensures \nonnullelements(\result);
public static String[] javaLangPackage() {
if (s_javaLangPackage==null) {
s_javaLangPackage = new String[2];
s_javaLangPackage[0] = "java";
s_javaLangPackage[1] = "lang";
}
return s_javaLangPackage;
}
//@ invariant s_javaLangPackage==null || \nonnullelements(s_javaLangPackage);
//@ spec_public
private static String[] s_javaLangPackage = null;
/**
* Find the TypeSig for the required package-member type
* java.lang.T.<p>
*
* If the type is not found in the classpath, an error message is
* reported via ErrorSet and an unloaded TypeSig is returned.<p>
*
* Precondition: the TypeSig has been initialized.<p>
*/
//@ requires T != null;
//@ ensures \result != null;
    public static TypeSig getJavaLang(String T) {
        // Deferred lookup: the TypeSig need not be loaded yet; per the
        // contract above, a missing type is reported via ErrorSet and an
        // unloaded TypeSig is returned.
        return OutsideEnv.lookupDeferred(javaLangPackage(), T);
    }
/*
* NOTE: All of the following javaLangXXX routines require that
* TypeSig be properly initialized as a precondition.
*/
//* Returns the TypeSig for java.lang.Object.
//@ ensures \result != null;
public static TypeSig javaLangObject() {
if (s_javaLangObject == null)
s_javaLangObject = getJavaLang("Object");
return s_javaLangObject;
}
private static TypeSig s_javaLangObject;
//* Returns the TypeSig for java.lang.System.
//@ ensures \result != null;
public static TypeSig javaLangSystem() {
if (s_javaLangSystem == null)
s_javaLangSystem = getJavaLang("System");
return s_javaLangSystem;
}
private static TypeSig s_javaLangSystem;
//* Returns the TypeSig for java.lang.Error.
//@ ensures \result != null;
public static TypeSig javaLangError() {
if (s_javaLangError == null)
s_javaLangError = getJavaLang("Error");
return s_javaLangError;
}
private static TypeSig s_javaLangError;
//* Returns the TypeSig for java.lang.Exception.
//@ ensures \result != null;
public static TypeSig javaLangException() {
if (s_javaLangException == null)
s_javaLangException = getJavaLang("Exception");
return s_javaLangException;
}
private static TypeSig s_javaLangException;
//* Returns the TypeSig for java.lang.Throwable.
//@ ensures \result != null;
public static TypeSig javaLangThrowable() {
if (s_javaLangThrowable == null)
s_javaLangThrowable = getJavaLang("Throwable");
return s_javaLangThrowable;
}
private static TypeSig s_javaLangThrowable;
//* Returns the TypeSig for java.lang.String.
//@ ensures \result != null;
public static TypeSig javaLangString() {
if (s_javaLangString == null)
s_javaLangString = getJavaLang("String");
return s_javaLangString;
}
private static TypeSig s_javaLangString;
//* Returns the TypeSig for java.lang.RuntimeException.
//@ ensures \result != null;
public static TypeSig javaLangRuntimeException() {
if (s_javaLangRuntimeException == null)
s_javaLangRuntimeException =
getJavaLang("RuntimeException");
return s_javaLangRuntimeException;
}
private static TypeSig s_javaLangRuntimeException;
//* Returns the TypeSig for java.lang.Cloneable.
//@ ensures \result != null;
public static TypeSig javaLangCloneable() {
if (s_javaLangCloneable == null)
s_javaLangCloneable = getJavaLang("Cloneable");
return s_javaLangCloneable;
}
private static TypeSig s_javaLangCloneable;
//* Returns the TypeSig for java.lang.Class
//@ ensures \result != null;
public static TypeSig javaLangClass() {
if (s_javaLangClass == null)
s_javaLangClass = getJavaLang("Class");
return s_javaLangClass;
}
private static TypeSig s_javaLangClass;
/***************************************************
* *
* Predicates on types: *
* *
**************************************************/
public static boolean isErrorType(Type t) {
return t instanceof ErrorType;
}
public static boolean isReferenceType(Type t) {
return !(t instanceof PrimitiveType)
&& !isErrorType(t);
}
public static boolean isReferenceOrNullType(Type t) {
return isReferenceType(t)
|| t.getTag() == TagConstants.NULLTYPE;
}
private static boolean isPrimitiveType(Type t, int tag) {
return (t instanceof PrimitiveType) && ( ((PrimitiveType)t).tag == tag);
}
public static boolean isByteType(Type t) {
return isPrimitiveType( t, TagConstants.BYTETYPE );
}
public static boolean isBooleanType(Type t) {
return isPrimitiveType( t, TagConstants.BOOLEANTYPE );
}
public static boolean isShortType(Type t){
return isPrimitiveType( t, TagConstants.SHORTTYPE );
}
public static boolean isCharType(Type t){
return isPrimitiveType( t, TagConstants.CHARTYPE );
}
public static boolean isDoubleType(Type t){
return isPrimitiveType( t, TagConstants.DOUBLETYPE );
}
public static boolean isFloatType(Type t){
return isPrimitiveType( t, TagConstants.FLOATTYPE );
}
public static boolean isIntType(Type t){
return isPrimitiveType( t, TagConstants.INTTYPE );
}
public static boolean isLongType(Type t){
return isPrimitiveType( t, TagConstants.LONGTYPE );
}
public static boolean isVoidType(Type t){
return isPrimitiveType( t, TagConstants.VOIDTYPE );
}
public static boolean isNumericType(Type t){
return inst.isNumericTypeInstance(t);
}
public boolean isNumericTypeInstance(Type t){
if( t instanceof PrimitiveType ) {
switch( ((PrimitiveType)t).tag ) {
case TagConstants.BYTETYPE:
case TagConstants.SHORTTYPE:
case TagConstants.INTTYPE:
case TagConstants.LONGTYPE:
case TagConstants.CHARTYPE:
case TagConstants.FLOATTYPE:
case TagConstants.DOUBLETYPE:
return true;
default:
return false;
}
} else
return false;
}
public static boolean isIntegralType(Type t){
return inst.isIntegralTypeInstance(t);
}
//@ ensures \result ==> t instanceof PrimitiveType;
public boolean isIntegralTypeInstance(Type t){
if( t instanceof PrimitiveType ) {
switch( ((PrimitiveType)t).tag ) {
case TagConstants.BYTETYPE:
case TagConstants.SHORTTYPE:
case TagConstants.INTTYPE:
case TagConstants.LONGTYPE:
case TagConstants.CHARTYPE:
return true;
default:
return false;
}
} else
return false;
}
public static boolean isFloatingPointType(Type t){
return inst.isFloatingPointTypeInstance(t);
}
public boolean isFloatingPointTypeInstance(Type t){
if( t instanceof PrimitiveType ) {
switch( ((PrimitiveType)t).tag ) {
case TagConstants.FLOATTYPE:
case TagConstants.DOUBLETYPE:
return true;
default:
return false;
}
} else
return false;
}
// ======================================================================
// Conversions on Types
//@ requires x != null && y != null;
/*@ ensures \result ==>
(x instanceof PrimitiveType) == (y instanceof PrimitiveType); */
public static boolean isSameType( Type x, Type y ) {
return inst.isSameTypeInstance(x, y);
}
//@ requires x != null && y != null;
/*@ ensures \result ==>
(x instanceof PrimitiveType) == (y instanceof PrimitiveType); */
protected boolean isSameTypeInstance( Type x, Type y ) {
if( x instanceof TypeName ) x = TypeSig.getSig( (TypeName)x);
if( y instanceof TypeName ) y = TypeSig.getSig( (TypeName)y);
int xTag = x.getTag();
if( xTag != y.getTag() ) return false;
switch( xTag ) {
case TagConstants.ARRAYTYPE:
return isSameType( ((ArrayType)x).elemType, ((ArrayType)y).elemType );
case TagConstants.TYPESIG:
return x==y;
default:
// x and y are the same primitive type
return true;
}
}
/** Returns true if and only if <code>x</code> is a subclass or
* superinterface of <code>y</code>. (The occurrence of "class"
* in the name of the method is rather unfortunate.)
*/
//@ requires x != null && y != null;
//@ ensures \result ==> (x instanceof TypeSig) || (x instanceof TypeName);
public static boolean isSubclassOf( Type x, TypeSig y ) {
if (x instanceof TypeName)
x = TypeSig.getSig( (TypeName)x);
Assert.notNull(y);
if( x instanceof TypeSig ) {
return ((TypeSig)x).isSubtypeOf(y);
} else
return false;
}
/**
* Returns true iff <code>x</code> is a superclass or
* superinterface of <code>y</code>, or if <code>x</code> is the
* same type as <code>y</code>.
*
* <b>Warning</b>: This is *not* the same as is <code>x</code> a
* subtype of <code>y</code>! It does not consider short below
* int.
*/
//@ requires x != null && y != null;
public static boolean isSubClassOrEq(/*non_null*/ Type x,
/*non_null*/ Type y) {
if (x instanceof ArrayType && y instanceof ArrayType) {
return isSubClassOrEq(((ArrayType)x).elemType, ((ArrayType)y).elemType);
}
if (x instanceof TypeName)
x = TypeSig.getSig((TypeName)x);
if (y instanceof TypeName)
y = TypeSig.getSig((TypeName)y);
if (x instanceof TypeSig && y instanceof TypeSig)
return ((TypeSig)x).isSubtypeOf((TypeSig)y);
else
return isSameType(x, y);
}
/** Checks if one Type is castable to another.
See JLS, P.67.
*/
//@ requires s != null && t != null;
public static boolean isCastable( Type s, Type t ) {
// Replace TypeNames by corresponding TypeSigs
if( s instanceof TypeName ) s = TypeSig.getSig( (TypeName)s);
if( t instanceof TypeName ) t = TypeSig.getSig( (TypeName)t);
return inst.isCastableInstance(s, t);
}
//@ requires s != null && t != null;
    /**
     * Instance hook for isCastable: decides whether a value of type s may be
     * cast to type t (cf. the JLS casting-conversion rules).  Expects any
     * TypeNames to have already been resolved to TypeSigs by the caller.
     */
    protected boolean isCastableInstance( Type s, Type t ) {
        Assert.notNull( s );
        Assert.notNull( t );
        if( s instanceof PrimitiveType )
        {
            if( t instanceof PrimitiveType ) {
                return isAnyPrimitiveConvertable( (PrimitiveType)s, (PrimitiveType)t );
            }
            else if( s.getTag() == TagConstants.NULLTYPE ) {
                // a cast from null to a reference type
                return true;
            }
            // other primitive -> reference: falls through to 'return false'
        }
        else if( s instanceof TypeSig )
        {
            TypeSig sSig = (TypeSig)s;
            TypeDecl sDecl = sSig.getTypeDecl();
            if( sDecl instanceof ClassDecl )
            {
                // s is a class
                if( t instanceof TypeSig )
                {
                    TypeSig tSig = (TypeSig)t;
                    TypeDecl tDecl = tSig.getTypeDecl();
                    if( tDecl instanceof ClassDecl )
                    {
                        // t is a class
                        // must be related classes
                        return tSig.isSubtypeOf( sSig )
                            || sSig.isSubtypeOf( tSig );
                    }
                    else
                    {
                        // t is an interface
                        // Require s is not final, or s implements t
                        return !Modifiers.isFinal( sDecl.modifiers )
                            || sSig.isSubtypeOf( tSig );
                    }
                }
                else if( t instanceof ArrayType )
                {
                    // t is an array type, s must be Object
                    return isSameType( sSig, javaLangObject() );
                }
                else
                {
                    // t is a primitive type, s is a class, so not castable
                    Assert.notFalse( t instanceof PrimitiveType ); //@nowarn Pre;
                    return false;
                }
            }
            else
            {
                // s is an interface
                if( t instanceof TypeSig )
                {
                    TypeSig tSig = (TypeSig)t;
                    TypeDecl tDecl = tSig.getTypeDecl();
                    if( tDecl instanceof ClassDecl )
                    {
                        // t is a class
                        // require t is not final, or t implements s
                        return !Modifiers.isFinal( tDecl.modifiers )
                            || tSig.isSubtypeOf( sSig );
                    }
                    else
                    {
                        // t is an interface
                        // is s and t contain methods with the same signature but
                        // different return types, then an error occurs
                        // TO BE DONE
                        return true;
                    }
                }
                else
                {
                    // t is a primitive or array type
                    // MAYBE SHOULD ALLOW CASTING OF CLONEABLE TO ARRAY
                    Assert.notFalse( t instanceof PrimitiveType //@ nowarn Pre;
                                     || t instanceof ArrayType );
                    return false;
                }
            }
        }
        else if( s instanceof ArrayType )
        {
            // s is an array
            Type sElem = ((ArrayType)s).elemType;
            if( t instanceof TypeSig )
            {
                // Must be Object or Cloneable
                Type tSig = (TypeSig)t;
                return isSameType( tSig, javaLangObject() )
                    || isSameType( tSig, javaLangCloneable() );
            }
            else if( t instanceof ArrayType )
            {
                Type tElem = ((ArrayType)t).elemType;
                if( sElem instanceof PrimitiveType
                    && tElem instanceof PrimitiveType )
                {
                    // require same element type
                    return sElem.getTag() == tElem.getTag();
                }
                else if( !(sElem instanceof PrimitiveType)
                         && !(tElem instanceof PrimitiveType) )
                {
                    // require elements to be castable
                    return isCastable( sElem, tElem );
                }
                else
                    return false;
            }
            else
            {
                Assert.notFalse( t instanceof PrimitiveType ); //@ nowarn Pre;
                return false;
            }
        }
        // Assert.fail("Fall thru2, s="+printName(s)+" t="+t+printName(t));
        return false;
    }
//@ requires x != null && y != null;
public static boolean isInvocationConvertable( Type x, Type y ) {
if( x instanceof TypeName ) x = TypeSig.getSig( (TypeName)x);
if( y instanceof TypeName ) y = TypeSig.getSig( (TypeName)y);
return inst.isInvocationConvertableInstance(x, y);
}
//@ requires x != null && y != null;
protected boolean isInvocationConvertableInstance( Type x, Type y ) {
if( isSameType(x,y) ) return true;
if( isWideningPrimitiveConvertable(x,y) ) return true;
if( isWideningReferenceConvertable(x,y) ) return true;
return false;
}
//@ requires x != null && y != null;
    /** True iff x widens to y via a primitive widening conversion (JLS 5.1.2). */
    protected static boolean isWideningPrimitiveConvertable( Type x, Type y ) {
        return inst.isWideningPrimitiveConvertableInstance(x,y);
    }

    //@ requires x != null && y != null;
    // Widening rows: byte -> {short,int,long,float,double};
    // short,char -> {int,long,float,double}; int -> {long,float,double};
    // long -> {float,double}; float -> {double}.
    protected boolean isWideningPrimitiveConvertableInstance( Type x, Type y ) {
        switch( x.getTag() ) {
            case TagConstants.BYTETYPE:
                switch( y.getTag() ) {
                    case TagConstants.SHORTTYPE:
                    case TagConstants.INTTYPE: case TagConstants.LONGTYPE:
                    case TagConstants.FLOATTYPE: case TagConstants.DOUBLETYPE:
                        return true;
                    default:
                        return false;
                }
            case TagConstants.SHORTTYPE: case TagConstants.CHARTYPE:
                switch( y.getTag() ) {
                    case TagConstants.INTTYPE: case TagConstants.LONGTYPE:
                    case TagConstants.FLOATTYPE: case TagConstants.DOUBLETYPE:
                        return true;
                    default:
                        return false;
                }
            case TagConstants.INTTYPE:
                switch( y.getTag() ) {
                    case TagConstants.LONGTYPE:
                    case TagConstants.FLOATTYPE: case TagConstants.DOUBLETYPE:
                        return true;
                    default:
                        return false;
                }
            case TagConstants.LONGTYPE:
                switch( y.getTag() ) {
                    case TagConstants.FLOATTYPE: case TagConstants.DOUBLETYPE:
                        return true;
                    default:
                        return false;
                }
            case TagConstants.FLOATTYPE:
                switch( y.getTag() ) {
                    case TagConstants.DOUBLETYPE:
                        return true;
                    default:
                        return false;
                }
            default:
                return false;
        }
    }
/** Returns true iff the first argument is convertable to the second
* argument, either through a widening primitive conversion,
* a narrowing primitive conversion, or the identity conversion.
*/
    protected static boolean isAnyPrimitiveConvertable( Type x, Type y ) {
        // Accepts every primitive-to-primitive cast unconditionally.
        // NOTE(review): this also allows boolean <-> numeric casts, which the
        // JLS forbids -- presumably such casts are rejected elsewhere; confirm
        // with the callers before relying on this.
        return true;
    }
//@ requires s != null && t != null;
    /**
     * True iff s widens to t via a widening reference conversion: subtype to
     * supertype, null to any reference/array type, or array to array with
     * widening-convertible element types (or any array to Object).
     */
    protected static boolean isWideningReferenceConvertable( Type s, Type t ) {
        return inst.isWideningReferenceConvertableInstance(s, t);
    }

    //@ requires s != null && t != null;
    protected boolean isWideningReferenceConvertableInstance( Type s, Type t ) {
        if( s instanceof TypeName ) s = TypeSig.getSig( (TypeName)s);
        if( t instanceof TypeName ) t = TypeSig.getSig( (TypeName)t);
        if(s instanceof TypeSig
           && t instanceof TypeSig
           && ((TypeSig)s).isSubtypeOf( (TypeSig)t ))
            return true;
        if( s.getTag() == TagConstants.NULLTYPE &&
            ( t instanceof TypeSig || t.getTag() == TagConstants.ARRAYTYPE ) )
            return true;
        if( s.getTag() == TagConstants.ARRAYTYPE ) {
            if( t.getTag() == TagConstants.ARRAYTYPE ) {
                Type sElem = ((ArrayType)s).elemType;
                Type tElem = ((ArrayType)t).elemType;
                // Same element type, or element types related by widening.
                return isSameType( sElem, tElem )
                    || isWideningReferenceConvertable(sElem,tElem);
            }
            else if( Types.isSameType( t, javaLangObject() ) ) {
                // NOTE(review): the JLS also widens arrays to Cloneable and
                // Serializable; only Object is accepted here -- confirm intent.
                return true;
            }
            else
                return false;
        }
        return false;
    }
/** Returns the TypeSig for a Type x, if x denotes a class type,
otherwise returns null. */
//@ requires x != null;
    public static TypeSig toClassTypeSig( Type x ) {
        switch( x.getTag() ) {
            case TagConstants.TYPENAME:
            {
                x = TypeSig.getSig( (TypeName)x);
                // fall thru -- deliberate fall-through into the TYPESIG case
            }
            case TagConstants.TYPESIG:
            {
                TypeSig tsig = (TypeSig)x;
                if( tsig.getTypeDecl() instanceof ClassDecl ) {
                    return tsig;
                } else {
                    // must be an interface type
                    return null;
                }
            }
            default:
                // x is a primitive type or an array type
                return null;
        }
    }
// ----------------------------------------------------------------------
// Numeric promotions
//@ requires t != null;
//@ ensures \result != null;
public static Type unaryPromote(Type t) {
if( isByteType(t) || isShortType(t) || isCharType(t) )
return intType;
else if( isNumericType(t) )
return t;
else {
Assert.fail("Not a numeric type");
return null; // dummy return
}
}
//@ ensures \result != null;
public static Type binaryNumericPromotion(Type x, Type y) {
Assert.notFalse( isNumericType(x) && isNumericType(y) ); //@ nowarn Pre;
if( isDoubleType(x) || isDoubleType(y) )
return doubleType;
else if( isFloatType(x) || isFloatType(y) )
return floatType;
else if( isLongType(x) || isLongType(y) )
return longType;
else
return intType;
}
public static Type baseType(Type t) {
if (!(t instanceof ArrayType)) return t;
return baseType( ((ArrayType)t).elemType );
}
    /**
     * Returns the default ("zero") literal for type t: null for reference
     * types, false for boolean, and the appropriate zero for each numeric
     * type.  Prints a diagnostic and returns null for unsupported types.
     */
    public static LiteralExpr zeroEquivalent(Type t) {
        if (isReferenceType(t)) {
            return LiteralExpr.make(TagConstants.NULLLIT,null,Location.NULL);
        } else if (isIntType(t)) {
            return LiteralExpr.make(TagConstants.INTLIT, new Integer(0), Location.NULL);
        } else if (isLongType(t)) {
            return LiteralExpr.make(TagConstants.LONGLIT, new Long(0), Location.NULL);
        } else if (isBooleanType(t)) {
            return LiteralExpr.make(TagConstants.BOOLEANLIT, Boolean.FALSE, Location.NULL);
        } else if (isDoubleType(t)) {
            return LiteralExpr.make(TagConstants.DOUBLELIT, new Double(0), Location.NULL);
        } else if (isFloatType(t)) {
            return LiteralExpr.make(TagConstants.FLOATLIT, new Float(0), Location.NULL);
        } else if (isShortType(t)) {
            return LiteralExpr.make(TagConstants.SHORTLIT, new Short((short)0), Location.NULL);
        } else if (isByteType(t)) {
            return LiteralExpr.make(TagConstants.BYTELIT, new Byte((byte)0), Location.NULL);
        } else if (isCharType(t)) {
            return LiteralExpr.make(TagConstants.CHARLIT, new Character((char)0), Location.NULL);
        }
        System.out.println("UNSUPPORTED TYPE - zeroEquivalent " + printName(t));
        return null;
    }
// ----------------------------------------------------------------------
// Miscilaneous operations
//@ requires x != null && y != null;
public static boolean isSameMethodSig(MethodDecl x, MethodDecl y) {
if( x.id != y.id ) return false;
return isSameFormalParaDeclVec( x.args, y.args );
}
//@ requires x != null && y != null;
public static boolean
isSameFormalParaDeclVec(FormalParaDeclVec x, FormalParaDeclVec y) {
if(x.size() != y.size() ) return false;
for( int i=0; i<x.size(); i++ )
if( !isSameType( x.elementAt(i).type, y.elementAt(i).type ) )
return false;
return true;
}
//@ requires x != null && y != null;
//@ requires x.args.count == y.args.count;
public static boolean routineMoreSpecific( RoutineDecl x, RoutineDecl y ) {
// should check that type containing x is invocation convertable
// to type containing y
Assert.notFalse( x.args.size() == y.args.size() );
for( int i=0; i<x.args.size(); i++ )
{
if( !isInvocationConvertable(x.args.elementAt(i).type,
y.args.elementAt(i).type ))
return false;
}
return true;
}
// *********************************************************************
/**
* Is an exception a checked one?
*/
static boolean isCheckedException(/*@ non_null */ Type E) {
return !Types.isSubclassOf(E, Types.javaLangRuntimeException())
&& !Types.isSubclassOf(E, Types.javaLangError());
}
/**
* Is "throws <x>" a valid overriding of "throws <y>"? <p>
*
* Answer: Each exception E in the list <x> must be either:
* (a) an unchecked exception
* (b) a subtype of some exception in the list <y>
*/
//@ requires x != null && y != null;
    static boolean isCompatibleRaises( TypeNameVec x, TypeNameVec y) {
        nextx:
        for (int i=0; i<x.size(); i++) {
            TypeSig xsig = TypeSig.getSig(x.elementAt(i));
            // Check (a): unchecked exceptions are always permitted.
            if (!isCheckedException(xsig))
                continue;
            // Check (b): some exception in <y> must cover xsig.
            for (int j=0; j<y.size(); j++) {
                if (xsig.isSubtypeOf(TypeSig.getSig(y.elementAt(j))))
                    continue nextx;
            }
            // xsig is checked and covered by nothing in <y>: incompatible.
            // NOTE(review): an old comment claimed this check was disabled for
            // Houdini, but the 'return false' is clearly active.
            return false;
        }
        return true;
    }
static boolean isCompatibleAccess( int x, int y ) {
if( Modifiers.isPublic(y) && !Modifiers.isPublic(x) )
return false;
if(Modifiers.isProtected(y) && !Modifiers.isPublic(x)
&& !Modifiers.isProtected(x) )
return false;
if( Modifiers.isPackage(y) && Modifiers.isPrivate(x) )
return false;
return true;
}
//@ requires args != null;
//@ ensures \nonnullelements(\result);
public static Type[] getFormalParaTypes( FormalParaDeclVec args ) {
Type[] r = new Type[ args.size() ];
for( int i=0; i<args.size(); i++ )
r[i] = args.elementAt(i).type;
return r;
}
/***************************************************
* *
* Generating print names for Type(s): *
* *
**************************************************/
/**
* Returns the name of a <code>Type</code> as a
* <code>String</code>. The resulting name will be fully qualified
* if the <code>Type</code> has been name resolved. <p>
*
* Note: <code>t</code> may safely be null.<p>
*
* Precondition: <code>PrettyPrint.inst</code> != null <p>
*/
//@ ensures \result != null;
public static String printName(Type t) {
return inst.printNameInstance(t);
}
//@ ensures \result != null;
protected String printNameInstance(Type t) {
if (t instanceof TypeName) {
TypeSig sig = TypeSig.getRawSig((TypeName)t);
if (sig != null)
return sig.toString();
} else if (t instanceof ArrayType)
return printName(((ArrayType)t).elemType) + "[]";
ByteArrayOutputStream result = new ByteArrayOutputStream(20);
javafe.ast.PrettyPrint.inst.print(result, t);
return result.toString();
}
/**
* Formats an array of <code>Type</code>s as a <code>String</code>
* containing a parenthesized list of user-readable names. The
* resulting names will be fully qualified if the
* <code>Type</code>s have been name resolved. <p>
*
* Sample output: "(int, javafe.tc.TypeSig, char[])" <p>
*
* Precondition: <code>PrettyPrint.inst</code> != null,
* <code>ts</code> != null <p>
*/
//@ requires ts != null;
public static String printName(Type[] ts) {
StringBuffer s = new StringBuffer("(");
for (int i=0; i<ts.length; i++ ) {
if (i != 0)
s.append(", ");
s.append(printName(ts[i]));
}
s.append(")");
return s.toString();
}
// ======================================================================
protected static /*@ non_null */ Identifier lenId = Identifier.intern("length");
//@ invariant lengthFieldDecl.id == lenId;
public static /*@ non_null */ FieldDecl lengthFieldDecl
= FieldDecl.makeInternal(Modifiers.ACC_PUBLIC|Modifiers.ACC_FINAL,
null,
lenId,
Types.intType,
null);
//@ requires t != null && caller != null;
//@ ensures \result != null;
//@ ensures \result.id == id;
public static FieldDecl lookupField(Type t, Identifier id, TypeSig caller)
throws LookupException
{
return inst.lookupFieldInstance(t, id, caller);
}
//@ requires t != null && caller != null;
//@ ensures \result != null;
//@ ensures \result.id == id;
protected FieldDecl lookupFieldInstance(Type t, Identifier id, TypeSig caller)
throws LookupException
{
Assert.notNull(t);
if( t instanceof TypeName)
t = TypeSig.getSig( (TypeName) t );
if( t instanceof TypeSig) {
return ((TypeSig)t).lookupField(id, caller );
} else if( t instanceof ArrayType ) {
if( id == lenId )
return lengthFieldDecl;
else
// Arrays inherit all fields from java.lang.Object:
return javaLangObject().lookupField(id, caller);
} else if( t instanceof PrimitiveType || isErrorType(t) ) {
throw new LookupException( LookupException.NOTFOUND );
} else {
Assert.fail("Unexpected type "+ t + "(" + t.getTag() + "), " + TagConstants.toString(t.getTag()));
return null;
}
}
//@ requires \nonnullelements(args) && caller != null;
//@ ensures \result != null;
//@ ensures \result.id == id;
public static MethodDecl lookupMethod(Type t, Identifier id,
Type[] args, TypeSig caller )
throws LookupException
{
return inst.lookupMethodInstance(t, id, args, caller);
}
//@ requires \nonnullelements(args) && caller != null;
//@ ensures \result != null;
//@ ensures \result.id == id;
/**
 * Instance-level method lookup behind {@link #lookupMethod}.  Named types
 * are resolved to signatures, arrays borrow the method set of
 * java.lang.Object, and primitives (or error types) have no methods.
 *
 * @throws LookupException if no applicable method is found
 */
protected MethodDecl lookupMethodInstance(Type t, Identifier id,
Type[] args, TypeSig caller )
throws LookupException
{
// Normalize: type names become signatures, arrays dispatch to Object.
if (t instanceof TypeName) {
t = TypeSig.getSig((TypeName) t);
}
if (t instanceof ArrayType) {
t = javaLangObject();
}
if (t instanceof TypeSig) {
return ((TypeSig) t).lookupMethod(id, args, caller);
}
if (!(t instanceof PrimitiveType) && !isErrorType(t)) {
// Unreachable for well-formed types; fail loudly in checked builds.
Assert.fail("Unexpected type: "+t);
}
throw new LookupException(LookupException.NOTFOUND);
}
} // end of class Types
/*
* Local Variables:
* Mode: Java
* fill-column: 85
* End:
*/
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.15.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.accumulo.core.tabletserver.thrift;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
public class TCompactionStats implements org.apache.thrift.TBase<TCompactionStats, TCompactionStats._Fields>, java.io.Serializable, Cloneable, Comparable<TCompactionStats> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCompactionStats");
private static final org.apache.thrift.protocol.TField ENTRIES_READ_FIELD_DESC = new org.apache.thrift.protocol.TField("entriesRead", org.apache.thrift.protocol.TType.I64, (short)1);
private static final org.apache.thrift.protocol.TField ENTRIES_WRITTEN_FIELD_DESC = new org.apache.thrift.protocol.TField("entriesWritten", org.apache.thrift.protocol.TType.I64, (short)2);
private static final org.apache.thrift.protocol.TField FILE_SIZE_FIELD_DESC = new org.apache.thrift.protocol.TField("fileSize", org.apache.thrift.protocol.TType.I64, (short)3);
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new TCompactionStatsStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new TCompactionStatsTupleSchemeFactory();
public long entriesRead; // required
public long entriesWritten; // required
public long fileSize; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
ENTRIES_READ((short)1, "entriesRead"),
ENTRIES_WRITTEN((short)2, "entriesWritten"),
FILE_SIZE((short)3, "fileSize");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // ENTRIES_READ
return ENTRIES_READ;
case 2: // ENTRIES_WRITTEN
return ENTRIES_WRITTEN;
case 3: // FILE_SIZE
return FILE_SIZE;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __ENTRIESREAD_ISSET_ID = 0;
private static final int __ENTRIESWRITTEN_ISSET_ID = 1;
private static final int __FILESIZE_ISSET_ID = 2;
private byte __isset_bitfield = 0;
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.ENTRIES_READ, new org.apache.thrift.meta_data.FieldMetaData("entriesRead", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.ENTRIES_WRITTEN, new org.apache.thrift.meta_data.FieldMetaData("entriesWritten", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.FILE_SIZE, new org.apache.thrift.meta_data.FieldMetaData("fileSize", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TCompactionStats.class, metaDataMap);
}
public TCompactionStats() {
}
public TCompactionStats(
long entriesRead,
long entriesWritten,
long fileSize)
{
this();
this.entriesRead = entriesRead;
setEntriesReadIsSet(true);
this.entriesWritten = entriesWritten;
setEntriesWrittenIsSet(true);
this.fileSize = fileSize;
setFileSizeIsSet(true);
}
/**
* Performs a deep copy on <i>other</i>.
*/
public TCompactionStats(TCompactionStats other) {
__isset_bitfield = other.__isset_bitfield;
this.entriesRead = other.entriesRead;
this.entriesWritten = other.entriesWritten;
this.fileSize = other.fileSize;
}
public TCompactionStats deepCopy() {
return new TCompactionStats(this);
}
@Override
public void clear() {
setEntriesReadIsSet(false);
this.entriesRead = 0;
setEntriesWrittenIsSet(false);
this.entriesWritten = 0;
setFileSizeIsSet(false);
this.fileSize = 0;
}
public long getEntriesRead() {
return this.entriesRead;
}
public TCompactionStats setEntriesRead(long entriesRead) {
this.entriesRead = entriesRead;
setEntriesReadIsSet(true);
return this;
}
public void unsetEntriesRead() {
__isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ENTRIESREAD_ISSET_ID);
}
/** Returns true if field entriesRead is set (has been assigned a value) and false otherwise */
public boolean isSetEntriesRead() {
return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ENTRIESREAD_ISSET_ID);
}
public void setEntriesReadIsSet(boolean value) {
__isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ENTRIESREAD_ISSET_ID, value);
}
public long getEntriesWritten() {
return this.entriesWritten;
}
public TCompactionStats setEntriesWritten(long entriesWritten) {
this.entriesWritten = entriesWritten;
setEntriesWrittenIsSet(true);
return this;
}
public void unsetEntriesWritten() {
__isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ENTRIESWRITTEN_ISSET_ID);
}
/** Returns true if field entriesWritten is set (has been assigned a value) and false otherwise */
public boolean isSetEntriesWritten() {
return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ENTRIESWRITTEN_ISSET_ID);
}
public void setEntriesWrittenIsSet(boolean value) {
__isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ENTRIESWRITTEN_ISSET_ID, value);
}
public long getFileSize() {
return this.fileSize;
}
public TCompactionStats setFileSize(long fileSize) {
this.fileSize = fileSize;
setFileSizeIsSet(true);
return this;
}
public void unsetFileSize() {
__isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __FILESIZE_ISSET_ID);
}
/** Returns true if field fileSize is set (has been assigned a value) and false otherwise */
public boolean isSetFileSize() {
return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __FILESIZE_ISSET_ID);
}
public void setFileSizeIsSet(boolean value) {
__isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __FILESIZE_ISSET_ID, value);
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
case ENTRIES_READ:
if (value == null) {
unsetEntriesRead();
} else {
setEntriesRead((java.lang.Long)value);
}
break;
case ENTRIES_WRITTEN:
if (value == null) {
unsetEntriesWritten();
} else {
setEntriesWritten((java.lang.Long)value);
}
break;
case FILE_SIZE:
if (value == null) {
unsetFileSize();
} else {
setFileSize((java.lang.Long)value);
}
break;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
case ENTRIES_READ:
return getEntriesRead();
case ENTRIES_WRITTEN:
return getEntriesWritten();
case FILE_SIZE:
return getFileSize();
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
case ENTRIES_READ:
return isSetEntriesRead();
case ENTRIES_WRITTEN:
return isSetEntriesWritten();
case FILE_SIZE:
return isSetFileSize();
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that instanceof TCompactionStats)
return this.equals((TCompactionStats)that);
return false;
}
/**
 * Field-by-field equality.  All three members are primitive longs, so the
 * generated "presence" flags are constant true and equality reduces to a
 * direct comparison of the values.
 */
public boolean equals(TCompactionStats that) {
  if (that == null) {
    return false;
  }
  if (this == that) {
    return true;
  }
  return this.entriesRead == that.entriesRead
      && this.entriesWritten == that.entriesWritten
      && this.fileSize == that.fileSize;
}
@Override
public int hashCode() {
int hashCode = 1;
hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(entriesRead);
hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(entriesWritten);
hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(fileSize);
return hashCode;
}
@Override
public int compareTo(TCompactionStats other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = java.lang.Boolean.compare(isSetEntriesRead(), other.isSetEntriesRead());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEntriesRead()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.entriesRead, other.entriesRead);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetEntriesWritten(), other.isSetEntriesWritten());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEntriesWritten()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.entriesWritten, other.entriesWritten);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetFileSize(), other.isSetFileSize());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetFileSize()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fileSize, other.fileSize);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
  // Renders exactly: TCompactionStats(entriesRead:X, entriesWritten:Y, fileSize:Z)
  // The generated "first" flag was dead after the first field, so the
  // separators are emitted unconditionally here with identical output.
  java.lang.StringBuilder sb = new java.lang.StringBuilder("TCompactionStats(");
  sb.append("entriesRead:").append(this.entriesRead);
  sb.append(", ").append("entriesWritten:").append(this.entriesWritten);
  sb.append(", ").append("fileSize:").append(this.fileSize);
  sb.append(")");
  return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class TCompactionStatsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public TCompactionStatsStandardScheme getScheme() {
return new TCompactionStatsStandardScheme();
}
}
private static class TCompactionStatsStandardScheme extends org.apache.thrift.scheme.StandardScheme<TCompactionStats> {
public void read(org.apache.thrift.protocol.TProtocol iprot, TCompactionStats struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // ENTRIES_READ
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.entriesRead = iprot.readI64();
struct.setEntriesReadIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // ENTRIES_WRITTEN
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.entriesWritten = iprot.readI64();
struct.setEntriesWrittenIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // FILE_SIZE
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.fileSize = iprot.readI64();
struct.setFileSizeIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, TCompactionStats struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(ENTRIES_READ_FIELD_DESC);
oprot.writeI64(struct.entriesRead);
oprot.writeFieldEnd();
oprot.writeFieldBegin(ENTRIES_WRITTEN_FIELD_DESC);
oprot.writeI64(struct.entriesWritten);
oprot.writeFieldEnd();
oprot.writeFieldBegin(FILE_SIZE_FIELD_DESC);
oprot.writeI64(struct.fileSize);
oprot.writeFieldEnd();
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class TCompactionStatsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public TCompactionStatsTupleScheme getScheme() {
return new TCompactionStatsTupleScheme();
}
}
private static class TCompactionStatsTupleScheme extends org.apache.thrift.scheme.TupleScheme<TCompactionStats> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, TCompactionStats struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
java.util.BitSet optionals = new java.util.BitSet();
if (struct.isSetEntriesRead()) {
optionals.set(0);
}
if (struct.isSetEntriesWritten()) {
optionals.set(1);
}
if (struct.isSetFileSize()) {
optionals.set(2);
}
oprot.writeBitSet(optionals, 3);
if (struct.isSetEntriesRead()) {
oprot.writeI64(struct.entriesRead);
}
if (struct.isSetEntriesWritten()) {
oprot.writeI64(struct.entriesWritten);
}
if (struct.isSetFileSize()) {
oprot.writeI64(struct.fileSize);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, TCompactionStats struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
java.util.BitSet incoming = iprot.readBitSet(3);
if (incoming.get(0)) {
struct.entriesRead = iprot.readI64();
struct.setEntriesReadIsSet(true);
}
if (incoming.get(1)) {
struct.entriesWritten = iprot.readI64();
struct.setEntriesWrittenIsSet(true);
}
if (incoming.get(2)) {
struct.fileSize = iprot.readI64();
struct.setFileSizeIsSet(true);
}
}
}
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
private static void unusedMethod() {}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package npanday.packaging;
import npanday.PathUtil;
import org.apache.maven.archiver.MavenArchiveConfiguration;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.assembly.AssemblerConfigurationSource;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.filtering.MavenFileFilter;
import java.io.File;
import java.util.Collections;
import java.util.List;
/**
* Used as a quite static configuration source for preparation of NPanday
* packages.
*
* @author <a href="mailto:lcorneliussen@apache.org">Lars Corneliussen</a>
*/
public class PackagePreparationConfigurationSource
implements AssemblerConfigurationSource
{
private File outputDirectory;
private String finalName;
private String assemblyDescriptorRef;
private String assemblyDescriptorFile;
private MavenSession session;
private MavenFileFilter fileFilter;
private MavenProject project;
public PackagePreparationConfigurationSource( MavenSession session, MavenFileFilter fileFilter )
{
this.session = session;
this.project = session.getCurrentProject();
this.fileFilter = fileFilter;
File prepackageDir = PathUtil.getPreparedPackageFolder( project );
outputDirectory = prepackageDir.getParentFile();
finalName = prepackageDir.getName();
}
public String getDescriptor()
{
return null;
}
public String getDescriptorId()
{
return null;
}
public String[] getDescriptors()
{
if (assemblyDescriptorFile != null){
return new String[] {assemblyDescriptorFile};
}
return new String[0];
}
public String[] getDescriptorReferences()
{
if ( assemblyDescriptorRef != null){
return new String[] { assemblyDescriptorRef };
}
return new String[0];
}
public File getDescriptorSourceDirectory()
{
return null;
}
public File getBasedir()
{
return project.getBasedir();
}
public MavenProject getProject()
{
return project;
}
public boolean isSiteIncluded()
{
return false;
}
public File getSiteDirectory()
{
throw new UnsupportedOperationException();
}
public String getFinalName()
{
return finalName;
}
public boolean isAssemblyIdAppended()
{
return false;
}
public String getClassifier()
{
throw new UnsupportedOperationException("The classifier will be part of the final name already.");
}
public String getTarLongFileMode()
{
throw new UnsupportedOperationException("Tar is not supported for packaging preparations!");
}
public File getOutputDirectory()
{
return this.outputDirectory;
}
/**
 * Working directory used during assembly of the prepared package.
 *
 * BUG FIX: the original hard-coded {@code "packages\\temp\\workdir"}.
 * Backslash is not a path separator on non-Windows platforms, so on
 * Linux/macOS this produced a single directory literally named
 * {@code packages\temp\workdir}.  Building the path through nested
 * {@link File} parent/child constructors is portable everywhere.
 *
 * @return the platform-correct {@code <build-dir>/packages/temp/workdir}
 */
public File getWorkingDirectory()
{
    File packages = new File( project.getBuild().getDirectory(), "packages" );
    return new File( new File( packages, "temp" ), "workdir" );
}
public MavenArchiveConfiguration getJarArchiveConfiguration()
{
throw new UnsupportedOperationException();
}
public ArtifactRepository getLocalRepository()
{
return session.getLocalRepository();
}
/**
 * Temporary root directory for the assembly run.
 *
 * BUG FIX: the original hard-coded {@code "packages\\temp\\tmpdir"};
 * backslashes are not path separators on non-Windows platforms, yielding
 * one oddly named directory on Linux/macOS.  Nested {@link File}
 * parent/child constructors build the path portably.
 *
 * @return the platform-correct {@code <build-dir>/packages/temp/tmpdir}
 */
public File getTemporaryRootDirectory()
{
    File packages = new File( project.getBuild().getDirectory(), "packages" );
    return new File( new File( packages, "temp" ), "tmpdir" );
}
public File getArchiveBaseDirectory()
{
return project.getBasedir();
}
public List<String> getFilters()
{
return null;
}
public List<MavenProject> getReactorProjects()
{
throw new UnsupportedOperationException();
}
public List<ArtifactRepository> getRemoteRepositories()
{
return Collections.<ArtifactRepository>emptyList();
}
public boolean isDryRun()
{
return false;
}
public boolean isIgnoreDirFormatExtensions()
{
return true;
}
public boolean isIgnoreMissingDescriptor()
{
return false;
}
public MavenSession getMavenSession()
{
return session;
}
public String getArchiverConfig()
{
// we won't do any archiving
return null;
}
public MavenFileFilter getMavenFileFilter()
{
return fileFilter;
}
public boolean isUpdateOnly()
{
return false;
}
public boolean isUseJvmChmod()
{
return false;
}
public boolean isIgnorePermissions()
{
return false;
}
public void setDescriptorFile( String assemblyDescriptorFile )
{
this.assemblyDescriptorFile = assemblyDescriptorFile;
}
public void setDescriptorRef( String assemblyDescriptorRef )
{
this.assemblyDescriptorRef = assemblyDescriptorRef;
}
}
| |
package com.getirkit.example.activity;
/**
* Created by eqiglii on 2016/1/27.
*/
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Calendar;
import android.app.Activity;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.app.TimePickerDialog;
import android.app.TimePickerDialog.OnTimeSetListener;
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.widget.SimpleCursorAdapter;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.TimePicker;
import com.getirkit.example.R;
import com.getirkit.example.adapter.SignalListAdapter;
import com.getirkit.example.fragment.SelectScheduleActionDialogFragment;
import com.getirkit.irkit.IRKit;
public class ScheduleActivity extends AppCompatActivity
implements SelectScheduleActionDialogFragment.SelectScheduleActionDialogFragmentListener {
public static final String TAG = ScheduleActivity.class.getSimpleName();
TimePicker myTimePicker;
Button buttonstartSetDialog;
TextView textAlarmPrompt;
ListView scheduleListView;
TimePickerDialog timePickerDialog;
private int selectedSignalPosition = -1;
private String signalName = "";
private String click_filename = "";
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setResult(RESULT_CANCELED);
    Intent intent = getIntent();
    Bundle args = intent.getExtras();
    // Prefer restored state over the launching Intent (e.g. after rotation).
    if (savedInstanceState != null) {
        args = savedInstanceState;
    }
    setContentView(R.layout.activity_schedule);
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    if (args == null) {
        throw new IllegalArgumentException("extras are not passed via Intent");
    }
    selectedSignalPosition = args.getInt("selectedSignalPosition");
    signalName = args.getString("signalName");
    // BUG FIX: the original used reference comparison (signalName == ""),
    // which is practically never true for a String read from a Bundle, so
    // the validation below could never fire.  Compare by content, and also
    // reject a missing (null) extra explicitly.
    if (signalName == null || signalName.isEmpty() || selectedSignalPosition == -1) {
        throw new IllegalArgumentException("signal attribute is not passed via Intent");
    }
    textAlarmPrompt = (TextView)findViewById(R.id.alarmprompt);
    buttonstartSetDialog = (Button)findViewById(R.id.startSetDialog);
    buttonstartSetDialog.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            textAlarmPrompt.setText("");
            openTimePickerDialog(true);
        }
    });
    displaySchedule();
}
private void openTimePickerDialog(boolean is24r){
Calendar calendar = Calendar.getInstance();
timePickerDialog = new TimePickerDialog(
ScheduleActivity.this,
onTimeSetListener,
calendar.get(Calendar.HOUR_OF_DAY),
calendar.get(Calendar.MINUTE),
is24r);
timePickerDialog.setTitle("Set Alarm Schedule");
timePickerDialog.show();
}
OnTimeSetListener onTimeSetListener
= new OnTimeSetListener(){
@Override
public void onTimeSet(TimePicker view, int hourofDay, int minute) {
addSchedule(hourofDay, minute);
};
};
/**
 * Registers a daily repeating alarm at the given wall-clock time for the
 * currently selected signal, and persists the selected-signal index to a
 * private file named "&lt;signalName&gt;,&lt;HHMM&gt;" so the receiver can
 * recover it when the alarm fires.
 */
public void addSchedule(int hour, int minute) {
if (selectedSignalPosition == -1) {
return;
}
// Encode the time as HHMM (e.g. 6:30 -> 630) to key this schedule.
// NOTE(review): this only distinguishes schedules by time of day, so two
// schedules at the same time collide -- confirm whether that is intended.
int scheduleTime = hour*100 + minute; // scheduled time must be unique in order to avoid multiple alarm conflicts
Calendar calNow = Calendar.getInstance();
Calendar calSet = (Calendar) calNow.clone();
calSet.set(Calendar.HOUR_OF_DAY, hour);
calSet.set(Calendar.MINUTE, minute);
calSet.set(Calendar.SECOND, 0);
calSet.set(Calendar.MILLISECOND, 0);
if(calSet.compareTo(calNow) <= 0){
//Today Set time passed, count to tomorrow
calSet.add(Calendar.DATE, 1);
}
// Use AlarmManager for managing alarms
AlarmManager alarmMgr = (AlarmManager) getSystemService(Context.ALARM_SERVICE);
Intent intent = new Intent(this, AlarmReceiver.class);
// NOTE(review): (pos + 1) * scheduleTime is not guaranteed unique -- e.g.
// position 0 at 12:60-encoded 1260 collides with position 1 at 6:30 (630*2).
// Verify collisions cannot occur with real data.
int alarmId = (selectedSignalPosition + 1) * scheduleTime; // identity of the alarm, must be unique
// the alarm shall identify a file that is going to persist the "selectedSignalPosition" value, for that particular alarm
String filename = signalName + "," + String.valueOf(scheduleTime); // filename is "signalName,630"
intent.putExtra("filename", filename);
// With setInexactRepeating(), you have to use one of the AlarmManager interval
// constants--in this case, AlarmManager.INTERVAL_DAY.
// NOTE(review): flag 0 must become FLAG_IMMUTABLE (or FLAG_MUTABLE) when
// targeting API 31+ -- confirm the project's targetSdkVersion.
PendingIntent alarmIntent = PendingIntent.getBroadcast(this, alarmId, intent, 0);
// NOTE(review): since API 19, setRepeating() is delivered inexactly --
// confirm that approximate firing times are acceptable here.
alarmMgr.setRepeating (AlarmManager.RTC_WAKEUP, calSet.getTimeInMillis(),
AlarmManager.INTERVAL_DAY, alarmIntent);
// 2. Persist the "selectedSignalPosition" into the alarm-identified-specific file
String string = String.valueOf(selectedSignalPosition); // filecontent is "selectedSignalPosition"
FileOutputStream outputStream;
try {
outputStream = openFileOutput(filename, Context.MODE_PRIVATE);
outputStream.write(string.getBytes());
outputStream.close();
} catch (Exception e) {
e.printStackTrace();
}
textAlarmPrompt.setText(
"\n\n***\n"
+ "Alarm is set@ " + calSet.getTime() + "\n"
+ "***\n");
// refresh the schedule listview
displaySchedule();
}
public void displaySchedule() {
// Read the alarm list from the private file
final String[] filenames = fileList();
// Display the view list
scheduleListView = (ListView) findViewById(R.id.schedule__listview);
// Define a new Adapter
// First parameter - Context
// Second parameter - Layout for the row
// Third parameter - ID of the TextView to which the data is written
// Forth - the Array of data
ArrayAdapter<String> adapter = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1, android.R.id.text1, filenames);
scheduleListView.setAdapter(adapter);
scheduleListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
String itemValue = (String) scheduleListView.getItemAtPosition(position);
click_filename = itemValue;
SelectScheduleActionDialogFragment dialog = new SelectScheduleActionDialogFragment();
dialog.show(getSupportFragmentManager(), "SelectScheduleActionDialogFragment");
}
});
}
/** Parses {@code s} as a decimal int; logs and returns 0 on failure. */
private static int parseIntOrZero(String s) {
    try {
        return Integer.parseInt(s);
    }
    catch(NumberFormatException e) {
        System.out.println("parse value is not valid : " + e);
        return 0;
    }
}
/**
 * Deletes the schedule the user tapped ({@code click_filename}): cancels
 * the matching repeating alarm and removes the file that persisted the
 * signal position, then refreshes the list.
 *
 * The duplicated parse-with-catch blocks of the original are factored into
 * {@link #parseIntOrZero(String)}; behavior (including the fallback to 0
 * and the console message) is unchanged.
 */
@Override
public void onSelectScheduleActionDelete() {
    if (click_filename.isEmpty()){
        Log.e(TAG, "File name is empty, nothing to be deleted");
        return;
    }
    // The filename encodes the schedule as "signalName,630" or just "630".
    int scheduleTime;
    if (click_filename.contains(",")) {
        String[] parts = click_filename.split(",");
        scheduleTime = parseIntOrZero(parts[1]); // "630" part
    } else {
        scheduleTime = parseIntOrZero(click_filename);
    }
    // Recover the persisted "selectedSignalPosition" written by addSchedule().
    String readfromFile = "";
    try {
        InputStream inputStream = openFileInput(click_filename);
        if ( inputStream != null ) {
            BufferedReader bufferedReader =
                new BufferedReader(new InputStreamReader(inputStream));
            StringBuilder stringBuilder = new StringBuilder();
            String receiveString;
            while ( (receiveString = bufferedReader.readLine()) != null ) {
                stringBuilder.append(receiveString);
            }
            inputStream.close();
            readfromFile = stringBuilder.toString();
        }
    }
    catch (FileNotFoundException e) {
        Log.e(TAG, "File not found: " + e.toString());
    } catch (IOException e) {
        Log.e(TAG, "Can not read file: " + e.toString());
    }
    int signalPosition = parseIntOrZero(readfromFile);
    // Rebuild the alarm id exactly as addSchedule() did so the matching
    // PendingIntent can be located and cancelled.
    int alarmId = (signalPosition + 1) * scheduleTime;
    AlarmManager alarmMgr = (AlarmManager) getSystemService(Context.ALARM_SERVICE);
    Intent intent = new Intent(this, AlarmReceiver.class);
    // NOTE(review): flag 0 must become FLAG_IMMUTABLE (or FLAG_MUTABLE) when
    // targeting API 31+ -- confirm the project's targetSdkVersion.
    PendingIntent alarmIntent = PendingIntent.getBroadcast(this, alarmId, intent, 0);
    if (alarmMgr!= null) {
        alarmMgr.cancel(alarmIntent);
    }
    // Also delete the file persisted for that scheduler.
    File file = new File(getFilesDir(), click_filename);
    boolean deleted = file.delete();
    Log.v("log_tag", "file deleted: " + deleted);
    // refresh the schedule listview
    displaySchedule();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Populate the action bar with this activity's menu items, then let the
    // superclass finish standard menu setup.
    getMenuInflater().inflate(R.menu.schedule_activity_actions, menu);
    return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // Handle the action-bar items; anything unrecognized is delegated to the
    // superclass at the end.
    final int itemId = item.getItemId();
    if (itemId == android.R.id.home) {
        // Up navigation: tell the caller we backed out to home.
        Intent homeResult = new Intent();
        homeResult.putExtra("back_to_home", true);
        setResult(RESULT_CANCELED, homeResult);
        finish();
        return true;
    }
    if (itemId == R.id.activity_schedule__action_close) {
        // Explicit close action: report "close" back to the caller.
        Bundle extras = new Bundle();
        extras.putString("action", "close");
        Intent closeResult = new Intent();
        closeResult.putExtras(extras);
        setResult(RESULT_OK, closeResult);
        finish();
        return true;
    }
    return super.onOptionsItemSelected(item);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
// Delegate to the framework first so any hosted fragments still receive the result.
super.onActivityResult(requestCode, resultCode, data);
// Placeholder: the RESULT_OK branch is intentionally empty — no child-activity
// result is consumed here yet. Fill in or remove when a result is needed.
if (resultCode == Activity.RESULT_OK) {
}
}
@Override
public void onBackPressed() {
// No custom handling — defers entirely to the framework's default back behavior.
super.onBackPressed();
}
}
| |
/*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.jackson;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDateTime;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.boot.autoconfigure.web.HttpMessageConvertersAutoConfiguration;
import org.springframework.boot.test.EnvironmentTestUtils;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.ObjectCodec;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy.LowerCaseWithUnderscoresStrategy;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.util.StdDateFormat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
/**
 * Tests for {@link JacksonAutoConfiguration}.
 *
 * @author Dave Syer
 * @author Oliver Gierke
 * @author Andy Wilkinson
 * @author Marcel Overdijk
 * @author Sebastien Deleuze
 */
public class JacksonAutoConfigurationTests {
// Rebuilt for every test in setUp() and closed in tearDown(), so each test
// registers exactly the configuration classes and properties it needs.
AnnotationConfigApplicationContext context;
@Before
public void setUp() {
this.context = new AnnotationConfigApplicationContext();
}
@After
public void tearDown() {
if (this.context != null) {
this.context.close();
}
}
@Test
public void registersJodaModuleAutomatically() {
this.context.register(JacksonAutoConfiguration.class);
this.context.refresh();
ObjectMapper objectMapper = this.context.getBean(ObjectMapper.class);
// Joda's LocalDateTime is serializable only once the Joda module is registered.
assertThat(objectMapper.canSerialize(LocalDateTime.class), is(true));
}
@Test
public void doubleModuleRegistration() throws Exception {
// DoubleModulesConfig registers the same module both on its own mapper and as
// a bean; serialization must still use the custom serializer exactly once.
this.context.register(DoubleModulesConfig.class,
HttpMessageConvertersAutoConfiguration.class);
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertEquals("{\"foo\":\"bar\"}", mapper.writeValueAsString(new Foo()));
}
/*
 * ObjectMapper does not contain method to get the date format of the mapper. See
 * https://github.com/FasterXML/jackson-databind/issues/559 If such a method will be
 * provided below tests can be simplified.
 */
@Test
public void noCustomDateFormat() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertThat(mapper.getDateFormat(), is(instanceOf(StdDateFormat.class)));
}
@Test
public void customDateFormat() throws Exception {
// A pattern string becomes a SimpleDateFormat with that exact pattern.
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.date-format:yyyyMMddHHmmss");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
DateFormat dateFormat = mapper.getDateFormat();
assertThat(dateFormat, is(instanceOf(SimpleDateFormat.class)));
assertThat(((SimpleDateFormat) dateFormat).toPattern(),
is(equalTo("yyyyMMddHHmmss")));
}
@Test
public void customJodaDateTimeFormat() throws Exception {
// The Joda-specific format applies to DateTime while java.util.Date keeps
// using the generic date-format property.
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.date-format:yyyyMMddHHmmss",
"spring.jackson.joda-date-time-format:yyyy-MM-dd HH:mm:ss");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
DateTime dateTime = new DateTime(1988, 6, 25, 20, 30, DateTimeZone.UTC);
assertEquals("\"1988-06-25 20:30:00\"", mapper.writeValueAsString(dateTime));
Date date = new DateTime(1988, 6, 25, 20, 30).toDate();
assertEquals("\"19880625203000\"", mapper.writeValueAsString(date));
}
@Test
public void customDateFormatClass() throws Exception {
// A fully-qualified class name is instantiated instead of treated as a pattern.
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils
.addEnvironment(
this.context,
"spring.jackson.date-format:org.springframework.boot.autoconfigure.jackson.JacksonAutoConfigurationTests.MyDateFormat");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertThat(mapper.getDateFormat(), is(instanceOf(MyDateFormat.class)));
}
// Fixture for customDateFormatClass(): must be public with a no-arg constructor.
public static class MyDateFormat extends SimpleDateFormat {
public MyDateFormat() {
super("yyyy-MM-dd HH:mm:ss");
}
}
@Test
public void noCustomPropertyNamingStrategy() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertThat(mapper.getPropertyNamingStrategy(), is(nullValue()));
}
@Test
public void customPropertyNamingStrategyField() throws Exception {
// The strategy can be named by PropertyNamingStrategy constant field...
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils
.addEnvironment(this.context,
"spring.jackson.property-naming-strategy:CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertThat(mapper.getPropertyNamingStrategy(),
is(instanceOf(LowerCaseWithUnderscoresStrategy.class)));
}
@Test
public void customPropertyNamingStrategyClass() throws Exception {
// ...or by fully-qualified class name; both resolve to the same strategy type.
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils
.addEnvironment(
this.context,
"spring.jackson.property-naming-strategy:com.fasterxml.jackson.databind.PropertyNamingStrategy.LowerCaseWithUnderscoresStrategy");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertThat(mapper.getPropertyNamingStrategy(),
is(instanceOf(LowerCaseWithUnderscoresStrategy.class)));
}
// Feature-toggle tests below follow one pattern: assert the feature's default,
// set the spring.jackson.* property to the opposite, and verify the mapper
// reflects the override.
@Test
public void enableSerializationFeature() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.serialization.indent_output:true");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertFalse(SerializationFeature.INDENT_OUTPUT.enabledByDefault());
assertTrue(mapper.getSerializationConfig().hasSerializationFeatures(
SerializationFeature.INDENT_OUTPUT.getMask()));
}
@Test
public void disableSerializationFeature() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.serialization.write_dates_as_timestamps:false");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertTrue(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS.enabledByDefault());
assertFalse(mapper.getSerializationConfig().hasSerializationFeatures(
SerializationFeature.WRITE_DATES_AS_TIMESTAMPS.getMask()));
}
@Test
public void enableDeserializationFeature() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.deserialization.use_big_decimal_for_floats:true");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertFalse(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS.enabledByDefault());
assertTrue(mapper.getDeserializationConfig().hasDeserializationFeatures(
DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS.getMask()));
}
@Test
public void disableDeserializationFeature() throws Exception {
// Property keys accept kebab-case as well as snake_case (see test above).
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.deserialization.fail-on-unknown-properties:false");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertTrue(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES.enabledByDefault());
assertFalse(mapper.getDeserializationConfig().hasDeserializationFeatures(
DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES.getMask()));
}
@Test
public void enableMapperFeature() throws Exception {
// Mapper features affect both the serialization and deserialization configs.
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.mapper.require_setters_for_getters:true");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertFalse(MapperFeature.REQUIRE_SETTERS_FOR_GETTERS.enabledByDefault());
assertTrue(mapper.getSerializationConfig().hasMapperFeatures(
MapperFeature.REQUIRE_SETTERS_FOR_GETTERS.getMask()));
assertTrue(mapper.getDeserializationConfig().hasMapperFeatures(
MapperFeature.REQUIRE_SETTERS_FOR_GETTERS.getMask()));
}
@Test
public void disableMapperFeature() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.mapper.use_annotations:false");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertTrue(MapperFeature.USE_ANNOTATIONS.enabledByDefault());
assertFalse(mapper.getDeserializationConfig().hasMapperFeatures(
MapperFeature.USE_ANNOTATIONS.getMask()));
assertFalse(mapper.getSerializationConfig().hasMapperFeatures(
MapperFeature.USE_ANNOTATIONS.getMask()));
}
@Test
public void enableParserFeature() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.parser.allow_single_quotes:true");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertFalse(JsonParser.Feature.ALLOW_SINGLE_QUOTES.enabledByDefault());
assertTrue(mapper.getFactory().isEnabled(JsonParser.Feature.ALLOW_SINGLE_QUOTES));
}
@Test
public void disableParserFeature() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.parser.auto_close_source:false");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertTrue(JsonParser.Feature.AUTO_CLOSE_SOURCE.enabledByDefault());
assertFalse(mapper.getFactory().isEnabled(JsonParser.Feature.AUTO_CLOSE_SOURCE));
}
@Test
public void enableGeneratorFeature() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.generator.write_numbers_as_strings:true");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertFalse(JsonGenerator.Feature.WRITE_NUMBERS_AS_STRINGS.enabledByDefault());
assertTrue(mapper.getFactory().isEnabled(
JsonGenerator.Feature.WRITE_NUMBERS_AS_STRINGS));
}
@Test
public void disableGeneratorFeature() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.generator.auto_close_target:false");
this.context.refresh();
ObjectMapper mapper = this.context.getBean(ObjectMapper.class);
assertTrue(JsonGenerator.Feature.AUTO_CLOSE_TARGET.enabledByDefault());
assertFalse(mapper.getFactory()
.isEnabled(JsonGenerator.Feature.AUTO_CLOSE_TARGET));
}
@Test
public void defaultObjectMapperBuilder() throws Exception {
this.context.register(JacksonAutoConfiguration.class);
this.context.refresh();
Jackson2ObjectMapperBuilder builder = this.context
.getBean(Jackson2ObjectMapperBuilder.class);
ObjectMapper mapper = builder.build();
assertTrue(MapperFeature.DEFAULT_VIEW_INCLUSION.enabledByDefault());
assertFalse(mapper.getDeserializationConfig().isEnabled(
MapperFeature.DEFAULT_VIEW_INCLUSION));
// NOTE(review): the two assertions below repeat the two above verbatim —
// harmless duplication, kept unchanged here.
assertTrue(MapperFeature.DEFAULT_VIEW_INCLUSION.enabledByDefault());
assertFalse(mapper.getDeserializationConfig().isEnabled(
MapperFeature.DEFAULT_VIEW_INCLUSION));
assertFalse(mapper.getSerializationConfig().isEnabled(
MapperFeature.DEFAULT_VIEW_INCLUSION));
assertTrue(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES.enabledByDefault());
assertFalse(mapper.getDeserializationConfig().isEnabled(
DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES));
}
@Test
public void moduleBeansAndWellKnownModulesAreRegisteredWithTheObjectMapperBuilder() {
this.context.register(ModuleConfig.class, JacksonAutoConfiguration.class);
this.context.refresh();
ObjectMapper objectMapper = this.context.getBean(
Jackson2ObjectMapperBuilder.class).build();
// CustomModule records every codec it is registered with (see class below).
assertThat(this.context.getBean(CustomModule.class).getOwners(),
hasItem((ObjectCodec) objectMapper));
assertThat(objectMapper.canSerialize(LocalDateTime.class), is(true));
}
@Test
public void defaultSerializationInclusion() {
this.context.register(JacksonAutoConfiguration.class);
this.context.refresh();
ObjectMapper objectMapper = this.context.getBean(
Jackson2ObjectMapperBuilder.class).build();
assertThat(objectMapper.getSerializationConfig().getSerializationInclusion(),
is(JsonInclude.Include.ALWAYS));
}
@Test
public void customSerializationInclusion() {
this.context.register(JacksonAutoConfiguration.class);
EnvironmentTestUtils.addEnvironment(this.context,
"spring.jackson.serialization-inclusion:non_null");
this.context.refresh();
ObjectMapper objectMapper = this.context.getBean(
Jackson2ObjectMapperBuilder.class).build();
assertThat(objectMapper.getSerializationConfig().getSerializationInclusion(),
is(JsonInclude.Include.NON_NULL));
}
// ---- Test fixtures -------------------------------------------------------
@Configuration
protected static class MockObjectMapperConfig {
@Bean
@Primary
public ObjectMapper objectMapper() {
return mock(ObjectMapper.class);
}
}
@Configuration
protected static class ModuleConfig {
@Bean
public CustomModule jacksonModule() {
return new CustomModule();
}
}
// Registers the same serializer module both as a bean and directly on a
// @Primary mapper, to exercise double registration.
@Configuration
protected static class DoubleModulesConfig {
@Bean
public Module jacksonModule() {
SimpleModule module = new SimpleModule();
module.addSerializer(Foo.class, new JsonSerializer<Foo>() {
@Override
public void serialize(Foo value, JsonGenerator jgen,
SerializerProvider provider) throws IOException,
JsonProcessingException {
jgen.writeStartObject();
jgen.writeStringField("foo", "bar");
jgen.writeEndObject();
}
});
return module;
}
@Bean
@Primary
public ObjectMapper objectMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.registerModule(jacksonModule());
return mapper;
}
}
protected static class Foo {
private String name;
private Foo() {
}
static Foo create() {
return new Foo();
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
}
protected static class Bar {
private String propertyName;
public String getPropertyName() {
return this.propertyName;
}
public void setPropertyName(String propertyName) {
this.propertyName = propertyName;
}
}
// Module that remembers every ObjectCodec it was registered with, so tests can
// verify the auto-configured mapper picked it up.
private static class CustomModule extends SimpleModule {
private Set<ObjectCodec> owners = new HashSet<ObjectCodec>();
@Override
public void setupModule(SetupContext context) {
this.owners.add(context.getOwner());
}
Set<ObjectCodec> getOwners() {
return this.owners;
}
}
}
| |
/**
* A doubly linked list implementation.
*
* @author William Fiset, william.alexandre.fiset@gmail.com
*/
package com.williamfiset.algorithms.datastructures.linkedlist;
public class DoublyLinkedList<T> implements Iterable<T> {

  // Number of elements currently stored.
  private int size = 0;
  private Node<T> head = null;
  private Node<T> tail = null;

  /** Internal node holding one element plus links to both neighbours. */
  private static class Node<T> {
    private T data;
    private Node<T> prev, next;

    public Node(T data, Node<T> prev, Node<T> next) {
      this.data = data;
      this.prev = prev;
      this.next = next;
    }

    @Override
    public String toString() {
      return data.toString();
    }
  }

  /** Empties this linked list, unlinking every node to help the GC, O(n). */
  public void clear() {
    Node<T> trav = head;
    while (trav != null) {
      Node<T> next = trav.next;
      trav.prev = trav.next = null;
      trav.data = null;
      trav = next;
    }
    // trav is already null after the loop; only head/tail need resetting
    // (the previous "head = tail = trav = null" assignment to trav was dead).
    head = tail = null;
    size = 0;
  }

  /** Returns the number of elements in this linked list. */
  public int size() {
    return size;
  }

  /** Returns true when this linked list holds no elements. */
  public boolean isEmpty() {
    return size() == 0;
  }

  /** Adds an element to the tail of the linked list, O(1). */
  public void add(T elem) {
    addLast(elem);
  }

  /** Adds a node to the tail of the linked list, O(1). */
  public void addLast(T elem) {
    if (isEmpty()) {
      head = tail = new Node<T>(elem, null, null);
    } else {
      tail.next = new Node<T>(elem, tail, null);
      tail = tail.next;
    }
    size++;
  }

  /** Adds an element to the beginning of this linked list, O(1). */
  public void addFirst(T elem) {
    if (isEmpty()) {
      head = tail = new Node<T>(elem, null, null);
    } else {
      head.prev = new Node<T>(elem, null, head);
      head = head.prev;
    }
    size++;
  }

  /**
   * Inserts an element at the specified index, O(n).
   *
   * @param index position in [0, size]; 0 prepends, size appends
   * @param data the element to insert
   * @throws IllegalArgumentException if {@code index} is out of range.
   *     (Previously declared as a raw checked {@code Exception}; the unchecked
   *     type matches {@link #removeAt(int)} and remains catchable by any caller
   *     that caught {@code Exception}.)
   */
  public void addAt(int index, T data) {
    if (index < 0 || index > size) {
      throw new IllegalArgumentException("Illegal Index");
    }
    if (index == 0) {
      addFirst(data);
      return;
    }
    if (index == size) {
      addLast(data);
      return;
    }
    // Walk to the node just before the insertion point.
    Node<T> temp = head;
    for (int i = 0; i < index - 1; i++) {
      temp = temp.next;
    }
    Node<T> newNode = new Node<>(data, temp, temp.next);
    temp.next.prev = newNode;
    temp.next = newNode;
    size++;
  }

  /**
   * Returns the value of the first node, O(1).
   *
   * @throws RuntimeException if the list is empty
   */
  public T peekFirst() {
    if (isEmpty()) throw new RuntimeException("Empty list");
    return head.data;
  }

  /**
   * Returns the value of the last node, O(1).
   *
   * @throws RuntimeException if the list is empty
   */
  public T peekLast() {
    if (isEmpty()) throw new RuntimeException("Empty list");
    return tail.data;
  }

  /**
   * Removes and returns the value at the head of the linked list, O(1).
   *
   * @throws RuntimeException if the list is empty
   */
  public T removeFirst() {
    if (isEmpty()) throw new RuntimeException("Empty list");
    // Extract the data at the head and advance the head pointer.
    T data = head.data;
    head = head.next;
    --size;
    if (isEmpty()) {
      tail = null; // the list became empty
    } else {
      head.prev = null; // unlink the removed node for the GC
    }
    return data;
  }

  /**
   * Removes and returns the value at the tail of the linked list, O(1).
   *
   * @throws RuntimeException if the list is empty
   */
  public T removeLast() {
    if (isEmpty()) throw new RuntimeException("Empty list");
    // Extract the data at the tail and move the tail pointer back one node.
    T data = tail.data;
    tail = tail.prev;
    --size;
    if (isEmpty()) {
      head = null; // the list became empty
    } else {
      tail.next = null; // unlink the removed node for the GC
    }
    return data;
  }

  // Removes an arbitrary (non-null) node from the linked list, O(1).
  private T remove(Node<T> node) {
    // Nodes at either end are handled by the dedicated end-removal methods.
    if (node.prev == null) return removeFirst();
    if (node.next == null) return removeLast();
    // Make the pointers of adjacent nodes skip over 'node'.
    node.next.prev = node.prev;
    node.prev.next = node.next;
    T data = node.data;
    // Memory cleanup.
    node.data = null;
    node = node.prev = node.next = null;
    --size;
    return data;
  }

  /**
   * Removes and returns the value at a particular index, O(n).
   *
   * @throws IllegalArgumentException if {@code index} is out of [0, size)
   */
  public T removeAt(int index) {
    if (index < 0 || index >= size) {
      throw new IllegalArgumentException();
    }
    int i;
    Node<T> trav;
    // Search from whichever end of the list is closer to the index.
    if (index < size / 2) {
      for (i = 0, trav = head; i != index; i++) {
        trav = trav.next;
      }
    } else {
      for (i = size - 1, trav = tail; i != index; i--) {
        trav = trav.prev;
      }
    }
    return remove(trav);
  }

  /**
   * Removes the first occurrence of {@code obj} (null elements supported), O(n).
   *
   * @return true if an element was removed
   */
  public boolean remove(Object obj) {
    if (obj == null) {
      // Search for a null element.
      for (Node<T> trav = head; trav != null; trav = trav.next) {
        if (trav.data == null) {
          remove(trav);
          return true;
        }
      }
    } else {
      // Search for a non-null object via equals().
      for (Node<T> trav = head; trav != null; trav = trav.next) {
        if (obj.equals(trav.data)) {
          remove(trav);
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Returns the index of the first occurrence of {@code obj} (null supported),
   * or -1 when absent, O(n).
   */
  public int indexOf(Object obj) {
    int index = 0;
    Node<T> trav = head;
    if (obj == null) {
      for (; trav != null; trav = trav.next, index++) {
        if (trav.data == null) {
          return index;
        }
      }
    } else {
      for (; trav != null; trav = trav.next, index++) {
        if (obj.equals(trav.data)) {
          return index;
        }
      }
    }
    return -1;
  }

  /** Returns true when {@code obj} is contained within the linked list, O(n). */
  public boolean contains(Object obj) {
    return indexOf(obj) != -1;
  }

  /**
   * Returns a read-only iterator over the elements from head to tail.
   * Not fail-fast: concurrent modification is not detected.
   */
  @Override
  public java.util.Iterator<T> iterator() {
    return new java.util.Iterator<T>() {
      private Node<T> trav = head;

      @Override
      public boolean hasNext() {
        return trav != null;
      }

      @Override
      public T next() {
        // Honor the Iterator contract: previously an exhausted iterator failed
        // with a NullPointerException instead of NoSuchElementException.
        if (trav == null) throw new java.util.NoSuchElementException();
        T data = trav.data;
        trav = trav.next;
        return data;
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException();
      }
    };
  }

  /** Renders the list as "[ a, b, c ]". */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("[ ");
    Node<T> trav = head;
    while (trav != null) {
      sb.append(trav.data);
      if (trav.next != null) {
        sb.append(", ");
      }
      trav = trav.next;
    }
    sb.append(" ]");
    return sb.toString();
  }
}
| |
/*******************************************************************************
* Copyright Duke Comprehensive Cancer Center and SemanticBits
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/c3pr/LICENSE.txt for details.
******************************************************************************/
package edu.duke.cabig.c3pr.domain;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.ManyToMany;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Transient;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Parameter;
import org.hibernate.annotations.Where;
import edu.duke.cabig.c3pr.constants.OrganizationIdentifierTypeEnum;
import edu.duke.cabig.c3pr.utils.StringUtils;
import gov.nih.nci.cabig.ctms.collections.LazyListHelper;
/**
* The Class HealthcareSite.
*
* @author Priyatam
* @author Kulasekaran
*
* Currently points to the newly renamed organizations table instead of the healthcareSite table.
*/
@Entity
@Table(name = "organizations")
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
@GenericGenerator(name = "id-generator", strategy = "native", parameters = { @Parameter(name = "sequence", value = "organizations_id_seq") })
@Where(clause = "retired_indicator = 'false'")
public abstract class HealthcareSite extends Organization implements Comparable<HealthcareSite> {
/** The healthcare site investigators. */
private List<HealthcareSiteInvestigator> healthcareSiteInvestigators = new ArrayList<HealthcareSiteInvestigator>();
/** The personUsers. */
private List<PersonUser> personUsers = new ArrayList<PersonUser>();
/** The participants. */
private List<Participant> participants = new ArrayList<Participant>();
/** The external organizations (not persisted; see the @Transient getter). */
protected List<HealthcareSite> externalOrganizations = new ArrayList<HealthcareSite>();
/**
 * Sets the external organizations.
 *
 * <p>Stores the caller's list reference directly (no defensive copy), so later
 * mutations of the passed-in list are visible through this site.
 *
 * @param externalOrganizations the new external organizations
 */
public void setExternalOrganizations(List<HealthcareSite> externalOrganizations) {
this.externalOrganizations = externalOrganizations;
}
/** The lazy list helper managing the lazily-instantiated InvestigatorGroup list. */
private LazyListHelper lazyListHelper;
/**
 * Instantiates a new healthcare site.
 *
 * <p>Registers a bi-directional factory so the lazily-managed
 * {@code InvestigatorGroup} list can create members that point back at this
 * site (exposed via {@code getInvestigatorGroups()}).
 */
public HealthcareSite() {
lazyListHelper = new LazyListHelper();
lazyListHelper.add(InvestigatorGroup.class,
new BiDirectionalInstantiateFactory<InvestigatorGroup>(
InvestigatorGroup.class, this, "HealthcareSite", new Class[] { HealthcareSite.class }));
}
/**
 * Adds the healthcare site investigator and sets this site as its owner,
 * keeping both sides of the bidirectional association in sync.
 *
 * @param hcsi the hcsi
 */
public void addHealthcareSiteInvestigator(HealthcareSiteInvestigator hcsi) {
healthcareSiteInvestigators.add(hcsi);
hcsi.setHealthcareSite(this);
}
/**
 * Adds the investigator group to the lazily-managed group list.
 *
 * @param invGroup the inv group
 */
public void addInvestigatorGroup(InvestigatorGroup invGroup) {
this.getInvestigatorGroups().add(invGroup);
}
/**
 * Removes the healthcare site investigator.
 *
 * <p>NOTE(review): unlike the add, this does not clear the investigator's
 * back-reference to this site — confirm whether that is intentional.
 *
 * @param hcsi the hcsi
 */
public void removeHealthcareSiteInvestigator(HealthcareSiteInvestigator hcsi) {
healthcareSiteInvestigators.remove(hcsi);
}
/**
 * Gets the healthcare site investigators.
 *
 * @return the live (mutable) list of healthcare site investigators
 */
@OneToMany(mappedBy = "healthcareSite", fetch = FetchType.LAZY, orphanRemoval=true)
@Cascade(value = { CascadeType.ALL})
public List<HealthcareSiteInvestigator> getHealthcareSiteInvestigators() {
return healthcareSiteInvestigators;
}
/**
 * Sets the healthcare site investigators.
 *
 * @param healthcareSiteInvestigators the new healthcare site investigators
 */
public void setHealthcareSiteInvestigators(
List<HealthcareSiteInvestigator> healthcareSiteInvestigators) {
this.healthcareSiteInvestigators = healthcareSiteInvestigators;
}
/**
 * Gets the person users.
 *
 * <p>Inverse side of the many-to-many mapped by {@code healthcareSites}.
 *
 * @return the live (mutable) list of person users
 */
@ManyToMany(mappedBy = "healthcareSites" )
@Cascade(value = { CascadeType.ALL})
public List<PersonUser> getPersonUsers() {
return personUsers;
}
/**
 * Sets the personUsers.
 *
 * @param personUsers the new personUsers
 */
public void setPersonUsers(List<PersonUser> personUsers) {
this.personUsers = personUsers;
}
/**
 * Adds the person user to this site's list (does not update the inverse side).
 *
 * @param personUser the personUser
 */
public void addPersonUser(PersonUser personUser) {
personUsers.add(personUser);
}
/**
 * Removes the person user from this site's list.
 *
 * @param personUser the personUser
 */
public void removePersonUser(PersonUser personUser) {
personUsers.remove(personUser);
}
/**
 * Gets the CTEP code from this organization's assigned identifiers.
 *
 * @return the value of the CTEP-typed identifier, or the empty string when no
 *     CTEP identifier is present
 */
@Transient
public String getCtepCode() {
    // Typed for-each over the assigned identifiers (same shape as setCtepCode)
    // replaces the previous raw Iterator and cast. Comparing the enum with '=='
    // is identity-safe and, unlike the equals() call it replaces, cannot throw
    // a NullPointerException when getType() is null.
    for (OrganizationAssignedIdentifier identifier : getOrganizationAssignedIdentifiers()) {
        if (identifier.getType() == OrganizationIdentifierTypeEnum.CTEP) {
            return identifier.getValue();
        }
    }
    return "";
}
/**
 * Gets the Primary code.
 *
 * @return the value of the first identifier flagged as primary, or null when
 *     none of the identifiers assigned to this organization is primary
 */
@Transient
public String getPrimaryIdentifier() {
// NOTE(review): raw Iterator plus per-element cast; presumably
// getIdentifiersAssignedToOrganization() is (or once was) a raw collection —
// confirm its declared element type before tightening the generics here.
Iterator iter = getIdentifiersAssignedToOrganization().iterator();
Identifier identifier = null;
while(iter.hasNext()){
identifier = (Identifier)iter.next();
if(identifier.isPrimary()){
return identifier.getValue();
}
}
return null;
}
/**
 * Gets the NCI code from this organization's assigned identifiers.
 *
 * @return the value of the NCI-typed identifier, or the empty string when no
 *     NCI identifier is present
 */
@Transient
public String getNCICode() {
    // Typed for-each (same shape as setNCICode) replaces the previous raw
    // Iterator and cast; '==' on the enum constant is identity-safe and,
    // unlike the equals() call it replaces, cannot NPE when getType() is null.
    for (OrganizationAssignedIdentifier identifier : getOrganizationAssignedIdentifiers()) {
        if (identifier.getType() == OrganizationIdentifierTypeEnum.NCI) {
            return identifier.getValue();
        }
    }
    return "";
}
/**
 * Sets the CTEP code in the IdentifiersAssignedToOrganization, marking a newly
 * created identifier as primary.
 *
 * @param ctepCode the new CTEP code; ignored when null or empty
 *     (the previous javadoc incorrectly documented a {@code nciInstituteCode}
 *     parameter)
 */
public void setCtepCode(String ctepCode) {
    setCtepCode(ctepCode, true);
}

/**
 * Creates or updates the CTEP-typed organization identifier.
 *
 * @param ctepCode the new CTEP code; ignored when null or empty
 * @param primaryIndicator whether a newly created identifier is flagged
 *     primary (an existing identifier keeps its current flag)
 */
public void setCtepCode(String ctepCode, Boolean primaryIndicator) {
    if (StringUtils.isEmpty(ctepCode)) {
        return; // nothing to record
    }
    // Reuse the existing CTEP identifier when one is already attached.
    OrganizationAssignedIdentifier identifier = null;
    for (OrganizationAssignedIdentifier tempIdentifier : getOrganizationAssignedIdentifiers()) {
        if (tempIdentifier.getType() == OrganizationIdentifierTypeEnum.CTEP) {
            identifier = tempIdentifier; // already correctly typed; the old cast was redundant
            break;
        }
    }
    if (identifier == null) {
        identifier = new OrganizationAssignedIdentifier();
        identifier.setType(OrganizationIdentifierTypeEnum.CTEP);
        identifier.setPrimaryIndicator(primaryIndicator);
        getIdentifiersAssignedToOrganization().add(identifier);
    }
    identifier.setValue(ctepCode);
}
/**
 * Creates or updates the NCI-typed organization identifier.
 *
 * @param nciCode the new NCI code; ignored when null or empty
 * @param primaryIndicator whether a newly created identifier is flagged
 *     primary (an existing identifier keeps its current flag)
 */
public void setNCICode(String nciCode, boolean primaryIndicator) {
    if (StringUtils.isEmpty(nciCode)) {
        return; // nothing to record
    }
    // Reuse the existing NCI identifier when one is already attached.
    OrganizationAssignedIdentifier identifier = null;
    for (OrganizationAssignedIdentifier tempIdentifier : getOrganizationAssignedIdentifiers()) {
        if (tempIdentifier.getType() == OrganizationIdentifierTypeEnum.NCI) {
            identifier = tempIdentifier; // already correctly typed; the old cast was redundant
            break;
        }
    }
    if (identifier == null) {
        identifier = new OrganizationAssignedIdentifier();
        identifier.setType(OrganizationIdentifierTypeEnum.NCI);
        identifier.setPrimaryIndicator(primaryIndicator);
        getIdentifiersAssignedToOrganization().add(identifier);
    }
    identifier.setValue(nciCode);
}

/**
 * Sets the NCI code, leaving a newly created identifier non-primary.
 *
 * @param nciCode the new NCI code; ignored when null or empty
 */
public void setNCICode(String nciCode) {
    setNCICode(nciCode, false);
}
/* (non-Javadoc)
 * @see java.lang.Comparable#compareTo(java.lang.Object)
 */
// NOTE(review): this violates the Comparable contract — for two unequal sites
// a and b, both a.compareTo(b) and b.compareTo(a) return 1, so antisymmetry
// (sgn(a.compareTo(b)) == -sgn(b.compareTo(a))) is broken and sorted
// collections (TreeSet/TreeMap, Collections.sort) behave unpredictably with
// this type. A proper fix should order by a stable key such as the primary
// identifier; left unchanged here because callers may depend on the current
// (arbitrary) ordering — confirm call sites before fixing.
public int compareTo(HealthcareSite o) {
if (this.equals((HealthcareSite) o)) return 0;
else return 1;
}
/* (non-Javadoc)
 * @see edu.duke.cabig.c3pr.domain.Organization#hashCode()
 */
// Mixes the primary identifier into the superclass hash; consistent with
// equals() below, which also keys on the primary identifier.
@Override
public int hashCode() {
final int PRIME = 31;
int result = super.hashCode();
result = PRIME * result + ((getPrimaryIdentifier() == null) ? 0 : getPrimaryIdentifier().hashCode());
return result;
}
/* (non-Javadoc)
 * @see edu.duke.cabig.c3pr.domain.Organization#equals(java.lang.Object)
 */
// Two sites are equal when the superclass considers them equal, they are the
// same concrete class, and their primary identifiers match (both possibly null).
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!super.equals(obj)) return false;
if (getClass() != obj.getClass()) return false;
final HealthcareSite other = (HealthcareSite) obj;
if (getPrimaryIdentifier() == null) {
if (other.getPrimaryIdentifier() != null) return false;
}
else if (!getPrimaryIdentifier().equals(other.getPrimaryIdentifier())) return false;
return true;
}
/**
 * Gets the internal (Hibernate-mapped) investigator groups list, backed by
 * the lazy-list helper. All operations cascade and orphaned groups are
 * removed when dropped from the list.
 *
 * @return the investigator groups internal list
 */
@OneToMany(mappedBy = "healthcareSite", fetch = FetchType.LAZY, orphanRemoval=true)
@Cascade(value = { CascadeType.ALL})
public List<InvestigatorGroup> getInvestigatorGroupsInternal() {
return lazyListHelper.getInternalList(InvestigatorGroup.class);
}
/**
 * Sets the internal investigator groups list; used by Hibernate when
 * hydrating the entity.
 *
 * @param investigatorGroups the new internal investigator groups list
 */
public void setInvestigatorGroupsInternal(List<InvestigatorGroup> investigatorGroups) {
this.lazyListHelper.setInternalList(InvestigatorGroup.class, investigatorGroups);
}
/**
 * Gets the investigator groups as the lazy facade over the internal list;
 * not mapped directly (@Transient).
 *
 * @return the investigator groups
 */
@Transient
public List<InvestigatorGroup> getInvestigatorGroups() {
return lazyListHelper.getLazyList(InvestigatorGroup.class);
}
/**
 * Sets the investigator groups.
 * Intentionally a no-op: the collection is managed through the internal
 * lazy list (see getInvestigatorGroupsInternal), so the argument is
 * discarded. NOTE(review): presumably present only to satisfy the bean
 * contract for frameworks — confirm no caller expects this to store.
 *
 * @param investigatorGroups the new investigator groups (ignored)
 */
public void setInvestigatorGroups(List<InvestigatorGroup> investigatorGroups) {
}
/**
 * Gets the participants associated with this site. Inverse side of a
 * many-to-many mapping (mapped by "healthcareSites"); only LOCK cascades.
 *
 * @return the participants
 */
@ManyToMany(mappedBy = "healthcareSites" )
@Cascade(value = { CascadeType.LOCK})
public List<Participant> getParticipants() {
return participants;
}
/**
 * Sets the participants.
 *
 * @param participants the new participants list
 */
public void setParticipants(List<Participant> participants) {
this.participants = participants;
}
/**
 * Gets the external organizations linked to this site. Not persisted
 * (@Transient); returns the internal mutable list, which
 * addExternalOrganization appends to directly.
 *
 * @return the external organizations
 */
@Transient
public List<HealthcareSite> getExternalOrganizations() {
return externalOrganizations;
}
/**
 * Adds the external organization to this site's transient external list.
 *
 * @param externalHealthcareSite the external healthcare site to add
 */
public void addExternalOrganization(HealthcareSite externalHealthcareSite){
this.getExternalOrganizations().add(externalHealthcareSite);
}
/**
 * Renders the site as "name (primaryIdentifier)".
 */
@Override
public String toString() {
    StringBuilder rendered = new StringBuilder();
    rendered.append(getName()).append(" (").append(getPrimaryIdentifier()).append(')');
    return rendered.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.marshal;
import org.apache.cassandra.serializers.TypeSerializer;
import org.apache.cassandra.serializers.BytesSerializer;
import org.apache.cassandra.serializers.MarshalException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * A class avoiding code duplication between CompositeType and
 * DynamicCompositeType.
 *
 * The two differ only in that for DynamicCompositeType the comparators are
 * encoded in the column name, at the front of each component.
 */
public abstract class AbstractCompositeType extends AbstractType<ByteBuffer>
{
    /** Reads an unsigned 16-bit big-endian length; advances bb by two bytes. */
    protected static int getShortLength(ByteBuffer bb)
    {
        int length = (bb.get() & 0xFF) << 8;
        return length | (bb.get() & 0xFF);
    }

    /** Writes length as an unsigned 16-bit big-endian value; advances bb by two bytes. */
    protected static void putShortLength(ByteBuffer bb, int length)
    {
        bb.put((byte) ((length >> 8) & 0xFF));
        bb.put((byte) (length & 0xFF));
    }

    /**
     * Returns a zero-copy view of the next {@code length} bytes of bb and
     * advances bb past them; the returned buffer shares bb's content.
     */
    protected static ByteBuffer getBytes(ByteBuffer bb, int length)
    {
        ByteBuffer copy = bb.duplicate();
        copy.limit(copy.position() + length);
        bb.position(bb.position() + length);
        return copy;
    }

    /** Reads a short length prefix, then returns a view of that many bytes. */
    protected static ByteBuffer getWithShortLength(ByteBuffer bb)
    {
        int length = getShortLength(bb);
        return getBytes(bb, length);
    }

    /**
     * Compares two encoded composite names component by component; nulls
     * sort first. When two components compare equal, the end-of-component
     * byte (-1, 0 or 1) that follows each component decides the order,
     * letting a prefix sort before or after names extending it.
     */
    public int compare(ByteBuffer o1, ByteBuffer o2)
    {
        if (o1 == null)
            return o2 == null ? 0 : -1;
        // Fixed: a non-null o1 with a null o2 used to throw NullPointerException.
        if (o2 == null)
            return 1;

        ByteBuffer bb1 = o1.duplicate();
        ByteBuffer bb2 = o2.duplicate();
        int i = 0;

        ByteBuffer previous = null;

        while (bb1.remaining() > 0 && bb2.remaining() > 0)
        {
            AbstractType<?> comparator = getComparator(i, bb1, bb2);

            ByteBuffer value1 = getWithShortLength(bb1);
            ByteBuffer value2 = getWithShortLength(bb2);

            int cmp = comparator.compareCollectionMembers(value1, value2, previous);
            if (cmp != 0)
                return cmp;

            previous = value1;

            // Components are equal: order by the end-of-component bytes.
            byte b1 = bb1.get();
            byte b2 = bb2.get();
            if (b1 < 0)
            {
                if (b2 >= 0)
                    return -1;
            }
            else if (b1 > 0)
            {
                if (b2 <= 0)
                    return 1;
            }
            else
            {
                // b1 == 0
                if (b2 != 0)
                    return -b2;
            }

            ++i;
        }

        if (bb1.remaining() == 0)
            return bb2.remaining() == 0 ? 0 : -1;

        // bb1.remaining() > 0 && bb2.remaining() == 0
        return 1;
    }

    /**
     * Splits a composite column name into its components.
     */
    public ByteBuffer[] split(ByteBuffer name)
    {
        List<ByteBuffer> l = new ArrayList<ByteBuffer>();
        ByteBuffer bb = name.duplicate();
        int i = 0;
        while (bb.remaining() > 0)
        {
            getComparator(i++, bb);
            l.add(getWithShortLength(bb));
            bb.get(); // skip end-of-component
        }
        return l.toArray(new ByteBuffer[l.size()]);
    }

    /** A single decoded component: its comparator plus its raw value. */
    public static class CompositeComponent
    {
        public AbstractType<?> comparator;
        public ByteBuffer value;

        public CompositeComponent( AbstractType<?> comparator, ByteBuffer value )
        {
            this.comparator = comparator;
            this.value = value;
        }
    }

    /**
     * Decodes a composite name into (comparator, value) pairs, one per
     * component.
     */
    public List<CompositeComponent> deconstruct( ByteBuffer bytes )
    {
        List<CompositeComponent> list = new ArrayList<CompositeComponent>();

        ByteBuffer bb = bytes.duplicate();
        int i = 0;

        while (bb.remaining() > 0)
        {
            // Fixed: was declared with the raw AbstractType type.
            AbstractType<?> comparator = getComparator(i, bb);
            ByteBuffer value = getWithShortLength(bb);

            list.add( new CompositeComponent(comparator,value) );

            bb.get(); // skip the end-of-component byte; not relevant here

            ++i;
        }
        return list;
    }

    /*
     * Escapes all occurrences of the ':' character from the input, replacing them by "\:".
     * Furthermore, if the last character is '\' or '!', a '!' is appended.
     */
    static String escape(String input)
    {
        if (input.isEmpty())
            return input;

        String res = input.replaceAll(":", "\\\\:");
        char last = res.charAt(res.length() - 1);
        return last == '\\' || last == '!' ? res + '!' : res;
    }

    /*
     * Reverses the effect of escape().
     * Replaces all occurrences of "\:" by ":" and removes the last character if it is '!'.
     */
    static String unescape(String input)
    {
        if (input.isEmpty())
            return input;

        String res = input.replaceAll("\\\\:", ":");
        char last = res.charAt(res.length() - 1);
        return last == '!' ? res.substring(0, res.length() - 1) : res;
    }

    /*
     * Splits the input on the ':' character, unless the previous character is '\'.
     */
    static List<String> split(String input)
    {
        if (input.isEmpty())
            return Collections.<String>emptyList();

        List<String> res = new ArrayList<String>();
        int prev = 0;
        for (int i = 0; i < input.length(); i++)
        {
            if (input.charAt(i) != ':' || (i > 0 && input.charAt(i-1) == '\\'))
                continue;

            res.add(input.substring(prev, i));
            prev = i + 1;
        }
        res.add(input.substring(prev, input.length()));
        return res;
    }

    /**
     * Renders a composite name as colon-separated, escaped component strings;
     * a trailing ":!" marks a non-zero end-of-component byte, after which
     * rendering stops.
     */
    public String getString(ByteBuffer bytes)
    {
        StringBuilder sb = new StringBuilder();
        ByteBuffer bb = bytes.duplicate();
        int i = 0;
        while (bb.remaining() > 0)
        {
            if (bb.remaining() != bytes.remaining())
                sb.append(":");

            AbstractType<?> comparator = getAndAppendComparator(i, bb, sb);
            ByteBuffer value = getWithShortLength(bb);

            sb.append(escape(comparator.getString(value)));

            byte b = bb.get();
            if (b != 0)
            {
                sb.append(":!");
                break;
            }
            ++i;
        }
        return sb.toString();
    }

    /**
     * Parses the output format of getString back into an encoded composite
     * name.
     */
    public ByteBuffer fromString(String source)
    {
        List<String> parts = split(source);
        List<ByteBuffer> components = new ArrayList<ByteBuffer>(parts.size());
        List<ParsedComparator> comparators = new ArrayList<ParsedComparator>(parts.size());
        int totalLength = 0, i = 0;
        boolean lastByteIsOne = false;

        for (String part : parts)
        {
            // A trailing "!" part means the final end-of-component byte is 1.
            if (part.equals("!"))
            {
                lastByteIsOne = true;
                break;
            }

            ParsedComparator p = parseComparator(i, part);
            AbstractType<?> type = p.getAbstractType();
            part = p.getRemainingPart();

            ByteBuffer component = type.fromString(unescape(part));
            // comparator + 2-byte length prefix + value + end-of-component byte
            totalLength += p.getComparatorSerializedSize() + 2 + component.remaining() + 1;
            components.add(component);
            comparators.add(p);
            ++i;
        }

        ByteBuffer bb = ByteBuffer.allocate(totalLength);
        i = 0;
        for (ByteBuffer component : components)
        {
            comparators.get(i).serializeComparator(bb);
            putShortLength(bb, component.remaining());
            bb.put(component); // it's ok to consume component as we won't use it anymore
            bb.put((byte)0);
            ++i;
        }
        if (lastByteIsOne)
            bb.put(bb.limit() - 1, (byte)1);

        bb.rewind();
        return bb;
    }

    /**
     * Validates the encoded structure: every component must carry a full
     * length prefix, its value bytes and an end-of-component byte, the
     * per-component comparator must accept the value, and a non-zero
     * end-of-component byte may only occur on the last component.
     */
    public void validate(ByteBuffer bytes) throws MarshalException
    {
        ByteBuffer bb = bytes.duplicate();

        int i = 0;
        ByteBuffer previous = null;
        while (bb.remaining() > 0)
        {
            AbstractType<?> comparator = validateComparator(i, bb);

            if (bb.remaining() < 2)
                throw new MarshalException("Not enough bytes to read value size of component " + i);

            int length = getShortLength(bb);
            if (bb.remaining() < length)
                throw new MarshalException("Not enough bytes to read value of component " + i);

            ByteBuffer value = getBytes(bb, length);
            comparator.validateCollectionMember(value, previous);

            // Fixed: the two messages below were missing a space before the index.
            if (bb.remaining() == 0)
                throw new MarshalException("Not enough bytes to read the end-of-component byte of component " + i);

            byte b = bb.get();
            if (b != 0 && bb.remaining() != 0)
                throw new MarshalException("Invalid bytes remaining after an end-of-component at component " + i);

            previous = value;
            ++i;
        }
    }

    public abstract ByteBuffer decompose(Object... objects);

    /** Composite names are already ByteBuffers; composition is the identity. */
    public ByteBuffer compose(ByteBuffer bytes)
    {
        return bytes;
    }

    /** Inverse of compose; also the identity. */
    public ByteBuffer decompose(ByteBuffer value)
    {
        return value;
    }

    @Override
    public TypeSerializer<ByteBuffer> getSerializer()
    {
        return BytesSerializer.instance;
    }

    /**
     * @return the comparator for the given component. static CompositeType will consult
     * @param i DynamicCompositeType will read the type information from @param bb
     * @param bb name of type definition
     */
    abstract protected AbstractType<?> getComparator(int i, ByteBuffer bb);

    /**
     * Adds DynamicCompositeType type information from @param bb1 to @param bb2.
     * @param i is ignored.
     */
    abstract protected AbstractType<?> getComparator(int i, ByteBuffer bb1, ByteBuffer bb2);

    /**
     * Adds type information from @param bb to @param sb. @param i is ignored.
     */
    abstract protected AbstractType<?> getAndAppendComparator(int i, ByteBuffer bb, StringBuilder sb);

    /**
     * Like getComparator, but validates that @param i does not exceed the defined range.
     */
    abstract protected AbstractType<?> validateComparator(int i, ByteBuffer bb) throws MarshalException;

    /**
     * Used by fromString.
     */
    abstract protected ParsedComparator parseComparator(int i, String part);

    /** A parsed per-component comparator, as produced by parseComparator. */
    protected static interface ParsedComparator
    {
        AbstractType<?> getAbstractType();
        String getRemainingPart();
        int getComparatorSerializedSize();
        void serializeComparator(ByteBuffer bb);
    }
}
| |
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.test.database.auto;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javassist.util.proxy.Proxy;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.serialization.serializer.object.OObjectSerializer;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import com.orientechnologies.orient.object.db.OObjectDatabasePool;
import com.orientechnologies.orient.object.db.OObjectDatabaseTx;
import com.orientechnologies.orient.object.enhancement.OObjectEntitySerializer;
import com.orientechnologies.orient.object.serialization.OObjectSerializerContext;
import com.orientechnologies.orient.object.serialization.OObjectSerializerHelper;
import com.orientechnologies.orient.test.domain.base.JavaCascadeDeleteTestClass;
import com.orientechnologies.orient.test.domain.base.JavaComplexTestClass;
import com.orientechnologies.orient.test.domain.base.JavaSimpleTestClass;
import com.orientechnologies.orient.test.domain.base.Planet;
import com.orientechnologies.orient.test.domain.base.Satellite;
import com.orientechnologies.orient.test.domain.business.Address;
import com.orientechnologies.orient.test.domain.business.Child;
import com.orientechnologies.orient.test.domain.business.City;
import com.orientechnologies.orient.test.domain.business.Country;
import com.orientechnologies.orient.test.domain.customserialization.Sec;
import com.orientechnologies.orient.test.domain.customserialization.SecurityRole;
import com.orientechnologies.orient.test.domain.whiz.Profile;
@Test(groups = { "record-object" })
public class ObjectTreeTest {
private OObjectDatabaseTx database;
protected long startRecordNumber;
private long beginCities;
private String url;
protected int serialized;
protected int unserialized;
/**
 * Simple POJO used to exercise custom-type serialization: holds a custom
 * value directly and inside a list, a set and a map.
 */
public class CustomClass {
    private String name;
    private Long age;
    private CustomType custom;
    private List<CustomType> customTypeList;
    private Set<CustomType> customTypeSet;
    private Map<Long, CustomType> customTypeMap;

    /** No-arg constructor for reflective instantiation. */
    public CustomClass() {
    }

    public CustomClass(String iName, Long iAge, CustomType iCustom, List<CustomType> iCustomTypeList,
        Set<CustomType> iCustomTypeSet, Map<Long, CustomType> iCustomTypeMap) {
        this.name = iName;
        this.age = iAge;
        this.custom = iCustom;
        this.customTypeList = iCustomTypeList;
        this.customTypeSet = iCustomTypeSet;
        this.customTypeMap = iCustomTypeMap;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Long getAge() {
        return this.age;
    }

    public void setAge(Long age) {
        this.age = age;
    }

    public CustomType getCustom() {
        return this.custom;
    }

    public void setCustom(CustomType custom) {
        this.custom = custom;
    }

    public List<CustomType> getCustomTypeList() {
        return this.customTypeList;
    }

    public void setCustomTypeList(List<CustomType> customTypeList) {
        this.customTypeList = customTypeList;
    }

    public Set<CustomType> getCustomTypeSet() {
        return this.customTypeSet;
    }

    public void setCustomTypeSet(Set<CustomType> customTypeSet) {
        this.customTypeSet = customTypeSet;
    }

    public Map<Long, CustomType> getCustomTypeMap() {
        return this.customTypeMap;
    }

    public void setCustomTypeMap(Map<Long, CustomType> customTypeMap) {
        this.customTypeMap = customTypeMap;
    }
}
/**
 * Minimal custom type: wraps a single long that the test's custom
 * serializer stores as a Long.
 */
public class CustomType {
    public long value;

    /** No-arg constructor for reflective instantiation. */
    public CustomType() {
    }

    public CustomType(Long iFieldValue) {
        this.value = iFieldValue;
    }

    public long getValue() {
        return this.value;
    }

    public void setValue(long value) {
        this.value = value;
    }
}
/** Default constructor used when TestNG supplies no url parameter. */
public ObjectTreeTest() {
}
/**
 * Parameterized constructor; falls back to an in-memory database when no
 * "url" parameter is configured.
 *
 * @param iURL the database url (e.g. "memory:test")
 */
@Parameters(value = "url")
public ObjectTreeTest(@Optional(value = "memory:test") String iURL) {
url = iURL;
}
/** Closes the shared database connection after all tests have run. */
@AfterClass
public void close() {
database.close();
}
/**
 * Opens (or, for the in-memory url, creates) the object database and
 * registers the domain entity packages used by the tests.
 */
@BeforeClass
public void open() {
database = new OObjectDatabaseTx(url);
database.getEntityManager().registerEntityClasses("com.orientechnologies.orient.test.domain.business");
database.getEntityManager().registerEntityClasses("com.orientechnologies.orient.test.domain.whiz");
database.getEntityManager().registerEntityClasses("com.orientechnologies.orient.test.domain.base");
// A memory database starts empty each run and must be created, not opened.
if ("memory:test".equals(database.getURL())) {
database.create();
} else {
database.open("admin", "admin");
}
}
/**
 * Drains the global object-database pool ten times in a row: acquires as
 * many connections as the pool allows, then releases them all, checking
 * the pool can be exhausted and refilled repeatedly.
 */
@Test
public void testPool() throws IOException {
    final int poolSize = OObjectDatabasePool.global().getMaxSize();
    final OObjectDatabaseTx[] acquired = new OObjectDatabaseTx[poolSize];
    for (int round = 0; round < 10; ++round) {
        // Take every connection the pool will hand out...
        for (int slot = 0; slot < acquired.length; ++slot) {
            acquired[slot] = OObjectDatabasePool.global().acquire(url, "admin", "admin");
        }
        // ...then return them all.
        for (OObjectDatabaseTx db : acquired) {
            db.close();
        }
    }
}
/**
 * Persists two linked profiles (Garibaldi invited Bonaparte) that share the
 * same City instance, and checks the Profile cluster grew by exactly two.
 */
@Test
public void testPersonSaving() {
final long beginProfiles = database.countClusterElements("Profile");
beginCities = database.countClusterElements("City");
Country italy = database.newInstance(Country.class, "Italy");
Profile garibaldi = database.newInstance(Profile.class, "GGaribaldi", "Giuseppe", "Garibaldi", null);
garibaldi.setLocation(database.newInstance(Address.class, "Residence", database.newInstance(City.class, italy, "Rome"),
"Piazza Navona, 1"));
Profile bonaparte = database.newInstance(Profile.class, "NBonaparte", "Napoleone", "Bonaparte", garibaldi);
// Reuse Garibaldi's City so both profiles point at one shared record.
bonaparte.setLocation(database.newInstance(Address.class, "Residence", garibaldi.getLocation().getCity(),
"Piazza di Spagna, 111"));
// Saving bonaparte reaches garibaldi through the invitedBy link.
database.save(bonaparte);
Assert.assertEquals(database.countClusterElements("Profile"), beginProfiles + 2);
}
/**
 * Verifies that persisting the two profiles above created exactly one new
 * City record (Rome was shared, not duplicated).
 */
@Test(dependsOnMethods = "testPersonSaving")
public void testCitySaving() {
Assert.assertEquals(database.countClusterElements("City"), beginCities + 1);
}
/**
 * Confirms that two distinct Profile proxies loaded by query resolve their
 * shared City to the very same underlying ODocument instance.
 */
@Test(dependsOnMethods = "testCitySaving")
public void testCityEquality() {
List<Profile> resultset = database.query(new OSQLSynchQuery<Object>("select from profile where location.city.name = 'Rome'"));
Assert.assertEquals(resultset.size(), 2);
Profile p1 = resultset.get(0);
Profile p2 = resultset.get(1);
// Different proxy objects...
Assert.assertNotSame(p1, p2);
// ...but the same backing document for the shared city.
Assert.assertSame(OObjectEntitySerializer.getDocument((Proxy) p1.getLocation().getCity()),
OObjectEntitySerializer.getDocument((Proxy) p2.getLocation().getCity()));
}
/**
 * Persists two profiles that invite each other (a circular invitedBy link)
 * and share one Address; the save must terminate despite the cycle.
 */
@Test(dependsOnMethods = "testCityEquality")
public void testSaveCircularLink() {
Profile winston = database.newInstance(Profile.class, "WChurcill", "Winston", "Churcill", null);
winston.setLocation(database.newInstance(Address.class, "Residence",
database.newInstance(City.class, database.newInstance(Country.class, "England"), "London"), "unknown"));
Profile nicholas = database.newInstance(Profile.class, "NChurcill", "Nicholas ", "Churcill", winston);
nicholas.setLocation(winston.getLocation());
// Create the cycle: each profile is the other's inviter.
nicholas.setInvitedBy(winston);
winston.setInvitedBy(nicholas);
database.save(nicholas);
}
/**
 * Loads every Profile and walks each one's invitedBy link, printing the
 * nick and, when present, the inviter's name — exercising traversal of the
 * circular references created by the previous test.
 */
@Test(dependsOnMethods = "testSaveCircularLink")
public void testQueryCircular() {
    List<Profile> profiles = database.query(new OSQLSynchQuery<ODocument>("select * from Profile"));
    for (Profile profile : profiles) {
        System.out.println(profile.getNick());
        Profile inviter = profile.getInvitedBy();
        if (inviter != null) {
            System.out.println("- parent: " + inviter.getName() + " " + inviter.getSurname());
        }
    }
}
/**
 * Saves one profile with two followers that each point back at it, recording
 * the starting record count for the follow-up query test.
 */
@Test(dependsOnMethods = "testQueryCircular")
public void testSaveMultiCircular() {
startRecordNumber = database.countClusterElements("Profile");
Profile bObama = database.newInstance(Profile.class, "ThePresident", "Barack", "Obama", null);
bObama.setLocation(database.newInstance(Address.class, "Residence",
database.newInstance(City.class, database.newInstance(Country.class, "Hawaii"), "Honolulu"), "unknown"));
bObama.addFollower(database.newInstance(Profile.class, "PresidentSon1", "Malia Ann", "Obama", bObama));
bObama.addFollower(database.newInstance(Profile.class, "PresidentSon2", "Natasha", "Obama", bObama));
database.save(bObama);
}
/**
 * Verifies the three records saved by testSaveMultiCircular and that each
 * follower's followings collection links back to the followed profile.
 */
@SuppressWarnings("unchecked")
@Test(dependsOnMethods = "testSaveMultiCircular")
public void testQueryMultiCircular() {
Assert.assertEquals(database.countClusterElements("Profile"), startRecordNumber + 3);
List<ODocument> result = database.getUnderlying()
.command(new OSQLSynchQuery<ODocument>("select * from Profile where name = 'Barack' and surname = 'Obama'")).execute();
Assert.assertEquals(result.size(), 1);
for (ODocument profile : result) {
System.out.println(profile.field("name") + " " + profile.field("surname"));
final Collection<ODocument> followers = profile.field("followers");
if (followers != null) {
for (ODocument follower : followers) {
// Back-link check: the follower must list this profile among its followings.
Assert.assertTrue(((Collection<ODocument>) follower.field("followings")).contains(profile));
// NOTE(review): the "(parent: ...)" portion prints the follower's own name
// again rather than the followed profile's — looks like a copy-paste slip
// in the log output; confirm whether profile.field(...) was intended.
System.out.println("- follower: " + follower.field("name") + " " + follower.field("surname") + " (parent: "
+ follower.field("name") + " " + follower.field("surname") + ")");
}
}
}
}
/**
 * Fills an embedded set with 100 children, saves, reloads the record after
 * a database reopen, and checks the set survived the round trip intact.
 */
@Test(dependsOnMethods = "testQueryMultiCircular")
public void testSetFieldSize() {
JavaComplexTestClass test = database.newInstance(JavaComplexTestClass.class);
for (int i = 0; i < 100; i++) {
Child child = database.newInstance(Child.class);
child.setName(String.valueOf(i));
test.getSet().add(child);
}
Assert.assertNotNull(test.getSet());
Assert.assertEquals(test.getSet().size(), 100);
database.save(test);
//Assert.assertEquals(test.getSet().size(), 100);
ORID rid = database.getIdentity(test);
// Reopen so the next load comes from storage rather than the live cache.
close();
open();
test = database.load(rid);
Assert.assertNotNull(test.getSet());
Iterator<Child> it = test.getSet().iterator();
while (it.hasNext()) {
Child child = it.next();
// Every element's name must round-trip as one of the original 0..99 values.
Assert.assertNotNull(child.getName());
Assert.assertTrue(Integer.valueOf(child.getName()) < 100);
Assert.assertTrue(Integer.valueOf(child.getName()) >= 0);
}
Assert.assertEquals(test.getSet().size(), 100);
database.delete(test);
}
/**
 * Exercises cascade delete of a single linked object in three scenarios:
 * deleting the owner, nulling the link, and replacing the link with a new
 * record — in each case the detached child must be gone afterwards.
 */
@Test(dependsOnMethods = "testQueryMultiCircular")
public void testCascadeDeleteSimpleObject() {
JavaCascadeDeleteTestClass test = database.newInstance(JavaCascadeDeleteTestClass.class);
JavaSimpleTestClass simple = database.newInstance(JavaSimpleTestClass.class);
simple.setText("asdasd");
test.setSimpleClass(simple);
database.save(test);
ORID testRid = database.getRecordByUserObject(test, false).getIdentity();
ORID simpleRid = database.getRecordByUserObject(simple, false).getIdentity();
close();
open();
// Deleting the owner must cascade to the linked simple object.
database.delete(testRid);
simple = database.load(simpleRid);
Assert.assertNull(simple);
// TEST SET NULL
close();
open();
test = database.newInstance(JavaCascadeDeleteTestClass.class);
simple = database.newInstance(JavaSimpleTestClass.class);
simple.setText("asdasd");
test.setSimpleClass(simple);
database.save(test);
testRid = database.getRecordByUserObject(test, false).getIdentity();
simpleRid = database.getRecordByUserObject(simple, false).getIdentity();
close();
open();
// Clearing the link and saving must delete the now-orphaned child.
test.setSimpleClass(null);
database.save(test);
simple = database.load(simpleRid);
Assert.assertNull(simple);
database.delete(test);
// TEST CHANGE NEW RECORD
test = database.newInstance(JavaCascadeDeleteTestClass.class);
simple = database.newInstance(JavaSimpleTestClass.class);
simple.setText("asdasd");
test.setSimpleClass(simple);
database.save(test);
testRid = database.getRecordByUserObject(test, false).getIdentity();
simpleRid = database.getRecordByUserObject(simple, false).getIdentity();
close();
open();
// Swapping in a different child must delete the one it replaced.
simple = database.newInstance(JavaSimpleTestClass.class);
database.save(simple);
test.setSimpleClass(simple);
database.save(test);
simple = database.load(simpleRid);
Assert.assertNull(simple);
database.delete(test);
}
/**
 * Exercises cascade delete through list and set fields: deleting the owner
 * removes every child; then element removals (direct, by index, and via
 * iterator) delete exactly the detached children on save, while re-added
 * children survive.
 */
@Test(dependsOnMethods = "testCascadeDeleteSimpleObject")
public void testCascadeDeleteCollections() {
JavaCascadeDeleteTestClass test = database.newInstance(JavaCascadeDeleteTestClass.class);
Child listChild1 = database.newInstance(Child.class);
listChild1.setName("list1");
test.getList().add(listChild1);
Child listChild2 = database.newInstance(Child.class);
listChild2.setName("list2");
test.getList().add(listChild2);
Child listChild3 = database.newInstance(Child.class);
listChild3.setName("list3");
test.getList().add(listChild3);
Child setChild1 = database.newInstance(Child.class);
setChild1.setName("set1");
test.getSet().add(setChild1);
Child setChild2 = database.newInstance(Child.class);
setChild2.setName("set2");
test.getSet().add(setChild2);
Child setChild3 = database.newInstance(Child.class);
setChild3.setName("set3");
test.getSet().add(setChild3);
database.save(test);
ORID testRid = database.getRecordByUserObject(test, false).getIdentity();
ORID list1Rid = database.getRecordByUserObject(listChild1, false).getIdentity();
ORID list2Rid = database.getRecordByUserObject(listChild2, false).getIdentity();
ORID list3Rid = database.getRecordByUserObject(listChild3, false).getIdentity();
ORID set1Rid = database.getRecordByUserObject(setChild1, false).getIdentity();
ORID set2Rid = database.getRecordByUserObject(setChild2, false).getIdentity();
ORID set3Rid = database.getRecordByUserObject(setChild3, false).getIdentity();
close();
open();
// Deleting the owner must cascade to every list and set element.
database.delete(testRid);
listChild1 = database.load(list1Rid);
listChild2 = database.load(list2Rid);
listChild3 = database.load(list3Rid);
setChild1 = database.load(set1Rid);
setChild2 = database.load(set2Rid);
setChild3 = database.load(set3Rid);
Assert.assertNull(listChild1);
Assert.assertNull(listChild2);
Assert.assertNull(listChild3);
Assert.assertNull(setChild1);
Assert.assertNull(setChild2);
Assert.assertNull(setChild3);
// LIST UPDATE TEST
test = database.newInstance(JavaCascadeDeleteTestClass.class);
listChild1 = database.newInstance(Child.class);
listChild1.setName("list1");
test.getList().add(listChild1);
listChild2 = database.newInstance(Child.class);
listChild2.setName("list2");
test.getList().add(listChild2);
listChild3 = database.newInstance(Child.class);
listChild3.setName("list3");
test.getList().add(listChild3);
Child listChild4 = database.newInstance(Child.class);
listChild4.setName("list4");
test.getList().add(listChild4);
setChild1 = database.newInstance(Child.class);
setChild1.setName("set1");
test.getSet().add(setChild1);
setChild2 = database.newInstance(Child.class);
setChild2.setName("set2");
test.getSet().add(setChild2);
setChild3 = database.newInstance(Child.class);
setChild3.setName("set3");
test.getSet().add(setChild3);
Child setChild4 = database.newInstance(Child.class);
setChild4.setName("set4");
test.getSet().add(setChild4);
database.save(test);
testRid = database.getRecordByUserObject(test, false).getIdentity();
list1Rid = database.getRecordByUserObject(listChild1, false).getIdentity();
list2Rid = database.getRecordByUserObject(listChild2, false).getIdentity();
list3Rid = database.getRecordByUserObject(listChild3, false).getIdentity();
ORID list4Rid = database.getRecordByUserObject(listChild4, false).getIdentity();
set1Rid = database.getRecordByUserObject(setChild1, false).getIdentity();
set2Rid = database.getRecordByUserObject(setChild2, false).getIdentity();
set3Rid = database.getRecordByUserObject(setChild3, false).getIdentity();
ORID set4Rid = database.getRecordByUserObject(setChild4, false).getIdentity();
close();
open();
test = database.load(testRid);
// Mutate the list: removals detach children; listChild4 is re-added, so it
// must survive the save. Removing index 0 and an iterator element drops
// two more.
test.getList().remove(listChild4);
test.getList().remove(0);
test.getList().remove(listChild3);
test.getList().add(listChild4);
Iterator<Child> it = test.getList().iterator();
it.next();
it.remove();
// Mutate the set the same way: setChild1 removed, setChild4 removed then
// re-added, and an iterator removal drops one of setChild2/setChild3.
test.getSet().remove(setChild1);
test.getSet().remove(setChild4);
Assert.assertFalse(test.getSet().contains(setChild1));
Assert.assertFalse(test.getSet().contains(setChild4));
it = test.getSet().iterator();
it.next();
it.remove();
Assert.assertTrue((!test.getSet().contains(setChild2) || !test.getSet().contains(setChild3)));
test.getSet().add(setChild4);
database.save(test);
// Detached children are deleted; re-added ones remain.
listChild1 = database.load(list1Rid);
listChild2 = database.load(list2Rid);
listChild3 = database.load(list3Rid);
listChild4 = database.load(list4Rid);
setChild1 = database.load(set1Rid);
setChild2 = database.load(set2Rid);
setChild3 = database.load(set3Rid);
setChild4 = database.load(set4Rid);
Assert.assertNull(listChild1);
Assert.assertNull(listChild2);
Assert.assertNull(listChild3);
Assert.assertNotNull(listChild4);
Assert.assertNull(setChild1);
Assert.assertTrue((setChild3 != null && setChild2 == null) || (setChild3 == null && setChild2 != null));
Assert.assertNotNull(setChild4);
database.delete(test);
}
/**
 * Exercises cascade delete through a map field: deleting the owner removes
 * every value; then key removals, value replacements and null puts delete
 * exactly the detached children on save.
 */
@Test(dependsOnMethods = "testCascadeDeleteCollections")
public void testCascadeDeleteMap() {
JavaCascadeDeleteTestClass test = database.newInstance(JavaCascadeDeleteTestClass.class);
Child mapChild1 = database.newInstance(Child.class);
mapChild1.setName("map1");
test.getChildren().put("1", mapChild1);
Child mapChild2 = database.newInstance(Child.class);
mapChild2.setName("map2");
test.getChildren().put("2", mapChild2);
Child mapChild3 = database.newInstance(Child.class);
mapChild3.setName("map3");
test.getChildren().put("3", mapChild3);
database.save(test);
ORID testRid = database.getRecordByUserObject(test, false).getIdentity();
ORID map1Rid = database.getRecordByUserObject(mapChild1, false).getIdentity();
ORID map2Rid = database.getRecordByUserObject(mapChild2, false).getIdentity();
ORID map3Rid = database.getRecordByUserObject(mapChild3, false).getIdentity();
close();
open();
// Deleting the owner must cascade to every map value.
database.delete(testRid);
mapChild1 = database.load(map1Rid);
mapChild2 = database.load(map2Rid);
mapChild3 = database.load(map3Rid);
Assert.assertNull(mapChild1);
Assert.assertNull(mapChild2);
Assert.assertNull(mapChild3);
close();
open();
// MAP UPDATE TEST
test = database.newInstance(JavaCascadeDeleteTestClass.class);
mapChild1 = database.newInstance(Child.class);
mapChild1.setName("map1");
test.getChildren().put("1", mapChild1);
mapChild2 = database.newInstance(Child.class);
mapChild2.setName("map2");
test.getChildren().put("2", mapChild2);
mapChild3 = database.newInstance(Child.class);
mapChild3.setName("map3");
test.getChildren().put("3", mapChild3);
Child mapChild4 = database.newInstance(Child.class);
mapChild4.setName("map4");
test.getChildren().put("4", mapChild4);
Child mapChild5 = database.newInstance(Child.class);
mapChild5.setName("map5");
test.getChildren().put("5", mapChild5);
database.save(test);
testRid = database.getIdentity(test);
map1Rid = database.getRecordByUserObject(mapChild1, false).getIdentity();
map2Rid = database.getRecordByUserObject(mapChild2, false).getIdentity();
map3Rid = database.getRecordByUserObject(mapChild3, false).getIdentity();
ORID map4Rid = database.getRecordByUserObject(mapChild4, false).getIdentity();
ORID map5Rid = database.getRecordByUserObject(mapChild5, false).getIdentity();
close();
open();
test = database.load(testRid);
Assert.assertNotNull(test.getChildren().get("1"));
Assert.assertNotNull(test.getChildren().get("2"));
Assert.assertNotNull(test.getChildren().get("3"));
Assert.assertNotNull(test.getChildren().get("4"));
Assert.assertNotNull(test.getChildren().get("5"));
// Rewire the map: child1 stays (re-put under "1" and "2"), child2/3/4 are
// displaced or removed, child5 survives because it is re-put under "3".
test.getChildren().remove("5");
test.getChildren().put("1", mapChild1);
test.getChildren().put("2", mapChild1);
test.getChildren().put("3", null);
test.getChildren().remove("4");
test.getChildren().put("3", mapChild5);
database.save(test);
mapChild1 = database.load(map1Rid);
mapChild2 = database.load(map2Rid);
mapChild3 = database.load(map3Rid);
mapChild4 = database.load(map4Rid);
mapChild5 = database.load(map5Rid);
Assert.assertNotNull(mapChild1);
Assert.assertNull(mapChild2);
Assert.assertNull(mapChild3);
Assert.assertNull(mapChild4);
Assert.assertNotNull(mapChild5);
database.delete(test);
}
/**
 * Registers a custom serializer mapping CustomType to Long and verifies,
 * via the serialized/unserialized counters, when conversion happens: once
 * per custom value on save, and lazily on first read of the field, list,
 * set and map entries.
 */
@Test(dependsOnMethods = "testPool")
public void testCustomTypes() {
OObjectSerializerContext serializerContext = new OObjectSerializerContext();
serializerContext.bind(new OObjectSerializer<CustomType, Long>() {
public Long serializeFieldValue(Class<?> itype, CustomType iFieldValue) {
serialized++;
return iFieldValue.value;
}
public CustomType unserializeFieldValue(Class<?> itype, Long iFieldValue) {
unserialized++;
return new CustomType(iFieldValue);
}
});
OObjectSerializerHelper.bindSerializerContext(null, serializerContext);
database.getEntityManager().registerEntityClass(CustomClass.class);
if (!database.getMetadata().getSchema().existsClass("CustomClass"))
database.getMetadata().getSchema().createClass("CustomClass");
List<CustomType> customTypesList = new ArrayList<CustomType>();
customTypesList.add(new CustomType(102L));
Set<CustomType> customTypeSet = new HashSet<CustomType>();
customTypeSet.add(new CustomType(103L));
Map<Long, CustomType> customTypeMap = new HashMap<Long, CustomType>();
customTypeMap.put(1L, new CustomType(104L));
CustomClass pojo = new CustomClass("test", 33L, new CustomType(101L), customTypesList, customTypeSet, customTypeMap);
// init counters
serialized = 0;
unserialized = 0;
// Saving converts the four CustomType values (field, list, set, map) once each.
pojo = database.save(pojo);
Assert.assertEquals(serialized, 4);
Assert.assertEquals(unserialized, 0);
pojo = database.reload(pojo);
Assert.assertEquals(unserialized, 0);
// First access of each container deserializes lazily, bumping the counter.
pojo.getCustom();
Assert.assertEquals(unserialized, 1);
Assert.assertTrue(pojo.getCustom() instanceof CustomType);
pojo.getCustomTypeList().iterator().next();
Assert.assertEquals(unserialized, 2);
Assert.assertTrue(pojo.getCustomTypeList().iterator().next() instanceof CustomType);
// NOTE(review): the decrement presumably compensates for the extra
// deserialization triggered by the assertion's second access — confirm.
unserialized--;
pojo.getCustomTypeSet().iterator().next();
Assert.assertEquals(unserialized, 3);
Assert.assertTrue(pojo.getCustomTypeSet().iterator().next() instanceof CustomType);
unserialized--;
pojo.getCustomTypeMap().get(1L);
Assert.assertEquals(serialized, 4);
Assert.assertEquals(unserialized, 4);
Assert.assertTrue(pojo.getCustomTypeMap().get(1L) instanceof CustomType);
}
@Test(dependsOnMethods = "testCustomTypes")
public void testCustomTypesDatabaseNewInstance() {
  // Same scenario as testCustomTypes (whose serializer registration is reused),
  // but the pojo is created through database.newInstance instead of "new".
  OObjectDatabaseTx database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
  ORID rid = null;
  try {
    // init counters
    serialized = 0;
    unserialized = 0;
    List<CustomType> customTypesList = new ArrayList<CustomType>();
    customTypesList.add(new CustomType(102L));
    Set<CustomType> customTypeSet = new HashSet<CustomType>();
    customTypeSet.add(new CustomType(103L));
    Map<Long, CustomType> customTypeMap = new HashMap<Long, CustomType>();
    customTypeMap.put(1L, new CustomType(104L));
    CustomClass pojo = database.newInstance(CustomClass.class, "test", 33L, new CustomType(101L), customTypesList, customTypeSet,
        customTypeMap);
    // newInstance serializes all 4 custom-typed values eagerly (no save needed).
    Assert.assertEquals(serialized, 4);
    Assert.assertEquals(unserialized, 0);
    pojo = database.save(pojo);
    rid = database.getIdentity(pojo);
    // Reopen the connection to force a fresh load from storage.
    database.close();
    database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
    pojo = database.load(rid);
    // Deserialization is lazy: the counter only moves when a field is accessed.
    Assert.assertEquals(unserialized, 0);
    pojo.getCustom();
    Assert.assertEquals(unserialized, 1);
    Assert.assertTrue(pojo.getCustom() instanceof CustomType);
    pojo.getCustomTypeList().iterator().next();
    Assert.assertEquals(unserialized, 2);
    Assert.assertTrue(pojo.getCustomTypeList().iterator().next() instanceof CustomType);
    // The assertTrue re-read the element (one extra unserialize); compensate so the
    // running count stays at "one access per container".
    unserialized--;
    pojo.getCustomTypeSet().iterator().next();
    Assert.assertEquals(unserialized, 3);
    Assert.assertTrue(pojo.getCustomTypeSet().iterator().next() instanceof CustomType);
    // Same compensation as for the list access above.
    unserialized--;
    pojo.getCustomTypeMap().get(1L);
    Assert.assertEquals(serialized, 4);
    Assert.assertEquals(unserialized, 4);
    Assert.assertTrue(pojo.getCustomTypeMap().get(1L) instanceof CustomType);
  } finally {
    // Always return the pooled connection.
    database.close();
  }
}
@Test(dependsOnMethods = "testCustomTypesDatabaseNewInstance")
public void testEnumListWithCustomTypes() {
  // Verifies that an enum stored through a custom serializer (SecurityRole <-> its
  // name() String) survives a save / reopen / load round trip inside a list field.
  OObjectDatabaseTx database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
  ORID rid = null;
  try {
    OObjectSerializerContext serializerContext = new OObjectSerializerContext();
    serializerContext.bind(new OObjectSerializer<SecurityRole, String>() {
      public Object serializeFieldValue(Class<?> type, SecurityRole role) {
        return role.name();
      }

      public Object unserializeFieldValue(Class<?> type, String str) {
        return SecurityRole.getByName(str);
      }
    });
    OObjectSerializerHelper.bindSerializerContext(null, serializerContext);
    database.getEntityManager().registerEntityClasses("com.orientechnologies.orient.test.domain.customserialization");
    Sec s = new Sec();
    s.getSecurityRoleList().add(SecurityRole.LOGIN);
    Assert.assertTrue(s.getSecurityRoleList().contains(SecurityRole.LOGIN));
    s = database.save(s);
    rid = database.getRecordByUserObject(s, false).getIdentity();
    // Reopen the connection to force a fresh load from storage.
    database.close();
    database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
    s = database.load(rid);
    Assert.assertTrue(s.getSecurityRoleList().contains(SecurityRole.LOGIN));
  } finally {
    // Always return the pooled connection.
    database.close();
  }
}
@Test(dependsOnMethods = "testEnumListWithCustomTypes")
public void childUpdateTest() {
  // Verifies that mutating a child object (Satellite) reached through its parent
  // (Planet) is persisted when only the parent is saved.
  OObjectDatabaseTx database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
  // try/finally guarantees the pooled connection is released even when an assertion
  // fails, matching the pattern used by testEnumListWithCustomTypes above.
  try {
    Planet p = database.newInstance(Planet.class);
    Satellite sat = database.newInstance(Satellite.class);
    p.setName("Earth");
    p.setDistanceSun(1000);
    sat.setDiameter(50);
    p.addSatellite(sat);
    database.save(p);
    ORID rid = database.getIdentity(p);

    // Reopen the connection to force a fresh load from storage.
    database.close();
    database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
    p = database.load(rid);
    sat = p.getSatellites().get(0);
    Assert.assertEquals(sat.getDiameter(), 50);
    Assert.assertEquals(p.getDistanceSun(), 1000);
    Assert.assertEquals(p.getName(), "Earth");

    // Mutate the child only, save the parent: the change must cascade.
    sat.setDiameter(500);
    database.save(p);

    database.close();
    database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
    p = database.load(rid);
    sat = p.getSatellites().get(0);
    Assert.assertEquals(sat.getDiameter(), 500);
    Assert.assertEquals(p.getDistanceSun(), 1000);
    Assert.assertEquals(p.getName(), "Earth");
  } finally {
    database.close();
  }
}
@Test(dependsOnMethods = "childUpdateTest")
public void childNLevelUpdateTest() {
  // Verifies that a mutation two levels deep (Planet -> Satellite -> near Planet ->
  // Satellite) is persisted when only the root object is saved.
  OObjectDatabaseTx database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
  // try/finally guarantees the pooled connection is released even when an assertion
  // fails, matching the pattern used by testEnumListWithCustomTypes above.
  try {
    Planet p = database.newInstance(Planet.class);
    Planet near = database.newInstance(Planet.class);
    Satellite sat = database.newInstance(Satellite.class);
    Satellite satNear = database.newInstance(Satellite.class);
    sat.setDiameter(50);
    sat.setNear(near);
    satNear.setDiameter(10);
    near.addSatellite(satNear);
    p.addSatellite(sat);
    database.save(p);
    ORID rid = database.getIdentity(p);

    // Reopen the connection to force a fresh load from storage.
    database.close();
    database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
    p = database.load(rid);
    sat = p.getSatellites().get(0);
    near = sat.getNear();
    satNear = near.getSatellites().get(0);
    Assert.assertEquals(satNear.getDiameter(), 10);

    // Mutate the deepest child only, save the root: the change must cascade.
    satNear.setDiameter(100);
    database.save(p);

    database.close();
    database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
    p = database.load(rid);
    sat = p.getSatellites().get(0);
    near = sat.getNear();
    satNear = near.getSatellites().get(0);
    Assert.assertEquals(satNear.getDiameter(), 100);
  } finally {
    database.close();
  }
}
@Test(dependsOnMethods = "childNLevelUpdateTest")
public void childMapUpdateTest() {
  // Same scenario as childUpdateTest, but the child Satellite is reached through a
  // map field keyed by the satellite name instead of a list.
  OObjectDatabaseTx database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
  // try/finally guarantees the pooled connection is released even when an assertion
  // fails, matching the pattern used by testEnumListWithCustomTypes above.
  try {
    Planet p = database.newInstance(Planet.class);
    p.setName("Earth");
    p.setDistanceSun(1000);
    Satellite sat = database.newInstance(Satellite.class);
    sat.setDiameter(50);
    sat.setName("Moon");
    p.addSatelliteMap(sat);
    database.save(p);
    Assert.assertEquals(p.getDistanceSun(), 1000);
    Assert.assertEquals(p.getName(), "Earth");
    ORID rid = database.getIdentity(p);

    // Reopen the connection to force a fresh load from storage.
    database.close();
    database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
    p = database.load(rid);
    sat = p.getSatellitesMap().get("Moon");
    Assert.assertEquals(p.getDistanceSun(), 1000);
    Assert.assertEquals(p.getName(), "Earth");
    Assert.assertEquals(sat.getDiameter(), 50);

    // Mutate the mapped child only, save the parent: the change must cascade.
    sat.setDiameter(500);
    database.save(p);

    database.close();
    database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
    p = database.load(rid);
    sat = p.getSatellitesMap().get("Moon");
    Assert.assertEquals(sat.getDiameter(), 500);
    Assert.assertEquals(p.getDistanceSun(), 1000);
    Assert.assertEquals(p.getName(), "Earth");
  } finally {
    database.close();
  }
}
@Test(dependsOnMethods = "childMapUpdateTest")
public void childMapNLevelUpdateTest() {
  // Same scenario as childNLevelUpdateTest, but both levels of children are reached
  // through map fields (Jupiter -> moon -> near Mercury -> moon).
  OObjectDatabaseTx database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
  // try/finally guarantees the pooled connection is released even when an assertion
  // fails, matching the pattern used by testEnumListWithCustomTypes above.
  try {
    Planet jupiter = database.newInstance(Planet.class);
    jupiter.setName("Jupiter");
    jupiter.setDistanceSun(3000);
    Planet mercury = database.newInstance(Planet.class);
    mercury.setName("Mercury");
    mercury.setDistanceSun(5000);
    Satellite jupiterMoon = database.newInstance(Satellite.class);
    Satellite mercuryMoon = database.newInstance(Satellite.class);
    jupiterMoon.setDiameter(50);
    jupiterMoon.setNear(mercury);
    jupiterMoon.setName("JupiterMoon");
    mercuryMoon.setDiameter(10);
    mercuryMoon.setName("MercuryMoon");
    mercury.addSatelliteMap(mercuryMoon);
    jupiter.addSatelliteMap(jupiterMoon);
    database.save(jupiter);
    ORID rid = database.getIdentity(jupiter);

    // Reopen the connection to force a fresh load from storage.
    database.close();
    database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
    jupiter = database.load(rid);
    jupiterMoon = jupiter.getSatellitesMap().get("JupiterMoon");
    mercury = jupiterMoon.getNear();
    mercuryMoon = mercury.getSatellitesMap().get("MercuryMoon");
    Assert.assertEquals(mercuryMoon.getDiameter(), 10);
    Assert.assertEquals(mercuryMoon.getName(), "MercuryMoon");
    Assert.assertEquals(jupiterMoon.getDiameter(), 50);
    Assert.assertEquals(jupiterMoon.getName(), "JupiterMoon");
    Assert.assertEquals(jupiter.getName(), "Jupiter");
    Assert.assertEquals(jupiter.getDistanceSun(), 3000);
    Assert.assertEquals(mercury.getName(), "Mercury");
    Assert.assertEquals(mercury.getDistanceSun(), 5000);

    // Mutate the deepest mapped child only, save the root: the change must cascade.
    mercuryMoon.setDiameter(100);
    database.save(jupiter);

    database.close();
    database = OObjectDatabasePool.global().acquire(url, "admin", "admin");
    jupiter = database.load(rid);
    jupiterMoon = jupiter.getSatellitesMap().get("JupiterMoon");
    mercury = jupiterMoon.getNear();
    mercuryMoon = mercury.getSatellitesMap().get("MercuryMoon");
    Assert.assertEquals(mercuryMoon.getDiameter(), 100);
    Assert.assertEquals(mercuryMoon.getName(), "MercuryMoon");
    Assert.assertEquals(jupiterMoon.getDiameter(), 50);
    Assert.assertEquals(jupiterMoon.getName(), "JupiterMoon");
    Assert.assertEquals(jupiter.getName(), "Jupiter");
    Assert.assertEquals(jupiter.getDistanceSun(), 3000);
    Assert.assertEquals(mercury.getName(), "Mercury");
    Assert.assertEquals(mercury.getDistanceSun(), 5000);
  } finally {
    database.close();
  }
}
}
| |
/**
* Copyright 2007-2016, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.gateway.resource.address.uri;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.kaazing.gateway.resource.address.URLUtils;
/**
* Utils class over URI methods
*
*/
/**
 * Utils class over URI methods.
 * <p>
 * Every helper first tries plain {@link URI} parsing and, when that fails with a
 * {@link URISyntaxException}, falls back to the network-interface URI syntax
 * (e.g. {@code tcp://@eth0:8080}) handled by the nested {@code NetworkInterfaceURI}.
 */
public final class URIUtils {

    /** Network-interface authority followed by an explicit port, e.g. {@code @eth0:8080} or {@code [@eth 0]:8080}. */
    public static final String NETWORK_INTERFACE_AUTHORITY_PORT = "^(\\[@[a-zA-Z0-9 :]*\\]|@[a-zA-Z0-9:]*):([0-9]*)$";

    /** Network-interface authority, optionally bracketed, e.g. {@code @eth0} or {@code [@eth 0]}. */
    public static final String NETWORK_INTERFACE_AUTHORITY = "(\\[{0,1}@[a-zA-Z0-9 :]*\\]{0,1})";

    /** Placeholder host substituted for the interface token so {@link URI} can parse the string. */
    private static final String MOCK_HOST = "127.0.0.1";

    /**
     * Helper method for toString conversion.
     * @param uri the URI to convert
     * @return the string form of the URI
     */
    public static String uriToString(URI uri) {
        return uri.toString();
    }

    /**
     * Helper method for toString conversion.
     * @param uri the network-interface URI to convert
     * @return the string form of the URI
     */
    public static String uriToString(NetworkInterfaceURI uri) {
        return uri.toString();
    }

    /**
     * Helper method for retrieving the host.
     * @param uriString the URI in string form
     * @return the host component, or {@code null} if the URI has none
     */
    public static String getHost(String uriString) {
        try {
            URI uri = new URI(uriString);
            String host = uri.getHost();
            String authority = uri.getAuthority();
            // An authority such as "@abc:80" parses as an empty user-info plus host "abc",
            // dropping the '@'; restore it so network-interface hosts keep their marker.
            // Null checks guard against URIs with no authority component at all.
            if (authority != null && host != null && authority.startsWith("@") && !host.startsWith("@")) {
                return "@" + host;
            }
            return host;
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uriString)).getHost();
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for retrieving the scheme.
     * @param uriString the URI in string form
     * @return the scheme component, or {@code null} if the URI has none
     */
    public static String getScheme(String uriString) {
        try {
            return (new URI(uriString)).getScheme();
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uriString)).getScheme();
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for retrieving the authority.
     * @param uriString the URI in string form
     * @return the authority component, or {@code null} if the URI has none
     */
    public static String getAuthority(String uriString) {
        try {
            return (new URI(uriString)).getAuthority();
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uriString)).getAuthority();
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for retrieving the fragment.
     * @param uriString the URI in string form
     * @return the fragment component, or {@code null} if the URI has none
     */
    public static String getFragment(String uriString) {
        try {
            return (new URI(uriString)).getFragment();
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uriString)).getFragment();
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for retrieving the path.
     * @param uriString the URI in string form
     * @return the path component, or {@code null} if the URI is opaque
     */
    public static String getPath(String uriString) {
        try {
            return (new URI(uriString)).getPath();
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uriString)).getPath();
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for retrieving the query.
     * @param uriString the URI in string form
     * @return the query component, or {@code null} if the URI has none
     */
    public static String getQuery(String uriString) {
        try {
            return (new URI(uriString)).getQuery();
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uriString)).getQuery();
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for retrieving the port.
     * @param uriString the URI in string form
     * @return the port component, or {@code -1} if undefined
     */
    public static int getPort(String uriString) {
        try {
            return (new URI(uriString)).getPort();
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uriString)).getPort();
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for retrieving the user-info.
     * @param uriString the URI in string form
     * @return the user-info component, or {@code null} if the URI has none
     */
    public static String getUserInfo(String uriString) {
        try {
            return (new URI(uriString)).getUserInfo();
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uriString)).getUserInfo();
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for building a URI as a String from its components.
     * @param scheme the scheme
     * @param authority the authority (may use network-interface syntax)
     * @param path the path
     * @param query the query
     * @param fragment the fragment
     * @return the assembled URI in string form
     * @throws URISyntaxException if the components cannot form a valid URI
     */
    public static String buildURIAsString(String scheme, String authority, String path,
            String query, String fragment) throws URISyntaxException {
        URI helperURI;
        try {
            helperURI = new URI(scheme, authority, path, query, fragment);
        } catch (URISyntaxException e) {
            // Authority may be a network-interface token java.net.URI rejects.
            return NetworkInterfaceURI.buildURIToString(scheme, authority, path, query, fragment);
        }
        return helperURI.toString();
    }

    /**
     * Helper method for building a URI as a String from its components.
     * @param scheme the scheme
     * @param userInfo the user-info
     * @param host the host (may use network-interface syntax)
     * @param port the port
     * @param path the path
     * @param query the query
     * @param fragment the fragment
     * @return the assembled URI in string form
     * @throws URISyntaxException if the components cannot form a valid URI
     */
    public static String buildURIAsString(String scheme, String userInfo,
            String host, int port, String path, String query, String fragment) throws URISyntaxException {
        URI helperURI;
        try {
            helperURI = new URI(scheme, userInfo, host, port, path, query, fragment);
        } catch (URISyntaxException e) {
            // Host may be a network-interface token java.net.URI rejects.
            return NetworkInterfaceURI.buildURIToString(scheme, userInfo, host, port, path, query, fragment);
        }
        return helperURI.toString();
    }

    /**
     * Helper method for performing resolve as String.
     * @param uriInitial the base URI in string form
     * @param uriString the URI to resolve against the base
     * @return the resolved URI in string form
     */
    public static String resolve(String uriInitial, String uriString) {
        try {
            return uriToString((new URI(uriInitial)).resolve(uriString));
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uriInitial)).resolve(uriString);
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for modifying the URI scheme.
     * @param uri the URI in string form
     * @param newScheme the replacement scheme
     * @return the modified URI in string form
     */
    public static String modifyURIScheme(String uri, String newScheme) {
        try {
            URI uriObj = new URI(uri);
            return uriToString(URLUtils.modifyURIScheme(uriObj, newScheme));
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uri)).modifyURIScheme(newScheme);
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for modifying the URI authority.
     * @param uri the URI in string form
     * @param newAuthority the replacement authority (may use network-interface syntax)
     * @return the modified URI in string form
     */
    public static String modifyURIAuthority(String uri, String newAuthority) {
        try {
            URI uriObj = new URI(uri);
            // The code below modifies the new authority considering also network-interface
            // syntax: the interface token is swapped for a mock host so URLUtils can work
            // on a parseable URI, then swapped back into the result.
            Pattern pattern = Pattern.compile(NETWORK_INTERFACE_AUTHORITY);
            Matcher matcher = pattern.matcher(newAuthority);
            String matchedToken = MOCK_HOST;
            // if newAuthority corresponds to NetworkInterfaceURI syntax
            if (matcher.find()) {
                matchedToken = matcher.group(0);
                newAuthority = newAuthority.replace(matchedToken, MOCK_HOST);
            }
            URI modifiedURIAuthority = URLUtils.modifyURIAuthority(uriObj, newAuthority);
            return URIUtils.uriToString(modifiedURIAuthority).replace(MOCK_HOST, matchedToken);
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uri)).modifyURIAuthority(newAuthority);
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for modifying the URI port.
     * @param uri the URI in string form
     * @param newPort the replacement port
     * @return the modified URI in string form
     */
    public static String modifyURIPort(String uri, int newPort) {
        try {
            URI uriObj = new URI(uri);
            return uriToString(URLUtils.modifyURIPort(uriObj, newPort));
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uri)).modifyURIPort(newPort);
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Helper method for modifying the URI path.
     * @param uri the URI in string form
     * @param newPath the replacement path
     * @return the modified URI in string form
     */
    public static String modifyURIPath(String uri, String newPath) {
        try {
            URI uriObj = new URI(uri);
            return uriToString(URLUtils.modifyURIPath(uriObj, newPath));
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uri)).modifyURIPath(newPath);
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Tells whether the given URI string is absolute.
     * @param uri the URI in string form
     * @return {@code true} if the URI has a scheme component
     */
    public static boolean isAbsolute(String uri) {
        try {
            return (new URI(uri)).isAbsolute();
        }
        catch (URISyntaxException e) {
            try {
                return (new NetworkInterfaceURI(uri)).isAbsolute();
            }
            catch (IllegalArgumentException ne) {
                throw new IllegalArgumentException(ne.getMessage(), ne);
            }
        }
    }

    /**
     * Class performing logic similar to java.net.URI which additionally supports the
     * network-interface authority syntax ({@code tcp://@eth0:8080}). Parsing works by
     * substituting the interface token with a mock host, delegating to {@link URI},
     * then restoring the token in the extracted components.
     */
    private static class NetworkInterfaceURI {

        /** Mock host substituted for the interface token during parsing. */
        private static final String HOST_TEMPLATE = "127.0.0.1";

        /** The java.net.URI obtained after substituting the interface token. */
        private URI mockNetworkInterfaceURI;
        private Parser parser;

        // -- Properties and components of this instance -- similar to java.net.URI

        // Components of all URIs: [<scheme>:]<scheme-specific-part>[#<fragment>]
        private String scheme; // null ==> relative URI
        private String fragment;

        // Hierarchical URI components: [//<authority>]<path>[?<query>]
        private String authority; // Registry or server

        // Server-based authority: [<userInfo>@]<host>[:<port>]
        private String userInfo;
        private String host; // null ==> registry-based
        private int port = -1; // -1 ==> undefined

        // Remaining components of hierarchical URIs
        private String path; // null ==> opaque
        private String query;
        private boolean absolute;

        /**
         * Builds a URI string from components, tolerating a network-interface authority.
         * @throws IllegalArgumentException if the remaining components are invalid
         */
        public static String buildURIToString(String scheme, String authority, String path, String query, String fragment) {
            URI helperURI;
            try {
                helperURI = new URI(scheme, HOST_TEMPLATE, path, query, fragment);
            } catch (URISyntaxException e) {
                throw new IllegalArgumentException(e.getMessage(), e);
            }
            return helperURI.toString().replace(HOST_TEMPLATE, authority);
        }

        /**
         * Builds a URI string from components, tolerating a network-interface host.
         * @throws IllegalArgumentException if the remaining components are invalid
         */
        public static String buildURIToString(String scheme, String userInfo, String host, int port, String path, String query,
                String fragment) {
            URI helperURI;
            try {
                helperURI = new URI(scheme, userInfo, HOST_TEMPLATE, port, path, query, fragment);
            } catch (URISyntaxException e) {
                throw new IllegalArgumentException(e.getMessage(), e);
            }
            return helperURI.toString().replace(HOST_TEMPLATE, host);
        }

        /**
         * Parses the given URI string using network-interface syntax rules.
         * @throws IllegalArgumentException if the string is not a valid network-interface URI
         */
        public NetworkInterfaceURI(String uri) throws IllegalArgumentException {
            parser = new Parser(uri);
            parser.parse();
        }

        /**
         * Method retrieving host.
         * @return - host in uri
         */
        public String getHost() {
            return host;
        }

        /**
         * Method retrieving scheme.
         * @return - scheme in uri
         */
        public String getScheme() {
            return scheme;
        }

        /**
         * Method retrieving authority.
         * @return - authority in uri
         */
        public String getAuthority() {
            return authority;
        }

        /**
         * Method retrieving fragment.
         * @return - fragment in uri
         */
        public String getFragment() {
            return fragment;
        }

        /**
         * Method retrieving path.
         * @return - path in uri
         */
        public String getPath() {
            return path;
        }

        /**
         * Method retrieving query.
         * @return - query in uri
         */
        public String getQuery() {
            return query;
        }

        /**
         * Method retrieving port.
         * @return - port in uri
         */
        public int getPort() {
            return port;
        }

        /**
         * Method retrieving user info section.
         * @return - user info in uri
         */
        public String getUserInfo() {
            return userInfo;
        }

        /**
         * Method retrieving whether uri is absolute.
         * @return - boolean
         */
        public boolean isAbsolute() {
            return absolute;
        }

        /**
         * Method resolving uris.
         * @param uriString the URI to resolve against this one
         * @return the resolved URI in string form
         */
        public String resolve(String uriString) {
            return parser.resolve(uriString);
        }

        /**
         * Method modifying URI scheme.
         * @param newScheme the replacement scheme
         * @return - modified uri
         */
        public String modifyURIScheme(String newScheme) {
            return buildURIFromTokens(newScheme, host, port, path, query, fragment);
        }

        /**
         * Method modifying URI authority.
         * @param newAuthority the replacement authority
         * @return - modified uri
         */
        public String modifyURIAuthority(String newAuthority) {
            return buildURIFromTokens(scheme, newAuthority, path, query, fragment);
        }

        /**
         * Method modifying URI port.
         * @param newPort the replacement port
         * @return - modified uri
         */
        public String modifyURIPort(int newPort) {
            return buildURIFromTokens(scheme, host, newPort, path, query, fragment);
        }

        /**
         * Method modifying URI path.
         * @param newPath the replacement path
         * @return - modified uri
         */
        public String modifyURIPath(String newPath) {
            return buildURIFromTokens(scheme, host, port, newPath, query, fragment);
        }

        /**
         * Parser performing network-interface syntax validation and String token extraction.
         */
        private class Parser {

            private String uri;
            private String matchedToken;

            public Parser(String uri) {
                this.uri = uri;
            }

            /**
             * Method performing parsing.
             * @throws IllegalArgumentException on any syntax violation
             */
            private void parse() throws IllegalArgumentException {
                // Network-interface authorities are only meaningful for transport schemes.
                if (!uri.startsWith("tcp://") && !uri.startsWith("udp://")) {
                    throw new IllegalArgumentException("Network interface URI syntax should only "
                            + "be applicable for tcp and udp schemes");
                }
                Pattern pattern = Pattern.compile(NETWORK_INTERFACE_AUTHORITY);
                Matcher matcher = pattern.matcher(uri);
                if (!matcher.find()) {
                    throw new IllegalArgumentException("Invalid network interface URI syntax");
                }
                matchedToken = matcher.group(0);
                if (matchedToken.matches(".*:.*:.*")) {
                    throw new IllegalArgumentException("Multiple ':' characters within network interface syntax not allowed");
                }
                // Interface names containing spaces must be fully bracketed: [@eth 0]
                if (matchedToken.contains(" ") && (!matchedToken.startsWith("[") || !matchedToken.endsWith("]"))) {
                    throw new IllegalArgumentException("Network interface syntax host contains spaces but misses bracket(s)");
                }
                mockNetworkInterfaceURI = URI.create(uri.replace(matchedToken, HOST_TEMPLATE));
                populateUriDataFromMockInterfaceURI();
            }

            /** Resolves against the mock URI, then restores the interface token. */
            private String resolve(String uriString) {
                return uriToString(mockNetworkInterfaceURI.resolve(uriString)).replace(HOST_TEMPLATE, matchedToken);
            }

            /** Copies each component out of the mock URI, restoring the interface token. */
            private void populateUriDataFromMockInterfaceURI() {
                scheme = mockNetworkInterfaceURI.getScheme();
                fragment = mockNetworkInterfaceURI.getFragment();
                authority = mockNetworkInterfaceURI.getAuthority().replace(HOST_TEMPLATE, matchedToken);
                userInfo = mockNetworkInterfaceURI.getUserInfo();
                host = mockNetworkInterfaceURI.getHost().replace(HOST_TEMPLATE, matchedToken);
                port = mockNetworkInterfaceURI.getPort();
                path = mockNetworkInterfaceURI.getPath();
                query = mockNetworkInterfaceURI.getQuery();
                absolute = mockNetworkInterfaceURI.isAbsolute();
            }
        }

        /**
         * Reassembles a URI string from host/port-form tokens.
         * Note: path values produced by {@link URI#getPath()} already carry their leading
         * '/'; the separator is only inserted when it is actually missing (the previous
         * version unconditionally inserted one, yielding "scheme://host:port//path").
         */
        private String buildURIFromTokens(String scheme, String host, int port, String path,
                String query, String fragment) {
            String separator = (path.isEmpty() || path.startsWith("/")) ? "" : "/";
            return scheme + "://" + host + ":" + port + separator + path
                    + (query != null ? "?" + query : "")
                    + (fragment != null ? "#" + fragment : "");
        }

        /**
         * Reassembles a URI string from authority-form tokens.
         * Same leading-slash handling as the host/port overload above.
         */
        private String buildURIFromTokens(String scheme, String authority, String path,
                String query, String fragment) {
            String separator = (path.isEmpty() || path.startsWith("/")) ? "" : "/";
            return scheme + "://" + authority + separator + path
                    + (query != null ? "?" + query : "")
                    + (fragment != null ? "#" + fragment : "");
        }
    }

    /**
     * Create a canonical URI from a given URI. A canonical URI is a URI with:<ul> <li>the host part of the authority
     * lower-case since URI semantics dictate that hostnames are case insensitive <li>(optionally, NOT appropriate for Origin
     * headers) the path part set to "/" if there was no path in the input URI (this conforms to the WebSocket and HTTP protocol
     * specifications and avoids us having to do special handling for path throughout the server code). </ul>
     *
     * @param uriString the URI to canonicalize, in string form
     * @param canonicalizePath if true, append trailing '/' when missing
     * @return a URI with the host part of the authority lower-case and (optionally) trailing / added, or null if the uri is null
     * @throws IllegalArgumentException if the uriString is not valid syntax
     */
    public static String getCanonicalURI(String uriString, boolean canonicalizePath) {
        if ((uriString != null) && !"".equals(uriString)) {
            return getCanonicalizedURI(uriString, canonicalizePath);
        }
        return null;
    }

    /**
     * Create a canonical URI from a given URI. A canonical URI is a URI with:<ul> <li>the host part of the authority
     * lower-case since URI semantics dictate that hostnames are case insensitive <li>(optionally, NOT appropriate for Origin
     * headers) the path part set to "/" except for tcp uris if there was no path in the input URI (this conforms to the
     * WebSocket and HTTP protocol specifications and avoids us having to do special handling for path throughout the server
     * code). </ul>
     *
     * @param uri the URI to canonicalize
     * @param canonicalizePath if true, append trailing '/' when missing
     * @return a URI with the host part of the authority lower-case and (optionally if not tcp) trailing / added, or null if the
     *         uri is null
     * @throws IllegalArgumentException if the uri is not valid syntax
     */
    public static String getCanonicalizedURI(String uri, boolean canonicalizePath) {
        String canonicalURI = uri;
        if (uri != null) {
            String host = getHost(uri);
            String path = getPath(uri);
            final boolean emptyPath = "".equals(path);
            final boolean noPathToCanonicalize = canonicalizePath && (path == null || emptyPath);
            final boolean trailingSlashPath = "/".equals(path);
            final String scheme = getScheme(uri);
            // Transport-level schemes carry no meaningful path, so no "/" is forced on them.
            final boolean pathlessScheme = "ssl".equals(scheme) || "tcp".equals(scheme) || "pipe".equals(scheme)
                    || "udp".equals(scheme) || "mux".equals(scheme);
            final boolean trailingSlashWithPathlessScheme = trailingSlashPath && pathlessScheme;
            // "" strips a stray "/" from pathless schemes; "/" fills in a missing path for
            // path-ful schemes; null means "leave the path alone".
            String newPath = trailingSlashWithPathlessScheme ? "" :
                    noPathToCanonicalize ? (pathlessScheme ? null : "/") : null;
            // Only rebuild when something actually changes (case of host, or the path).
            if (((host != null) && !host.equals(host.toLowerCase())) || newPath != null) {
                path = newPath == null ? path : newPath;
                try {
                    canonicalURI = buildURIAsString(scheme, getUserInfo(uri), host == null ?
                            null : host.toLowerCase(), getPort(uri), path, getQuery(uri), getFragment(uri));
                } catch (URISyntaxException ex) {
                    throw new IllegalArgumentException("Invalid URI: " + uri + " in Gateway configuration file", ex);
                }
            }
        }
        return canonicalURI;
    }
}
| |
package com.tinytimrob.jfmod5;
/** 'Channel' API. */
public final class FmodChannel extends FmodChannelControl
{
/**
 * Wraps a native FMOD channel handle.
 *
 * @param pointer native FMOD_CHANNEL* address; 0 indicates an already-freed handle
 */
FmodChannel(long pointer)
{
	super(pointer);
}
/* functions overridden from ChannelControl in the C++ API
@Override FMOD_RESULT F_API FMOD_Channel_GetSystemObject (FMOD_CHANNEL *channel, FMOD_SYSTEM **system);
*/
/**
 * Delegates to the native FMOD_Channel_Stop call for this channel.
 *
 * @return the FMOD result code reported by the native call
 * @throws PointerAlreadyFreedException if the native handle has already been released
 */
@Override
// @Override FMOD_RESULT F_API FMOD_Channel_Stop(FMOD_CHANNEL *channel);
public FmodResult stop()
{
	// A zero pointer means the native object was freed; calling into FMOD with it is unsafe.
	if (this.pointer == 0)
	{
		throw new PointerAlreadyFreedException();
	}
	return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_Stop(this.pointer));
}
/**
 * Delegates to the native FMOD_Channel_SetPaused call for this channel.
 *
 * @param paused the paused state to apply
 * @return the FMOD result code reported by the native call
 * @throws PointerAlreadyFreedException if the native handle has already been released
 */
@Override
// @Override FMOD_RESULT F_API FMOD_Channel_SetPaused(FMOD_CHANNEL *channel, FMOD_BOOL paused);
public FmodResult setPaused(boolean paused)
{
	// A zero pointer means the native object was freed; calling into FMOD with it is unsafe.
	if (this.pointer == 0)
	{
		throw new PointerAlreadyFreedException();
	}
	return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_SetPaused(this.pointer, paused));
}
/**
 * Delegates to the native FMOD_Channel_GetPaused call for this channel.
 *
 * @param paused single-element out parameter receiving the paused state
 * @return the FMOD result code reported by the native call
 * @throws PointerAlreadyFreedException if the native handle has already been released
 * @throws InvalidDestinationArraySizeException if {@code paused} is not exactly length 1
 */
@Override
// @Override FMOD_RESULT F_API FMOD_Channel_GetPaused(FMOD_CHANNEL *channel, FMOD_BOOL *paused);
public FmodResult getPaused(boolean[] paused) throws InvalidDestinationArraySizeException
{
	// A zero pointer means the native object was freed; calling into FMOD with it is unsafe.
	if (this.pointer == 0)
	{
		throw new PointerAlreadyFreedException();
	}
	// The native out-parameter (FMOD_BOOL*) is mirrored as a length-1 array.
	if (paused.length != 1)
	{
		throw new InvalidDestinationArraySizeException("paused", 1, paused.length);
	}
	return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_GetPaused(this.pointer, paused));
}
/**
 * Delegates to the native FMOD_Channel_SetVolume call for this channel.
 *
 * @param volume the volume level to apply
 * @return the FMOD result code reported by the native call
 * @throws PointerAlreadyFreedException if the native handle has already been released
 */
@Override
// @Override FMOD_RESULT F_API FMOD_Channel_SetVolume(FMOD_CHANNEL *channel, float volume);
public FmodResult setVolume(float volume)
{
	// A zero pointer means the native object was freed; calling into FMOD with it is unsafe.
	if (this.pointer == 0)
	{
		throw new PointerAlreadyFreedException();
	}
	return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_SetVolume(this.pointer, volume));
}
/**
 * Delegates to the native FMOD_Channel_GetVolume call for this channel.
 *
 * @param volume single-element out parameter receiving the volume level
 * @return the FMOD result code reported by the native call
 * @throws PointerAlreadyFreedException if the native handle has already been released
 * @throws InvalidDestinationArraySizeException if {@code volume} is not exactly length 1
 */
@Override
// @Override FMOD_RESULT F_API FMOD_Channel_GetVolume(FMOD_CHANNEL *channel, float *volume);
public FmodResult getVolume(float[] volume) throws InvalidDestinationArraySizeException
{
	// A zero pointer means the native object was freed; calling into FMOD with it is unsafe.
	if (this.pointer == 0)
	{
		throw new PointerAlreadyFreedException();
	}
	// The native out-parameter (float*) is mirrored as a length-1 array.
	if (volume.length != 1)
	{
		throw new InvalidDestinationArraySizeException("volume", 1, volume.length);
	}
	return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_GetVolume(this.pointer, volume));
}
/*
@Override FMOD_RESULT F_API FMOD_Channel_SetVolumeRamp (FMOD_CHANNEL *channel, FMOD_BOOL ramp);
@Override FMOD_RESULT F_API FMOD_Channel_GetVolumeRamp (FMOD_CHANNEL *channel, FMOD_BOOL *ramp);
@Override FMOD_RESULT F_API FMOD_Channel_GetAudibility (FMOD_CHANNEL *channel, float *audibility);
@Override FMOD_RESULT F_API FMOD_Channel_SetPitch (FMOD_CHANNEL *channel, float pitch);
@Override FMOD_RESULT F_API FMOD_Channel_GetPitch (FMOD_CHANNEL *channel, float *pitch);
@Override FMOD_RESULT F_API FMOD_Channel_SetMute (FMOD_CHANNEL *channel, FMOD_BOOL mute);
@Override FMOD_RESULT F_API FMOD_Channel_GetMute (FMOD_CHANNEL *channel, FMOD_BOOL *mute);
@Override FMOD_RESULT F_API FMOD_Channel_SetReverbProperties (FMOD_CHANNEL *channel, int instance, float wet);
@Override FMOD_RESULT F_API FMOD_Channel_GetReverbProperties (FMOD_CHANNEL *channel, int instance, float *wet);
@Override FMOD_RESULT F_API FMOD_Channel_SetLowPassGain (FMOD_CHANNEL *channel, float gain);
@Override FMOD_RESULT F_API FMOD_Channel_GetLowPassGain (FMOD_CHANNEL *channel, float *gain);
@Override FMOD_RESULT F_API FMOD_Channel_SetMode (FMOD_CHANNEL *channel, FMOD_MODE mode);
@Override FMOD_RESULT F_API FMOD_Channel_GetMode (FMOD_CHANNEL *channel, FMOD_MODE *mode);
@Override FMOD_RESULT F_API FMOD_Channel_SetCallback (FMOD_CHANNEL *channel, FMOD_CHANNELCONTROL_CALLBACK callback);
*/
@Override
// Native: FMOD_RESULT F_API FMOD_Channel_IsPlaying(FMOD_CHANNEL *channel, FMOD_BOOL *isplaying);
public FmodResult isPlaying(boolean[] playing) throws InvalidDestinationArraySizeException {
    // A zeroed pointer means the native channel handle has been released.
    if (0 == this.pointer) {
        throw new PointerAlreadyFreedException();
    }
    // The native call writes exactly one flag, so insist on a 1-element buffer.
    if (1 != playing.length) {
        throw new InvalidDestinationArraySizeException("playing", 1, playing.length);
    }
    return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_IsPlaying(this.pointer, playing));
}
/*
@Override FMOD_RESULT F_API FMOD_Channel_SetPan (FMOD_CHANNEL *channel, float pan);
@Override FMOD_RESULT F_API FMOD_Channel_SetMixLevelsOutput (FMOD_CHANNEL *channel, float frontleft, float frontright, float center, float lfe, float surroundleft, float surroundright, float backleft, float backright);
@Override FMOD_RESULT F_API FMOD_Channel_SetMixLevelsInput (FMOD_CHANNEL *channel, float *levels, int numlevels);
@Override FMOD_RESULT F_API FMOD_Channel_SetMixMatrix (FMOD_CHANNEL *channel, float *matrix, int outchannels, int inchannels, int inchannel_hop);
@Override FMOD_RESULT F_API FMOD_Channel_GetMixMatrix (FMOD_CHANNEL *channel, float *matrix, int *outchannels, int *inchannels, int inchannel_hop);
@Override FMOD_RESULT F_API FMOD_Channel_GetDSPClock (FMOD_CHANNEL *channel, unsigned long long *dspclock, unsigned long long *parentclock);
@Override FMOD_RESULT F_API FMOD_Channel_SetDelay (FMOD_CHANNEL *channel, unsigned long long dspclock_start, unsigned long long dspclock_end, FMOD_BOOL stopchannels);
@Override FMOD_RESULT F_API FMOD_Channel_GetDelay (FMOD_CHANNEL *channel, unsigned long long *dspclock_start, unsigned long long *dspclock_end, FMOD_BOOL *stopchannels);
@Override FMOD_RESULT F_API FMOD_Channel_AddFadePoint (FMOD_CHANNEL *channel, unsigned long long dspclock, float volume);
@Override FMOD_RESULT F_API FMOD_Channel_SetFadePointRamp (FMOD_CHANNEL *channel, unsigned long long dspclock, float volume);
@Override FMOD_RESULT F_API FMOD_Channel_RemoveFadePoints (FMOD_CHANNEL *channel, unsigned long long dspclock_start, unsigned long long dspclock_end);
@Override FMOD_RESULT F_API FMOD_Channel_GetFadePoints (FMOD_CHANNEL *channel, unsigned int *numpoints, unsigned long long *point_dspclock, float *point_volume);
@Override FMOD_RESULT F_API FMOD_Channel_GetDSP (FMOD_CHANNEL *channel, int index, FMOD_DSP **dsp);
*/
@Override
// Native: FMOD_RESULT F_API FMOD_Channel_AddDSP(FMOD_CHANNEL *channel, int index, FMOD_DSP *dsp);
public FmodResult addDSP(int index, FmodDSP dsp) {
    // A zeroed pointer means the native channel handle has been released.
    if (0 == this.pointer) {
        throw new PointerAlreadyFreedException();
    }
    // A null DSP wrapper is forwarded to the native layer as a null (0) pointer.
    return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_AddDSP(this.pointer, index, dsp != null ? dsp.pointer : 0));
}
@Override
// Native: FMOD_RESULT F_API FMOD_Channel_RemoveDSP(FMOD_CHANNEL *channel, FMOD_DSP *dsp);
public FmodResult removeDSP(FmodDSP dsp) {
    // A zeroed pointer means the native channel handle has been released.
    if (0 == this.pointer) {
        throw new PointerAlreadyFreedException();
    }
    // A null DSP wrapper is forwarded to the native layer as a null (0) pointer.
    return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_RemoveDSP(this.pointer, dsp != null ? dsp.pointer : 0));
}
/*
@Override FMOD_RESULT F_API FMOD_Channel_GetNumDSPs (FMOD_CHANNEL *channel, int *numdsps);
@Override FMOD_RESULT F_API FMOD_Channel_SetDSPIndex (FMOD_CHANNEL *channel, FMOD_DSP *dsp, int index);
@Override FMOD_RESULT F_API FMOD_Channel_GetDSPIndex (FMOD_CHANNEL *channel, FMOD_DSP *dsp, int *index);
@Override FMOD_RESULT F_API FMOD_Channel_OverridePanDSP (FMOD_CHANNEL *channel, FMOD_DSP *pan);
@Override FMOD_RESULT F_API FMOD_Channel_Set3DAttributes (FMOD_CHANNEL *channel, const FMOD_VECTOR *pos, const FMOD_VECTOR *vel, const FMOD_VECTOR *alt_pan_pos);
@Override FMOD_RESULT F_API FMOD_Channel_Get3DAttributes (FMOD_CHANNEL *channel, FMOD_VECTOR *pos, FMOD_VECTOR *vel, FMOD_VECTOR *alt_pan_pos);
@Override FMOD_RESULT F_API FMOD_Channel_Set3DMinMaxDistance (FMOD_CHANNEL *channel, float mindistance, float maxdistance);
@Override FMOD_RESULT F_API FMOD_Channel_Get3DMinMaxDistance (FMOD_CHANNEL *channel, float *mindistance, float *maxdistance);
@Override FMOD_RESULT F_API FMOD_Channel_Set3DConeSettings (FMOD_CHANNEL *channel, float insideconeangle, float outsideconeangle, float outsidevolume);
@Override FMOD_RESULT F_API FMOD_Channel_Get3DConeSettings (FMOD_CHANNEL *channel, float *insideconeangle, float *outsideconeangle, float *outsidevolume);
@Override FMOD_RESULT F_API FMOD_Channel_Set3DConeOrientation (FMOD_CHANNEL *channel, FMOD_VECTOR *orientation);
@Override FMOD_RESULT F_API FMOD_Channel_Get3DConeOrientation (FMOD_CHANNEL *channel, FMOD_VECTOR *orientation);
@Override FMOD_RESULT F_API FMOD_Channel_Set3DCustomRolloff (FMOD_CHANNEL *channel, FMOD_VECTOR *points, int numpoints);
@Override FMOD_RESULT F_API FMOD_Channel_Get3DCustomRolloff (FMOD_CHANNEL *channel, FMOD_VECTOR **points, int *numpoints);
@Override FMOD_RESULT F_API FMOD_Channel_Set3DOcclusion (FMOD_CHANNEL *channel, float directocclusion, float reverbocclusion);
@Override FMOD_RESULT F_API FMOD_Channel_Get3DOcclusion (FMOD_CHANNEL *channel, float *directocclusion, float *reverbocclusion);
@Override FMOD_RESULT F_API FMOD_Channel_Set3DSpread (FMOD_CHANNEL *channel, float angle);
@Override FMOD_RESULT F_API FMOD_Channel_Get3DSpread (FMOD_CHANNEL *channel, float *angle);
@Override FMOD_RESULT F_API FMOD_Channel_Set3DLevel (FMOD_CHANNEL *channel, float level);
@Override FMOD_RESULT F_API FMOD_Channel_Get3DLevel (FMOD_CHANNEL *channel, float *level);
@Override FMOD_RESULT F_API FMOD_Channel_Set3DDopplerLevel (FMOD_CHANNEL *channel, float level);
@Override FMOD_RESULT F_API FMOD_Channel_Get3DDopplerLevel (FMOD_CHANNEL *channel, float *level);
@Override FMOD_RESULT F_API FMOD_Channel_Set3DDistanceFilter (FMOD_CHANNEL *channel, FMOD_BOOL custom, float customLevel, float centerFreq);
@Override FMOD_RESULT F_API FMOD_Channel_Get3DDistanceFilter (FMOD_CHANNEL *channel, FMOD_BOOL *custom, float *customLevel, float *centerFreq);
*/
/* functions which are unique to Channel */
/**
 * Sets the channel frequency (playback rate) in Hz. <br/><br/>
 * A sound initially plays at its own default frequency, configurable via
 * {@link FmodSound#setDefaults}; for most file formats that default comes from the audio format.
 *
 * @param frequency Frequency in Hz. A negative value plays the sound backwards
 *                  (negative frequencies are allowed with non-stream sounds only).
 * @return {@link FmodResult#OK} on success, otherwise one of the other values
 *         defined in the {@link FmodResult} enumeration.
 */
// Native: FMOD_RESULT F_API FMOD_Channel_SetFrequency(FMOD_CHANNEL *channel, float frequency);
public FmodResult setFrequency(float frequency) {
    // A zeroed pointer means the native channel handle has been released.
    if (0 == this.pointer) {
        throw new PointerAlreadyFreedException();
    }
    return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_SetFrequency(this.pointer, frequency));
}
/**
 * Retrieves the channel frequency (playback rate) in Hz.
 *
 * @param frequency 1-element destination array that receives the channel's current frequency in Hz.
 * @return {@link FmodResult#OK} on success, otherwise one of the other values
 *         defined in the {@link FmodResult} enumeration.
 * @throws InvalidDestinationArraySizeException if the destination array length is not 1
 */
// Native: FMOD_RESULT F_API FMOD_Channel_GetFrequency(FMOD_CHANNEL *channel, float *frequency);
public FmodResult getFrequency(float[] frequency) throws InvalidDestinationArraySizeException {
    // A zeroed pointer means the native channel handle has been released.
    if (0 == this.pointer) {
        throw new PointerAlreadyFreedException();
    }
    // The native call writes exactly one float, so insist on a 1-element buffer.
    if (1 != frequency.length) {
        throw new InvalidDestinationArraySizeException("frequency", 1, frequency.length);
    }
    return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_GetFrequency(this.pointer, frequency));
}
/*
FMOD_RESULT F_API FMOD_Channel_SetPriority (FMOD_CHANNEL *channel, int priority);
FMOD_RESULT F_API FMOD_Channel_GetPriority (FMOD_CHANNEL *channel, int *priority);
*/
/**
 * Seeks the currently playing sound on this channel to the given offset. <br/><br/>
 * Some time units are format dependent — for example {@link FmodTimeUnit#MODORDER} does not
 * apply to an MP3 file. Seeking a stream may force its buffer to be reflushed for zero-latency
 * resume, which can briefly stall playback. <br/><br/>
 * With {@link FmodMode#NONBLOCKING} streams, the stream enters the
 * {@link FmodOpenState#SETPOSITION} state (see {@link FmodSound#getOpenState}) and sound
 * commands return {@link FmodResult#ERR_NOTREADY}; {@link FmodChannel#getPosition} also does
 * not update until the non-blocking seek completes. <br/><br/>
 * <b>Warning!</b> A VBR source without seek information (e.g. MP3 or MOD/S3M/XM/IT) may seek
 * inaccurately with {@link FmodTimeUnit#MS} or {@link FmodTimeUnit#PCM}. Load or open the
 * sound with {@link FmodMode#ACCURATETIME} to have FMOD build a PCM-vs-bytes seek table for
 * accurate seeking, at the cost of a slight load delay while the whole file is scanned.
 *
 * @param position Position to seek to, expressed in the unit given by the 'postype' parameter.
 * @param postype Time unit of the position value. See {@link FmodTimeUnit}.
 * @return {@link FmodResult#OK} on success, otherwise one of the other values
 *         defined in the {@link FmodResult} enumeration.
 */
// Native: FMOD_RESULT F_API FMOD_Channel_SetPosition(FMOD_CHANNEL *channel, unsigned int position, FMOD_TIMEUNIT postype);
public FmodResult setPosition(long position, long postype) {
    // A zeroed pointer means the native channel handle has been released.
    if (0 == this.pointer) {
        throw new PointerAlreadyFreedException();
    }
    return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_SetPosition(this.pointer, position, postype));
}
/**
 * Retrieves the current playback position of this channel.
 *
 * @param position 1-element destination array that receives the position of the sound.
 * @param postype Time unit in which the position is reported. See {@link FmodTimeUnit}.
 * @return {@link FmodResult#OK} on success, otherwise one of the other values
 *         defined in the {@link FmodResult} enumeration.
 * @throws InvalidDestinationArraySizeException if the destination array length is not 1
 */
// Native: FMOD_RESULT F_API FMOD_Channel_GetPosition(FMOD_CHANNEL *channel, unsigned int *position, FMOD_TIMEUNIT postype);
public FmodResult getPosition(long[] position, long postype) throws InvalidDestinationArraySizeException {
    // A zeroed pointer means the native channel handle has been released.
    if (0 == this.pointer) {
        throw new PointerAlreadyFreedException();
    }
    // The native call writes exactly one value, so insist on a 1-element buffer.
    if (1 != position.length) {
        throw new InvalidDestinationArraySizeException("position", 1, position.length);
    }
    return FmodResult.convert(JNIFmod.INSTANCE.FMOD_Channel_GetPosition(this.pointer, position, postype));
}
/*
FMOD_RESULT F_API FMOD_Channel_SetChannelGroup (FMOD_CHANNEL *channel, FMOD_CHANNELGROUP *channelgroup);
FMOD_RESULT F_API FMOD_Channel_GetChannelGroup (FMOD_CHANNEL *channel, FMOD_CHANNELGROUP **channelgroup);
FMOD_RESULT F_API FMOD_Channel_SetLoopCount (FMOD_CHANNEL *channel, int loopcount);
FMOD_RESULT F_API FMOD_Channel_GetLoopCount (FMOD_CHANNEL *channel, int *loopcount);
FMOD_RESULT F_API FMOD_Channel_SetLoopPoints (FMOD_CHANNEL *channel, unsigned int loopstart, FMOD_TIMEUNIT loopstarttype, unsigned int loopend, FMOD_TIMEUNIT loopendtype);
FMOD_RESULT F_API FMOD_Channel_GetLoopPoints (FMOD_CHANNEL *channel, unsigned int *loopstart, FMOD_TIMEUNIT loopstarttype, unsigned int *loopend, FMOD_TIMEUNIT loopendtype);
FMOD_RESULT F_API FMOD_Channel_IsVirtual (FMOD_CHANNEL *channel, FMOD_BOOL *isvirtual);
FMOD_RESULT F_API FMOD_Channel_GetCurrentSound (FMOD_CHANNEL *channel, FMOD_SOUND **sound);
FMOD_RESULT F_API FMOD_Channel_GetIndex (FMOD_CHANNEL *channel, int *index);
*/
}
| |
/*======================================================================*
* Copyright (c) 2011, OpenX Technologies, Inc. All rights reserved. *
* *
* Licensed under the New BSD License (the "License"); you may not use *
* this file except in compliance with the License. Unless required *
* by applicable law or agreed to in writing, software distributed *
* under the License is distributed on an "AS IS" BASIS, WITHOUT *
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. See accompanying LICENSE file. *
*======================================================================*/
package org.openx.data.jsonserde.objectinspector;
import java.util.*;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.*;
import org.openx.data.jsonserde.objectinspector.primitive.JavaStringByteObjectInspector;
import org.openx.data.jsonserde.objectinspector.primitive.JavaStringDoubleObjectInspector;
import org.openx.data.jsonserde.objectinspector.primitive.JavaStringFloatObjectInspector;
import org.openx.data.jsonserde.objectinspector.primitive.JavaStringIntObjectInspector;
import org.openx.data.jsonserde.objectinspector.primitive.JavaStringLongObjectInspector;
import org.openx.data.jsonserde.objectinspector.primitive.JavaStringShortObjectInspector;
import org.openx.data.jsonserde.objectinspector.primitive.JavaStringTimestampObjectInspector;
/**
*
* @author rcongiu
*/
/**
 * Builds and caches Hive {@link ObjectInspector}s for the JSON SerDe,
 * recursing through primitive, list, map, struct and union types.
 * All caches are static and shared, so the factory methods are synchronized.
 *
 * @author rcongiu
 */
public class JsonObjectInspectorFactory {

    /**
     * Cache of inspectors keyed by the pair (TypeInfo, options).
     * FIX: the original cache was keyed by TypeInfo alone, although struct
     * inspectors also depend on the SerDe options — so two tables with the
     * same schema but different options could get each other's inspectors.
     */
    static HashMap<ArrayList<Object>, ObjectInspector> cachedJsonObjectInspector =
            new HashMap<ArrayList<Object>, ObjectInspector>();

    /**
     * Returns (building and caching on first use) the object inspector for the
     * given Hive type, recursing into element/key/value/field types.
     *
     * @param typeInfo the Hive type to build an inspector for
     * @param options  SerDe options, propagated into struct/union inspectors
     * @see JsonUtils
     * @return the inspector, or {@code null} for an unrecognized category
     */
    public static synchronized ObjectInspector getJsonObjectInspectorFromTypeInfo(
            TypeInfo typeInfo, JsonStructOIOptions options) {
        ArrayList<Object> signature = new ArrayList<Object>();
        signature.add(typeInfo);
        signature.add(options);
        ObjectInspector result = cachedJsonObjectInspector.get(signature);
        if (result == null) {
            switch (typeInfo.getCategory()) {
                case PRIMITIVE: {
                    PrimitiveTypeInfo pti = (PrimitiveTypeInfo) typeInfo;
                    result = getPrimitiveJavaObjectInspector(pti.getPrimitiveCategory());
                    break;
                }
                case LIST: {
                    ObjectInspector elementObjectInspector =
                            getJsonObjectInspectorFromTypeInfo(
                                    ((ListTypeInfo) typeInfo).getListElementTypeInfo(), options);
                    result = JsonObjectInspectorFactory.getJsonListObjectInspector(elementObjectInspector);
                    break;
                }
                case MAP: {
                    MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
                    ObjectInspector keyObjectInspector =
                            getJsonObjectInspectorFromTypeInfo(mapTypeInfo.getMapKeyTypeInfo(), options);
                    ObjectInspector valueObjectInspector =
                            getJsonObjectInspectorFromTypeInfo(mapTypeInfo.getMapValueTypeInfo(), options);
                    result = JsonObjectInspectorFactory.getJsonMapObjectInspector(keyObjectInspector,
                            valueObjectInspector);
                    break;
                }
                case STRUCT: {
                    StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
                    List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
                    List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
                    List<ObjectInspector> fieldObjectInspectors =
                            new ArrayList<ObjectInspector>(fieldTypeInfos.size());
                    for (int i = 0; i < fieldTypeInfos.size(); i++) {
                        fieldObjectInspectors.add(
                                getJsonObjectInspectorFromTypeInfo(fieldTypeInfos.get(i), options));
                    }
                    result = JsonObjectInspectorFactory.getJsonStructObjectInspector(fieldNames,
                            fieldObjectInspectors, options);
                    break;
                }
                case UNION: {
                    // Build one inspector per union alternative, preserving order.
                    UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
                    List<ObjectInspector> ois = new LinkedList<ObjectInspector>();
                    for (TypeInfo ti : unionTypeInfo.getAllUnionObjectTypeInfos()) {
                        ois.add(getJsonObjectInspectorFromTypeInfo(ti, options));
                    }
                    result = getJsonUnionObjectInspector(ois, options);
                    break;
                }
                default: {
                    // Unknown category: cache the null so we do not retry every call.
                    result = null;
                }
            }
            cachedJsonObjectInspector.put(signature, result);
        }
        return result;
    }

    /** Cache of union inspectors keyed by (inspector list, options). */
    static HashMap<ArrayList<Object>, JsonUnionObjectInspector> cachedJsonUnionObjectInspector
            = new HashMap<ArrayList<Object>, JsonUnionObjectInspector>();

    /**
     * Returns a cached (or newly built) union inspector for the given
     * alternative inspectors and options.
     */
    public static synchronized JsonUnionObjectInspector getJsonUnionObjectInspector(
            List<ObjectInspector> ois,
            JsonStructOIOptions options) {
        ArrayList<Object> signature = new ArrayList<Object>();
        signature.add(ois);
        signature.add(options);
        JsonUnionObjectInspector result = cachedJsonUnionObjectInspector.get(signature);
        if (result == null) {
            result = new JsonUnionObjectInspector(ois, options);
            cachedJsonUnionObjectInspector.put(signature, result);
        }
        return result;
    }

    /*
     * Caches Struct Object Inspectors, keyed by (field names, field inspectors, options).
     */
    static HashMap<ArrayList<Object>, JsonStructObjectInspector> cachedStandardStructObjectInspector
            = new HashMap<ArrayList<Object>, JsonStructObjectInspector>();

    /**
     * Returns a cached (or newly built) struct inspector for the given field
     * names, field inspectors and options.
     */
    public static synchronized JsonStructObjectInspector getJsonStructObjectInspector(
            List<String> structFieldNames,
            List<ObjectInspector> structFieldObjectInspectors,
            JsonStructOIOptions options) {
        ArrayList<Object> signature = new ArrayList<Object>();
        signature.add(structFieldNames);
        signature.add(structFieldObjectInspectors);
        signature.add(options);
        JsonStructObjectInspector result = cachedStandardStructObjectInspector.get(signature);
        if (result == null) {
            result = new JsonStructObjectInspector(structFieldNames,
                    structFieldObjectInspectors, options);
            cachedStandardStructObjectInspector.put(signature, result);
        }
        return result;
    }

    /*
     * Caches the List object inspectors, keyed by the element inspector.
     */
    static HashMap<ArrayList<Object>, JsonListObjectInspector> cachedJsonListObjectInspector
            = new HashMap<ArrayList<Object>, JsonListObjectInspector>();

    /**
     * Returns a cached (or newly built) list inspector for the given element
     * inspector.
     */
    public static synchronized JsonListObjectInspector getJsonListObjectInspector(
            ObjectInspector listElementObjectInspector) {
        ArrayList<Object> signature = new ArrayList<Object>();
        signature.add(listElementObjectInspector);
        JsonListObjectInspector result = cachedJsonListObjectInspector.get(signature);
        if (result == null) {
            result = new JsonListObjectInspector(listElementObjectInspector);
            cachedJsonListObjectInspector.put(signature, result);
        }
        return result;
    }

    /*
     * Caches Map ObjectInspectors, keyed by (key inspector, value inspector).
     */
    static HashMap<ArrayList<Object>, JsonMapObjectInspector> cachedJsonMapObjectInspector
            = new HashMap<ArrayList<Object>, JsonMapObjectInspector>();

    /**
     * Returns a cached (or newly built) map inspector for the given key and
     * value inspectors.
     */
    public static synchronized JsonMapObjectInspector getJsonMapObjectInspector(
            ObjectInspector mapKeyObjectInspector,
            ObjectInspector mapValueObjectInspector) {
        ArrayList<Object> signature = new ArrayList<Object>();
        signature.add(mapKeyObjectInspector);
        signature.add(mapValueObjectInspector);
        JsonMapObjectInspector result = cachedJsonMapObjectInspector.get(signature);
        if (result == null) {
            result = new JsonMapObjectInspector(mapKeyObjectInspector,
                    mapValueObjectInspector);
            cachedJsonMapObjectInspector.put(signature, result);
        }
        return result;
    }

    /**
     * Primitive inspectors that must parse from the deferred (lazy) JSON
     * string representation; pre-populated for the numeric and timestamp
     * categories, all other categories fall back to Hive's standard
     * primitive inspectors on demand.
     */
    static final Map<PrimitiveCategory, AbstractPrimitiveJavaObjectInspector> primitiveOICache
            = new EnumMap<PrimitiveCategory, AbstractPrimitiveJavaObjectInspector>(PrimitiveCategory.class);

    static {
        primitiveOICache.put(PrimitiveCategory.BYTE, new JavaStringByteObjectInspector());
        primitiveOICache.put(PrimitiveCategory.SHORT, new JavaStringShortObjectInspector());
        primitiveOICache.put(PrimitiveCategory.INT, new JavaStringIntObjectInspector());
        primitiveOICache.put(PrimitiveCategory.LONG, new JavaStringLongObjectInspector());
        primitiveOICache.put(PrimitiveCategory.FLOAT, new JavaStringFloatObjectInspector());
        primitiveOICache.put(PrimitiveCategory.DOUBLE, new JavaStringDoubleObjectInspector());
        primitiveOICache.put(PrimitiveCategory.TIMESTAMP, new JavaStringTimestampObjectInspector());
    }

    /**
     * Gets the appropriate adapter wrapper around the object inspector if
     * necessary, that is, if we're dealing with numbers. The JSON parser won't
     * parse the number because it's deferred (lazy).
     * Synchronized: the check-then-put on the shared cache is not atomic.
     *
     * @param primitiveCategory the Hive primitive category
     * @return a string-aware inspector for numeric/timestamp categories,
     *         otherwise Hive's standard primitive inspector
     */
    public static synchronized AbstractPrimitiveJavaObjectInspector getPrimitiveJavaObjectInspector(
            PrimitiveCategory primitiveCategory) {
        if (!primitiveOICache.containsKey(primitiveCategory)) {
            primitiveOICache.put(primitiveCategory, PrimitiveObjectInspectorFactory.
                    getPrimitiveJavaObjectInspector(primitiveCategory));
        }
        return primitiveOICache.get(primitiveCategory);
    }
}
| |
/*
Copyright (c) 2013, California State University Monterey Bay (CSUMB).
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the CPUC, CSU Monterey Bay, nor the names of
its contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.va.cgit.vaspeed.android;
import gov.va.cgit.vaspeed.android.AndroidUiServices;
import java.text.DecimalFormat;
import java.text.NumberFormat;
/**
*
* @author California State University Monterey Bay ITCD
*/
/**
 * Parses the console output of the platform "ping" command and reports
 * latency figures to the UI. Two output dialects are handled: the
 * Linux-style output seen on Android phones ("time=... ms",
 * ".../mdev = min/avg/max/mdev") and the Windows-style output seen on
 * netbooks ("(N% loss)", "Minimum = ... Maximum = ... Average = ...").
 *
 * @author California State University Monterey Bay ITCD
 */
public class ProcessPing {

    // Summary values parsed from the ping output; "NA" until seen.
    public String average;
    public String minimum;
    public String maximum;
    public String loss;
    // Line-parser state machine: 0 = scanning per-packet lines,
    // 1 = loss summary seen (waiting for the statistics line), 2 = done.
    private int state;
    private int startIndex;
    // Label reported alongside the results in the UI.
    public String message;
    public Boolean success;
    // Running total and count of per-packet round-trip times, feeding the
    // live rolling average.
    public Float rollingSum;
    public Integer rollingAverageCount;
    private AndroidUiServices uiServices;
    // Average recorded during phase 1; folded into phase-2 rolling averages
    // (0 when phase 1 failed).
    public Float phase1Average;
    public Boolean isPhase2;

    /**
     * @param message    label to report results under
     * @param uiServices sink for UI result updates
     */
    public ProcessPing(String message, AndroidUiServices uiServices) {
        average = "NA";
        minimum = "NA";
        maximum = "NA";
        loss = "NA";
        state = 0;
        startIndex = 0;
        this.message = message;
        success = false;
        rollingSum = 0.0f;
        rollingAverageCount = 0;
        this.uiServices = uiServices;
        this.phase1Average = 0.0f;
        this.isPhase2 = false;
    }

    /**
     * Switches this parser into phase 2, seeding it with the phase-1 average.
     *
     * @param firstPingAverage formatted phase-1 average; any string containing
     *                         "NA" means phase 1 failed and is ignored
     */
    public void setPhase2(String firstPingAverage) {
        if (firstPingAverage.contains("NA")) {
            this.phase1Average = 0.0f;
        } else {
            this.phase1Average = Float.valueOf(firstPingAverage);
        }
        this.isPhase2 = true;
    }

    /** Shows the initial "0" latency row for this test in the UI. */
    public void displayInitialMessage() {
        uiServices.setResults(Constants.THREAD_WRITE_LATENCY_DATA, message, "0", false, false);
    }

    /** Marks the run successful and records the message to display. */
    public void SetPingSuccess(String message) {
        this.message = message;
        success = true;
    }

    /** Marks the run failed and records the message to display. */
    public void SetPingFail(String message) {
        this.message = message;
        success = false;
    }

    /**
     * Decides the final pass/fail status: the run failed when no average was
     * parsed or the loss field reports 100%. In phase 1 the final UI update is
     * deferred; phase 2 reports the combined result.
     * NOTE(review): the parsers store the loss value WITHOUT a '%' (e.g.
     * "100"), so loss.contains("100%") appears to never match; total loss is
     * still caught because average stays "NA". Preserved as-is — confirm
     * against real ping output before changing.
     *
     * @param name unused; kept for interface compatibility
     */
    public void SetPingFinalStatus(String name) {
        if (average.contains("NA") || loss.contains("100%")) {
            SetPingFail("Delay Incomplete");
        } else {
            SetPingSuccess("Delay");
        }
        if (!isPhase2) {
            uiServices.setResults(Constants.THREAD_WRITE_LATENCY_DATA, message, average, !success, !success);
        }
    }

    /** Formats a numeric string with one decimal place (pattern "#.0"). */
    private static String formatFloatString(String value) {
        Float flAverage = Float.valueOf(value);
        NumberFormat numberFormat = new DecimalFormat("#.0");
        return (numberFormat.format(flAverage));
    }

    /**
     * Pushes the current rolling average to the UI. In phase 2 a successful
     * phase-1 average is averaged in with equal weight.
     * NOTE: with no samples yet (rollingAverageCount == 0) the float division
     * yields NaN; callers only invoke this after recording a sample.
     */
    public void displayRollingAverage() {
        // phase1Average is zero if phase 1 Ping Failed
        Float rollingAvg;
        if (isPhase2) {
            if (phase1Average != 0.0f) {
                rollingAvg = (phase1Average + (rollingSum / rollingAverageCount)) / 2;
            } else {
                // phase 1 Ping failed, so ignore it
                rollingAvg = rollingSum / rollingAverageCount;
            }
        } else {
            rollingAvg = rollingSum / rollingAverageCount;
        }
        String rollingString = formatFloatString(rollingAvg.toString());
        uiServices.setResults(Constants.THREAD_WRITE_LATENCY_DATA, message, rollingString, false, false);
    }

    /**
     * Consumes one line of ping output, updating the rolling average for
     * per-packet lines and the loss/min/avg/max fields for summary lines.
     *
     * @param line       a single line of ping output (no trailing newline)
     * @param clientType contains "Phone" for Android/Linux-style output;
     *                   anything else is treated as Windows netbook output
     */
    private void ParseLine(String line, String clientType) {
        int indexStart = -1;
        int indexEnd = -1;
        if (clientType.contains("Phone")) {
            String[] delimited;
            switch (state) {
                case 0:
                    // Loss summary, e.g. "... received, 0% packet loss"
                    indexStart = line.indexOf("received,");
                    if (indexStart != -1) {
                        indexEnd = line.indexOf("%", indexStart + 10);
                        loss = line.substring(indexStart + 10, indexEnd);
                        state = 1;
                        startIndex = 0;
                    } else {
                        // Per-packet line, e.g. "... time=23.4 ms"
                        indexStart = line.indexOf("time=");
                        if (indexStart != -1) {
                            indexEnd = line.indexOf(" ms");
                            if (indexEnd != -1) {
                                String time = line.substring(indexStart + 5, indexEnd);
                                try {
                                    float currentSpeed = Float.parseFloat(time);
                                    rollingSum += currentSpeed;
                                    rollingAverageCount++;
                                    displayRollingAverage();
                                } catch (Exception e) {
                                    // ignore if not a number
                                }
                            }
                        }
                    }
                    break;
                case 1:
                    // Statistics line: "rtt min/avg/max/mdev = a/b/c/d ms"
                    indexStart = line.indexOf("/mdev = ");
                    if (indexStart != -1) {
                        String statsString = line.substring(indexStart + 8, line.length());
                        delimited = statsString.split("/");
                        minimum = delimited[0];
                        average = delimited[1];
                        Float flAverage = Float.valueOf(average);
                        NumberFormat numberFormat = new DecimalFormat("#.0");
                        average = numberFormat.format(flAverage);
                        maximum = delimited[2];
                        state = 2;
                    }
                    break;
                default:
                    break;
            }
        } else {
            // Netbook (Windows-style ping output)
            switch (state) {
                case 0:
                    // Loss summary, e.g. "(0% loss)"
                    indexEnd = line.indexOf("% loss)");
                    if (indexEnd != -1) {
                        indexStart = line.indexOf("(", indexEnd - 6);
                        loss = line.substring(indexStart + 1, indexEnd);
                        state = 1;
                        startIndex = 0;
                    } else {
                        // Per-packet line, e.g. "... time=23ms"
                        indexStart = line.indexOf("time=");
                        if (indexStart != -1) {
                            indexEnd = line.indexOf("ms");
                            if (indexEnd != -1) {
                                String time = line.substring(indexStart + 5, indexEnd);
                                try {
                                    float currentSpeed = Float.parseFloat(time);
                                    rollingSum += currentSpeed;
                                    rollingAverageCount++;
                                } catch (Exception e) {
                                    // ignore if not a number
                                }
                            }
                        }
                    }
                    break;
                case 1:
                    // "Minimum = Xms, Maximum = Yms, Average = Zms"
                    indexStart = line.indexOf("Minimum = ");
                    if (indexStart != -1) {
                        indexEnd = line.indexOf("ms", indexStart + 10);
                        if (indexEnd != -1) {
                            minimum = line.substring(indexStart + 10, indexEnd);
                        }
                        indexStart = line.indexOf("Maximum = ", indexEnd);
                        if (indexStart != -1) {
                            indexEnd = line.indexOf("ms", indexStart + 10);
                            if (indexEnd != -1) {
                                maximum = line.substring(indexStart + 10, indexEnd);
                            }
                            indexStart = line.indexOf("Average = ", indexEnd);
                            if (indexStart != -1) {
                                indexEnd = line.indexOf("ms", indexStart + 10);
                                if (indexEnd != -1) {
                                    average = line.substring(indexStart + 10, indexEnd);
                                    state = 2;
                                }
                            }
                        }
                    }
                    break;
                default:
                    break;
            }
        }
    }

    /**
     * Splits a chunk of process output on newlines and feeds each line to
     * {@link #ParseLine}. The chunk may end mid-line (no trailing newline).
     *
     * @param lineout    raw text read from the ping process
     * @param clientType see {@link #ParseLine}
     */
    public void ProcessOutput(String lineout, String clientType) {
        int startIndex = 0;
        int endIndex;
        while (startIndex < lineout.length()) {
            endIndex = lineout.indexOf("\n", startIndex);
            if (endIndex != -1) {
                ParseLine(lineout.substring(startIndex, endIndex), clientType);
                startIndex = endIndex + 1;
            } else {
                // FIX: the original used lineout.length() - 1 as the end index,
                // silently dropping the last character of a trailing line that
                // had no newline (e.g. "...12 ms" arrived as "...12 m" and the
                // sample or summary failed to parse).
                ParseLine(lineout.substring(startIndex), clientType);
                startIndex = lineout.length();
            }
        }
    }
}
| |
package filepickerlibrary.adapter;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.os.AsyncTask;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.afollestad.materialdialogs.MaterialDialog;
import com.afollestad.materialdialogs.Theme;
//import com.github.developerpaul123.filepickerlibrary.R;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.lang.ref.WeakReference;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import edu.cmu.cs.faceswap.R;
import filepickerlibrary.enums.Scope;
/**
* Created by Paul on 10/3/2015.
* <p/>
* Recycler adapter for the recycler view in the Material Design File Picker activity.
*/
public class FileRecyclerViewAdapter extends RecyclerView.Adapter {
// View-type sentinels consumed by onCreateViewHolder().
public static final int TYPE_HEADER = 123;
public static final int TYPE_ITEM = 124;
// Icon used for folder entries.
private final Drawable folderDrawable;
// Index of the currently highlighted row; -1 when nothing is selected (set in constructor).
private int selectedPosition;
// Padding dimension loaded from resources for file icons.
private final float iconPadding;
// Whether the picker shows all files or directories only.
private final Scope mFileType;
// Raw listing as handed to the constructor (unfiltered).
private final File[] mFiles;
// Receiver for row-click events; may be null.
private final FileRecyclerViewAdapter.Callback mCallback;
// Shared click listener: forwards the clicked row's position (stored in the
// view's tag under R.id.POSITION_KEY) to the callback, if one is registered.
private final View.OnClickListener viewClickListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mCallback != null) {
mCallback.onItemClicked(view, (Integer) view.getTag(R.id.POSITION_KEY));
}
}
};
// Inflated once from file_info_layout in the constructor; presumably backs
// a file-info dialog — confirm against the rest of the class.
private final View customView;
// Working list backing the adapter; filtered in the constructor when
// mFileType == Scope.DIRECTORIES.
private final List<File> fileList;
private final Context context;
/**
 * Creates the adapter.
 * <p/>
 * When {@code scopeType} is {@link Scope#DIRECTORIES}, every entry whose
 * path carries a file extension is filtered out of the backing list.
 *
 * @param context   context used for resources and layout inflation
 * @param files     the files to display
 * @param scopeType scope of the picker (all files vs. directories only)
 * @param callback  row-click callback; may be null
 */
public FileRecyclerViewAdapter(Context context, File[] files, Scope scopeType, FileRecyclerViewAdapter.Callback callback) {
    this.context = context;
    mFiles = files;
    iconPadding = context.getResources().getDimension(R.dimen.file_picker_lib_default_icon_padding);
    mFileType = scopeType;
    selectedPosition = -1;
    folderDrawable = context.getResources().getDrawable(R.drawable.fplib_ic_folder);
    fileList = new ArrayList<>(Arrays.asList(files));
    if (mFileType == Scope.DIRECTORIES) {
        // FIX: iterate backwards so removals do not skip elements. The
        // original forward loop removed fileList.get(i) and then advanced i,
        // skipping the element that shifted into slot i — consecutive
        // non-directory entries could survive the filter.
        for (int i = fileList.size() - 1; i >= 0; i--) {
            if (fileExt(fileList.get(i).getPath()) != null) {
                fileList.remove(i);
            }
        }
    }
    mCallback = callback;
    customView = LayoutInflater.from(context).inflate(R.layout.file_info_layout, null);
}
/**
 * Extracts the lower-cased file extension (including the leading dot) from a
 * path or URL, or returns {@code null} when there is none. A query string
 * ("?") is stripped first; URL-encoded suffixes ("%") and trailing path
 * segments ("/") are trimmed off the extension.
 *
 * @param url the file path or URL
 * @return the extension such as ".jpg", or {@code null} if absent
 */
private String fileExt(String url) {
    int query = url.indexOf("?");
    if (query > -1) {
        url = url.substring(0, query);
    }
    int dot = url.lastIndexOf(".");
    if (dot == -1) {
        return null;
    }
    String ext = url.substring(dot);
    int percent = ext.indexOf("%");
    if (percent > -1) {
        ext = ext.substring(0, percent);
    }
    int slash = ext.indexOf("/");
    if (slash > -1) {
        ext = ext.substring(0, slash);
    }
    return ext.toLowerCase();
}
/**
 * From the Google examples: decodes an encoded image byte array into a
 * {@link Bitmap} subsampled to approximately the required width and height.
 * A bounds-only decode pass reads the dimensions, then a full decode runs
 * with the computed {@code inSampleSize}.
 *
 * @param picture   the encoded picture byte array
 * @param reqWidth  the required width
 * @param reqHeight the required height
 * @return the decoded, subsampled Bitmap
 */
public static Bitmap decodeSampledBitmapFromByteArray(byte[] picture,
                                                      int reqWidth, int reqHeight) {
    BitmapFactory.Options opts = new BitmapFactory.Options();
    // Pass 1: read only the image dimensions, allocating no pixels.
    opts.inJustDecodeBounds = true;
    BitmapFactory.decodeByteArray(picture, 0, picture.length, opts);
    // Pass 2: decode for real at the computed subsampling factor.
    opts.inSampleSize = calculateInSampleSize(opts, reqWidth, reqHeight);
    opts.inJustDecodeBounds = false;
    return BitmapFactory.decodeByteArray(picture, 0, picture.length, opts);
}
/**
 * Computes a {@code BitmapFactory.Options.inSampleSize} value so that the
 * decoded image is no smaller than the requested dimensions.
 *
 * @param options   options whose outWidth/outHeight were filled by a
 *                  bounds-only decode
 * @param reqWidth  the required width
 * @param reqHeight the required height
 * @return the subsampling factor (1 when the image already fits)
 */
public static int calculateInSampleSize(
        BitmapFactory.Options options, int reqWidth, int reqHeight) {
    // Raw height and width of the source image.
    final int height = options.outHeight;
    final int width = options.outWidth;
    int inSampleSize = 1;
    if (height > reqHeight || width > reqWidth) {
        // Round each axis ratio, then take the smaller one so both final
        // dimensions stay larger than or equal to the requested size.
        final int heightRatio = Math.round((float) height / (float) reqHeight);
        final int widthRatio = Math.round((float) width / (float) reqWidth);
        inSampleSize = Math.min(heightRatio, widthRatio);
    }
    return inSampleSize;
}
/**
 * Encodes a bitmap to a byte array.
 *
 * @param bitmap the bitmap to compress
 * @param format the compression format for the Bitmap
 * @return the encoded bytes
 */
public static byte[] encodeBitmapToArray(Bitmap bitmap, Bitmap.CompressFormat format) {
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    // Quality 100: the previous value of 0 requested maximum compression,
    // producing visibly degraded JPEG thumbnails (quality is ignored for PNG).
    bitmap.compress(format, 100, outputStream);
    return outputStream.toByteArray();
}
/**
 * Inflates and wraps the row layout matching the requested view type.
 *
 * @param parent   the RecyclerView the row will be attached to
 * @param viewType TYPE_ITEM or TYPE_HEADER
 * @return a FileViewHolder or FileHeaderViewHolder
 */
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
    final LayoutInflater inflater = LayoutInflater.from(context);
    if (viewType == TYPE_HEADER) {
        View header = inflater.inflate(R.layout.file_list_header_view, parent, false);
        return new FileRecyclerViewAdapter.FileHeaderViewHolder(header);
    }
    if (viewType == TYPE_ITEM) {
        View item = inflater.inflate(R.layout.file_list_item, parent, false);
        return new FileRecyclerViewAdapter.FileViewHolder(item);
    }
    // Unknown view type: programming error.
    throw new RuntimeException(context.getString(R.string.file_picker_recycler_adapter_view_holder_type_error));
}
/**
 * Binds either a normal file row or the "Folders" header row.
 *
 * NOTE(review): when a header is shown it occupies position 0, yet
 * getItem(position) indexes fileList directly with the adapter position —
 * confirm that fileList accounts for the header row, otherwise item rows are
 * offset by one.
 *
 * @param holder   recycled holder (FileViewHolder or FileHeaderViewHolder)
 * @param position adapter position of the row being bound
 */
@Override
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
    File file = getItem(position);
    if (holder instanceof FileRecyclerViewAdapter.FileViewHolder) {
        FileRecyclerViewAdapter.FileViewHolder viewHolder = (FileRecyclerViewAdapter.FileViewHolder) holder;
        viewHolder.fileTitle.setText(file.getName());
        viewHolder.itemView.setTag(R.id.POSITION_KEY, position);
        viewHolder.itemView.setOnClickListener(viewClickListener);
        // Highlight the currently selected row.
        if (selectedPosition == position) {
            viewHolder.itemView.setBackgroundColor(context.getResources()
                    .getColor(R.color.card_detailing));
        } else {
            viewHolder.itemView.setBackgroundColor(context.getResources()
                    .getColor(android.R.color.background_light));
        }
        // Effectively-final copy of the position, captured by the anonymous
        // click listener below.
        final int i = position;
        viewHolder.fileInfoButton.setBackgroundDrawable(getFileDrawable(R.drawable.fplib_ic_action_info));
        viewHolder.fileInfoButton.setClickable(true);
        // Info button: pops a dialog showing size, last-modified date and path.
        viewHolder.fileInfoButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                TextView fileSize = (TextView) customView.findViewById(R.id.file_info_size);
                TextView fileDate = (TextView) customView.findViewById(R.id.file_info_date_created);
                TextView filePath = (TextView) customView.findViewById(R.id.file_info_path);
                File file = fileList.get(i);
                if (!file.isDirectory()) {
                    fileSize.setText(String.format(context.getString(R.string.file_picker_adapter_size_string), file.length()));
                } else {
                    // Directory sizes require a recursive walk; done off the UI thread.
                    new GetFileSizeTask(fileSize, file, context.getString(R.string.file_picker_adapter_size_string)).execute();
                }
                Calendar cal = Calendar.getInstance();
                cal.setTimeInMillis(file.lastModified());
                DateFormat df = DateFormat.getDateInstance();
                fileDate.setText(String.format(context.getString(R.string.file_picker_adapter_last_modified_string),
                        df.format(cal.getTime())));
                filePath.setText(String.format(context.getString(R.string.file_picker_adapter_file_path_string),
                        file.getAbsolutePath()));
                new MaterialDialog.Builder(v.getContext())
                        .title(String.format(context.getString(R.string.file_picker_file_info_dialog_file_path),
                                fileList.get(i).getName()))
                        .customView(customView, true)
                        .theme(Theme.LIGHT)
                        .show();
            }
        });
        if (mFileType == Scope.ALL) {
            viewHolder.fileTitle.setText(fileList.get(i).getName());
            if (!fileList.get(i).isDirectory()) {
                viewHolder.fileInfo.setText(String.format(context.getString(R.string.file_picker_adapter_file_size_only_string),
                        fileList.get(i).length()));
            }
            // Pick an icon from the file extension; image files instead get a
            // thumbnail rendered asynchronously by BitmapWorkerTask.
            String fileExt = fileExt(fileList.get(i).toString());
            if (fileList.get(i).isDirectory()) {
                viewHolder.fileImage.setBackgroundDrawable(getFileDrawable(R.drawable.fplib_ic_action_file_folder));
            } else {
                if (fileExt != null) {
                    if (fileExt.equalsIgnoreCase(".doc")) {
                        viewHolder.fileImage.setBackgroundDrawable(getFileDrawable(R.drawable.fplib_ic_doc_file));
                    } else if (fileExt.equalsIgnoreCase(".docx")) {
                        viewHolder.fileImage.setBackgroundDrawable(getFileDrawable(R.drawable.fplib_ic_doc_file));
                    } else if (fileExt.equalsIgnoreCase(".xls")) {
                        viewHolder.fileImage.setBackgroundDrawable(getFileDrawable(R.drawable.fplib_ic_xls_file));
                    } else if (fileExt.equalsIgnoreCase(".xlsx")) {
                        viewHolder.fileImage.setBackgroundDrawable(getFileDrawable(R.drawable.fplib_ic_xlsx_file));
                    } else if (fileExt.equalsIgnoreCase(".xml")) {
                        viewHolder.fileImage.setBackgroundDrawable(getFileDrawable(R.drawable.fplib_ic_xml_file));
                    } else if (fileExt.equalsIgnoreCase(".html")) {
                        viewHolder.fileImage.setBackgroundDrawable(getFileDrawable(R.drawable.fplib_ic_html_file));
                    } else if (fileExt.equalsIgnoreCase(".pdf")) {
                        viewHolder.fileImage.setBackgroundDrawable(getFileDrawable(R.drawable.fplib_ic_pdf_file));
                    } else if (fileExt.equalsIgnoreCase(".txt")) {
                        viewHolder.fileImage.setBackgroundDrawable(getFileDrawable(R.drawable.fplib_ic_txt_file));
                    } else if (fileExt.equalsIgnoreCase(".jpeg")) {
                        viewHolder.fileImage.setBackgroundDrawable(context.getResources()
                                .getDrawable(R.drawable.fplib_rectangle));
                        new BitmapWorkerTask(viewHolder.fileImage, Bitmap.CompressFormat.JPEG).execute(fileList.get(i));
                    } else if (fileExt.equalsIgnoreCase(".jpg")) {
                        viewHolder.fileImage.setBackgroundDrawable(context.getResources()
                                .getDrawable(R.drawable.fplib_rectangle));
                        new BitmapWorkerTask(viewHolder.fileImage, Bitmap.CompressFormat.JPEG).execute(fileList.get(i));
                    } else if (fileExt.equalsIgnoreCase(".png")) {
                        viewHolder.fileImage.setBackgroundDrawable(context.getResources()
                                .getDrawable(R.drawable.fplib_rectangle));
                        new BitmapWorkerTask(viewHolder.fileImage, Bitmap.CompressFormat.PNG).execute(fileList.get(i));
                    } else {
                        // Unknown extension: generic file icon.
                        viewHolder.fileImage.setBackgroundDrawable(context.getResources()
                                .getDrawable(R.drawable.fplib_ic_default_file));
                    }
                }
            }
        } else if (mFileType == Scope.DIRECTORIES) {
            // Directory-only mode: only directories are rendered with content.
            if (fileList.get(i).isDirectory()) {
                viewHolder.fileImage.setBackgroundDrawable(folderDrawable);
                viewHolder.fileTitle.setText(fileList.get(i).getName());
            }
        }
    } else if (holder instanceof FileRecyclerViewAdapter.FileHeaderViewHolder) {
        // Header row: static "Folders" label.
        FileRecyclerViewAdapter.FileHeaderViewHolder fileHeaderViewHolder = (FileRecyclerViewAdapter.FileHeaderViewHolder) holder;
        fileHeaderViewHolder.text.setText("Folders");
        fileHeaderViewHolder.image.setBackgroundColor(context.getResources().getColor(android.R.color.background_light));
    }
}
/**
 * Returns the view type for the given adapter position: the header occupies
 * position 0, and only when the listing contains at least one directory;
 * every other row is a plain item.
 *
 * (The original code had two branches that both returned TYPE_ITEM; they are
 * collapsed here — behavior is unchanged.)
 *
 * @param position adapter position
 * @return TYPE_HEADER or TYPE_ITEM
 */
@Override
public int getItemViewType(int position) {
    if (position == 0 && fileList.size() > 0 && directoryExists(mFiles)) {
        return TYPE_HEADER;
    }
    return TYPE_ITEM;
}
/** @return the number of rows, one per entry in fileList. */
@Override
public int getItemCount() {
    // NOTE(review): when the "Folders" header is shown (see getItemViewType),
    // confirm fileList contains a corresponding entry for position 0,
    // otherwise the header hides the last file.
    return fileList.size();
}
/**
 * Checks if the files contain a directory.
 *
 * @param files the files to inspect; may be {@code null} (e.g. the result of
 *              {@code File.listFiles()} on an unreadable directory)
 * @return true if there is at least one file that is a directory
 */
public boolean directoryExists(File[] files) {
    // Guard against a null array: File.listFiles() returns null on I/O error,
    // which previously caused an NPE here.
    if (files == null) {
        return false;
    }
    for (File f : files) {
        if (f.isDirectory()) {
            return true;
        }
    }
    return false;
}
/**
 * Returns the file backing the given adapter position.
 *
 * @param i adapter position
 * @return the file at that index of fileList
 */
public File getItem(int i) {
    return fileList.get(i);
}
/**
 * Builds a circular background with the given icon resource layered on top,
 * inset by iconPadding on all four sides.
 *
 * @param fileResource drawable resource id of the icon
 * @return the composed LayerDrawable
 */
private Drawable getFileDrawable(int fileResource) {
    Drawable firstLayer = context.getResources().getDrawable(fileResource);
    LayerDrawable drawable = new LayerDrawable(new Drawable[]{
            context.getResources().getDrawable(R.drawable.fplib_circle),
            firstLayer
    });
    // Layer index 1 is the icon; index 0 is the circle background.
    drawable.setLayerInset(1, (int) iconPadding, (int) iconPadding,
            (int) iconPadding, (int) iconPadding);
    return drawable;
}
/** Appends a file to the list and refreshes the whole view. */
public void addFile(File file) {
    fileList.add(file);
    notifyDataSetChanged();
}
/** Removes the file at the given position and refreshes the whole view. */
public void removeFile(int position) {
    fileList.remove(position);
    notifyDataSetChanged();
}
/** @return the adapter position currently highlighted as selected. */
public int getSelectedPosition() {
    return selectedPosition;
}
/** Marks the given position as selected and redraws all rows. */
public void setSelectedPosition(int i) {
    selectedPosition = i;
    notifyDataSetChanged();
}
/** Holds the widgets of the "Folders" header row. */
public static class FileHeaderViewHolder extends RecyclerView.ViewHolder {
    // Assigned only in the constructor, so declared final.
    final ImageView image;
    final TextView text;

    public FileHeaderViewHolder(View itemView) {
        super(itemView);
        image = (ImageView) itemView.findViewById(R.id.file_item_image_view);
        text = (TextView) itemView.findViewById(R.id.file_item_file_name);
    }
}
/** Holds the widgets of a normal file row. */
public static class FileViewHolder extends RecyclerView.ViewHolder {
    // Assigned only in the constructor, so declared final.
    final TextView fileTitle;
    final TextView fileInfo;
    final ImageView fileImage;
    final ImageView fileInfoButton;

    public FileViewHolder(View view) {
        super(view);
        fileInfo = (TextView) view.findViewById(R.id.file_item_file_info);
        fileTitle = (TextView) view.findViewById(R.id.file_item_file_name);
        fileImage = (ImageView) view.findViewById(R.id.file_item_image_view);
        fileInfoButton = (ImageView) view.findViewById(R.id.file_item_file_info_button);
    }
}
/** Click callback; subclasses override onItemClicked (default is a no-op). */
public abstract static class Callback {
    public void onItemClicked(View item, int position) {
    }
}
/**
 * Loads a bitmap off the UI thread: decodes the file, re-encodes it in the
 * requested format, decodes a down-sampled 54x54 thumbnail, then sets it on
 * the (weakly referenced) ImageView.
 */
private class BitmapWorkerTask extends AsyncTask<File, Void, Bitmap> {
    private final WeakReference<ImageView> imageViewReference;
    private byte[] data;
    private final Bitmap.CompressFormat mFormat;

    public BitmapWorkerTask(ImageView imageView, Bitmap.CompressFormat format) {
        // Show a placeholder while the thumbnail loads.
        imageView.setBackgroundDrawable(imageView.getContext()
                .getResources().getDrawable(R.drawable.fplib_rectangle));
        // Use a WeakReference to ensure a recycled/destroyed ImageView can be
        // garbage collected while the task runs.
        imageViewReference = new WeakReference<ImageView>(imageView);
        mFormat = format;
    }

    /**
     * Decodes the image in the background.
     *
     * @return the thumbnail, or null if the file is not a decodable image
     */
    @Override
    protected Bitmap doInBackground(File... params) {
        Bitmap mBitmap = BitmapFactory.decodeFile(params[0].getAbsolutePath());
        if (mBitmap == null) {
            // Corrupt file or unsupported format.
            return null;
        }
        data = encodeBitmapToArray(mBitmap, mFormat);
        return decodeSampledBitmapFromByteArray(data, 54, 54);
    }

    /** Once complete, sets the bitmap if the ImageView is still around. */
    @Override
    protected void onPostExecute(Bitmap bitmap) {
        // Dereference the weak reference exactly once, so the view cannot be
        // collected between a null-check and its use (the original code
        // called get() twice).
        ImageView imageView = imageViewReference.get();
        if (imageView != null && bitmap != null) {
            imageView.setImageBitmap(bitmap);
            imageView.setScaleType(ImageView.ScaleType.CENTER_CROP);
        }
    }
}
/**
 * Computes the total size of a directory tree in the background and writes a
 * formatted result into a weakly referenced TextView.
 */
private class GetFileSizeTask extends AsyncTask<Void, Void, Long> {
    private final WeakReference<TextView> textViewWeakReference;
    private final File file;
    private final String formatString;

    private GetFileSizeTask(TextView textView, File f, String string) {
        file = f;
        textViewWeakReference = new WeakReference<TextView>(textView);
        formatString = string;
    }

    /**
     * Recursively sums the sizes of all regular files under the directory.
     * Uses a long accumulator: the original int overflowed past 2 GiB even
     * though the method returns long.
     */
    private long getDirectorySize(File directory) {
        File[] files = directory.listFiles();
        if (files == null) {
            // listFiles() returns null for non-directories and on I/O errors;
            // previously this caused an NPE.
            return 0L;
        }
        long size = 0L;
        // 'child' rather than 'file' — the original loop variable shadowed
        // the 'file' field.
        for (File child : files) {
            if (child.isDirectory()) {
                size += getDirectorySize(child);
            } else {
                size += child.length();
            }
        }
        return size;
    }

    @Override
    protected Long doInBackground(Void... params) {
        return getDirectorySize(file);
    }

    /** Writes the formatted size if the TextView is still alive. */
    @Override
    protected void onPostExecute(Long aLong) {
        TextView textView = textViewWeakReference.get();
        if (textView != null) {
            textView.setText(String.format(formatString, aLong));
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.river.container;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.transform.Source;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.apache.river.container.config.*;
import org.xml.sax.SAXException;
/**
* Bootstrap loader for the container. Performs roughly the following: <ul>
* <li>Based on the configuration parameter fed in at the command line,
* determine the configuration directory and the config file.</li> <li>Read the
* configuration file</li> <li>Based on the classpath declared in the config
* file, create the container's classloader.</li> <li>Using that classloader,
* create the context.</li> <li>Load any command-line parameters into the
* context</li>
* <li>Create all the elements (beans, discovery sets, etc) that are called out
* in the config file and put them into the context. This will cause those beans
 * to set up and initialize themselves.</li> </ul>
*
* @author trasukg
*/
public class Bootstrap {

    private static final Logger log
            = Logger.getLogger(Bootstrap.class.getName(), MessageNames.BUNDLE_NAME);

    /**
     * Command-line entry point. Delegates to
     * {@link #initializeContainer(String[])} and exits with status -1 on any
     * initialization failure.
     *
     * @param args command-line arguments; stored in the context for components
     */
    public static void main(String args[]) {
        try {
            initializeContainer(args);
        } catch (InvocationTargetException ex) {
            // Unwrap reflective invocation failures so the log shows the real cause.
            log.log(Level.SEVERE, MessageNames.INITIALIZATION_EXCEPTION, ex.getCause());
            ex.printStackTrace();
            System.exit(-1);
        } catch (Exception ex) {
            log.log(Level.SEVERE, MessageNames.INITIALIZATION_EXCEPTION, ex);
            ex.printStackTrace();
            System.exit(-1);
        }
    }

    /**
     * Creates a JAXB unmarshaller for container configuration files,
     * validating against the schema bundled at {@code /schemas/config.xsd}.
     *
     * @throws SAXException  if the schema resource is missing or cannot be parsed
     * @throws JAXBException if the JAXB context cannot be created
     */
    static Unmarshaller createConfigUnmarshaller() throws SAXException, JAXBException {
        JAXBContext ctx = JAXBContext.newInstance("org.apache.river.container.config");
        Unmarshaller um = ctx.createUnmarshaller();
        InputStream schemaStream = Bootstrap.class.getResourceAsStream("/schemas/config.xsd");
        if (schemaStream == null) {
            // Fail with a clear message rather than an NPE deep inside newSchema().
            throw new SAXException("Schema resource /schemas/config.xsd not found on classpath");
        }
        SchemaFactory sf = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        Schema schema = sf.newSchema(new StreamSource(schemaStream));
        um.setSchema(schema);
        return um;
    }

    /**
     * Converts a whitespace-separated classpath string into file URLs.
     *
     * @param classpathStr whitespace-separated file/directory paths
     * @return one URL per path element
     * @throws MalformedURLException if a path cannot be converted to a URL
     */
    static URL[] findClasspathURLS(String classpathStr) throws MalformedURLException {
        StringTokenizer tok = new StringTokenizer(classpathStr, Strings.WHITESPACE_SEPARATORS);
        List<URL> pathElements = new ArrayList<URL>();
        while (tok.hasMoreTokens()) {
            File f = new File(tok.nextToken());
            pathElements.add(f.toURI().toURL());
        }
        return pathElements.toArray(new URL[pathElements.size()]);
    }

    /**
     * Builds the map of named classloaders declared in the configuration. The
     * bootstrap classloader is always registered under
     * {@code Strings.BOOTSTRAP_CLASS_LOADER}.
     *
     * @throws ConfigurationException on duplicate classpath ids
     */
    private static Map<String, ClassLoader> createClassLoaders(ContainerConfig config) throws MalformedURLException {
        Map<String, ClassLoader> classLoaders = new HashMap<String, ClassLoader>();
        classLoaders.put(Strings.BOOTSTRAP_CLASS_LOADER, Bootstrap.class.getClassLoader());
        /* Index the declared classpaths by id, rejecting duplicates. */
        List<String> seen = new LinkedList<String>();
        Map<String, Classpath> classpaths = new HashMap<String, Classpath>();
        for (Classpath classpath : config.getClasspath()) {
            if (classpaths.containsKey(classpath.getId())) {
                throw new ConfigurationException(MessageNames.DUPLICATE_CLASSPATH, classpath.getId());
            }
            classpaths.put(classpath.getId(), classpath);
        }
        for (String id : classpaths.keySet()) {
            resolveClassLoader(classLoaders, seen, classpaths, id);
        }
        return classLoaders;
    }

    /**
     * Resolves (and caches) the classloader for the given classpath id,
     * recursively resolving its parent first. The {@code seen} list tracks the
     * ids on the current resolution path so circular parent references are
     * detected instead of recursing forever.
     *
     * @throws ConfigurationException on circular or undefined classpath references
     */
    private static ClassLoader resolveClassLoader(Map<String, ClassLoader> classLoaders,
            List<String> seen,
            Map<String, Classpath> classpaths,
            String id) throws MalformedURLException {
        if (classLoaders.containsKey(id)) {
            return classLoaders.get(id);
        }
        if (seen.contains(id)) {
            throw new ConfigurationException(MessageNames.CIRCULAR_CLASSPATH, id);
        }
        // Mark this id as in-progress to catch cycles in the parent chain.
        seen.add(id);
        Classpath classpath = classpaths.get(id);
        if (classpath == null) {
            throw new ConfigurationException(MessageNames.CLASSPATH_UNDEFINED, id);
        }
        String parentClasspathId = classpath.getParent();
        ClassLoader parentClassLoader = null;
        if (parentClasspathId != null && !Strings.EMPTY.equals(parentClasspathId)) {
            parentClassLoader = resolveClassLoader(classLoaders, seen, classpaths, parentClasspathId);
        } else {
            /* No declared parent: use the 'extension' classloader. */
            parentClassLoader = Bootstrap.class.getClassLoader().getParent();
        }
        URL[] classpathUrls = findClasspathURLS(classpath.getValue());
        SettableCodebaseClassLoader classLoader = new SettableCodebaseClassLoader(classpathUrls,
                parentClassLoader);
        classLoaders.put(id, classLoader);
        log.log(Level.FINE, MessageNames.CONFIGURED_CLASSPATH, new Object[]{
            id,
            Utils.format(classpathUrls)});
        seen.remove(id);
        return classLoader;
    }

    /**
     * Performs the container bootstrap sequence: read the core configuration,
     * build the classloaders, reflectively create the context inside the
     * container classloader, process the configured elements, then signal
     * init-complete. All context access is reflective because the context
     * class lives in the container classloader, not the bootstrap one.
     */
    static void initializeContainer(String args[]) throws SAXException, JAXBException, FileNotFoundException, MalformedURLException, ClassNotFoundException, NoSuchMethodException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, InstantiationException, ConfigurationException, Exception {
        Logger.getLogger(CommandLineArgumentParser.class.getName()).setLevel(Level.ALL);
        ContainerConfig coreConfig = readCoreConfig();
        Map<String, ClassLoader> classLoaders = createClassLoaders(coreConfig);
        ClassLoader containerClassLoader = classLoaders.get(Strings.CONTAINER_CLASS_LOADER);
        /* Create the context object inside the container classloader. */
        Object context = Class.forName(Strings.CONTEXT_CLASS, true, containerClassLoader).newInstance();
        Method putByNameMethod = context.getClass().getMethod(
                Strings.PUT, new Class[]{String.class, Object.class});
        Method initCompleteMethod = context.getClass().getMethod(Strings.INIT_COMPLETE, new Class[0]);
        Thread.currentThread().setContextClassLoader(containerClassLoader);
        putByNameMethod.invoke(context, Strings.CLASS_LOADERS, (Object) classLoaders);
        /* Store a link to the context in the context itself. */
        putByNameMethod.invoke(context, Strings.CONTEXT, context);
        /* Process the core configuration. */
        processConfiguration(coreConfig, containerClassLoader, context);
        /*
         Command-line args are set after processing the core configuration so
         that the items in the core-config get initialized first.
         */
        putByNameMethod.invoke(context, Strings.COMMAND_LINE_ARGS, (Object) args);
        /* The core configuration itself loads the profile configuration. */
        initCompleteMethod.invoke(context, new Object[0]);
    }

    /**
     * Instantiates every element of the given configuration and stores it in
     * the context. The context and classloader are handled reflectively
     * because the supplied classloader may not be the one that loaded this
     * Bootstrap class: when ProfileConfigReader (called out in
     * core-config.xml) loads the profile configuration, a second Bootstrap
     * class runs inside the container classloader.
     */
    static void processConfiguration(ContainerConfig config, Object classLoader, Object context) throws InvocationTargetException, ConfigurationException, IllegalArgumentException, InstantiationException, ClassNotFoundException, IllegalAccessException, NoSuchMethodException, MalformedURLException, Exception {
        Method putMethod = context.getClass().getMethod(Strings.PUT, new Class[]{Object.class});
        Method putByNameMethod = context.getClass().getMethod(
                Strings.PUT, new Class[]{String.class, Object.class});
        for (Object element : config.getElements()) {
            processElement(element, (ClassLoader) classLoader, putMethod, context, putByNameMethod);
        }
    }

    /**
     * Reads the built-in core configuration from the classpath.
     *
     * @throws FileNotFoundException if the core-config resource is missing
     */
    private static ContainerConfig readCoreConfig() throws SAXException, JAXBException, FileNotFoundException {
        Unmarshaller um = createConfigUnmarshaller();
        InputStream is = Bootstrap.class.getResourceAsStream(Strings.CORE_CONFIG_XML);
        if (is == null) {
            // A missing resource would otherwise surface as an obscure JAXB error.
            throw new FileNotFoundException(Strings.CORE_CONFIG_XML);
        }
        return (ContainerConfig) um.unmarshal(is);
    }

    /**
     * Instantiates a single configuration element and registers it in the
     * context, either anonymously or under its configured name.
     *
     * @throws ConfigurationException for unsupported element types
     */
    private static void processElement(Object element, ClassLoader classLoader, Method putMethod, Object context, Method putByNameMethod) throws ClassNotFoundException, InstantiationException, InvocationTargetException, ConfigurationException, IllegalAccessException, IllegalArgumentException {
        if (element instanceof Component) {
            Component c = (Component) element;
            Class<?> compClass = Class.forName(c.getClazz(), true, classLoader);
            String name = c.getName();
            Object instance = compClass.newInstance();
            for (Property p : c.getProperty()) {
                setPropertyOnComponent(instance, p.getName(), p.getValue());
                log.log(Level.FINER, MessageNames.SET_PROPERTY_ON_COMPONENT,
                        new Object[]{
                            p.getName(),
                            c.getClazz(),
                            c.getName(),
                            p.getValue()
                        });
            }
            if (name == null || name.trim().length() == 0) {
                putMethod.invoke(context, instance);
            } else {
                putByNameMethod.invoke(context, name, instance);
            }
        } else if (element instanceof Property) {
            Property p = (Property) element;
            putByNameMethod.invoke(context, p.getName(), p.getValue());
        } else if (element instanceof DiscoveryContextType) {
            /* Just drop the element into the context under the appropriate name. */
            DiscoveryContextType dct = (DiscoveryContextType) element;
            if (dct.getName() == null) {
                putByNameMethod.invoke(context, Strings.DEFAULT_DISCOVERY_CONTEXT, dct);
            } else {
                putByNameMethod.invoke(context, dct.getName(), dct);
            }
        } else {
            throw new ConfigurationException(MessageNames.UNSUPPORTED_ELEMENT, element.getClass().getName());
        }
    }

    /**
     * Sets a bean property on a component instance, converting the string
     * value to the property's declared type.
     *
     * @throws ConfigurationException wrapping any introspection or conversion failure
     */
    private static void setPropertyOnComponent(Object instance, String propertyName, String propertyValue) {
        try {
            BeanInfo info = Introspector.getBeanInfo(instance.getClass());
            PropertyDescriptor pd = findPropertyDescriptor(info, propertyName);
            Object convertedValue = convert(propertyValue, pd.getPropertyType());
            pd.getWriteMethod().invoke(instance, convertedValue);
        } catch (Throwable t) {
            // Preserve the cause so configuration errors remain diagnosable.
            throw new ConfigurationException(t, MessageNames.FAILED_TO_SET_PROPERTY, propertyName, instance.getClass(), propertyValue);
        }
    }

    /** Finds the named property descriptor or fails with IntrospectionException. */
    private static PropertyDescriptor findPropertyDescriptor(BeanInfo info, String propertyName) throws IntrospectionException {
        for (PropertyDescriptor possible : info.getPropertyDescriptors()) {
            if (propertyName.equals(possible.getName())) {
                return possible;
            }
        }
        throw new IntrospectionException(propertyName);
    }

    /**
     * Converts a configuration string to the given target type. Supports
     * boolean, String, int, long, double and float (plus their wrappers).
     *
     * @throws UnsupportedOperationException for any other target type
     */
    private static Object convert(String value, Class targetType) {
        if (targetType.equals(Boolean.class) || targetType.equals(boolean.class)) {
            return Boolean.parseBoolean(value);
        } else if (targetType.equals(String.class)) {
            return value;
        } else if (targetType.equals(Integer.class) || targetType.equals(int.class)) {
            return Integer.parseInt(value);
        } else if (targetType.equals(Long.class) || targetType.equals(long.class)) {
            // long support added for symmetry with the other numeric primitives.
            return Long.parseLong(value);
        } else if (targetType.equals(Double.class) || targetType.equals(double.class)) {
            return Double.parseDouble(value);
        } else if (targetType.equals(Float.class) || targetType.equals(float.class)) {
            return Float.parseFloat(value);
        }
        throw new UnsupportedOperationException("No conversion to " + targetType.getName());
    }
}
| |
package net.mgsx.game.core.helpers.shaders;
import java.nio.IntBuffer;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Cubemap;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.GLTexture;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.glutils.ShaderProgram;
import com.badlogic.gdx.math.Matrix4;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.BufferUtils;
import com.badlogic.gdx.utils.GdxRuntimeException;
import com.badlogic.gdx.utils.Json;
import com.badlogic.gdx.utils.Json.Serializable;
import com.badlogic.gdx.utils.JsonValue;
import com.badlogic.gdx.utils.ObjectMap;
import com.badlogic.gdx.utils.ObjectMap.Entry;
import com.badlogic.gdx.utils.ObjectSet;
import net.mgsx.game.core.Kit;
import net.mgsx.game.core.annotations.Editable;
import net.mgsx.game.core.helpers.FileHelper;
import net.mgsx.game.core.helpers.ShaderProgramHelper;
import net.mgsx.game.core.helpers.StringHelper;
import net.mgsx.game.core.ui.accessors.Accessor;
import net.mgsx.game.core.ui.accessors.AccessorScanner;
/**
* Easy {@link ShaderProgram} editor wrapper.
*
* How to use :
* <ul>
* <li>Create a sub class with mandatory {@link ShaderInfo} annotation </li>
* <li>Add fields with {@link Uniform} and/or {@link Editable}</li>
* <li>Use instances in fields (system ..) with {@link Editable} annotation </li>
* </ul>
*
* Note :
* <ul>
* <li>fields with only {@link Uniform} are sent to the shader in all cases.</li>
 * <li>fields with only {@link Editable} are never sent to the shader but are permitted for data grouping.</li>
 * <li>fields with both {@link Editable} and {@link Uniform} are sent to the shader in edit mode and
 * are injected as hardcoded #define values into the shader code at compilation time.</li>
* </ul>
*
 * Impact on shader code: do not declare uniform variables in the shader code; they are injected
 * automatically at runtime.
*
* @author mgsx
*
*/
// TODO allow #include and other features (#define switches, #version injection ...)
abstract public class ShaderProgramManaged implements Serializable {
/**
 * Editor callback interface. NOTE(review): presumably notified after the
 * shader is (re)loaded — confirm against ShaderProgramManagedEditor usage.
 */
static interface ControlHandler {
    void loaded();
}
// Editor callback; transient so it is not JSON-serialized.
transient ControlHandler handler;
// TODO maybe register an editor globally and allow partial scanning ... ?
// Self-reference exposing this shader in editor UIs via its dedicated editor.
@Editable(editor=ShaderProgramManagedEditor.class)
public final transient ShaderProgramManaged control = this;
/** Metadata describing one active uniform of a compiled program. */
private static class UniformInfo {
    String name;
    /** GL type enum; see https://www.khronos.org/registry/OpenGL-Refpages/gl4/html/glGetActiveUniform.xhtml */
    int type;
    /** always 1 except for arrays of uniforms */
    int size;
    int location;
    // NOTE(review): presumably set once a field accessor is bound to this
    // uniform — confirm in reload() (not visible here).
    boolean bound;
}
/**
 * Base class binding a java field (read through an {@link Accessor}) to a
 * shader uniform. Subclasses implement the type-specific transfer
 * ({@link #update()}), GL type checking ({@link #check()}) and GLSL literal
 * generation ({@link #value()} / {@link #type()}) used for constant injection.
 */
private abstract static class UniformAccessor{
    protected Accessor accessor;
    protected ShaderProgram shader;
    protected UniformInfo info;
    protected boolean freezable;
    protected String name;
    protected String[] only;
    protected boolean enabled;

    /**
     * Attaches this accessor to a compiled program.
     *
     * @return false when the reflected GL uniform type does not match
     */
    public boolean bind(ShaderProgram shader, UniformInfo info) {
        this.shader = shader;
        this.info = info;
        init();
        return check();
    }

    /** Hook for subclasses to (re)read the current field value. */
    protected void init(){}

    /** Sends the current value to the bound shader program. */
    public abstract void update();

    /** @return true when the reflected GL uniform type matches this accessor. */
    public abstract boolean check();

    /** @return the current value as a GLSL literal (for constant injection). */
    public abstract String value();

    /** @return the GLSL type name, e.g. "float" or "vec2". */
    public abstract String type();

    /** Builds a GLSL constructor literal, e.g. {@code vec2(1.0, 2.0)}. */
    protected String inline(String... values) {
        // StringBuilder avoids quadratic String concatenation in the loop.
        StringBuilder sb = new StringBuilder(type()).append('(');
        for (int i = 0; i < values.length; i++) {
            if (i > 0) {
                sb.append(", ");
            }
            sb.append(values[i]);
        }
        return sb.append(')').toString();
    }
}
/** Fallback accessor for unsupported field types: matches no GL type and transfers nothing. */
private static class UAUndefined extends UniformAccessor{
    @Override
    public void update() {
        // NOOP
    }
    @Override
    public boolean check() {
        // Never binds: an undefined accessor matches no uniform type.
        return false;
    }
    @Override
    public String value() {
        return "";
    }
    @Override
    public String type() {
        return "";
    }
}
/** Transfers a Float field to a GLSL {@code float} uniform. */
private static class UAFloat extends UniformAccessor{
    @Override
    public void update() {
        shader.setUniformf(info.location, accessor.get(Float.class));
    }
    @Override
    public boolean check() {
        return info.type == GL20.GL_FLOAT && info.size == 1;
    }
    @Override
    public String value() {
        // GLSL float literal for constant injection.
        return String.valueOf(accessor.get(Float.class));
    }
    @Override
    public String type() {
        return "float";
    }
}
/** Transfers a Vector2 field to a GLSL {@code vec2} uniform. */
private static class UAVector2 extends UniformAccessor{
    Vector2 value;
    @Override
    protected void init() {
        super.init();
        // Cache the Vector2 reference; update() sends its current components.
        value = accessor.get(Vector2.class);
    }
    @Override
    public void update() {
        shader.setUniformf(info.location, value);
    }
    @Override
    public boolean check() {
        return info.type == GL20.GL_FLOAT_VEC2 && info.size == 1;
    }
    @Override
    public String value() {
        init();
        return inline(String.valueOf(value.x), String.valueOf(value.y));
    }
    @Override
    public String type() {
        return "vec2";
    }
}
/** Transfers a Vector3 field to a GLSL {@code vec3} uniform. */
private static class UAVector3 extends UniformAccessor{
    Vector3 value;
    @Override
    protected void init() {
        super.init();
        // Cache the Vector3 reference; update() sends its current components.
        value = accessor.get(Vector3.class);
    }
    @Override
    public void update() {
        shader.setUniformf(info.location, value);
    }
    @Override
    public boolean check() {
        return info.type == GL20.GL_FLOAT_VEC3 && info.size == 1;
    }
    @Override
    public String value() {
        init();
        return inline(String.valueOf(value.x), String.valueOf(value.y), String.valueOf(value.z));
    }
    @Override
    public String type() {
        return "vec3";
    }
}
/** Transfers a Color field to a GLSL {@code vec4} uniform (r, g, b, a). */
private static class UAColor extends UniformAccessor{
    Color value;
    @Override
    protected void init() {
        super.init();
        // Cache the Color reference; update() sends its current components.
        value = accessor.get(Color.class);
    }
    @Override
    public void update() {
        shader.setUniformf(info.location, value);
    }
    @Override
    public boolean check() {
        return info.type == GL20.GL_FLOAT_VEC4 && info.size == 1;
    }
    @Override
    public String value() {
        init();
        return inline(String.valueOf(value.r), String.valueOf(value.g), String.valueOf(value.b), String.valueOf(value.a));
    }
    @Override
    public String type() {
        return "vec4";
    }
}
/** Transfers a Matrix4 field to a GLSL {@code mat4} uniform; constant injection is not supported. */
private static class UAMatrix4 extends UniformAccessor{
    Matrix4 value;
    @Override
    protected void init() {
        super.init();
        // Cache the Matrix4 reference; update() sends its current values.
        value = accessor.get(Matrix4.class);
    }
    @Override
    public void update() {
        shader.setUniformMatrix(info.location, value);
    }
    @Override
    public boolean check() {
        return info.type == GL20.GL_FLOAT_MAT4 && info.size == 1;
    }
    @Override
    public String value() {
        // Matrices cannot be inlined as GLSL constants (yet).
        throw new GdxRuntimeException("inline matrix4 not supported yet");
    }
    @Override
    public String type() {
        return "mat4";
    }
}
/**
 * Base sampler accessor: binds the texture to its texture unit and sets the
 * uniform to that unit's index. Samplers cannot be inlined as constants.
 */
abstract private static class UASampler extends UniformAccessor{
    GLTexture value;
    int unit; // TODO
    @Override
    protected void init() {
        super.init();
        value = accessor.get(GLTexture.class);
    }
    @Override
    public void update() {
        value.bind(unit);
        shader.setUniformi(info.location, unit);
    }
    @Override
    public String value() {
        throw new GdxRuntimeException("inline sampler not supported");
    }
    /** Assigns the texture unit this sampler uses; returns this for chaining. */
    public UASampler unit(int unit){
        this.unit = unit;
        return this;
    }
}
/** Sampler accessor for 2D textures ({@code sampler2D}). */
private static class UASampler2D extends UASampler{
    @Override
    public boolean check() {
        return info.type == GL20.GL_SAMPLER_2D && info.size == 1;
    }
    @Override
    public String type() {
        return "sampler2D";
    }
}
/** Sampler accessor for cubemap textures ({@code samplerCube}). */
private static class UASamplerCube extends UASampler{
    @Override
    public boolean check() {
        return info.type == GL20.GL_SAMPLER_CUBE && info.size == 1;
    }
    @Override
    public String type() {
        return "samplerCube";
    }
}
// Fields used for path persistence (serialized to/from JSON).
private String vs, fs;
// #define configuration flags currently enabled for this shader.
ObjectSet<String> currentConfig = new ObjectSet<String>();
// Metadata from the subclass' mandatory @ShaderInfo annotation.
ShaderInfo shaderInfo;
protected FileHandle vertexShader;
protected FileHandle fragmentShader;
private ShaderProgram shaderProgram;
// NOTE(review): presumably scratch buffers for GL uniform queries — their use
// is in reload(), below this view; confirm.
private IntBuffer result = BufferUtils.newIntBuffer(16);
private IntBuffer type = BufferUtils.newIntBuffer(1);
// When frozen, editable uniform values are injected as compile-time constants
// (see class javadoc); defaults to the annotation's inject() setting.
private boolean frozen = true;
private Array<UniformAccessor> allUniformAccessors;
private Array<UniformAccessor> activeUniformAccessors;
// Number of texture units claimed by sampler uniforms (see end()).
private int samplerUnits;
ObjectSet<String> configs;
// Set when the program must be recompiled on next use.
private boolean invalidated;
/** Reads the subclass' {@link ShaderInfo} annotation; no deep initialization. */
public ShaderProgramManaged() {
    // No deep initialization here because of the JSON serializer (check default).
    // NOTE(review): throws NPE if a subclass omits the mandatory @ShaderInfo.
    shaderInfo = this.getClass().getAnnotation(ShaderInfo.class);
    // Frozen by default except if injection is not possible.
    frozen = shaderInfo.inject();
}
/** Enables/disables frozen mode and forces a shader rebuild on next use. */
public void freeze(boolean frozen){
    this.frozen = frozen;
    invalidate();
}
/** @return whether uniform values are currently injected as compile-time constants. */
public boolean isFrozen(){
    return frozen;
}
/** @return the underlying program, (re)compiling it first if needed. */
public ShaderProgram program() {
    if(shouldBeReloaded()){
        reload();
    }
    return shaderProgram;
}
/** @return true when the program was never compiled or has been invalidated. */
private boolean shouldBeReloaded(){
    return shaderProgram == null || invalidated;
}
/**
 * Binds the shader and sends the uniforms.
 *
 * @return true if the program has been reloaded. Useful to apply changes to
 * dependent objects: e.g. a ShapeRenderer needs to be reconstructed and a
 * Batch needs to be updated.
 */
public boolean begin(){
    boolean hasChanged = false;
    if(shouldBeReloaded()){
        reload();
        hasChanged = true;
    }
    shaderProgram.begin();
    setUniforms();
    return hasChanged;
}
    /**
     * Send uniforms to the shader (shader must be bound before); called during {@link #begin()}.
     * Useful if the shader program is used by a Batch or a ShaderProvider.
     */
    public void setUniforms() {
        if(shouldBeReloaded()){
            reload();
        }
        // re-read non-frozen values every frame, then upload
        for(UniformAccessor ua : activeUniformAccessors){
            if(!ua.freezable) ua.init(); // XXX because value could be updated by client code !!!
            ua.update();
        }
    }
    /** Unbinds the shader and restores the active texture unit to 0. */
    public void end(){
        shaderProgram.end();
        // unbind textures
        // this is mainly a workaround because LibGDX assume current unit is 0 maybe to avoid some calls
        // we just set current unit to zero
        if(samplerUnits > 1){
            Gdx.gl.glActiveTexture(GL20.GL_TEXTURE0);
        }
        // TODO is it necessary to do this ... seams not
        // for(int i=0 ; i<samplerUnits ; i++){
        //     Gdx.gl.glActiveTexture(GL20.GL_TEXTURE0 + i);
        //     Gdx.gl.glBindTexture(GL20.GL_TEXTURE_2D, 0);
        // }
    }
public void dumpVS(){
if(shaderProgram != null){
Gdx.app.log("Shader", "\n" + shaderProgram.getVertexShaderSource());
}
}
public void dumpFS(){
if(shaderProgram != null){
Gdx.app.log("Shader", "\n" + shaderProgram.getFragmentShaderSource());
}
}
    /**
     * Rebuilds the {@link ShaderProgram}: resolves the shader files, prepends config #defines and
     * (when injection is enabled) uniform declarations or frozen #define constants, compiles, then
     * introspects the GL program to bind every enabled {@link UniformAccessor} to its location.
     */
    public void reload()
    {
        invalidated = false;
        // fall back to the annotation-declared paths when none were set or deserialized
        if(vs == null) vs = shaderInfo.vs();
        if(fs == null) fs = shaderInfo.fs();
        this.vertexShader = Gdx.files.internal(vs);
        this.fragmentShader = Gdx.files.internal(fs);
        // scan once : java code won't change (TODO even with code swap for annotation only ?)
        if(allUniformAccessors == null){
            allUniformAccessors = findAllUniformAccessors();
            configs = findConfigs();
        }
        // one #define per enabled config, shared by both stages
        String preVertCode = "";
        String preFragCode = "";
        for(String cfg : currentConfig){
            String code = "#define " + StringHelper.camelCaseToUnderScoreUpperCase(cfg) + "\n";
            preVertCode += code;
            preFragCode += code;
        }
        // an accessor with no 'only' clause is always enabled; otherwise it needs a matching config
        for(UniformAccessor ua : allUniformAccessors) {
            if(ua.only.length == 0){
                ua.enabled = true;
            }else{
                ua.enabled = false;
                for(String only : ua.only){
                    for(String cfg : currentConfig){
                        if(cfg.equals(only)){
                            ua.enabled = true;
                        }
                    }
                }
            }
        }
        if(shaderInfo.inject()) {
            // do the injections: frozen freezable uniforms become compile-time constants,
            // the others become regular uniform declarations
            for(UniformAccessor ua : allUniformAccessors) {
                String code;
                if(frozen && ua.freezable){
                    code = "#define " + ua.name + " " + ua.value() + "\n";
                }else{
                    code = "uniform " + ua.type() + " " + ua.name + ";\n";
                }
                preVertCode += code;
                preFragCode += code;
            }
        }
        // reset GLSL error line numbering so compiler messages match the original files
        preVertCode += "#line 0\n";
        preFragCode += "#line 0\n";
        // temporarily extend the global prepend hooks, compile, then restore them
        String preVertexCodeBefore = ShaderProgram.prependVertexCode;
        String preFragmentCodeBefore = ShaderProgram.prependFragmentCode;
        ShaderProgram.prependVertexCode = preVertexCodeBefore + preVertCode;
        ShaderProgram.prependFragmentCode = preFragmentCodeBefore + preFragCode;
        shaderProgram = ShaderProgramHelper.reload(shaderProgram, vertexShader, fragmentShader);
        ShaderProgram.prependVertexCode = preVertexCodeBefore;
        ShaderProgram.prependFragmentCode = preFragmentCodeBefore;
        // Obtain the handle XXX not exposed ...
        result.clear();
        shaderProgram.begin();
        Gdx.gl20.glGetIntegerv(GL20.GL_CURRENT_PROGRAM, result);
        shaderProgram.end();
        int handle = result.get(0);
        // int handle = ReflectionHelper.get(shaderProgram, "program", Integer.class);
        // first scan program to get all uniforms
        result.clear();
        Gdx.gl20.glGetProgramiv(handle, GL20.GL_ACTIVE_UNIFORMS, result);
        int numUniforms = result.get(0);
        ObjectMap<String, UniformInfo> uniformInfos = new ObjectMap<String, UniformInfo>();
        for (int i = 0; i < numUniforms; i++) {
            result.clear();
            result.put(0, 1); // pre-seed element 0 (buffer is used as an in/out parameter below)
            type.clear();
            UniformInfo info = new UniformInfo();
            String name = Gdx.gl20.glGetActiveUniform(handle, i, result, type);
            info.location = Gdx.gl20.glGetUniformLocation(handle, name);
            info.name = name;
            info.size = result.get(0);
            info.type = type.get(0);
            uniformInfos.put(name, info);
        }
        // bind enabled, non-frozen accessors to the uniforms found in the program
        activeUniformAccessors = new Array<UniformAccessor>();
        for(UniformAccessor ua : allUniformAccessors) {
            if(!ua.enabled) continue;
            UniformInfo i = uniformInfos.get(ua.name);
            boolean frozenUniform = ua.freezable && frozen; // frozen ones were inlined, nothing to bind
            if(!frozenUniform){
                if(i == null){
                    Gdx.app.error("Shader", "missing uniform variable in GLSL code : " + ua.name);
                }else if(!ua.bind(shaderProgram, i)){
                    // TODO log only ?
                    if(ua instanceof UAUndefined) {
                        throw new GdxRuntimeException("binding not supported for " + ua.name);
                    }else{
                        throw new GdxRuntimeException("uniform missmatch for " + ua.name + " " + ua.type() + " and " + ua.accessor.getType().getName());
                    }
                }else{
                    i.bound = true;
                    activeUniformAccessors.add(ua);
                }
            }
        }
        // check for GLSL uniform not bound to Java
        for(Entry<String, UniformInfo> e : uniformInfos){
            if(!e.value.bound){
                Gdx.app.error("Shader", "uniform not bound : name from program : " + e.value.name);
            }
        }
        if(handler != null) handler.loaded();
    }
    /**
     * Scans this instance for @Uniform members and builds one accessor per member,
     * choosing the accessor implementation from the Java field type. Sampler accessors
     * are assigned consecutive texture units (order-sensitive: samplerUnits++).
     */
    private Array<UniformAccessor> findAllUniformAccessors() {
        Array<UniformAccessor> all = new Array<UniformAccessor>();
        samplerUnits = 0;
        for(Accessor a : Kit.meta.accessorsFor(this, Uniform.class)) {
            Uniform config = a.config(Uniform.class);
            Editable edit = a.config(Editable.class);
            // default uniform name: "u_" + member name, unless overridden by @Uniform's value
            String uniformName = config.value();
            if(uniformName.isEmpty()){
                uniformName = "u_" + a.getName();
            }
            UniformAccessor ua;
            if(a.getType() == float.class){
                ua = new UAFloat();
            }
            else if(a.getType() == Vector2.class){
                ua = new UAVector2();
            }
            else if(a.getType() == Vector3.class){
                ua = new UAVector3();
            }
            else if(a.getType() == Color.class){
                ua = new UAColor();
            }
            else if(a.getType() == Matrix4.class){
                ua = new UAMatrix4();
            }
            else if(a.getType() == Texture.class){
                ua = new UASampler2D().unit(samplerUnits++);
            }
            else if(a.getType() == Cubemap.class){
                ua = new UASamplerCube().unit(samplerUnits++);
            }
            else{
                // unsupported type: keep a placeholder so the mismatch is reported at bind time
                Gdx.app.error("Shader", "missing Java/GLSL type binding for " + uniformName + " type " + a.getType().getName());
                ua = new UAUndefined();
            }
            // uniform with edit can be frozen, other one can't be
            ua.freezable = edit != null;
            ua.name = uniformName;
            ua.accessor = a;
            ua.only = config.only();
            all.add(ua);
        }
        return all;
    }
private ObjectSet<String> findConfigs()
{
ObjectSet<String> all = new ObjectSet<String>();
for(UniformAccessor ua : allUniformAccessors)
{
for(String only : ua.only){
all.add(only);
}
}
for(String c : shaderInfo.configs()){
all.add(c);
}
return all;
}
public void changeVS(FileHandle file) {
if(shaderInfo.storable()){
vs = FileHelper.stripPath(file.path());
}
vertexShader = file;
}
public void changeFS(FileHandle file) {
if(shaderInfo.storable()){
fs = FileHelper.stripPath(file.path());
}
fragmentShader = file;
}
public void invalidate() {
invalidated = true;
}
public boolean isEnabled(String config) {
return currentConfig.contains(config);
}
public void setConfig(String...configs){
this.currentConfig.clear();
for(String c : configs) this.currentConfig.add(c);
invalidate();
}
    @Override
    public void read(Json json, JsonValue jsonData) {
        // restore shader paths only when the annotation allows persistence
        ShaderInfo info = this.getClass().getAnnotation(ShaderInfo.class);
        if(info != null && info.storable()){
            json.readField(this, "vs", jsonData);
            json.readField(this, "fs", jsonData);
        }
        // restore the enabled config set (may be absent from the JSON)
        String[] cfg = json.readValue("config", String[].class, jsonData);
        currentConfig.clear();
        if(cfg != null){
            for(String c : cfg){
                currentConfig.add(c);
            }
        }
        // restore every scanned member field, then force a rebuild
        for(Accessor accessor : AccessorScanner.scan(this, true, false)){
            json.readField(this, accessor.getName(), jsonData);
        }
        invalidate();
    }
    @Override
    public void write(Json json) {
        // persist shader paths only when the annotation allows it (mirror of read())
        ShaderInfo info = this.getClass().getAnnotation(ShaderInfo.class);
        if(info != null && info.storable()){
            json.writeField(this, "vs");
            json.writeField(this, "fs");
        }
        // persist the enabled config set only when non-empty
        if(currentConfig.size > 0){
            String[] cfg = new String[currentConfig.size];
            int i=0;
            for(String c : currentConfig){
                cfg[i++] = c;
            }
            json.writeValue("config", cfg);
        }
        // persist every scanned member field
        for(Accessor accessor : AccessorScanner.scan(this, true, false)){
            json.writeField(this, accessor.getName());
        }
    }
}
| |
// Copyright 2008-2010 Victor Iacoban
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under
// the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
package org.zmlx.hg4idea.provider.commit;
import com.intellij.dvcs.AmendComponent;
import com.intellij.dvcs.push.ui.VcsPushDialog;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.vcs.CheckinProjectPanel;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.checkin.CheckinEnvironment;
import com.intellij.openapi.vcs.ui.RefreshableOnComponent;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.GuiUtils;
import com.intellij.util.FunctionUtil;
import com.intellij.util.NullableFunction;
import com.intellij.util.PairConsumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.GridBag;
import com.intellij.util.ui.JBUI;
import com.intellij.vcsUtil.VcsUtil;
import com.intellij.xml.util.XmlStringUtil;
import javax.annotation.Nonnull;
import org.zmlx.hg4idea.*;
import org.zmlx.hg4idea.action.HgActionUtil;
import org.zmlx.hg4idea.command.*;
import org.zmlx.hg4idea.command.mq.HgQNewCommand;
import org.zmlx.hg4idea.execution.HgCommandException;
import org.zmlx.hg4idea.execution.HgCommandExecutor;
import org.zmlx.hg4idea.execution.HgCommandResult;
import org.zmlx.hg4idea.provider.HgCurrentBinaryContentRevision;
import org.zmlx.hg4idea.repo.HgRepository;
import org.zmlx.hg4idea.repo.HgRepositoryManager;
import org.zmlx.hg4idea.util.HgUtil;
import javax.annotation.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.*;
import java.util.List;
import static com.intellij.util.ObjectUtils.assertNotNull;
import static org.zmlx.hg4idea.util.HgUtil.getRepositoryManager;
/**
 * Mercurial implementation of {@link CheckinEnvironment}: commits the selected changes per
 * repository (optionally as an MQ patch, amended, or closing the branch) and opens the push
 * dialog afterwards when requested from the commit options panel.
 */
public class HgCheckinEnvironment implements CheckinEnvironment {
  private final Project myProject;
  private boolean myNextCommitIsPushed; // set by setNextCommitIsPushed(): open the push dialog after commit
  private boolean myNextCommitAmend; // If true, the next commit is amended
  private boolean myShouldCommitSubrepos; // pass subrepo commit flag to HgCommitCommand
  private boolean myMqNewPatch; // create an MQ patch (HgQNewCommand) instead of a plain commit
  private boolean myCloseBranch;
  @Nullable
  private Collection<HgRepository> myRepos; // extra repositories to commit even if they have no selected changes
  public HgCheckinEnvironment(Project project) {
    myProject = project;
  }
  // NOTE(review): additionalDataConsumer is not used by this implementation
  public RefreshableOnComponent createAdditionalOptionsPanel(CheckinProjectPanel panel,
                                                             PairConsumer<Object, Object> additionalDataConsumer) {
    reset();
    return new HgCommitAdditionalComponent(myProject, panel);
  }
  // clear all per-commit flags before a new commit dialog is shown
  private void reset() {
    myNextCommitIsPushed = false;
    myShouldCommitSubrepos = false;
    myCloseBranch = false;
    myMqNewPatch = false;
    myRepos = null;
  }
  public String getDefaultMessageFor(FilePath[] filesToCheckin) {
    return null;
  }
  public String getHelpId() {
    return null;
  }
  public String getCheckinOperationName() {
    return HgVcsMessages.message("hg4idea.commit");
  }
  /**
   * Groups the changes by repository and commits each group, then (on full success)
   * opens the push dialog if the user asked for commit-and-push.
   */
  public List<VcsException> commit(List<Change> changes,
                                   String preparedComment,
                                   @Nonnull NullableFunction<Object, Object> parametersHolder,
                                   Set<String> feedback) {
    List<VcsException> exceptions = new LinkedList<>();
    Map<HgRepository, Set<HgFile>> repositoriesMap = getFilesByRepository(changes);
    addRepositoriesWithoutChanges(repositoriesMap);
    for (Map.Entry<HgRepository, Set<HgFile>> entry : repositoriesMap.entrySet()) {
      HgRepository repo = entry.getKey();
      Set<HgFile> selectedFiles = entry.getValue();
      // choose qnew vs. plain commit; subrepo commit only makes sense when files are selected
      HgCommitTypeCommand command = myMqNewPatch ? new HgQNewCommand(myProject, repo, preparedComment, myNextCommitAmend) :
                                    new HgCommitCommand(myProject, repo, preparedComment, myNextCommitAmend, myCloseBranch,
                                                        myShouldCommitSubrepos && !selectedFiles.isEmpty());
      if (isMergeCommit(repo.getRoot())) {
        //partial commits are not allowed during merges
        //verifyResult that all changed files in the repo are selected
        //If so, commit the entire repository
        //If not, abort
        Set<HgFile> changedFilesNotInCommit = getChangedFilesNotInCommit(repo.getRoot(), selectedFiles);
        boolean partial = !changedFilesNotInCommit.isEmpty();
        if (partial) {
          final StringBuilder filesNotIncludedString = new StringBuilder();
          for (HgFile hgFile : changedFilesNotInCommit) {
            filesNotIncludedString.append("<li>");
            filesNotIncludedString.append(hgFile.getRelativePath());
            filesNotIncludedString.append("</li>");
          }
          if (!mayCommitEverything(filesNotIncludedString.toString())) {
            //abort
            return exceptions;
          }
          //firstly selected changes marked dirty in CommitHelper -> postRefresh, so we need to mark others
          VcsDirtyScopeManager dirtyManager = VcsDirtyScopeManager.getInstance(myProject);
          for (HgFile hgFile : changedFilesNotInCommit) {
            dirtyManager.fileDirty(hgFile.toFilePath());
          }
        }
        // else : all was included, or it was OK to commit everything,
        // so no need to set the files on the command, because then mercurial will complain
      }
      else {
        command.setFiles(selectedFiles);
      }
      try {
        command.executeInCurrentThread();
      }
      catch (HgCommandException e) {
        exceptions.add(new VcsException(e));
      }
      catch (VcsException e) {
        exceptions.add(e);
      }
    }
    // push if needed
    if (myNextCommitIsPushed && exceptions.isEmpty()) {
      final List<HgRepository> preselectedRepositories = ContainerUtil.newArrayList(repositoriesMap.keySet());
      GuiUtils.invokeLaterIfNeeded(() ->
                                     new VcsPushDialog(myProject, preselectedRepositories, HgUtil.getCurrentRepository(myProject)).show(),
                                   ModalityState.defaultModalityState());
    }
    return exceptions;
  }
  // a merge is in progress when the working copy has more than one parent
  private boolean isMergeCommit(VirtualFile repo) {
    return new HgWorkingCopyRevisionsCommand(myProject).parents(repo).size() > 1;
  }
  /**
   * Runs "hg status" against the first parent and returns the changed files
   * that are NOT part of the current selection.
   */
  private Set<HgFile> getChangedFilesNotInCommit(VirtualFile repo, Set<HgFile> selectedFiles) {
    List<HgRevisionNumber> parents = new HgWorkingCopyRevisionsCommand(myProject).parents(repo);
    HgStatusCommand statusCommand =
      new HgStatusCommand.Builder(true).unknown(false).ignored(false).baseRevision(parents.get(0)).build(myProject);
    Set<HgChange> allChangedFilesInRepo = statusCommand.executeInCurrentThread(repo);
    Set<HgFile> filesNotIncluded = new HashSet<>();
    for (HgChange change : allChangedFilesInRepo) {
      HgFile beforeFile = change.beforeFile();
      HgFile afterFile = change.afterFile();
      if (!selectedFiles.contains(beforeFile)) {
        filesNotIncluded.add(beforeFile);
      }
      else if (!selectedFiles.contains(afterFile)) {
        filesNotIncluded.add(afterFile);
      }
    }
    return filesNotIncluded;
  }
  // asks the user (on the EDT, blocking this thread) whether the whole repository may be committed
  private boolean mayCommitEverything(final String filesNotIncludedString) {
    final int[] choice = new int[1];
    Runnable runnable = new Runnable() {
      public void run() {
        choice[0] = Messages.showOkCancelDialog(
          myProject,
          HgVcsMessages.message("hg4idea.commit.partial.merge.message", filesNotIncludedString),
          HgVcsMessages.message("hg4idea.commit.partial.merge.title"),
          null
        );
      }
    };
    ApplicationManager.getApplication().invokeAndWait(runnable);
    return choice[0] == Messages.OK;
  }
  public List<VcsException> commit(List<Change> changes, String preparedComment) {
    return commit(changes, preparedComment, FunctionUtil.nullConstant(), null);
  }
  // schedules "hg remove" for the given paths in a background task; null return means no errors reported
  public List<VcsException> scheduleMissingFileForDeletion(List<FilePath> files) {
    final List<HgFile> filesWithRoots = new ArrayList<>();
    for (FilePath filePath : files) {
      VirtualFile vcsRoot = VcsUtil.getVcsRootFor(myProject, filePath);
      if (vcsRoot == null) {
        continue;
      }
      filesWithRoots.add(new HgFile(vcsRoot, filePath));
    }
    new Task.Backgroundable(myProject, "Removing Files...") {
      @Override
      public void run(@Nonnull ProgressIndicator indicator) {
        new HgRemoveCommand(myProject).executeInCurrentThread(filesWithRoots);
      }
    }.queue();
    return null;
  }
  public List<VcsException> scheduleUnversionedFilesForAddition(final List<VirtualFile> files) {
    new HgAddCommand(myProject).addWithProgress(files);
    return null;
  }
  public boolean keepChangeListAfterCommit(ChangeList changeList) {
    return false;
  }
  @Override
  public boolean isRefreshAfterCommitNeeded() {
    return false;
  }
  /** Buckets both the before- and after-revisions of every change into their owning repository. */
  @Nonnull
  private Map<HgRepository, Set<HgFile>> getFilesByRepository(List<Change> changes) {
    Map<HgRepository, Set<HgFile>> result = new HashMap<>();
    for (Change change : changes) {
      ContentRevision afterRevision = change.getAfterRevision();
      ContentRevision beforeRevision = change.getBeforeRevision();
      if (afterRevision != null) {
        addFile(result, afterRevision);
      }
      if (beforeRevision != null) {
        addFile(result, beforeRevision);
      }
    }
    return result;
  }
  // resolves the owning repository for one revision and records the file under it (silently skipped if none)
  private void addFile(Map<HgRepository, Set<HgFile>> result, ContentRevision contentRevision) {
    FilePath filePath = contentRevision.getFile();
    // try to find repository from hgFile from change: to be able commit sub repositories as expected
    HgRepository repo = HgUtil.getRepositoryForFile(myProject, contentRevision instanceof HgCurrentBinaryContentRevision
                                                               ? ((HgCurrentBinaryContentRevision)contentRevision).getRepositoryRoot()
                                                               : ChangesUtil.findValidParentAccurately(filePath));
    if (repo == null) {
      return;
    }
    Set<HgFile> hgFiles = result.get(repo);
    if (hgFiles == null) {
      hgFiles = new HashSet<>();
      result.put(repo, hgFiles);
    }
    hgFiles.add(new HgFile(repo.getRoot(), filePath));
  }
  public void setNextCommitIsPushed() {
    myNextCommitIsPushed = true;
  }
  public void setMqNew() {
    myMqNewPatch = true;
  }
  public void setCloseBranch(boolean closeBranch) {
    myCloseBranch = closeBranch;
  }
  public void setRepos(@Nonnull Collection<HgRepository> repos) {
    myRepos = repos;
  }
  // ensures every explicitly requested repository gets committed, even with an empty file set
  private void addRepositoriesWithoutChanges(@Nonnull Map<HgRepository, Set<HgFile>> repositoryMap) {
    if (myRepos == null) return;
    for (HgRepository repository : myRepos) {
      if (!repositoryMap.keySet().contains(repository)) {
        repositoryMap.put(repository, Collections.<HgFile>emptySet());
      }
    }
  }
  /**
   * Commit options for hg
   */
  public class HgCommitAdditionalComponent implements RefreshableOnComponent {
    @Nonnull
    private final JPanel myPanel;
    @Nonnull
    private final AmendComponent myAmend; // "Amend Commit (QRefresh)" checkbox, mutually exclusive with subrepos
    @Nonnull
    private final JCheckBox myCommitSubrepos; // visible only when some selected repository has subrepos
    HgCommitAdditionalComponent(@Nonnull Project project, @Nonnull CheckinProjectPanel panel) {
      HgVcs vcs = assertNotNull(HgVcs.getInstance(myProject));
      myAmend = new MyAmendComponent(project, getRepositoryManager(project), panel, "Amend Commit (QRefresh)");
      myAmend.getComponent().setEnabled(vcs.getVersion().isAmendSupported());
      myCommitSubrepos = new JCheckBox("Commit subrepositories", false);
      myCommitSubrepos.setToolTipText(XmlStringUtil.wrapInHtml(
        "Commit all subrepos for selected repositories.<br>" +
        " <code>hg ci <i><b>files</b></i> -S <i><b>subrepos</b></i></code>"));
      myCommitSubrepos.setMnemonic('s');
      Collection<HgRepository> repos = HgActionUtil.collectRepositoriesFromFiles(getRepositoryManager(myProject), panel.getRoots());
      myCommitSubrepos.setVisible(ContainerUtil.exists(repos, HgRepository::hasSubrepos));
      // amend and subrepo commit are mutually exclusive: selecting one unselects and disables the other
      myCommitSubrepos.addActionListener(new MySelectionListener(myAmend.getCheckBox()));
      myAmend.getCheckBox().addActionListener(new MySelectionListener(myCommitSubrepos));
      GridBag gb = new GridBag().
        setDefaultInsets(JBUI.insets(2)).
        setDefaultAnchor(GridBagConstraints.WEST).
        setDefaultWeightX(1).
        setDefaultFill(GridBagConstraints.HORIZONTAL);
      myPanel = new JPanel(new GridBagLayout());
      myPanel.add(myAmend.getComponent(), gb.nextLine().next());
      myPanel.add(myCommitSubrepos, gb.nextLine().next());
    }
    @Override
    public void refresh() {
      myAmend.refresh();
      restoreState();
    }
    // copies the UI state into the outer environment's per-commit flags
    @Override
    public void saveState() {
      myNextCommitAmend = isAmend();
      myShouldCommitSubrepos = myCommitSubrepos.isSelected();
    }
    @Override
    public void restoreState() {
      myNextCommitAmend = false;
      myShouldCommitSubrepos = false;
    }
    @Override
    public JComponent getComponent() {
      return myPanel;
    }
    public boolean isAmend() {
      return myAmend.isAmend();
    }
    private class MyAmendComponent extends AmendComponent {
      public MyAmendComponent(@Nonnull Project project,
                              @Nonnull HgRepositoryManager repoManager,
                              @Nonnull CheckinProjectPanel panel,
                              @Nonnull String title) {
        super(project, repoManager, panel, title);
      }
      @Nonnull
      @Override
      protected Set<VirtualFile> getVcsRoots(@Nonnull Collection<FilePath> filePaths) {
        return HgUtil.hgRoots(myProject, filePaths);
      }
      // fetches the description of the working copy's parent via "hg log -r . --template {desc}"
      @Nullable
      @Override
      protected String getLastCommitMessage(@Nonnull VirtualFile repo) throws VcsException {
        HgCommandExecutor commandExecutor = new HgCommandExecutor(myProject);
        List<String> args = new ArrayList<>();
        args.add("-r");
        args.add(".");
        args.add("--template");
        args.add("{desc}");
        HgCommandResult result = commandExecutor.executeInCurrentThread(repo, "log", args);
        return result == null ? "" : result.getRawOutput();
      }
    }
    // enforces mutual exclusion between two checkboxes
    private class MySelectionListener implements ActionListener {
      private final JCheckBox myUnselectedComponent;
      public MySelectionListener(JCheckBox unselectedComponent) {
        myUnselectedComponent = unselectedComponent;
      }
      @Override
      public void actionPerformed(ActionEvent e) {
        JCheckBox source = (JCheckBox)e.getSource();
        if (source.isSelected()) {
          myUnselectedComponent.setSelected(false);
          myUnselectedComponent.setEnabled(false);
        }
        else {
          myUnselectedComponent.setEnabled(true);
        }
      }
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.lib;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.HashMap;
import java.util.Set;
import java.util.Iterator;
import java.util.Map;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RecordReader;
/**
* An abstract {@link org.apache.hadoop.mapred.InputFormat} that returns
* {@link CombineFileSplit}'s in
* {@link org.apache.hadoop.mapred.InputFormat#getSplits(JobConf, int)} method.
* Splits are constructed from the files under the input paths. A split cannot
* have files from different pools. Each split returned may contain blocks from
* different files. If a maxSplitSize is specified, then blocks on the same node
* are combined to form a single split. Blocks that are left over are then
* combined with other blocks in the same rack. If maxSplitSize is not
* specified, then blocks from the same rack are combined in a single split; no
* attempt is made to create node-local splits. If the maxSplitSize is equal to
* the block size, then this class is similar to the default spliting behaviour
* in Hadoop: each block is a locally processed split. Subclasses implement
* {@link org.apache.hadoop.mapred.InputFormat#getRecordReader(InputSplit, JobConf, Reporter)}
* to construct <code>RecordReader</code>'s for <code>CombineFileSplit</code>'s.
*
* @see CombineFileSplit
*/
public abstract class CombineFileInputFormat<K, V> extends
FileInputFormat<K, V> {
    // ability to limit the size of a single split
    private long maxSplitSize = 0;
    private long minSplitSizeNode = 0;
    private long minSplitSizeRack = 0;
    // A pool of input paths filters. A split cannot have blocks from files
    // across multiple pools.
    private ArrayList<MultiPathFilter> pools = new ArrayList<MultiPathFilter>();
    // mapping from a rack name to the set of Nodes in the rack
    // NOTE(review): static — shared by every instance in the JVM and cleared at the end of
    // getSplits(); concurrent getSplits() calls on different instances would interfere. Confirm intended.
    private static HashMap<String, Set<String>> rackToNodes = new HashMap<String, Set<String>>();
    /**
     * Specify the maximum size (in bytes) of each split. Each split is
     * approximately equal to the specified size.
     *
     * @param maxSplitSize maximum split size in bytes; takes precedence over the job config
     */
    protected void setMaxSplitSize(long maxSplitSize) {
        this.maxSplitSize = maxSplitSize;
    }
    /**
     * Specify the minimum size (in bytes) of each split per node. This applies
     * to data that is left over after combining data on a single node into
     * splits that are of maximum size specified by maxSplitSize. This leftover
     * data will be combined into its own split if its size exceeds
     * minSplitSizeNode.
     *
     * @param minSplitSizeNode minimum per-node split size in bytes; takes precedence over the job config
     */
    protected void setMinSplitSizeNode(long minSplitSizeNode) {
        this.minSplitSizeNode = minSplitSizeNode;
    }
    /**
     * Specify the minimum size (in bytes) of each split per rack. This applies
     * to data that is left over after combining data on a single rack into
     * splits that are of maximum size specified by maxSplitSize. This leftover
     * data will be combined into its own split if its size exceeds
     * minSplitSizeRack.
     *
     * @param minSplitSizeRack minimum per-rack split size in bytes; takes precedence over the job config
     */
    protected void setMinSplitSizeRack(long minSplitSizeRack) {
        this.minSplitSizeRack = minSplitSizeRack;
    }
    /**
     * Create a new pool and add the filters to it. A split cannot have files
     * from different pools.
     *
     * @param conf job configuration (NOTE(review): not used by this implementation)
     * @param filters the filters defining the pool's membership
     */
    protected void createPool(JobConf conf, List<PathFilter> filters) {
        pools.add(new MultiPathFilter(filters));
    }
    /**
     * Create a new pool and add the filters to it. A pathname can satisfy any
     * one of the specified filters. A split cannot have files from different
     * pools.
     *
     * @param conf job configuration (NOTE(review): not used by this implementation)
     * @param filters the filters defining the pool's membership
     */
    protected void createPool(JobConf conf, PathFilter... filters) {
        MultiPathFilter multi = new MultiPathFilter();
        for (PathFilter f : filters) {
            multi.add(f);
        }
        pools.add(multi);
    }
    /**
     * default constructor
     */
    public CombineFileInputFormat() {
    }
    @Override
    public InputSplit[] getSplits(JobConf job, int numSplits)
        throws IOException {
        long minSizeNode = 0;
        long minSizeRack = 0;
        long maxSize = 0;
        // the values specified by setxxxSplitSize() takes precedence over the
        // values that might have been specified in the config
        if (minSplitSizeNode != 0) {
            minSizeNode = minSplitSizeNode;
        } else {
            minSizeNode = job.getLong("mapred.min.split.size.per.node", 0);
        }
        if (minSplitSizeRack != 0) {
            minSizeRack = minSplitSizeRack;
        } else {
            minSizeRack = job.getLong("mapred.min.split.size.per.rack", 0);
        }
        if (maxSplitSize != 0) {
            maxSize = maxSplitSize;
        } else {
            maxSize = job.getLong("mapred.max.split.size", 0);
        }
        // sanity-check the three sizes: node min <= rack min <= max (where set)
        if (minSizeNode != 0 && maxSize != 0 && minSizeNode > maxSize) {
            throw new IOException("Minimum split size pernode " + minSizeNode
                    + " cannot be larger than maximum split size " + maxSize);
        }
        if (minSizeRack != 0 && maxSize != 0 && minSizeRack > maxSize) {
            throw new IOException("Minimum split size per rack" + minSizeRack
                    + " cannot be larger than maximum split size " + maxSize);
        }
        if (minSizeRack != 0 && minSizeNode > minSizeRack) {
            throw new IOException("Minimum split size per node" + minSizeNode
                    + " cannot be smaller than minimum split size per rack "
                    + minSizeRack);
        }
        // all the files in input set
        Path[] paths = FileUtil.stat2Paths(listStatus(job));
        List<CombineFileSplit> splits = new ArrayList<CombineFileSplit>();
        if (paths.length == 0) {
            return splits.toArray(new CombineFileSplit[splits.size()]);
        }
        // In one single iteration, process all the paths in a single pool.
        // Processing one pool at a time ensures that a split contans paths
        // from a single pool only.
        for (MultiPathFilter onepool : pools) {
            ArrayList<Path> myPaths = new ArrayList<Path>();
            // pick one input path. If it matches all the filters in a pool,
            // add it to the output set
            for (int i = 0; i < paths.length; i++) {
                if (paths[i] == null) { // already processed
                    continue;
                }
                // NOTE(review): fs is never read; the call may still matter for its
                // side effects / early IOException on a bad scheme — confirm before removing
                FileSystem fs = paths[i].getFileSystem(job);
                // match on the scheme-less path
                Path p = new Path(paths[i].toUri().getPath());
                if (onepool.accept(p)) {
                    myPaths.add(paths[i]); // add it to my output set
                    paths[i] = null; // already processed
                }
            }
            // create splits for all files in this pool.
            getMoreSplits(job, myPaths.toArray(new Path[myPaths.size()]),
                    maxSize, minSizeNode, minSizeRack, splits);
        }
        // Finally, process all paths that do not belong to any pool.
        ArrayList<Path> myPaths = new ArrayList<Path>();
        for (int i = 0; i < paths.length; i++) {
            if (paths[i] == null) { // already processed
                continue;
            }
            myPaths.add(paths[i]);
        }
        // create splits for all files that are not in any pool.
        getMoreSplits(job, myPaths.toArray(new Path[myPaths.size()]), maxSize,
                minSizeNode, minSizeRack, splits);
        // free up rackToNodes map
        rackToNodes.clear();
        return splits.toArray(new CombineFileSplit[splits.size()]);
    }
/**
* Return all the splits in the specified set of paths
*/
private void getMoreSplits(JobConf job, Path[] paths, long maxSize,
long minSizeNode, long minSizeRack, List<CombineFileSplit> splits)
throws IOException {
// all blocks for all the files in input set
OneFileInfo[] files;
// mapping from a rack name to the list of blocks it has
HashMap<String, List<OneBlockInfo>> rackToBlocks = new HashMap<String, List<OneBlockInfo>>();
// mapping from a block to the nodes on which it has replicas
HashMap<OneBlockInfo, String[]> blockToNodes = new HashMap<OneBlockInfo, String[]>();
// mapping from a node to the list of blocks that it contains
HashMap<String, List<OneBlockInfo>> nodeToBlocks = new HashMap<String, List<OneBlockInfo>>();
files = new OneFileInfo[paths.length];
if (paths.length == 0) {
return;
}
// populate all the blocks for all files
long totLength = 0;
for (int i = 0; i < paths.length; i++) {
files[i] = new OneFileInfo(paths[i], job, rackToBlocks,
blockToNodes, nodeToBlocks);
totLength += files[i].getLength();
}
ArrayList<OneBlockInfo> validBlocks = new ArrayList<OneBlockInfo>();
ArrayList<String> nodes = new ArrayList<String>();
long curSplitSize = 0;
// process all nodes and create splits that are local
// to a node.
for (Iterator<Map.Entry<String, List<OneBlockInfo>>> iter = nodeToBlocks
.entrySet().iterator(); iter.hasNext();) {
Map.Entry<String, List<OneBlockInfo>> one = iter.next();
nodes.add(one.getKey());
List<OneBlockInfo> blocksInNode = one.getValue();
// for each block, copy it into validBlocks. Delete it from
// blockToNodes so that the same block does not appear in
// two different splits.
for (OneBlockInfo oneblock : blocksInNode) {
if (blockToNodes.containsKey(oneblock)) {
validBlocks.add(oneblock);
blockToNodes.remove(oneblock);
curSplitSize += oneblock.length;
// if the accumulated split size exceeds the maximum, then
// create this split.
if (maxSize != 0 && curSplitSize >= maxSize) {
// create an input split and add it to the splits array
addCreatedSplit(job, splits, nodes, validBlocks);
curSplitSize = 0;
validBlocks.clear();
}
}
}
// if there were any blocks left over and their combined size is
// larger than minSplitNode, then combine them into one split.
// Otherwise add them back to the unprocessed pool. It is likely
// that they will be combined with other blocks from the same rack
// later on.
if (minSizeNode != 0 && curSplitSize >= minSizeNode) {
// create an input split and add it to the splits array
addCreatedSplit(job, splits, nodes, validBlocks);
} else {
for (OneBlockInfo oneblock : validBlocks) {
blockToNodes.put(oneblock, oneblock.hosts);
}
}
validBlocks.clear();
nodes.clear();
curSplitSize = 0;
}
// if blocks in a rack are below the specified minimum size, then keep
// them
// in 'overflow'. After the processing of all racks is complete, these
// overflow
// blocks will be combined into splits.
ArrayList<OneBlockInfo> overflowBlocks = new ArrayList<OneBlockInfo>();
ArrayList<String> racks = new ArrayList<String>();
// Process all racks over and over again until there is no more work to
// do.
while (blockToNodes.size() > 0) {
// Create one split for this rack before moving over to the next
// rack.
// Come back to this rack after creating a single split for each of
// the
// remaining racks.
// Process one rack location at a time, Combine all possible blocks
// that
// reside on this rack as one split. (constrained by minimum and
// maximum
// split size).
// iterate over all racks
for (Iterator<Map.Entry<String, List<OneBlockInfo>>> iter = rackToBlocks
.entrySet().iterator(); iter.hasNext();) {
Map.Entry<String, List<OneBlockInfo>> one = iter.next();
racks.add(one.getKey());
List<OneBlockInfo> blocks = one.getValue();
// for each block, copy it into validBlocks. Delete it from
// blockToNodes so that the same block does not appear in
// two different splits.
boolean createdSplit = false;
for (OneBlockInfo oneblock : blocks) {
if (blockToNodes.containsKey(oneblock)) {
validBlocks.add(oneblock);
blockToNodes.remove(oneblock);
curSplitSize += oneblock.length;
// if the accumulated split size exceeds the maximum,
// then
// create this split.
if (maxSize != 0 && curSplitSize >= maxSize) {
// create an input split and add it to the splits
// array
addCreatedSplit(job, splits, getHosts(racks),
validBlocks);
createdSplit = true;
break;
}
}
}
// if we created a split, then just go to the next rack
if (createdSplit) {
curSplitSize = 0;
validBlocks.clear();
racks.clear();
continue;
}
if (!validBlocks.isEmpty()) {
if (minSizeRack != 0 && curSplitSize >= minSizeRack) {
// if there is a mimimum size specified, then create a
// single split
// otherwise, store these blocks into overflow data
// structure
addCreatedSplit(job, splits, getHosts(racks),
validBlocks);
} else {
// There were a few blocks in this rack that remained to
// be processed.
// Keep them in 'overflow' block list. These will be
// combined later.
overflowBlocks.addAll(validBlocks);
}
}
curSplitSize = 0;
validBlocks.clear();
racks.clear();
}
}
assert blockToNodes.isEmpty();
assert curSplitSize == 0;
assert validBlocks.isEmpty();
assert racks.isEmpty();
// Process all overflow blocks
for (OneBlockInfo oneblock : overflowBlocks) {
validBlocks.add(oneblock);
curSplitSize += oneblock.length;
// This might cause an exiting rack location to be re-added,
// but it should be ok.
for (int i = 0; i < oneblock.racks.length; i++) {
racks.add(oneblock.racks[i]);
}
// if the accumulated split size exceeds the maximum, then
// create this split.
if (maxSize != 0 && curSplitSize >= maxSize) {
// create an input split and add it to the splits array
addCreatedSplit(job, splits, getHosts(racks), validBlocks);
curSplitSize = 0;
validBlocks.clear();
racks.clear();
}
}
// Process any remaining blocks, if any.
if (!validBlocks.isEmpty()) {
addCreatedSplit(job, splits, getHosts(racks), validBlocks);
}
}
/**
 * Builds one CombineFileSplit covering every block in {@code validBlocks}
 * and appends it to {@code splitList}.
 */
private void addCreatedSplit(JobConf job, List<CombineFileSplit> splitList,
    List<String> locations, ArrayList<OneBlockInfo> validBlocks) {
  // Gather per-block paths, offsets and lengths into parallel arrays.
  int numBlocks = validBlocks.size();
  Path[] paths = new Path[numBlocks];
  long[] startOffsets = new long[numBlocks];
  long[] lengths = new long[numBlocks];
  int idx = 0;
  for (OneBlockInfo block : validBlocks) {
    paths[idx] = block.onepath;
    startOffsets[idx] = block.offset;
    lengths[idx] = block.length;
    idx++;
  }
  // Wrap the arrays in a single split and record it.
  splitList.add(new CombineFileSplit(job, paths, startOffsets, lengths,
      locations.toArray(new String[0])));
}
/**
 * Creates a record reader for the given split.  Left abstract because
 * concrete subclasses must decide how the combined blocks are decoded.
 * (Original note: "This is not implemented yet.")
 */
public abstract RecordReader<K, V> getRecordReader(InputSplit split,
    JobConf job, Reporter reporter) throws IOException;
/**
 * Information about one file from the File System.  Constructing an instance
 * reads the file's block locations and populates the three shared lookup
 * structures used by the split calculation:
 * rack -> blocks, block -> nodes and node -> blocks.
 */
private static class OneFileInfo {
  private long fileSize;         // sum of the lengths of all blocks in this file
  private OneBlockInfo[] blocks; // all blocks in this file

  OneFileInfo(Path path, JobConf job,
      HashMap<String, List<OneBlockInfo>> rackToBlocks,
      HashMap<OneBlockInfo, String[]> blockToNodes,
      HashMap<String, List<OneBlockInfo>> nodeToBlocks)
      throws IOException {
    this.fileSize = 0;
    // get block locations from file system
    FileSystem fs = path.getFileSystem(job);
    FileStatus stat = fs.getFileStatus(path);
    BlockLocation[] locations = fs.getFileBlockLocations(stat, 0, stat
        .getLen());
    // create a list of all block and their locations
    if (locations == null) {
      // no location info available: treat the file as having no blocks
      blocks = new OneBlockInfo[0];
    } else {
      blocks = new OneBlockInfo[locations.length];
      for (int i = 0; i < locations.length; i++) {
        fileSize += locations[i].getLength();
        OneBlockInfo oneblock = new OneBlockInfo(path, locations[i]
            .getOffset(), locations[i].getLength(),
            locations[i].getHosts(), locations[i]
                .getTopologyPaths());
        blocks[i] = oneblock;
        // add this block to the block --> node locations map
        blockToNodes.put(oneblock, oneblock.hosts);
        // add this block to the rack --> block map
        for (int j = 0; j < oneblock.racks.length; j++) {
          String rack = oneblock.racks[j];
          List<OneBlockInfo> blklist = rackToBlocks.get(rack);
          if (blklist == null) {
            blklist = new ArrayList<OneBlockInfo>();
            rackToBlocks.put(rack, blklist);
          }
          blklist.add(oneblock);
          // Add this host to rackToNodes map.
          // NOTE(review): hosts is indexed with the rack index j; this is
          // safe only because OneBlockInfo guarantees racks.length ==
          // hosts.length — confirm if that invariant ever changes.
          addHostToRack(oneblock.racks[j], oneblock.hosts[j]);
        }
        // add this block to the node --> block map
        for (int j = 0; j < oneblock.hosts.length; j++) {
          String node = oneblock.hosts[j];
          List<OneBlockInfo> blklist = nodeToBlocks.get(node);
          if (blklist == null) {
            blklist = new ArrayList<OneBlockInfo>();
            nodeToBlocks.put(node, blklist);
          }
          blklist.add(oneblock);
        }
      }
    }
  }

  // total number of bytes covered by this file's blocks
  long getLength() {
    return fileSize;
  }

  // all blocks belonging to this file
  OneBlockInfo[] getBlocks() {
    return blocks;
  }
}
/**
 * Information about one block from the File System: its file, extent, and
 * the hosts/racks where replicas live.
 */
private static class OneBlockInfo {
  Path onepath;   // name of this file
  long offset;    // offset in file
  long length;    // length of this block
  String[] hosts; // nodes on which this block resides
  String[] racks; // network topology of hosts (one rack entry per host)

  OneBlockInfo(Path path, long offset, long len, String[] hosts,
      String[] topologyPaths) {
    this.onepath = path;
    this.offset = offset;
    this.hosts = hosts;
    this.length = len;
    // either one topology path per host, or none at all
    assert (hosts.length == topologyPaths.length || topologyPaths.length == 0);
    // if the file system does not have any rack information, then
    // use dummy rack location.
    if (topologyPaths.length == 0) {
      topologyPaths = new String[hosts.length];
      for (int i = 0; i < topologyPaths.length; i++) {
        topologyPaths[i] = (new NodeBase(hosts[i],
            NetworkTopology.DEFAULT_RACK)).toString();
      }
    }
    // The topology paths have the host name included as the last
    // component. Strip it.
    this.racks = new String[topologyPaths.length];
    for (int i = 0; i < topologyPaths.length; i++) {
      this.racks[i] = (new NodeBase(topologyPaths[i]))
          .getNetworkLocation();
    }
  }
}
/** Records {@code host} as a member of {@code rack} in the shared rackToNodes map. */
private static void addHostToRack(String rack, String host) {
  if (!rackToNodes.containsKey(rack)) {
    rackToNodes.put(rack, new HashSet<String>());
  }
  rackToNodes.get(rack).add(host);
}
/** Flattens the given rack names into one list of all hosts on those racks. */
private static List<String> getHosts(List<String> racks) {
  List<String> allHosts = new ArrayList<String>();
  for (int i = 0; i < racks.size(); i++) {
    allHosts.addAll(rackToNodes.get(racks.get(i)));
  }
  return allHosts;
}
/**
 * Accept a path only if any one of filters given in the constructor do.
 */
private static class MultiPathFilter implements PathFilter {
  // Filters to consult, in insertion order.  final: the reference is never
  // reassigned after construction.
  private final List<PathFilter> filters;

  public MultiPathFilter() {
    this.filters = new ArrayList<PathFilter>();
  }

  public MultiPathFilter(List<PathFilter> filters) {
    this.filters = filters;
  }

  /** Registers one more filter to consult. */
  public void add(PathFilter one) {
    filters.add(one);
  }

  /** @return true as soon as any registered filter accepts the path. */
  public boolean accept(Path path) {
    for (PathFilter filter : filters) {
      if (filter.accept(path)) {
        return true;
      }
    }
    return false;
  }

  /** Debug form, e.g. "[f1,f2,]" (trailing comma kept for compatibility). */
  public String toString() {
    // StringBuilder instead of StringBuffer: this buffer is purely local,
    // so the synchronized StringBuffer was needless overhead.
    StringBuilder buf = new StringBuilder();
    buf.append("[");
    for (PathFilter f : filters) {
      buf.append(f);
      buf.append(",");
    }
    buf.append("]");
    return buf.toString();
  }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jctools.maps.nbhm_test;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.jctools.maps.NonBlockingHashMap;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
// Test NonBlockingHashMap via JUnit
public class NBHM_Tester2
{
// Shared map under test; created once for the whole test-class run.
static private NonBlockingHashMap<String, String> _nbhm;

@BeforeClass
public static void setUp()
{
    _nbhm = new NonBlockingHashMap<>();
}

// Drop the reference so the class holds no state after the run.
@AfterClass
public static void tearDown()
{
    _nbhm = null;
}
// Test some basic stuff; add a few keys, remove a few keys
@Test
public void testBasic()
{
    assertTrue(_nbhm.isEmpty());
    // putIfAbsent on a missing key returns null and bumps the size
    assertThat(_nbhm.putIfAbsent("k1", "v1"), nullValue());
    checkSizes(1);
    assertThat(_nbhm.putIfAbsent("k2", "v2"), nullValue());
    checkSizes(2);
    assertTrue(_nbhm.containsKey("k2"));
    // put over an existing key returns the prior value; size unchanged
    assertThat(_nbhm.put("k1", "v1a"), is("v1"));
    assertThat(_nbhm.put("k2", "v2a"), is("v2"));
    checkSizes(2);
    // putIfAbsent on a present key is a no-op returning the current value
    assertThat(_nbhm.putIfAbsent("k2", "v2b"), is("v2a"));
    assertThat(_nbhm.remove("k1"), is("v1a"));
    assertFalse(_nbhm.containsKey("k1"));
    checkSizes(1);
    assertThat(_nbhm.remove("k1"), nullValue());
    assertThat(_nbhm.remove("k2"), is("v2a"));
    checkSizes(0);
    assertThat(_nbhm.remove("k2"), nullValue());
    assertThat(_nbhm.remove("k3"), nullValue());
    assertTrue(_nbhm.isEmpty());
    assertThat(_nbhm.put("k0", "v0"), nullValue());
    assertTrue(_nbhm.containsKey("k0"));
    checkSizes(1);
    assertThat(_nbhm.remove("k0"), is("v0"));
    assertFalse(_nbhm.containsKey("k0"));
    checkSizes(0);
    // replace only succeeds when the key is already mapped
    assertThat(_nbhm.replace("k0", "v0"), nullValue());
    assertFalse(_nbhm.containsKey("k0"));
    assertThat(_nbhm.put("k0", "v0"), nullValue());
    assertEquals(_nbhm.replace("k0", "v0a"), "v0");
    assertEquals(_nbhm.get("k0"), "v0a");
    assertThat(_nbhm.remove("k0"), is("v0a"));
    assertFalse(_nbhm.containsKey("k0"));
    checkSizes(0);
    assertThat(_nbhm.replace("k1", "v1"), nullValue());
    assertFalse(_nbhm.containsKey("k1"));
    assertThat(_nbhm.put("k1", "v1"), nullValue());
    assertEquals(_nbhm.replace("k1", "v1a"), "v1");
    assertEquals(_nbhm.get("k1"), "v1a");
    assertThat(_nbhm.remove("k1"), is("v1a"));
    assertFalse(_nbhm.containsKey("k1"));
    checkSizes(0);
    // Insert & Remove KeyBonks until the table resizes and we start
    // finding Tombstone keys- and KeyBonk's equals-call with throw a
    // ClassCastException if it sees a non-KeyBonk.
    NonBlockingHashMap<KeyBonk, String> dumb = new NonBlockingHashMap<>();
    for (int i = 0; i < 10000; i++)
    {
        final KeyBonk happy1 = new KeyBonk(i);
        assertThat(dumb.put(happy1, "and"), nullValue());
        if ((i & 1) == 0)
        {
            dumb.remove(happy1);
        }
        final KeyBonk happy2 = new KeyBonk(i); // 'equals' but not '=='
        dumb.get(happy2);
    }
    // Simple insert of simple keys, with no reprobing on insert until the
    // table gets full exactly. Then do a 'get' on the totally full table.
    NonBlockingHashMap<Integer, Object> map = new NonBlockingHashMap<>(32);
    for (int i = 1; i < 32; i++)
    {
        map.put(i, new Object());
    }
    map.get(33); // this returns null, but tested a crash edge case for expansion
}
// Check all iterators for correct size counts
private void checkSizes(int expectedSize)
{
    // JUnit's assertEquals takes (message, expected, actual); the original
    // passed the actual size as "expected", producing misleading failure
    // messages.  The 4-arg overload below already uses the correct order.
    assertEquals("size()", expectedSize, _nbhm.size());
    Collection<String> vals = _nbhm.values();
    checkSizes("values()", vals.size(), vals.iterator(), expectedSize);
    Set<String> keys = _nbhm.keySet();
    checkSizes("keySet()", keys.size(), keys.iterator(), expectedSize);
    Set<Map.Entry<String, String>> ents = _nbhm.entrySet();
    checkSizes("entrySet()", ents.size(), ents.iterator(), expectedSize);
}
// Check that the iterator iterates the correct number of times
private void checkSizes(String msg, int sz, Iterator<?> it, int expectedSize)
{
    // Iterator<?> instead of the raw Iterator the original declared: the
    // element type is never inspected, and the wildcard still accepts the
    // Iterator<String> / Iterator<Map.Entry<...>> the callers pass.
    assertEquals(msg, expectedSize, sz);
    int result = 0;
    while (it.hasNext())
    {
        result++;
        it.next();
    }
    assertEquals(msg, expectedSize, result);
}
// Verify entrySet/keySet/values iteration and toString all see both entries.
@Test
public void testIteration()
{
    assertTrue(_nbhm.isEmpty());
    assertThat(_nbhm.put("k1", "v1"), nullValue());
    assertThat(_nbhm.put("k2", "v2"), nullValue());
    // iteration order is unspecified, so every check accepts either order
    String str1 = "";
    for (Map.Entry<String, String> e : _nbhm.entrySet())
    {
        str1 += e.getKey();
    }
    assertThat("found all entries", str1, anyOf(is("k1k2"), is("k2k1")));
    String str2 = "";
    for (String key : _nbhm.keySet())
    {
        str2 += key;
    }
    assertThat("found all keys", str2, anyOf(is("k1k2"), is("k2k1")));
    String str3 = "";
    for (String val : _nbhm.values())
    {
        str3 += val;
    }
    assertThat("found all vals", str3, anyOf(is("v1v2"), is("v2v1")));
    assertThat("toString works", _nbhm.toString(), anyOf(is("{k1=v1, k2=v2}"), is("{k2=v2, k1=v1}")));
    _nbhm.clear();
}
// Round-trip the map through Java serialization and compare contents.
@Test
public void testSerial()
{
    assertTrue(_nbhm.isEmpty());
    assertThat(_nbhm.put("k1", "v1"), nullValue());
    assertThat(_nbhm.put("k2", "v2"), nullValue());
    // Serialize it out.  try-with-resources closes the stream even on
    // failure (the original leaked the FileOutputStream on exception).
    try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream("NBHM_test.txt")))
    {
        out.writeObject(_nbhm);
    }
    catch (IOException ex)
    {
        // The original only printed the stack trace, so the test silently
        // passed when serialization broke; fail loudly instead.
        fail("serialization failed: " + ex);
    }
    // Read it back
    File f = new File("NBHM_test.txt");
    try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(f)))
    {
        NonBlockingHashMap nbhm = (NonBlockingHashMap) in.readObject();
        assertEquals(_nbhm.toString(), nbhm.toString());
    }
    catch (IOException | ClassNotFoundException ex)
    {
        fail("deserialization failed: " + ex);
    }
    if (!f.delete())
    {
        fail("could not delete NBHM_test.txt");
    }
}
// Insert CNT distinct keys, verifying get() both before and after each put.
@Test
public void testIterationBig2()
{
    final int CNT = 10000;
    NonBlockingHashMap<Integer, String> map = new NonBlockingHashMap<>();
    final String value = "v";
    for (int key = 0; key < CNT; key++)
    {
        // absent before the put, present immediately after
        assertThat(map.get(key), nullValue());
        map.put(key, value);
        assertThat(map.get(key), is(value));
    }
    assertThat(map.size(), is(CNT));
}
// Fill the shared map with 10000 entries, then prove iteration visits every
// key exactly once (by count and by arithmetic-series sum), before and
// after removing two entries.
@Test
public void testIterationBig()
{
    final int CNT = 10000;
    assertThat(_nbhm.size(), is(0));
    for (int i = 0; i < CNT; i++)
    {
        _nbhm.put("k" + i, "v" + i);
    }
    assertThat(_nbhm.size(), is(CNT));
    int sz = 0;
    int sum = 0;
    for (String s : _nbhm.keySet())
    {
        sz++;
        assertThat("", s.charAt(0), is('k'));
        int x = Integer.parseInt(s.substring(1));
        sum += x;
        assertTrue(x >= 0 && x <= (CNT - 1));
    }
    // sum of 0..CNT-1 proves each key appeared exactly once
    assertThat("Found 10000 ints", sz, is(CNT));
    assertThat("Found all integers in list", sum, is(CNT * (CNT - 1) / 2));
    assertThat("can remove 3", _nbhm.remove("k3"), is("v3"));
    assertThat("can remove 4", _nbhm.remove("k4"), is("v4"));
    sz = 0;
    sum = 0;
    for (String s : _nbhm.keySet())
    {
        sz++;
        assertThat("", s.charAt(0), is('k'));
        int x = Integer.parseInt(s.substring(1));
        sum += x;
        assertTrue(x >= 0 && x <= (CNT - 1));
        String v = _nbhm.get(s);
        assertThat("", v.charAt(0), is('v'));
        assertThat("", s.substring(1), is(v.substring(1)));
    }
    assertThat("Found " + (CNT - 2) + " ints", sz, is(CNT - 2));
    assertThat("Found all integers in list", sum, is(CNT * (CNT - 1) / 2 - (3 + 4)));
    _nbhm.clear();
}
// Do some simple concurrent testing
@Test
public void testConcurrentSimple() throws InterruptedException
{
    final NonBlockingHashMap<String, String> nbhm = new NonBlockingHashMap<>();
    // In 2 threads, add & remove even & odd elements concurrently
    Thread t1 = new Thread()
    {
        public void run()
        {
            work_helper(nbhm, "T1", 1); // odd keys
        }
    };
    t1.start();
    work_helper(nbhm, "T0", 0); // even keys, on the calling thread
    t1.join();
    // In the end, all members should be removed
    StringBuilder buf = new StringBuilder();
    buf.append("Should be emptyset but has these elements: {");
    boolean found = false;
    for (String x : nbhm.keySet())
    {
        buf.append(" ").append(x);
        found = true;
    }
    if (found)
    {
        // diagnostic only; the asserts below decide pass/fail
        System.out.println(buf + " }");
    }
    assertThat("concurrent size=0", nbhm.size(), is(0));
    assertThat("keyset size=0", nbhm.keySet().size(), is(0));
}
// Insert then remove this thread's half of the keys, for 10 rounds.
// d selects even (0) or odd (1) keys, so two concurrent callers never
// touch the same key and every putIfAbsent/remove must succeed.
void work_helper(NonBlockingHashMap<String, String> nbhm, String thrd, int d)
{
    final int ITERS = 20000;
    for (int j = 0; j < 10; j++)
    {
        for (int i = d; i < ITERS; i += 2)
        {
            // nullValue() replaces the original is((String) null) for
            // consistency with the rest of this file's matchers.
            assertThat("this key not in there, so putIfAbsent must work",
                nbhm.putIfAbsent("k" + i, thrd), nullValue());
        }
        for (int i = d; i < ITERS; i += 2)
        {
            assertTrue(nbhm.remove("k" + i, thrd));
        }
    }
}
// Two-entry map: verify keySet/values/entrySet sizes and membership.
@Test
public final void testNonBlockingHashMapSize()
{
    NonBlockingHashMap<Long, String> items = new NonBlockingHashMap<>();
    items.put(100L, "100");
    items.put(101L, "101");
    assertEquals("keySet().size()", 2, items.keySet().size());
    assertTrue("keySet().contains(100)", items.keySet().contains(100L));
    assertTrue("keySet().contains(101)", items.keySet().contains(101L));
    assertEquals("values().size()", 2, items.values().size());
    assertTrue("values().contains(\"100\")", items.values().contains("100"));
    assertTrue("values().contains(\"101\")", items.values().contains("101"));
    assertEquals("entrySet().size()", 2, items.entrySet().size());
    // entrySet has no guaranteed order, so scan for both entries
    boolean found100 = false;
    boolean found101 = false;
    for (Map.Entry<Long, String> entry : items.entrySet())
    {
        if (entry.getKey().equals(100L))
        {
            assertEquals("entry[100].getValue()==\"100\"", "100", entry.getValue());
            found100 = true;
        }
        else if (entry.getKey().equals(101L))
        {
            assertEquals("entry[101].getValue()==\"101\"", "101", entry.getValue());
            found101 = true;
        }
    }
    assertTrue("entrySet().contains([100])", found100);
    assertTrue("entrySet().contains([101])", found101);
}
// Concurrent insertion & then iterator test.
@Test
public void testNonBlockingHashMapIterator() throws InterruptedException
{
    final int ITEM_COUNT1 = 1000;
    final int THREAD_COUNT = 5;
    final int PER_CNT = ITEM_COUNT1 / THREAD_COUNT;
    final int ITEM_COUNT = PER_CNT * THREAD_COUNT; // fix roundoff for odd thread counts
    NonBlockingHashMap<Long, TestKey> nbhml = new NonBlockingHashMap<>();
    // use a barrier to open the gate for all threads at once to avoid rolling
    // start and no actual concurrency
    final CyclicBarrier barrier = new CyclicBarrier(THREAD_COUNT);
    final ExecutorService ex = Executors.newFixedThreadPool(THREAD_COUNT);
    final CompletionService<Object> co = new ExecutorCompletionService<>(ex);
    for (int i = 0; i < THREAD_COUNT; i++)
    {
        // each feeder owns the disjoint key range [i*PER_CNT, (i+1)*PER_CNT)
        co.submit(new NBHMLFeeder(nbhml, PER_CNT, barrier, i * PER_CNT));
    }
    for (int retCount = 0; retCount < THREAD_COUNT; retCount++)
    {
        co.take(); // wait for every feeder to finish
    }
    ex.shutdown();
    assertEquals("values().size()", ITEM_COUNT, nbhml.values().size());
    assertEquals("entrySet().size()", ITEM_COUNT, nbhml.entrySet().size());
    int itemCount = 0;
    for (TestKey K : nbhml.values())
    {
        itemCount++; // element unused; only the iteration count matters
    }
    assertEquals("values().iterator() count", ITEM_COUNT, itemCount);
}
// --- Customer Test Case 3 ------------------------------------------------
// Builds a feeder pre-loaded with a fixed customer data set: keys bucketed
// into three 'type' groups (so getMapMultithreaded runs three threads),
// including negative hash codes.
private TestKeyFeeder getTestKeyFeeder()
{
    final TestKeyFeeder feeder = new TestKeyFeeder();
    feeder.checkedPut(10401000001844L, 657829272, 680293140); // section 12
    feeder.checkedPut(10401000000614L, 657829272, 401326994); // section 12
    feeder.checkedPut(10400345749304L, 2095121916, -9852212); // section 12
    feeder.checkedPut(10401000002204L, 657829272, 14438460); // section 12
    feeder.checkedPut(10400345749234L, 1186831289, -894006017); // section 12
    feeder.checkedPut(10401000500234L, 969314784, -2112018706); // section 12
    feeder.checkedPut(10401000000284L, 657829272, 521425852); // section 12
    feeder.checkedPut(10401000002134L, 657829272, 208406306); // section 12
    feeder.checkedPut(10400345749254L, 2095121916, -341939818); // section 12
    feeder.checkedPut(10401000500384L, 969314784, -2136811544); // section 12
    feeder.checkedPut(10401000001944L, 657829272, 935194952); // section 12
    feeder.checkedPut(10400345749224L, 1186831289, -828214183); // section 12
    feeder.checkedPut(10400345749244L, 2095121916, -351234120); // section 12
    feeder.checkedPut(10400333128994L, 2095121916, -496909430); // section 12
    feeder.checkedPut(10400333197934L, 2095121916, 2147144926); // section 12
    feeder.checkedPut(10400333197944L, 2095121916, -2082366964); // section 12
    feeder.checkedPut(10400336947684L, 2095121916, -1404212288); // section 12
    feeder.checkedPut(10401000000594L, 657829272, 124369790); // section 12
    feeder.checkedPut(10400331896264L, 2095121916, -1028383492); // section 12
    feeder.checkedPut(10400332415044L, 2095121916, 1629436704); // section 12
    feeder.checkedPut(10400345749614L, 1186831289, 1027996827); // section 12
    feeder.checkedPut(10401000500424L, 969314784, -1871616544); // section 12
    feeder.checkedPut(10400336947694L, 2095121916, -1468802722); // section 12
    feeder.checkedPut(10410002672481L, 2154973, 1515288586); // section 12
    feeder.checkedPut(10410345749171L, 2154973, 2084791828); // section 12
    feeder.checkedPut(10400004960671L, 2154973, 1554754674); // section 12
    feeder.checkedPut(10410009983601L, 2154973, -2049707334); // section 12
    feeder.checkedPut(10410335811601L, 2154973, 1547385114); // section 12
    feeder.checkedPut(10410000005951L, 2154973, -1136117016); // section 12
    feeder.checkedPut(10400004938331L, 2154973, -1361373018); // section 12
    feeder.checkedPut(10410001490421L, 2154973, -818792874); // section 12
    feeder.checkedPut(10400001187131L, 2154973, 649763142); // section 12
    feeder.checkedPut(10410000409071L, 2154973, -614460616); // section 12
    feeder.checkedPut(10410333717391L, 2154973, 1343531416); // section 12
    feeder.checkedPut(10410336680071L, 2154973, -914544144); // section 12
    feeder.checkedPut(10410002068511L, 2154973, -746995576); // section 12
    feeder.checkedPut(10410336207851L, 2154973, 863146156); // section 12
    feeder.checkedPut(10410002365251L, 2154973, 542724164); // section 12
    feeder.checkedPut(10400335812581L, 2154973, 2146284796); // section 12
    feeder.checkedPut(10410337345361L, 2154973, -384625318); // section 12
    feeder.checkedPut(10410000409091L, 2154973, -528258556); // section 12
    return feeder;
}
// ---
// Feed the customer data set in concurrently, then verify sizes and that
// two successive values() iterations agree with each other.
@Test
public void testNonBlockingHashMapIteratorMultithreaded() throws InterruptedException, ExecutionException
{
    TestKeyFeeder feeder = getTestKeyFeeder();
    final int itemCount = feeder.size();
    // validate results
    final NonBlockingHashMap<Long, TestKey> items = feeder.getMapMultithreaded();
    assertEquals("size()", itemCount, items.size());
    assertEquals("values().size()", itemCount, items.values().size());
    assertEquals("entrySet().size()", itemCount, items.entrySet().size());
    int iteratorCount = 0;
    for (TestKey m : items.values())
    {
        iteratorCount++;
    }
    // sometimes a different result comes back the second time
    int iteratorCount2 = 0;
    for (TestKey m2 : items.values())
    {
        iteratorCount2++;
    }
    assertEquals("iterator counts differ", iteratorCount, iteratorCount2);
    assertEquals("values().iterator() count", itemCount, iteratorCount);
}
// --- Tests on equality of values
// replace(key, expected, new) must compare the expected value with
// equals(), not ==.  `new Integer(...)` is deliberate: it guarantees a
// distinct instance (Integer.valueOf would return the same cached object
// for small values and prove nothing).
@Test
public void replaceResultIsBasedOnEquality() {
    NonBlockingHashMap<Integer, Integer> map = new NonBlockingHashMap<>();
    Integer initialValue = new Integer(10);
    map.put(1, initialValue);
    assertTrue(map.replace(1, initialValue, 20));
    // equal-but-not-identical witness value must also match
    assertTrue(map.replace(1, new Integer(20), 30));
}
// remove(key, value) must compare with equals(), not ==; see the note on
// deliberate `new Integer(...)` usage in replaceResultIsBasedOnEquality.
@Test
public void removeResultIsBasedOnEquality() {
    NonBlockingHashMap<Integer, Integer> map = new NonBlockingHashMap<>();
    Integer initialValue = new Integer(10);
    map.put(1, initialValue);
    assertTrue(map.remove(1, initialValue));
    map.put(1, initialValue);
    // equal-but-not-identical witness value must also match
    assertTrue(map.remove(1, new Integer(10)));
}
// Throw a ClassCastException if I see a tombstone during key-compares
private static class KeyBonk
{
    final int _x;

    KeyBonk(int i)
    {
        _x = i;
    }

    @Override
    public int hashCode()
    {
        // deliberately weak hash (drops low bits) to force collisions
        return (_x >> 2);
    }

    // Fixed the mangled brace layout (hashCode's closing brace shared a
    // line with this declaration) and added @Override annotations.
    @Override
    public boolean equals(Object o)
    {
        return o != null && ((KeyBonk) o)._x // Throw CCE here
            == this._x;
    }

    @Override
    public String toString()
    {
        return "Bonk_" + Integer.toString(_x);
    }
}
// --- NBHMLFeeder ---
// Class to be called from another thread, to get concurrent installs into
// the table.
static private class NBHMLFeeder implements Callable<Object>
{
    // shared RNG for generated TestKey payloads (seeded once per JVM)
    static private final Random _rand = new Random(System.currentTimeMillis());
    private final NonBlockingHashMap<Long, TestKey> _map;
    private final int _count;             // number of entries this feeder inserts
    private final CyclicBarrier _barrier; // lines all feeders up for a racing start
    private final long _offset;           // first key of this feeder's disjoint range

    public NBHMLFeeder(
        final NonBlockingHashMap<Long, TestKey> map,
        final int count,
        final CyclicBarrier barrier,
        final long offset)
    {
        _map = map;
        _count = count;
        _barrier = barrier;
        _offset = offset;
    }

    public Object call() throws Exception
    {
        _barrier.await(); // barrier, to force racing start
        // keys [_offset, _offset+_count) are unique to this feeder, so
        // feeders race on the table, not on individual keys
        for (long j = 0; j < _count; j++)
        {
            _map.put(
                j + _offset,
                new TestKey(_rand.nextLong(), _rand.nextInt(), (short) _rand.nextInt(Short.MAX_VALUE)));
        }
        return null;
    }
}
// --- TestKey ---
// Funny key tests all sorts of things, has a pre-wired hashCode & equals.
static private final class TestKey
{
    public final int _type;
    public final long _id;
    public final int _hash; // caller-supplied hash, independent of _id/_type

    public TestKey(final long id, final int type, int hash)
    {
        _id = id;
        _type = type;
        _hash = hash;
    }

    public int hashCode()
    {
        // NOTE: the hash is NOT derived from the fields equals() compares;
        // two equal keys with different _hash values would violate the
        // hashCode contract.  Deliberate for these tests, which always
        // build equal keys with equal hashes.
        return _hash;
    }

    public boolean equals(Object object)
    {
        if (null == object)
        {
            return false;
        }
        if (object == this)
        {
            return true;
        }
        if (object.getClass() != this.getClass())
        {
            return false;
        }
        final TestKey other = (TestKey) object;
        // equality is based on type+id only; _hash intentionally excluded
        return (this._type == other._type && this._id == other._id);
    }

    public String toString()
    {
        return String.format("%s:%d,%d,%d", getClass().getSimpleName(), _id, _type, _hash);
    }
}
// ---
// Collects TestKeys bucketed by 'type', then installs them into one map
// using one thread per bucket and reports how many inserts succeeded.
static private class TestKeyFeeder
{
    private final Hashtable<Integer, List<TestKey>> _items = new Hashtable<>();
    private int _size = 0; // total keys across all buckets

    public int size()
    {
        return _size;
    }

    // Put items into the hashtable, sorted by 'type' into LinkedLists.
    public void checkedPut(final long id, final int type, final int hash)
    {
        _size++;
        final TestKey item = new TestKey(id, type, hash);
        if (!_items.containsKey(type))
        {
            _items.put(type, new LinkedList<>());
        }
        _items.get(type).add(item);
    }

    public NonBlockingHashMap<Long, TestKey> getMapMultithreaded() throws InterruptedException, ExecutionException
    {
        final int threadCount = _items.keySet().size();
        final NonBlockingHashMap<Long, TestKey> map = new NonBlockingHashMap<>();
        // use a barrier to open the gate for all threads at once to avoid rolling start and no actual concurrency
        final CyclicBarrier barrier = new CyclicBarrier(threadCount);
        final ExecutorService ex = Executors.newFixedThreadPool(threadCount);
        final CompletionService<Integer> co = new ExecutorCompletionService<>(ex);
        for (Integer type : _items.keySet())
        {
            // A linked-list of things to insert
            List<TestKey> items = _items.get(type);
            TestKeyFeederThread feeder = new TestKeyFeederThread(items, map, barrier);
            co.submit(feeder);
        }
        // wait for all threads to return
        int itemCount = 0;
        for (int retCount = 0; retCount < threadCount; retCount++)
        {
            final Future<Integer> result = co.take();
            itemCount += result.get();
        }
        ex.shutdown();
        return map;
    }
}
// --- TestKeyFeederThread
// Inserts a fixed list of TestKeys into the shared map from its own thread,
// returning how many inserts were the first writer for their key.
static private class TestKeyFeederThread implements Callable<Integer>
{
    private final NonBlockingHashMap<Long, TestKey> _map;
    private final List<TestKey> _items;
    private final CyclicBarrier _barrier;

    public TestKeyFeederThread(
        final List<TestKey> items,
        final NonBlockingHashMap<Long, TestKey> map,
        final CyclicBarrier barrier)
    {
        _map = map;
        _items = items;
        _barrier = barrier;
    }

    public Integer call() throws Exception
    {
        _barrier.await(); // line all feeder threads up for a racing start
        int count = 0;
        for (TestKey item : _items)
        {
            // BUG FIX: the original called _map.contains(item._id).  On this
            // Hashtable-compatible map, contains() checks VALUES (legacy
            // java.util.Hashtable semantics), so comparing against a Long
            // key could never match a TestKey value and the pre-check was
            // dead.  containsKey is what was intended.
            if (_map.containsKey(item._id))
            {
                System.err.printf("COLLISION DETECTED: %s exists\n", item.toString());
            }
            final TestKey exists = _map.putIfAbsent(item._id, item);
            if (exists == null)
            {
                count++;
            }
            else
            {
                System.err.printf("COLLISION DETECTED: %s exists as %s\n", item.toString(), exists.toString());
            }
        }
        return count;
    }
}
// This test is a copy of the JCK test Hashtable2027, which is incorrect.
// The test requires a particular order of values to appear in the esa
// array - but this is not part of the spec. A different implementation
// might put the same values into the array but in a different order.
//public void testToArray() {
// NonBlockingHashMap ht = new NonBlockingHashMap();
//
// ht.put("Nine", new Integer(9));
// ht.put("Ten", new Integer(10));
// ht.put("Ten1", new Integer(100));
//
// Collection es = ht.values();
//
// Object [] esa = es.toArray();
//
// ht.remove("Ten1");
//
// assertEquals( "size check", es.size(), 2 );
// assertEquals( "iterator_order[0]", new Integer( 9), esa[0] );
// assertEquals( "iterator_order[1]", new Integer(10), esa[1] );
//}
}
| |
package com.teocci.ytinbg.player;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.util.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
public class AudioPlayer implements Runnable
{
public final String TAG = AudioPlayer.class.getSimpleName();

private MediaExtractor extractor; // pulls encoded frames from the source
private MediaCodec codec;         // decodes compressed audio to PCM
private AudioTrack audioTrack;    // renders decoded PCM

private PlayerEvents events = null;            // optional callback listener
private PlayerStates state = new PlayerStates(); // current player state

private String sourcePath = null; // file path or URL source, if set
private int sourceRawResId = -1;  // raw resource id source, if set
private Context context;          // needed to resolve the raw resource

private boolean stop = false;              // set by stop() to end the decode loop
private Handler handler = new Handler();   // posts event callbacks to the creating thread

// Track parameters read from the media format header in run().
private String mime = null;
private int sampleRate = 0, channels = 0, bitrate = 0;
// Both in microseconds (names end in "Us"); duration == 0 means live stream.
private long presentationTimeUs = 0, duration = 0;
/** Registers the listener that receives player lifecycle callbacks. */
public void setEventsListener(PlayerEvents events)
{
    this.events = events;
}

public AudioPlayer() {}

/** Convenience constructor that registers the listener immediately. */
public AudioPlayer(PlayerEvents events)
{
    setEventsListener(events);
}
/**
 * Live streams report no duration in their format header.
 *
 * @return true if the loaded source is a live stream (duration == 0)
 */
public boolean isLive()
{
    return (duration == 0);
}
/**
 * Sets the data source — a file path or a URL — to play encoded audio from.
 *
 * @param src the data source path or URL
 */
public void setDataSource(String src)
{
    sourcePath = src;
}

/**
 * Sets a raw application resource as the data source.
 *
 * @param context context used to open the raw resource
 * @param resid   raw resource id of the encoded audio
 */
public void setDataSource(Context context, int resid)
{
    this.context = context;
    sourceRawResId = resid;
}
/**
 * Starts or resumes playback.  From STOPPED a fresh decoder thread is
 * spawned (this object is the Runnable); from READY_TO_PLAY (paused) the
 * decoder thread blocked in waitPlay() is woken up.
 */
public void play()
{
    if (state.get() == PlayerStates.STOPPED) {
        stop = false;
        new Thread(this).start();
    }
    if (state.get() == PlayerStates.READY_TO_PLAY) {
        state.set(PlayerStates.PLAYING);
        syncNotify(); // wake the decoder thread out of waitPlay()
    }
}
/** @return true while the player is in the PLAYING state. */
public synchronized boolean isPlaying()
{
    return state.get() == PlayerStates.PLAYING;
}
/**
 * Current playback position as a percentage (0-100) of the total duration.
 * Returns 0 for live streams (duration == 0), where a percentage is
 * meaningless — the original divided by zero in that case.
 */
public synchronized int getCurrentPosition()
{
    if (duration == 0) return 0; // live stream: no meaningful percentage
    // The original computed Math.round(presentationTimeUs / duration * 100):
    // long/long division truncates to 0 for the whole track, so the position
    // stayed at 0 until the very end.  Scale before dividing instead.
    return (int) (presentationTimeUs * 100 / duration);
}
/**
 * Call notify to control the PAUSE (waiting) state, when the state is changed
 */
public synchronized void syncNotify()
{
    notify(); // wakes the decoder thread blocked in waitPlay()
}
/** Asks the decoder loop to exit; takes effect at its next flag check. */
public void stop()
{
    stop = true;
}

/** Pauses playback: the decoder thread will block in waitPlay(). */
public void pause()
{
    state.set(PlayerStates.READY_TO_PLAY);
}
/** Seeks to an absolute position (microseconds), snapping to the closest sync frame. */
public void seek(long pos)
{
    extractor.seekTo(pos, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
}

/** Seeks to a position given as a percentage (0-100) of the total duration. */
public void seekTo(int percent)
{
    long pos = percent * duration / 100;
    seek(pos);
}
/**
 * A pause mechanism that would block current thread when pause flag is set (READY_TO_PLAY)
 */
public synchronized void waitPlay()
{
    // if (duration == 0) return;
    // Loop rather than a single wait() so spurious wakeups re-check state.
    while (state.get() == PlayerStates.READY_TO_PLAY) {
        try {
            wait(); // released by syncNotify() when play() resumes playback
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
/**
 * Decoder thread body: configures MediaExtractor + MediaCodec + AudioTrack,
 * then pumps compressed samples into the codec and writes decoded PCM to the
 * AudioTrack until end-of-stream, a stall, or an external stop request.
 * All UI callbacks ({@code events}) are posted through {@code handler}.
 */
@Override
public void run()
{
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
    // Extractor gets information about the stream
    extractor = new MediaExtractor();
    // Try to set the source, this might fail
    try {
        if (sourcePath != null) extractor.setDataSource(this.sourcePath);
        if (sourceRawResId != -1) {
            AssetFileDescriptor fd = context.getResources().openRawResourceFd(sourceRawResId);
            extractor.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getDeclaredLength());
            fd.close();
        }
    } catch (Exception e) {
        Log.e(TAG, "exception:" + e.getMessage());
        e.printStackTrace();
        // Report the failure back on the caller's handler thread and bail out.
        if (events != null) handler.post(new Runnable()
        {
            @Override
            public void run() { events.onError(); }
        });
        return;
    }
    // Read track header
    MediaFormat format = null;
    try {
        format = extractor.getTrackFormat(0);
        mime = format.getString(MediaFormat.KEY_MIME);
        sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
        channels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
        // If duration is 0, we are probably playing a live stream
        duration = format.getLong(MediaFormat.KEY_DURATION);
        bitrate = format.getInteger(MediaFormat.KEY_BIT_RATE);
    } catch (Exception e) {
        Log.e(TAG, "Reading format parameters exception:" + e.getMessage());
        e.printStackTrace();
        // Don't exit, tolerate this error, we'll fail later if this is critical
    }
    Log.d(TAG, "Track info: mime:" + mime +
            " sampleRate:" + sampleRate +
            " channels:" + channels +
            " bitrate:" + bitrate +
            " duration:" + duration
    );
    // check we have audio content we know
    if (format == null || !mime.startsWith("audio/")) {
        if (events != null) handler.post(() -> events.onError());
        return;
    }
    // Create the actual decoder, using the mime to select
    try {
        codec = MediaCodec.createDecoderByType(mime);
    } catch (IOException e) {
        e.printStackTrace();
    }
    // Check we have a valid codec instance
    if (codec == null) {
        if (events != null) handler.post(() -> events.onError());
        return;
    }
    // state.set(PlayerStates.READY_TO_PLAY);
    if (events != null) handler.post(() -> events.onStart(mime, sampleRate, channels, duration));
    codec.configure(format, null, null, 0);
    codec.start();
    ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
    ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
    // Configure AudioTrack
    int channelConfiguration = channels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
    int minSize = AudioTrack.getMinBufferSize(sampleRate, channelConfiguration, AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfiguration,
            AudioFormat.ENCODING_PCM_16BIT, minSize, AudioTrack.MODE_STREAM);
    // Start playing, we will feed the AudioTrack later
    audioTrack.play();
    extractor.selectTrack(0);
    // Start decoding
    final long kTimeOutUs = 1000;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;
    // Stall guard: give up after 10 consecutive iterations with no output.
    int noOutputCounter = 0;
    int noOutputCounterLimit = 10;
    state.set(PlayerStates.PLAYING);
    while (!sawOutputEOS && noOutputCounter < noOutputCounterLimit && !stop) {
        // Pause implementation
        waitPlay();
        noOutputCounter++;
        // Read a buffer before feeding it to the decoder
        if (!sawInputEOS) {
            int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
            if (inputBufIndex >= 0) {
                ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                int sampleSize = extractor.readSampleData(dstBuf, 0);
                if (sampleSize < 0) {
                    Log.d(TAG, "saw input EOS. Stopping playback");
                    sawInputEOS = true;
                    sampleSize = 0;
                } else {
                    presentationTimeUs = extractor.getSampleTime();
                    // Guard against duration == 0 (live streams) before dividing.
                    final int percent = (duration == 0) ? 0 : (int) (100 * presentationTimeUs / duration);
                    if (events != null) {
                        handler.post(() -> events.onPlayUpdate(
                                percent,
                                presentationTimeUs / 1000,
                                duration / 1000
                        ));
                    }
                }
                codec.queueInputBuffer(
                        inputBufIndex,
                        0,
                        sampleSize,
                        presentationTimeUs,
                        sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0
                );
                if (!sawInputEOS) {
                    extractor.advance();
                }
            } else {
                Log.e(TAG, "inputBufIndex " + inputBufIndex);
            }
        } // !sawInputEOS
        // Decode to PCM and push it to the AudioTrack player
        int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
        if (res >= 0) {
            if (info.size > 0) noOutputCounter = 0;
            int outputBufIndex = res;
            ByteBuffer buf = codecOutputBuffers[outputBufIndex];
            final byte[] chunk = new byte[info.size];
            buf.get(chunk);
            buf.clear();
            if (chunk.length > 0) {
                audioTrack.write(chunk, 0, chunk.length);
                // if(this.state.get() != PlayerStates.PLAYING) {
                // if (events != null) handler.post(new Runnable() { @Override public void run() { events.onPlay(); } });
                // state.set(PlayerStates.PLAYING);
                // }
            }
            codec.releaseOutputBuffer(outputBufIndex, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d(TAG, "saw output EOS.");
                sawOutputEOS = true;
            }
        } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = codec.getOutputBuffers();
            Log.d(TAG, "output buffers have changed.");
        } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat outFormat = codec.getOutputFormat();
            Log.d(TAG, "output format has changed to " + outFormat);
        } else {
            Log.d(TAG, "dequeueOutputBuffer returned " + res);
        }
    }
    Log.d(TAG, "stopping...");
    if (codec != null) {
        codec.stop();
        codec.release();
        codec = null;
    }
    if (audioTrack != null) {
        audioTrack.flush();
        audioTrack.release();
        audioTrack = null;
    }
    // Clear source and the other globals
    sourcePath = null;
    sourceRawResId = -1;
    duration = 0;
    mime = null;
    sampleRate = 0;
    channels = 0;
    bitrate = 0;
    presentationTimeUs = 0;
    // NOTE(review): duration was already reset above — this assignment is redundant.
    duration = 0;
    state.set(PlayerStates.STOPPED);
    stop = true;
    if (noOutputCounter >= noOutputCounterLimit) {
        // Codec produced nothing for too long: treat as an error, not a stop.
        if (events != null) handler.post(new Runnable()
        {
            @Override
            public void run() { events.onError(); }
        });
    } else {
        if (events != null) handler.post(new Runnable()
        {
            @Override
            public void run() { events.onStop(); }
        });
    }
}
/**
 * Lists every codec known to the device, one numbered entry per codec with
 * the MIME types it supports. Useful for debugging decoder-selection issues.
 *
 * @return a human-readable, newline-separated codec listing
 */
public static String listCodecs()
{
    // Build with a StringBuilder: the old code used String += in a loop,
    // re-copying the whole result on every iteration (O(n^2)).
    StringBuilder results = new StringBuilder();
    int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
        results.append(i + 1).append(". ").append(codecInfo.getName()).append(' ');
        // Each supported MIME type, space-separated (trailing space kept
        // for byte-compatibility with the previous output format).
        for (String type : codecInfo.getSupportedTypes()) {
            results.append(type).append(' ');
        }
        results.append("\n\n");
    }
    return results.toString();
}
/**
 * Sets per-channel output gain on the underlying AudioTrack. No-op when
 * playback has not started or has already been released.
 *
 * @param left  left-channel gain
 * @param right right-channel gain
 */
public void setVolume(float left, float right)
{
    if (audioTrack != null) {
        audioTrack.setStereoVolume(left, right);
    }
}
/**
 * Releases the decoder and audio output and resets all playback state.
 * Safe to call repeatedly: null checks skip already-released resources.
 * Mirrors the cleanup performed at the end of {@code run()}.
 */
public void release()
{
    if (codec != null) {
        codec.stop();
        codec.release();
        codec = null;
    }
    if (audioTrack != null) {
        audioTrack.flush();
        audioTrack.release();
        audioTrack = null;
    }
    // Clear source and the other globals. (The old code assigned
    // duration = 0 twice; once is enough.)
    sourcePath = null;
    sourceRawResId = -1;
    duration = 0;
    mime = null;
    sampleRate = 0;
    channels = 0;
    bitrate = 0;
    presentationTimeUs = 0;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.geo;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.test.geo.RandomShapeGenerator;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.GeoShapeFilterBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.FilterBuilders.geoIntersectionFilter;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
/**
 * Integration tests for the {@code geo_shape} field type: indexing points and
 * polygons, intersection filters/queries (inline and pre-indexed shapes),
 * geometry collections, source exclusions, and persistence of the
 * {@code orientation} mapping parameter across a full cluster restart.
 */
public class GeoShapeIntegrationTests extends ElasticsearchIntegrationTest {
    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        // "local" gateway persists state to disk so mappings survive the
        // internalCluster().fullRestart() used by testOrientationPersistence.
        Settings settings = super.nodeSettings(nodeOrdinal);
        return ImmutableSettings.builder().put("gateway.type", "local").put(settings).build();
    }

    // A document with an explicit null shape must index without error and
    // store no value for the field.
    @Test
    public void testNullShape() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("location")
                .field("type", "geo_shape")
                .endObject().endObject()
                .endObject().endObject().string();
        assertAcked(prepareCreate("test").addMapping("type1", mapping));
        ensureGreen();
        indexRandom(false, client().prepareIndex("test", "type1", "aNullshape").setSource("{\"location\": null}"));
        GetResponse result = client().prepareGet("test", "type1", "aNullshape").execute().actionGet();
        assertThat(result.getField("location"), nullValue());
    }

    // An envelope intersection filter (and the equivalent query) over two
    // indexed points must match only the point inside the envelope.
    @Test
    public void testIndexPointsFilterRectangle() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("location")
                .field("type", "geo_shape")
                .field("tree", "quadtree")
                .endObject().endObject()
                .endObject().endObject().string();
        assertAcked(prepareCreate("test").addMapping("type1", mapping));
        ensureGreen();
        indexRandom(true,
                client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
                        .field("name", "Document 1")
                        .startObject("location")
                        .field("type", "point")
                        .startArray("coordinates").value(-30).value(-30).endArray()
                        .endObject()
                        .endObject()),
                client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject()
                        .field("name", "Document 2")
                        .startObject("location")
                        .field("type", "point")
                        .startArray("coordinates").value(-45).value(-50).endArray()
                        .endObject()
                        .endObject()));
        // Envelope covering doc 1 (-30,-30) but not doc 2 (-45,-50).
        ShapeBuilder shape = ShapeBuilder.newEnvelope().topLeft(-45, 45).bottomRight(45, -45);
        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(),
                        geoIntersectionFilter("location", shape)))
                .execute().actionGet();
        assertSearchResponse(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
        assertThat(searchResponse.getHits().hits().length, equalTo(1));
        assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
        // Same expectation via the geo_shape query instead of the filter.
        searchResponse = client().prepareSearch()
                .setQuery(geoShapeQuery("location", shape))
                .execute().actionGet();
        assertSearchResponse(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
        assertThat(searchResponse.getHits().hits().length, equalTo(1));
        assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
    }

    // Regression test for prefix-tree edge handling: a polygon near a quad
    // boundary must still intersect a nearby envelope.
    @Test
    public void testEdgeCases() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("location")
                .field("type", "geo_shape")
                .field("tree", "quadtree")
                .endObject().endObject()
                .endObject().endObject().string();
        assertAcked(prepareCreate("test").addMapping("type1", mapping));
        ensureGreen();
        indexRandom(true, client().prepareIndex("test", "type1", "blakely").setSource(jsonBuilder().startObject()
                .field("name", "Blakely Island")
                .startObject("location")
                .field("type", "polygon")
                .startArray("coordinates").startArray()
                .startArray().value(-122.83).value(48.57).endArray()
                .startArray().value(-122.77).value(48.56).endArray()
                .startArray().value(-122.79).value(48.53).endArray()
                .startArray().value(-122.83).value(48.57).endArray() // close the polygon
                .endArray().endArray()
                .endObject()
                .endObject()));
        ShapeBuilder query = ShapeBuilder.newEnvelope().topLeft(-122.88, 48.62).bottomRight(-122.82, 48.54);
        // This search would fail if both geoshape indexing and geoshape filtering
        // used the bottom-level optimization in SpatialPrefixTree#recursiveGetNodes.
        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(),
                        geoIntersectionFilter("location", query)))
                .execute().actionGet();
        assertSearchResponse(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
        assertThat(searchResponse.getHits().hits().length, equalTo(1));
        assertThat(searchResponse.getHits().getAt(0).id(), equalTo("blakely"));
    }

    // The filter/query shape may live in a separate index and be referenced
    // by id/type instead of being passed inline.
    @Test
    public void testIndexedShapeReference() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("location")
                .field("type", "geo_shape")
                .field("tree", "quadtree")
                .endObject().endObject()
                .endObject().endObject().string();
        assertAcked(prepareCreate("test").addMapping("type1", mapping));
        createIndex("shapes");
        ensureGreen();
        ShapeBuilder shape = ShapeBuilder.newEnvelope().topLeft(-45, 45).bottomRight(45, -45);
        indexRandom(true,
                client().prepareIndex("shapes", "shape_type", "Big_Rectangle").setSource(jsonBuilder().startObject()
                        .field("shape", shape).endObject()),
                client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
                        .field("name", "Document 1")
                        .startObject("location")
                        .field("type", "point")
                        .startArray("coordinates").value(-30).value(-30).endArray()
                        .endObject()
                        .endObject()));
        SearchResponse searchResponse = client().prepareSearch("test")
                .setQuery(filteredQuery(matchAllQuery(),
                        geoIntersectionFilter("location", "Big_Rectangle", "shape_type")))
                .execute().actionGet();
        assertSearchResponse(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
        assertThat(searchResponse.getHits().hits().length, equalTo(1));
        assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
        searchResponse = client().prepareSearch("test")
                .setQuery(geoShapeQuery("location", "Big_Rectangle", "shape_type"))
                .execute().actionGet();
        assertSearchResponse(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l));
        assertThat(searchResponse.getHits().hits().length, equalTo(1));
        assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
    }

    // Calling build() on a ShapeBuilder must not mutate its serialized form.
    @Test
    public void testReusableBuilder() throws IOException {
        ShapeBuilder polygon = ShapeBuilder.newPolygon()
                .point(170, -10).point(190, -10).point(190, 10).point(170, 10)
                .hole().point(175, -5).point(185, -5).point(185, 5).point(175, 5).close()
                .close();
        assertUnmodified(polygon);
        ShapeBuilder linestring = ShapeBuilder.newLineString()
                .point(170, -10).point(190, -10).point(190, 10).point(170, 10);
        assertUnmodified(linestring);
    }

    // Serializes the builder before and after build() and asserts the two
    // JSON representations are identical.
    private void assertUnmodified(ShapeBuilder builder) throws IOException {
        String before = jsonBuilder().startObject().field("area", builder).endObject().string();
        builder.build();
        String after = jsonBuilder().startObject().field("area", builder).endObject().string();
        assertThat(before, equalTo(after));
    }

    // Two geo_shape fields in one document must be parsed independently; a
    // filter referencing one of them (via indexed_shape path) must match.
    @Test
    public void testParsingMultipleShapes() throws Exception {
        String mapping = XContentFactory.jsonBuilder()
                .startObject()
                .startObject("type1")
                .startObject("properties")
                .startObject("location1")
                .field("type", "geo_shape")
                .endObject()
                .startObject("location2")
                .field("type", "geo_shape")
                .endObject()
                .endObject()
                .endObject()
                .endObject()
                .string();
        assertAcked(prepareCreate("test").addMapping("type1", mapping));
        ensureYellow();
        String p1 = "\"location1\" : {\"type\":\"polygon\", \"coordinates\":[[[-10,-10],[10,-10],[10,10],[-10,10],[-10,-10]]]}";
        String p2 = "\"location2\" : {\"type\":\"polygon\", \"coordinates\":[[[-20,-20],[20,-20],[20,20],[-20,20],[-20,-20]]]}";
        String o1 = "{" + p1 + ", " + p2 + "}";
        indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(o1));
        // Filter against the document's own location2 shape via indexed_shape.
        String filter = "{\"geo_shape\": {\"location2\": {\"indexed_shape\": {"
                + "\"id\": \"1\","
                + "\"type\": \"type1\","
                + "\"index\": \"test\","
                + "\"path\": \"location2\""
                + "}}}}";
        SearchResponse result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).setPostFilter(filter).execute().actionGet();
        assertSearchResponse(result);
        assertHitCount(result, 1);
    }

    // indexedShapePath must resolve shapes nested at arbitrary depth in the
    // source document, for both the filter and the query variants.
    @Test
    public void testShapeFetchingPath() throws Exception {
        createIndex("shapes");
        assertAcked(prepareCreate("test").addMapping("type", "location", "type=geo_shape"));
        String location = "\"location\" : {\"type\":\"polygon\", \"coordinates\":[[[-10,-10],[10,-10],[10,10],[-10,10],[-10,-10]]]}";
        // The shapes doc nests the same polygon at paths location, 1.location,
        // 1.2.location and 1.2.3.location.
        indexRandom(true,
                client().prepareIndex("shapes", "type", "1")
                        .setSource(
                                String.format(
                                        Locale.ROOT, "{ %s, \"1\" : { %s, \"2\" : { %s, \"3\" : { %s } }} }", location, location, location, location
                                )
                        ),
                client().prepareIndex("test", "type", "1")
                        .setSource(jsonBuilder().startObject().startObject("location")
                                .field("type", "polygon")
                                .startArray("coordinates").startArray()
                                .startArray().value(-20).value(-20).endArray()
                                .startArray().value(20).value(-20).endArray()
                                .startArray().value(20).value(20).endArray()
                                .startArray().value(-20).value(20).endArray()
                                .startArray().value(-20).value(-20).endArray()
                                .endArray().endArray()
                                .endObject().endObject()));
        ensureSearchable("test", "shapes");
        GeoShapeFilterBuilder filter = FilterBuilders.geoShapeFilter("location", "1", "type", ShapeRelation.INTERSECTS)
                .indexedShapeIndex("shapes")
                .indexedShapePath("location");
        SearchResponse result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
                .setPostFilter(filter).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
        filter = FilterBuilders.geoShapeFilter("location", "1", "type", ShapeRelation.INTERSECTS)
                .indexedShapeIndex("shapes")
                .indexedShapePath("1.location");
        result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
                .setPostFilter(filter).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
        filter = FilterBuilders.geoShapeFilter("location", "1", "type", ShapeRelation.INTERSECTS)
                .indexedShapeIndex("shapes")
                .indexedShapePath("1.2.location");
        result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
                .setPostFilter(filter).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
        filter = FilterBuilders.geoShapeFilter("location", "1", "type", ShapeRelation.INTERSECTS)
                .indexedShapeIndex("shapes")
                .indexedShapePath("1.2.3.location");
        result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
                .setPostFilter(filter).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
        // now test the query variant
        GeoShapeQueryBuilder query = QueryBuilders.geoShapeQuery("location", "1", "type")
                .indexedShapeIndex("shapes")
                .indexedShapePath("location");
        result = client().prepareSearch("test").setQuery(query).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
        query = QueryBuilders.geoShapeQuery("location", "1", "type")
                .indexedShapeIndex("shapes")
                .indexedShapePath("1.location");
        result = client().prepareSearch("test").setQuery(query).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
        query = QueryBuilders.geoShapeQuery("location", "1", "type")
                .indexedShapeIndex("shapes")
                .indexedShapePath("1.2.location");
        result = client().prepareSearch("test").setQuery(query).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
        query = QueryBuilders.geoShapeQuery("location", "1", "type")
                .indexedShapeIndex("shapes")
                .indexedShapePath("1.2.3.location");
        result = client().prepareSearch("test").setQuery(query).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
    }

    // A _source "excludes" list must not drop the shape from returned hits.
    @Test // Issue 2944
    public void testThatShapeIsReturnedEvenWhenExclusionsAreSet() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties").startObject("location")
                .field("type", "geo_shape")
                .endObject().endObject()
                .startObject("_source")
                .startArray("excludes").value("nonExistingField").endArray()
                .endObject()
                .endObject().endObject()
                .string();
        assertAcked(prepareCreate("test").addMapping("type1", mapping));
        ensureGreen();
        indexRandom(true,
                client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
                        .field("name", "Document 1")
                        .startObject("location")
                        .field("type", "envelope")
                        .startArray("coordinates").startArray().value(-45.0).value(45).endArray().startArray().value(45).value(-45).endArray().endArray()
                        .endObject()
                        .endObject()));
        SearchResponse searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet();
        assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
        // The full envelope must be returned intact in _source.
        Map<String, Object> indexedMap = searchResponse.getHits().getAt(0).sourceAsMap();
        assertThat(indexedMap.get("location"), instanceOf(Map.class));
        Map<String, Object> locationMap = (Map<String, Object>) indexedMap.get("location");
        assertThat(locationMap.get("coordinates"), instanceOf(List.class));
        List<List<Number>> coordinates = (List<List<Number>>) locationMap.get("coordinates");
        assertThat(coordinates.size(), equalTo(2));
        assertThat(coordinates.get(0).size(), equalTo(2));
        assertThat(coordinates.get(0).get(0).doubleValue(), equalTo(-45.0));
        assertThat(coordinates.get(0).get(1).doubleValue(), equalTo(45.0));
        assertThat(coordinates.get(1).size(), equalTo(2));
        assertThat(coordinates.get(1).get(0).doubleValue(), equalTo(45.0));
        assertThat(coordinates.get(1).get(1).doubleValue(), equalTo(-45.0));
        assertThat(locationMap.size(), equalTo(2));
    }

    // A randomly generated geometry collection must be matched by a filter
    // built from one of its own member shapes.
    @Ignore("https://github.com/elasticsearch/elasticsearch/issues/9904")
    @Test
    public void testShapeFilterWithRandomGeoCollection() throws Exception {
        // Create a random geometry collection.
        GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(getRandom());
        logger.info("Created Random GeometryCollection containing " + gcb.numShapes() + " shapes");
        createIndex("randshapes");
        assertAcked(prepareCreate("test").addMapping("type", "location", "type=geo_shape"));
        XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("location"), null).endObject();
        indexRandom(true, client().prepareIndex("test", "type", "1").setSource(docSource));
        ensureSearchable("test");
        ShapeBuilder filterShape = (gcb.getShapeAt(randomIntBetween(0, gcb.numShapes() - 1)));
        GeoShapeFilterBuilder filter = FilterBuilders.geoShapeFilter("location", filterShape, ShapeRelation.INTERSECTS);
        SearchResponse result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
                .setPostFilter(filter).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
    }

    // Fixed geometry collection (a point plus a linestring): intersecting,
    // non-intersecting, and mixed filter collections must hit 1, 0, 1.
    @Test
    public void testShapeFilterWithDefinedGeoCollection() throws Exception {
        createIndex("shapes");
        assertAcked(prepareCreate("test").addMapping("type", "location", "type=geo_shape"));
        XContentBuilder docSource = jsonBuilder().startObject().startObject("location")
                .field("type", "geometrycollection")
                .startArray("geometries")
                .startObject()
                .field("type", "point")
                .startArray("coordinates")
                .value(100.0).value(0.0)
                .endArray()
                .endObject()
                .startObject()
                .field("type", "linestring")
                .startArray("coordinates")
                .startArray()
                .value(101.0).value(0.0)
                .endArray()
                .startArray()
                .value(102.0).value(1.0)
                .endArray()
                .endArray()
                .endObject()
                .endArray()
                .endObject().endObject();
        indexRandom(true,
                client().prepareIndex("test", "type", "1")
                        .setSource(docSource));
        ensureSearchable("test");
        GeoShapeFilterBuilder filter = FilterBuilders.geoShapeFilter("location", ShapeBuilder.newGeometryCollection().polygon(ShapeBuilder.newPolygon().point(99.0, -1.0).point(99.0, 3.0).point(103.0, 3.0).point(103.0, -1.0).point(99.0, -1.0)), ShapeRelation.INTERSECTS);
        SearchResponse result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
                .setPostFilter(filter).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
        filter = FilterBuilders.geoShapeFilter("location", ShapeBuilder.newGeometryCollection().polygon(ShapeBuilder.newPolygon().point(199.0, -11.0).point(199.0, 13.0).point(193.0, 13.0).point(193.0, -11.0).point(199.0, -11.0)), ShapeRelation.INTERSECTS);
        result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
                .setPostFilter(filter).get();
        assertSearchResponse(result);
        assertHitCount(result, 0);
        filter = FilterBuilders.geoShapeFilter("location", ShapeBuilder.newGeometryCollection()
                .polygon(ShapeBuilder.newPolygon().point(99.0, -1.0).point(99.0, 3.0).point(103.0, 3.0).point(103.0, -1.0).point(99.0, -1.0))
                .polygon(ShapeBuilder.newPolygon().point(199.0, -11.0).point(199.0, 13.0).point(193.0, 13.0).point(193.0, -11.0).point(199.0, -11.0)), ShapeRelation.INTERSECTS);
        result = client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())
                .setPostFilter(filter).get();
        assertSearchResponse(result);
        assertHitCount(result, 1);
    }

    /**
     * Test that orientation parameter correctly persists across cluster restart.
     * Maps one index with orientation "left" and one with "right", restarts the
     * whole cluster, then checks the parsed mapper still reports the expected
     * orientation (and its aliases) on each index.
     * @throws IOException
     */
    public void testOrientationPersistence() throws Exception {
        String idxName = "orientation";
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("shape")
                .startObject("properties").startObject("location")
                .field("type", "geo_shape")
                .field("orientation", "left")
                .endObject().endObject()
                .endObject().endObject().string();
        // create index
        assertAcked(prepareCreate(idxName).addMapping("shape", mapping));
        mapping = XContentFactory.jsonBuilder().startObject().startObject("shape")
                .startObject("properties").startObject("location")
                .field("type", "geo_shape")
                .field("orientation", "right")
                .endObject().endObject()
                .endObject().endObject().string();
        assertAcked(prepareCreate(idxName+"2").addMapping("shape", mapping));
        ensureGreen(idxName, idxName+"2");
        internalCluster().fullRestart();
        ensureGreen(idxName, idxName+"2");
        // left orientation test
        IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName));
        IndexService indexService = indicesService.indexService(idxName);
        FieldMapper fieldMapper = indexService.mapperService().smartNameFieldMapper("location");
        assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
        GeoShapeFieldMapper gsfm = (GeoShapeFieldMapper)fieldMapper;
        ShapeBuilder.Orientation orientation = gsfm.orientation();
        // "left" maps to CLOCKWISE (== LEFT == CW aliases).
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE));
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT));
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW));
        // right orientation test
        indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName+"2"));
        indexService = indicesService.indexService(idxName+"2");
        fieldMapper = indexService.mapperService().smartNameFieldMapper("location");
        assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
        gsfm = (GeoShapeFieldMapper)fieldMapper;
        orientation = gsfm.orientation();
        // "right" maps to COUNTER_CLOCKWISE (== RIGHT == CCW aliases).
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE));
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT));
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW));
    }

    // Resolves the name of the node hosting the first assigned shard copy of
    // the given index (used to fetch node-local services above).
    private String findNodeName(String index) {
        ClusterState state = client().admin().cluster().prepareState().get().getState();
        IndexShardRoutingTable shard = state.getRoutingTable().index(index).shard(0);
        String nodeId = shard.assignedShards().get(0).currentNodeId();
        return state.getNodes().get(nodeId).name();
    }
}
| |
package org.robolectric.shadows;
import static android.os.Build.VERSION_CODES.KITKAT_WATCH;
import static android.os.Build.VERSION_CODES.LOLLIPOP;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.robolectric.RuntimeEnvironment.castNativePtr;
import android.database.Cursor;
import android.database.CursorWindow;
import com.almworks.sqlite4java.SQLiteConstants;
import com.almworks.sqlite4java.SQLiteException;
import com.almworks.sqlite4java.SQLiteStatement;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
@Implements(value = CursorWindow.class)
public class ShadowCursorWindow {
private static final WindowData WINDOW_DATA = new WindowData();
// Allocates a new in-memory window and returns its handle, converted to an
// int or long pointer depending on the emulated SDK level.
@Implementation
protected static Number nativeCreate(String name, int cursorWindowSize) {
    return castNativePtr(WINDOW_DATA.create(name, cursorWindowSize));
}
// Pre-Lollipop overload: widen the int pointer and delegate.
@Implementation(maxSdk = KITKAT_WATCH)
protected static void nativeDispose(int windowPtr) {
    nativeDispose((long) windowPtr);
}
// Closes and unregisters the window data backing this handle.
@Implementation(minSdk = LOLLIPOP)
protected static void nativeDispose(long windowPtr) {
    WINDOW_DATA.close(windowPtr);
}
// Pre-Lollipop overload: widen the int pointer and delegate.
@Implementation(maxSdk = KITKAT_WATCH)
protected static byte[] nativeGetBlob(int windowPtr, int row, int column) {
    return nativeGetBlob((long) windowPtr, row, column);
}
// Reads a cell as a blob. NULL cells yield null, blob cells yield their
// bytes (never null — matches Android, not the SQLite spec), string cells
// are UTF-8 encoded, and anything else raises SQLiteException.
@Implementation(minSdk = LOLLIPOP)
protected static byte[] nativeGetBlob(long windowPtr, int row, int column) {
    Value cell = WINDOW_DATA.get(windowPtr).value(row, column);
    if (cell.type == Cursor.FIELD_TYPE_NULL) {
        return null;
    }
    if (cell.type == Cursor.FIELD_TYPE_BLOB) {
        // This matches Android's behavior, which does not match the SQLite spec
        byte[] stored = (byte[]) cell.value;
        return stored != null ? stored : new byte[0];
    }
    if (cell.type == Cursor.FIELD_TYPE_STRING) {
        return ((String) cell.value).getBytes(UTF_8);
    }
    throw new android.database.sqlite.SQLiteException("Getting blob when column is non-blob. Row " + row + ", col " + column);
}
// Pre-Lollipop overload: widen the int pointer and delegate.
@Implementation(maxSdk = KITKAT_WATCH)
protected static String nativeGetString(int windowPtr, int row, int column) {
    return nativeGetString((long) windowPtr, row, column);
}
// Reads a cell as a string: blobs are rejected with SQLiteException, null
// stays null, and any other stored value is stringified.
@Implementation(minSdk = LOLLIPOP)
protected static String nativeGetString(long windowPtr, int row, int column) {
    Value cell = WINDOW_DATA.get(windowPtr).value(row, column);
    if (cell.type == Cursor.FIELD_TYPE_BLOB) {
        throw new android.database.sqlite.SQLiteException("Getting string when column is blob. Row " + row + ", col " + column);
    }
    Object raw = cell.value;
    if (raw == null) {
        return null;
    }
    return String.valueOf(raw);
}
// Pre-Lollipop overload: widen the int pointer and delegate.
@Implementation(maxSdk = KITKAT_WATCH)
protected static long nativeGetLong(int windowPtr, int row, int column) {
    return nativeGetLong((long) windowPtr, row, column);
}
// Numeric getter; type-coercion rules live in nativeGetNumber (defined
// elsewhere in this class, not shown here).
@Implementation(minSdk = LOLLIPOP)
protected static long nativeGetLong(long windowPtr, int row, int column) {
    return nativeGetNumber(windowPtr, row, column).longValue();
}
// Pre-Lollipop overload: widen the int pointer and delegate.
@Implementation(maxSdk = KITKAT_WATCH)
protected static double nativeGetDouble(int windowPtr, int row, int column) {
    return nativeGetDouble((long) windowPtr, row, column);
}
// Numeric getter; type-coercion rules live in nativeGetNumber (defined
// elsewhere in this class, not shown here).
@Implementation(minSdk = LOLLIPOP)
protected static double nativeGetDouble(long windowPtr, int row, int column) {
    return nativeGetNumber(windowPtr, row, column).doubleValue();
}
// Pre-Lollipop overload: widen the int pointer and delegate.
@Implementation(maxSdk = KITKAT_WATCH)
protected static int nativeGetType(int windowPtr, int row, int column) {
    return nativeGetType((long) windowPtr, row, column);
}
// Returns the Cursor.FIELD_TYPE_* constant stored for the cell.
@Implementation(minSdk = LOLLIPOP)
protected static int nativeGetType(long windowPtr, int row, int column) {
    return WINDOW_DATA.get(windowPtr).value(row, column).type;
}
// Pre-Lollipop overload: widen the int pointer and delegate.
@Implementation(maxSdk = KITKAT_WATCH)
protected static void nativeClear(int windowPtr) {
    nativeClear((long) windowPtr);
}
// Empties the window's stored rows without disposing the window itself.
@Implementation(minSdk = LOLLIPOP)
protected static void nativeClear(long windowPtr) {
    WINDOW_DATA.clear(windowPtr);
}
// Pre-Lollipop overload: widen the int pointer and delegate.
@Implementation(maxSdk = KITKAT_WATCH)
protected static int nativeGetNumRows(int windowPtr) {
    return nativeGetNumRows((long) windowPtr);
}
// Current number of rows held by the window.
@Implementation(minSdk = LOLLIPOP)
protected static int nativeGetNumRows(long windowPtr) {
    return WINDOW_DATA.get(windowPtr).numRows();
}
// Pre-Lollipop overload: widen the int pointer and delegate.
@Implementation(maxSdk = KITKAT_WATCH)
protected static boolean nativePutBlob(int windowPtr, byte[] value, int row, int column) {
    return nativePutBlob((long) windowPtr, value, row, column);
}
// Stores a FIELD_TYPE_BLOB value at (row, column); the boolean result is
// whatever putValue reports (presumably success — see WindowData).
@Implementation(minSdk = LOLLIPOP)
protected static boolean nativePutBlob(long windowPtr, byte[] value, int row, int column) {
    return WINDOW_DATA.get(windowPtr).putValue(new Value(value, Cursor.FIELD_TYPE_BLOB), row, column);
}
// Pre-Lollipop overload: widen the int pointer and delegate.
@Implementation(maxSdk = KITKAT_WATCH)
protected static boolean nativePutString(int windowPtr, String value, int row, int column) {
    return nativePutString((long) windowPtr, value, row, column);
}
@Implementation(minSdk = LOLLIPOP)
protected static boolean nativePutString(long windowPtr, String value, int row, int column) {
return WINDOW_DATA.get(windowPtr).putValue(new Value(value, Cursor.FIELD_TYPE_STRING), row, column);
}
@Implementation(maxSdk = KITKAT_WATCH)
protected static boolean nativePutLong(int windowPtr, long value, int row, int column) {
return nativePutLong((long) windowPtr, value, row, column);
}
@Implementation(minSdk = LOLLIPOP)
protected static boolean nativePutLong(long windowPtr, long value, int row, int column) {
return WINDOW_DATA.get(windowPtr).putValue(new Value(value, Cursor.FIELD_TYPE_INTEGER), row, column);
}
@Implementation(maxSdk = KITKAT_WATCH)
protected static boolean nativePutDouble(int windowPtr, double value, int row, int column) {
return nativePutDouble((long) windowPtr, value, row, column);
}
@Implementation(minSdk = LOLLIPOP)
protected static boolean nativePutDouble(long windowPtr, double value, int row, int column) {
return WINDOW_DATA.get(windowPtr).putValue(new Value(value, Cursor.FIELD_TYPE_FLOAT), row, column);
}
@Implementation(maxSdk = KITKAT_WATCH)
protected static boolean nativePutNull(int windowPtr, int row, int column) {
return nativePutNull((long) windowPtr, row, column);
}
@Implementation(minSdk = LOLLIPOP)
protected static boolean nativePutNull(long windowPtr, int row, int column) {
return WINDOW_DATA.get(windowPtr).putValue(new Value(null, Cursor.FIELD_TYPE_NULL), row, column);
}
@Implementation(maxSdk = KITKAT_WATCH)
protected static boolean nativeAllocRow(int windowPtr) {
return nativeAllocRow((long) windowPtr);
}
@Implementation(minSdk = LOLLIPOP)
protected static boolean nativeAllocRow(long windowPtr) {
return WINDOW_DATA.get(windowPtr).allocRow();
}
@Implementation(maxSdk = KITKAT_WATCH)
protected static boolean nativeSetNumColumns(int windowPtr, int columnNum) {
return nativeSetNumColumns((long) windowPtr, columnNum);
}
@Implementation(minSdk = LOLLIPOP)
protected static boolean nativeSetNumColumns(long windowPtr, int columnNum) {
return WINDOW_DATA.get(windowPtr).setNumColumns(columnNum);
}
@Implementation(maxSdk = KITKAT_WATCH)
protected static String nativeGetName(int windowPtr) {
return nativeGetName((long) windowPtr);
}
@Implementation(minSdk = LOLLIPOP)
protected static String nativeGetName(long windowPtr) {
return WINDOW_DATA.get(windowPtr).getName();
}
/**
 * Fills the window identified by {@code windowPtr} with all remaining rows of
 * {@code stmt} and returns the resulting number of cached rows.
 */
protected static int setData(long windowPtr, SQLiteStatement stmt) throws SQLiteException {
return WINDOW_DATA.setData(windowPtr, stmt);
}
/**
 * Coerces the cell at (row, column) to a Number, mimicking SQLite's numeric
 * coercion: NULL and unparseable text become 0, integers/floats pass through,
 * and blobs are a conversion error.
 */
private static Number nativeGetNumber(long windowPtr, int row, int column) {
  Value cell = WINDOW_DATA.get(windowPtr).value(row, column);
  int type = cell.type;
  if (type == Cursor.FIELD_TYPE_NULL || type == SQLiteConstants.SQLITE_NULL) {
    // NULL coerces to zero.
    return 0;
  }
  if (type == Cursor.FIELD_TYPE_INTEGER || type == Cursor.FIELD_TYPE_FLOAT) {
    return (Number) cell.value;
  }
  if (type == Cursor.FIELD_TYPE_STRING) {
    // Text is parsed as a double; anything unparseable coerces to zero.
    try {
      return Double.parseDouble((String) cell.value);
    } catch (NumberFormatException e) {
      return 0;
    }
  }
  if (type == Cursor.FIELD_TYPE_BLOB) {
    throw new android.database.sqlite.SQLiteException("could not convert "+cell);
  }
  throw new android.database.sqlite.SQLiteException("unknown type: "+type);
}
/**
 * Backing store for one shadowed CursorWindow: a name plus the cached result
 * rows (Android caches rows so moveToPrevious() works; this mirrors that).
 */
private static class Data {
  /** Cached result rows, in cursor order. */
  private final List<Row> rows;
  /** Window name, returned by getName(). */
  private final String name;
  /** Column count used when allocating new rows via allocRow(). */
  private int numColumns;

  public Data(String name, int cursorWindowSize) {
    this.name = name;
    this.rows = new ArrayList<Row>(cursorWindowSize);
  }

  /**
   * Returns the cell at the given coordinates.
   *
   * @throws IllegalArgumentException if {@code rowN} is out of range
   */
  public Value value(int rowN, int colN) {
    // Bounds-check explicitly: the previous "row == null" guard was
    // unreachable (null rows are never stored), so an out-of-range index
    // surfaced as a bare IndexOutOfBoundsException instead of the intended
    // diagnostic below.
    if (rowN < 0 || rowN >= rows.size()) {
      throw new IllegalArgumentException("Bad row number: " + rowN + ", count: " + rows.size());
    }
    return rows.get(rowN).get(colN);
  }

  public int numRows() {
    return rows.size();
  }

  /** Stores {@code value} into an already-allocated row; always reports true. */
  public boolean putValue(Value value, int rowN, int colN) {
    return rows.get(rowN).set(colN, value);
  }

  /** Drains the statement, caching every remaining result row. */
  public void fillWith(SQLiteStatement stmt) throws SQLiteException {
    //Android caches results in the WindowedCursor to allow moveToPrevious() to function.
    //Robolectric will have to cache the results too. In the rows list.
    while (stmt.step()) {
      rows.add(fillRowValues(stmt));
    }
  }

  /** Maps a raw SQLite storage class onto the matching Cursor.FIELD_TYPE_* constant. */
  private static int cursorValueType(final int sqliteType) {
    switch (sqliteType) {
      case SQLiteConstants.SQLITE_NULL: return Cursor.FIELD_TYPE_NULL;
      case SQLiteConstants.SQLITE_INTEGER: return Cursor.FIELD_TYPE_INTEGER;
      case SQLiteConstants.SQLITE_FLOAT: return Cursor.FIELD_TYPE_FLOAT;
      case SQLiteConstants.SQLITE_TEXT: return Cursor.FIELD_TYPE_STRING;
      case SQLiteConstants.SQLITE_BLOB: return Cursor.FIELD_TYPE_BLOB;
      default:
        throw new IllegalArgumentException("Bad SQLite type " + sqliteType + ". See possible values in SQLiteConstants.");
    }
  }

  /** Snapshots the statement's current row into a freshly allocated Row. */
  private static Row fillRowValues(SQLiteStatement stmt) throws SQLiteException {
    final int columnCount = stmt.columnCount();
    Row row = new Row(columnCount);
    for (int index = 0; index < columnCount; index++) {
      row.set(index, new Value(stmt.columnValue(index), cursorValueType(stmt.columnType(index))));
    }
    return row;
  }

  public void clear() {
    rows.clear();
  }

  /** Appends a row of NULL cells sized by the current column count. */
  public boolean allocRow() {
    rows.add(new Row(numColumns));
    return true;
  }

  public boolean setNumColumns(int numColumns) {
    this.numColumns = numColumns;
    return true;
  }

  public String getName() {
    return name;
  }
}
/** One cached cursor row: a fixed-length list of cells, initialized to NULLs. */
private static class Row {
  private final List<Value> values;

  public Row(int length) {
    values = new ArrayList<Value>(length);
    // Pre-fill every slot with a typed NULL so get() is always valid.
    int remaining = length;
    while (remaining > 0) {
      values.add(new Value(null, Cursor.FIELD_TYPE_NULL));
      remaining--;
    }
  }

  public Value get(int n) {
    return values.get(n);
  }

  /** Overwrites the cell at {@code colN}; always reports success. */
  public boolean set(int colN, Value value) {
    values.set(colN, value);
    return true;
  }
}
// Immutable holder for one cell: the raw object plus its type tag.
private static class Value {
// Raw column value; null for NULL cells.
private final Object value;
// Type tag; compared elsewhere against Cursor.FIELD_TYPE_* constants (and,
// in nativeGetNumber, also against raw SQLite type codes).
private final int type;
public Value(final Object value, final int type) {
this.value = value;
this.type = type;
}
}
/**
 * Registry mapping fake "native" window pointers to their backing Data.
 * Pointers are handed out sequentially starting at 1, so 0 is never valid.
 */
private static class WindowData {
  private final AtomicLong windowPtrCounter = new AtomicLong(0);
  // Keyed by Long rather than the previous raw Number: every caller passes a
  // long (autoboxed to Long), and a wider key type only invites lookup
  // mismatches — e.g. an Integer key can never equal a Long key.
  private final Map<Long, Data> dataMap = new ConcurrentHashMap<>();

  /**
   * Looks up the window for a pointer previously returned by create().
   *
   * @throws IllegalArgumentException if the pointer is unknown or already closed
   */
  public Data get(long ptr) {
    Data data = dataMap.get(ptr);
    if (data == null) {
      throw new IllegalArgumentException("Invalid window pointer: " + ptr + "; current pointers: " + dataMap.keySet());
    }
    return data;
  }

  /** Fills the window from the statement and returns the resulting row count. */
  public int setData(final long ptr, final SQLiteStatement stmt) throws SQLiteException {
    Data data = get(ptr);
    data.fillWith(stmt);
    return data.numRows();
  }

  /** Deallocates the window; complains loudly about double-close / bad pointers. */
  public void close(final long ptr) {
    Data removed = dataMap.remove(ptr);
    if (removed == null) {
      throw new IllegalArgumentException("Bad cursor window pointer " + ptr + ". Valid pointers: " + dataMap.keySet());
    }
  }

  /** Empties the window's cached rows without deallocating it. */
  public void clear(final long ptr) {
    get(ptr).clear();
  }

  /** Allocates a fresh window and returns its opaque pointer. */
  public long create(String name, int cursorWindowSize) {
    long ptr = windowPtrCounter.incrementAndGet();
    dataMap.put(ptr, new Data(name, cursorWindowSize));
    return ptr;
  }
}
// TODO: Implement these methods
// private static native int nativeCreateFromParcel(Parcel parcel);
// private static native void nativeWriteToParcel($ptrClass windowPtr, Parcel parcel);
// private static native void nativeFreeLastRow($ptrClass windowPtr);
// private static native void nativeCopyStringToBuffer($ptrClass windowPtr, int row, int column, CharArrayBuffer buffer);
}
| |
/*
* Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* @test
* @bug 8003639
* @summary convert lambda testng tests to jtreg and add them
* @run testng LambdaTranslationTest2
*/
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.List;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
* LambdaTranslationTest2 -- end-to-end smoke tests for lambda evaluation
*/
@Test
public class LambdaTranslationTest2 {
// Instance field used below to exercise instance capture in lambdas.
final String dummy = "dummy";
// Smoke-tests basic lambda forms over the TPredicate/TMapper SAM types:
// stateless, constant-returning, local-capturing and instance-capturing.
public void testLambdas() {
TPredicate<String> isEmpty = s -> s.isEmpty();
assertTrue(isEmpty.test(""));
assertTrue(!isEmpty.test("foo"));
TPredicate<Object> oIsEmpty = s -> ((String) s).isEmpty();
assertTrue(oIsEmpty.test(""));
assertTrue(!oIsEmpty.test("foo"));
TPredicate<Object> alwaysTrue = o -> true;
assertTrue(alwaysTrue.test(""));
assertTrue(alwaysTrue.test(null));
TPredicate<Object> alwaysFalse = o -> false;
assertTrue(!alwaysFalse.test(""));
assertTrue(!alwaysFalse.test(null));
// tests local capture
String foo = "foo";
TPredicate<String> equalsFoo = s -> s.equals(foo);
assertTrue(!equalsFoo.test(""));
assertTrue(equalsFoo.test("foo"));
// tests instance capture
TPredicate<String> equalsDummy = s -> s.equals(dummy);
assertTrue(!equalsDummy.test(""));
assertTrue(equalsDummy.test("dummy"));
// identity mapper must return the very same reference (checked with ==)
TMapper<Object, Object> ident = s -> s;
assertEquals("blarf", ident.map("blarf"));
assertEquals("wooga", ident.map("wooga"));
assertTrue("wooga" == ident.map("wooga"));
// constant capture
TMapper<Object, Object> prefixer = s -> "p" + s;
assertEquals("pblarf", prefixer.map("blarf"));
assertEquals("pwooga", prefixer.map("wooga"));
// instance capture
TMapper<Object, Object> prefixer2 = s -> dummy + s;
assertEquals("dummyblarf", prefixer2.map("blarf"));
assertEquals("dummywooga", prefixer2.map("wooga"));
}
interface Factory<T> {
T make();
}
// Specialization that inherits make() without redeclaring it.
interface StringFactory extends Factory<String> { }
// Specialization that explicitly redeclares make() with the sharper type.
interface StringFactory2 extends Factory<String> {
String make();
}
// A lambda viewed through raw, wildcard and parameterized Factory types must
// dispatch to the same implementation (bridge-method behavior).
public void testBridges() {
Factory<String> of = () -> "y";
Factory<?> ef = () -> "z";
assertEquals("y", of.make());
assertEquals("y", ((Factory<?>) of).make());
assertEquals("y", ((Factory) of).make());
assertEquals("z", ef.make());
assertEquals("z", ((Factory) ef).make());
}
public void testBridgesImplicitSpecialization() {
StringFactory sf = () -> "x";
assertEquals("x", sf.make());
assertEquals("x", ((Factory<String>) sf).make());
assertEquals("x", ((Factory<?>) sf).make());
assertEquals("x", ((Factory) sf).make());
}
public void testBridgesExplicitSpecialization() {
StringFactory2 sf = () -> "x";
assertEquals("x", sf.make());
assertEquals("x", ((Factory<String>) sf).make());
assertEquals("x", ((Factory<?>) sf).make());
assertEquals("x", ((Factory) sf).make());
}
// A lambda inside a subclass may capture a super.method() invocation.
public void testSuperCapture() {
class A {
String make() { return "x"; }
}
class B extends A {
void testSuperCapture() {
StringFactory sf = () -> super.make();
assertEquals("x", sf.make());
}
}
new B().testSuperCapture();
}
// SAM types whose parameters are narrower primitives than the referenced
// method's — the adapter must perform primitive widening.
interface WidenD {
public String m(float a0, double a1);
}
interface WidenS {
public String m(byte a0, short a1);
}
interface WidenI {
public String m(byte a0, short a1, char a2, int a3);
}
interface WidenL {
public String m(byte a0, short a1, char a2, int a3, long a4);
}
interface Box {
public String m(byte a0, short a1, char a2, int a3, long a4, boolean a5, float a6, double a7);
}
static String pb(Byte a0, Short a1, Character a2, Integer a3, Long a4, Boolean a5, Float a6, Double a7) {
return String.format("b%d s%d c%c i%d j%d z%b f%f d%f", a0, a1, a2, a3, a4, a5, a6, a7);
}
static String pwI1(int a0, int a1, int a2, int a3) {
return String.format("b%d s%d c%d i%d", a0, a1, a2, a3);
}
static String pwI2(Integer a0, Integer a1, Integer a2, Integer a3) {
return String.format("b%d s%d c%d i%d", a0, a1, a2, a3);
}
static String pwL1(long a0, long a1, long a2, long a3, long a4) {
return String.format("b%d s%d c%d i%d j%d", a0, a1, a2, a3, a4);
}
static String pwL2(Long a0, Long a1, Long a2, Long a3, Long a4) {
return String.format("b%d s%d c%d i%d j%d", a0, a1, a2, a3, a4);
}
static String pwS1(short a0, short a1) {
return String.format("b%d s%d", a0, a1);
}
static String pwS2(Short a0, Short a1) {
return String.format("b%d s%d", a0, a1);
}
static String pwD1(double a0, double a1) {
return String.format("f%f d%f", a0, a1);
}
static String pwD2(Double a0, Double a1) {
return String.format("f%f d%f", a0, a1);
}
public void testPrimitiveWidening() {
WidenS ws1 = LambdaTranslationTest2::pwS1;
assertEquals("b1 s2", ws1.m((byte) 1, (short) 2));
WidenD wd1 = LambdaTranslationTest2::pwD1;
assertEquals("f1.000000 d2.000000", wd1.m(1.0f, 2.0));
WidenI wi1 = LambdaTranslationTest2::pwI1;
assertEquals("b1 s2 c3 i4", wi1.m((byte) 1, (short) 2, (char) 3, 4));
WidenL wl1 = LambdaTranslationTest2::pwL1;
assertEquals("b1 s2 c3 i4 j5", wl1.m((byte) 1, (short) 2, (char) 3, 4, 5L));
// @@@ TODO: clarify spec on widen+box conversion
}
interface Unbox {
public String m(Byte a0, Short a1, Character a2, Integer a3, Long a4, Boolean a5, Float a6, Double a7);
}
static String pu(byte a0, short a1, char a2, int a3, long a4, boolean a5, float a6, double a7) {
return String.format("b%d s%d c%c i%d j%d z%b f%f d%f", a0, a1, a2, a3, a4, a5, a6, a7);
}
// Boxed SAM parameters → primitive method parameters (unboxing adapter).
public void testUnboxing() {
Unbox u = LambdaTranslationTest2::pu;
assertEquals("b1 s2 cA i4 j5 ztrue f6.000000 d7.000000", u.m((byte)1, (short) 2, 'A', 4, 5L, true, 6.0f, 7.0));
}
// Primitive SAM parameters → boxed method parameters (boxing adapter).
public void testBoxing() {
Box b = LambdaTranslationTest2::pb;
assertEquals("b1 s2 cA i4 j5 ztrue f6.000000 d7.000000", b.m((byte) 1, (short) 2, 'A', 4, 5L, true, 6.0f, 7.0));
}
static boolean cc(Object o) {
return ((String) o).equals("foo");
}
// A method taking Object adapted to TPredicate<String>: the argument cast is
// inserted by the adapter.
public void testArgCastingAdaptation() {
TPredicate<String> p = LambdaTranslationTest2::cc;
assertTrue(p.test("foo"));
assertTrue(!p.test("bar"));
}
interface SonOfPredicate<T> extends TPredicate<T> { }
// A lambda may target an interface that inherits its single abstract method.
public void testExtendsSAM() {
SonOfPredicate<String> p = s -> s.isEmpty();
assertTrue(p.test(""));
assertTrue(!p.test("foo"));
}
// Constructor references must produce a fresh instance on every invocation.
public void testConstructorRef() {
Factory<List<String>> lf = ArrayList<String>::new;
List<String> list = lf.make();
assertTrue(list instanceof ArrayList);
assertTrue(list != lf.make());
list.add("a");
assertEquals("[a]", list.toString());
}
private static String privateMethod() {
return "private";
}
public void testPrivateMethodRef() {
Factory<String> sf = LambdaTranslationTest2::privateMethod;
assertEquals("private", sf.make());
}
private interface PrivateIntf {
String make();
}
public void testPrivateIntf() {
PrivateIntf p = () -> "foo";
assertEquals("foo", p.make());
}
interface Op<T> {
public T op(T a, T b);
}
// Math::max over boxed Integers: unbox, compute, re-box; swept over a grid
// of positive/negative operand combinations.
public void testBoxToObject() {
Op<Integer> maxer = Math::max;
for (int i=-100000; i < 100000; i += 100)
for (int j=-100000; j < 100000; j += 99) {
assertEquals((int) maxer.op(i,j), Math.max(i,j));
}
}
protected static String protectedMethod() {
return "protected";
}
public void testProtectedMethodRef() {
Factory<String> sf = LambdaTranslationTest2::protectedMethod;
assertEquals("protected", sf.make());
}
// Nested inner-class hierarchy used to exercise bound method references on
// inner (and generic inner) instances below.
class Inner1 {
String m1() {
return "Inner1.m1()";
}
class Inner2 {
public String m1() {
return "Inner1.Inner2.m1()";
}
protected String m2() {
return "Inner1.Inner2.m2()";
}
String m3() {
return "Inner1.Inner2.m3()";
}
class Inner3<T> {
T t = null;
Inner3(T t) {
this.t = t;
}
T m1() {
return t;
}
}
}
}
public void testInnerClassMethodRef() {
Factory<String> fs = new Inner1()::m1;
assertEquals("Inner1.m1()", fs.make());
fs = new Inner1().new Inner2()::m1;
assertEquals("Inner1.Inner2.m1()", fs.make());
fs = new Inner1().new Inner2()::m2;
assertEquals("Inner1.Inner2.m2()", fs.make());
fs = new Inner1().new Inner2()::m3;
assertEquals("Inner1.Inner2.m3()", fs.make());
fs = new Inner1().new Inner2().new Inner3<String>("Inner1.Inner2.Inner3")::m1;
assertEquals("Inner1.Inner2.Inner3", fs.make());
Factory<Integer> fsi = new Inner1().new Inner2().new Inner3<Integer>(100)::m1;
assertEquals(100, (int)fsi.make());
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.recovery.records.impl.pb;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.ipc.CallerContext;
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ApplicationTimeoutType;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationSubmissionContextPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerRecoveryProtos.ApplicationStateDataProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerRecoveryProtos.ApplicationStateDataProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerRecoveryProtos.RMAppStateProto;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.thirdparty.protobuf.ByteString;
import org.apache.hadoop.thirdparty.protobuf.TextFormat;
public class ApplicationStateDataPBImpl extends ApplicationStateData {
ApplicationStateDataProto proto =
ApplicationStateDataProto.getDefaultInstance();
ApplicationStateDataProto.Builder builder = null;
boolean viaProto = false;
private ApplicationSubmissionContext applicationSubmissionContext = null;
private Map<ApplicationTimeoutType, Long> applicationTimeouts = null;
public ApplicationStateDataPBImpl() {
builder = ApplicationStateDataProto.newBuilder();
}
public ApplicationStateDataPBImpl(
ApplicationStateDataProto proto) {
this.proto = proto;
viaProto = true;
}
@Override
public ApplicationStateDataProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void mergeLocalToBuilder() {
if (this.applicationSubmissionContext != null) {
builder.setApplicationSubmissionContext(
((ApplicationSubmissionContextPBImpl)applicationSubmissionContext)
.getProto());
}
if (this.applicationTimeouts != null) {
addApplicationTimeouts();
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = ApplicationStateDataProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public long getSubmitTime() {
ApplicationStateDataProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasSubmitTime()) {
return -1;
}
return (p.getSubmitTime());
}
@Override
public void setSubmitTime(long submitTime) {
maybeInitBuilder();
builder.setSubmitTime(submitTime);
}
@Override
public long getStartTime() {
ApplicationStateDataProtoOrBuilder p = viaProto ? proto : builder;
return p.getStartTime();
}
@Override
public void setStartTime(long startTime) {
maybeInitBuilder();
builder.setStartTime(startTime);
}
@Override
public long getLaunchTime() {
ApplicationStateDataProtoOrBuilder p = viaProto ? proto : builder;
return p.getLaunchTime();
}
@Override
public void setLaunchTime(long launchTime) {
maybeInitBuilder();
builder.setLaunchTime(launchTime);
}
@Override
public String getUser() {
ApplicationStateDataProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasUser()) {
return null;
}
return (p.getUser());
}
@Override
public void setUser(String user) {
maybeInitBuilder();
builder.setUser(user);
}
@Override
public ApplicationSubmissionContext getApplicationSubmissionContext() {
ApplicationStateDataProtoOrBuilder p = viaProto ? proto : builder;
if(applicationSubmissionContext != null) {
return applicationSubmissionContext;
}
if (!p.hasApplicationSubmissionContext()) {
return null;
}
applicationSubmissionContext =
new ApplicationSubmissionContextPBImpl(
p.getApplicationSubmissionContext());
return applicationSubmissionContext;
}
@Override
public void setApplicationSubmissionContext(
ApplicationSubmissionContext context) {
maybeInitBuilder();
if (context == null) {
builder.clearApplicationSubmissionContext();
}
this.applicationSubmissionContext = context;
}
@Override
public RMAppState getState() {
ApplicationStateDataProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasApplicationState()) {
return null;
}
return convertFromProtoFormat(p.getApplicationState());
}
@Override
public void setState(RMAppState finalState) {
maybeInitBuilder();
if (finalState == null) {
builder.clearApplicationState();
return;
}
builder.setApplicationState(convertToProtoFormat(finalState));
}
@Override
public String getDiagnostics() {
ApplicationStateDataProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasDiagnostics()) {
return null;
}
return p.getDiagnostics();
}
@Override
public void setDiagnostics(String diagnostics) {
maybeInitBuilder();
if (diagnostics == null) {
builder.clearDiagnostics();
return;
}
builder.setDiagnostics(diagnostics);
}
@Override
public long getFinishTime() {
ApplicationStateDataProtoOrBuilder p = viaProto ? proto : builder;
return p.getFinishTime();
}
@Override
public void setFinishTime(long finishTime) {
maybeInitBuilder();
builder.setFinishTime(finishTime);
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public CallerContext getCallerContext() {
ApplicationStateDataProtoOrBuilder p = viaProto ? proto : builder;
RpcHeaderProtos.RPCCallerContextProto pbContext = p.getCallerContext();
if (pbContext != null) {
CallerContext context = new CallerContext.Builder(pbContext.getContext())
.setSignature(pbContext.getSignature().toByteArray()).build();
return context;
}
return null;
}
@Override
public void setCallerContext(CallerContext callerContext) {
if (callerContext != null) {
maybeInitBuilder();
RpcHeaderProtos.RPCCallerContextProto.Builder b = RpcHeaderProtos.RPCCallerContextProto
.newBuilder();
if (callerContext.isContextValid()) {
b.setContext(callerContext.getContext());
}
if (callerContext.getSignature() != null) {
b.setSignature(ByteString.copyFrom(callerContext.getSignature()));
}
if(callerContext.isContextValid()
|| callerContext.getSignature() != null) {
builder.setCallerContext(b);
}
}
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
private static String RM_APP_PREFIX = "RMAPP_";
public static RMAppStateProto convertToProtoFormat(RMAppState e) {
return RMAppStateProto.valueOf(RM_APP_PREFIX + e.name());
}
public static RMAppState convertFromProtoFormat(RMAppStateProto e) {
return RMAppState.valueOf(e.name().replace(RM_APP_PREFIX, ""));
}
@Override
public Map<ApplicationTimeoutType, Long> getApplicationTimeouts() {
initApplicationTimeout();
return this.applicationTimeouts;
}
private void initApplicationTimeout() {
if (this.applicationTimeouts != null) {
return;
}
ApplicationStateDataProtoOrBuilder p = viaProto ? proto : builder;
List<ApplicationTimeoutMapProto> lists = p.getApplicationTimeoutsList();
this.applicationTimeouts =
new HashMap<ApplicationTimeoutType, Long>(lists.size());
for (ApplicationTimeoutMapProto timeoutProto : lists) {
this.applicationTimeouts.put(
ProtoUtils
.convertFromProtoFormat(timeoutProto.getApplicationTimeoutType()),
timeoutProto.getTimeout());
}
}
@Override
public void setApplicationTimeouts(
Map<ApplicationTimeoutType, Long> appTimeouts) {
if (appTimeouts == null) {
return;
}
initApplicationTimeout();
this.applicationTimeouts.clear();
this.applicationTimeouts.putAll(appTimeouts);
}
private void addApplicationTimeouts() {
maybeInitBuilder();
builder.clearApplicationTimeouts();
if (applicationTimeouts == null) {
return;
}
Iterable<? extends ApplicationTimeoutMapProto> values =
new Iterable<ApplicationTimeoutMapProto>() {
@Override
public Iterator<ApplicationTimeoutMapProto> iterator() {
return new Iterator<ApplicationTimeoutMapProto>() {
private Iterator<ApplicationTimeoutType> iterator =
applicationTimeouts.keySet().iterator();
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public ApplicationTimeoutMapProto next() {
ApplicationTimeoutType key = iterator.next();
return ApplicationTimeoutMapProto.newBuilder()
.setTimeout(applicationTimeouts.get(key))
.setApplicationTimeoutType(
ProtoUtils.convertToProtoFormat(key))
.build();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
this.builder.addAllApplicationTimeouts(values);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.spi.predicate;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.ImmutableMap;
import io.airlift.json.ObjectMapperProvider;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.TestingBlockEncodingSerde;
import io.prestosql.spi.block.TestingBlockJsonSerde;
import io.prestosql.spi.connector.ColumnHandle;
import io.prestosql.spi.connector.TestingColumnHandle;
import io.prestosql.spi.type.TestingTypeDeserializer;
import io.prestosql.spi.type.TestingTypeManager;
import io.prestosql.spi.type.Type;
import org.testng.annotations.Test;
import java.io.IOException;
import java.util.Map;
import static io.airlift.slice.Slices.utf8Slice;
import static io.prestosql.spi.predicate.TupleDomain.columnWiseUnion;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.BooleanType.BOOLEAN;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
public class TestTupleDomain
{
// Column handles shared by all test cases; only their identity matters.
private static final ColumnHandle A = new TestingColumnHandle("a");
private static final ColumnHandle B = new TestingColumnHandle("b");
private static final ColumnHandle C = new TestingColumnHandle("c");
private static final ColumnHandle D = new TestingColumnHandle("d");
private static final ColumnHandle E = new TestingColumnHandle("e");
private static final ColumnHandle F = new TestingColumnHandle("f");
// A TupleDomain is none() as soon as any single column's domain is none.
@Test
public void testNone()
{
assertTrue(TupleDomain.none().isNone());
assertEquals(TupleDomain.<ColumnHandle>none(),
TupleDomain.withColumnDomains(ImmutableMap.of(
A, Domain.none(BIGINT))));
assertEquals(TupleDomain.<ColumnHandle>none(),
TupleDomain.withColumnDomains(ImmutableMap.of(
A, Domain.all(BIGINT),
B, Domain.none(VARCHAR))));
}
// A TupleDomain is all() when it has no constraining columns; a column mapped
// to Domain.all is equivalent to the column being absent.
@Test
public void testAll()
{
assertTrue(TupleDomain.all().isAll());
assertEquals(TupleDomain.<ColumnHandle>all(),
TupleDomain.withColumnDomains(ImmutableMap.of(
A, Domain.all(BIGINT))));
assertEquals(TupleDomain.<ColumnHandle>all(),
TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>of()));
}
// Intersecting two tuple domains intersects each column's domain
// independently: A narrows to the single value, B keeps only 0.0, C is
// unchanged, and D's two half-open ranges combine into [0.0, 10.0) with
// null no longer allowed.
@Test
public void testIntersection()
{
TupleDomain<ColumnHandle> left = TupleDomain.withColumnDomains(
ImmutableMap.<ColumnHandle, Domain>builder()
.put(A, Domain.all(VARCHAR))
.put(B, Domain.notNull(DOUBLE))
.put(C, Domain.singleValue(BIGINT, 1L))
.put(D, Domain.create(ValueSet.ofRanges(Range.greaterThanOrEqual(DOUBLE, 0.0)), true))
.build());
TupleDomain<ColumnHandle> right = TupleDomain.withColumnDomains(
ImmutableMap.<ColumnHandle, Domain>builder()
.put(A, Domain.singleValue(VARCHAR, utf8Slice("value")))
.put(B, Domain.singleValue(DOUBLE, 0.0))
.put(C, Domain.singleValue(BIGINT, 1L))
.put(D, Domain.create(ValueSet.ofRanges(Range.lessThan(DOUBLE, 10.0)), false))
.build());
TupleDomain<ColumnHandle> expected = TupleDomain.withColumnDomains(
ImmutableMap.<ColumnHandle, Domain>builder()
.put(A, Domain.singleValue(VARCHAR, utf8Slice("value")))
.put(B, Domain.singleValue(DOUBLE, 0.0))
.put(C, Domain.singleValue(BIGINT, 1L))
.put(D, Domain.create(ValueSet.ofRanges(Range.range(DOUBLE, 0.0, true, 10.0, false)), false))
.build());
assertEquals(left.intersect(right), expected);
}
@Test
public void testNoneIntersection()
{
assertEquals(TupleDomain.none().intersect(TupleDomain.all()), TupleDomain.none());
assertEquals(TupleDomain.all().intersect(TupleDomain.none()), TupleDomain.none());
assertEquals(TupleDomain.none().intersect(TupleDomain.none()), TupleDomain.none());
// onlyNull ∧ notNull is unsatisfiable for A, collapsing the whole tuple
// domain to none().
assertEquals(
TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.onlyNull(BIGINT)))
.intersect(TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.notNull(BIGINT)))),
TupleDomain.<ColumnHandle>none());
}
// Columns missing from one side act as unconstrained (all) under
// intersection, so constraints from either side carry into the result.
@Test
public void testMismatchedColumnIntersection()
{
TupleDomain<ColumnHandle> tupleDomain1 = TupleDomain.withColumnDomains(
ImmutableMap.of(
A, Domain.all(DOUBLE),
B, Domain.singleValue(VARCHAR, utf8Slice("value"))));
TupleDomain<ColumnHandle> tupleDomain2 = TupleDomain.withColumnDomains(
ImmutableMap.of(
A, Domain.create(ValueSet.ofRanges(Range.greaterThanOrEqual(DOUBLE, 0.0)), true),
C, Domain.singleValue(BIGINT, 1L)));
TupleDomain<ColumnHandle> expectedTupleDomain = TupleDomain.withColumnDomains(ImmutableMap.of(
A, Domain.create(ValueSet.ofRanges(Range.greaterThanOrEqual(DOUBLE, 0.0)), true),
B, Domain.singleValue(VARCHAR, utf8Slice("value")),
C, Domain.singleValue(BIGINT, 1L)));
assertEquals(tupleDomain1.intersect(tupleDomain2), expectedTupleDomain);
}
// Union is computed column-wise; complementary domains (onlyNull/notNull,
// overlapping ranges with/without null) widen to all() for that column.
@Test
public void testColumnWiseUnion()
{
TupleDomain<ColumnHandle> tupleDomain1 = TupleDomain.withColumnDomains(
ImmutableMap.<ColumnHandle, Domain>builder()
.put(A, Domain.all(VARCHAR))
.put(B, Domain.notNull(DOUBLE))
.put(C, Domain.onlyNull(BIGINT))
.put(D, Domain.singleValue(BIGINT, 1L))
.put(E, Domain.create(ValueSet.ofRanges(Range.greaterThanOrEqual(DOUBLE, 0.0)), true))
.build());
TupleDomain<ColumnHandle> tupleDomain2 = TupleDomain.withColumnDomains(
ImmutableMap.<ColumnHandle, Domain>builder()
.put(A, Domain.singleValue(VARCHAR, utf8Slice("value")))
.put(B, Domain.singleValue(DOUBLE, 0.0))
.put(C, Domain.notNull(BIGINT))
.put(D, Domain.singleValue(BIGINT, 1L))
.put(E, Domain.create(ValueSet.ofRanges(Range.lessThan(DOUBLE, 10.0)), false))
.build());
TupleDomain<ColumnHandle> expectedTupleDomain = TupleDomain.withColumnDomains(
ImmutableMap.<ColumnHandle, Domain>builder()
.put(A, Domain.all(VARCHAR))
.put(B, Domain.notNull(DOUBLE))
.put(C, Domain.all(BIGINT))
.put(D, Domain.singleValue(BIGINT, 1L))
.put(E, Domain.all(DOUBLE))
.build());
assertEquals(columnWiseUnion(tupleDomain1, tupleDomain2), expectedTupleDomain);
}
@Test
public void testNoneColumnWiseUnion()
{
assertEquals(columnWiseUnion(TupleDomain.none(), TupleDomain.all()), TupleDomain.all());
assertEquals(columnWiseUnion(TupleDomain.all(), TupleDomain.none()), TupleDomain.all());
assertEquals(columnWiseUnion(TupleDomain.none(), TupleDomain.none()), TupleDomain.none());
// onlyNull ∨ notNull covers every value, widening A to all().
assertEquals(
columnWiseUnion(
TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.onlyNull(BIGINT))),
TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.notNull(BIGINT)))),
TupleDomain.<ColumnHandle>all());
}
// Under column-wise union, a column missing from either side is dropped
// (treated as unconstrained), so only A survives — widened to all().
@Test
public void testMismatchedColumnWiseUnion()
{
TupleDomain<ColumnHandle> tupleDomain1 = TupleDomain.withColumnDomains(
ImmutableMap.of(
A, Domain.all(DOUBLE),
B, Domain.singleValue(VARCHAR, utf8Slice("value"))));
TupleDomain<ColumnHandle> tupleDomain2 = TupleDomain.withColumnDomains(
ImmutableMap.of(
A, Domain.create(ValueSet.ofRanges(Range.greaterThanOrEqual(DOUBLE, 0.0)), true),
C, Domain.singleValue(BIGINT, 1L)));
TupleDomain<ColumnHandle> expectedTupleDomain = TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.all(DOUBLE)));
assertEquals(columnWiseUnion(tupleDomain1, tupleDomain2), expectedTupleDomain);
}
    /**
     * Two TupleDomains overlap iff some tuple can satisfy both; a column
     * absent from a map is unconstrained, and any NONE column kills overlap.
     */
    @Test
    public void testOverlaps()
    {
        assertTrue(overlaps(
                ImmutableMap.of(),
                ImmutableMap.of()));
        assertTrue(overlaps(
                ImmutableMap.of(),
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L))));
        // A NONE domain on either side means no tuple exists at all.
        assertFalse(overlaps(
                ImmutableMap.of(),
                ImmutableMap.of(A, Domain.none(BIGINT))));
        assertFalse(overlaps(
                ImmutableMap.of(A, Domain.none(BIGINT)),
                ImmutableMap.of(A, Domain.none(BIGINT))));
        assertTrue(overlaps(
                ImmutableMap.of(A, Domain.all(BIGINT)),
                ImmutableMap.of(A, Domain.all(BIGINT))));
        // Disjoint column sets never conflict.
        assertTrue(overlaps(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 1L)),
                ImmutableMap.of(B, Domain.singleValue(VARCHAR, utf8Slice("value")))));
        assertTrue(overlaps(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 1L)),
                ImmutableMap.of(A, Domain.all(BIGINT))));
        assertFalse(overlaps(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 1L)),
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 2L))));
        // One conflicting column (B) is enough to rule out overlap.
        assertFalse(overlaps(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 1L),
                        B, Domain.singleValue(BIGINT, 1L)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 1L),
                        B, Domain.singleValue(BIGINT, 2L))));
        assertTrue(overlaps(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 1L),
                        B, Domain.all(BIGINT)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 1L),
                        B, Domain.singleValue(BIGINT, 2L))));
    }
    /**
     * Superset/subset containment: the first map contains the second iff every
     * tuple allowed by the second is allowed by the first.  A subset containing
     * any NONE column is the empty set and is contained in everything.
     */
    @Test
    public void testContains()
    {
        assertTrue(contains(
                ImmutableMap.of(),
                ImmutableMap.of()));
        assertTrue(contains(
                ImmutableMap.of(),
                ImmutableMap.of(A, Domain.none(BIGINT))));
        assertTrue(contains(
                ImmutableMap.of(),
                ImmutableMap.of(A, Domain.all(BIGINT))));
        assertTrue(contains(
                ImmutableMap.of(),
                ImmutableMap.of(A, Domain.singleValue(DOUBLE, 0.0))));
        // NONE superset contains nothing but the empty set.
        assertFalse(contains(
                ImmutableMap.of(A, Domain.none(BIGINT)),
                ImmutableMap.of()));
        assertTrue(contains(
                ImmutableMap.of(A, Domain.none(BIGINT)),
                ImmutableMap.of(A, Domain.none(BIGINT))));
        assertFalse(contains(
                ImmutableMap.of(A, Domain.none(BIGINT)),
                ImmutableMap.of(A, Domain.all(BIGINT))));
        assertFalse(contains(
                ImmutableMap.of(A, Domain.none(BIGINT)),
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L))));
        // ALL superset contains everything.
        assertTrue(contains(
                ImmutableMap.of(A, Domain.all(BIGINT)),
                ImmutableMap.of()));
        assertTrue(contains(
                ImmutableMap.of(A, Domain.all(BIGINT)),
                ImmutableMap.of(A, Domain.none(BIGINT))));
        assertTrue(contains(
                ImmutableMap.of(A, Domain.all(BIGINT)),
                ImmutableMap.of(A, Domain.all(BIGINT))));
        assertTrue(contains(
                ImmutableMap.of(A, Domain.all(BIGINT)),
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L))));
        assertFalse(contains(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L)),
                ImmutableMap.of()));
        assertTrue(contains(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L)),
                ImmutableMap.of(A, Domain.none(BIGINT))));
        assertFalse(contains(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L)),
                ImmutableMap.of(A, Domain.all(BIGINT))));
        assertTrue(contains(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L)),
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L))));
        // Subset leaves A unconstrained while superset pins it -> not contained.
        assertFalse(contains(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L)),
                ImmutableMap.of(B, Domain.singleValue(VARCHAR, utf8Slice("value")))));
        assertFalse(contains(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value"))),
                ImmutableMap.of(B, Domain.singleValue(VARCHAR, utf8Slice("value")))));
        // A NONE column makes the subset empty, hence trivially contained.
        assertTrue(contains(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value"))),
                ImmutableMap.of(B, Domain.none(VARCHAR))));
        assertTrue(contains(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value"))),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 1L),
                        B, Domain.none(VARCHAR))));
        assertTrue(contains(
                ImmutableMap.of(
                        B, Domain.singleValue(VARCHAR, utf8Slice("value"))),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value")))));
        assertTrue(contains(
                ImmutableMap.of(
                        A, Domain.all(BIGINT),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value"))),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value")))));
        assertFalse(contains(
                ImmutableMap.of(
                        A, Domain.all(BIGINT),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value"))),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value2")))));
        assertTrue(contains(
                ImmutableMap.of(
                        A, Domain.all(BIGINT),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value"))),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value2")),
                        C, Domain.none(VARCHAR))));
        // NONE superset column: superset is empty, non-empty subset not contained.
        assertFalse(contains(
                ImmutableMap.of(
                        A, Domain.all(BIGINT),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value")),
                        C, Domain.none(VARCHAR)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value2")))));
        assertTrue(contains(
                ImmutableMap.of(
                        A, Domain.all(BIGINT),
                        B, Domain.singleValue(VARCHAR, utf8Slice("value")),
                        C, Domain.none(VARCHAR)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.none(VARCHAR))));
    }
    /**
     * Logical equality of TupleDomains: ALL domains on extra columns are
     * irrelevant, and any NONE column collapses the whole TupleDomain to NONE,
     * so two NONE TupleDomains are equal regardless of which column caused it.
     */
    @Test
    public void testEquals()
    {
        assertTrue(equals(
                ImmutableMap.of(),
                ImmutableMap.of()));
        // An ALL column adds no constraint, so it compares equal to the empty map.
        assertTrue(equals(
                ImmutableMap.of(),
                ImmutableMap.of(A, Domain.all(BIGINT))));
        assertFalse(equals(
                ImmutableMap.of(),
                ImmutableMap.of(A, Domain.none(BIGINT))));
        assertFalse(equals(
                ImmutableMap.of(),
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L))));
        assertTrue(equals(
                ImmutableMap.of(A, Domain.all(BIGINT)),
                ImmutableMap.of(A, Domain.all(BIGINT))));
        assertFalse(equals(
                ImmutableMap.of(A, Domain.all(BIGINT)),
                ImmutableMap.of(A, Domain.none(BIGINT))));
        assertFalse(equals(
                ImmutableMap.of(A, Domain.all(BIGINT)),
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L))));
        assertTrue(equals(
                ImmutableMap.of(A, Domain.none(BIGINT)),
                ImmutableMap.of(A, Domain.none(BIGINT))));
        assertFalse(equals(
                ImmutableMap.of(A, Domain.none(BIGINT)),
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L))));
        assertTrue(equals(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L)),
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L))));
        assertFalse(equals(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L)),
                ImmutableMap.of(B, Domain.singleValue(BIGINT, 0L))));
        assertFalse(equals(
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L)),
                ImmutableMap.of(A, Domain.singleValue(BIGINT, 1L))));
        // Both sides are effectively ALL / effectively NONE, so column identity is moot.
        assertTrue(equals(
                ImmutableMap.of(A, Domain.all(BIGINT)),
                ImmutableMap.of(B, Domain.all(VARCHAR))));
        assertTrue(equals(
                ImmutableMap.of(A, Domain.none(BIGINT)),
                ImmutableMap.of(B, Domain.none(VARCHAR))));
        assertTrue(equals(
                ImmutableMap.of(A, Domain.none(BIGINT)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.none(VARCHAR))));
        assertFalse(equals(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 1L)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.none(VARCHAR))));
        assertTrue(equals(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 1L),
                        C, Domain.none(DOUBLE)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.none(VARCHAR))));
        assertTrue(equals(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.all(DOUBLE)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.all(DOUBLE))));
        assertTrue(equals(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.all(VARCHAR)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        C, Domain.all(DOUBLE))));
        assertFalse(equals(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.all(VARCHAR)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 1L),
                        C, Domain.all(DOUBLE))));
        assertFalse(equals(
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        B, Domain.all(VARCHAR)),
                ImmutableMap.of(
                        A, Domain.singleValue(BIGINT, 0L),
                        C, Domain.singleValue(DOUBLE, 0.0))));
    }
    /** isNone(): true iff any column's domain is NONE (no tuple can match). */
    @Test
    public void testIsNone()
    {
        assertFalse(TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>of()).isNone());
        assertFalse(TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L))).isNone());
        assertTrue(TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.none(BIGINT))).isNone());
        assertFalse(TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.all(BIGINT))).isNone());
        // One NONE column is enough, even if other columns are ALL.
        assertTrue(TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.all(BIGINT), B, Domain.none(BIGINT))).isNone());
    }
    /** isAll(): true iff every column's domain is ALL (no constraint at all). */
    @Test
    public void testIsAll()
    {
        assertTrue(TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>of()).isAll());
        assertFalse(TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L))).isAll());
        assertTrue(TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.all(BIGINT))).isAll());
        assertFalse(TupleDomain.withColumnDomains(ImmutableMap.of(A, Domain.singleValue(BIGINT, 0L), B, Domain.all(BIGINT))).isAll());
    }
    /**
     * extractFixedValues keeps only columns pinned to exactly one value
     * (single value or only-NULL); range and ALL domains are dropped.
     */
    @Test
    public void testExtractFixedValues()
    {
        assertEquals(
                TupleDomain.extractFixedValues(TupleDomain.withColumnDomains(
                        ImmutableMap.<ColumnHandle, Domain>builder()
                                .put(A, Domain.all(DOUBLE))
                                .put(B, Domain.singleValue(VARCHAR, utf8Slice("value")))
                                .put(C, Domain.onlyNull(BIGINT))
                                .put(D, Domain.create(ValueSet.ofRanges(Range.equal(BIGINT, 1L)), true))
                                .build())).get(),
                // A is unconstrained and D is nullable, so only B and C survive.
                ImmutableMap.of(
                        B, NullableValue.of(VARCHAR, utf8Slice("value")),
                        C, NullableValue.asNull(BIGINT)));
    }
    /** A NONE TupleDomain has no satisfying tuple, so no fixed values exist. */
    @Test
    public void testExtractFixedValuesFromNone()
    {
        assertFalse(TupleDomain.extractFixedValues(TupleDomain.none()).isPresent());
    }
    /** An ALL TupleDomain yields an empty fixed-value map. */
    @Test
    public void testExtractFixedValuesFromAll()
    {
        assertEquals(TupleDomain.extractFixedValues(TupleDomain.all()).get(), ImmutableMap.of());
    }
    /** fromFixedValues is the inverse direction: values become single-value/only-null domains. */
    @Test
    public void testSingleValuesMapToDomain()
    {
        assertEquals(
                TupleDomain.fromFixedValues(
                        ImmutableMap.<ColumnHandle, NullableValue>builder()
                                .put(A, NullableValue.of(BIGINT, 1L))
                                .put(B, NullableValue.of(VARCHAR, utf8Slice("value")))
                                .put(C, NullableValue.of(DOUBLE, 0.01))
                                .put(D, NullableValue.asNull(BOOLEAN))
                                .build()),
                TupleDomain.withColumnDomains(ImmutableMap.<ColumnHandle, Domain>builder()
                        .put(A, Domain.singleValue(BIGINT, 1L))
                        .put(B, Domain.singleValue(VARCHAR, utf8Slice("value")))
                        .put(C, Domain.singleValue(DOUBLE, 0.01))
                        .put(D, Domain.onlyNull(BOOLEAN))
                        .build()));
    }
    /** No fixed values means no constraints at all. */
    @Test
    public void testEmptySingleValuesMapToDomain()
    {
        assertEquals(TupleDomain.fromFixedValues(ImmutableMap.of()), TupleDomain.all());
    }
    /**
     * Round-trips ALL, NONE and a fixed-value TupleDomain through JSON using
     * testing (de)serializers for the ColumnHandle, Type and Block components.
     */
    @Test
    public void testJsonSerialization()
            throws Exception
    {
        TestingTypeManager typeManager = new TestingTypeManager();
        TestingBlockEncodingSerde blockEncodingSerde = new TestingBlockEncodingSerde(typeManager);
        // ColumnHandle is an interface; deserialize it concretely as TestingColumnHandle.
        ObjectMapper mapper = new ObjectMapperProvider().get()
                .registerModule(new SimpleModule()
                        .addDeserializer(ColumnHandle.class, new JsonDeserializer<ColumnHandle>()
                        {
                            @Override
                            public ColumnHandle deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
                                    throws IOException
                            {
                                return new ObjectMapperProvider().get().readValue(jsonParser, TestingColumnHandle.class);
                            }
                        })
                        .addDeserializer(Type.class, new TestingTypeDeserializer(typeManager))
                        .addSerializer(Block.class, new TestingBlockJsonSerde.Serializer(blockEncodingSerde))
                        .addDeserializer(Block.class, new TestingBlockJsonSerde.Deserializer(blockEncodingSerde)));
        TupleDomain<ColumnHandle> tupleDomain = TupleDomain.all();
        assertEquals(tupleDomain, mapper.readValue(mapper.writeValueAsString(tupleDomain), new TypeReference<TupleDomain<ColumnHandle>>() {}));
        tupleDomain = TupleDomain.none();
        assertEquals(tupleDomain, mapper.readValue(mapper.writeValueAsString(tupleDomain), new TypeReference<TupleDomain<ColumnHandle>>() {}));
        tupleDomain = TupleDomain.fromFixedValues(ImmutableMap.of(A, NullableValue.of(BIGINT, 1L), B, NullableValue.asNull(VARCHAR)));
        assertEquals(tupleDomain, mapper.readValue(mapper.writeValueAsString(tupleDomain), new TypeReference<TupleDomain<ColumnHandle>>() {}));
    }
    /** transform() remaps column keys through a function, preserving each domain. */
    @Test
    public void testTransform()
    {
        Map<Integer, Domain> domains = ImmutableMap.<Integer, Domain>builder()
                .put(1, Domain.singleValue(BIGINT, 1L))
                .put(2, Domain.singleValue(BIGINT, 2L))
                .put(3, Domain.singleValue(BIGINT, 3L))
                .build();
        TupleDomain<Integer> domain = TupleDomain.withColumnDomains(domains);
        TupleDomain<String> transformed = domain.transform(Object::toString);
        Map<String, Domain> expected = ImmutableMap.<String, Domain>builder()
                .put("1", Domain.singleValue(BIGINT, 1L))
                .put("2", Domain.singleValue(BIGINT, 2L))
                .put("3", Domain.singleValue(BIGINT, 3L))
                .build();
        assertEquals(transformed.getDomains().get(), expected);
    }
    /** transform() must reject a mapping that sends distinct keys to the same key. */
    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testTransformFailsWithNonUniqueMapping()
    {
        Map<Integer, Domain> domains = ImmutableMap.<Integer, Domain>builder()
                .put(1, Domain.singleValue(BIGINT, 1L))
                .put(2, Domain.singleValue(BIGINT, 2L))
                .put(3, Domain.singleValue(BIGINT, 3L))
                .build();
        TupleDomain<Integer> domain = TupleDomain.withColumnDomains(domains);
        // All three keys collapse to "x" -> IllegalArgumentException expected.
        domain.transform(input -> "x");
    }
private boolean overlaps(Map<ColumnHandle, Domain> domains1, Map<ColumnHandle, Domain> domains2)
{
TupleDomain<ColumnHandle> tupleDomain1 = TupleDomain.withColumnDomains(domains1);
TupleDomain<ColumnHandle> tupleDOmain2 = TupleDomain.withColumnDomains(domains2);
return tupleDomain1.overlaps(tupleDOmain2);
}
private boolean contains(Map<ColumnHandle, Domain> superSet, Map<ColumnHandle, Domain> subSet)
{
TupleDomain<ColumnHandle> superSetTupleDomain = TupleDomain.withColumnDomains(superSet);
TupleDomain<ColumnHandle> subSetTupleDomain = TupleDomain.withColumnDomains(subSet);
return superSetTupleDomain.contains(subSetTupleDomain);
}
private boolean equals(Map<ColumnHandle, Domain> domains1, Map<ColumnHandle, Domain> domains2)
{
TupleDomain<ColumnHandle> tupleDomain1 = TupleDomain.withColumnDomains(domains1);
TupleDomain<ColumnHandle> tupleDOmain2 = TupleDomain.withColumnDomains(domains2);
return tupleDomain1.equals(tupleDOmain2);
}
}
| |
/* $Id: MatchMap.java 988245 2010-08-23 18:39:35Z kwright $ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.manifoldcf.crawler.connectors.sharepoint;
import org.apache.manifoldcf.core.interfaces.*;
import java.util.*;
import java.util.regex.*;
/** An instance of this class describes a "match map", which describes a translation of an input
* string using regexp technology.
* A match map consists of multiple clauses, which are fired in sequence. Each clause is a regexp
* search and replace, where the replace string can include references to the groups present in the
* search regexp.
* MatchMaps can be converted to strings in two different ways. The first way is to build a single
* string of the form "match1=replace1&match2=replace2...". Strings of this kind must escape & and =
* characters in the match and replace strings, where found. The second way is to generate an array
* of match strings and a corresponding array of replace strings. This method requires no escaping
* of the string contents.
*/
public class MatchMap
{
  public static final String _rcsid = "@(#)$Id: MatchMap.java 988245 2010-08-23 18:39:35Z kwright $";

  /** This is the set of match regexp strings */
  protected ArrayList matchStrings;
  /** This is the set of Pattern objects corresponding to the match regexp strings.
  * It's null if the patterns have not been built yet (built lazily by translate()). */
  protected Pattern[] matchPatterns = null;
  /** This is the set of replace strings, parallel to matchStrings */
  protected ArrayList replaceStrings;

  /** Constructor.  Build an empty matchmap. */
  public MatchMap()
  {
    matchStrings = new ArrayList();
    replaceStrings = new ArrayList();
  }

  /** Constructor.  Build a matchmap from a single string of the form
  * "match1=replace1&match2=replace2...".  A backslash escapes the next
  * character, so '&', '=' and '\' may appear inside either half.
  */
  public MatchMap(String stringForm)
  {
    matchStrings = new ArrayList();
    replaceStrings = new ArrayList();
    StringBuilder matchString = new StringBuilder();
    StringBuilder replaceString = new StringBuilder();
    int i = 0;
    while (i < stringForm.length())
    {
      matchString.setLength(0);
      replaceString.setLength(0);
      // Scan the match half, up to an unescaped '=' or '&'
      while (i < stringForm.length())
      {
        char x = stringForm.charAt(i);
        if (x == '&' || x == '=')
          break;
        i++;
        if (x == '\\' && i < stringForm.length())
          x = stringForm.charAt(i++);
        matchString.append(x);
      }
      if (i < stringForm.length())
      {
        char x = stringForm.charAt(i);
        if (x == '=')
        {
          i++;
          // Pick up the second (replace) half, up to an unescaped '&'
          while (i < stringForm.length())
          {
            x = stringForm.charAt(i);
            if (x == '&')
              break;
            i++;
            if (x == '\\' && i < stringForm.length())
              x = stringForm.charAt(i++);
            replaceString.append(x);
          }
        }
      }
      matchStrings.add(matchString.toString());
      replaceStrings.add(replaceString.toString());
      // Skip the '&' clause separator, if present
      if (i < stringForm.length())
      {
        char x = stringForm.charAt(i);
        if (x == '&')
          i++;
      }
    }
  }

  /** Constructor.  Build a matchmap from two arraylists representing match and replace strings */
  public MatchMap(ArrayList matchStrings, ArrayList replaceStrings)
  {
    // Shallow-copy so later mutation of the caller's lists cannot affect this map
    this.matchStrings = (ArrayList)matchStrings.clone();
    this.replaceStrings = (ArrayList)replaceStrings.clone();
  }

  /** Get the number of match/replace strings */
  public int getMatchCount()
  {
    return matchStrings.size();
  }

  /** Get a specific match string */
  public String getMatchString(int index)
  {
    return (String)matchStrings.get(index);
  }

  /** Get a specific replace string */
  public String getReplaceString(int index)
  {
    return (String)replaceStrings.get(index);
  }

  /** Delete a specified match/replace string pair */
  public void deleteMatchPair(int index)
  {
    matchStrings.remove(index);
    replaceStrings.remove(index);
    // Invalidate the compiled pattern cache
    matchPatterns = null;
  }

  /** Insert a match/replace string pair */
  public void insertMatchPair(int index, String match, String replace)
  {
    matchStrings.add(index,match);
    replaceStrings.add(index,replace);
    matchPatterns = null;
  }

  /** Append a match/replace string pair */
  public void appendMatchPair(String match, String replace)
  {
    matchStrings.add(match);
    replaceStrings.add(replace);
    matchPatterns = null;
  }

  /** Append old-style match/replace pair.
  * This method translates old-style regexp and group output form to the
  * current style before adding to the map.
  */
  public void appendOldstyleMatchPair(String oldstyleMatch, String oldstyleReplace)
  {
    // Old-style matches were implicitly anchored
    String newStyleMatch = "^" + oldstyleMatch + "$";

    // Need to build a new-style replace string from the old one.  To do that, use the
    // original parser (which basically will guarantee that we get it right)
    EvaluatorTokenStream et = new EvaluatorTokenStream(oldstyleReplace);
    StringBuilder newStyleReplace = new StringBuilder();
    while (true)
    {
      EvaluatorToken t = et.peek();
      if (t == null)
        break;
      switch (t.getType())
      {
      case EvaluatorToken.TYPE_COMMA:
        et.advance();
        break;
      case EvaluatorToken.TYPE_GROUP:
        et.advance();
        int groupNumber = t.getGroupNumber();
        switch (t.getGroupStyle())
        {
        case EvaluatorToken.GROUPSTYLE_NONE:
          newStyleReplace.append("$(").append(Integer.toString(groupNumber)).append(")");
          break;
        case EvaluatorToken.GROUPSTYLE_LOWER:
          newStyleReplace.append("$(").append(Integer.toString(groupNumber)).append("l)");
          break;
        case EvaluatorToken.GROUPSTYLE_UPPER:
          newStyleReplace.append("$(").append(Integer.toString(groupNumber)).append("u)");
          break;
        case EvaluatorToken.GROUPSTYLE_MIXED:
          newStyleReplace.append("$(").append(Integer.toString(groupNumber)).append("m)");
          break;
        default:
          break;
        }
        break;
      case EvaluatorToken.TYPE_TEXT:
        et.advance();
        escape(newStyleReplace,t.getTextValue());
        break;
      default:
        break;
      }
    }
    appendMatchPair(newStyleMatch,newStyleReplace.toString());
  }

  /** Escape a string so it is verbatim in a replace expression */
  protected static void escape(StringBuilder output, String input)
  {
    int i = 0;
    while (i < input.length())
    {
      char x = input.charAt(i++);
      // '$' introduces a group reference; double it so it stays literal
      if (x == '$')
        output.append(x);
      output.append(x);
    }
  }

  /** Convert the matchmap to string form ("match1=replace1&match2=replace2..."). */
  @Override
  public String toString()
  {
    int i = 0;
    StringBuilder rval = new StringBuilder();
    while (i < matchStrings.size())
    {
      String matchString = (String)matchStrings.get(i);
      String replaceString = (String)replaceStrings.get(i);
      if (i > 0)
        rval.append('&');
      stuff(rval,matchString);
      rval.append('=');
      stuff(rval,replaceString);
      i++;
    }
    return rval.toString();
  }

  /** Stuff characters: backslash-escape '\', '&' and '=' for the string form */
  protected static void stuff(StringBuilder sb, String value)
  {
    int i = 0;
    while (i < value.length())
    {
      char x = value.charAt(i++);
      if (x == '\\' || x == '&' || x == '=')
        sb.append('\\');
      sb.append(x);
    }
  }

  /** Perform a translation.  Each match/replace clause is applied to the
  * (possibly already rewritten) input in sequence; replace strings may
  * reference groups via $(n), optionally with an l/u/m case modifier.
  */
  public String translate(String input)
    throws ManifoldCFException
  {
    // Build pattern vector if not already there
    if (matchPatterns == null)
    {
      matchPatterns = new Pattern[matchStrings.size()];
      int i = 0;
      while (i < matchPatterns.length)
      {
        String regexp = (String)matchStrings.get(i);
        try
        {
          matchPatterns[i] = Pattern.compile(regexp);
        }
        catch (java.util.regex.PatternSyntaxException e)
        {
          // Leave the cache unset so a corrected map recompiles from scratch
          matchPatterns = null;
          throw new ManifoldCFException("For match expression '"+regexp+"', found pattern syntax error: "+e.getMessage(),e);
        }
        i++;
      }
    }

    int j = 0;
    while (j < matchPatterns.length)
    {
      Pattern p = matchPatterns[j];
      // Construct a matcher
      Matcher m = p.matcher(input);
      // Grab the output description
      String outputDescription = (String)replaceStrings.get(j);
      j++;
      // Create a copy buffer
      StringBuilder outputBuffer = new StringBuilder();
      // Keep track of the index in the original string we have done up to
      int currentIndex = 0;
      // Scan the string using find, and for each one found, do a translation
      while (true)
      {
        boolean foundOne = m.find();
        if (foundOne == false)
        {
          // No subsequent match found.
          // Copy everything from currentIndex until the end of input
          outputBuffer.append(input.substring(currentIndex));
          break;
        }

        // Do a translation.  This involves copying everything in the input
        // string up until the start of the match, then doing a replace for
        // the match itself, and finally setting the currentIndex to the end
        // of the match.

        int matchStart = m.start(0);
        int matchEnd = m.end(0);

        if (matchStart == -1)
        {
          // The expression was degenerate; treat this as the end.
          outputBuffer.append(input.substring(currentIndex));
          break;
        }

        outputBuffer.append(input.substring(currentIndex,matchStart));
        // Process translation description!
        int i = 0;
        while (i < outputDescription.length())
        {
          char x = outputDescription.charAt(i++);
          if (x == '$' && i < outputDescription.length())
          {
            x = outputDescription.charAt(i++);
            if (x == '(')
            {
              // Process evaluation expression: digits plus optional case modifier
              StringBuilder numberBuf = new StringBuilder();
              boolean upper = false;
              boolean lower = false;
              boolean mixed = false;
              while (i < outputDescription.length())
              {
                char y = outputDescription.charAt(i++);
                if (y == ')')
                  break;
                else if (y >= '0' && y <= '9')
                  numberBuf.append(y);
                else if (y == 'u' || y == 'U')
                  upper = true;
                else if (y == 'l' || y == 'L')
                  lower = true;
                else if (y == 'm' || y == 'M')
                  mixed = true;
              }
              String number = numberBuf.toString();
              try
              {
                int groupnum = Integer.parseInt(number);
                String groupValue = m.group(groupnum);
                if (upper)
                  outputBuffer.append(groupValue.toUpperCase());
                else if (lower)
                  outputBuffer.append(groupValue.toLowerCase());
                else if (mixed && groupValue.length() > 0)
                  outputBuffer.append(groupValue.substring(0,1).toUpperCase()).append(groupValue.substring(1).toLowerCase());
                else
                  outputBuffer.append(groupValue);
              }
              catch (NumberFormatException e)
              {
                // Silently skip, because it's an illegal group number, so nothing
                // gets added.
              }

              // Go back around, so we don't add the $ in
              continue;
            }
          }
          outputBuffer.append(x);
        }

        currentIndex = matchEnd;
      }
      // The rewritten string feeds the next clause
      input = outputBuffer.toString();
    }

    return input;
  }

  // Protected classes

  // These classes are used to process the old token-based replacement strings

  /** Evaluator token.
  */
  protected static class EvaluatorToken
  {
    public final static int TYPE_GROUP = 0;
    public final static int TYPE_TEXT = 1;
    public final static int TYPE_COMMA = 2;

    public final static int GROUPSTYLE_NONE = 0;
    public final static int GROUPSTYLE_LOWER = 1;
    public final static int GROUPSTYLE_UPPER = 2;
    public final static int GROUPSTYLE_MIXED = 3;

    protected int type;
    protected int groupNumber = -1;
    protected int groupStyle = GROUPSTYLE_NONE;
    protected String textValue = null;

    public EvaluatorToken()
    {
      type = TYPE_COMMA;
    }

    public EvaluatorToken(int groupNumber, int groupStyle)
    {
      type = TYPE_GROUP;
      this.groupNumber = groupNumber;
      this.groupStyle = groupStyle;
    }

    public EvaluatorToken(String text)
    {
      type = TYPE_TEXT;
      this.textValue = text;
    }

    public int getType()
    {
      return type;
    }

    public int getGroupNumber()
    {
      return groupNumber;
    }

    public int getGroupStyle()
    {
      return groupStyle;
    }

    public String getTextValue()
    {
      return textValue;
    }
  }

  /** Token stream.
  */
  protected static class EvaluatorTokenStream
  {
    protected String text;
    protected int pos;
    protected EvaluatorToken token = null;

    /** Constructor.
    */
    public EvaluatorTokenStream(String text)
    {
      this.text = text;
      this.pos = 0;
    }

    /** Get current token.
    */
    public EvaluatorToken peek()
    {
      if (token == null)
      {
        token = nextToken();
      }
      return token;
    }

    /** Go on to next token.
    */
    public void advance()
    {
      token = null;
    }

    protected EvaluatorToken nextToken()
    {
      char x;
      // Skip whitespace to the start of the next token; null at end of input
      while (true)
      {
        if (pos == text.length())
          return null;
        x = text.charAt(pos);
        if (x > ' ')
          break;
        pos++;
      }

      StringBuilder sb;

      if (x == '"')
      {
        // Parse quoted text; backslash escapes the next character
        pos++;
        sb = new StringBuilder();
        while (true)
        {
          if (pos == text.length())
            break;
          x = text.charAt(pos);
          pos++;
          if (x == '"')
          {
            break;
          }
          if (x == '\\')
          {
            if (pos == text.length())
              break;
            x = text.charAt(pos++);
          }
          sb.append(x);
        }
        return new EvaluatorToken(sb.toString());
      }

      if (x == ',')
      {
        pos++;
        return new EvaluatorToken();
      }

      // Eat number at beginning
      sb = new StringBuilder();
      while (true)
      {
        if (pos == text.length())
          break;
        x = text.charAt(pos);
        if (x >= '0' && x <= '9')
        {
          sb.append(x);
          pos++;
          continue;
        }
        break;
      }
      String numberValue = sb.toString();
      int groupNumber = 0;
      if (numberValue.length() > 0)
        groupNumber = Integer.parseInt(numberValue);

      // Save the next char position
      int modifierPos = pos;

      // Go to the end of the word
      while (true)
      {
        if (pos == text.length())
          break;
        x = text.charAt(pos);
        // (char is unsigned, so the historic "x >= 0" guard was always true and is dropped)
        if (x == ',' || x >= '0' && x <= '9' || x <= ' ')
          break;
        pos++;
      }

      int style = EvaluatorToken.GROUPSTYLE_NONE;
      if (modifierPos != pos)
      {
        String modifier = text.substring(modifierPos,pos);
        if (modifier.startsWith("u"))
          style = EvaluatorToken.GROUPSTYLE_UPPER;
        else if (modifier.startsWith("l"))
          style = EvaluatorToken.GROUPSTYLE_LOWER;
        else if (modifier.startsWith("m"))
          style = EvaluatorToken.GROUPSTYLE_MIXED;
      }
      return new EvaluatorToken(groupNumber,style);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Igor V. Stolyarov
* @version $Revision$
*/
/*
* Created on 22.12.2004
*
*/
package org.apache.harmony.awt.gl.image;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.DataBufferByte;
import java.awt.image.DataBufferInt;
import java.awt.image.DirectColorModel;
import java.awt.image.ImageConsumer;
import java.awt.image.ImageObserver;
import java.awt.image.ImageProducer;
import java.awt.image.IndexColorModel;
import java.awt.image.WritableRaster;
import java.util.Hashtable;
import java.util.Vector;
import org.apache.harmony.awt.gl.ImageSurface;
import org.apache.harmony.awt.internal.nls.Messages;
/**
 * Implementation of the abstract {@link Image} class whose contents are
 * delivered asynchronously by an {@link ImageProducer}.
 */
public class OffscreenImage extends Image implements ImageConsumer {
static final ColorModel rgbCM = ColorModel.getRGBdefault();
ImageProducer src;
BufferedImage image;
ColorModel cm;
WritableRaster raster;
boolean isIntRGB;
Hashtable<?, ?> properties;
Vector<ImageObserver> observers;
int width;
int height;
int imageState;
int hints;
private boolean producing;
private ImageSurface imageSurf;
    /**
     * Creates an image that will be filled in asynchronously by the given
     * producer.  Dimensions are unknown (-1) until the producer supplies them.
     *
     * @param ip the producer that will deliver pixel data to this consumer
     */
    public OffscreenImage(ImageProducer ip){
        imageState = 0;
        src = ip;
        width = -1;
        height = -1;
        observers = new Vector<ImageObserver>();
        producing = false;
    }
    /**
     * Returns the named image property, kicking off production if the
     * property table has not arrived yet.
     *
     * @return the property value, {@link Image#UndefinedProperty} if the name
     *         is unknown, or null if no property table has been delivered yet
     * @throws NullPointerException if {@code name} is null
     */
    @Override
    public Object getProperty(String name, ImageObserver observer) {
        if(name == null) {
            // awt.38=Property name is not defined
            throw new NullPointerException(Messages.getString("awt.38")); //$NON-NLS-1$
        }
        if(properties == null){
            // Not delivered yet: register the observer and start loading.
            addObserver(observer);
            startProduction();
            if(properties == null) {
                return null;
            }
        }
        Object prop = properties.get(name);
        if(prop == null) {
            prop = UndefinedProperty;
        }
        return prop;
    }
    /** Returns the producer this image was constructed from. */
    @Override
    public ImageProducer getSource() {
        return src;
    }
    /**
     * Returns the image width, or -1 if it is not yet known.  If unknown,
     * registers the observer and starts production so the answer can be
     * delivered asynchronously.
     */
    @Override
    public int getWidth(ImageObserver observer) {
        if((imageState & ImageObserver.WIDTH) == 0){
            addObserver(observer);
            startProduction();
            // Re-check: production may have delivered the width synchronously.
            if((imageState & ImageObserver.WIDTH) == 0) {
                return -1;
            }
        }
        return width;
    }
    /**
     * Returns the image height, or -1 if it is not yet known; same
     * asynchronous contract as {@link #getWidth(ImageObserver)}.
     */
    @Override
    public int getHeight(ImageObserver observer) {
        if((imageState & ImageObserver.HEIGHT) == 0){
            addObserver(observer);
            startProduction();
            if((imageState & ImageObserver.HEIGHT) == 0) {
                return -1;
            }
        }
        return height;
    }
    /**
     * Always fails: producer-backed images are read-only and cannot be drawn into.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public Graphics getGraphics() {
        // awt.39=This method is not implemented for image obtained from ImageProducer
        throw new UnsupportedOperationException(Messages.getString("awt.39")); //$NON-NLS-1$
    }
@Override
public void flush() {
stopProduction();
imageUpdate(this, ImageObserver.ABORT, -1, -1, -1, -1);
imageState &= ~ImageObserver.ERROR;
imageState = 0;
image = null;
cm = null;
raster = null;
hints = 0;
width = -1;
height = -1;
}
    /**
     * ImageConsumer callback: stores the property table supplied by the
     * producer and notifies observers that properties are available.
     */
    public void setProperties(Hashtable<?, ?> properties) {
        this.properties = properties;
        imageUpdate(this, ImageObserver.PROPERTIES, 0, 0, width, height);
    }
    /**
     * ImageConsumer callback: records the color model the producer intends
     * to use for subsequent setPixels calls.
     */
    public void setColorModel(ColorModel cm) {
        this.cm = cm;
    }
    /*
     * Fast paths: when a JPEG is being loaded, the producer typically supplies
     * a DirectColorModel and fills the raster through the int[] setPixels
     * variant.
     *
     * When a GIF is being loaded, the raster is filled through the byte[]
     * setPixels variant with an IndexColorModel.  The color model may change
     * mid-stream; in that case the producer switches to the int[] variant and
     * the image is forced to the default int ARGB color model, with the
     * remaining pixels delivered through a DirectColorModel.
     */
public void setPixels(int x, int y, int w, int h, ColorModel model,
int[] pixels, int off, int scansize) {
if(raster == null){
if(cm == null){
if(model == null) {
// awt.3A=Color Model is null
throw new NullPointerException(Messages.getString("awt.3A")); //$NON-NLS-1$
}
cm = model;
}
createRaster();
}
if(model == null) {
model = cm;
}
if(cm != model){
forceToIntARGB();
}
if(cm == model && model.getTransferType() == DataBuffer.TYPE_INT &&
raster.getNumDataElements() == 1){
DataBufferInt dbi = (DataBufferInt) raster.getDataBuffer();
int data[] = dbi.getData();
int scanline = raster.getWidth();
int rof = dbi.getOffset() + y * scanline + x;
for(int lineOff = off, line = y; line < y + h;
line++, lineOff += scansize, rof += scanline){
System.arraycopy(pixels, lineOff, data, rof, w);
}
}else if(isIntRGB){
int buff[] = new int[w];
DataBufferInt dbi = (DataBufferInt) raster.getDataBuffer();
int data[] = dbi.getData();
int scanline = raster.getWidth();
int rof = dbi.getOffset() + y * scanline + x;
for (int sy = y, sOff = off; sy < y + h; sy++, sOff += scansize,
rof += scanline) {
for (int sx = x, idx = 0; sx < x + w; sx++, idx++) {
buff[idx] = model.getRGB(pixels[sOff + idx]);
}
System.arraycopy(buff, 0, data, rof, w);
}
}else{
Object buf = null;
for (int sy = y, sOff = off; sy < y + h; sy++, sOff += scansize) {
for (int sx = x, idx = 0; sx < x + w; sx++, idx++) {
int rgb = model.getRGB(pixels[sOff + idx]);
buf = cm.getDataElements(rgb, buf);
raster.setDataElements(sx, sy, buf);
}
}
}
if (imageSurf != null) {
imageSurf.invalidate();
}
imageUpdate(this, ImageObserver.SOMEBITS, 0, 0, width, height);
}
/**
 * ImageConsumer callback: receive a rectangle of byte (indexed) pixels in
 * {@code model}. Lazily creates the backing raster, converts to int ARGB if
 * the delivered model differs from the stored one, then copies the data via
 * the fastest applicable path.
 *
 * Bug fix: the original cast {@code (IndexColorModel) model} unconditionally
 * before checking {@code model instanceof IndexColorModel}, which threw a
 * ClassCastException for any non-index model and made the else-branch
 * unreachable. The palette lookup is now built only inside the instanceof
 * branch.
 */
public void setPixels(int x, int y, int w, int h, ColorModel model,
        byte[] pixels, int off, int scansize) {
    if (raster == null) {
        if (cm == null) {
            if (model == null) {
                // awt.3A=Color Model is null
                throw new NullPointerException(Messages.getString("awt.3A")); //$NON-NLS-1$
            }
            cm = model;
        }
        createRaster();
    }
    if (model == null) {
        model = cm;
    }
    if (model != cm) {
        forceToIntARGB();
    }
    if (isIntRGB) {
        int buff[] = new int[w];
        DataBufferInt dbi = (DataBufferInt) raster.getDataBuffer();
        int data[] = dbi.getData();
        int scanline = raster.getWidth();
        int rof = dbi.getOffset() + y * scanline + x;
        if (model instanceof IndexColorModel) {
            // Fast path: resolve each byte index through the palette once.
            IndexColorModel icm = (IndexColorModel) model;
            int colorMap[] = new int[icm.getMapSize()];
            icm.getRGBs(colorMap);
            for (int sy = y, sOff = off; sy < y + h; sy++, sOff += scansize,
                    rof += scanline) {
                for (int sx = x, idx = 0; sx < x + w; sx++, idx++) {
                    buff[idx] = colorMap[pixels[sOff + idx] & 0xff];
                }
                System.arraycopy(buff, 0, data, rof, w);
            }
        } else {
            // Non-index model: convert each byte sample via the model.
            for (int sy = y, sOff = off; sy < y + h; sy++, sOff += scansize,
                    rof += scanline) {
                for (int sx = x, idx = 0; sx < x + w; sx++, idx++) {
                    buff[idx] = model.getRGB(pixels[sOff + idx] & 0xff);
                }
                System.arraycopy(buff, 0, data, rof, w);
            }
        }
    } else if (model == cm && model.getTransferType() == DataBuffer.TYPE_BYTE &&
            raster.getNumDataElements() == 1) {
        // Source and destination share byte layout: bulk row copies.
        DataBufferByte dbb = (DataBufferByte) raster.getDataBuffer();
        byte data[] = dbb.getData();
        int scanline = raster.getWidth();
        int rof = dbb.getOffset() + y * scanline + x;
        for (int lineOff = off, line = y; line < y + h;
                line++, lineOff += scansize, rof += scanline) {
            System.arraycopy(pixels, lineOff, data, rof, w);
        }
    } else {
        // Generic slow path: per-pixel conversion through both color models.
        for (int sy = y, sOff = off; sy < y + h; sy++, sOff += scansize) {
            for (int sx = x, idx = 0; sx < x + w; sx++, idx++) {
                int rgb = model.getRGB(pixels[sOff + idx] & 0xff);
                raster.setDataElements(sx, sy, cm.getDataElements(rgb, null));
            }
        }
    }
    if (imageSurf != null) {
        imageSurf.invalidate();
    }
    imageUpdate(this, ImageObserver.SOMEBITS, 0, 0, width, height);
}
/**
 * ImageConsumer callback: the producer reports the image size.
 * Non-positive dimensions are treated as a decoding error.
 */
public void setDimensions(int width, int height) {
    final boolean invalid = width <= 0 || height <= 0;
    if (invalid) {
        imageComplete(ImageObserver.ERROR);
        return;
    }
    this.width = width;
    this.height = height;
    // Both flags become known at once.
    imageUpdate(this, (ImageObserver.HEIGHT | ImageObserver.WIDTH),
            0, 0, width, height);
}
// ImageConsumer callback: delivery-order hints from the producer (stored but
// not otherwise acted upon here).
public void setHints(int hints) {
    this.hints = hints;
}
/**
 * ImageConsumer callback: production finished for a frame or the whole image.
 * Maps the consumer status to the corresponding ImageObserver flag, notifies
 * observers, and on any terminal status (error/abort/done) stops production
 * and drops all observers.
 *
 * @throws IllegalArgumentException for an unknown completion status
 */
public void imageComplete(int state) {
    final int flag;
    if (state == IMAGEABORTED) {
        flag = ImageObserver.ABORT;
    } else if (state == IMAGEERROR) {
        flag = ImageObserver.ERROR | ImageObserver.ABORT;
    } else if (state == SINGLEFRAMEDONE) {
        flag = ImageObserver.FRAMEBITS;
    } else if (state == STATICIMAGEDONE) {
        flag = ImageObserver.ALLBITS;
    } else {
        // awt.3B=Incorrect ImageConsumer completion status
        throw new IllegalArgumentException(Messages.getString("awt.3B")); //$NON-NLS-1$
    }
    imageUpdate(this, flag, 0, 0, width, height);
    final int terminalMask =
            ImageObserver.ERROR | ImageObserver.ABORT | ImageObserver.ALLBITS;
    if ((flag & terminalMask) != 0) {
        stopProduction();
        observers.removeAllElements();
    }
}
/**
 * Lazily wraps the decoded raster in a BufferedImage. May return null if the
 * color model or raster could not be produced yet.
 */
public /*synchronized*/ BufferedImage getBufferedImage() {
    if (image != null) {
        return image;
    }
    final ColorModel model = getColorModel();
    final WritableRaster wr = getRaster();
    if (model != null && wr != null) {
        image = new BufferedImage(model, wr, model.isAlphaPremultiplied(), null);
    }
    return image;
}
// Registers the observer (if production is still in flight) and returns the
// current ImageObserver status bits without forcing production to start.
public /*synchronized*/ int checkImage(ImageObserver observer) {
    addObserver(observer);
    return imageState;
}
/**
 * Starts loading the image if necessary.
 *
 * @return true if the image is already fully loaded, false otherwise;
 *         on a previous error the observer is notified and false is returned
 */
public /*synchronized*/ boolean prepareImage(ImageObserver observer) {
    if ((imageState & ImageObserver.ERROR) != 0) {
        // Loading already failed: report it straight to the caller's observer.
        if (observer != null) {
            observer.imageUpdate(this,
                    ImageObserver.ERROR | ImageObserver.ABORT, -1, -1, -1, -1);
        }
        return false;
    }
    if ((imageState & ImageObserver.ALLBITS) != 0) {
        return true;
    }
    addObserver(observer);
    startProduction();
    // Production may have completed synchronously.
    return (imageState & ImageObserver.ALLBITS) != 0;
}
// Returns the color model, forcing production if none was delivered yet;
// may still be null if production did not provide one.
public /*synchronized*/ ColorModel getColorModel() {
    if (cm == null) {
        startProduction();
    }
    return cm;
}
// Returns the backing raster, forcing production if it was not created yet;
// may still be null if no pixels were delivered.
public /*synchronized*/ WritableRaster getRaster() {
    if (raster == null) {
        startProduction();
    }
    return raster;
}
// Current ImageObserver status bits accumulated so far.
public int getState() {
    return imageState;
}
/**
 * Registers an observer for future updates. A null observer is ignored; an
 * already-registered one is not added twice. If the image has already failed
 * or completed, the observer is notified immediately instead of being stored.
 */
private /*synchronized*/ void addObserver(ImageObserver observer) {
    if (observer == null) {
        return;
    }
    if (observers.contains(observer)) {
        return;
    }
    if ((imageState & ImageObserver.ERROR) != 0) {
        // Terminal failure: report once, do not register.
        observer.imageUpdate(this,
                ImageObserver.ERROR | ImageObserver.ABORT, -1, -1, -1, -1);
        return;
    }
    if ((imageState & ImageObserver.ALLBITS) != 0) {
        // Already complete: report the final state, do not register.
        observer.imageUpdate(this, imageState, 0, 0, width, height);
        return;
    }
    observers.addElement(observer);
}
// Starts (or restarts) pixel delivery from the producer exactly once per
// production cycle; clears a previous ABORT so a flushed image can reload.
private synchronized void startProduction() {
    if (!producing) {
        imageState &= ~ImageObserver.ABORT;
        producing = true;
        // May deliver pixels synchronously on this thread.
        src.startProduction(this);
    }
}
// Detaches this consumer from the producer and marks production as stopped.
private synchronized void stopProduction() {
    producing = false;
    src.removeConsumer(this);
}
// Allocates the backing raster for the current color model and records
// whether it is a plain int-RGB layout (enables the bulk-copy fast paths in
// setPixels). On any allocation failure it falls back to the default ARGB
// model, which is always int-RGB.
private void createRaster() {
    try {
        raster = cm.createCompatibleWritableRaster(width, height);
        isIntRGB = false;
        if (cm instanceof DirectColorModel) {
            DirectColorModel dcm = (DirectColorModel) cm;
            // int transfer type with standard 0xRRGGBB masks => rows can be
            // copied with System.arraycopy.
            if (dcm.getTransferType() == DataBuffer.TYPE_INT &&
                    dcm.getRedMask() == 0xff0000 &&
                    dcm.getGreenMask() == 0xff00 &&
                    dcm.getBlueMask() == 0xff) {
                isIntRGB = true;
            }
        }
    } catch (Exception e) {
        // e.g. invalid dimensions or an exotic model: fall back to default ARGB.
        cm = ColorModel.getRGBdefault();
        raster = cm.createCompatibleWritableRaster(width, height);
        isIntRGB = true;
    }
}
// Accumulates the new status bits and fans the update out to all observers.
// NOTE(review): iterates `observers` directly — if an observer mutates the
// list from its callback this could throw ConcurrentModificationException;
// confirm whether callbacks are expected to be passive here.
private /*synchronized*/ void imageUpdate(Image img, int infoflags, int x, int y,
        int width, int height) {
    imageState |= infoflags;
    for (ImageObserver observer : observers) {
        observer.imageUpdate(this, infoflags, x, y, width, height);
    }
    // notifyAll();
}
// Converts the whole image decoded so far to the default int-ARGB color
// model. Used when a producer switches color models mid-stream (see the
// comment above the int[] setPixels overload). All cached derived objects
// (surface, BufferedImage) are invalidated under the lock.
private void forceToIntARGB() {
    int w = raster.getWidth();
    int h = raster.getHeight();
    WritableRaster destRaster = rgbCM.createCompatibleWritableRaster(w, h);
    Object obj = null;
    int pixels[] = new int[w];
    if (cm instanceof IndexColorModel) {
        // Indexed source: translate one row of byte indices through the
        // palette at a time.
        IndexColorModel icm = (IndexColorModel) cm;
        int colorMap[] = new int[icm.getMapSize()];
        icm.getRGBs(colorMap);
        for (int y = 0; y < h; y++) {
            obj = raster.getDataElements(0, y, w, 1, obj);
            byte ba[] = (byte[]) obj;
            for (int x = 0; x < ba.length; x++) {
                pixels[x] = colorMap[ba[x] & 0xff];
            }
            destRaster.setDataElements(0, y, w, 1, pixels);
        }
    } else {
        // Generic source: per-pixel conversion through the current model.
        for (int y = 0; y < h; y++) {
            for (int x = 0; x < w; x++) {
                obj = raster.getDataElements(x, y, obj);
                pixels[x] = cm.getRGB(obj);
            }
            destRaster.setDataElements(0, y, w, 1, pixels);
        }
    }
    synchronized (this) {
        // Swap in the converted raster and drop anything built on the old one.
        if (imageSurf != null) {
            imageSurf.dispose();
            imageSurf = null;
        }
        if (image != null) {
            image.flush();
            image = null;
        }
        cm = rgbCM;
        raster = destRaster;
        isIntRGB = true;
    }
}
/**
 * Lazily creates the rendering surface over the decoded raster. May return
 * null if the color model or raster is not available yet.
 */
public ImageSurface getImageSurface() {
    if (imageSurf != null) {
        return imageSurf;
    }
    final ColorModel model = getColorModel();
    final WritableRaster wr = getRaster();
    if (model != null && wr != null) {
        imageSurf = new ImageSurface(model, wr);
    }
    return imageSurf;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.index.Index;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.query.QuerySearchRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.Transport;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Unit tests for the DFS query phase: verifies that distributed term
 * statistics results are turned into per-shard query requests, that partial
 * shard failures are tolerated (and the failed shard's context released),
 * and that an exception thrown while sending requests fails the whole phase.
 */
public class DfsQueryPhaseTests extends ESTestCase {
    // Builds a DFS result for the given shard with its shard index pre-set.
    private static DfsSearchResult newSearchResult(int shardIndex, long requestId, SearchShardTarget target) {
        DfsSearchResult result = new DfsSearchResult(requestId, target);
        result.setShardIndex(shardIndex);
        return result;
    }
    public void testDfsWith2Shards() throws IOException {
        AtomicArray<DfsSearchResult> results = new AtomicArray<>(2);
        AtomicReference<AtomicArray<SearchPhaseResult>> responseRef = new AtomicReference<>();
        results.set(0, newSearchResult(0, 1, new SearchShardTarget("node1", new Index("test", "na"), 0)));
        results.set(1, newSearchResult(1, 2, new SearchShardTarget("node2", new Index("test", "na"), 0)));
        results.get(0).termsStatistics(new Term[0], new TermStatistics[0]);
        results.get(1).termsStatistics(new Term[0], new TermStatistics[0]);
        SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null);
        // Stub transport: answers both shard-level query requests successfully.
        SearchTransportService searchTransportService = new SearchTransportService(
            Settings.builder().put("search.remote.connect", false).build(), null) {
            @Override
            public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task,
                                         SearchActionListener<QuerySearchResult> listener) {
                if (request.id() == 1) {
                    QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0));
                    queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]);
                    queryResult.size(2); // the size of the result set
                    listener.onResponse(queryResult);
                } else if (request.id() == 2) {
                    QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node2", new Index("test", "na"), 0));
                    queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(84, 2.0F)}, 2.0F), new DocValueFormat[0]);
                    queryResult.size(2); // the size of the result set
                    listener.onResponse(queryResult);
                } else {
                    fail("no such request ID: " + request.id());
                }
            }
        };
        MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
        mockSearchPhaseContext.searchTransport = searchTransportService;
        DfsQueryPhase phase = new DfsQueryPhase(results, controller,
            (response) -> new SearchPhase("test") {
                @Override
                public void run() throws IOException {
                    responseRef.set(response.results);
                }
            }, mockSearchPhaseContext);
        assertEquals("dfs_query", phase.getName());
        phase.run();
        mockSearchPhaseContext.assertNoFailure();
        // Both shards succeeded: results present, no fetch data yet, nothing released.
        assertNotNull(responseRef.get());
        assertNotNull(responseRef.get().get(0));
        assertNull(responseRef.get().get(0).fetchResult());
        assertEquals(1, responseRef.get().get(0).queryResult().topDocs().totalHits);
        assertEquals(42, responseRef.get().get(0).queryResult().topDocs().scoreDocs[0].doc);
        assertNotNull(responseRef.get().get(1));
        assertNull(responseRef.get().get(1).fetchResult());
        assertEquals(1, responseRef.get().get(1).queryResult().topDocs().totalHits);
        assertEquals(84, responseRef.get().get(1).queryResult().topDocs().scoreDocs[0].doc);
        assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
        assertEquals(2, mockSearchPhaseContext.numSuccess.get());
    }
    public void testDfsWith1ShardFailed() throws IOException {
        AtomicArray<DfsSearchResult> results = new AtomicArray<>(2);
        AtomicReference<AtomicArray<SearchPhaseResult>> responseRef = new AtomicReference<>();
        results.set(0, newSearchResult(0, 1, new SearchShardTarget("node1", new Index("test", "na"), 0)));
        results.set(1, newSearchResult(1, 2, new SearchShardTarget("node2", new Index("test", "na"), 0)));
        results.get(0).termsStatistics(new Term[0], new TermStatistics[0]);
        results.get(1).termsStatistics(new Term[0], new TermStatistics[0]);
        SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null);
        // Stub transport: shard 1 succeeds, shard 2 fails with an IOException.
        SearchTransportService searchTransportService = new SearchTransportService(
            Settings.builder().put("search.remote.connect", false).build(), null) {
            @Override
            public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task,
                                         SearchActionListener<QuerySearchResult> listener) {
                if (request.id() == 1) {
                    QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0));
                    queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]);
                    queryResult.size(2); // the size of the result set
                    listener.onResponse(queryResult);
                } else if (request.id() == 2) {
                    listener.onFailure(new MockDirectoryWrapper.FakeIOException());
                } else {
                    fail("no such request ID: " + request.id());
                }
            }
        };
        MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
        mockSearchPhaseContext.searchTransport = searchTransportService;
        DfsQueryPhase phase = new DfsQueryPhase(results, controller,
            (response) -> new SearchPhase("test") {
                @Override
                public void run() throws IOException {
                    responseRef.set(response.results);
                }
            }, mockSearchPhaseContext);
        assertEquals("dfs_query", phase.getName());
        phase.run();
        mockSearchPhaseContext.assertNoFailure();
        assertNotNull(responseRef.get());
        assertNotNull(responseRef.get().get(0));
        assertNull(responseRef.get().get(0).fetchResult());
        assertEquals(1, responseRef.get().get(0).queryResult().topDocs().totalHits);
        assertEquals(42, responseRef.get().get(0).queryResult().topDocs().scoreDocs[0].doc);
        // Failed shard: no result, one recorded failure, and its search context released.
        assertNull(responseRef.get().get(1));
        assertEquals(1, mockSearchPhaseContext.numSuccess.get());
        assertEquals(1, mockSearchPhaseContext.failures.size());
        assertTrue(mockSearchPhaseContext.failures.get(0).getCause() instanceof MockDirectoryWrapper.FakeIOException);
        assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
        assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(2L));
        // (the original repeated assertNull(responseRef.get().get(1)) here; the duplicate was removed)
    }
    public void testFailPhaseOnException() throws IOException {
        AtomicArray<DfsSearchResult> results = new AtomicArray<>(2);
        AtomicReference<AtomicArray<SearchPhaseResult>> responseRef = new AtomicReference<>();
        results.set(0, newSearchResult(0, 1, new SearchShardTarget("node1", new Index("test", "na"), 0)));
        results.set(1, newSearchResult(1, 2, new SearchShardTarget("node2", new Index("test", "na"), 0)));
        results.get(0).termsStatistics(new Term[0], new TermStatistics[0]);
        results.get(1).termsStatistics(new Term[0], new TermStatistics[0]);
        SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null);
        // Stub transport: shard 2 throws synchronously instead of calling the listener.
        SearchTransportService searchTransportService = new SearchTransportService(
            Settings.builder().put("search.remote.connect", false).build(), null) {
            @Override
            public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task,
                                         SearchActionListener<QuerySearchResult> listener) {
                if (request.id() == 1) {
                    QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0));
                    queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]);
                    queryResult.size(2); // the size of the result set
                    listener.onResponse(queryResult);
                } else if (request.id() == 2) {
                    throw new UncheckedIOException(new MockDirectoryWrapper.FakeIOException());
                } else {
                    fail("no such request ID: " + request.id());
                }
            }
        };
        MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
        mockSearchPhaseContext.searchTransport = searchTransportService;
        DfsQueryPhase phase = new DfsQueryPhase(results, controller,
            (response) -> new SearchPhase("test") {
                @Override
                public void run() throws IOException {
                    responseRef.set(response.results);
                }
            }, mockSearchPhaseContext);
        assertEquals("dfs_query", phase.getName());
        expectThrows(UncheckedIOException.class, () -> phase.run());
        assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); // phase execution will clean up on the contexts
    }
}
| |
package ru.testing.client.websocket;
import org.apache.log4j.Logger;
import org.glassfish.tyrus.client.ClientManager;
import org.glassfish.tyrus.client.ClientProperties;
import org.glassfish.tyrus.client.SslContextConfigurator;
import org.glassfish.tyrus.client.SslEngineConfigurator;
import org.glassfish.tyrus.ext.extension.deflate.PerMessageDeflateExtension;
import ru.testing.client.common.objects.Header;
import ru.testing.client.elements.Dialogs;
import javax.websocket.*;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static java.util.Collections.singletonList;
/**
* WebSocket client
*/
/**
 * Programmatic WebSocket client built on Tyrus: configures SSL handling,
 * optional per-message deflate compression, and custom handshake headers,
 * and manages a single session lifecycle (open/send/close).
 *
 * Fixes: log4j 1.x does not understand SLF4J-style "{}" placeholders, so the
 * previous error calls logged the literal text "{}"; they now pass the
 * throwable as the second argument. The onClose callback carried a
 * javax.websocket @OnClose annotation, which only applies to annotated
 * endpoints — on a programmatic Endpoint subclass it is an @Override.
 */
public class WsClient extends Endpoint {
    private static final Logger LOGGER = Logger.getLogger(WsClient.class);
    private final ClientManager client;
    private final ClientEndpointConfig config;
    private SslEngineConfigurator sslEngineConfigurator = new SslEngineConfigurator(new SslContextConfigurator());
    private List<Extension> extensions = new ArrayList<>();
    private URI endpointURI;
    private List<Header> headerList;
    private Session session;
    private boolean sslValidate;
    /**
     * Default client constructor
     */
    public WsClient() {
        // SSL configuration: accept any hostname (certificate validation is
        // toggled separately via setSslValidate).
        sslEngineConfigurator.setHostnameVerifier((host, sslSession) -> true);
        // Create ws client
        client = ClientManager.createClient();
        // Create client configuration
        config = ClientEndpointConfig.Builder.create()
                .decoders(singletonList(SimpleDecoder.class))
                .encoders(singletonList(SimpleEncoder.class))
                .configurator(new ClientEndpointConfig.Configurator() {
                    @Override
                    public void beforeRequest(Map<String, List<String>> headers) {
                        try {
                            if (headerList != null && headerList.size() > 0) {
                                for (Header header : headerList) {
                                    String headerName = header.getName();
                                    if (headers.containsKey(headerName)) {
                                        // Merge with an existing handshake header value.
                                        String value = headers.get(headerName).get(0)
                                                .concat(String.format(";%s", header.getValue()));
                                        headers.put(headerName, singletonList(value));
                                    } else {
                                        headers.put(headerName, singletonList(header.getValue()));
                                    }
                                }
                            }
                        } catch (Exception e) {
                            // log4j has no "{}" placeholders; pass the throwable directly.
                            LOGGER.error("Error add headers", e);
                        }
                        // Logging request headers
                        for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
                            LOGGER.debug(String.format("<- %s", entry));
                        }
                    }
                    @Override
                    public void afterResponse(HandshakeResponse hr) {
                        // Logging response headers
                        Map<String, List<String>> headers = hr.getHeaders();
                        for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
                            LOGGER.debug(String.format("-> %s", entry));
                        }
                    }
                })
                .extensions(extensions)
                .build();
    }
    /**
     * Set endpoint url
     *
     * @param endpointURI URI
     */
    public void setEndpointURI(URI endpointURI) {
        this.endpointURI = endpointURI;
    }
    /**
     * Set request header
     *
     * @param headers List<Header>
     */
    public void setHeaders(List<Header> headers) {
        this.headerList = headers;
    }
    /**
     * Set ssl validate for connection
     *
     * @param sslValidate boolean
     */
    public void setSslValidate(boolean sslValidate) {
        this.sslValidate = sslValidate;
    }
    /**
     * Set compression websocket extension
     *
     * @param withCompression boolean
     */
    public void setWithCompression(boolean withCompression) {
        if (withCompression) {
            extensions.add(new PerMessageDeflateExtension());
        }
    }
    /**
     * Open websocket connection
     *
     * @throws Exception connect to server
     */
    public void openConnection() throws Exception {
        if (session != null && session.isOpen()) {
            LOGGER.warn("Profile already connected!");
        } else {
            LOGGER.info(String.format("Connecting to %s ...", endpointURI.getHost()));
            // For wss without certificate validation, install the permissive
            // SSL engine configurator.
            if (endpointURI.getScheme().equals("wss") && !sslValidate) {
                client.getProperties().put(ClientProperties.SSL_ENGINE_CONFIGURATOR, sslEngineConfigurator);
            }
            session = client.connectToServer(this, config, endpointURI);
        }
    }
    @Override
    public void onOpen(Session session, EndpointConfig config) {
        if (session.isOpen()) {
            LOGGER.info(String.format("Connection open with server: %s", session.getRequestURI()));
            this.session = session;
        }
    }
    // Programmatic endpoints receive close events by overriding Endpoint#onClose;
    // the @OnClose annotation only works on annotated endpoints and was ignored here.
    @Override
    public void onClose(final Session session, final CloseReason reason) {
        if (!session.isOpen()) {
            LOGGER.info(String.format("Connection closed: %s", reason));
            this.session = null;
        }
    }
    /**
     * Set message handler for output response message
     *
     * @param messageHandler MessageHandler.Whole<String>
     */
    public void setMessageHandler(MessageHandler.Whole<String> messageHandler) {
        if (session != null) {
            session.addMessageHandler(messageHandler);
        }
    }
    /**
     * Send string message to websocket session
     *
     * @param message String
     */
    public void sendMessage(String message) {
        try {
            if (session != null) {
                session.getBasicRemote().sendText(message);
            }
        } catch (IOException e) {
            new Dialogs().getExceptionDialog(e);
        }
    }
    /**
     * Get connection status
     *
     * @return boolean
     */
    public boolean isOpenConnection() {
        return session != null && session.isOpen();
    }
    /**
     * Method close current connection
     */
    public void closeConnection() {
        LOGGER.debug(String.format("Close ws connection to endpoint uri: %s", this.endpointURI));
        if (session != null) {
            try {
                session.close();
            } catch (IOException e) {
                // Log the full exception (log4j has no "{}" placeholders).
                LOGGER.error("Close connection error", e);
            }
        }
    }
}
| |
package org.opentosca.container.api.service;
import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.ws.rs.NotFoundException;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.eclipse.winery.model.tosca.TNodeTemplate;
import org.eclipse.winery.model.tosca.TRelationshipTemplate;
import org.eclipse.winery.model.tosca.TServiceTemplate;
import org.opentosca.container.api.dto.NodeTemplateDTO;
import org.opentosca.container.api.dto.RelationshipTemplateDTO;
import org.opentosca.container.api.dto.request.CreateRelationshipTemplateInstanceRequest;
import org.opentosca.container.core.common.jpa.DocumentConverter;
import org.opentosca.container.core.engine.ToscaEngine;
import org.opentosca.container.core.model.csar.Csar;
import org.opentosca.container.core.model.csar.CsarId;
import org.opentosca.container.core.next.model.NodeTemplateInstance;
import org.opentosca.container.core.next.model.NodeTemplateInstanceProperty;
import org.opentosca.container.core.next.model.NodeTemplateInstanceState;
import org.opentosca.container.core.next.model.PlanInstance;
import org.opentosca.container.core.next.model.PlanType;
import org.opentosca.container.core.next.model.Property;
import org.opentosca.container.core.next.model.RelationshipTemplateInstance;
import org.opentosca.container.core.next.model.RelationshipTemplateInstanceProperty;
import org.opentosca.container.core.next.model.RelationshipTemplateInstanceState;
import org.opentosca.container.core.next.model.ServiceTemplateInstance;
import org.opentosca.container.core.next.model.ServiceTemplateInstanceProperty;
import org.opentosca.container.core.next.model.ServiceTemplateInstanceState;
import org.opentosca.container.core.next.model.Situation;
import org.opentosca.container.core.next.model.SituationTrigger;
import org.opentosca.container.core.next.model.SituationTriggerInstance;
import org.opentosca.container.core.next.model.SituationTriggerProperty;
import org.opentosca.container.core.next.model.SituationsMonitor;
import org.opentosca.container.core.next.repository.NodeTemplateInstanceRepository;
import org.opentosca.container.core.next.repository.PlanInstanceRepository;
import org.opentosca.container.core.next.repository.RelationshipTemplateInstanceRepository;
import org.opentosca.container.core.next.repository.ServiceTemplateInstanceRepository;
import org.opentosca.container.core.next.repository.SituationRepository;
import org.opentosca.container.core.next.repository.SituationTriggerInstanceRepository;
import org.opentosca.container.core.next.repository.SituationTriggerRepository;
import org.opentosca.container.core.next.repository.SituationsMonitorRepository;
import org.opentosca.container.core.service.CsarStorageService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* Allows access to instance information for service templates and node templates.
*/
@Service
public class InstanceService {
private static final Logger logger = LoggerFactory.getLogger(InstanceService.class);
// JPA repositories for the three instance kinds; constructed directly rather
// than injected.
private final ServiceTemplateInstanceRepository serviceTemplateInstanceRepository =
    new ServiceTemplateInstanceRepository();
private final NodeTemplateInstanceRepository nodeTemplateInstanceRepository = new NodeTemplateInstanceRepository();
private final RelationshipTemplateInstanceRepository relationshipTemplateInstanceRepository =
    new RelationshipTemplateInstanceRepository();
// situations
private final SituationRepository sitRepo = new SituationRepository();
private final SituationTriggerRepository sitTrig = new SituationTriggerRepository();
private final SituationTriggerInstanceRepository sitTrigInst = new SituationTriggerInstanceRepository();
private final SituationsMonitorRepository situationsMonitorRepo = new SituationsMonitorRepository();
// Collaborating services, injected via the constructor below.
private final RelationshipTemplateService relationshipTemplateService;
private final NodeTemplateService nodeTemplateService;
private final ServiceTemplateService serviceTemplateService;
private final CsarStorageService storage;
// Converts property XML documents to/from their string database column form.
private final DocumentConverter converter = new DocumentConverter();
@Inject
public InstanceService(RelationshipTemplateService relationshipTemplateService, NodeTemplateService nodeTemplateService, ServiceTemplateService serviceTemplateService, CsarStorageService storage) {
    this.relationshipTemplateService = relationshipTemplateService;
    this.nodeTemplateService = nodeTemplateService;
    this.serviceTemplateService = serviceTemplateService;
    this.storage = storage;
}
// Parses a stored property's string value back into an XML Document.
public Document convertPropertyToDocument(final Property property) {
    return this.converter.convertToEntityAttribute(property.getValue());
}
/**
 * Converts an xml document to an xml-based property suitable for service or node template instances.
 *
 * @param propertyDoc XML properties document; must not be null
 * @param type        concrete Property subclass to instantiate (must have a
 *                    public no-arg constructor — instantiated reflectively)
 * @return a new property of the given type with name/type "xml" and the
 *         serialized document as its value
 * @throws IllegalArgumentException if {@code propertyDoc} is null
 * @throws InstantiationException   if {@code type} cannot be instantiated
 * @throws IllegalAccessException   if {@code type}'s constructor is inaccessible
 */
public <T extends Property> T convertDocumentToProperty(final Document propertyDoc,
                                                        final Class<T> type) throws InstantiationException,
                                                                             IllegalAccessException,
                                                                             IllegalArgumentException {
    if (propertyDoc == null) {
        final String msg =
            String.format("The set of parameters of an instance of type %s cannot be null", type.getName());
        logger.error(msg);
        throw new IllegalArgumentException(msg);
    }
    final String propertyAsString = this.converter.convertToDatabaseColumn(propertyDoc);
    // NOTE(review): Class.newInstance() is deprecated, but replacing it with
    // getDeclaredConstructor().newInstance() would change the checked
    // exceptions this method declares — left as-is.
    final T property = type.newInstance();
    property.setName("xml");
    property.setType("xml");
    property.setValue(propertyAsString);
    return property;
}
/* Service Template Instances */
// Returns all instances whose template id matches the given service template.
public Collection<ServiceTemplateInstance> getServiceTemplateInstances(final String serviceTemplate) {
    logger.debug("Requesting instances of ServiceTemplate \"{}\"...", serviceTemplate);
    return this.serviceTemplateInstanceRepository.findByTemplateId(serviceTemplate);
}
/**
 * Loads a service template instance by id.
 *
 * @param id                       instance id
 * @param evaluatePropertyMappings when true, resolve the template's boundary
 *                                 property mappings on the loaded instance
 * @return the instance
 * @throws NotFoundException if no instance with the given id exists
 */
public ServiceTemplateInstance getServiceTemplateInstance(final Long id, final boolean evaluatePropertyMappings) {
    logger.debug("Requesting service template instance <{}>...", id);
    final Optional<ServiceTemplateInstance> instance = this.serviceTemplateInstanceRepository.find(id);
    if (!instance.isPresent()) {
        logger.debug("Service Template Instance <" + id + "> not found.");
        throw new NotFoundException("Service Template Instance <" + id + "> not found.");
    }
    final ServiceTemplateInstance result = instance.get();
    if (evaluatePropertyMappings) {
        new PropertyMappingsHelper(this, storage).evaluatePropertyMappings(result);
    }
    return result;
}
// Returns the lifecycle state of the instance; throws NotFoundException via
// getServiceTemplateInstance if the id is unknown.
public ServiceTemplateInstanceState getServiceTemplateInstanceState(final Long id) {
    final ServiceTemplateInstance service = getServiceTemplateInstance(id, false);
    return service.getState();
}
/**
 * Updates the lifecycle state of a service template instance.
 *
 * @param id    instance id
 * @param state name of a {@link ServiceTemplateInstanceState} constant
 * @throws NotFoundException        if the instance does not exist
 * @throws IllegalArgumentException if {@code state} is not a valid state name
 */
public void setServiceTemplateInstanceState(final Long id, final String state) throws NotFoundException,
                                                                               IllegalArgumentException {
    // Validate the state name before touching the database.
    final ServiceTemplateInstanceState parsedState;
    try {
        parsedState = ServiceTemplateInstanceState.valueOf(state);
    } catch (final Exception e) {
        final String msg =
            String.format("The given state %s is an illegal service template instance state.", state);
        logger.error(msg, e);
        throw new IllegalArgumentException(msg, e);
    }
    final ServiceTemplateInstance instance = getServiceTemplateInstance(id, false);
    instance.setState(parsedState);
    this.serviceTemplateInstanceRepository.update(instance);
}
// Attaches the given XML document as a new "xml" property on the instance
// and persists the change. Throws NotFoundException if the id is unknown.
public void setServiceTemplateInstanceProperties(final Long id,
                                                 final Document properties) throws ReflectiveOperationException {
    final ServiceTemplateInstance service = getServiceTemplateInstance(id, false);
    try {
        final ServiceTemplateInstanceProperty property =
            this.convertDocumentToProperty(properties, ServiceTemplateInstanceProperty.class);
        service.addProperty(property);
        this.serviceTemplateInstanceRepository.update(service);
    } catch (InstantiationException | IllegalAccessException e) { // This is not supposed to happen at all!
        final String msg = String.format("An error occurred while instantiating an instance of the %s class.",
                                         ServiceTemplateInstanceProperty.class);
        logger.error(msg, e);
        throw e;
    }
}
// Deletes the instance with the given id.
public void deleteServiceTemplateInstance(final Long instanceId) {
    // throws exception if not found
    final ServiceTemplateInstance instance = getServiceTemplateInstance(instanceId, false);
    this.serviceTemplateInstanceRepository.remove(instance);
}
// Creates and persists a fresh service template instance in INITIAL state,
// seeded with the template's declared initial properties.
// Throws NotFoundException (via checkServiceTemplateExistence) if the CSAR or
// template does not exist.
public ServiceTemplateInstance createServiceTemplateInstance(final String csarId, final String serviceTemplateName) throws InstantiationException, IllegalAccessException, IllegalArgumentException {
    final CsarId csar = this.serviceTemplateService.checkServiceTemplateExistence(csarId, serviceTemplateName);
    final Document propertiesAsDoc =
        createServiceInstanceInitialPropertiesFromServiceTemplate(csar, serviceTemplateName);
    final ServiceTemplateInstanceProperty property =
        convertDocumentToProperty(propertiesAsDoc, ServiceTemplateInstanceProperty.class);
    final ServiceTemplateInstance instance = new ServiceTemplateInstance();
    instance.setCsarId(csar);
    instance.setTemplateId(serviceTemplateName);
    instance.setState(ServiceTemplateInstanceState.INITIAL);
    instance.addProperty(property);
    this.serviceTemplateInstanceRepository.add(instance);
    return instance;
}
/**
 * Creates a new service template instance and associates it with the plan instance
 * identified by the given correlation id. If no plan instance appears within the retry
 * window (e.g. live-modeling was started), an unassociated instance is created instead.
 *
 * @param csarId id of the CSAR containing the service template
 * @param serviceTemplateName name of the service template to instantiate
 * @param correlationId correlation id of the build/transformation plan instance
 * @return the newly persisted instance
 * @throws NotFoundException if the CSAR or service template does not exist
 * @throws IllegalArgumentException if the plan instance is already associated with a
 *         service template instance of the same service template
 */
public ServiceTemplateInstance createServiceTemplateInstance(final String csarId, final String serviceTemplateName,
final String correlationId) throws NotFoundException,
InstantiationException,
IllegalAccessException,
IllegalArgumentException {
    final CsarId csar = this.serviceTemplateService.checkServiceTemplateExistence(csarId, serviceTemplateName);
    final PlanInstanceRepository repository = new PlanInstanceRepository();
    PlanInstance pi = null;
    final int maxRetries = 20;
    for (int retries = 0; pi == null && retries < maxRetries; retries++) {
        pi = repository.findByCorrelationId(correlationId);
        if (pi == null) {
            // Pause briefly between retries; the original loop spun without any delay,
            // so all 20 lookups observed the same repository state.
            try {
                Thread.sleep(100);
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
        }
    }
    // if no instance was found it is possible that live-modeling was started, just create an empty instance
    if (pi == null) {
        return this.createServiceTemplateInstance(csarId, serviceTemplateName);
    }
    // If the found plan is a build plan there shouldn't be a service template instance available,
    // if it is a transformation plan the service instance mustn't be of the service template the new service instance should belong to
    final ServiceTemplateInstance associatedInstance = pi.getServiceTemplateInstance();
    if ((pi.getType().equals(PlanType.BUILD) && associatedInstance == null)
        || (pi.getType().equals(PlanType.TRANSFORMATION) && associatedInstance != null
            && !associatedInstance.getTemplateId().equals(serviceTemplateName))) {
        return this.createServiceTemplateInstance(csar, serviceTemplateName, pi);
    } else {
        // Fixed typo ("associted") in the original message.
        final String msg = "The build plan instance is already associated with a service template instance!";
        logger.error(msg);
        throw new IllegalArgumentException(msg);
    }
}
/**
 * Creates a new INITIAL service template instance linked to the given plan instance and
 * persists both sides of the association.
 *
 * @param csarId id of the CSAR containing the service template
 * @param serviceTemplateName name of the service template to instantiate
 * @param buildPlanInstance the build/transformation plan instance to link
 * @return the newly persisted instance
 */
private ServiceTemplateInstance createServiceTemplateInstance(final CsarId csarId, final String serviceTemplateName,
final PlanInstance buildPlanInstance) throws InstantiationException,
IllegalAccessException,
IllegalArgumentException {
    final Document initialProperties =
        createServiceInstanceInitialPropertiesFromServiceTemplate(csarId, serviceTemplateName);
    final ServiceTemplateInstanceProperty property =
        convertDocumentToProperty(initialProperties, ServiceTemplateInstanceProperty.class);
    final ServiceTemplateInstance newInstance = new ServiceTemplateInstance();
    newInstance.setCsarId(csarId);
    newInstance.setTemplateId(serviceTemplateName);
    newInstance.setState(ServiceTemplateInstanceState.INITIAL);
    newInstance.addProperty(property);
    newInstance.addPlanInstance(buildPlanInstance);
    newInstance.setCreationCorrelationId(buildPlanInstance.getCorrelationId());
    this.serviceTemplateInstanceRepository.add(newInstance);
    // Link the plan instance back to this service instance unless it is already linked.
    if (buildPlanInstance.getServiceTemplateInstance() == null) {
        buildPlanInstance.setServiceTemplateInstance(newInstance);
    }
    new PlanInstanceRepository().update(buildPlanInstance);
    return newInstance;
}
/**
 * Returns the initial properties for a new service template instance: the properties
 * declared for the service template, or a blank {@code tosca:Properties} document when
 * none are declared.
 *
 * @param csarId id of the CSAR containing the service template
 * @param serviceTemplateId id of the service template
 * @return a non-null properties document
 * @throws IllegalStateException if no XML DocumentBuilder can be created (not expected)
 */
private Document createServiceInstanceInitialPropertiesFromServiceTemplate(final CsarId csarId,
final String serviceTemplateId) {
    final Document existingProperties =
        this.serviceTemplateService.getPropertiesOfServiceTemplate(csarId, serviceTemplateId);
    if (existingProperties != null) {
        return existingProperties;
    }
    logger.debug("No Properties found in BoundaryDefinitions for ST {} thus creating blank ones",
        serviceTemplateId);
    final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    dbf.setNamespaceAware(true);
    try {
        final DocumentBuilder db = dbf.newDocumentBuilder();
        final Document doc = db.newDocument();
        final Element createElementNS =
            doc.createElementNS("http://docs.oasis-open.org/tosca/ns/2011/12", "Properties");
        createElementNS.setAttribute("xmlns:tosca", "http://docs.oasis-open.org/tosca/ns/2011/12");
        createElementNS.setPrefix("tosca");
        doc.appendChild(createElementNS);
        return doc;
    } catch (final ParserConfigurationException e) {
        // Previously this returned null, which caused an NPE later in the caller.
        // Fail fast instead, preserving the original cause.
        logger.error("Cannot create a new DocumentBuilder: {}", e.getMessage());
        throw new IllegalStateException("Cannot create a new DocumentBuilder", e);
    }
}
/* Node Template Instances */
/** Returns all instances of the node template with the given name. */
public Collection<NodeTemplateInstance> getNodeTemplateInstances(final String nodeTemplateName) {
    logger.debug("Requesting instances of NodeTemplate \"{}\"...", nodeTemplateName);
    return this.nodeTemplateInstanceRepository.findByTemplateId(nodeTemplateName);
}
/** Returns every node template instance known to the repository. */
public Collection<NodeTemplateInstance> getAllNodeTemplateInstances() {
    logger.debug("Requesting all NodeTemplate instances");
    return this.nodeTemplateInstanceRepository.findAll();
}
/**
 * Gets a reference to the node template instance, ensuring it actually belongs to the
 * given node template and service template.
 *
 * @throws NotFoundException if the instance does not belong to the node template
 */
public NodeTemplateInstance resolveNodeTemplateInstance(final String serviceTemplateName,
final String nodeTemplateId, final Long id) {
    // Only template membership needs checking here; the rest is guaranteed because
    // this is accessed as a sub-resource.
    final NodeTemplateInstance instance = getNodeTemplateInstance(id);
    final boolean belongsToTemplate = instance.getTemplateId().equals(nodeTemplateId)
        && instance.getServiceTemplateInstance().getTemplateId().equals(serviceTemplateName);
    if (!belongsToTemplate) {
        logger.error("Node template instance <{}> could not be found", id);
        throw new NotFoundException(String.format("Node template instance <%s> could not be found", id));
    }
    return instance;
}
/**
 * Returns the node template instance with the given id.
 *
 * @throws NotFoundException if no such instance exists
 */
public NodeTemplateInstance getNodeTemplateInstance(final Long id) {
    logger.debug("Requesting node template instance <{}>...", id);
    return this.nodeTemplateInstanceRepository.find(id).orElseThrow(() -> {
        logger.debug("Node Template Instance <" + id + "> not found.");
        return new NotFoundException("Node Template Instance <" + id + "> not found.");
    });
}
/** Returns the state of the given node template instance. */
public NodeTemplateInstanceState getNodeTemplateInstanceState(final String serviceTemplateQName,
final String nodeTemplateId, final Long id) {
    return resolveNodeTemplateInstance(serviceTemplateQName, nodeTemplateId, id).getState();
}
/**
 * Sets the state of a node template instance.
 *
 * @param state the name of a {@code NodeTemplateInstanceState} constant
 * @throws NotFoundException if the instance cannot be resolved
 * @throws IllegalArgumentException if the given state name is not a valid state
 */
public void setNodeTemplateInstanceState(final String serviceTemplateName, final String nodeTemplateId,
final Long id,
final String state) throws NotFoundException, IllegalArgumentException {
    final NodeTemplateInstanceState newState;
    try {
        newState = NodeTemplateInstanceState.valueOf(state);
    } catch (final IllegalArgumentException | NullPointerException e) {
        // valueOf only throws these two; don't catch Exception broadly.
        final String msg = String.format("The given state %s is an illegal node template instance state.", state);
        logger.error(msg, e);
        throw new IllegalArgumentException(msg, e);
    }
    final NodeTemplateInstance node = resolveNodeTemplateInstance(serviceTemplateName, nodeTemplateId, id);
    node.setState(newState);
    this.nodeTemplateInstanceRepository.update(node);
}
/**
 * Returns the first property document of the node template instance, or {@code null}
 * if the instance has no properties.
 *
 * @throws NotFoundException if the instance cannot be resolved
 */
public Document getNodeTemplateInstanceProperties(final String serviceTemplateQName, final String nodeTemplateId,
final Long id) throws NotFoundException {
    final NodeTemplateInstance node = resolveNodeTemplateInstance(serviceTemplateQName, nodeTemplateId, id);
    final Optional<NodeTemplateInstanceProperty> firstProp = node.getProperties().stream().findFirst();
    if (!firstProp.isPresent()) {
        logger.debug(String.format("No properties are found for the node template instance <%s>", id));
        return null;
    }
    return convertPropertyToDocument(firstProp.get());
}
/**
 * Adds the given XML document as a new property of the node template instance.
 *
 * @throws NotFoundException if the instance cannot be resolved
 * @throws ReflectiveOperationException if the property entity cannot be instantiated (not expected)
 */
public void setNodeTemplateInstanceProperties(final String serviceTemplateQName, final String nodeTemplateId,
final Long id,
final Document properties) throws ReflectiveOperationException {
    final NodeTemplateInstance node = resolveNodeTemplateInstance(serviceTemplateQName, nodeTemplateId, id);
    try {
        final NodeTemplateInstanceProperty prop =
            this.convertDocumentToProperty(properties, NodeTemplateInstanceProperty.class);
        node.addProperty(prop);
        this.nodeTemplateInstanceRepository.update(node);
    } catch (InstantiationException | IllegalAccessException e) {
        // Not expected: the property class has an accessible no-arg constructor.
        final String msg = String.format("An error occurred while instantiating an instance of the %s class.",
            NodeTemplateInstanceProperty.class);
        logger.error(msg, e);
        throw e;
    }
}
/**
 * Creates a new node template instance in state INITIAL for the given node template and
 * attaches it to the given service template instance.
 *
 * @param csarId id of the CSAR containing the service template
 * @param serviceTemplateNameAsString name of the service template
 * @param nodeTemplateId id of the node template to instantiate
 * @param serviceTemplateInstanceId id of the service template instance the new node instance belongs to
 * @return the persisted node template instance
 * @throws NotFoundException if the service template, node template, or service template instance cannot be found
 * @throws IllegalArgumentException if the service template instance does not belong to the service template
 */
public NodeTemplateInstance createNewNodeTemplateInstance(final String csarId,
final String serviceTemplateNameAsString,
final String nodeTemplateId,
final Long serviceTemplateInstanceId) throws InstantiationException,
IllegalAccessException,
IllegalArgumentException {
final Csar csar = storage.findById(new CsarId(csarId));
final TServiceTemplate serviceTemplate;
final TNodeTemplate nodeTemplate;
try {
serviceTemplate = ToscaEngine.resolveServiceTemplate(csar, serviceTemplateNameAsString);
nodeTemplate = ToscaEngine.resolveNodeTemplate(serviceTemplate, nodeTemplateId);
} catch (org.opentosca.container.core.common.NotFoundException e) {
// Translate the core engine's checked NotFoundException into the REST-layer one.
throw new NotFoundException(e.getMessage(), e);
}
final NodeTemplateDTO dto = nodeTemplateService.createNodeTemplate(nodeTemplate, csar);
final Document propertiesAsDocument = ToscaEngine.getEntityTemplateProperties(nodeTemplate);
// Properties
// We set the properties of the template as initial properties
final NodeTemplateInstance newInstance = new NodeTemplateInstance();
if (propertiesAsDocument != null) {
final NodeTemplateInstanceProperty properties =
this.convertDocumentToProperty(propertiesAsDocument, NodeTemplateInstanceProperty.class);
newInstance.addProperty(properties);
}
// State
newInstance.setState(NodeTemplateInstanceState.INITIAL);
// Template
newInstance.setTemplateId(nodeTemplate.getIdFromIdOrNameField());
// Type
newInstance.setTemplateType(QName.valueOf(dto.getNodeType()));
// ServiceTemplateInstance
final ServiceTemplateInstance serviceTemplateInstance = getServiceTemplateInstance(serviceTemplateInstanceId, false);
// only compare the local Id, because ServiceTemplateInstance does not keep the
// fully namespaced QName as the parent Id (which sucks, but it is what it is for now)
if (!serviceTemplateInstance.getTemplateId().equals(serviceTemplate.getIdFromIdOrNameField())) {
final String msg =
String.format("Service template instance id <%s> does not belong to service template: %s",
serviceTemplateInstanceId, serviceTemplate.getName());
logger.error(msg);
throw new IllegalArgumentException(msg);
}
newInstance.setServiceTemplateInstance(serviceTemplateInstance);
this.nodeTemplateInstanceRepository.add(newInstance);
return newInstance;
}
/**
 * Deletes the given node template instance.
 *
 * @throws NotFoundException if the instance cannot be resolved
 */
public void deleteNodeTemplateInstance(final String serviceTemplateQName, final String nodeTemplateId,
final Long id) {
    // Resolving first guarantees a NotFoundException for unknown ids.
    this.nodeTemplateInstanceRepository.remove(
        resolveNodeTemplateInstance(serviceTemplateQName, nodeTemplateId, id));
}
/* Relationship Template Instances */
/** Returns all instances of the relationship template with the given id. */
public Collection<RelationshipTemplateInstance> getRelationshipTemplateInstances(final String relationshipTemplateQName) {
    logger.debug("Requesting instances of RelationshipTemplate \"{}\"...", relationshipTemplateQName);
    return this.relationshipTemplateInstanceRepository.findByTemplateId(relationshipTemplateQName);
}
/**
 * Gets a reference to the relationship template instance, ensuring it actually belongs
 * to the given relationship template and service template.
 *
 * @throws NotFoundException if the instance does not belong to the relationship template
 */
public RelationshipTemplateInstance resolveRelationshipTemplateInstance(final String serviceTemplateName,
final String relationshipTemplateId,
final Long instanceId) throws NotFoundException {
    // Only template membership needs checking here; the rest is guaranteed because
    // this is accessed as a sub-resource.
    final RelationshipTemplateInstance instance = getRelationshipTemplateInstance(instanceId);
    final boolean belongsToTemplate = instance.getTemplateId().equals(relationshipTemplateId)
        && instance.getServiceTemplateInstance().getTemplateId().equals(serviceTemplateName);
    if (!belongsToTemplate) {
        logger.error("Relationship template instance <{}> could not be found", instanceId);
        throw new NotFoundException(
            String.format("Relationship template instance <%s> could not be found", instanceId));
    }
    return instance;
}
/**
 * Returns the relationship template instance with the given id.
 *
 * @throws NotFoundException if no such instance exists
 */
private RelationshipTemplateInstance getRelationshipTemplateInstance(final Long id) {
    logger.debug("Requesting relationship template instance <{}>...", id);
    return this.relationshipTemplateInstanceRepository.find(id).orElseThrow(() -> {
        logger.debug("Relationship Template Instance <" + id + "> not found.");
        return new NotFoundException("Relationship Template Instance <" + id + "> not found.");
    });
}
/** Returns the state of the given relationship template instance. */
public RelationshipTemplateInstanceState getRelationshipTemplateInstanceState(final String serviceTemplateQName,
final String relationshipTemplateId,
final Long id) {
    return resolveRelationshipTemplateInstance(serviceTemplateQName, relationshipTemplateId, id).getState();
}
/**
 * Sets the state of a relationship template instance.
 *
 * @param state the name of a {@code RelationshipTemplateInstanceState} constant
 * @throws NotFoundException if the instance cannot be resolved
 * @throws IllegalArgumentException if the given state name is not a valid state
 */
public void setRelationshipTemplateInstanceState(final String serviceTemplateQName,
final String relationshipTemplateId, final Long id,
final String state) throws NotFoundException,
IllegalArgumentException {
    final RelationshipTemplateInstanceState newState;
    try {
        newState = RelationshipTemplateInstanceState.valueOf(state);
    } catch (final IllegalArgumentException | NullPointerException e) {
        // valueOf only throws these two; don't catch Exception broadly.
        final String msg =
            String.format("The given state %s is an illegal relationship template instance state.", state);
        logger.error(msg, e);
        throw new IllegalArgumentException(msg, e);
    }
    final RelationshipTemplateInstance relationship =
        resolveRelationshipTemplateInstance(serviceTemplateQName, relationshipTemplateId, id);
    relationship.setState(newState);
    this.relationshipTemplateInstanceRepository.update(relationship);
}
/**
 * Returns the first property document of the relationship template instance, or
 * {@code null} if the instance has no properties.
 *
 * @throws NotFoundException if the instance cannot be resolved
 */
public Document getRelationshipTemplateInstanceProperties(final String serviceTemplateQName,
final String relationshipTemplateId,
final Long id) throws NotFoundException {
    final RelationshipTemplateInstance relationship =
        resolveRelationshipTemplateInstance(serviceTemplateQName, relationshipTemplateId, id);
    final Optional<RelationshipTemplateInstanceProperty> firstProp =
        relationship.getProperties().stream().findFirst();
    if (!firstProp.isPresent()) {
        logger.debug(String.format("No properties are found for the relationship template instance <%s>", id));
        return null;
    }
    return convertPropertyToDocument(firstProp.get());
}
/**
 * Adds the given XML document as a new property of the relationship template instance.
 *
 * @throws NotFoundException if the instance cannot be resolved
 * @throws ReflectiveOperationException if the property entity cannot be instantiated (not expected)
 */
public void setRelationshipTemplateInstanceProperties(final String serviceTemplateQName,
final String relationshipTemplateId, final Long id,
final Document properties) throws ReflectiveOperationException {
    final RelationshipTemplateInstance relationship =
        resolveRelationshipTemplateInstance(serviceTemplateQName, relationshipTemplateId, id);
    try {
        final RelationshipTemplateInstanceProperty prop =
            this.convertDocumentToProperty(properties, RelationshipTemplateInstanceProperty.class);
        relationship.addProperty(prop);
        this.relationshipTemplateInstanceRepository.update(relationship);
    } catch (InstantiationException | IllegalAccessException e) {
        // Not expected: the property class has an accessible no-arg constructor.
        final String msg = String.format("An error occurred while instantiating an instance of the %s class.",
            RelationshipTemplateInstanceProperty.class);
        logger.error(msg, e);
        throw e;
    }
}
/**
 * Creates a new relationship template instance in state INITIAL between the source and
 * target node template instances named in the request.
 *
 * @param csarId id of the CSAR containing the service template
 * @param serviceTemplateName name of the service template
 * @param relationshipTemplateId id of the relationship template to instantiate
 * @param request carries source/target node instance ids and the service instance id
 * @return the persisted relationship template instance
 * @throws IllegalArgumentException if the request is null or missing required ids
 * @throws NotFoundException if a referenced template or instance cannot be found
 */
public RelationshipTemplateInstance createNewRelationshipTemplateInstance(final String csarId,
final String serviceTemplateName,
final String relationshipTemplateId,
final CreateRelationshipTemplateInstanceRequest request) throws InstantiationException,
IllegalAccessException,
IllegalArgumentException {
    if (request == null || request.getSourceNodeTemplateInstanceId() == null
        || request.getTargetNodeTemplateInstanceId() == null) {
        final String msg = "Relationship template instance creation request is empty or missing content";
        logger.error(msg);
        throw new IllegalArgumentException(msg);
    }
    final Csar csar = storage.findById(new CsarId(csarId));
    final TServiceTemplate serviceTemplate;
    final TRelationshipTemplate relationshipTemplate;
    try {
        serviceTemplate = ToscaEngine.resolveServiceTemplate(csar, serviceTemplateName);
        relationshipTemplate = ToscaEngine.resolveRelationshipTemplate(serviceTemplate, relationshipTemplateId);
    } catch (org.opentosca.container.core.common.NotFoundException e) {
        // Translate the core engine's checked NotFoundException into the REST-layer one.
        throw new NotFoundException(e.getMessage(), e);
    }
    final RelationshipTemplateInstance newInstance = new RelationshipTemplateInstance();
    final RelationshipTemplateDTO dto =
        this.relationshipTemplateService.getRelationshipTemplateById(csarId, serviceTemplateName,
                                                                     relationshipTemplateId);
    // Properties: use the template's own properties as the initial instance properties.
    final Document propertiesAsDocument =
        ToscaEngine.getEntityTemplateProperties(relationshipTemplate);
    if (propertiesAsDocument != null) {
        final RelationshipTemplateInstanceProperty properties =
            this.convertDocumentToProperty(propertiesAsDocument, RelationshipTemplateInstanceProperty.class);
        newInstance.addProperty(properties);
    }
    // State
    newInstance.setState(RelationshipTemplateInstanceState.INITIAL);
    // Template
    newInstance.setTemplateId(relationshipTemplateId);
    // Type
    newInstance.setTemplateType(QName.valueOf(dto.getRelationshipType()));
    // Source node instance
    newInstance.setSource(getNodeTemplateInstance(request.getSourceNodeTemplateInstanceId()));
    // Target node instance
    newInstance.setTarget(getNodeTemplateInstance(request.getTargetNodeTemplateInstanceId()));
    // Fail with a proper NotFoundException instead of the NoSuchElementException that
    // Optional.get() used to throw when the service template instance does not exist.
    newInstance.setServiceTemplateInstance(
        serviceTemplateInstanceRepository.find(request.getServiceInstanceId())
            .orElseThrow(() -> new NotFoundException(
                "Service Template Instance <" + request.getServiceInstanceId() + "> not found.")));
    this.relationshipTemplateInstanceRepository.add(newInstance);
    return newInstance;
}
/**
 * Deletes the given relationship template instance.
 *
 * @throws NotFoundException if the instance cannot be resolved
 */
public void deleteRelationshipTemplateInstance(final String serviceTemplateQName,
final String relationshipTemplateId, final Long instanceId) {
    // Resolving first guarantees a NotFoundException for unknown ids.
    this.relationshipTemplateInstanceRepository.remove(
        resolveRelationshipTemplateInstance(serviceTemplateQName, relationshipTemplateId, instanceId));
}
/* Situations */
/** Creates and persists a new situation for the given thing/template combination. */
public Situation createNewSituation(final String thingId, final String situationTemplateId, final boolean active,
final float eventProbability, final String eventTime) {
    final Situation situation = new Situation();
    situation.setSituationTemplateId(situationTemplateId);
    situation.setThingId(thingId);
    situation.setActive(active);
    situation.setEventProbability(eventProbability);
    situation.setEventTime(eventTime);
    this.sitRepo.add(situation);
    return situation;
}
/**
 * Returns the situation with the given id.
 *
 * @throws NotFoundException if no such situation exists
 */
public Situation getSituation(final Long id) {
    return this.sitRepo.find(id)
        .orElseThrow(() -> new NotFoundException("Situation <" + id + "> not found."));
}
/** Returns all known situations. */
public Collection<Situation> getSituations() {
    return this.sitRepo.findAll();
}
/**
 * Removes the situation unless situation triggers still reference it.
 *
 * @return {@code true} if the situation was removed (or did not exist),
 *         {@code false} if triggers still reference it
 */
public boolean removeSituation(final Long situationId) {
    final boolean stillReferenced =
        !this.sitTrig.findSituationTriggersBySituationId(situationId).isEmpty();
    if (stillReferenced) {
        return false;
    }
    this.sitRepo.find(situationId).ifPresent(this.sitRepo::remove);
    return true;
}
/** Returns all known situation triggers. */
public Collection<SituationTrigger> getSituationTriggers() {
    return this.sitTrig.findAll();
}
/**
 * Creates and persists a new situation trigger.
 *
 * <p>{@code serviceInstance} and {@code nodeInstance} are optional; an
 * {@code eventProbability} of {@code -1.0f} and an {@code eventTime} of {@code null}
 * mean "not set".
 *
 * @return the persisted trigger
 */
public SituationTrigger createNewSituationTrigger(final Collection<Situation> situations, final CsarId csarId,
final boolean triggerOnActivation, final boolean isSingleInstance,
final ServiceTemplateInstance serviceInstance,
final NodeTemplateInstance nodeInstance,
final String interfaceName, final String operationName,
final Set<SituationTriggerProperty> inputs,
final float eventProbability, final String eventTime) {
    final SituationTrigger trigger = new SituationTrigger();
    trigger.setSituations(situations);
    trigger.setCsarId(csarId);
    trigger.setTriggerOnActivation(triggerOnActivation);
    trigger.setSingleInstance(isSingleInstance);
    // Service and node instances are optional associations.
    if (serviceInstance != null) {
        trigger.setServiceInstance(serviceInstance);
    }
    trigger.setInterfaceName(interfaceName);
    trigger.setOperationName(operationName);
    if (nodeInstance != null) {
        trigger.setNodeInstance(nodeInstance);
    }
    // Establish the back-reference from each input property to this trigger.
    for (final SituationTriggerProperty input : inputs) {
        input.setSituationTrigger(trigger);
    }
    trigger.setInputs(inputs);
    // -1.0f is the sentinel for "no event probability given".
    if (eventProbability != -1.0f) {
        trigger.setEventProbability(eventProbability);
    }
    if (eventTime != null) {
        trigger.setEventTime(eventTime);
    }
    this.sitTrig.add(trigger);
    return trigger;
}
/**
 * Returns the situation trigger with the given id.
 *
 * @throws NotFoundException if no such trigger exists
 */
public SituationTrigger getSituationTrigger(final Long id) {
    return this.sitTrig.find(id)
        .orElseThrow(() -> new NotFoundException("SituationTrigger <" + id + "> not found."));
}
/** Removes the situation trigger and all of its trigger instances. */
public void removeSituationTrigger(Long situationTriggerId) {
    // Delete dependent trigger instances first, then the trigger itself (if present).
    this.sitTrigInst.findBySituationTriggerId(situationTriggerId).forEach(this.sitTrigInst::remove);
    this.sitTrig.find(situationTriggerId).ifPresent(this.sitTrig::remove);
}
/** Persists changes made to the given situation. */
public void updateSituation(final Situation situation) {
    this.sitRepo.update(situation);
}
/**
 * Returns the situation trigger instance with the given id.
 *
 * @throws NotFoundException if no such instance exists
 */
public SituationTriggerInstance getSituationTriggerInstance(final Long id) {
    // Throw NotFoundException for consistency with the other lookup methods here
    // (previously a plain RuntimeException, which REST layers cannot map to 404;
    // NotFoundException is a RuntimeException subtype, so callers remain compatible).
    return this.sitTrigInst.find(id)
        .orElseThrow(() -> new NotFoundException("SituationTriggerInstance <" + id + "> not found."));
}
/** Creates and persists a monitor that watches the given situations for a service instance. */
public SituationsMonitor createNewSituationsMonitor(final ServiceTemplateInstance instance,
final Map<String, Collection<Long>> situations) {
    final SituationsMonitor newMonitor = new SituationsMonitor();
    newMonitor.setServiceInstance(instance);
    newMonitor.setNode2Situations(situations);
    this.situationsMonitorRepo.add(newMonitor);
    return newMonitor;
}
/** Returns all known situations monitors. */
public Collection<SituationsMonitor> getSituationsMonitors() {
    return this.situationsMonitorRepo.findAll();
}
/** Returns the situations monitors attached to the given service template instance. */
public Collection<SituationsMonitor> getSituationsMonitors(final Long serviceInstanceID) {
    return this.getSituationsMonitors().stream()
        .filter(monitor -> {
            final ServiceTemplateInstance serviceInstance = monitor.getServiceInstance();
            return serviceInstance != null && serviceInstance.getId().equals(serviceInstanceID);
        })
        .collect(Collectors.toList());
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.eventbus.EventBus;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ArtifactFactory;
import com.google.devtools.build.lib.actions.ArtifactOwner;
import com.google.devtools.build.lib.actions.ArtifactPrefixConflictException;
import com.google.devtools.build.lib.actions.MutableActionGraph;
import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException;
import com.google.devtools.build.lib.analysis.AnalysisFailureEvent;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.analysis.BuildView;
import com.google.devtools.build.lib.analysis.CachingAnalysisEnvironment;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.ConfiguredTargetFactory;
import com.google.devtools.build.lib.analysis.LabelAndConfiguration;
import com.google.devtools.build.lib.analysis.ViewCreationFailedException;
import com.google.devtools.build.lib.analysis.buildinfo.BuildInfoFactory;
import com.google.devtools.build.lib.analysis.buildinfo.BuildInfoFactory.BuildInfoKey;
import com.google.devtools.build.lib.analysis.config.BinTools;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationCollection;
import com.google.devtools.build.lib.analysis.config.ConfigMatchingProvider;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.Package;
import com.google.devtools.build.lib.packages.RuleClassProvider;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.pkgcache.LoadingPhaseRunner;
import com.google.devtools.build.lib.skyframe.ActionLookupValue.ActionLookupKey;
import com.google.devtools.build.lib.skyframe.AspectFunction.AspectCreationException;
import com.google.devtools.build.lib.skyframe.AspectValue.AspectValueKey;
import com.google.devtools.build.lib.skyframe.BuildInfoCollectionValue.BuildInfoKeyAndConfig;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetFunction.ConfiguredValueCreationException;
import com.google.devtools.build.lib.skyframe.SkyframeActionExecutor.ConflictException;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.skyframe.CycleInfo;
import com.google.devtools.build.skyframe.ErrorInfo;
import com.google.devtools.build.skyframe.EvaluationProgressReceiver;
import com.google.devtools.build.skyframe.EvaluationResult;
import com.google.devtools.build.skyframe.SkyFunction.Environment;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
import javax.annotation.Nullable;
/**
* Skyframe-based driver of analysis.
*
* <p>Covers enough functionality to work as a substitute for {@code BuildView#configureTargets}.
*/
public final class SkyframeBuildView {
    // Loggers should be static final: the reference is never reassigned.
    private static final Logger LOG = Logger.getLogger(BuildView.class.getName());
    private final ConfiguredTargetFactory factory;
    private final ArtifactFactory artifactFactory;
    private final SkyframeExecutor skyframeExecutor;
    private final BinTools binTools;
    private boolean enableAnalysis = false;
    // This hack allows us to see when a configured target has been invalidated, and thus when the
    // set of artifact conflicts needs to be recomputed (whenever a configured target has been
    // invalidated or newly evaluated).
    private final EvaluationProgressReceiver invalidationReceiver =
        new ConfiguredTargetValueInvalidationReceiver();
    private final Set<SkyKey> evaluatedConfiguredTargets = Sets.newConcurrentHashSet();
    // Used to see if checks of graph consistency need to be done after analysis.
    private volatile boolean someConfiguredTargetEvaluated = false;
    // We keep the set of invalidated configuration target keys so that we can know if something
    // has been invalidated after graph pruning has been executed.
    private Set<SkyKey> dirtiedConfiguredTargetKeys = Sets.newConcurrentHashSet();
    private volatile boolean anyConfiguredTargetDeleted = false;
    private final RuleClassProvider ruleClassProvider;
    // The host configuration containing all fragments used by this build's transitive closure.
    private BuildConfiguration topLevelHostConfiguration;
    // Fragment-limited versions of the host configuration. It's faster to create/cache these here
    // than to store them in Skyframe.
    private Map<Set<Class<? extends BuildConfiguration.Fragment>>, BuildConfiguration>
        hostConfigurationCache = Maps.newConcurrentMap();
    private BuildConfigurationCollection configurations;
    /**
     * If the last build was executed with {@code Options#discard_analysis_cache} and we are not
     * running Skyframe full, we should clear the legacy data since it is out-of-sync.
     */
    private boolean skyframeAnalysisWasDiscarded;
/**
 * Creates the Skyframe-based analysis driver.
 *
 * @param directories supplies the execution root for the artifact factory
 * @param skyframeExecutor executor driving the Skyframe evaluations
 * @param binTools embedded binary tools
 * @param ruleClassProvider provider of the configured rule classes
 */
public SkyframeBuildView(BlazeDirectories directories,
SkyframeExecutor skyframeExecutor, BinTools binTools,
ConfiguredRuleClassProvider ruleClassProvider) {
    this.skyframeExecutor = skyframeExecutor;
    this.binTools = binTools;
    this.ruleClassProvider = ruleClassProvider;
    this.factory = new ConfiguredTargetFactory(ruleClassProvider);
    this.artifactFactory = new ArtifactFactory(directories.getExecRoot());
}
/** Clears the record of configured-target keys evaluated since the last reset. */
public void resetEvaluatedConfiguredTargetKeysSet() {
    evaluatedConfiguredTargets.clear();
}
/** Returns an immutable snapshot of the configured-target keys evaluated so far. */
public Set<SkyKey> getEvaluatedTargetKeys() {
    return ImmutableSet.copyOf(evaluatedConfiguredTargets);
}
/** Returns the factory used to construct configured targets. */
ConfiguredTargetFactory getConfiguredTargetFactory() {
    return factory;
}
/**
 * Sets the configurations. Not thread-safe. DO NOT CALL except from tests!
 */
@VisibleForTesting
public void setConfigurations(BuildConfigurationCollection configurations) {
    // Drop all cached ConfiguredTargets when the configuration changes or when the previous
    // build ran with --discard_analysis_cache. In the former case this is not required for
    // correctness, but it prevents unbounded memory usage.
    boolean configurationsChanged =
        this.configurations != null && !configurations.equals(this.configurations);
    if (configurationsChanged || skyframeAnalysisWasDiscarded) {
        LOG.info("Discarding analysis cache: configurations have changed.");
        skyframeExecutor.dropConfiguredTargets();
    }
    skyframeAnalysisWasDiscarded = false;
    this.configurations = configurations;
    setTopLevelHostConfiguration(configurations.getHostConfiguration());
}
/**
 * Sets the host configuration consisting of all fragments that will be used by the top level
 * targets' transitive closures.
 *
 * <p>This is used to power {@link #getHostConfiguration} during analysis, which computes
 * fragment-trimmed host configurations from the top-level one.
 */
private void setTopLevelHostConfiguration(BuildConfiguration topLevelHostConfiguration) {
    if (!topLevelHostConfiguration.equals(this.topLevelHostConfiguration)) {
        // A new host configuration invalidates every fragment-trimmed version derived
        // from the previous one.
        hostConfigurationCache.clear();
        this.topLevelHostConfiguration = topLevelHostConfiguration;
    }
}
/**
 * Drops the analysis cache. If building with Skyframe, targets in {@code topLevelTargets} may
 * remain in the cache for use during the execution phase.
 *
 * @see com.google.devtools.build.lib.analysis.BuildView.Options#discardAnalysisCache
 */
public void clearAnalysisCache(Collection<ConfiguredTarget> topLevelTargets) {
    // TODO(bazel-team): Consider clearing packages too to save more memory.
    // Remember the discard so the next setConfigurations() call drops stale legacy data.
    skyframeAnalysisWasDiscarded = true;
    skyframeExecutor.clearAnalysisCache(topLevelTargets);
}
/**
 * Assigns artifact owners to deserialized artifacts (e.g. those read from an FDO profile)
 * by matching their exec paths against the outputs of all known actions.
 *
 * @throws ViewCreationFailedException if some deserialized artifact has no generating action
 */
private void setDeserializedArtifactOwners() throws ViewCreationFailedException {
    Map<PathFragment, Artifact> deserializedArtifactMap =
        artifactFactory.getDeserializedArtifacts();
    Set<Artifact> deserializedArtifacts = new HashSet<>();
    // gcda files are classified as generated artifacts, but are not actually generated.
    // All others need owners.
    for (Artifact artifact : deserializedArtifactMap.values()) {
        if (!artifact.getExecPath().getBaseName().endsWith(".gcda")) {
            deserializedArtifacts.add(artifact);
        }
    }
    if (deserializedArtifacts.isEmpty()) {
        // Nothing to assign; skip the graph iteration entirely.
        return;
    }
    for (Map.Entry<SkyKey, ActionLookupValue> entry :
        skyframeExecutor.getActionLookupValueMap().entrySet()) {
        ActionLookupKey owner = (ActionLookupKey) entry.getKey().argument();
        for (Action action : entry.getValue().getActionsForFindingArtifactOwners()) {
            for (Artifact output : action.getOutputs()) {
                Artifact match = deserializedArtifactMap.get(output.getExecPath());
                if (match != null) {
                    match.setArtifactOwner(owner);
                    deserializedArtifacts.remove(match);
                }
            }
        }
    }
    if (!deserializedArtifacts.isEmpty()) {
        throw new ViewCreationFailedException("These artifacts were read in from the FDO profile but"
            + " have no generating action that could be found. If you are confident that your profile was"
            + " collected from the same source state at which you're building, please report this:\n"
            + Artifact.asExecPaths(deserializedArtifacts));
    }
    artifactFactory.clearDeserializedArtifacts();
}
/**
 * Analyzes the specified targets using Skyframe as the driving framework.
 *
 * @param eventHandler receives warnings/errors produced during analysis
 * @param values the top-level configured-target keys to analyze, in deterministic order
 * @param aspectKeys top-level aspects to apply
 * @param eventBus receives {@code AnalysisFailureEvent}s in keep_going mode
 * @param keepGoing if false, the first error aborts with ViewCreationFailedException
 * @return the configured targets that should be built along with a WalkableGraph of the analysis.
 */
public SkyframeAnalysisResult configureTargets(
    EventHandler eventHandler,
    List<ConfiguredTargetKey> values,
    List<AspectValueKey> aspectKeys,
    EventBus eventBus,
    boolean keepGoing)
    throws InterruptedException, ViewCreationFailedException {
  // Configured-target creation is only legal while this flag is set; see enableAnalysis().
  enableAnalysis(true);
  EvaluationResult<ActionLookupValue> result;
  try {
    result = skyframeExecutor.configureTargets(eventHandler, values, aspectKeys, keepGoing);
  } finally {
    enableAnalysis(false);
  }
  ImmutableMap<Action, ConflictException> badActions = skyframeExecutor.findArtifactConflicts();
  Collection<AspectValue> goodAspects = Lists.newArrayListWithCapacity(values.size());
  // Transitive packages are accumulated across both aspects and configured targets.
  NestedSetBuilder<Package> packages = NestedSetBuilder.stableOrder();
  for (AspectValueKey aspectKey : aspectKeys) {
    AspectValue value = (AspectValue) result.get(AspectValue.key(aspectKey));
    if (value == null) {
      // Skip aspects that couldn't be applied to targets.
      continue;
    }
    goodAspects.add(value);
    packages.addTransitive(value.getTransitivePackages());
  }
  // Filter out all CTs that have a bad action and convert to a list of configured targets. This
  // code ensures that the resulting list of configured targets has the same order as the incoming
  // list of values, i.e., that the order is deterministic.
  Collection<ConfiguredTarget> goodCts = Lists.newArrayListWithCapacity(values.size());
  for (ConfiguredTargetKey value : values) {
    ConfiguredTargetValue ctValue =
        (ConfiguredTargetValue) result.get(ConfiguredTargetValue.key(value));
    if (ctValue == null) {
      continue;
    }
    goodCts.add(ctValue.getConfiguredTarget());
    packages.addTransitive(ctValue.getTransitivePackages());
  }
  if (!result.hasError() && badActions.isEmpty()) {
    // Fast path: analysis fully succeeded.
    setDeserializedArtifactOwners();
    return new SkyframeAnalysisResult(
        ImmutableList.copyOf(goodCts),
        result.getWalkableGraph(),
        ImmutableList.copyOf(goodAspects),
        LoadingPhaseRunner.collectPackageRoots(packages.build().toCollection()));
  }
  // --nokeep_going so we fail with an exception for the first error.
  // TODO(bazel-team): We might want to report the other errors through the event bus but
  // for keeping this code in parity with legacy we just report the first error for now.
  if (!keepGoing) {
    // Only the first bad action is inspected: both catch branches end in a throw below.
    for (Map.Entry<Action, ConflictException> bad : badActions.entrySet()) {
      ConflictException ex = bad.getValue();
      try {
        ex.rethrowTyped();
      } catch (MutableActionGraph.ActionConflictException ace) {
        ace.reportTo(eventHandler);
        String errorMsg = "Analysis of target '" + bad.getKey().getOwner().getLabel()
            + "' failed; build aborted";
        throw new ViewCreationFailedException(errorMsg);
      } catch (ArtifactPrefixConflictException apce) {
        eventHandler.handle(Event.error(apce.getMessage()));
      }
      // Reached only on the prefix-conflict branch, which does not throw above.
      throw new ViewCreationFailedException(ex.getMessage());
    }
    // No bad actions: surface the first Skyframe error.
    Map.Entry<SkyKey, ErrorInfo> error = result.errorMap().entrySet().iterator().next();
    SkyKey topLevel = error.getKey();
    ErrorInfo errorInfo = error.getValue();
    assertSaneAnalysisError(errorInfo, topLevel);
    skyframeExecutor.getCyclesReporter().reportCycles(errorInfo.getCycleInfo(), topLevel,
        eventHandler);
    Throwable cause = errorInfo.getException();
    // An error must carry either an exception or cycle information.
    Preconditions.checkState(cause != null || !Iterables.isEmpty(errorInfo.getCycleInfo()),
        errorInfo);
    String errorMsg = null;
    if (topLevel.argument() instanceof ConfiguredTargetKey) {
      errorMsg =
          "Analysis of target '"
              + ConfiguredTargetValue.extractLabel(topLevel)
              + "' failed; build aborted";
    } else if (topLevel.argument() instanceof AspectValueKey) {
      AspectValueKey aspectKey = (AspectValueKey) topLevel.argument();
      errorMsg = "Analysis of aspect '" + aspectKey.getDescription() + "' failed; build aborted";
    } else {
      assert false;
    }
    if (cause instanceof ActionConflictException) {
      ((ActionConflictException) cause).reportTo(eventHandler);
    }
    throw new ViewCreationFailedException(errorMsg);
  }
  // --keep_going : We notify the error and return a ConfiguredTargetValue
  for (Map.Entry<SkyKey, ErrorInfo> errorEntry : result.errorMap().entrySet()) {
    // Only report errors for keys the caller actually requested.
    if (values.contains(errorEntry.getKey().argument())) {
      SkyKey errorKey = errorEntry.getKey();
      ConfiguredTargetKey label = (ConfiguredTargetKey) errorKey.argument();
      ErrorInfo errorInfo = errorEntry.getValue();
      assertSaneAnalysisError(errorInfo, errorKey);
      skyframeExecutor.getCyclesReporter().reportCycles(errorInfo.getCycleInfo(), errorKey,
          eventHandler);
      // We try to get the root cause key first from ErrorInfo rootCauses. If we don't have one
      // we try to use the cycle culprit if the error is a cycle. Otherwise we use the top-level
      // error key.
      Label root;
      if (!Iterables.isEmpty(errorEntry.getValue().getRootCauses())) {
        SkyKey culprit = Preconditions.checkNotNull(Iterables.getFirst(
            errorEntry.getValue().getRootCauses(), null));
        root = ((ConfiguredTargetKey) culprit.argument()).getLabel();
      } else {
        root = maybeGetConfiguredTargetCycleCulprit(errorInfo.getCycleInfo());
      }
      Exception cause = errorInfo.getException();
      if (cause instanceof ActionConflictException) {
        ((ActionConflictException) cause).reportTo(eventHandler);
      }
      eventHandler.handle(
          Event.warn("errors encountered while analyzing target '"
              + label.getLabel() + "': it will not be built"));
      eventBus.post(new AnalysisFailureEvent(
          LabelAndConfiguration.of(label.getLabel(), label.getConfiguration()), root));
    }
  }
  // Deduplicates prefix-conflict exceptions so each message is emitted once.
  Collection<Exception> reportedExceptions = Sets.newHashSet();
  for (Map.Entry<Action, ConflictException> bad : badActions.entrySet()) {
    ConflictException ex = bad.getValue();
    try {
      ex.rethrowTyped();
    } catch (MutableActionGraph.ActionConflictException ace) {
      ace.reportTo(eventHandler);
      eventHandler
          .handle(Event.warn("errors encountered while analyzing target '"
              + bad.getKey().getOwner().getLabel() + "': it will not be built"));
    } catch (ArtifactPrefixConflictException apce) {
      if (reportedExceptions.add(apce)) {
        eventHandler.handle(Event.error(apce.getMessage()));
      }
    }
  }
  if (!badActions.isEmpty()) {
    // In order to determine the set of configured targets transitively error free from action
    // conflict issues, we run a post-processing update() that uses the bad action map.
    EvaluationResult<PostConfiguredTargetValue> actionConflictResult =
        skyframeExecutor.postConfigureTargets(eventHandler, values, keepGoing, badActions);
    goodCts = Lists.newArrayListWithCapacity(values.size());
    for (ConfiguredTargetKey value : values) {
      PostConfiguredTargetValue postCt =
          actionConflictResult.get(PostConfiguredTargetValue.key(value));
      if (postCt != null) {
        goodCts.add(postCt.getCt());
      }
    }
  }
  setDeserializedArtifactOwners();
  return new SkyframeAnalysisResult(
      ImmutableList.copyOf(goodCts),
      result.getWalkableGraph(),
      ImmutableList.copyOf(goodAspects),
      LoadingPhaseRunner.collectPackageRoots(packages.build().toCollection()));
}
@Nullable
Label maybeGetConfiguredTargetCycleCulprit(Iterable<CycleInfo> cycleInfos) {
  // Scan each reported cycle; the first key on a cycle that is a configured-target node
  // identifies the label blamed for the cycle. Null when no such node exists.
  for (CycleInfo info : cycleInfos) {
    SkyKey firstKey = Iterables.getFirst(info.getCycle(), null);
    if (firstKey != null && firstKey.functionName().equals(SkyFunctions.CONFIGURED_TARGET)) {
      return ((LabelAndConfiguration) firstKey.argument()).getLabel();
    }
  }
  return null;
}
// Validates that an analysis-phase ErrorInfo carries only exception types that are legal
// during analysis (loading is known to have succeeded at this point).
private static void assertSaneAnalysisError(ErrorInfo errorInfo, SkyKey key) {
  Throwable cause = errorInfo.getException();
  if (cause == null) {
    return;
  }
  // We should only be trying to configure targets when the loading phase succeeds, meaning
  // that the only errors should be analysis errors.
  boolean isAnalysisError =
      cause instanceof ConfiguredValueCreationException
          || cause instanceof AspectCreationException // for top-level aspects
          || cause instanceof ActionConflictException;
  Preconditions.checkState(isAnalysisError, "%s -> %s", key, errorInfo);
}
/** Returns the factory used to create artifacts during analysis. */
public ArtifactFactory getArtifactFactory() {
  return artifactFactory;
}
/**
 * Because we don't know what build-info artifacts this configured target may request, we
 * conservatively register a dep on all of them.
 *
 * <p>Returns false when any of the requested Skyframe values are not yet available.
 */
// TODO(bazel-team): Allow analysis to return null so the value builder can exit and wait for a
// restart deps are not present.
private boolean getWorkspaceStatusValues(Environment env, BuildConfiguration config) {
  env.getValue(WorkspaceStatusValue.SKY_KEY);
  Map<BuildInfoKey, BuildInfoFactory> buildInfoFactories =
      PrecomputedValue.BUILD_INFO_FACTORIES.get(env);
  if (buildInfoFactories == null) {
    // Precomputed value not available yet; caller treats this as "values missing".
    return false;
  }
  // These factories may each create their own build info artifacts, all depending on the basic
  // build-info.txt and build-changelist.txt.
  List<SkyKey> depKeys = Lists.newArrayList();
  for (BuildInfoKey key : buildInfoFactories.keySet()) {
    if (buildInfoFactories.get(key).isEnabled(config)) {
      depKeys.add(BuildInfoCollectionValue.key(new BuildInfoKeyAndConfig(key, config)));
    }
  }
  env.getValues(depKeys);
  // True only when every requested dep is already present in the graph.
  return !env.valuesMissing();
}
/** Returns null if any build-info values are not ready. */
@Nullable
CachingAnalysisEnvironment createAnalysisEnvironment(ArtifactOwner owner,
    boolean isSystemEnv, EventHandler eventHandler,
    Environment env, BuildConfiguration config) {
  // A null config skips the build-info dependency registration entirely.
  if (config != null && !getWorkspaceStatusValues(env, config)) {
    return null;
  }
  boolean extendedSanityChecks = config != null && config.extendedSanityChecks();
  boolean allowRegisteringActions = config == null || config.isActionsEnabled();
  return new CachingAnalysisEnvironment(
      artifactFactory, owner, isSystemEnv, extendedSanityChecks, eventHandler, env,
      allowRegisteringActions, binTools);
}
/**
 * Invokes the appropriate constructor to create a {@link ConfiguredTarget} instance.
 *
 * <p>For use in {@code ConfiguredTargetFunction}.
 *
 * <p>Returns null if Skyframe deps are missing or upon certain errors.
 */
@Nullable
ConfiguredTarget createConfiguredTarget(Target target, BuildConfiguration configuration,
    CachingAnalysisEnvironment analysisEnvironment,
    ListMultimap<Attribute, ConfiguredTarget> prerequisiteMap,
    Set<ConfigMatchingProvider> configConditions) throws InterruptedException {
  // Fail fast when called outside the analysis phase; see enableAnalysis().
  Preconditions.checkState(enableAnalysis,
      "Already in execution phase %s %s", target, configuration);
  return factory.createConfiguredTarget(analysisEnvironment, artifactFactory, target,
      configuration, getHostConfiguration(configuration), prerequisiteMap,
      configConditions);
}
/**
 * Returns the host configuration trimmed to the same fragments as the input configuration. If
 * the input is null, returns the top-level host configuration.
 *
 * <p>For static configurations, this unconditionally returns the (sole) top-level configuration.
 *
 * <p>This may only be called after {@link #setTopLevelHostConfiguration} has set the
 * correct host configuration at the top-level.
 */
public BuildConfiguration getHostConfiguration(BuildConfiguration config) {
  if (config == null || !config.useDynamicConfigurations()) {
    return topLevelHostConfiguration;
  }
  // TODO(bazel-team): have the fragment classes be those required by the consuming target's
  // transitive closure. This isn't the same as the input configuration's fragment classes -
  // the latter may be a proper subset of the former.
  //
  // ConfigurationFactory.getConfiguration provides the reason why: if a declared required
  // fragment is evaluated and returns null, it never gets added to the configuration. So if we
  // use the configuration's fragments as the source of truth, that excludes required fragments
  // that never made it in.
  //
  // If we're just trimming an existing configuration, this is no big deal (if the original
  // configuration doesn't need the fragment, the trimmed one doesn't either). But this method
  // trims a host configuration to the same scope as a target configuration. Since their options
  // are different, the host instance may actually be able to produce the fragment. So it's
  // wrong and potentially dangerous to unilaterally exclude it.
  Set<Class<? extends BuildConfiguration.Fragment>> fragmentClasses = config.fragmentClasses();
  // NOTE(review): the get/clone/put sequence on hostConfigurationCache is not synchronized here;
  // confirm this method is only invoked from a single thread or that the cache tolerates races.
  BuildConfiguration hostConfig = hostConfigurationCache.get(fragmentClasses);
  if (hostConfig != null) {
    return hostConfig;
  }
  BuildConfiguration trimmedConfig =
      topLevelHostConfiguration.clone(fragmentClasses, ruleClassProvider);
  hostConfigurationCache.put(fragmentClasses, trimmedConfig);
  return trimmedConfig;
}
@Nullable
SkyframeDependencyResolver createDependencyResolver(Environment env) {
  // NOTE(review): annotated @Nullable, but this implementation always returns a new resolver.
  return new SkyframeDependencyResolver(env);
}
/**
 * Workaround to clear all legacy data, like the artifact factory. We need
 * to clear them to avoid conflicts.
 * TODO(bazel-team): Remove this workaround. [skyframe-execution]
 */
void clearLegacyData() {
  artifactFactory.clear();
}
/**
 * Hack to invalidate actions in legacy action graph when their values are invalidated in
 * skyframe.
 */
EvaluationProgressReceiver getInvalidationReceiver() {
  return invalidationReceiver;
}
/** Clear the invalidated configured targets detected during loading and analysis phases. */
public void clearInvalidatedConfiguredTargets() {
  dirtiedConfiguredTargetKeys = Sets.newConcurrentHashSet();
  anyConfiguredTargetDeleted = false;
}
/** True when any configured target was deleted or dirtied since the last clear. */
public boolean isSomeConfiguredTargetInvalidated() {
  return anyConfiguredTargetDeleted || !dirtiedConfiguredTargetKeys.isEmpty();
}
/**
 * Called from SkyframeExecutor to see whether the graph needs to be checked for artifact
 * conflicts. Returns true if some configured target has been evaluated since the last time the
 * graph was checked for artifact conflicts (with that last time marked by a call to
 * {@link #resetEvaluatedConfiguredTargetFlag()}).
 */
boolean isSomeConfiguredTargetEvaluated() {
  // Only meaningful outside the analysis phase; see enableAnalysis().
  Preconditions.checkState(!enableAnalysis);
  return someConfiguredTargetEvaluated;
}
/**
 * Called from SkyframeExecutor after the graph is checked for artifact conflicts so that
 * the next time {@link #isSomeConfiguredTargetEvaluated} is called, it will return true only if
 * some configured target has been evaluated since the last check for artifact conflicts.
 */
void resetEvaluatedConfiguredTargetFlag() {
  someConfiguredTargetEvaluated = false;
}
/**
 * {@link #createConfiguredTarget} will only create configured targets if this is set to true. It
 * should be set to true before any Skyframe update call that might call into {@link
 * #createConfiguredTarget}, and false immediately after the call. Use it to fail-fast in the case
 * that a target is requested for analysis not during the analysis phase.
 */
void enableAnalysis(boolean enable) {
  this.enableAnalysis = enable;
}
/**
 * Receives Skyframe evaluation progress for configured-target nodes and maintains the
 * dirtied/deleted/evaluated bookkeeping consulted by the accessor methods above.
 */
private class ConfiguredTargetValueInvalidationReceiver implements EvaluationProgressReceiver {
  @Override
  public void invalidated(SkyKey skyKey, InvalidationState state) {
    // Only configured-target nodes are tracked.
    if (skyKey.functionName().equals(SkyFunctions.CONFIGURED_TARGET)) {
      if (state == InvalidationState.DELETED) {
        anyConfiguredTargetDeleted = true;
      } else {
        // If the value was just dirtied and not deleted, then it may not be truly invalid, since
        // it may later get re-validated. Therefore adding the key to dirtiedConfiguredTargetKeys
        // is provisional--if the key is later evaluated and the value found to be clean, then we
        // remove it from the set.
        dirtiedConfiguredTargetKeys.add(skyKey);
      }
    }
  }
  @Override
  public void enqueueing(SkyKey skyKey) {}
  @Override
  public void computed(SkyKey skyKey, long elapsedTimeNanos) {}
  @Override
  public void evaluated(SkyKey skyKey, Supplier<SkyValue> skyValueSupplier,
      EvaluationState state) {
    if (skyKey.functionName().equals(SkyFunctions.CONFIGURED_TARGET)) {
      switch (state) {
        case BUILT:
          if (skyValueSupplier.get() != null) {
            evaluatedConfiguredTargets.add(skyKey);
            // During multithreaded operation, this is only set to true, so no concurrency issues.
            someConfiguredTargetEvaluated = true;
          }
          break;
        case CLEAN:
          // If the configured target value did not need to be rebuilt, then it wasn't truly
          // invalid.
          dirtiedConfiguredTargetKeys.remove(skyKey);
          break;
      }
    }
  }
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang.psi.util;
import com.intellij.codeInsight.NullableNotNullManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.util.PropertyUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElementFactory;
import org.jetbrains.plugins.groovy.lang.psi.api.GroovyResolveResult;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.GrModifier;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.GrModifierList;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.blocks.GrCodeBlock;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameter;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrAccessorMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil;
import org.jetbrains.plugins.groovy.lang.resolve.ResolveUtil;
import org.jetbrains.plugins.groovy.lang.resolve.processors.AccessorResolverProcessor;
import java.beans.Introspector;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author ilyas
*/
public class GroovyPropertyUtils {
private static final Logger LOG = Logger.getInstance(GroovyPropertyUtils.class);
// Accessor-name prefixes defined by the JavaBeans/Groovy property conventions.
public static final String IS_PREFIX = "is";
public static final String GET_PREFIX = "get";
public static final String SET_PREFIX = "set";
// Static utility class; not meant to be instantiated.
private GroovyPropertyUtils() {
}
/**
 * Returns all setters declared on the field's class (not super classes) whose property name and
 * staticness match {@code field}. Never null; annotated for consistency with
 * {@link #getAllGettersByField} and the {@code @NotNull} contract of {@link #getAllSetters}.
 */
@NotNull
public static PsiMethod[] getAllSettersByField(PsiField field) {
  return getAllSetters(field.getContainingClass(), field.getName(), field.hasModifierProperty(PsiModifier.STATIC), false);
}
/**
 * Returns all getters declared on the field's class (not super classes) whose property name and
 * staticness match {@code field}. Never null.
 */
@NotNull
public static PsiMethod[] getAllGettersByField(PsiField field) {
  return getAllGetters(field.getContainingClass(), field.getName(), field.hasModifierProperty(PsiModifier.STATIC), false);
}
/**
 * Finds a setter for {@code field} in its containing class hierarchy, matching the field's
 * staticness. The property name is derived via the Java code-style settings
 * ({@link PropertyUtil#suggestPropertyName}). Null when none is found.
 */
@Nullable
public static PsiMethod findSetterForField(PsiField field) {
  final PsiClass containingClass = field.getContainingClass();
  final Project project = field.getProject();
  final String propertyName = PropertyUtil.suggestPropertyName(project, field);
  final boolean isStatic = field.hasModifierProperty(PsiModifier.STATIC);
  // findPropertySetter tolerates a null containing class (returns null).
  return findPropertySetter(containingClass, propertyName, isStatic, true);
}
/**
 * Finds a getter for {@code field} in its containing class hierarchy, matching the field's
 * staticness. Null when none is found.
 */
@Nullable
public static PsiMethod findGetterForField(PsiField field) {
  final PsiClass containingClass = field.getContainingClass();
  final Project project = field.getProject();
  final String propertyName = PropertyUtil.suggestPropertyName(project, field);
  final boolean isStatic = field.hasModifierProperty(PsiModifier.STATIC);
  return findPropertyGetter(containingClass, propertyName, isStatic, true);
}
/**
 * Resolves the setter for {@code propertyName} on {@code type} using Groovy resolve machinery.
 * When {@code type} is null, falls back to the type of {@code this} in the given context.
 * Returns null when no unique setter is found.
 */
@Nullable
public static PsiMethod findPropertySetter(@Nullable PsiType type, String propertyName, @NotNull GroovyPsiElement context) {
  if (type == null) {
    final GrExpression fromText = GroovyPsiElementFactory.getInstance(context.getProject()).createExpressionFromText("this", context);
    final PsiType thisType = fromText.getType();
    if (thisType == null) {
      // Previously this recursed with a null type again, which loops forever when the type of
      // "this" cannot be determined. Bail out instead.
      return null;
    }
    return findPropertySetter(thisType, propertyName, context);
  }
  // Compute the setter name only on the path that actually needs it.
  final String setterName = getSetterName(propertyName);
  final AccessorResolverProcessor processor = new AccessorResolverProcessor(setterName, propertyName, context, false);
  ResolveUtil.processAllDeclarations(type, processor, ResolveState.initial(), context);
  final GroovyResolveResult[] setterCandidates = processor.getCandidates();
  return PsiImplUtil.extractUniqueElement(setterCandidates);
}
/**
 * Finds a setter named for {@code propertyName} with the given staticness, searching the class
 * alone or its whole hierarchy depending on {@code checkSuperClasses}. Null when absent.
 */
@Nullable
public static PsiMethod findPropertySetter(PsiClass aClass, String propertyName, boolean isStatic, boolean checkSuperClasses) {
  if (aClass == null) return null;
  final PsiMethod[] candidates = checkSuperClasses ? aClass.getAllMethods() : aClass.getMethods();
  for (PsiMethod candidate : candidates) {
    if (candidate.hasModifierProperty(PsiModifier.STATIC) != isStatic) continue;
    if (isSimplePropertySetter(candidate) && propertyName.equals(getPropertyNameBySetter(candidate))) {
      return candidate;
    }
  }
  return null;
}
/**
 * Collects every getter for {@code propertyName} with the given staticness. Searches the class
 * alone or its whole hierarchy depending on {@code checkSuperClasses}. Never null.
 */
@NotNull
public static PsiMethod[] getAllGetters(PsiClass aClass, @NotNull String propertyName, boolean isStatic, boolean checkSuperClasses) {
  if (aClass == null) return PsiMethod.EMPTY_ARRAY;
  final PsiMethod[] candidates = checkSuperClasses ? aClass.getAllMethods() : aClass.getMethods();
  final List<PsiMethod> matches = new ArrayList<PsiMethod>();
  for (PsiMethod candidate : candidates) {
    if (candidate.hasModifierProperty(PsiModifier.STATIC) != isStatic) continue;
    if (isSimplePropertyGetter(candidate) && propertyName.equals(getPropertyNameByGetter(candidate))) {
      matches.add(candidate);
    }
  }
  return matches.toArray(new PsiMethod[matches.size()]);
}
/**
 * Collects every setter for {@code propertyName} with the given staticness. Searches the class
 * alone or its whole hierarchy depending on {@code checkSuperClasses}. Never null.
 */
@NotNull
public static PsiMethod[] getAllSetters(PsiClass aClass, @NotNull String propertyName, boolean isStatic, boolean checkSuperClasses) {
  if (aClass == null) return PsiMethod.EMPTY_ARRAY;
  final PsiMethod[] candidates = checkSuperClasses ? aClass.getAllMethods() : aClass.getMethods();
  final List<PsiMethod> matches = new ArrayList<PsiMethod>();
  for (PsiMethod candidate : candidates) {
    if (candidate.hasModifierProperty(PsiModifier.STATIC) != isStatic) continue;
    if (isSimplePropertySetter(candidate) && propertyName.equals(getPropertyNameBySetter(candidate))) {
      matches.add(candidate);
    }
  }
  return matches.toArray(new PsiMethod[matches.size()]);
}
/**
 * Finds a getter for {@code propertyName}. A null {@code isStatic} matches either staticness;
 * otherwise the method's staticness must match exactly. Null when absent.
 */
@Nullable
public static PsiMethod findPropertyGetter(@Nullable PsiClass aClass,
                                           String propertyName,
                                           @Nullable Boolean isStatic,
                                           boolean checkSuperClasses) {
  if (aClass == null) return null;
  final PsiMethod[] candidates = checkSuperClasses ? aClass.getAllMethods() : aClass.getMethods();
  for (PsiMethod candidate : candidates) {
    // isStatic == null means "either"; otherwise require an exact match.
    if (isStatic != null && candidate.hasModifierProperty(PsiModifier.STATIC) != isStatic) continue;
    if (isSimplePropertyGetter(candidate) && propertyName.equals(getPropertyNameByGetter(candidate))) {
      return candidate;
    }
  }
  return null;
}
/** True when {@code method} is recognized as either a simple property getter or setter. */
public static boolean isSimplePropertyAccessor(PsiMethod method) {
  return isSimplePropertyGetter(method) || isSimplePropertySetter(method);
}
// do not check return type
/** One-arg convenience overload: matches a getter for any property name. */
public static boolean isSimplePropertyGetter(PsiMethod method) {
  return isSimplePropertyGetter(method, null);
}
// do not check return type
/**
 * True when {@code method} looks like a property getter, optionally restricted to
 * {@code propertyName}: no parameters, a getter-style name, boolean return for "is"-prefixed
 * names, and a non-void return type.
 */
public static boolean isSimplePropertyGetter(PsiMethod method, @Nullable String propertyName) {
  if (method == null || method.isConstructor()) return false;
  if (method.getParameterList().getParametersCount() != 0) return false;
  if (!isGetterName(method.getName())) return false;
  if (method.getName().startsWith(IS_PREFIX) && !PsiType.BOOLEAN.equals(method.getReturnType())) {
    return false;
  }
  // Compare with equals() rather than ==: PsiType instances are not guaranteed to be interned,
  // and every other primitive-type comparison in this class uses equals().
  if (PsiType.VOID.equals(method.getReturnType())) return false;
  if (propertyName == null) return true;
  final String byGetter = getPropertyNameByGetter(method);
  // Fall back to deriving the name from the raw method name when the reported property name is
  // not itself a valid property name.
  return propertyName.equals(byGetter) || (!isPropertyName(byGetter) && propertyName.equals(
    getPropertyNameByGetterName(method.getName(), PsiType.BOOLEAN.equals(method.getReturnType()))));
}
/** One-arg convenience overload: matches a setter for any property name. */
public static boolean isSimplePropertySetter(PsiMethod method) {
  return isSimplePropertySetter(method, null);
}
/**
 * True when {@code method} looks like a property setter, optionally restricted to
 * {@code propertyName}: exactly one parameter and a setter-style name.
 */
public static boolean isSimplePropertySetter(PsiMethod method, @Nullable String propertyName) {
  if (method == null || method.isConstructor()) return false;
  if (method.getParameterList().getParametersCount() != 1) return false;
  final String methodName = method.getName();
  if (!isSetterName(methodName)) return false;
  if (propertyName == null) return true;
  final String bySetter = getPropertyNameBySetter(method);
  if (propertyName.equals(bySetter)) return true;
  // Fall back to deriving the name from the raw method name when the reported property name is
  // not itself a valid property name.
  return !isPropertyName(bySetter) && propertyName.equals(getPropertyNameBySetterName(methodName));
}
/** Returns the property name encoded in a getter, or null if the name is not getter-shaped. */
@Nullable
public static String getPropertyNameByGetter(PsiMethod getterMethod) {
  // Synthetic Groovy accessor methods know their property directly.
  if (getterMethod instanceof GrAccessorMethod) {
    return ((GrAccessorMethod)getterMethod).getProperty().getName();
  }
  final boolean booleanProperty = PsiType.BOOLEAN.equals(getterMethod.getReturnType());
  return getPropertyNameByGetterName(getterMethod.getName(), booleanProperty);
}
/**
 * "getFoo" -> "foo"; "isFoo" -> "foo" only when {@code canBeBoolean}; otherwise null.
 */
@Nullable
public static String getPropertyNameByGetterName(String methodName, boolean canBeBoolean) {
  if (methodName.startsWith(GET_PREFIX) && methodName.length() > GET_PREFIX.length()) {
    return decapitalize(methodName.substring(GET_PREFIX.length()));
  }
  if (canBeBoolean && methodName.startsWith(IS_PREFIX) && methodName.length() > IS_PREFIX.length()) {
    return decapitalize(methodName.substring(IS_PREFIX.length()));
  }
  return null;
}
/** Returns the property name encoded in a setter, or null if the name is not setter-shaped. */
@Nullable
public static String getPropertyNameBySetter(PsiMethod setterMethod) {
  // Synthetic Groovy accessor methods know their property directly.
  return setterMethod instanceof GrAccessorMethod
         ? ((GrAccessorMethod)setterMethod).getProperty().getName()
         : getPropertyNameBySetterName(setterMethod.getName());
}
/** "setFoo" -> "foo"; anything else yields null. */
@Nullable
public static String getPropertyNameBySetterName(String methodName) {
  if (!methodName.startsWith(SET_PREFIX) || methodName.length() <= SET_PREFIX.length()) {
    return null;
  }
  return StringUtil.decapitalize(methodName.substring(SET_PREFIX.length()));
}
/** Derives a property name from either a getter- or setter-style name; null otherwise. */
@Nullable
public static String getPropertyNameByAccessorName(String accessorName) {
  if (isGetterName(accessorName)) {
    return getPropertyNameByGetterName(accessorName, true);
  }
  return isSetterName(accessorName) ? getPropertyNameBySetterName(accessorName) : null;
}
/** Returns the property name for a simple getter or setter; null if neither. */
@Nullable
public static String getPropertyName(PsiMethod accessor) {
  if (isSimplePropertyGetter(accessor)) {
    return getPropertyNameByGetter(accessor);
  }
  return isSimplePropertySetter(accessor) ? getPropertyNameBySetter(accessor) : null;
}
/**
 * True when {@code name} is shaped like a getter: a "get"/"is" prefix followed either by an
 * upper-case character, or by any character whose successor is upper-case.
 */
public static boolean isGetterName(@NotNull String name) {
  final int prefixLength;
  if (name.startsWith(GET_PREFIX)) {
    prefixLength = GET_PREFIX.length();
  }
  else if (name.startsWith(IS_PREFIX)) {
    prefixLength = IS_PREFIX.length();
  }
  else {
    return false;
  }
  // A bare prefix ("get", "is") is not a getter name.
  if (name.length() == prefixLength) return false;
  if (isUpperCase(name.charAt(prefixLength))) return true;
  return name.length() > prefixLength + 1 && isUpperCase(name.charAt(prefixLength + 1));
}
/** Builds the plain getter name for a property: "foo" -> "getFoo". */
public static String getGetterNameNonBoolean(@NotNull String name) {
  return getAccessorName(GET_PREFIX, name);
}
/** Builds the boolean getter name for a property: "foo" -> "isFoo". */
public static String getGetterNameBoolean(@NotNull String name) {
  return getAccessorName(IS_PREFIX, name);
}
/** Builds the setter name for a property: "foo" -> "setFoo". */
public static String getSetterName(@NotNull String name) {
  // Use the shared SET_PREFIX constant instead of a hard-coded "set", consistent with the
  // getter variants above. SET_PREFIX == "set", so behavior is unchanged.
  return getAccessorName(SET_PREFIX, name);
}
/**
 * Joins {@code prefix} and {@code name} into an accessor name. The first character of
 * {@code name} is upper-cased, except that a name whose second character is already upper-case
 * is appended unchanged (JavaBeans capitalization rule). An empty name yields the bare prefix.
 */
public static String getAccessorName(String prefix, String name) {
  if (name.length() == 0) return prefix;
  if (name.length() > 1 && Character.isUpperCase(name.charAt(1))) {
    return prefix + name;
  }
  return prefix + Character.toUpperCase(name.charAt(0)) + name.substring(1);
}
/**
 * Returns getter names in priority order
 * @param name property name
 * @return getter names: the boolean ("is") form first, then the plain ("get") form
 */
public static String[] suggestGettersName(@NotNull String name) {
  final String booleanForm = getGetterNameBoolean(name);
  final String plainForm = getGetterNameNonBoolean(name);
  return new String[]{booleanForm, plainForm};
}
/**
 * A non-empty name qualifies as a property name unless it starts with a single upper-case
 * character (e.g. "Foo" or "X"); names whose first two characters are both upper-case
 * (e.g. "URL") still qualify.
 */
public static boolean isPropertyName(String name) {
  if (name.length() == 0) return false;
  final boolean leadingSingleUpper =
    Character.isUpperCase(name.charAt(0))
    && (name.length() == 1 || !Character.isUpperCase(name.charAt(1)));
  return !leadingSingleUpper;
}
/** Returns the single candidate setter name for {@code name}, e.g. "foo" -> ["setFoo"]. */
public static String[] suggestSettersName(@NotNull String name) {
  return new String[]{getSetterName(name)};
}
/**
 * True when {@code name} is shaped like a setter: "set" followed either by an upper-case
 * character, or by any character whose successor is upper-case (mirrors {@link #isGetterName}).
 */
public static boolean isSetterName(String name) {
  return name != null
         && name.startsWith(SET_PREFIX)
         && name.length() > 3
         // The second disjunct previously re-checked charAt(3), making it redundant; it must
         // inspect charAt(4) to match the "character after the prefix's successor" rule that
         // isGetterName applies with prefixLength + 1.
         && (isUpperCase(name.charAt(3)) || (name.length() > 4 && isUpperCase(name.charAt(4))));
}
/**
 * True when the class exposes {@code propertyName} as a Groovy property with the given
 * staticness: either via a Groovy property field, or via a public getter or public setter.
 */
public static boolean isProperty(@Nullable PsiClass aClass, @Nullable String propertyName, boolean isStatic) {
  if (aClass == null || propertyName == null) return false;
  // A matching Groovy property field wins outright.
  final PsiField field = aClass.findFieldByName(propertyName, true);
  if (field instanceof GrField && ((GrField)field).isProperty()
      && field.hasModifierProperty(PsiModifier.STATIC) == isStatic) {
    return true;
  }
  // Otherwise a public accessor (getter or setter) makes it a property.
  final PsiMethod getter = findPropertyGetter(aClass, propertyName, isStatic, true);
  if (getter != null && getter.hasModifierProperty(PsiModifier.PUBLIC)) return true;
  final PsiMethod setter = findPropertySetter(aClass, propertyName, isStatic, true);
  return setter != null && setter.hasModifierProperty(PsiModifier.PUBLIC);
}
/** Convenience overload: checks the field's own class, name, and staticness. */
public static boolean isProperty(GrField field) {
  return isProperty(field.getContainingClass(), field.getName(), field.hasModifierProperty(PsiModifier.STATIC));
}
// Loose "upper-case" check used by the accessor-name predicates above: true for any character
// with no distinct upper-case form, so digits and symbols also count as upper-case here.
private static boolean isUpperCase(char c) {
  return Character.toUpperCase(c) == c;
}
/*public static boolean canBePropertyName(String name) {
  return !(name.length() > 1 && Character.isUpperCase(name.charAt(1)) && Character.isLowerCase(name.charAt(0)));
}*/
/**
 * Upper-cases the first character of {@code s}, except that a string whose second character is
 * already upper-case is returned unchanged (JavaBeans rule, cf. {@link Introspector}).
 */
public static String capitalize(String s) {
  if (s.length() == 0) return s;
  if (s.length() == 1) return s.toUpperCase();
  if (Character.isUpperCase(s.charAt(1))) return s;
  return Character.toUpperCase(s.charAt(0)) + s.substring(1);
}
/** Delegates to {@link Introspector#decapitalize}: "FooBar" -> "fooBar", but "URL" stays "URL". */
public static String decapitalize(String s) {
  return Introspector.decapitalize(s);
}
/**
 * Finds the field backing {@code accessor} by deriving the property name from the accessor's
 * name. Returns null when no such field exists or when its staticness differs from the
 * accessor's.
 */
@Nullable
public static PsiField findFieldForAccessor(PsiMethod accessor, boolean checkSuperClasses) {
  final PsiClass psiClass = accessor.getContainingClass();
  if (psiClass == null) return null;
  PsiField field = null;
  if (!checkSuperClasses) {
    // NOTE(review): findFieldByName is called with checkBases=true even on this
    // !checkSuperClasses branch -- looks inconsistent with the else branch; confirm intent.
    field = psiClass.findFieldByName(getPropertyNameByAccessorName(accessor.getName()), true);
  }
  else {
    final String name = getPropertyNameByAccessorName(accessor.getName());
    assert name != null;
    final PsiField[] allFields = psiClass.getAllFields();
    for (PsiField psiField : allFields) {
      if (name.equals(psiField.getName())) {
        field = psiField;
        break;
      }
    }
  }
  if (field == null) return null;
  // Accessor and field must agree on staticness.
  if (field.hasModifierProperty(PsiModifier.STATIC) == accessor.hasModifierProperty(PsiModifier.STATIC)) {
    return field;
  }
  return null;
}
/** Returns "get" or "is" when the method name starts with that prefix; null otherwise. */
@Nullable
public static String getGetterPrefix(PsiMethod getter) {
  final String name = getter.getName();
  if (name.startsWith(GET_PREFIX)) return GET_PREFIX;
  return name.startsWith(IS_PREFIX) ? IS_PREFIX : null;
}
/** Returns "set" when the method name starts with that prefix; null otherwise. */
@Nullable
public static String getSetterPrefix(PsiMethod setter) {
  return setter.getName().startsWith(SET_PREFIX) ? SET_PREFIX : null;
}
/** Returns the getter prefix if present, otherwise the setter prefix, otherwise null. */
@Nullable
public static String getAccessorPrefix(PsiMethod method) {
  final String getterPrefix = getGetterPrefix(method);
  return getterPrefix != null ? getterPrefix : getSetterPrefix(method);
}
/**
 * True when {@code accessor} is a getter or setter for {@code field}: its name matches one of
 * the suggested accessor names, it lives in the same class, and staticness agrees.
 */
public static boolean isAccessorFor(PsiMethod accessor, PsiField field) {
  final String accessorName = accessor.getName();
  final String fieldName = field.getName();
  final boolean nameMatches =
    ArrayUtil.contains(accessorName, suggestGettersName(fieldName))
    || ArrayUtil.contains(accessorName, suggestSettersName(fieldName));
  if (!nameMatches) return false;
  // Must be declared in the (equivalent) same class as the field.
  final PsiClass accessorClass = accessor.getContainingClass();
  final PsiClass fieldClass = field.getContainingClass();
  if (!field.getManager().areElementsEquivalent(accessorClass, fieldClass)) return false;
  return accessor.hasModifierProperty(PsiModifier.STATIC) == field.hasModifierProperty(PsiModifier.STATIC);
}
/** Returns the field's synthetic getters followed by its synthetic setter, if one exists. */
public static List<GrAccessorMethod> getFieldAccessors(GrField field) {
  final List<GrAccessorMethod> accessors = new ArrayList<GrAccessorMethod>();
  Collections.addAll(accessors, field.getGetters());
  final GrAccessorMethod setter = field.getSetter();
  if (setter != null) {
    accessors.add(setter);
  }
  return accessors;
}
public static GrMethod generateGetterPrototype(PsiField field) {
GroovyPsiElementFactory factory = GroovyPsiElementFactory.getInstance(field.getProject());
String name = field.getName();
String getName = getGetterNameNonBoolean(field.getName());
try {
PsiType type = field instanceof GrField ? ((GrField)field).getDeclaredType() : field.getType();
GrMethod getter = factory.createMethod(getName, type);
if (field.hasModifierProperty(PsiModifier.STATIC)) {
PsiUtil.setModifierProperty(getter, PsiModifier.STATIC, true);
}
annotateWithNullableStuff(field, getter);
GrCodeBlock body = factory.createMethodBodyFromText("\nreturn " + name + "\n");
getter.getBlock().replace(body);
return getter;
}
catch (IncorrectOperationException e) {
LOG.error(e);
return null;
}
}
  /**
   * Builds a setter prototype ("void setFoo(foo) { foo = foo }" with proper
   * qualification) for the given field, copying the field's static modifier
   * and nullability annotations onto the parameter.
   *
   * @param field the field to generate a setter for
   * @return the setter prototype, or null when PSI generation fails
   */
  public static GrMethod generateSetterPrototype(PsiField field) {
    Project project = field.getProject();
    JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project);
    GroovyPsiElementFactory factory = GroovyPsiElementFactory.getInstance(project);
    String name = field.getName();
    boolean isStatic = field.hasModifierProperty(PsiModifier.STATIC);
    VariableKind kind = codeStyleManager.getVariableKind(field);
    String propertyName = codeStyleManager.variableNameToPropertyName(name, kind);
    String setName = getSetterName(field.getName());
    final PsiClass containingClass = field.getContainingClass();
    try {
      GrMethod setMethod = factory.createMethod(setName, PsiType.VOID);
      // Derive the parameter name from the property name per code style
      // settings (e.g. parameter prefixes).
      String parameterName = codeStyleManager.propertyNameToVariableName(propertyName, VariableKind.PARAMETER);
      // Prefer the declared type for Groovy fields (it may be absent/def).
      final PsiType type = field instanceof GrField ? ((GrField)field).getDeclaredType() : field.getType();
      GrParameter param = factory.createParameter(parameterName, type);
      annotateWithNullableStuff(field, param);
      setMethod.getParameterList().add(param);
      PsiUtil.setModifierProperty(setMethod, PsiModifier.STATIC, isStatic);
      @NonNls StringBuilder builder = new StringBuilder();
      // When the parameter shadows the field, qualify the assignment target
      // with "this." (instance fields) or the class name (static fields).
      if (name.equals(parameterName)) {
        if (!isStatic) {
          builder.append("this.");
        }
        else {
          String className = containingClass.getName();
          if (className != null) {
            builder.append(className);
            builder.append(".");
          }
        }
      }
      builder.append(name);
      builder.append("=");
      builder.append(parameterName);
      builder.append("\n");
      GrCodeBlock body = factory.createMethodBodyFromText(builder.toString());
      setMethod.getBlock().replace(body);
      return setMethod;
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
      return null;
    }
  }
  /**
   * Copies the field's nullability annotation (@NotNull preferred over
   * @Nullable when both managers report one) onto the generated accessor or
   * parameter, then drops an explicit "def" modifier if it has become
   * redundant.
   *
   * @param field     the source field whose nullability is inspected
   * @param listOwner the generated element (method or parameter) to annotate
   */
  private static void annotateWithNullableStuff(final PsiModifierListOwner field, final PsiModifierListOwner listOwner)
    throws IncorrectOperationException {
    final NullableNotNullManager manager = NullableNotNullManager.getInstance(field.getProject());
    final String notNull = manager.getNotNull(field);
    if (notNull != null) {
      // Field is @NotNull: propagate that annotation.
      annotate(listOwner, notNull);
    }
    else {
      final String nullable = manager.getNullable(field);
      if (nullable != null) {
        annotate(listOwner, nullable);
      }
    }
    final PsiModifierList modifierList = listOwner.getModifierList();
    if (modifierList.hasExplicitModifier(GrModifier.DEF)) {
      LOG.assertTrue(modifierList instanceof GrModifierList);
      // "def" is only required when it is the sole modifier; once an
      // annotation or another modifier is present it can be removed.
      if (modifierList.getAnnotations().length > 0 || ((GrModifierList)modifierList).getModifiers().length > 1) {
        ((GrModifierList)modifierList).setModifierProperty(GrModifier.DEF, false);
      }
    }
  }
private static void annotate(final PsiModifierListOwner listOwner, final String annotationQName)
throws IncorrectOperationException {
final PsiModifierList modifierList = listOwner.getModifierList();
LOG.assertTrue(modifierList != null);
modifierList.addAnnotation(annotationQName);
}
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.jme3.gde.ogretools.convert;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.netbeans.api.progress.ProgressHandle;
import org.openide.DialogDisplayer;
import org.openide.NotifyDescriptor;
import org.openide.NotifyDescriptor.Confirmation;
import org.openide.util.Exceptions;
import org.openide.util.Utilities;
/**
*
* @author normenhansen
*/
/**
 * Runs the external OgreXMLConverter tool to optimize Ogre XML meshes and
 * convert them between XML and binary form. The platform-specific converter
 * binaries are bundled as zip resources and extracted into the user
 * directory on first use.
 *
 * @author normenhansen
 */
public class OgreXMLConvert {

    /** Buffer size in bytes used when copying streams during tool extraction. */
    static final int BUFFER = 2048;
    // Installation directories of the bundled converter tools, per platform.
    public static String osx_path = System.getProperty("netbeans.user") + "/ogretools/";
    public static String windows_path = System.getProperty("netbeans.user") + "\\ogretools\\";
    public static String linux_path = System.getProperty("netbeans.user") + "/.ogretools/";

    /**
     * Converts the mesh described by the given options. XML input is first
     * converted to binary (applying tangent / edge-list / LOD options), then
     * converted back to XML; binary input skips the first pass.
     *
     * @param options conversion settings and file names
     * @param handle  progress handle that receives status messages and tool output
     * @return true when all converter runs exited with code 0
     */
    public boolean doConvert(OgreXMLConvertOptions options, ProgressHandle handle) {
        if (!checkTools()) {
            return false;
        }
        if (!options.isBinaryFile()) {
            handle.progress("Optimizing Mesh / Creating LOD meshes");
            // First pass: convert XML to binary while applying the mesh options.
            if (!runConverter(getCommandString(options), handle, options)) {
                return false;
            }
        }
        handle.progress("Converting Binary Mesh");
        // Second pass: convert the (possibly optimized) binary mesh back to XML.
        if (!runConverter(getBackCommandString(options), handle, options)) {
            return false;
        }
        cleanUp(options);
        return true;
    }

    /**
     * Starts the converter process with the given command line, forwards its
     * stdout/stderr to the progress handle and waits for it to finish.
     * Extracted from the two duplicated halves of doConvert().
     *
     * @return true when the process exited with code 0; false on non-zero
     *         exit, or on an I/O error (in which case the intermediate binary
     *         file is also cleaned up, matching the original behavior)
     */
    private boolean runConverter(String[] cmdOptions, ProgressHandle handle, OgreXMLConvertOptions options) {
        try {
            Process proc = Runtime.getRuntime().exec(cmdOptions);
            OutputReader outReader = new OutputReader(proc.getInputStream());
            outReader.setProgress(handle);
            OutputReader errReader = new OutputReader(proc.getErrorStream());
            errReader.setProgress(handle);
            outReader.start();
            errReader.start();
            try {
                proc.waitFor();
            } catch (InterruptedException ex) {
                Exceptions.printStackTrace(ex);
            }
            return proc.exitValue() == 0;
        } catch (IOException ex) {
            Exceptions.printStackTrace(ex);
            cleanUp(options);
            return false;
        }
    }

    /**
     * Deletes the intermediate binary file that was created for XML input.
     */
    private void cleanUp(OgreXMLConvertOptions options) {
        if (!options.isBinaryFile()) {
            File file = new File(options.getBinaryFileName());
            if (file.exists()) {
                file.delete();
            }
        }
    }

    /**
     * Appends the platform-specific converter executable and its log option
     * to the command line. Extracted from the duplicated prologues of
     * getCommandString() and getBackCommandString().
     */
    private void addConverterExecutable(ArrayList<String> strings) {
        if (Utilities.isWindows()) {
            strings.add(windows_path + "OgreXMLConverter.exe");
            strings.add("-log");
            strings.add(windows_path + "OgreXMLConverter.log");
        } else if (Utilities.isMac()) {
            strings.add(osx_path + "bin/OgreXMLConverter");
            strings.add("-log");
            strings.add(osx_path + "OgreXMLConverter.log");
        } else {
            strings.add(linux_path + "OgreXMLConverter");
            strings.add("-log");
            strings.add(linux_path + "OgreXMLConverter.log");
        }
    }

    /**
     * Builds the command line for the binary-to-XML pass.
     */
    private String[] getBackCommandString(OgreXMLConvertOptions options) {
        ArrayList<String> strings = new ArrayList<String>();
        addConverterExecutable(strings);
        strings.add(options.getBinaryFileName());
        strings.add(options.getDestFile());
        return strings.toArray(new String[strings.size()]);
    }

    /**
     * Builds the command line for the XML-to-binary pass, including tangent
     * generation, edge lists and LOD settings.
     */
    private String[] getCommandString(OgreXMLConvertOptions options) {
        ArrayList<String> strings = new ArrayList<String>();
        addConverterExecutable(strings);
        strings.add("-gl");
        if (options.isGenerateTangents()) {
            strings.add("-t");
        }
        if (!options.isGenerateEdgeLists()) {
            strings.add("-e");
        }
        if (options.getLodLevels() > 0) {
            strings.add("-l");
            strings.add(options.getLodLevels() + "");
            strings.add("-p");
            strings.add(options.getLodPercent() + "");
        }
        strings.add(options.getSourceFile());
        strings.add(options.getBinaryFileName());
        return strings.toArray(new String[strings.size()]);
    }

    /**
     * Checks that the converter executable exists for the current platform,
     * extracting the bundled tools when it does not.
     */
    private boolean checkTools() {
        if (Utilities.isWindows()) {
            File file = new File(windows_path + "OgreXMLConverter.exe");
            if (!file.exists()) {
                return extractToolsWindows();
            }
        } else if (Utilities.isMac()) {
            File file = new File(osx_path + "bin/OgreXMLConverter");
            if (!file.exists()) {
                return extractToolsOSX();
            }
        } else {
            File file = new File(linux_path + "OgreXMLConverter");
            if (!file.exists()) {
                return extractToolsLinux();
            }
        }
        return true;
    }

    /**
     * Extracts the OS X tools: the zip is copied out of the module jar and
     * unpacked with the system "unzip" command — presumably so executable
     * permissions are preserved, which java.util.zip cannot do.
     * Streams are now closed in finally blocks (they leaked on error before).
     */
    private boolean extractToolsOSX() {
        File path = new File(osx_path);
        if (!path.exists()) {
            path.mkdirs();
        }
        try {
            BufferedInputStream in = new BufferedInputStream(getClass().getResourceAsStream("/com/jme3/gde/ogretools/convert/OgreTools-Mac-Intel.zip"));
            try {
                BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(osx_path + "tools.zip"));
                try {
                    // Copy in BUFFER-sized chunks (the original copied one
                    // byte at a time; the resulting file is identical).
                    byte[] data = new byte[BUFFER];
                    int count;
                    while ((count = in.read(data, 0, BUFFER)) != -1) {
                        out.write(data, 0, count);
                    }
                } finally {
                    out.close();
                }
            } finally {
                in.close();
            }
            String[] cmdStrings = new String[]{
                "unzip",
                "-o",
                "-q",
                osx_path + "tools.zip",
                "-d",
                osx_path
            };
            Process p = Runtime.getRuntime().exec(cmdStrings);
            OutputReader outReader = new OutputReader(p.getInputStream());
            OutputReader errReader = new OutputReader(p.getErrorStream());
            outReader.start();
            errReader.start();
            p.waitFor();
            File zipFile = new File(osx_path + "tools.zip");
            zipFile.delete();
            if (p.exitValue() != 0) {
                return false;
            }
        } catch (Exception e) {
            e.printStackTrace();
            Confirmation msg = new NotifyDescriptor.Confirmation(
                    "Error extracting OgreTools!",
                    NotifyDescriptor.DEFAULT_OPTION,
                    NotifyDescriptor.ERROR_MESSAGE);
            DialogDisplayer.getDefault().notify(msg);
            return false;
        }
        return true;
    }

    private boolean extractToolsWindows() {
        // Windows/Linux zips need no executable bits, so plain java.util.zip
        // extraction is sufficient.
        return extractTools("/com/jme3/gde/ogretools/convert/OgreTools-Windows.zip", windows_path);
    }

    private boolean extractToolsLinux() {
        return extractTools("/com/jme3/gde/ogretools/convert/OgreTools-Linux.zip", linux_path);
    }

    /**
     * Extracts the given zip resource into destPath. This body was previously
     * duplicated verbatim in extractToolsWindows() and extractToolsLinux();
     * streams are now closed in finally blocks, fixing leaks on I/O errors.
     */
    private boolean extractTools(String resourceName, String destPath) {
        File path = new File(destPath);
        if (!path.exists()) {
            path.mkdirs();
        }
        try {
            File scriptsFolderFile = new File(destPath);
            if (!scriptsFolderFile.exists()) {
                // mkdirs() failed — there is nowhere to extract to.
                Confirmation msg = new NotifyDescriptor.Confirmation(
                        "Error extracting OgreTools!",
                        NotifyDescriptor.DEFAULT_OPTION,
                        NotifyDescriptor.ERROR_MESSAGE);
                DialogDisplayer.getDefault().notify(msg);
                return false;
            }
            ZipInputStream zis = new ZipInputStream(new BufferedInputStream(getClass().getResourceAsStream(resourceName)));
            try {
                ZipEntry entry;
                while ((entry = zis.getNextEntry()) != null) {
                    // Skip version-control and macOS metadata entries.
                    if (entry.getName().contains(".svn") || entry.getName().contains(".DS_Store")) {
                        continue;
                    }
                    // NOTE(review): entry names are used unchecked, so a
                    // crafted zip could escape destPath ("zip slip"). The
                    // zips are bundled with the plugin, so not exploitable
                    // here, but worth a normalize/startsWith check.
                    if (entry.isDirectory()) {
                        new File(destPath + File.separator + entry.getName()).mkdirs();
                        continue;
                    }
                    // Write the file entry to disk.
                    BufferedOutputStream dest = new BufferedOutputStream(
                            new FileOutputStream(destPath + File.separator + entry.getName()), BUFFER);
                    try {
                        byte[] data = new byte[BUFFER];
                        int count;
                        while ((count = zis.read(data, 0, BUFFER)) != -1) {
                            dest.write(data, 0, count);
                        }
                        dest.flush();
                    } finally {
                        dest.close();
                    }
                }
            } finally {
                zis.close();
            }
        } catch (IOException ex) {
            Confirmation msg = new NotifyDescriptor.Confirmation(
                    "Error extracting OgreXMLTools:\n" + ex.toString(),
                    NotifyDescriptor.DEFAULT_OPTION,
                    NotifyDescriptor.ERROR_MESSAGE);
            DialogDisplayer.getDefault().notify(msg);
            Exceptions.printStackTrace(ex);
            return false;
        }
        return true;
    }
}
| |
/*
* Copyright 2012-2016 bambooCORE, greenstep of copyright Chen Xin Nien
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* -----------------------------------------------------------------------
*
* author: Chen Xin Nien
* contact: chen.xin.nien@gmail.com
*
*/
package com.netsteadfast.greenstep.bsc.action;
import java.text.ParseException;
import java.util.Date;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.log4j.Logger;
import org.apache.struts2.json.annotations.JSON;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Controller;
import com.netsteadfast.greenstep.BscConstants;
import com.netsteadfast.greenstep.base.SysMessageUtil;
import com.netsteadfast.greenstep.base.action.BaseJsonAction;
import com.netsteadfast.greenstep.base.exception.AuthorityException;
import com.netsteadfast.greenstep.base.exception.ControllerException;
import com.netsteadfast.greenstep.base.exception.ServiceException;
import com.netsteadfast.greenstep.base.model.ControllerAuthority;
import com.netsteadfast.greenstep.base.model.ControllerMethodAuthority;
import com.netsteadfast.greenstep.base.model.DefaultResult;
import com.netsteadfast.greenstep.base.model.GreenStepSysMsgConstants;
import com.netsteadfast.greenstep.bsc.action.utils.SelectItemFieldCheckUtils;
import com.netsteadfast.greenstep.bsc.model.BscMeasureDataFrequency;
import com.netsteadfast.greenstep.bsc.service.IEmployeeService;
import com.netsteadfast.greenstep.bsc.service.IOrganizationService;
import com.netsteadfast.greenstep.bsc.util.HistoryItemScoreReportContentQueryUtils;
import com.netsteadfast.greenstep.po.hbm.BbEmployee;
import com.netsteadfast.greenstep.po.hbm.BbOrganization;
import com.netsteadfast.greenstep.util.SimpleUtils;
import com.netsteadfast.greenstep.vo.EmployeeVO;
import com.netsteadfast.greenstep.vo.OrganizationVO;
/**
 * JSON action that assembles the content for the BSC history item score
 * report: either line-chart series or heat-map data for a selected item
 * type, frequency, date and measure-data target (organization or employee).
 *
 * author: Chen Xin Nien
 */
@ControllerAuthority(check=true)
@Controller("bsc.web.controller.HistoryItemScoreReportContentQueryAction")
@Scope
public class HistoryItemScoreReportContentQueryAction extends BaseJsonAction {
    private static final long serialVersionUID = 3711570676385444183L;
    protected Logger logger=Logger.getLogger(HistoryItemScoreReportContentQueryAction.class);
    private IOrganizationService<OrganizationVO, BbOrganization, String> organizationService;
    private IEmployeeService<EmployeeVO, BbEmployee, String> employeeService;
    // Fields below are serialized into the JSON response via the getters.
    private String message = "";
    private String success = IS_NO;
    private List<Map<String, Object>> chartData;       // line-chart series
    private List<String> chartCategories;              // line-chart x-axis labels
    private String subtitle = "";
    private String newDateVal = "";                    // effective query date, yyyyMMdd
    private String startDate = "";
    private String endDate = "";
    private List<String> xAxisCategories;              // heat-map axes
    private List<String> yAxisCategories;
    private List<List<Object>> heatMapChartData;

    public HistoryItemScoreReportContentQueryAction() {
        super();
    }

    @JSON(serialize=false)
    public IOrganizationService<OrganizationVO, BbOrganization, String> getOrganizationService() {
        return organizationService;
    }

    @Autowired
    @Resource(name="bsc.service.OrganizationService")
    public void setOrganizationService(
            IOrganizationService<OrganizationVO, BbOrganization, String> organizationService) {
        this.organizationService = organizationService;
    }

    @JSON(serialize=false)
    public IEmployeeService<EmployeeVO, BbEmployee, String> getEmployeeService() {
        return employeeService;
    }

    @Autowired
    @Resource(name="bsc.service.EmployeeService")
    public void setEmployeeService(IEmployeeService<EmployeeVO, BbEmployee, String> employeeService) {
        this.employeeService = employeeService;
    }

    /**
     * Validates the request fields: item type and frequency must be selected,
     * the date must parse, and the measure-data target matching "dataFor"
     * (organization or employee) must be selected.
     *
     * @throws ControllerException when a field check fails
     */
    private void checkFields() throws ControllerException, Exception {
        this.getCheckFieldHandler()
        .add("itemType", SelectItemFieldCheckUtils.class, "Please select item type!" )
        .add("frequency", SelectItemFieldCheckUtils.class, "Please select frequency!" )
        .process().throwMessage();

        this.getCheckFieldHandler()
        .single("itemType", ( !SimpleUtils.isDate( this.getFields().get("dateVal") ) ), "Error, please refresh this page!")
        .throwMessage();

        String dataFor = this.getFields().get("dataFor");
        if ("organization".equals(dataFor)
                && this.isNoSelectId(this.getFields().get("measureDataOrganizationOid")) ) {
            super.throwMessage("measureDataOrganizationOid", "Please select organization!");
        }
        if ("employee".equals(dataFor)
                && this.isNoSelectId(this.getFields().get("measureDataEmployeeOid")) ) {
            super.throwMessage("measureDataEmployeeOid", "Please select employee!");
        }
    }

    /**
     * Loads the report content. Resolves the measure-data target (a specific
     * organization or employee, or "all"), then fills either the line-chart
     * fields or the heat-map fields depending on the requested chart type.
     */
    @SuppressWarnings("unchecked")
    private void getContent() throws ControllerException, AuthorityException, ServiceException, Exception {
        this.checkFields();
        this.newDateVal = this.getChangeDateVal();
        String frequency = this.getFields().get("frequency");
        // Defaults mean "aggregate over everything".
        String measureDataTypeName = BscConstants.MEASURE_DATA_FOR_ALL;
        String empId = BscConstants.MEASURE_DATA_EMPLOYEE_FULL;
        String orgId = BscConstants.MEASURE_DATA_ORGANIZATION_FULL;
        String measureDataOrganizationOid = this.getFields().get("measureDataOrganizationOid");
        String measureDataEmployeeOid = this.getFields().get("measureDataEmployeeOid");
        String dataFor = this.getFields().get("dataFor");
        if ("organization".equals(dataFor) && !super.isNoSelectId(measureDataOrganizationOid)) {
            OrganizationVO organization = new OrganizationVO();
            organization.setOid(measureDataOrganizationOid);
            DefaultResult<OrganizationVO> result = this.organizationService.findObjectByOid(organization);
            if ( result.getValue() == null ) {
                throw new ServiceException( result.getSystemMessage().getValue() );
            }
            organization = result.getValue();
            orgId = organization.getOrgId();
            measureDataTypeName = organization.getOrgId() + " - " + organization.getName();
        }
        if ("employee".equals(dataFor) && !super.isNoSelectId(measureDataEmployeeOid)) {
            EmployeeVO employee = new EmployeeVO();
            employee.setOid(measureDataEmployeeOid);
            DefaultResult<EmployeeVO> result = this.employeeService.findObjectByOid(employee);
            if ( result.getValue() == null ) {
                throw new ServiceException( result.getSystemMessage().getValue() );
            }
            employee = result.getValue();
            empId = employee.getEmpId();
            measureDataTypeName = employee.getEmpId() + " - " + employee.getFullName();
        }
        if ("line".equals(this.getFields().get("chartType"))) { // Line chart
            this.chartData = HistoryItemScoreReportContentQueryUtils.getLineChartData(
                    this.getFields().get("itemType"), frequency, this.newDateVal, orgId, empId);
            if (this.chartData == null || this.chartData.size() < 1) {
                super.throwMessage("itemType", SysMessageUtil.get(GreenStepSysMsgConstants.SEARCH_NO_DATA));
            }
            this.chartCategories = HistoryItemScoreReportContentQueryUtils
                    .getLineChartCategoriesFromData(this.newDateVal, this.chartData);
            this.subtitle = "Frequency: " + BscMeasureDataFrequency.getFrequencyMap(false).get(frequency) + ", "
                    + "Date range: " + this.chartCategories.get(0) + " ~ " + this.getChartCategories().get( this.getChartCategories().size()-1 ) + ", "
                    + "Measure-data for: " + measureDataTypeName;
            this.startDate = this.chartCategories.get(0);
            this.endDate = this.getChartCategories().get( this.getChartCategories().size()-1 );
            this.success = IS_YES;
        } else { // Heat map chart
            Map<String, Object> chartDataMap = HistoryItemScoreReportContentQueryUtils.getHartMapChartData(
                    this.getFields().get("itemType"), frequency, this.newDateVal, orgId, empId);
            // The util returns three entries: seriesData, xAxisCategories, yAxisCategories.
            if (chartDataMap == null || chartDataMap.size() != 3) {
                super.throwMessage("itemType", SysMessageUtil.get(GreenStepSysMsgConstants.SEARCH_NO_DATA));
            }
            this.heatMapChartData = (List<List<Object>>) chartDataMap.get("seriesData");
            this.xAxisCategories = (List<String>) chartDataMap.get("xAxisCategories");
            this.yAxisCategories = (List<String>) chartDataMap.get("yAxisCategories");
            this.subtitle = "Frequency: " + BscMeasureDataFrequency.getFrequencyMap(false).get(frequency) + ", "
                    + "Date range: " + this.yAxisCategories.get(0) + " ~ " + this.yAxisCategories.get( this.yAxisCategories.size()-1 ) + ", "
                    + "Measure-data for: " + measureDataTypeName;
            this.startDate = this.yAxisCategories.get(0);
            this.endDate = this.yAxisCategories.get( this.yAxisCategories.size()-1 );
            this.success = IS_YES;
        }
    }

    /**
     * Applies the "prev"/"next" navigation to the requested date (yyyyMMdd).
     * "next" is clamped so it never moves past today's date.
     *
     * @return the adjusted date string in yyyyMMdd form
     */
    private String getChangeDateVal() throws ParseException {
        String dateVal = this.getFields().get("dateVal");
        String dateChangeStatus = this.getFields().get("dateChangeStatus");
        String sysNowDate = super.getNowDate().replaceAll("/", "");
        Date dateValObj = DateUtils.parseDate(dateVal, new String[]{"yyyyMMdd"});
        if ("next".equals(dateChangeStatus)) {
            if (Integer.parseInt(dateVal) >= Integer.parseInt(sysNowDate)) {
                dateVal = sysNowDate;
            } else {
                Date nexDate = DateUtils.addDays(dateValObj, 1);
                dateVal = DateFormatUtils.format(nexDate, "yyyyMMdd");
            }
        }
        if ("prev".equals(dateChangeStatus)) {
            Date nexDate = DateUtils.addDays(dateValObj, -1);
            dateVal = DateFormatUtils.format(nexDate, "yyyyMMdd");
        }
        return dateVal;
    }

    /**
     * bsc.historyItemScoreReportContentQueryAction.action
     *
     * @return
     * @throws Exception
     */
    @ControllerMethodAuthority(programId="BSC_PROG003D0008Q")
    public String execute() throws Exception {
        try {
            if (!this.allowJob()) {
                this.message = this.getNoAllowMessage();
                return SUCCESS;
            }
            this.getContent();
        } catch (AuthorityException | ControllerException | ServiceException e) {
            // getMessage() already returns a String; the former
            // ".toString()" call was redundant and threw an NPE whenever the
            // exception carried a null message.
            this.message = e.getMessage();
        } catch (Exception e) {
            this.message = this.logException(e);
            this.success = IS_EXCEPTION;
        }
        return SUCCESS;
    }

    @JSON
    @Override
    public String getLogin() {
        return super.isAccountLogin();
    }

    @JSON
    @Override
    public String getIsAuthorize() {
        return super.isActionAuthorize();
    }

    @JSON
    @Override
    public String getMessage() {
        return this.message;
    }

    @JSON
    @Override
    public String getSuccess() {
        return this.success;
    }

    @JSON
    @Override
    public List<String> getFieldsId() {
        return this.fieldsId;
    }

    @JSON
    @Override
    public Map<String, String> getFieldsMessage() {
        return this.fieldsMessage;
    }

    @JSON
    public List<Map<String, Object>> getChartData() {
        return chartData;
    }

    @JSON
    public List<String> getChartCategories() {
        return chartCategories;
    }

    @JSON
    public String getSubtitle() {
        return subtitle;
    }

    @JSON
    public String getNewDateVal() {
        return newDateVal;
    }

    @JSON
    public String getStartDate() {
        return startDate;
    }

    @JSON
    public String getEndDate() {
        return endDate;
    }

    @JSON
    public List<String> getxAxisCategories() {
        return xAxisCategories;
    }

    @JSON
    public List<String> getyAxisCategories() {
        return yAxisCategories;
    }

    @JSON
    public List<List<Object>> getHeatMapChartData() {
        return heatMapChartData;
    }

}
| |
package com.skiwi.ogameplanner;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import static com.skiwi.ogameplanner.Building.*;
import static com.skiwi.ogameplanner.Resource.*;
/**
* @author Frank van Heeswijk
*/
/**
 * Mutable snapshot of a player's state — resources, building levels,
 * research levels and ship counts — at a point in game time, used to explore
 * sequences of actions.
 *
 * @author Frank van Heeswijk
 */
public class PlayerSnapshot {
    private final ServerSettings serverSettings;
    private final int averagePlanetTemperature;

    // Actions that produced this snapshot, in the order they were performed.
    private final List<Action> performedActions = new ArrayList<>();

    // Elapsed game time in seconds (addTimeCost divides hourly production by
    // 3600 before multiplying by this cost's time).
    private long time = 0;

    private final EnumMap<Resource, Double> resources = new EnumMap<>(Resource.class);
    private final EnumMap<Building, Integer> buildings = new EnumMap<>(Building.class);
    private final EnumMap<Research, Integer> researches = new EnumMap<>(Research.class);
    private final EnumMap<Ship, Integer> ships = new EnumMap<>(Ship.class);

    // At most one building upgrade can be in progress at a time; null when idle.
    private Building buildingInProgress = null;

    /**
     * Creates a fresh snapshot with zeroed resources, buildings, researches
     * and ships.
     *
     * @param serverSettings           server configuration for this snapshot
     * @param averagePlanetTemperature average planet temperature (affects production elsewhere)
     */
    public PlayerSnapshot(ServerSettings serverSettings, int averagePlanetTemperature) {
        this.serverSettings = serverSettings;
        this.averagePlanetTemperature = averagePlanetTemperature;
        for (Resource resource : Resource.values()) {
            resources.put(resource, 0d);
        }
        for (Building building : Building.values()) {
            buildings.put(building, 0);
        }
        for (Research research : Research.values()) {
            researches.put(research, 0);
        }
        for (Ship ship : Ship.values()) {
            ships.put(ship, 0);
        }
    }

    public ServerSettings getServerSettings() {
        return serverSettings;
    }

    public int getAveragePlanetTemperature() {
        return averagePlanetTemperature;
    }

    public List<Action> getPerformedActions() {
        return performedActions;
    }

    public long getTime() {
        return time;
    }

    public double getResourceAmount(Resource resource) {
        return resources.getOrDefault(resource, 0d);
    }

    public int getBuildingLevel(Building building) {
        return buildings.getOrDefault(building, 0);
    }

    public int getResearchLevel(Research research) {
        return researches.getOrDefault(research, 0);
    }

    public int getShipAmount(Ship ship) {
        return ships.getOrDefault(ship, 0);
    }

    // The initialize* methods overlay starting values onto the zeroed maps.

    public void initializeResources(Map<Resource, Double> resources) {
        this.resources.putAll(resources);
    }

    public void initializeBuildings(Map<Building, Integer> buildings) {
        this.buildings.putAll(buildings);
    }

    public void initializeResearches(Map<Research, Integer> researches) {
        this.researches.putAll(researches);
    }

    public void initializeShips(Map<Ship, Integer> ships) {
        this.ships.putAll(ships);
    }

    /**
     * Generates all actions available from this snapshot (currently only
     * building-related actions).
     */
    public List<Action> generateActions() {
        List<Action> actions = new ArrayList<>();
        addBuildingActions(actions);
        //TODO add actions for other things too
        return actions;
    }

    // When idle: offer a start-upgrade (if affordable) or wait-for-resources
    // action per buildable building. When busy: offer only finishing the
    // upgrade that is in progress.
    private void addBuildingActions(List<Action> actions) {
        if (buildingInProgress == null) {
            buildings.forEach((building, level) -> {
                if (building.satisfiesRequirements(this)) {
                    ActionCost upgradeCost = building.getUpgradeCost(this);
                    if (satisfiesResourcesCost(upgradeCost)) {
                        actions.add(new StartUpgradeBuildingAction(building));
                    }
                    else {
                        actions.add(new WaitForBuildingAction(building));
                    }
                }
            });
        }
        else {
            //TODO generate all possible actions for that building (including DM usage)
            Action finishBuildingAction = new FinishUpgradeBuildingAction(buildingInProgress);
            if (finishBuildingAction.isAllowed(this)) {
                actions.add(finishBuildingAction);
            }
        }
    }

    /**
     * Returns a copy of this snapshot with {@code performedAction} appended
     * to the action history.
     *
     * NOTE(review): buildingInProgress is NOT copied into the new snapshot,
     * so a copy taken mid-upgrade forgets the upgrade — confirm this is
     * intended.
     */
    public PlayerSnapshot copyForNewAction(Action performedAction) {
        PlayerSnapshot playerSnapshot = new PlayerSnapshot(serverSettings, averagePlanetTemperature);
        playerSnapshot.performedActions.addAll(performedActions);
        playerSnapshot.performedActions.add(performedAction);
        playerSnapshot.time = time; //TODO maybe related ActionCost to Action and add it at this point?
        playerSnapshot.resources.putAll(resources);
        playerSnapshot.buildings.putAll(buildings);
        playerSnapshot.researches.putAll(researches);
        playerSnapshot.ships.putAll(ships);
        return playerSnapshot;
    }

    /**
     * Tells whether the cost can be paid from current (floored) resources and
     * whether each resource cost fits inside the corresponding storage
     * capacity at all.
     */
    public boolean satisfiesResourcesCost(ActionCost actionCost) {
        if ((int)Math.floor(getResourceAmount(METAL)) < actionCost.getMetal()) {
            return false;
        }
        if ((int)Math.floor(getResourceAmount(CRYSTAL)) < actionCost.getCrystal()) {
            return false;
        }
        if ((int)Math.floor(getResourceAmount(DEUTERIUM)) < actionCost.getDeuterium()) {
            return false;
        }
        if ((int)Math.floor(getResourceAmount(DARK_MATTER)) < actionCost.getDarkMatter()) {
            return false;
        }
        // A cost larger than storage capacity can never be accumulated.
        if (actionCost.getMetal() > METAL_STORAGE.getStorageCapacity(this)) {
            return false;
        }
        if (actionCost.getCrystal() > CRYSTAL_STORAGE.getStorageCapacity(this)) {
            return false;
        }
        if (actionCost.getDeuterium() > DEUTERIUM_TANK.getStorageCapacity(this)) {
            return false;
        }
        return true;
    }

    // Applies both the time and the resource components of a cost.
    private void addCost(ActionCost actionCost) {
        addTimeCost(actionCost);
        addResourcesCost(actionCost);
    }

    // Advances game time and accrues mine production for the elapsed
    // interval, capped at each resource's storage capacity.
    private void addTimeCost(ActionCost actionCost) {
        time += actionCost.getTime();
        double metalProduction = METAL_MINE.getHourlyResourceProduction(this) / 3600d;
        double crystalProduction = CRYSTAL_MINE.getHourlyResourceProduction(this) / 3600d;
        double deuteriumProduction = DEUTERIUM_SYNTHESIZER.getHourlyResourceProduction(this) / 3600d;
        //TODO create better system to add resources
        resources.merge(METAL, metalProduction * actionCost.getTime(), (amount, production) -> Math.min(amount + production, METAL_STORAGE.getStorageCapacity(this)));
        resources.merge(CRYSTAL, crystalProduction * actionCost.getTime(), (amount, production) -> Math.min(amount + production, CRYSTAL_STORAGE.getStorageCapacity(this)));
        resources.merge(DEUTERIUM, deuteriumProduction * actionCost.getTime(), (amount, production) -> Math.min(amount + production, DEUTERIUM_TANK.getStorageCapacity(this)));
    }

    // Deducts the resource components of a cost (the merge functions subtract).
    private void addResourcesCost(ActionCost actionCost) {
        resources.merge(METAL, actionCost.getMetal() * 1d, (amount, cost) -> amount - cost);
        resources.merge(CRYSTAL, actionCost.getCrystal() * 1d, (amount, cost) -> amount - cost);
        resources.merge(DEUTERIUM, actionCost.getDeuterium() * 1d, (amount, cost) -> amount - cost);
        resources.merge(DARK_MATTER, actionCost.getDarkMatter() * 1d, (amount, cost) -> amount - cost);
    }

    /** Passes time without paying resources (production still accrues). */
    public void wait(ActionCost actionCost) {
        addTimeCost(actionCost);
    }

    /** Pays the upgrade cost and marks the building as under construction. */
    public void startUpgradeBuilding(Building building) {
        addResourcesCost(building.getUpgradeCost(this));
        buildingInProgress = building;
    }

    /** Advances time by the build duration and raises the building level. */
    public void finishUpgradeBuilding(Building building) {
        addTimeCost(building.getUpgradeCost(this));
        buildings.merge(building, 1, (currentLevel, newLevels) -> currentLevel + newLevels);
        buildingInProgress = null;
    }

    public boolean isCurrentlyUpgradingBuilding(Building building) {
        return (buildingInProgress == building);
    }

    //temp method
    // Identifies which building (producer or storage) prevents starting or
    // waiting for the given building's upgrade.
    // NOTE(review): the wait-hour formulas ADD the upgrade cost to the
    // current amount instead of subtracting the missing remainder — confirm
    // this is intentional (only the infinity checks depend on it here).
    public Building buildingThatBlocksStartingOrWaitingForBuilding(Building building) {
        //TODO does not handle requirements yet (and never will)
        double metalHourlyProduction = METAL_MINE.getHourlyResourceProduction(this);
        double crystalHourlyProduction = CRYSTAL_MINE.getHourlyResourceProduction(this);
        double deuteriumHourlyProduction = DEUTERIUM_SYNTHESIZER.getHourlyResourceProduction(this);
        ActionCost upgradeCost = building.getUpgradeCost(this);
        double metalWaitHours = (getResourceAmount(METAL) + upgradeCost.getMetal()) / metalHourlyProduction;
        double crystalWaitHours = (getResourceAmount(CRYSTAL) + upgradeCost.getCrystal()) / crystalHourlyProduction;
        double deuteriumWaitHours = (getResourceAmount(DEUTERIUM) + upgradeCost.getDeuterium()) / deuteriumHourlyProduction;
        if (Double.isInfinite(metalWaitHours) || Double.isInfinite(crystalWaitHours)) {
            throw new IllegalStateException("this should not happen");
        }
        if (Double.isInfinite(deuteriumWaitHours)) {
            // No deuterium production at all: either the synthesizer is
            // missing or it has no power.
            if (getBuildingLevel(DEUTERIUM_SYNTHESIZER) == 0) {
                return DEUTERIUM_SYNTHESIZER;
            }
            else {
                return SOLAR_PLANT;
            }
        }
        // Otherwise a storage building is too small to ever hold the cost.
        if (upgradeCost.getMetal() > METAL_STORAGE.getStorageCapacity(this)) {
            return METAL_STORAGE;
        }
        if (upgradeCost.getCrystal() > CRYSTAL_STORAGE.getStorageCapacity(this)) {
            return CRYSTAL_STORAGE;
        }
        if (upgradeCost.getDeuterium() > DEUTERIUM_TANK.getStorageCapacity(this)) {
            return DEUTERIUM_TANK;
        }
        throw new IllegalStateException("this should not be called if starting or waiting for the building was allowed in the first place");
    }
}
| |
/*
* Copyright 2016 SimplifyOps, Inc. (http://simplifyops.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dtolabs.rundeck.core.authorization.providers;
import com.dtolabs.rundeck.core.authorization.Attribute;
import com.dtolabs.rundeck.core.authorization.Validation;
import com.dtolabs.rundeck.core.authorization.ValidationSet;
import com.dtolabs.rundeck.core.authorization.providers.yaml.model.ACLPolicyDoc;
import org.yaml.snakeyaml.Yaml;
import java.io.*;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.*;
/**
* Created by greg on 7/17/15.
*/
public class YamlProvider {
    // Active factory used for all parsing/validation; replaceable via setFactory() (e.g. for tests).
    private static PolicyCollectionFactory factory;
    // Factory implementation used unless overridden by the system property below.
    public static final Class<?> DEFAULT_FACTORY = YamlPolicyFactoryV2.class;
    // System property naming an alternate PolicyCollectionFactory implementation class.
    public static final String FACTORY_CLASS_PROPERTY = YamlProvider.class.getName() + ".factoryClass";
    // Resolve and instantiate the factory once at class-load time. Misconfiguration
    // (unknown class, wrong type, no usable no-arg constructor) fails fast with a
    // RuntimeException, which surfaces to callers as an ExceptionInInitializerError.
    static {
        String prop = System.getProperty(FACTORY_CLASS_PROPERTY);
        Class<?> factoryClass = DEFAULT_FACTORY;
        if (null != prop) {
            try {
                factoryClass = Class.forName(prop);
                if (!PolicyCollectionFactory.class.isAssignableFrom(factoryClass)) {
                    throw new RuntimeException("Cannot use class " + prop + " as PolicyCollectionFactory");
                }
            } catch (ClassNotFoundException e) {
                throw new RuntimeException(e);
            }
        }
        try {
            // The configured class must expose a (possibly non-public) no-arg constructor.
            Constructor<?> declaredConstructor = factoryClass.getDeclaredConstructor();
            Object o = declaredConstructor.newInstance();
            factory = (PolicyCollectionFactory) o;
        } catch (NoSuchMethodException | InstantiationException | InvocationTargetException | IllegalAccessException
                e) {
            throw new RuntimeException(e);
        }
    }
    // Accepts only ACL policy files, identified by the ".aclpolicy" extension.
    public static final FilenameFilter filenameFilter = new FilenameFilter() {
        @Override
        public boolean accept(final File dir, final String name) {
            return name.endsWith(".aclpolicy");
        }
    };
    /**
     * Validate a single source without forcing a context.
     *
     * @param source     yaml source to validate
     * @param validation collector for validation results
     *
     * @return validation outcome
     */
    public static Validation validate(final CacheableYamlSource source, final ValidationSet validation) {
        return validate(source, null, validation);
    }
    /**
     * Validate a single source, optionally requiring a context for all policies.
     *
     * @param source        yaml source to validate
     * @param forcedContext context to require for all policies, or null for none
     * @param validation    collector for validation results
     *
     * @return validation outcome
     */
    public static Validation validate(
            final CacheableYamlSource source,
            final Set<Attribute> forcedContext, final ValidationSet validation
    ) {
        return validate(Collections.singletonList(source), forcedContext, validation);
    }
    /**
     * Validate multiple sources, optionally requiring a context for all policies.
     *
     * @param sources       yaml sources to validate
     * @param forcedContext context to require for all policies, or null for none
     * @param validation    collector for validation results
     *
     * @return validation outcome
     */
    public static Validation validate(
            final Iterable<CacheableYamlSource> sources,
            final Set<Attribute> forcedContext, final ValidationSet validation
    ) {
        return validate(validation, sources, forcedContext);
    }
    /**
     * Validate multiple sources without forcing a context.
     *
     * @param sources    yaml sources to validate
     * @param validation collector for validation results
     *
     * @return validation outcome
     */
    public static Validation validate(final Iterable<CacheableYamlSource> sources, final ValidationSet validation) {
        return validate(sources, null, validation);
    }
    // All public validate() overloads funnel into this delegation to the configured factory.
    private static Validation validate(
            ValidationSet validation,
            final Iterable<CacheableYamlSource> sources,
            final Set<Attribute> forcedContext
    )
    {
        return getFactory().validate(validation, sources, forcedContext);
    }
    /** @return the currently configured policy collection factory */
    public static PolicyCollectionFactory getFactory() {
        return factory;
    }
    /** Replace the policy collection factory (e.g. for testing). Not synchronized. */
    public static void setFactory(PolicyCollectionFactory factory) {
        YamlProvider.factory = factory;
    }
    /**
     * Load policies from a source
     *
     * @param source source
     *
     * @return policies
     *
     * @throws IOException if reading the source fails
     */
    public static PolicyCollection policiesFromSource(final YamlSource source) throws IOException {
        return policiesFromSource(source, null, null);
    }
    /**
     * Load policies from a source
     *
     * @param source source
     * @param forcedContext Context to require for all policies parsed
     * @param validation collector for validation results, or null
     *
     * @return policies
     *
     * @throws IOException if reading the source fails
     */
    public static PolicyCollection policiesFromSource(
            final YamlSource source,
            final Set<Attribute> forcedContext,
            final ValidationSet validation
    )
            throws IOException
    {
        // NOTE(review): uses the field directly rather than getFactory(); equivalent today
        // since both read the same static field.
        return factory.policiesFromSource(source, forcedContext, validation);
    }
    /**
     * Load policies from a source
     *
     * @param source source
     * @param forcedContext Context to require for all policies parsed
     *
     * @return policies
     *
     * @throws IOException if reading the source fails
     */
    public static PolicyCollection policiesFromSource(
            final YamlSource source,
            final Set<Attribute> forcedContext
    )
            throws IOException
    {
        return policiesFromSource(source, forcedContext, null);
    }
    /**
     * Load policies from a file
     *
     * @param source source
     *
     * @return policies
     *
     * @throws IOException if reading the file fails
     */
    public static PolicyCollection policiesFromFile(final File source) throws IOException {
        return policiesFromSource(sourceFromFile(source, null));
    }
    /**
     * Wrap a file as a cacheable yaml source.
     *
     * @param file          policy file
     * @param validationSet collector for validation results, or null
     *
     * @return source backed by the file
     */
    public static CacheableYamlSource sourceFromFile(final File file, final ValidationSet validationSet) {
        return new CacheableYamlFileSource(file, validationSet);
    }
    /**
     * List all ".aclpolicy" files in a directory as sources.
     *
     * @param dir directory to scan (must be a directory)
     *
     * @return sources for each matching file
     */
    public static Iterable<CacheableYamlSource> asSources(final File dir) {
        if (!dir.isDirectory()) {
            throw new IllegalArgumentException("dir should be a directory");
        }
        return asSources(dir.listFiles(filenameFilter));
    }
    /**
     * Wrap an array of files as sources. A null array yields an empty result.
     *
     * @param files files to wrap, may be null
     *
     * @return sources for each file
     */
    public static Iterable<CacheableYamlSource> asSources(final File[] files) {
        ArrayList<CacheableYamlSource> list = new ArrayList<>();
        if (null != files) {
            for (File file : files) {
                list.add(YamlProvider.sourceFromFile(file, null));
            }
        }
        return list;
    }
    /**
     * Source from a stream
     *
     * @param identity identity
     * @param content yaml string
     * @param modified date the content was last modified, for caching purposes
     * @param validation collector for validation results, or null
     *
     * @return source
     */
    public static CacheableYamlSource sourceFromString(
            final String identity,
            final String content,
            final Date modified,
            final ValidationSet validation
    )
    {
        // In-memory source: always valid, nothing to close, content is re-parsed on each loadAll.
        return new CacheableYamlSource() {
            @Override
            public boolean isValid() {
                return true;
            }
            @Override
            public Date getLastModified() {
                return modified;
            }
            @Override
            public String getIdentity() {
                return identity;
            }
            @Override
            public Iterable<ACLPolicyDoc> loadAll(final Yaml yaml) throws IOException {
                return YamlParsePolicy.documentIterable(yaml.loadAll(content).iterator(), validation, identity);
            }
            @Override
            public ValidationSet getValidationSet() {
                return validation;
            }
            @Override
            public void close() throws IOException {
            }
        };
    }
    /**
     * Source from a stream
     *
     * @param identity identity
     * @param stream stream
     * @param modified date the content was last modified, for caching purposes
     *
     * @param validationSet collector for validation results, or null
     * @return source
     */
    public static CacheableYamlSource sourceFromStream(
            final String identity,
            final InputStream stream,
            final Date modified,
            final ValidationSet validationSet
    )
    {
        return new CacheableYamlStreamSource(stream, identity, modified, validationSet);
    }
    /** @return a provider that serves all ".aclpolicy" files under the given directory */
    public static SourceProvider getDirProvider(final File rootDir) {
        return new DirProvider(rootDir);
    }
    /** @return a provider that serves exactly one policy file */
    public static SourceProvider getFileProvider(final File singleFile) {
        return new FileProvider(singleFile);
    }
    /**
     * File-backed source. Identity is the absolute path; equality/hashCode are
     * based solely on the underlying File.
     */
    private static class CacheableYamlFileSource implements CacheableYamlSource {
        private final File file;
        // Opened lazily on first loadAll() and held until close().
        // NOTE(review): a second loadAll() call reuses the already-consumed stream — confirm
        // callers only invoke loadAll() once per source.
        FileInputStream fileInputStream;
        private final ValidationSet validationSet;
        public CacheableYamlFileSource(final File file, final ValidationSet validationSet) {
            this.file = file;
            this.validationSet = validationSet;
        }
        @Override
        public Iterable<ACLPolicyDoc> loadAll(final Yaml yaml) throws IOException
        {
            if (null == fileInputStream) {
                fileInputStream = new FileInputStream(file);
            }
            return YamlParsePolicy.documentIterable(
                    yaml.loadAll(fileInputStream).iterator(),
                    validationSet,
                    file.getName()
            );
        }
        @Override
        public String getIdentity() {
            return file.getAbsolutePath();
        }
        @Override
        public void close() throws IOException {
            if (null != fileInputStream) {
                fileInputStream.close();
            }
        }
        @Override
        public boolean isValid() {
            // Valid as long as the backing file still exists.
            return file.exists();
        }
        @Override
        public Date getLastModified() {
            return new Date(file.lastModified());
        }
        @Override
        public boolean equals(final Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            final CacheableYamlFileSource that = (CacheableYamlFileSource) o;
            return file.equals(that.file);
        }
        @Override
        public int hashCode() {
            return file.hashCode();
        }
        @Override
        public ValidationSet getValidationSet() {
            return validationSet;
        }
    }
    /**
     * Stream-backed source with an explicit identity and modification date.
     * Equality/hashCode are based solely on the identity string.
     */
    private static class CacheableYamlStreamSource implements CacheableYamlSource {
        private final InputStream stream;
        private final String identity;
        private final Date modified;
        private final ValidationSet validationSet;
        public CacheableYamlStreamSource(
                final InputStream stream,
                final String identity,
                final Date modified,
                final ValidationSet validationSet
        ) {
            this.stream = stream;
            this.identity = identity;
            this.modified = modified;
            this.validationSet = validationSet;
        }
        @Override
        public Iterable<ACLPolicyDoc> loadAll(final Yaml yaml) throws IOException {
            return YamlParsePolicy.documentIterable(yaml.loadAll(stream).iterator(), validationSet, identity);
        }
        @Override
        public String getIdentity() {
            return identity;
        }
        @Override
        public void close() throws IOException {
            stream.close();
        }
        @Override
        public boolean isValid() {
            try {
                // available() never returns a negative value, so this effectively checks that
                // the stream is non-null and still open (available() throws once closed).
                return null != stream && stream.available() > -1;
            } catch (IOException e) {
                return false;
            }
        }
        @Override
        public Date getLastModified() {
            return modified;
        }
        @Override
        public boolean equals(final Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            final CacheableYamlStreamSource that = (CacheableYamlStreamSource) o;
            return identity.equals(that.identity);
        }
        @Override
        public int hashCode() {
            return identity.hashCode();
        }
        @Override
        public ValidationSet getValidationSet() {
            return validationSet;
        }
    }
    /**
     * Serves ".aclpolicy" files from a directory, re-listing the directory at most
     * once per PoliciesCache.DIR_LIST_CHECK_DELAY to avoid repeated filesystem scans.
     */
    static class DirProvider implements SourceProvider {
        private File rootDir;
        public DirProvider(final File rootDir) {
            this.rootDir = rootDir;
        }
        // Timestamp of the last directory listing (millis); 0 forces the first listing.
        long lastDirListCheckTime = 0;
        private File[] lastDirList;
        private File[] listDirFiles() {
            // Refresh the cached listing only after the configured delay has elapsed.
            if (System.currentTimeMillis() - lastDirListCheckTime > PoliciesCache.DIR_LIST_CHECK_DELAY) {
                doListDir();
            }
            return lastDirList;
        }
        private void doListDir() {
            lastDirList = rootDir.listFiles(filenameFilter);
            lastDirListCheckTime = System.currentTimeMillis();
        }
        public Iterator<CacheableYamlSource> getSourceIterator() {
            return asSources(listDirFiles()).iterator();
        }
    }
    /** Serves a single fixed policy file as the only source. */
    static class FileProvider implements SourceProvider {
        private File file;
        public FileProvider(final File file) {
            this.file = file;
        }
        @Override
        public Iterator<CacheableYamlSource> getSourceIterator() {
            return asSources(new File[]{file}).iterator();
        }
    }
}
| |
package ij.io;
import java.awt.*;
import java.awt.image.*;
import java.io.*;
import java.util.*;
import ij.*;
import ij.gui.*;
import ij.process.*;
import ij.util.*;
import ij.plugin.frame.Recorder;
import ij.plugin.*;
import ij.measure.Calibration;
/** This is a dialog box used to import raw 8, 16, 24 and 32-bit images. */
public class ImportDialog {
    private String fileName;
    private String directory;
    // Preference keys used to persist the last-used import settings.
    static final String TYPE = "raw.type";
    static final String WIDTH = "raw.width";
    static final String HEIGHT = "raw.height";
    static final String OFFSET = "raw.offset";
    static final String N = "raw.n";
    static final String GAP = "raw.gap";
    static final String OPTIONS = "raw.options";
    // Bit flags packed into the OPTIONS preference value.
    static final int WHITE_IS_ZERO = 1;
    static final int INTEL_BYTE_ORDER = 2;
    static final int OPEN_ALL = 4;
    // default settings (static = shared across dialogs, restored from preferences)
    private static int sChoiceSelection = Prefs.getInt(TYPE,0);
    private static int sWidth = Prefs.getInt(WIDTH,512);
    private static int sHeight = Prefs.getInt(HEIGHT,512);
    private static long sOffset = Prefs.getInt(OFFSET,0);
    private static int sNImages = Prefs.getInt(N,1);
    private static long sGapBetweenImages = Prefs.getInt(GAP,0);
    private static boolean sWhiteIsZero;
    private static boolean sIntelByteOrder;
    private static boolean sVirtual;
    // Per-dialog working copies, seeded from the static defaults above.
    private int choiceSelection = sChoiceSelection;
    private int width = sWidth;
    private int height = sHeight;
    private long offset = sOffset;
    private int nImages = sNImages;
    private long gapBetweenImages = sGapBetweenImages;
    private boolean whiteIsZero = sWhiteIsZero;
    private boolean intelByteOrder = sIntelByteOrder;
    private boolean virtual = sVirtual;
    private static int options;
    private static FileInfo lastFileInfo;
    private boolean openAll;
    // Display names for the supported raw pixel formats; index order is significant
    // (see getFileInfo() and guessFormat()).
    private static String[] types = {"8-bit", "16-bit Signed", "16-bit Unsigned",
        "32-bit Signed", "32-bit Unsigned", "32-bit Real", "64-bit Real", "24-bit RGB",
        "24-bit RGB Planar", "24-bit BGR", "24-bit Integer", "32-bit ARGB", "32-bit ABGR", "1-bit Bitmap"};
    // Unpack the boolean options stored as bit flags in the OPTIONS preference.
    static {
        options = Prefs.getInt(OPTIONS, 0);
        sWhiteIsZero = (options&WHITE_IS_ZERO)!=0;
        sIntelByteOrder = (options&INTEL_BYTE_ORDER)!=0;
    }
    /** Creates a dialog for importing the specified raw file. */
    public ImportDialog(String fileName, String directory) {
        this.fileName = fileName;
        this.directory = directory;
        IJ.showStatus("Importing: " + fileName);
    }
    /** Creates a dialog with no file; fileName and directory remain null. */
    public ImportDialog() {
    }
    /**
     * Shows the import dialog, copies the user's entries into this object's fields
     * and (outside of a macro) into the shared static defaults.
     * Returns false if the dialog was canceled.
     */
    boolean showDialog() {
        boolean macro = Macro.getOptions()!=null;
        if (macro) {
            // In a macro, ignore saved defaults and start from neutral values;
            // the macro options supply the actual settings.
            width = height = 512;
            offset = gapBetweenImages = 0;
            nImages = 1;
            whiteIsZero = intelByteOrder = virtual = false;
        }
        if (choiceSelection>=types.length)
            choiceSelection = 0;
        // Pre-fill width/height/nImages from patterns like "name_640x480x10" in the file name.
        getDimensionsFromName(fileName);
        GenericDialog gd = new GenericDialog("Import>Raw...");
        gd.addChoice("Image type:", types, types[choiceSelection]);
        gd.addNumericField("Width:", width, 0, 8, "pixels");
        gd.addNumericField("Height:", height, 0, 8, "pixels");
        gd.addNumericField("Offset to first image:", offset, 0, 8, "bytes");
        gd.addNumericField("Number of images:", nImages, 0, 8, null);
        gd.addNumericField("Gap between images:", gapBetweenImages, 0, 8, "bytes");
        gd.addCheckbox("White is zero", whiteIsZero);
        gd.addCheckbox("Little-endian byte order", intelByteOrder);
        gd.addCheckbox("Open all files in folder", openAll);
        gd.addCheckbox("Use virtual stack", virtual);
        gd.addHelp(IJ.URL+"/docs/menus/file.html#raw");
        gd.showDialog();
        if (gd.wasCanceled())
            return false;
        // getNext* calls must be made in the same order as the add* calls above.
        choiceSelection = gd.getNextChoiceIndex();
        width = (int)gd.getNextNumber();
        height = (int)gd.getNextNumber();
        gd.setSmartRecording(offset==0);
        offset = (long)gd.getNextNumber();
        gd.setSmartRecording(nImages==1);
        nImages = (int)gd.getNextNumber();
        gd.setSmartRecording(gapBetweenImages==0);
        gapBetweenImages = (long)gd.getNextNumber();
        gd.setSmartRecording(false);
        whiteIsZero = gd.getNextBoolean();
        intelByteOrder = gd.getNextBoolean();
        openAll = gd.getNextBoolean();
        virtual = gd.getNextBoolean();
        IJ.register(ImportDialog.class);
        if (!macro) {
            // Persist this dialog's choices as the new defaults (saved on exit
            // via savePreferences()).
            sChoiceSelection = choiceSelection;
            sWidth = width;
            sHeight = height;
            sOffset = offset;
            sNImages = nImages;
            sGapBetweenImages = gapBetweenImages;
            sWhiteIsZero = whiteIsZero;
            sIntelByteOrder = intelByteOrder;
            sVirtual = virtual;
        }
        return true;
    }
    /** Opens all the images in the directory, combining them into one stack.
        Files starting with "." are skipped; unreadable files are logged and skipped. */
    void openAll(String[] list, FileInfo fi) {
        FolderOpener fo = new FolderOpener();
        list = fo.trimFileList(list);
        list = fo.sortFileList(list);
        if (list==null) return;
        ImageStack stack=null;
        ImagePlus imp=null;
        // Track the global display range across all opened slices.
        double min = Double.MAX_VALUE;
        double max = -Double.MAX_VALUE;
        int digits = 0;
        for (int i=0; i<list.length; i++) {
            if (list[i].startsWith("."))
                continue;
            fi.fileName = list[i];
            imp = new FileOpener(fi).openImage();
            if (imp==null)
                IJ.log(list[i] + ": unable to open");
            else {
                if (stack==null)
                    stack = imp.createEmptyStack();
                try {
                    ImageStack stack2 = imp.getStack();
                    int slices = stack2.getSize();
                    // Choose zero-pad width for slice labels once, from the first multi-slice file.
                    if (digits==0) {
                        digits = 2;
                        if (slices>99) digits=3;
                        if (slices>999) digits=4;
                        if (slices>9999) digits=5;
                    }
                    for (int n=1; n<=slices; n++) {
                        ImageProcessor ip = stack2.getProcessor(n);
                        if (ip.getMin()<min)
                            min = ip.getMin();
                        if (ip.getMax()>max)
                            max = ip.getMax();
                        String label = list[i];
                        if (slices>1) label += "-" + IJ.pad(n,digits);
                        stack.addSlice(label, ip);
                    }
                } catch(OutOfMemoryError e) {
                    // Keep what was loaded so far and stop adding slices.
                    IJ.outOfMemory("OpenAll");
                    stack.trim();
                    break;
                }
                IJ.showStatus((stack.size()+1) + ": " + list[i]);
            }
        }
        String dir = Recorder.fixPath(fi.directory);
        Recorder.recordCall(fi.getCode()+"imp = Raw.openAll(\""+ dir+"\", fi);");
        if (stack!=null) {
            imp = new ImagePlus("Imported Stack", stack);
            if (imp.getBitDepth()==16 || imp.getBitDepth()==32)
                imp.getProcessor().setMinAndMax(min, max);
            Calibration cal = imp.getCalibration();
            if (fi.fileType==FileInfo.GRAY16_SIGNED)
                cal.setSigned16BitCalibration();
            imp.show();
        }
    }
    /** Displays the dialog and opens the specified image or images.
        Does nothing if the dialog is canceled. */
    public void openImage() {
        FileInfo fi = getFileInfo();
        if (fi==null)
            return;
        if (openAll) {
            if (virtual) {
                // Open the whole folder lazily as a virtual stack.
                ImagePlus imp = Raw.openAllVirtual(directory, fi);
                String dir = Recorder.fixPath(directory);
                Recorder.recordCall(fi.getCode()+"imp = Raw.openAllVirtual(\""+dir+"\", fi);");
                if (imp!=null) {
                    // Briefly show the middle slice, then rewind to the first.
                    imp.setSlice(imp.getStackSize()/2);
                    imp.show();
                    imp.setSlice(1);
                }
                return;
            }
            String[] list = new File(directory).list();
            if (list==null) return;
            openAll(list, fi);
        } else if (virtual)
            // FileInfoVirtualStack displays itself as a side effect of construction.
            new FileInfoVirtualStack(fi);
        else {
            FileOpener fo = new FileOpener(fi);
            ImagePlus imp = fo.openImage();
            String filePath = fi.getFilePath();
            filePath = Recorder.fixPath(filePath);
            Recorder.recordCall(fi.getCode()+"imp = Raw.open(\""+filePath+"\", fi);");
            if (imp!=null) {
                imp.show();
                int n = imp.getStackSize();
                if (n>1) {
                    // For stacks, set the display range from the middle slice.
                    imp.setSlice(n/2);
                    ImageProcessor ip = imp.getProcessor();
                    ip.resetMinAndMax();
                    imp.setDisplayRange(ip.getMin(),ip.getMax());
                }
            } else
                IJ.error("File>Import>Raw", "File not found: "+filePath);
        }
    }
    /** Displays the dialog and returns a FileInfo object that can be used to
        open the image. Returns null if the dialog is canceled. The fileName
        and directory fields are null if the no argument constructor was used. */
    public FileInfo getFileInfo() {
        if (!showDialog())
            return null;
        String imageType = types[choiceSelection];
        FileInfo fi = new FileInfo();
        fi.fileFormat = fi.RAW;
        fi.fileName = fileName;
        directory = IJ.addSeparator(directory);
        fi.directory = directory;
        fi.width = width;
        fi.height = height;
        // Offsets beyond Integer.MAX_VALUE go into the long field instead.
        if (offset>2147483647)
            fi.longOffset = offset;
        else
            fi.offset = (int)offset;
        fi.nImages = nImages;
        fi.gapBetweenImages = (int)gapBetweenImages;
        fi.longGap = gapBetweenImages;
        fi.intelByteOrder = intelByteOrder;
        fi.whiteIsZero = whiteIsZero;
        // Map the selected display name to the corresponding FileInfo type constant.
        if (imageType.equals("8-bit"))
            fi.fileType = FileInfo.GRAY8;
        else if (imageType.equals("16-bit Signed"))
            fi.fileType = FileInfo.GRAY16_SIGNED;
        else if (imageType.equals("16-bit Unsigned"))
            fi.fileType = FileInfo.GRAY16_UNSIGNED;
        else if (imageType.equals("32-bit Signed"))
            fi.fileType = FileInfo.GRAY32_INT;
        else if (imageType.equals("32-bit Unsigned"))
            fi.fileType = FileInfo.GRAY32_UNSIGNED;
        else if (imageType.equals("32-bit Real"))
            fi.fileType = FileInfo.GRAY32_FLOAT;
        else if (imageType.equals("64-bit Real"))
            fi.fileType = FileInfo.GRAY64_FLOAT;
        else if (imageType.equals("24-bit RGB"))
            fi.fileType = FileInfo.RGB;
        else if (imageType.equals("24-bit RGB Planar"))
            fi.fileType = FileInfo.RGB_PLANAR;
        else if (imageType.equals("24-bit BGR"))
            fi.fileType = FileInfo.BGR;
        else if (imageType.equals("24-bit Integer"))
            fi.fileType = FileInfo.GRAY24_UNSIGNED;
        else if (imageType.equals("32-bit ARGB"))
            fi.fileType = FileInfo.ARGB;
        else if (imageType.equals("32-bit ABGR"))
            fi.fileType = FileInfo.ABGR;
        else if (imageType.equals("1-bit Bitmap"))
            fi.fileType = FileInfo.BITMAP;
        else
            fi.fileType = FileInfo.GRAY8;
        if (IJ.debugMode) IJ.log("ImportDialog: "+fi);
        lastFileInfo = (FileInfo)fi.clone();
        return fi;
    }
    /** Called once when ImageJ quits. Stores the current defaults in the preferences;
        long values that do not fit in an int are saved as 0. */
    public static void savePreferences(Properties prefs) {
        prefs.put(TYPE, Integer.toString(sChoiceSelection));
        prefs.put(WIDTH, Integer.toString(sWidth));
        prefs.put(HEIGHT, Integer.toString(sHeight));
        prefs.put(OFFSET, Integer.toString(sOffset>2147483647?0:(int)sOffset));
        prefs.put(N, Integer.toString(sNImages));
        prefs.put(GAP, Integer.toString(sGapBetweenImages>2147483647?0:(int)sGapBetweenImages));
        int options = 0;
        if (sWhiteIsZero)
            options |= WHITE_IS_ZERO;
        if (sIntelByteOrder)
            options |= INTEL_BYTE_ORDER;
        prefs.put(OPTIONS, Integer.toString(options));
    }
    /** Returns the FileInfo object used to import the last raw image,
        or null if a raw image has not been imported. */
    public static FileInfo getLastFileInfo() {
        return lastFileInfo;
    }
    // Extracts width, height and optionally image count from names such as
    // "movie_640x480x100.raw" and stores them in the dialog fields.
    private void getDimensionsFromName(String name) {
        if (name==null)
            return;
        if (!name.matches(".*[0-9]+x[0-9]+.*"))
            return; // must have 'x' separator
        // Only consider the portion after the last underscore, if any.
        int lastUnderscore = name.lastIndexOf("_");
        String name2 = name;
        if (lastUnderscore>=0)
            name2 = name.substring(lastUnderscore);
        char[] chars = new char[name2.length()];
        for (int i=0; i<name2.length(); i++) // change non-digits to spaces
            chars[i] = Character.isDigit(name2.charAt(i))?name2.charAt(i):' ';
        name2 = new String(chars);
        String[] numbers = Tools.split(name2);
        int n = numbers.length;
        if (n<2) return;
        int w = (int)Tools.parseDouble(numbers[0],0);
        if (w<1) return;
        int h = (int)Tools.parseDouble(numbers[1],0);
        if (h<1) return;
        width = w;
        height = h;
        nImages = 1;
        if (n>2) {
            int d = (int)Tools.parseDouble(numbers[2],0);
            if (d>0)
                nImages = d;
        }
        guessFormat(directory, name);
    }
    // Guesses pixel type from the ratio of file size to pixel count, and the
    // byte order from a "be.raw"/"le.raw" file-name suffix.
    private void guessFormat(String dir, String name) {
        if (dir==null) return;
        File file = new File(dir+name);
        long imageSize = (long)width*height*nImages;
        long fileSize = file.length();
        if (fileSize==4*imageSize)
            choiceSelection = 5; // 32-bit real
        else if (fileSize==2*imageSize)
            choiceSelection = 2; // 16-bit unsigned
        else if (fileSize==3*imageSize)
            choiceSelection = 7; // 24-bit RGB
        else if (fileSize==imageSize)
            choiceSelection = 0; // 8-bit
        if (name.endsWith("be.raw")) // big-endian
            intelByteOrder = false;
        else if (name.endsWith("le.raw")) // little-endian
            intelByteOrder = true;
    }
}
| |
/*
* Copyright 2006-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openspaces.esb.mule.queue;
import java.util.LinkedList;
import java.util.List;
import org.mule.DefaultMuleMessage;
import org.mule.VoidMuleEvent;
import org.mule.api.MuleContext;
import org.mule.api.MuleEvent;
import org.mule.api.MuleException;
import org.mule.api.MuleMessage;
import org.mule.api.construct.FlowConstruct;
import org.mule.api.endpoint.InboundEndpoint;
import org.mule.api.lifecycle.CreateException;
import org.mule.api.service.Service;
import org.mule.api.transport.Connector;
import org.mule.api.transport.PropertyScope;
import org.mule.transport.NullPayload;
import org.mule.transport.PollingReceiverWorker;
import org.mule.transport.TransactedPollingMessageReceiver;
import org.openspaces.core.SpaceClosedException;
import org.openspaces.core.SpaceInterruptedException;
import com.gigaspaces.document.DocumentProperties;
import com.gigaspaces.query.ISpaceQuery;
import com.j_spaces.core.exception.SpaceUnavailableException;
/**
* Receives (takes) a message from an internal queue. The queue is a virtualized queue represented
* by the {@link org.openspaces.esb.mule.queue.OpenSpacesQueueObject} with its endpoint address
* set (and not the message).
*
* @author kimchy
*/
public class OpenSpacesQueueMessageReceiver extends TransactedPollingMessageReceiver {
    // Sentinel returned from processMessage() when routing produced no response,
    // so the framework never sees a null event.
    private static final MuleEvent voidEvent = new VoidMuleEvent() {
        // This override is required for org.mule.transport.TransactedPollingMessageReceiver$1.process(TransactedPollingMessageReceiver.java:161) ~[mule-core-3.7.0.jar:3.7.0]
        @Override
        public MuleContext getMuleContext() {
            return null;
        }
    };
    private OpenSpacesQueueConnector connector;
    // Space query template matching queue entries for this endpoint's address.
    private ISpaceQuery<OpenSpacesQueueObject> template;
    public OpenSpacesQueueMessageReceiver(Connector connector, FlowConstruct flowConstruct, InboundEndpoint endpoint) throws CreateException {
        super(connector, flowConstruct, endpoint);
        init(connector, endpoint);
    }
    public OpenSpacesQueueMessageReceiver(Connector connector,
                                          Service service,
                                          final InboundEndpoint endpoint) throws CreateException {
        super(connector, service, endpoint);
        init(connector, endpoint);
    }
    // Shared constructor logic: transaction mode follows the endpoint config.
    private void init(Connector connector, final InboundEndpoint endpoint) {
        this.connector = (OpenSpacesQueueConnector) connector;
        this.setReceiveMessagesInTransaction(endpoint.getTransactionConfig().isTransacted());
        // use the defined timeout to set the frequency of the non-blocking polling
        this.setFrequency(this.connector.getTimeout()/10L);
    }
    // Builds and snapshots the space template for this endpoint's queue address.
    protected void doConnect() throws Exception {
        OpenSpacesQueueObject internalTemplate = connector.newQueueTemplate(endpoint.getEndpointURI().getAddress());
        template = connector.getGigaSpaceObj().snapshot(internalTemplate);
    }
    protected void doDispose() {
        // template method
    }
    protected void doDisconnect() throws Exception {
        // template method
    }
    public Object onCall(MuleMessage message, boolean synchronous) throws MuleException {
        // Rewrite the message to treat it as a new message
        MuleMessage newMessage = new DefaultMuleMessage(message);
        return routeMessage(newMessage);
    }
    // Polls the space: takes one entry, then batches up to getBatchSize() more.
    // Space-shutdown exceptions are intentionally swallowed (receiver is stopping).
    protected List getMessages() throws Exception {
        // The list of retrieved messages that will be returned
        List<MuleMessage> messages = new LinkedList<MuleMessage>();
        // try to get the first event off the queue
        try {
            /*
             * Determine how many messages to batch in this poll: we need to drain the queue quickly, but not by
             * slamming the workManager too hard. It is impossible to determine this more precisely without proper
             * load statistics/feedback or some kind of "event cost estimate". Therefore we just try to use half
             * of the receiver's workManager, since it is shared with receivers for other endpoints.
             */
            OpenSpacesQueueObject entry = connector.getGigaSpaceObj().take(template);
            if (entry != null) {
                appendMessage(messages, entry);
                // batch more messages if needed
                OpenSpacesQueueObject[] entries = connector.getGigaSpaceObj().takeMultiple(template, connector.getBatchSize());
                if (entries != null) {
                    for (OpenSpacesQueueObject entry1 : entries) {
                        appendMessage(messages, entry1);
                    }
                }
            }
        } catch (SpaceInterruptedException e) {
            // do nothing, we are being stopped
        } catch (SpaceClosedException e) {
            // do nothing, we are being stopped
        } catch (SpaceUnavailableException e) {
            // do nothing, we are being stopped
        }
        // let our workManager handle the batch of events
        return messages;
    }
    // Converts a queue entry into a MuleMessage and appends it to the batch.
    private void appendMessage(List<MuleMessage> messages, OpenSpacesQueueObject entry) throws Exception{
        MuleMessage inboundMessage = createMuleMessage(entry);
        // keep first dequeued event
        messages.add(inboundMessage);
    }
    @Override
    protected MuleEvent processMessage(Object msg) throws Exception {
        // getMessages() returns UMOEvents
        MuleMessage message = (MuleMessage) msg;
        // Rewrite the message to treat it as a new message
        MuleMessage newMessage = new DefaultMuleMessage(message, this.connector.getMuleContext());
        MuleEvent response = routeMessage(newMessage);
        //write response
        //should send back only if remote synch is set or no outbound endpoints
        if (endpoint.getExchangePattern().hasResponse() && response != null) {
            MuleMessage responseMessage = response.getMessage();
            // Correlate the response entry with the originating request.
            String correlationId = message.getCorrelationId();
            OpenSpacesQueueObject responseEntry = connector.newQueueEntry(getEndpointURI().getAddress() + OpenSpacesQueueMessageDispatcher.DEFAULT_RESPONSE_QUEUE);
            responseEntry.setCorrelationID(correlationId);
            // Copy all outbound-scoped properties into the entry's payload metadata.
            DocumentProperties payloadMetaData = new DocumentProperties();
            for (String propertyName : responseMessage.getPropertyNames(PropertyScope.OUTBOUND)) {
                Object property = responseMessage.getProperty(propertyName, PropertyScope.OUTBOUND);
                payloadMetaData.put(propertyName, property);
            }
            responseEntry.setPayloadMetaData(payloadMetaData);
            // NullPayload is normalized to null before writing to the space.
            Object payload = responseMessage.getPayload();
            if(payload instanceof NullPayload)
                payload = null;
            responseEntry.setPayload(payload);
            if (logger.isDebugEnabled()) {
                logger.debug(getEndpointURI() + " sending response to client " + responseEntry);
            }
            // Honor a per-message response timeout as the space entry's lease, if present.
            Integer lease = responseMessage.getOutboundProperty(OpenSpacesQueueObject.RESPONSE_TIMEOUT_PROPERTY);
            if (lease != null) {
                connector.getGigaSpaceObj().write(responseEntry, (long) lease);
            } else {
                connector.getGigaSpaceObj().write(responseEntry);
            }
        }
        return response != null ? response : voidEvent;
    }
    /*
     * We create our own "polling" worker here since we need to evade the standard scheduler.
     */
    // @Override
    protected PollingReceiverWorker createWork() {
        return new ReceiverWorker(this);
    }
    /*
     * Even though the OpenSpaces Queue transport is "polling" for messages, the nonexistent cost of accessing the queue is
     * a good reason to not use the regular scheduling mechanism in order to both minimize latency and
     * maximize throughput.
     */
    protected static class ReceiverWorker extends PollingReceiverWorker {
        public ReceiverWorker(OpenSpacesQueueMessageReceiver pollingMessageReceiver) {
            super(pollingMessageReceiver);
        }
        public void run() {
            /*
             * We simply run our own polling loop all the time as long as the receiver is started. The
             * blocking wait defined by VMConnector.getQueueTimeout() will prevent this worker's receiver
             * thread from busy-waiting.
             */
            if (this.getReceiver().isConnected()) {
                super.run();
            }
        }
    }
}
| |
/*****************************************************************************
*
* HOPERUN PROPRIETARY INFORMATION
*
* The information contained herein is proprietary to HopeRun
* and shall not be reproduced or disclosed in whole or in part
* or used for any design or manufacture
* without direct written authorization from HopeRun.
*
* Copyright (c) 2013 by HopeRun. All rights reserved.
*
***************************************************************************/
package com.hoperun.feiying.joyplus.service.handler;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.List;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import com.hoperun.feiying.framework.enums.EStatus;
import com.hoperun.feiying.framework.model.videodetail.CommentsModel;
import com.hoperun.feiying.framework.model.videodetail.DownUrlsModel;
import com.hoperun.feiying.framework.model.videodetail.DownUrlsSubModel;
import com.hoperun.feiying.framework.model.videodetail.EpisodesModel;
import com.hoperun.feiying.framework.model.videodetail.MovieDetailModel;
import com.hoperun.feiying.framework.model.videodetail.TopicsModel;
import com.hoperun.feiying.framework.model.videodetail.VideoDetailModel;
import com.hoperun.feiying.framework.model.videodetail.VideoUrlsModel;
import com.hoperun.feiying.framework.uitls.JsonUtil;
import com.hoperun.feiying.framework.upload.request.BaseRequest;
import com.hoperun.feiying.framework.upload.request.VideoDetailRequest;
import com.hoperun.feiying.framework.upload.response.BaseResponse;
import com.hoperun.feiying.framework.utils.StringUtil;
import com.hoperun.feiying.joyplus.service.entity.Comments;
import com.hoperun.feiying.joyplus.service.entity.EpisodesDown;
import com.hoperun.feiying.joyplus.service.entity.EpisodesVideo;
import com.hoperun.feiying.joyplus.service.entity.FavoriteCollecter;
import com.hoperun.feiying.joyplus.service.entity.Program;
import com.hoperun.feiying.joyplus.service.entity.Topics;
import com.hoperun.feiying.joyplus.service.service.CommentsService;
import com.hoperun.feiying.joyplus.service.service.EpisodesDownService;
import com.hoperun.feiying.joyplus.service.service.EpisodesVideoService;
import com.hoperun.feiying.joyplus.service.service.FavoriteCollecterService;
import com.hoperun.feiying.joyplus.service.service.ProgramService;
import com.hoperun.feiying.joyplus.service.service.TopicsService;
import com.hoperun.framework.task.RemoteTaskDefine;
import com.hoperun.framework.task.ERemoteTaskItemStatus;
import com.hoperun.framework.task.vo.RemoteTaskItemVO;
/**
* ClassName: VideoDetailHandler
*
* @description
* @author yang_jun
* @Date Feb 8, 2014
*
*/
@Component("videoDetailHandler")
public class VideoDetailHandler implements IHandler {

    private static final Logger log = Logger.getLogger(VideoDetailHandler.class);
    private static final Logger programViewLog = Logger.getLogger("programViewLog");

    // FIX: SimpleDateFormat is NOT thread-safe, so the formatter must not be a
    // shared static instance. Keep only the pattern static and build a local
    // formatter where it is actually used.
    private static final String DATE_PATTERN = "yyyy-MM-dd HH:mm:ss";

    @Autowired
    private CommentsService commentsService;
    @Autowired
    private TopicsService topicsService;
    @Autowired
    private ProgramService programService;
    @Autowired
    private EpisodesDownService episodesDownService;
    @Autowired
    private EpisodesVideoService episodesVideoService;
    @Autowired
    private FavoriteCollecterService favoriteCollecterService;

    /**
     * Persists every video-detail model carried by the request: program row,
     * favorite/statistics counters, comments, topics and episode URL lists.
     *
     * @param request expected to be a {@link VideoDetailRequest}
     * @return response whose status is set to {@code EStatus.EXCEPTION} if any
     *         model could not be processed
     */
    @Override
    public BaseResponse handle(BaseRequest request) {
        BaseResponse response = new BaseResponse();
        VideoDetailRequest vdRequest = (VideoDetailRequest) request;
        List<VideoDetailModel> vdList = vdRequest.getVdModelList();
        List<String> failedIdlist = vdRequest.getFailedProIdList();
        for (String id : failedIdlist) {
            log.info(String.format("search programView failed, prod_id [%s]", id));
        }
        for (VideoDetailModel model : vdList) {
            try {
                saveProgramView(model, vdRequest);
            } catch (Exception e) {
                // FIX: previously swallowed without a trace; log the cause so
                // failures are diagnosable.
                log.error("save programView failed", e);
                response.setStatus(EStatus.EXCEPTION);
            }
        }
        return response;
    }

    /**
     * Saves a single program view. On failure, the matching remote-task item is
     * marked {@code Failed} and the error is logged; the exception is not
     * rethrown (best-effort per model, as before).
     *
     * NOTE(review): Spring's proxy-based AOP silently ignores
     * {@code @Transactional} on private methods invoked from within the same
     * bean, so this method almost certainly runs WITHOUT a transaction.
     * Consider moving it behind a public method on a separate service bean —
     * confirm against the project's transaction configuration.
     */
    @Transactional
    private void saveProgramView(VideoDetailModel model, VideoDetailRequest vdRequest) {
        String proId = null;
        try {
            // Exactly one of movie / tv / show is expected to be present;
            // all three share the MovieDetailModel handling.
            if (model.getMovie() != null) {
                proId = model.getMovie().getId();
                dealWithMovie(model.getMovie());
            } else if (model.getTv() != null) {
                proId = model.getTv().getId();
                dealWithMovie(model.getTv());
            } else if (model.getShow() != null) {
                proId = model.getShow().getId();
                dealWithMovie(model.getShow());
            } else {
                // FIX: a model with no payload used to fall through to
                // Long.parseLong(null) and be reported as a save failure.
                log.warn("video detail model carries no movie/tv/show, skipped");
                return;
            }
            // If there is no such a program, ignore it.
            Program program = programService.findProgramByProdId(Long.parseLong(proId));
            if (program == null) {
                return;
            }
            if (!StringUtil.isEmpty(model.getCanPlayDevice())) {
                dealWithCanPlayDevice(Integer.parseInt(model.getCanPlayDevice()), proId);
            } else {
                dealWithCanPlayDevice(0, proId);
            }
            // Comments
            List<CommentsModel> comments = model.getComments();
            if (comments != null && !comments.isEmpty()) {
                dealWithComments(comments, proId);
            }
            // Topics
            List<TopicsModel> topics = model.getTopics();
            if (topics != null && !topics.isEmpty()) {
                dealWithTopics(topics, proId);
            }
            programViewLog.info(String.format("Save programView successful, prod_id [%s]", proId));
        } catch (Exception e) {
            markTaskItemFailed(vdRequest, proId, e);
        }
    }

    /**
     * Marks the remote-task item whose id list contains {@code proId} as
     * Failed, then logs the failure with its cause.
     */
    private void markTaskItemFailed(VideoDetailRequest vdRequest, String proId, Exception cause) {
        String identity = null;
        List<RemoteTaskItemVO> taskItems =
                vdRequest.getScheduleVO().getTaskList().get(0).getTaskItems();
        search:
        for (RemoteTaskItemVO itemVo : taskItems) {
            @SuppressWarnings("unchecked")
            List<String> idList = (List<String>) itemVo.getParams().get(
                    RemoteTaskDefine.PARAMS_DETAIL_TASK_SEARCH_LIST_KEY);
            identity = itemVo.getIdentity().getIdentity();
            for (String id : idList) {
                if (id.equals(proId)) {
                    itemVo.setStatus(ERemoteTaskItemStatus.Failed);
                    break search;
                }
            }
        }
        // FIX: the format arguments were swapped — proId was logged under the
        // "identity" placeholder and vice versa. Also attach the cause.
        programViewLog.error(String.format(
                "identity:[%s]--Save programView exception, prod_id [%s]", identity, proId), cause);
    }

    /** Replaces the program's topics with the freshly fetched list. */
    private void dealWithTopics(List<TopicsModel> topicsList, String proId) {
        Program program = programService.findProgramByProdId(Long.parseLong(proId));
        if (program == null) {
            return;
        }
        // Drop stale topics before persisting the new ones.
        for (Topics dbTopic : program.getTopicsList()) {
            dbTopic.remove();
        }
        for (TopicsModel model : topicsList) {
            Topics topic = new Topics();
            topic.setName(model.gettName());
            if (!StringUtil.isEmpty(model.gettId())) {
                topic.setTopicId(Long.parseLong(model.gettId()));
            }
            topic.getProgramList().add(program);
            topicsService.saveTopics(topic);
        }
    }

    /** Inserts or updates the comments attached to the given program. */
    private void dealWithComments(List<CommentsModel> commentsList, String prodId) {
        // FIX: the program lookup is loop-invariant; fetch it once instead of
        // once per comment.
        Program program = programService.findProgramByProdId(Long.parseLong(prodId));
        for (CommentsModel model : commentsList) {
            Comments comments = commentsService.findCommentsByCommentId(Long.parseLong(model.getId()));
            if (comments == null) {
                comments = new Comments();
            }
            comments.setProgram(program);
            comments.setCommentId(Long.parseLong(model.getId()));
            comments.setComments(model.getContent());
            try {
                // Local formatter: SimpleDateFormat is not thread-safe.
                comments.setCreateDate(new SimpleDateFormat(DATE_PATTERN).parse(model.getCreateDate()));
            } catch (ParseException e) {
                // FIX: no longer swallowed silently; the create date simply
                // stays unset, as before.
                log.warn(String.format("unparseable comment date [%s], comment_id [%s]",
                        model.getCreateDate(), model.getId()));
            }
            if (!StringUtil.isEmpty(model.getOwnerId())) {
                comments.setOwnerId(model.getOwnerId());
            }
            comments.setOwnerName(model.getOwnerName());
            comments.setOwnerPicUrl(model.getOwnerPicUrl());
            comments.setReplies(model.getReplies());
            // A non-null primary key means the row came from the database.
            if (comments.getId() != null) {
                commentsService.updateComments(comments);
            } else {
                commentsService.saveComments(comments);
            }
        }
    }

    /** Stores the can-play-device bitmask/flag on the program row. */
    private void dealWithCanPlayDevice(int can_play_device, String proId) {
        Program program = programService.findProgramByProdId(Long.parseLong(proId));
        if (program == null) {
            // Defensive: callers verify existence first, but keep the original
            // create-on-miss behavior.
            program = new Program();
        }
        program.setCanPlayDevice(can_play_device);
        programService.updateOrSave(program);
    }

    /**
     * Updates the program row and its favorite/statistics counters from the
     * fetched movie/TV/show detail, then refreshes the episode URL lists.
     */
    private void dealWithMovie(MovieDetailModel movie) {
        String proId = movie.getId();
        Program program = programService.findProgramByProdId(Long.parseLong(proId));
        if (program == null) {
            return;
        }
        FavoriteCollecter faCollecter =
                favoriteCollecterService.findFavoriteCollecterByProId(Long.parseLong(proId));
        if (faCollecter == null) {
            faCollecter = new FavoriteCollecter();
        }
        faCollecter.setProgram(program);
        program.setEpisodesCount(movie.getEpisodesCount());
        // Counters arrive as strings; only overwrite when present.
        if (!StringUtil.isEmpty(movie.getLikeNum())) {
            faCollecter.setLikeNum(Long.parseLong(movie.getLikeNum()));
        }
        if (!StringUtil.isEmpty(movie.getWatchNum())) {
            faCollecter.setWatchNum(Long.parseLong(movie.getWatchNum()));
        }
        if (!StringUtil.isEmpty(movie.getTotalCommentNumber())) {
            faCollecter.setTotalCommentNumber(Long.parseLong(movie.getTotalCommentNumber()));
        }
        if (!StringUtil.isEmpty(movie.getFavorityNum())) {
            faCollecter.setFavorityNum(Long.parseLong(movie.getFavorityNum()));
        }
        if (!StringUtil.isEmpty(movie.getSupportNum())) {
            faCollecter.setSupportNum(Long.parseLong(movie.getSupportNum()));
        }
        // FIX: movie.getFee().equals("false") threw NPE when the fee flag was
        // missing; any value other than "false" (including null) means fee.
        program.setFee(!"false".equals(movie.getFee()));
        if (!StringUtil.isEmpty(movie.getUsergroup())) {
            program.setUserGroup(Integer.parseInt(movie.getUsergroup()));
        }
        program.setSources(movie.getSources());
        program.setDoubanId(movie.getDoubanId());
        programService.updateOrSave(program);
        if (faCollecter.getId() == null) {
            favoriteCollecterService.saveFavoriteCollecter(faCollecter);
        } else {
            favoriteCollecterService.updateFavoriteCollecter(faCollecter);
        }
        // FIX: guard against a null episodes list (was only checked for empty).
        List<EpisodesModel> episodesModelList = movie.getEpisodes();
        if (episodesModelList != null && !episodesModelList.isEmpty()) {
            dealWithEpisodes(episodesModelList, proId);
        }
    }

    /**
     * Replaces all stored episode play/download URLs for the program with the
     * freshly fetched ones.
     */
    private void dealWithEpisodes(List<EpisodesModel> episodesModelList, String proId) {
        // Full refresh: delete existing rows first.
        episodesVideoService.deleteEpisodesVideoByProId(Long.parseLong(proId));
        episodesDownService.deleteEpisodesDownByProId(Long.parseLong(proId));
        for (EpisodesModel model : episodesModelList) {
            String name = model.getName();
            String vid = model.getVid();
            List<VideoUrlsModel> videoUrlsList = model.getVideoUrls();
            List<DownUrlsModel> downUrlsList = model.getDownUrls();
            if (videoUrlsList != null && !videoUrlsList.isEmpty()) {
                dealWithVideoUrls(videoUrlsList, proId, name, vid);
            }
            if (downUrlsList != null && !downUrlsList.isEmpty()) {
                dealWithDownUrls(downUrlsList, proId, name);
            }
        }
    }

    /** Persists one EpisodesDown row per (source, url) pair. */
    private void dealWithDownUrls(List<DownUrlsModel> downUrlsList, String proId, String name) {
        Program program = programService.findProgramByProdId(Long.parseLong(proId));
        if (program == null) {
            return;
        }
        for (DownUrlsModel model : downUrlsList) {
            for (DownUrlsSubModel subModel : model.getUrls()) {
                EpisodesDown episodesDown = new EpisodesDown();
                // FIX: reuse the program fetched above instead of re-querying
                // it for every single sub-URL.
                episodesDown.setProgram(program);
                episodesDown.setName(name);
                episodesDown.setDownSource(model.getSource());
                episodesDown.setDownUrl(subModel.getUrl());
                episodesDown.setDownType(subModel.getType());
                episodesDown.setDownFile(subModel.getFile());
                episodesDownService.saveEpisodesDown(episodesDown);
            }
        }
    }

    /** Persists one EpisodesVideo row per playable URL. */
    private void dealWithVideoUrls(List<VideoUrlsModel> videoUrlsList, String proId, String name, String vid) {
        Program program = programService.findProgramByProdId(Long.parseLong(proId));
        if (program == null) {
            return;
        }
        for (VideoUrlsModel model : videoUrlsList) {
            EpisodesVideo episodesVideo = new EpisodesVideo();
            episodesVideo.setProgram(program);
            episodesVideo.setName(name);
            // FIX: `vid != ""` compared references, not content.
            if (vid != null && !vid.isEmpty()) {
                episodesVideo.setVid(Long.parseLong(vid)); // only for sohu
            }
            episodesVideo.setVideoSource(model.getSource());
            episodesVideo.setVideoUrl(model.getUrl());
            episodesVideoService.saveEpisodesVideo(episodesVideo);
        }
    }

    /** Deserializes the raw JSON payload into a {@link VideoDetailRequest}. */
    @Override
    public BaseRequest packageRequest(String request) {
        return JsonUtil.fromJson(request, VideoDetailRequest.class);
    }
}
| |
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.unsplash.transition;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ArgbEvaluator;
import android.animation.ObjectAnimator;
import android.animation.PropertyValuesHolder;
import android.content.Context;
import android.content.res.ColorStateList;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.transition.Transition;
import android.transition.TransitionValues;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
/**
* Transitions a TextView from one font size to another. This does not
* do any animation of TextView content and if the text changes, this
* transition will not run.
* <p>
* The animation works by capturing a bitmap of the text at the start
* and end states. It then scales the start bitmap until it reaches
* a threshold and switches to the scaled end bitmap for the remainder
* of the animation. This keeps the jump in bitmaps in the middle of
* the animation, where it is less noticeable than at the beginning
* or end of the animation. This transition does not work well with
* cropped text. TextResize also does not work with changes in
* TextView gravity.
*/
public class TextResize extends Transition {
    private static final String FONT_SIZE = "TextResize:fontSize";
    private static final String DATA = "TextResize:data";
    private static final String[] PROPERTIES = {
            // We only care about FONT_SIZE. If anything else changes, we don't
            // want this transition to be called to create an Animator.
            FONT_SIZE,
    };

    public TextResize() {
        addTarget(TextView.class);
    }

    /**
     * Constructor used from XML.
     */
    public TextResize(Context context, AttributeSet attrs) {
        super(context, attrs);
        addTarget(TextView.class);
    }

    @Override
    public String[] getTransitionProperties() {
        return PROPERTIES;
    }

    @Override
    public void captureStartValues(TransitionValues transitionValues) {
        captureValues(transitionValues);
    }

    @Override
    public void captureEndValues(TransitionValues transitionValues) {
        captureValues(transitionValues);
    }

    /**
     * Records the font size (the only property that triggers the transition)
     * plus a TextResizeData snapshot of padding/size/gravity/color.
     * Non-TextView targets are ignored.
     */
    private void captureValues(TransitionValues transitionValues) {
        if (!(transitionValues.view instanceof TextView)) {
            return;
        }
        final TextView view = (TextView) transitionValues.view;
        final float fontSize = view.getTextSize();
        transitionValues.values.put(FONT_SIZE, fontSize);
        final TextResizeData data = new TextResizeData(view);
        transitionValues.values.put(DATA, data);
    }

    @Override
    public Animator createAnimator(ViewGroup sceneRoot, TransitionValues startValues,
                                   TransitionValues endValues) {
        if (startValues == null || endValues == null) {
            return null;
        }
        // NOTE(review): DATA is assumed present here because captureValues()
        // stores it for every TextView target — confirm no other code path can
        // produce TransitionValues without it.
        final TextResizeData startData = (TextResizeData) startValues.values.get(DATA);
        final TextResizeData endData = (TextResizeData) endValues.values.get(DATA);
        if (startData.gravity != endData.gravity) {
            return null; // Can't deal with changes in gravity
        }
        final TextView textView = (TextView) endValues.view;
        float startFontSize = (Float) startValues.values.get(FONT_SIZE);
        // Capture the start bitmap -- we need to set the values to the start values first
        setTextViewData(textView, startData, startFontSize);
        final float startWidth = textView.getPaint().measureText(textView.getText().toString());
        final Bitmap startBitmap = captureTextBitmap(textView);
        // A font size of 0 marks a missing (null) bitmap for the draw logic.
        if (startBitmap == null) {
            startFontSize = 0;
        }
        float endFontSize = (Float) endValues.values.get(FONT_SIZE);
        // Set the values to the end values
        setTextViewData(textView, endData, endFontSize);
        final float endWidth = textView.getPaint().measureText(textView.getText().toString());
        // Capture the end bitmap
        final Bitmap endBitmap = captureTextBitmap(textView);
        if (endBitmap == null) {
            endFontSize = 0;
        }
        if (startFontSize == 0 && endFontSize == 0) {
            return null; // Can't animate null bitmaps
        }
        // Set the colors of the TextView so that nothing is drawn.
        // Only draw the bitmaps in the overlay.
        final ColorStateList textColors = textView.getTextColors();
        final ColorStateList hintColors = textView.getHintTextColors();
        final int highlightColor = textView.getHighlightColor();
        final ColorStateList linkColors = textView.getLinkTextColors();
        textView.setTextColor(Color.TRANSPARENT);
        textView.setHintTextColor(Color.TRANSPARENT);
        textView.setHighlightColor(Color.TRANSPARENT);
        textView.setLinkTextColor(Color.TRANSPARENT);
        // Create the drawable that will be animated in the TextView's overlay.
        // Ensure that it is showing the start state now.
        final SwitchBitmapDrawable drawable = new SwitchBitmapDrawable(textView, startData.gravity,
                startBitmap, startFontSize, startWidth, endBitmap, endFontSize, endWidth);
        textView.getOverlay().add(drawable);
        // Properties: left, top, font size, text color
        final PropertyValuesHolder leftProp =
                PropertyValuesHolder.ofFloat("left", startData.paddingLeft, endData.paddingLeft);
        final PropertyValuesHolder topProp =
                PropertyValuesHolder.ofFloat("top", startData.paddingTop, endData.paddingTop);
        final PropertyValuesHolder rightProp = PropertyValuesHolder.ofFloat("right",
                startData.width - startData.paddingRight, endData.width - endData.paddingRight);
        final PropertyValuesHolder bottomProp = PropertyValuesHolder.ofFloat("bottom",
                startData.height - startData.paddingBottom, endData.height - endData.paddingBottom);
        final PropertyValuesHolder fontSizeProp =
                PropertyValuesHolder.ofFloat("fontSize", startFontSize, endFontSize);
        final ObjectAnimator animator;
        // Only animate the text color when it actually changes.
        if (startData.textColor != endData.textColor) {
            final PropertyValuesHolder textColorProp = PropertyValuesHolder.ofObject("textColor",
                    new ArgbEvaluator(), startData.textColor, endData.textColor);
            animator = ObjectAnimator.ofPropertyValuesHolder(drawable,
                    leftProp, topProp, rightProp, bottomProp, fontSizeProp, textColorProp);
        } else {
            animator = ObjectAnimator.ofPropertyValuesHolder(drawable,
                    leftProp, topProp, rightProp, bottomProp, fontSizeProp);
        }
        final float finalFontSize = endFontSize;
        // End: remove the overlay and restore the real colors.
        // Pause/resume: push the drawable's current state into the TextView so
        // a paused transition still looks correct.
        AnimatorListenerAdapter listener = new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                textView.getOverlay().remove(drawable);
                textView.setTextColor(textColors);
                textView.setHintTextColor(hintColors);
                textView.setHighlightColor(highlightColor);
                textView.setLinkTextColor(linkColors);
            }

            @Override
            public void onAnimationPause(Animator animation) {
                textView.setTextSize(TypedValue.COMPLEX_UNIT_PX, drawable.getFontSize());
                final int paddingLeft = Math.round(drawable.getLeft());
                final int paddingTop = Math.round(drawable.getTop());
                final float fraction = animator.getAnimatedFraction();
                final int paddingRight = Math.round(interpolate(startData.paddingRight,
                        endData.paddingRight, fraction));
                final int paddingBottom = Math.round(interpolate(startData.paddingBottom,
                        endData.paddingBottom, fraction));
                textView.setPadding(paddingLeft, paddingTop, paddingRight, paddingBottom);
                textView.setTextColor(drawable.getTextColor());
            }

            @Override
            public void onAnimationResume(Animator animation) {
                textView.setTextSize(TypedValue.COMPLEX_UNIT_PX, finalFontSize);
                textView.setPadding(endData.paddingLeft, endData.paddingTop,
                        endData.paddingRight, endData.paddingBottom);
                textView.setTextColor(endData.textColor);
            }
        };
        animator.addListener(listener);
        animator.addPauseListener(listener);
        return animator;
    }

    /**
     * Applies the captured padding/size/color plus the given font size to the
     * view, then re-measures and re-lays it out so a bitmap capture reflects
     * that exact state.
     */
    private static void setTextViewData(TextView view, TextResizeData data, float fontSize) {
        view.setTextSize(TypedValue.COMPLEX_UNIT_PX, fontSize);
        view.setPadding(data.paddingLeft, data.paddingTop, data.paddingRight, data.paddingBottom);
        view.setRight(view.getLeft() + data.width);
        view.setBottom(view.getTop() + data.height);
        view.setTextColor(data.textColor);
        int widthSpec = View.MeasureSpec.makeMeasureSpec(view.getWidth(), View.MeasureSpec.EXACTLY);
        int heightSpec = View.MeasureSpec.makeMeasureSpec(view.getHeight(), View.MeasureSpec.EXACTLY);
        view.measure(widthSpec, heightSpec);
        view.layout(view.getLeft(), view.getTop(), view.getRight(), view.getBottom());
    }

    /**
     * Draws the view's text (without its background) into a new bitmap sized
     * to the content area. Returns null when the content area is empty.
     */
    private static Bitmap captureTextBitmap(TextView textView) {
        Drawable background = textView.getBackground();
        // Temporarily drop the background so only the text is captured.
        textView.setBackground(null);
        int width = textView.getWidth() - textView.getPaddingLeft() - textView.getPaddingRight();
        int height = textView.getHeight() - textView.getPaddingTop() - textView.getPaddingBottom();
        // NOTE(review): when width/height is 0 the background is never
        // restored before returning — confirm callers tolerate this.
        if (width == 0 || height == 0) {
            return null;
        }
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        canvas.translate(-textView.getPaddingLeft(), -textView.getPaddingTop());
        textView.draw(canvas);
        textView.setBackground(background);
        return bitmap;
    }

    /** Linear interpolation between start and end at the given fraction. */
    private static float interpolate(float start, float end, float fraction) {
        return start + (fraction * (end - start));
    }

    /**
     * This Drawable is used to scale the start and end bitmaps and switch between them
     * at the appropriate progress.
     */
    private static class SwitchBitmapDrawable extends Drawable {
        private final TextView view;
        private final int horizontalGravity;
        private final int verticalGravity;
        private final Bitmap startBitmap;
        private final Bitmap endBitmap;
        private final Paint paint = new Paint();
        // Font sizes and measured text widths at the two end states; a font
        // size of 0 marks a null bitmap (see createAnimator).
        private final float startFontSize;
        private final float endFontSize;
        private final float startWidth;
        private final float endWidth;
        // Animated properties, driven by the ObjectAnimator via the setters.
        private float fontSize;
        private float left;
        private float top;
        private float right;
        private float bottom;
        private int textColor;

        public SwitchBitmapDrawable(TextView view, int gravity,
                Bitmap startBitmap, float startFontSize, float startWidth,
                Bitmap endBitmap, float endFontSize, float endWidth) {
            this.view = view;
            this.horizontalGravity = gravity & Gravity.HORIZONTAL_GRAVITY_MASK;
            this.verticalGravity = gravity & Gravity.VERTICAL_GRAVITY_MASK;
            this.startBitmap = startBitmap;
            this.endBitmap = endBitmap;
            this.startFontSize = startFontSize;
            this.endFontSize = endFontSize;
            this.startWidth = startWidth;
            this.endWidth = endWidth;
        }

        @Override
        public void invalidateSelf() {
            super.invalidateSelf();
            // Overlay drawables don't invalidate their host automatically.
            view.invalidate();
        }

        /**
         * Sets the font size that the text should be displayed at.
         *
         * @param fontSize The font size in pixels of the scaled bitmap text.
         */
        public void setFontSize(float fontSize) {
            this.fontSize = fontSize;
            invalidateSelf();
        }

        /**
         * Sets the color of the text to be displayed.
         *
         * @param textColor The color of the text to be displayed.
         */
        public void setTextColor(int textColor) {
            this.textColor = textColor;
            // Tint the (bitmap) text via a SRC_IN color filter on the paint.
            setColorFilter(textColor, PorterDuff.Mode.SRC_IN);
            invalidateSelf();
        }

        /**
         * Sets the left side of the text. This should be the same as the left padding.
         *
         * @param left The left side of the text in pixels.
         */
        public void setLeft(float left) {
            this.left = left;
            invalidateSelf();
        }

        /**
         * Sets the top of the text. This should be the same as the top padding.
         *
         * @param top The top of the text in pixels.
         */
        public void setTop(float top) {
            this.top = top;
            invalidateSelf();
        }

        /**
         * Sets the right of the drawable.
         *
         * @param right The right pixel of the drawn area.
         */
        public void setRight(float right) {
            this.right = right;
            invalidateSelf();
        }

        /**
         * Sets the bottom of the drawable.
         *
         * @param bottom The bottom pixel of the drawn area.
         */
        public void setBottom(float bottom) {
            this.bottom = bottom;
            invalidateSelf();
        }

        /**
         * @return The left side of the text.
         */
        public float getLeft() {
            return left;
        }

        /**
         * @return The top of the text.
         */
        public float getTop() {
            return top;
        }

        /**
         * @return The right side of the text.
         */
        public float getRight() {
            return right;
        }

        /**
         * @return The bottom of the text.
         */
        public float getBottom() {
            return bottom;
        }

        /**
         * @return The font size of the text in the displayed bitmap.
         */
        public float getFontSize() {
            return fontSize;
        }

        /**
         * @return The color of the text being displayed.
         */
        public int getTextColor() {
            return textColor;
        }

        @Override
        public void draw(Canvas canvas) {
            int saveCount = canvas.save();
            // The threshold changes depending on the target font sizes. Because scaled-up
            // fonts look bad, we want to switch when closer to the smaller font size. This
            // algorithm ensures that null bitmaps (font size = 0) are never used.
            final float threshold = startFontSize / (startFontSize + endFontSize);
            final float fontSize = getFontSize();
            final float progress = (fontSize - startFontSize)/(endFontSize - startFontSize);
            // The drawn text width is a more accurate scale than font size. This avoids
            // jump when switching bitmaps.
            final float expectedWidth = interpolate(startWidth, endWidth, progress);
            if (progress < threshold) {
                // draw start bitmap
                final float scale = expectedWidth / startWidth;
                float tx = getTranslationPoint(horizontalGravity, left, right,
                        startBitmap.getWidth(), scale);
                float ty = getTranslationPoint(verticalGravity, top, bottom,
                        startBitmap.getHeight(), scale);
                canvas.translate(tx, ty);
                canvas.scale(scale, scale);
                canvas.drawBitmap(startBitmap, 0, 0, paint);
            } else {
                // draw end bitmap
                final float scale = expectedWidth / endWidth;
                float tx = getTranslationPoint(horizontalGravity, left, right,
                        endBitmap.getWidth(), scale);
                float ty = getTranslationPoint(verticalGravity, top, bottom,
                        endBitmap.getHeight(), scale);
                canvas.translate(tx, ty);
                canvas.scale(scale, scale);
                canvas.drawBitmap(endBitmap, 0, 0, paint);
            }
            canvas.restoreToCount(saveCount);
        }

        @Override
        public void setAlpha(int alpha) {
            // No-op: alpha is not supported by this drawable.
        }

        @Override
        public void setColorFilter(ColorFilter colorFilter) {
            paint.setColorFilter(colorFilter);
        }

        @Override
        public int getOpacity() {
            return PixelFormat.TRANSLUCENT;
        }

        /**
         * Computes where to translate the canvas along one axis so the scaled
         * bitmap honors the captured gravity.
         */
        private float getTranslationPoint(int gravity, float start, float end, float dim,
                float scale) {
            switch (gravity) {
                case Gravity.CENTER_HORIZONTAL:
                case Gravity.CENTER_VERTICAL:
                    return ((start + end) - (dim * scale))/2f;
                case Gravity.RIGHT:
                case Gravity.BOTTOM:
                    return end - (dim * scale);
                case Gravity.LEFT:
                case Gravity.TOP:
                default:
                    return start;
            }
        }
    }

    /**
     * Contains all the non-font-size data used by the TextResize transition.
     * None of these values should trigger the transition, so they are not listed
     * in PROPERTIES. These are captured together to avoid boxing of all the
     * primitives while adding to TransitionValues.
     */
    static class TextResizeData {
        public final int paddingLeft;
        public final int paddingTop;
        public final int paddingRight;
        public final int paddingBottom;
        public final int width;
        public final int height;
        public final int gravity;
        public final int textColor;

        public TextResizeData(TextView textView) {
            this.paddingLeft = textView.getPaddingLeft();
            this.paddingTop = textView.getPaddingTop();
            this.paddingRight = textView.getPaddingRight();
            this.paddingBottom = textView.getPaddingBottom();
            this.width = textView.getWidth();
            this.height = textView.getHeight();
            this.gravity = textView.getGravity();
            this.textColor = textView.getCurrentTextColor();
        }
    }
}
| |
package com.fasterxml.jackson.core.io;
import java.io.*;
public final class UTF8Writer extends Writer
{
final static int SURR1_FIRST = 0xD800;
final static int SURR1_LAST = 0xDBFF;
final static int SURR2_FIRST = 0xDC00;
final static int SURR2_LAST = 0xDFFF;
final private IOContext _context;
private OutputStream _out;
private byte[] _outBuffer;
final private int _outBufferEnd;
private int _outPtr;
/**
* When outputting chars from BMP, surrogate pairs need to be coalesced.
* To do this, both pairs must be known first; and since it is possible
* pairs may be split, we need temporary storage for the first half
*/
private int _surrogate;
public UTF8Writer(IOContext ctxt, OutputStream out)
{
_context = ctxt;
_out = out;
_outBuffer = ctxt.allocWriteEncodingBuffer();
/* Max. expansion for a single char (in unmodified UTF-8) is
* 4 bytes (or 3 depending on how you view it -- 4 when recombining
* surrogate pairs)
*/
_outBufferEnd = _outBuffer.length - 4;
_outPtr = 0;
}
@Override
public Writer append(char c)
throws IOException
{
write(c);
return this;
}
@Override
public void close()
throws IOException
{
if (_out != null) {
if (_outPtr > 0) {
_out.write(_outBuffer, 0, _outPtr);
_outPtr = 0;
}
OutputStream out = _out;
_out = null;
byte[] buf = _outBuffer;
if (buf != null) {
_outBuffer = null;
_context.releaseWriteEncodingBuffer(buf);
}
out.close();
// Let's 'flush' orphan surrogate, no matter what; but only
// after cleanly closing everything else.
int code = _surrogate;
_surrogate = 0;
if (code > 0) {
illegalSurrogate(code);
}
}
}
@Override
public void flush()
throws IOException
{
if (_out != null) {
if (_outPtr > 0) {
_out.write(_outBuffer, 0, _outPtr);
_outPtr = 0;
}
_out.flush();
}
}
@Override
public void write(char[] cbuf)
throws IOException
{
write(cbuf, 0, cbuf.length);
}
@Override
public void write(char[] cbuf, int off, int len)
throws IOException
{
if (len < 2) {
if (len == 1) {
write(cbuf[off]);
}
return;
}
// First: do we have a leftover surrogate to deal with?
if (_surrogate > 0) {
char second = cbuf[off++];
--len;
write(convertSurrogate(second));
// will have at least one more char
}
int outPtr = _outPtr;
byte[] outBuf = _outBuffer;
int outBufLast = _outBufferEnd; // has 4 'spare' bytes
// All right; can just loop it nice and easy now:
len += off; // len will now be the end of input buffer
output_loop:
for (; off < len; ) {
/* First, let's ensure we can output at least 4 bytes
* (longest UTF-8 encoded codepoint):
*/
if (outPtr >= outBufLast) {
_out.write(outBuf, 0, outPtr);
outPtr = 0;
}
int c = cbuf[off++];
// And then see if we have an Ascii char:
if (c < 0x80) { // If so, can do a tight inner loop:
outBuf[outPtr++] = (byte)c;
// Let's calc how many ascii chars we can copy at most:
int maxInCount = (len - off);
int maxOutCount = (outBufLast - outPtr);
if (maxInCount > maxOutCount) {
maxInCount = maxOutCount;
}
maxInCount += off;
ascii_loop:
while (true) {
if (off >= maxInCount) { // done with max. ascii seq
continue output_loop;
}
c = cbuf[off++];
if (c >= 0x80) {
break ascii_loop;
}
outBuf[outPtr++] = (byte) c;
}
}
// Nope, multi-byte:
if (c < 0x800) { // 2-byte
outBuf[outPtr++] = (byte) (0xc0 | (c >> 6));
outBuf[outPtr++] = (byte) (0x80 | (c & 0x3f));
} else { // 3 or 4 bytes
// Surrogates?
if (c < SURR1_FIRST || c > SURR2_LAST) {
outBuf[outPtr++] = (byte) (0xe0 | (c >> 12));
outBuf[outPtr++] = (byte) (0x80 | ((c >> 6) & 0x3f));
outBuf[outPtr++] = (byte) (0x80 | (c & 0x3f));
continue;
}
// Yup, a surrogate:
if (c > SURR1_LAST) { // must be from first range
_outPtr = outPtr;
illegalSurrogate(c);
}
_surrogate = c;
// and if so, followed by another from next range
if (off >= len) { // unless we hit the end?
break;
}
c = convertSurrogate(cbuf[off++]);
if (c > 0x10FFFF) { // illegal in JSON as well as in XML
_outPtr = outPtr;
illegalSurrogate(c);
}
outBuf[outPtr++] = (byte) (0xf0 | (c >> 18));
outBuf[outPtr++] = (byte) (0x80 | ((c >> 12) & 0x3f));
outBuf[outPtr++] = (byte) (0x80 | ((c >> 6) & 0x3f));
outBuf[outPtr++] = (byte) (0x80 | (c & 0x3f));
}
}
_outPtr = outPtr;
}
@Override
public void write(int c) throws IOException
{
// First; do we have a left over surrogate?
if (_surrogate > 0) {
c = convertSurrogate(c);
// If not, do we start with a surrogate?
} else if (c >= SURR1_FIRST && c <= SURR2_LAST) {
// Illegal to get second part without first:
if (c > SURR1_LAST) {
illegalSurrogate(c);
}
// First part just needs to be held for now
_surrogate = c;
return;
}
if (_outPtr >= _outBufferEnd) { // let's require enough room, first
_out.write(_outBuffer, 0, _outPtr);
_outPtr = 0;
}
if (c < 0x80) { // ascii
_outBuffer[_outPtr++] = (byte) c;
} else {
int ptr = _outPtr;
if (c < 0x800) { // 2-byte
_outBuffer[ptr++] = (byte) (0xc0 | (c >> 6));
_outBuffer[ptr++] = (byte) (0x80 | (c & 0x3f));
} else if (c <= 0xFFFF) { // 3 bytes
_outBuffer[ptr++] = (byte) (0xe0 | (c >> 12));
_outBuffer[ptr++] = (byte) (0x80 | ((c >> 6) & 0x3f));
_outBuffer[ptr++] = (byte) (0x80 | (c & 0x3f));
} else { // 4 bytes
if (c > 0x10FFFF) { // illegal
illegalSurrogate(c);
}
_outBuffer[ptr++] = (byte) (0xf0 | (c >> 18));
_outBuffer[ptr++] = (byte) (0x80 | ((c >> 12) & 0x3f));
_outBuffer[ptr++] = (byte) (0x80 | ((c >> 6) & 0x3f));
_outBuffer[ptr++] = (byte) (0x80 | (c & 0x3f));
}
_outPtr = ptr;
}
}
@Override
public void write(String str) throws IOException
{
write(str, 0, str.length());
}
@Override
public void write(String str, int off, int len) throws IOException
{
if (len < 2) {
if (len == 1) {
write(str.charAt(off));
}
return;
}
// First: do we have a leftover surrogate to deal with?
if (_surrogate > 0) {
char second = str.charAt(off++);
--len;
write(convertSurrogate(second));
// will have at least one more char (case of 1 char was checked earlier on)
}
int outPtr = _outPtr;
byte[] outBuf = _outBuffer;
int outBufLast = _outBufferEnd; // has 4 'spare' bytes
// All right; can just loop it nice and easy now:
len += off; // len will now be the end of input buffer
output_loop:
for (; off < len; ) {
/* First, let's ensure we can output at least 4 bytes
* (longest UTF-8 encoded codepoint):
*/
if (outPtr >= outBufLast) {
_out.write(outBuf, 0, outPtr);
outPtr = 0;
}
int c = str.charAt(off++);
// And then see if we have an Ascii char:
if (c < 0x80) { // If so, can do a tight inner loop:
outBuf[outPtr++] = (byte)c;
// Let's calc how many ascii chars we can copy at most:
int maxInCount = (len - off);
int maxOutCount = (outBufLast - outPtr);
if (maxInCount > maxOutCount) {
maxInCount = maxOutCount;
}
maxInCount += off;
ascii_loop:
while (true) {
if (off >= maxInCount) { // done with max. ascii seq
continue output_loop;
}
c = str.charAt(off++);
if (c >= 0x80) {
break ascii_loop;
}
outBuf[outPtr++] = (byte) c;
}
}
// Nope, multi-byte:
if (c < 0x800) { // 2-byte
outBuf[outPtr++] = (byte) (0xc0 | (c >> 6));
outBuf[outPtr++] = (byte) (0x80 | (c & 0x3f));
} else { // 3 or 4 bytes
// Surrogates?
if (c < SURR1_FIRST || c > SURR2_LAST) {
outBuf[outPtr++] = (byte) (0xe0 | (c >> 12));
outBuf[outPtr++] = (byte) (0x80 | ((c >> 6) & 0x3f));
outBuf[outPtr++] = (byte) (0x80 | (c & 0x3f));
continue;
}
// Yup, a surrogate:
if (c > SURR1_LAST) { // must be from first range
_outPtr = outPtr;
illegalSurrogate(c);
}
_surrogate = c;
// and if so, followed by another from next range
if (off >= len) { // unless we hit the end?
break;
}
c = convertSurrogate(str.charAt(off++));
if (c > 0x10FFFF) { // illegal, as per RFC 4627
_outPtr = outPtr;
illegalSurrogate(c);
}
outBuf[outPtr++] = (byte) (0xf0 | (c >> 18));
outBuf[outPtr++] = (byte) (0x80 | ((c >> 12) & 0x3f));
outBuf[outPtr++] = (byte) (0x80 | ((c >> 6) & 0x3f));
outBuf[outPtr++] = (byte) (0x80 | (c & 0x3f));
}
}
_outPtr = outPtr;
}
/*
/**********************************************************
/* Internal methods
/**********************************************************
*/
/**
 * Combines the previously buffered high-surrogate unit (held in
 * {@code _surrogate}, which is cleared here) with the given second
 * UTF-16 unit into a single Unicode code point.
 *
 * @param secondPart Second UTF-16 unit of the surrogate pair (first part
 *   is taken from {@code _surrogate})
 *
 * @return Decoded Unicode code point
 *
 * @throws IOException If the second unit is not a valid low surrogate
 */
protected int convertSurrogate(int secondPart)
    throws IOException
{
    final int firstPart = _surrogate;
    _surrogate = 0;
    // Validate the trailing unit is really a low surrogate before combining.
    if (secondPart >= SURR2_FIRST && secondPart <= SURR2_LAST) {
        return 0x10000 + ((firstPart - SURR1_FIRST) << 10) + (secondPart - SURR2_FIRST);
    }
    throw new IOException("Broken surrogate pair: first char 0x"+Integer.toHexString(firstPart)+", second 0x"+Integer.toHexString(secondPart)+"; illegal combination");
}
/**
 * Fails with an {@link IOException} describing the given invalid
 * surrogate unit / code point.
 *
 * @param code Offending code point or surrogate unit
 *
 * @throws IOException Always thrown, carrying a descriptive message
 */
protected static void illegalSurrogate(int code) throws IOException {
    final String message = illegalSurrogateDesc(code);
    throw new IOException(message);
}
/**
 * Builds a human-readable description for an invalid surrogate unit or
 * out-of-range code point, distinguishing unmatched first/second parts
 * from plainly illegal values.
 *
 * @param code Offending code point or surrogate unit
 *
 * @return Description suitable for an exception message
 */
protected static String illegalSurrogateDesc(int code)
{
    // Beyond the Unicode range entirely?
    if (code > 0x10FFFF) {
        return "Illegal character point (0x"+Integer.toHexString(code)+") to output; max is 0x10FFFF as per RFC 4627";
    }
    if (code < SURR1_FIRST) {
        // should we ever get this?
        return "Illegal character point (0x"+Integer.toHexString(code)+") to output";
    }
    // In the surrogate range: decide which half was left unmatched.
    return (code <= SURR1_LAST)
        ? "Unmatched first part of surrogate pair (0x"+Integer.toHexString(code)+")"
        : "Unmatched second part of surrogate pair (0x"+Integer.toHexString(code)+")";
}
}
| |
//
// Created by Azer Bulbul on 12/29/13.
// Copyright (c) 2013 Azer Bulbul. All rights reserved.
//
package com.azer.camera;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import android.app.Activity;
import android.content.ContentValues;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.media.MediaScannerConnection;
import android.net.Uri;
import android.os.Environment;
import android.os.ParcelFileDescriptor;
import android.provider.MediaStore;
import android.provider.OpenableColumns;
import android.provider.MediaStore.MediaColumns;
import android.text.TextUtils;
import com.adobe.fre.FREBitmapData;
import com.adobe.fre.FREContext;
import com.adobe.fre.FREFunction;
/**
 * FREContext backing the camera-save AIR native extension. Exposes functions
 * to save FREBitmapData to the device gallery, query camera / image-picker
 * availability, capture or pick an image, and clean up temporary files.
 */
public class CameraSaveExtensionContext extends FREContext {

    @Override
    public void dispose()
    {
        cleardata();
        CameraSaveExtension.context = null;
    }

    @Override
    public Map<String, FREFunction> getFunctions() {
        Map<String, FREFunction> functionMap = new HashMap<String, FREFunction>();
        functionMap.put("saveBitmapData", new CameraSaveFunction());
        functionMap.put("isImagePickerAvailable", new IsImagePickerAvailableFunction());
        functionMap.put("isCameraAvailable", new IsCameraAvailableFunction());
        functionMap.put("deleteTempFile", new DeleteTempFileFunction());
        functionMap.put("browseForImage", new browseForImage());
        return functionMap;
    }

    /**
     * Releases the pending bitmap and FREBitmapData (best effort: any failure
     * is logged and ignored) and nulls both references.
     */
    public void cleardata(){
        if(bm!=null){
            try{bm.recycle();}
            catch (Exception e) {e.printStackTrace();}
            catch (Error e){e.printStackTrace(); }
        }
        if(inputValue!=null){
            try{inputValue.release();}
            catch(IllegalStateException e){e.printStackTrace();}
            catch (Exception e) {e.printStackTrace();}
            catch (Error e){e.printStackTrace();}
        }
        bm = null;
        inputValue = null;
    }

    /* save image state */
    // Bitmap pending to be written to disk (set by the save function).
    public Bitmap bm = null;
    // FREBitmapData backing bm; must be released after the write completes.
    public FREBitmapData inputValue = null;
    public MyMediaConnectorClient myscanner = null;

    /**
     * Broadcasts ACTION_MEDIA_MOUNTED so the gallery rescans external storage.
     * NOTE(review): this broadcast is rejected by modern Android versions --
     * confirm the minimum supported API level.
     *
     * @return true if the broadcast was sent, false on failure
     */
    private boolean resetExternalStorageMedia() {
        Boolean ret = true;
        try{
            Uri uri = Uri.parse("file://" + Environment.getExternalStorageDirectory());
            Intent intent = new Intent(Intent.ACTION_MEDIA_MOUNTED, uri);
            getActivity().sendBroadcast(intent);
            ret = true;
        }
        catch (Exception e) {e.printStackTrace(); ret = false;}
        catch (Error e){e.printStackTrace(); ret = false;}
        return (ret);
    }

    /** Notifies the media scanner about a single newly written file. */
    private void notifyMediaScannerService(String path) {
        MediaScannerConnection.scanFile(getActivity(),
            new String[] { path }, null,
            new MediaScannerConnection.OnScanCompletedListener() {
                public void onScanCompleted(String path, Uri uri) {}
            }
        );
    }

    /**
     * Writes {@code bm} as a JPEG into Pictures/MyFolder, triggers a media
     * scan on success, and dispatches "ok"/"err" back to ActionScript.
     */
    public void writeToCustomPath(){
        File path = Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_PICTURES + File.separator + "MyFolder" + File.separator);
        File file = new File(path, String.valueOf(System.currentTimeMillis())+".jpg");
        if(!path.exists())
        {
            path.mkdirs();
        }
        String eventCode = "ok";
        OutputStream os_ = null;
        try{
            os_ = new FileOutputStream(file);
            this.bm.compress(Bitmap.CompressFormat.JPEG, 90, os_);
            os_.flush();
            os_.close();
            os_ = null; // closed cleanly; nothing left for the finally block
            eventCode = "ok";
        }catch (FileNotFoundException e){
            e.printStackTrace();
            eventCode = "err";
        }catch (Exception e) {
            e.printStackTrace();
            eventCode = "err";
        }catch (Error e){
            e.printStackTrace();
            eventCode = "err";
        }finally{
            // FIX: close the stream on error paths too (it used to leak
            // whenever compress()/flush() threw).
            if (os_ != null) {
                try { os_.close(); } catch (Exception e) { e.printStackTrace(); }
            }
            try {this.inputValue.release();}
            catch (Exception e) {e.printStackTrace();}
            catch (Error e) {e.printStackTrace();}
            finally{this.inputValue = null;}
        }
        // FIX: compare string contents, not references (was: eventCode != "err").
        if(!"err".equals(eventCode)){
            try{
                MediaScannerConnection.scanFile(getActivity(),
                    new String[] { file.toString() }, null,
                    new MediaScannerConnection.OnScanCompletedListener() {
                        public void onScanCompleted(String path, Uri uri) {
                        }
                    });
            }
            catch (Exception e) {e.printStackTrace();}
            catch (Error e){e.printStackTrace();}
        }
        cleardata();
        this.inputValue = null;
        this.bm = null;
        dispatchStatusEventAsync(eventCode, "status");
    }

    /**
     * Inserts a new image row into the MediaStore, writes {@code bm} as a
     * JPEG through its content URI, then dispatches "ok"/"err".
     */
    public void writeToDisk(){
        SimpleDateFormat s = new SimpleDateFormat("ddMMyyyyhhmmss");
        String format = s.format(new Date());
        ContentValues values = new ContentValues();
        values.put(android.provider.MediaStore.Images.Media.MIME_TYPE, "image/jpeg");
        values.put(android.provider.MediaStore.Images.Media.DATE_TAKEN, System.currentTimeMillis());
        values.put(android.provider.MediaStore.Images.Media.TITLE, "IMG_"+ format);
        values.put(android.provider.MediaStore.Images.Media.DESCRIPTION, "2020");
        Uri uri_ = this.getActivity().getContentResolver().insert(android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
        String eventCode = "ok";
        OutputStream os_ = null;
        try{
            os_ = this.getActivity().getContentResolver().openOutputStream(uri_);
            this.bm.compress(Bitmap.CompressFormat.JPEG, 90, os_);
            os_.flush();
            os_.close();
            os_ = null; // closed cleanly
            if(resetExternalStorageMedia() == false){
                notifyMediaScannerService(uri_.getPath());
            }
            eventCode = "ok";
        }catch (FileNotFoundException e){
            e.printStackTrace();
            eventCode = "err";
        }catch (Exception e) {
            e.printStackTrace();
            eventCode = "err";
        }catch (Error e){
            e.printStackTrace();
            eventCode = "err";
        }finally{
            // FIX: close the stream on error paths (it used to leak).
            if (os_ != null) {
                try { os_.close(); } catch (Exception e) { e.printStackTrace(); }
            }
            // FIX: do NOT reset eventCode to "ok" here -- the old code did,
            // which masked write failures and reported success to the caller.
            try {this.inputValue.release();}
            catch (Exception e) {e.printStackTrace();}
            catch (Error e) {e.printStackTrace();}
            finally{this.inputValue = null;}
        }
        cleardata();
        this.inputValue = null;
        this.bm = null;
        dispatchStatusEventAsync(eventCode, "status");
    }

    /* browse image actions */
    public static final int NO_ACTION = -1;
    public static final int GALLERY_IMAGES_ONLY_ACTION = 0;
    public static final int CAMERA_IMAGE_ACTION = 1;
    private int _currentAction = NO_ACTION;

    /** @return true if a gallery image-picker activity is installed */
    public Boolean isImagePickerAvailable()
    {
        return isActionAvailable(GALLERY_IMAGES_ONLY_ACTION);
    }

    /** Launches the gallery image picker through the helper activity. */
    public void displayImagePicker()
    {
        startPickerActivityForAction(GALLERY_IMAGES_ONLY_ACTION);
    }

    /** @return true if the device has a (front or back) camera and an app that can capture images */
    public Boolean isCameraAvailable()
    {
        Boolean hasCameraFeature = getActivity().getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA);
        Boolean hasFrontCameraFeature = getActivity().getPackageManager().hasSystemFeature("android.hardware.camera.front");
        Boolean isAvailable = (hasFrontCameraFeature || hasCameraFeature) && (isActionAvailable(CAMERA_IMAGE_ACTION));
        return isAvailable;
    }

    /** @return true if at least one activity can handle the intent for the given action */
    private Boolean isActionAvailable(int action)
    {
        final PackageManager packageManager = getActivity().getPackageManager();
        List<ResolveInfo> list = packageManager.queryIntentActivities(getIntentForAction(action), PackageManager.MATCH_DEFAULT_ONLY);
        return list.size() > 0;
    }

    /** Builds the Intent corresponding to a picker/camera action code (null for unknown codes). */
    private Intent getIntentForAction(int action)
    {
        Intent intent;
        switch (action)
        {
            case GALLERY_IMAGES_ONLY_ACTION:
                intent = new Intent(Intent.ACTION_PICK, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
                return intent;
            case CAMERA_IMAGE_ACTION:
                return new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
            default:
                return null;
        }
    }

    /** Launches the camera capture UI through the helper activity. */
    public void displayCamera()
    {
        startPickerActivityForAction(CAMERA_IMAGE_ACTION);
    }

    /** Routes an activity result to the gallery or camera handler. */
    private void handleResultForAction(Intent data, int action)
    {
        if (action == GALLERY_IMAGES_ONLY_ACTION )
        {
            handleResultForGallery(data);
        }
        else if (action == CAMERA_IMAGE_ACTION )
        {
            handleResultForImageCamera(data);
        }
    }

    private String selectedImagePath;

    /**
     * Resolves the picked gallery URI to a local file path (copying Picasa /
     * Google Photos content into a temp file when needed) and dispatches the
     * resulting path via the "IMAGEPATH" event.
     */
    private void handleResultForGallery(Intent data)
    {
        Uri selectedImageUri = null;
        try{
            selectedImageUri = data.getData();
        } catch (Exception e) {
            e.printStackTrace();
        } catch (Error e){
            e.printStackTrace();
        }
        String fileManagerString = null;
        if(selectedImageUri!=null){
            try{
                fileManagerString = selectedImageUri.getPath();
            } catch (Exception e) {
                e.printStackTrace();
            } catch (Error e){
                e.printStackTrace();
            }
            try{
                selectedImagePath = getPath(selectedImageUri);
            } catch (Exception e) {
                e.printStackTrace();
            } catch (Error e){
                e.printStackTrace();
            }
            if(selectedImagePath == null){
                if(fileManagerString!=null){
                    selectedImagePath = fileManagerString;
                }
            }
        }
        // FIX: selectedImagePath can still be null here (no data URI, or path
        // resolution failed); the startsWith() chain below used to throw NPE.
        if (selectedImagePath != null) {
            if (selectedImagePath.startsWith("http")) {
                // remote URL: pass through untouched
            }
            else if (selectedImagePath.startsWith("content://com.google.android.gallery3d")) {
                try {
                    processPicasaMedia(selectedImagePath, ".jpg");
                    selectedImagePath = null;
                } catch (Exception e) {
                    selectedImagePath = null;
                    e.printStackTrace();
                } finally{
                    // the copy helper stores the temp path in _cameraOutputPath
                    selectedImagePath = _cameraOutputPath;
                }
            } else if (selectedImagePath.startsWith("content://com.google.android.apps.photos.content")
                    || selectedImagePath.startsWith("content://com.android.providers.media.documents")) {
                try {
                    processGooglePhotosMedia(selectedImagePath, ".jpg");
                    selectedImagePath = null;
                } catch (Exception e) {
                    selectedImagePath = null;
                    e.printStackTrace();
                } finally{
                    selectedImagePath = _cameraOutputPath;
                }
            }
        }
        if(selectedImagePath!=null){
            dispatchResultEvent("IMAGEPATH", selectedImagePath);
        }
        else if(fileManagerString!=null){
            dispatchResultEvent("IMAGEPATH", fileManagerString);
        }
        else if(_cameraOutputPath!=null){
            dispatchResultEvent("IMAGEPATH", _cameraOutputPath);
        }
        else {
            dispatchResultEvent("IMAGEPATH", "");
        }
    }

    /** Dispatches the camera capture output path (empty string if unknown). */
    private void handleResultForImageCamera(Intent data)
    {
        if(_cameraOutputPath!=null){
            dispatchResultEvent("IMAGEPATH", _cameraOutputPath);
        }
        else {
            dispatchResultEvent("IMAGEPATH", "");
        }
    }

    CameraSaveNativeActivity _pickerActivity;

    /** Remembers the requested action and launches the transparent helper activity. */
    private void startPickerActivityForAction(int action)
    {
        _currentAction = action;
        Intent intent = new Intent(getActivity().getApplicationContext(), CameraSaveNativeActivity.class);
        getActivity().startActivity(intent);
    }

    /** Called by the helper activity once created; fires the actual picker/camera intent. */
    public void onCreatePickerActivity(CameraSaveNativeActivity pickerActivity)
    {
        if (_currentAction != NO_ACTION)
        {
            Intent intent = getIntentForAction(_currentAction);
            prepareIntentForAction(intent, _currentAction);
            _pickerActivity = pickerActivity;
            _pickerActivity.startActivityForResult(intent, _currentAction);
        }
    }

    /** Adds action-specific extras (camera output file) before launch. */
    private void prepareIntentForAction(Intent intent, int action)
    {
        if (action == CAMERA_IMAGE_ACTION)
        {
            prepareIntentForPictureCamera(intent);
        }
    }

    /** Receives the picker/camera result from the helper activity. */
    public void onPickerActivityResult(int requestCode, int resultCode, Intent data)
    {
        if (requestCode == _currentAction && resultCode == Activity.RESULT_OK)
        {
            handleResultForAction(data, _currentAction);
        }
        else
        {
            dispatchResultEvent("DID_CANCEL");
        }
    }

    /** Finishes the helper activity and dispatches the result event to ActionScript. */
    private void dispatchResultEvent(String eventName, String message)
    {
        _currentAction = NO_ACTION;
        if (_pickerActivity != null)
        {
            _pickerActivity.finish();
        }
        dispatchStatusEventAsync(eventName, message);
    }

    /** Convenience overload dispatching with the default "OK" payload. */
    private void dispatchResultEvent(String eventName)
    {
        dispatchResultEvent(eventName, "OK");
    }

    /**
     * Resolves a content URI to a file path via the MediaStore DATA column;
     * Picasa-style URIs are returned unchanged so callers can stream-copy them.
     */
    private String getPath(Uri selectedImage)
    {
        final String[] filePathColumn = { MediaColumns.DATA, MediaColumns.DISPLAY_NAME };
        Cursor cursor = getActivity().getContentResolver().query(selectedImage, filePathColumn, null, null, null);
        // Some devices return an URI of com.android instead of com.google.android
        // NOTE(review): the rewrite happens after the query above -- confirm
        // this ordering is intentional before changing it.
        if (selectedImage.toString().startsWith("content://com.android.gallery3d.provider"))
        {
            selectedImage = Uri.parse( selectedImage.toString().replace("com.android.gallery3d", "com.google.android.gallery3d") );
        }
        if (cursor != null)
        {
            try
            {
                cursor.moveToFirst();
                int columnIndex = cursor.getColumnIndex(MediaColumns.DATA);
                // Picasa / Google Photos / documents-provider images have no
                // usable DATA path; hand back the URI itself.
                if (selectedImage.toString().startsWith("content://com.google.android.gallery3d")
                    || selectedImage.toString().startsWith("content://com.google.android.apps.photos.content")
                    || selectedImage.toString().startsWith("content://com.android.providers.media.documents")
                )
                {
                    columnIndex = cursor.getColumnIndex(MediaColumns.DISPLAY_NAME);
                    return selectedImage.toString();
                }
                else
                {
                    return cursor.getString(columnIndex);
                }
            }
            finally
            {
                // FIX: the cursor used to leak -- it was never closed.
                cursor.close();
            }
        }
        else if ( selectedImage != null && selectedImage.toString().length() > 0 )
        {
            return selectedImage.toString();
        }
        else return null;
    }

    /**
     * Copies a Picasa content stream into a local temp file whose path is
     * stored in {@code _cameraOutputPath}.
     *
     * @throws Exception if the content cannot be opened or copied
     */
    protected void processPicasaMedia(String path, String extension) throws Exception {
        InputStream inputStream = null;
        BufferedOutputStream outStream = null;
        try {
            inputStream = getActivity().getContentResolver().openInputStream(Uri.parse(path));
            File tempFile = getTemporaryImageFile(extension);
            _cameraOutputPath = tempFile.getAbsolutePath();
            outStream = new BufferedOutputStream(
                new FileOutputStream(_cameraOutputPath));
            byte[] buf = new byte[2048];
            int len;
            while ((len = inputStream.read(buf)) > 0) {
                outStream.write(buf, 0, len);
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            throw e;
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        } finally {
            // FIX: close both streams on all paths (they used to leak when
            // the copy failed part-way).
            if (inputStream != null) { try { inputStream.close(); } catch (Exception e) { e.printStackTrace(); } }
            if (outStream != null) { try { outStream.close(); } catch (Exception e) { e.printStackTrace(); } }
        }
    }

    /**
     * Copies a Google Photos / documents-provider content stream into a local
     * temp file whose path is stored in {@code _cameraOutputPath}.
     *
     * @throws Exception if the content cannot be opened or copied
     */
    protected void processGooglePhotosMedia(String path, String extension) throws Exception {
        String retrievedExtension = checkExtension(Uri.parse(path));
        if (retrievedExtension != null && !TextUtils.isEmpty(retrievedExtension)) {
            extension = "." + retrievedExtension;
        }
        ParcelFileDescriptor parcelFileDescriptor = null;
        BufferedInputStream reader = null;
        BufferedOutputStream outStream = null;
        try {
            File tempFile = getTemporaryImageFile(extension);
            _cameraOutputPath = tempFile.getAbsolutePath();
            parcelFileDescriptor = getActivity().getContentResolver()
                .openFileDescriptor(Uri.parse(path), "r");
            FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
            reader = new BufferedInputStream(new FileInputStream(fileDescriptor));
            outStream = new BufferedOutputStream(
                new FileOutputStream(_cameraOutputPath));
            byte[] buf = new byte[2048];
            int len;
            while ((len = reader.read(buf)) > 0) {
                outStream.write(buf, 0, len);
            }
            outStream.flush();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            throw e;
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        } finally {
            // FIX: close streams/descriptor on all paths (they used to leak
            // whenever the copy failed part-way).
            if (reader != null) { try { reader.close(); } catch (Exception e) { e.printStackTrace(); } }
            if (outStream != null) { try { outStream.close(); } catch (Exception e) { e.printStackTrace(); } }
            if (parcelFileDescriptor != null) { try { parcelFileDescriptor.close(); } catch (Exception e) { e.printStackTrace(); } }
        }
    }

    // Absolute path of the most recent camera/temp output file.
    public String _cameraOutputPath = null;

    /**
     * Points the capture intent at a fresh temp file and remembers its path.
     * NOTE(review): Uri.fromFile() triggers FileUriExposedException on
     * API 24+ -- confirm target SDK / FileProvider usage.
     */
    private void prepareIntentForPictureCamera(Intent intent)
    {
        File tempFile = getTemporaryImageFile(".jpg");
        _cameraOutputPath = tempFile.getAbsolutePath();
        intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(tempFile));
    }

    /** Creates a uniquely named file in the extension's temp folder (marked with .nomedia). */
    private File getTemporaryImageFile( String extension )
    {
        // Get or create folder for temp files
        File tempFolder = new File(Environment.getExternalStorageDirectory()+File.separator+"AzrImagePicker");
        if (!tempFolder.exists())
        {
            tempFolder.mkdir();
            try
            {
                new File(tempFolder, ".nomedia").createNewFile();
            }
            catch (Exception e) {}
        }
        // Create temp file
        return new File(tempFolder, String.valueOf(System.currentTimeMillis())+extension);
    }

    /** Best-effort deletion of a temp file; failures are ignored. */
    public void deleteTemporaryImageFile(String filePath)
    {
        try{
            new File(filePath).delete();
        } catch(Exception e) {}
    }

    /**
     * Looks up the display name of a content URI and returns the substring
     * after its first '.' (the presumed extension); empty string when nothing
     * can be determined.
     */
    public String checkExtension(Uri uri) {
        String extension = "";
        // Single-document query: one row, all columns.
        Cursor cursor = getActivity().getContentResolver().query(uri, null, null, null, null);
        try {
            if (cursor != null && cursor.moveToFirst()) {
                // "Display Name" is provider-specific and is not necessarily
                // the underlying file name.
                String displayName = cursor.getString(cursor
                    .getColumnIndex(OpenableColumns.DISPLAY_NAME));
                // FIX: guard against a null display name (used to NPE).
                if (displayName != null) {
                    int position = displayName.indexOf(".");
                    // indexOf() yields -1 with no dot, so this falls back to
                    // the whole display name (original behavior, kept).
                    extension = displayName.substring(position + 1);
                }
            }
        } finally {
            // FIX: query() may return null; closing unconditionally used to NPE.
            if (cursor != null) {
                cursor.close();
            }
        }
        return extension;
    }

    /** Returns (creating if needed) an external-storage folder with a .nomedia marker. */
    public static String getDirectory(String foldername) {
        File directory = null;
        directory = new File(Environment.getExternalStorageDirectory().getAbsolutePath()
            + File.separator + foldername);
        if (!directory.exists()) {
            directory.mkdir();
            try
            {
                new File(directory, ".nomedia").createNewFile();
            }
            catch (Exception e) {}
        }
        return directory.getAbsolutePath();
    }

    /** @return the substring after the last '.' in filename, or "" on failure */
    public static String getFileExtension(String filename) {
        String extension = "";
        try {
            extension = filename.substring(filename.lastIndexOf(".") + 1);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return extension;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
/**
* Test verifies the setting and getting of disk id values are correctly
*/
/**
 * Unit tests verifying that DiskId values (oplog id, user bits, key id) are
 * stored and retrieved correctly, and that the expected DiskId subtype is
 * created for each max-oplog-size / persistence combination.
 */
public class DiskIdJUnitTest {
  /**
   * Tests that getOplogId returns exactly what setOplogId stored, for
   * negative, zero and positive values.
   */
  @Test
  public void testGetSetOplogId() throws Exception {
    DiskId did = getDiskId();
    did.setOplogId(-1);
    assertEquals(-1, did.getOplogId());
    did.setOplogId(0);
    assertEquals(0, did.getOplogId());
    did.setOplogId(1024);
    assertEquals(1024, did.getOplogId());
    did.setOplogId(-1024);
    assertEquals(-1024, did.getOplogId());
  }
  /**
   * Tests that getUserBits returns what setUserBits stored, and that the
   * individual EntryBits flags can be set and cleared independently.
   */
  @Test
  public void testGetSetUserBits() throws Exception {
    DiskId did = getDiskId();
    byte userBits = 0;
    // Turn the flags on one at a time; each new combination must round-trip.
    userBits = EntryBits.setSerialized(userBits, true);
    did.setUserBits(userBits);
    assertEquals(userBits, did.getUserBits());
    userBits = EntryBits.setInvalid(userBits, true);
    did.setUserBits(userBits);
    assertEquals(userBits, did.getUserBits());
    userBits = EntryBits.setLocalInvalid(userBits, true);
    did.setUserBits(userBits);
    assertEquals(userBits, did.getUserBits());
    assertTrue(EntryBits.isSerialized(userBits));
    assertTrue(EntryBits.isInvalid(userBits));
    assertTrue(EntryBits.isLocalInvalid(userBits));
    // Turn the flags off again; none may remain set afterwards.
    userBits = EntryBits.setSerialized(userBits, false);
    did.setUserBits(userBits);
    assertEquals(userBits, did.getUserBits());
    userBits = EntryBits.setInvalid(userBits, false);
    did.setUserBits(userBits);
    assertEquals(userBits, did.getUserBits());
    userBits = EntryBits.setLocalInvalid(userBits, false);
    did.setUserBits(userBits);
    assertFalse(EntryBits.isSerialized(userBits));
    assertFalse(EntryBits.isInvalid(userBits));
    assertFalse(EntryBits.isLocalInvalid(userBits));
    // Each flag set in isolation must not disturb the other flags.
    userBits = 0x0;
    userBits = EntryBits.setSerialized(userBits, true);
    did.setUserBits(userBits);
    assertTrue(EntryBits.isSerialized(userBits));
    assertFalse(EntryBits.isInvalid(userBits));
    assertFalse(EntryBits.isLocalInvalid(userBits));
    userBits = 0x0;
    userBits = EntryBits.setInvalid(userBits, true);
    did.setUserBits(userBits);
    assertFalse(EntryBits.isSerialized(userBits));
    assertTrue(EntryBits.isInvalid(userBits));
    assertFalse(EntryBits.isLocalInvalid(userBits));
    userBits = 0x0;
    userBits = EntryBits.setLocalInvalid(userBits, true);
    did.setUserBits(userBits);
    assertFalse(EntryBits.isSerialized(userBits));
    assertFalse(EntryBits.isInvalid(userBits));
    assertTrue(EntryBits.isLocalInvalid(userBits));
    // Tombstone + withVersions together leave the value-state flags clear.
    userBits = 0x0;
    userBits = EntryBits.setTombstone(userBits, true);
    userBits = EntryBits.setWithVersions(userBits, true);
    did.setUserBits(userBits);
    assertFalse(EntryBits.isLocalInvalid(userBits));
    assertFalse(EntryBits.isSerialized(userBits));
    assertFalse(EntryBits.isInvalid(userBits));
    assertTrue(EntryBits.isTombstone(userBits));
    assertTrue(EntryBits.isWithVersions(userBits));
  }
  /**
   * Tests that setting the oplog id and one user-bit flag per iteration does
   * not corrupt the other stored values.
   */
  @Test
  public void testAllOperationsValidatingResult1() {
    DiskId did = getDiskId();
    // NOTE: iterates ~33.5 million times; this is deliberate brute force.
    for (int i = -16777215; i < 16777215; i++) {
      boolean boolValuePerIteration = false;
      did.setOplogId(i);
      // true only when i % 3 == 0 (multiples of 3), false otherwise
      // (for negative i, i % 3 is 0, -1 or -2, so only case 0 can match)
      switch ((i % 3)) {
        case 0:
          boolValuePerIteration = true;
          break;
        case 1:
        case 2:
          boolValuePerIteration = false;
          break;
      }
      // For negative i, i % 4 is <= 0, so no user bit is written below.
      byte userbits = 0;
      switch (i % 4) {
        case 0:
          break;
        case 1:
          did.setUserBits(EntryBits.setSerialized(userbits, boolValuePerIteration));
          break;
        case 2:
          did.setUserBits(EntryBits.setInvalid(userbits, boolValuePerIteration));
          break;
        case 3:
          did.setUserBits(EntryBits.setLocalInvalid(userbits, boolValuePerIteration));
          break;
      }
      // The oplog id written above must survive the user-bit update.
      assertEquals(did.getOplogId(), i);
      byte userBits2 = did.getUserBits();
      switch (i % 4) {
        case 0:
          break;
        case 1:
          assertEquals(EntryBits.isSerialized(userBits2), boolValuePerIteration);
          break;
        case 2:
          assertEquals(EntryBits.isInvalid(userBits2), boolValuePerIteration);
          break;
        case 3:
          assertEquals(EntryBits.isLocalInvalid(userBits2), boolValuePerIteration);
          break;
      }
    }
  }
  /**
   * Tests that an instance of 'PersistenceIntOplogOffsetDiskId' is created when max-oplog-size (in
   * bytes) passed is smaller than Integer.MAX_VALUE
   */
  @Test
  public void testPersistIntDiskIdInstance() {
    int maxOplogSizeinMB = 2;
    DiskId diskId = DiskId.createDiskId(maxOplogSizeinMB, true /* is persistence type */, true);
    assertTrue(
        "Instance of 'PersistIntOplogOffsetDiskId' was not created though max oplog size (in bytes) was smaller than Integer.MAX_VALUE",
        DiskId.isInstanceofPersistIntOplogOffsetDiskId(diskId));
  }
  /**
   * Tests that an instance of 'LongOplogOffsetDiskId' is created when max-oplog-size (in bytes)
   * passed is greater than Integer.MAX_VALUE
   */
  @Test
  public void testPersistLongDiskIdInstance() {
    long maxOplogSizeInBytes = (long) Integer.MAX_VALUE + 1;
    int maxOplogSizeinMB = (int) (maxOplogSizeInBytes / (1024 * 1024));
    DiskId diskId = DiskId.createDiskId(maxOplogSizeinMB, true/* is persistence type */, true);
    assertTrue(
        "Instance of 'PersistLongOplogOffsetDiskId' was not created though max oplog size (in bytes) was greater than Integer.MAX_VALUE",
        DiskId.isInstanceofPersistLongOplogOffsetDiskId(diskId));
  }
  /**
   * Tests that an instance of 'OverflowIntOplogOffsetDiskId' is created when max-oplog-size (in
   * bytes) passed is smaller than Integer.MAX_VALUE
   */
  @Test
  public void testOverflowIntDiskIdInstance() {
    int maxOplogSizeinMB = 2;
    DiskId diskId = DiskId.createDiskId(maxOplogSizeinMB, false /* is overflow type */, true);
    assertTrue(
        "Instance of 'OverflowIntOplogOffsetDiskId' was not created though max oplog size (in bytes) was smaller than Integer.MAX_VALUE",
        DiskId.isInstanceofOverflowIntOplogOffsetDiskId(diskId));
  }
  /**
   * Tests that an instance of 'OverflowLongOplogOffsetDiskId' is created when max-oplog-size (in
   * bytes) passed is greater than Integer.MAX_VALUE
   */
  @Test
  public void testOverflowLongDiskIdInstance() {
    long maxOplogSizeInBytes = (long) Integer.MAX_VALUE + 1;
    int maxOplogSizeinMB = (int) (maxOplogSizeInBytes / (1024 * 1024));
    DiskId diskId = DiskId.createDiskId(maxOplogSizeinMB, false/* is overflow type */, true);
    assertTrue(
        "Instance of 'OverflowLongOplogOffsetDiskId' was not created though max oplog size (in bytes) was greater than Integer.MAX_VALUE",
        DiskId.isInstanceofOverflowOnlyWithLongOffset(diskId));
  }
  // Shared fixture: a persistent DiskId with a 1024 MB max oplog size.
  private DiskId getDiskId() {
    return DiskId.createDiskId(1024, true /* is persistence type */, true);
  }
  /**
   * Tests that unmarkForWriting on a persistent region's DiskId does not change the keyId.
   */
  @Test
  public void testPersistUnmarkForWrite() {
    DiskId diskId = getDiskId();
    diskId.setKeyId(11);
    diskId.unmarkForWriting();
    long newKeyId = diskId.getKeyId();
    assertEquals(11, newKeyId);
  }
  /**
   * Tests that markForWriting on a persistent region's DiskId throws IllegalStateException.
   */
  @Test
  public void testPersistMarkForWrite() {
    DiskId diskId = getDiskId();
    diskId.setKeyId(11);
    assertThatThrownBy(() -> diskId.markForWriting()).isInstanceOf(IllegalStateException.class);
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.androidenterprise.model;
/**
* A WebApps resource represents a web app created for an enterprise. Web apps are published to
* managed Google Play and can be distributed like other Android apps. On a user's device, a web app
* opens its specified URL.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Play EMM API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class WebApp extends com.google.api.client.json.GenericJson {
  /**
   * The display mode of the web app.
   *
   * Possible values include: - "minimalUi", the device's status bar, navigation bar, the app's
   * URL, and a refresh button are visible when the app is open. For HTTP URLs, you can only select
   * this option. - "standalone", the device's status bar and navigation bar are visible when the
   * app is open. - "fullScreen", the app opens in full screen mode, hiding the device's status and
   * navigation bars. All browser UI elements, page URL, system status bar and back button are not
   * visible, and the web app takes up the entirety of the available display area.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String displayMode;
  /**
   * A list of icons representing this website. If absent, a default icon (for create) or the
   * current icon (for update) will be used.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<WebAppIcon> icons;
  /**
   * A flag whether the app has been published to the Play store yet.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean isPublished;
  /**
   * The start URL, i.e. the URL that should load when the user opens the application.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String startUrl;
  /**
   * The title of the web app as displayed to the user (e.g., amongst a list of other applications,
   * or as a label for an icon).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String title;
  /**
   * The current version of the app.
   *
   * Note that the version can automatically increase during the lifetime of the web app, while
   * Google does internal housekeeping to keep the web app up-to-date.
   * The value may be {@code null}.
   */
  // {@code @JsonString}: this long is transmitted as a JSON string on the wire.
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long versionCode;
  /**
   * The ID of the application. A string of the form {@code app:<package name>} where the package
   * name always starts with the prefix "com.google.enterprise.webapp." followed by a random id.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String webAppId;
/**
* The display mode of the web app.
*
* Possible values include: - "minimalUi", the device's status bar, navigation bar, the app's
* URL, and a refresh button are visible when the app is open. For HTTP URLs, you can only select
* this option. - "standalone", the device's status bar and navigation bar are visible when the
* app is open. - "fullScreen", the app opens in full screen mode, hiding the device's status and
* navigation bars. All browser UI elements, page URL, system status bar and back button are not
* visible, and the web app takes up the entirety of the available display area.
* @return value or {@code null} for none
*/
public java.lang.String getDisplayMode() {
return displayMode;
}
/**
* The display mode of the web app.
*
* Possible values include: - "minimalUi", the device's status bar, navigation bar, the app's
* URL, and a refresh button are visible when the app is open. For HTTP URLs, you can only select
* this option. - "standalone", the device's status bar and navigation bar are visible when the
* app is open. - "fullScreen", the app opens in full screen mode, hiding the device's status and
* navigation bars. All browser UI elements, page URL, system status bar and back button are not
* visible, and the web app takes up the entirety of the available display area.
* @param displayMode displayMode or {@code null} for none
*/
public WebApp setDisplayMode(java.lang.String displayMode) {
this.displayMode = displayMode;
return this;
}
/**
* A list of icons representing this website. If absent, a default icon (for create) or the
* current icon (for update) will be used.
* @return value or {@code null} for none
*/
public java.util.List<WebAppIcon> getIcons() {
return icons;
}
/**
* A list of icons representing this website. If absent, a default icon (for create) or the
* current icon (for update) will be used.
* @param icons icons or {@code null} for none
*/
public WebApp setIcons(java.util.List<WebAppIcon> icons) {
this.icons = icons;
return this;
}
/**
* A flag whether the app has been published to the Play store yet.
* @return value or {@code null} for none
*/
public java.lang.Boolean getIsPublished() {
return isPublished;
}
/**
* A flag whether the app has been published to the Play store yet.
* @param isPublished isPublished or {@code null} for none
*/
public WebApp setIsPublished(java.lang.Boolean isPublished) {
this.isPublished = isPublished;
return this;
}
/**
* The start URL, i.e. the URL that should load when the user opens the application.
* @return value or {@code null} for none
*/
public java.lang.String getStartUrl() {
return startUrl;
}
/**
 * Sets the start URL, i.e. the URL that loads when the user opens the
 * application.
 * @param startUrl the start URL, or {@code null} for none
 * @return this {@code WebApp}, for call chaining
 */
public WebApp setStartUrl(java.lang.String startUrl) {
  this.startUrl = startUrl;
  return this;
}
/**
 * Returns the title shown to the user, e.g. in a list of applications or as
 * the label for an icon.
 * @return the title, or {@code null} if none is set
 */
public java.lang.String getTitle() {
  return this.title;
}
/**
 * Sets the title shown to the user, e.g. in a list of applications or as the
 * label for an icon.
 * @param title the title, or {@code null} for none
 * @return this {@code WebApp}, for call chaining
 */
public WebApp setTitle(java.lang.String title) {
  this.title = title;
  return this;
}
/**
 * Returns the current version of the app.
 * <p>
 * The version can increase automatically during the app's lifetime while
 * Google performs internal housekeeping to keep the web app up-to-date.
 * @return the version code, or {@code null} if none is set
 */
public java.lang.Long getVersionCode() {
  return this.versionCode;
}
/**
 * Sets the current version of the app.
 * <p>
 * The version can increase automatically during the app's lifetime while
 * Google performs internal housekeeping to keep the web app up-to-date.
 * @param versionCode the version code, or {@code null} for none
 * @return this {@code WebApp}, for call chaining
 */
public WebApp setVersionCode(java.lang.Long versionCode) {
  this.versionCode = versionCode;
  return this;
}
/**
 * Returns the application ID: a string of the form "app:" whose package name
 * always starts with the prefix "com.google.enterprise.webapp." followed by a
 * random id.
 * @return the web app ID, or {@code null} if none is set
 */
public java.lang.String getWebAppId() {
  return this.webAppId;
}
/**
 * Sets the application ID: a string of the form "app:" whose package name
 * always starts with the prefix "com.google.enterprise.webapp." followed by a
 * random id.
 * @param webAppId the web app ID, or {@code null} for none
 * @return this {@code WebApp}, for call chaining
 */
public WebApp setWebAppId(java.lang.String webAppId) {
  this.webAppId = webAppId;
  return this;
}
@Override
public WebApp set(String fieldName, Object value) {
// Store an arbitrary field via the generic-data superclass, narrowing the
// fluent return type to WebApp for chaining.
return (WebApp) super.set(fieldName, value);
}
@Override
public WebApp clone() {
// Covariant override: super.clone() copies this instance, so the cast is safe.
return (WebApp) super.clone();
}
}
| |
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
* <p/>
* Please see distribution for license.
*/
package com.opengamma.sesame.marketdata;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.PropertyDefinition;
import com.opengamma.analytics.financial.model.interestrate.curve.ForwardCurve;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
/**
 * Identifies a forward curve by name.
 * <p>
 * Instances are immutable Joda beans; obtain one via {@link #of(String)} or
 * {@link #builder()}. The ID is used as a typed key for looking up
 * {@link ForwardCurve} market data.
 */
@BeanDefinition
public class ForwardCurveId implements MarketDataId<ForwardCurve>, ImmutableBean {
/** The name of the forward curve. */
@PropertyDefinition(validate = "notEmpty")
private final String _name;
@Override
public Class<ForwardCurve> getMarketDataType() {
// The market data identified by this ID is always a forward curve.
return ForwardCurve.class;
}
/**
 * Returns an ID for a forward curve with the specified name
 *
 * @param name the forward curve name, not empty
 * @return an ID for a forward curve with the specified name
 */
public static ForwardCurveId of(String name) {
// Delegates to the generated builder, which enforces the notEmpty constraint.
return ForwardCurveId.builder().name(name).build();
}
// NOTE: everything below is produced by the Joda-Beans code generator from
// the annotated definitions above. Regenerate rather than hand-editing.
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
 * The meta-bean for {@code ForwardCurveId}.
 * @return the meta-bean, not null
 */
public static ForwardCurveId.Meta meta() {
return ForwardCurveId.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(ForwardCurveId.Meta.INSTANCE);
}
/**
 * Returns a builder used to create an instance of the bean.
 * @return the builder, not null
 */
public static ForwardCurveId.Builder builder() {
return new ForwardCurveId.Builder();
}
/**
 * Restricted constructor.
 * @param builder the builder to copy from, not null
 */
protected ForwardCurveId(ForwardCurveId.Builder builder) {
JodaBeanUtils.notEmpty(builder._name, "name");
this._name = builder._name;
}
@Override
public ForwardCurveId.Meta metaBean() {
return ForwardCurveId.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
/**
 * Gets the name of the forward curve.
 * @return the value of the property, not empty
 */
public String getName() {
return _name;
}
//-----------------------------------------------------------------------
/**
 * Returns a builder that allows this bean to be mutated.
 * @return the mutable builder, not null
 */
public Builder toBuilder() {
return new Builder(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
ForwardCurveId other = (ForwardCurveId) obj;
return JodaBeanUtils.equal(getName(), other.getName());
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(getName());
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(64);
buf.append("ForwardCurveId{");
int len = buf.length();
toString(buf);
if (buf.length() > len) {
buf.setLength(buf.length() - 2);
}
buf.append('}');
return buf.toString();
}
protected void toString(StringBuilder buf) {
buf.append("name").append('=').append(JodaBeanUtils.toString(getName())).append(',').append(' ');
}
//-----------------------------------------------------------------------
/**
 * The meta-bean for {@code ForwardCurveId}.
 */
public static class Meta extends DirectMetaBean {
/**
 * The singleton instance of the meta-bean.
 */
static final Meta INSTANCE = new Meta();
/**
 * The meta-property for the {@code name} property.
 */
private final MetaProperty<String> _name = DirectMetaProperty.ofImmutable(
this, "name", ForwardCurveId.class, String.class);
/**
 * The meta-properties.
 */
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"name");
/**
 * Restricted constructor.
 */
protected Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case 3373707: // name
return _name;
}
return super.metaPropertyGet(propertyName);
}
@Override
public ForwardCurveId.Builder builder() {
return new ForwardCurveId.Builder();
}
@Override
public Class<? extends ForwardCurveId> beanType() {
return ForwardCurveId.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
 * The meta-property for the {@code name} property.
 * @return the meta-property, not null
 */
public final MetaProperty<String> name() {
return _name;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case 3373707: // name
return ((ForwardCurveId) bean).getName();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
 * The bean-builder for {@code ForwardCurveId}.
 */
public static class Builder extends DirectFieldsBeanBuilder<ForwardCurveId> {
private String _name;
/**
 * Restricted constructor.
 */
protected Builder() {
}
/**
 * Restricted copy constructor.
 * @param beanToCopy the bean to copy from, not null
 */
protected Builder(ForwardCurveId beanToCopy) {
this._name = beanToCopy.getName();
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case 3373707: // name
return _name;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case 3373707: // name
this._name = (String) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public Builder setString(String propertyName, String value) {
setString(meta().metaProperty(propertyName), value);
return this;
}
@Override
public Builder setString(MetaProperty<?> property, String value) {
super.setString(property, value);
return this;
}
@Override
public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
super.setAll(propertyValueMap);
return this;
}
@Override
public ForwardCurveId build() {
return new ForwardCurveId(this);
}
//-----------------------------------------------------------------------
/**
 * Sets the {@code name} property in the builder.
 * @param name the new value, not empty
 * @return this, for chaining, not null
 */
public Builder name(String name) {
JodaBeanUtils.notEmpty(name, "name");
this._name = name;
return this;
}
//-----------------------------------------------------------------------
@Override
public String toString() {
StringBuilder buf = new StringBuilder(64);
buf.append("ForwardCurveId.Builder{");
int len = buf.length();
toString(buf);
if (buf.length() > len) {
buf.setLength(buf.length() - 2);
}
buf.append('}');
return buf.toString();
}
protected void toString(StringBuilder buf) {
buf.append("name").append('=').append(JodaBeanUtils.toString(_name)).append(',').append(' ');
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
/*
* JBoss, Home of Professional Open Source
*
* Copyright 2013 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xnio.ssl;
import static org.xnio._private.Messages.msg;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.concurrent.TimeUnit;
import org.xnio.channels.StreamSinkChannel;
import org.xnio.conduits.AbstractStreamSourceConduit;
import org.xnio.conduits.ConduitReadableByteChannel;
import org.xnio.conduits.Conduits;
import org.xnio.conduits.StreamSourceConduit;
/**
 * Jsse SSL source conduit implementation based on {@link JsseSslConduitEngine}.
 * <p>
 * When {@code tls} is false the conduit is a plain passthrough to the next
 * conduit; when true, reads are decrypted through the engine's unwrap buffer.
 *
 * @author <a href="mailto:frainone@redhat.com">Flavia Rainone</a>
 *
 */
final class JsseSslStreamSourceConduit extends AbstractStreamSourceConduit<StreamSourceConduit> {
// Engine used to unwrap TLS records into plaintext; never null (checked in constructor).
private final JsseSslConduitEngine sslEngine;
// When false, reads bypass the engine (passthrough mode); volatile because
// enableTls() may flip it from another thread.
private volatile boolean tls;
protected JsseSslStreamSourceConduit(StreamSourceConduit next, JsseSslConduitEngine sslEngine, boolean tls) {
super(next);
if (sslEngine == null) {
throw msg.nullParameter("sslEngine");
}
this.sslEngine = sslEngine;
this.tls = tls;
}
// Switches the conduit into TLS mode (e.g. after a STARTTLS-style exchange);
// wakes up reads so a resumed listener observes the mode change immediately.
void enableTls() {
tls = true;
if (isReadResumed()) {
wakeupReads();
}
}
@Override
public long transferTo(final long position, final long count, final FileChannel target) throws IOException {
// Route through a channel view of this conduit so bytes are decrypted first.
return target.transferFrom(new ConduitReadableByteChannel(this), position, count);
}
@Override
public long transferTo(final long count, final ByteBuffer throughBuffer, final StreamSinkChannel target) throws IOException {
return Conduits.transfer(this, count, throughBuffer, target);
}
@Override
public int read(ByteBuffer dst) throws IOException {
if (!tls) {
// Plain passthrough: delegate, but let EOF terminate reads.
final int res = super.read(dst);
if (res == -1) {
terminateReads();
}
return res;
}
// Nothing more will ever be produced: engine fully closed, or inbound
// closed with no decrypted data left over.
if ((!sslEngine.isDataAvailable() && sslEngine.isInboundClosed()) || sslEngine.isClosed()) {
return -1;
}
// If wire bytes are already buffered, try to satisfy the read from them
// before touching the underlying conduit.
final boolean attemptToUnwrapFirst;
synchronized(sslEngine.getUnwrapLock()) {
attemptToUnwrapFirst = sslEngine.getUnwrapBuffer().remaining() > 0;
}
if (attemptToUnwrapFirst) {
final int unwrapResult = sslEngine.unwrap(dst);
if (unwrapResult > 0) {
return unwrapResult;
}
}
// Refill the engine's unwrap buffer from the wire; compact() opens it for
// writing and the finally-flip() restores it for reading even if read() throws.
final int readResult;
synchronized(sslEngine.getUnwrapLock()) {
final ByteBuffer unwrapBuffer = sslEngine.getUnwrapBuffer().compact();
try {
readResult = super.read(unwrapBuffer);
} finally {
unwrapBuffer.flip();
}
}
final int unwrapResult = sslEngine.unwrap(dst);
if (unwrapResult == 0 && readResult == -1) {
// Wire EOF with nothing decrypted: propagate end-of-stream.
terminateReads();
return -1;
}
return unwrapResult;
}
@Override
public long read(ByteBuffer[] dsts, int offs, int len) throws IOException {
if (!tls) {
final long res = super.read(dsts, offs, len);
if (res == -1) {
terminateReads();
}
return res;
}
// NOTE(review): the 'offs > len' term looks suspicious — the usual bounds
// check is against dsts.length (already covered by 'offs + len > dsts.length'),
// and as written this rejects valid calls such as offs=3, len=1. Confirm.
if (offs < 0 || offs > len || len < 0 || offs + len > dsts.length) {
throw new ArrayIndexOutOfBoundsException();
}
if (sslEngine.isClosed() || (!sslEngine.isDataAvailable() && sslEngine.isInboundClosed())) {
return -1;
}
final int readResult;
final long unwrapResult;
synchronized (sslEngine.getUnwrapLock()) {
// retrieve buffer from sslEngine, to save some memory space
final ByteBuffer unwrapBuffer = sslEngine.getUnwrapBuffer().compact();
try {
readResult = super.read(unwrapBuffer);
} finally {
// restore the buffer for reading even when read() throws
unwrapBuffer.flip();
}
}
unwrapResult = sslEngine.unwrap(dsts, offs, len);
if (unwrapResult == 0 && readResult == -1) {
// Wire EOF with nothing decrypted: propagate end-of-stream.
terminateReads();
return -1;
}
return unwrapResult;
}
@Override
public void resumeReads() {
// During the initial handshake, force an immediate listener invocation via
// wakeupReads; otherwise resume normally.
if (tls && sslEngine.isFirstHandshake()) {
super.wakeupReads();
} else {
super.resumeReads();
}
}
@Override
public void terminateReads() throws IOException {
if (!tls) {
super.terminateReads();
return;
}
try {
sslEngine.closeInbound();
} catch (IOException ex) {
// Still terminate the underlying reads; if that also fails, keep the
// original failure visible as a suppressed exception.
try {
super.terminateReads();
} catch (IOException e2) {
e2.addSuppressed(ex);
throw e2;
}
throw ex;
}
}
@Override
public void awaitReadable() throws IOException {
if (tls) {
sslEngine.awaitCanUnwrap();
}
// NOTE(review): this availability check also runs when tls is false,
// consulting the engine in passthrough mode — confirm that is intended.
if(sslEngine.isDataAvailable()) {
return;
}
super.awaitReadable();
}
@Override
public void awaitReadable(long time, TimeUnit timeUnit) throws IOException {
if (!tls) {
super.awaitReadable(time, timeUnit);
return;
}
// Already-buffered wire data counts as readable.
synchronized (sslEngine.getUnwrapLock()) {
if(sslEngine.getUnwrapBuffer().hasRemaining()) {
return;
}
}
// Spend part of the timeout waiting for unwrap permission, then wait on the
// underlying conduit for whatever time remains.
long duration = timeUnit.toNanos(time);
long awaited = System.nanoTime();
sslEngine.awaitCanUnwrap(time, timeUnit);
awaited = System.nanoTime() - awaited;
if (awaited > duration) {
return;
}
super.awaitReadable(duration - awaited, TimeUnit.NANOSECONDS);
}
}
| |
/**
* Copyright 2014 Cisco Systems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cisco.oss.foundation.directory.lookup;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.cisco.oss.foundation.directory.cache.ModelServiceClientCache;
import com.cisco.oss.foundation.directory.client.DirectoryServiceClient;
import com.cisco.oss.foundation.directory.entity.ModelMetadataKey;
import com.cisco.oss.foundation.directory.entity.ModelService;
import com.cisco.oss.foundation.directory.lifecycle.Stoppable;
/**
 * It is the DirectoryLookupService with a client-side cache.
 *
 * It caches ServiceInstance for quick lookup and provides the cache sync function to
 * sync the latest changes of the cached ServiceInstances.
 *
 *
 */
public class CachedDirectoryLookupService extends DirectoryLookupService implements Stoppable {
private static final Logger LOGGER = LoggerFactory
.getLogger(CachedDirectoryLookupService.class);
/**
 * The LookupManager cache sync executor kickoff delay time property name in seconds.
 * @deprecated
 * use {@link DirectoryLookupService#SD_API_POLLING_DELAY_PROPERTY}
 */
public static final String SD_API_CACHE_SYNC_DELAY_PROPERTY = SD_API_POLLING_DELAY_PROPERTY;
/**
 * The default delay time of LookupManager cache sync executor kickoff.
 * @deprecated
 * use {@link DirectoryLookupService#SD_API_POLLING_DELAY_PROPERTY}
 */
public static final int SD_API_CACHE_SYNC_DELAY_DEFAULT = SD_API_POLLING_DELAY_DEFAULT;
/**
 * The LookupManager cache sync interval property name in seconds.
 * @deprecated
 * use {@link DirectoryLookupService#SD_API_POLLING_INTERVAL_PROPERTY}
 */
public static final String SD_API_CACHE_SYNC_INTERVAL_PROPERTY = SD_API_POLLING_INTERVAL_PROPERTY;
/**
 * The default LookupManager cache sync interval value.
 * @deprecated
 * use {@link DirectoryLookupService#SD_API_POLLING_INTERVAL_DEFAULT}
 */
public static final int SD_API_CACHE_SYNC_INTERVAL_DEFAULT = SD_API_POLLING_INTERVAL_DEFAULT;
/**
 * ScheduledExecutorService to sync cache.
 */
private final AtomicReference<ScheduledExecutorService> syncService = new AtomicReference<>();
/**
 * Internal cache that maps the service name to a list of service instances.
 */
private final ConcurrentHashMap<String, ModelServiceClientCache> cache = new ConcurrentHashMap<>();
/**
 * Internal cache that maps the metadata key name to a list of service instances.
private final ConcurrentHashMap<String, ModelMetadataKey> metaKeyCache = new ConcurrentHashMap<>();
 */
// NOTE(review): the metaKeyCache field above is commented out inside its own
// javadoc — either restore it or delete the dead declaration.
/**
 * Mark whether component is started.
 */
private final AtomicBoolean isStarted = new AtomicBoolean(false);
/**
 * Constructor.
 *
 * @param directoryServiceClient
 * the DirectoryServiceClient.
 */
public CachedDirectoryLookupService(DirectoryServiceClient directoryServiceClient) {
super(directoryServiceClient);
syncService.set(newSyncService());
}
// Creates the single-threaded daemon executor used for cache synchronization.
private ScheduledExecutorService newSyncService(){
return Executors
.newSingleThreadScheduledExecutor(new ThreadFactory() {
@Override
public Thread newThread(Runnable r) {
Thread t = new Thread(r);
t.setName("SD_Cache_Sync_Task");
t.setDaemon(true); // daemon thread: must not keep the JVM alive
return t;
}
});
}
/**
 * Start the CachedDirectoryLookupService.
 *
 * It is thread safe.
 *
 */
@Override
public void start(){
// compareAndSet guarantees the startup work runs at most once per
// stopped -> started transition, even under concurrent callers.
if (isStarted.compareAndSet(false,true)){
super.start();
initCacheSyncTask();
}
}
/**
 * Stop the CachedDirectoryLookupService.
 *
 * It is thread safe.
 *
 */
@Override
public void stop(){
if (isStarted.compareAndSet(true,false)) {
super.stop();
// A shut-down executor cannot be reused, so atomically swap in a fresh one
// (keeping the service restartable) before shutting the old one down.
ScheduledExecutorService service = this.syncService.getAndSet(newSyncService());
service.shutdown();
LOGGER.info("Cache sync Service is shutdown");
// Detach every cached entry's change listener, then drop the whole cache.
for (Entry<String,ModelServiceClientCache> entry : cache.entrySet()){
removeInstanceChangeListener(entry.getKey(),entry.getValue());
}
getCache().clear();
}
}
/**
 * Get the ModelService.
 *
 * It will query the cache first. If not found in the cache, the service will be added to the cache.
 *
 *
 * @param serviceName
 * the Service name.
 * @return
 * the ModelService.
 */
@Override
public ModelService getModelService(String serviceName){
ModelServiceClientCache cache = getCache().get(serviceName);
ModelService lookup;
if (cache == null) {
// no cache entry has been created for this service yet; initialize a new one
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("service has not been cached, try to cache the service {} ", serviceName);
}
lookup = super.getModelService(serviceName);
if (lookup!=null) {
// putIfAbsent followed by get tolerates a concurrent insert of the same service
getCache().putIfAbsent(serviceName, new ModelServiceClientCache(lookup));
cache = getCache().get(serviceName);
addInstanceChangeListener(serviceName, cache);
}
} else {
// the cached service data has been removed (the entry is obsolete)
if (cache.getData() == null) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("cached service={} is obsoleted, try to get service from server", serviceName);
}
lookup = super.getModelService(serviceName);
if (lookup != null) {
// replace old cached service by new one
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("try to replace the obsoleted cached service={} by service from server {}", serviceName,lookup.getServiceInstances());
}
removeInstanceChangeListener(serviceName, cache);
// replace() is conditional on the old value, so a concurrent replacement wins cleanly
boolean replaced = getCache().replace(serviceName, cache, new ModelServiceClientCache(lookup));
if (replaced) {
addInstanceChangeListener(serviceName, getCache().get(serviceName));
} else {
LOGGER.error("fail to replace the obsoleted cached service={}", serviceName);
}
}else{
LOGGER.error("fail to lookup service={} from server",serviceName);
}
}
// otherwise use the still-valid cached service
else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("service={} has been cached, get service from cache. {}", serviceName,cache.getData().getServiceInstances());
}
lookup = cache.getData();
}
}
return lookup;
}
/**
 * initialization of the CacheSyncTask
 */
private void initCacheSyncTask(){
// NOTE(review): nothing is scheduled on syncService here — only a log line.
// Presumably the periodic sync moved into DirectoryLookupService's polling;
// confirm, otherwise the executor held in syncService is never used.
LOGGER.info("Cache sync Service is started");
}
/**
 * Get the ServiceDirectoryCache that caches Services.
 *
 * It is thread safe.
 *
 * @return
 * the ServiceDirectoryCache.
 */
private ConcurrentHashMap<String, ModelServiceClientCache> getCache(){
return this.cache;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.io;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Arrays;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.apache.commons.io.testtools.FileBasedTestCase;
import org.apache.commons.io.testtools.YellOnCloseInputStream;
import org.apache.commons.io.testtools.YellOnFlushAndCloseOutputStream;
/**
 * JUnit tests for CopyUtils.
 * <p>
 * Each test copies the same pseudo-random fixture through one of the deprecated
 * {@code CopyUtils.copy(...)} overloads and verifies the bytes/characters arrive
 * intact. The YellOn* wrappers fail the test on an unexpected flush or close,
 * checking the "do not close the caller's stream" contract.
 *
 * @author Jeff Turner
 * @author Matthew Hawthorne
 * @author <a href="mailto:jeremias@apache.org">Jeremias Maerki</a>
 * @version $Id$
 * @see CopyUtils
 */
@SuppressWarnings("deprecation") // these are test cases for the deprecated CopyUtils
public class CopyUtilsTest extends FileBasedTestCase {

    /*
     * NOTE this is not particularly beautiful code. A better way to check for
     * flush and close status would be to implement "trojan horse" wrapper
     * implementations of the various stream classes, which set a flag when
     * relevant methods are called. (JT)
     */

    /** Fixture size: just over 4 KiB so copies cross internal buffer boundaries. */
    private static final int FILE_SIZE = 1024 * 4 + 1;

    /** Pseudo-random byte fixture shared by every test; never mutated. */
    private final byte[] inData = generateTestData(FILE_SIZE);

    public CopyUtilsTest(String testName) {
        super(testName);
    }

    // ----------------------------------------------------------------
    // Setup
    // ----------------------------------------------------------------

    @Override
    public void setUp() throws Exception {
        // no per-test state beyond the immutable fixture
    }

    @Override
    public void tearDown() throws Exception {
        // nothing to release
    }

    // ----------------------------------------------------------------
    // Tests
    // ----------------------------------------------------------------

    public void testCtor() {
        new CopyUtils();
        // Nothing to assert, the constructor is public and does not blow up.
    }

    public void testCopy_byteArrayToOutputStream() throws Exception {
        ByteArrayOutputStream baout = new ByteArrayOutputStream();
        OutputStream out = new YellOnFlushAndCloseOutputStream(baout, false, true);
        CopyUtils.copy(inData, out);
        assertEquals("Sizes differ", inData.length, baout.size());
        assertTrue("Content differs", Arrays.equals(inData, baout.toByteArray()));
    }

    public void testCopy_byteArrayToWriter() throws Exception {
        ByteArrayOutputStream baout = new ByteArrayOutputStream();
        OutputStream out = new YellOnFlushAndCloseOutputStream(baout, false, true);
        Writer writer = new java.io.OutputStreamWriter(out, "US-ASCII");
        CopyUtils.copy(inData, writer);
        writer.flush();
        assertEquals("Sizes differ", inData.length, baout.size());
        assertTrue("Content differs", Arrays.equals(inData, baout.toByteArray()));
    }

    public void testCopy_byteArrayToWriterWithEncoding() throws Exception {
        String inDataStr = "data";
        String charsetName = "UTF-8";
        StringWriter writer = new StringWriter();
        CopyUtils.copy(inDataStr.getBytes(charsetName), writer, charsetName);
        assertEquals(inDataStr, writer.toString());
    }

    public void testCopy_inputStreamToOutputStream() throws Exception {
        InputStream in = new ByteArrayInputStream(inData);
        in = new YellOnCloseInputStream(in);
        ByteArrayOutputStream baout = new ByteArrayOutputStream();
        OutputStream out = new YellOnFlushAndCloseOutputStream(baout, false, true);
        int count = CopyUtils.copy(in, out);
        assertEquals("Not all bytes were read", 0, in.available());
        assertEquals("Sizes differ", inData.length, baout.size());
        assertTrue("Content differs", Arrays.equals(inData, baout.toByteArray()));
        assertEquals(inData.length, count);
    }

    public void testCopy_inputStreamToWriter() throws Exception {
        InputStream in = new ByteArrayInputStream(inData);
        in = new YellOnCloseInputStream(in);
        ByteArrayOutputStream baout = new ByteArrayOutputStream();
        OutputStream out = new YellOnFlushAndCloseOutputStream(baout, false, true);
        Writer writer = new java.io.OutputStreamWriter(out, "US-ASCII");
        CopyUtils.copy(in, writer);
        writer.flush();
        assertEquals("Not all bytes were read", 0, in.available());
        assertEquals("Sizes differ", inData.length, baout.size());
        assertTrue("Content differs", Arrays.equals(inData, baout.toByteArray()));
    }

    public void testCopy_inputStreamToWriterWithEncoding() throws Exception {
        String inDataStr = "data";
        String charsetName = "UTF-8";
        StringWriter writer = new StringWriter();
        CopyUtils.copy(new ByteArrayInputStream(inDataStr.getBytes(charsetName)), writer, charsetName);
        assertEquals(inDataStr, writer.toString());
    }

    public void testCopy_readerToOutputStream() throws Exception {
        InputStream in = new ByteArrayInputStream(inData);
        in = new YellOnCloseInputStream(in);
        Reader reader = new java.io.InputStreamReader(in, "US-ASCII");
        ByteArrayOutputStream baout = new ByteArrayOutputStream();
        OutputStream out = new YellOnFlushAndCloseOutputStream(baout, false, true);
        CopyUtils.copy(reader, out);
        //Note: this method *does* flush. It is equivalent to:
        //  OutputStreamWriter _out = new OutputStreamWriter(fout);
        //  IOUtils.copy( fin, _out, 4096 ); // copy( Reader, Writer, int );
        //  _out.flush();
        //  out = fout;
        // Note: rely on the method to flush
        assertEquals("Sizes differ", inData.length, baout.size());
        assertTrue("Content differs", Arrays.equals(inData, baout.toByteArray()));
    }

    public void testCopy_readerToWriter() throws Exception {
        InputStream in = new ByteArrayInputStream(inData);
        in = new YellOnCloseInputStream(in);
        Reader reader = new java.io.InputStreamReader(in, "US-ASCII");
        ByteArrayOutputStream baout = new ByteArrayOutputStream();
        OutputStream out = new YellOnFlushAndCloseOutputStream(baout, false, true);
        Writer writer = new java.io.OutputStreamWriter(out, "US-ASCII");
        int count = CopyUtils.copy(reader, writer);
        writer.flush();
        assertEquals(
            "The number of characters returned by copy is wrong",
            inData.length,
            count);
        assertEquals("Sizes differ", inData.length, baout.size());
        assertTrue("Content differs", Arrays.equals(inData, baout.toByteArray()));
    }

    public void testCopy_stringToOutputStream() throws Exception {
        String str = new String(inData, "US-ASCII");
        ByteArrayOutputStream baout = new ByteArrayOutputStream();
        OutputStream out = new YellOnFlushAndCloseOutputStream(baout, false, true);
        CopyUtils.copy(str, out);
        //Note: this method *does* flush. It is equivalent to:
        //  OutputStreamWriter _out = new OutputStreamWriter(fout);
        //  IOUtils.copy( str, _out, 4096 ); // copy( Reader, Writer, int );
        //  _out.flush();
        //  out = fout;
        // note: we don't flush here; this IOUtils method does it for us
        assertEquals("Sizes differ", inData.length, baout.size());
        assertTrue("Content differs", Arrays.equals(inData, baout.toByteArray()));
    }

    public void testCopy_stringToWriter() throws Exception {
        String str = new String(inData, "US-ASCII");
        ByteArrayOutputStream baout = new ByteArrayOutputStream();
        OutputStream out = new YellOnFlushAndCloseOutputStream(baout, false, true);
        Writer writer = new java.io.OutputStreamWriter(out, "US-ASCII");
        CopyUtils.copy(str, writer);
        writer.flush();
        assertEquals("Sizes differ", inData.length, baout.size());
        assertTrue("Content differs", Arrays.equals(inData, baout.toByteArray()));
    }

} // CopyUtilsTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.