gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Copyright (C) 2015 adesso Schweiz AG (www.adesso.ch)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wte4j.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyMapOf;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.Before;
import org.junit.Test;
import org.wte4j.Formatter;
import org.wte4j.FormatterFactory;
import org.wte4j.FormatterInstantiationException;
import org.wte4j.Template;
import org.wte4j.TemplateBuildException;
import org.wte4j.UnknownFormatterException;
import org.wte4j.User;
import org.wte4j.WteDataModel;
import org.wte4j.WteModelService;
import org.wte4j.impl.format.ToStringFormatter;
/**
 * Unit tests for {@code WordTemplateBuilder}: building a template from a model
 * service, building from an explicit template file, argument validation, and
 * generation of a basic template document.
 */
public class WordTemplateBuilderTest {

    // Model elements returned by SimpleValueModelService; reset before each test.
    private Map<String, Class<?>> modelMap = null;
    // Required model properties returned by SimpleValueModelService; reset before each test.
    private List<String> requiredProperties = null;

    @Before
    public void initVars() {
        modelMap = new HashMap<String, Class<?>>();
        modelMap.put("value", String.class);
        requiredProperties = new ArrayList<String>();
    }

    /** A plain build() with valid collaborators yields a WordTemplate. */
    @Test
    public void testNormal() {
        TemplateContextFactory contextFactory = mock(TemplateContextFactory.class);
        WordTemplateBuilder<String> wtb = new WordTemplateBuilder<String>(contextFactory, new SimpleValueModelService(),
                String.class);
        wtb.setAuthor(new User("test1", "test2"));
        Template<String> wt = wtb.build();
        // Check non-null first so a failed build gives a clear message;
        // assertEquals takes (expected, actual).
        assertNotNull(wt);
        assertEquals(WordTemplate.class, wt.getClass());
    }

    /** A template built from a file must write back the identical bytes. */
    @Test
    public void buildWithFile() throws URISyntaxException, IOException {
        URL fileUrl = ClassLoader.getSystemResource("org/wte4j/basic-values-template.docx");
        Path file = Paths.get(fileUrl.toURI());
        TemplateContextFactory contextFactory = mock(TemplateContextFactory.class);
        WordTemplateBuilder<String> wtb = new WordTemplateBuilder<String>(contextFactory, new SimpleValueModelService(),
                String.class);
        Template<String> template = wtb.setAuthor(new User("test1", "test2"))
                .setTemplateFile(file)
                .build();
        ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
        template.write(bytesOut);
        assertTrue(Arrays.equals(Files.readAllBytes(file), bytesOut.toByteArray()));
    }

    /** All-null constructor arguments must be rejected. */
    @Test(expected = TemplateBuildException.class)
    public void testParametersMissing() {
        // @Test(expected=...) already fails the test if no exception is thrown,
        // so no trailing fail() is needed.
        new WordTemplateBuilder<String>(null, null, null);
    }

    /** build() must fail when the model service declares unmet required properties. */
    @Test(expected = TemplateBuildException.class)
    public void testRequiredPropertiesMissing() {
        requiredProperties.add("test");
        TemplateContextFactory contextFactory = mock(TemplateContextFactory.class);
        WordTemplateBuilder<String> wtb = new WordTemplateBuilder<String>(contextFactory, new SimpleValueModelService(),
                String.class);
        wtb.build();
    }

    /** The generated basic template must contain a content control per model element. */
    @Test
    public void createBasicTemplate() throws Exception {
        Map<String, Class<?>> elements = new HashMap<String, Class<?>>();
        elements.put("string", String.class);
        WteModelService service = mock(WteModelService.class);
        // Mockito matchers must be used inline in the stubbed call, in argument
        // order. The original stored them in local variables first, which both
        // violates the matcher contract and pushes them onto Mockito's matcher
        // stack in the wrong order (map matcher before class matcher).
        when(service.listModelElements(any(Class.class), anyMapOf(String.class, String.class)))
                .thenReturn(elements);
        TemplateContextFactory contextFactory = mock(TemplateContextFactory.class);
        WordTemplateBuilder<String> builder = new WordTemplateBuilder<String>(
                contextFactory, service, String.class);
        byte[] templateContent = builder.createBasicTemplate();
        File templateFile = File.createTempFile("generated", "docx");
        try {
            FileUtils.writeByteArrayToFile(templateFile, templateContent);
            String documentXml;
            // Close the zip file and entry stream deterministically (the
            // original leaked both).
            ZipFile zipFile = new ZipFile(templateFile);
            try {
                ZipEntry entry = zipFile.getEntry("word/document.xml");
                InputStream documentIn = zipFile.getInputStream(entry);
                try {
                    // OOXML document parts are UTF-8 encoded.
                    documentXml = IOUtils.toString(documentIn, "UTF-8");
                } finally {
                    documentIn.close();
                }
            } finally {
                zipFile.close();
            }
            // Strip pretty-printing so the expected fragment matches regardless
            // of indentation.
            documentXml = documentXml.replaceAll("\\n\\s+", "");
            String expected = "<w:sdt>"
                    + "<w:sdtPr>"//
                    + /* */"<w:tag w:val=\"string\"/>"//
                    + /* */"<w:text/>"//
                    + /* */"<w:showingPlcHdr/>"//
                    + /* */"<w15:appearance w15:val=\"tags\"/>"//
                    + /* */"<w:alias w:val=\"string\"/>"//
                    + "</w:sdtPr>"//
                    + "<w:sdtContent>"//
                    + /* */"<w:p><w:r>"
                    + /* */"<w:rPr><w:rStyle w:val=\"PlaceholderText\"/></w:rPr>"
                    + /* */"<w:t>string</w:t>"//
                    + /* */"</w:r></w:p>" //
                    + "</w:sdtContent>"//
                    + "</w:sdt>";
            assertTrue(documentXml.contains(expected));
        } finally {
            templateFile.delete();
        }
    }

    /**
     * Minimal WteModelService backed by the test's {@link #modelMap} and
     * {@link #requiredProperties} fields.
     */
    private class SimpleValueModelService implements WteModelService {
        @Override
        public Map<String, Class<?>> listModelElements(Class<?> inputClass,
                Map<String, String> properties) {
            return modelMap;
        }

        @Override
        public List<String> listRequiredModelProperties() {
            return requiredProperties;
        }

        @Override
        public WteDataModel createModel(Template<?> template, final Object input) {
            // Returns the element's declared class for known keys; unknown keys
            // are rejected with IllegalArgumentException.
            return new WteDataModel() {
                @Override
                public Object getValue(String key)
                        throws IllegalArgumentException {
                    if (modelMap.containsKey(key)) {
                        return modelMap.get(key);
                    } else {
                        throw new IllegalArgumentException();
                    }
                }
            };
        }
    }

    /**
     * Formatter factory that resolves the name "string" (and any default
     * request) to a shared ToStringFormatter. Currently unused by the tests
     * above; kept as a helper for formatter-related scenarios.
     */
    private class SimpleFormatterFactory implements FormatterFactory {
        private Formatter formatter = new ToStringFormatter();

        @Override
        public Formatter createFormatter(String name, List<String> args)
                throws UnknownFormatterException,
                FormatterInstantiationException {
            if (name.equals("string")) {
                return formatter;
            }
            throw new UnknownFormatterException(name);
        }

        @Override
        public Formatter createDefaultFormatter(Class<?> type)
                throws FormatterInstantiationException {
            return formatter;
        }
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.wm.impl;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.util.ExecUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.util.concurrency.AtomicFieldUpdater;
import org.jetbrains.annotations.Nullable;
import sun.awt.AWTAccessor;
import sun.misc.Unsafe;
import javax.swing.*;
import java.awt.*;
import java.awt.peer.ComponentPeer;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.intellij.util.ArrayUtil.newLongArray;
/**
 * Utilities for talking to an X11 window manager directly, bypassing AWT's
 * public API. All access goes through reflection into {@code sun.awt.X11.*}
 * and raw Xlib calls whose native structures are read/written via
 * {@link Unsafe}. Every public entry point degrades gracefully (returns
 * {@code null}/{@code false} or logs) when the toolkit is not XToolkit or the
 * WM does not speak the EWMH (_NET) protocol.
 */
public class X11UiUtil {
    private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.wm.impl.X11UiUtil");

    // Standard Xlib / X protocol constants (see X.h).
    private static final int True = 1;
    private static final int False = 0;
    private static final long None = 0;
    private static final long XA_ATOM = 4;
    private static final long XA_WINDOW = 33;
    private static final int CLIENT_MESSAGE = 33;       // ClientMessage event type
    private static final int FORMAT_BYTE = 8;           // 8-bit property format
    private static final int FORMAT_LONG = 32;          // 32-bit property format (stored as C long)
    private static final long EVENT_MASK = (3L << 19);  // SubstructureRedirectMask | SubstructureNotifyMask
    private static final long NET_WM_STATE_TOGGLE = 2;  // EWMH _NET_WM_STATE action code

    /**
     * Lazily-initialized reflective bridge to XToolkit's Xlib wrappers plus the
     * interned atoms this class needs. {@code null} members are never exposed:
     * {@link #getInstance()} either returns a fully-initialized instance or null.
     */
    @SuppressWarnings("SpellCheckingInspection")
    private static class Xlib {
        private Unsafe unsafe;
        private Method XGetWindowProperty;
        private Method XFree;
        private Method RootWindow;
        private Method XSendEvent;
        private Method getWindow;
        private Method getScreenNumber;
        private Method awtLock;
        private Method awtUnlock;
        private long display;
        private long UTF8_STRING;
        private long NET_SUPPORTING_WM_CHECK;
        private long NET_WM_NAME;
        private long NET_WM_ALLOWED_ACTIONS;
        private long NET_WM_STATE;
        private long NET_WM_ACTION_FULLSCREEN;
        private long NET_WM_STATE_FULLSCREEN;

        /**
         * Returns a ready-to-use bridge, or {@code null} when not running under
         * XToolkit, when reflection fails, or when the WM does not advertise
         * _NET protocol support via _NET_SUPPORTING_WM_CHECK.
         */
        @Nullable
        private static Xlib getInstance() {
            Class<? extends Toolkit> toolkitClass = Toolkit.getDefaultToolkit().getClass();
            if (!SystemInfo.isXWindow || !"sun.awt.X11.XToolkit".equals(toolkitClass.getName())) {
                return null;
            }

            try {
                Xlib x11 = new Xlib();

                // reflect on Xlib method wrappers and important structures
                Class<?> XlibWrapper = Class.forName("sun.awt.X11.XlibWrapper");
                x11.unsafe = AtomicFieldUpdater.getUnsafe();
                x11.XGetWindowProperty = method(XlibWrapper, "XGetWindowProperty", 12);
                x11.XFree = method(XlibWrapper, "XFree", 1);
                x11.RootWindow = method(XlibWrapper, "RootWindow", 2);
                x11.XSendEvent = method(XlibWrapper, "XSendEvent", 5);
                Class<?> XBaseWindow = Class.forName("sun.awt.X11.XBaseWindow");
                x11.getWindow = method(XBaseWindow, "getWindow");
                x11.getScreenNumber = method(XBaseWindow, "getScreenNumber");
                x11.display = (Long)method(toolkitClass, "getDisplay").invoke(null);
                x11.awtLock = method(toolkitClass, "awtLock");
                x11.awtUnlock = method(toolkitClass, "awtUnlock");

                // intern atoms
                Class<?> XAtom = Class.forName("sun.awt.X11.XAtom");
                Method get = method(XAtom, "get", String.class);
                Field atom = field(XAtom, "atom");
                x11.UTF8_STRING = (Long)atom.get(get.invoke(null, "UTF8_STRING"));
                x11.NET_SUPPORTING_WM_CHECK = (Long)atom.get(get.invoke(null, "_NET_SUPPORTING_WM_CHECK"));
                x11.NET_WM_NAME = (Long)atom.get(get.invoke(null, "_NET_WM_NAME"));
                x11.NET_WM_ALLOWED_ACTIONS = (Long)atom.get(get.invoke(null, "_NET_WM_ALLOWED_ACTIONS"));
                x11.NET_WM_STATE = (Long)atom.get(get.invoke(null, "_NET_WM_STATE"));
                x11.NET_WM_ACTION_FULLSCREEN = (Long)atom.get(get.invoke(null, "_NET_WM_ACTION_FULLSCREEN"));
                x11.NET_WM_STATE_FULLSCREEN = (Long)atom.get(get.invoke(null, "_NET_WM_STATE_FULLSCREEN"));

                // check for _NET protocol support
                Long netWmWindow = x11.getNetWmWindow();
                if (netWmWindow == null) {
                    LOG.info("_NET protocol is not supported");
                    return null;
                }

                return x11;
            }
            catch (Throwable t) {
                LOG.info("cannot initialize", t);
            }

            return null;
        }

        /** Returns the root window of the given screen; holds the AWT lock around the Xlib call. */
        private long getRootWindow(long screen) throws Exception {
            awtLock.invoke(null);
            try {
                return (Long)RootWindow.invoke(null, display, screen);
            }
            finally {
                awtUnlock.invoke(null);
            }
        }

        /** Returns the WM's check window from _NET_SUPPORTING_WM_CHECK on the root, or null. */
        @Nullable
        private Long getNetWmWindow() throws Exception {
            long rootWindow = getRootWindow(0);
            long[] values = getLongArrayProperty(rootWindow, NET_SUPPORTING_WM_CHECK, XA_WINDOW);
            return values != null && values.length > 0 ? values[0] : null;
        }

        @Nullable
        private long[] getLongArrayProperty(long window, long name, long type) throws Exception {
            return getWindowProperty(window, name, type, FORMAT_LONG);
        }

        @Nullable
        private String getUtfStringProperty(long window, long name) throws Exception {
            byte[] bytes = getWindowProperty(window, name, UTF8_STRING, FORMAT_BYTE);
            return bytes != null ? new String(bytes, CharsetToolkit.UTF8_CHARSET) : null;
        }

        /**
         * Reads a window property via XGetWindowProperty, returning it as either
         * {@code byte[]} (FORMAT_BYTE) or {@code long[]} (FORMAT_LONG), or
         * {@code null} on failure / format mismatch.
         * <p>
         * A 64-byte scratch buffer holds the call's out-parameters at fixed
         * offsets: +0 actual_type, +8 actual_format, +16 nitems, +24 bytes_after,
         * +32 prop pointer. Native long width differs between 32/64-bit JVMs,
         * hence the {@code SystemInfo.is64Bit} branches. The returned native
         * buffer is released with XFree.
         */
        @Nullable
        @SuppressWarnings("unchecked")
        private <T> T getWindowProperty(long window, long name, long type, long expectedFormat) throws Exception {
            long data = unsafe.allocateMemory(64);
            awtLock.invoke(null);
            try {
                unsafe.setMemory(data, 64, (byte)0);
                int result = (Integer)XGetWindowProperty.invoke(
                    null, display, window, name, 0L, 65535L, (long)False, type, data, data + 8, data + 16, data + 24, data + 32);
                if (result == 0) {
                    int format = unsafe.getInt(data + 8);
                    long pointer = SystemInfo.is64Bit ? unsafe.getLong(data + 32) : unsafe.getInt(data + 32);

                    if (pointer != None && format == expectedFormat) {
                        int length = SystemInfo.is64Bit ? (int)unsafe.getLong(data + 16) : unsafe.getInt(data + 16);
                        if (format == FORMAT_BYTE) {
                            byte[] bytes = new byte[length];
                            for (int i = 0; i < length; i++) bytes[i] = unsafe.getByte(pointer + i);
                            return (T)bytes;
                        }
                        else if (format == FORMAT_LONG) {
                            // FORMAT_LONG items occupy a C long each: 8 bytes on 64-bit, 4 on 32-bit.
                            long[] values = newLongArray(length);
                            for (int i = 0; i < length; i++) {
                                values[i] = SystemInfo.is64Bit ? unsafe.getLong(pointer + 8 * i) : unsafe.getInt(pointer + 4 * i);
                            }
                            return (T)values;
                        }
                        else if (format != None) {
                            LOG.info("unexpected format: " + format);
                        }
                    }

                    if (pointer != None) XFree.invoke(null, pointer);
                }
            }
            finally {
                awtUnlock.invoke(null);
                unsafe.freeMemory(data);
            }

            return null;
        }

        /**
         * Builds an XClientMessageEvent by hand and sends it to {@code target}.
         * Field offsets differ between 32-bit and 64-bit layouts of XEvent.
         * At most 5 data longs fit in the event's data payload.
         */
        private void sendClientMessage(long target, long window, long type, long... data) throws Exception {
            assert data.length <= 5;

            long event = unsafe.allocateMemory(128);
            awtLock.invoke(null);
            try {
                unsafe.setMemory(event, 128, (byte)0);
                unsafe.putInt(event, CLIENT_MESSAGE);
                if (!SystemInfo.is64Bit) {
                    unsafe.putInt(event + 8, True);          // send_event
                    unsafe.putInt(event + 16, (int)window);
                    unsafe.putInt(event + 20, (int)type);
                    unsafe.putInt(event + 24, FORMAT_LONG);
                    for (int i = 0; i < data.length; i++) {
                        unsafe.putInt(event + 28 + 4 * i, (int)data[i]);
                    }
                }
                else {
                    unsafe.putInt(event + 16, True);         // send_event
                    unsafe.putLong(event + 32, window);
                    // Fix: write the 'type' parameter, as the 32-bit branch does.
                    // The original wrote the NET_WM_STATE field here, silently
                    // ignoring the caller's message type on 64-bit JVMs.
                    unsafe.putLong(event + 40, type);
                    unsafe.putInt(event + 48, FORMAT_LONG);
                    for (int i = 0; i < data.length; i++) {
                        unsafe.putLong(event + 56 + 8 * i, data[i]);
                    }
                }
                XSendEvent.invoke(null, display, target, false, EVENT_MASK, event);
            }
            finally {
                awtUnlock.invoke(null);
                unsafe.freeMemory(event);
            }
        }
    }

    @Nullable private static final Xlib X11 = Xlib.getInstance();

    // WM detection and patching

    /** Returns the window manager's name from _NET_WM_NAME on the WM check window, or null. */
    @Nullable
    public static String getWmName() {
        if (X11 == null) return null;

        try {
            Long netWmWindow = X11.getNetWmWindow();
            if (netWmWindow != null) {
                return X11.getUtfStringProperty(netWmWindow, X11.NET_WM_NAME);
            }
        }
        catch (Throwable t) {
            LOG.info("cannot get WM name", t);
        }

        return null;
    }

    /**
     * Makes AWT impersonate a WM it knows about when the detected WM is an
     * unknown derivative (e.g. Muffin is a Mutter fork). Gated by the
     * "ide.x11.override.wm" registry key.
     */
    @SuppressWarnings("SpellCheckingInspection")
    public static void patchDetectedWm(String wmName) {
        if (X11 == null || !Registry.is("ide.x11.override.wm")) return;

        try {
            if ("Muffin".equals(wmName)) {
                setWM("MUTTER_WM");
            }
            else if ("Marco".equals(wmName)) {
                setWM("MARCO_WM", "METACITY_WM");
            }
            else if ("awesome".equals(wmName)) {
                String version = getAwesomeWMVersion();
                // Fix: branches were swapped. awesome 3.5+ is a (mostly)
                // non-reparenting WM; older builds are treated like Sawfish.
                if (StringUtil.compareVersionNumbers(version, "3.5") >= 0) {
                    setWM("OTHER_NONREPARENTING_WM", "LG3D_WM");
                }
                else if (version != null) {
                    setWM("SAWFISH_WM");
                }
            }
        }
        catch (Throwable t) {
            LOG.warn(t);
        }
    }

    /**
     * Sets sun.awt.X11.XWM's detected-WM id to the first of the given constant
     * names that exists in the running JDK (names vary across JDK versions).
     */
    private static void setWM(String... wmConstants) throws Exception {
        Class<?> xwmClass = Class.forName("sun.awt.X11.XWM");
        Object xwm = method(xwmClass, "getWM").invoke(null);
        if (xwm != null) {
            for (String wmConstant : wmConstants) {
                try {
                    Field wm = field(xwmClass, wmConstant);
                    Object id = wm.get(null);
                    if (id != null) {
                        field(xwmClass, "awt_wmgr").set(null, id);
                        field(xwmClass, "WMID").set(xwm, id);
                        LOG.info("impersonated WM: " + wmConstant);
                        break;
                    }
                }
                catch (NoSuchFieldException ignore) { }
            }
        }
    }

    /** Runs "awesome --version" and extracts the version number, or returns null. */
    @Nullable
    private static String getAwesomeWMVersion() {
        try {
            String version = ExecUtil.execAndReadLine(new GeneralCommandLine("awesome", "--version"));
            if (version != null) {
                Matcher m = Pattern.compile("awesome v([0-9.]+)").matcher(version);
                if (m.find()) {
                    return m.group(1);
                }
            }
        }
        catch (Throwable t) {
            LOG.warn(t);
        }

        return null;
    }

    // full-screen support

    /** True when the WM advertises _NET_WM_ACTION_FULLSCREEN for the first project frame. */
    public static boolean isFullScreenSupported() {
        if (X11 == null) return false;
        IdeFrame[] frames = WindowManager.getInstance().getAllProjectFrames();
        if (frames.length == 0) return true; // no frame to check the property so be optimistic here
        return frames[0] instanceof JFrame && hasWindowProperty((JFrame)frames[0], X11.NET_WM_ALLOWED_ACTIONS, X11.NET_WM_ACTION_FULLSCREEN);
    }

    /** True when the frame's _NET_WM_STATE contains _NET_WM_STATE_FULLSCREEN. */
    public static boolean isInFullScreenMode(JFrame frame) {
        return X11 != null && hasWindowProperty(frame, X11.NET_WM_STATE, X11.NET_WM_STATE_FULLSCREEN);
    }

    /** True when the atom-list window property {@code name} contains {@code expected}. */
    private static boolean hasWindowProperty(JFrame frame, long name, long expected) {
        if (X11 == null) return false;

        try {
            ComponentPeer peer = AWTAccessor.getComponentAccessor().getPeer(frame);
            if (peer != null) {
                long window = (Long)X11.getWindow.invoke(peer);
                long[] values = X11.getLongArrayProperty(window, name, XA_ATOM);
                if (values != null) {
                    for (long value : values) {
                        if (value == expected) return true;
                    }
                }
            }
            return false;
        }
        catch (Throwable t) {
            LOG.info("cannot check window property", t);
            return false;
        }
    }

    /** Asks the WM to toggle fullscreen state by sending a _NET_WM_STATE client message to the root window. */
    public static void toggleFullScreenMode(JFrame frame) {
        if (X11 == null) return;

        try {
            ComponentPeer peer = AWTAccessor.getComponentAccessor().getPeer(frame);
            if (peer == null) throw new IllegalStateException(frame + " has no peer");
            long window = (Long)X11.getWindow.invoke(peer);
            long screen = (Long)X11.getScreenNumber.invoke(peer);
            long rootWindow = X11.getRootWindow(screen);
            X11.sendClientMessage(rootWindow, window, X11.NET_WM_STATE, NET_WM_STATE_TOGGLE, X11.NET_WM_STATE_FULLSCREEN);
        }
        catch (Throwable t) {
            LOG.info("cannot toggle mode", t);
        }
    }

    // reflection utilities

    /** Finds a declared method by exact signature, walking up the class hierarchy. */
    private static Method method(Class<?> aClass, String name, Class<?>... parameterTypes) throws Exception {
        while (aClass != null) {
            try {
                Method method = aClass.getDeclaredMethod(name, parameterTypes);
                method.setAccessible(true);
                return method;
            }
            catch (NoSuchMethodException e) {
                aClass = aClass.getSuperclass();
            }
        }
        throw new NoSuchMethodException(name);
    }

    /** Finds a declared method by name and parameter count (signature types unknown at compile time). */
    private static Method method(Class<?> aClass, String name, int parameters) throws Exception {
        for (Method method : aClass.getDeclaredMethods()) {
            if (name.equals(method.getName()) && method.getParameterTypes().length == parameters) {
                method.setAccessible(true);
                return method;
            }
        }
        throw new NoSuchMethodException(name);
    }

    /** Finds a declared field and makes it accessible. */
    private static Field field(Class<?> aClass, String name) throws Exception {
        Field field = aClass.getDeclaredField(name);
        field.setAccessible(true);
        return field;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.bootstrap;
import org.apache.lucene.util.Constants;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import java.io.FilePermission;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.PermissionCollection;
import java.security.Permissions;
import java.util.Set;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasToString;
@SuppressForbidden(reason = "modifies system properties and attempts to create symbolic links intentionally")
public class EvilSecurityTests extends ESTestCase {

    /** test generated permissions */
    public void testGeneratedPermissions() throws Exception {
        Path path = createTempDir();
        // make a fake ES home and ensure we only grant permissions to that.
        Path esHome = path.resolve("esHome");
        Settings.Builder settingsBuilder = Settings.builder();
        settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.toString());
        Settings settings = settingsBuilder.build();

        // Point java.io.tmpdir at a sandbox dir while permissions are computed,
        // restoring the real value afterwards so other tests are unaffected.
        Path fakeTmpDir = createTempDir();
        String realTmpDir = System.getProperty("java.io.tmpdir");
        Permissions permissions;
        try {
            System.setProperty("java.io.tmpdir", fakeTmpDir.toString());
            Environment environment = new Environment(settings);
            permissions = Security.createPermissions(environment);
        } finally {
            System.setProperty("java.io.tmpdir", realTmpDir);
        }

        // the fake es home
        assertNoPermissions(esHome, permissions);
        // its parent
        assertNoPermissions(esHome.getParent(), permissions);
        // some other sibling
        assertNoPermissions(esHome.getParent().resolve("other"), permissions);
        // double check we overwrote java.io.tmpdir correctly for the test
        assertNoPermissions(PathUtils.get(realTmpDir), permissions);
    }

    /** test generated permissions for all configured paths */
    @SuppressWarnings("deprecation") // needs to check settings for deprecated path
    public void testEnvironmentPaths() throws Exception {
        Path path = createTempDir();
        // make a fake ES home and ensure we only grant permissions to that.
        Path esHome = path.resolve("esHome");
        Settings.Builder settingsBuilder = Settings.builder();
        settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.resolve("home").toString());
        settingsBuilder.putArray(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(),
                esHome.resolve("data2").toString());
        settingsBuilder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), esHome.resolve("custom").toString());
        settingsBuilder.put(Environment.PATH_LOGS_SETTING.getKey(), esHome.resolve("logs").toString());
        settingsBuilder.put(Environment.PIDFILE_SETTING.getKey(), esHome.resolve("test.pid").toString());
        Settings settings = settingsBuilder.build();

        // Same tmpdir sandboxing as above; the environment must be created while
        // the fake tmpdir is in effect for the temp-dir permission check below.
        Path fakeTmpDir = createTempDir();
        String realTmpDir = System.getProperty("java.io.tmpdir");
        Permissions permissions;
        Environment environment;
        try {
            System.setProperty("java.io.tmpdir", fakeTmpDir.toString());
            environment = new Environment(settings, esHome.resolve("conf"));
            permissions = Security.createPermissions(environment);
        } finally {
            System.setProperty("java.io.tmpdir", realTmpDir);
        }

        // the fake es home
        assertNoPermissions(esHome, permissions);
        // its parent
        assertNoPermissions(esHome.getParent(), permissions);
        // some other sibling
        assertNoPermissions(esHome.getParent().resolve("other"), permissions);
        // double check we overwrote java.io.tmpdir correctly for the test
        assertNoPermissions(PathUtils.get(realTmpDir), permissions);

        // check that all directories got permissions:

        // bin file: ro
        assertExactPermissions(new FilePermission(environment.binFile().toString(), "read,readlink"), permissions);
        // lib file: ro
        assertExactPermissions(new FilePermission(environment.libFile().toString(), "read,readlink"), permissions);
        // modules file: ro
        assertExactPermissions(new FilePermission(environment.modulesFile().toString(), "read,readlink"), permissions);
        // config file: ro
        assertExactPermissions(new FilePermission(environment.configFile().toString(), "read,readlink"), permissions);
        // plugins: ro
        assertExactPermissions(new FilePermission(environment.pluginsFile().toString(), "read,readlink"), permissions);

        // data paths: r/w
        for (Path dataPath : environment.dataFiles()) {
            assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions);
        }
        for (Path dataPath : environment.dataWithClusterFiles()) {
            assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions);
        }
        assertExactPermissions(new FilePermission(environment.sharedDataFile().toString(), "read,readlink,write,delete"), permissions);
        // logs: r/w
        assertExactPermissions(new FilePermission(environment.logsFile().toString(), "read,readlink,write,delete"), permissions);
        // temp dir: r/w
        assertExactPermissions(new FilePermission(fakeTmpDir.toString(), "read,readlink,write,delete"), permissions);
        // PID file: delete only (for the shutdown hook)
        assertExactPermissions(new FilePermission(environment.pidFile().toString(), "delete"), permissions);
    }

    /**
     * A data path listed twice (directly or through a symlink resolving to the
     * same real path) must be rejected when permissions are created.
     */
    public void testDuplicateDataPaths() throws IOException {
        final Path path = createTempDir();
        final Path home = path.resolve("home");
        final Path data = path.resolve("data");
        final Path duplicate;
        if (randomBoolean()) {
            // literal duplicate entry
            duplicate = data;
        } else {
            // duplicate via a symlink to the same target
            duplicate = createTempDir().toAbsolutePath().resolve("link");
            Files.createSymbolicLink(duplicate, data);
        }

        final Settings settings =
                Settings
                        .builder()
                        .put(Environment.PATH_HOME_SETTING.getKey(), home.toString())
                        .putArray(Environment.PATH_DATA_SETTING.getKey(), data.toString(), duplicate.toString())
                        .build();

        final Environment environment = new Environment(settings);
        final IllegalStateException e = expectThrows(IllegalStateException.class, () -> Security.createPermissions(environment));
        assertThat(e, hasToString(containsString("path [" + duplicate.toRealPath() + "] is duplicated by [" + duplicate + "]")));
    }

    /** ensureDirectoryExists must accept a symlink pointing at an existing directory. */
    public void testEnsureSymlink() throws IOException {
        Path p = createTempDir();

        Path exists = p.resolve("exists");
        Files.createDirectory(exists);

        // symlink
        Path linkExists = p.resolve("linkExists");
        try {
            Files.createSymbolicLink(linkExists, exists);
        } catch (UnsupportedOperationException | IOException e) {
            assumeNoException("test requires filesystem that supports symbolic links", e);
        } catch (SecurityException e) {
            assumeNoException("test cannot create symbolic links with security manager enabled", e);
        }

        Security.ensureDirectoryExists(linkExists);
        // must actually be usable as a directory, not just pass the check
        Files.createTempFile(linkExists, null, null);
    }

    /** ensureDirectoryExists must fail (IOException) on a dangling symlink. */
    public void testEnsureBrokenSymlink() throws IOException {
        Path p = createTempDir();

        // broken symlink
        Path brokenLink = p.resolve("brokenLink");
        try {
            Files.createSymbolicLink(brokenLink, p.resolve("nonexistent"));
        } catch (UnsupportedOperationException | IOException e) {
            assumeNoException("test requires filesystem that supports symbolic links", e);
        } catch (SecurityException e) {
            assumeNoException("test cannot create symbolic links with security manager enabled", e);
        }

        try {
            Security.ensureDirectoryExists(brokenLink);
            fail("didn't get expected exception");
        } catch (IOException expected) {}
    }

    /** When a configured dir is a symlink, test that permissions work on link target */
    public void testSymlinkPermissions() throws IOException {
        // see https://github.com/elastic/elasticsearch/issues/12170
        assumeFalse("windows does not automatically grant permission to the target of symlinks", Constants.WINDOWS);
        Path dir = createTempDir();

        Path target = dir.resolve("target");
        Files.createDirectory(target);

        // symlink
        Path link = dir.resolve("link");
        try {
            Files.createSymbolicLink(link, target);
        } catch (UnsupportedOperationException | IOException e) {
            assumeNoException("test requires filesystem that supports symbolic links", e);
        } catch (SecurityException e) {
            assumeNoException("test cannot create symbolic links with security manager enabled", e);
        }
        Permissions permissions = new Permissions();
        Security.addPath(permissions, "testing", link, "read");

        // permissions must apply both through the link and at the real target
        assertExactPermissions(new FilePermission(link.toString(), "read"), permissions);
        assertExactPermissions(new FilePermission(link.resolve("foo").toString(), "read"), permissions);
        assertExactPermissions(new FilePermission(target.toString(), "read"), permissions);
        assertExactPermissions(new FilePermission(target.resolve("foo").toString(), "read"), permissions);
    }

    /**
     * checks exact file permissions, meaning those and only those for that path.
     */
    static void assertExactPermissions(FilePermission expected, PermissionCollection actual) {
        String target = expected.getName(); // see javadocs
        // Decompose the expected action list, then assert each known action is
        // implied if and only if it was listed.
        Set<String> permissionSet = asSet(expected.getActions().split(","));
        boolean read = permissionSet.remove("read");
        boolean readlink = permissionSet.remove("readlink");
        boolean write = permissionSet.remove("write");
        boolean delete = permissionSet.remove("delete");
        boolean execute = permissionSet.remove("execute");
        assertTrue("unrecognized permission: " + permissionSet, permissionSet.isEmpty());
        assertEquals(read, actual.implies(new FilePermission(target, "read")));
        assertEquals(readlink, actual.implies(new FilePermission(target, "readlink")));
        assertEquals(write, actual.implies(new FilePermission(target, "write")));
        assertEquals(delete, actual.implies(new FilePermission(target, "delete")));
        assertEquals(execute, actual.implies(new FilePermission(target, "execute")));
    }

    /**
     * checks that this path has no permissions
     */
    static void assertNoPermissions(Path path, PermissionCollection actual) {
        String target = path.toString();
        assertFalse(actual.implies(new FilePermission(target, "read")));
        assertFalse(actual.implies(new FilePermission(target, "readlink")));
        assertFalse(actual.implies(new FilePermission(target, "write")));
        assertFalse(actual.implies(new FilePermission(target, "delete")));
        assertFalse(actual.implies(new FilePermission(target, "execute")));
    }
}
| |
package org.apache.helix.tools;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.helix.PropertyKey;
import org.apache.helix.ZNRecord;
import org.apache.helix.manager.zk.ZNRecordSerializer;
import org.apache.helix.manager.zk.ZkClient;
import org.apache.helix.tools.ClusterVerifiers.ClusterExternalViewVerifier;
import org.apache.helix.tools.ClusterVerifiers.ClusterLiveNodesVerifier;
import org.apache.log4j.Logger;
/**
* collection of test utilities for integration tests
*/
public class IntegrationTestUtil {
  private static Logger LOG = Logger.getLogger(IntegrationTestUtil.class);

  // Default verifier timeout applied to all verify* commands.
  public static final long defaultTimeout = 30 * 1000; // in milliseconds

  // Command-line option long names (also used in error messages).
  public static final String help = "help";
  public static final String zkSvr = "zkSvr";
  public static final String verifyExternalView = "verifyExternalView";
  public static final String verifyLiveNodes = "verifyLiveNodes";
  public static final String readZNode = "readZNode";
  public static final String readLeader = "readLeader";

  final ZkClient _zkclient;
  final ZNRecordSerializer _serializer;

  /**
   * Creates a util bound to the given ZooKeeper client; ZNRecords are
   * (de)serialized with a default {@link ZNRecordSerializer}.
   */
  public IntegrationTestUtil(ZkClient zkclient) {
    _zkclient = zkclient;
    _serializer = new ZNRecordSerializer();
  }
public void verifyExternalView(String[] args) {
if (args == null || args.length == 0) {
System.err.println("Illegal arguments for " + verifyExternalView);
return;
}
long timeoutValue = defaultTimeout;
String clusterName = args[0];
List<String> liveNodes = new ArrayList<String>();
for (int i = 1; i < args.length; i++) {
liveNodes.add(args[i]);
}
ClusterExternalViewVerifier verifier =
new ClusterExternalViewVerifier(_zkclient, clusterName, liveNodes);
boolean success = verifier.verifyByPolling(timeoutValue);
System.out.println(success ? "Successful" : "Failed");
}
public void verifyLiveNodes(String[] args) {
if (args == null || args.length == 0) {
System.err.println("Illegal arguments for " + verifyLiveNodes);
return;
}
long timeoutValue = defaultTimeout;
String clusterName = args[0];
List<String> liveNodes = new ArrayList<String>();
for (int i = 1; i < args.length; i++) {
liveNodes.add(args[i]);
}
ClusterLiveNodesVerifier verifier =
new ClusterLiveNodesVerifier(_zkclient, clusterName, liveNodes);
boolean success = verifier.verify(timeoutValue);
System.out.println(success ? "Successful" : "Failed");
}
public void readZNode(String path) {
ZNRecord record = _zkclient.readData(path, true);
if (record == null) {
System.out.println("null");
} else {
System.out.println(new String(_serializer.serialize(record)));
}
}
@SuppressWarnings("static-access")
static Options constructCommandLineOptions() {
Option helpOption =
OptionBuilder.withLongOpt(help).withDescription("Prints command-line options information")
.create();
Option zkSvrOption =
OptionBuilder.hasArgs(1).isRequired(true).withArgName("zookeeperAddress")
.withLongOpt(zkSvr).withDescription("Provide zookeeper-address").create();
Option verifyExternalViewOption =
OptionBuilder.hasArgs().isRequired(false).withArgName("clusterName node1 node2..")
.withLongOpt(verifyExternalView).withDescription("Verify external-view").create();
Option verifyLiveNodesOption =
OptionBuilder.hasArg().isRequired(false).withArgName("clusterName node1, node2..")
.withLongOpt(verifyLiveNodes).withDescription("Verify live-nodes").create();
Option readZNodeOption =
OptionBuilder.hasArgs(1).isRequired(false).withArgName("zkPath").withLongOpt(readZNode)
.withDescription("Read znode").create();
Option readLeaderOption =
OptionBuilder.hasArgs(1).isRequired(false).withArgName("clusterName")
.withLongOpt(readLeader).withDescription("Read cluster controller").create();
OptionGroup optGroup = new OptionGroup();
optGroup.setRequired(true);
optGroup.addOption(verifyExternalViewOption);
optGroup.addOption(verifyLiveNodesOption);
optGroup.addOption(readZNodeOption);
optGroup.addOption(readLeaderOption);
Options options = new Options();
options.addOption(helpOption);
options.addOption(zkSvrOption);
options.addOptionGroup(optGroup);
return options;
}
static void printUsage(Options cliOptions) {
HelpFormatter helpFormatter = new HelpFormatter();
helpFormatter.setWidth(1000);
helpFormatter.printHelp("java " + ClusterExternalViewVerifier.class.getName(), cliOptions);
}
static void processCommandLineArgs(String[] cliArgs) {
CommandLineParser cliParser = new GnuParser();
Options cliOptions = constructCommandLineOptions();
CommandLine cmd = null;
try {
cmd = cliParser.parse(cliOptions, cliArgs);
} catch (ParseException pe) {
System.err.println("failed to parse command-line args: " + Arrays.asList(cliArgs)
+ ", exception: " + pe.toString());
printUsage(cliOptions);
System.exit(1);
}
String zkServer = cmd.getOptionValue(zkSvr);
ZkClient zkclient =
new ZkClient(zkServer, ZkClient.DEFAULT_SESSION_TIMEOUT,
ZkClient.DEFAULT_CONNECTION_TIMEOUT, new ZNRecordSerializer());
IntegrationTestUtil util = new IntegrationTestUtil(zkclient);
if (cmd != null) {
if (cmd.hasOption(verifyExternalView)) {
String[] args = cmd.getOptionValues(verifyExternalView);
util.verifyExternalView(args);
} else if (cmd.hasOption(verifyLiveNodes)) {
String[] args = cmd.getOptionValues(verifyLiveNodes);
util.verifyLiveNodes(args);
} else if (cmd.hasOption(readZNode)) {
String path = cmd.getOptionValue(readZNode);
util.readZNode(path);
} else if (cmd.hasOption(readLeader)) {
String clusterName = cmd.getOptionValue(readLeader);
PropertyKey.Builder keyBuilder = new PropertyKey.Builder(clusterName);
util.readZNode(keyBuilder.controllerLeader().getPath());
} else {
printUsage(cliOptions);
}
}
}
public static void main(String[] args) {
processCommandLineArgs(args);
}
}
| |
package io.quarkus.deployment.dev;
import java.io.File;
import java.io.Serializable;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import io.quarkus.bootstrap.app.QuarkusBootstrap;
import io.quarkus.maven.dependency.ArtifactKey;
import io.quarkus.paths.PathCollection;
import io.quarkus.paths.PathList;
/**
* Object that is used to pass context data from the plugin doing the invocation
* into the dev mode process using java serialization.
*
* There is no need to worry about compat as both sides will always be using the same version
*/
public class DevModeContext implements Serializable {

    private static final long serialVersionUID = 4688502145533897982L;

    // Sentinel used where a module has no compilation unit of a given kind.
    public static final CompilationUnit EMPTY_COMPILATION_UNIT = new CompilationUnit(PathList.of(), null, null, null);

    public static final String ENABLE_PREVIEW_FLAG = "--enable-preview";

    // The main application module plus any additional local modules taking part in dev mode.
    private ModuleInfo applicationRoot;
    private final List<ModuleInfo> additionalModules = new ArrayList<>();

    // JVM system properties and build-system properties forwarded to the dev-mode process.
    private final Map<String, String> systemProperties = new HashMap<>();
    private final Map<String, String> buildSystemProperties = new HashMap<>();
    private String sourceEncoding;

    private final List<URL> additionalClassPathElements = new ArrayList<>();
    private File cacheDir;
    private File projectDir;
    private boolean test;
    private boolean abortOnFailedStart;
    // the jar file which is used to launch the DevModeMain
    private File devModeRunnerJarFile;
    private boolean localProjectDiscovery = true;
    // args of the main-method
    private String[] args;

    // javac configuration: extra options, release/source/target versions,
    // and annotation-processor/compiler-plugin settings
    private List<String> compilerOptions;
    private String releaseJavaVersion;
    private String sourceJavaVersion;
    private String targetJvmVersion;

    private List<String> compilerPluginArtifacts;
    private List<String> compilerPluginsOptions;

    private String alternateEntryPoint;
    private QuarkusBootstrap.Mode mode = QuarkusBootstrap.Mode.DEV;
    private String baseName;
    private final Set<ArtifactKey> localArtifacts = new HashSet<>();

    public boolean isLocalProjectDiscovery() {
        return localProjectDiscovery;
    }

    public DevModeContext setLocalProjectDiscovery(boolean localProjectDiscovery) {
        this.localProjectDiscovery = localProjectDiscovery;
        return this;
    }

    public String getAlternateEntryPoint() {
        return alternateEntryPoint;
    }

    public DevModeContext setAlternateEntryPoint(String alternateEntryPoint) {
        this.alternateEntryPoint = alternateEntryPoint;
        return this;
    }

    public ModuleInfo getApplicationRoot() {
        return applicationRoot;
    }

    public DevModeContext setApplicationRoot(ModuleInfo applicationRoot) {
        this.applicationRoot = applicationRoot;
        return this;
    }

    /** Live (mutable) list of additional modules; callers add to it directly. */
    public List<ModuleInfo> getAdditionalModules() {
        return additionalModules;
    }

    /** Live (mutable) map of system properties; callers populate it directly. */
    public Map<String, String> getSystemProperties() {
        return systemProperties;
    }

    /** Live (mutable) map of build-system properties; callers populate it directly. */
    public Map<String, String> getBuildSystemProperties() {
        return buildSystemProperties;
    }

    public String getSourceEncoding() {
        return sourceEncoding;
    }

    public void setSourceEncoding(String sourceEncoding) {
        this.sourceEncoding = sourceEncoding;
    }

    public List<URL> getAdditionalClassPathElements() {
        return additionalClassPathElements;
    }

    public File getCacheDir() {
        return cacheDir;
    }

    public void setCacheDir(File cacheDir) {
        this.cacheDir = cacheDir;
    }

    public boolean isTest() {
        return test;
    }

    public void setTest(boolean test) {
        this.test = test;
    }

    public boolean isAbortOnFailedStart() {
        return abortOnFailedStart;
    }

    public void setAbortOnFailedStart(boolean abortOnFailedStart) {
        this.abortOnFailedStart = abortOnFailedStart;
    }

    public List<String> getCompilerOptions() {
        return compilerOptions;
    }

    public void setCompilerOptions(List<String> compilerOptions) {
        this.compilerOptions = compilerOptions;
    }

    public String getReleaseJavaVersion() {
        return releaseJavaVersion;
    }

    public void setReleaseJavaVersion(String releaseJavaVersion) {
        this.releaseJavaVersion = releaseJavaVersion;
    }

    public String getSourceJavaVersion() {
        return sourceJavaVersion;
    }

    public void setSourceJavaVersion(String sourceJavaVersion) {
        this.sourceJavaVersion = sourceJavaVersion;
    }

    public String getTargetJvmVersion() {
        return targetJvmVersion;
    }

    public void setTargetJvmVersion(String targetJvmVersion) {
        this.targetJvmVersion = targetJvmVersion;
    }

    public List<String> getCompilerPluginArtifacts() {
        return compilerPluginArtifacts;
    }

    public void setCompilerPluginArtifacts(List<String> compilerPluginArtifacts) {
        this.compilerPluginArtifacts = compilerPluginArtifacts;
    }

    public List<String> getCompilerPluginsOptions() {
        return compilerPluginsOptions;
    }

    public void setCompilerPluginsOptions(List<String> compilerPluginsOptions) {
        this.compilerPluginsOptions = compilerPluginsOptions;
    }

    public File getDevModeRunnerJarFile() {
        return devModeRunnerJarFile;
    }

    public void setDevModeRunnerJarFile(final File devModeRunnerJarFile) {
        this.devModeRunnerJarFile = devModeRunnerJarFile;
    }

    public File getProjectDir() {
        return projectDir;
    }

    public DevModeContext setProjectDir(File projectDir) {
        this.projectDir = projectDir;
        return this;
    }

    public String[] getArgs() {
        return args;
    }

    public void setArgs(String[] args) {
        this.args = args;
    }

    /**
     * Returns the application root followed by the additional modules, as a new list.
     * NOTE(review): if the application root has not been set, the returned list
     * contains a null first element — callers presumably set the root first.
     */
    public List<ModuleInfo> getAllModules() {
        List<ModuleInfo> ret = new ArrayList<>();
        ret.add(applicationRoot);
        ret.addAll(additionalModules);
        return ret;
    }

    public QuarkusBootstrap.Mode getMode() {
        return mode;
    }

    public void setMode(QuarkusBootstrap.Mode mode) {
        this.mode = mode;
    }

    public String getBaseName() {
        return baseName;
    }

    public void setBaseName(String baseName) {
        this.baseName = baseName;
    }

    public Set<ArtifactKey> getLocalArtifacts() {
        return localArtifacts;
    }

    /**
     * Description of one local module participating in dev mode: its artifact key,
     * project directory, main and (optional) test compilation units, and output dirs.
     */
    public static class ModuleInfo implements Serializable {

        private static final long serialVersionUID = -1376678003747618410L;

        private final ArtifactKey appArtifactKey;
        private final String name;
        private final String projectDirectory;
        private final CompilationUnit main;
        // null when the module has no test classes directory
        private final CompilationUnit test;
        private final String preBuildOutputDir;
        private final PathCollection sourceParents;
        private final String targetDir;

        ModuleInfo(Builder builder) {
            this.appArtifactKey = builder.appArtifactKey;
            // fall back to the artifact's GAC coordinates when no explicit name was given
            this.name = builder.name == null ? builder.appArtifactKey.toGacString() : builder.name;
            this.projectDirectory = builder.projectDirectory;
            this.main = new CompilationUnit(builder.sourcePaths, builder.classesPath,
                    builder.resourcePaths,
                    builder.resourcesOutputPath);

            // a test compilation unit only exists when a test classes path was configured
            if (builder.testClassesPath != null) {
                this.test = new CompilationUnit(builder.testSourcePaths,
                        builder.testClassesPath, builder.testResourcePaths, builder.testResourcesOutputPath);
            } else {
                this.test = null;
            }
            this.sourceParents = builder.sourceParents;
            this.preBuildOutputDir = builder.preBuildOutputDir;
            this.targetDir = builder.targetDir;
        }

        public String getName() {
            return name;
        }

        public String getProjectDirectory() {
            return projectDirectory;
        }

        public PathCollection getSourceParents() {
            return sourceParents;
        }

        //TODO: why isn't this immutable?
        /**
         * Appends the given source paths to the main compilation unit.
         * Relative paths are resolved against the project directory.
         */
        public void addSourcePaths(Collection<String> additionalPaths) {
            this.main.sourcePaths = this.main.sourcePaths.add(
                    additionalPaths.stream()
                            .map(p -> Paths.get(p).isAbsolute() ? p : (projectDirectory + File.separator + p))
                            .map(Paths::get)
                            .toArray(Path[]::new));
        }

        public String getPreBuildOutputDir() {
            return preBuildOutputDir;
        }

        public String getTargetDir() {
            return targetDir;
        }

        public ArtifactKey getArtifactKey() {
            return appArtifactKey;
        }

        public CompilationUnit getMain() {
            return main;
        }

        public Optional<CompilationUnit> getTest() {
            return Optional.ofNullable(test);
        }

        /**
         * Prepends a single source path to the main compilation unit.
         * A relative path is resolved against the project directory.
         */
        public void addSourcePathFirst(String path) {
            String absolutePath = Paths.get(path).isAbsolute() ? path
                    : (projectDirectory + File.separator + path);
            this.main.sourcePaths = this.main.sourcePaths.addFirst(Paths.get(absolutePath));
        }

        public static class Builder {

            private ArtifactKey appArtifactKey;
            private String name;
            private String projectDirectory;
            private PathCollection sourcePaths = PathList.of();
            private String classesPath;
            private PathCollection resourcePaths = PathList.of();
            private String resourcesOutputPath;
            private String preBuildOutputDir;
            private PathCollection sourceParents = PathList.of();
            private String targetDir;
            private PathCollection testSourcePaths = PathList.of();
            private String testClassesPath;
            private PathCollection testResourcePaths = PathList.of();
            private String testResourcesOutputPath;

            public Builder setArtifactKey(ArtifactKey appArtifactKey) {
                this.appArtifactKey = appArtifactKey;
                return this;
            }

            public Builder setName(String name) {
                this.name = name;
                return this;
            }

            public Builder setProjectDirectory(String projectDirectory) {
                this.projectDirectory = projectDirectory;
                return this;
            }

            public Builder setSourcePaths(PathCollection sourcePaths) {
                this.sourcePaths = sourcePaths;
                return this;
            }

            public Builder setClassesPath(String classesPath) {
                this.classesPath = classesPath;
                return this;
            }

            public Builder setResourcePaths(PathCollection resourcePaths) {
                this.resourcePaths = resourcePaths;
                return this;
            }

            public Builder setResourcesOutputPath(String resourcesOutputPath) {
                this.resourcesOutputPath = resourcesOutputPath;
                return this;
            }

            public Builder setPreBuildOutputDir(String preBuildOutputDir) {
                this.preBuildOutputDir = preBuildOutputDir;
                return this;
            }

            public Builder setSourceParents(PathCollection sourceParents) {
                this.sourceParents = sourceParents;
                return this;
            }

            public Builder setTargetDir(String targetDir) {
                this.targetDir = targetDir;
                return this;
            }

            public Builder setTestSourcePaths(PathCollection testSourcePaths) {
                this.testSourcePaths = testSourcePaths;
                return this;
            }

            public Builder setTestClassesPath(String testClassesPath) {
                this.testClassesPath = testClassesPath;
                return this;
            }

            public Builder setTestResourcePaths(PathCollection testResourcePaths) {
                this.testResourcePaths = testResourcePaths;
                return this;
            }

            public Builder setTestResourcesOutputPath(String testResourcesOutputPath) {
                this.testResourcesOutputPath = testResourcesOutputPath;
                return this;
            }

            public ModuleInfo build() {
                return new ModuleInfo(this);
            }
        }
    }

    /**
     * A set of source paths with their classes output directory, plus resource
     * paths with their resources output directory.
     */
    public static class CompilationUnit implements Serializable {

        private static final long serialVersionUID = -511238068393954948L;

        // mutable: ModuleInfo.addSourcePaths/addSourcePathFirst replace this collection
        private PathCollection sourcePaths;
        private final String classesPath;
        private final PathCollection resourcePaths;
        private final String resourcesOutputPath;

        public CompilationUnit(PathCollection sourcePaths, String classesPath, PathCollection resourcePaths,
                String resourcesOutputPath) {
            this.sourcePaths = sourcePaths;
            this.classesPath = classesPath;
            this.resourcePaths = resourcePaths;
            this.resourcesOutputPath = resourcesOutputPath;
        }

        public PathCollection getSourcePaths() {
            return sourcePaths;
        }

        public String getClassesPath() {
            return classesPath;
        }

        public PathCollection getResourcePaths() {
            return resourcePaths;
        }

        public String getResourcesOutputPath() {
            return resourcesOutputPath;
        }
    }

    /** True when the configured compiler options contain {@code --enable-preview}. */
    public boolean isEnablePreview() {
        if (compilerOptions == null) {
            return false;
        }
        return compilerOptions.contains(ENABLE_PREVIEW_FLAG);
    }
}
| |
/*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb.client.model;
import com.mongodb.lang.Nullable;
import org.bson.BsonArray;
import org.bson.BsonDocument;
import org.bson.BsonDouble;
import org.bson.BsonInt32;
import org.bson.BsonString;
import org.bson.BsonValue;
import org.bson.Document;
import org.junit.jupiter.api.Test;
import java.util.AbstractMap;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Supplier;
import static com.mongodb.client.model.Windows.documents;
import static com.mongodb.client.model.Windows.range;
import static java.util.Arrays.asList;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
 * Tests for the {@code WindowedComputations} factory methods: each test builds the
 * expected {@code BsonField} by hand and compares it to the factory's output, and
 * asserts that illegal arguments are rejected.
 */
final class TestWindowedComputations {
    // sentinel expression meaning "this window function takes no expression argument"
    private static final String NO_EXPRESSION = "{}";
    private static final String PATH = "newField";
    // pairs of (expression as passed to the factory, its expected BSON encoding)
    private static final Map.Entry<Integer, BsonValue> INT_EXPR = new AbstractMap.SimpleImmutableEntry<>(1, new BsonInt32(1));
    private static final Map.Entry<String, BsonValue> STR_EXPR =
            new AbstractMap.SimpleImmutableEntry<>("$fieldToRead", new BsonString("$fieldToRead"));
    private static final Window POSITION_BASED_WINDOW = documents(1, 2);
    private static final Window RANGE_BASED_WINDOW = range(1, 2);

    @Test
    void of() {
        // WindowedComputations.of(BsonField) must round-trip to the same BSON as the typed factory
        WindowedComputation expected = WindowedComputations.sum(PATH, STR_EXPR.getKey(), POSITION_BASED_WINDOW);
        WindowedComputation actual = WindowedComputations.of(new BsonField(PATH, new Document("$sum", STR_EXPR.getKey())
                .append("window", POSITION_BASED_WINDOW.toBsonDocument())));
        assertAll(
                () -> assertEquals(expected.toBsonField().getName(), actual.toBsonField().getName()),
                () -> assertEquals(expected.toBsonField().getValue().toBsonDocument(), actual.toBsonField().getValue().toBsonDocument()));
    }

    @Test
    void simpleWindowFunctions() {
        // exercise every simple window function against int and string expressions,
        // with no window, a position-based window, and a range-based window
        final Map<Object, BsonValue> expressions = new HashMap<>();
        expressions.put(INT_EXPR.getKey(), INT_EXPR.getValue());
        expressions.put(STR_EXPR.getKey(), STR_EXPR.getValue());
        final Collection<Window> windows = asList(null, POSITION_BASED_WINDOW, RANGE_BASED_WINDOW);
        assertAll(
                () -> assertSimpleParameterWindowFunction("$sum", WindowedComputations::sum, expressions, windows, false),
                () -> assertSimpleParameterWindowFunction("$avg", WindowedComputations::avg, expressions, windows, false),
                () -> assertSimpleParameterWindowFunction("$stdDevSamp", WindowedComputations::stdDevSamp, expressions, windows, false),
                () -> assertSimpleParameterWindowFunction("$stdDevPop", WindowedComputations::stdDevPop, expressions, windows, false),
                () -> assertSimpleParameterWindowFunction("$min", WindowedComputations::min, expressions, windows, false),
                () -> assertSimpleParameterWindowFunction("$max", WindowedComputations::max, expressions, windows, false),
                () -> assertNoParameterWindowFunction("$count", WindowedComputations::count, windows, false),
                () -> assertSimpleParameterWindowFunction("$push", WindowedComputations::push, expressions, windows, false),
                () -> assertSimpleParameterWindowFunction("$addToSet", WindowedComputations::addToSet, expressions, windows, false),
                () -> assertSimpleParameterWindowFunction("$first", WindowedComputations::first, expressions, windows, false),
                () -> assertSimpleParameterWindowFunction("$last", WindowedComputations::last, expressions, windows, false),
                () -> assertNoParameterNoWindowFunction("$documentNumber", WindowedComputations::documentNumber),
                () -> assertNoParameterNoWindowFunction("$rank", WindowedComputations::rank),
                () -> assertNoParameterNoWindowFunction("$denseRank", WindowedComputations::denseRank)
        );
    }

    @Test
    void derivative() {
        assertDerivativeOrIntegral("$derivative", WindowedComputations::derivative);
    }

    @Test
    void timeDerivative() {
        assertTimeDerivativeOrIntegral("$derivative", WindowedComputations::timeDerivative);
    }

    @Test
    void integral() {
        assertDerivativeOrIntegral("$integral", WindowedComputations::integral);
    }

    @Test
    void timeIntegral() {
        assertTimeDerivativeOrIntegral("$integral", WindowedComputations::timeIntegral);
    }

    @Test
    void covarianceSamp() {
        assertCovariance("$covarianceSamp", WindowedComputations::covarianceSamp);
    }

    @Test
    void covariancePop() {
        assertCovariance("$covariancePop", WindowedComputations::covariancePop);
    }

    @Test
    void expMovingAvgWithN() {
        assertWindowedComputation(
                new BsonField(PATH, new BsonDocument("$expMovingAvg", new BsonDocument("input", STR_EXPR.getValue())
                        .append("N", new BsonInt32(1)))),
                WindowedComputations.expMovingAvg(PATH, STR_EXPR.getKey(), 1));
        // N must be a positive integer; path and expression must be non-null
        assertAll(
                () -> assertThrows(IllegalArgumentException.class, () ->
                        WindowedComputations.expMovingAvg(null, STR_EXPR.getKey(), 1)),
                () -> assertThrows(IllegalArgumentException.class, () ->
                        WindowedComputations.expMovingAvg(PATH, null, 1)),
                () -> assertThrows(IllegalArgumentException.class, () ->
                        WindowedComputations.expMovingAvg(PATH, STR_EXPR.getKey(), 0)),
                () -> assertThrows(IllegalArgumentException.class, () ->
                        WindowedComputations.expMovingAvg(PATH, STR_EXPR.getKey(), -1)));
    }

    @Test
    void expMovingAvgWithAlpha() {
        assertWindowedComputation(
                new BsonField(PATH, new BsonDocument("$expMovingAvg", new BsonDocument("input", STR_EXPR.getValue())
                        .append("alpha", new BsonDouble(0.5)))),
                WindowedComputations.expMovingAvg(PATH, STR_EXPR.getKey(), 0.5));
        // alpha must lie in the open interval (0, 1)
        assertAll(
                () -> assertThrows(IllegalArgumentException.class, () ->
                        WindowedComputations.expMovingAvg(null, STR_EXPR.getKey(), 0.5)),
                () -> assertThrows(IllegalArgumentException.class, () ->
                        WindowedComputations.expMovingAvg(PATH, null, 0.5)),
                () -> assertThrows(IllegalArgumentException.class, () ->
                        WindowedComputations.expMovingAvg(PATH, STR_EXPR.getKey(), 0d)),
                () -> assertThrows(IllegalArgumentException.class, () ->
                        WindowedComputations.expMovingAvg(PATH, STR_EXPR.getKey(), 1d)));
    }

    @Test
    void shift() {
        // "default" only appears in the encoded document when a default expression is given
        assertAll(
                () -> assertWindowedComputation(
                        new BsonField(PATH, new BsonDocument("$shift", new BsonDocument("output", STR_EXPR.getValue())
                                .append("by", new BsonInt32(-1))
                                .append("default", INT_EXPR.getValue()))),
                        WindowedComputations.shift(PATH, STR_EXPR.getKey(), INT_EXPR.getKey(), -1)),
                () -> assertWindowedComputation(
                        new BsonField(PATH, new BsonDocument("$shift", new BsonDocument("output", STR_EXPR.getValue())
                                .append("by", new BsonInt32(0)))),
                        WindowedComputations.shift(PATH, STR_EXPR.getKey(), null, 0)),
                () -> assertWindowedComputation(
                        new BsonField(PATH, new BsonDocument("$shift", new BsonDocument("output", STR_EXPR.getValue())
                                .append("by", new BsonInt32(1))
                                .append("default", INT_EXPR.getValue()))),
                        WindowedComputations.shift(PATH, STR_EXPR.getKey(), INT_EXPR.getKey(), 1)));
        assertAll(
                () -> assertThrows(IllegalArgumentException.class, () ->
                        WindowedComputations.shift(null, STR_EXPR.getKey(), INT_EXPR.getKey(), 0)),
                () -> assertThrows(IllegalArgumentException.class, () ->
                        WindowedComputations.shift(PATH, null, INT_EXPR.getKey(), 0)));
    }

    /**
     * Asserts the contract of a window function that takes a single expression:
     * correct BSON encoding for each (expression, window) combination, rejection of a
     * null path, rejection of a null expression (unless NO_EXPRESSION is in the map),
     * and rejection of a null window when {@code windowRequired} is true.
     */
    private static void assertSimpleParameterWindowFunction(final String expectedFunctionName,
                                                            final TriFunction<String, Object, Window, WindowedComputation>
                                                                    windowedComputationBuilder,
                                                            final Map<Object, BsonValue> expressions,
                                                            final Collection<Window> windows,
                                                            final boolean windowRequired) {
        // no-parameter functions register the NO_EXPRESSION sentinel, for which null checks don't apply
        boolean assertNullExpressionsNotAllowed = !expressions.containsKey(NO_EXPRESSION);
        for (final Map.Entry<Object, BsonValue> expressionAndEncoded: expressions.entrySet()) {
            final Object expression = expressionAndEncoded.getKey();
            final BsonValue encodedExpression = expressionAndEncoded.getValue();
            for (final Window window : windows) {
                final BsonDocument expectedFunctionAndWindow = new BsonDocument(expectedFunctionName, encodedExpression);
                if (window != null) {
                    expectedFunctionAndWindow.append("window", window.toBsonDocument());
                }
                BsonField expectedWindowedComputation = new BsonField(PATH, expectedFunctionAndWindow);
                Supplier<String> msg = () -> "expectedFunctionName=" + expectedFunctionName
                        + "path=" + PATH
                        + "expression=" + expression
                        + "window=" + window
                        + "windowRequired=" + windowRequired;
                if (windowRequired && window == null) {
                    assertThrows(IllegalArgumentException.class, () -> windowedComputationBuilder.apply(PATH, expression, null), msg);
                } else {
                    assertWindowedComputation(expectedWindowedComputation, windowedComputationBuilder.apply(PATH, expression, window), msg);
                }
                assertThrows(IllegalArgumentException.class, () -> windowedComputationBuilder.apply(null, expression, window), msg);
                if (assertNullExpressionsNotAllowed) {
                    assertThrows(IllegalArgumentException.class, () -> windowedComputationBuilder.apply(PATH, null, window), msg);
                }
            }
        }
    }

    /**
     * Adapts a window function that takes no expression (e.g. $count) to the
     * simple-parameter assertion above, using NO_EXPRESSION as a placeholder.
     */
    private static void assertNoParameterWindowFunction(final String expectedFunctionName,
                                                        final BiFunction<String, Window, WindowedComputation> windowedComputationBuilder,
                                                        final Collection<Window> windows, final boolean windowRequired) {
        assertSimpleParameterWindowFunction(expectedFunctionName,
                (fName, expr, window) -> windowedComputationBuilder.apply(fName, window),
                Collections.singletonMap(NO_EXPRESSION, BsonDocument.parse(NO_EXPRESSION)), windows, windowRequired);
    }

    /**
     * Adapts a window function that takes neither an expression nor a window
     * (e.g. $rank) to the simple-parameter assertion above.
     */
    private static void assertNoParameterNoWindowFunction(final String expectedFunctionName,
                                                          final Function<String, WindowedComputation> windowedComputationBuilder) {
        assertNoParameterWindowFunction(expectedFunctionName, (fName, window) -> windowedComputationBuilder.apply(fName),
                Collections.singleton(null), false);
    }

    // Compares name and encoded BSON value of the expected field against the computation's output.
    private static void assertWindowedComputation(final BsonField expected, final WindowedComputation actual,
                                                  @Nullable final Supplier<String> messageSupplier) {
        assertEquals(expected.getName(), actual.toBsonField().getName(), messageSupplier);
        assertEquals(expected.getValue().toBsonDocument(), actual.toBsonField().getValue().toBsonDocument(), messageSupplier);
    }

    private static void assertWindowedComputation(final BsonField expected, final WindowedComputation actual) {
        assertWindowedComputation(expected, actual, null);
    }

    /** Asserts a derivative/integral variant that has no time unit. */
    private static void assertDerivativeOrIntegral(final String expectedFunctionName,
                                                   final TriFunction<String, Object, Window, WindowedComputation>
                                                           windowedComputationBuilder) {
        assertDerivativeOrIntegral(expectedFunctionName,
                (fName, expr, window, unit) -> windowedComputationBuilder.apply(fName, expr, window), false);
    }

    /** Asserts a time-derivative/time-integral variant, which additionally takes a MongoTimeUnit. */
    private static void assertTimeDerivativeOrIntegral(final String expectedFunctionName,
                                                       final QuadriFunction<String, Object, Window, MongoTimeUnit, WindowedComputation>
                                                               windowedComputationBuilder) {
        assertDerivativeOrIntegral(expectedFunctionName, windowedComputationBuilder, true);
    }

    /**
     * Shared assertions for derivative/integral functions: correct encoding (with a
     * "unit" argument when {@code time} is set), a mandatory window, and a mandatory
     * time unit for the time variants.
     */
    private static void assertDerivativeOrIntegral(final String expectedFunctionName,
                                                   final QuadriFunction<String, Object, Window, MongoTimeUnit, WindowedComputation>
                                                           windowedComputationBuilder,
                                                   final boolean time) {
        final BsonDocument expectedArgs = new BsonDocument("input", STR_EXPR.getValue());
        if (time) {
            expectedArgs.append("unit", new BsonString(MongoTimeUnit.DAY.value()));
        }
        assertWindowedComputation(new BsonField(PATH,
                        new BsonDocument(expectedFunctionName, expectedArgs)
                                .append("window", POSITION_BASED_WINDOW.toBsonDocument())),
                windowedComputationBuilder.apply(PATH, STR_EXPR.getKey(), POSITION_BASED_WINDOW, MongoTimeUnit.DAY));
        assertThrows(IllegalArgumentException.class, () ->
                windowedComputationBuilder.apply(PATH, STR_EXPR.getKey(), null, MongoTimeUnit.DAY));
        if (time) {
            assertThrows(IllegalArgumentException.class, () ->
                    windowedComputationBuilder.apply(PATH, STR_EXPR.getKey(), POSITION_BASED_WINDOW, null));
        }
    }

    /**
     * Shared assertions for covariance functions: two expressions encoded as an array,
     * an optional window, and rejection of null expressions.
     */
    private static void assertCovariance(final String expectedFunctionName,
                                         final QuadriFunction<String, Object, Object, Window, WindowedComputation>
                                                 windowedComputationBuilder) {
        assertWindowedComputation(new BsonField(PATH,
                        new BsonDocument(expectedFunctionName, new BsonArray(asList(INT_EXPR.getValue(), STR_EXPR.getValue())))
                                .append("window", POSITION_BASED_WINDOW.toBsonDocument())),
                windowedComputationBuilder.apply(PATH, INT_EXPR.getKey(), STR_EXPR.getKey(), POSITION_BASED_WINDOW));
        assertWindowedComputation(new BsonField(PATH,
                        new BsonDocument(expectedFunctionName, new BsonArray(asList(INT_EXPR.getValue(), STR_EXPR.getValue())))),
                windowedComputationBuilder.apply(PATH, INT_EXPR.getKey(), STR_EXPR.getKey(), null));
        assertAll(
                () -> assertThrows(IllegalArgumentException.class, () ->
                        windowedComputationBuilder.apply(PATH, null, STR_EXPR.getKey(), POSITION_BASED_WINDOW)),
                () -> assertThrows(IllegalArgumentException.class, () ->
                        windowedComputationBuilder.apply(PATH, INT_EXPR.getKey(), null, POSITION_BASED_WINDOW)));
    }

    // Three-argument function, used to pass factory method references with nullable args.
    @FunctionalInterface
    interface TriFunction<A1, A2, A3, R> {
        R apply(@Nullable A1 a1, @Nullable A2 a2, @Nullable A3 a3);
    }

    // Four-argument function, used for factories taking an extra unit/expression argument.
    @FunctionalInterface
    interface QuadriFunction<A1, A2, A3, A4, R> {
        R apply(@Nullable A1 a1, @Nullable A2 a2, @Nullable A3 a3, @Nullable A4 a4);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.resourcemanager;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.clusterframework.ApplicationStatus;
import org.apache.flink.runtime.entrypoint.ClusterInformation;
import org.apache.flink.runtime.heartbeat.TestingHeartbeatServices;
import org.apache.flink.runtime.highavailability.TestingHighAvailabilityServices;
import org.apache.flink.runtime.leaderelection.TestingLeaderElectionService;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
import org.apache.flink.runtime.metrics.util.TestingMetricRegistry;
import org.apache.flink.runtime.rpc.FencedRpcEndpoint;
import org.apache.flink.runtime.rpc.RpcEndpoint;
import org.apache.flink.runtime.rpc.RpcService;
import org.apache.flink.runtime.rpc.TestingRpcService;
import org.apache.flink.runtime.util.TestingFatalErrorHandler;
import org.apache.flink.util.concurrent.FutureUtils;
import javax.annotation.Nullable;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ForkJoinPool;
import java.util.function.Function;
import static org.apache.flink.util.Preconditions.checkNotNull;
/** Implementation of {@link ResourceManagerService} for testing purpose. */
public class TestingResourceManagerService implements ResourceManagerService {
private static final Time TIMEOUT = Time.seconds(10L);
private final ResourceManagerServiceImpl rmService;
private final TestingLeaderElectionService leaderElectionService;
private final TestingFatalErrorHandler fatalErrorHandler;
private final RpcService rpcService;
private final boolean needStopRpcService;
private TestingResourceManagerService(
ResourceManagerServiceImpl rmService,
TestingLeaderElectionService leaderElectionService,
TestingFatalErrorHandler fatalErrorHandler,
RpcService rpcService,
boolean needStopRpcService) {
this.rmService = rmService;
this.leaderElectionService = leaderElectionService;
this.fatalErrorHandler = fatalErrorHandler;
this.rpcService = rpcService;
this.needStopRpcService = needStopRpcService;
}
@Override
public void start() throws Exception {
rmService.start();
}
@Override
public CompletableFuture<Void> getTerminationFuture() {
return rmService.getTerminationFuture();
}
@Override
public CompletableFuture<Void> deregisterApplication(
ApplicationStatus applicationStatus, @Nullable String diagnostics) {
return rmService.deregisterApplication(applicationStatus, diagnostics);
}
@Override
public CompletableFuture<Void> closeAsync() {
return rmService.closeAsync();
}
public Optional<ResourceManagerGateway> getResourceManagerGateway() {
return getResourceManagerOpt().map(rm -> rm.getSelfGateway(ResourceManagerGateway.class));
}
public Optional<ResourceManagerId> getResourceManagerFencingToken() {
return getResourceManagerOpt().map(FencedRpcEndpoint::getFencingToken);
}
public Optional<CompletableFuture<Void>> getResourceManagerTerminationFuture() {
return getResourceManagerOpt().map(RpcEndpoint::getTerminationFuture);
}
private Optional<ResourceManager<?>> getResourceManagerOpt() {
return Optional.ofNullable(rmService.getLeaderResourceManager());
}
public void isLeader(UUID uuid) {
leaderElectionService.isLeader(uuid);
}
public void notLeader() {
leaderElectionService.notLeader();
}
public void rethrowFatalErrorIfAny() throws Exception {
if (fatalErrorHandler.hasExceptionOccurred()) {
fatalErrorHandler.rethrowError();
}
}
// Discards any recorded fatal error so the test teardown does not fail on it.
public void ignoreFatalErrors() {
    fatalErrorHandler.clearError();
}
/**
 * Closes the service and, when owned by this wrapper, stops the RPC service too;
 * blocks until both complete or TIMEOUT elapses.
 */
public void cleanUp() throws Exception {
    final CompletableFuture<Void> terminated =
            rmService.closeAsync().thenCompose(ignored -> stopRpcServiceIfNeeded());
    terminated.get(TIMEOUT.getSize(), TIMEOUT.getUnit());
}
// Stops the RPC service only when this wrapper created it; otherwise completes immediately.
private CompletableFuture<Void> stopRpcServiceIfNeeded() {
    if (needStopRpcService) {
        return rpcService.stopService();
    }
    return FutureUtils.completedVoidFuture();
}
// Entry point for constructing instances; see Builder for the configurable knobs.
public static Builder newBuilder() {
    return new Builder();
}
/** Builder for {@code TestingResourceManagerService}; unset parts get testing defaults. */
public static class Builder {
    // RPC service to use; created lazily in build() when not supplied externally.
    private RpcService rpcService = null;
    // Whether cleanUp() must stop the RPC service (only true when build() created it).
    private boolean needStopRpcService = true;
    private TestingLeaderElectionService rmLeaderElectionService = null;
    private Function<JobID, LeaderRetrievalService> jmLeaderRetrieverFunction = null;

    /** Supplies an externally owned RPC service; it will not be stopped on cleanUp(). */
    public Builder setRpcService(RpcService rpcService) {
        this.rpcService = checkNotNull(rpcService);
        this.needStopRpcService = false;
        return this;
    }

    /** Supplies the leader election service used for the resource manager. */
    public Builder setRmLeaderElectionService(
            TestingLeaderElectionService rmLeaderElectionService) {
        this.rmLeaderElectionService = checkNotNull(rmLeaderElectionService);
        return this;
    }

    /** Supplies the job-master leader retriever lookup installed into the HA services. */
    public Builder setJmLeaderRetrieverFunction(
            Function<JobID, LeaderRetrievalService> jmLeaderRetrieverFunction) {
        this.jmLeaderRetrieverFunction = checkNotNull(jmLeaderRetrieverFunction);
        return this;
    }

    /** Assembles the service, filling in testing defaults for anything not configured. */
    public TestingResourceManagerService build() throws Exception {
        if (rpcService == null) {
            rpcService = new TestingRpcService();
        }
        if (rmLeaderElectionService == null) {
            rmLeaderElectionService = new TestingLeaderElectionService();
        }

        final TestingHighAvailabilityServices highAvailabilityServices =
                new TestingHighAvailabilityServices();
        highAvailabilityServices.setResourceManagerLeaderElectionService(
                rmLeaderElectionService);
        if (jmLeaderRetrieverFunction != null) {
            highAvailabilityServices.setJobMasterLeaderRetrieverFunction(
                    jmLeaderRetrieverFunction);
        }

        final TestingFatalErrorHandler errorHandler = new TestingFatalErrorHandler();
        return new TestingResourceManagerService(
                ResourceManagerServiceImpl.create(
                        StandaloneResourceManagerFactory.getInstance(),
                        new Configuration(),
                        rpcService,
                        highAvailabilityServices,
                        new TestingHeartbeatServices(),
                        errorHandler,
                        new ClusterInformation("localhost", 1234),
                        null,
                        TestingMetricRegistry.builder().build(),
                        "localhost",
                        ForkJoinPool.commonPool()),
                rmLeaderElectionService,
                errorHandler,
                rpcService,
                needStopRpcService);
    }
}
}
| |
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.directio.hive.parquet.v1;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.Map;
import com.asakusafw.directio.hive.util.TemporalUtil;
import com.asakusafw.runtime.value.DateOption;
import com.asakusafw.runtime.value.DateTimeOption;
import com.asakusafw.runtime.value.DateUtil;
import com.asakusafw.runtime.value.ValueOption;
import parquet.column.Dictionary;
import parquet.io.api.Binary;
import parquet.io.api.RecordConsumer;
import parquet.schema.PrimitiveType;
import parquet.schema.PrimitiveType.PrimitiveTypeName;
import parquet.schema.Type;
import parquet.schema.Type.Repetition;
/**
* Converts between {@link ValueOption} and {@code timestamp (binary)}.
* @since 0.7.2
*/
public enum TimestampValueDrivers implements ParquetValueDriver {

    /**
     * {@link DateOption}.
     */
    DATE(DateOption.class) {
        @Override
        public ValueConverter getConverter() {
            return new DateConverter();
        }
        @Override
        public ValueWriter getWriter() {
            return new DateWriter();
        }
    },

    /**
     * {@link DateTimeOption}.
     */
    DATETIME(DateTimeOption.class) {
        @Override
        public ValueConverter getConverter() {
            return new DateTimeConverter();
        }
        @Override
        public ValueWriter getWriter() {
            return new DateTimeWriter();
        }
    },
    ;

    // The ValueOption subtype handled by this driver; used as the key of the find() lookup.
    final Class<? extends ValueOption<?>> valueOptionClass;

    TimestampValueDrivers(Class<? extends ValueOption<?>> valueOptionClass) {
        this.valueOptionClass = valueOptionClass;
    }

    @Override
    public Type getType(String name) {
        // Values are stored as optional 12-byte INT96 primitives; see AbstractWriter.write
        // for the exact byte layout.
        return new PrimitiveType(Repetition.OPTIONAL, PrimitiveTypeName.INT96, name);
    }

    /**
     * Returns a {@link ParquetValueDriver} for the specified type.
     * @param valueClass the {@link ValueOption} type
     * @return the corresponded {@link ParquetValueDriver}, or {@code null} if it is not found
     */
    public static ParquetValueDriver find(Class<?> valueClass) {
        return Lazy.FROM_CLASS.get(valueClass);
    }

    /**
     * Initialization-on-demand holder: the class-to-driver map is built exactly once,
     * on first access to {@link #FROM_CLASS}.
     */
    private static final class Lazy {

        static final Map<Class<?>, TimestampValueDrivers> FROM_CLASS;
        static {
            Map<Class<?>, TimestampValueDrivers> map = new HashMap<>();
            for (TimestampValueDrivers element : TimestampValueDrivers.values()) {
                map.put(element.valueOptionClass, element);
            }
            FROM_CLASS = map;
        }

        private Lazy() {
            return;
        }
    }

    /**
     * Base class for writers emitting the 12-byte INT96 timestamp layout:
     * time-of-day in nanoseconds (8-byte long) followed by the Julian day number
     * (4-byte int), both little-endian.
     */
    abstract static class AbstractWriter implements ValueWriter {

        void write(int julianDay, long timeOfDayNanos, RecordConsumer consumer) {
            ByteBuffer buf = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN);
            buf.clear();
            // Nanos first, then the day number; readers below decode in the same order.
            buf.putLong(timeOfDayNanos);
            buf.putInt(julianDay);
            buf.flip();
            consumer.addBinary(Binary.fromByteBuffer(buf));
        }
    }

    /** Writes {@link DateOption} values as INT96 timestamps. */
    static class DateWriter extends AbstractWriter {
        @Override
        public void write(Object value, RecordConsumer consumer) {
            DateOption option = (DateOption) value;
            int julianDayNumber = TemporalUtil.getJulianDayNumber(option.get());
            long nanoTime = TemporalUtil.getTimeOfDayNanos(option.get());
            write(julianDayNumber, nanoTime, consumer);
        }
    }

    /** Writes {@link DateTimeOption} values as INT96 timestamps. */
    static class DateTimeWriter extends AbstractWriter {
        @Override
        public void write(Object value, RecordConsumer consumer) {
            DateTimeOption option = (DateTimeOption) value;
            int julianDayNumber = TemporalUtil.getJulianDayNumber(option.get());
            long nanoTime = TemporalUtil.getTimeOfDayNanos(option.get());
            write(julianDayNumber, nanoTime, consumer);
        }
    }

    /**
     * Base class for converters reading the INT96 layout produced by
     * {@link AbstractWriter}; supports dictionary-encoded pages by pre-decoding
     * every dictionary entry into primitive arrays.
     */
    abstract static class AbstractConverter extends ValueConverter {

        // Per-dictionary-id decoded values; index is the dictionary id.
        private int[] julianDays;

        private long[] nanoTimes;

        protected AbstractConverter() {
            return;
        }

        @Override
        public boolean hasDictionarySupport() {
            return true;
        }

        @Override
        public void setDictionary(Dictionary dictionary) {
            int size = dictionary.getMaxId() + 1;
            if (this.julianDays == null || this.julianDays.length < size) {
                // 20% headroom so a slightly larger dictionary later reuses the arrays.
                int capacity = (int) (size * 1.2) + 1;
                this.julianDays = new int[capacity];
                this.nanoTimes = new long[capacity];
            }
            for (int id = 0, max = dictionary.getMaxId(); id <= max; id++) {
                // Same little-endian order as AbstractWriter: long nanos, then int day.
                ByteBuffer bytes = dictionary.decodeToBinary(id).toByteBuffer().order(ByteOrder.LITTLE_ENDIAN);
                long time = bytes.getLong();
                int day = bytes.getInt();
                julianDays[id] = day;
                nanoTimes[id] = time;
            }
        }

        @Override
        public void addValueFromDictionary(int dictionaryId) {
            addNanoTime(julianDays[dictionaryId], nanoTimes[dictionaryId]);
        }

        @Override
        public void addBinary(Binary value) {
            // Non-dictionary path: decode the 12-byte value inline.
            ByteBuffer bytes = value.toByteBuffer().order(ByteOrder.LITTLE_ENDIAN);
            long time = bytes.getLong();
            int day = bytes.getInt();
            addNanoTime(day, time);
        }

        // Receives the decoded timestamp and stores it into the bound ValueOption.
        abstract void addNanoTime(int julianDay, long nanoTime);
    }

    /** Fills a {@link DateOption} target, truncating the timestamp to whole days. */
    static class DateConverter extends AbstractConverter {

        private DateOption target;

        @Override
        public void set(ValueOption<?> value) {
            this.target = (DateOption) value;
        }

        @SuppressWarnings("deprecation")
        @Override
        void addNanoTime(int julianDay, long nanoTime) {
            long seconds = TemporalUtil.toElapsedSeconds(julianDay, nanoTime);
            target.modify(DateUtil.getDayFromSeconds(seconds));
        }
    }

    /** Fills a {@link DateTimeOption} target with second precision. */
    static class DateTimeConverter extends AbstractConverter {

        private DateTimeOption target;

        @Override
        public void set(ValueOption<?> value) {
            this.target = (DateTimeOption) value;
        }

        @SuppressWarnings("deprecation")
        @Override
        void addNanoTime(int julianDay, long nanoTime) {
            target.modify(TemporalUtil.toElapsedSeconds(julianDay, nanoTime));
        }
    }
}
| |
package demos;
import java.lang.reflect.*;
import javax.media.opengl.*;
import javax.media.nativewindow.*;
import com.jogamp.newt.*;
import com.jogamp.newt.event.*;
import com.jogamp.newt.opengl.*;
/**
 * Command-line launcher that runs an arbitrary {@link GLEventListener} demo inside
 * a NEWT {@link GLWindow}, with optional AWT hosting and parenting.
 * Keyboard: 'f' toggles fullscreen, 'q' quits, 'p' nudges the position by (dx,dy),
 * 's' grows the size by (dw,dh). Any multi-click quits; a non-left click toggles fullscreen.
 */
public class GLNewtRun extends WindowAdapter implements KeyListener, MouseListener {

    static GLWindow window;
    // Set from listener threads, read by the render loop in main(); hence volatile.
    static volatile boolean quit = false;

    public void windowDestroyNotify(WindowEvent e) {
        quit = true;
    }

    // Deltas applied by the 'p' (position) and 's' (size) key commands; set via -dx/-dy/-dw/-dh.
    static int dx=0;
    static int dy=0;
    static int dw=0;
    static int dh=0;

    public void keyPressed(KeyEvent e) {
        System.out.println(e);
        if(e.getKeyChar()=='f') {
            window.setFullscreen(!window.isFullscreen());
        } else if(e.getKeyChar()=='q') {
            quit = true;
        } else if(e.getKeyChar()=='p') {
            // Nudge the window by the configured deltas.
            int x = window.getX() + dx;
            int y = window.getY() + dy;
            System.out.println("Reset Pos "+x+"/"+y);
            window.setPosition(x, y);
        } else if(e.getKeyChar()=='s') {
            // Grow (or shrink, for negative deltas) the window by the configured deltas.
            int w = window.getWidth() + dw;
            int h = window.getHeight() + dh;
            System.out.println("Reset Size "+w+"x"+h);
            window.setSize(w, h);
        }
    }

    public void keyReleased(KeyEvent e) {
        System.out.println(e);
    }

    public void keyTyped(KeyEvent e) {
        System.out.println(e);
    }

    public void mouseClicked(MouseEvent e) {
        System.out.println(" mouseevent: "+e);
        switch(e.getClickCount()) {
            case 1:
                // Single click with any button other than the primary toggles fullscreen.
                if(e.getButton()>MouseEvent.BUTTON1) {
                    window.setFullscreen(!window.isFullscreen());
                }
                break;
            default:
                // Double (or more) click ends the demo.
                quit=true;
                break;
        }
    }

    public void mouseEntered(MouseEvent e) {
    }
    public void mouseExited(MouseEvent e) {
    }
    public void mousePressed(MouseEvent e) {
    }
    public void mouseReleased(MouseEvent e) {
    }
    public void mouseMoved(MouseEvent e) {
    }
    public void mouseDragged(MouseEvent e) {
    }
    public void mouseWheelMoved(MouseEvent e) {
    }

    public boolean shouldQuit() { return quit; }

    /**
     * Parses {@code str} as an int; on any failure prints the stack trace and
     * returns {@code def}.
     */
    public static int str2int(String str, int def) {
        try {
            return Integer.parseInt(str);
        } catch (Exception ex) { ex.printStackTrace(); }
        return def;
    }

    /**
     * Reflectively assigns {@code value} to the public field {@code fieldName} of
     * {@code instance}, if such a field exists and is assignment-compatible.
     * Used to hand the demo its window without a shared interface.
     *
     * @return true when the field was found and set
     */
    public static boolean setField(Object instance, String fieldName, Object value) {
        try {
            Field f = instance.getClass().getField(fieldName);
            if(f.getType().isInstance(value)) {
                f.set(instance, value);
                return true;
            } else {
                System.out.println(instance.getClass()+" '"+fieldName+"' field not assignable with "+value.getClass()+", it's a: "+f.getType());
            }
        } catch (NoSuchFieldException nsfe) {
            System.out.println(instance.getClass()+" has no '"+fieldName+"' field");
        } catch (Throwable t) {
            t.printStackTrace();
        }
        return false;
    }

    /**
     * Entry point. All arguments but the last are option flags; the last argument
     * is the demo class name (must implement {@link GLEventListener}).
     */
    public static void main(String[] args) {
        boolean parented = false;
        boolean useAWTTestFrame = false;
        boolean useAWT = false;
        boolean undecorated = false;
        boolean fullscreen = false;
        int x_p = 0;
        int y_p = 0;
        int x = 0;
        int y = 0;
        int width = 800;
        int height = 480;
        String glProfileStr = null;
        if(0==args.length) {
            throw new RuntimeException("Usage: "+GLNewtRun.class+" <demo class name (GLEventListener)>");
        }
        GLNewtRun listener = new GLNewtRun();
        int i=0;
        // Parse flags up to (but excluding) the final argument, which names the demo class.
        // NOTE(review): a value-taking flag given as the second-to-last argument would
        // consume the demo class name — arguments are assumed well-formed here.
        while(i<args.length-1) {
            if(args[i].equals("-awt")) {
                useAWT = true;
            } else if(args[i].equals("-awttestframe")) {
                useAWT = true;
                useAWTTestFrame = true;
            } else if(args[i].equals("-undecorated")) {
                undecorated = true;
            } else if(args[i].equals("-parented")) {
                parented = true;
            } else if(args[i].equals("-fs")) {
                fullscreen = true;
            } else if(args[i].equals("-xp")) {
                i++;
                x_p = str2int(args[i], x_p);
            } else if(args[i].equals("-yp")) {
                i++;
                y_p = str2int(args[i], y_p);
            } else if(args[i].equals("-x")) {
                i++;
                x = str2int(args[i], x);
            } else if(args[i].equals("-y")) {
                i++;
                y = str2int(args[i], y);
            } else if(args[i].equals("-width")) {
                i++;
                width = str2int(args[i], width);
            } else if(args[i].equals("-height")) {
                i++;
                height = str2int(args[i], height);
            } else if(args[i].startsWith("-GL")) {
                // e.g. "-GL2" selects the GL2 profile; the leading '-' is stripped.
                glProfileStr = args[i].substring(1);
            } else if(args[i].equals("-dx")) {
                i++;
                dx = str2int(args[i], dx);
            } else if(args[i].equals("-dy")) {
                i++;
                dy = str2int(args[i], dy);
            } else if(args[i].equals("-dw")) {
                i++;
                dw = str2int(args[i], dw);
            } else if(args[i].equals("-dh")) {
                i++;
                dh = str2int(args[i], dh);
            }
            i++;
        }
        String demoClassName = args[i];
        Object demoObject = null;
        try {
            // NOTE(review): raw Class type and Class.newInstance() (deprecated in modern
            // Java); kept as-is for this legacy demo.
            Class demoClazz = Class.forName(demoClassName);
            demoObject = demoClazz.newInstance();
        } catch (Throwable t) {
            t.printStackTrace();
            throw new RuntimeException("Error while instantiating demo: "+demoClassName);
        }
        if( !(demoObject instanceof GLEventListener) ) {
            throw new RuntimeException("Not a GLEventListener: "+demoClassName);
        }
        GLEventListener demo = (GLEventListener) demoObject;
        // glProfileStr stays null unless a -GL* flag was given; presumably
        // GLProfile.get(null) yields the default profile — confirm against the JOGL API.
        GLProfile glp = GLProfile.get(glProfileStr);
        try {
            GLCapabilities caps = new GLCapabilities(glp);
            NewtFactory.setUseEDT(true);
            Window nWindow = null;
            if(useAWT) {
                Display nDisplay = NewtFactory.createDisplay(NativeWindowFactory.TYPE_AWT, null); // local display
                Screen nScreen = NewtFactory.createScreen(NativeWindowFactory.TYPE_AWT, nDisplay, 0); // screen 0
                if(useAWTTestFrame) {
                    // Host the NEWT window inside an external AWT frame with a menu bar.
                    java.awt.MenuBar menuTest = new java.awt.MenuBar();
                    menuTest.add(new java.awt.Menu("External Frame Test - Menu"));
                    java.awt.Frame frame = new java.awt.Frame("External Frame Test");
                    frame.setMenuBar(menuTest);
                    nWindow = NewtFactory.createWindow(NativeWindowFactory.TYPE_AWT, new Object[] { frame }, nScreen, caps);
                } else {
                    nWindow = NewtFactory.createWindow(NativeWindowFactory.TYPE_AWT, nScreen, caps);
                }
            } else {
                Display nDisplay = NewtFactory.createDisplay(null); // local display
                Screen nScreen = NewtFactory.createScreen(nDisplay, 0); // screen 0
                if(parented) {
                    // Child window hosted in a slightly larger parent NEWT window.
                    Window parent = NewtFactory.createWindow(nScreen, caps);
                    parent.setPosition(x_p, y_p);
                    parent.setSize(width+width/10, height+height/10);
                    parent.setVisible(true);
                    nWindow = NewtFactory.createWindow(parent, caps);
                } else {
                    nWindow = NewtFactory.createWindow(nScreen, caps);
                }
            }
            nWindow.setUndecorated(undecorated);
            nWindow.getScreen().setDestroyWhenUnused(true);
            window = GLWindow.create(nWindow);
            // Inject the window into the demo via either a 'window' or a 'glWindow' field.
            if(!setField(demo, "window", window)) {
                setField(demo, "glWindow", window);
            }
            window.addWindowListener(listener);
            window.addMouseListener(listener);
            window.addKeyListener(listener);
            window.addGLEventListener(demo);
            window.setPosition(x, y);
            window.setSize(width, height);
            window.setFullscreen(fullscreen);
            // Size OpenGL to Video Surface
            window.setVisible(true);
            window.enablePerfLog(true);
            // Render until quit is requested or 20 seconds have elapsed.
            do {
                window.display();
            } while (!quit && window.getDuration() < 20000) ;
            window.destroy();
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl;
import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensureNotNull;
import java.util.Date;
import java.util.List;
import org.camunda.bpm.engine.exception.NotValidException;
import org.camunda.bpm.engine.history.HistoricDecisionInstance;
import org.camunda.bpm.engine.history.HistoricDecisionInstanceQuery;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.interceptor.CommandExecutor;
/**
* @author Philipp Ossler
*/
/**
 * Fluent query implementation for {@link HistoricDecisionInstance} lookups.
 * Filter setters validate their argument, store it in a protected field read by
 * the persistence mapping, and return {@code this} for chaining. Execution is
 * delegated to the {@code HistoricDecisionInstanceManager}.
 */
public class HistoricDecisionInstanceQueryImpl extends AbstractQuery<HistoricDecisionInstanceQuery, HistoricDecisionInstance> implements HistoricDecisionInstanceQuery {

    private static final long serialVersionUID = 1L;

    // Filter criteria; field names are referenced by the query's mapping layer.
    protected String decisionInstanceId;
    protected String[] decisionInstanceIdIn;
    protected String decisionDefinitionId;
    protected String decisionDefinitionKey;
    protected String decisionDefinitionName;
    protected String processDefinitionKey;
    protected String processDefinitionId;
    protected String processInstanceId;
    protected String caseDefinitionKey;
    protected String caseDefinitionId;
    protected String caseInstanceId;
    protected String[] activityInstanceIds;
    protected String[] activityIds;
    protected Date evaluatedBefore;
    protected Date evaluatedAfter;
    protected String userId;

    // Result-shaping options (what to fetch alongside each instance).
    protected boolean includeInput = false;
    protected boolean includeOutputs = false;
    protected boolean isByteArrayFetchingEnabled = true;
    protected boolean isCustomObjectDeserializationEnabled = true;

    protected String rootDecisionInstanceId;
    protected boolean rootDecisionInstancesOnly = false;

    protected String decisionRequirementsDefinitionId;
    protected String decisionRequirementsDefinitionKey;

    protected String[] tenantIds;

    public HistoricDecisionInstanceQueryImpl() {
    }

    public HistoricDecisionInstanceQueryImpl(CommandExecutor commandExecutor) {
        super(commandExecutor);
    }

    public HistoricDecisionInstanceQuery decisionInstanceId(String decisionInstanceId) {
        ensureNotNull(NotValidException.class, "decisionInstanceId", decisionInstanceId);
        this.decisionInstanceId = decisionInstanceId;
        return this;
    }

    public HistoricDecisionInstanceQuery decisionInstanceIdIn(String... decisionInstanceIdIn) {
        ensureNotNull("decisionInstanceIdIn", (Object[]) decisionInstanceIdIn);
        this.decisionInstanceIdIn = decisionInstanceIdIn;
        return this;
    }

    public HistoricDecisionInstanceQuery decisionDefinitionId(String decisionDefinitionId) {
        ensureNotNull(NotValidException.class, "decisionDefinitionId", decisionDefinitionId);
        this.decisionDefinitionId = decisionDefinitionId;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery decisionDefinitionKey(String decisionDefinitionKey) {
        ensureNotNull(NotValidException.class, "decisionDefinitionKey", decisionDefinitionKey);
        this.decisionDefinitionKey = decisionDefinitionKey;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery decisionDefinitionName(String decisionDefinitionName) {
        ensureNotNull(NotValidException.class, "decisionDefinitionName", decisionDefinitionName);
        this.decisionDefinitionName = decisionDefinitionName;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery processDefinitionKey(String processDefinitionKey) {
        ensureNotNull(NotValidException.class, "processDefinitionKey", processDefinitionKey);
        this.processDefinitionKey = processDefinitionKey;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery processDefinitionId(String processDefinitionId) {
        ensureNotNull(NotValidException.class, "processDefinitionId", processDefinitionId);
        this.processDefinitionId = processDefinitionId;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery processInstanceId(String processInstanceId) {
        ensureNotNull(NotValidException.class, "processInstanceId", processInstanceId);
        this.processInstanceId = processInstanceId;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery caseDefinitionKey(String caseDefinitionKey) {
        ensureNotNull(NotValidException.class, "caseDefinitionKey", caseDefinitionKey);
        this.caseDefinitionKey = caseDefinitionKey;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery caseDefinitionId(String caseDefinitionId) {
        ensureNotNull(NotValidException.class, "caseDefinitionId", caseDefinitionId);
        this.caseDefinitionId = caseDefinitionId;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery caseInstanceId(String caseInstanceId) {
        ensureNotNull(NotValidException.class, "caseInstanceId", caseInstanceId);
        this.caseInstanceId = caseInstanceId;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery activityIdIn(String... activityIds) {
        ensureNotNull("activityIds", (Object[]) activityIds);
        this.activityIds = activityIds;
        return this;
    }

    public HistoricDecisionInstanceQuery activityInstanceIdIn(String... activityInstanceIds) {
        ensureNotNull("activityInstanceIds", (Object[]) activityInstanceIds);
        this.activityInstanceIds = activityInstanceIds;
        return this;
    }

    public HistoricDecisionInstanceQuery evaluatedBefore(Date evaluatedBefore) {
        ensureNotNull(NotValidException.class, "evaluatedBefore", evaluatedBefore);
        this.evaluatedBefore = evaluatedBefore;
        return this;
    }

    public HistoricDecisionInstanceQuery evaluatedAfter(Date evaluatedAfter) {
        ensureNotNull(NotValidException.class, "evaluatedAfter", evaluatedAfter);
        this.evaluatedAfter = evaluatedAfter;
        return this;
    }

    public HistoricDecisionInstanceQuery tenantIdIn(String... tenantIds) {
        ensureNotNull("tenantIds", (Object[]) tenantIds);
        this.tenantIds = tenantIds;
        return this;
    }

    public HistoricDecisionInstanceQuery orderByTenantId() {
        return orderBy(HistoricDecisionInstanceQueryProperty.TENANT_ID);
    }

    @Override
    public HistoricDecisionInstanceQuery userId(String userId) {
        ensureNotNull(NotValidException.class, "userId", userId);
        this.userId = userId;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery orderByEvaluationTime() {
        orderBy(HistoricDecisionInstanceQueryProperty.EVALUATION_TIME);
        return this;
    }

    // Counts matching instances; checkQueryOk() rejects invalid ordering state first.
    @Override
    public long executeCount(CommandContext commandContext) {
        checkQueryOk();
        return commandContext
                .getHistoricDecisionInstanceManager()
                .findHistoricDecisionInstanceCountByQueryCriteria(this);
    }

    // Fetches one page of matching instances.
    @Override
    public List<HistoricDecisionInstance> executeList(CommandContext commandContext, Page page) {
        checkQueryOk();
        return commandContext
                .getHistoricDecisionInstanceManager()
                .findHistoricDecisionInstancesByQueryCriteria(this, page);
    }

    // Plain getters used by the persistence mapping.
    public String getDecisionDefinitionId() {
        return decisionDefinitionId;
    }

    public String getDecisionDefinitionKey() {
        return decisionDefinitionKey;
    }

    public String getDecisionDefinitionName() {
        return decisionDefinitionName;
    }

    public String getProcessDefinitionKey() {
        return processDefinitionKey;
    }

    public String getProcessDefinitionId() {
        return processDefinitionId;
    }

    public String getProcessInstanceId() {
        return processInstanceId;
    }

    public String getCaseDefinitionKey() {
        return caseDefinitionKey;
    }

    public String getCaseDefinitionId() {
        return caseDefinitionId;
    }

    public String getCaseInstanceId() {
        return caseInstanceId;
    }

    public String[] getActivityInstanceIds() {
        return activityInstanceIds;
    }

    public String[] getActivityIds() {
        return activityIds;
    }

    public String[] getTenantIds() {
        return tenantIds;
    }

    // NOTE(review): public method name is plural (includeInputs) while the backing
    // flag is singular (includeInput); also lacks @Override unlike includeOutputs().
    public HistoricDecisionInstanceQuery includeInputs() {
        includeInput = true;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery includeOutputs() {
        includeOutputs = true;
        return this;
    }

    public boolean isIncludeInput() {
        return includeInput;
    }

    public boolean isIncludeOutputs() {
        return includeOutputs;
    }

    @Override
    public HistoricDecisionInstanceQuery disableBinaryFetching() {
        isByteArrayFetchingEnabled = false;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery disableCustomObjectDeserialization() {
        isCustomObjectDeserializationEnabled = false;
        return this;
    }

    public boolean isByteArrayFetchingEnabled() {
        return isByteArrayFetchingEnabled;
    }

    public boolean isCustomObjectDeserializationEnabled() {
        return isCustomObjectDeserializationEnabled;
    }

    public String getRootDecisionInstanceId() {
        return rootDecisionInstanceId;
    }

    public HistoricDecisionInstanceQuery rootDecisionInstanceId(String rootDecisionInstanceId) {
        ensureNotNull(NotValidException.class, "rootDecisionInstanceId", rootDecisionInstanceId);
        this.rootDecisionInstanceId = rootDecisionInstanceId;
        return this;
    }

    public boolean isRootDecisionInstancesOnly() {
        return rootDecisionInstancesOnly;
    }

    public HistoricDecisionInstanceQuery rootDecisionInstancesOnly() {
        this.rootDecisionInstancesOnly = true;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery decisionRequirementsDefinitionId(String decisionRequirementsDefinitionId) {
        ensureNotNull(NotValidException.class, "decisionRequirementsDefinitionId", decisionRequirementsDefinitionId);
        this.decisionRequirementsDefinitionId = decisionRequirementsDefinitionId;
        return this;
    }

    @Override
    public HistoricDecisionInstanceQuery decisionRequirementsDefinitionKey(String decisionRequirementsDefinitionKey) {
        ensureNotNull(NotValidException.class, "decisionRequirementsDefinitionKey", decisionRequirementsDefinitionKey);
        this.decisionRequirementsDefinitionKey = decisionRequirementsDefinitionKey;
        return this;
    }

    public String getDecisionRequirementsDefinitionId() {
        return decisionRequirementsDefinitionId;
    }

    public String getDecisionRequirementsDefinitionKey() {
        return decisionRequirementsDefinitionKey;
    }
}
| |
/*
* Lantern
*
* Copyright (c) LanternPowered <https://www.lanternpowered.org>
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* This work is licensed under the terms of the MIT License (MIT). For
* a copy, see 'LICENSE.txt' or <https://opensource.org/licenses/MIT>.
*/
package org.lanternpowered.server.data.io.store.entity;
import static org.lanternpowered.server.data.DataHelper.getOrCreateView;
import org.lanternpowered.server.data.DataQueries;
import org.lanternpowered.server.data.io.store.ObjectSerializer;
import org.lanternpowered.server.data.io.store.SimpleValueContainer;
import org.lanternpowered.server.data.io.store.item.ItemStackStore;
import org.lanternpowered.server.data.key.LanternKeys;
import org.lanternpowered.server.entity.living.player.gamemode.LanternGameMode;
import org.lanternpowered.server.game.Lantern;
import org.lanternpowered.server.game.registry.type.advancement.AdvancementTreeRegistryModule;
import org.lanternpowered.server.game.registry.type.entity.player.GameModeRegistryModule;
import org.lanternpowered.server.inventory.AbstractSlot;
import org.lanternpowered.server.inventory.ISlot;
import org.lanternpowered.server.inventory.LanternItemStack;
import org.lanternpowered.server.inventory.vanilla.AbstractUserInventory;
import org.lanternpowered.server.inventory.vanilla.LanternPlayerArmorInventory;
import org.lanternpowered.server.inventory.vanilla.LanternPrimaryPlayerInventory;
import org.lanternpowered.server.item.recipe.RecipeBookState;
import org.lanternpowered.server.registry.type.data.GameModeRegistry;
import org.lanternpowered.server.world.LanternWorld;
import org.lanternpowered.server.world.LanternWorldPropertiesOld;
import org.spongepowered.api.ResourceKey;
import org.spongepowered.api.data.Keys;
import org.spongepowered.api.data.persistence.DataContainer;
import org.spongepowered.api.data.persistence.DataQuery;
import org.spongepowered.api.data.persistence.DataView;
import org.spongepowered.api.data.persistence.Queries;
import org.spongepowered.api.entity.living.player.gamemode.GameMode;
import org.spongepowered.api.entity.living.player.gamemode.GameModes;
import org.spongepowered.api.item.inventory.Inventory;
import org.spongepowered.api.item.inventory.Slot;
import org.spongepowered.api.item.inventory.type.GridInventory;
import org.spongepowered.api.util.RespawnLocation;
import org.spongepowered.math.vector.Vector3d;
import java.time.Instant;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
/**
* Note: This store assumes that all the Sponge data (world/data/sponge) is merged
* into one {@link DataView}. This listed under a sub view with the data query
* {@link DataQueries#SPONGE_DATA}.
*/
@SuppressWarnings({"OptionalGetWithoutIsPresent", "ConstantConditions"})
public class UserStore<T extends AbstractUser> extends LivingStore<T> {
// --- NBT / DataView paths used by this store. Vanilla-compatible names keep the
// --- saved data readable by standard tooling; Lantern-specific ones are marked.
private static final DataQuery ABILITIES = DataQuery.of("abilities");
private static final DataQuery FLYING = DataQuery.of("flying");
private static final DataQuery FLYING_SPEED = DataQuery.of("flySpeed");
private static final DataQuery CAN_FLY = DataQuery.of("mayfly");
private static final DataQuery SCORE = DataQuery.of("Score");
private static final DataQuery GAME_MODE = DataQuery.of("playerGameType");
private static final DataQuery SELECTED_ITEM_SLOT = DataQuery.of("SelectedItemSlot");
private static final DataQuery DIMENSION = DataQuery.of("Dimension");
// Legacy Bukkit paths ('.' separator splits "bukkit" / "firstPlayed"); read-only fallbacks.
private static final DataQuery BUKKIT_FIRST_DATE_PLAYED = DataQuery.of('.', "bukkit.firstPlayed");
private static final DataQuery BUKKIT_LAST_DATE_PLAYED = DataQuery.of('.', "bukkit.lastPlayed");
private static final DataQuery FIRST_DATE_PLAYED = DataQuery.of("FirstJoin");
private static final DataQuery LAST_DATE_PLAYED = DataQuery.of("LastPlayed");
// Respawn locations: one entry per non-overworld dimension (overworld goes in the root view).
private static final DataQuery RESPAWN_LOCATIONS = DataQuery.of("Spawns");
private static final DataQuery RESPAWN_LOCATIONS_DIMENSION = DataQuery.of("Dim");
private static final DataQuery RESPAWN_LOCATIONS_X = DataQuery.of("SpawnX");
private static final DataQuery RESPAWN_LOCATIONS_Y = DataQuery.of("SpawnY");
private static final DataQuery RESPAWN_LOCATIONS_Z = DataQuery.of("SpawnZ");
private static final DataQuery RESPAWN_LOCATIONS_FORCED = DataQuery.of("SpawnForced");
private static final DataQuery SLOT = DataQuery.of("Slot");
private static final DataQuery INVENTORY = DataQuery.of("Inventory");
private static final DataQuery ENDER_CHEST_INVENTORY = DataQuery.of("EnderItems");
private static final DataQuery RECIPE_BOOK = DataQuery.of("recipeBook");
private static final DataQuery CRAFTING_RECIPE_BOOK_GUI_OPEN = DataQuery.of("isGuiOpen");
private static final DataQuery CRAFTING_RECIPE_BOOK_FILTER_ACTIVE = DataQuery.of("isFilteringCraftable");
private static final DataQuery SMELTING_RECIPE_BOOK_GUI_OPEN = DataQuery.of("isFurnaceGuiOpen");
private static final DataQuery SMELTING_RECIPE_BOOK_FILTER_ACTIVE = DataQuery.of("isFurnaceFilteringCraftable");
private static final DataQuery OPEN_ADVANCEMENT_TREE = DataQuery.of("openAdvancementTree"); // Lantern
/**
 * Restores the user's world from the stored integer dimension id (defaults to 0
 * when absent), in addition to the base living-entity state.
 */
@Override
public void deserialize(T player, DataView dataView) {
    super.deserialize(player, dataView);
    final int dimension = dataView.getInt(DIMENSION).orElse(0);
    Lantern.getWorldManager().getWorldProperties(dimension).ifPresent(worldProperties -> {
        final LanternWorldPropertiesOld worldProperties0 = (LanternWorldPropertiesOld) worldProperties;
        final Optional<LanternWorld> optWorld = worldProperties0.getWorld();
        if (optWorld.isPresent()) {
            // The world is loaded: attach the user to it directly.
            player.setRawWorld(optWorld.get());
        } else {
            // World not loaded yet: keep only its properties for later resolution.
            player.setUserWorld(worldProperties0);
        }
    });
}
/**
 * Writes the user's dimension id alongside the base living-entity state.
 * Falls back to the user-world properties when no live world is attached,
 * and to dimension 0 when neither is known.
 */
@Override
public void serialize(T entity, DataView dataView) {
    super.serialize(entity, dataView);
    final LanternWorld world = entity.getWorld();
    final UUID uniqueId = world != null ? world.getUniqueId() :
            entity.getUserWorld() != null ? entity.getUserWorld().getUniqueId() : null;
    dataView.set(DIMENSION, uniqueId == null ? 0 : Lantern.getWorldManager().getWorldDimensionId(uniqueId).orElse(0));
}
/**
 * Serializes player-specific values (abilities, play dates, respawn locations,
 * game mode, inventories, recipe-book state, open advancement tree) into the
 * vanilla-compatible layout, removing each consumed key from the value container
 * before delegating the remainder to the superclass.
 */
@Override
public void serializeValues(T player, SimpleValueContainer valueContainer, DataView dataView) {
    // Transient state that must not be persisted.
    valueContainer.remove(Keys.HEAD_ROTATION);
    valueContainer.remove(Keys.IS_SPRINTING);
    valueContainer.remove(Keys.IS_SNEAKING);
    valueContainer.remove(LanternKeys.ACTIVE_HAND);
    // Flight abilities; booleans are stored as byte 0/1 per the NBT convention used here.
    final DataView abilities = dataView.createView(ABILITIES);
    abilities.set(FLYING, (byte) (valueContainer.remove(Keys.IS_FLYING).orElse(false) ? 1 : 0));
    abilities.set(FLYING_SPEED, valueContainer.remove(Keys.FLYING_SPEED).orElse(0.1).floatValue());
    abilities.set(CAN_FLY, (byte) (valueContainer.remove(Keys.CAN_FLY).orElse(false) ? 1 : 0));
    // Sponge-specific extras live in their own sub-view; dates default to "now".
    final DataView spongeData = getOrCreateView(dataView, DataQueries.EXTENDED_SPONGE_DATA);
    spongeData.set(FIRST_DATE_PLAYED, valueContainer.remove(Keys.FIRST_DATE_PLAYED).orElse(Instant.now()).toEpochMilli());
    spongeData.set(LAST_DATE_PLAYED, valueContainer.remove(Keys.LAST_DATE_PLAYED).orElse(Instant.now()).toEpochMilli());
    spongeData.set(UNIQUE_ID, player.getUniqueId().toString());
    spongeData.set(Queries.CONTENT_VERSION, 1);
    // Respawn locations: only worlds with a known dimension id are persisted.
    final Map<UUID, RespawnLocation> respawnLocations = valueContainer.remove(Keys.RESPAWN_LOCATIONS).get();
    final List<DataView> respawnLocationViews = new ArrayList<>();
    for (RespawnLocation respawnLocation : respawnLocations.values()) {
        Lantern.getWorldManager().getWorldDimensionId(respawnLocation.getWorldUniqueId()).ifPresent(dimensionId -> {
            // Overworld respawn location is saved in the root container
            if (dimensionId == 0) {
                serializeRespawnLocationTo(dataView, respawnLocation);
            } else {
                respawnLocationViews.add(serializeRespawnLocationTo(DataContainer.createNew(DataView.SafetyMode.NO_DATA_CLONED), respawnLocation)
                        .set(RESPAWN_LOCATIONS_DIMENSION, dimensionId));
            }
        });
    }
    dataView.set(RESPAWN_LOCATIONS, respawnLocationViews);
    dataView.set(GAME_MODE, ((LanternGameMode) valueContainer.remove(Keys.GAME_MODE).orElseGet(GameModes.NOT_SET)).getInternalId());
    dataView.set(SELECTED_ITEM_SLOT, player.getInventory().getHotbar().getSelectedSlotIndex());
    dataView.set(SCORE, valueContainer.remove(LanternKeys.SCORE).get());
    // Serialize the player inventory
    dataView.set(INVENTORY, serializePlayerInventory(player.getInventory()));
    // Serialize the ender chest inventory
    dataView.set(ENDER_CHEST_INVENTORY, serializeEnderChest(player.getEnderChestInventory()));
    // Recipe book: crafting and furnace states share one sub-view; each is optional.
    final DataView recipeBook = dataView.createView(RECIPE_BOOK);
    RecipeBookState recipeBookState = valueContainer.remove(LanternKeys.CRAFTING_RECIPE_BOOK_STATE).orElse(null);
    if (recipeBookState != null) {
        recipeBook.set(CRAFTING_RECIPE_BOOK_FILTER_ACTIVE,
                (byte) (recipeBookState.isFilterActive() ? 1 : 0));
        recipeBook.set(CRAFTING_RECIPE_BOOK_GUI_OPEN,
                (byte) (recipeBookState.isCurrentlyOpen() ? 1 : 0));
    }
    recipeBookState = valueContainer.remove(LanternKeys.FURNACE_RECIPE_BOOK_STATE).orElse(null);
    if (recipeBookState != null) {
        recipeBook.set(SMELTING_RECIPE_BOOK_FILTER_ACTIVE,
                (byte) (recipeBookState.isFilterActive() ? 1 : 0));
        recipeBook.set(SMELTING_RECIPE_BOOK_GUI_OPEN,
                (byte) (recipeBookState.isCurrentlyOpen() ? 1 : 0));
    }
    // Lantern extension: remember which advancement tree the player had open (nested Optional).
    valueContainer.remove(LanternKeys.OPEN_ADVANCEMENT_TREE).ifPresent(o ->
            o.ifPresent(advancementTree -> dataView.set(OPEN_ADVANCEMENT_TREE, advancementTree.getKey())));
    super.serializeValues(player, valueContainer, dataView);
}
/**
 * Writes the position and the "forced" flag of a respawn location into the
 * given view, returning that same view so callers can keep chaining.
 */
private static DataView serializeRespawnLocationTo(DataView dataView, RespawnLocation respawnLocation) {
    final Vector3d pos = respawnLocation.getPosition();
    dataView.set(RESPAWN_LOCATIONS_X, pos.getX());
    dataView.set(RESPAWN_LOCATIONS_Y, pos.getY());
    dataView.set(RESPAWN_LOCATIONS_Z, pos.getZ());
    dataView.set(RESPAWN_LOCATIONS_FORCED, respawnLocation.isForced());
    return dataView;
}
@Override
/**
 * Restores player state from the given {@link DataView} into the value
 * container and directly into the player's inventories, handling both the
 * vanilla layout and legacy Bukkit/Sponge locations of some values.
 *
 * @param player The player being loaded
 * @param valueContainer The container the deserialized key/value data is written to
 * @param dataView The view the serialized data is read from
 */
public void deserializeValues(T player, SimpleValueContainer valueContainer, DataView dataView) {
    // Try to convert old bukkit values first
    dataView.getLong(BUKKIT_FIRST_DATE_PLAYED).ifPresent(v -> valueContainer.set(Keys.FIRST_DATE_PLAYED, Instant.ofEpochMilli(v)));
    dataView.getLong(BUKKIT_LAST_DATE_PLAYED).ifPresent(v -> valueContainer.set(Keys.LAST_DATE_PLAYED, Instant.ofEpochMilli(v)));
    // Deserialize sponge data; these overwrite the bukkit values when present.
    dataView.getView(DataQueries.EXTENDED_SPONGE_DATA).ifPresent(view -> {
        view.getLong(FIRST_DATE_PLAYED).ifPresent(v -> valueContainer.set(Keys.FIRST_DATE_PLAYED, Instant.ofEpochMilli(v)));
        view.getLong(LAST_DATE_PLAYED).ifPresent(v -> valueContainer.set(Keys.LAST_DATE_PLAYED, Instant.ofEpochMilli(v)));
    });
    // Abilities flags were written as bytes; any positive value means true.
    dataView.getView(ABILITIES).ifPresent(view -> {
        view.getInt(FLYING).ifPresent(v -> valueContainer.set(Keys.IS_FLYING, v > 0));
        view.getDouble(FLYING_SPEED).ifPresent(v -> valueContainer.set(Keys.FLYING_SPEED, v));
        view.getInt(CAN_FLY).ifPresent(v -> valueContainer.set(Keys.CAN_FLY, v > 0));
    });
    final Map<UUID, RespawnLocation> respawnLocations = new HashMap<>();
    // Overworld respawn location is saved in the root container
    final Optional<Double> optSpawnX = dataView.getDouble(RESPAWN_LOCATIONS_X);
    final Optional<Double> optSpawnY = dataView.getDouble(RESPAWN_LOCATIONS_Y);
    final Optional<Double> optSpawnZ = dataView.getDouble(RESPAWN_LOCATIONS_Z);
    if (optSpawnX.isPresent() && optSpawnY.isPresent() && optSpawnZ.isPresent()) {
        // NOTE(review): assumes world properties for dimension 0 always exist.
        UUID uniqueId = Lantern.getWorldManager().getWorldProperties(0).get().getUniqueId();
        respawnLocations.put(uniqueId, deserializeRespawnLocation(dataView, uniqueId, optSpawnX.get(),
                optSpawnY.get(), optSpawnZ.get()));
    }
    // Remaining respawn locations are stored per dimension id; entries for
    // unknown dimensions are skipped.
    dataView.getViewList(RESPAWN_LOCATIONS).ifPresent(v -> v.forEach(view -> {
        int dimensionId = view.getInt(RESPAWN_LOCATIONS_DIMENSION).get();
        Lantern.getWorldManager().getWorldProperties(dimensionId).ifPresent(props -> {
            UUID uniqueId = props.getUniqueId();
            double x = view.getDouble(RESPAWN_LOCATIONS_X).get();
            double y = view.getDouble(RESPAWN_LOCATIONS_Y).get();
            double z = view.getDouble(RESPAWN_LOCATIONS_Z).get();
            respawnLocations.put(uniqueId, deserializeRespawnLocation(view, uniqueId, x, y, z));
        });
    }));
    valueContainer.set(Keys.RESPAWN_LOCATIONS, respawnLocations);
    dataView.getInt(SCORE).ifPresent(v -> valueContainer.set(LanternKeys.SCORE, v));
    // Unknown or missing game mode ids fall back to NOT_SET.
    final GameMode gameMode = dataView.getInt(GAME_MODE)
            .flatMap(v -> GameModeRegistry.get().getOptional(v)).orElseGet(GameModes.NOT_SET);
    valueContainer.set(Keys.GAME_MODE, gameMode);
    player.getInventory().getHotbar().setSelectedSlotIndex(dataView.getInt(SELECTED_ITEM_SLOT).orElse(0));
    // Deserialize the player inventory
    dataView.getViewList(INVENTORY).ifPresent(views -> deserializePlayerInventory(player.getInventory(), views));
    // Deserialize the ender chest inventory
    dataView.getViewList(ENDER_CHEST_INVENTORY).ifPresent(views -> deserializeEnderChest(player.getEnderChestInventory(), views));
    // Recipe book flags default to 0 (false) when absent.
    dataView.getView(RECIPE_BOOK).ifPresent(view -> {
        boolean filterActive = view.getInt(CRAFTING_RECIPE_BOOK_FILTER_ACTIVE).orElse(0) > 0;
        boolean currentlyOpen = view.getInt(CRAFTING_RECIPE_BOOK_GUI_OPEN).orElse(0) > 0;
        valueContainer.set(LanternKeys.CRAFTING_RECIPE_BOOK_STATE, new RecipeBookState(currentlyOpen, filterActive));
        filterActive = view.getInt(SMELTING_RECIPE_BOOK_FILTER_ACTIVE).orElse(0) > 0;
        currentlyOpen = view.getInt(SMELTING_RECIPE_BOOK_GUI_OPEN).orElse(0) > 0;
        valueContainer.set(LanternKeys.FURNACE_RECIPE_BOOK_STATE, new RecipeBookState(currentlyOpen, filterActive));
    });
    dataView.getString(OPEN_ADVANCEMENT_TREE).ifPresent(id -> valueContainer
            .set(LanternKeys.OPEN_ADVANCEMENT_TREE, AdvancementTreeRegistryModule.get().get(ResourceKey.resolve(id))));
    super.deserializeValues(player, valueContainer, dataView);
}
/**
 * Reconstructs a {@link RespawnLocation} for the given world from the stored
 * coordinates; the "forced" flag was written as an integer where any positive
 * value means {@code true}.
 */
private static RespawnLocation deserializeRespawnLocation(DataView dataView, UUID worldUUID, double x, double y, double z) {
    final int forcedFlag = dataView.getInt(RESPAWN_LOCATIONS_FORCED).orElse(0);
    final RespawnLocation.Builder builder = RespawnLocation.builder();
    builder.world(worldUUID);
    builder.position(new Vector3d(x, y, z));
    builder.forceSpawn(forcedFlag > 0);
    return builder.build();
}
/**
 * Serializes all non-empty stacks of the ender chest into item views, each
 * tagged with its slot index (stored as a byte).
 */
private static List<DataView> serializeEnderChest(GridInventory enderChestInventory) {
    final List<DataView> views = new ArrayList<>();
    for (Slot slot : enderChestInventory.slots()) {
        ((ISlot) slot).peek().ifNotEmpty(stack -> {
            final DataView view = ItemStackStore.INSTANCE.serialize(stack);
            view.set(SLOT, enderChestInventory.get(slot, Keys.SLOT_INDEX).get().byteValue());
            views.add(view);
        });
    }
    return views;
}
/**
 * Restores ender chest contents from the serialized item views, placing each
 * stack back into its recorded slot.
 */
private static void deserializeEnderChest(GridInventory enderChestInventory, List<DataView> itemViews) {
    itemViews.forEach(view -> {
        // Slot indices were stored as signed bytes; mask to recover 0..255.
        final int index = view.getByte(SLOT).get() & 0xff;
        enderChestInventory.set(index, ItemStackStore.INSTANCE.deserialize(view));
    });
}
/**
 * Restores the player inventory from serialized item views. Stored slot
 * indices encode the target sub-inventory: main inventory at offset 0,
 * armor at offset 100 and the off hand at the fixed index 150; anything
 * outside those ranges is ignored.
 */
private static void deserializePlayerInventory(AbstractUserInventory<?> inventory, List<DataView> itemViews) {
    final LanternPrimaryPlayerInventory main = inventory.getPrimary();
    final LanternPlayerArmorInventory armor = inventory.getArmor();
    final AbstractSlot offHand = inventory.getOffhand();
    for (DataView view : itemViews) {
        // Slot indices were stored as signed bytes; mask to recover 0..255.
        final int index = view.getByte(SLOT).get() & 0xff;
        final LanternItemStack stack = ItemStackStore.INSTANCE.deserialize(view);
        if (index >= 0 && index < main.capacity()) {
            main.set(index, stack);
        } else if (index >= 100 && index - 100 < armor.capacity()) {
            armor.set(index - 100, stack);
        } else if (index == 150) {
            offHand.set(stack);
        }
    }
}
/**
 * Serializes the complete player inventory into a flat list of item views.
 * Slot indices are offset per sub-inventory so they can be told apart on
 * load: main inventory at 0, armor at 100 and the off hand at 150.
 */
private static List<DataView> serializePlayerInventory(AbstractUserInventory<?> inventory) {
    final List<DataView> views = new ArrayList<>();
    final LanternPrimaryPlayerInventory main = inventory.getPrimary();
    final LanternPlayerArmorInventory armor = inventory.getArmor();
    // Main inventory slots keep their raw indices.
    for (Slot slot : main.slots()) {
        serializeSlot(main, slot, 0, ItemStackStore.INSTANCE, views);
    }
    // Armor slots are shifted into the 100+ range.
    for (Slot slot : armor.slots()) {
        serializeSlot(armor, slot, 100, ItemStackStore.INSTANCE, views);
    }
    // The off hand slot always uses the fixed index 150.
    serializeSlot(150, inventory.getOffhand(), ItemStackStore.INSTANCE, views);
    return views;
}
/**
 * Serializes one slot of the given parent inventory, deriving the stored
 * index from the slot's position within the parent plus the given offset.
 */
private static void serializeSlot(Inventory parent, Slot slot, int indexOffset,
        ObjectSerializer<LanternItemStack> itemStackSerializer, List<DataView> views) {
    final int slotIndex = parent.getProperty(slot, InventoryProperties.SLOT_INDEX).get();
    serializeSlot(slotIndex + indexOffset, slot, itemStackSerializer, views);
}
/**
 * Serializes the stack held by a slot under the given index; empty slots
 * produce no view at all. The index is stored as a byte.
 */
private static void serializeSlot(int index, Slot slot, ObjectSerializer<LanternItemStack> itemStackSerializer, List<DataView> views) {
    ((ISlot) slot).peek().ifNotEmpty(contents -> {
        final DataView view = itemStackSerializer.serialize(contents);
        view.set(SLOT, (byte) index);
        views.add(view);
    });
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.optimizer.calcite.stats;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import org.apache.calcite.linq4j.Linq4j;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.linq4j.function.Predicate1;
import org.apache.calcite.plan.RelOptPredicateList;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Aggregate;
import org.apache.calcite.rel.core.Join;
import org.apache.calcite.rel.core.JoinRelType;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.Union;
import org.apache.calcite.rel.metadata.BuiltInMetadata;
import org.apache.calcite.rel.metadata.ChainedRelMetadataProvider;
import org.apache.calcite.rel.metadata.MetadataDef;
import org.apache.calcite.rel.metadata.MetadataHandler;
import org.apache.calcite.rel.metadata.ReflectiveRelMetadataProvider;
import org.apache.calcite.rel.metadata.RelMdPredicates;
import org.apache.calcite.rel.metadata.RelMetadataProvider;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexPermuteInputsShuttle;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.rex.RexVisitorImpl;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.util.BitSets;
import org.apache.calcite.util.BuiltInMethod;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.mapping.Mapping;
import org.apache.calcite.util.mapping.MappingType;
import org.apache.calcite.util.mapping.Mappings;
import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil;
import com.google.common.base.Function;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
//TODO: Move this to calcite
public class HiveRelMdPredicates implements MetadataHandler<BuiltInMetadata.Predicates> {

  /**
   * Provider that consults this handler first and falls back to Calcite's
   * built-in {@link RelMdPredicates} for rel types not handled here.
   */
  public static final RelMetadataProvider SOURCE =
      ChainedRelMetadataProvider.of(
          ImmutableList.of(
              ReflectiveRelMetadataProvider.reflectiveSource(
                  BuiltInMethod.PREDICATES.method, new HiveRelMdPredicates()),
              RelMdPredicates.SOURCE));

  // Shared immutable empty list for the "no predicates" sides of join results.
  private static final List<RexNode> EMPTY_LIST = ImmutableList.of();

  //~ Constructors -----------------------------------------------------------

  // Instances are only created through the reflective provider above.
  private HiveRelMdPredicates() {}

  //~ Methods ----------------------------------------------------------------

  @Override
  public MetadataDef<BuiltInMetadata.Predicates> getDef() {
    return BuiltInMetadata.Predicates.DEF;
  }

  /**
   * Infers predicates for a project.
   *
   * <ol>
   * <li>create a mapping from input to projection. Map only positions that
   * directly reference an input column.
   * <li>Expressions that only contain above columns are retained in the
   * Project's pullExpressions list.
   * <li>For e.g. expression 'a + e = 9' below will not be pulled up because 'e'
   * is not in the projection list.
   *
   * <pre>
   * childPullUpExprs:      {a &gt; 7, b + c &lt; 10, a + e = 9}
   * projectionExprs:       {a, b, c, e / 2}
   * projectionPullupExprs: {a &gt; 7, b + c &lt; 10}
   * </pre>
   *
   * </ol>
   */
  public RelOptPredicateList getPredicates(Project project, RelMetadataQuery mq) {
    RelNode child = project.getInput();
    final RexBuilder rexBuilder = project.getCluster().getRexBuilder();
    RelOptPredicateList childInfo = mq.getPulledUpPredicates(child);
    List<RexNode> projectPullUpPredicates = new ArrayList<RexNode>();
    HashMultimap<Integer, Integer> inpIndxToOutIndxMap = HashMultimap.create();
    ImmutableBitSet.Builder columnsMappedBuilder = ImmutableBitSet.builder();
    // Partial mapping from input field positions to output (projection)
    // positions; only direct column references are mapped.
    Mapping m = Mappings.create(MappingType.PARTIAL_FUNCTION, child.getRowType().getFieldCount(),
        project.getRowType().getFieldCount());
    for (Ord<RexNode> o : Ord.zip(project.getProjects())) {
      if (o.e instanceof RexInputRef) {
        int sIdx = ((RexInputRef) o.e).getIndex();
        m.set(sIdx, o.i);
        inpIndxToOutIndxMap.put(sIdx, o.i);
        columnsMappedBuilder.set(sIdx);
      }
    }
    // Go over childPullUpPredicates. If a predicate only contains columns in
    // 'columnsMapped' construct a new predicate based on mapping.
    final ImmutableBitSet columnsMapped = columnsMappedBuilder.build();
    for (RexNode r : childInfo.pulledUpPredicates) {
      ImmutableBitSet rCols = RelOptUtil.InputFinder.bits(r);
      if (columnsMapped.contains(rCols)) {
        r = r.accept(new RexPermuteInputsShuttle(m, child));
        projectPullUpPredicates.add(r);
      }
    }
    // Project can also generate constants. We need to include them.
    for (Ord<RexNode> expr : Ord.zip(project.getProjects())) {
      if (RexLiteral.isNullLiteral(expr.e)) {
        // NULL literal output column: expose "col IS NULL" as a predicate.
        projectPullUpPredicates.add(rexBuilder.makeCall(SqlStdOperatorTable.IS_NULL,
            rexBuilder.makeInputRef(project, expr.i)));
      } else if (expr.e instanceof RexLiteral) {
        // Constant output column: expose "col = literal".
        final RexLiteral literal = (RexLiteral) expr.e;
        projectPullUpPredicates.add(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
            rexBuilder.makeInputRef(project, expr.i), literal));
      } else if (expr.e instanceof RexCall && HiveCalciteUtil.isDeterministicFuncOnLiterals(expr.e)) {
        //TODO: Move this to calcite
        // Deterministic function over literals is effectively a constant too.
        projectPullUpPredicates.add(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
            rexBuilder.makeInputRef(project, expr.i), expr.e));
      }
    }
    return RelOptPredicateList.of(rexBuilder, projectPullUpPredicates);
  }

  /** Infers predicates for a {@link org.apache.calcite.rel.core.Join}. */
  public RelOptPredicateList getPredicates(Join join, RelMetadataQuery mq) {
    RexBuilder rB = join.getCluster().getRexBuilder();
    RelNode left = join.getInput(0);
    RelNode right = join.getInput(1);
    final RelOptPredicateList leftInfo = mq.getPulledUpPredicates(left);
    final RelOptPredicateList rightInfo = mq.getPulledUpPredicates(right);
    // Delegate the actual inference to the helper below; equality-based
    // inference is disabled here (see the helper's pull-up strategy notes).
    JoinConditionBasedPredicateInference jI =
        new JoinConditionBasedPredicateInference(join,
            RexUtil.composeConjunction(rB, leftInfo.pulledUpPredicates, false),
            RexUtil.composeConjunction(rB, rightInfo.pulledUpPredicates,
                false));
    return jI.inferPredicates(false);
  }

  /**
   * Infers predicates for an Aggregate.
   *
   * <p>Pulls up predicates that only contains references to columns in the
   * GroupSet. For e.g.
   *
   * <pre>
   * inputPullUpExprs : { a &gt; 7, b + c &lt; 10, a + e = 9}
   * groupSet         : { a, b}
   * pulledUpExprs    : { a &gt; 7}
   * </pre>
   */
  public RelOptPredicateList getPredicates(Aggregate agg, RelMetadataQuery mq) {
    final RelNode input = agg.getInput();
    final RelOptPredicateList inputInfo = mq.getPulledUpPredicates(input);
    final List<RexNode> aggPullUpPredicates = new ArrayList<>();
    final RexBuilder rexBuilder = agg.getCluster().getRexBuilder();
    ImmutableBitSet groupKeys = agg.getGroupSet();
    // Map each group key's input position to its output position (group keys
    // come first in an Aggregate's output row type).
    Mapping m = Mappings.create(MappingType.PARTIAL_FUNCTION,
        input.getRowType().getFieldCount(), agg.getRowType().getFieldCount());
    int i = 0;
    for (int j : groupKeys) {
      m.set(j, i++);
    }
    for (RexNode r : inputInfo.pulledUpPredicates) {
      ImmutableBitSet rCols = RelOptUtil.InputFinder.bits(r);
      // Constant predicates (no columns) are intentionally not pulled up here.
      if (!rCols.isEmpty() && groupKeys.contains(rCols)) {
        r = r.accept(new RexPermuteInputsShuttle(m, input));
        aggPullUpPredicates.add(r);
      }
    }
    return RelOptPredicateList.of(rexBuilder, aggPullUpPredicates);
  }

  /**
   * Infers predicates for a Union.
   *
   * <p>A predicate is pulled up only if every input produces it; predicates
   * held by some inputs only are combined into a disjunction of per-input
   * residuals that is added as one extra predicate.
   */
  public RelOptPredicateList getPredicates(Union union, RelMetadataQuery mq) {
    RexBuilder rB = union.getCluster().getRexBuilder();
    // Predicates common to all inputs seen so far, keyed by digest.
    Map<String, RexNode> finalPreds = new HashMap<>();
    // One residual conjunction per input (predicates not shared by all).
    List<RexNode> finalResidualPreds = new ArrayList<>();
    for (int i = 0; i < union.getInputs().size(); i++) {
      RelNode input = union.getInputs().get(i);
      RelOptPredicateList info = mq.getPulledUpPredicates(input);
      if (info.pulledUpPredicates.isEmpty()) {
        // One input with no predicates means nothing can be pulled up.
        return RelOptPredicateList.EMPTY;
      }
      Map<String, RexNode> preds = new HashMap<>();
      List<RexNode> residualPreds = new ArrayList<>();
      for (RexNode pred : info.pulledUpPredicates) {
        final String predString = pred.toString();
        if (i == 0) {
          preds.put(predString, pred);
          continue;
        }
        if (finalPreds.containsKey(predString)) {
          preds.put(predString, pred);
        } else {
          residualPreds.add(pred);
        }
      }
      // Add new residual preds
      finalResidualPreds.add(RexUtil.composeConjunction(rB, residualPreds, false));
      // Add those that are not part of the final set to residual
      for (Entry<String, RexNode> e : finalPreds.entrySet()) {
        if (!preds.containsKey(e.getKey())) {
          // This node was in previous union inputs, but it is not in this one
          for (int j = 0; j < i; j++) {
            finalResidualPreds.set(j, RexUtil.composeConjunction(rB, Lists.newArrayList(
                finalResidualPreds.get(j), e.getValue()), false));
          }
        }
      }
      // Final preds
      finalPreds = preds;
    }
    List<RexNode> preds = new ArrayList<>(finalPreds.values());
    RexNode disjPred = RexUtil.composeDisjunction(rB, finalResidualPreds, false);
    if (!disjPred.isAlwaysTrue()) {
      preds.add(disjPred);
    }
    return RelOptPredicateList.of(rB, preds);
  }

  /**
   * Utility to infer predicates from one side of the join that apply on the
   * other side.
   *
   * <p>Contract is:<ul>
   *
   * <li>initialize with a {@link org.apache.calcite.rel.core.Join} and
   * optional predicates applicable on its left and right subtrees.
   *
   * <li>you can
   * then ask it for equivalentPredicate(s) given a predicate.
   *
   * </ul>
   *
   * <p>So for:
   * <ol>
   * <li>'<code>R1(x) join R2(y) on x = y</code>' a call for
   * equivalentPredicates on '<code>x &gt; 7</code>' will return '
   * <code>[y &gt; 7]</code>'
   * <li>'<code>R1(x) join R2(y) on x = y join R3(z) on y = z</code>' a call for
   * equivalentPredicates on the second join '<code>x &gt; 7</code>' will return
   * </ol>
   */
  static class JoinConditionBasedPredicateInference {
    final Join joinRel;
    final boolean isSemiJoin;
    // Field counts: system fields come first, then left fields, then right.
    final int nSysFields;
    final int nFieldsLeft;
    final int nFieldsRight;
    // Bit sets describing which positions (in join coordinates) belong to
    // the left input, the right input, and all fields respectively.
    final ImmutableBitSet leftFieldsBitSet;
    final ImmutableBitSet rightFieldsBitSet;
    final ImmutableBitSet allFieldsBitSet;
    // Position -> set of positions known equal to it (transitively closed).
    SortedMap<Integer, BitSet> equivalence;
    // Predicate digest -> columns referenced by that predicate.
    final Map<String, ImmutableBitSet> exprFields;
    // Digests of every known child predicate, used to suppress duplicates.
    final Set<String> allExprsDigests;
    // Digests of the equality predicates found in the join condition.
    final Set<String> equalityPredicates;
    // Child predicates shifted into join coordinates (null when absent).
    final RexNode leftChildPredicates;
    final RexNode rightChildPredicates;

    public JoinConditionBasedPredicateInference(Join joinRel,
        RexNode lPreds, RexNode rPreds) {
      this(joinRel, ((Join) joinRel).isSemiJoin(), lPreds, rPreds);
    }

    private JoinConditionBasedPredicateInference(Join joinRel, boolean isSemiJoin,
        RexNode lPreds, RexNode rPreds) {
      super();
      this.joinRel = joinRel;
      this.isSemiJoin = isSemiJoin;
      nFieldsLeft = joinRel.getLeft().getRowType().getFieldList().size();
      nFieldsRight = joinRel.getRight().getRowType().getFieldList().size();
      nSysFields = joinRel.getSystemFieldList().size();
      leftFieldsBitSet = ImmutableBitSet.range(nSysFields,
          nSysFields + nFieldsLeft);
      rightFieldsBitSet = ImmutableBitSet.range(nSysFields + nFieldsLeft,
          nSysFields + nFieldsLeft + nFieldsRight);
      allFieldsBitSet = ImmutableBitSet.range(0,
          nSysFields + nFieldsLeft + nFieldsRight);
      exprFields = Maps.newHashMap();
      allExprsDigests = new HashSet<>();
      if (lPreds == null) {
        leftChildPredicates = null;
      } else {
        // Shift left-side predicates past the system fields into join
        // coordinates, and record their digests and referenced columns.
        Mappings.TargetMapping leftMapping = Mappings.createShiftMapping(
            nSysFields + nFieldsLeft, nSysFields, 0, nFieldsLeft);
        leftChildPredicates = lPreds.accept(
            new RexPermuteInputsShuttle(leftMapping, joinRel.getInput(0)));
        for (RexNode r : RelOptUtil.conjunctions(leftChildPredicates)) {
          exprFields.put(r.toString(), RelOptUtil.InputFinder.bits(r));
          allExprsDigests.add(r.toString());
        }
      }
      if (rPreds == null) {
        rightChildPredicates = null;
      } else {
        // Same shift for the right side: past system and left fields.
        Mappings.TargetMapping rightMapping = Mappings.createShiftMapping(
            nSysFields + nFieldsLeft + nFieldsRight,
            nSysFields + nFieldsLeft, 0, nFieldsRight);
        rightChildPredicates = rPreds.accept(
            new RexPermuteInputsShuttle(rightMapping, joinRel.getInput(1)));
        for (RexNode r : RelOptUtil.conjunctions(rightChildPredicates)) {
          exprFields.put(r.toString(), RelOptUtil.InputFinder.bits(r));
          allExprsDigests.add(r.toString());
        }
      }
      // Every position starts out equivalent only to itself.
      equivalence = Maps.newTreeMap();
      equalityPredicates = new HashSet<>();
      for (int i = 0; i < nSysFields + nFieldsLeft + nFieldsRight; i++) {
        equivalence.put(i, BitSets.of(i));
      }
      // Only process equivalences found in the join conditions. Processing
      // Equivalences from the left or right side infer predicates that are
      // already present in the Tree below the join.
      RexBuilder rexBuilder = joinRel.getCluster().getRexBuilder();
      List<RexNode> exprs =
          RelOptUtil.conjunctions(
              compose(rexBuilder, ImmutableList.of(joinRel.getCondition())));
      final EquivalenceFinder eF = new EquivalenceFinder();
      // Lists.transform is lazy; wrapping it in a new ArrayList forces the
      // visitor to run over every conjunct for its side effects.
      new ArrayList<>(
          Lists.transform(exprs,
              new Function<RexNode, Void>() {
                public Void apply(RexNode input) {
                  return input.accept(eF);
                }
              }));
      // Transitively close the equivalence classes (a=b, b=c => a=c).
      equivalence = BitSets.closure(equivalence);
    }

    /**
     * The PullUp Strategy is sound but not complete.
     * <ol>
     * <li>We only pullUp inferred predicates for now. Pulling up existing
     * predicates causes an explosion of duplicates. The existing predicates are
     * pushed back down as new predicates. Once we have rules to eliminate
     * duplicate Filter conditions, we should pullUp all predicates.
     * <li>For Left Outer: we infer new predicates from the left and set them as
     * applicable on the Right side. No predicates are pulledUp.
     * <li>Right Outer Joins are handled in an analogous manner.
     * <li>For Full Outer Joins no predicates are pulledUp or inferred.
     * </ol>
     */
    public RelOptPredicateList inferPredicates(
        boolean includeEqualityInference) {
      final List<RexNode> inferredPredicates = new ArrayList<>();
      final List<RexNode> nonFieldsPredicates = new ArrayList<>();
      // Work on a copy so inference on this call does not pollute the
      // instance-level digest set.
      final Set<String> allExprsDigests = new HashSet<>(this.allExprsDigests);
      final JoinRelType joinType = joinRel.getJoinType();
      final List<RexNode> leftPreds = ImmutableList.copyOf(RelOptUtil.conjunctions(leftChildPredicates));
      final List<RexNode> rightPreds = ImmutableList.copyOf(RelOptUtil.conjunctions(rightChildPredicates));
      final RexBuilder rexBuilder = joinRel.getCluster().getRexBuilder();
      // Infer from the left side (restricted to right fields for LEFT joins).
      switch (joinType) {
      case INNER:
      case LEFT:
      case SEMI:
      case ANTI:
        infer(leftPreds, allExprsDigests, inferredPredicates,
            nonFieldsPredicates, includeEqualityInference,
            joinType == JoinRelType.LEFT ? rightFieldsBitSet
                : allFieldsBitSet);
        break;
      }
      // Infer from the right side (restricted to left fields for RIGHT joins).
      switch (joinType) {
      case INNER:
      case RIGHT:
      case SEMI:
        infer(rightPreds, allExprsDigests, inferredPredicates,
            nonFieldsPredicates, includeEqualityInference,
            joinType == JoinRelType.RIGHT ? leftFieldsBitSet
                : allFieldsBitSet);
        break;
      }
      // Mappings from join coordinates back to each child's coordinates,
      // used to rewrite inferred predicates before pushing them down.
      Mappings.TargetMapping rightMapping = Mappings.createShiftMapping(
          nSysFields + nFieldsLeft + nFieldsRight,
          0, nSysFields + nFieldsLeft, nFieldsRight);
      final RexPermuteInputsShuttle rightPermute =
          new RexPermuteInputsShuttle(rightMapping, joinRel);
      Mappings.TargetMapping leftMapping = Mappings.createShiftMapping(
          nSysFields + nFieldsLeft, 0, nSysFields, nFieldsLeft);
      final RexPermuteInputsShuttle leftPermute =
          new RexPermuteInputsShuttle(leftMapping, joinRel);
      final List<RexNode> leftInferredPredicates = new ArrayList<>();
      final List<RexNode> rightInferredPredicates = new ArrayList<>();
      for (RexNode iP : inferredPredicates) {
        ImmutableBitSet iPBitSet = RelOptUtil.InputFinder.bits(iP);
        if (leftFieldsBitSet.contains(iPBitSet)) {
          leftInferredPredicates.add(iP.accept(leftPermute));
        } else if (rightFieldsBitSet.contains(iPBitSet)) {
          rightInferredPredicates.add(iP.accept(rightPermute));
        }
      }
      if ((joinType == JoinRelType.INNER || joinType == JoinRelType.SEMI) &&
          !nonFieldsPredicates.isEmpty()) {
        // Predicates without field references can be pushed to both inputs
        final Set<String> leftPredsSet = new HashSet<String>(
            Lists.transform(leftPreds, HiveCalciteUtil.REX_STR_FN));
        final Set<String> rightPredsSet = new HashSet<String>(
            Lists.transform(rightPreds, HiveCalciteUtil.REX_STR_FN));
        for (RexNode iP : nonFieldsPredicates) {
          if (!leftPredsSet.contains(iP.toString())) {
            leftInferredPredicates.add(iP);
          }
          if (!rightPredsSet.contains(iP.toString())) {
            rightInferredPredicates.add(iP);
          }
        }
      }
      // Assemble the result per join type: which predicates are pulled up
      // and which inferred predicates go to each side.
      switch (joinType) {
      case INNER:
        Iterable<RexNode> pulledUpPredicates = Iterables.concat(leftPreds, rightPreds,
            RelOptUtil.conjunctions(joinRel.getCondition()), inferredPredicates);
        return RelOptPredicateList.of(rexBuilder,
            pulledUpPredicates, leftInferredPredicates, rightInferredPredicates);
      case SEMI:
        return RelOptPredicateList.of(rexBuilder, Iterables.concat(leftPreds, leftInferredPredicates),
            leftInferredPredicates, rightInferredPredicates);
      case LEFT:
      case ANTI:
        return RelOptPredicateList.of(rexBuilder,
            leftPreds, EMPTY_LIST, rightInferredPredicates);
      case RIGHT:
        return RelOptPredicateList.of(rexBuilder,
            rightPreds, leftInferredPredicates, EMPTY_LIST);
      default:
        assert inferredPredicates.size() == 0;
        return RelOptPredicateList.EMPTY;
      }
    }

    // Left child predicates in join coordinates; may be null.
    public RexNode left() {
      return leftChildPredicates;
    }

    // Right child predicates in join coordinates; may be null.
    public RexNode right() {
      return rightChildPredicates;
    }

    /**
     * For each predicate, tries every column substitution allowed by the
     * equivalence classes; a substituted predicate is kept when it only
     * references {@code inferringFields}, is not already known, and is not
     * trivially true. Predicates with no valid mapping (typically no field
     * references) go to {@code nonFieldsPredicates} instead.
     */
    private void infer(List<RexNode> predicates, Set<String> allExprsDigests,
        List<RexNode> inferedPredicates, List<RexNode> nonFieldsPredicates,
        boolean includeEqualityInference, ImmutableBitSet inferringFields) {
      for (RexNode r : predicates) {
        if (!includeEqualityInference
            && equalityPredicates.contains(r.toString())) {
          continue;
        }
        Iterable<Mapping> ms = mappings(r);
        if (ms.iterator().hasNext()) {
          for (Mapping m : ms) {
            RexNode tr = r.accept(
                new RexPermuteInputsShuttle(m, joinRel.getInput(0),
                    joinRel.getInput(1)));
            if (inferringFields.contains(RelOptUtil.InputFinder.bits(tr))
                && !allExprsDigests.contains(tr.toString())
                && !isAlwaysTrue(tr)) {
              inferedPredicates.add(tr);
              allExprsDigests.add(tr.toString());
            }
          }
        } else {
          if (!isAlwaysTrue(r)) {
            nonFieldsPredicates.add(r);
          }
        }
      }
    }

    /**
     * Returns all substitution mappings for the given predicate (the cross
     * product of the equivalence classes of its columns); empty when the
     * predicate references no columns.
     */
    Iterable<Mapping> mappings(final RexNode predicate) {
      return new Iterable<Mapping>() {
        public Iterator<Mapping> iterator() {
          ImmutableBitSet fields = exprFields.get(predicate.toString());
          if (fields.cardinality() == 0) {
            return Collections.emptyIterator();
          }
          return new ExprsItr(fields);
        }
      };
    }

    // Records that positions p1 and p2 are equal (symmetrically).
    private void equivalent(int p1, int p2) {
      BitSet b = equivalence.get(p1);
      b.set(p2);
      b = equivalence.get(p2);
      b.set(p1);
    }

    // AND-composes the non-null expressions into a single conjunction.
    RexNode compose(RexBuilder rexBuilder, Iterable<RexNode> exprs) {
      exprs = Linq4j.asEnumerable(exprs).where(new Predicate1<RexNode>() {
        public boolean apply(RexNode expr) {
          return expr != null;
        }
      });
      return RexUtil.composeConjunction(rexBuilder, exprs, false);
    }

    /**
     * Find expressions of the form 'col_x = col_y'.
     */
    class EquivalenceFinder extends RexVisitorImpl<Void> {
      protected EquivalenceFinder() {
        super(true);
      }

      @Override public Void visitCall(RexCall call) {
        if (call.getOperator().getKind() == SqlKind.EQUALS) {
          int lPos = pos(call.getOperands().get(0));
          int rPos = pos(call.getOperands().get(1));
          if (lPos != -1 && rPos != -1) {
            // Both operands are plain column references: record the
            // equivalence and remember the predicate's digest.
            JoinConditionBasedPredicateInference.this.equivalent(lPos, rPos);
            JoinConditionBasedPredicateInference.this.equalityPredicates
                .add(call.toString());
          }
        }
        return null;
      }
    }

    /**
     * Given an expression returns all the possible substitutions.
     *
     * <p>For example, for an expression 'a + b + c' and the following
     * equivalences: <pre>
     * a : {a, b}
     * b : {a, b}
     * c : {c, e}
     * </pre>
     *
     * <p>The following Mappings will be returned:
     * <pre>
     * {a &rarr; a, b &rarr; a, c &rarr; c}
     * {a &rarr; a, b &rarr; a, c &rarr; e}
     * {a &rarr; a, b &rarr; b, c &rarr; c}
     * {a &rarr; a, b &rarr; b, c &rarr; e}
     * {a &rarr; b, b &rarr; a, c &rarr; c}
     * {a &rarr; b, b &rarr; a, c &rarr; e}
     * {a &rarr; b, b &rarr; b, c &rarr; c}
     * {a &rarr; b, b &rarr; b, c &rarr; e}
     * </pre>
     *
     * <p>which imply the following inferences:
     * <pre>
     * a + a + c
     * a + a + e
     * a + b + c
     * a + b + e
     * b + a + c
     * b + a + e
     * b + b + c
     * b + b + e
     * </pre>
     */
    class ExprsItr implements Iterator<Mapping> {
      // The predicate's columns, each with its equivalence class and a
      // per-column cursor; together they enumerate the cross product.
      final int[] columns;
      final BitSet[] columnSets;
      final int[] iterationIdx;
      Mapping nextMapping;
      boolean firstCall;

      ExprsItr(ImmutableBitSet fields) {
        nextMapping = null;
        columns = new int[fields.cardinality()];
        columnSets = new BitSet[fields.cardinality()];
        iterationIdx = new int[fields.cardinality()];
        for (int j = 0, i = fields.nextSetBit(0); i >= 0; i = fields
            .nextSetBit(i + 1), j++) {
          columns[j] = i;
          columnSets[j] = equivalence.get(i);
          iterationIdx[j] = 0;
        }
        firstCall = true;
      }

      // NOTE(review): hasNext() advances the iteration, so calling it twice
      // without next() skips a mapping — callers must alternate strictly.
      public boolean hasNext() {
        if (firstCall) {
          initializeMapping();
          firstCall = false;
        } else {
          computeNextMapping(iterationIdx.length - 1);
        }
        return nextMapping != null;
      }

      public Mapping next() {
        return nextMapping;
      }

      public void remove() {
        throw new UnsupportedOperationException();
      }

      // Odometer-style advance: bump the given level's cursor; on overflow
      // reset it to its first value and carry into the previous level.
      private void computeNextMapping(int level) {
        int t = columnSets[level].nextSetBit(iterationIdx[level]);
        if (t < 0) {
          if (level == 0) {
            nextMapping = null;
          } else {
            int tmp = columnSets[level].nextSetBit(0);
            nextMapping.set(columns[level], tmp);
            iterationIdx[level] = tmp + 1;
            computeNextMapping(level - 1);
          }
        } else {
          nextMapping.set(columns[level], t);
          iterationIdx[level] = t + 1;
        }
      }

      // Builds the first mapping: each column maps to the first member of
      // its equivalence class; null when any class is empty.
      private void initializeMapping() {
        nextMapping = Mappings.create(MappingType.PARTIAL_FUNCTION,
            nSysFields + nFieldsLeft + nFieldsRight,
            nSysFields + nFieldsLeft + nFieldsRight);
        for (int i = 0; i < columnSets.length; i++) {
          BitSet c = columnSets[i];
          int t = c.nextSetBit(iterationIdx[i]);
          if (t < 0) {
            nextMapping = null;
            return;
          }
          nextMapping.set(columns[i], t);
          iterationIdx[i] = t + 1;
        }
      }
    }

    // Returns the column index of a plain input reference, or -1 otherwise.
    private int pos(RexNode expr) {
      if (expr instanceof RexInputRef) {
        return ((RexInputRef) expr).getIndex();
      }
      return -1;
    }

    // A predicate of the form "col_x = col_x" is trivially true and not
    // worth inferring.
    private boolean isAlwaysTrue(RexNode predicate) {
      if (predicate instanceof RexCall) {
        RexCall c = (RexCall) predicate;
        if (c.getOperator().getKind() == SqlKind.EQUALS) {
          int lPos = pos(c.getOperands().get(0));
          int rPos = pos(c.getOperands().get(1));
          return lPos != -1 && lPos == rPos;
        }
      }
      return predicate.isAlwaysTrue();
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
import org.apache.logging.log4j.Logger;
import com.gemstone.gemfire.DataSerializable;
import com.gemstone.gemfire.DataSerializer;
import com.gemstone.gemfire.cache.CacheException;
import com.gemstone.gemfire.cache.RegionService;
import com.gemstone.gemfire.cache.client.Pool;
import com.gemstone.gemfire.cache.client.PoolFactory;
import com.gemstone.gemfire.cache.client.internal.LocatorDiscoveryCallback;
import com.gemstone.gemfire.cache.client.internal.PoolImpl;
import com.gemstone.gemfire.cache.query.QueryService;
import com.gemstone.gemfire.cache.wan.GatewaySender;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.internal.admin.remote.DistributionLocatorId;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.internal.logging.log4j.LocalizedMessage;
import com.gemstone.gemfire.pdx.internal.TypeRegistry;
/**
* Implementation of PoolFactory.
* @author darrel
* @since 5.7
*/
public class PoolFactoryImpl implements PoolFactory {
private static final Logger logger = LogService.getLogger();
/**
* Used internally to pass the attributes from this factory to
* the real pool it is creating.
*/
private PoolAttributes attributes = new PoolAttributes();
/**
* The cache that created this factory
*/
private final PoolManagerImpl pm;
public PoolFactoryImpl(PoolManagerImpl pm) {
this.pm = pm;
}
/** Sets the free-connection timeout; must be strictly positive. */
public PoolFactory setFreeConnectionTimeout(int connectionTimeout) {
if (connectionTimeout <= 0) {
throw new IllegalArgumentException("connectionTimeout must be greater than zero");
}
this.attributes.connectionTimeout = connectionTimeout;
return this;
}
/** Sets the load-conditioning interval; -1 means connections never expire. */
public PoolFactory setLoadConditioningInterval(int connectionLifetime) {
if (connectionLifetime < -1) {
throw new IllegalArgumentException("connectionLifetime must be greater than or equal to -1");
}
this.attributes.connectionLifetime = connectionLifetime;
return this;
}
/** Sets the socket buffer size in bytes; must be strictly positive. */
public PoolFactory setSocketBufferSize(int bufferSize) {
if (bufferSize <= 0) {
throw new IllegalArgumentException("socketBufferSize must be greater than zero");
}
this.attributes.socketBufferSize = bufferSize;
return this;
}
/** Enables or disables thread-local connections; no validation needed. */
public PoolFactory setThreadLocalConnections(boolean threadLocalConnections) {
this.attributes.threadLocalConnections = threadLocalConnections;
return this;
}
/** Sets the idle timeout; -1 disables idle expiration. (Param name typo is historical.) */
public PoolFactory setIdleTimeout(long idleTimout) {
if(idleTimout < -1) {
throw new IllegalArgumentException("idleTimeout must be greater than or equal to -1");
}
this.attributes.idleTimeout = idleTimout;
return this;
}
/**
* Sets the maximum connection count; -1 means unlimited.
* Validated against the currently-set minConnections, so call order matters.
*/
public PoolFactory setMaxConnections(int maxConnections) {
if(maxConnections < this.attributes.minConnections && maxConnections != -1) {
throw new IllegalArgumentException(
"maxConnections must be greater than or equal to minConnections ("
+ attributes.minConnections + ")");
}
if(maxConnections <= 0 && maxConnections != -1) {
throw new IllegalArgumentException(
"maxConnections must be greater than 0, or set to -1 (no max)");
}
this.attributes.maxConnections = maxConnections;
return this;
}
/**
* Sets the minimum connection count; must be >= 0 and no greater than the
* currently-set maxConnections (unless maxConnections is unlimited).
*/
public PoolFactory setMinConnections(int minConnections) {
if(minConnections > attributes.maxConnections && attributes.maxConnections != -1) {
throw new IllegalArgumentException(
"must be less than or equal to maxConnections (" + attributes.maxConnections + ")");
}
if(minConnections < 0) {
throw new IllegalArgumentException(
"must be greater than or equal to 0");
}
this.attributes.minConnections=minConnections;
return this;
}
/** Sets the server ping interval; must be strictly positive. */
public PoolFactory setPingInterval(long pingInterval) {
if(pingInterval <= 0) {
throw new IllegalArgumentException("pingInterval must be greater than zero");
}
this.attributes.pingInterval=pingInterval;
return this;
}
/** Sets the statistic sampling interval; -1 disables statistics. */
public PoolFactory setStatisticInterval(int statisticInterval) {
if(statisticInterval < -1) {
throw new IllegalArgumentException("statisticInterval must be greater than or equal to -1");
}
this.attributes.statisticInterval = statisticInterval;
return this;
}
/** Sets the retry attempt count; -1 selects the default retry behavior. */
public PoolFactory setRetryAttempts(int retryAttempts) {
if(retryAttempts < -1) {
throw new IllegalArgumentException("retryAttempts must be greater than or equal to -1");
}
this.attributes.retryAttempts=retryAttempts;
return this;
}
/** Sets the read timeout; 0 is allowed here (unlike the strictly-positive setters). */
public PoolFactory setReadTimeout(int timeout) {
if (timeout < 0) {
throw new IllegalArgumentException("readTimeout must be greater than or equal to zero");
}
this.attributes.readTimeout = timeout;
return this;
}
/** Sets the server group; null is normalized to the default group rather than rejected. */
public PoolFactory setServerGroup(String group) {
if (group == null) {
group = DEFAULT_SERVER_GROUP;
}
this.attributes.serverGroup = group;
return this;
}
/** Enables or disables the subscription (queue) feature. */
public PoolFactory setSubscriptionEnabled(boolean enabled) {
this.attributes.queueEnabled = enabled;
return this;
}
/** Enables or disables partitioned-region single-hop access. */
public PoolFactory setPRSingleHopEnabled(boolean enabled) {
this.attributes.prSingleHopEnabled = enabled;
return this;
}
/** Enables or disables per-user (multiuser) authentication mode. */
public PoolFactory setMultiuserAuthentication(boolean enabled) {
this.attributes.multiuserSecureModeEnabled = enabled;
return this;
}
/** Internal/test hook: create the pool in a disabled (not started) state. */
public PoolFactory setStartDisabled(boolean disable) {
this.attributes.startDisabled = disable;
return this;
}
/** Internal/test hook: callback invoked when locators are discovered. */
public PoolFactory setLocatorDiscoveryCallback(LocatorDiscoveryCallback callback) {
this.attributes.locatorCallback = callback;
return this;
}
/** Sets the subscription redundancy level; -1 means no redundancy constraint. */
public PoolFactory setSubscriptionRedundancy(int redundancyLevel) {
if (redundancyLevel < -1) {
throw new IllegalArgumentException("queueRedundancyLevel must be greater than or equal to -1");
}
this.attributes.queueRedundancyLevel = redundancyLevel;
return this;
}
/** Sets the subscription message-tracking timeout; must be strictly positive. */
public PoolFactory setSubscriptionMessageTrackingTimeout(int messageTrackingTimeout) {
if (messageTrackingTimeout <= 0) {
throw new IllegalArgumentException("queueMessageTrackingTimeout must be greater than zero");
}
this.attributes.queueMessageTrackingTimeout = messageTrackingTimeout;
return this;
}
/**
* Resolves host:port into an InetSocketAddress and appends it to the given
* list (either locators or servers). An unknown host is logged and stored
* unresolved instead of failing, so pools can be configured before DNS is up.
*/
private PoolFactory add(String host, int port, List l) {
if (port == 0) {
throw new IllegalArgumentException("port must be greater than 0 but was " + port);
// the rest of the port validation is done by InetSocketAddress
}
try {
InetAddress hostAddr = InetAddress.getByName(host);
InetSocketAddress sockAddr = new InetSocketAddress(hostAddr, port);
l.add(sockAddr);
} catch (UnknownHostException cause) {
// IllegalArgumentException ex = new IllegalArgumentException("Unknown host " + host);
// ex.initCause(cause);
// throw ex;
// Fix for #45348
logger.warn(LocalizedMessage.create(LocalizedStrings.PoolFactoryImpl_HOSTNAME_UNKNOWN, host));
InetSocketAddress sockAddr = new InetSocketAddress(host, port);
l.add(sockAddr);
}
return this;
}
/** Sets the subscription ack interval; must be strictly positive. */
public PoolFactory setSubscriptionAckInterval(int ackInterval) {
if(ackInterval <= 0) {
throw new IllegalArgumentException("ackInterval must be greater than 0");
}
this.attributes.queueAckInterval = ackInterval;
return this;
}
/** Adds a locator endpoint; mutually exclusive with addServer. */
public PoolFactory addLocator(String host, int port) {
if (this.attributes.servers.size() > 0) {
throw new IllegalStateException("A server has already been added. You can only add locators or servers; not both.");
}
return add(host, port, this.attributes.locators);
}
/** Adds a server endpoint; mutually exclusive with addLocator. */
public PoolFactory addServer(String host, int port) {
if (this.attributes.locators.size() > 0) {
throw new IllegalStateException("A locator has already been added. You can only add locators or servers; not both.");
}
return add(host, port, this.attributes.servers);
}
/** Restores all attributes to their defaults, keeping only startDisabled. */
public PoolFactory reset() {
// preserve the startDisabled across resets
boolean sd = this.attributes.startDisabled;
this.attributes = new PoolAttributes();
this.attributes.startDisabled = sd;
return this;
}
/**
* Initializes the state of this factory for the given pool's state.
*/
public void init(Pool cp) {
setFreeConnectionTimeout(cp.getFreeConnectionTimeout());
setLoadConditioningInterval(cp.getLoadConditioningInterval());
setSocketBufferSize(cp.getSocketBufferSize());
setReadTimeout(cp.getReadTimeout());
setMinConnections(cp.getMinConnections());
setMaxConnections(cp.getMaxConnections());
setRetryAttempts(cp.getRetryAttempts());
setIdleTimeout(cp.getIdleTimeout());
setPingInterval(cp.getPingInterval());
setStatisticInterval(cp.getStatisticInterval());
setThreadLocalConnections(cp.getThreadLocalConnections());
setSubscriptionEnabled(cp.getSubscriptionEnabled());
setPRSingleHopEnabled(cp.getPRSingleHopEnabled());
setSubscriptionRedundancy(cp.getSubscriptionRedundancy());
setSubscriptionMessageTrackingTimeout(cp.getSubscriptionMessageTrackingTimeout());
setSubscriptionAckInterval(cp.getSubscriptionAckInterval());
setServerGroup(cp.getServerGroup());
setMultiuserAuthentication(cp.getMultiuserAuthentication());
this.attributes.locators.addAll(cp.getLocators());
this.attributes.servers.addAll(cp.getServers());
}
/** Configures this factory for use by a gateway sender: no timeouts, no max. */
public void init(GatewaySender sender) {
this.attributes.setGateway(true);
this.attributes.setGatewaySender(sender);
setIdleTimeout(-1); // never time out
setLoadConditioningInterval(-1); // never time out
setMaxConnections(-1);
setMinConnections(0);
setThreadLocalConnections(true);
}
/**
* Create a new Pool for connecting a client to a set of GemFire Cache Servers.
* using this factory's settings for attributes.
*
* @param name the name of the connection pool, used when connecting regions to it
* @throws IllegalStateException if the connection pool name already exists
* @throws IllegalStateException if this factory does not have any locators or servers
* @return the newly created connection pool.
* @since 5.7
*/
public Pool create(String name) throws CacheException {
GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
if(cache != null) {
TypeRegistry registry = cache.getPdxRegistry();
// Notify the PDX registry only for client pools; gateway pools are excluded.
if(registry != null && !attributes.isGateway()) {
registry.creatingPool();
}
}
return PoolImpl.create(this.pm, name, this.attributes);
}
/**
* Needed by test framework.
*/
public PoolAttributes getPoolAttributes() {
return this.attributes;
}
/**
* Not a true pool just the attributes.
* Serialization is used by unit tests
*/
public static class PoolAttributes implements Pool, DataSerializable {
private static final long serialVersionUID = 1L; // for findbugs
public int connectionTimeout = DEFAULT_FREE_CONNECTION_TIMEOUT;
public int connectionLifetime = DEFAULT_LOAD_CONDITIONING_INTERVAL;
public int socketBufferSize = DEFAULT_SOCKET_BUFFER_SIZE;
public boolean threadLocalConnections = DEFAULT_THREAD_LOCAL_CONNECTIONS;
public int readTimeout = DEFAULT_READ_TIMEOUT;
public int minConnections = DEFAULT_MIN_CONNECTIONS;
public int maxConnections = DEFAULT_MAX_CONNECTIONS;
public long idleTimeout = DEFAULT_IDLE_TIMEOUT;
public int retryAttempts = DEFAULT_RETRY_ATTEMPTS;
public long pingInterval = DEFAULT_PING_INTERVAL;
public int statisticInterval = DEFAULT_STATISTIC_INTERVAL;
public boolean queueEnabled = DEFAULT_SUBSCRIPTION_ENABLED;
public boolean prSingleHopEnabled = DEFAULT_PR_SINGLE_HOP_ENABLED;
public int queueRedundancyLevel = DEFAULT_SUBSCRIPTION_REDUNDANCY;
public int queueMessageTrackingTimeout = DEFAULT_SUBSCRIPTION_MESSAGE_TRACKING_TIMEOUT;
public int queueAckInterval = DEFAULT_SUBSCRIPTION_ACK_INTERVAL;
public String serverGroup = DEFAULT_SERVER_GROUP;
public boolean multiuserSecureModeEnabled = DEFAULT_MULTIUSER_AUTHENTICATION;
public ArrayList/*<InetSocketAddress>*/ locators = new ArrayList();
public ArrayList/*<InetSocketAddress>*/ servers = new ArrayList();
public transient boolean startDisabled = false; // only used by junit tests
public transient LocatorDiscoveryCallback locatorCallback = null; //only used by tests
public GatewaySender gatewaySender = null;
/**
* True if the pool is used by a Gateway.
*/
public boolean gateway = false;
public int getFreeConnectionTimeout() {
return this.connectionTimeout;
}
public int getLoadConditioningInterval() {
return this.connectionLifetime;
}
public int getSocketBufferSize() {
return this.socketBufferSize;
}
public int getMinConnections() {
return minConnections;
}
public int getMaxConnections() {
return maxConnections;
}
public long getIdleTimeout() {
return idleTimeout;
}
public int getRetryAttempts() {
return retryAttempts;
}
public long getPingInterval() {
return pingInterval;
}
public int getStatisticInterval() {
return statisticInterval;
}
public boolean getThreadLocalConnections() {
return this.threadLocalConnections;
}
public int getReadTimeout() {
return this.readTimeout;
}
public boolean getSubscriptionEnabled() {
return this.queueEnabled;
}
public boolean getPRSingleHopEnabled() {
return this.prSingleHopEnabled;
}
public int getSubscriptionRedundancy() {
return this.queueRedundancyLevel;
}
public int getSubscriptionMessageTrackingTimeout() {
return this.queueMessageTrackingTimeout;
}
public int getSubscriptionAckInterval() {
return queueAckInterval;
}
public String getServerGroup() {
return this.serverGroup;
}
public boolean isGateway() {
return this.gateway;
}
public void setGateway(boolean v) {
this.gateway = v;
}
public void setGatewaySender(GatewaySender sender) {
this.gatewaySender = sender;
}
public GatewaySender getGatewaySender(){
return this.gatewaySender;
}
public boolean getMultiuserAuthentication() {
return this.multiuserSecureModeEnabled;
}
public void setMultiuserSecureModeEnabled(boolean v) {
this.multiuserSecureModeEnabled = v;
}
/** Returns an immutable snapshot of the locator list; throws if no endpoints at all. */
public List/*<InetSocketAddress>*/ getLocators() {
if (this.locators.size() == 0 && this.servers.size() == 0) {
throw new IllegalStateException("At least one locator or server must be added before a connection pool can be created.");
}
// needs to return a copy.
return Collections.unmodifiableList(new ArrayList(this.locators));
}
/** Returns an immutable snapshot of the server list; throws if no endpoints at all. */
public List/*<InetSocketAddress>*/ getServers() {
if (this.locators.size() == 0 && this.servers.size() == 0) {
throw new IllegalStateException("At least one locator or server must be added before a connection pool can be created.");
}
// needs to return a copy.
return Collections.unmodifiableList(new ArrayList(this.servers));
}
// The remaining Pool operations are unsupported: this class carries
// attributes only and is never used as a live pool.
public String getName() {
throw new UnsupportedOperationException();
}
public void destroy() throws CacheException {
throw new UnsupportedOperationException();
}
public void destroy(boolean keepAlive) throws CacheException {
throw new UnsupportedOperationException();
}
public boolean isDestroyed() {
throw new UnsupportedOperationException();
}
public void releaseThreadLocalConnection() {
throw new UnsupportedOperationException();
}
public QueryService getQueryService() {
throw new UnsupportedOperationException();
}
public int getPendingEventCount() {
throw new UnsupportedOperationException();
}
public RegionService createAuthenticatedCacheView(Properties properties) {
throw new UnsupportedOperationException();
}
// NOTE: the write order below defines the wire format; toData and fromData
// must stay exactly in sync (transient/gateway fields are not serialized).
public void toData(DataOutput out) throws IOException {
DataSerializer.writePrimitiveInt(this.connectionTimeout, out);
DataSerializer.writePrimitiveInt(this.connectionLifetime, out);
DataSerializer.writePrimitiveInt(this.socketBufferSize, out);
DataSerializer.writePrimitiveInt(this.readTimeout, out);
DataSerializer.writePrimitiveInt(this.minConnections, out);
DataSerializer.writePrimitiveInt(this.maxConnections, out);
DataSerializer.writePrimitiveInt(this.retryAttempts, out);
DataSerializer.writePrimitiveLong(this.idleTimeout, out);
DataSerializer.writePrimitiveLong(this.pingInterval, out);
DataSerializer.writePrimitiveInt(this.queueRedundancyLevel, out);
DataSerializer.writePrimitiveInt(this.queueMessageTrackingTimeout, out);
DataSerializer.writePrimitiveBoolean(this.threadLocalConnections, out);
DataSerializer.writePrimitiveBoolean(this.queueEnabled, out);
DataSerializer.writeString(this.serverGroup, out);
DataSerializer.writeArrayList(this.locators, out);
DataSerializer.writeArrayList(this.servers, out);
DataSerializer.writePrimitiveInt(this.statisticInterval, out);
DataSerializer.writePrimitiveBoolean(this.multiuserSecureModeEnabled,out);
}
// Reads fields in the exact order written by toData above.
public void fromData(DataInput in)
throws IOException, ClassNotFoundException
{
this.connectionTimeout = DataSerializer.readPrimitiveInt(in);
this.connectionLifetime = DataSerializer.readPrimitiveInt(in);
this.socketBufferSize = DataSerializer.readPrimitiveInt(in);
this.readTimeout = DataSerializer.readPrimitiveInt(in);
this.minConnections= DataSerializer.readPrimitiveInt(in);
this.maxConnections= DataSerializer.readPrimitiveInt(in);
this.retryAttempts= DataSerializer.readPrimitiveInt(in);
this.idleTimeout= DataSerializer.readPrimitiveLong(in);
this.pingInterval= DataSerializer.readPrimitiveLong(in);
this.queueRedundancyLevel = DataSerializer.readPrimitiveInt(in);
this.queueMessageTrackingTimeout = DataSerializer.readPrimitiveInt(in);
this.threadLocalConnections = DataSerializer.readPrimitiveBoolean(in);
this.queueEnabled = DataSerializer.readPrimitiveBoolean(in);
this.serverGroup = DataSerializer.readString(in);
this.locators = DataSerializer.readArrayList(in);
this.servers = DataSerializer.readArrayList(in);
this.statisticInterval= DataSerializer.readPrimitiveInt(in);
this.multiuserSecureModeEnabled = DataSerializer.readPrimitiveBoolean(in);
}
}
}
| |
/*
* Copyright (c) 2011, salesforce.com, inc.
* All rights reserved.
* Redistribution and use of this software in source and binary forms, with or
* without modification, are permitted provided that the following conditions
* are met:
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of salesforce.com, inc. nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission of salesforce.com, inc.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.androidsdk.phonegap;
import java.util.Arrays;
import java.util.List;
import com.salesforce.androidsdk.phonegap.util.test.JSTestCase;
/**
* Running javascript tests for SmartStore plugin
*/
/**
 * Runs the JavaScript tests of the SmartStore plugin. Each Java test method
 * simply delegates to the JS test with the same name inside the
 * "SmartStoreTestSuite" suite.
 */
public class SmartStoreJSTest extends JSTestCase {

    public SmartStoreJSTest() {
        super("SmartStoreTestSuite");
    }

    @Override
    protected int getMaxRuntimeInSecondsForTest(String testName) {
        // Every SmartStore JS test gets the same 30-second budget.
        return 30;
    }

    @Override
    public List<String> getTestNames() {
        // Keep this list in sync with the delegate methods below.
        return Arrays.asList(
                "testGetDatabaseSize",
                "testRegisterRemoveSoup",
                "testRegisterRemoveSoupGlobalStore",
                "testRegisterBogusSoup",
                "testRegisterSoupNoIndices",
                "testUpsertSoupEntries",
                "testUpsertSoupEntriesWithExternalId",
                "testUpsertToNonexistentSoup",
                "testRetrieveSoupEntries",
                "testRemoveFromSoup",
                "testRemoveFromSoupByQuery",
                "testQuerySoupWithExactQuery",
                "testQuerySoupWithAllQueryDescending",
                "testQuerySoupWithRangeQueryWithOrderPath",
                "testQuerySoupBadQuerySpec",
                "testQuerySoupEndKeyNoBeginKey",
                "testQuerySoupBeginKeyNoEndKey",
                "testManipulateCursor",
                "testMoveCursorToPreviousPageFromFirstPage",
                "testMoveCursorToNextPageFromLastPage",
                "testArbitrarySoupNames",
                "testQuerySpecFactories",
                "testLikeQuerySpecStartsWith",
                "testLikeQuerySpecEndsWith",
                "testLikeQueryInnerText",
                "testFullTextSearch",
                "testCompoundQueryPath",
                "testEmptyQuerySpec",
                "testIntegerQuerySpec",
                "testSmartQueryWithCount",
                "testSmartQueryWithSpecialFields",
                "testSmartQueryWithIntegerCompare",
                "testSmartQueryWithMultipleFieldsAndWhereInClause",
                "testSmartQueryWithSingleFieldAndWhereInClause",
                "testSmartQueryWithWhereLikeClause",
                "testSmartQueryWithWhereLikeClauseOrdered",
                "testGetSoupIndexSpecs",
                "testGetSoupIndexSpecsWithBogusSoupName",
                "testAlterSoupNoReIndexing",
                "testAlterSoupWithReIndexing",
                "testAlterSoupWithBogusSoupName",
                "testReIndexSoup",
                "testClearSoup",
                "testFullTextSearchAgainstArrayNode",
                "testLikeQueryAgainstArrayNode",
                "testExactQueryAgainstArrayNode",
                "testSmartQueryAgainstArrayNode");
    }

    // One delegate per JS test so the test runner reports them individually.
    public void testGetDatabaseSize() { runTest("testGetDatabaseSize"); }
    public void testRegisterRemoveSoup() { runTest("testRegisterRemoveSoup"); }
    public void testRegisterRemoveSoupGlobalStore() { runTest("testRegisterRemoveSoupGlobalStore"); }
    public void testRegisterBogusSoup() { runTest("testRegisterBogusSoup"); }
    public void testRegisterSoupNoIndices() { runTest("testRegisterSoupNoIndices"); }
    public void testUpsertSoupEntries() { runTest("testUpsertSoupEntries"); }
    public void testUpsertSoupEntriesWithExternalId() { runTest("testUpsertSoupEntriesWithExternalId"); }
    public void testUpsertToNonexistentSoup() { runTest("testUpsertToNonexistentSoup"); }
    public void testRetrieveSoupEntries() { runTest("testRetrieveSoupEntries"); }
    public void testRemoveFromSoup() { runTest("testRemoveFromSoup"); }
    public void testRemoveFromSoupByQuery() { runTest("testRemoveFromSoupByQuery"); }
    public void testQuerySoupWithExactQuery() { runTest("testQuerySoupWithExactQuery"); }
    public void testQuerySoupWithAllQueryDescending() { runTest("testQuerySoupWithAllQueryDescending"); }
    public void testQuerySoupWithRangeQueryWithOrderPath() { runTest("testQuerySoupWithRangeQueryWithOrderPath"); }
    public void testQuerySoupBadQuerySpec() { runTest("testQuerySoupBadQuerySpec"); }
    public void testQuerySoupEndKeyNoBeginKey() { runTest("testQuerySoupEndKeyNoBeginKey"); }
    public void testQuerySoupBeginKeyNoEndKey() { runTest("testQuerySoupBeginKeyNoEndKey"); }
    public void testManipulateCursor() { runTest("testManipulateCursor"); }
    public void testMoveCursorToPreviousPageFromFirstPage() { runTest("testMoveCursorToPreviousPageFromFirstPage"); }
    public void testMoveCursorToNextPageFromLastPage() { runTest("testMoveCursorToNextPageFromLastPage"); }
    public void testArbitrarySoupNames() { runTest("testArbitrarySoupNames"); }
    public void testQuerySpecFactories() { runTest("testQuerySpecFactories"); }
    public void testLikeQuerySpecStartsWith() { runTest("testLikeQuerySpecStartsWith"); }
    public void testLikeQuerySpecEndsWith() { runTest("testLikeQuerySpecEndsWith"); }
    public void testLikeQueryInnerText() { runTest("testLikeQueryInnerText"); }
    public void testFullTextSearch() { runTest("testFullTextSearch"); }
    public void testCompoundQueryPath() { runTest("testCompoundQueryPath"); }
    public void testEmptyQuerySpec() { runTest("testEmptyQuerySpec"); }
    public void testIntegerQuerySpec() { runTest("testIntegerQuerySpec"); }
    public void testSmartQueryWithCount() { runTest("testSmartQueryWithCount"); }
    public void testSmartQueryWithSpecialFields() { runTest("testSmartQueryWithSpecialFields"); }
    public void testSmartQueryWithIntegerCompare() { runTest("testSmartQueryWithIntegerCompare"); }
    public void testSmartQueryWithMultipleFieldsAndWhereInClause() { runTest("testSmartQueryWithMultipleFieldsAndWhereInClause"); }
    public void testSmartQueryWithSingleFieldAndWhereInClause() { runTest("testSmartQueryWithSingleFieldAndWhereInClause"); }
    public void testSmartQueryWithWhereLikeClause() { runTest("testSmartQueryWithWhereLikeClause"); }
    public void testSmartQueryWithWhereLikeClauseOrdered() { runTest("testSmartQueryWithWhereLikeClauseOrdered"); }
    public void testGetSoupIndexSpecs() { runTest("testGetSoupIndexSpecs"); }
    public void testGetSoupIndexSpecsWithBogusSoupName() { runTest("testGetSoupIndexSpecsWithBogusSoupName"); }
    public void testAlterSoupNoReIndexing() { runTest("testAlterSoupNoReIndexing"); }
    public void testAlterSoupWithReIndexing() { runTest("testAlterSoupWithReIndexing"); }
    public void testAlterSoupWithBogusSoupName() { runTest("testAlterSoupWithBogusSoupName"); }
    public void testReIndexSoup() { runTest("testReIndexSoup"); }
    public void testClearSoup() { runTest("testClearSoup"); }
    public void testFullTextSearchAgainstArrayNode() { runTest("testFullTextSearchAgainstArrayNode"); }
    public void testLikeQueryAgainstArrayNode() { runTest("testLikeQueryAgainstArrayNode"); }
    public void testExactQueryAgainstArrayNode() { runTest("testExactQueryAgainstArrayNode"); }
    public void testSmartQueryAgainstArrayNode() { runTest("testSmartQueryAgainstArrayNode"); }
}
| |
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.biodata.models.variant;
import java.io.Serializable;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
* @author Cristina Yenyxe Gonzalez Garcia <cyenyxe@ebi.ac.uk>
*/
/**
 * Metadata describing a variant study: identification, species, experimental
 * setup, and the list of variant sources (files) it aggregates.
 *
 * Fix over the previous version: {@link #addSource(VariantSource)} no longer
 * throws a NullPointerException when the study was built through the
 * convenience constructors, which leave {@code sources} null.
 *
 * @author Cristina Yenyxe Gonzalez Garcia <cyenyxe@ebi.ac.uk>
 */
public class VariantStudy implements Serializable {

    /** Kind of study design; each constant carries its human-readable label. */
    public enum StudyType {
        COLLECTION("Collection"),
        FAMILY("Family"),
        TRIO("Trio"),
        CONTROL("Control Set"),
        CASE("Case Set"),
        CASE_CONTROL("Case-Control"),
        PAIRED("Paired"),
        PAIRED_TUMOR("Tumor vs. Matched-Normal"),
        TIME_SERIES("Time Series"),
        AGGREGATE("Aggregate");

        private final String symbol;

        private StudyType(String symbol) {
            this.symbol = symbol;
        }

        @Override
        public String toString() {
            return symbol;
        }

        // Reverse lookup from human-readable label to enum constant.
        private static final Map<String, StudyType> stringToEnum = new HashMap<>();
        static { // Initialize map from constant name to enum constant
            for (StudyType op : values()) {
                stringToEnum.put(op.toString(), op);
            }
        }

        /** Returns the StudyType for the given label, or null if unrecognized. */
        public static StudyType fromString(String symbol) {
            return stringToEnum.get(symbol);
        }
    };

    private String name;
    private String id;
    private String description;
    // NCBI taxonomy ids of the studied species (may list several).
    private int[] taxonomyId;
    private String speciesCommonName;
    private String speciesScientificName;
    private String sourceType;
    private String center;
    private String material;
    private String scope;
    private StudyType type;
    private String experimentType;
    private String experimentTypeAbbreviation;
    private String assembly;
    private String platform;
    private URI url;
    // -1 means "unknown" for both counters (see convenience constructors).
    private int numVariants;
    private int numSamples;
    private List<VariantSource> sources;

    /** Creates an empty study (name and id unset). */
    public VariantStudy() {
        this(null, null);
    }

    /** Creates a study with name and id only; sources stay null until set. */
    public VariantStudy(String studyName, String studyId) {
        this(studyName, studyId, null);
    }

    /** Creates a study with the given sources; type defaults to COLLECTION, counters to -1 (unknown). */
    public VariantStudy(String studyName, String studyId, List<VariantSource> sources) {
        this(studyName, studyId, sources, null, null, null, null, null, null, null, null, StudyType.COLLECTION,
                null, null, null, null, null, -1, -1);
    }

    /** Full constructor: every descriptive attribute is provided explicitly. */
    public VariantStudy(String studyName, String studyId, List<VariantSource> sources, String description, int[] speciesId,
            String speciesCommonName, String speciesScientificName, String sourceType, String center, String material,
            String scope, StudyType type, String experimentType, String experimentTypeAbbreviation, String referenceAssembly,
            String platform, URI projectUrl, int numVariants, int numSamples) {
        this.name = studyName;
        this.id = studyId;
        this.description = description;
        this.taxonomyId = speciesId;
        this.speciesCommonName = speciesCommonName;
        this.speciesScientificName = speciesScientificName;
        this.sourceType = sourceType;
        this.center = center;
        this.material = material;
        this.scope = scope;
        this.type = type;
        this.experimentType = experimentType;
        this.experimentTypeAbbreviation = experimentTypeAbbreviation;
        this.assembly = referenceAssembly;
        this.platform = platform;
        this.url = projectUrl;
        this.numVariants = numVariants;
        this.numSamples = numSamples;
        this.sources = sources;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public int[] getTaxonomyId() {
        return taxonomyId;
    }

    public void setTaxonomyId(int[] taxonomyId) {
        this.taxonomyId = taxonomyId;
    }

    public String getSpeciesCommonName() {
        return speciesCommonName;
    }

    public void setSpeciesCommonName(String speciesCommonName) {
        this.speciesCommonName = speciesCommonName;
    }

    public String getSpeciesScientificName() {
        return speciesScientificName;
    }

    public void setSpeciesScientificName(String speciesScientificName) {
        this.speciesScientificName = speciesScientificName;
    }

    public String getSourceType() {
        return sourceType;
    }

    public void setSourceType(String sourceType) {
        this.sourceType = sourceType;
    }

    public String getCenter() {
        return center;
    }

    public void setCenter(String center) {
        this.center = center;
    }

    public String getMaterial() {
        return material;
    }

    public void setMaterial(String material) {
        this.material = material;
    }

    public String getScope() {
        return scope;
    }

    public void setScope(String scope) {
        this.scope = scope;
    }

    public StudyType getType() {
        return type;
    }

    public void setType(StudyType type) {
        this.type = type;
    }

    /** Returns the human-readable label of the study type, or null if the type is unset. */
    public String getTypeName() {
        return type != null ? type.symbol : null;
    }

    public String getExperimentType() {
        return experimentType;
    }

    public void setExperimentType(String experimentType) {
        this.experimentType = experimentType;
    }

    public String getExperimentTypeAbbreviation() {
        return experimentTypeAbbreviation;
    }

    public void setExperimentTypeAbbreviation(String experimentTypeAbbreviation) {
        this.experimentTypeAbbreviation = experimentTypeAbbreviation;
    }

    public String getAssembly() {
        return assembly;
    }

    public void setAssembly(String assembly) {
        this.assembly = assembly;
    }

    public String getPlatform() {
        return platform;
    }

    public void setPlatform(String platform) {
        this.platform = platform;
    }

    public int getNumVariants() {
        return numVariants;
    }

    public URI getUrl() {
        return url;
    }

    public void setUrl(URI url) {
        this.url = url;
    }

    public void setNumVariants(int numVariants) {
        this.numVariants = numVariants;
    }

    public int getNumSamples() {
        return numSamples;
    }

    public void setNumSamples(int numSamples) {
        this.numSamples = numSamples;
    }

    public List<VariantSource> getSources() {
        return sources;
    }

    public void setSources(List<VariantSource> sources) {
        this.sources = sources;
    }

    /**
     * Appends a source to this study.
     *
     * @return true (as specified by {@link List#add})
     */
    public boolean addSource(VariantSource source) {
        if (this.sources == null) {
            // Convenience constructors leave sources null; create it lazily so
            // addSource does not throw a NullPointerException.
            this.sources = new ArrayList<>();
        }
        return this.sources.add(source);
    }
}
| |
/**
* <copyright>
* </copyright>
*
*/
package cruise.umple.umple.impl;
import cruise.umple.umple.TraceCase_;
import cruise.umple.umple.TraceDirective_;
import cruise.umple.umple.Trace_;
import cruise.umple.umple.UmplePackage;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Trace </b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link cruise.umple.umple.impl.Trace_Impl#getTraceDirective_1 <em>Trace Directive 1</em>}</li>
* <li>{@link cruise.umple.umple.impl.Trace_Impl#getTraceCase_1 <em>Trace Case 1</em>}</li>
* </ul>
* </p>
*
* @generated
*/
// NOTE(review): this class is EMF-generated (see the @generated tags). Do not
// hand-edit logic here — changes will be lost on the next model regeneration.
// Only explanatory comments were added; all code is unchanged.
public class Trace_Impl extends MinimalEObjectImpl.Container implements Trace_
{
/**
 * The cached value of the '{@link #getTraceDirective_1() <em>Trace Directive 1</em>}' containment reference list.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getTraceDirective_1()
 * @generated
 * @ordered
 */
// Lazily initialized; stays null until getTraceDirective_1() is first called.
protected EList<TraceDirective_> traceDirective_1;
/**
 * The cached value of the '{@link #getTraceCase_1() <em>Trace Case 1</em>}' containment reference list.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getTraceCase_1()
 * @generated
 * @ordered
 */
// Lazily initialized; stays null until getTraceCase_1() is first called.
protected EList<TraceCase_> traceCase_1;
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected Trace_Impl()
{
super();
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Binds this implementation class to its EClass in the generated package metadata.
@Override
protected EClass eStaticClass()
{
return UmplePackage.eINSTANCE.getTrace_();
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Lazy accessor: creates the containment list on first use so that empty
// instances carry no list allocation.
public EList<TraceDirective_> getTraceDirective_1()
{
if (traceDirective_1 == null)
{
traceDirective_1 = new EObjectContainmentEList<TraceDirective_>(TraceDirective_.class, this, UmplePackage.TRACE___TRACE_DIRECTIVE_1);
}
return traceDirective_1;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Lazy accessor, same pattern as getTraceDirective_1().
public EList<TraceCase_> getTraceCase_1()
{
if (traceCase_1 == null)
{
traceCase_1 = new EObjectContainmentEList<TraceCase_>(TraceCase_.class, this, UmplePackage.TRACE___TRACE_CASE_1);
}
return traceCase_1;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Reflective removal hook: detaches a contained child from the matching
// containment list when the child is removed from the other end.
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
{
switch (featureID)
{
case UmplePackage.TRACE___TRACE_DIRECTIVE_1:
return ((InternalEList<?>)getTraceDirective_1()).basicRemove(otherEnd, msgs);
case UmplePackage.TRACE___TRACE_CASE_1:
return ((InternalEList<?>)getTraceCase_1()).basicRemove(otherEnd, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Reflective getter used by the EMF runtime (editing framework, serialization).
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType)
{
switch (featureID)
{
case UmplePackage.TRACE___TRACE_DIRECTIVE_1:
return getTraceDirective_1();
case UmplePackage.TRACE___TRACE_CASE_1:
return getTraceCase_1();
}
return super.eGet(featureID, resolve, coreType);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Reflective setter: replaces the full contents of the addressed list.
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue)
{
switch (featureID)
{
case UmplePackage.TRACE___TRACE_DIRECTIVE_1:
getTraceDirective_1().clear();
getTraceDirective_1().addAll((Collection<? extends TraceDirective_>)newValue);
return;
case UmplePackage.TRACE___TRACE_CASE_1:
getTraceCase_1().clear();
getTraceCase_1().addAll((Collection<? extends TraceCase_>)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Reflective unset: clearing the list is the "unset" state for list features.
@Override
public void eUnset(int featureID)
{
switch (featureID)
{
case UmplePackage.TRACE___TRACE_DIRECTIVE_1:
getTraceDirective_1().clear();
return;
case UmplePackage.TRACE___TRACE_CASE_1:
getTraceCase_1().clear();
return;
}
super.eUnset(featureID);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Reflective isSet check: reads the field directly (not the lazy accessor)
// so that the query itself does not allocate the list.
@Override
public boolean eIsSet(int featureID)
{
switch (featureID)
{
case UmplePackage.TRACE___TRACE_DIRECTIVE_1:
return traceDirective_1 != null && !traceDirective_1.isEmpty();
case UmplePackage.TRACE___TRACE_CASE_1:
return traceCase_1 != null && !traceCase_1.isEmpty();
}
return super.eIsSet(featureID);
}
} //Trace_Impl
| |
package org.kie.services.client.api.command;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.verify;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.XMLGregorianCalendar;
import org.jbpm.services.task.commands.AddTaskCommand;
import org.junit.Test;
import org.kie.internal.jaxb.StringKeyObjectValueMapXmlAdapter;
import org.kie.remote.client.jaxb.AcceptedClientCommands;
import org.kie.remote.jaxb.gen.ActivateTaskCommand;
import org.kie.remote.jaxb.gen.DeleteCommand;
import org.kie.remote.jaxb.gen.TaskCommand;
import org.mockito.Mockito;
import org.reflections.Reflections;
import org.reflections.scanners.FieldAnnotationsScanner;
import org.reflections.scanners.SubTypesScanner;
import org.reflections.scanners.TypeAnnotationsScanner;
import org.reflections.util.ClasspathHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.sun.xml.bind.marshaller.CharacterEscapeHandler;
/**
 * Round-trip tests for the generated remote command classes: every accepted
 * command class is instantiated via reflection, its fields are filled with
 * random values, it is marshalled to XML via JAXB and unmarshalled back into
 * the original (non-generated) command class, and all non-transient fields of
 * the result are checked to be non-null.
 */
public class RemoteCommandObjectTest {

    private static final Logger logger = LoggerFactory.getLogger(RemoteCommandObjectTest.class);

    // Scans both the original command packages and the generated JAXB package so
    // the test can pair each original command class with its generated twin.
    private static Reflections reflections = new Reflections(ClasspathHelper.forPackage("org.kie.remote.command"),
            ClasspathHelper.forPackage("org.jbpm.services.task.commands"),
            ClasspathHelper.forPackage("org.jbpm.process.audit.command"),
            ClasspathHelper.forPackage("org.kie.remote.jaxb.gen"),
            new TypeAnnotationsScanner(),
            new FieldAnnotationsScanner(), new SubTypesScanner());

    private DatatypeFactory datatypeFactory;

    public RemoteCommandObjectTest() throws Exception {
        datatypeFactory = DatatypeFactory.newInstance();
    }

    @Test
    public void isAcceptableCommandTest() throws Exception {
        assertTrue( "The .isAcceptedCommandClass method is not functioning correctly: generated classes",
                AcceptedClientCommands.isAcceptedCommandClass(org.kie.remote.jaxb.gen.AddTaskCommand.class));
        assertFalse( "The .isAcceptedCommandClass method is not functioning correctly: original command classes",
                AcceptedClientCommands.isAcceptedCommandClass(org.jbpm.services.task.commands.AddTaskCommand.class));
    }

    @Test
    public void reflectionMappingTest() throws Exception {
        Set<Class<?>> classes = reflections.getTypesAnnotatedWith(XmlAccessorType.class);
        assertFalse(classes.isEmpty());
        // Sort by simple name so failures are reported in a deterministic order.
        List<Class<?>> classList = new LinkedList<Class<?>>(classes);
        Collections.sort(classList, new Comparator<Class<?>>() {
            public int compare( Class<?> o1, Class<?> o2 ) {
                return o1.getSimpleName().compareTo(o2.getSimpleName());
            }
        });
        classes = new LinkedHashSet<Class<?>>(classList);
        Map<Class, Class> kieCmdGenCmdClassMap = new LinkedHashMap<Class, Class>();
        for( Class<?> cmdClass : classes ) {
            if( ! cmdClass.getSimpleName().endsWith("Command") ) {
                continue;
            }
            if( ! AcceptedClientCommands.isAcceptedCommandClass(cmdClass) ) {
                continue;
            }
            // Each accepted command must have a generated counterpart with the same simple name.
            Class genCmdClass = Class.forName("org.kie.remote.jaxb.gen." + cmdClass.getSimpleName());
            assertTrue( genCmdClass.getSimpleName() + " is not an accepted command?",
                    AcceptedClientCommands.isAcceptedCommandClass(genCmdClass) );
            kieCmdGenCmdClassMap.put(cmdClass, genCmdClass);
        }
        assertFalse("No generated command classes could be found to test.", kieCmdGenCmdClassMap.isEmpty());
        for( Entry<Class, Class> classEntry : kieCmdGenCmdClassMap.entrySet() ) {
            compareKieClassInstanceToGenClassInstance(classEntry.getKey(), classEntry.getValue());
        }
    }

    /**
     * Fills a generated command instance, round-trips it through XML into the
     * original command class and asserts that no non-transient field was lost.
     */
    private void compareKieClassInstanceToGenClassInstance( Class kieCmdClass, Class genCmdClass ) throws Exception {
        Object genCmd = fillCommand(genCmdClass);
        if( DeleteCommand.class.equals(genCmdClass) ) {
            // DeleteCommand needs a parseable fact-handle string, not a random one.
            ((DeleteCommand) genCmd).setFactHandle( ":1:2:3:4:5:TRAIT" );
        }
        Object copyKieCmd = roundTripFromFlatToOrigCmd(genCmd, genCmdClass, kieCmdClass);
        for( Field field : kieCmdClass.getDeclaredFields() ) {
            if( field.getAnnotation(XmlTransient.class) != null) {
                continue;
            }
            field.setAccessible(true);
            Object kieCmdFieldVal = field.get(copyKieCmd);
            assertNotNull( kieCmdClass.getSimpleName() + "." + field.getName(), kieCmdFieldVal );
        }
    }

    private static Random random = new Random();

    /**
     * Instantiates the command class and fills its declared fields (and its
     * direct superclass's fields) with random values.
     */
    private Object fillCommand( Class cmdClass ) throws Exception {
        Object cmdObj = cmdClass.getConstructor().newInstance();
        Field[] fields = cmdClass.getDeclaredFields();
        for( Field field : fields ) {
            if( Modifier.isStatic(field.getModifiers()) ) {
                continue;
            }
            fillField(field, cmdObj);
        }
        if( cmdClass.getSuperclass() != null ) {
            for( Field taskField : cmdClass.getSuperclass().getDeclaredFields() ) {
                if( Modifier.isStatic(taskField.getModifiers()) ) {
                    continue;
                }
                fillField(taskField, cmdObj);
            }
        }
        return cmdObj;
    }

    /**
     * Recursively sets a single field to a random (but type-appropriate) value.
     * Fails the test when an unsupported field type is encountered so that new
     * field types in command classes are noticed.
     */
    private void fillField( Field field, Object obj ) throws Exception {
        field.setAccessible(true);
        String fieldTypeName = field.getType().getName();
        Class fieldType = field.getType();
        if( fieldTypeName.startsWith("java") || !fieldTypeName.contains(".") ) {
            if( fieldType.equals(String.class) ) {
                if( "className".equals(field.getName()) ) {
                    // "className" fields must hold a resolvable class name.
                    field.set(obj, String.class.getName() );
                } else {
                    field.set(obj, UUID.randomUUID().toString());
                }
            } else if( fieldType.equals(Integer.class) || fieldType.equals(int.class) ) {
                field.set(obj, random.nextInt());
            } else if( fieldType.equals(Long.class) || fieldType.equals(long.class) ) {
                field.set(obj, random.nextLong());
            } else if( fieldType.equals(Boolean.class) || fieldType.equals(boolean.class) ) {
                field.set(obj, random.nextBoolean());
            } else if( fieldType.equals(Map.class) ) {
                Map<String, Object> val = new HashMap<String, Object>();
                for( int i = 0; i < 3; ++i ) {
                    val.put(UUID.randomUUID().toString(), random.nextLong());
                }
                field.set(obj, val);
            } else if( fieldType.equals(Date.class) ) {
                field.set(obj, new Date());
            } else if( fieldType.equals(List.class) ) {
                // Use the generic type argument to build two list elements.
                ParameterizedType fieldGenericType = (ParameterizedType) field.getGenericType();
                Type listType = fieldGenericType.getActualTypeArguments()[0];
                List<Object> list = new ArrayList<Object>();
                Class listItemClass = Class.forName(((Class) listType).getName());
                for( int i = 0; i < 2; ++i ) {
                    Object listItem = null;
                    if( listItemClass.isEnum() ) {
                        Object[] enumConstants = listItemClass.getEnumConstants();
                        listItem = enumConstants[random.nextInt(enumConstants.length)];
                    } else if( listItemClass.getName().startsWith("java") ) {
                        if( Long.class.equals(listItemClass) ) {
                            listItem = random.nextLong();
                        } else if( String.class.equals(listItemClass) ) {
                            listItem = UUID.randomUUID().toString();
                        } else if( Object.class.equals(listItemClass) ) {
                            listItem = "Object";
                        } else {
                            fail(listItemClass.getName());
                        }
                    } else {
                        if( TaskCommand.class.equals(listItemClass) ) {
                            // TaskCommand is abstract; use a concrete subclass instance.
                            ActivateTaskCommand cmd = new ActivateTaskCommand();
                            cmd.setTaskId(random.nextLong());
                            cmd.setUserId(UUID.randomUUID().toString());
                            listItem = cmd;
                        } else {
                            listItem = listItemClass.getConstructor().newInstance();
                            for( Field listItemField : listItemClass.getDeclaredFields() ) {
                                fillField(listItemField, listItem);
                            }
                        }
                    }
                    list.add(listItem);
                }
                field.set(obj, list);
            } else if( fieldType.equals(XMLGregorianCalendar.class) ) {
                XMLGregorianCalendar date = datatypeFactory.newXMLGregorianCalendar(new GregorianCalendar());
                field.set(obj, date);
            } else if( fieldType.equals(Object.class) ) {
                field.set(obj, "Object");
            } else if( fieldType.equals(byte[].class) ) {
                byte [] value = StringKeyObjectValueMapXmlAdapter.serializeObject("Object", "test");
                field.set(obj, value);
            } else {
                fail("> " + obj.getClass().getSimpleName() + "." + field.getName() + ": " + fieldType.getName());
            }
        } else if( fieldType.isEnum() ) {
            Object [] enumConstants = fieldType.getEnumConstants();
            field.set(obj, enumConstants[random.nextInt(enumConstants.length)]);
        } else {
            if( fieldType.equals(org.kie.remote.jaxb.gen.TaskCommand.class) ) {
                ActivateTaskCommand cmd = new ActivateTaskCommand();
                cmd.setTaskId(2l);
                cmd.setUserId(UUID.randomUUID().toString());
                field.set(obj, cmd);
            } else {
                if( field.getAnnotation(XmlTransient.class) != null ) {
                    return;
                }
                // Recurse into nested (project-defined) objects.
                Object subObj = fieldType.getConstructor().newInstance();
                field.set(obj, subObj);
                for( Field subField : field.getType().getDeclaredFields() ) {
                    fillField( subField, subObj);
                }
            }
        }
    }

    /** Marshals the flat (generated) command to XML, then unmarshals it as the original command class. */
    private Object roundTripFromFlatToOrigCmd( Object flatCmd, Class flatCmdClass, Class origCmdClass ) throws Exception {
        String xmlStr = serialize(flatCmd, flatCmdClass);
        logger.debug("{}:\n{}---", flatCmdClass.getSimpleName(), xmlStr);
        return deserialize(xmlStr, origCmdClass);
    }

    private String serialize( Object object, Class cmdClass ) throws Exception {
        Marshaller marshaller = null;
        marshaller = getJaxbContext(cmdClass).createMarshaller();
        marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
        // Disable character escaping so the XML matches what the remote client produces.
        marshaller.setProperty(CharacterEscapeHandler.class.getName(), new CharacterEscapeHandler() {
            public void escape( char[] ac, int i, int j, boolean flag, Writer writer ) throws IOException {
                writer.write(ac, i, j);
            }
        });
        StringWriter stringWriter = new StringWriter();
        try {
            marshaller.marshal(object, stringWriter);
        } catch( Exception e ) {
            e.printStackTrace();
            fail( "Could not marshal " + cmdClass.getSimpleName() );
        }
        String output = stringWriter.toString();
        return output;
    }

    private <T> T deserialize( String xmlStr, Class<T> cmdClass ) throws Exception {
        Unmarshaller unmarshaller = null;
        unmarshaller = getJaxbContext(cmdClass).createUnmarshaller();
        ByteArrayInputStream xmlStrInputStream = new ByteArrayInputStream(xmlStr.getBytes(Charset.forName("UTF-8")));
        T jaxbObj = (T) unmarshaller.unmarshal(xmlStrInputStream);
        return jaxbObj;
    }

    private JAXBContext getJaxbContext( Class... classes ) throws Exception {
        return JAXBContext.newInstance(classes);
    }

    @Test
    public void preprocessTest() throws Exception {
        // FIX: was "http://localhost:80808" — 80808 exceeds the maximum TCP port
        // (65535); java.net.URL does not range-check, so the typo went unnoticed.
        // The URL is never connected to in this test, only stored in the config.
        RemoteConfiguration config = new RemoteConfiguration("adsf", new URL("http://localhost:8080"), "user", "pwd" );
        KieSessionClientCommandObject spyCmdObj = Mockito.spy(new KieSessionClientCommandObject(config));
        List<Object> objList = new ArrayList<Object>();
        Field paramClassesField = AcceptedClientCommands.class.getDeclaredField("sendObjectParameterCommandClasses");
        paramClassesField.setAccessible(true);
        Set<Class> sendObjectParameterClasses = (Set<Class>) paramClassesField.get(null);
        for( Class clientClass : sendObjectParameterClasses ) {
            Object inst = clientClass.getConstructor(new Class[0]).newInstance(new Object[0]);
            spyCmdObj.preprocessCommand(inst, objList);
            logger.debug( "Are {} instances checked for user-defined classes?", clientClass.getSimpleName() );
            verify(spyCmdObj, atLeastOnce()).addPossiblyNullObject(any(), any(List.class));
        }
    }
}
| |
package com.fsck.k9.preferences;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import android.content.SharedPreferences;
import android.os.Environment;
import com.fsck.k9.Account;
import com.fsck.k9.FontSizes;
import com.fsck.k9.K9;
import com.fsck.k9.K9.NotificationHideSubject;
import com.fsck.k9.K9.NotificationQuickDelete;
import com.fsck.k9.K9.SplitViewMode;
import com.fsck.k9.K9.Theme;
import com.fsck.k9.R;
import com.fsck.k9.Account.SortType;
import com.fsck.k9.preferences.Settings.*;
import static com.fsck.k9.K9.LockScreenNotificationVisibility;
public class GlobalSettings {
// SETTINGS maps each preference key to its per-version description; a null
// description at version N marks the setting as removed in that version.
public static final Map<String, TreeMap<Integer, SettingsDescription>> SETTINGS;
// UPGRADERS maps a target settings version to the upgrader that migrates
// values from the previous version to it.
public static final Map<Integer, SettingsUpgrader> UPGRADERS;
// Registration order matters for export readability (LinkedHashMap preserves it).
static {
Map<String, TreeMap<Integer, SettingsDescription>> s =
new LinkedHashMap<String, TreeMap<Integer, SettingsDescription>>();
/**
 * When adding new settings here, be sure to increment {@link Settings.VERSION}
 * and use that for whatever you add here.
 */
s.put("animations", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("attachmentdefaultpath", Settings.versions(
new V(1, new DirectorySetting(Environment.getExternalStorageDirectory().toString()))
));
s.put("backgroundOperations", Settings.versions(
new V(1, new EnumSetting<K9.BACKGROUND_OPS>(
K9.BACKGROUND_OPS.class, K9.BACKGROUND_OPS.WHEN_CHECKED_AUTO_SYNC))
));
s.put("changeRegisteredNameColor", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("confirmDelete", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("confirmDeleteStarred", Settings.versions(
new V(2, new BooleanSetting(false))
));
s.put("confirmSpam", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("countSearchMessages", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("enableDebugLogging", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("enableSensitiveLogging", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("fontSizeAccountDescription", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeAccountName", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeFolderName", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeFolderStatus", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageComposeInput", Settings.versions(
new V(5, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageListDate", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageListPreview", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageListSender", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageListSubject", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageViewAdditionalHeaders", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageViewCC", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
// Removed in version 31; replaced by fontSizeMessageViewContentPercent
// (see SettingsUpgraderV31 below).
s.put("fontSizeMessageViewContent", Settings.versions(
new V(1, new WebFontSizeSetting(3)),
new V(31, null)
));
s.put("fontSizeMessageViewDate", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageViewSender", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageViewSubject", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageViewTime", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
s.put("fontSizeMessageViewTo", Settings.versions(
new V(1, new FontSizeSetting(FontSizes.FONT_DEFAULT))
));
// Default flipped from true to false in version 4.
s.put("gesturesEnabled", Settings.versions(
new V(1, new BooleanSetting(true)),
new V(4, new BooleanSetting(false))
));
s.put("hideSpecialAccounts", Settings.versions(
new V(1, new BooleanSetting(false))
));
// Removed in version 12; migrated to notificationHideSubject
// (see SettingsUpgraderV12 below).
s.put("keyguardPrivacy", Settings.versions(
new V(1, new BooleanSetting(false)),
new V(12, null)
));
s.put("language", Settings.versions(
new V(1, new LanguageSetting())
));
s.put("measureAccounts", Settings.versions(
new V(1, new BooleanSetting(true))
));
s.put("messageListCheckboxes", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("messageListPreviewLines", Settings.versions(
new V(1, new IntegerRangeSetting(1, 100, 2))
));
s.put("messageListStars", Settings.versions(
new V(1, new BooleanSetting(true))
));
s.put("messageViewFixedWidthFont", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("messageViewReturnToList", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("messageViewShowNext", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("quietTimeEnabled", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("quietTimeEnds", Settings.versions(
new V(1, new TimeSetting("7:00"))
));
s.put("quietTimeStarts", Settings.versions(
new V(1, new TimeSetting("21:00"))
));
s.put("registeredNameColor", Settings.versions(
new V(1, new ColorSetting(0xFF00008F))
));
s.put("showContactName", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("showCorrespondentNames", Settings.versions(
new V(1, new BooleanSetting(true))
));
s.put("sortTypeEnum", Settings.versions(
new V(10, new EnumSetting<SortType>(SortType.class, Account.DEFAULT_SORT_TYPE))
));
s.put("sortAscending", Settings.versions(
new V(10, new BooleanSetting(Account.DEFAULT_SORT_ASCENDING))
));
s.put("startIntegratedInbox", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("theme", Settings.versions(
new V(1, new ThemeSetting(K9.Theme.LIGHT))
));
// Since version 24 the message view may follow the global theme
// (see SettingsUpgraderV24 below).
s.put("messageViewTheme", Settings.versions(
new V(16, new ThemeSetting(K9.Theme.LIGHT)),
new V(24, new SubThemeSetting(K9.Theme.USE_GLOBAL))
));
s.put("useVolumeKeysForListNavigation", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("useVolumeKeysForNavigation", Settings.versions(
new V(1, new BooleanSetting(false))
));
s.put("wrapFolderNames", Settings.versions(
new V(22, new BooleanSetting(false))
));
s.put("notificationHideSubject", Settings.versions(
new V(12, new EnumSetting<NotificationHideSubject>(
NotificationHideSubject.class, NotificationHideSubject.NEVER))
));
s.put("useBackgroundAsUnreadIndicator", Settings.versions(
new V(19, new BooleanSetting(true))
));
s.put("threadedView", Settings.versions(
new V(20, new BooleanSetting(true))
));
s.put("splitViewMode", Settings.versions(
new V(23, new EnumSetting<SplitViewMode>(SplitViewMode.class, SplitViewMode.NEVER))
));
s.put("messageComposeTheme", Settings.versions(
new V(24, new SubThemeSetting(K9.Theme.USE_GLOBAL))
));
s.put("fixedMessageViewTheme", Settings.versions(
new V(24, new BooleanSetting(true))
));
s.put("showContactPicture", Settings.versions(
new V(25, new BooleanSetting(true))
));
s.put("autofitWidth", Settings.versions(
new V(28, new BooleanSetting(true))
));
s.put("colorizeMissingContactPictures", Settings.versions(
new V(29, new BooleanSetting(true))
));
s.put("messageViewDeleteActionVisible", Settings.versions(
new V(30, new BooleanSetting(true))
));
s.put("messageViewArchiveActionVisible", Settings.versions(
new V(30, new BooleanSetting(false))
));
s.put("messageViewMoveActionVisible", Settings.versions(
new V(30, new BooleanSetting(false))
));
s.put("messageViewCopyActionVisible", Settings.versions(
new V(30, new BooleanSetting(false))
));
s.put("messageViewSpamActionVisible", Settings.versions(
new V(30, new BooleanSetting(false))
));
s.put("fontSizeMessageViewContentPercent", Settings.versions(
new V(31, new IntegerRangeSetting(40, 250, 100))
));
s.put("hideUserAgent", Settings.versions(
new V(32, new BooleanSetting(false))
));
s.put("hideTimeZone", Settings.versions(
new V(32, new BooleanSetting(false))
));
s.put("lockScreenNotificationVisibility", Settings.versions(
new V(37, new EnumSetting<LockScreenNotificationVisibility>(LockScreenNotificationVisibility.class,
LockScreenNotificationVisibility.MESSAGE_COUNT))
));
s.put("confirmDeleteFromNotification", Settings.versions(
new V(38, new BooleanSetting(true))
));
s.put("messageListSenderAboveSubject", Settings.versions(
new V(38, new BooleanSetting(false))
));
s.put("notificationQuickDelete", Settings.versions(
new V(38, new EnumSetting<NotificationQuickDelete>(NotificationQuickDelete.class,
NotificationQuickDelete.NEVER))
));
SETTINGS = Collections.unmodifiableMap(s);
// Upgraders keyed by the version they upgrade TO; the nested classes below
// document each migration.
Map<Integer, SettingsUpgrader> u = new HashMap<Integer, SettingsUpgrader>();
u.put(12, new SettingsUpgraderV12());
u.put(24, new SettingsUpgraderV24());
u.put(31, new SettingsUpgraderV31());
UPGRADERS = Collections.unmodifiableMap(u);
}
/**
 * Validates imported global settings against the descriptions registered in
 * {@link #SETTINGS} for the given settings-file version.
 *
 * @param version version of the settings file being imported
 * @param importedSettings raw key/value pairs read from the import file
 * @return the validated settings (delegates to {@code Settings.validate}
 *         with the same {@code false} flag as before)
 */
public static Map<String, Object> validate(int version, Map<String, String> importedSettings) {
    final Map<String, Object> validated =
            Settings.validate(version, SETTINGS, importedSettings, false);
    return validated;
}
/**
 * Runs the registered {@link #UPGRADERS} to bring validated settings from
 * {@code version} up to the current settings version.
 *
 * @param version version the settings currently have
 * @param validatedSettings settings to upgrade in place
 * @return whatever {@code Settings.upgrade} returns (keys removed by upgraders)
 */
public static Set<String> upgrade(int version, Map<String, Object> validatedSettings) {
    final Set<String> removedKeys =
            Settings.upgrade(version, UPGRADERS, SETTINGS, validatedSettings);
    return removedKeys;
}
/**
 * Converts internal (typed) setting values back to their string
 * representation using the descriptions in {@link #SETTINGS}.
 *
 * @param settings typed settings to convert
 * @return string representation of every setting
 */
public static Map<String, String> convert(Map<String, Object> settings) {
    final Map<String, String> stringified = Settings.convert(settings, SETTINGS);
    return stringified;
}
/**
 * Reads the current value of every known global setting from the preference
 * storage. Settings that have never been written are skipped.
 *
 * @param storage preference storage to read from
 * @return map of setting key to stored string value (missing keys omitted)
 */
public static Map<String, String> getGlobalSettings(SharedPreferences storage) {
    Map<String, String> snapshot = new HashMap<String, String>();
    for (String settingName : SETTINGS.keySet()) {
        String storedValue = storage.getString(settingName, null);
        if (storedValue == null) {
            continue; // never written — leave it out of the snapshot
        }
        snapshot.put(settingName, storedValue);
    }
    return snapshot;
}
/**
* Upgrades the settings from version 11 to 12
*
* Map the 'keyguardPrivacy' value to the new NotificationHideSubject enum.
*/
public static class SettingsUpgraderV12 implements SettingsUpgrader {
    @Override
    public Set<String> upgrade(Map<String, Object> settings) {
        // Old boolean 'keyguardPrivacy' ("only show subject when unlocked")
        // becomes the WHEN_LOCKED value of the new enum; everything else
        // (false or absent) maps to the old default of always showing it.
        Object oldValue = settings.get("keyguardPrivacy");
        boolean hideWhenLocked = Boolean.TRUE.equals(oldValue);
        settings.put("notificationHideSubject",
                hideWhenLocked ? NotificationHideSubject.WHEN_LOCKED
                               : NotificationHideSubject.NEVER);
        // The old key is obsolete and should be dropped from the settings.
        return new HashSet<String>(Arrays.asList("keyguardPrivacy"));
    }
}
/**
* Upgrades the settings from version 23 to 24.
*
* <p>
* Set <em>messageViewTheme</em> to {@link K9.Theme#USE_GLOBAL} if <em>messageViewTheme</em> has
* the same value as <em>theme</em>.
* </p>
*/
public static class SettingsUpgraderV24 implements SettingsUpgrader {
    @Override
    public Set<String> upgrade(Map<String, Object> settings) {
        // If the message-view theme simply mirrored the global theme, switch
        // it to the new explicit USE_GLOBAL value. Enum equals() is identity,
        // matching the original '==' comparison; a non-null match implies
        // both values were non-null.
        Object messageViewTheme = settings.get("messageViewTheme");
        Object globalTheme = settings.get("theme");
        if (messageViewTheme != null && messageViewTheme.equals(globalTheme)) {
            settings.put("messageViewTheme", K9.Theme.USE_GLOBAL);
        }
        // No keys are removed by this upgrade.
        return null;
    }
}
/**
* Upgrades the settings from version 30 to 31.
*
* <p>
* Convert value from <em>fontSizeMessageViewContent</em> to
* <em>fontSizeMessageViewContentPercent</em>.
* </p>
*/
public static class SettingsUpgraderV31 implements SettingsUpgrader {
    @Override
    public Set<String> upgrade(Map<String, Object> settings) {
        // Translate the old 1..5 web font-size step into a percentage value.
        Integer legacySize = (Integer) settings.get("fontSizeMessageViewContent");
        settings.put("fontSizeMessageViewContentPercent",
                convertFromOldSize(legacySize.intValue()));
        // The old key is superseded and gets removed.
        Set<String> removedKeys = new HashSet<String>(Arrays.asList("fontSizeMessageViewContent"));
        return removedKeys;
    }

    /**
     * Maps the legacy WebView font-size step (1..5) to a zoom percentage.
     * Step 3 — and any unexpected value — maps to the 100% default.
     */
    public static int convertFromOldSize(int oldSize) {
        if (oldSize == 1) {
            return 40;
        }
        if (oldSize == 2) {
            return 75;
        }
        if (oldSize == 4) {
            return 175;
        }
        if (oldSize == 5) {
            return 250;
        }
        return 100;
    }
}
/**
* The language setting.
*
* <p>
* Valid values are read from {@code settings_language_values} in
* {@code res/values/arrays.xml}.
* </p>
*/
public static class LanguageSetting extends PseudoEnumSetting<String> {
    // Maps each valid language code to its display label; the empty string
    // stands for the system default and is labeled "default".
    private final Map<String, String> mMapping;

    public LanguageSetting() {
        super("");
        Map<String, String> mapping = new HashMap<String, String>();
        String[] values = K9.app.getResources().getStringArray(R.array.settings_language_values);
        for (String value : values) {
            String label = (value.length() == 0) ? "default" : value;
            mapping.put(value, label);
        }
        mMapping = Collections.unmodifiableMap(mapping);
    }

    @Override
    protected Map<String, String> getMapping() {
        return mMapping;
    }

    @Override
    public Object fromString(String value) throws InvalidSettingValueException {
        // Only language codes listed in the resource array are accepted.
        if (!mMapping.containsKey(value)) {
            throw new InvalidSettingValueException();
        }
        return value;
    }
}
/**
* The theme setting.
*/
/**
 * The theme setting: stored as the enum ordinal, pretty-printed as
 * "light"/"dark".
 */
public static class ThemeSetting extends SettingsDescription {
    private static final String THEME_LIGHT = "light";
    private static final String THEME_DARK = "dark";

    public ThemeSetting(K9.Theme defaultValue) {
        super(defaultValue);
    }

    @Override
    public Object fromString(String value) throws InvalidSettingValueException {
        try {
            int theme = Integer.parseInt(value);
            // Old app versions persisted the Android style resource ID rather
            // than the enum ordinal (and no DB upgrade rewrote those values),
            // so both encodings have to be recognized here.
            if (theme == K9.Theme.LIGHT.ordinal() || theme == android.R.style.Theme_Light) {
                return K9.Theme.LIGHT;
            }
            if (theme == K9.Theme.DARK.ordinal() || theme == android.R.style.Theme) {
                return K9.Theme.DARK;
            }
        } catch (NumberFormatException e) {
            // fall through — the value is reported as invalid below
        }
        throw new InvalidSettingValueException();
    }

    @Override
    public Object fromPrettyString(String value) throws InvalidSettingValueException {
        if (THEME_DARK.equals(value)) {
            return K9.Theme.DARK;
        }
        if (THEME_LIGHT.equals(value)) {
            return K9.Theme.LIGHT;
        }
        throw new InvalidSettingValueException();
    }

    @Override
    public String toPrettyString(Object value) {
        // Anything that is not DARK (including LIGHT) is written as "light",
        // matching the original switch's default branch.
        return ((K9.Theme) value) == K9.Theme.DARK ? THEME_DARK : THEME_LIGHT;
    }

    @Override
    public String toString(Object value) {
        return Integer.toString(((K9.Theme) value).ordinal());
    }
}
/**
* The message view theme setting.
*/
/**
 * A theme setting for sub-views (message view/compose) that additionally
 * accepts {@link K9.Theme#USE_GLOBAL}, deferring all other values to
 * {@link ThemeSetting}.
 */
public static class SubThemeSetting extends ThemeSetting {
    private static final String THEME_USE_GLOBAL = "use_global";

    public SubThemeSetting(Theme defaultValue) {
        super(defaultValue);
    }

    @Override
    public Object fromString(String value) throws InvalidSettingValueException {
        try {
            if (Integer.parseInt(value) == K9.Theme.USE_GLOBAL.ordinal()) {
                return K9.Theme.USE_GLOBAL;
            }
            return super.fromString(value);
        } catch (NumberFormatException e) {
            throw new InvalidSettingValueException();
        }
    }

    @Override
    public Object fromPrettyString(String value) throws InvalidSettingValueException {
        return THEME_USE_GLOBAL.equals(value)
                ? K9.Theme.USE_GLOBAL
                : super.fromPrettyString(value);
    }

    @Override
    public String toPrettyString(Object value) {
        return ((K9.Theme) value) == K9.Theme.USE_GLOBAL
                ? THEME_USE_GLOBAL
                : super.toPrettyString(value);
    }
}
/**
* A time setting.
*/
/**
 * A time-of-day setting, validated against the time-picker's pattern.
 */
public static class TimeSetting extends SettingsDescription {
    public TimeSetting(String defaultValue) {
        super(defaultValue);
    }

    @Override
    public Object fromString(String value) throws InvalidSettingValueException {
        boolean wellFormed = value.matches(TimePickerPreference.VALIDATION_EXPRESSION);
        if (wellFormed) {
            return value;
        }
        throw new InvalidSettingValueException();
    }
}
/**
* A directory on the file system.
*/
/**
 * A setting holding a path that must refer to an existing directory on the
 * file system at validation time.
 */
public static class DirectorySetting extends SettingsDescription {
    public DirectorySetting(String defaultValue) {
        super(defaultValue);
    }

    @Override
    public Object fromString(String value) throws InvalidSettingValueException {
        boolean isDirectory;
        try {
            isDirectory = new File(value).isDirectory();
        } catch (Exception e) {
            // e.g. a SecurityException from the file system — treat as invalid
            isDirectory = false;
        }
        if (isDirectory) {
            return value;
        }
        throw new InvalidSettingValueException();
    }
}
}
| |
/*******************************************************************************
* Copyright (c) 2015
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*******************************************************************************/
package jsettlers.graphics.sound;
import go.graphics.sound.ISoundDataRetriever;
import go.graphics.sound.SoundPlayer;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.Random;
import jsettlers.graphics.reader.bytereader.ByteReader;
import jsettlers.graphics.startscreen.SettingsManager;
/**
* This class manages reading and playing of the sound file.
* <p>
* Some known sounds:
* <p>
* 1 (6 times): knock a tree <br>
* 2 (3 times): digg <br>
* 3 (twice): knock stone <br>
* 5: saw <br>
* 6: smith <br>
* 7: smith <br>
* 12: farmer <br>
* 14: donkey <br>
* 30: sword Soldier <br>
* 31/32 (soldier ?) <br>
* 33 Bowman <br>
* 35 soldier killed <br>
* 36 building getting removed <br>
* 39: pigs <br>
* 40: donkey <br>
* 41: donkey <br>
* 42: wind/mill: 2.5s <br>
* 56: lock <br>
* 57-59: notification sounds<br>
* 62: Ui klick <br>
* 68, 68b: Sea <br>
* 69, 69b: Bird <br>
* 70, 70b: Bird 71: Water (river) <br>
* 72 (+ alternaitves): moor <br>
* 73: wind <br>
* 74: crazy wind <br>
* 75 (3 times): thunder <br>
* 76 (2 times): rain <br>
* 80: You are beeing attacked <br>
* 81: Mill, <br>
* 82: older mill, <br>
* 83: even older mill <br>
* 84: catapult <br>
* 85: Arrow shooting <br>
* 86 -90: canon shooting <br>
* 91: fire <br>
* 92: small fire <br>
* 100 - 110: Attacked (same sound?) ? <br>
* 111, 112: gong, <br>
* 113 (4 times): kill (maya?)
*
* @author michael
*/
public class SoundManager {
	/** Length of the per-sound header, counted in 16 bit words (subtracted from the sample count). */
	private static final int SOUND_META_LENGTH = 16;
	/** File offset at which reading of the sound index starts. */
	private static final int SOUND_FILE_START = 0x24;
	/** Magic bytes every valid sound file must start with. */
	private static final byte[] SOUND_FILE_MAGIC = new byte[] {
			0x44,
			0x15,
			0x01,
			0x00,
			0x02,
			0x00,
			0x00,
			0x00,
			0x00,
			0x00,
			0x00,
			0x00,
			0x1C,
			0x00,
			0x00,
			0x00
	};
	/** Number of sound sequences in the file; valid sound ids are 0 .. SEQUENCE_N - 1. */
	private static final int SEQUENCE_N = 118;

	/**
	 * Sound ID when we are attacked.
	 */
	public static final int NOTIFY_ATTACKED = 80;

	/** The player used to actually output the sounds. */
	private final SoundPlayer player;

	/** Source of randomness for picking one of a sound's alternatives. */
	private final Random random = new Random();

	/**
	 * The lookup paths for the dat files.
	 */
	private static ArrayList<File> lookupPaths = new ArrayList<File>();

	/**
	 * The start positions of all the playable sounds.
	 */
	private int[][] soundStarts;

	// Set once the background loading thread has been started. It is never
	// reset, so loading is attempted at most once, even if it fails.
	private boolean initializing = false;

	/**
	 * Creates a new sound manager.
	 *
	 * @param player
	 *            The player to play sounds at.
	 */
	public SoundManager(SoundPlayer player) {
		this.player = player;
		initialize();
	}

	/**
	 * Opens the sound file, reads the start index of every sound and hands
	 * the open reader to the player for later sample retrieval.
	 *
	 * @throws FileNotFoundException
	 *             If the sound file was not found.
	 * @throws IOException
	 *             If the sound file could not be read.
	 */
	private void loadSounds() throws FileNotFoundException, IOException {
		ByteReader reader = openSoundFile();
		this.soundStarts = getSoundStarts(reader);
		player.setSoundDataRetriever(new SoundDataRetriever(reader));
	}

	/**
	 * Reads the start indexes of the sounds.
	 *
	 * @param reader
	 *            The reader to read from.
	 * @return An array of start indexes for each sound and it's variants.
	 * @throws IOException
	 *             If the file could not be read.
	 */
	protected static int[][] getSoundStarts(ByteReader reader)
			throws IOException {
		// First comes a table with one 32 bit offset per sequence header.
		int[] seqheaderstarts = new int[SEQUENCE_N];
		for (int i = 0; i < SEQUENCE_N; i++) {
			seqheaderstarts[i] = reader.read32();
		}

		// Each sequence header holds a count followed by that many sound
		// start positions (the alternatives of that sound).
		int[][] playerids = new int[SEQUENCE_N][];
		for (int i = 0; i < SEQUENCE_N; i++) {
			reader.skipTo(seqheaderstarts[i]);
			int alternaitvecount = reader.read32();
			int[] starts = new int[alternaitvecount];
			for (int j = 0; j < alternaitvecount; j++) {
				starts[j] = reader.read32();
			}
			playerids[i] = starts;
		}
		return playerids;
	}

	/**
	 * Opens the sound file.
	 *
	 * @return The file reader, positioned at {@link #SOUND_FILE_START}.
	 * @throws IOException
	 *             If the file could not be opened,
	 * @throws FileNotFoundException
	 *             If the file was not found.
	 */
	protected static ByteReader openSoundFile() throws IOException,
			FileNotFoundException {
		File sndfile = getSoundFile();

		if (sndfile == null) {
			throw new IOException("Sound file not found.");
		}

		RandomAccessFile randomAccessFile = new RandomAccessFile(sndfile, "r");
		ByteReader reader = new ByteReader(randomAccessFile);
		// Fail fast if this is not a sound file we understand.
		reader.assumeToRead(SOUND_FILE_MAGIC);
		reader.skipTo(SOUND_FILE_START);
		return reader;
	}

	/**
	 * Searches the registered lookup paths for the sound file.
	 *
	 * @return The file "Siedler3_00.dat" in the first matching lookup path,
	 *         or <code>null</code> if no path contains it.
	 */
	private static File getSoundFile() {
		File sndfile = null;
		synchronized (lookupPaths) {
			for (File dir : lookupPaths) {
				File file = new File(dir, "Siedler3_00.dat");
				if (file.exists()) {
					sndfile = file;
					break;
				}
			}
		}
		return sndfile;
	}

	/**
	 * Plays a given sound.
	 *
	 * @param soundid
	 *            The sound id to play.
	 * @param volume1
	 *            The volume for the left speaker.
	 * @param volume2
	 *            The volume for the right speaker.
	 */
	public void playSound(int soundid, float volume1, float volume2) {
		initialize();

		// Silently ignore the request while the sounds are still loading
		// (soundStarts == null) or if the sound id is out of range.
		if (soundStarts != null && soundid >= 0 && soundid < SEQUENCE_N) {
			int[] alternatives = soundStarts[soundid];
			if (alternatives != null && alternatives.length > 0) {
				// Pick a random alternative and scale by the global volume.
				int rand = random.nextInt(alternatives.length);
				float volume = SettingsManager.getInstance().getVolume();
				player.playSound(alternatives[rand], volume1 * volume, volume2
						* volume);
			}
		}
	}

	/**
	 * Starts loading the sounds on a background thread. Only the first call
	 * has an effect; later calls return immediately.
	 */
	private void initialize() {
		synchronized (this) {
			if (initializing) {
				return;
			}
			initializing = true;
		}
		new Thread(new Runnable() {
			@Override
			public void run() {
				try {
					loadSounds();
				} catch (Throwable e) {
					// Sound is optional: log the problem and keep running
					// without sounds (soundStarts stays null).
					e.printStackTrace();
				}
			}
		}, "sound loader").start();
	}

	/**
	 * Adds a sound file lookup path.
	 *
	 * @param file
	 *            The file path.
	 */
	public static void addLookupPath(File file) {
		synchronized (lookupPaths) {
			lookupPaths.add(file);
		}
	}

	/**
	 * This class wraps an open {@link ByteReader} to a {@link ISoundDataRetriever}.
	 *
	 * @author Michael Zangl
	 *
	 */
	protected static class SoundDataRetriever implements ISoundDataRetriever {
		/** The reader the sound data is loaded from. */
		private final ByteReader reader;

		/**
		 * Create a new {@link SoundDataRetriever}.
		 *
		 * @param reader
		 *            The byte reader.
		 */
		public SoundDataRetriever(ByteReader reader) {
			this.reader = reader;
		}

		@Override
		public synchronized short[] getSoundData(int soundStart)
				throws IOException {
			// Synchronized because the shared reader is seeked: concurrent
			// access would corrupt the read position.
			return SoundManager.getSoundData(reader, soundStart);
		}
	}

	/**
	 * Reads the sound data from a byte reader.
	 *
	 * @param reader
	 *            The reader to read.
	 * @param start
	 *            The sound start position.
	 * @return The read sound data.
	 * @throws IOException
	 *             If that sound could not be read.
	 */
	protected static short[] getSoundData(ByteReader reader, int start)
			throws IOException {
		reader.skipTo(start);
		// The first word is the byte length; convert it to a sample count
		// and subtract the header length.
		int length = reader.read32() / 2 - SOUND_META_LENGTH;
		reader.read32();
		reader.read32(); // mostly 22050
		reader.read32(); // mostly 44100
		reader.read32();

		return loadSound(reader, length);
	}

	/**
	 * Reads the given number of 16 bit samples from the reader.
	 *
	 * @param reader
	 *            The reader to read from.
	 * @param length
	 *            The number of samples; negative values yield an empty array.
	 * @return The samples that were read.
	 * @throws IOException
	 *             If the data could not be read.
	 */
	private static short[] loadSound(ByteReader reader, int length)
			throws IOException {
		if (length < 0) {
			return new short[0];
		}
		short[] data = new short[length];
		for (int i = 0; i < length; i++) {
			data[i] = (short) reader.read16signed();
		}

		return data;
	}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.collections;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import alluxio.util.SleepUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Test concurrent behavior of {@link IndexedSet}.
*/
/**
 * Test concurrent behavior of {@link IndexedSet}.
 */
public class IndexedSetConcurrencyTest {
  /** The maximum value for the size value for the test object. */
  private static final int MAX_SIZE = 30;
  /** The duration for each test. */
  private static final int TEST_CASE_DURATION_MS = 5000;
  /** The minimum number of threads for each task type. */
  private static final int MIN_TASKS = 3;
  /** The maximum number of threads for each task type. */
  private static final int MAX_TASKS = 6;
  /** The maximum number of times one size value is repeated in a single add-with-check task. */
  private static final int MAX_REPEAT_TIMES = 6;

  private IndexedSet<TestInfo> mIndexedSet;
  private ExecutorService mThreadPool;
  /** Used to stop concurrent threads. */
  private AtomicBoolean mStopThreads;

  /**
   * Base class for testing different behaviours of {@link IndexedSet} concurrently.
   * Each task waits on a shared barrier and then repeatedly runs its single
   * operation until {@link #mStopThreads} is set.
   */
  private abstract class ConcurrentTask implements Callable<Void> {
    /** Number of items added or deleted in this single task. */
    private volatile long mCount = 0;
    private final CyclicBarrier mBarrier;

    /**
     * @param barrier the CyclicBarrier
     */
    public ConcurrentTask(CyclicBarrier barrier) {
      mBarrier = barrier;
    }

    public long getCount() {
      return mCount;
    }

    /**
     * Runs a single task.
     *
     * @return number of items added or deleted
     */
    abstract long runSingleTask();

    @Override
    public Void call() throws BrokenBarrierException, InterruptedException {
      // Make all tasks start their work at the same moment.
      mBarrier.await();
      while (!mStopThreads.get()) {
        mCount += runSingleTask();
      }
      return null;
    }
  }

  /**
   * A class for testing add behaviour of {@link IndexedSet} concurrently.
   */
  private class ConcurrentAdd extends ConcurrentTask {
    public ConcurrentAdd(CyclicBarrier barrier) {
      super(barrier);
    }

    @Override
    public long runSingleTask() {
      return mIndexedSet.add(new TestInfo()) ? 1 : 0;
    }
  }

  /**
   * A class for testing add behaviour of {@link IndexedSet} concurrently and checking if
   * the result is valid.
   */
  private class ConcurrentAddWithCheck extends ConcurrentTask {
    public ConcurrentAddWithCheck(CyclicBarrier barrier) {
      super(barrier);
    }

    @Override
    public long runSingleTask() {
      long result = 0;
      int size = ThreadLocalRandom.current().nextInt(0, MAX_SIZE);
      for (int i = ThreadLocalRandom.current().nextInt(1, MAX_REPEAT_TIMES + 1); i > 0; i--) {
        TestInfo instance = new TestInfo(ThreadLocalRandom.current().nextLong(), size);
        result += (mIndexedSet.add(instance) ? 1 : 0);
        // The freshly added instance must be retrievable through the unique id index.
        assertTrue(mIndexedSet.contains(mIdIndex, instance.getId()));
        assertEquals(1, mIndexedSet.getByField(mIdIndex, instance.getId()).size());
      }
      // At least the elements added by this task must show up under the size index.
      assertTrue(result <= mIndexedSet.getByField(mSizeIndex, size).size());
      return result;
    }
  }

  /**
   * A class for testing remove behaviour of {@link IndexedSet} concurrently.
   */
  private class ConcurrentRemove extends ConcurrentTask {
    public ConcurrentRemove(CyclicBarrier barrier) {
      super(barrier);
    }

    @Override
    public long runSingleTask() {
      TestInfo info =
          mIndexedSet.getFirstByField(mSizeIndex, ThreadLocalRandom.current().nextInt(0, MAX_SIZE));
      if (info != null) {
        return mIndexedSet.remove(info) ? 1 : 0;
      }
      return 0;
    }
  }

  /**
   * Removes concurrent tasks by field.
   */
  private class ConcurrentRemoveByField extends ConcurrentTask {
    public ConcurrentRemoveByField(CyclicBarrier barrier) {
      super(barrier);
    }

    @Override
    public long runSingleTask() {
      return mIndexedSet
          .removeByField(mSizeIndex, ThreadLocalRandom.current().nextInt(0, MAX_SIZE));
    }
  }

  /**
   * Removes concurrent tasks by iterator.
   */
  private class ConcurrentRemoveByIterator extends ConcurrentTask {
    public ConcurrentRemoveByIterator(CyclicBarrier barrier) {
      super(barrier);
    }

    @Override
    public long runSingleTask() {
      long removed = 0;
      Iterator<TestInfo> it = mIndexedSet.iterator();
      while (it.hasNext()) {
        it.next();
        it.remove();
        removed++;
      }
      return removed;
    }
  }

  /**
   * Clears out concurrent tasks.
   */
  private class ConcurrentClear extends ConcurrentTask {
    public ConcurrentClear(CyclicBarrier barrier) {
      super(barrier);
    }

    @Override
    public long runSingleTask() {
      mIndexedSet.clear();
      return 1;
    }
  }

  /**
   * Helper class for testing {@link IndexedSet}.
   */
  private final class TestInfo {
    private final long mId;
    private final int mSize;

    /**
     * Creates an instance of {@link TestInfo} randomly.
     */
    private TestInfo() {
      this(ThreadLocalRandom.current().nextLong(),
          ThreadLocalRandom.current().nextInt(0, MAX_SIZE));
    }

    /**
     * Creates an instance of {@link TestInfo} by giving its id and size fields.
     *
     * @param id the id
     * @param size the size
     */
    private TestInfo(long id, int size) {
      mId = id;
      mSize = size;
    }

    public long getId() {
      return mId;
    }

    public int getSize() {
      return mSize;
    }
  }

  /** Unique index on {@link TestInfo#getId()}. */
  private final IndexDefinition<TestInfo, Long> mIdIndex =
      new IndexDefinition<TestInfo, Long>(true) {
        @Override
        public Long getFieldValue(TestInfo o) {
          return o.getId();
        }
      };

  /** Non-unique index on {@link TestInfo#getSize()}. */
  private final IndexDefinition<TestInfo, Integer> mSizeIndex =
      new IndexDefinition<TestInfo, Integer>(false) {
        @Override
        public Integer getFieldValue(TestInfo o) {
          return o.getSize();
        }
      };

  @Before
  public void before() throws Exception {
    mIndexedSet = new IndexedSet<>(mIdIndex, mSizeIndex);
    mThreadPool = Executors.newCachedThreadPool();
    mStopThreads = new AtomicBoolean(false);
  }

  @After
  public void after() {
    mThreadPool.shutdownNow();
  }

  /**
   * Verifies the {@link #mIndexedSet} for internal consistency: the set size
   * must agree with the number of elements reachable through each index.
   */
  private void verifySet() {
    Iterator<TestInfo> it = mIndexedSet.iterator();
    Set<Long> ids = new HashSet<>();
    Set<Integer> sizes = new HashSet<>();

    // Verify the size.
    int expectedCount = 0;
    while (it.hasNext()) {
      TestInfo info = it.next();
      ids.add(info.getId());
      sizes.add(info.getSize());
      expectedCount++;
    }
    assertEquals(expectedCount, mIndexedSet.size());

    // Verify the size according to the id index.
    int count = 0;
    for (Long id : ids) {
      Set<TestInfo> elements = mIndexedSet.getByField(mIdIndex, id);
      count += elements.size();
    }
    assertEquals(expectedCount, count);

    // Verify the size according to the size index.
    count = 0;
    for (Integer size : sizes) {
      Set<TestInfo> elements = mIndexedSet.getByField(mSizeIndex, size);
      count += elements.size();
    }
    assertEquals(expectedCount, count);
  }

  @Test
  public void basicConcurrentUpdate() throws Exception {
    List<Future<?>> futures = new ArrayList<>();
    List<ConcurrentTask> addTasks = new ArrayList<>();
    List<ConcurrentTask> removeTasks = new ArrayList<>();
    int[] tasksNumbers = new int[3];
    int totalTasksNumber = 0;

    // Try to balance adds and removes: twice as many add tasks as each remove type.
    tasksNumbers[0] = 2 * ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1);
    totalTasksNumber += tasksNumbers[0];
    // Add random number of each task type.
    for (int i = 1; i < 3; i++) {
      tasksNumbers[i] = ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1);
      totalTasksNumber += tasksNumbers[i];
    }
    CyclicBarrier barrier = new CyclicBarrier(totalTasksNumber);

    for (int i = 0; i < tasksNumbers[0]; i++) {
      addTasks.add(new ConcurrentAdd(barrier));
    }
    for (int i = 0; i < tasksNumbers[1]; i++) {
      removeTasks.add(new ConcurrentRemove(barrier));
    }
    for (int i = 0; i < tasksNumbers[2]; i++) {
      removeTasks.add(new ConcurrentRemoveByField(barrier));
    }

    for (ConcurrentTask task : addTasks) {
      futures.add(mThreadPool.submit(task));
    }
    for (ConcurrentTask task : removeTasks) {
      futures.add(mThreadPool.submit(task));
    }

    SleepUtils.sleepMs(TEST_CASE_DURATION_MS);
    mStopThreads.set(true);
    for (Future<?> future : futures) {
      future.get();
    }

    // Calculate how many elements have been added or removed.
    long added = 0;
    for (ConcurrentTask task : addTasks) {
      added += task.getCount();
    }
    long removed = 0;
    for (ConcurrentTask task : removeTasks) {
      removed += task.getCount();
    }
    // BUGFIX: assertEquals takes (expected, actual); the expected set size is
    // the number of successful adds minus the number of successful removes.
    assertEquals(added - removed, mIndexedSet.size());
    verifySet();
  }

  /**
   * Submits a balanced mix of add, remove, remove-by-field, remove-by-iterator
   * and clear tasks against {@link #mIndexedSet}, lets them run for
   * {@link #TEST_CASE_DURATION_MS} and then verifies the set's consistency.
   *
   * @throws Exception if any task fails
   */
  private void runMixedConcurrentUpdate() throws Exception {
    List<Future<?>> futures = new ArrayList<>();
    int[] tasksNumbers = new int[5];
    int totalTasksNumber = 0;

    // Try to balance adds against the four kinds of removal tasks.
    tasksNumbers[0] = 4 * ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1);
    totalTasksNumber += tasksNumbers[0];
    // Add random number of each task type.
    for (int i = 1; i < 5; i++) {
      tasksNumbers[i] = ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1);
      totalTasksNumber += tasksNumbers[i];
    }
    CyclicBarrier barrier = new CyclicBarrier(totalTasksNumber);

    for (int i = 0; i < tasksNumbers[0]; i++) {
      futures.add(mThreadPool.submit(new ConcurrentAdd(barrier)));
    }
    for (int i = 0; i < tasksNumbers[1]; i++) {
      futures.add(mThreadPool.submit(new ConcurrentRemove(barrier)));
    }
    for (int i = 0; i < tasksNumbers[2]; i++) {
      futures.add(mThreadPool.submit(new ConcurrentRemoveByField(barrier)));
    }
    for (int i = 0; i < tasksNumbers[3]; i++) {
      futures.add(mThreadPool.submit(new ConcurrentRemoveByIterator(barrier)));
    }
    for (int i = 0; i < tasksNumbers[4]; i++) {
      futures.add(mThreadPool.submit(new ConcurrentClear(barrier)));
    }

    SleepUtils.sleepMs(TEST_CASE_DURATION_MS);
    mStopThreads.set(true);
    for (Future<?> future : futures) {
      future.get();
    }
    verifySet();
  }

  @Test
  public void concurrentUpdate() throws Exception {
    runMixedConcurrentUpdate();
  }

  @Test
  public void concurrentAdd() throws Exception {
    List<Future<?>> futures = new ArrayList<>();
    // Add random number of each task type.
    int tasksNumber = 2 * ThreadLocalRandom.current().nextInt(MIN_TASKS, MAX_TASKS + 1);
    CyclicBarrier barrier = new CyclicBarrier(tasksNumber);
    for (int i = 0; i < tasksNumber; i++) {
      futures.add(mThreadPool.submit(new ConcurrentAddWithCheck(barrier)));
    }
    SleepUtils.sleepMs(TEST_CASE_DURATION_MS);
    mStopThreads.set(true);
    for (Future<?> future : futures) {
      future.get();
    }
    verifySet();
  }

  /**
   * Use the mSizeIndex as primary index, test the correctness of using non-unique index as primary
   * index.
   */
  @Test
  public void nonUniqueConcurrentUpdate() throws Exception {
    mIndexedSet = new IndexedSet<>(mSizeIndex, mIdIndex);
    runMixedConcurrentUpdate();
  }
}
| |
/*
* Copyright 2013 Rackspace
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspacecloud.blueflood.outputs.serializers;
import com.rackspacecloud.blueflood.exceptions.SerializationException;
import com.rackspacecloud.blueflood.outputs.formats.MetricData;
import com.rackspacecloud.blueflood.types.*;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
public interface BasicRollupsOutputSerializer<T> {

    /**
     * Transforms rollup data into this serializer's output representation.
     *
     * @param metricData the metric data to transform
     * @param filterStats the statistics to include in the output
     * @return the serialized representation
     * @throws SerializationException if the data cannot be serialized
     */
    public T transformRollupData(MetricData metricData, Set<MetricStat> filterStats) throws SerializationException;

    /**
     * The statistics that can be requested for a rollup. Each constant knows
     * how to extract its value from the rollup types that support it, and how
     * to represent a raw (full resolution) sample.
     */
    public static enum MetricStat {
        AVERAGE("average") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BasicRollup)
                    return ((BasicRollup) rollup).getAverage();
                else if (rollup instanceof BluefloodTimerRollup)
                    return ((BluefloodTimerRollup) rollup).getAverage();
                else
                    // counters, sets
                    throw new Exception(String.format("average not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                // A single sample is its own average.
                return rawSample;
            }
        },
        VARIANCE("variance") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BasicRollup)
                    return ((BasicRollup) rollup).getVariance();
                else if (rollup instanceof BluefloodTimerRollup)
                    return ((BluefloodTimerRollup) rollup).getVariance();
                else
                    // counters, sets.
                    throw new Exception(String.format("variance not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                // A single sample has zero variance.
                return 0;
            }
        },
        MIN("min") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BasicRollup)
                    return ((BasicRollup) rollup).getMinValue();
                else if (rollup instanceof BluefloodTimerRollup)
                    return ((BluefloodTimerRollup) rollup).getMinValue();
                else
                    // counters, sets.
                    throw new Exception(String.format("min not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                return rawSample;
            }
        },
        MAX("max") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BasicRollup)
                    return ((BasicRollup) rollup).getMaxValue();
                else if (rollup instanceof BluefloodTimerRollup)
                    return ((BluefloodTimerRollup) rollup).getMaxValue();
                else
                    // counters, sets.
                    // BUGFIX: message previously said "min" (copy-paste error).
                    throw new Exception(String.format("max not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                return rawSample;
            }
        },
        NUM_POINTS("numPoints") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BasicRollup)
                    return ((BasicRollup) rollup).getCount();
                else if (rollup instanceof BluefloodTimerRollup)
                    return ((BluefloodTimerRollup) rollup).getCount();
                else if (rollup instanceof BluefloodCounterRollup)
                    return ((BluefloodCounterRollup) rollup).getSampleCount();
                else if (rollup instanceof BluefloodSetRollup)
                    return ((BluefloodSetRollup) rollup).getCount();
                else if (rollup instanceof BluefloodEnumRollup)
                    return ((BluefloodEnumRollup) rollup).getNumPoints();
                else
                    // gauge.
                    throw new Exception(String.format("numPoints not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                // A raw sample is exactly one point.
                return 1;
            }
        },
        LATEST("latest") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BluefloodGaugeRollup)
                    return ((BluefloodGaugeRollup) rollup).getLatestValue().getValue();
                else
                    // every other type.
                    throw new Exception(String.format("latest value not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                return rawSample;
            }
        },
        RATE("rate") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BluefloodTimerRollup)
                    return ((BluefloodTimerRollup) rollup).getRate();
                else if (rollup instanceof BluefloodCounterRollup)
                    return ((BluefloodCounterRollup) rollup).getRate();
                else
                    // gauge, set, basic
                    throw new Exception(String.format("rate not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                return rawSample;
            }
        },
        SUM("sum") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BluefloodTimerRollup)
                    return ((BluefloodTimerRollup) rollup).getSum();
                else if (rollup instanceof BluefloodCounterRollup)
                    return ((BluefloodCounterRollup) rollup).getCount();
                else
                    // every other type.
                    throw new Exception(String.format("sum not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                return rawSample;
            }
        },
        PERCENTILE("percentiles") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BluefloodTimerRollup)
                    return ((BluefloodTimerRollup) rollup).getPercentiles();
                else
                    // every other type.
                    // BUGFIX: message was missing "not".
                    throw new Exception(String.format("percentiles not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                return rawSample;
            }
        },
        ENUM_VALUES("enum_values") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BluefloodEnumRollup)
                    return ((BluefloodEnumRollup) rollup).getStringEnumValuesWithCounts();
                else
                    // every other type.
                    // BUGFIX: message was missing "not".
                    throw new Exception(String.format("enum values not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                return rawSample;
            }
        },
        TYPE("type") {
            @Override
            Object convertRollupToObject(Rollup rollup) throws Exception {
                if (rollup instanceof BluefloodEnumRollup)
                    return MetricData.Type.ENUM;
                else
                    // every other type.
                    // BUGFIX: message was copy-pasted from ENUM_VALUES and missing "not".
                    throw new Exception(String.format("type not supported for this type: %s", rollup.getClass().getSimpleName()));
            }
            @Override
            Object convertRawSampleToObject(Object rawSample) {
                return rawSample;
            }
        }
        ;

        /** Lower-level string form of the statistic, used as its external name. */
        private final String stringRep;

        /** Lookup table from lower-cased string form to constant, built once. */
        private static final Map<String, MetricStat> stringToEnum = new HashMap<String, MetricStat>();
        static {
            for (MetricStat ms : values()) {
                stringToEnum.put(ms.toString().toLowerCase(), ms);
            }
        }

        private MetricStat(String s) {
            this.stringRep = s;
        }

        /**
         * Resolves a statistic by its string form (case-insensitive).
         *
         * @param s the string form of a statistic
         * @return the matching constant, or {@code null} if none matches
         */
        public static MetricStat fromString(String s) {
            return stringToEnum.get(s.toLowerCase());
        }

        /**
         * Resolves a list of statistic names, silently skipping unknown ones.
         *
         * @param statList the statistic names to resolve
         * @return the set of recognized statistics
         */
        public static Set<MetricStat> fromStringList(List<String> statList) {
            Set<MetricStat> set = new HashSet<MetricStat>();
            for (String stat : statList) {
                MetricStat metricStat = fromString(stat);
                if (metricStat != null) {
                    // Reuse the already-resolved constant instead of a second lookup.
                    set.add(metricStat);
                }
            }
            return set;
        }

        @Override
        public String toString() {
            return this.stringRep;
        }

        abstract Object convertRollupToObject(Rollup rollup) throws Exception;
        abstract Object convertRawSampleToObject(Object rawSample);
    }
}
| |
/*
* Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This source code is provided to illustrate the usage of a given feature
* or technique and has been deliberately simplified. Additional steps
* required for a production-quality application, such as security checks,
* input validation and proper error handling, might not be present in
* this sample code.
*/
/* Generated By:JavaCC: Do not edit this line. ASCII_UCodeESC_CharStream.java Version 0.7pre6 */
package com.sun.tools.example.debug.expr;
/**
* An implementation of interface CharStream, where the stream is assumed to
* contain only ASCII characters (with java-like unicode escape processing).
*/
public final class ASCII_UCodeESC_CharStream
{
  // NOTE(review): JavaCC-generated flag; false appears to indicate the
  // per-instance (non-STATIC) CharStream variant -- confirm against the grammar options.
  public static final boolean staticFlag = false;
static final int hexval(char c) throws java.io.IOException {
switch(c)
{
case '0' :
return 0;
case '1' :
return 1;
case '2' :
return 2;
case '3' :
return 3;
case '4' :
return 4;
case '5' :
return 5;
case '6' :
return 6;
case '7' :
return 7;
case '8' :
return 8;
case '9' :
return 9;
case 'a' :
case 'A' :
return 10;
case 'b' :
case 'B' :
return 11;
case 'c' :
case 'C' :
return 12;
case 'd' :
case 'D' :
return 13;
case 'e' :
case 'E' :
return 14;
case 'f' :
case 'F' :
return 15;
}
throw new java.io.IOException(); // Should never come here
}
  public int bufpos = -1;               // index of the last character returned from the token buffer
  int bufsize;                          // current capacity of the token buffer
  int available;                        // index up to which the buffer may be filled before adjusting
  int tokenBegin;                       // buffer index at which the current token starts
  private int bufline[];                // line number recorded for each buffered character
  private int bufcolumn[];              // column number recorded for each buffered character
  private int column = 0;
  private int line = 1;
  private java.io.InputStream inputStream;
  private boolean prevCharIsCR = false; // previous raw character was '\r' (for CRLF handling)
  private boolean prevCharIsLF = false; // previous raw character was '\n'
  private byte[] nextCharBuf;           // raw bytes read ahead from the input stream
  private char[] buffer;                // processed characters (unicode escapes expanded)
  private int maxNextCharInd = 0;       // number of valid bytes in nextCharBuf
  private int nextCharInd = -1;         // index of the last byte consumed from nextCharBuf
  private int inBuf = 0;                // number of characters backed up and available for re-reading
private final void ExpandBuff(boolean wrapAround)
{
char[] newbuffer = new char[bufsize + 2048];
int newbufline[] = new int[bufsize + 2048];
int newbufcolumn[] = new int[bufsize + 2048];
try
{
if (wrapAround)
{
System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
System.arraycopy(buffer, 0, newbuffer,
bufsize - tokenBegin, bufpos);
buffer = newbuffer;
System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos);
bufline = newbufline;
System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos);
bufcolumn = newbufcolumn;
bufpos += (bufsize - tokenBegin);
}
else
{
System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
buffer = newbuffer;
System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
bufline = newbufline;
System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
bufcolumn = newbufcolumn;
bufpos -= tokenBegin;
}
}
catch (Throwable t)
{
throw new Error(t.getMessage());
}
available = (bufsize += 2048);
tokenBegin = 0;
}
  /**
   * Refills the raw byte read-ahead buffer from the input stream. On end of
   * input the stream is closed and an IOException is thrown; before
   * rethrowing, the token-buffer bookkeeping is rewound by one character so
   * that position information stays consistent.
   *
   * @throws java.io.IOException if the stream ends or a read fails
   */
  private final void FillBuff() throws java.io.IOException
  {
    int i;
    // Wrap around once the 4K read-ahead buffer has been fully consumed.
    if (maxNextCharInd == 4096)
      maxNextCharInd = nextCharInd = 0;

    try {
      if ((i = inputStream.read(nextCharBuf, maxNextCharInd,
                                4096 - maxNextCharInd)) == -1)
      {
        inputStream.close();
        throw new java.io.IOException();
      }
      else
        maxNextCharInd += i;
      return;
    }
    catch(java.io.IOException e) {
      // Undo the speculative advance of bufpos done by the caller (readChar)
      // so that line/column information stays attached to a real character.
      if (bufpos != 0)
      {
        --bufpos;
        backup(0);
      }
      else
      {
        bufline[bufpos] = line;
        bufcolumn[bufpos] = column;
      }
      throw e;
    }
  }
private final byte ReadByte() throws java.io.IOException
{
if (++nextCharInd >= maxNextCharInd)
FillBuff();
return nextCharBuf[nextCharInd];
}
  /**
   * Marks the start of a new token and returns its first character.
   * Backed-up characters are replayed before new input is read.
   *
   * @return the first character of the new token
   * @throws java.io.IOException if the input ends or cannot be read
   */
  public final char BeginToken() throws java.io.IOException
  {
    if (inBuf > 0)
    {
      // Replay a backed-up character; the token starts at its position.
      --inBuf;
      return buffer[tokenBegin = (bufpos == bufsize - 1) ? (bufpos = 0)
                                                         : ++bufpos];
    }

    tokenBegin = 0;
    bufpos = -1;

    return readChar();
  }
  /**
   * Makes room for one more character in the circular token buffer by
   * wrapping around, widening the available region, or growing the buffer,
   * while keeping the characters of the current token (from tokenBegin on)
   * intact.
   */
  private final void AdjustBuffSize()
  {
    if (available == bufsize)
    {
      if (tokenBegin > 2048)
      {
        // Enough free space before the token start: wrap around and reuse it.
        bufpos = 0;
        available = tokenBegin;
      }
      else
        ExpandBuff(false);
    }
    else if (available > tokenBegin)
      available = bufsize;
    else if ((tokenBegin - available) < 2048)
      // Too little room left before the token: grow and unwrap.
      ExpandBuff(true);
    else
      available = tokenBegin;
  }
  /**
   * Records the line and column of the character just stored at bufpos,
   * handling CR, LF and CRLF line endings and expanding tabs to 8-column
   * tab stops.
   *
   * @param c the character that was just read
   */
  private final void UpdateLineColumn(char c)
  {
    column++;

    if (prevCharIsLF)
    {
      // The previous character ended a line; this one starts a new line.
      prevCharIsLF = false;
      line += (column = 1);
    }
    else if (prevCharIsCR)
    {
      prevCharIsCR = false;
      if (c == '\n')
      {
        // CRLF counts as a single line break.
        prevCharIsLF = true;
      }
      else
        line += (column = 1);
    }

    switch (c)
    {
      case '\r' :
        prevCharIsCR = true;
        break;
      case '\n' :
        prevCharIsLF = true;
        break;
      case '\t' :
        // Advance to the next multiple-of-8 tab stop (07 is octal 7).
        column--;
        column += (8 - (column & 07));
        break;
      default :
        break;
    }

    bufline[bufpos] = line;
    bufcolumn[bufpos] = column;
  }
  /**
   * Returns the next character, expanding java-style unicode escapes
   * (an odd number of backslashes followed by one or more 'u' characters
   * and four hex digits). Backed-up characters are replayed first.
   *
   * @return the next (possibly escape-expanded) character
   * @throws java.io.IOException if the input ends or cannot be read
   */
  public final char readChar() throws java.io.IOException
  {
    if (inBuf > 0)
    {
      // Replay a previously backed-up character.
      --inBuf;
      return buffer[(bufpos == bufsize - 1) ? (bufpos = 0) : ++bufpos];
    }

    char c;

    if (++bufpos == available)
      AdjustBuffSize();

    // Input is treated as ASCII: each raw byte is masked to one char.
    if (((buffer[bufpos] = c = (char)((char)0xff & ReadByte())) == '\\'))
    {
      UpdateLineColumn(c);

      int backSlashCnt = 1;

      for (;;) // Read all the backslashes
      {
        if (++bufpos == available)
          AdjustBuffSize();

        try
        {
          if ((buffer[bufpos] = c = (char)((char)0xff & ReadByte())) != '\\')
          {
            UpdateLineColumn(c);
            // found a non-backslash char.
            if ((c == 'u') && ((backSlashCnt & 1) == 1))
            {
              // An odd number of backslashes followed by 'u' starts an escape.
              if (--bufpos < 0)
                bufpos = bufsize - 1;

              break;
            }

            // Not an escape: give back everything after the first backslash.
            backup(backSlashCnt);
            return '\\';
          }
        }
        catch(java.io.IOException e)
        {
          // Input ended inside the run of backslashes: report them one by one.
          if (backSlashCnt > 1)
            backup(backSlashCnt);

          return '\\';
        }

        UpdateLineColumn(c);
        backSlashCnt++;
      }

      // Here, we have seen an odd number of backslash's followed by a 'u'
      try
      {
        // Multiple 'u's are permitted in an escape; skip the extras.
        while ((c = (char)((char)0xff & ReadByte())) == 'u')
          ++column;

        // Assemble the character from the four hex digits.
        buffer[bufpos] = c = (char)(hexval(c) << 12 |
                                    hexval((char)((char)0xff & ReadByte())) << 8 |
                                    hexval((char)((char)0xff & ReadByte())) << 4 |
                                    hexval((char)((char)0xff & ReadByte())));

        column += 4;
      }
      catch(java.io.IOException e)
      {
        throw new Error("Invalid escape character at line " + line +
                        " column " + column + ".");
      }

      if (backSlashCnt == 1)
        return c;
      else
      {
        // The escape was preceded by extra backslash pairs: report those first.
        backup(backSlashCnt - 1);
        return '\\';
      }
    }
    else
    {
      UpdateLineColumn(c);
      return (c);
    }
  }
/**
 * Returns the column of the character at the current buffer position.
 *
 * @deprecated use {@link #getEndColumn} instead.
 * @see #getEndColumn
 */
@Deprecated
public final int getColumn() {
return bufcolumn[bufpos];
}
/**
 * Returns the line of the character at the current buffer position.
 *
 * @deprecated use {@link #getEndLine} instead.
 * @see #getEndLine
 */
@Deprecated
public final int getLine() {
return bufline[bufpos];
}
// End position of the current token: line/column recorded for bufpos.
public final int getEndColumn() {
return bufcolumn[bufpos];
}
public final int getEndLine() {
return bufline[bufpos];
}
// Start position of the current token: line/column recorded at tokenBegin.
public final int getBeginColumn() {
return bufcolumn[tokenBegin];
}
public final int getBeginLine() {
return bufline[tokenBegin];
}
// Pushes back the last 'amount' characters so the next reads re-deliver
// them from the buffer; bufpos moves backwards with circular wrap-around.
public final void backup(int amount) {
inBuf += amount;
if ((bufpos -= amount) < 0)
bufpos += bufsize;
}
/**
 * Creates a char stream over dstream starting at the given 1-based line
 * and column, with a circular character buffer of 'buffersize' chars.
 * The raw byte look-ahead buffer is always 4096 bytes.
 */
public ASCII_UCodeESC_CharStream(java.io.InputStream dstream,
int startline, int startcolumn, int buffersize)
{
inputStream = dstream;
line = startline;
// column is pre-decremented because UpdateLineColumn increments first
column = startcolumn - 1;
available = bufsize = buffersize;
buffer = new char[buffersize];
bufline = new int[buffersize];
bufcolumn = new int[buffersize];
nextCharBuf = new byte[4096];
}
/**
 * Convenience constructor using the default 4096-char buffer size.
 */
public ASCII_UCodeESC_CharStream(java.io.InputStream dstream,
int startline, int startcolumn)
{
this(dstream, startline, startcolumn, 4096);
}
/**
 * Re-initializes this stream over a new input, resetting all position
 * state. The internal buffers are reallocated only when the requested
 * buffersize differs from the current one, so reuse is allocation-free.
 */
public void ReInit(java.io.InputStream dstream,
int startline, int startcolumn, int buffersize)
{
inputStream = dstream;
line = startline;
column = startcolumn - 1;
if (buffer == null || buffersize != buffer.length)
{
available = bufsize = buffersize;
buffer = new char[buffersize];
bufline = new int[buffersize];
bufcolumn = new int[buffersize];
nextCharBuf = new byte[4096];
}
prevCharIsLF = prevCharIsCR = false;
tokenBegin = inBuf = maxNextCharInd = 0;
nextCharInd = bufpos = -1;
}
/**
 * Re-initializes this stream with the default 4096-char buffer size.
 */
public void ReInit(java.io.InputStream dstream,
int startline, int startcolumn)
{
ReInit(dstream, startline, startcolumn, 4096);
}
// Returns the text of the current token. When the token wraps around the
// end of the circular buffer, the two segments are concatenated.
public final String GetImage()
{
if (bufpos >= tokenBegin)
return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
else
return new String(buffer, tokenBegin, bufsize - tokenBegin) +
new String(buffer, 0, bufpos + 1);
}
// Returns the last 'len' characters read, handling wrap-around of the
// circular buffer by copying the tail and head segments separately.
public final char[] GetSuffix(int len)
{
char[] ret = new char[len];
if ((bufpos + 1) >= len)
System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
else
{
System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
len - bufpos - 1);
System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1);
}
return ret;
}
// Releases the internal buffers for garbage collection. Note: the
// underlying input stream is NOT closed here.
public void Done()
{
nextCharBuf = null;
buffer = null;
bufline = null;
bufcolumn = null;
}
/**
 * Method to adjust line and column numbers for the start of a token.<BR>
 * Rewrites the recorded bufline/bufcolumn entries for every character of
 * the current token (including pushed-back ones) so the token appears to
 * start at (newLine, newCol), preserving the original relative column
 * offsets while the characters stay on the same recorded line, and
 * incrementing newLine at each recorded line change thereafter.
 */
public void adjustBeginLineColumn(int newLine, int newCol)
{
int start = tokenBegin;
int len;
// number of buffered positions belonging to the current token
if (bufpos >= tokenBegin)
{
len = bufpos - tokenBegin + inBuf + 1;
}
else
{
// token wraps around the end of the circular buffer
len = bufsize - tokenBegin + bufpos + 1 + inBuf;
}
int i = 0, j = 0, k = 0;
int nextColDiff = 0, columnDiff = 0;
// first run: positions still on the token's original first line keep
// their relative column spacing from newCol
while (i < len &&
bufline[j = start % bufsize] == bufline[k = ++start % bufsize])
{
bufline[j] = newLine;
nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j];
bufcolumn[j] = newCol + columnDiff;
columnDiff = nextColDiff;
i++;
}
if (i < len)
{
// remaining positions: bump newLine at every recorded line change
bufline[j] = newLine++;
bufcolumn[j] = newCol + columnDiff;
while (i++ < len)
{
if (bufline[j = start % bufsize] != bufline[++start % bufsize])
bufline[j] = newLine++;
else
bufline[j] = newLine;
}
}
// resume normal tracking from the last adjusted position
line = bufline[j];
column = bufcolumn[j];
}
}
| |
/*
* Copyright (c) 2015 Nova Ordis LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.novaordis.gld.api.sampler;
import io.novaordis.gld.api.Operation;
import io.novaordis.gld.api.sampler.metrics.Metric;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * Static utilities related to sampling intervals.
 */
public class SamplingIntervalUtil {
// Constants -------------------------------------------------------------------------------------------------------
private static final Logger log = LoggerFactory.getLogger(SamplingIntervalUtil.class);
// Static ----------------------------------------------------------------------------------------------------------
/**
 * Distribute the values from the given SamplingInterval to 'extraSamples' more successive same-length intervals
 * (so the total number of returned intervals will be extraSamples + 1). It is possible - and quite probable, if the
 * sampling run periodicity is well chosen relative to the sampling interval - that extraSamples is 0.
 *
 * If extraSamples is 0, it simply returns the instance it was passed (wrapped in a one-element array).
 *
 * Counters are distributed evenly (integer division by n) over the first n-1 intervals; the last interval
 * receives the remainder so the totals are preserved exactly. All annotations go to the first interval, and
 * the recorded metrics are copied to every interval.
 *
 * TODO: too complex, needs refactoring
 * TODO: the extrapolation algorithm can be improved by making the distribution more precise as we scan the
 * interval list. Now we distribute evenly across all intervals and rounding errors add up to back-load the
 * last interval.
 *
 * @param recorded the interval whose counters are to be spread out; must not be null.
 * @param extraSamples number of additional intervals to create (>= 0).
 * @return an array of extraSamples + 1 intervals covering successive time ranges of the same duration.
 * @throws IllegalArgumentException if recorded is null.
 */
public static SamplingInterval[] extrapolate(SamplingInterval recorded, int extraSamples) {
if (recorded == null) {
throw new IllegalArgumentException("null sampling interval");
}
if (extraSamples == 0) {
return new SamplingInterval[] { recorded };
}
// total number of samples
int n = extraSamples + 1;
SamplingIntervalImpl[] result = new SamplingIntervalImpl[n];
long duration = recorded.getDurationMs();
long start = recorded.getStartMs();
boolean annotationsProcessed = false;
// metrics should propagate the same values
Set<Metric> metrics = recorded.getMetrics();
for(Class<? extends Operation> ot : recorded.getOperationTypes()) {
CounterValues valuesToBeDistributed = recorded.getCounterValues(ot);
// running totals of what has been handed out so far, so the last
// interval can be given the exact remainder
long successCount = 0L;
long successCumulatedDuration = 0L;
Set<Class<? extends Throwable>> failureTypes = valuesToBeDistributed.getFailureTypes();
Map<Class<? extends Throwable>, Long> failureCount = zeroInitializedFailureCounterMap(failureTypes);
Map<Class<? extends Throwable>, Long> failureCumulatedDuration = zeroInitializedFailureCounterMap(failureTypes);
for(int i = 0; i < n; i ++) {
SamplingIntervalImpl si = result[i];
if (si == null) {
// lazily create interval i on the first operation type pass;
// successive intervals cover successive time ranges
si = new SamplingIntervalImpl(start, duration, recorded.getOperationTypes());
si.setMetrics(new HashSet<>(metrics)); // make a copy of the metrics set
result[i] = si;
start += duration;
}
if ((i == 0) && !annotationsProcessed) {
annotationsProcessed = true;
// place all annotations in the first sampling interval
for(String a: recorded.getAnnotations()) {
si.addAnnotation(a);
}
}
// this interval's share of the counters
long sc;
long scd = 0L;
Map<Class<? extends Throwable>, Long> fc = zeroInitializedFailureCounterMap(failureTypes);
Map<Class<? extends Throwable>, Long> fcd = zeroInitializedFailureCounterMap(failureTypes);
if (i != n - 1) {
// even share (integer division); rounding losses accumulate and
// are absorbed by the last interval below
sc = valuesToBeDistributed.getSuccessCount() / n;
successCount += sc;
if (sc != 0) {
scd = valuesToBeDistributed.getSuccessCumulatedDurationNano() / n;
successCumulatedDuration += scd;
}
for(Class<? extends Throwable> ft: failureTypes) {
fc.put(ft, valuesToBeDistributed.getFailureCount(ft) / n);
failureCount.put(ft, failureCount.get(ft) + fc.get(ft));
if (fc.get(ft) != 0L) {
fcd.put(ft, valuesToBeDistributed.getFailureCumulatedDurationNano(ft) / n);
failureCumulatedDuration.put(ft, failureCumulatedDuration.get(ft) + fcd.get(ft));
}
}
}
else {
// last sampling interval
sc = valuesToBeDistributed.getSuccessCount() - successCount;
scd = valuesToBeDistributed.getSuccessCumulatedDurationNano() - successCumulatedDuration;
for(Class<? extends Throwable> ft: failureTypes) {
fc.put(ft, valuesToBeDistributed.getFailureCount(ft) - failureCount.get(ft));
fcd.put(ft, valuesToBeDistributed.getFailureCumulatedDurationNano(ft) - failureCumulatedDuration.get(ft));
}
}
Map<Class<? extends Throwable>, ImmutableFailureCounter> failures = new HashMap<>();
for(Class<? extends Throwable> ft: failureTypes) {
ImmutableFailureCounter ifc = new ImmutableFailureCounter(fc.get(ft), fcd.get(ft));
failures.put(ft, ifc);
}
CounterValuesImpl cv = new CounterValuesImpl(sc, scd, failures);
si.setCounterValues(ot, cv);
}
}
return result;
}
/**
 * Instantiates one Metric of each given type via its no-arg constructor.
 * Types that cannot be instantiated are logged and skipped, so the
 * result may contain fewer elements than metricTypes.
 */
public static Set<Metric> snapshotMetrics(Set<Class<? extends Metric>> metricTypes) {
Set<Metric> result = new HashSet<>();
for(Class<? extends Metric> mt: metricTypes) {
try {
Metric m = mt.newInstance();
result.add(m);
}
catch(Exception e) {
log.warn("could not create Metric instance from " + mt, e);
}
}
return result;
}
// Attributes ------------------------------------------------------------------------------------------------------
// Constructors ----------------------------------------------------------------------------------------------------
// utility class; not instantiable
private SamplingIntervalUtil() {
}
// Public ----------------------------------------------------------------------------------------------------------
// Package protected -----------------------------------------------------------------------------------------------
// Protected -------------------------------------------------------------------------------------------------------
// Private ---------------------------------------------------------------------------------------------------------
// Builds a map with every given failure type mapped to 0L, so callers can
// accumulate with get()+put() without null checks.
private static Map<Class<? extends Throwable>, Long> zeroInitializedFailureCounterMap(
Set<Class<? extends Throwable>> failureTypes ) {
Map<Class<? extends Throwable>, Long> result = new HashMap<>();
for(Class<? extends Throwable> ft: failureTypes) {
result.put(ft, 0L);
}
return result;
}
// Inner classes ---------------------------------------------------------------------------------------------------
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.protonj2.buffer;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
/**
 * Adapter that exposes a Netty {@link ByteBuf} through the {@link ProtonBuffer}
 * interface. Nearly all accessors delegate directly to the wrapped buffer;
 * only the cross-implementation bulk transfers and the comparison / equality
 * logic contain local logic.
 */
public final class ProtonNettyByteBuffer implements ProtonBuffer {

    private final ByteBuf wrapped;

    /**
     * Creates a wrapper around the given Netty buffer.
     *
     * @param toWrap the buffer to adapt to the ProtonBuffer API.
     */
    public ProtonNettyByteBuffer(ByteBuf toWrap) {
        this.wrapped = toWrap;
    }

    /**
     * Creates a wrapper around a new Netty heap buffer with an initial
     * capacity of 1024 bytes and the given maximum capacity.
     *
     * @param maximumCapacity the limit the underlying buffer may grow to.
     */
    public ProtonNettyByteBuffer(int maximumCapacity) {
        wrapped = Unpooled.buffer(1024, maximumCapacity);
    }

    @Override
    public ByteBuf unwrap() {
        return wrapped;
    }

    @Override
    public int capacity() {
        return wrapped.capacity();
    }

    @Override
    public ProtonBuffer capacity(int newCapacity) {
        wrapped.capacity(newCapacity);
        return this;
    }

    @Override
    public ProtonBuffer clear() {
        wrapped.clear();
        return this;
    }

    /**
     * Lexicographic unsigned byte-wise comparison of the readable regions of
     * the two buffers. If one region is a prefix of the other, the shorter
     * buffer sorts first.
     */
    @Override
    public int compareTo(ProtonBuffer other) {
        int length = getReadIndex() + Math.min(getReadableBytes(), other.getReadableBytes());
        // FIX: 'j' must start at the *other* buffer's read index (it previously
        // started at this buffer's read index, so two buffers with different
        // read offsets were compared at the wrong positions).
        for (int i = getReadIndex(), j = other.getReadIndex(); i < length; i++, j++) {
            int cmp = Integer.compare(getByte(i) & 0xFF, other.getByte(j) & 0xFF);
            if (cmp != 0) {
                return cmp;
            }
        }
        return getReadableBytes() - other.getReadableBytes();
    }

    @Override
    public ProtonBuffer copy() {
        return new ProtonNettyByteBuffer(wrapped.copy());
    }

    @Override
    public ProtonBuffer copy(int index, int length) {
        return new ProtonNettyByteBuffer(wrapped.copy(index, length));
    }

    @Override
    public ProtonBuffer duplicate() {
        return new ProtonNettyByteBuffer(wrapped.duplicate());
    }

    @Override
    public ProtonBuffer ensureWritable(int minWritableBytes) throws IndexOutOfBoundsException, IllegalArgumentException {
        wrapped.ensureWritable(minWritableBytes);
        return this;
    }

    @Override
    public byte[] getArray() {
        return wrapped.array();
    }

    @Override
    public int getArrayOffset() {
        return wrapped.arrayOffset();
    }

    @Override
    public boolean getBoolean(int index) {
        return wrapped.getBoolean(index);
    }

    @Override
    public byte getByte(int index) {
        return wrapped.getByte(index);
    }

    @Override
    public ProtonBuffer getBytes(int index, byte[] destination) {
        wrapped.getBytes(index, destination);
        return this;
    }

    @Override
    public ProtonBuffer getBytes(int index, ByteBuffer destination) {
        wrapped.getBytes(index, destination);
        return this;
    }

    /**
     * Copies as many bytes as the destination can hold and advances the
     * destination's write index accordingly.
     */
    @Override
    public ProtonBuffer getBytes(int index, ProtonBuffer destination) {
        int length = destination.getWritableBytes();
        getBytes(index, destination, destination.getWriteIndex(), length);
        destination.setWriteIndex(destination.getWriteIndex() + length);
        return this;
    }

    @Override
    public ProtonBuffer getBytes(int index, ProtonBuffer destination, int length) {
        getBytes(index, destination, destination.getWriteIndex(), length);
        destination.setWriteIndex(destination.getWriteIndex() + length);
        return this;
    }

    /**
     * Bulk copy into an arbitrary ProtonBuffer implementation, choosing the
     * fastest available path: destination array, own array, unwrap of another
     * Netty-backed wrapper, or a byte-by-byte fallback.
     */
    @Override
    public ProtonBuffer getBytes(int index, ProtonBuffer destination, int offset, int length) {
        if (destination.hasArray()) {
            wrapped.getBytes(index, destination.getArray(), destination.getArrayOffset() + offset, length);
        } else if (hasArray()) {
            destination.setBytes(offset, getArray(), getArrayOffset() + index, length);
        } else if (destination instanceof ProtonNettyByteBuffer) {
            ProtonNettyByteBuffer wrapper = (ProtonNettyByteBuffer) destination;
            wrapped.getBytes(index, wrapper.unwrap(), offset, length);
        } else {
            checkDestinationIndex(index, length, offset, destination.capacity());
            for (int i = 0; i < length; ++i) {
                destination.setByte(offset + i, wrapped.getByte(index + i));
            }
        }
        return this;
    }

    @Override
    public ProtonBuffer getBytes(int index, byte[] destination, int offset, int length) {
        wrapped.getBytes(index, destination, offset, length);
        return this;
    }

    @Override
    public char getChar(int index) {
        return wrapped.getChar(index);
    }

    @Override
    public double getDouble(int index) {
        return wrapped.getDouble(index);
    }

    @Override
    public float getFloat(int index) {
        return wrapped.getFloat(index);
    }

    @Override
    public int getInt(int index) {
        return wrapped.getInt(index);
    }

    @Override
    public long getLong(int index) {
        return wrapped.getLong(index);
    }

    @Override
    public int getReadIndex() {
        return wrapped.readerIndex();
    }

    @Override
    public int getReadableBytes() {
        return wrapped.readableBytes();
    }

    @Override
    public short getShort(int index) {
        return wrapped.getShort(index);
    }

    @Override
    public short getUnsignedByte(int index) {
        return wrapped.getUnsignedByte(index);
    }

    @Override
    public long getUnsignedInt(int index) {
        return wrapped.getUnsignedInt(index);
    }

    @Override
    public int getUnsignedShort(int index) {
        return wrapped.getUnsignedShort(index);
    }

    @Override
    public int getWritableBytes() {
        return wrapped.writableBytes();
    }

    @Override
    public int getMaxWritableBytes() {
        return wrapped.maxWritableBytes();
    }

    @Override
    public int getWriteIndex() {
        return wrapped.writerIndex();
    }

    @Override
    public boolean hasArray() {
        return wrapped.hasArray();
    }

    @Override
    public boolean isReadable() {
        return wrapped.isReadable();
    }

    @Override
    public boolean isReadable(int minReadableBytes) {
        return wrapped.isReadable(minReadableBytes);
    }

    @Override
    public boolean isWritable() {
        return wrapped.isWritable();
    }

    @Override
    public boolean isWritable(int minWritableBytes) {
        return wrapped.isWritable(minWritableBytes);
    }

    @Override
    public ProtonBuffer markReadIndex() {
        wrapped.markReaderIndex();
        return this;
    }

    @Override
    public ProtonBuffer markWriteIndex() {
        wrapped.markWriterIndex();
        return this;
    }

    @Override
    public int maxCapacity() {
        return wrapped.maxCapacity();
    }

    @Override
    public boolean readBoolean() {
        return wrapped.readBoolean();
    }

    @Override
    public byte readByte() {
        return wrapped.readByte();
    }

    @Override
    public ProtonBuffer readBytes(byte[] destination) {
        wrapped.readBytes(destination);
        return this;
    }

    @Override
    public ProtonBuffer readBytes(ByteBuffer destination) {
        wrapped.readBytes(destination);
        return this;
    }

    @Override
    public ProtonBuffer readBytes(byte[] destination, int length) {
        wrapped.readBytes(destination, 0, length);
        return this;
    }

    @Override
    public ProtonBuffer readBytes(byte[] destination, int offset, int length) {
        wrapped.readBytes(destination, offset, length);
        return this;
    }

    @Override
    public ProtonBuffer readBytes(ProtonBuffer destination) {
        readBytes(destination, destination.getWritableBytes());
        return this;
    }

    @Override
    public ProtonBuffer readBytes(ProtonBuffer destination, int length) {
        if (length > destination.getWritableBytes()) {
            throw new IndexOutOfBoundsException(String.format(
                "length(%d) exceeds target Writable Bytes:(%d), target is: %s", length, destination.getWritableBytes(), destination));
        }
        readBytes(destination, destination.getWriteIndex(), length);
        destination.setWriteIndex(destination.getWriteIndex() + length);
        return this;
    }

    @Override
    public ProtonBuffer readBytes(ProtonBuffer destination, int offset, int length) {
        checkReadableBytes(length);
        getBytes(wrapped.readerIndex(), destination, offset, length);
        wrapped.skipBytes(length);
        return this;
    }

    @Override
    public double readDouble() {
        return wrapped.readDouble();
    }

    @Override
    public float readFloat() {
        return wrapped.readFloat();
    }

    @Override
    public int readInt() {
        return wrapped.readInt();
    }

    @Override
    public long readLong() {
        return wrapped.readLong();
    }

    @Override
    public short readShort() {
        return wrapped.readShort();
    }

    @Override
    public ProtonBuffer resetReadIndex() {
        wrapped.resetReaderIndex();
        return this;
    }

    @Override
    public ProtonBuffer resetWriteIndex() {
        wrapped.resetWriterIndex();
        return this;
    }

    @Override
    public ProtonBuffer setBoolean(int index, boolean value) {
        wrapped.setBoolean(index, value);
        return this;
    }

    @Override
    public ProtonBuffer setByte(int index, int value) {
        wrapped.setByte(index, value);
        return this;
    }

    @Override
    public ProtonBuffer setBytes(int index, byte[] value) {
        wrapped.setBytes(index, value);
        return this;
    }

    @Override
    public ProtonBuffer setBytes(int index, ByteBuffer value) {
        wrapped.setBytes(index, value);
        return this;
    }

    @Override
    public ProtonBuffer setBytes(int index, ProtonBuffer value) {
        return setBytes(index, value, value.getReadableBytes());
    }

    @Override
    public ProtonBuffer setBytes(int index, ProtonBuffer value, int length) {
        checkIndex(index, length);
        if (value == null) {
            throw new NullPointerException("src");
        }
        if (length > value.getReadableBytes()) {
            throw new IndexOutOfBoundsException(String.format(
                "length(%d) exceeds source buffer Readable Bytes(%d), source is: %s", length, value.getReadableBytes(), value));
        }
        setBytes(index, value, value.getReadIndex(), length);
        value.setReadIndex(value.getReadIndex() + length);
        return this;
    }

    /**
     * Bulk copy from an arbitrary ProtonBuffer implementation, mirroring the
     * fast-path selection in {@link #getBytes(int, ProtonBuffer, int, int)}.
     */
    @Override
    public ProtonBuffer setBytes(int index, ProtonBuffer value, int offset, int length) {
        if (value instanceof ProtonNettyByteBuffer) {
            wrapped.setBytes(index, (ByteBuf) value.unwrap(), offset, length);
        } else if (value.hasArray()) {
            wrapped.setBytes(index, value.getArray(), value.getArrayOffset() + offset, length);
        } else if (hasArray()) {
            value.getBytes(offset, getArray(), getArrayOffset() + index, length);
        } else {
            checkSourceIndex(index, length, offset, value.capacity());
            for (int i = 0; i < length; ++i) {
                wrapped.setByte(index + i, value.getByte(offset + i));
            }
        }
        return this;
    }

    @Override
    public ProtonBuffer setBytes(int index, byte[] value, int offset, int length) {
        wrapped.setBytes(index, value, offset, length);
        return this;
    }

    @Override
    public ProtonBuffer setChar(int index, int value) {
        wrapped.setChar(index, value);
        return this;
    }

    @Override
    public ProtonBuffer setDouble(int index, double value) {
        wrapped.setDouble(index, value);
        return this;
    }

    @Override
    public ProtonBuffer setFloat(int index, float value) {
        wrapped.setFloat(index, value);
        return this;
    }

    @Override
    public ProtonBuffer setIndex(int readIndex, int writeIndex) {
        wrapped.setIndex(readIndex, writeIndex);
        return this;
    }

    @Override
    public ProtonBuffer setInt(int index, int value) {
        wrapped.setInt(index, value);
        return this;
    }

    @Override
    public ProtonBuffer setLong(int index, long value) {
        wrapped.setLong(index, value);
        return this;
    }

    @Override
    public ProtonBuffer setReadIndex(int index) {
        wrapped.readerIndex(index);
        return this;
    }

    @Override
    public ProtonBuffer setShort(int index, int value) {
        wrapped.setShort(index, value);
        return this;
    }

    @Override
    public ProtonBuffer setWriteIndex(int index) {
        wrapped.writerIndex(index);
        return this;
    }

    @Override
    public ProtonBuffer skipBytes(int skippedBytes) {
        wrapped.skipBytes(skippedBytes);
        return this;
    }

    @Override
    public ProtonBuffer slice() {
        return new ProtonNettyByteBuffer(wrapped.slice());
    }

    @Override
    public ProtonBuffer slice(int index, int length) {
        return new ProtonNettyByteBuffer(wrapped.slice(index, length));
    }

    @Override
    public ByteBuffer toByteBuffer() {
        return wrapped.nioBuffer();
    }

    @Override
    public ByteBuffer toByteBuffer(int index, int length) {
        return wrapped.nioBuffer(index, length);
    }

    @Override
    public String toString() {
        return wrapped.toString();
    }

    @Override
    public String toString(Charset charset) {
        return wrapped.toString(charset);
    }

    @Override
    public ProtonBuffer writeBoolean(boolean value) {
        wrapped.writeBoolean(value);
        return this;
    }

    @Override
    public ProtonBuffer writeByte(int value) {
        wrapped.writeByte(value);
        return this;
    }

    @Override
    public ProtonBuffer writeBytes(ByteBuffer value) {
        wrapped.writeBytes(value);
        return this;
    }

    @Override
    public ProtonBuffer writeBytes(byte[] value) {
        wrapped.writeBytes(value);
        return this;
    }

    @Override
    public ProtonBuffer writeBytes(byte[] value, int length) {
        wrapped.writeBytes(value, 0, length);
        return this;
    }

    @Override
    public ProtonBuffer writeBytes(byte[] array, int offset, int length) {
        wrapped.writeBytes(array, offset, length);
        return this;
    }

    @Override
    public ProtonBuffer writeBytes(ProtonBuffer value) {
        return writeBytes(value, value.getReadableBytes());
    }

    @Override
    public ProtonBuffer writeBytes(ProtonBuffer value, int length) {
        if (length > value.getReadableBytes()) {
            throw new IndexOutOfBoundsException(String.format(
                "length(%d) exceeds source Readable Bytes(%d), source is: %s", length, value.getReadableBytes(), value));
        }
        writeBytes(value, value.getReadIndex(), length);
        value.skipBytes(length);
        return this;
    }

    @Override
    public ProtonBuffer writeBytes(ProtonBuffer value, int offset, int length) {
        ensureWritable(length);
        setBytes(wrapped.writerIndex(), value, offset, length);
        wrapped.writerIndex(wrapped.writerIndex() + length);
        return this;
    }

    @Override
    public ProtonBuffer writeDouble(double value) {
        wrapped.writeDouble(value);
        return this;
    }

    @Override
    public ProtonBuffer writeFloat(float value) {
        wrapped.writeFloat(value);
        return this;
    }

    @Override
    public ProtonBuffer writeInt(int value) {
        wrapped.writeInt(value);
        return this;
    }

    @Override
    public ProtonBuffer writeLong(long value) {
        wrapped.writeLong(value);
        return this;
    }

    @Override
    public ProtonBuffer writeShort(short value) {
        wrapped.writeShort(value);
        return this;
    }

    @Override
    public int hashCode() {
        // Netty's ByteBuf hash is computed over the readable bytes, which is
        // consistent with the content-based equals below.
        return wrapped.hashCode();
    }

    /**
     * Two ProtonBuffers are equal when their readable regions contain the
     * same bytes; the read offsets themselves are not significant.
     */
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (!(other instanceof ProtonBuffer)) {
            return false;
        }
        ProtonBuffer that = (ProtonBuffer) other;
        if (this.getReadableBytes() != that.getReadableBytes()) {
            return false;
        }
        // FIX: compare relative to each buffer's own read index. The previous
        // loop iterated absolute indices [readableBytes - 1 .. readIndex],
        // which compared the wrong bytes (or none at all, yielding a false
        // positive) whenever either buffer had a non-zero read index.
        int thisIndex = getReadIndex();
        int thatIndex = that.getReadIndex();
        for (int n = getReadableBytes() - 1; n >= 0; n--) {
            if (getByte(thisIndex + n) != that.getByte(thatIndex + n)) {
                return false;
            }
        }
        return true;
    }

    //----- Internal Bounds Checking Utilities

    /**
     * Validates that the buffer holds at least the given number of readable
     * bytes, rejecting negative requests.
     */
    protected final void checkReadableBytes(int minimumReadableBytes) {
        if (minimumReadableBytes < 0) {
            throw new IllegalArgumentException("minimumReadableBytes: " + minimumReadableBytes + " (expected: >= 0)");
        }
        internalCheckReadableBytes(minimumReadableBytes);
    }

    private void internalCheckReadableBytes(int minimumReadableBytes) {
        // Called when we know that we don't need to validate if the minimum readable
        // value is negative.
        if (wrapped.readerIndex() > wrapped.writerIndex() - minimumReadableBytes) {
            throw new IndexOutOfBoundsException(String.format(
                "readIndex(%d) + length(%d) exceeds writeIndex(%d): %s",
                wrapped.readerIndex(), minimumReadableBytes, wrapped.writerIndex(), this));
        }
    }

    // The bitwise OR trick detects any negative operand or overflowing sum in
    // a single branch-free test.
    protected static boolean isOutOfBounds(int index, int length, int capacity) {
        return (index | length | (index + length) | (capacity - (index + length))) < 0;
    }

    protected final void checkIndex(int index, int fieldLength) {
        if (isOutOfBounds(index, fieldLength, capacity())) {
            throw new IndexOutOfBoundsException(String.format(
                "index: %d, length: %d (expected: range(0, %d))", index, fieldLength, capacity()));
        }
    }

    protected final void checkSourceIndex(int index, int length, int srcIndex, int srcCapacity) {
        checkIndex(index, length);
        if (isOutOfBounds(srcIndex, length, srcCapacity)) {
            throw new IndexOutOfBoundsException(String.format(
                "srcIndex: %d, length: %d (expected: range(0, %d))", srcIndex, length, srcCapacity));
        }
    }

    protected final void checkDestinationIndex(int index, int length, int dstIndex, int dstCapacity) {
        checkIndex(index, length);
        if (isOutOfBounds(dstIndex, length, dstCapacity)) {
            throw new IndexOutOfBoundsException(String.format(
                "dstIndex: %d, length: %d (expected: range(0, %d))", dstIndex, length, dstCapacity));
        }
    }
}
| |
package jeffaschenk.commons.frameworks.cnxidx.utility.ldap;
import java.io.BufferedReader;
import java.io.IOException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.BasicAttribute;
import javax.naming.directory.BasicAttributes;
/**
 * Java Class to process and read as input an LDIF file or stream.
 * Functionality for LDIF version detection,
 * comments and other enhancements and fixes applied for compliance with
 * IETF RFC2849 - LDAP Data Interchange Format.
 *
 * Change-log comment extensions ("# Change Type:[...]", "# DN:[...]" and
 * "# OLD DN:[...]") are surfaced as the synthetic attributes
 * IDXCHANGETYPE, IDXCHANGETYPEDN and IDXCHANGETYPEOLDDN.
 *
 * @author jeff.schenk
 * @version 1.0 $Revision
 * Developed 2001
 */
public class idxChangeLogLDIFReader {
    // True until the underlying reader has been exhausted.
    private boolean hasMore = true;
    // DN of the entry most recently returned by getNextEntry().
    private String myCurrent_DN = null;
    // Source of LDIF text; not closed by this class.
    private BufferedReader myin = null;
    // Version string taken from an optional leading "version: N" line.
    private String LDIFVersion = "";

    /**
     * Initial Constructor.
     *
     * @param in BufferedReader over the LDIF input.
     */
    public idxChangeLogLDIFReader(BufferedReader in) {
        myin = in;
    } // End of Constructor.

    /**
     * Obtains the next LDIF Entry found in our BufferedReader
     * input. Will return an Attributes Object, containing all
     * of the Attributes for the obtained entry.
     *
     * Returns null when the input is exhausted (hasMore() then reports false).
     *
     * @return Attributes All Attributes for the current entry, or null at EOF.
     * @throws java.io.IOException if problems reading BufferedReader
     */
    public Attributes getNextEntry() throws IOException {
        String inputline = null;
        String attributeName = null;
        StringBuffer entry_value = null;
        StringBuffer entry_dn = null;
        Attributes entry = null;
        boolean entry_value_encoded = false;
        int position;
        int from;
        int to;
        boolean First_Line = true;
        // *******************************************
        // Process the Incoming LDIF Data.
        while ((inputline = myin.readLine()) != null) {
            // *********************************************
            // If this is our first line, check for a version
            // indication.
            if (First_Line) {
                if ((inputline.length() > 8) &&
                        ("version".equalsIgnoreCase(inputline.substring(0, 7)))) {
                    // ****************************
                    // Got a Version Line
                    // So obtain the Version.
                    LDIFVersion = inputline.substring(8).trim();
                    // **********************
                    // Continue on....
                    First_Line = false;
                    continue;
                }
                First_Line = false;
            } // End of First Line.
            // *********************************************
            // If a Comment, check to see if the comment
            // is an Incremental LOG Change Comment.
            // If it is change the operation.
            //
            if ((inputline.length() != 0) && (inputline.charAt(0) == '#')) {
                if (inputline.startsWith("# Change Type:[")) {
                    attributeName = "IDXCHANGETYPE";
                    int lix = inputline.lastIndexOf(']');
                    if (lix <= 0) {
                        continue; // malformed marker, ignore
                    }
                    String evalue = inputline.substring(15, lix);
                    if (entry == null) {
                        entry = new BasicAttributes(true);
                    }
                    add(entry, attributeName, evalue, false);
                    attributeName = null;
                    entry_value = null;
                } else if (inputline.startsWith("# DN:[")) {
                    attributeName = "IDXCHANGETYPEDN";
                    int lix = inputline.lastIndexOf(']');
                    if (lix <= 0) {
                        continue;
                    }
                    String evalue = inputline.substring(6, lix);
                    if (entry == null) {
                        entry = new BasicAttributes(true);
                    }
                    add(entry, attributeName, evalue, false);
                    attributeName = null;
                    entry_value = null;
                    // the change-log DN also serves as the entry's DN
                    entry_dn = new StringBuffer(evalue);
                } else if (inputline.startsWith("# OLD DN:[")) {
                    attributeName = "IDXCHANGETYPEOLDDN";
                    int lix = inputline.lastIndexOf(']');
                    if (lix <= 0) {
                        continue;
                    }
                    String evalue = inputline.substring(10, lix);
                    if (entry == null) {
                        entry = new BasicAttributes(true);
                    }
                    add(entry, attributeName, evalue, false);
                    attributeName = null;
                    entry_value = null;
                }
                continue;
            }
            // *************************************************
            // Do I have a new Attribute and not a Continuation?
            position = inputline.indexOf(":");
            if (position != -1 && inputline.charAt(0) != ' ') {
                // flush the previously accumulated attribute, if any
                if (attributeName != null && entry_value != null) {
                    if (entry == null) {
                        entry = new BasicAttributes(true);
                    }
                    add(entry, attributeName,
                            entry_value.toString(), entry_value_encoded);
                } // End of if not null attribute or value.
                to = position;
                from = position + 1;
                if (inputline.length() > from) {
                    if (inputline.charAt(from) == ':') {
                        // "attr:: value" marks a base64-encoded value
                        entry_value_encoded = true;
                        from++;
                    } else {
                        entry_value_encoded = false;
                    } // End of Else
                    // FIX: guard the separator-space check; a line ending in
                    // "::" with no value previously indexed past end of line.
                    if (from < inputline.length() && inputline.charAt(from) == ' ') {
                        from++;
                    }
                    attributeName = inputline.substring(0, to).toLowerCase();
                    entry_value = new StringBuffer(inputline.substring(from));
                } else {
                    attributeName = inputline.substring(0, to).toLowerCase();
                    entry_value = new StringBuffer("");
                    // FIX: reset the flag; a value-less "attr:" line used to
                    // inherit the previous attribute's base64 encoding flag.
                    entry_value_encoded = false;
                }
                // the first attribute line of an entry is its DN
                if (entry_dn == null) {
                    entry_dn = entry_value;
                    attributeName = null;
                } // End of If.
                // *************************************************
                // Do I have an Entry Seperator Line?
            } else if (inputline.length() == 0) {
                if (attributeName != null && entry_value != null) {
                    if (entry == null) {
                        entry = new BasicAttributes(true);
                    }
                    add(entry, attributeName,
                            entry_value.toString(), entry_value_encoded);
                    attributeName = null;
                    entry_value = null;
                } // End of If.
                // has no dn?
                if (entry_dn == null) {
                    continue;
                }
                myCurrent_DN = entry_dn.toString();
                return (entry);
            } else if (inputline.charAt(0) == ' ') {
                // continuation line: append to the value being accumulated.
                // FIX: ignore a continuation with no preceding attribute
                // instead of throwing a NullPointerException.
                if (entry_value != null) {
                    entry_value.append(inputline.substring(1));
                }
            } // end of Else if.
        } // End of While.
        hasMore = false;
        myCurrent_DN = null;
        return null;
    } // End of getNextEntry Method

    /**
     * Obtains the current DN found during our getNextEntry
     * method.
     *
     * @return String of current DN, or null after end of input.
     */
    public String getCurrentDN() {
        return (myCurrent_DN);
    } // End of getCurrentDN Method.

    /**
     * Provides an Iteration indicator method.
     *
     * @return boolean indicator if additional entries need
     *         to be processed.
     */
    public boolean hasMore() {
        return (hasMore);
    } // End of hasMore Method.

    /**
     * Obtains detected LDIF Version.
     *
     * @return String of LDIF Version (empty if no version line was present).
     */
    public String getVersion() {
        return (LDIFVersion);
    } // End of getVersion Method.

    /**
     * Private method for creating Attributes Object of
     * current LDIF entry being formulated. Values flagged as encoded are
     * base64-decoded before being stored.
     */
    private void add(Attributes entry,
                     String attribute,
                     String value,
                     boolean encoded) {
        Attribute vals = entry.get(attribute);
        if (vals == null) {
            vals = new BasicAttribute(attribute);
        }
        if (encoded) {
            // NOTE(review): presumably decodes to a byte[] value — verify
            // against idxIRRBase64.
            vals.add(idxIRRBase64.decode(value.toCharArray()));
        } else {
            vals.add(value);
        }
        entry.put(vals);
    } // End of Private add Method.
} ///:~ End of idxChangeLogLDIFReader Class
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.ui.customization;
import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.Separator;
import com.intellij.openapi.actionSystem.ex.QuickList;
import com.intellij.openapi.actionSystem.ex.QuickListsManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.keymap.impl.ui.ActionsTree;
import com.intellij.openapi.keymap.impl.ui.ActionsTreeUtil;
import com.intellij.openapi.keymap.impl.ui.Group;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.openapi.wm.impl.IdeFrameImpl;
import com.intellij.packageDependencies.ui.TreeExpansionMonitor;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.InsertPathAction;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.ImageLoader;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.List;
/**
 * Settings panel that lets the user customize the layout of IDE menus and
 * toolbars. The current action hierarchy is shown in a tree; the buttons
 * add/remove actions and separators, move entries, edit toolbar icons and
 * restore defaults. All edits are accumulated in {@code mySelectedSchema}
 * and committed to the shared {@link CustomActionsSchema} in {@link #apply()}.
 *
 * User: anna
 * Date: Mar 17, 2005
 */
public class CustomizableActionsPanel {
private static final Logger LOG = Logger.getInstance("#com.intellij.ide.ui.customization.CustomizableActionsPanel");
// The UI components below are never assigned in this class; presumably bound
// from a GUI-designer form — TODO confirm against the corresponding .form file.
private JButton myEditIconButton;
private JButton myRemoveActionButton;
private JButton myAddActionButton;
private JButton myMoveActionDownButton;
private JButton myMoveActionUpButton;
private JPanel myPanel;
private JTree myActionsTree;
private JButton myAddSeparatorButton;
private final TreeExpansionMonitor myTreeExpansionMonitor;
// Working copy of the customization schema being edited; replaced in reset()
// and copied into the global schema in apply().
private CustomActionsSchema mySelectedSchema;
private JButton myRestoreAllDefaultButton;
private JButton myRestoreDefaultButton;
/**
 * Builds the actions-tree model and wires the selection listener plus all
 * button handlers. Assumes the form-bound components are already non-null.
 */
public CustomizableActionsPanel() {
//noinspection HardCodedStringLiteral
Group rootGroup = new Group("root", null, null);
final DefaultMutableTreeNode root = new DefaultMutableTreeNode(rootGroup);
DefaultTreeModel model = new DefaultTreeModel(root);
myActionsTree.setModel(model);
myActionsTree.setRootVisible(false);
myActionsTree.setShowsRootHandles(true);
UIUtil.setLineStyleAngled(myActionsTree);
myActionsTree.setCellRenderer(new MyTreeCellRenderer());
setButtonsDisabled();
final ActionManager actionManager = ActionManager.getInstance();
// Keep button enablement in sync with the current tree selection.
myActionsTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener() {
public void valueChanged(TreeSelectionEvent e) {
final TreePath[] selectionPaths = myActionsTree.getSelectionPaths();
final boolean isSingleSelection = selectionPaths != null && selectionPaths.length == 1;
myAddActionButton.setEnabled(isSingleSelection);
if (isSingleSelection) {
final DefaultMutableTreeNode node = (DefaultMutableTreeNode)selectionPaths[0].getLastPathComponent();
String actionId = getActionId(node);
if (actionId != null) {
final AnAction action = actionManager.getAction(actionId);
myEditIconButton.setEnabled(action != null &&
action.getTemplatePresentation() != null);
}
else {
myEditIconButton.setEnabled(false);
}
}
else {
myEditIconButton.setEnabled(false);
}
myAddSeparatorButton.setEnabled(isSingleSelection);
myRemoveActionButton.setEnabled(selectionPaths != null);
if (selectionPaths != null) {
for (TreePath selectionPath : selectionPaths) {
// A path of length <= 2 is the (invisible) root or a top-level group:
// those entries must not be edited, so disable everything.
if (selectionPath.getPath() != null && selectionPath.getPath().length <= 2) {
setButtonsDisabled();
return;
}
}
}
myMoveActionUpButton.setEnabled(isMoveSupported(myActionsTree, -1));
myMoveActionDownButton.setEnabled(isMoveSupported(myActionsTree, 1));
myRestoreDefaultButton.setEnabled(!findActionsUnderSelection().isEmpty());
}
});
// "Add action": opens the picker dialog and inserts the chosen actions
// after the selected node.
myAddActionButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
final List<TreePath> expandedPaths = TreeUtil.collectExpandedPaths(myActionsTree);
final TreePath selectionPath = myActionsTree.getLeadSelectionPath();
if (selectionPath != null) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode)selectionPath.getLastPathComponent();
final FindAvailableActionsDialog dlg = new FindAvailableActionsDialog();
if (dlg.showAndGet()) {
final Set<Object> toAdd = dlg.getTreeSelectedActionIds();
if (toAdd == null) return;
for (final Object o : toAdd) {
final ActionUrl url = new ActionUrl(ActionUrl.getGroupPath(new TreePath(node.getPath())), o, ActionUrl.ADDED,
node.getParent().getIndex(node) + 1);
addCustomizedAction(url);
ActionUrl.changePathInActionsTree(myActionsTree, url);
if (o instanceof String) {
DefaultMutableTreeNode current = new DefaultMutableTreeNode(url.getComponent());
current.setParent((DefaultMutableTreeNode)node.getParent());
editToolbarIcon((String)o, current);
}
}
((DefaultTreeModel)myActionsTree.getModel()).reload();
}
}
TreeUtil.restoreExpandedPaths(myActionsTree, expandedPaths);
}
});
// "Edit icon": lets the user pick a custom icon for the selected action.
myEditIconButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
myRestoreAllDefaultButton.setEnabled(true);
final List<TreePath> expandedPaths = TreeUtil.collectExpandedPaths(myActionsTree);
final TreePath selectionPath = myActionsTree.getLeadSelectionPath();
if (selectionPath != null) {
EditIconDialog dlg = new EditIconDialog((DefaultMutableTreeNode)selectionPath.getLastPathComponent());
if (dlg.showAndGet()) {
myActionsTree.repaint();
}
}
TreeUtil.restoreExpandedPaths(myActionsTree, expandedPaths);
}
});
// "Add separator": inserts a Separator right after the selected node.
myAddSeparatorButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
final List<TreePath> expandedPaths = TreeUtil.collectExpandedPaths(myActionsTree);
final TreePath selectionPath = myActionsTree.getLeadSelectionPath();
if (selectionPath != null) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode)selectionPath.getLastPathComponent();
final ActionUrl url = new ActionUrl(ActionUrl.getGroupPath(selectionPath), Separator.getInstance(), ActionUrl.ADDED,
node.getParent().getIndex(node) + 1);
ActionUrl.changePathInActionsTree(myActionsTree, url);
addCustomizedAction(url);
((DefaultTreeModel)myActionsTree.getModel()).reload();
}
TreeUtil.restoreExpandedPaths(myActionsTree, expandedPaths);
}
});
// "Remove": records a DELETED ActionUrl for every selected path.
myRemoveActionButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
final List<TreePath> expandedPaths = TreeUtil.collectExpandedPaths(myActionsTree);
final TreePath[] selectionPath = myActionsTree.getSelectionPaths();
if (selectionPath != null) {
for (TreePath treePath : selectionPath) {
final ActionUrl url = CustomizationUtil.getActionUrl(treePath, ActionUrl.DELETED);
ActionUrl.changePathInActionsTree(myActionsTree, url);
addCustomizedAction(url);
}
((DefaultTreeModel)myActionsTree.getModel()).reload();
}
TreeUtil.restoreExpandedPaths(myActionsTree, expandedPaths);
}
});
// "Move up": shifts each selected entry one position towards the front.
myMoveActionUpButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
final List<TreePath> expandedPaths = TreeUtil.collectExpandedPaths(myActionsTree);
final TreePath[] selectionPath = myActionsTree.getSelectionPaths();
if (selectionPath != null) {
for (TreePath treePath : selectionPath) {
final ActionUrl url = CustomizationUtil.getActionUrl(treePath, ActionUrl.MOVE);
final int absolutePosition = url.getAbsolutePosition();
url.setInitialPosition(absolutePosition);
url.setAbsolutePosition(absolutePosition - 1);
ActionUrl.changePathInActionsTree(myActionsTree, url);
addCustomizedAction(url);
}
((DefaultTreeModel)myActionsTree.getModel()).reload();
TreeUtil.restoreExpandedPaths(myActionsTree, expandedPaths);
for (TreePath path : selectionPath) {
myActionsTree.addSelectionPath(path);
}
}
}
});
// "Move down": iterates the selection in reverse so the shifting of later
// siblings does not invalidate the remaining positions.
myMoveActionDownButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
final List<TreePath> expandedPaths = TreeUtil.collectExpandedPaths(myActionsTree);
final TreePath[] selectionPath = myActionsTree.getSelectionPaths();
if (selectionPath != null) {
for (int i = selectionPath.length - 1; i >= 0; i--) {
TreePath treePath = selectionPath[i];
final ActionUrl url = CustomizationUtil.getActionUrl(treePath, ActionUrl.MOVE);
final int absolutePosition = url.getAbsolutePosition();
url.setInitialPosition(absolutePosition);
url.setAbsolutePosition(absolutePosition + 1);
ActionUrl.changePathInActionsTree(myActionsTree, url);
addCustomizedAction(url);
}
((DefaultTreeModel)myActionsTree.getModel()).reload();
TreeUtil.restoreExpandedPaths(myActionsTree, expandedPaths);
for (TreePath path : selectionPath) {
myActionsTree.addSelectionPath(path);
}
}
}
});
// "Restore all defaults": drops every customization by copying a pristine schema.
myRestoreAllDefaultButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
mySelectedSchema.copyFrom(new CustomActionsSchema());
patchActionsTreeCorrespondingToSchema(root);
myRestoreAllDefaultButton.setEnabled(false);
}
});
// "Restore default": reverts only the customizations under the current
// selection, re-applying all the others on top of a pristine schema.
myRestoreDefaultButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
final List<ActionUrl> otherActions = new ArrayList<ActionUrl>(mySelectedSchema.getActions());
otherActions.removeAll(findActionsUnderSelection());
mySelectedSchema.copyFrom(new CustomActionsSchema());
for (ActionUrl otherAction : otherActions) {
mySelectedSchema.addAction(otherAction);
}
final List<TreePath> treePaths = TreeUtil.collectExpandedPaths(myActionsTree);
patchActionsTreeCorrespondingToSchema(root);
restorePathsAfterTreeOptimization(treePaths);
myRestoreDefaultButton.setEnabled(false);
}
});
patchActionsTreeCorrespondingToSchema(root);
myTreeExpansionMonitor = TreeExpansionMonitor.install(myActionsTree);
}
/**
 * Collects the schema customizations whose group path lies under any of the
 * currently selected group nodes; non-group selections contribute nothing.
 */
private List<ActionUrl> findActionsUnderSelection() {
final ArrayList<ActionUrl> actions = new ArrayList<ActionUrl>();
final TreePath[] selectionPaths = myActionsTree.getSelectionPaths();
if (selectionPaths != null) {
for (TreePath path : selectionPaths) {
final ActionUrl selectedUrl = CustomizationUtil.getActionUrl(path, ActionUrl.MOVE);
final ArrayList<String> selectedGroupPath = new ArrayList<String>(selectedUrl.getGroupPath());
final Object component = selectedUrl.getComponent();
if (component instanceof Group) {
selectedGroupPath.add(((Group)component).getName());
for (ActionUrl action : mySelectedSchema.getActions()) {
final ArrayList<String> groupPath = action.getGroupPath();
final int idx = Collections.indexOfSubList(groupPath, selectedGroupPath);
if (idx > -1) {
actions.add(action);
}
}
}
}
}
return actions;
}
/** Records a customization in the working schema and enables "restore all". */
private void addCustomizedAction(ActionUrl url) {
mySelectedSchema.addAction(url);
myRestoreAllDefaultButton.setEnabled(true);
}
/**
 * If the node sits on the main toolbar and its action has no icon, offers to
 * attach the generic "unknown" toolbar icon.
 * NOTE(review): {@code anAction} is dereferenced without a null check — an
 * unregistered actionId would NPE here; confirm callers guarantee the action
 * exists.
 */
private void editToolbarIcon(String actionId, DefaultMutableTreeNode node) {
final AnAction anAction = ActionManager.getInstance().getAction(actionId);
if (isToolbarAction(node) &&
anAction.getTemplatePresentation() != null &&
anAction.getTemplatePresentation().getIcon() == null) {
final int exitCode = Messages.showOkCancelDialog(IdeBundle.message("error.adding.action.without.icon.to.toolbar"),
IdeBundle.message("title.unable.to.add.action.without.icon.to.toolbar"),
Messages.getInformationIcon());
if (exitCode == Messages.OK) {
mySelectedSchema.addIconCustomization(actionId, null);
anAction.getTemplatePresentation().setIcon(AllIcons.Toolbar.Unknown);
anAction.getTemplatePresentation().setDisabledIcon(IconLoader.getDisabledIcon(AllIcons.Toolbar.Unknown));
anAction.setDefaultIcon(false);
node.setUserObject(Pair.create(actionId, AllIcons.Toolbar.Unknown));
myActionsTree.repaint();
setCustomizationSchemaForCurrentProjects();
}
}
}
/** Disables every edit button; used when the selection forbids editing. */
private void setButtonsDisabled() {
myRemoveActionButton.setEnabled(false);
myAddActionButton.setEnabled(false);
myEditIconButton.setEnabled(false);
myAddSeparatorButton.setEnabled(false);
myMoveActionDownButton.setEnabled(false);
myMoveActionUpButton.setEnabled(false);
}
/**
 * Returns true when every selected node shares the same parent and none is
 * already at the boundary for the requested direction (dir > 0 means down).
 */
private static boolean isMoveSupported(JTree tree, int dir) {
final TreePath[] selectionPaths = tree.getSelectionPaths();
if (selectionPaths != null) {
DefaultMutableTreeNode parent = null;
for (TreePath treePath : selectionPaths)
if (treePath.getLastPathComponent() != null) {
final DefaultMutableTreeNode node = (DefaultMutableTreeNode)treePath.getLastPathComponent();
if (parent == null) {
parent = (DefaultMutableTreeNode)node.getParent();
}
if (parent != node.getParent()) {
return false;
}
if (dir > 0) {
if (parent.getIndex(node) == parent.getChildCount() - 1) {
return false;
}
}
else {
if (parent.getIndex(node) == 0) {
return false;
}
}
}
return true;
}
return false;
}
/** @return the root panel of this settings page. */
public JPanel getPanel() {
return myPanel;
}
/** Pushes the active customization schema to the frames of all open projects. */
private static void setCustomizationSchemaForCurrentProjects() {
final Project[] openProjects = ProjectManager.getInstance().getOpenProjects();
for (Project project : openProjects) {
final IdeFrameImpl frame = WindowManagerEx.getInstanceEx().getFrame(project);
if (frame != null) {
frame.updateView();
}
//final FavoritesManager favoritesView = FavoritesManager.getInstance(project);
//final String[] availableFavoritesLists = favoritesView.getAvailableFavoritesLists();
//for (String favoritesList : availableFavoritesLists) {
// favoritesView.getFavoritesTreeViewPanel(favoritesList).updateTreePopupHandler();
//}
}
// Also refresh the welcome-screen (no-project) frame, if present.
final IdeFrameImpl frame = WindowManagerEx.getInstanceEx().getFrame(null);
if (frame != null) {
frame.updateView();
}
}
/**
 * Commits the working schema into the application-wide one and refreshes all
 * open frames. Expanded paths are preserved across schema optimization.
 */
public void apply() throws ConfigurationException {
final List<TreePath> treePaths = TreeUtil.collectExpandedPaths(myActionsTree);
if (mySelectedSchema != null) {
CustomizationUtil.optimizeSchema(myActionsTree, mySelectedSchema);
}
restorePathsAfterTreeOptimization(treePaths);
CustomActionsSchema.getInstance().copyFrom(mySelectedSchema);
setCustomizationSchemaForCurrentProjects();
}
/** Re-expands the given paths after the tree was rebuilt/optimized. */
private void restorePathsAfterTreeOptimization(final List<TreePath> treePaths) {
for (final TreePath treePath : treePaths) {
myActionsTree.expandPath(CustomizationUtil.getPathByUserObjects(myActionsTree, treePath));
}
}
/** Re-initializes the working schema from the saved application-wide schema. */
public void reset() {
mySelectedSchema = new CustomActionsSchema();
mySelectedSchema.copyFrom(CustomActionsSchema.getInstance());
patchActionsTreeCorrespondingToSchema((DefaultMutableTreeNode)myActionsTree.getModel().getRoot());
myRestoreAllDefaultButton.setEnabled(mySelectedSchema.isModified(new CustomActionsSchema()));
}
/** @return true when the working schema differs from the saved one. */
public boolean isModified() {
CustomizationUtil.optimizeSchema(myActionsTree, mySelectedSchema);
return CustomActionsSchema.getInstance().isModified(mySelectedSchema);
}
/** Rebuilds the tree under {@code root} to reflect the working schema. */
private void patchActionsTreeCorrespondingToSchema(DefaultMutableTreeNode root) {
root.removeAllChildren();
if (mySelectedSchema != null) {
mySelectedSchema.fillActionGroups(root);
for (final ActionUrl actionUrl : mySelectedSchema.getActions()) {
ActionUrl.changePathInActionsTree(myActionsTree, actionUrl);
}
}
((DefaultTreeModel)myActionsTree.getModel()).reload();
}
/**
 * Renders tree nodes whose user object may be a Group, an action id String,
 * a (actionId, Icon) Pair, a Separator or a QuickList; anything else is a
 * programming error.
 */
private static class MyTreeCellRenderer extends DefaultTreeCellRenderer {
public Component getTreeCellRendererComponent(JTree tree,
Object value,
boolean sel,
boolean expanded,
boolean leaf,
int row,
boolean hasFocus) {
super.getTreeCellRendererComponent(tree, value, sel, expanded, leaf, row, hasFocus);
if (value instanceof DefaultMutableTreeNode) {
Object userObject = ((DefaultMutableTreeNode)value).getUserObject();
Icon icon = null;
if (userObject instanceof Group) {
Group group = (Group)userObject;
String name = group.getName();
setText(name != null ? name : group.getId());
icon = ObjectUtils.notNull(group.getIcon(), AllIcons.Nodes.Folder);
}
else if (userObject instanceof String) {
String actionId = (String)userObject;
AnAction action = ActionManager.getInstance().getAction(actionId);
String name = action != null ? action.getTemplatePresentation().getText() : null;
setText(!StringUtil.isEmptyOrSpaces(name) ? name : actionId);
if (action != null) {
Icon actionIcon = action.getTemplatePresentation().getIcon();
if (actionIcon != null) {
icon = actionIcon;
}
}
}
else if (userObject instanceof Pair) {
String actionId = (String)((Pair)userObject).first;
AnAction action = ActionManager.getInstance().getAction(actionId);
setText(action != null ? action.getTemplatePresentation().getText() : actionId);
icon = (Icon)((Pair)userObject).second;
}
else if (userObject instanceof Separator) {
setText("-------------");
}
else if (userObject instanceof QuickList) {
setText(((QuickList)userObject).getName());
icon = AllIcons.Actions.QuickList;
}
else {
throw new IllegalArgumentException("unknown userObject: " + userObject);
}
setIcon(ActionsTree.getEvenIcon(icon));
if (sel) {
setForeground(UIUtil.getTreeSelectionForeground());
}
else {
setForeground(UIUtil.getTreeForeground());
}
}
return this;
}
}
/** @return true when the node's parent group is the main toolbar. */
private static boolean isToolbarAction(DefaultMutableTreeNode node) {
return node.getParent() != null && ((DefaultMutableTreeNode)node.getParent()).getUserObject() instanceof Group &&
((Group)((DefaultMutableTreeNode)node.getParent()).getUserObject()).getName().equals(ActionsTreeUtil.MAIN_TOOLBAR);
}
/**
 * Extracts the action id from a node whose user object is either the id
 * String itself or an (actionId, Icon) Pair; null otherwise.
 */
@Nullable
private static String getActionId(DefaultMutableTreeNode node) {
return (String)(node.getUserObject() instanceof String ? node.getUserObject() :
node.getUserObject() instanceof Pair ? ((Pair)node.getUserObject()).first : null);
}
/**
 * Applies the icon at {@code path} to the node's action: a non-empty path
 * installs a custom icon (rejecting oversized ones), an empty path removes
 * the customization. Returns false when validation fails.
 * NOTE(review): if the image fails to load, {@code image} stays null yet is
 * still handed to IconLoader.getIcon when the file exists — verify that
 * cannot throw.
 */
protected boolean doSetIcon(DefaultMutableTreeNode node, @Nullable String path, Component component) {
if (StringUtil.isNotEmpty(path) && !new File(path).isFile()) {
Messages
.showErrorDialog(component, IdeBundle.message("error.file.not.found.message", path), IdeBundle.message("title.choose.action.icon"));
return false;
}
String actionId = getActionId(node);
if (actionId == null) return false;
final AnAction action = ActionManager.getInstance().getAction(actionId);
if (action != null && action.getTemplatePresentation() != null) {
if (StringUtil.isNotEmpty(path)) {
Image image = null;
try {
image = ImageLoader.loadFromStream(VfsUtilCore.convertToURL(VfsUtil.pathToUrl(path.replace(File.separatorChar,
'/'))).openStream());
}
catch (IOException e) {
LOG.debug(e);
}
Icon icon = new File(path).exists() ? IconLoader.getIcon(image) : null;
if (icon != null) {
if (icon.getIconWidth() > EmptyIcon.ICON_18.getIconWidth() || icon.getIconHeight() > EmptyIcon.ICON_18.getIconHeight()) {
Messages.showErrorDialog(component, IdeBundle.message("custom.icon.validation.message"), IdeBundle.message("title.choose.action.icon"));
return false;
}
node.setUserObject(Pair.create(actionId, icon));
mySelectedSchema.addIconCustomization(actionId, path);
}
}
else {
node.setUserObject(Pair.create(actionId, null));
mySelectedSchema.removeIconCustomization(actionId);
final DefaultMutableTreeNode nodeOnToolbar = findNodeOnToolbar(actionId);
if (nodeOnToolbar != null){
editToolbarIcon(actionId, nodeOnToolbar);
node.setUserObject(nodeOnToolbar.getUserObject());
}
}
return true;
}
return false;
}
/** Creates the icon-path text field with a .png-only file chooser attached. */
private static TextFieldWithBrowseButton createBrowseField(){
TextFieldWithBrowseButton textField = new TextFieldWithBrowseButton();
textField.setPreferredSize(new Dimension(200, textField.getPreferredSize().height));
textField.setMinimumSize(new Dimension(200, textField.getPreferredSize().height));
final FileChooserDescriptor fileChooserDescriptor = new FileChooserDescriptor(true, false, false, false, false, false) {
public boolean isFileSelectable(VirtualFile file) {
//noinspection HardCodedStringLiteral
return file.getName().endsWith(".png");
}
};
textField.addBrowseFolderListener(IdeBundle.message("title.browse.icon"), IdeBundle.message("prompt.browse.icon.for.selected.action"), null,
fileChooserDescriptor);
InsertPathAction.addTo(textField.getTextField(), fileChooserDescriptor);
return textField;
}
/** Modal dialog for choosing a custom icon file for one tree node. */
private class EditIconDialog extends DialogWrapper {
private final DefaultMutableTreeNode myNode;
protected TextFieldWithBrowseButton myTextField;
protected EditIconDialog(DefaultMutableTreeNode node) {
super(false);
setTitle(IdeBundle.message("title.choose.action.icon"));
// init() builds the center panel (and myTextField) before myNode is set;
// createCenterPanel does not read myNode, so this ordering is safe.
init();
myNode = node;
final String actionId = getActionId(node);
if (actionId != null) {
final String iconPath = mySelectedSchema.getIconPath(actionId);
myTextField.setText(FileUtil.toSystemDependentName(iconPath));
}
}
@Override
public JComponent getPreferredFocusedComponent() {
return myTextField.getChildComponent();
}
protected String getDimensionServiceKey() {
return getClass().getName();
}
protected JComponent createCenterPanel() {
myTextField = createBrowseField();
JPanel northPanel = new JPanel(new BorderLayout());
northPanel.add(myTextField, BorderLayout.NORTH);
return northPanel;
}
protected void doOKAction() {
if (myNode != null) {
if (!doSetIcon(myNode, myTextField.getText(), getContentPane())) {
return;
}
final Object userObject = myNode.getUserObject();
if (userObject instanceof Pair) {
String actionId = (String)((Pair)userObject).first;
final AnAction action = ActionManager.getInstance().getAction(actionId);
final Icon icon = (Icon)((Pair)userObject).second;
action.getTemplatePresentation().setIcon(icon);
action.setDefaultIcon(icon == null);
editToolbarIcon(actionId, myNode);
}
myActionsTree.repaint();
}
setCustomizationSchemaForCurrentProjects();
super.doOKAction();
}
}
/**
 * Finds the toolbar node for the given action id.
 * NOTE(review): assumes the toolbar group is the root's second child
 * (getChildAt(1)) — TODO confirm the tree layout guarantees this.
 */
@Nullable
private DefaultMutableTreeNode findNodeOnToolbar(String actionId){
final TreeNode toolbar = ((DefaultMutableTreeNode)myActionsTree.getModel().getRoot()).getChildAt(1);
for(int i = 0; i < toolbar.getChildCount(); i++){
final DefaultMutableTreeNode child = (DefaultMutableTreeNode)toolbar.getChildAt(i);
final String childId = getActionId(child);
if (childId != null && childId.equals(actionId)){
return child;
}
}
return null;
}
/** Modal dialog listing all available actions for the user to add. */
private class FindAvailableActionsDialog extends DialogWrapper{
private JTree myTree;
private JButton mySetIconButton;
private TextFieldWithBrowseButton myTextField;
FindAvailableActionsDialog() {
super(false);
setTitle(IdeBundle.message("action.choose.actions.to.add"));
init();
}
protected JComponent createCenterPanel() {
Group rootGroup = ActionsTreeUtil.createMainGroup(null, null, QuickListsManager.getInstance().getAllQuickLists());
DefaultMutableTreeNode root = ActionsTreeUtil.createNode(rootGroup);
DefaultTreeModel model = new DefaultTreeModel(root);
myTree = new Tree();
myTree.setModel(model);
myTree.setCellRenderer(new MyTreeCellRenderer());
final ActionManager actionManager = ActionManager.getInstance();
mySetIconButton = new JButton(IdeBundle.message("button.set.icon"));
mySetIconButton.setEnabled(false);
mySetIconButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
final TreePath selectionPath = myTree.getSelectionPath();
if (selectionPath != null) {
doSetIcon((DefaultMutableTreeNode)selectionPath.getLastPathComponent(), myTextField.getText(), getContentPane());
myTree.repaint();
}
}
});
myTextField = createBrowseField();
myTextField.getTextField().getDocument().addDocumentListener(new DocumentAdapter() {
protected void textChanged(DocumentEvent e) {
enableSetIconButton(actionManager);
}
});
JPanel northPanel = new JPanel(new BorderLayout());
northPanel.add(myTextField, BorderLayout.CENTER);
final JLabel label = new JLabel(IdeBundle.message("label.icon.path"));
label.setLabelFor(myTextField.getChildComponent());
northPanel.add(label, BorderLayout.WEST);
northPanel.add(mySetIconButton, BorderLayout.EAST);
northPanel.setBorder(BorderFactory.createEmptyBorder(0, 0, 5, 0));
JPanel panel = new JPanel(new BorderLayout());
panel.add(northPanel, BorderLayout.NORTH);
panel.add(ScrollPaneFactory.createScrollPane(myTree), BorderLayout.CENTER);
myTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener() {
public void valueChanged(TreeSelectionEvent e) {
enableSetIconButton(actionManager);
final TreePath selectionPath = myTree.getSelectionPath();
if (selectionPath != null) {
final DefaultMutableTreeNode node = (DefaultMutableTreeNode)selectionPath.getLastPathComponent();
final String actionId = getActionId(node);
if (actionId != null) {
final String iconPath = mySelectedSchema.getIconPath(actionId);
myTextField.setText(FileUtil.toSystemDependentName(iconPath));
}
}
}
});
return panel;
}
protected void doOKAction() {
final ActionManager actionManager = ActionManager.getInstance();
// Propagate any icon customizations made inside this dialog.
TreeUtil.traverseDepth((TreeNode)myTree.getModel().getRoot(), new TreeUtil.Traverse() {
public boolean accept(Object node) {
if (node instanceof DefaultMutableTreeNode) {
final DefaultMutableTreeNode mutableNode = (DefaultMutableTreeNode)node;
final Object userObject = mutableNode.getUserObject();
if (userObject instanceof Pair) {
String actionId = (String)((Pair)userObject).first;
final AnAction action = actionManager.getAction(actionId);
Icon icon = (Icon)((Pair)userObject).second;
action.getTemplatePresentation().setIcon(icon);
action.setDefaultIcon(icon == null);
editToolbarIcon(actionId, mutableNode);
}
}
return true;
}
});
super.doOKAction();
setCustomizationSchemaForCurrentProjects();
}
/**
 * Enables "Set icon" when the selection is an action that already has an
 * icon, or when an icon path has been typed for a selected non-separator.
 * NOTE(review): {@code new DefaultMutableTreeNode(selectionPath).isLeaf()}
 * wraps the TreePath itself in a fresh node, which is always a leaf — this
 * looks suspicious; verify the intended check.
 */
protected void enableSetIconButton(ActionManager actionManager) {
final TreePath selectionPath = myTree.getSelectionPath();
Object userObject = null;
if (selectionPath != null) {
userObject = ((DefaultMutableTreeNode)selectionPath.getLastPathComponent()).getUserObject();
if (userObject instanceof String) {
final AnAction action = actionManager.getAction((String)userObject);
if (action != null &&
action.getTemplatePresentation() != null &&
action.getTemplatePresentation().getIcon() != null) {
mySetIconButton.setEnabled(true);
return;
}
}
}
mySetIconButton.setEnabled(myTextField.getText().length() != 0 &&
selectionPath != null &&
new DefaultMutableTreeNode(selectionPath).isLeaf() &&
!(userObject instanceof Separator));
}
/** @return user objects of all selected rows, or null when nothing is selected. */
@Nullable
public Set<Object> getTreeSelectedActionIds() {
TreePath[] paths = myTree.getSelectionPaths();
if (paths == null) return null;
Set<Object> actions = new HashSet<Object>();
for (TreePath path : paths) {
Object node = path.getLastPathComponent();
if (node instanceof DefaultMutableTreeNode) {
DefaultMutableTreeNode defNode = (DefaultMutableTreeNode)node;
Object userObject = defNode.getUserObject();
actions.add(userObject);
}
}
return actions;
}
protected String getDimensionServiceKey() {
return "#com.intellij.ide.ui.customization.CustomizableActionsPanel.FindAvailableActionsDialog";
}
}
}
| |
/**
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.common.dto.admin;
import java.io.Serializable;
import java.util.List;
import java.util.Objects;
/**
 * Serializable transport object describing the parameters an SDK is generated
 * with: target schema versions, event-family map ids and tokens. Equality and
 * hashing are value-based over all fields.
 */
public class SdkTokenDto implements Serializable {
private static final long serialVersionUID = 7784970390585790120L;
// Schema versions (configuration / profile / notification / log) the SDK targets.
protected Integer configurationSchemaVersion;
protected Integer profileSchemaVersion;
protected Integer notificationSchemaVersion;
protected Integer logSchemaVersion;
// Presumably ids of application event family maps — TODO confirm with callers.
protected List<String> aefMapIds;
protected String defaultVerifierToken;
protected String applicationToken;
protected String name;
public SdkTokenDto() {
super();
}
/**
 * Creates a fully populated DTO.
 *
 * @param configurationSchemaVersion configuration schema version the SDK targets
 * @param profileSchemaVersion       profile schema version
 * @param notificationSchemaVersion  notification schema version
 * @param logSchemaVersion           log schema version
 * @param aefMapIds                  event family map ids (stored by reference)
 * @param defaultVerifierToken       default user verifier token
 * @param applicationToken           owning application token
 * @param name                       display name
 */
public SdkTokenDto(Integer configurationSchemaVersion,
                   Integer profileSchemaVersion, Integer notificationSchemaVersion,
                   Integer logSchemaVersion, List<String> aefMapIds,
                   String defaultVerifierToken, String applicationToken, String name) {
    this.configurationSchemaVersion = configurationSchemaVersion;
    this.profileSchemaVersion = profileSchemaVersion;
    this.notificationSchemaVersion = notificationSchemaVersion;
    this.logSchemaVersion = logSchemaVersion;
    this.aefMapIds = aefMapIds;
    this.defaultVerifierToken = defaultVerifierToken;
    this.applicationToken = applicationToken;
    this.name = name;
}
// Plain JavaBean accessors. Values are stored and returned by reference —
// in particular aefMapIds is NOT defensively copied, so callers share the list.
public Integer getConfigurationSchemaVersion() {
return configurationSchemaVersion;
}
public void setConfigurationSchemaVersion(Integer configurationSchemaVersion) {
this.configurationSchemaVersion = configurationSchemaVersion;
}
public Integer getProfileSchemaVersion() {
return profileSchemaVersion;
}
public void setProfileSchemaVersion(Integer profileSchemaVersion) {
this.profileSchemaVersion = profileSchemaVersion;
}
public Integer getNotificationSchemaVersion() {
return notificationSchemaVersion;
}
public void setNotificationSchemaVersion(Integer notificationSchemaVersion) {
this.notificationSchemaVersion = notificationSchemaVersion;
}
public Integer getLogSchemaVersion() {
return logSchemaVersion;
}
public void setLogSchemaVersion(Integer logSchemaVersion) {
this.logSchemaVersion = logSchemaVersion;
}
public List<String> getAefMapIds() {
return aefMapIds;
}
public void setAefMapIds(List<String> aefMapIds) {
this.aefMapIds = aefMapIds;
}
public String getDefaultVerifierToken() {
return defaultVerifierToken;
}
public void setDefaultVerifierToken(String defaultVerifierToken) {
this.defaultVerifierToken = defaultVerifierToken;
}
public String getApplicationToken() {
return applicationToken;
}
public void setApplicationToken(String applicationToken) {
this.applicationToken = applicationToken;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
/**
 * {@inheritDoc}
 *
 * <p>Replaces the hand-rolled 31-based accumulation with
 * {@link Objects#hash(Object...)}, which implements the identical algorithm
 * (initial value 1, multiplier 31, null hashes as 0), so produced values are
 * unchanged. Field order must stay in sync with {@link #equals(Object)}.
 */
@Override
public int hashCode() {
    return Objects.hash(aefMapIds, applicationToken, configurationSchemaVersion,
            defaultVerifierToken, logSchemaVersion, name,
            notificationSchemaVersion, profileSchemaVersion);
}
/**
 * {@inheritDoc}
 *
 * <p>Value equality over every field via
 * {@link Objects#equals(Object, Object)} — behaviorally identical to the
 * previous null-check ladder, including the exact-class requirement
 * (getClass comparison rather than instanceof).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    SdkTokenDto other = (SdkTokenDto) obj;
    return Objects.equals(aefMapIds, other.aefMapIds)
            && Objects.equals(applicationToken, other.applicationToken)
            && Objects.equals(configurationSchemaVersion, other.configurationSchemaVersion)
            && Objects.equals(defaultVerifierToken, other.defaultVerifierToken)
            && Objects.equals(logSchemaVersion, other.logSchemaVersion)
            && Objects.equals(name, other.name)
            && Objects.equals(notificationSchemaVersion, other.notificationSchemaVersion)
            && Objects.equals(profileSchemaVersion, other.profileSchemaVersion);
}
/**
 * Debug representation listing every field. Plain string concatenation emits
 * exactly the same text as the previous explicit StringBuilder (null fields
 * render as "null" in both forms).
 */
@Override
public String toString() {
    return "SdkTokenDto [configurationSchemaVersion=" + configurationSchemaVersion
        + ", profileSchemaVersion=" + profileSchemaVersion
        + ", notificationSchemaVersion=" + notificationSchemaVersion
        + ", logSchemaVersion=" + logSchemaVersion
        + ", aefMapIds=" + aefMapIds
        + ", defaultVerifierToken=" + defaultVerifierToken
        + ", applicationToken=" + applicationToken
        + ", name=" + name
        + "]";
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.state;
import static org.apache.ambari.server.state.ConfigMergeHelper.ThreeWayValue;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.H2DatabaseCleaner;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.util.Modules;
/**
 * Tests the {@link ConfigMergeHelper} class.
 */
public class ConfigMergeHelperTest {
// NOTE(review): "HPD-2.2.0" looks like a typo for "HDP-2.2.0". It is harmless here
// because every stack lookup is mocked against this same constant, but confirm
// before reusing it elsewhere.
private static final StackId currentStackId = new StackId("HDP-2.1.1");
private static final StackId newStackId = new StackId("HPD-2.2.0");
private Injector injector;
private Clusters clustersMock;
private AmbariMetaInfo ambariMetaInfoMock;
/** Builds a Guice injector whose Clusters/AmbariMetaInfo bindings are nice mocks. */
@Before
public void before() throws Exception {
clustersMock = createNiceMock(Clusters.class);
ambariMetaInfoMock = createNiceMock(AmbariMetaInfo.class);
final InMemoryDefaultTestModule injectorModule = new InMemoryDefaultTestModule() {
@Override
protected void configure() {
super.configure();
}
};
MockModule mockModule = new MockModule();
// create an injector which will inject the mocks
injector = Guice.createInjector(Modules.override(injectorModule).with(mockModule));
injector.getInstance(GuiceJpaInitializer.class);
}
@After
public void teardown() throws AmbariException, SQLException {
H2DatabaseCleaner.clearDatabaseAndStopPersistenceService(injector);
}
/**
 * Drives {@link ConfigMergeHelper#getConflicts} with mocked current- and new-stack
 * service/stack properties and verifies the three-way values reported for the
 * conflicting keys in hdfs-env and hadoop-env.
 */
@Test
public void testGetConflicts() throws Exception {
Cluster clusterMock = createNiceMock(Cluster.class);
expect(clustersMock.getCluster(anyString())).andReturn(clusterMock);
expect(clusterMock.getCurrentStackVersion()).andReturn(currentStackId);
// Cluster hosts two services; their per-service properties are mocked below.
expect(clusterMock.getServices()).andReturn(new HashMap<String, Service>() {{
put("HDFS", createNiceMock(Service.class));
put("ZK", createNiceMock(Service.class));
}});
Set<PropertyInfo> currentHDFSProperties = new HashSet<PropertyInfo>() {{
add(createPropertyInfo("hdfs-env.xml", "equal.key", "equal-value"));
}};
Set<PropertyInfo> currentZKProperties = new HashSet<PropertyInfo>() {{
add(createPropertyInfo("zk-env.xml", "different.key", "different-value-1"));
}};
Set<PropertyInfo> currentStackProperties = new HashSet<PropertyInfo>() {{
add(createPropertyInfo("hadoop-env.xml", "equal.key", "modified.value"));
}};
expect(ambariMetaInfoMock.getServiceProperties(currentStackId.getStackName(),
currentStackId.getStackVersion(), "HDFS")).andReturn(currentHDFSProperties);
expect(ambariMetaInfoMock.getServiceProperties(currentStackId.getStackName(),
currentStackId.getStackVersion(), "ZK")).andReturn(currentZKProperties);
expect(ambariMetaInfoMock.getStackProperties(currentStackId.getStackName(),
currentStackId.getStackVersion())).andReturn(currentStackProperties);
Set<PropertyInfo> newHDFSProperties = new HashSet<PropertyInfo>() {{
add(createPropertyInfo("hdfs-env.xml", "equal.key", "equal-value"));
add(createPropertyInfo("new-hdfs-config.xml", "equal.key", "equal-value"));
}};
Set<PropertyInfo> newZKProperties = new HashSet<PropertyInfo>() {{
add(createPropertyInfo("zk-env.xml", "equal.key", "different-value-2"));
add(createPropertyInfo("zk-env.xml", "new.key", "new-value-2"));
}};
Set<PropertyInfo> newStackProperties = new HashSet<PropertyInfo>() {{
add(createPropertyInfo("hadoop-env.xml", "equal.key", "another.value"));
}};
expect(ambariMetaInfoMock.getServiceProperties(newStackId.getStackName(),
newStackId.getStackVersion(), "HDFS")).andReturn(newHDFSProperties);
expect(ambariMetaInfoMock.getServiceProperties(newStackId.getStackName(),
newStackId.getStackVersion(), "ZK")).andReturn(newZKProperties);
expect(ambariMetaInfoMock.getStackProperties(newStackId.getStackName(),
newStackId.getStackVersion())).andReturn(newStackProperties);
// desired config of hdfs-env.xml
Map<String, String> desiredHdfsEnvProperties = new HashMap<>();
expect(clusterMock.getDesiredConfigByType("hdfs-env.xml")).andReturn(
createConfigMock(desiredHdfsEnvProperties)
);
// desired config of zk-env.xml
Map<String, String> desiredZkEnvProperties = new HashMap<>();
// NOTE(review): this expectation re-uses "hdfs-env.xml" although the comment above
// says zk-env — likely a copy-paste slip. Because these are nice (unordered) mocks,
// an unmatched "zk-env.xml" call simply returns null and the assertions below still
// hold; confirm the intended type before changing it, as the fix may alter matching.
expect(clusterMock.getDesiredConfigByType("hdfs-env.xml")).andReturn(
createConfigMock(desiredZkEnvProperties)
);
// desired config of hadoop-env.xml
Map<String, String> desiredHadoopEnvProperties = new HashMap<>();
expect(clusterMock.getDesiredConfigByType("hadoop-env.xml")).andReturn(
createConfigMock(desiredHadoopEnvProperties)
);
replay(clusterMock, clustersMock, ambariMetaInfoMock);
ConfigMergeHelper configMergeHelper = injector.getInstance(ConfigMergeHelper.class);
Map<String, Map<String, ThreeWayValue>> conflicts = configMergeHelper.getConflicts(
"clustername", newStackId);
assertNotNull(conflicts);
assertEquals(2, conflicts.size());
for (String key : conflicts.keySet()) {
if (key.equals("hdfs-env")) {
Map<String, ThreeWayValue> stringThreeWayValueMap = conflicts.get(key);
assertEquals(1, stringThreeWayValueMap.size());
assertEquals("equal-value", stringThreeWayValueMap.get("equal.key").oldStackValue);
assertEquals("equal-value", stringThreeWayValueMap.get("equal.key").newStackValue);
assertEquals("", stringThreeWayValueMap.get("equal.key").savedValue);
} else if (key.equals("hadoop-env")) {
Map<String, ThreeWayValue> stringThreeWayValueMap = conflicts.get(key);
assertEquals(1, stringThreeWayValueMap.size());
assertEquals("modified.value", stringThreeWayValueMap.get("equal.key").oldStackValue);
assertEquals("another.value", stringThreeWayValueMap.get("equal.key").newStackValue);
assertEquals("", stringThreeWayValueMap.get("equal.key").savedValue);
} else {
fail("Unexpected key");
}
}
// NOTE(review): duplicate of the size assertion made before the loop.
assertEquals(2, conflicts.size());
}
/** Convenience factory for a {@link PropertyInfo} with the given file, name and value. */
private PropertyInfo createPropertyInfo(String fileName, String name, String value) {
PropertyInfo result = new PropertyInfo();
result.setFilename(fileName);
result.setName(name);
result.setValue(value);
return result;
}
/**
 * Generates config that returns properties
 * @param properties properties that should be returned by config mock
 * @return mock
 */
private Config createConfigMock(Map<String, String> properties) {
Config result = createNiceMock(Config.class);
expect(result.getProperties()).andReturn(properties);
return result;
}
/** Checks heap-size normalization: the template's m/M/g/G suffix is applied to the value. */
@Test
public void testNormalizeValue() throws Exception{
// If template not defined
String normalizedValue = ConfigMergeHelper.normalizeValue(null, "2048m");
assertEquals("2048m", normalizedValue);
// Template does not define heap
normalizedValue = ConfigMergeHelper.normalizeValue("3k", "2048");
assertEquals("2048", normalizedValue);
// Template - megabytes
normalizedValue = ConfigMergeHelper.normalizeValue("1024m", "2048");
assertEquals("2048m", normalizedValue);
normalizedValue = ConfigMergeHelper.normalizeValue("1024M", "2048");
assertEquals("2048M", normalizedValue);
// Template - gigabytes
normalizedValue = ConfigMergeHelper.normalizeValue("4g", "2");
assertEquals("2g", normalizedValue);
normalizedValue = ConfigMergeHelper.normalizeValue("4G", "2");
assertEquals("2G", normalizedValue);
}
/** Guice module overriding the production bindings with this test's mocks. */
private class MockModule implements Module {
@Override
public void configure(Binder binder) {
binder.bind(Clusters.class).toInstance(clustersMock);
binder.bind(AmbariMetaInfo.class).toInstance(ambariMetaInfoMock);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.stack;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.state.RepositoryInfo;
import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.stack.RepositoryXml;
import org.apache.ambari.server.state.stack.ServiceMetainfoXml;
import org.junit.Test;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultiset;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Multiset;
/**
 * Tests for {@link StackModule}: resolution of stack- and service-level
 * repository definitions, including de-duplication rules.
 */
public class StackModuleTest {

    /** Repos declared by add-on services are merged into the resolved stack's repo list. */
    @Test
    public void stackServiceReposAreRead() throws Exception {
        StackModule sm = createStackModule("FooBar",
            "2.4",
            Optional.of(Lists.newArrayList(repoInfo("foo", "1.0.1", "http://foo.org"))),
            Lists.newArrayList(repoInfo("bar", "2.0.1", "http://bar.org")));
        Set<String> repoIds = getIds(sm.getModuleInfo().getRepositories());
        assertEquals(ImmutableSet.of("foo:1.0.1", "bar:2.0.1"), repoIds);
    }

    /**
     * If more add-on services define the same repo, the duplicate repo definitions should be disregarded.
     * @throws Exception
     */
    @Test
    public void duplicateStackServiceReposAreDiscarded() throws Exception {
        StackModule sm = createStackModule("FooBar",
            "2.4",
            // stack repos
            Optional.of(Lists.newArrayList(repoInfo("StackRepoA", "1.1.1", "http://repos.org/stackrepoA"),
                repoInfo("StackRepoB", "2.2.2", "http://repos.org/stackrepoB"))),
            // stack service repos
            // These two should be preserved. even though duplicates, the contents are the same
            Lists.newArrayList(repoInfo("serviceRepoA", "1.0.0", "http://bar.org/1_0_0")),
            Lists.newArrayList(repoInfo("serviceRepoA", "1.0.0", "http://bar.org/1_0_0")),
            // These should be dropped as the names are the same but contents are different
            Lists.newArrayList(repoInfo("serviceRepoB", "1.2.1", "http://bar.org/1_1_1")),
            Lists.newArrayList(repoInfo("serviceRepoB", "1.2.3", "http://bar.org/1_1_1")),
            // The first one should be dropped (overrides a stack repo), the rest only generates warnings (duplicate urls)
            Lists.newArrayList(repoInfo("StackRepoA", "2.0.0", "http://repos.org/stackrepoA_200"),
                repoInfo("ShouldBeJustAWarning1", "3.1.1", "http://repos.org/stackrepoA"),
                repoInfo("ShouldBeJustAWarning2", "1.0.0", "http://bar.org/1_0_0")));
        List<RepositoryInfo> repos = sm.getModuleInfo().getRepositories();
        Set<String> repoIds = getIds(repos);
        assertEquals("Unexpected number of repos. Each repo should be added only once", repoIds.size(), repos.size());
        assertEquals("Unexpected repositories",
            ImmutableSet.of("StackRepoA:1.1.1",
                "StackRepoB:2.2.2",
                "serviceRepoA:1.0.0",
                "ShouldBeJustAWarning1:3.1.1",
                "ShouldBeJustAWarning2:1.0.0"), repoIds);
    }

    /** Service repos must still be processed when the stack itself declares no repo file. */
    @Test
    public void serviceReposAreProcessedEvenIfNoStackRepo() throws Exception {
        StackModule sm = createStackModule("FooBar",
            "2.4",
            Optional.absent(),
            Lists.newArrayList(repoInfo("bar", "2.0.1", "http://bar.org")));
        Set<String> repoIds = getIds(sm.getModuleInfo().getRepositories());
        assertEquals(ImmutableSet.of("bar:2.0.1"), repoIds);
    }

    /**
     * If two add-on services define the same repo, the repo should be disregarded.
     * This applies per os, so the same repo can be defined for multiple os'es (e.g redhat5 and redhat6)
     * @throws Exception
     */
    @Test
    public void duplicateStackServiceReposAreCheckedPerOs() throws Exception {
        StackModule sm = createStackModule("FooBar",
            "2.4",
            Optional.absent(),
            Lists.newArrayList(repoInfo("bar", "2.0.1", "http://bar.org", "centos6")),
            Lists.newArrayList(repoInfo("bar", "2.0.1", "http://bar.org", "centos7")));
        Multiset<String> repoIds = getIdsMultiple(sm.getModuleInfo().getRepositories());
        assertEquals("Repo should be occur exactly twice, once for each os type.",
            ImmutableMultiset.of("bar:2.0.1", "bar:2.0.1"), repoIds);
    }

    /** A freshly resolved stack starts out with no removed services. */
    @Test
    public void removedServicesInitialValue() throws Exception {
        StackModule sm = createStackModule("FooBar",
            "2.4",
            Optional.absent(),
            Lists.newArrayList(repoInfo("bar", "2.0.1", "http://bar.org", "centos6")),
            Lists.newArrayList(repoInfo("bar", "2.0.1", "http://bar.org", "centos7")));
        List<String> removedServices = sm.getModuleInfo().getRemovedServices();
        // JUnit convention: expected value first, actual second.
        assertEquals(0, removedServices.size());
    }

    /** A freshly resolved stack starts out with no config-less services. */
    @Test
    public void servicesWithNoConfigsInitialValue() throws Exception {
        StackModule sm = createStackModule("FooBar",
            "2.4",
            Optional.absent(),
            Lists.newArrayList(repoInfo("bar", "2.0.1", "http://bar.org", "centos6")),
            Lists.newArrayList(repoInfo("bar", "2.0.1", "http://bar.org", "centos7")));
        List<String> servicesWithNoConfigs = sm.getModuleInfo().getServicesWithNoConfigs();
        // JUnit convention: expected value first, actual second.
        assertEquals(0, servicesWithNoConfigs.size());
    }

    /**
     * Builds a resolved {@link StackModule} from mocked stack directories: one optional
     * stack-level repo list plus one service directory (with a unique service name) per
     * given service repo list.
     */
    @SafeVarargs
    private static StackModule createStackModule(String stackName, String stackVersion, Optional<? extends List<RepositoryInfo>> stackRepos,
        List<RepositoryInfo>... serviceRepoLists) throws AmbariException {
        StackDirectory sd = mock(StackDirectory.class);
        List<ServiceDirectory> serviceDirectories = Lists.newArrayList();
        for (List<RepositoryInfo> serviceRepoList : serviceRepoLists) {
            StackServiceDirectory svd = mock(StackServiceDirectory.class);
            RepositoryXml serviceRepoXml = mock(RepositoryXml.class);
            when(svd.getRepoFile()).thenReturn(serviceRepoXml);
            when(serviceRepoXml.getRepositories()).thenReturn(serviceRepoList);
            ServiceMetainfoXml serviceMetainfoXml = mock(ServiceMetainfoXml.class);
            when(serviceMetainfoXml.isValid()).thenReturn(true);
            ServiceInfo serviceInfo = mock(ServiceInfo.class);
            when(serviceInfo.isValid()).thenReturn(true);
            when(serviceInfo.getName()).thenReturn(UUID.randomUUID().toString()); // unique service names
            when(serviceMetainfoXml.getServices()).thenReturn(Lists.newArrayList(serviceInfo));
            when(svd.getMetaInfoFile()).thenReturn(serviceMetainfoXml);
            serviceDirectories.add(svd);
        }
        if (stackRepos.isPresent()) {
            RepositoryXml stackRepoXml = mock(RepositoryXml.class);
            when(sd.getRepoFile()).thenReturn(stackRepoXml);
            when(stackRepoXml.getRepositories()).thenReturn(stackRepos.get());
        }
        when(sd.getServiceDirectories()).thenReturn(serviceDirectories);
        when(sd.getStackDirName()).thenReturn(stackName);
        when(sd.getDirectory()).thenReturn(new File(stackVersion));
        StackContext ctx = mock(StackContext.class);
        StackModule sm = new StackModule(sd, ctx);
        sm.resolve(null,
            ImmutableMap.of(String.format("%s:%s", stackName, stackVersion), sm),
            ImmutableMap.of(), ImmutableMap.of());
        return sm;
    }

    /** Repo info for the default os type (centos6). */
    private RepositoryInfo repoInfo(String repoName, String repoVersion, String url) {
        return repoInfo(repoName, repoVersion, url, "centos6");
    }

    /** Repo info with id {@code name:version} for the given os type. */
    private RepositoryInfo repoInfo(String repoName, String repoVersion, String url, String osType) {
        RepositoryInfo info = new RepositoryInfo();
        info.setRepoId(String.format("%s:%s", repoName, repoVersion));
        info.setRepoName(repoName);
        info.setBaseUrl(url);
        info.setOsType(osType);
        return info;
    }

    /** Distinct repo ids of the given repos. */
    private Set<String> getIds(List<RepositoryInfo> repoInfos) {
        return ImmutableSet.copyOf(Lists.transform(repoInfos, RepositoryInfo.GET_REPO_ID_FUNCTION));
    }

    /** Repo ids of the given repos, keeping duplicates. */
    private Multiset<String> getIdsMultiple(List<RepositoryInfo> repoInfos) {
        return ImmutableMultiset.copyOf(Lists.transform(repoInfos, RepositoryInfo.GET_REPO_ID_FUNCTION));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.apm.plugin.grpc.v1.client;
import io.grpc.Channel;
import io.grpc.ClientCall;
import io.grpc.ForwardingClientCall;
import io.grpc.ForwardingClientCallListener;
import io.grpc.Metadata;
import io.grpc.MethodDescriptor;
import io.grpc.Status;
import javax.annotation.Nullable;
import org.apache.skywalking.apm.agent.core.context.CarrierItem;
import org.apache.skywalking.apm.agent.core.context.ContextCarrier;
import org.apache.skywalking.apm.agent.core.context.ContextManager;
import org.apache.skywalking.apm.agent.core.context.ContextSnapshot;
import org.apache.skywalking.apm.agent.core.context.tag.Tags;
import org.apache.skywalking.apm.agent.core.context.trace.AbstractSpan;
import org.apache.skywalking.apm.agent.core.context.trace.SpanLayer;
import org.apache.skywalking.apm.network.trace.component.ComponentsDefine;
import org.apache.skywalking.apm.plugin.grpc.v1.OperationNameFormatUtil;
import static org.apache.skywalking.apm.plugin.grpc.v1.Constants.BLOCKING_CALL_EXIT_SPAN;
import static org.apache.skywalking.apm.plugin.grpc.v1.Constants.CLIENT;
import static org.apache.skywalking.apm.plugin.grpc.v1.Constants.REQUEST_ON_CANCEL_OPERATION_NAME;
import static org.apache.skywalking.apm.plugin.grpc.v1.Constants.REQUEST_ON_COMPLETE_OPERATION_NAME;
import static org.apache.skywalking.apm.plugin.grpc.v1.Constants.REQUEST_ON_MESSAGE_OPERATION_NAME;
import static org.apache.skywalking.apm.plugin.grpc.v1.Constants.RESPONSE_ON_CLOSE_OPERATION_NAME;
import static org.apache.skywalking.apm.plugin.grpc.v1.Constants.RESPONSE_ON_MESSAGE_OPERATION_NAME;
import static org.apache.skywalking.apm.plugin.grpc.v1.OperationNameFormatUtil.formatOperationName;
/**
 * Client-side tracing of gRPC calls. {@link #start} creates an exit span (unless a
 * blocking-call interceptor already created one), injects the tracing context into the
 * request headers, and wraps the response listener so its callbacks are traced as
 * local spans joined to the captured context snapshot.
 */
class TracingClientCall<REQUEST, RESPONSE> extends ForwardingClientCall.SimpleForwardingClientCall<REQUEST, RESPONSE> {

    private final String serviceName;
    private final String remotePeer;
    private final String operationPrefix;
    private final MethodDescriptor<REQUEST, RESPONSE> methodDescriptor;
    /** Captured in {@link #start}; restored by later callbacks to join the same segment. */
    private ContextSnapshot snapshot;

    TracingClientCall(ClientCall<REQUEST, RESPONSE> delegate, MethodDescriptor<REQUEST, RESPONSE> method,
        Channel channel) {
        super(delegate);
        this.methodDescriptor = method;
        this.serviceName = formatOperationName(method);
        this.remotePeer = channel.authority();
        // Reuse the operation name computed above instead of formatting it a second time.
        this.operationPrefix = serviceName + CLIENT;
    }

    @Override
    public void start(Listener<RESPONSE> responseListener, Metadata headers) {
        final AbstractSpan blockingSpan = (AbstractSpan) ContextManager.getRuntimeContext()
                                                                       .get(BLOCKING_CALL_EXIT_SPAN);
        final ContextCarrier contextCarrier = new ContextCarrier();
        // Avoid create ExitSpan repeatedly, ExitSpan of blocking calls will create by BlockingCallInterceptor.
        if (blockingSpan == null) {
            final AbstractSpan span = ContextManager.createExitSpan(serviceName, remotePeer);
            span.setComponent(ComponentsDefine.GRPC);
            span.setLayer(SpanLayer.RPC_FRAMEWORK);
        } else {
            ContextManager.getRuntimeContext().remove(BLOCKING_CALL_EXIT_SPAN);
        }
        // Copy the trace context into the request metadata so the server can continue the trace.
        ContextManager.inject(contextCarrier);
        CarrierItem contextItem = contextCarrier.items();
        while (contextItem.hasNext()) {
            contextItem = contextItem.next();
            Metadata.Key<String> headerKey = Metadata.Key.of(contextItem.getHeadKey(), Metadata.ASCII_STRING_MARSHALLER);
            headers.put(headerKey, contextItem.getHeadValue());
        }
        snapshot = ContextManager.capture();
        try {
            delegate().start(new TracingClientCallListener(responseListener, snapshot), headers);
        } catch (Throwable t) {
            ContextManager.activeSpan().log(t);
            throw t;
        } finally {
            // Spans of blocking calls are finished by BlockingCallInterceptor, not here.
            if (blockingSpan == null) {
                ContextManager.stopSpan();
            }
        }
    }

    @Override
    public void sendMessage(REQUEST message) {
        // Unary/server-streaming requests send a single message; skip the per-message span.
        if (methodDescriptor.getType().clientSendsOneMessage()) {
            super.sendMessage(message);
            return;
        }
        final AbstractSpan span = ContextManager.createLocalSpan(operationPrefix + REQUEST_ON_MESSAGE_OPERATION_NAME);
        span.setComponent(ComponentsDefine.GRPC);
        span.setLayer(SpanLayer.RPC_FRAMEWORK);
        ContextManager.continued(snapshot);
        try {
            super.sendMessage(message);
        } catch (Throwable t) {
            ContextManager.activeSpan().log(t);
            throw t;
        } finally {
            ContextManager.stopSpan();
        }
    }

    @Override
    public void halfClose() {
        final AbstractSpan span = ContextManager.createLocalSpan(operationPrefix + REQUEST_ON_COMPLETE_OPERATION_NAME);
        span.setComponent(ComponentsDefine.GRPC);
        span.setLayer(SpanLayer.RPC_FRAMEWORK);
        ContextManager.continued(snapshot);
        try {
            super.halfClose();
        } catch (Throwable t) {
            ContextManager.activeSpan().log(t);
            throw t;
        } finally {
            ContextManager.stopSpan();
        }
    }

    @Override
    public void cancel(@Nullable String message, @Nullable Throwable cause) {
        final AbstractSpan span = ContextManager.createLocalSpan(operationPrefix + REQUEST_ON_CANCEL_OPERATION_NAME);
        span.setComponent(ComponentsDefine.GRPC);
        span.setLayer(SpanLayer.RPC_FRAMEWORK);
        ContextManager.continued(snapshot);
        if (cause != null) {
            span.log(cause);
        }
        try {
            super.cancel(message, cause);
        } catch (Throwable t) {
            ContextManager.activeSpan().log(t);
            throw t;
        } finally {
            ContextManager.stopSpan();
        }
    }

    /** Traces the response-side callbacks as local spans joined to the call's snapshot. */
    class TracingClientCallListener extends ForwardingClientCallListener.SimpleForwardingClientCallListener<RESPONSE> {
        private final ContextSnapshot contextSnapshot;

        TracingClientCallListener(Listener<RESPONSE> delegate, ContextSnapshot contextSnapshot) {
            super(delegate);
            this.contextSnapshot = contextSnapshot;
        }

        @Override
        public void onMessage(RESPONSE message) {
            // Unary/client-streaming responses carry a single message; skip the per-message span.
            if (methodDescriptor.getType().serverSendsOneMessage()) {
                super.onMessage(message);
                return;
            }
            final AbstractSpan span = ContextManager.createLocalSpan(operationPrefix + RESPONSE_ON_MESSAGE_OPERATION_NAME);
            span.setComponent(ComponentsDefine.GRPC);
            span.setLayer(SpanLayer.RPC_FRAMEWORK);
            ContextManager.continued(contextSnapshot);
            try {
                delegate().onMessage(message);
            } catch (Throwable t) {
                // NOTE(review): unlike the request-side methods, callbacks log the error
                // without rethrowing — presumably deliberate so a failure here does not
                // break the response path; confirm before changing.
                ContextManager.activeSpan().log(t);
            } finally {
                ContextManager.stopSpan();
            }
        }

        @Override
        public void onClose(Status status, Metadata trailers) {
            final AbstractSpan span = ContextManager.createLocalSpan(operationPrefix + RESPONSE_ON_CLOSE_OPERATION_NAME);
            span.setComponent(ComponentsDefine.GRPC);
            span.setLayer(SpanLayer.RPC_FRAMEWORK);
            ContextManager.continued(contextSnapshot);
            if (!status.isOk()) {
                // Record the failed RPC status on the span.
                span.log(status.asRuntimeException());
                Tags.STATUS_CODE.set(span, status.getCode().name());
            }
            try {
                delegate().onClose(status, trailers);
            } catch (Throwable t) {
                // NOTE(review): logged without rethrow, matching onMessage — confirm intent.
                ContextManager.activeSpan().log(t);
            } finally {
                ContextManager.stopSpan();
            }
        }
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.gradle.service.project;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.model.DataNode;
import com.intellij.openapi.externalSystem.model.Key;
import com.intellij.openapi.externalSystem.model.ProjectKeys;
import com.intellij.openapi.externalSystem.model.project.*;
import com.intellij.openapi.externalSystem.model.task.TaskData;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.externalSystem.util.ExternalSystemConstants;
import com.intellij.openapi.externalSystem.util.ExternalSystemDebugEnvironment;
import com.intellij.openapi.module.StdModuleTypes;
import com.intellij.openapi.roots.DependencyScope;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ContainerUtil;
import org.gradle.api.artifacts.Dependency;
import org.gradle.tooling.model.GradleProject;
import org.gradle.tooling.model.gradle.BasicGradleProject;
import org.gradle.tooling.model.gradle.GradleBuild;
import org.gradle.tooling.model.idea.IdeaModule;
import org.gradle.util.GradleVersion;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.gradle.DefaultExternalDependencyId;
import org.jetbrains.plugins.gradle.ExternalDependencyId;
import org.jetbrains.plugins.gradle.model.*;
import org.jetbrains.plugins.gradle.model.data.GradleSourceSetData;
import org.jetbrains.plugins.gradle.util.GradleConstants;
import org.jetbrains.plugins.gradle.util.GradleUtil;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Queue;
import static org.jetbrains.plugins.gradle.service.project.GradleProjectResolver.CONFIGURATION_ARTIFACTS;
/**
* @author Vladislav.Soroka
* @since 10/6/2015
*/
public class GradleProjectResolverUtil {
private static final Logger LOG = Logger.getInstance(GradleProjectResolverUtil.class);
@NotNull
private static final Key<Object> CONTAINER_KEY = Key.create(Object.class, ExternalSystemConstants.UNORDERED);
/**
 * Builds the {@link ModuleData} node for the given Gradle module and attaches it
 * as a child of the project data node. Group/version/description/artifacts are
 * copied from the {@link ExternalProject} model when it is available.
 */
@NotNull
public static DataNode<ModuleData> createMainModule(@NotNull ProjectResolverContext resolverCtx,
                                                    @NotNull IdeaModule gradleModule,
                                                    @NotNull DataNode<ProjectData> projectDataNode) {
  final String moduleName = gradleModule.getName();
  if (moduleName == null) {
    throw new IllegalStateException("Module with undefined name detected: " + gradleModule);
  }
  final ProjectData projectData = projectDataNode.getData();
  final String rootPath = projectData.getLinkedExternalProjectPath();
  final String moduleConfigPath = getModuleConfigPath(resolverCtx, gradleModule, rootPath);
  // Modules outside the linked project root get a hash-based folder name instead of a relative path.
  final String relativePath = FileUtil.isAncestor(rootPath, moduleConfigPath, false)
                              ? FileUtil.getRelativePath(rootPath, moduleConfigPath, '/')
                              : String.valueOf(FileUtil.pathHashCode(moduleConfigPath));
  final String ideProjectPath = resolverCtx.getIdeProjectPath();
  final String moduleFileDirectoryPath;
  if (ideProjectPath == null) {
    moduleFileDirectoryPath = moduleConfigPath;
  }
  else {
    final String suffix = relativePath == null || relativePath.equals(".") ? "" : relativePath;
    moduleFileDirectoryPath = ideProjectPath + "/.idea/modules/" + suffix;
  }
  if (ExternalSystemDebugEnvironment.DEBUG_ORPHAN_MODULES_PROCESSING) {
    LOG.info(String.format(
      "Creating module data ('%s') with the external config path: '%s'", gradleModule.getGradleProject().getPath(), moduleConfigPath
    ));
  }
  final String gradlePath = gradleModule.getGradleProject().getPath();
  // The root module is identified by its name; sub-modules by their Gradle path.
  final String moduleId = StringUtil.isEmpty(gradlePath) || ":".equals(gradlePath) ? moduleName : gradlePath;
  final ModuleData moduleData =
    new ModuleData(moduleId, GradleConstants.SYSTEM_ID, StdModuleTypes.JAVA.getId(), moduleName,
                   moduleFileDirectoryPath, moduleConfigPath);
  final ExternalProject externalProject = resolverCtx.getExtraProject(gradleModule, ExternalProject.class);
  if (externalProject != null) {
    moduleData.setGroup(externalProject.getGroup());
    moduleData.setVersion(externalProject.getVersion());
    moduleData.setDescription(externalProject.getDescription());
    moduleData.setArtifacts(externalProject.getArtifacts());
  }
  return projectDataNode.createChild(ProjectKeys.MODULE, moduleData);
}
/**
 * Resolves the on-disk config path (root directory) of {@code gradleModule}.
 * Prefers the {@link GradleBuild} model when available; otherwise (or when
 * canonicalization fails) derives the path from the Gradle project itself.
 */
@NotNull
public static String getModuleConfigPath(@NotNull ProjectResolverContext resolverCtx,
                                         @NotNull IdeaModule gradleModule,
                                         @NotNull String rootProjectPath) {
  final GradleBuild build = resolverCtx.getExtraProject(gradleModule, GradleBuild.class);
  if (build != null) {
    final File moduleDir = getModuleDirPath(build, gradleModule.getGradleProject().getPath());
    if (moduleDir == null) {
      throw new IllegalStateException(String.format("Unable to find root directory for module '%s'", gradleModule.getName()));
    }
    try {
      return ExternalSystemApiUtil.toCanonicalPath(moduleDir.getCanonicalPath());
    }
    catch (IOException e) {
      // Fall through to the Gradle-project-based fallback below.
      LOG.warn("construction of the canonical path for the module fails", e);
    }
  }
  return GradleUtil.getConfigPath(gradleModule.getGradleProject(), rootProjectPath);
}
/**
 * Resolves the physical (file-system) root directory of the Gradle project identified by the given
 * "logical" Gradle path.
 * <p>
 * Gradle's logical path (colon-separated, e.g. {@code :apps:app}) does not have to match the physical
 * layout: {@code settings.gradle} may remap a sub-project's directory, for example
 * <pre>
 *   include ':app'
 *   project(':app').projectDir = new File(rootDir, 'apps/app')
 * </pre>
 * in which case the project at {@code ${projectRootDir}/apps/app} has the Gradle path {@code :app}.
 * </p>
 *
 * @param build model describing the root Gradle project and all of its sub-projects, including
 *              their physical locations.
 * @param path  the logical Gradle path of the wanted project.
 * @return the project's root directory, or {@code null} if no project with that path exists in the build.
 */
@Nullable
public static File getModuleDirPath(@NotNull GradleBuild build, @NotNull String path) {
  for (BasicGradleProject candidate : build.getProjects()) {
    if (candidate.getPath().equals(path)) {
      return candidate.getProjectDirectory();
    }
  }
  return null;
}
@NotNull
public static String getModuleId(@NotNull IdeaModule gradleModule) {
  // Delegate to the (gradlePath, moduleName) overload using the module's backing Gradle project.
  GradleProject project = gradleModule.getGradleProject();
  return getModuleId(project.getPath(), project.getName());
}
@NotNull
public static String getModuleId(@NotNull ExternalProject externalProject) {
  // The qualified name plays the role of the Gradle path for external projects.
  String qualifiedName = externalProject.getQName();
  return getModuleId(qualifiedName, externalProject.getName());
}
@NotNull
public static String getModuleId(String gradlePath, String moduleName) {
  // The root project has an empty (or ":") Gradle path; use the module name for it,
  // otherwise the Gradle path itself serves as the module id.
  boolean isRootPath = gradlePath == null || gradlePath.isEmpty() || ":".equals(gradlePath);
  return isRootPath ? moduleName : gradlePath;
}
@NotNull
public static String getModuleId(@NotNull ExternalProject externalProject, @NotNull ExternalSourceSet sourceSet) {
  // Source-set modules are identified as "<projectModuleId>:<sourceSetName>".
  return getModuleId(externalProject) + ":" + sourceSet.getName();
}
@NotNull
public static String getModuleId(@NotNull ExternalProjectDependency projectDependency) {
  DependencyScope scope = getDependencyScope(projectDependency.getScope());
  String projectPath = projectDependency.getProjectPath();
  // Root-project dependencies (empty or ":" path) fall back to the dependency name.
  String baseId;
  if (StringUtil.isEmpty(projectPath) || ":".equals(projectPath)) {
    baseId = projectDependency.getName();
  }
  else {
    baseId = projectPath;
  }
  // The default configuration maps to the ":main"/":test" source-set module; any other
  // configuration is appended verbatim.
  String configurationName = projectDependency.getConfigurationName();
  String suffix;
  if (Dependency.DEFAULT_CONFIGURATION.equals(configurationName)) {
    suffix = scope == DependencyScope.TEST ? ":test" : ":main";
  }
  else {
    suffix = ":" + configurationName;
  }
  return baseId + suffix;
}
@NotNull
public static DependencyScope getDependencyScope(@Nullable String scope) {
  // A missing scope defaults to COMPILE; otherwise the string must match an enum constant name.
  if (scope == null) {
    return DependencyScope.COMPILE;
  }
  return DependencyScope.valueOf(scope);
}
public static void attachGradleSdkSources(@NotNull final IdeaModule gradleModule,
                                          @Nullable final File libFile,
                                          @NotNull final LibraryData library,
                                          @NotNull final ProjectResolverContext resolverCtx) {
  // Without the build-script classpath model we know neither the Gradle home nor the Gradle version,
  // so there is nothing to attach.
  final BuildScriptClasspathModel classpathModel = resolverCtx.getExtraProject(gradleModule, BuildScriptClasspathModel.class);
  if (classpathModel == null) {
    return;
  }
  final File gradleHome = classpathModel.getGradleHomeDir();
  if (gradleHome == null) {
    return;
  }
  attachGradleSdkSources(libFile, library, gradleHome, GradleVersion.version(classpathModel.getGradleVersion()));
}
public static void attachGradleSdkSources(@Nullable final File libFile,
                                          @NotNull final LibraryData library,
                                          @NotNull final File gradleHomeDir,
                                          @NotNull final GradleVersion gradleVersion) {
  // Only Gradle distribution jars located under the Gradle home are eligible.
  if (libFile == null || !libFile.getName().startsWith("gradle-")) {
    return;
  }
  if (!FileUtil.isAncestor(gradleHomeDir, libFile, true)) {
    return;
  }
  // Distribution jars live in either "<home>/lib" or "<home>/lib/plugins".
  File parent = libFile.getParentFile();
  if (parent != null && "plugins".equals(parent.getName())) {
    parent = parent.getParentFile();
  }
  if (parent == null || !"lib".equals(parent.getName()) || parent.getParentFile() == null) {
    return;
  }
  File srcDir = new File(parent.getParentFile(), "src");
  // Since Gradle 1.9 the sources directory is split per component, e.g. "src/core"
  // for "gradle-core-<version>.jar"; derive the sub-directory from the jar file name.
  if (gradleVersion.compareTo(GradleVersion.version("1.9")) >= 0) {
    int versionIndex = libFile.getName().indexOf(gradleVersion.getVersion());
    if (versionIndex != -1) {
      srcDir = new File(srcDir, libFile.getName().substring("gradle-".length(), versionIndex - 1));
    }
  }
  if (srcDir.isDirectory()) {
    library.addPath(LibraryPathType.SOURCE, srcDir.getAbsolutePath());
  }
}
/**
 * Converts the given external dependencies into IDE {@code DependencyData} nodes for the supplied
 * module node and returns them as a flat collection.
 * <p>
 * The conversion is performed by {@link #buildDependencies} against a detached "fake" node so that
 * the real module node is not mutated; the resulting dependency nodes are then collected recursively.
 *
 * @param moduleDataNode the module whose project node supplies the resolved source-set and artifact maps
 * @param dependencies   the external dependencies to convert
 * @return every {@code DependencyData} produced for {@code dependencies}
 * @throws IllegalStateException propagated from {@link #buildDependencies}
 */
@SuppressWarnings("unchecked")
public static Collection<DependencyData> getIdeDependencies(@NotNull DataNode<? extends ModuleData> moduleDataNode,
@NotNull Collection<ExternalDependency> dependencies)
throws IllegalStateException {
// The project node carries the user-data maps filled in earlier by the project resolver.
final DataNode<ProjectData> ideProject = ExternalSystemApiUtil.findParent(moduleDataNode, ProjectKeys.PROJECT);
assert ideProject != null;
final Map<String, Pair<DataNode<GradleSourceSetData>, ExternalSourceSet>> sourceSetMap =
ideProject.getUserData(GradleProjectResolver.RESOLVED_SOURCE_SETS);
assert sourceSetMap != null;
final Map<String, String> artifactsMap = ideProject.getUserData(CONFIGURATION_ARTIFACTS);
assert artifactsMap != null;
// Detached container node: dependency children are created under it instead of the real module node.
DataNode fakeNode = new DataNode(CONTAINER_KEY, moduleDataNode.getData(), null);
buildDependencies(sourceSetMap, artifactsMap, fakeNode, dependencies, null);
// Flatten the produced tree into the DependencyData payloads.
final Collection<DataNode<?>> dataNodes =
ExternalSystemApiUtil.findAllRecursively(fakeNode, node -> node.getData() instanceof DependencyData);
return ContainerUtil.map(dataNodes, node -> (DependencyData)node.getData());
}
/**
 * Builds IDE dependency nodes under {@code ownerDataNode} for the given external dependencies.
 * <p>
 * First pass (breadth-first over the dependency graph): deduplicate dependencies by id, merging
 * library dependencies that resolve to different files into a single
 * {@code DefaultExternalMultiLibraryDependency}, and widening the scope of repeated dependencies
 * where needed. Second pass: delegate the actual node creation to {@link #doBuildDependencies}.
 *
 * @param sourceSetMap  module-id to (source-set node, source set) map resolved earlier
 * @param artifactsMap  canonical artifact path to module-id map
 * @param ownerDataNode node that receives the created dependency children
 * @param dependencies  external dependencies to convert
 * @param ideProject    project node used for linking project-level libraries; may be {@code null}
 * @throws IllegalStateException on inconsistent dependency data
 */
public static void buildDependencies(@NotNull Map<String, Pair<DataNode<GradleSourceSetData>, ExternalSourceSet>> sourceSetMap,
@NotNull final Map<String, String> artifactsMap,
@NotNull DataNode<? extends ExternalEntityData> ownerDataNode,
@NotNull Collection<ExternalDependency> dependencies,
@Nullable DataNode<ProjectData> ideProject) throws IllegalStateException {
Map<ExternalDependencyId, ExternalDependency> dependencyMap = ContainerUtil.newHashMap();
// Walk the whole (transitive) dependency graph breadth-first.
Queue<ExternalDependency> queue = ContainerUtil.newLinkedList(dependencies);
while (!queue.isEmpty()) {
final ExternalDependency dependency = queue.remove();
ExternalDependency seenDependency = dependencyMap.get(dependency.getId());
if (seenDependency != null) {
if (dependency instanceof ExternalLibraryDependency) {
// Same id but a different file: merge both into a multi-library dependency.
if (seenDependency instanceof ExternalLibraryDependency &&
!FileUtil.filesEqual(((ExternalLibraryDependency)seenDependency).getFile(),
((ExternalLibraryDependency)dependency).getFile())) {
DefaultExternalMultiLibraryDependency mergedDependency = new DefaultExternalMultiLibraryDependency();
mergedDependency.setName(dependency.getId().getName());
mergedDependency.setGroup(dependency.getId().getGroup());
mergedDependency.setVersion(dependency.getId().getVersion());
mergedDependency.setPackaging(dependency.getId().getPackaging());
mergedDependency.setClassifier(dependency.getId().getClassifier());
mergedDependency.setScope(dependency.getScope());
mergedDependency.setClasspathOrder(dependency.getClasspathOrder());
mergedDependency.getDependencies().addAll(dependency.getDependencies());
// packNullables drops null files/sources/javadoc entries.
mergedDependency.getFiles().addAll(ContainerUtil.packNullables(
((ExternalLibraryDependency)seenDependency).getFile(), ((ExternalLibraryDependency)dependency).getFile()));
mergedDependency.getSources().addAll((ContainerUtil.packNullables(
((ExternalLibraryDependency)seenDependency).getSource(), ((ExternalLibraryDependency)dependency).getSource())));
mergedDependency.getJavadoc().addAll((ContainerUtil.packNullables(
((ExternalLibraryDependency)seenDependency).getJavadoc(), ((ExternalLibraryDependency)dependency).getJavadoc())));
dependencyMap.put(dependency.getId(), mergedDependency);
continue;
}
// Already merged earlier: just append this occurrence's files to the existing merge.
else if (seenDependency instanceof DefaultExternalMultiLibraryDependency) {
DefaultExternalMultiLibraryDependency mergedDependency = (DefaultExternalMultiLibraryDependency)seenDependency;
mergedDependency.getFiles().addAll(ContainerUtil.packNullables(((ExternalLibraryDependency)dependency).getFile()));
mergedDependency.getSources().addAll(ContainerUtil.packNullables(((ExternalLibraryDependency)dependency).getSource()));
mergedDependency.getJavadoc().addAll(ContainerUtil.packNullables(((ExternalLibraryDependency)dependency).getJavadoc()));
continue;
}
}
// Keep the previously-seen entry unless the new occurrence widens its scope.
DependencyScope prevScope =
seenDependency.getScope() == null ? DependencyScope.COMPILE : DependencyScope.valueOf(seenDependency.getScope());
DependencyScope currentScope =
dependency.getScope() == null ? DependencyScope.COMPILE : DependencyScope.valueOf(dependency.getScope());
if (prevScope.isForProductionCompile()) continue;
if (prevScope.isForProductionRuntime() && currentScope.isForProductionRuntime()) continue;
}
dependencyMap.put(new DefaultExternalDependencyId(dependency.getId()), dependency);
queue.addAll(dependency.getDependencies());
}
doBuildDependencies(sourceSetMap, artifactsMap, dependencyMap, ownerDataNode, dependencies, ideProject);
}
/**
 * Recursively creates IDE dependency nodes for {@code dependencies} under {@code ownerDataNode},
 * substituting each dependency with its merged form from {@code mergedDependencyMap} when present.
 * <p>
 * Dispatches on the (merged) dependency type: project dependencies become module dependencies
 * (or a module-level library of their artifacts when the target source set cannot be resolved),
 * library dependencies become project- or module-level libraries, multi-library and file-collection
 * dependencies become module-level libraries, and unresolved dependencies become "unresolved"
 * project libraries whose failure message is recorded as a binary path.
 *
 * @throws IllegalStateException on inconsistent dependency data
 */
private static void doBuildDependencies(@NotNull Map<String, Pair<DataNode<GradleSourceSetData>, ExternalSourceSet>> sourceSetMap,
@NotNull final Map<String, String> artifactsMap,
@NotNull Map<ExternalDependencyId, ExternalDependency> mergedDependencyMap,
@NotNull DataNode<? extends ExternalEntityData> ownerDataNode,
@NotNull Collection<ExternalDependency> dependencies,
@Nullable DataNode<ProjectData> ideProject) throws IllegalStateException {
// Deduplicate the direct dependencies by id, folding the transitive dependencies of
// duplicates into the first occurrence.
Map<ExternalDependencyId, ExternalDependency> dependencyMap = ContainerUtil.newLinkedHashMap();
for (ExternalDependency dependency : dependencies) {
final ExternalDependency dep = dependencyMap.get(dependency.getId());
if (dep instanceof AbstractExternalDependency) {
dep.getDependencies().addAll(ContainerUtil.subtract(dependency.getDependencies(), dep.getDependencies()));
}
else {
dependencyMap.put(dependency.getId(), dependency);
}
}
for (ExternalDependency dependency : dependencyMap.values()) {
// Prefer the globally merged form of this dependency when one exists.
final ExternalDependency mergedDependency = ContainerUtil.getOrElse(mergedDependencyMap, dependency.getId(), dependency);
DependencyScope dependencyScope = getDependencyScope(mergedDependency.getScope());
// The owning module is either the node's own data or, for nested dependency nodes,
// the owner recorded on the parent dependency.
ModuleData ownerModule = null;
if (ownerDataNode.getData() instanceof ModuleData) {
ownerModule = (ModuleData)ownerDataNode.getData();
}
else if (ownerDataNode.getData() instanceof DependencyData) {
ownerModule = ((DependencyData)ownerDataNode.getData()).getOwnerModule();
}
assert ownerModule != null;
// Node under which this dependency's own (transitive) dependencies will be created.
DataNode<? extends ExternalEntityData> depOwnerDataNode = null;
if (mergedDependency instanceof ExternalProjectDependency) {
// Local holder pairing a resolved source-set node with the artifacts contributed to it.
class ProjectDependencyInfo {
DataNode<GradleSourceSetData> dataNode;
ExternalSourceSet sourceSet;
Collection<File> dependencyArtifacts;
public ProjectDependencyInfo(Pair<DataNode<GradleSourceSetData>, ExternalSourceSet> pair, Collection<File> dependencyArtifacts) {
this.dataNode = pair.first;
this.sourceSet = pair.second;
this.dependencyArtifacts = dependencyArtifacts;
}
}
final ExternalProjectDependency projectDependency = (ExternalProjectDependency)mergedDependency;
String moduleId = getModuleId(projectDependency);
Collection<ProjectDependencyInfo> projectDependencyInfos = ContainerUtil.newArrayList();
Pair<DataNode<GradleSourceSetData>, ExternalSourceSet> projectPair = sourceSetMap.get(moduleId);
if (projectPair == null) {
// Module id lookup failed: try to resolve each artifact back to its producing module.
for (File file : projectDependency.getProjectDependencyArtifacts()) {
moduleId = artifactsMap.get(ExternalSystemApiUtil.toCanonicalPath(file.getAbsolutePath()));
if (moduleId != null) {
projectPair = sourceSetMap.get(moduleId);
if (projectPair != null) {
projectDependencyInfos.add(new ProjectDependencyInfo(projectPair, Collections.singleton(file)));
}
}
}
}
else {
projectDependencyInfos.add(new ProjectDependencyInfo(projectPair, projectDependency.getProjectDependencyArtifacts()));
}
if (projectDependencyInfos.isEmpty()) {
// Unresolvable project dependency: expose its artifacts (if any) as an unnamed module library.
final LibraryLevel level = LibraryLevel.MODULE;
final LibraryData library = new LibraryData(GradleConstants.SYSTEM_ID, "");
LibraryDependencyData libraryDependencyData = new LibraryDependencyData(ownerModule, library, level);
libraryDependencyData.setScope(dependencyScope);
libraryDependencyData.setOrder(mergedDependency.getClasspathOrder());
libraryDependencyData.setExported(mergedDependency.getExported());
if (!projectDependency.getProjectDependencyArtifacts().isEmpty()) {
for (File artifact : projectDependency.getProjectDependencyArtifacts()) {
library.addPath(LibraryPathType.BINARY, artifact.getPath());
}
depOwnerDataNode = ownerDataNode.createChild(ProjectKeys.LIBRARY_DEPENDENCY, libraryDependencyData);
} else {
depOwnerDataNode = ownerDataNode;
}
}
else {
for (ProjectDependencyInfo projectDependencyInfo : projectDependencyInfos) {
ModuleDependencyData moduleDependencyData = new ModuleDependencyData(ownerModule, projectDependencyInfo.dataNode.getData());
moduleDependencyData.setScope(dependencyScope);
if ("test".equals(projectDependencyInfo.sourceSet.getName())) {
moduleDependencyData.setProductionOnTestDependency(true);
}
moduleDependencyData.setOrder(mergedDependency.getClasspathOrder());
moduleDependencyData.setExported(mergedDependency.getExported());
moduleDependencyData.setModuleDependencyArtifacts(ContainerUtil.map(projectDependencyInfo.dependencyArtifacts, File::getPath));
depOwnerDataNode = ownerDataNode.createChild(ProjectKeys.MODULE_DEPENDENCY, moduleDependencyData);
}
// put transitive dependencies to the ownerDataNode,
// since we can not determine from what project dependency artifact it was originated
if(projectDependencyInfos.size() > 1) {
depOwnerDataNode = ownerDataNode;
}
}
}
else if (mergedDependency instanceof ExternalLibraryDependency) {
// A named library is registered at project level (and linked below); an unnamed one stays module-level.
String libraryName = mergedDependency.getId().getPresentableName();
final LibraryLevel level = StringUtil.isNotEmpty(libraryName) ? LibraryLevel.PROJECT : LibraryLevel.MODULE;
final LibraryData library = new LibraryData(GradleConstants.SYSTEM_ID, libraryName);
LibraryDependencyData libraryDependencyData = new LibraryDependencyData(ownerModule, library, level);
libraryDependencyData.setScope(dependencyScope);
libraryDependencyData.setOrder(mergedDependency.getClasspathOrder());
libraryDependencyData.setExported(mergedDependency.getExported());
library.addPath(LibraryPathType.BINARY, ((ExternalLibraryDependency)mergedDependency).getFile().getAbsolutePath());
File sourcePath = ((ExternalLibraryDependency)mergedDependency).getSource();
if (sourcePath != null) {
library.addPath(LibraryPathType.SOURCE, sourcePath.getAbsolutePath());
}
File javaDocPath = ((ExternalLibraryDependency)mergedDependency).getJavadoc();
if (javaDocPath != null) {
library.addPath(LibraryPathType.DOC, javaDocPath.getAbsolutePath());
}
depOwnerDataNode = ownerDataNode.createChild(ProjectKeys.LIBRARY_DEPENDENCY, libraryDependencyData);
if (StringUtil.isNotEmpty(libraryName)) {
linkProjectLibrary(ideProject, library);
}
}
else if (mergedDependency instanceof ExternalMultiLibraryDependency) {
// Merged multi-file library: all binaries/sources/javadoc go into one module-level library.
final LibraryLevel level = LibraryLevel.MODULE;
String libraryName = mergedDependency.getId().getPresentableName();
final LibraryData library = new LibraryData(GradleConstants.SYSTEM_ID, libraryName);
LibraryDependencyData libraryDependencyData = new LibraryDependencyData(ownerModule, library, level);
libraryDependencyData.setScope(dependencyScope);
libraryDependencyData.setOrder(mergedDependency.getClasspathOrder());
libraryDependencyData.setExported(mergedDependency.getExported());
for (File file : ((ExternalMultiLibraryDependency)mergedDependency).getFiles()) {
library.addPath(LibraryPathType.BINARY, file.getAbsolutePath());
}
for (File file : ((ExternalMultiLibraryDependency)mergedDependency).getSources()) {
library.addPath(LibraryPathType.SOURCE, file.getAbsolutePath());
}
for (File file : ((ExternalMultiLibraryDependency)mergedDependency).getJavadoc()) {
library.addPath(LibraryPathType.DOC, file.getAbsolutePath());
}
depOwnerDataNode = ownerDataNode.createChild(ProjectKeys.LIBRARY_DEPENDENCY, libraryDependencyData);
}
else if (mergedDependency instanceof FileCollectionDependency) {
// Plain file collection (e.g. "files(...)" in the build script): unnamed module library.
// NOTE: depOwnerDataNode is intentionally left null here, so transitive dependencies are not recursed into.
final LibraryLevel level = LibraryLevel.MODULE;
String libraryName = "";
final LibraryData library = new LibraryData(GradleConstants.SYSTEM_ID, libraryName);
LibraryDependencyData libraryDependencyData = new LibraryDependencyData(ownerModule, library, level);
libraryDependencyData.setScope(dependencyScope);
libraryDependencyData.setOrder(mergedDependency.getClasspathOrder());
libraryDependencyData.setExported(mergedDependency.getExported());
for (File file : ((FileCollectionDependency)mergedDependency).getFiles()) {
library.addPath(LibraryPathType.BINARY, file.getAbsolutePath());
}
ownerDataNode.createChild(ProjectKeys.LIBRARY_DEPENDENCY, libraryDependencyData);
}
else if (mergedDependency instanceof UnresolvedExternalDependency) {
// Unresolved dependency: record it as an "unresolved" project library; the resolution
// failure message is stored as the library's binary path for later display.
final LibraryLevel level = LibraryLevel.PROJECT;
String libraryName = mergedDependency.getId().getPresentableName();
final LibraryData library = new LibraryData(GradleConstants.SYSTEM_ID, libraryName, true);
LibraryDependencyData libraryDependencyData = new LibraryDependencyData(ownerModule, library, level);
libraryDependencyData.setScope(dependencyScope);
final String failureMessage = ((UnresolvedExternalDependency)mergedDependency).getFailureMessage();
if (failureMessage != null) {
library.addPath(LibraryPathType.BINARY, failureMessage);
}
ownerDataNode.createChild(ProjectKeys.LIBRARY_DEPENDENCY, libraryDependencyData);
linkProjectLibrary(ideProject, library);
}
// Recurse into the dependency's own dependencies under the node chosen above.
if (depOwnerDataNode != null) {
doBuildDependencies(sourceSetMap, artifactsMap, mergedDependencyMap, depOwnerDataNode, dependency.getDependencies(), ideProject);
}
}
}
public static void linkProjectLibrary(@Nullable DataNode<ProjectData> ideProject, @NotNull final LibraryData library) {
  // Nothing to link against when no project node is available.
  if (ideProject == null) {
    return;
  }
  // Register the library on the project node unless an equal one is already attached.
  DataNode<LibraryData> existing =
    ExternalSystemApiUtil.find(ideProject, ProjectKeys.LIBRARY, node -> library.equals(node.getData()));
  if (existing == null) {
    ideProject.createChild(ProjectKeys.LIBRARY, library);
  }
}
public static boolean isIdeaTask(final String taskName, @Nullable String group) {
  // A task is IDEA-related when its name mentions "idea" and it either has no group
  // or belongs to the "ide" or "other" task group.
  if (!StringUtil.containsIgnoreCase(taskName, "idea")) {
    return false;
  }
  return group == null || "ide".equalsIgnoreCase(group) || "other".equalsIgnoreCase(group);
}
@Nullable
public static DataNode<ModuleData> findModule(@Nullable final DataNode<ProjectData> projectNode, @NotNull final String modulePath) {
  // No project node means there is nothing to search.
  if (projectNode == null) {
    return null;
  }
  return ExternalSystemApiUtil.find(projectNode, ProjectKeys.MODULE, node -> {
    String linkedPath = node.getData().getLinkedExternalProjectPath();
    return linkedPath.equals(modulePath);
  });
}
@Nullable
public static DataNode<ModuleData> findModuleById(@Nullable final DataNode<ProjectData> projectNode, @NotNull final String path) {
  // No project node means there is nothing to search.
  if (projectNode == null) {
    return null;
  }
  return ExternalSystemApiUtil.find(projectNode, ProjectKeys.MODULE, node -> {
    String moduleId = node.getData().getId();
    return moduleId.equals(path);
  });
}
/**
 * Finds the task node matching {@code taskPath} within the module identified by {@code taskPath}'s
 * qualified prefix (e.g. ":sub:project" of ":sub:project:build"), falling back to the module at
 * {@code modulePath} when the prefix does not resolve or is not an ancestor of {@code modulePath}.
 *
 * @param projectNode root project node to search; {@code null} yields {@code null}
 * @param modulePath  file-system path of the module expected to own the task
 * @param taskPath    either a plain task name or a colon-qualified Gradle task path
 * @return the matching task node, or {@code null} if no module or task matches
 */
@Nullable
public static DataNode<TaskData> findTask(@Nullable final DataNode<ProjectData> projectNode,
                                          @NotNull final String modulePath,
                                          @NotNull final String taskPath) {
  DataNode<ModuleData> moduleNode;
  final String taskName;
  if (StringUtil.startsWith(taskPath, ":")) {
    // Qualified path: split at the last ':' into module id and task name.
    final int i = taskPath.lastIndexOf(':');
    String path = taskPath.substring(0, i);
    moduleNode = findModuleById(projectNode, path);
    if (moduleNode == null || !FileUtil.isAncestor(moduleNode.getData().getLinkedExternalProjectPath(), modulePath, false)) {
      moduleNode = findModule(projectNode, modulePath);
    }
    // Since taskPath starts with ':', lastIndexOf(':') is within [0, length - 1], so this
    // substring is always valid; the original's "(i + 1) <= taskPath.length()" guard was dead code.
    taskName = taskPath.substring(i + 1);
  }
  else {
    moduleNode = findModule(projectNode, modulePath);
    taskName = taskPath;
  }
  if (moduleNode == null) return null;
  // Match either the bare task name or the full qualified path.
  return ExternalSystemApiUtil.find(moduleNode, ProjectKeys.TASK, node -> {
    String name = node.getData().getName();
    return name.equals(taskName) || name.equals(taskPath);
  });
}
}
| |
package server.rest.controllers;
import org.springframework.web.bind.annotation.*;
import server.database.DatabaseConnection;
import server.model.*;
import server.rest.responses.ContractorsResponse;
import server.rest.responses.ReportResponse;
import server.rest.responses.Response;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Controller for Contractors Table
* Provides all the REST endpoints related to Contractors and stored SQL procedures
*/
@CrossOrigin(origins = {"http://localhost:1234","http://theterminal.s3-website.us-west-2.amazonaws.com"}, methods = {RequestMethod.GET, RequestMethod.POST})
@RestController
public class ContractorsController extends Controller {
// Date format used by getSQLDate for all incoming date strings.
private final static String DATE_FORMAT = "yyyy-MM-dd";
// Fetches every contractor row.
private final static String getQuery = "select * from Contractor";
// Inserts one contractor; parameter order: id, firstName, surname, agencySource, status, rehire.
private final static String insertContractorQuery = "INSERT INTO Contractor(id, firstName, surname, agencySource, status, rehire) VALUES (?,?,?,?,?,?)";
// Updates one contractor by id; parameter order: firstName, surname, agencySource, status, id.
private static final String editContractorQuery = "UPDATE Contractor SET firstName=?, surname=?, agencySource=?, status=? WHERE id=?";
// Inserts one engagement contract; the column list below fixes the 20-parameter bind order.
private final static String insertEngagementContractQuery = "INSERT INTO EngagementContract(" +
"id," +
"startDate," +
"endDate," +
"rateType," +
"projectName," +
"chargeType," +
"dailyAllowance," +
"originalDocumentation," +
"terminationNum," +
"contractorId," +
"resourceId," +
"hrPositionId," +
"hrPayGradeId," +
"costCenterId," +
"reportingManagerUserId," +
"currencyCode," +
"mainSkillId," +
"timeAndMaterialTerms," +
"poNum," +
"hourlyRate)" +
"VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
// Updates one engagement contract by id; the SET list fixes the bind order, id binds last.
private final static String editEngagementContractorQuery = "UPDATE EngagementContract SET " +
"startDate=?, " +
"endDate=?, " +
"rateType=?, " +
"projectName=?, " +
"chargeType=?, " +
"dailyAllowance=?, " +
"originalDocumentation=?, " +
"terminationNum=?, " +
"contractorId=?, " +
"resourceId=?, " +
"hrPositionId=?, " +
"hrPayGradeId=?, " +
"costCenterId=?, " +
"reportingManagerUserId=?, " +
"currencyCode=?, " +
"mainSkillId=?, " +
"timeAndMaterialTerms=?, " +
"poNum=?, " +
"hourlyRate=? " +
"WHERE id=?";
// Joins contractors with their contracts and all contract-related lookup tables.
// NOTE(review): inner joins drop contractors that have no engagement contract — confirm intended.
private final static String viewAllContractorDataQuery = "SELECT * FROM Contractor c\n" +
"INNER JOIN EngagementContract e ON e.contractorId=c.id\n" +
"INNER JOIN HRPositionRole p ON p.id=e.hrPositionId\n" +
"INNER JOIN HRPayGrade pg ON pg.id=e.hrPayGradeId\n" +
"INNER JOIN CostCenter cc on cc.id=e.costCenterId\n" +
"INNER JOIN Skill s on s.id=e.mainSkillId\n" +
"INNER JOIN HiringManager rp on rp.userId=e.reportingManagerUserId\n" +
"ORDER BY c.id";
/**
 * Edits a Contractor in the Database
 * @param id ID of the contractor to be edited
 * @param firstName first name of the contractor
 * @param lastName last name of the contractor
 * @param agencySource agency source of the contractor
 * @param status status of the contractor
 * @return list of contractor with just the edited contractor as an element in it.
 * @throws SQLException if something goes wrong whilst querying/updating the database
 */
public ArrayList<Contractor> editContractor(String id, String firstName, String lastName, String agencySource, String status) throws SQLException {
    DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword);
    connection.openConnection();
    if (!connection.isConnected()) {
        throw new SQLException("Failed to connect to database");
    }
    try {
        PreparedStatement st = connection.getPreparedStatement(editContractorQuery);
        int index = 1;
        st.setString(index++, firstName);
        st.setString(index++, lastName);
        st.setString(index++, agencySource);
        st.setString(index++, status);
        st.setString(index++, id);
        if (st.executeUpdate() == 0) {
            throw new SQLException("Failed to update Contractor data");
        }
        connection.commitTransaction();
    } finally {
        // Always release the connection, even when the update fails (the original leaked it on error).
        connection.closeConnection();
    }
    ArrayList<Contractor> contractors = new ArrayList<Contractor>();
    // NOTE(review): rehire is hard-coded to true here although the UPDATE does not touch that
    // column — the returned value may disagree with the database; confirm intended.
    contractors.add(new Contractor(id, firstName, lastName, agencySource, status, true));
    return contractors;
}
/**
 * Gets the list of all contractors in the database
 * @return The list of all contractors in the database
 * @throws SQLException If something goes wrong whilst querying the database
 */
public ArrayList<Contractor> getContractors() throws SQLException {
    DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword);
    connection.openConnection();
    if (!connection.isConnected()) {
        throw new SQLException("Failed to connect to database");
    }
    ArrayList<Contractor> contractors = new ArrayList<Contractor>();
    try {
        PreparedStatement st = connection.getPreparedStatement(getQuery);
        ResultSet set = st.executeQuery();
        while (set.next()) {
            contractors.add(new Contractor(set.getString("id"),
                                           set.getString("firstName"),
                                           set.getString("surname"),
                                           set.getString("agencySource"),
                                           set.getString("status"),
                                           set.getBoolean("rehire")));
        }
    } finally {
        // Always release the connection, even when the query throws (the original leaked it on error).
        connection.closeConnection();
    }
    return contractors;
}
/**
 * Adds a contractor to the database
 * @param firstName First Name of the contractor
 * @param surName Last Name of the Contractor
 * @param agencySource Agency Source of the Contractor
 * @param status Status of the contractor
 * @return The list of Contractors with only the newly added contractor as its element
 * @throws SQLException if something goes wrong whilst querying/updating the database
 */
public ArrayList<Contractor> addContractor(String firstName, String surName, String agencySource, String status) throws SQLException {
    DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword);
    connection.openConnection();
    if (!connection.isConnected()) {
        throw new SQLException("Failed to connect to database");
    }
    String newContractorId = UUID.randomUUID().toString();
    // Newly added contractors are never flagged as rehires.
    final boolean rehire = false;
    try {
        PreparedStatement st = connection.getPreparedStatement(insertContractorQuery);
        int i = 1;
        st.setString(i++, newContractorId);
        st.setString(i++, firstName);
        st.setString(i++, surName);
        st.setString(i++, agencySource);
        st.setString(i++, status);
        st.setBoolean(i++, rehire);
        // Check the insert BEFORE committing (the original committed unconditionally).
        if (st.executeUpdate() == 0) {
            throw new SQLException("Failed to add contractor. SQL Update failed");
        }
        connection.commitTransaction();
    } finally {
        // Always release the connection, even when the insert fails (the original leaked it on error).
        connection.closeConnection();
    }
    ArrayList<Contractor> newContractor = new ArrayList<>();
    newContractor.add(new Contractor(newContractorId, firstName, surName, agencySource, status, rehire));
    return newContractor;
}
/**
 * Converts a date string in {@code yyyy-MM-dd} form into a {@link java.sql.Date}.
 * @param date The date in String format
 * @return The date in SQL format
 * @throws ParseException if the string provided is not a valid date
 */
private java.sql.Date getSQLDate(String date) throws ParseException {
    SimpleDateFormat format = new SimpleDateFormat(DATE_FORMAT);
    // Strict parsing: reject impossible dates such as "2020-13-40" instead of silently
    // rolling them over (SimpleDateFormat is lenient by default).
    format.setLenient(false);
    return new java.sql.Date(format.parse(date).getTime());
}
/**
 * REST endpoint listing every contractor.
 * @param token The unique token of the user making the api call
 * @return Response that contains all the contractors as a response
 */
@RequestMapping("/contractors/view")
public ContractorsResponse contractors(@RequestParam("token") String token) {
    // Reject callers that do not hold a valid session.
    if (!isUserLoggedIn(token)) {
        return ContractorsResponse.contractorsFailure("User is not logged in");
    }
    try {
        return new ContractorsResponse(this.getContractors());
    } catch (SQLException e) {
        Logger logger = Logger.getAnonymousLogger();
        logger.log(Level.INFO, "Get Contractors Failed: " + e.getMessage());
        return ContractorsResponse.contractorsFailure(e.getMessage());
    }
}
/**
 * REST endpoint creating a new contractor.
 * @param token The unique token of the user making the api call
 * @param firstName The first name of the new Contractor
 * @param surName The last name of the new Contractor
 * @param agencySource The agency source of the new Contractor
 * @param status The status of the new Contractor
 * @return Response that contains the added contractor or an error response
 */
@RequestMapping("/contractors/add")
public ContractorsResponse addContractor(
        @RequestParam("token") String token,
        @RequestParam("firstName") String firstName,
        @RequestParam("surname") String surName,
        @RequestParam("agencySource") String agencySource,
        @RequestParam("status") String status) {
    // Reject callers that do not hold a valid session.
    if (!isUserLoggedIn(token)) {
        return ContractorsResponse.contractorsFailure("User not logged in");
    }
    try {
        List<Contractor> created = this.addContractor(firstName, surName, agencySource, status);
        return new ContractorsResponse(created);
    } catch (SQLException e) {
        Logger logger = Logger.getAnonymousLogger();
        logger.log(Level.INFO, "Add Contractor Failed: " + e.getMessage());
        return ContractorsResponse.contractorsFailure(e.getMessage());
    }
}
/**
 * REST API call to edit an engagement contract of a specific Contractor
 * @param token The unique token of the user making the API call
 * @param id The id of the engagement contract
 * @param startDate The start date of the contract (yyyy-MM-dd)
 * @param endDate The end date of the contract (yyyy-MM-dd)
 * @param rateType The rate type of the contract
 * @param projectName The project name of the contract
 * @param chargeType The charge type of the contract
 * @param dailyAllowance The daily allowance of the contract
 * @param originalDocumentation The original documentation of the contract
 * @param terminationNum The termination number of the contract
 * @param contractorId The id of the contractor associated with the contract
 * @param resourceId The id of the resource provided
 * @param hrPositionId The id of the hr position
 * @param hrPayGradeId The id of the hr pay grade for the contract
 * @param costCenterId The id of the cost center associated with the contractor
 * @param reportingManagerId The id of the reporting manager
 * @param currencyCode The currency code for all the input fields
 * @param mainSkillId The id of the skill that was used for hiring
 * @param timeMaterialTerms The time material terms for the contract
 * @param poNum The purchase order number of the contract
 * @param hourlyRate The hourly rate negotiated for this contract
 * @return Response stating whether the edit was successful or not
 */
@CrossOrigin("*")
@RequestMapping(value = "/contractors/edit/engagementContract", method={RequestMethod.POST})
public Response editEngagementContract(
        @RequestParam("token") String token,
        @RequestParam("id") String id,
        @RequestParam("startDate") String startDate,
        @RequestParam("endDate") String endDate,
        @RequestParam("rateType") String rateType,
        @RequestParam("projectName") String projectName,
        @RequestParam("chargeType") String chargeType,
        @RequestParam("dailyAllowance") int dailyAllowance,
        @RequestParam("originalDocumentation") String originalDocumentation,
        @RequestParam("terminationNum") int terminationNum,
        @RequestParam("contractorId") String contractorId,
        @RequestParam("resourceId") String resourceId,
        @RequestParam("hrPositionId") String hrPositionId,
        @RequestParam("hrPayGradeId") String hrPayGradeId,
        @RequestParam("costCenterId") String costCenterId,
        @RequestParam("reportingManagerId") String reportingManagerId,
        @RequestParam("currencyCode") String currencyCode,
        @RequestParam("mainSkillId") String mainSkillId,
        @RequestParam("timeMaterialTerms") int timeMaterialTerms,
        @RequestParam("poNum") int poNum,
        // NOTE(review): the request-param name "hourlyrate" (all lowercase) is kept for
        // backward compatibility with existing clients — confirm whether "hourlyRate" was intended.
        @RequestParam("hourlyrate") int hourlyRate
) {
    if (!isUserLoggedIn(token)) {
        return Response.createErrorResponse("User not logged in");
    }
    DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword);
    try {
        connection.openConnection();
        if (!connection.isConnected()) {
            return Response.createErrorResponse("Edit engagement Contract: Failed to open database");
        }
        try {
            java.sql.Date startDateSQL = getSQLDate(startDate);
            java.sql.Date endDateSQL = getSQLDate(endDate);
            PreparedStatement st = connection.getPreparedStatement(editEngagementContractorQuery);
            int i = 1;
            st.setDate(i++, startDateSQL);
            st.setDate(i++, endDateSQL);
            st.setString(i++, rateType);
            st.setString(i++, projectName);
            st.setString(i++, chargeType);
            st.setInt(i++, dailyAllowance);
            st.setString(i++, originalDocumentation);
            st.setInt(i++, terminationNum);
            st.setString(i++, contractorId);
            st.setString(i++, resourceId);
            st.setString(i++, hrPositionId);
            st.setString(i++, hrPayGradeId);
            st.setString(i++, costCenterId);
            st.setString(i++, reportingManagerId);
            st.setString(i++, currencyCode);
            st.setString(i++, mainSkillId);
            st.setInt(i++, timeMaterialTerms);
            st.setInt(i++, poNum);
            st.setInt(i++, hourlyRate);
            st.setString(i++, id);
            if (st.executeUpdate() == 0) {
                // Fixed copy-pasted "Add Engagement Contract" wording in this edit endpoint.
                return Response.createErrorResponse("Edit Engagement Contract failed. SQL Update failed");
            }
            connection.commitTransaction();
        } finally {
            // Always release the connection, even on error paths (the original leaked it).
            connection.closeConnection();
        }
    } catch (SQLException | ParseException e) {
        Logger.getAnonymousLogger().log(Level.INFO, e.getMessage());
        // The original built this error response but never returned it, so every failure
        // was reported to the client as success.
        return Response.createErrorResponse("Edit engagement Contract: " + e.getMessage());
    }
    return new Response();
}
/**
* REST API call to edit an engagement contract of a specific Contractor
* @param token The unique token of the user making the API call
* @param startDate The start date of the contract
* @param endDate The end date of the contract
* @param rateType The rate type of the contract
* @param projectName The project name of the contract
* @param chargeType The charge type of the contract
* @param dailyAllowance The daily allowance of the contract
* @param originalDocumentation The original documentation of the contract
* @param terminationNum The termination number of the contract
* @param contractorId The id of the contractor associated with the contractor
* @param resourceId The id of the resource provided
* @param hrPositionId The id of the hr position
* @param hrPayGradeId The id of the hr pay grade for the contract
* @param costCenterId The id of the cost center associated with the contractor
* @param reportingManagerId The id of the reporting manager
* @param currencyCode The currency code for all the input fields
* @param mainSkillId The id of the skill that was used for hiring
* @param timeMaterialTerms The time material terms for the contract
* @param poNum
* @param hourlyRate The hourly rate negotiated for this contract
* @return Response stating whether the edit was successful or not
*/
@CrossOrigin("*")
@RequestMapping(value = "/contractors/add/engagementContract", method={RequestMethod.POST})
public Response addEngagementContract(@RequestParam("token") String token,
@RequestParam("startDate") String startDate,
@RequestParam("endDate") String endDate,
@RequestParam("rateType") String rateType,
@RequestParam("projectName") String projectName,
@RequestParam("chargeType") String chargeType,
@RequestParam("dailyAllowance") int dailyAllowance,
@RequestParam("originalDocumentation") String originalDocumentation,
@RequestParam("terminationNum") int terminationNum,
@RequestParam("contractorId") String contractorId,
@RequestParam("resourceId") String resourceId,
@RequestParam("hrPositionId") String hrPositionId,
@RequestParam("hrPayGradeId") String hrPayGradeId,
@RequestParam("costCenterId") String costCenterId,
@RequestParam("reportingManagerId") String reportingManagerId,
@RequestParam("currencyCode") String currencyCode,
@RequestParam("mainSkillId") String mainSkillId,
@RequestParam("timeMaterialTerms") int timeMaterialTerms,
@RequestParam("poNum") int poNum,
@RequestParam("hourlyrate") int hourlyRate) {
if (!isUserLoggedIn(token)) {
return Response.createErrorResponse("User not logged in");
}
DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword);
try {
connection.openConnection();
if (!connection.isConnected()) {
return Response.createErrorResponse("Add Engagement Contract: Error opening database connection");
}
String engagementContractId = UUID.randomUUID().toString();
java.sql.Date startDateSQL = getSQLDate(startDate);
java.sql.Date endDateSQL = getSQLDate(endDate);
PreparedStatement st = connection.getPreparedStatement(insertEngagementContractQuery);
int i = 1;
st.setString(i++, engagementContractId);
st.setDate(i++, startDateSQL);
st.setDate(i++, endDateSQL);
st.setString(i++, rateType);
st.setString(i++, projectName);
st.setString(i++, chargeType);
st.setInt(i++, dailyAllowance);
st.setString(i++, originalDocumentation);
st.setInt(i++, terminationNum);
st.setString(i++, contractorId);
st.setString(i++, resourceId);
st.setString(i++, hrPositionId);
st.setString(i++, hrPayGradeId);
st.setString(i++, costCenterId);
st.setString(i++, reportingManagerId);
st.setString(i++, currencyCode);
st.setString(i++, mainSkillId);
st.setInt(i++, timeMaterialTerms);
st.setInt(i++, poNum);
st.setInt(i++, hourlyRate);
int success = st.executeUpdate();
if(success == 0) {
return Response.createErrorResponse("Add Engagement Contract failed. SQL Update failed");
}
connection.commitTransaction();
connection.closeConnection();
} catch (SQLException e) {
Logger logger = Logger.getAnonymousLogger();
logger.log(Level.INFO, "Add Engagement Contract Failed: " + e.getMessage());
return Response.createErrorResponse("Add Engagement Contract failed: " + e.getMessage());
} catch (ParseException e) {
Logger logger = Logger.getAnonymousLogger();
logger.log(Level.INFO, "Add Engagement Contract Failed: " + e.getMessage());
return Response.createErrorResponse("Add Engagement Contract Failed:" + e.getMessage());
}
return new Response();
}
/**
* REST API call to edit a contractor
* @param token The unique token of the user making the API call
* @param id The id of the contractor
* @param firstName The first name of the Contractor
* @param surname The last name of the Contractor
* @param agencySource The Agency Source of the Contractor
* @param status The status of the Contractor
* @return Response that contains the edited contractor or an error response
*/
@RequestMapping("/contractors/edit")
public ContractorsResponse editContractor(
@RequestParam("token") String token,
@RequestParam("id") String id,
@RequestParam("firstName") String firstName,
@RequestParam("surname") String surname,
@RequestParam("agencySource") String agencySource,
@RequestParam("status") String status) {
if (!isUserLoggedIn(token)) {
return ContractorsResponse.contractorsFailure("User is not logged in");
}
ArrayList<Contractor> contractors;
try {
contractors = editContractor(id, firstName, surname, agencySource, status);
} catch (SQLException e) {
Logger.getAnonymousLogger().log(Level.INFO, e.getMessage());
return ContractorsResponse.contractorsFailure(e.getMessage());
}
return new ContractorsResponse(contractors);
}
    /**
     * REST API call for viewing all contractor data, including every engagement
     * contract (with cost center, position, pay grade, skill and hiring manager).
     * @param token The unique token of the User making the API call
     * @return Response that contains all the contractor info or an error response
     */
    @RequestMapping("/contractors/viewAllData")
    public ContractorsResponse viewAllContractorData(@RequestParam("token") String token) {
        if (!isUserLoggedIn(token)) {
            return ContractorsResponse.contractorsFailure("User is not logged in");
        }
        DatabaseConnection connection = new DatabaseConnection(dbConnectionUrl, dbUsername, dbPassword);
        List<Contractor> allContractorData = new ArrayList<>();
        try {
            connection.openConnection();
            if (!connection.isConnected()) {
                return ContractorsResponse.contractorsFailure("View All Data Failed: Error opening database connection");
            }
            PreparedStatement st = connection.getPreparedStatement(viewAllContractorDataQuery);
            ResultSet set = st.executeQuery();
            // The joined query yields one row per engagement contract. Rows for the same
            // contractor are merged: a new Contractor is only created when the contractor
            // id differs from the previous row's — assumes the query orders rows so that
            // a contractor's rows are adjacent (TODO confirm against the query's ORDER BY).
            Contractor lastContractor = null;
            while(set.next()) {
                String contractorId = set.getString("c.id");
                // NOTE(review): table-qualified labels such as "c.id" depend on
                // driver-specific column-label handling — verify against the JDBC driver.
                if(lastContractor == null || !lastContractor.getId().equals(contractorId)) {
                    //get contractor data
                    lastContractor = new Contractor(contractorId,
                            set.getString("c.firstName"),
                            set.getString("c.surname"),
                            set.getString("agencySource"),
                            set.getString("status"),
                            set.getBoolean("rehire"));
                    allContractorData.add(lastContractor);
                }
                // Build the per-row associated entities for this engagement contract.
                CostCenter costCenter = new CostCenter(
                        set.getString("cc.id"),
                        set.getString("location")
                );
                HRPositionRole positionRole = new HRPositionRole(
                        set.getString("p.id"),
                        set.getString("roleName"),
                        set.getString("p.description")
                );
                HRPayGrade payGrade = new HRPayGrade(
                        set.getString("pg.id"),
                        set.getInt("startAmount"),
                        set.getInt("endAmount"),
                        set.getString("pg.name")
                );
                Skill mainSkill = new Skill(
                        set.getString("s.id"),
                        set.getString("s.name"),
                        set.getString("type"),
                        set.getString("s.description")
                );
                HiringManager hiringManager = new HiringManager(
                        set.getString("userId"),
                        set.getString("rp.firstName"),
                        set.getString("lastName")
                );
                EngagementContract newContract = new EngagementContract(
                        set.getString("e.id"),
                        set.getDate("startDate"),
                        set.getDate("endDate"),
                        set.getString("rateType"),
                        set.getString("projectName"),
                        set.getString("chargeType"),
                        set.getInt("dailyAllowance"),
                        set.getString("originalDocumentation"),
                        set.getInt("terminationNum"),
                        costCenter,
                        set.getString("currencyCode"),
                        set.getInt("timeAndMaterialTerms"),
                        set.getInt("poNum"),
                        set.getInt("hourlyRate"),
                        positionRole,
                        payGrade,
                        mainSkill,
                        set.getBoolean("rehire"),
                        hiringManager
                );
                lastContractor.addEngagementContract(newContract);
            }
            // NOTE(review): the connection is only closed on the success path; on
            // SQLException it is leaked — confirm whether closeConnection() can be
            // called safely from a finally block.
            connection.closeConnection();
        } catch (SQLException e) {
            Logger logger = Logger.getAnonymousLogger();
            logger.log(Level.INFO, "View all contractor data failed: " + e.getMessage());
            return ContractorsResponse.contractorsFailure("View all contractor data failed: " + e.getMessage());
        }
        return new ContractorsResponse(allContractorData);
    }
/**
* Get's contractor data formatted for viewing reports.
* @param token The user log in token.
* @return Contractor Data formatted for viewing reports.
*/
@RequestMapping("contractors/viewReportData")
public ReportResponse viewReportData(@RequestParam("token") String token) {
if (!isUserLoggedIn(token)) {
return ReportResponse.reportsFailure("User is not logged in");
}
List<ReportData> reportData = new ArrayList<>();
ContractorsResponse response = viewAllContractorData(token);
if (response.isError()) {
return ReportResponse.reportsFailure(response.getErrorMessage());
}
for(Contractor contractor : response.getContractors()) {
reportData.addAll(Contractor.generateReportData(contractor));
}
return new ReportResponse(reportData);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.executiongraph;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.accumulators.Accumulator;
import org.apache.flink.api.common.accumulators.IntCounter;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.CheckpointingOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;
import org.apache.flink.runtime.accumulators.AccumulatorSnapshot;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.blob.BlobWriter;
import org.apache.flink.runtime.blob.PermanentBlobService;
import org.apache.flink.runtime.blob.VoidBlobWriter;
import org.apache.flink.runtime.checkpoint.CheckpointRetentionPolicy;
import org.apache.flink.runtime.checkpoint.StandaloneCheckpointRecoveryFactory;
import org.apache.flink.runtime.deployment.InputGateDeploymentDescriptor;
import org.apache.flink.runtime.deployment.ResultPartitionDeploymentDescriptor;
import org.apache.flink.runtime.deployment.TaskDeploymentDescriptor;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.failover.RestartAllStrategy;
import org.apache.flink.runtime.executiongraph.restart.NoRestartStrategy;
import org.apache.flink.runtime.executiongraph.utils.SimpleAckingTaskManagerGateway;
import org.apache.flink.runtime.instance.SimpleSlot;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.jobgraph.DistributionPattern;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobStatus;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.ScheduleMode;
import org.apache.flink.runtime.jobgraph.tasks.CheckpointCoordinatorConfiguration;
import org.apache.flink.runtime.jobgraph.tasks.JobCheckpointingSettings;
import org.apache.flink.runtime.jobmaster.LogicalSlot;
import org.apache.flink.runtime.jobmaster.SlotOwner;
import org.apache.flink.runtime.jobmaster.TestingLogicalSlot;
import org.apache.flink.runtime.jobmaster.slotpool.SlotProvider;
import org.apache.flink.runtime.operators.BatchTask;
import org.apache.flink.runtime.taskmanager.LocalTaskManagerLocation;
import org.apache.flink.runtime.taskmanager.TaskExecutionState;
import org.apache.flink.runtime.taskmanager.TaskManagerLocation;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.testtasks.NoOpInvokable;
import org.apache.flink.runtime.testutils.DirectScheduledExecutorService;
import org.apache.flink.util.TestLogger;
import org.apache.flink.util.function.FunctionUtils;
import org.junit.Test;
import org.slf4j.LoggerFactory;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
/**
* Tests for {@link ExecutionGraph} deployment.
*/
public class ExecutionGraphDeploymentTest extends TestLogger {
	/**
	 * BLOB server instance to use for the job graph. Defaults to {@link VoidBlobWriter},
	 * under which job/task information stays inline (not offloaded).
	 */
	protected BlobWriter blobWriter = VoidBlobWriter.getInstance();
	/**
	 * Permanent BLOB cache instance to use for the actor gateway that handles the {@link
	 * TaskDeploymentDescriptor} loading (may be <tt>null</tt>).
	 */
	protected PermanentBlobService blobCache = null;
/**
* Checks that the job information for the given ID has been offloaded successfully (if
* offloading is used).
*
* @param eg the execution graph that was created
*/
protected void checkJobOffloaded(ExecutionGraph eg) throws Exception {
assertTrue(eg.getJobInformationOrBlobKey().isLeft());
}
/**
* Checks that the task information for the job vertex has been offloaded successfully (if
* offloading is used).
*
* @param eg the execution graph that was created
* @param jobVertexId job vertex ID
*/
protected void checkTaskOffloaded(ExecutionGraph eg, JobVertexID jobVertexId) throws Exception {
assertTrue(eg.getJobVertex(jobVertexId).getTaskInformationOrBlobKey().isLeft());
}
@Test
public void testBuildDeploymentDescriptor() {
try {
final JobID jobId = new JobID();
final JobVertexID jid1 = new JobVertexID();
final JobVertexID jid2 = new JobVertexID();
final JobVertexID jid3 = new JobVertexID();
final JobVertexID jid4 = new JobVertexID();
JobVertex v1 = new JobVertex("v1", jid1);
JobVertex v2 = new JobVertex("v2", jid2);
JobVertex v3 = new JobVertex("v3", jid3);
JobVertex v4 = new JobVertex("v4", jid4);
v1.setParallelism(10);
v2.setParallelism(10);
v3.setParallelism(10);
v4.setParallelism(10);
v1.setInvokableClass(BatchTask.class);
v2.setInvokableClass(BatchTask.class);
v3.setInvokableClass(BatchTask.class);
v4.setInvokableClass(BatchTask.class);
v2.connectNewDataSetAsInput(v1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
v3.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
v4.connectNewDataSetAsInput(v2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
final JobInformation expectedJobInformation = new DummyJobInformation(
jobId,
"some job");
DirectScheduledExecutorService executor = new DirectScheduledExecutorService();
ExecutionGraph eg = new ExecutionGraph(
expectedJobInformation,
executor,
executor,
AkkaUtils.getDefaultTimeout(),
new NoRestartStrategy(),
new RestartAllStrategy.Factory(),
new TestingSlotProvider(ignore -> new CompletableFuture<>()),
ExecutionGraph.class.getClassLoader(),
blobWriter,
AkkaUtils.getDefaultTimeout());
eg.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
checkJobOffloaded(eg);
List<JobVertex> ordered = Arrays.asList(v1, v2, v3, v4);
eg.attachJobGraph(ordered);
ExecutionJobVertex ejv = eg.getAllVertices().get(jid2);
ExecutionVertex vertex = ejv.getTaskVertices()[3];
final SimpleAckingTaskManagerGateway taskManagerGateway = new SimpleAckingTaskManagerGateway();
final CompletableFuture<TaskDeploymentDescriptor> tdd = new CompletableFuture<>();
taskManagerGateway.setSubmitConsumer(FunctionUtils.uncheckedConsumer(taskDeploymentDescriptor -> {
taskDeploymentDescriptor.loadBigData(blobCache);
tdd.complete(taskDeploymentDescriptor);
}));
final LogicalSlot slot = new TestingLogicalSlot(taskManagerGateway);
assertEquals(ExecutionState.CREATED, vertex.getExecutionState());
vertex.deployToSlot(slot);
assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState());
checkTaskOffloaded(eg, vertex.getJobvertexId());
TaskDeploymentDescriptor descr = tdd.get();
assertNotNull(descr);
JobInformation jobInformation =
descr.getSerializedJobInformation().deserializeValue(getClass().getClassLoader());
TaskInformation taskInformation =
descr.getSerializedTaskInformation().deserializeValue(getClass().getClassLoader());
assertEquals(jobId, descr.getJobId());
assertEquals(jobId, jobInformation.getJobId());
assertEquals(jid2, taskInformation.getJobVertexId());
assertEquals(3, descr.getSubtaskIndex());
assertEquals(10, taskInformation.getNumberOfSubtasks());
assertEquals(BatchTask.class.getName(), taskInformation.getInvokableClassName());
assertEquals("v2", taskInformation.getTaskName());
Collection<ResultPartitionDeploymentDescriptor> producedPartitions = descr.getProducedPartitions();
Collection<InputGateDeploymentDescriptor> consumedPartitions = descr.getInputGates();
assertEquals(2, producedPartitions.size());
assertEquals(1, consumedPartitions.size());
Iterator<ResultPartitionDeploymentDescriptor> iteratorProducedPartitions = producedPartitions.iterator();
Iterator<InputGateDeploymentDescriptor> iteratorConsumedPartitions = consumedPartitions.iterator();
assertEquals(10, iteratorProducedPartitions.next().getNumberOfSubpartitions());
assertEquals(10, iteratorProducedPartitions.next().getNumberOfSubpartitions());
assertEquals(10, iteratorConsumedPartitions.next().getInputChannelDeploymentDescriptors().length);
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testRegistrationOfExecutionsFinishing() {
try {
final JobVertexID jid1 = new JobVertexID();
final JobVertexID jid2 = new JobVertexID();
JobVertex v1 = new JobVertex("v1", jid1);
JobVertex v2 = new JobVertex("v2", jid2);
Map<ExecutionAttemptID, Execution> executions = setupExecution(v1, 7650, v2, 2350).f1;
for (Execution e : executions.values()) {
e.markFinished();
}
assertEquals(0, executions.size());
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testRegistrationOfExecutionsFailing() {
try {
final JobVertexID jid1 = new JobVertexID();
final JobVertexID jid2 = new JobVertexID();
JobVertex v1 = new JobVertex("v1", jid1);
JobVertex v2 = new JobVertex("v2", jid2);
Map<ExecutionAttemptID, Execution> executions = setupExecution(v1, 7, v2, 6).f1;
for (Execution e : executions.values()) {
e.markFailed(null);
}
assertEquals(0, executions.size());
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testRegistrationOfExecutionsFailedExternally() {
try {
final JobVertexID jid1 = new JobVertexID();
final JobVertexID jid2 = new JobVertexID();
JobVertex v1 = new JobVertex("v1", jid1);
JobVertex v2 = new JobVertex("v2", jid2);
Map<ExecutionAttemptID, Execution> executions = setupExecution(v1, 7, v2, 6).f1;
for (Execution e : executions.values()) {
e.fail(null);
}
assertEquals(0, executions.size());
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
	/**
	 * Verifies that {@link ExecutionGraph#updateState(TaskExecutionState)} updates the accumulators and metrics for an
	 * execution that failed or was canceled.
	 */
	@Test
	public void testAccumulatorsAndMetricsForwarding() throws Exception {
		final JobVertexID jid1 = new JobVertexID();
		final JobVertexID jid2 = new JobVertexID();
		JobVertex v1 = new JobVertex("v1", jid1);
		JobVertex v2 = new JobVertex("v2", jid2);
		Tuple2<ExecutionGraph, Map<ExecutionAttemptID, Execution>> graphAndExecutions = setupExecution(v1, 1, v2, 1);
		ExecutionGraph graph = graphAndExecutions.f0;
		// verify behavior for canceled executions
		Execution execution1 = graphAndExecutions.f1.values().iterator().next();
		IOMetrics ioMetrics = new IOMetrics(0, 0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0);
		Map<String, Accumulator<?, ?>> accumulators = new HashMap<>();
		accumulators.put("acc", new IntCounter(4));
		AccumulatorSnapshot accumulatorSnapshot = new AccumulatorSnapshot(graph.getJobID(), execution1.getAttemptId(), accumulators);
		// reporting CANCELED with a snapshot + metrics must store both on the execution
		TaskExecutionState state = new TaskExecutionState(graph.getJobID(), execution1.getAttemptId(), ExecutionState.CANCELED, null, accumulatorSnapshot, ioMetrics);
		graph.updateState(state);
		assertEquals(ioMetrics, execution1.getIOMetrics());
		assertNotNull(execution1.getUserAccumulators());
		assertEquals(4, execution1.getUserAccumulators().get("acc").getLocalValue());
		// verify behavior for failed executions
		// NOTE(review): this re-reads the first element of the unchanged executions map,
		// so execution2 is the same Execution instance as execution1 — confirm whether a
		// distinct execution was intended for the FAILED case.
		Execution execution2 = graphAndExecutions.f1.values().iterator().next();
		IOMetrics ioMetrics2 = new IOMetrics(0, 0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0);
		Map<String, Accumulator<?, ?>> accumulators2 = new HashMap<>();
		accumulators2.put("acc", new IntCounter(8));
		AccumulatorSnapshot accumulatorSnapshot2 = new AccumulatorSnapshot(graph.getJobID(), execution2.getAttemptId(), accumulators2);
		TaskExecutionState state2 = new TaskExecutionState(graph.getJobID(), execution2.getAttemptId(), ExecutionState.FAILED, null, accumulatorSnapshot2, ioMetrics2);
		graph.updateState(state2);
		assertEquals(ioMetrics2, execution2.getIOMetrics());
		assertNotNull(execution2.getUserAccumulators());
		assertEquals(8, execution2.getUserAccumulators().get("acc").getLocalValue());
	}
	/**
	 * Verifies that {@link Execution#completeCancelling(Map, IOMetrics)} and {@link Execution#markFailed(Throwable, Map, IOMetrics)}
	 * store the given accumulators and metrics correctly.
	 */
	@Test
	public void testAccumulatorsAndMetricsStorage() throws Exception {
		final JobVertexID jid1 = new JobVertexID();
		final JobVertexID jid2 = new JobVertexID();
		JobVertex v1 = new JobVertex("v1", jid1);
		JobVertex v2 = new JobVertex("v2", jid2);
		Map<ExecutionAttemptID, Execution> executions = setupExecution(v1, 1, v2, 1).f1;
		IOMetrics ioMetrics = new IOMetrics(0, 0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0);
		Map<String, Accumulator<?, ?>> accumulators = Collections.emptyMap();
		// cancelling path: completeCancelling must store accumulators and metrics
		Execution execution1 = executions.values().iterator().next();
		execution1.cancel();
		execution1.completeCancelling(accumulators, ioMetrics);
		assertEquals(ioMetrics, execution1.getIOMetrics());
		assertEquals(accumulators, execution1.getUserAccumulators());
		// failing path: markFailed must store accumulators and metrics
		// NOTE(review): iterator().next() on the same map re-reads the first element, so
		// execution2 may be the same instance as execution1 (unless completeCancelling
		// deregistered it) — confirm which execution this stanza is meant to exercise.
		Execution execution2 = executions.values().iterator().next();
		execution2.markFailed(new Throwable(), accumulators, ioMetrics);
		assertEquals(ioMetrics, execution2.getIOMetrics());
		assertEquals(accumulators, execution2.getUserAccumulators());
	}
@Test
public void testRegistrationOfExecutionsCanceled() {
try {
final JobVertexID jid1 = new JobVertexID();
final JobVertexID jid2 = new JobVertexID();
JobVertex v1 = new JobVertex("v1", jid1);
JobVertex v2 = new JobVertex("v2", jid2);
Map<ExecutionAttemptID, Execution> executions = setupExecution(v1, 19, v2, 37).f1;
for (Execution e : executions.values()) {
e.cancel();
e.completeCancelling();
}
assertEquals(0, executions.size());
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
	/**
	 * Tests that a blocking batch job fails if there are not enough resources left to schedule the
	 * succeeding tasks. This test case is related to [FLINK-4296] where finished producing tasks
	 * swallow the fail exception when scheduling a consumer task.
	 */
	@Test
	public void testNoResourceAvailableFailure() throws Exception {
		final JobID jobId = new JobID();
		JobVertex v1 = new JobVertex("source");
		JobVertex v2 = new JobVertex("sink");
		int dop1 = 1;
		int dop2 = 1;
		v1.setParallelism(dop1);
		v2.setParallelism(dop2);
		v1.setInvokableClass(BatchTask.class);
		v2.setInvokableClass(BatchTask.class);
		// blocking edge: the sink is only scheduled after the source finishes
		v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING);
		// provide only dop1 slots — deliberately none left for the sink tasks
		final ArrayDeque<CompletableFuture<LogicalSlot>> slotFutures = new ArrayDeque<>();
		for (int i = 0; i < dop1; i++) {
			slotFutures.addLast(CompletableFuture.completedFuture(new TestingLogicalSlot()));
		}
		final SlotProvider slotProvider = new TestingSlotProvider(ignore -> slotFutures.removeFirst());
		final JobInformation jobInformation = new DummyJobInformation(
			jobId,
			"failing test job");
		DirectScheduledExecutorService directExecutor = new DirectScheduledExecutorService();
		// execution graph that executes actions synchronously
		ExecutionGraph eg = new ExecutionGraph(
			jobInformation,
			directExecutor,
			TestingUtils.defaultExecutor(),
			AkkaUtils.getDefaultTimeout(),
			new NoRestartStrategy(),
			new RestartAllStrategy.Factory(),
			slotProvider,
			ExecutionGraph.class.getClassLoader(),
			blobWriter,
			AkkaUtils.getDefaultTimeout());
		eg.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread())
		;
		checkJobOffloaded(eg);
		eg.setQueuedSchedulingAllowed(false);
		List<JobVertex> ordered = Arrays.asList(v1, v2);
		eg.attachJobGraph(ordered);
		// schedule, this triggers mock deployment
		eg.scheduleForExecution();
		ExecutionAttemptID attemptID = eg.getJobVertex(v1.getID()).getTaskVertices()[0].getCurrentExecutionAttempt().getAttemptId();
		eg.updateState(new TaskExecutionState(jobId, attemptID, ExecutionState.RUNNING));
		// finishing the source triggers scheduling of the sink, which must fail for lack
		// of slots and take the whole job to FAILED (the FLINK-4296 regression scenario)
		eg.updateState(new TaskExecutionState(jobId, attemptID, ExecutionState.FINISHED, null));
		assertEquals(JobStatus.FAILED, eg.getState());
	}
// ------------------------------------------------------------------------
// retained checkpoints config test
// ------------------------------------------------------------------------
@Test
public void testSettingDefaultMaxNumberOfCheckpointsToRetain() throws Exception {
final Configuration jobManagerConfig = new Configuration();
final ExecutionGraph eg = createExecutionGraph(jobManagerConfig);
assertEquals(CheckpointingOptions.MAX_RETAINED_CHECKPOINTS.defaultValue().intValue(),
eg.getCheckpointCoordinator().getCheckpointStore().getMaxNumberOfRetainedCheckpoints());
}
@Test
public void testSettingMaxNumberOfCheckpointsToRetain() throws Exception {
final int maxNumberOfCheckpointsToRetain = 10;
final Configuration jobManagerConfig = new Configuration();
jobManagerConfig.setInteger(CheckpointingOptions.MAX_RETAINED_CHECKPOINTS,
maxNumberOfCheckpointsToRetain);
final ExecutionGraph eg = createExecutionGraph(jobManagerConfig);
assertEquals(maxNumberOfCheckpointsToRetain,
eg.getCheckpointCoordinator().getCheckpointStore().getMaxNumberOfRetainedCheckpoints());
}
	/**
	 * Creates, starts and schedules an ExecutionGraph for a two-vertex job where every
	 * subtask immediately receives a testing slot, and returns the graph together with
	 * its map of registered executions.
	 *
	 * @param v1 first job vertex (invokable is overwritten with {@link BatchTask})
	 * @param dop1 parallelism for {@code v1}
	 * @param v2 second job vertex (invokable is overwritten with {@link BatchTask})
	 * @param dop2 parallelism for {@code v2}
	 * @return tuple of the started execution graph and its registered executions
	 */
	private Tuple2<ExecutionGraph, Map<ExecutionAttemptID, Execution>> setupExecution(JobVertex v1, int dop1, JobVertex v2, int dop2) throws Exception {
		final JobID jobId = new JobID();
		v1.setParallelism(dop1);
		v2.setParallelism(dop2);
		v1.setInvokableClass(BatchTask.class);
		v2.setInvokableClass(BatchTask.class);
		// one immediately-completed slot per subtask, handed out in FIFO order
		final ArrayDeque<CompletableFuture<LogicalSlot>> slotFutures = new ArrayDeque<>();
		for (int i = 0; i < dop1 + dop2; i++) {
			slotFutures.addLast(CompletableFuture.completedFuture(new TestingLogicalSlot()));
		}
		final SlotProvider slotProvider = new TestingSlotProvider(ignore -> slotFutures.removeFirst());
		final JobInformation jobInformation = new DummyJobInformation(
			jobId,
			"some job");
		DirectScheduledExecutorService executorService = new DirectScheduledExecutorService();
		// execution graph that executes actions synchronously
		ExecutionGraph eg = new ExecutionGraph(
			jobInformation,
			executorService,
			TestingUtils.defaultExecutor(),
			AkkaUtils.getDefaultTimeout(),
			new NoRestartStrategy(),
			new RestartAllStrategy.Factory(),
			slotProvider,
			ExecutionGraph.class.getClassLoader(),
			blobWriter,
			AkkaUtils.getDefaultTimeout());
		checkJobOffloaded(eg);
		eg.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
		eg.setQueuedSchedulingAllowed(false);
		List<JobVertex> ordered = Arrays.asList(v1, v2);
		eg.attachJobGraph(ordered);
		// schedule, this triggers mock deployment
		eg.scheduleForExecution();
		Map<ExecutionAttemptID, Execution> executions = eg.getRegisteredExecutions();
		// every subtask must have been deployed and registered
		assertEquals(dop1 + dop2, executions.size());
		return new Tuple2<>(eg, executions);
	}
@Test
public void testSettingIllegalMaxNumberOfCheckpointsToRetain() throws Exception {
final int negativeMaxNumberOfCheckpointsToRetain = -10;
final Configuration jobManagerConfig = new Configuration();
jobManagerConfig.setInteger(CheckpointingOptions.MAX_RETAINED_CHECKPOINTS,
negativeMaxNumberOfCheckpointsToRetain);
final ExecutionGraph eg = createExecutionGraph(jobManagerConfig);
assertNotEquals(negativeMaxNumberOfCheckpointsToRetain,
eg.getCheckpointCoordinator().getCheckpointStore().getMaxNumberOfRetainedCheckpoints());
assertEquals(CheckpointingOptions.MAX_RETAINED_CHECKPOINTS.defaultValue().intValue(),
eg.getCheckpointCoordinator().getCheckpointStore().getMaxNumberOfRetainedCheckpoints());
}
/**
 * Tests that eager scheduling will wait until all input locations have been set before
 * scheduling a task.
 *
 * <p>Topology: source (parallelism 2) -> ALL_TO_ALL pipelined -> sink (parallelism 2).
 * The sink tasks must not request slots until the source slot futures have completed,
 * because their preferred locations depend on where the sources were placed.
 */
@Test
public void testEagerSchedulingWaitsOnAllInputPreferredLocations() throws Exception {
final int parallelism = 2;
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(parallelism);
// generous timeout; the test is expected to finish long before it expires
final Time timeout = Time.hours(1L);
final JobVertexID sourceVertexId = new JobVertexID();
final JobVertex sourceVertex = new JobVertex("Test source", sourceVertexId);
sourceVertex.setInvokableClass(NoOpInvokable.class);
sourceVertex.setParallelism(parallelism);
final JobVertexID sinkVertexId = new JobVertexID();
final JobVertex sinkVertex = new JobVertex("Test sink", sinkVertexId);
sinkVertex.setInvokableClass(NoOpInvokable.class);
sinkVertex.setParallelism(parallelism);
sinkVertex.connectNewDataSetAsInput(sourceVertex, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
// one manually-completable slot future per subtask, keyed by vertex
final Map<JobVertexID, CompletableFuture<LogicalSlot>[]> slotFutures = new HashMap<>(2);
for (JobVertexID jobVertexID : Arrays.asList(sourceVertexId, sinkVertexId)) {
CompletableFuture<LogicalSlot>[] slotFutureArray = new CompletableFuture[parallelism];
for (int i = 0; i < parallelism; i++) {
slotFutureArray[i] = new CompletableFuture<>();
}
slotFutures.put(jobVertexID, slotFutureArray);
slotProvider.addSlots(jobVertexID, slotFutureArray);
}
final ScheduledExecutorService scheduledExecutorService = new ScheduledThreadPoolExecutor(3);
final ExecutionGraph executionGraph = ExecutionGraphTestUtils.createExecutionGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
scheduledExecutorService,
timeout,
sourceVertex,
sinkVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
// EAGER mode schedules all tasks up front, which is what this test exercises
executionGraph.setScheduleMode(ScheduleMode.EAGER);
executionGraph.scheduleForExecution();
// all tasks should be in state SCHEDULED
for (ExecutionVertex executionVertex : executionGraph.getAllExecutionVertices()) {
assertEquals(ExecutionState.SCHEDULED, executionVertex.getCurrentExecutionAttempt().getState());
}
// wait until the source vertex slots have been requested
assertTrue(slotProvider.getSlotRequestedFuture(sourceVertexId, 0).get());
assertTrue(slotProvider.getSlotRequestedFuture(sourceVertexId, 1).get());
// check that the sinks have not requested their slots because they need the location
// information of the sources
assertFalse(slotProvider.getSlotRequestedFuture(sinkVertexId, 0).isDone());
assertFalse(slotProvider.getSlotRequestedFuture(sinkVertexId, 1).isDone());
final TaskManagerLocation localTaskManagerLocation = new LocalTaskManagerLocation();
final SimpleSlot sourceSlot1 = createSlot(localTaskManagerLocation, 0);
final SimpleSlot sourceSlot2 = createSlot(localTaskManagerLocation, 1);
final SimpleSlot sinkSlot1 = createSlot(localTaskManagerLocation, 0);
final SimpleSlot sinkSlot2 = createSlot(localTaskManagerLocation, 1);
// completing the source slots fixes the source locations ...
slotFutures.get(sourceVertexId)[0].complete(sourceSlot1);
slotFutures.get(sourceVertexId)[1].complete(sourceSlot2);
// wait until the sink vertex slots have been requested after we completed the source slots
assertTrue(slotProvider.getSlotRequestedFuture(sinkVertexId, 0).get());
assertTrue(slotProvider.getSlotRequestedFuture(sinkVertexId, 1).get());
slotFutures.get(sinkVertexId)[0].complete(sinkSlot1);
slotFutures.get(sinkVertexId)[1].complete(sinkSlot2);
// ... and once all slots are assigned, every task should reach DEPLOYING
for (ExecutionVertex executionVertex : executionGraph.getAllExecutionVertices()) {
ExecutionGraphTestUtils.waitUntilExecutionState(executionVertex.getCurrentExecutionAttempt(), ExecutionState.DEPLOYING, 5000L);
}
}
/**
 * Creates a {@link SimpleSlot} at the given location with the given slot index.
 */
private SimpleSlot createSlot(TaskManagerLocation taskManagerLocation, int index) {
    // the slot owner is irrelevant for these tests, hence a plain mock
    final SlotOwner owner = mock(SlotOwner.class);
    return new SimpleSlot(owner, taskManagerLocation, index, new SimpleAckingTaskManagerGateway());
}
/**
 * A {@link JobVertex} whose {@code finalizeOnMaster} hook always throws, used to test
 * how the execution graph reacts to failures during job finalization.
 */
@SuppressWarnings("serial")
public static class FailingFinalizeJobVertex extends JobVertex {
public FailingFinalizeJobVertex(String name, JobVertexID id) {
super(name, id);
}
@Override
public void finalizeOnMaster(ClassLoader cl) throws Exception {
// fail unconditionally; the message is irrelevant to the tests using this vertex
throw new Exception();
}
}
/**
 * Builds a minimal {@link ExecutionGraph} with checkpointing enabled from the given
 * job manager configuration, so tests can inspect how configuration values are
 * applied to the checkpoint coordinator.
 */
private ExecutionGraph createExecutionGraph(Configuration configuration) throws Exception {
final ScheduledExecutorService executor = TestingUtils.defaultExecutor();
final JobID jobId = new JobID();
final JobGraph jobGraph = new JobGraph(jobId, "test");
// enable checkpointing with no vertices to trigger/ack/confirm; only the
// coordinator configuration matters for these tests
jobGraph.setSnapshotSettings(
new JobCheckpointingSettings(
Collections.<JobVertexID>emptyList(),
Collections.<JobVertexID>emptyList(),
Collections.<JobVertexID>emptyList(),
new CheckpointCoordinatorConfiguration(
100, // checkpoint interval
10 * 60 * 1000, // checkpoint timeout
0, // min pause between checkpoints
1, // max concurrent checkpoints
CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION,
false), // exactly once (not externalized)
null)); // default state backend
final Time timeout = Time.seconds(10L);
return ExecutionGraphBuilder.buildGraph(
null, // no prior execution graph
jobGraph,
configuration,
executor, // future executor
executor, // io executor
new ProgrammedSlotProvider(1),
getClass().getClassLoader(),
new StandaloneCheckpointRecoveryFactory(),
timeout,
new NoRestartStrategy(),
new UnregisteredMetricsGroup(),
1, // parallelism for auto-max
blobWriter,
timeout, // allocation timeout
LoggerFactory.getLogger(getClass()));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.kafka;
import java.net.URI;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import org.apache.camel.AsyncCallback;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.component.kafka.serde.KafkaHeaderSerializer;
import org.apache.camel.impl.DefaultAsyncProducer;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.util.URISupport;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.utils.Bytes;
public class KafkaProducer extends DefaultAsyncProducer {
@SuppressWarnings("rawtypes")
private org.apache.kafka.clients.producer.KafkaProducer kafkaProducer;
private final KafkaEndpoint endpoint;
private ExecutorService workerPool;
private boolean shutdownWorkerPool;
public KafkaProducer(KafkaEndpoint endpoint) {
super(endpoint);
this.endpoint = endpoint;
}
Properties getProps() {
Properties props = endpoint.getConfiguration().createProducerProperties();
endpoint.updateClassProperties(props);
// brokers can be configured on endpoint or component level
String brokers = endpoint.getConfiguration().getBrokers();
if (brokers == null) {
brokers = endpoint.getComponent().getBrokers();
}
if (brokers == null) {
throw new IllegalArgumentException("URL to the Kafka brokers must be configured with the brokers option on either the component or endpoint.");
}
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);
return props;
}
@SuppressWarnings("rawtypes")
public org.apache.kafka.clients.producer.KafkaProducer getKafkaProducer() {
return kafkaProducer;
}
/**
* To use a custom {@link org.apache.kafka.clients.producer.KafkaProducer} instance.
*/
@SuppressWarnings("rawtypes")
public void setKafkaProducer(org.apache.kafka.clients.producer.KafkaProducer kafkaProducer) {
this.kafkaProducer = kafkaProducer;
}
public ExecutorService getWorkerPool() {
return workerPool;
}
public void setWorkerPool(ExecutorService workerPool) {
this.workerPool = workerPool;
}
@Override
@SuppressWarnings("rawtypes")
protected void doStart() throws Exception {
Properties props = getProps();
if (kafkaProducer == null) {
ClassLoader threadClassLoader = Thread.currentThread().getContextClassLoader();
try {
// Kafka uses reflection for loading authentication settings, use its classloader
Thread.currentThread().setContextClassLoader(org.apache.kafka.clients.producer.KafkaProducer.class.getClassLoader());
kafkaProducer = new org.apache.kafka.clients.producer.KafkaProducer(props);
} finally {
Thread.currentThread().setContextClassLoader(threadClassLoader);
}
}
// if we are in asynchronous mode we need a worker pool
if (!endpoint.isSynchronous() && workerPool == null) {
workerPool = endpoint.createProducerExecutor();
// we create a thread pool so we should also shut it down
shutdownWorkerPool = true;
}
}
@Override
protected void doStop() throws Exception {
if (kafkaProducer != null) {
kafkaProducer.close();
}
if (shutdownWorkerPool && workerPool != null) {
endpoint.getCamelContext().getExecutorServiceManager().shutdown(workerPool);
workerPool = null;
}
}
@SuppressWarnings({"unchecked", "rawtypes"})
protected Iterator<ProducerRecord> createRecorder(Exchange exchange) throws Exception {
String topic = endpoint.getConfiguration().getTopic();
if (!endpoint.getConfiguration().isBridgeEndpoint()) {
String headerTopic = exchange.getIn().getHeader(KafkaConstants.TOPIC, String.class);
boolean allowHeader = true;
// when we do not bridge then detect if we try to send back to ourselves
// which we most likely do not want to do
if (headerTopic != null && endpoint.getConfiguration().isCircularTopicDetection()) {
Endpoint from = exchange.getFromEndpoint();
if (from instanceof KafkaEndpoint) {
String fromTopic = ((KafkaEndpoint) from).getConfiguration().getTopic();
allowHeader = !headerTopic.equals(fromTopic);
if (!allowHeader) {
log.debug("Circular topic detected from message header."
+ " Cannot send to same topic as the message comes from: {}"
+ ". Will use endpoint configured topic: {}", from, topic);
}
}
}
if (allowHeader && headerTopic != null) {
topic = headerTopic;
}
}
if (topic == null) {
// if topic property was not received from configuration or header parameters take it from the remaining URI
topic = URISupport.extractRemainderPath(new URI(endpoint.getEndpointUri()), true);
}
// endpoint take precedence over header configuration
final Integer partitionKey = endpoint.getConfiguration().getPartitionKey() != null
? endpoint.getConfiguration().getPartitionKey() : exchange.getIn().getHeader(KafkaConstants.PARTITION_KEY, Integer.class);
final boolean hasPartitionKey = partitionKey != null;
// endpoint take precedence over header configuration
Object key = endpoint.getConfiguration().getKey() != null
? endpoint.getConfiguration().getKey() : exchange.getIn().getHeader(KafkaConstants.KEY);
final Object messageKey = key != null
? tryConvertToSerializedType(exchange, key, endpoint.getConfiguration().getKeySerializerClass()) : null;
final boolean hasMessageKey = messageKey != null;
// extracting headers which need to be propagated
List<Header> propagatedHeaders = getPropagatedHeaders(exchange, endpoint.getConfiguration());
Object msg = exchange.getIn().getBody();
// is the message body a list or something that contains multiple values
Iterator<Object> iterator = null;
if (msg instanceof Iterable) {
iterator = ((Iterable<Object>) msg).iterator();
} else if (msg instanceof Iterator) {
iterator = (Iterator<Object>) msg;
}
if (iterator != null) {
final Iterator<Object> msgList = iterator;
final String msgTopic = topic;
return new Iterator<ProducerRecord>() {
@Override
public boolean hasNext() {
return msgList.hasNext();
}
@Override
public ProducerRecord next() {
// must convert each entry of the iterator into the value according to the serializer
Object next = msgList.next();
Object value = tryConvertToSerializedType(exchange, next, endpoint.getConfiguration().getSerializerClass());
if (hasPartitionKey && hasMessageKey) {
return new ProducerRecord(msgTopic, partitionKey, null, key, value, propagatedHeaders);
} else if (hasMessageKey) {
return new ProducerRecord(msgTopic, null, null, key, value, propagatedHeaders);
} else {
return new ProducerRecord(msgTopic, null, null, null, value, propagatedHeaders);
}
}
@Override
public void remove() {
msgList.remove();
}
};
}
// must convert each entry of the iterator into the value according to the serializer
Object value = tryConvertToSerializedType(exchange, msg, endpoint.getConfiguration().getSerializerClass());
ProducerRecord record;
if (hasPartitionKey && hasMessageKey) {
record = new ProducerRecord(topic, partitionKey, null, key, value, propagatedHeaders);
} else if (hasMessageKey) {
record = new ProducerRecord(topic, null, null, key, value, propagatedHeaders);
} else {
record = new ProducerRecord(topic, null, null, null, value, propagatedHeaders);
}
return Collections.singletonList(record).iterator();
}
private List<Header> getPropagatedHeaders(Exchange exchange, KafkaConfiguration getConfiguration) {
HeaderFilterStrategy headerFilterStrategy = getConfiguration.getHeaderFilterStrategy();
KafkaHeaderSerializer headerSerializer = getConfiguration.getKafkaHeaderSerializer();
return exchange.getIn().getHeaders().entrySet().stream()
.filter(entry -> shouldBeFiltered(entry, exchange, headerFilterStrategy))
.map(entry -> getRecordHeader(entry, headerSerializer))
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
private boolean shouldBeFiltered(Map.Entry<String, Object> entry, Exchange exchange, HeaderFilterStrategy headerFilterStrategy) {
return !headerFilterStrategy.applyFilterToExternalHeaders(entry.getKey(), entry.getValue(), exchange);
}
private RecordHeader getRecordHeader(Map.Entry<String, Object> entry, KafkaHeaderSerializer headerSerializer) {
byte[] headerValue = headerSerializer.serialize(entry.getKey(), entry.getValue());
if (headerValue == null) {
return null;
}
return new RecordHeader(entry.getKey(), headerValue);
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
// Camel calls this method if the endpoint isSynchronous(), as the KafkaEndpoint creates a SynchronousDelegateProducer for it
public void process(Exchange exchange) throws Exception {
Iterator<ProducerRecord> c = createRecorder(exchange);
List<Future<RecordMetadata>> futures = new LinkedList<>();
List<RecordMetadata> recordMetadatas = new ArrayList<>();
if (endpoint.getConfiguration().isRecordMetadata()) {
if (exchange.hasOut()) {
exchange.getOut().setHeader(KafkaConstants.KAFKA_RECORDMETA, recordMetadatas);
} else {
exchange.getIn().setHeader(KafkaConstants.KAFKA_RECORDMETA, recordMetadatas);
}
}
while (c.hasNext()) {
ProducerRecord rec = c.next();
if (log.isDebugEnabled()) {
log.debug("Sending message to topic: {}, partition: {}, key: {}", rec.topic(), rec.partition(), rec.key());
}
futures.add(kafkaProducer.send(rec));
}
for (Future<RecordMetadata> f : futures) {
//wait for them all to be sent
recordMetadatas.add(f.get());
}
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public boolean process(Exchange exchange, AsyncCallback callback) {
try {
Iterator<ProducerRecord> c = createRecorder(exchange);
KafkaProducerCallBack cb = new KafkaProducerCallBack(exchange, callback);
while (c.hasNext()) {
cb.increment();
ProducerRecord rec = c.next();
if (log.isDebugEnabled()) {
log.debug("Sending message to topic: {}, partition: {}, key: {}", rec.topic(), rec.partition(), rec.key());
}
kafkaProducer.send(rec, cb);
}
return cb.allSent();
} catch (Exception ex) {
exchange.setException(ex);
}
callback.done(true);
return true;
}
/**
* Attempts to convert the object to the same type as the serialized class specified
*/
protected Object tryConvertToSerializedType(Exchange exchange, Object object, String serializerClass) {
Object answer = null;
if (KafkaConstants.KAFKA_DEFAULT_SERIALIZER.equals(serializerClass)) {
answer = exchange.getContext().getTypeConverter().tryConvertTo(String.class, exchange, object);
} else if ("org.apache.kafka.common.serialization.ByteArraySerializer".equals(serializerClass)) {
answer = exchange.getContext().getTypeConverter().tryConvertTo(byte[].class, exchange, object);
} else if ("org.apache.kafka.common.serialization.ByteBufferSerializer".equals(serializerClass)) {
answer = exchange.getContext().getTypeConverter().tryConvertTo(ByteBuffer.class, exchange, object);
} else if ("org.apache.kafka.common.serialization.BytesSerializer".equals(serializerClass)) {
// we need to convert to byte array first
byte[] array = exchange.getContext().getTypeConverter().tryConvertTo(byte[].class, exchange, object);
if (array != null) {
answer = new Bytes(array);
}
}
return answer != null ? answer : object;
}
private final class KafkaProducerCallBack implements Callback {
private final Exchange exchange;
private final AsyncCallback callback;
private final AtomicInteger count = new AtomicInteger(1);
private final List<RecordMetadata> recordMetadatas = new ArrayList<>();
KafkaProducerCallBack(Exchange exchange, AsyncCallback callback) {
this.exchange = exchange;
this.callback = callback;
if (endpoint.getConfiguration().isRecordMetadata()) {
if (exchange.hasOut()) {
exchange.getOut().setHeader(KafkaConstants.KAFKA_RECORDMETA, recordMetadatas);
} else {
exchange.getIn().setHeader(KafkaConstants.KAFKA_RECORDMETA, recordMetadatas);
}
}
}
void increment() {
count.incrementAndGet();
}
boolean allSent() {
if (count.decrementAndGet() == 0) {
log.trace("All messages sent, continue routing.");
//was able to get all the work done while queuing the requests
callback.done(true);
return true;
}
return false;
}
@Override
public void onCompletion(RecordMetadata recordMetadata, Exception e) {
if (e != null) {
exchange.setException(e);
}
recordMetadatas.add(recordMetadata);
if (count.decrementAndGet() == 0) {
// use worker pool to continue routing the exchange
// as this thread is from Kafka Callback and should not be used by Camel routing
workerPool.submit(new Runnable() {
@Override
public void run() {
log.trace("All messages sent, continue routing.");
callback.done(false);
}
});
}
}
}
}
| |
package com.avaje.ebean.config;
/**
* Naming convention used for constraint names.
* <p>
* Note that these constraint names are trimmed in the PlatformDdl which can be overridden
* but provides a decent default implementation.
* </p>
*/
public class DbConstraintNaming {

  /**
   * Defines how constraint names are shortened if required based on platform limitations.
   */
  public interface MaxLength {

    /**
     * Truncate or shorten the constraint name to support DB platform limitations.
     * <p>
     * There is a default implementation of this which is used if an implementation is
     * not specified.
     * </p>
     */
    String maxLength(String constraintName, int count);
  }

  protected String pkPrefix = "pk_";
  protected String pkSuffix = "";

  protected String fkPrefix = "fk_";
  protected String fkMiddle = "_";
  protected String fkSuffix = "";

  protected String fkIndexPrefix = "ix_";
  protected String fkIndexMiddle = "_";
  protected String fkIndexSuffix = "";

  protected String indexPrefix = "ix_";
  protected String indexMiddle = "_";
  protected String indexSuffix = "";

  protected String uqPrefix = "uq_";
  protected String uqSuffix = "";

  protected String ckPrefix = "ck_";
  protected String ckSuffix = "";

  protected MaxLength maxLength;

  protected DbConstraintNormalise normalise;

  /**
   * Construct using default of lower case for both table and column names.
   */
  public DbConstraintNaming() {
    this(true, true);
  }

  /**
   * Construct specifying if lower case should be used (for both table and column names).
   */
  public DbConstraintNaming(boolean lowerCase) {
    this(lowerCase, lowerCase);
  }

  /**
   * Construct specifying if lower case should be used for both table and column names.
   */
  public DbConstraintNaming(boolean lowerCaseTableNames, boolean lowerCaseColumnNames) {
    this.normalise = new DbConstraintNormalise(lowerCaseTableNames, lowerCaseColumnNames);
  }

  /**
   * Return the MaxLength implementation used to truncate/shorten db constraint names as necessary.
   */
  public MaxLength getMaxLength() {
    return maxLength;
  }

  /**
   * Set the MaxLength implementation used to truncate/shorten db constraint names as necessary.
   */
  public void setMaxLength(MaxLength maxLength) {
    this.maxLength = maxLength;
  }

  /**
   * Return the primary key constraint name.
   */
  public String primaryKeyName(String tableName) {
    return pkPrefix + normaliseTable(tableName) + pkSuffix;
  }

  /**
   * Return the foreign key constraint name given a single column foreign key.
   */
  public String foreignKeyConstraintName(String tableName, String columnName) {
    return fkPrefix + normaliseTable(tableName) + fkMiddle + normaliseColumn(columnName) + fkSuffix;
  }

  /**
   * Return the index name associated with a foreign key constraint given multiple columns.
   */
  public String foreignKeyIndexName(String tableName, String[] columns) {
    String colPart = joinColumnNames(columns);
    return fkIndexPrefix + normaliseTable(tableName) + fkIndexMiddle + colPart + fkIndexSuffix;
  }

  /**
   * Return the index name associated with a foreign key constraint given a single column foreign key.
   */
  public String foreignKeyIndexName(String tableName, String column) {
    // FIX: was normaliseTable(column) - a column name must be normalised as a column,
    // otherwise table-specific normalisation (catalog/schema trimming, table case
    // setting) is wrongly applied to it; consistent with foreignKeyConstraintName()
    String colPart = normaliseColumn(column);
    return fkIndexPrefix + normaliseTable(tableName) + fkIndexMiddle + colPart + fkIndexSuffix;
  }

  /**
   * Return the index name for a general index (not associated with a foreign key).
   */
  public String indexName(String tableName, String column) {
    // FIX: was normaliseTable(column) - see foreignKeyIndexName(String, String)
    String colPart = normaliseColumn(column);
    return indexPrefix + normaliseTable(tableName) + indexMiddle + colPart + indexSuffix;
  }

  /**
   * Return the index name for a general index (not associated with a foreign key).
   */
  public String indexName(String tableName, String[] columns) {
    String colPart = joinColumnNames(columns);
    return indexPrefix + normaliseTable(tableName) + indexMiddle + colPart + indexSuffix;
  }

  /**
   * Join the normalised column names together with underscores.
   */
  protected String joinColumnNames(String[] columns) {
    if (columns.length == 1) {
      return normaliseColumn(columns[0]);
    }
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < columns.length; i++) {
      if (i > 0) {
        sb.append("_");
      }
      sb.append(normaliseColumn(columns[i]));
    }
    return sb.toString();
  }

  /**
   * Return the unique constraint name.
   */
  public String uniqueConstraintName(String tableName, String columnName) {
    return uqPrefix + normaliseTable(tableName) + "_" + normaliseColumn(columnName) + uqSuffix;
  }

  /**
   * Return the unique constraint name.
   */
  public String uniqueConstraintName(String tableName, String[] columns) {
    String colPart = joinColumnNames(columns);
    return uqPrefix + normaliseTable(tableName) + "_" + colPart + uqSuffix;
  }

  /**
   * Return the check constraint name.
   */
  public String checkConstraintName(String tableName, String columnName) {
    return ckPrefix + normaliseTable(tableName) + "_" + normaliseColumn(columnName) + ckSuffix;
  }

  /**
   * Normalise the table name by trimming catalog and schema and removing any
   * quoted identifier characters (",',[,] etc).
   */
  public String normaliseTable(String tableName) {
    return normalise.normaliseTable(tableName);
  }

  /**
   * Normalise the column name by removing any quoted identifier characters (",',[,] etc).
   */
  public String normaliseColumn(String tableName) {
    return normalise.normaliseColumn(tableName);
  }

  /**
   * Lower case the table name checking for quoted identifiers.
   */
  public String lowerTableName(String tableName) {
    return normalise.lowerTableName(tableName);
  }

  /**
   * Lower case the column name checking for quoted identifiers.
   */
  public String lowerColumnName(String name) {
    return normalise.lowerColumnName(name);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import com.gemstone.gemfire.DataSerializer;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.distributed.internal.DM;
import com.gemstone.gemfire.distributed.internal.DistributionManager;
import com.gemstone.gemfire.distributed.internal.DistributionMessage;
import com.gemstone.gemfire.distributed.internal.MessageWithReply;
import com.gemstone.gemfire.distributed.internal.PooledDistributionMessage;
import com.gemstone.gemfire.distributed.internal.ReplyException;
import com.gemstone.gemfire.distributed.internal.ReplyMessage;
import com.gemstone.gemfire.distributed.internal.ReplyProcessor21;
import com.gemstone.gemfire.distributed.internal.ServerLocation;
import com.gemstone.gemfire.distributed.internal.ServerLocator;
import com.gemstone.gemfire.internal.cache.tier.sockets.ClientProxyMembershipID;
import com.gemstone.gemfire.internal.logging.LogService;
/**
* A processor for helping a locator find the durable queues for a given durable client id.
* Asks each bridge server if they have the durable id and builds a list of matching servers.
*
* @since 5.7
* @author Greg Passmore
*/
public class FindDurableQueueProcessor extends ReplyProcessor21 {
  private static final Logger logger = LogService.getLogger();

  ////////// Public static entry point /////////

  /** Server locations that host the durable queue; guarded by synchronizing on itself. */
  final ArrayList durableLocations = new ArrayList();

  /**
   * Asks every bridge server in the distributed system whether it hosts the durable
   * queue for the given durable client, and returns the matching server locations.
   * The local member is queried directly (see #50534) rather than by message.
   *
   * @param locator the server locator whose advisor supplies the bridge servers
   * @param proxyId identifies the durable client whose queue is sought
   * @param dm the distribution manager used to send the query messages
   * @return the list of {@code ServerLocation}s that host the durable queue
   */
  public static ArrayList sendAndFind(ServerLocator locator, ClientProxyMembershipID proxyId,
      DM dm) {
    Set members = ((ControllerAdvisor)locator.getDistributionAdvisor()).adviseBridgeServers();
    if (members.contains(dm.getId())) {
      // Don't send message to local server, see #50534.
      Set remoteMembers = new HashSet(members);
      remoteMembers.remove(dm.getId());
      members = remoteMembers;
    }
    FindDurableQueueProcessor processor =
        new FindDurableQueueProcessor(dm,members);
    FindDurableQueueMessage.send(proxyId, dm, members,processor);
    try {
      processor.waitForRepliesUninterruptibly();
    } catch (ReplyException e) {
      e.handleAsUnexpected();
    }
    ArrayList locations = processor.durableLocations;
    //This will add any local queues to the list
    findLocalDurableQueues(proxyId, locations);
    return locations;
  }

  /**
   * Adds the locations of any cache servers in this JVM that have a client proxy
   * for the given durable client id to {@code matches}.
   */
  private static void findLocalDurableQueues(ClientProxyMembershipID proxyId, ArrayList<ServerLocation> matches) {
    Cache c = GemFireCacheImpl.getInstance();
    if(c!=null) {
      List l = c.getCacheServers();
      if(l!=null) {
        Iterator i = l.iterator();
        while(i.hasNext()) {
          CacheServerImpl bs = (CacheServerImpl)i.next();
          if(bs.getAcceptor().getCacheClientNotifier().getClientProxy(proxyId)!=null) {
            ServerLocation loc = new ServerLocation(bs.getExternalAddress(),bs.getPort());
            matches.add(loc);
          }
        }
      }
    }
  }

  //////////// Instance methods //////////////

  @Override
  public void process(DistributionMessage msg) {
    if(msg instanceof FindDurableQueueReply) {
      FindDurableQueueReply reply = (FindDurableQueueReply)msg;
      synchronized(durableLocations) {
        // collect this member's matches; replies may arrive concurrently
        durableLocations.addAll(reply.getMatches());
      }
    }
    super.process(msg);
  }

  /** Creates a new instance of FindDurableQueueProcessor
   */
  private FindDurableQueueProcessor(DM dm,Set members) {
    super(dm, members);
  }

  /////////////// Inner message classes //////////////////

  /**
   * Message asking a member whether it hosts the durable queue for a client;
   * answered with a {@link FindDurableQueueReply}.
   */
  public static class FindDurableQueueMessage
    extends PooledDistributionMessage implements MessageWithReply
  {
    private int processorId;
    private ClientProxyMembershipID proxyId;

    protected static void send(ClientProxyMembershipID proxyId,
        DM dm,Set members,
        ReplyProcessor21 proc)
    {
      FindDurableQueueMessage msg = new FindDurableQueueMessage();
      msg.processorId = proc.getProcessorId();
      msg.proxyId = proxyId;
      msg.setRecipients(members);
      if (logger.isDebugEnabled()) {
        logger.debug("FindDurableQueueMessage sending {} to {}", msg, members);
      }
      dm.putOutgoing(msg);
    }

    @Override
    public int getProcessorId() {
      return this.processorId;
    }

    public ClientProxyMembershipID getProxyId() {
      return this.proxyId;
    }

    @Override
    protected void process(final DistributionManager dm) {
      ArrayList<ServerLocation> matches = new ArrayList<ServerLocation>();
      try {
        findLocalDurableQueues(proxyId, matches);
      } finally {
        // always reply, even if the local lookup failed, so the processor is not left waiting
        FindDurableQueueReply reply = new FindDurableQueueReply();
        reply.setProcessorId(this.getProcessorId());
        reply.matches = matches;
        reply.setRecipient(getSender());
        if (dm.getId().equals(getSender())) {
          // loop-back: process the reply directly instead of sending it over the wire
          reply.setSender(getSender());
          reply.dmProcess(dm);
        }
        else {
          dm.putOutgoing(reply);
        }
      }
    }

    @Override
    public int getDSFID() {
      return FIND_DURABLE_QUEUE;
    }

    @Override
    public void fromData(DataInput in)
      throws IOException, ClassNotFoundException {
      super.fromData(in);
      // NOTE: field order must match toData exactly
      this.processorId = in.readInt();
      this.proxyId = ClientProxyMembershipID.readCanonicalized(in);
    }

    @Override
    public void toData(DataOutput out) throws IOException {
      super.toData(out);
      out.writeInt(this.processorId);
      DataSerializer.writeObject(this.proxyId, out);
    }

    @Override
    public String toString() {
      StringBuilder buff = new StringBuilder();
      buff.append("FindDurableQueueMessage (proxyId='")
        .append(this.proxyId)
        .append("' processorId=")
        .append(this.processorId)
        .append(")");
      return buff.toString();
    }
  }

  /**
   * Reply carrying the server locations on the answering member that host the
   * durable queue for the queried client.
   */
  public static class FindDurableQueueReply extends ReplyMessage {
    protected ArrayList matches = null;

    public ArrayList getMatches() {
      return this.matches;
    }

    @Override
    public int getDSFID() {
      return FIND_DURABLE_QUEUE_REPLY;
    }

    @Override
    public void fromData(DataInput in)
      throws IOException, ClassNotFoundException {
      super.fromData(in);
      this.matches = DataSerializer.readArrayList(in);
    }

    @Override
    public void toData(DataOutput out) throws IOException {
      super.toData(out);
      DataSerializer.writeArrayList(matches, out);
    }

    @Override
    public String toString() {
      StringBuilder buff = new StringBuilder();
      buff.append("FindDurableQueueReply (matches='")
        .append(this.matches)
        .append("' processorId=")
        .append(this.processorId)
        .append(")");
      return buff.toString();
    }
  }
}
| |
/*******************************************************************************
* Copyright (c) 2015
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*******************************************************************************/
package jsettlers.logic.movable.strategies.soldiers;
import jsettlers.algorithms.path.Path;
import jsettlers.common.buildings.OccupyerPlace;
import jsettlers.common.movable.EDirection;
import jsettlers.common.movable.EMovableType;
import jsettlers.common.movable.ESoldierClass;
import jsettlers.common.position.ShortPoint2D;
import jsettlers.logic.buildings.military.IBuildingOccupyableMovable;
import jsettlers.logic.buildings.military.IOccupyableBuilding;
import jsettlers.logic.movable.Movable;
import jsettlers.logic.movable.MovableStrategy;
import jsettlers.logic.movable.interfaces.AbstractStrategyGrid;
import jsettlers.logic.movable.interfaces.IAttackable;
public abstract class SoldierStrategy extends MovableStrategy implements IBuildingOccupyableMovable {
	private static final long serialVersionUID = 5246120883607071865L;

	/**
	 * Internal state of the {@link SoldierStrategy} class.
	 *
	 * @author Andreas Eberle
	 */
	private static enum ESoldierState {
		AGGRESSIVE,
		SEARCH_FOR_ENEMIES,
		HITTING,
		INIT_GOTO_TOWER,
		GOING_TO_TOWER,
	}

	// concrete soldier type (e.g. swordsman, bowman) this strategy controls
	private final EMovableType movableType;

	// current state of the soldier's state machine
	private ESoldierState state = ESoldierState.AGGRESSIVE;
	// tower this soldier occupies or walks to; null when not assigned to a tower
	private IOccupyableBuilding building;
	// enemy currently targeted; null when none
	private IAttackable enemy;
	// remembered moveTo target so it can be resumed after combat interrupts it
	private ShortPoint2D oldPathTarget;

	// when true, approach enemies via full path finding instead of single direct steps
	private boolean inSaveGotoMode = false;
	// true while the soldier is stationed inside a tower
	private boolean isInTower;
	// position bowmen attack from while in a tower (set when entering the tower)
	private ShortPoint2D inTowerAttackPosition;
	// true while this soldier is defending a tower under attack
	private boolean defending;

	public SoldierStrategy(Movable movable, EMovableType movableType) {
		super(movable);
		this.movableType = movableType;
	}

	/**
	 * Executes one tick of the soldier state machine: hitting the current enemy,
	 * searching for new enemies, and walking to / entering an occupyable tower.
	 */
	@Override
	protected void action() {
		switch (state) {
		case AGGRESSIVE:
			break;

		case HITTING:
			if (!isEnemyAttackable(enemy, isInTower)) {
				changeStateTo(ESoldierState.SEARCH_FOR_ENEMIES);
			} else {
				hitEnemy(enemy); // after the animation, execute the actual hit.

				if (state != ESoldierState.HITTING) {
					break; // the soldier could have entered an attacked tower
				}

				if (enemy.getHealth() <= 0) {
					enemy = null;
					changeStateTo(ESoldierState.SEARCH_FOR_ENEMIES);
					break; // don't directly walk on the enemy's position, because there may be others to walk in first
				}
			}
			changeStateTo(ESoldierState.SEARCH_FOR_ENEMIES);
			// intentional fall-through: continue searching for enemies in the same tick

		case SEARCH_FOR_ENEMIES:
			final short minSearchDistance = getMinSearchDistance();
			IAttackable oldEnemy = enemy;
			enemy = super.getStrategyGrid().getEnemyInSearchArea(getAttackPosition(), super.getMovable(), minSearchDistance,
					getMaxSearchDistance(isInTower), !defending);

			// check if we have a new enemy. If so, go in unsafe mode again.
			if (oldEnemy != null && oldEnemy != enemy) {
				inSaveGotoMode = false;
			}

			// no enemy found, go back in normal mode
			if (enemy == null) {
				if (minSearchDistance > 0) {
					// check for enemies closer than the minimum attack distance (dead zone)
					IAttackable toCloseEnemy = super.getStrategyGrid().getEnemyInSearchArea(
							getAttackPosition(), super.getMovable(), (short) 0, minSearchDistance, !defending);
					if (toCloseEnemy != null) {
						if (!isInTower) { // we are in danger because an enemy entered our range where we can't attack => run away
							EDirection escapeDirection = EDirection.getApproxDirection(toCloseEnemy.getPos(), getMovable().getPos());
							super.goInDirection(escapeDirection, false);
							super.getMovable().moveTo(null); // reset moveToRequest, so the soldier doesn't go there after fleeing.
						} // else { // we are in the tower, so wait and check again next time.
						break;
					}
				}
				if (defending) {
					// the attack is over, tell the tower it is defended again
					building.towerDefended(this);
					defending = false;
				}
				changeStateTo(ESoldierState.AGGRESSIVE);
			} else if (isEnemyAttackable(enemy, isInTower)) { // if enemy is close enough, attack it
				super.lookInDirection(EDirection.getApproxDirection(super.getPos(), enemy.getPos()));
				startAttackAnimation(enemy);
				changeStateTo(ESoldierState.HITTING);
			} else if (!isInTower) {
				// enemy out of range: keep searching and walk towards it
				changeStateTo(ESoldierState.SEARCH_FOR_ENEMIES);
				goToEnemy(enemy);
			} else {
				// in tower and enemy not attackable: keep searching, can't move
				changeStateTo(ESoldierState.SEARCH_FOR_ENEMIES);
			}

			break;

		case INIT_GOTO_TOWER:
			changeStateTo(ESoldierState.GOING_TO_TOWER); // change state before requesting path because of checkPathStepPreconditions()
			if (!super.getPos().equals(building.getDoor()) && !super.goToPos(building.getDoor())) {
				notifyTowerThatRequestFailed();
			}
			break;

		case GOING_TO_TOWER:
			// arrived at the tower door: enter if it still stands and still belongs to us
			if (building.isNotDestroyed() && building.getPlayer() == super.getPlayer()) {
				OccupyerPlace place = building.addSoldier(this);
				super.setPosition(place.getPosition().calculatePoint(building.getDoor()));
				super.enableNothingToDoAction(false);
				super.setVisible(false);

				if (isBowman()) {
					this.inTowerAttackPosition = building.getTowerBowmanSearchPosition(place);
				}
				changeStateTo(ESoldierState.AGGRESSIVE);
				isInTower = true;
			} else {
				changeStateTo(ESoldierState.AGGRESSIVE); // do a check of the surrounding to find possible enemies.
				building = null;
			}
			break;
		}
	}

	// Tells the tower this soldier won't arrive and resets the tower assignment.
	private void notifyTowerThatRequestFailed() {
		if (building.getPlayer() == super.getPlayer()) { // only notify, if the tower still belongs to this player
			building.requestFailed(this.movableType);
			building = null;
			state = ESoldierState.AGGRESSIVE;
		}
	}

	/** Maximum enemy search distance; may differ when the soldier is inside a tower. */
	protected abstract short getMaxSearchDistance(boolean isInTower);

	/** Minimum attack distance; enemies closer than this cannot be attacked. */
	protected abstract short getMinSearchDistance();

	// Bowmen in a tower attack from a dedicated search position; everyone else from where they stand.
	protected ShortPoint2D getAttackPosition() {
		return isInTower && isBowman() ? inTowerAttackPosition : super.getPos();
	}

	private boolean isBowman() {
		return getSoldierClass() == ESoldierClass.BOWMAN;
	}

	// Moves towards the enemy: direct steps first, full path finding once a direct step fails.
	private void goToEnemy(IAttackable enemy) {
		if (inSaveGotoMode) {
			goToSavely(enemy);
		} else {
			ShortPoint2D pos = super.getPos();
			EDirection dir = EDirection.getApproxDirection(pos, enemy.getPos());

			if (super.goInDirection(dir, false)) {
				return;
			} else {
				// direct step blocked => switch to path finding from now on
				inSaveGotoMode = true;
				goToSavely(enemy);
			}
		}
	}

	// Requests a full path to the enemy's position.
	private void goToSavely(IAttackable enemy) {
		super.goToPos(enemy.getPos());
	}

	// Switches the state machine; returning to AGGRESSIVE resumes a remembered moveTo target.
	private void changeStateTo(ESoldierState state) {
		this.state = state;
		switch (state) {
		case AGGRESSIVE:
			if (oldPathTarget != null) {
				super.goToPos(oldPathTarget);
				oldPathTarget = null;
			}
			break;

		default:
			break;
		}
	}

	/** Applies the actual damage to the given enemy (called after the strike animation). */
	protected abstract void hitEnemy(IAttackable enemy);

	/** Starts the attack animation against the given enemy. */
	protected abstract void startAttackAnimation(IAttackable enemy);

	/** Checks whether the given enemy is currently in attack range. */
	protected abstract boolean isEnemyAttackable(IAttackable enemy, boolean isInTower);

	@Override
	public boolean setOccupyableBuilding(IOccupyableBuilding building) {
		// reject the request while already heading to (another) tower
		if (state != ESoldierState.GOING_TO_TOWER && state != ESoldierState.INIT_GOTO_TOWER) {
			this.building = building;
			changeStateTo(ESoldierState.INIT_GOTO_TOWER);
			super.abortPath();
			this.oldPathTarget = null; // this prevents that the soldiers go to this position after he leaves the tower.
			return true;
		} else {
			return false;
		}
	}

	@Override
	public EMovableType getMovableType() {
		return movableType;
	}

	@Override
	public Movable getMovable() {
		return super.getMovable();
	}

	@Override
	public void leaveOccupyableBuilding(ShortPoint2D newPosition) {
		// restore the normal outside-of-tower movable state
		super.setPosition(newPosition);
		super.enableNothingToDoAction(true);
		super.setVisible(true);

		isInTower = false;
		building = null;
		defending = false;
		changeStateTo(ESoldierState.SEARCH_FOR_ENEMIES);
	}

	@Override
	public void setSelected(boolean selected) {
		super.setSelected(selected);
	}

	@Override
	public void informAboutAttackable(IAttackable other) {
		// only react when idle; inside a tower only bowmen can attack
		if (state == ESoldierState.AGGRESSIVE && (!isInTower || getSoldierClass() == ESoldierClass.BOWMAN)) {
			changeStateTo(ESoldierState.SEARCH_FOR_ENEMIES); // this searches for the enemy on the next timer click
		}
	}

	@Override
	public void setDefendingAt(ShortPoint2D pos) {
		super.setPosition(pos);
		changeStateTo(ESoldierState.SEARCH_FOR_ENEMIES);
		defending = true;
	}

	/**
	 * Decides whether the current path step may be executed. Returns false to
	 * abort the path, e.g. when a tower was just assigned, the tower was lost
	 * while walking to it, or the current enemy became attackable.
	 */
	@Override
	protected boolean checkPathStepPreconditions(ShortPoint2D pathTarget, int step) {
		if (state == ESoldierState.INIT_GOTO_TOWER) {
			return false; // abort previous path when we just got a tower set
		}

		// during combat, follow a user-given path for at most two steps; remember its target for later
		boolean result = !((state == ESoldierState.SEARCH_FOR_ENEMIES || state == ESoldierState.HITTING) && step >= 2);
		if (!result && oldPathTarget == null) {
			oldPathTarget = pathTarget;
		}

		// stop walking to a tower that got destroyed or captured in the meantime
		if (state == ESoldierState.GOING_TO_TOWER && (!building.isNotDestroyed() || building.getPlayer() != super.getPlayer())) {
			result = false;
		}

		// stop pathing as soon as the current enemy is attackable
		if (enemy != null && state == ESoldierState.SEARCH_FOR_ENEMIES && isEnemyAttackable(enemy, false)) {
			result = false;
		}

		return result;
	}

	@Override
	protected void moveToPathSet(ShortPoint2D oldPosition, ShortPoint2D oldTargetPos, ShortPoint2D targetPos) {
		if (targetPos != null && this.oldPathTarget != null) {
			oldPathTarget = null; // reset the path target to be able to get the new one when we hijack the path
			inSaveGotoMode = false;
		}
		changeStateTo(ESoldierState.SEARCH_FOR_ENEMIES);
	}

	@Override
	protected boolean isMoveToAble() {
		// soldiers heading to or sitting in a tower cannot be given move orders
		return state != ESoldierState.INIT_GOTO_TOWER && state != ESoldierState.GOING_TO_TOWER && !isInTower;
	}

	/**
	 * While searching for enemies, tries to sidestep an obstacle by probing the
	 * neighboring hex directions (one, then two steps left/right of the blocked
	 * direction) instead of replanning the whole path.
	 */
	@Override
	protected Path findWayAroundObstacle(ShortPoint2D position, Path path) {
		if (state == ESoldierState.SEARCH_FOR_ENEMIES) {
			EDirection direction = EDirection.getDirection(position, path.getNextPos());

			AbstractStrategyGrid grid = super.getStrategyGrid();

			EDirection leftDir = direction.getNeighbor(-1);
			ShortPoint2D leftPos = leftDir.getNextHexPoint(position);
			EDirection rightDir = direction.getNeighbor(1);
			ShortPoint2D rightPos = rightDir.getNextHexPoint(position);

			if (grid.isFreePosition(leftPos)) {
				return new Path(leftPos);
			} else if (grid.isFreePosition(rightPos)) {
				return new Path(rightPos);
			} else {
				EDirection twoLeftDir = direction.getNeighbor(-2);
				ShortPoint2D twoLeftPos = twoLeftDir.getNextHexPoint(position);
				EDirection twoRightDir = direction.getNeighbor(2);
				ShortPoint2D twoRightPos = twoRightDir.getNextHexPoint(position);

				if (grid.isFreePosition(twoLeftPos)) {
					return new Path(twoLeftPos);
				} else if (grid.isFreePosition(twoRightPos)) {
					return new Path(twoRightPos);
				} else {
					return path; // no free neighbor found, keep the blocked path
				}
			}
		} else {
			return super.findWayAroundObstacle(position, path);
		}
	}

	@Override
	protected void strategyKilledEvent(ShortPoint2D pathTarget) {
		// release the tower slot (or the pending request) when this soldier dies
		if (building != null) {
			if (isInTower) {
				building.removeSoldier(this);
			} else {
				notifyTowerThatRequestFailed();
			}
		}
	}

	@Override
	protected void pathAborted(ShortPoint2D pathTarget) {
		switch (state) {
		case INIT_GOTO_TOWER:
		case GOING_TO_TOWER:
			notifyTowerThatRequestFailed();
			break;
		default:
			state = ESoldierState.AGGRESSIVE;
			break;
		}
	}

	// Combat strength of this soldier's player, depending on own vs. foreign ground.
	protected float getCombatStrength() {
		return super.getPlayer().getCombatStrengthInformation().getCombatStrength(isOnOwnGround());
	}
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.map.impl.nearcache;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.proxy.NearCachedClientMapProxy;
import com.hazelcast.client.test.TestHazelcastFactory;
import com.hazelcast.config.Config;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.NearCacheConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.SlowTest;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static com.hazelcast.internal.nearcache.NearCacheTestUtils.getBaseConfig;
import static com.hazelcast.internal.nearcache.impl.invalidation.RepairingTask.MAX_TOLERATED_MISS_COUNT;
import static com.hazelcast.spi.properties.GroupProperty.MAP_INVALIDATION_MESSAGE_BATCH_FREQUENCY_SECONDS;
import static java.lang.Integer.parseInt;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
* Tests that the Near Cache doesn't lose invalidations.
* <p>
* Issue: https://github.com/hazelcast/hazelcast/issues/4671
* <p>
* Thanks Lukas Blunschi for the original test (https://github.com/lukasblu).
*/
@RunWith(HazelcastParallelClassRunner.class)
@Category({SlowTest.class, ParallelTest.class})
public class ClientMapNearCacheStaleReadTest extends HazelcastTestSupport {
private static final int NUM_GETTERS = 7;
private static final int MAX_RUNTIME = 30;
private static final String KEY = "key123";
private static final ILogger LOGGER = Logger.getLogger(ClientMapNearCacheStaleReadTest.class);
private AtomicInteger valuePut = new AtomicInteger(0);
private AtomicBoolean stop = new AtomicBoolean(false);
private AtomicInteger assertionViolationCount = new AtomicInteger(0);
private AtomicBoolean failed = new AtomicBoolean(false);
private HazelcastInstance member;
private HazelcastInstance client;
private IMap<String, String> map;
@Before
public void setUp() {
String mapName = randomMapName();
TestHazelcastFactory factory = new TestHazelcastFactory();
Config config = getBaseConfig()
.setProperty(MAP_INVALIDATION_MESSAGE_BATCH_FREQUENCY_SECONDS.getName(), "2");
ClientConfig clientConfig = getClientConfig(mapName)
.setProperty(MAX_TOLERATED_MISS_COUNT.getName(), "0");
member = factory.newHazelcastInstance(config);
client = factory.newHazelcastClient(clientConfig);
map = client.getMap(mapName);
}
@After
public void tearDown() {
client.shutdown();
member.shutdown();
}
@Test
public void testNoLostInvalidationsEventually() {
testNoLostInvalidationsStrict(false);
}
@Test
public void testNoLostInvalidationsStrict() {
testNoLostInvalidationsStrict(true);
}
private void testNoLostInvalidationsStrict(boolean strict) {
// run test
runTestInternal();
if (!strict) {
// test eventually consistent
sleepSeconds(2);
}
int valuePutLast = valuePut.get();
String valueMapStr = map.get(KEY);
int valueMap = parseInt(valueMapStr);
// fail if not eventually consistent
String msg = null;
if (valueMap < valuePutLast) {
msg = "Near Cache did *not* become consistent. (valueMap = " + valueMap + ", valuePut = " + valuePutLast + ").";
// flush Near Cache and re-fetch value
flushClientNearCache(map);
String valueMap2Str = map.get(KEY);
int valueMap2 = parseInt(valueMap2Str);
// test again
if (valueMap2 < valuePutLast) {
msg += " Unexpected inconsistency! (valueMap2 = " + valueMap2 + ", valuePut = " + valuePutLast + ").";
} else {
msg += " Flushing the Near Cache cleared the inconsistency. (valueMap2 = " + valueMap2
+ ", valuePut = " + valuePutLast + ").";
}
}
// stop instance
client.getLifecycleService().terminate();
// fail after stopping instance
if (msg != null) {
LOGGER.warning(msg);
fail(msg);
}
// fail if strict is required and assertion was violated
if (strict && assertionViolationCount.get() > 0) {
msg = "Assertion violated " + assertionViolationCount.get() + " times.";
LOGGER.warning(msg);
fail(msg);
}
}
protected ClientConfig getClientConfig(String mapName) {
NearCacheConfig nearCacheConfig = getNearCacheConfig(mapName);
return new ClientConfig()
.addNearCacheConfig(nearCacheConfig);
}
protected NearCacheConfig getNearCacheConfig(String mapName) {
return new NearCacheConfig(mapName)
.setInMemoryFormat(InMemoryFormat.BINARY);
}
/**
* Flush Near Cache from client map with Near Cache.
* <p>
* Warning: this uses Hazelcast internals which might change from one version to the other.
*/
private void flushClientNearCache(IMap map) {
if (!(map instanceof NearCachedClientMapProxy)) {
return;
}
NearCachedClientMapProxy clientMapProxy = (NearCachedClientMapProxy) map;
clientMapProxy.getNearCache().clear();
}
private void runTestInternal() {
// start 1 putter thread (put0)
Thread threadPut = new Thread(new PutRunnable(), "put0");
threadPut.start();
// wait for putter thread to start before starting getter threads
sleepMillis(300);
// start numGetters getter threads (get0-numGetters)
List<Thread> threads = new ArrayList<Thread>();
for (int i = 0; i < NUM_GETTERS; i++) {
Thread thread = new Thread(new GetRunnable(), "get" + i);
threads.add(thread);
}
for (Thread thread : threads) {
thread.start();
}
// stop after maxRuntime seconds
int i = 0;
while (!stop.get() && i++ < MAX_RUNTIME) {
sleepMillis(1000);
}
if (!stop.get()) {
LOGGER.info("Problem did not occur within " + MAX_RUNTIME + "s.");
}
stop.set(true);
assertJoinable(threadPut);
for (Thread thread : threads) {
assertJoinable(thread);
}
}
private class PutRunnable implements Runnable {
@Override
public void run() {
LOGGER.info(Thread.currentThread().getName() + " started.");
int i = 0;
while (!stop.get()) {
i++;
// put new value and update last state
// note: the value in the map/Near Cache is *always* larger or equal to valuePut
// assertion: valueMap >= valuePut
map.put(KEY, String.valueOf(i));
valuePut.set(i);
// check if we see our last update
String valueMapStr = map.get(KEY);
if (valueMapStr == null) {
continue;
}
int valueMap = parseInt(valueMapStr);
if (valueMap != i) {
assertionViolationCount.incrementAndGet();
LOGGER.warning("Assertion violated! (valueMap = " + valueMap + ", i = " + i + ")");
// sleep to ensure Near Cache invalidation is really lost
try {
Thread.sleep(100);
} catch (InterruptedException e) {
LOGGER.warning("Interrupted: " + e.getMessage());
}
// test again and stop if really lost
valueMapStr = map.get(KEY);
valueMap = parseInt(valueMapStr);
if (valueMap != i) {
LOGGER.warning("Near Cache invalidation lost! (valueMap = " + valueMap + ", i = " + i + ")");
failed.set(true);
stop.set(true);
}
}
}
LOGGER.info(Thread.currentThread().getName() + " performed " + i + " operations.");
}
}
private class GetRunnable implements Runnable {
@Override
public void run() {
LOGGER.info(Thread.currentThread().getName() + " started.");
int n = 0;
while (!stop.get()) {
n++;
// blindly get the value (to trigger the issue) and parse the value (to get some CPU load)
String valueMapStr = map.get(KEY);
int i = parseInt(valueMapStr);
assertEquals("" + i, valueMapStr);
}
LOGGER.info(Thread.currentThread().getName() + " performed " + n + " operations.");
}
}
}
| |
/*
* Generated by the Jasper component of Apache Tomcat
* Version: JspC/ApacheTomcat8
* Generated at: 2016-08-23 16:29:31 UTC
* Note: The last modified time of this file was set to
* the last modified time of the source file after
* generation to assist with modification tracking.
*/
package org.jivesoftware.openfire.admin.setup;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.util.LocaleUtils;
import org.jivesoftware.util.ParamUtils;
import org.jivesoftware.openfire.ldap.LdapManager;
import org.jivesoftware.util.JiveGlobals;
import java.util.*;
public final class setup_002dldap_002dserver_jsp extends org.apache.jasper.runtime.HttpJspBase
implements org.apache.jasper.runtime.JspSourceDependent {
// Factory used to obtain and release PageContext instances for each request.
private static final javax.servlet.jsp.JspFactory _jspxFactory =
        javax.servlet.jsp.JspFactory.getDefaultFactory();

// Source files this generated servlet depends on, mapped to their last-modified
// times; the container uses this for recompilation tracking.
private static java.util.Map<java.lang.String,java.lang.Long> _jspx_dependants;

static {
_jspx_dependants = new java.util.HashMap<java.lang.String,java.lang.Long>(1);
_jspx_dependants.put("/setup/ldap-server.jspf", Long.valueOf(1471969768000L));
}

// Pooled handler for <fmt:message key="..."/> tags with no body.
private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody;
// EL expression factory for this web application.
private javax.el.ExpressionFactory _el_expressionfactory;
// Manages tag handler instance lifecycle for the container.
private org.apache.tomcat.InstanceManager _jsp_instancemanager;
/**
 * Returns the source files this generated servlet depends on, keyed by path
 * with last-modified timestamps as values (see JspSourceDependent).
 */
public java.util.Map<java.lang.String,java.lang.Long> getDependants() {
return _jspx_dependants;
}
/**
 * Initializes per-servlet infrastructure: the fmt:message tag handler pool,
 * the EL expression factory and the instance manager. Called once by the
 * container after the servlet is constructed.
 */
public void _jspInit() {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
_el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory();
_jsp_instancemanager = org.apache.jasper.runtime.InstanceManagerFactory.getInstanceManager(getServletConfig());
}
/**
 * Releases pooled tag handlers when the servlet is taken out of service.
 */
public void _jspDestroy() {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.release();
}
public void _jspService(final javax.servlet.http.HttpServletRequest request, final javax.servlet.http.HttpServletResponse response)
throws java.io.IOException, javax.servlet.ServletException {
final javax.servlet.jsp.PageContext pageContext;
javax.servlet.http.HttpSession session = null;
final javax.servlet.ServletContext application;
final javax.servlet.ServletConfig config;
javax.servlet.jsp.JspWriter out = null;
final java.lang.Object page = this;
javax.servlet.jsp.JspWriter _jspx_out = null;
javax.servlet.jsp.PageContext _jspx_page_context = null;
try {
response.setContentType("text/html");
pageContext = _jspxFactory.getPageContext(this, request, response,
null, true, 8192, true);
_jspx_page_context = pageContext;
application = pageContext.getServletContext();
config = pageContext.getServletConfig();
session = pageContext.getSession();
out = pageContext.getOut();
_jspx_out = out;
out.write('\n');
// Redirect if we've already run setup:
if (!XMPPServer.getInstance().isSetupMode()) {
response.sendRedirect("setup-completed.jsp");
return;
}
out.write('\n');
out.write('\n');
String serverType = null;
if (request.getParameter("save") != null || request.getParameter("test") != null) {
int serverTypeInt = ParamUtils.getIntParameter(request, "servertype", 1);
switch (serverTypeInt) {
case 1:
serverType = "other";
break;
case 2:
serverType = "activedirectory";
break;
case 3:
serverType = "openldap";
break;
default:
serverType = "other";
}
}
boolean initialSetup = true;
String currentPage = "setup-ldap-server.jsp";
String testPage = "setup-ldap-server_test.jsp?serverType="+ serverType;
String nextPage = "setup-ldap-user.jsp?serverType=" + serverType;
Map<String, String> meta = new HashMap<String, String>();
meta.put("currentStep", "3");
out.write('\n');
out.write("\n\n\n\n\n\n\n\n");
String host;
int port = 389;
String baseDN;
String adminDN;
String adminPassword;
boolean connectionPoolEnabled = true;
boolean sslEnabled = false;
boolean startTlsEnabled = false;
boolean debugEnabled = false;
boolean referralsEnabled = false;
boolean aliasReferralsEnabled = true;
boolean encloseDNs = true;
@SuppressWarnings("unchecked")
Map<String,String> xmppSettings = (Map<String,String>)session.getAttribute("xmppSettings");
// Get parameters
boolean save = request.getParameter("save") != null;
boolean test = request.getParameter("test") != null;
LdapManager manager = LdapManager.getInstance();
Map<String, String> errors = new HashMap<String, String>();
if (save || test) {
host = ParamUtils.getParameter(request, "host");
if (host == null) {
errors.put("host", LocaleUtils.getLocalizedString("setup.ldap.server.host_error"));
}
port = ParamUtils.getIntParameter(request, "port", port);
if (port <= 0) {
errors.put("port", LocaleUtils.getLocalizedString("setup.ldap.server.port_error"));
}
baseDN = ParamUtils.getParameter(request, "basedn");
if (baseDN == null) {
errors.put("baseDN", LocaleUtils.getLocalizedString("setup.ldap.server.basedn_error"));
}
adminDN = ParamUtils.getParameter(request, "admindn");
adminPassword = ParamUtils.getParameter(request, "adminpwd");
connectionPoolEnabled =
ParamUtils.getBooleanParameter(request, "connectionpool", connectionPoolEnabled);
sslEnabled = ParamUtils.getBooleanParameter(request, "ssl", sslEnabled);
startTlsEnabled = ParamUtils.getBooleanParameter(request, "starttls", startTlsEnabled);
debugEnabled = ParamUtils.getBooleanParameter(request, "debug", debugEnabled);
referralsEnabled = ParamUtils.getBooleanParameter(request, "referrals", referralsEnabled);
aliasReferralsEnabled = ParamUtils.getBooleanParameter(request, "aliasreferrals", aliasReferralsEnabled);
encloseDNs = ParamUtils.getBooleanParameter(request, "enclosedns", encloseDNs);
if (errors.isEmpty()) {
// Store settings in a map and keep it in the session
Map<String, String> settings = new HashMap<String, String>();
settings.put("ldap.serverType", serverType);
settings.put("ldap.host", host);
settings.put("ldap.port", Integer.toString(port));
settings.put("ldap.baseDN", baseDN);
if (adminDN != null) {
settings.put("ldap.adminDN", adminDN);
}
if (adminPassword != null) {
settings.put("ldap.adminPassword", adminPassword);
}
settings.put("ldap.connectionPoolEnabled",
Boolean.toString(connectionPoolEnabled));
settings.put("ldap.sslEnabled", Boolean.toString(sslEnabled));
settings.put("ldap.startTlsEnabled", Boolean.toString(startTlsEnabled));
settings.put("ldap.debugEnabled", Boolean.toString(debugEnabled));
settings.put("ldap.autoFollowReferrals", Boolean.toString(referralsEnabled));
settings.put("ldap.autoFollowAliasReferrals", Boolean.toString(aliasReferralsEnabled));
settings.put("ldap.encloseDNs", Boolean.toString(encloseDNs));
// Always disable connection pooling so that connections aren't left hanging open.
settings.put("ldap.connectionPoolEnabled", "false");
session.setAttribute("ldapSettings", settings);
if (save) {
// Save settings and redirect
Collection<String> hosts = new ArrayList<String>();
StringTokenizer st = new StringTokenizer(host, " ,\t\n\r\f");
while (st.hasMoreTokens()) {
hosts.add(st.nextToken());
}
manager.setHosts(hosts);
manager.setPort(port);
manager.setBaseDN(baseDN);
manager.setAdminDN(adminDN);
manager.setAdminPassword(adminPassword);
manager.setConnectionPoolEnabled(connectionPoolEnabled);
manager.setSslEnabled(sslEnabled);
manager.setStartTlsEnabled(startTlsEnabled);
manager.setDebugEnabled(debugEnabled);
manager.setFollowReferralsEnabled(referralsEnabled);
manager.setFollowAliasReferralsEnabled(aliasReferralsEnabled);
manager.setIsEnclosingDNs(encloseDNs);
// Save the settings for later, if we're in setup
if (xmppSettings != null) {
xmppSettings.put("ldap.host", host);
xmppSettings.put("ldap.port", Integer.toString(port));
xmppSettings.put("ldap.baseDN", baseDN);
xmppSettings.put("ldap.adminDN", adminDN);
xmppSettings.put("ldap.adminPassword", adminPassword);
xmppSettings.put("ldap.connectionPoolEnabled", Boolean.toString(connectionPoolEnabled));
xmppSettings.put("ldap.sslEnabled", Boolean.toString(sslEnabled));
xmppSettings.put("ldap.startTlsEnabled", Boolean.toString(startTlsEnabled));
xmppSettings.put("ldap.debugEnabled", Boolean.toString(debugEnabled));
xmppSettings.put("ldap.autoFollowReferrals", Boolean.toString(referralsEnabled));
xmppSettings.put("ldap.autoFollowAliasReferrals", Boolean.toString(aliasReferralsEnabled));
xmppSettings.put("ldap.encloseDNs", Boolean.toString(encloseDNs));
JiveGlobals.setPropertyEncrypted("ldap.adminDN", true);
JiveGlobals.setPropertyEncrypted("ldap.adminPassword", true);
session.setAttribute("xmppSettings", xmppSettings);
}
// Redirect to next step.
response.sendRedirect(nextPage);
return;
}
}
} else {
// See if there are already values for the variables defined.
StringBuilder sb = new StringBuilder();
for (String aHost : LdapManager.getInstance().getHosts()) {
sb.append(aHost).append(", ");
}
host = sb.toString();
if (host.trim().length() > 0) {
host = host.substring(0, host.length() - 2);
}
port = manager.getPort();
baseDN = manager.getBaseDN();
adminDN = manager.getAdminDN();
adminPassword = manager.getAdminPassword();
connectionPoolEnabled = manager.isConnectionPoolEnabled();
sslEnabled = manager.isSslEnabled();
startTlsEnabled = manager.isStartTlsEnabled();
debugEnabled = manager.isDebugEnabled();
referralsEnabled = manager.isFollowReferralsEnabled();
aliasReferralsEnabled = manager.isFollowAliasReferralsEnabled();
encloseDNs = manager.isEnclosingDNs();
}
out.write("\n<html>\n<head>\n <title>");
if (_jspx_meth_fmt_005fmessage_005f0(_jspx_page_context))
return;
out.write("</title>\n ");
for (Map.Entry<String, String> entry : meta.entrySet()) {
out.write("\n <meta name=\"");
out.print( entry.getKey());
out.write("\" content=\"");
out.print( entry.getValue());
out.write("\"/>\n ");
}
out.write("\n</head>\n<body>\n\n ");
if (test && errors.isEmpty()) {
out.write("\n\n <a href=\"");
out.print( testPage);
out.write("\" id=\"lbmessage\" title=\"");
if (_jspx_meth_fmt_005fmessage_005f1(_jspx_page_context))
return;
out.write("\" style=\"display:none;\"></a>\n <script type=\"text/javascript\">\n function loadMsg() {\n var lb = new lightbox(document.getElementById('lbmessage'));\n lb.activate();\n }\n setTimeout('loadMsg()', 500);\n </script>\n\n ");
}
out.write("\n\n ");
if (initialSetup) {
out.write("\n <h1>");
if (_jspx_meth_fmt_005fmessage_005f2(_jspx_page_context))
return;
out.write(": <span>");
if (_jspx_meth_fmt_005fmessage_005f3(_jspx_page_context))
return;
out.write("</span></h1>\n ");
}
out.write("\n\n <!-- BEGIN jive-contentBox_stepbar -->\n\t<div id=\"jive-contentBox_stepbar\">\n\t\t<span class=\"jive-stepbar_step\"><strong>1. ");
if (_jspx_meth_fmt_005fmessage_005f4(_jspx_page_context))
return;
out.write("</strong></span>\n\t\t<span class=\"jive-stepbar_step\"><em>2. ");
if (_jspx_meth_fmt_005fmessage_005f5(_jspx_page_context))
return;
out.write("</em></span>\n\t\t<span class=\"jive-stepbar_step\"><em>3. ");
if (_jspx_meth_fmt_005fmessage_005f6(_jspx_page_context))
return;
out.write("</em></span>\n\t</div>\n\t<!-- END jive-contentBox-stepbar -->\n\n <!-- BEGIN jive-contentBox -->\n <div class=\"jive-contentBox jive-contentBox_for-stepbar\">\n\n\t<h2>");
if (_jspx_meth_fmt_005fmessage_005f7(_jspx_page_context))
return;
out.write(": <span>");
if (_jspx_meth_fmt_005fmessage_005f8(_jspx_page_context))
return;
out.write("</span></h2>\n\t<p>");
if (_jspx_meth_fmt_005fmessage_005f9(_jspx_page_context))
return;
out.write("</p>\n\n ");
if (errors.size() > 0) {
out.write("\n\n <div class=\"error\">\n ");
for (String error:errors.values()) {
out.write("\n ");
out.print( error);
out.write("<br/>\n ");
}
out.write("\n </div>\n\n ");
}
out.write("\n\n <form action=\"");
out.print( currentPage);
out.write("\" method=\"post\">\n\t\t<!-- BEGIN jive-contentBox_bluebox -->\n\t\t<div class=\"jive-contentBox_bluebox\">\n\t\t\t<table border=\"0\" cellpadding=\"0\" cellspacing=\"2\">\n\t\t\t<tr>\n\t\t\t <td colspan=\"4\"><strong>");
if (_jspx_meth_fmt_005fmessage_005f10(_jspx_page_context))
return;
out.write("</strong></td>\n\t\t\t</tr>\n ");
if (initialSetup) {
out.write("\n <tr>\n <td align=\"right\" width=\"1%\" nowrap=\"nowrap\">");
if (_jspx_meth_fmt_005fmessage_005f11(_jspx_page_context))
return;
out.write(":</td>\n <td colspan=\"3\" nowrap>\n <select name=\"servertype\" size=\"1\" id=\"jiveLDAPserverType\" style=\"width:90%;\">\n <option value=\"1\" ");
out.print( serverType == null ? "selected" : "" );
out.write('>');
if (_jspx_meth_fmt_005fmessage_005f12(_jspx_page_context))
return;
out.write("</option>\n <option value=\"2\" ");
out.print( "activedirectory".equals(serverType) ? "selected" : "" );
out.write(">Active Directory</option>\n <option value=\"3\" ");
out.print( "openldap".equals(serverType) ? "selected" : "" );
out.write(">OpenLDAP</option>\n <option value=\"4\" ");
out.print( "other".equals(serverType) ? "selected" : "" );
out.write('>');
if (_jspx_meth_fmt_005fmessage_005f13(_jspx_page_context))
return;
out.write("</option>\n </select><span class=\"jive-setup-helpicon\" onmouseover=\"domTT_activate(this, event, 'content', '");
if (_jspx_meth_fmt_005fmessage_005f14(_jspx_page_context))
return;
out.write("', 'styleClass', 'jiveTooltip', 'trail', true, 'delay', 300, 'lifetime', 8000);\"></span>\n </td>\n\t\t\t</tr>\n ");
}
out.write("\n <tr>\n\t\t\t <td align=\"right\" width=\"1%\" nowrap=\"nowrap\">");
if (_jspx_meth_fmt_005fmessage_005f15(_jspx_page_context))
return;
out.write(":</td>\n <td width=\"1%\">\n <table cellpadding=\"0\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n <tr>\n <td width=\"1%\" nowrap=\"nowrap\">\n <input type=\"text\" name=\"host\" id=\"jiveLDAPphost\" size=\"22\" maxlength=\"50\" value=\"");
out.print( host!=null?host:"" );
out.write("\">\n </td>\n <td width=\"99%\">\n <span class=\"jive-setup-helpicon\" onmouseover=\"domTT_activate(this, event, 'content', '");
if (_jspx_meth_fmt_005fmessage_005f16(_jspx_page_context))
return;
out.write("', 'styleClass', 'jiveTooltip', 'trail', true, 'delay', 300, 'lifetime', 8000);\"></span>\n </td>\n </tr>\n </table>\n </td>\n <td align=\"right\" width=\"1%\" nowrap=\"nowrap\"> ");
if (_jspx_meth_fmt_005fmessage_005f17(_jspx_page_context))
return;
out.write(":</td>\n <td><input type=\"text\" name=\"port\" id=\"jiveLDAPport\" size=\"5\" maxlength=\"5\" value=\"");
out.print( port );
out.write("\"><span class=\"jive-setup-helpicon\" onmouseover=\"domTT_activate(this, event, 'content', '");
if (_jspx_meth_fmt_005fmessage_005f18(_jspx_page_context))
return;
out.write("', 'styleClass', 'jiveTooltip', 'trail', true, 'delay', 300, 'lifetime', 8000);\"></span></td>\n\t\t\t</tr>\n\t\t\t<tr>\n <td align=\"right\">");
if (_jspx_meth_fmt_005fmessage_005f19(_jspx_page_context))
return;
out.write(":</td>\n <td colspan=\"3\">\n <input type=\"text\" name=\"basedn\" id=\"jiveLDAPbasedn\" size=\"40\" maxlength=\"150\" value=\"");
out.print( baseDN!=null?baseDN:"");
out.write("\" style=\"width:90%;\">\n <span class=\"jive-setup-helpicon\" onmouseover=\"domTT_activate(this, event, 'content', '");
if (_jspx_meth_fmt_005fmessage_005f20(_jspx_page_context))
return;
out.write("', 'styleClass', 'jiveTooltip', 'trail', true, 'delay', 300, 'lifetime', 16000);\"></span>\n </td>\n\t\t\t</tr>\n <tr><td colspan=\"4\"> </td></tr>\n <tr>\n\t\t\t <td colspan=\"4\"><strong>");
if (_jspx_meth_fmt_005fmessage_005f21(_jspx_page_context))
return;
out.write(":</strong></td>\n\t\t\t</tr>\n\t\t\t<tr>\n <td align=\"right\" width=\"1%\" nowrap=\"nowrap\">");
if (_jspx_meth_fmt_005fmessage_005f22(_jspx_page_context))
return;
out.write(":</td>\n <td colspan=\"3\">\n <input type=\"text\" name=\"admindn\" id=\"jiveLDAPadmindn\" size=\"40\" maxlength=\"150\" value=\"");
out.print( adminDN!=null?adminDN:"");
out.write("\" style=\"width:90%;\">\n <span class=\"jive-setup-helpicon\" onmouseover=\"domTT_activate(this, event, 'content', '");
if (_jspx_meth_fmt_005fmessage_005f23(_jspx_page_context))
return;
out.write("', 'styleClass', 'jiveTooltip', 'trail', true, 'delay', 300, 'lifetime', -1);\"></span>\n </td>\n\t\t\t</tr>\n\t\t\t<tr>\n <td align=\"right\" width=\"1%\" nowrap=\"nowrap\">");
if (_jspx_meth_fmt_005fmessage_005f24(_jspx_page_context))
return;
out.write(":</td>\n <td colspan=\"3\"><input type=\"password\" name=\"adminpwd\" id=\"jiveLDAPadminpwd\" size=\"22\" maxlength=\"30\" value=\"");
out.print( adminPassword!=null?adminPassword:"");
out.write("\"> <span class=\"jive-setup-helpicon\" onmouseover=\"domTT_activate(this, event, 'content', '");
if (_jspx_meth_fmt_005fmessage_005f25(_jspx_page_context))
return;
out.write("', 'styleClass', 'jiveTooltip', 'trail', true, 'delay', 300, 'lifetime', 8000);\"></span></td>\n\t\t\t</tr>\n\t\t\t</table>\n\t\t</div>\n\t\t<!-- END jive-contentBox_bluebox -->\n\n\n\t\t<!-- BEGIN jiveAdvancedButton -->\n\t\t<div class=\"jiveAdvancedButton\">\n\t\t\t<a href=\"#\" onclick=\"togglePanel(jiveAdvanced); return false;\" id=\"jiveAdvancedLink\">");
if (_jspx_meth_fmt_005fmessage_005f26(_jspx_page_context))
return;
out.write("</a>\n\t\t</div>\n\t\t<!-- END jiveAdvancedButton -->\n\n\t\t<!-- BEGIN jiveAdvancedPanelcs (advanced connection settings) -->\n\t\t<div class=\"jiveadvancedPanelcs\" id=\"jiveAdvanced\" style=\"display: none;\">\n\t\t\t<div>\n\t\t\t\t<table border=\"0\" cellpadding=\"0\" cellspacing=\"1\">\n\t\t\t\t<thead>\n\t\t\t\t<tr>\n\t\t\t\t\t<th width=\"10%\"></th>\n\t\t\t\t\t<th></th>\n\t\t\t\t\t<th width=\"50\">");
if (_jspx_meth_fmt_005fmessage_005f27(_jspx_page_context))
return;
out.write("</th>\n\t\t\t\t\t<th width=\"50\">");
if (_jspx_meth_fmt_005fmessage_005f28(_jspx_page_context))
return;
out.write("</th>\n\t\t\t\t</tr>\n\t\t\t\t</thead>\n\t\t\t\t<tbody>\n\t\t\t\t<tr>\n\t\t\t\t\t<td class=\"jive-advancedLabel\" nowrap>\n\t\t\t\t\t\t");
if (_jspx_meth_fmt_005fmessage_005f29(_jspx_page_context))
return;
out.write(":\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedDesc jive-advancedBorderBottom jive-advancedBorderRight\">\n\t\t\t\t\t ");
if (_jspx_meth_fmt_005fmessage_005f30(_jspx_page_context))
return;
out.write("\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedBorderBottom jive-advancedBorderRight\" align=\"center\">\n\t\t\t\t\t\t<input type=\"radio\" name=\"connectionpool\" value=\"true\" ");
if (connectionPoolEnabled) {
out.write("checked ");
}
out.write(">\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedBorderBottom\" align=\"center\">\n\t\t\t\t\t\t<input type=\"radio\" name=\"connectionpool\" value=\"false\" ");
if (!connectionPoolEnabled) {
out.write("checked ");
}
out.write(">\n\t\t\t\t\t</td>\n\t\t\t\t</tr>\n\t\t\t\t<tr>\n\t\t\t\t\t<td class=\"jive-advancedLabel\" nowrap>\n\t\t\t\t\t\t");
if (_jspx_meth_fmt_005fmessage_005f31(_jspx_page_context))
return;
out.write(":\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedDesc jive-advancedBorderBottom jive-advancedBorderRight\">\n\t\t\t\t\t\t");
if (_jspx_meth_fmt_005fmessage_005f32(_jspx_page_context))
return;
out.write("\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedBorderBottom jive-advancedBorderRight\" align=\"center\">\n\t\t\t\t\t\t<input type=\"radio\" name=\"ssl\" value=\"true\" ");
if (sslEnabled) {
out.write("checked ");
}
out.write(">\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedBorderBottom\" align=\"center\">\n\t\t\t\t\t\t<input type=\"radio\" name=\"ssl\" value=\"false\" ");
if (!sslEnabled) {
out.write("checked ");
}
out.write(">\n\t\t\t\t\t</td>\n\t\t\t\t</tr>\n\t\t\t\t<tr>\n\t\t\t\t\t<td class=\"jive-advancedLabel\" nowrap>\n\t\t\t\t\t\t");
if (_jspx_meth_fmt_005fmessage_005f33(_jspx_page_context))
return;
out.write(":\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedDesc jive-advancedBorderBottom jive-advancedBorderRight\">\n\t\t\t\t\t\t");
if (_jspx_meth_fmt_005fmessage_005f34(_jspx_page_context))
return;
out.write("\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedBorderBottom jive-advancedBorderRight\" align=\"center\">\n\t\t\t\t\t\t<input type=\"radio\" name=\"starttls\" value=\"true\" ");
if (startTlsEnabled) {
out.write("checked ");
}
out.write(">\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedBorderBottom\" align=\"center\">\n\t\t\t\t\t\t<input type=\"radio\" name=\"starttls\" value=\"false\" ");
if (!startTlsEnabled) {
out.write("checked ");
}
out.write(">\n\t\t\t\t\t</td>\n\t\t\t\t</tr>\n\t\t\t\t<tr>\n\t\t\t\t\t<td class=\"jive-advancedLabel\" nowrap>\n\t\t\t\t\t\t");
if (_jspx_meth_fmt_005fmessage_005f35(_jspx_page_context))
return;
out.write(":\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedDesc jive-advancedBorderBottom jive-advancedBorderRight\">\n\t\t\t\t\t\t");
if (_jspx_meth_fmt_005fmessage_005f36(_jspx_page_context))
return;
out.write("\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedBorderBottom jive-advancedBorderRight\" align=\"center\">\n\t\t\t\t\t\t<input type=\"radio\" name=\"debug\" value=\"true\" ");
if (debugEnabled) {
out.write("checked ");
}
out.write(">\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedBorderBottom\" align=\"center\">\n\t\t\t\t\t\t<input type=\"radio\" name=\"debug\" value=\"false\" ");
if (!debugEnabled) {
out.write("checked ");
}
out.write(">\n\t\t\t\t\t</td>\n\t\t\t\t</tr>\n\t\t\t\t<tr>\n\t\t\t\t\t<td class=\"jive-advancedLabel\" nowrap>\n\t\t\t\t\t\t");
if (_jspx_meth_fmt_005fmessage_005f37(_jspx_page_context))
return;
out.write(":\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedDesc jive-advancedBorderBottom jive-advancedBorderRight\">\n\t\t\t\t\t\t");
if (_jspx_meth_fmt_005fmessage_005f38(_jspx_page_context))
return;
out.write("\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedBorderBottom jive-advancedBorderRight\" align=\"center\">\n\t\t\t\t\t\t<input type=\"radio\" name=\"referrals\" value=\"true\" ");
if (referralsEnabled) {
out.write("checked ");
}
out.write(">\n\t\t\t\t\t</td>\n\t\t\t\t\t<td class=\"jive-advancedBorderBottom\" align=\"center\">\n\t\t\t\t\t\t<input type=\"radio\" name=\"referrals\" value=\"false\" ");
if (!referralsEnabled) {
out.write("checked ");
}
out.write(">\n\t\t\t\t\t</td>\n\t\t\t\t</tr>\n <tr>\n <td class=\"jive-advancedLabel\" nowrap>\n ");
if (_jspx_meth_fmt_005fmessage_005f39(_jspx_page_context))
return;
out.write(":\n </td>\n <td class=\"jive-advancedDesc jive-advancedBorderBottom jive-advancedBorderRight\">\n ");
if (_jspx_meth_fmt_005fmessage_005f40(_jspx_page_context))
return;
out.write("\n </td>\n <td class=\"jive-advancedBorderBottom jive-advancedBorderRight\" align=\"center\">\n <input type=\"radio\" name=\"aliasreferrals\" value=\"true\" ");
if (aliasReferralsEnabled) {
out.write("checked ");
}
out.write(">\n </td>\n <td class=\"jive-advancedBorderBottom\" align=\"center\">\n <input type=\"radio\" name=\"aliasreferrals\" value=\"false\" ");
if (!aliasReferralsEnabled) {
out.write("checked ");
}
out.write(">\n </td>\n </tr>\n <tr>\n <td class=\"jive-advancedLabel\" nowrap>\n ");
if (_jspx_meth_fmt_005fmessage_005f41(_jspx_page_context))
return;
out.write(":\n </td>\n <td class=\"jive-advancedDesc jive-advancedBorderBottom jive-advancedBorderRight\">\n ");
if (_jspx_meth_fmt_005fmessage_005f42(_jspx_page_context))
return;
out.write("\n </td>\n <td class=\"jive-advancedBorderBottom jive-advancedBorderRight\" align=\"center\">\n <input type=\"radio\" name=\"enclosedns\" value=\"true\" ");
if (encloseDNs) {
out.write("checked ");
}
out.write(">\n </td>\n <td class=\"jive-advancedBorderBottom\" align=\"center\">\n <input type=\"radio\" name=\"enclosedns\" value=\"false\" ");
if (!encloseDNs) {
out.write("checked ");
}
out.write(">\n </td>\n </tr> \n </tbody>\n\t\t\t\t</table>\n\t\t\t</div>\n\t\t</div>\n\t\t<!-- END jiveAdvancedPanelcs (advanced connection settings) -->\n\n\n\t\t<!-- BEGIN jive-buttons -->\n\t\t<div class=\"jive-buttons\">\n\n\t\t\t<!-- BEGIN right-aligned buttons -->\n\t\t\t<div align=\"right\">\n\n <input type=\"Submit\" name=\"test\" value=\"");
if (_jspx_meth_fmt_005fmessage_005f43(_jspx_page_context))
return;
out.write("\" id=\"jive-setup-test\" border=\"0\">\n\n <input type=\"Submit\" name=\"save\" value=\"");
if (_jspx_meth_fmt_005fmessage_005f44(_jspx_page_context))
return;
out.write("\" id=\"jive-setup-save\" border=\"0\">\n\t\t\t</div>\n\t\t\t<!-- END right-aligned buttons -->\n\n\t\t</div>\n\t\t<!-- END jive-buttons -->\n\n\t</form>\n\n\t</div>\n\t<!-- END jive-contentBox -->\n\n\n\n</body>\n</html>\n");
out.write('\n');
} catch (java.lang.Throwable t) {
if (!(t instanceof javax.servlet.jsp.SkipPageException)){
out = _jspx_out;
if (out != null && out.getBufferSize() != 0)
try {
if (response.isCommitted()) {
out.flush();
} else {
out.clearBuffer();
}
} catch (java.io.IOException e) {}
if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
else throw new ServletException(t);
}
} finally {
_jspxFactory.releasePageContext(_jspx_page_context);
}
}
private boolean _jspx_meth_fmt_005fmessage_005f0(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f0 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f0.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f0.setParent(null);
// /setup/ldap-server.jspf(154,11) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f0.setKey("setup.ldap.title");
int _jspx_eval_fmt_005fmessage_005f0 = _jspx_th_fmt_005fmessage_005f0.doStartTag();
if (_jspx_th_fmt_005fmessage_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f0);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f0);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f1(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f1 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f1.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f1.setParent(null);
// /setup/ldap-server.jspf(163,55) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f1.setKey("global.test");
int _jspx_eval_fmt_005fmessage_005f1 = _jspx_th_fmt_005fmessage_005f1.doStartTag();
if (_jspx_th_fmt_005fmessage_005f1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f1);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f1);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f2(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f2 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f2.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f2.setParent(null);
// /setup/ldap-server.jspf(175,8) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f2.setKey("setup.ldap.profile");
int _jspx_eval_fmt_005fmessage_005f2 = _jspx_th_fmt_005fmessage_005f2.doStartTag();
if (_jspx_th_fmt_005fmessage_005f2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f2);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f2);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f3(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f3 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f3.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f3.setParent(null);
// /setup/ldap-server.jspf(175,56) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f3.setKey("setup.ldap.connection_settings");
int _jspx_eval_fmt_005fmessage_005f3 = _jspx_th_fmt_005fmessage_005f3.doStartTag();
if (_jspx_th_fmt_005fmessage_005f3.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f3);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f3);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f4(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f4 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f4.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f4.setParent(null);
// /setup/ldap-server.jspf(180,45) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f4.setKey("setup.ldap.connection_settings");
int _jspx_eval_fmt_005fmessage_005f4 = _jspx_th_fmt_005fmessage_005f4.doStartTag();
if (_jspx_th_fmt_005fmessage_005f4.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f4);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f4);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f5(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f5 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f5.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f5.setParent(null);
// /setup/ldap-server.jspf(181,41) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f5.setKey("setup.ldap.user_mapping");
int _jspx_eval_fmt_005fmessage_005f5 = _jspx_th_fmt_005fmessage_005f5.doStartTag();
if (_jspx_th_fmt_005fmessage_005f5.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f5);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f5);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f6(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f6 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f6.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f6.setParent(null);
// /setup/ldap-server.jspf(182,41) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f6.setKey("setup.ldap.group_mapping");
int _jspx_eval_fmt_005fmessage_005f6 = _jspx_th_fmt_005fmessage_005f6.doStartTag();
if (_jspx_th_fmt_005fmessage_005f6.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f6);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f6);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f7(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f7 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f7.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f7.setParent(null);
// /setup/ldap-server.jspf(189,5) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f7.setKey("setup.ldap.step_one");
int _jspx_eval_fmt_005fmessage_005f7 = _jspx_th_fmt_005fmessage_005f7.doStartTag();
if (_jspx_th_fmt_005fmessage_005f7.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f7);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f7);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f8(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f8 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f8.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f8.setParent(null);
// /setup/ldap-server.jspf(189,54) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f8.setKey("setup.ldap.connection_settings");
int _jspx_eval_fmt_005fmessage_005f8 = _jspx_th_fmt_005fmessage_005f8.doStartTag();
if (_jspx_th_fmt_005fmessage_005f8.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f8);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f8);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f9(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f9 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f9.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f9.setParent(null);
// /setup/ldap-server.jspf(190,4) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f9.setKey("setup.ldap.server.description");
int _jspx_eval_fmt_005fmessage_005f9 = _jspx_th_fmt_005fmessage_005f9.doStartTag();
if (_jspx_th_fmt_005fmessage_005f9.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f9);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f9);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f10(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f10 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f10.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f10.setParent(null);
// /setup/ldap-server.jspf(207,31) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f10.setKey("setup.ldap.server.ldap_server");
int _jspx_eval_fmt_005fmessage_005f10 = _jspx_th_fmt_005fmessage_005f10.doStartTag();
if (_jspx_th_fmt_005fmessage_005f10.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f10);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f10);
return false;
}
private boolean _jspx_meth_fmt_005fmessage_005f11(javax.servlet.jsp.PageContext _jspx_page_context)
throws java.lang.Throwable {
javax.servlet.jsp.PageContext pageContext = _jspx_page_context;
javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut();
// fmt:message
org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f11 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
_jspx_th_fmt_005fmessage_005f11.setPageContext(_jspx_page_context);
_jspx_th_fmt_005fmessage_005f11.setParent(null);
// /setup/ldap-server.jspf(211,61) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null
_jspx_th_fmt_005fmessage_005f11.setKey("setup.ldap.server.type");
int _jspx_eval_fmt_005fmessage_005f11 = _jspx_th_fmt_005fmessage_005f11.doStartTag();
if (_jspx_th_fmt_005fmessage_005f11.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) {
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f11);
return true;
}
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f11);
return false;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.type_select}
 * (JSP origin: /setup/ldap-server.jspf(214,86)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f12(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f12 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f12.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f12.setParent(null);
  _jspx_th_fmt_005fmessage_005f12.setKey("setup.ldap.server.type_select");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f12.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f12.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f12);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.type_other}
 * (JSP origin: /setup/ldap-server.jspf(217,94)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f13(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f13 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f13.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f13.setParent(null);
  _jspx_th_fmt_005fmessage_005f13.setKey("setup.ldap.server.type_other");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f13.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f13.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f13);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.type_help}
 * (JSP origin: /setup/ldap-server.jspf(218,116)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f14(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f14 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f14.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f14.setParent(null);
  _jspx_th_fmt_005fmessage_005f14.setKey("setup.ldap.server.type_help");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f14.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f14.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f14);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.host}
 * (JSP origin: /setup/ldap-server.jspf(223,52)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f15(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f15 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f15.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f15.setParent(null);
  _jspx_th_fmt_005fmessage_005f15.setKey("setup.ldap.server.host");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f15.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f15.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f15);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.host_help}
 * (JSP origin: /setup/ldap-server.jspf(231,115)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f16(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f16 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f16.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f16.setParent(null);
  _jspx_th_fmt_005fmessage_005f16.setKey("setup.ldap.server.host_help");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f16.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f16.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f16);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.port}
 * (JSP origin: /setup/ldap-server.jspf(236,74)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f17(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f17 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f17.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f17.setParent(null);
  _jspx_th_fmt_005fmessage_005f17.setKey("setup.ldap.server.port");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f17.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f17.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f17);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.port_help}
 * (JSP origin: /setup/ldap-server.jspf(237,199)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f18(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f18 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f18.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f18.setParent(null);
  _jspx_th_fmt_005fmessage_005f18.setKey("setup.ldap.server.port_help");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f18.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f18.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f18);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.basedn}
 * (JSP origin: /setup/ldap-server.jspf(240,34)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f19(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f19 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f19.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f19.setParent(null);
  _jspx_th_fmt_005fmessage_005f19.setKey("setup.ldap.server.basedn");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f19.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f19.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f19);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.basedn_help}
 * (JSP origin: /setup/ldap-server.jspf(243,107)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f20(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f20 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f20.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f20.setParent(null);
  _jspx_th_fmt_005fmessage_005f20.setKey("setup.ldap.server.basedn_help");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f20.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f20.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f20);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.auth}
 * (JSP origin: /setup/ldap-server.jspf(248,31)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f21(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f21 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f21.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f21.setParent(null);
  _jspx_th_fmt_005fmessage_005f21.setKey("setup.ldap.server.auth");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f21.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f21.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f21);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.admindn}
 * (JSP origin: /setup/ldap-server.jspf(251,61)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f22(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f22 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f22.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f22.setParent(null);
  _jspx_th_fmt_005fmessage_005f22.setKey("setup.ldap.server.admindn");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f22.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f22.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f22);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.admindn_help}
 * (JSP origin: /setup/ldap-server.jspf(254,107)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f23(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f23 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f23.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f23.setParent(null);
  _jspx_th_fmt_005fmessage_005f23.setKey("setup.ldap.server.admindn_help");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f23.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f23.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f23);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.password}
 * (JSP origin: /setup/ldap-server.jspf(258,61)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f24(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f24 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f24.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f24.setParent(null);
  _jspx_th_fmt_005fmessage_005f24.setKey("setup.ldap.server.password");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f24.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f24.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f24);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.password_help}
 * (JSP origin: /setup/ldap-server.jspf(259,257)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f25(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f25 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f25.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f25.setParent(null);
  _jspx_th_fmt_005fmessage_005f25.setKey("setup.ldap.server.password_help");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f25.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f25.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f25);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.advanced}
 * (JSP origin: /setup/ldap-server.jspf(268,88)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f26(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f26 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f26.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f26.setParent(null);
  _jspx_th_fmt_005fmessage_005f26.setKey("setup.ldap.advanced");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f26.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f26.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f26);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code global.yes}
 * (JSP origin: /setup/ldap-server.jspf(280,20)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f27(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f27 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f27.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f27.setParent(null);
  _jspx_th_fmt_005fmessage_005f27.setKey("global.yes");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f27.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f27.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f27);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code global.no}
 * (JSP origin: /setup/ldap-server.jspf(281,20)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f28(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f28 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f28.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f28.setParent(null);
  _jspx_th_fmt_005fmessage_005f28.setKey("global.no");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f28.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f28.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f28);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.connection_pool}
 * (JSP origin: /setup/ldap-server.jspf(287,6)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f29(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f29 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f29.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f29.setParent(null);
  _jspx_th_fmt_005fmessage_005f29.setKey("setup.ldap.server.connection_pool");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f29.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f29.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f29);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.connection_pool_help}
 * (JSP origin: /setup/ldap-server.jspf(290,9)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f30(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f30 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f30.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f30.setParent(null);
  _jspx_th_fmt_005fmessage_005f30.setKey("setup.ldap.server.connection_pool_help");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f30.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f30.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f30);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.ssl}
 * (JSP origin: /setup/ldap-server.jspf(301,6)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f31(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f31 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f31.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f31.setParent(null);
  _jspx_th_fmt_005fmessage_005f31.setKey("setup.ldap.server.ssl");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f31.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f31.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f31);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.ssl_help}
 * (JSP origin: /setup/ldap-server.jspf(304,6)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f32(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f32 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f32.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f32.setParent(null);
  _jspx_th_fmt_005fmessage_005f32.setKey("setup.ldap.server.ssl_help");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f32.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f32.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f32);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.starttls}
 * (JSP origin: /setup/ldap-server.jspf(315,6)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f33(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f33 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f33.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f33.setParent(null);
  _jspx_th_fmt_005fmessage_005f33.setKey("setup.ldap.server.starttls");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f33.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f33.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f33);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.starttls_help}
 * (JSP origin: /setup/ldap-server.jspf(318,6)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f34(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f34 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f34.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f34.setParent(null);
  _jspx_th_fmt_005fmessage_005f34.setKey("setup.ldap.server.starttls_help");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f34.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f34.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f34);
  return skipPage;
}
/**
 * Renders the fmt:message tag for key {@code setup.ldap.server.debug}
 * (JSP origin: /setup/ldap-server.jspf(329,6)).
 *
 * @param _jspx_page_context the page context of the current request
 * @return {@code true} when the tag signalled SKIP_PAGE and rendering must stop,
 *         {@code false} to continue rendering the page
 * @throws java.lang.Throwable any failure raised by the tag lifecycle
 */
private boolean _jspx_meth_fmt_005fmessage_005f35(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
  // Borrow a pooled MessageTag handler (bodyless fmt:message with only a "key" attribute).
  org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f35 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
  _jspx_th_fmt_005fmessage_005f35.setPageContext(_jspx_page_context);
  _jspx_th_fmt_005fmessage_005f35.setParent(null);
  _jspx_th_fmt_005fmessage_005f35.setKey("setup.ldap.server.debug");
  // The tag has no body, so doStartTag() is invoked purely for its side effects;
  // its return value was previously stored in an unused local and is discarded here.
  _jspx_th_fmt_005fmessage_005f35.doStartTag();
  // Evaluate SKIP_PAGE before releasing the handler so reuse() happens exactly once.
  boolean skipPage = _jspx_th_fmt_005fmessage_005f35.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f35);
  return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f36(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // fmt:message — emits the localized text for the key below.
    // Jasper-generated helper, tidied: unused locals and the duplicated
    // pool-release tail removed; behavior is unchanged.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag =
            (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    // /setup/ldap-server.jspf(332,6)
    tag.setKey("setup.ldap.server.debug_help");
    tag.doStartTag();
    // true => the tag asked the generated page to stop rendering (SKIP_PAGE).
    boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f37(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // fmt:message — emits the localized text for the key below.
    // Jasper-generated helper, tidied: unused locals and the duplicated
    // pool-release tail removed; behavior is unchanged.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag =
            (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    // /setup/ldap-server.jspf(343,6)
    tag.setKey("setup.ldap.server.referral");
    tag.doStartTag();
    // true => the tag asked the generated page to stop rendering (SKIP_PAGE).
    boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f38(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // fmt:message — emits the localized text for the key below.
    // Jasper-generated helper, tidied: unused locals and the duplicated
    // pool-release tail removed; behavior is unchanged.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag =
            (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    // /setup/ldap-server.jspf(346,6)
    tag.setKey("setup.ldap.server.referral_help");
    tag.doStartTag();
    // true => the tag asked the generated page to stop rendering (SKIP_PAGE).
    boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f39(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // fmt:message — emits the localized text for the key below.
    // Jasper-generated helper, tidied: unused locals and the duplicated
    // pool-release tail removed; behavior is unchanged.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag =
            (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    // /setup/ldap-server.jspf(357,24)
    tag.setKey("setup.ldap.server.alias_dereference");
    tag.doStartTag();
    // true => the tag asked the generated page to stop rendering (SKIP_PAGE).
    boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f40(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // fmt:message — emits the localized text for the key below.
    // Jasper-generated helper, tidied: unused locals and the duplicated
    // pool-release tail removed; behavior is unchanged.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag =
            (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    // /setup/ldap-server.jspf(360,24)
    tag.setKey("setup.ldap.server.alias_dereference_help");
    tag.doStartTag();
    // true => the tag asked the generated page to stop rendering (SKIP_PAGE).
    boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f41(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // fmt:message — emits the localized text for the key below.
    // Jasper-generated helper, tidied: unused locals and the duplicated
    // pool-release tail removed; behavior is unchanged.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag =
            (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    // /setup/ldap-server.jspf(371,24)
    tag.setKey("setup.ldap.server.enclose_dns");
    tag.doStartTag();
    // true => the tag asked the generated page to stop rendering (SKIP_PAGE).
    boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f42(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // fmt:message — emits the localized text for the key below.
    // Jasper-generated helper, tidied: unused locals and the duplicated
    // pool-release tail removed; behavior is unchanged.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag =
            (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    // /setup/ldap-server.jspf(374,24)
    tag.setKey("setup.ldap.server.enclose_dns_help");
    tag.doStartTag();
    // true => the tag asked the generated page to stop rendering (SKIP_PAGE).
    boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f43(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // fmt:message — emits the localized text for the key below.
    // Jasper-generated helper, tidied: unused locals and the duplicated
    // pool-release tail removed; behavior is unchanged.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag =
            (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    // /setup/ldap-server.jspf(396,56)
    tag.setKey("setup.ldap.test");
    tag.doStartTag();
    // true => the tag asked the generated page to stop rendering (SKIP_PAGE).
    boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
    return skipPage;
}
private boolean _jspx_meth_fmt_005fmessage_005f44(javax.servlet.jsp.PageContext _jspx_page_context)
        throws java.lang.Throwable {
    // fmt:message — emits the localized text for the key below.
    // Jasper-generated helper, tidied: unused locals and the duplicated
    // pool-release tail removed; behavior is unchanged.
    org.apache.taglibs.standard.tag.rt.fmt.MessageTag tag =
            (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    // /setup/ldap-server.jspf(398,56)
    tag.setKey("setup.ldap.continue");
    tag.doStartTag();
    // true => the tag asked the generated page to stop rendering (SKIP_PAGE).
    boolean skipPage = tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(tag);
    return skipPage;
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2010, Sun Microsystems, Inc., Kohsuke Kawaguchi,
* Alan Harder, Yahoo! Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.util;
import hudson.Util;
import java.util.ArrayList;
import java.util.List;
import java.util.Arrays;
import java.util.Map;
import java.util.BitSet;
import java.util.Properties;
import java.util.Map.Entry;
import java.io.Serializable;
import java.io.File;
import java.io.IOException;
import java.util.Set;
/**
 * Used to build up arguments for a process invocation.
 *
 * <p>Individual arguments can be marked as "masked" (e.g. passwords) so that
 * callers rendering the command line can suppress the sensitive values; see
 * {@link #hasMaskedArguments()} and {@link #toMaskArray()}.
 *
 * @author Kohsuke Kawaguchi
 */
public class ArgumentListBuilder implements Serializable {
    /** Accumulated arguments, in invocation order. */
    private final List<String> args = new ArrayList<String>();

    /**
     * Bit mask indicating arguments that shouldn't be echoed-back (e.g., password)
     */
    private BitSet mask = new BitSet();

    public ArgumentListBuilder() {
    }

    public ArgumentListBuilder(String... args) {
        add(args);
    }

    /**
     * Adds the string form of the given object as one unmasked argument.
     * A null value is silently ignored, consistent with {@link #add(String)}.
     */
    public ArgumentListBuilder add(Object a) {
        return add(a, false);
    }

    /**
     * Adds the string form of the given object, optionally masked.
     * A null value is silently ignored.
     *
     * @since 1.378
     */
    public ArgumentListBuilder add(Object a, boolean mask) {
        // Fix: this used to throw NullPointerException on a null object while
        // add(String) silently skipped null; the overloads now agree.
        return add(a == null ? null : a.toString(), mask);
    }

    /** Adds the absolute path of the given file as one unmasked argument. */
    public ArgumentListBuilder add(File f) {
        return add(f.getAbsolutePath(), false);
    }

    /** Adds one unmasked argument; null is silently ignored. */
    public ArgumentListBuilder add(String a) {
        return add(a, false);
    }

    /**
     * Adds one argument, recording it as masked when requested.
     * A null value is silently ignored.
     *
     * @since 1.378
     */
    public ArgumentListBuilder add(String a, boolean mask) {
        if (a != null) {
            if (mask) {
                // Mask bits are indexed by the argument's position in 'args'.
                this.mask.set(args.size());
            }
            args.add(a);
        }
        return this;
    }

    /** Inserts the given (unmasked) arguments at the front of the list. */
    public ArgumentListBuilder prepend(String... args) {
        // left-shift the mask so each mask bit keeps following its argument
        BitSet nm = new BitSet(this.args.size() + args.length);
        for (int i = 0; i < this.args.size(); i++)
            nm.set(i + args.length, mask.get(i));
        mask = nm;

        this.args.addAll(0, Arrays.asList(args));
        return this;
    }

    /**
     * Adds an argument by quoting it.
     * This is necessary only in a rare circumstance,
     * such as when adding argument for ssh and rsh.
     *
     * Normal process invocations don't need it, because each
     * argument is treated as its own string and never merged into one.
     */
    public ArgumentListBuilder addQuoted(String a) {
        return add('"' + a + '"', false);
    }

    /**
     * Adds a quoted argument, optionally masked.
     *
     * @since 1.378
     */
    public ArgumentListBuilder addQuoted(String a, boolean mask) {
        return add('"' + a + '"', mask);
    }

    /** Adds each of the given strings as an unmasked argument. */
    public ArgumentListBuilder add(String... args) {
        for (String arg : args) {
            add(arg);
        }
        return this;
    }

    /**
     * Decomposes the given token into multiple arguments by splitting via whitespace.
     */
    public ArgumentListBuilder addTokenized(String s) {
        if (s == null) return this;
        add(Util.tokenize(s));
        return this;
    }

    /**
     * Adds a single "-Dkey=value"-style pair, optionally masked.
     * A null key is silently ignored.
     *
     * @param prefix the "-D" portion; defaults to "-D" when null
     * @since 1.378
     */
    public ArgumentListBuilder addKeyValuePair(String prefix, String key, String value, boolean mask) {
        if (key == null) return this;
        add(((prefix == null) ? "-D" : prefix) + key + '=' + value, mask);
        return this;
    }

    /**
     * Adds key value pairs as "-Dkey=value -Dkey=value ..."
     *
     * <tt>-D</tt> portion is configurable as the 'prefix' parameter.
     * @since 1.114
     */
    public ArgumentListBuilder addKeyValuePairs(String prefix, Map<String, String> props) {
        for (Entry<String, String> e : props.entrySet())
            addKeyValuePair(prefix, e.getKey(), e.getValue(), false);
        return this;
    }

    /**
     * Adds key value pairs as "-Dkey=value -Dkey=value ..." with masking.
     *
     * @param prefix
     *      Configures the -D portion of the example. Defaults to -D if null.
     * @param props
     *      The map of key/value pairs to add
     * @param propsToMask
     *      Set containing key names to mark as masked in the argument list. Key
     *      names that do not exist in the set will be added unmasked.
     * @since 1.378
     */
    public ArgumentListBuilder addKeyValuePairs(String prefix, Map<String, String> props, Set<String> propsToMask) {
        for (Entry<String, String> e : props.entrySet()) {
            addKeyValuePair(prefix, e.getKey(), e.getValue(), (propsToMask != null) && propsToMask.contains(e.getKey()));
        }
        return this;
    }

    /**
     * Adds key value pairs as "-Dkey=value -Dkey=value ..." by parsing a given string using {@link Properties}.
     *
     * @param prefix
     *      The '-D' portion of the example. Defaults to -D if null.
     * @param properties
     *      The persisted form of {@link Properties}. For example, "abc=def\nghi=jkl". Can be null, in which
     *      case this method becomes no-op.
     * @param vr
     *      {@link VariableResolver} to be performed on the values.
     * @since 1.262
     */
    public ArgumentListBuilder addKeyValuePairsFromPropertyString(String prefix, String properties, VariableResolver<String> vr) throws IOException {
        // Fix: the resolver parameter was a raw VariableResolver, producing an
        // unchecked call to Util.replaceMacro; it is now properly generified.
        if (properties == null) return this;

        for (Entry<Object, Object> entry : Util.loadProperties(properties).entrySet()) {
            addKeyValuePair(prefix, (String) entry.getKey(), Util.replaceMacro(entry.getValue().toString(), vr), false);
        }
        return this;
    }

    /**
     * Adds key value pairs as "-Dkey=value -Dkey=value ..." by parsing a given string using {@link Properties} with masking.
     *
     * @param prefix
     *      The '-D' portion of the example. Defaults to -D if null.
     * @param properties
     *      The persisted form of {@link Properties}. For example, "abc=def\nghi=jkl". Can be null, in which
     *      case this method becomes no-op.
     * @param vr
     *      {@link VariableResolver} to be performed on the values.
     * @param propsToMask
     *      Set containing key names to mark as masked in the argument list. Key
     *      names that do not exist in the set will be added unmasked.
     * @since 1.378
     */
    public ArgumentListBuilder addKeyValuePairsFromPropertyString(String prefix, String properties, VariableResolver<String> vr, Set<String> propsToMask) throws IOException {
        if (properties == null) return this;

        for (Entry<Object, Object> entry : Util.loadProperties(properties).entrySet()) {
            String key = (String) entry.getKey();
            addKeyValuePair(prefix, key, Util.replaceMacro(entry.getValue().toString(), vr), (propsToMask != null) && propsToMask.contains(key));
        }
        return this;
    }

    /** Returns the arguments as a newly allocated array. */
    public String[] toCommandArray() {
        return args.toArray(new String[args.size()]);
    }

    /**
     * Creates an independent copy of this builder (arguments and mask bits).
     * Note: this does not call {@code super.clone()}; it is effectively a
     * copy factory and the class does not implement {@link Cloneable}.
     */
    @Override
    public ArgumentListBuilder clone() {
        ArgumentListBuilder r = new ArgumentListBuilder();
        r.args.addAll(this.args);
        r.mask = (BitSet) this.mask.clone();
        return r;
    }

    /**
     * Re-initializes the arguments list.
     */
    public void clear() {
        args.clear();
        mask.clear();
    }

    /**
     * Returns the live internal argument list; mutations by the caller are
     * reflected in this builder (but bypass mask bookkeeping).
     */
    public List<String> toList() {
        return args;
    }

    /**
     * Just adds quotes around args containing spaces, but no other special characters,
     * so this method should generally be used only for informational/logging purposes.
     */
    public String toStringWithQuote() {
        StringBuilder buf = new StringBuilder();
        for (String arg : args) {
            if (buf.length() > 0) buf.append(' ');
            if (arg.indexOf(' ') >= 0 || arg.length() == 0)
                buf.append('"').append(arg).append('"');
            else
                buf.append(arg);
        }
        return buf.toString();
    }

    /**
     * Wrap command in a CMD.EXE call so we can return the exit code (ERRORLEVEL).
     * This method takes care of escaping special characters in the command, which
     * is needed since the command is now passed as a string to the CMD.EXE shell.
     * This is done as follows:
     * Wrap arguments in double quotes if they contain any of:
     *   space *?,;^&amp;&lt;&gt;|" or % followed by a letter.
     * <br/> When testing from command prompt, these characters also need to be
     * prepended with a ^ character: ^&amp;&lt;&gt;| -- however, invoking cmd.exe from
     * Hudson does not seem to require this extra escaping so it is not added by
     * this method.
     * <br/> A " is prepended with another " character.  Note: Windows has issues
     * escaping some combinations of quotes and spaces.  Quotes should be avoided.
     * <br/> A % followed by a letter has that letter wrapped in double quotes,
     * to avoid possible variable expansion.  ie, %foo% becomes "%"f"oo%".
     * The second % does not need special handling because it is not followed
     * by a letter. <br/>
     * Example: "-Dfoo=*abc?def;ghi^jkl&amp;mno&lt;pqr&gt;stu|vwx""yz%"e"nd"
     * @return new ArgumentListBuilder that runs given command through cmd.exe /C
     * @since 1.386
     */
    public ArgumentListBuilder toWindowsCommand() {
        StringBuilder quotedArgs = new StringBuilder();
        boolean quoted, percent;
        for (String arg : args) {
            quoted = percent = false;
            if (arg.length() == 0) {
                // Fix: an empty argument used to vanish entirely from the
                // resulting command line; represent it explicitly as "".
                quotedArgs.append("\"\"").append(' ');
                continue;
            }
            for (int i = 0; i < arg.length(); i++) {
                char c = arg.charAt(i);
                if (!quoted && (c == ' ' || c == '*' || c == '?' || c == ',' || c == ';')) {
                    quoted = startQuoting(quotedArgs, arg, i);
                }
                else if (c == '^' || c == '&' || c == '<' || c == '>' || c == '|') {
                    if (!quoted) quoted = startQuoting(quotedArgs, arg, i);
                    // quotedArgs.append('^'); See note in javadoc above
                }
                else if (c == '"') {
                    if (!quoted) quoted = startQuoting(quotedArgs, arg, i);
                    quotedArgs.append('"');
                }
                else if (percent && ((c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z'))) {
                    if (!quoted) quoted = startQuoting(quotedArgs, arg, i);
                    quotedArgs.append('"').append(c);
                    c = '"';
                }
                percent = (c == '%');
                if (quoted) quotedArgs.append(c);
            }
            if (quoted) quotedArgs.append('"'); else quotedArgs.append(arg);

            quotedArgs.append(' ');
        }
        // (comment copied from old code in hudson.tasks.Ant)
        // on Windows, executing batch file can't return the correct error code,
        // so we need to wrap it into cmd.exe.
        // double %% is needed because we want ERRORLEVEL to be expanded after
        // batch file executed, not before. This alone shows how broken Windows is...
        quotedArgs.append("&& exit %%ERRORLEVEL%%");
        return new ArgumentListBuilder().add("cmd.exe", "/C").addQuoted(quotedArgs.toString());
    }

    /** Emits an opening quote plus the already-scanned prefix of {@code arg}; always returns true. */
    private static boolean startQuoting(StringBuilder buf, String arg, int atIndex) {
        buf.append('"').append(arg.substring(0, atIndex));
        return true;
    }

    /**
     * Returns true if there are any masked arguments.
     * @return true if there are any masked arguments; false otherwise
     */
    public boolean hasMaskedArguments() {
        return mask.length() > 0;
    }

    /**
     * Returns an array of booleans where the masked arguments are marked as true
     * @return an array of booleans.
     */
    public boolean[] toMaskArray() {
        boolean[] mask = new boolean[args.size()];
        for (int i = 0; i < mask.length; i++)
            mask[i] = this.mask.get(i);
        return mask;
    }

    /**
     * Add a masked argument
     * @param string the argument
     */
    public void addMasked(String string) {
        add(string, true);
    }

    private static final long serialVersionUID = 1L;
}
| |
/*
* CGateInterface - A library to allow interaction with Clipsal C-Gate.
*
* Copyright 2008, 2009, 2012, 2017 Dave Oxley <dave@daveoxley.co.uk>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.daveoxley.cbus;
import java.util.ArrayList;
import java.util.HashMap;
/**
*
* @author Dave Oxley <dave@daveoxley.co.uk>
*/
public final class Network extends CGateObject implements Comparable<Network>
{
private Project project;
private int net_id;
/**
 * Creates a Network wrapper for a C-Gate response line.  Private: instances
 * are created and cached via getOrCreateNetwork().
 */
private Network(CGateSession cgate_session, Project project, String cgate_response) throws CGateException
{
super(cgate_session);
this.project = project;
// Network id parsed from the response's "network" or "address" field.
this.net_id = getNetworkID(project, cgate_response);
// Per-network caches for child Application and Unit objects.
setupSubtreeCache("application");
setupSubtreeCache("unit");
}
/** Cache key for this network: its numeric network id. */
@Override
protected String getKey()
{
    return Integer.toString(net_id);
}
/**
 * Resolves a relative C-Gate address under this network.
 * "p/&lt;unit_id&gt;[/...]" addresses a physical unit; any other leading
 * segment is parsed as an application id.
 */
@Override
public CGateObject getCGateObject(String address) throws CGateException
{
    if (address.startsWith("//"))
        throw new IllegalArgumentException("Address must be a relative address. i.e. Not starting with //");

    // Split off the first path segment of the relative address.
    boolean return_next = false;
    int next_part_index = address.indexOf("/");
    if (next_part_index == -1)
    {
        next_part_index = address.length();
        return_next = true;
    }
    String next_part = address.substring(0, next_part_index);

    if (next_part.equals("p"))
    {
        if (return_next)
            throw new IllegalArgumentException("The address must not end with p");

        // BUG FIX: the unit-id terminator must be located in the full string.
        // The old code took indexOf("/") of a substring and then used that
        // substring-relative offset to slice the full address, producing an
        // empty or truncated unit id for addresses of the form "p/<id>/...".
        int unit_part_index = address.indexOf("/", next_part_index + 1);
        if (unit_part_index == -1)
        {
            unit_part_index = address.length();
            return_next = true;
        }
        next_part = address.substring(next_part_index + 1, unit_part_index);

        int unit_id = Integer.parseInt(next_part);
        Unit unit = getUnit(unit_id);
        if (unit == null)
            throw new IllegalArgumentException("No unit found: " + address);
        if (return_next)
            return unit;
        // Delegate the remainder *after the unit id* to the unit, mirroring
        // how the application branch below delegates its remainder.
        return unit.getCGateObject(address.substring(unit_part_index + 1));
    }
    else
    {
        int application_id = Integer.parseInt(next_part);
        Application application = getApplication(application_id);
        if (application == null)
            throw new IllegalArgumentException("No application found: " + address);
        if (return_next)
            return application;
        return application.getCGateObject(address.substring(next_part_index + 1));
    }
}
/** Returns the C-Gate address root for this network's project ("//PROJECT"). */
@Override
String getProjectAddress()
{
return "//" + getProjectName();
}
/**
 * Returns this network's address component as used in C-Gate responses.
 * NOTE(review): the {@code id} flag is ignored here — the numeric network id
 * is always returned; confirm against other CGateObject implementations.
 */
@Override
String getResponseAddress(boolean id)
{
return String.valueOf(getNetworkID());
}
/** Orders networks by owning project first, then by numeric network id. */
@Override
public int compareTo(Network o) {
    int byProject = project.compareTo(o.project);
    if (byProject != 0)
        return byProject;
    return Integer.compare(getNetworkID(), o.getNetworkID());
}
/**
 * Issue a <code>net list_all</code> to the C-Gate server and wrap every
 * response line in a Network object.
 *
 * @see <a href="http://www.clipsal.com/cis/downloads/Toolkit/CGateServerGuide_1_0.pdf">
 * <i>C-Gate Server Guide 4.3.63</i></a>
 * @param cgate_session The C-Gate session
 * @param cached_objects Return cached Network objects or rebuild list from C-Gate
 * @return ArrayList of Networks
 * @throws CGateException
 */
public static ArrayList<Network> listAll(CGateSession cgate_session, boolean cached_objects) throws CGateException
{
    Response resp = cgate_session.sendCommand("net list_all");

    // When a fresh list is requested, drop every project's network cache so
    // the loop below rebuilds the objects instead of serving cached ones.
    if (!cached_objects)
    {
        for (Project project : Project.dir(cgate_session, true))
            project.clearCache("network");
    }

    ArrayList<Network> networks = new ArrayList<Network>();
    for (String response_line : resp)
        networks.add(getOrCreateNetwork(cgate_session, response_line));
    return networks;
}
/**
 * Retrieve the Application object for the specified application id.
 * (The previous javadoc was a copy-paste of getUnit's and described units.)
 *
 * @param application_id The application to retrieve
 * @return The Application, or null if no such application exists
 * @throws CGateException
 */
public Application getApplication(int application_id) throws CGateException
{
    String key = String.valueOf(application_id);
    Application cached = (Application)getCachedObject("application", key);
    if (cached == null)
    {
        // Cache miss: repopulate the application cache from C-Gate, then retry.
        getApplications(false);
        cached = (Application)getCachedObject("application", key);
    }
    return cached;
}
/**
 * Retrieve the Unit object for the specified unit id.
 *
 * @param unit_id The unit to retrieve
 * @return The Unit, or null if no such unit exists
 * @throws CGateException
 */
public Unit getUnit(int unit_id) throws CGateException
{
    String key = String.valueOf(unit_id);
    Unit cached = (Unit)getCachedObject("unit", key);
    if (cached == null)
    {
        // Cache miss: repopulate the unit cache from C-Gate, then retry.
        getUnits(false);
        cached = (Unit)getCachedObject("unit", key);
    }
    return cached;
}
/**
 * Returns the cached Network for the given response line, creating and
 * caching a new one on first sight.
 */
private static Network getOrCreateNetwork(CGateSession cgate_session, String cgate_response) throws CGateException
{
    HashMap<String,String> resp_map = responseToMap(cgate_response);
    // Refresh the project directory before resolving the owning project.
    Project.dir(cgate_session, true);
    Project owner = Project.getProject(cgate_session, resp_map.get("project"));

    String key = String.valueOf(getNetworkID(owner, cgate_response));
    Network existing = (Network)owner.getCachedObject("network", key);
    if (existing != null)
        return existing;

    Network created = new Network(cgate_session, owner, cgate_response);
    owner.cacheObject("network", created);
    return created;
}
/**
 * Extracts the network id from a C-Gate response line: preferably from the
 * "network" field, otherwise from the network component of the "address"
 * field ("//PROJECT/NET_ID").  Throws CGateException if neither yields a
 * non-negative id.
 */
static int getNetworkID(Project project, String cgate_response) throws CGateException
{
    HashMap<String,String> resp_map = responseToMap(cgate_response);
    int net_id = -1;

    String network_value = resp_map.get("network");
    if (network_value != null)
    {
        net_id = Integer.parseInt(network_value.trim());
    }
    else
    {
        String address_value = resp_map.get("address");
        if (address_value != null)
        {
            // Skip the leading "//<project>/" prefix: 2 slashes + name + 1 slash.
            String net_str = address_value.substring(project.getName().length() + 3);
            net_id = Integer.parseInt(net_str.trim());
        }
    }

    if (net_id < 0)
        throw new CGateException();
    return net_id;
}
/**
 * Returns the numeric id of this network.
 *
 * @return the network id
 */
public int getNetworkID()
{
return net_id;
}
/** Returns the Project this network belongs to. */
public Project getProject()
{
return project;
}
/** Returns the name of the Project this network belongs to. */
public String getProjectName()
{
return project.getName();
}
/**
 * Fetches this network's tag name via a <code>dbget .../TagName</code>
 * command and extracts it from the first response line.
 */
public String getName() throws CGateException
{
    String address = getResponseAddress(true) + "/TagName";
    String command = "dbget " + getProjectAddress() + "/" + address;
    String first_response = getCGateSession().sendCommand(command).toArray().get(0);
    return responseToMap(first_response, true).get(address);
}
/** Returns this network's "Type" parameter via a <code>show ... Type</code> command. */
public String getType() throws CGateException
{
    String command = "show " + getProjectAddress() + "/" + getResponseAddress(true) + " Type";
    String first_response = getCGateSession().sendCommand(command).toArray().get(0);
    return responseToMap(first_response).get("Type");
}
/** Returns this network's "InterfaceAddress" parameter via a <code>show</code> command. */
public String getInterfaceAddress() throws CGateException
{
    String command = "show " + getProjectAddress() + "/" + getResponseAddress(true) + " InterfaceAddress";
    String first_response = getCGateSession().sendCommand(command).toArray().get(0);
    return responseToMap(first_response).get("InterfaceAddress");
}
/** Returns this network's "State" parameter via a <code>show ... State</code> command. */
public String getState() throws CGateException
{
    String command = "show " + getProjectAddress() + "/" + getResponseAddress(true) + " State";
    String first_response = getCGateSession().sendCommand(command).toArray().get(0);
    return responseToMap(first_response).get("State");
}
/**
 * Extracts the path segment that follows "&lt;net_id&gt;/" in a response
 * line: "p" for physical units, otherwise an application id.
 */
static String getApplicationType(Network network, String response)
{
    String network_prefix = network.getResponseAddress(true) + "/";
    int segment_start = response.indexOf(network_prefix) + network_prefix.length();
    int segment_end = response.indexOf("/", segment_start);
    return response.substring(segment_start, segment_end);
}
/**
 * Retrieves all units on this network.  The unit count is read from the
 * network's "Unit[n]" db entry and each unit is then fetched individually.
 *
 * @param cached_objects Return cached Unit objects or rebuild list from C-Gate
 * @return ArrayList of Units
 * @throws CGateException
 */
public ArrayList<Unit> getUnits(boolean cached_objects) throws CGateException
{
    if (!cached_objects)
        clearCache("unit");

    // Scan the dbget output for "<net_id>/Unit[<count>]" to learn the count.
    Response resp = dbget(null);
    int number_of_units = -1;
    for (String response : resp) {
        String marker = "" + getResponseAddress(true) + "/Unit[";
        int marker_index = response.indexOf(marker);
        if (marker_index > -1) {
            int close_index = response.indexOf("]", marker_index + marker.length());
            number_of_units = Integer.parseInt(response.substring(marker_index + marker.length(), close_index));
            break;
        }
    }

    ArrayList<Unit> units = new ArrayList<Unit>();
    for (int i = 1; i <= number_of_units; i++) {
        String unit_address = dbget("Unit[" + i + "]/Address").toArray().get(0);
        Unit unit = Unit.getOrCreateUnit(getCGateSession(), this, i, unit_address);
        if (unit != null)
            units.add(unit);
    }

    // Populate unit/group detail from the full network tree.
    tree();
    return units;
}
/**
 * Issue a <code>tree //PROJECT/NET_ID</code> to the C-Gate server and create
 * db-backed Unit and Group objects from each child line of the response.
 *
 * @see <a href="http://www.clipsal.com/cis/downloads/Toolkit/CGateServerGuide_1_0.pdf">
 * <i>C-Gate Server Guide 4.3.124</i></a>
 * @throws CGateException
 */
void tree() throws CGateException
{
    CGateSession cgate_session = getCGateSession();
    getApplications(true);

    Response resp = cgate_session.sendCommand("tree " + getAddress());
    for (String response : resp)
    {
        // Only lines that mention a child of this network are of interest.
        if (response.indexOf(getAddress() + "/") == -1)
            continue;
        if (getApplicationType(this, response).equals("p"))
            Unit.createDBUnit(cgate_session, this, response);
        else
            Group.createDBGroup(cgate_session, this, response);
    }
}
/**
 * Issue a <code>net open //PROJECT/NET_ID</code> to the C-Gate server and
 * require a 200 (OK) response.
 *
 * @see <a href="http://www.clipsal.com/cis/downloads/Toolkit/CGateServerGuide_1_0.pdf">
 *      <i>C-Gate Server Guide 4.3.65</i></a>
 * @throws CGateException
 */
public void open() throws CGateException
{
    String command = "net open " + getProjectAddress() + "/" + getResponseAddress(true);
    getCGateSession().sendCommand(command).handle200();
}
// Issue a "dbget" for this network, optionally scoped to a single parameter
// (e.g. "Unit[3]/Address"); a null param_name queries the network itself.
Response dbget(String param_name) throws CGateException
{
    String suffix = (param_name == null) ? "" : "/" + param_name;
    return getCGateSession().sendCommand("dbget " + getProjectAddress() + "/" + getResponseAddress(true) + suffix);
}
/**
 * Get all Application objects for this Network.
 *
 * @param cached_objects Return cached Application objects, or rebuild the
 *                       list from C-Gate when <code>false</code>
 * @return ArrayList of Applications
 * @throws CGateException
 */
public ArrayList<Application> getApplications(boolean cached_objects) throws CGateException
{
    CGateSession session = getCGateSession();
    Response resp = dbget(null);
    if (!cached_objects)
        clearCache("application");

    // Scan the dbget output for "<network>/Application[N]" to learn how many
    // applications the project database reports for this network.
    String marker = getResponseAddress(true) + "/Application[";
    int application_count = -1;
    for (String response : resp)
    {
        int start = response.indexOf(marker);
        if (start > -1)
        {
            int end = response.indexOf("]", start + marker.length());
            application_count = Integer.parseInt(response.substring(start + marker.length(), end));
            break;
        }
    }

    // Resolve each application's address and materialise (or fetch cached) objects.
    ArrayList<Application> applications = new ArrayList<Application>();
    for (int app_index = 1; app_index <= application_count; app_index++)
    {
        ArrayList<String> addresses = dbget("Application[" + app_index + "]/Address").toArray();
        Application application = Application.getOrCreateApplication(session, this, app_index, addresses.get(0));
        if (application != null)
            applications.add(application);
    }
    return applications;
}
}
| |
package lodVader.threads;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicInteger;
import lodVader.bloomfilters.BloomFilterI;
import lodVader.bloomfilters.models.DatasetLinksContainer;
import lodVader.configuration.LODVaderProperties;
import lodVader.enumerators.TuplePart;
import lodVader.linksets.DistributionBloomFilterContainer;
import lodVader.mongodb.collections.DistributionDB;
import lodVader.mongodb.collections.gridFS.SuperBucket;
public class LinksetDataThread extends Thread {

	// Orientation of the link check between a source distribution and the
	// bloom filters of a target distribution (diagram by the original author):
	//
	// sourceColumnIsSubject = true
	//
	// Target Source Target
	// BF dist. BF
	// ____ __________ ____
	// | o| <- | s| p| o| |s |
	// (the subject column of the source is checked against the target's
	// object filter)
	//
	// sourceColumnIsSubject = false
	//
	// Target Source Target
	// BF dist. BF
	// ____ __________ ____
	// | o| | s| p| o| -> |s |
	// (the object column of the source is checked against the target's
	// subject filter)

	// Cap on distinct resources loaded back from disk, to bound memory usage.
	private static final int MAX_DISTINCT_LINKS = 10000000;

	// Title of the target distribution (used for reporting).
	public String targetDistributionTitle;
	public int sourceDistributionID = 0;
	public int sourceDatasetID = 0;
	public int targetDistributionID = 0;
	public int targetDatasetID = 0;

	// 0 for filter not loaded, 1 for loading and 2 for loaded
	public AtomicInteger filterLoaded = new AtomicInteger(0);

	// Lazily-built resource -> occurrence-count maps; loaded from the temp
	// files the first time getAllValidLinks()/getAllInvalidLinks() is called.
	private HashMap<String, Integer> validLinks = null;
	private HashMap<String, Integer> invalidLinks = null;

	// Writers for the temp files that spool links to disk while the linkset
	// is being evaluated.
	public BufferedWriter validLinksWriter;
	public BufferedWriter invalidLinksWriter;

	public AtomicInteger numberOfValidLinks = new AtomicInteger(0);
	public AtomicInteger numberOfInvalidLinks = new AtomicInteger(0);

	// Bloom filter buckets of the target distribution to compare against.
	public TreeMap<String, ? extends SuperBucket> distributionFilters = null;

	// Namespace filter of the target distribution.
	public BloomFilterI targetNSSet;

	// Which tuple column of the source is being checked.
	public TuplePart tuplePart;

	// flag to execute or not this model in a thread
	public boolean active = false;

	DatasetLinksContainer datasetLinkContainer;

	/**
	 * Prepares the temp-file writers and selects the target filters: when
	 * checking SUBJECTs the target's object buckets/namespaces are used, when
	 * checking OBJECTs the target's subject buckets/namespaces are used.
	 */
	public LinksetDataThread(DistributionDB sourceDistribution, DistributionDB targetDistribution,
			DistributionBloomFilterContainer distributionFilter, TuplePart tuplePart) {
		this.tuplePart = tuplePart;
		this.sourceDatasetID = sourceDistribution.getTopDatasetID();
		this.sourceDistributionID = sourceDistribution.getLODVaderID();
		this.targetDistributionID = targetDistribution.getLODVaderID();
		this.targetDatasetID = targetDistribution.getTopDatasetID();
		this.targetDistributionTitle = targetDistribution.getTitle();
		this.datasetLinkContainer = new DatasetLinksContainer();
		try {
			validLinksWriter = new BufferedWriter(new FileWriter(tempFileName("valid")));
			invalidLinksWriter = new BufferedWriter(new FileWriter(tempFileName("invalid")));
		} catch (IOException e) {
			e.printStackTrace();
		}
		if (tuplePart.equals(TuplePart.SUBJECT)) {
			this.distributionFilters = distributionFilter.getObjectBuckets();
			this.targetNSSet = distributionFilter.getFilterObjectsNS();
		} else if (tuplePart.equals(TuplePart.OBJECT)) {
			this.distributionFilters = distributionFilter.getSubjectBuckets();
			this.targetNSSet = distributionFilter.getFilterSubjectsNS();
		}
	}

	// Path of the temp file used to spool links of the given kind
	// ("valid"/"invalid") for this source/target distribution pair.
	private String tempFileName(String prefix) {
		return LODVaderProperties.TMP_FOLDER + prefix + "_" + sourceDistributionID + "_" + targetDistributionID;
	}

	/** Appends a resource to the valid-links temp file. */
	public void addValidLink(String resource) {
		try {
			validLinksWriter.write(resource + "\n");
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/** Appends a resource to the invalid-links temp file. */
	public void addInvalidLink(String resource) {
		try {
			invalidLinksWriter.write(resource + "\n");
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	public void setValidLinks(HashMap<String, Integer> l) {
		validLinks = l;
	}

	public void setInvalidLinks(HashMap<String, Integer> l) {
		invalidLinks = l;
	}

	/**
	 * Closes the valid-links writer and returns the resource ->
	 * occurrence-count map, loading it from the temp file on first use.
	 */
	public HashMap<String, Integer> getAllValidLinks() {
		try {
			validLinksWriter.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
		if (validLinks == null)
			validLinks = getLinks(tempFileName("valid"));
		return validLinks;
	}

	/** Same as {@link #getAllValidLinks()} but for the invalid links. */
	public HashMap<String, Integer> getAllInvalidLinks() {
		try {
			invalidLinksWriter.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
		if (invalidLinks == null)
			invalidLinks = getLinks(tempFileName("invalid"));
		return invalidLinks;
	}

	/**
	 * Reads a temp file of one resource per line into a resource ->
	 * occurrence-count map and deletes the file afterwards. A missing file is
	 * silently treated as "no links" (deliberate best-effort, as before).
	 * Loading stops once MAX_DISTINCT_LINKS distinct resources have been seen,
	 * to bound memory usage.
	 */
	public HashMap<String, Integer> getLinks(String fileName) {
		HashMap<String, Integer> links = new HashMap<String, Integer>();
		try {
			// try-with-resources so the reader is closed even when readLine
			// fails (the original code leaked the stream on read errors).
			try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
				String resource;
				while ((resource = br.readLine()) != null) {
					Integer n = links.get(resource);
					links.put(resource, n == null ? 1 : n + 1);
					if (links.size() > MAX_DISTINCT_LINKS)
						break;
				}
			}
			// Only reached after a successful read; the spool file is consumed.
			new File(fileName).delete();
		} catch (FileNotFoundException e) {
			// No temp file means nothing was spooled; return the empty map.
		} catch (Exception e) {
			e.printStackTrace();
		}
		return links;
	}

	/** Closes both temp-file writers. */
	public void closeFiles() {
		try {
			validLinksWriter.close();
			invalidLinksWriter.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
| |
/*
* Copyright (C) 2014-2022 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.html.js;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.NotThreadSafe;
import com.helger.commons.annotation.ReturnsMutableCopy;
import com.helger.commons.annotation.ReturnsMutableObject;
import com.helger.commons.collection.CollectionHelper;
import com.helger.commons.collection.impl.CommonsArrayList;
import com.helger.commons.collection.impl.ICommonsList;
import com.helger.commons.hashcode.HashCodeGenerator;
import com.helger.commons.lang.ICloneable;
import com.helger.commons.lang.IHasSize;
import com.helger.commons.string.ToStringGenerator;
/**
* A JSCode provider that encapsulates a list of {@link IHasJSCode} elements and
* itself implements {@link IHasJSCode}.
*
* @author Philip Helger
*/
@NotThreadSafe
public class CollectingJSCodeProvider implements IHasJSCodeWithSettings, IHasSize, ICloneable <CollectingJSCodeProvider>
{
private final ICommonsList <IHasJSCode> m_aList = new CommonsArrayList <> ();
public CollectingJSCodeProvider ()
{}
public CollectingJSCodeProvider (@Nullable final IHasJSCode... aProviders)
{
if (aProviders != null)
for (final IHasJSCode aProvider : aProviders)
if (aProvider != null)
append (aProvider);
}
public CollectingJSCodeProvider (@Nullable final Iterable <? extends IHasJSCode> aProviders)
{
if (aProviders != null)
for (final IHasJSCode aProvider : aProviders)
if (aProvider != null)
append (aProvider);
}
/**
* @return The underlying modifiable list. Never <code>null</code> but maybe
* empty.
*/
@Nonnull
@ReturnsMutableObject
public ICommonsList <IHasJSCode> directAll ()
{
return m_aList;
}
/**
* @return A copy of the list with all members. Never <code>null</code> but
* maybe empty.
*/
@Nonnull
@ReturnsMutableCopy
public ICommonsList <IHasJSCode> getAll ()
{
return m_aList.getClone ();
}
/**
* Add JS code at the specified index.
*
* @param nIndex
* The index where the element should be added. Should be ≥ 0.
* @param aProvider
* The JS code provider to be added. May be <code>null</code>.
* @return this for chaining
*/
@Nonnull
public CollectingJSCodeProvider addAt (@Nonnegative final int nIndex, @Nullable final IHasJSCode aProvider)
{
if (aProvider != null)
m_aList.add (nIndex, aProvider);
return this;
}
/**
* Add JS code at the specified index but unwrapping any
* {@link CollectingJSCodeProvider} instances.
*
* @param nIndex
* The index where the element should be added. Should be ≥ 0.
* @param aProvider
* The JS code provider to be added. May be <code>null</code>.
* @return this for chaining
*/
@Nonnull
public CollectingJSCodeProvider addFlattenedAt (@Nonnegative final int nIndex, @Nullable final IHasJSCode aProvider)
{
if (aProvider != null)
if (aProvider instanceof CollectingJSCodeProvider)
m_aList.addAll (nIndex, ((CollectingJSCodeProvider) aProvider).m_aList);
else
m_aList.add (nIndex, aProvider);
return this;
}
@Nonnull
public CollectingJSCodeProvider append (@Nullable final IHasJSCode aProvider)
{
if (aProvider != null)
m_aList.add (aProvider);
return this;
}
@Nonnull
public CollectingJSCodeProvider appendFlattened (@Nullable final IHasJSCode aProvider)
{
if (aProvider != null)
if (aProvider instanceof CollectingJSCodeProvider)
m_aList.addAll (((CollectingJSCodeProvider) aProvider).m_aList);
else
m_aList.add (aProvider);
return this;
}
@Nonnull
public CollectingJSCodeProvider prepend (@Nullable final IHasJSCode aProvider)
{
return addAt (0, aProvider);
}
@Nonnull
public CollectingJSCodeProvider prependFlattened (@Nullable final IHasJSCode aProvider)
{
return addFlattenedAt (0, aProvider);
}
/**
* Remove the entry at the specified index.
*
* @param nIndex
* the index to be removed. Should be ≥ 0.
* @return this for chaining
*/
@Nonnull
public CollectingJSCodeProvider removeAt (@Nonnegative final int nIndex)
{
m_aList.removeAtIndex (nIndex);
return this;
}
public void reset ()
{
m_aList.clear ();
}
@Nonnegative
public int size ()
{
return m_aList.size ();
}
public boolean isEmpty ()
{
return m_aList.isEmpty ();
}
@Nonnull
public String getJSCode (@Nullable final IJSWriterSettings aSettings)
{
final StringBuilder aSB = new StringBuilder ();
for (final IHasJSCode aJSCodeProvider : m_aList)
{
String sJSCode;
if (aJSCodeProvider instanceof IHasJSCodeWithSettings)
sJSCode = ((IHasJSCodeWithSettings) aJSCodeProvider).getJSCode (aSettings);
else
sJSCode = aJSCodeProvider.getJSCode ();
aSB.append (sJSCode);
}
return aSB.toString ();
}
@Nonnull
public CollectingJSCodeProvider getClone ()
{
return new CollectingJSCodeProvider (m_aList);
}
@Override
public boolean equals (final Object o)
{
if (o == this)
return true;
if (o == null || !getClass ().equals (o.getClass ()))
return false;
final CollectingJSCodeProvider rhs = (CollectingJSCodeProvider) o;
return m_aList.equals (rhs.m_aList);
}
@Override
public int hashCode ()
{
return new HashCodeGenerator (this).append (m_aList).getHashCode ();
}
@Override
public String toString ()
{
return new ToStringGenerator (this).appendIf ("list", m_aList, CollectionHelper::isNotEmpty).getToString ();
}
}
| |
package com.biggestnerd.civradar;
import java.awt.Color;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.client.Minecraft;
import net.minecraft.client.entity.EntityOtherPlayerMP;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.gui.Gui;
import net.minecraft.client.gui.ScaledResolution;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.renderer.WorldRenderer;
import net.minecraft.client.renderer.entity.RenderManager;
import net.minecraft.entity.Entity;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.item.EntityMinecart;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.util.BlockPos;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.chunk.Chunk;
import net.minecraftforge.client.event.RenderGameOverlayEvent;
import net.minecraftforge.client.event.RenderWorldLastEvent;
import net.minecraftforge.event.world.ChunkEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.common.gameevent.TickEvent;
import net.minecraftforge.fml.common.gameevent.TickEvent.ClientTickEvent;
import org.lwjgl.opengl.GL11;
import com.biggestnerd.civradar.Config.NameLocation;
/**
 * Draws the radar overlay, entity icons and world waypoints for the CivRadar
 * mod, and plays a "ping" sound when a new player enters render range.
 * All rendering uses OpenGL 1.1 immediate mode via LWJGL's {@link GL11};
 * numeric arguments are raw GL enum values (annotated inline below).
 */
public class RenderHandler extends Gui {
	// Mod configuration: colors, scale, opacity and per-entity render toggles.
	private Config config = CivRadar.instance.getConfig();
	private Minecraft mc = Minecraft.getMinecraft();
	private Color radarColor;
	// Counts down from 63 each client tick; drives the expanding "ping" ring.
	private double pingDelay = 63.0D;
	// Snapshot of mc.theWorld.loadedEntityList taken each tick.
	// NOTE(review): raw List type; elements are Entity instances.
	private List entityList;
	private float radarScale;
	// Names of the players that were in range during the previous tick.
	ArrayList<String> inRangePlayers;

	public RenderHandler() {
		inRangePlayers = new ArrayList<String>();
	}

	/**
	 * Overlay hook: draws the radar once per frame, keyed off the CROSSHAIRS
	 * element so it renders exactly once per overlay pass.
	 */
	@SubscribeEvent
	public void renderRadar(RenderGameOverlayEvent event) {
		if(event.type != RenderGameOverlayEvent.ElementType.CROSSHAIRS)
			return;
		if(config.isEnabled()) {
			drawRadar();
		}
	}

	/**
	 * Client tick hook: advances the ping animation, snapshots the entity
	 * list, and plays a note sound once for every player that newly appeared
	 * since the previous tick.
	 */
	@SubscribeEvent
	public void onTick(ClientTickEvent event) {
		if(event.phase == TickEvent.Phase.START && mc.theWorld != null) {
			// Ring animation runs from 63 down past 0 to -10, then restarts.
			if(pingDelay <= -10.0D) {
				pingDelay = 63.0D;
			}
			pingDelay -= 1.0D;
			entityList = mc.theWorld.loadedEntityList;
			ArrayList<String> newInRangePlayers = new ArrayList();
			for(Object o : entityList) {
				if(o instanceof EntityOtherPlayerMP) {
					newInRangePlayers.add(((EntityOtherPlayerMP)o).getName());
				}
			}
			// temp keeps the full current roster; removeAll leaves only the
			// players NOT present last tick, i.e. the new arrivals.
			ArrayList<String> temp = (ArrayList)newInRangePlayers.clone();
			newInRangePlayers.removeAll(inRangePlayers);
			// One ping per newly-arrived player (loop variable intentionally
			// unused beyond counting iterations).
			for(String name : newInRangePlayers) {
				mc.theWorld.playSound(mc.thePlayer.posX, mc.thePlayer.posY, mc.thePlayer.posZ, "minecraft:note.pling", config.getPingVolume(), 1.0F, false);
			}
			inRangePlayers = temp;
		}
	}

	/**
	 * World render hook: draws every enabled waypoint that belongs to the
	 * player's current dimension.
	 */
	@SubscribeEvent
	public void renderWaypoints(RenderWorldLastEvent event) {
		if(CivRadar.instance.getWaypointSave() == null) {
			return;
		}
		if(config.isRenderWaypoints()) {
			for(Waypoint point : CivRadar.instance.getWaypointSave().getWaypoints()) {
				if(point.getDimension() == mc.theWorld.provider.getDimensionId() && point.isEnabled()) {
					renderWaypoint(point, event);
				}
			}
		}
	}

	/**
	 * Draws the circular radar: background disc, outline, expanding ping
	 * ring, cross/diagonal grid lines, entity icons, and the player's
	 * heading triangle. The radar rotates opposite to the player's yaw so
	 * that "up" is always the view direction.
	 */
	private void drawRadar() {
		radarColor = config.getRadarColor();
		radarScale = config.getRadarScale();
		ScaledResolution res = new ScaledResolution(mc, mc.displayWidth, mc.displayHeight);
		int width = res.getScaledWidth();
		GL11.glPushMatrix();
		// Anchor in the top-right corner, offset by the configured position.
		GL11.glTranslatef(width - (65 * radarScale) + (config.getRadarX()), (65 * radarScale) + (config.getRadarY()), 0.0F);
		GL11.glScalef(1.0F, 1.0F, 1.0F);
		if(config.isRenderCoordinates()) {
			String coords = "(" + (int) mc.thePlayer.posX + "," + (int) mc.thePlayer.posY + "," + (int) mc.thePlayer.posZ + ")";
			mc.fontRendererObj.drawStringWithShadow(coords, -(mc.fontRendererObj.getStringWidth(coords) / 2), 65 * radarScale, 14737632);
		}
		GL11.glScalef(radarScale, radarScale, radarScale);
		GL11.glRotatef(-mc.thePlayer.rotationYaw, 0.0F, 0.0F, 1.0F);
		drawCircle(0, 0, 63.0D, radarColor, true);
		GL11.glLineWidth(2.0F);
		drawCircle(0, 0, 63.0D, radarColor, false);
		GL11.glLineWidth(1.0F);
		// Expanding ring: radius grows as pingDelay counts down.
		if(pingDelay > 0) {
			drawCircle(0, 0, 63.0D - pingDelay, radarColor, false);
		}
		GL11.glLineWidth(2.0F);
		GL11.glDisable(GL11.GL_TEXTURE_2D);
		GL11.glDisable(GL11.GL_LIGHTING);
		// glBegin(1) == GL_LINES: 4 line segments (N-S, E-W, both diagonals).
		GL11.glBegin(1);
		GL11.glColor4f(radarColor.getRed() / 255.0F, radarColor.getGreen() / 255.0F, radarColor.getBlue() / 255.0F, config.getRadarOpacity() + 0.5F);
		GL11.glVertex2d(0.0D, -63.0D);
		GL11.glVertex2d(0.0D, 63.0D);
		GL11.glVertex2d(-63.0D, 0.0D);
		GL11.glVertex2d(63.0D, 0.0D);
		GL11.glVertex2d(-44.5D, -44.5D);
		GL11.glVertex2d(44.5D, 44.5D);
		GL11.glVertex2d(-44.5D, 44.5D);
		GL11.glVertex2d(44.5D, -44.5D);
		GL11.glEnd();
		GL11.glDisable(GL11.GL_BLEND);
		GL11.glEnable(GL11.GL_TEXTURE_2D);
		drawRadarIcons();
		// Undo the yaw rotation so the heading triangle stays screen-aligned.
		GL11.glRotatef(mc.thePlayer.rotationYaw, 0.0F, 0.0F, 1.0F);
		drawTriangle(0, 0, Color.WHITE);
		GL11.glScalef(2.0F, 2.0F, 2.0F);
		GL11.glPopMatrix();
	}

	/**
	 * Draws a circle of the given radius, either filled (triangle fan) or as
	 * an outline (line loop). Raw GL enums: 3042=GL_BLEND,
	 * 2848=GL_LINE_SMOOTH, 770/771=GL_SRC_ALPHA/GL_ONE_MINUS_SRC_ALPHA,
	 * 6=GL_TRIANGLE_FAN, 2=GL_LINE_LOOP.
	 */
	private void drawCircle(int x, int y, double radius, Color c, boolean filled) {
		GL11.glEnable(3042);
		GL11.glDisable(GL11.GL_TEXTURE_2D);
		GL11.glEnable(2848);
		GL11.glBlendFunc(770, 771);
		GL11.glColor4f(c.getRed() / 255.0F, c.getGreen() / 255.0F, c.getBlue() / 255.0F, filled ? config.getRadarOpacity() : config.getRadarOpacity() + 0.5F);
		GL11.glBegin(filled ? 6 : 2);
		for (int i = 0; i <= 360; i++) {
			double x2 = Math.sin(i * Math.PI / 180.0D) * radius;
			double y2 = Math.cos(i * Math.PI / 180.0D) * radius;
			GL11.glVertex2d(x + x2, y + y2);
		}
		GL11.glEnd();
		GL11.glDisable(2848);
		GL11.glEnable(GL11.GL_TEXTURE_2D);
		GL11.glDisable(3042);
	}

	/**
	 * Draws the player's heading marker as a small filled triangle.
	 * Raw GL enums: 3042=GL_BLEND, 3553=GL_TEXTURE_2D, 2848=GL_LINE_SMOOTH,
	 * 4=GL_TRIANGLES.
	 */
	private void drawTriangle(int x, int y, Color c) {
		GL11.glRotatef(180.0F, 0.0F, 0.0F, 1.0F);
		GL11.glColor4f(c.getRed() / 255.0F, c.getGreen() / 255.0F, c.getBlue() / 255.0F, 1.0F);
		GL11.glEnable(3042);
		GL11.glDisable(3553);
		GL11.glEnable(2848);
		GL11.glBlendFunc(770, 771);
		GL11.glBegin(4);
		GL11.glVertex2d(x, y + 3);
		GL11.glVertex2d(x + 3, y - 3);
		GL11.glVertex2d(x - 3, y - 3);
		GL11.glEnd();
		GL11.glDisable(2848);
		GL11.glEnable(3553);
		GL11.glDisable(3042);
		GL11.glRotatef(-180.0F, 0.0F, 0.0F, 1.0F);
	}

	/**
	 * Draws one icon per tracked entity at its position relative to the
	 * player: items as their item texture, other players as a head icon,
	 * minecarts as the minecart item, everything else via the configured
	 * per-mob resource.
	 */
	private void drawRadarIcons() {
		if(entityList == null) {
			return;
		}
		for(Object o : entityList) {
			Entity e = (Entity) o;
			int playerPosX = (int) mc.thePlayer.posX;
			int playerPosZ = (int) mc.thePlayer.posZ;
			int entityPosX = (int) e.posX;
			int entityPosZ = (int) e.posZ;
			// Offset of the entity from the player, in blocks (radar units).
			int displayPosX = playerPosX - entityPosX;
			int displayPosZ = playerPosZ - entityPosZ;
			if(e != mc.thePlayer) {
				if(e instanceof EntityItem) {
					EntityItem item = (EntityItem) e;
					if(config.isRender(EntityItem.class)) {
						renderItemIcon(displayPosX, displayPosZ, item.getEntityItem());
					}
				} else if(e instanceof EntityOtherPlayerMP) {
					if(config.isRender(EntityPlayer.class)) {
						EntityOtherPlayerMP eop = (EntityOtherPlayerMP) e;
						try {
							renderPlayerHeadIcon(displayPosX, displayPosZ, eop);
						} catch (Exception ex) {
							ex.printStackTrace();
						}
					}
				} else if(e instanceof EntityMinecart) {
					if(config.isRender(EntityMinecart.class)) {
						ItemStack cart = new ItemStack(Items.minecart);
						renderItemIcon(displayPosX, displayPosZ, cart);
					}
				} else if(config.isRender(o.getClass())) {
					renderIcon(displayPosX, displayPosZ, config.getMob(o.getClass()).getResource());
				}
			}
		}
	}

	/**
	 * Renders an item stack icon at the given radar offset, counter-rotated
	 * by the player's yaw so the icon stays upright. 2896=GL_LIGHTING.
	 */
	private void renderItemIcon(int x, int y, ItemStack item) {
		GL11.glPushMatrix();
		GL11.glScalef(0.5F, 0.5F, 0.5F);
		GL11.glTranslatef(x +1, y +1, 0.0F);
		GL11.glColor4f(1.0F, 1.0F, 1.0F, config.getRadarOpacity() + 0.5F);
		GL11.glRotatef(mc.thePlayer.rotationYaw, 0.0F, 0.0F, 1.0F);
		mc.getRenderItem().renderItemIntoGUI(item, -8, -8);
		GL11.glTranslatef(-x -1, -y -1, 0.0F);
		GL11.glScalef(2.0F, 2.0F, 2.0F);
		GL11.glDisable(2896);
		GL11.glPopMatrix();
	}

	/**
	 * Renders the player-head icon (and optionally the player's name with
	 * distance/altitude) at the given radar offset. The icon is
	 * counter-rotated by the player's yaw so it stays upright.
	 * 3042=GL_BLEND, 2896=GL_LIGHTING.
	 */
	private void renderPlayerHeadIcon(int x, int y, EntityOtherPlayerMP player) throws Exception {
		GL11.glColor4f(1.0F, 1.0F, 1.0F, config.getRadarOpacity() + 0.5F);
		GL11.glEnable(3042);
		GL11.glPushMatrix();
		GL11.glScalef(0.5F, 0.5F, 0.5F);
		GL11.glTranslatef(x + 1, y + 1, 0.0F);
		GL11.glRotatef(mc.thePlayer.rotationYaw, 0.0F, 0.0F, 1.0F);
		mc.getTextureManager().bindTexture(new ResourceLocation("civRadar/icons/player.png"));
		drawModalRectWithCustomSizedTexture(-8, -8, 0, 0, 16, 16, 16, 16);
		GL11.glTranslatef(-x -1, -y -1, 0.0F);
		GL11.glScalef(2.0F, 2.0F, 2.0F);
		GL11.glDisable(2896);
		GL11.glDisable(3042);
		GL11.glPopMatrix();
		if(config.isPlayerNames()) {
			GL11.glPushMatrix();
			GL11.glScalef(0.5F, 0.5F, 0.5F);
			GL11.glTranslatef(x, y, 0.0F);
			GL11.glRotatef(mc.thePlayer.rotationYaw, 0.0F, 0.0F, 1.0F);
			GL11.glTranslatef(-x - 8, -y, 0.0F);
			String playerName = player.getName();
			if(config.isExtraPlayerInfo()) {
				playerName += " (" + (int) mc.thePlayer.getDistanceToEntity(player) + "m)(Y" + (int) player.posY + ")";
			}
			int yOffset = config.getNameLocation() == NameLocation.below ? 10 : -10;
			drawCenteredString(mc.fontRendererObj, playerName, x + 8, y + yOffset, Color.WHITE.getRGB());
			GL11.glScalef(2.0F, 2.0F, 2.0F);
			GL11.glPopMatrix();
		}
	}

	/**
	 * Renders an arbitrary 16x16 texture icon at the given radar offset,
	 * counter-rotated by the player's yaw. 3042=GL_BLEND, 2896=GL_LIGHTING.
	 */
	private void renderIcon(int x, int y, ResourceLocation resource) {
		mc.getTextureManager().bindTexture(resource);
		GL11.glColor4f(1.0F, 1.0F, 1.0F, config.getRadarOpacity() + 0.5F);
		GL11.glEnable(3042);
		GL11.glPushMatrix();
		GL11.glScalef(0.5F, 0.5F, 0.5F);
		GL11.glTranslatef(x + 1, y + 1, 0.0F);
		GL11.glRotatef(mc.thePlayer.rotationYaw, 0.0F, 0.0F, 1.0F);
		drawModalRectWithCustomSizedTexture(-8, -8, 0, 0, 16, 16, 16, 16);
		GL11.glTranslatef(-x -1, -y -1, 0.0F);
		GL11.glScalef(2.0F, 2.0F, 2.0F);
		GL11.glDisable(2896);
		GL11.glDisable(3042);
		GL11.glPopMatrix();
	}

	/**
	 * Renders one waypoint as a billboarded label (colored background quad
	 * plus name and distance text) facing the camera, scaled up with
	 * distance so it remains readable. Skipped when beyond the configured
	 * maximum waypoint distance (negative max means unlimited).
	 */
	private void renderWaypoint(Waypoint point, RenderWorldLastEvent event) {
		String name = point.getName();
		Color c = point.getColor();
		float partialTickTime = event.partialTicks;
		double distance = point.getDistance(mc);
		if(distance <= config.getMaxWaypointDistance() || config.getMaxWaypointDistance() < 0) {
			FontRenderer fr = mc.fontRendererObj;
			Tessellator tess = Tessellator.getInstance();
			WorldRenderer wr = tess.getWorldRenderer();
			RenderManager rm = mc.getRenderManager();
			// Interpolated player position for smooth rendering between ticks.
			float playerX = (float) (mc.thePlayer.lastTickPosX + (mc.thePlayer.posX - mc.thePlayer.lastTickPosX) * partialTickTime);
			float playerY = (float) (mc.thePlayer.lastTickPosY + (mc.thePlayer.posY - mc.thePlayer.lastTickPosY) * partialTickTime);
			float playerZ = (float) (mc.thePlayer.lastTickPosZ + (mc.thePlayer.posZ - mc.thePlayer.lastTickPosZ) * partialTickTime);
			float displayX = (float)point.getX() - playerX;
			float displayY = (float)point.getY() + 1.3f - playerY;
			float displayZ = (float)point.getZ() - playerZ;
			float scale = (float) (Math.max(2, distance /5) * 0.0185f);
			GL11.glColor4f(1f, 1f, 1f, 1f);
			GL11.glPushMatrix();
			GL11.glTranslatef(displayX, displayY, displayZ);
			// Billboard: counter-rotate by the camera view angles.
			GL11.glRotatef(-rm.playerViewY, 0.0F, 1.0F, 0.0F);
			GL11.glRotatef(rm.playerViewX, 1.0F, 0.0F, 0.0F);
			GL11.glScalef(-scale, -scale, scale);
			GL11.glDisable(GL11.GL_LIGHTING);
			GL11.glDepthMask(false);
			GL11.glDisable(GL11.GL_DEPTH_TEST);
			GL11.glEnable(GL11.GL_BLEND);
			GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
			name += " (" + (int)distance + "m)";
			int width = fr.getStringWidth(name);
			int height = 10;
			GL11.glDisable(GL11.GL_TEXTURE_2D);
			wr.startDrawingQuads();
			int stringMiddle = width / 2;
			wr.setColorRGBA_F(c.getRed() / 255.0F, c.getGreen() / 255.0F, c.getBlue() / 255.0F, config.getWaypointOpcaity());
			wr.addVertex(-stringMiddle - 1, -1, 0.0D);
			wr.addVertex(-stringMiddle - 1, 1 + height, 0.0D);
			wr.addVertex(stringMiddle + 1, 1 + height, 0.0D);
			wr.addVertex(stringMiddle + 1, -1, 0.0D);
			tess.draw();
			GL11.glEnable(GL11.GL_TEXTURE_2D);
			fr.drawString(name, -width / 2, 1, Color.WHITE.getRGB());
			GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
			GL11.glDepthMask(true);
			GL11.glEnable(GL11.GL_DEPTH_TEST);
			GL11.glPopMatrix();
		}
	}
}
| |
/* ----------------------------------------------------------------------------------
* Package Name : SimpleSocket.server.net
* Class Name : Listener.java
*
* ---------------------------
* Created by : Igor Androsov
* Creation date : 2002/10/12
* ---------------------------
*
* Change log:
*
* Date Author Description
* --------------------------------------------------------------------------------
* 2002/10/12 Igor Androsov
*
* ---------------------------------------------------------------------------------
*/
package SimpleSocket.server.net;
import com.wm.app.b2b.server.*;
import com.wm.util.*;
import java.net.InetAddress;
import java.util.Enumeration;
import java.util.Hashtable;
import SimpleSocket.server.net.resources.ServerListenerExceptionBundle;
public abstract class Listener
implements Runnable, ServerListenerIf
{
static final String DEF_PACKAGE = "SimpleSocket";
protected Thread _thread;
protected boolean _running;
protected boolean _enabled;
protected String _protocol;
protected int _port;
protected InetAddress _bindAddress;
protected String _package;
protected Hashtable _allowList;
protected Hashtable _denyList;
protected int _ipAccessMode;
protected boolean _isAccessModeSet;
public Listener()
{
_allowList = new Hashtable(11);
_denyList = new Hashtable(11);
_isAccessModeSet = false;
_thread = null;
_running = false;
_enabled = false;
_package = "SimpleSocket";
_ipAccessMode = 0;
}
public Listener(Values props)
{
this();
if(props != null)
{
String mode = (String)props.get("hostAccessMode");
String allowlist[] = (String[])props.get("hostAllow");
String denylist[] = (String[])props.get("hostDeny");
if(mode != null)
{
boolean allowByDefault = !"exclude".equalsIgnoreCase(mode);
if(allowByDefault)
{
if(denylist != null)
_ipAccessMode = 1;
} else
if(allowlist != null)
_ipAccessMode = 2;
}
if(_ipAccessMode != 0)
if(_ipAccessMode == 1)
{
if(denylist != null)
setAccessList(denylist, false);
} else
if(_ipAccessMode == 2 && allowlist != null)
setAccessList(allowlist, true);
String bindaddr = (String)props.get("bindAddress");
try
{
if(bindaddr != null && bindaddr.length() > 0)
_bindAddress = InetAddress.getByName(bindaddr);
}
catch(Exception exception) { }
}
}
public synchronized boolean isRunning()
{
return _running;
}
public boolean isListening()
{
return _running;
}
public boolean isEnabled()
{
return _enabled;
}
public boolean isPrimary()
{
return ListenerAdmin.isPrimaryListener(this);
}
public int getPort()
{
return _port;
}
public InetAddress getBindAddress()
{
return _bindAddress;
}
public String getPackage()
{
return _package;
}
public String getProtocol()
{
return _protocol;
}
public Thread getThread()
{
return _thread;
}
public synchronized void setRunning(boolean running)
{
_running = running;
}
public void setEnabled(boolean enable)
{
_enabled = enable;
}
public void setThread(Thread thread)
{
_thread = thread;
}
public void setPort(int port)
{
_port = port;
}
public void setBindAddress(InetAddress bindAddr)
{
_bindAddress = bindAddr;
}
public void setPackage(String pkg)
{
_package = pkg;
if(_package == null)
_package = "SimpleSocket";
}
public abstract void setup()
throws ServerListenerException;
public abstract void cleanup()
throws ServerListenerException;
public void startListening()
throws ServerListenerException
{
if(isRunning())
return;
if(!isEnabled())
throw new ServerListenerException(SimpleSocket.server.net.resources.ServerListenerExceptionBundle.class, ServerListenerExceptionBundle.LISTENER_NOT_ENABLED, "");
try
{
//System.out.println("Listener - startListening call.");
Thread t = new Thread(this);
setThread(t);
setup();
t.setName(getKey());
t.start();
setRunning(true);
JournalLogger.logDebugPlus(1, 23, 70, getKey());
}
catch(ServerListenerException sle)
{
cleanup();
throw sle;
}
}
public void stopListening()
throws ServerListenerException
{
try
{
//System.out.println("##### TEST");
//System.out.println("Listener - stopListening call.");
JournalLogger.logDebugPlus(1, 24, 70, getKey());
setRunning(false);
cleanup();
Thread t = getThread();
if(t != null)
t.stop();
setThread(null);
//System.out.println("Listener - thread stopped and set - null.");
}
catch(ServerListenerException serverlistenerexception) { }
}
public void finish()
throws ServerListenerException
{
stopListening();
}
public abstract void loop();
public void run()
{
try
{
setRunning(true);
JournalLogger.logDebugPlus(1, 17, 70, getKey());
PortManager.addListener(getKey());
loop();
try
{
cleanup();
}
catch(ServerListenerException sle)
{
sle.printStackTrace();
}
setRunning(false);
setThread(null);
}
finally
{
JournalLogger.logDebugPlus(1, 18, 70, getKey());
PortManager.removeListener(getKey());
}
}
public Values getProperties()
{
Object o[][] = {
{
"port", new Integer(getPort())
}, {
"protocol", getProtocol()
}, {
"pkg", getPackage()
}, {
"enabled", (new Boolean(isEnabled())).toString()
}, {
"key", getKey()
}
};
Values v = new Values(o);
if(_ipAccessMode != 0)
{
v.put("hostAccessMode", _ipAccessMode != 1 ? "exclude" : "include");
v.put("hostAllow", getAccessList(true));
v.put("hostDeny", getAccessList(false));
}
if(_bindAddress != null)
v.put("bindAddress", _bindAddress.getHostAddress());
return v;
}
public boolean isAccessAllowed(InetAddress iAddress)
{
String address = iAddress.getHostAddress();
boolean allowed;
switch(_ipAccessMode)
{
case 1: // '\001'
if(_denyList.size() > 0)
{
String host = iAddress.getHostName();
for(Enumeration e = _denyList.keys(); e.hasMoreElements();)
{
StringMatcher sm = new StringMatcher((String)e.nextElement());
if(sm.match(host.toLowerCase()) || sm.match(address))
{
JournalLogger.logDebug(25, 70, new Object[] {
getKey(), iAddress.getHostAddress()
});
return false;
}
}
}
return true;
case 2: // '\002'
if(_allowList.size() > 0)
{
String host = iAddress.getHostName();
for(Enumeration e = _allowList.keys(); e.hasMoreElements();)
{
StringMatcher sm = new StringMatcher((String)e.nextElement());
if(sm.match(host.toLowerCase()) || sm.match(address))
return true;
}
}
JournalLogger.logDebug(25, 70, new Object[] {
getKey(), iAddress.getHostAddress()
});
return false;
case 0: // '\0'
allowed = Server.connectAllowed(iAddress);
if(!allowed)
JournalLogger.logDebug(25, 70, new Object[] {
getKey(), iAddress.getHostAddress()
});
return allowed;
}
allowed = Server.connectAllowed(iAddress);
if(!allowed)
JournalLogger.logDebug(25, 70, new Object[] {
getKey(), iAddress.getHostAddress()
});
return allowed;
}
public String[] getAccessList(boolean allowedList)
{
String accessList[] = null;
if(allowedList)
{
Enumeration e = _allowList.keys();
accessList = new String[_allowList.size()];
int i = 0;
while(e.hasMoreElements())
accessList[i++] = (String)e.nextElement();
} else
{
Enumeration e = _denyList.keys();
accessList = new String[_denyList.size()];
int i = 0;
while(e.hasMoreElements())
accessList[i++] = (String)e.nextElement();
}
return accessList;
}
public void setAccessList(String hostList[], boolean allowedList)
{
if(hostList == null)
return;
for(int i = 0; i < hostList.length; i++)
{
String host = hostList[i];
if(host != null)
if(allowedList)
_allowList.put(hostList[i].toLowerCase(), "");
else
_denyList.put(hostList[i].toLowerCase(), "");
}
}
/**
 * Removes a host pattern from the allow list or the deny list. Lookup is by
 * the lower-cased name, matching how entries are stored by setAccessList.
 *
 * @param hostName    the host pattern to remove (case-insensitive)
 * @param allowedList true to remove from the allow list, false for the deny list
 */
public void deleteFromAccessList(String hostName, boolean allowedList)
{
    String key = hostName.toLowerCase();
    if(allowedList)
    {
        _allowList.remove(key);
    } else
    {
        _denyList.remove(key);
    }
}
/**
 * Sets the IP access mode. As used by isAccessAllowed: 1 = deny-list mode,
 * 2 = allow-list mode, 0 (or any other value) = defer to the server-wide
 * connect policy.
 *
 * @param accessMode the new access mode
 */
public synchronized void setIPAccessMode(int accessMode)
{
_ipAccessMode = accessMode;
}
/**
 * Returns the current IP access mode (see setIPAccessMode for the meaning
 * of the values).
 *
 * @return the current access mode
 */
public synchronized int getIPAccessMode()
{
return _ipAccessMode;
}
/** Identifying key for this instance; used as the first element of the JournalLogger debug records above. */
public abstract String getKey();
/** Human-readable status of this instance; implemented by subclasses. */
public abstract String getStatus();
}
| |
/*
* Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 2001, 2002,2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.impl.xs.opti;
import com.sun.org.apache.xerces.internal.xni.QName;
import com.sun.org.apache.xerces.internal.xni.XMLString;
import com.sun.org.apache.xerces.internal.xni.NamespaceContext;
import com.sun.org.apache.xerces.internal.xni.XMLLocator;
import com.sun.org.apache.xerces.internal.xni.Augmentations;
import com.sun.org.apache.xerces.internal.xni.XMLAttributes;
import com.sun.org.apache.xerces.internal.xni.XMLDTDHandler;
import com.sun.org.apache.xerces.internal.xni.XMLDocumentHandler;
import com.sun.org.apache.xerces.internal.xni.XMLDTDContentModelHandler;
import com.sun.org.apache.xerces.internal.xni.XMLResourceIdentifier;
import com.sun.org.apache.xerces.internal.xni.parser.XMLDocumentSource;
import com.sun.org.apache.xerces.internal.xni.parser.XMLDTDSource;
import com.sun.org.apache.xerces.internal.xni.parser.XMLDTDContentModelSource;
import com.sun.org.apache.xerces.internal.xni.XNIException;
/**
* @xerces.internal
*
* @author Rahul Srivastava, Sun Microsystems Inc.
* @author Sandy Gao, IBM
*
*/
public class DefaultXMLDocumentHandler implements XMLDocumentHandler,
                                                  XMLDTDHandler,
                                                  XMLDTDContentModelHandler {

    // Upstream event sources. Grouped here rather than scattered between the
    // accessor methods at the bottom of the class; default-initialized to null.
    private XMLDocumentSource fDocumentSource;
    private XMLDTDSource fDTDSource;
    private XMLDTDContentModelSource fCMSource;

    /** Default constructor; the handler carries no state beyond its sources. */
    public DefaultXMLDocumentHandler() {
    }

    //
    // XMLDocumentHandler methods -- every callback below is a deliberate no-op;
    // subclasses override only the events they care about.
    //

    /** Start of the document. No-op. */
    public void startDocument(XMLLocator locator, String encoding,
                              NamespaceContext context, Augmentations augs)
        throws XNIException {
    }

    /** XMLDecl line (version, encoding, standalone). No-op. */
    public void xmlDecl(String version, String encoding, String standalone,
                        Augmentations augs) throws XNIException {
    }

    /** DOCTYPE declaration. No-op. */
    public void doctypeDecl(String rootElement, String publicId, String systemId,
                            Augmentations augs) throws XNIException {
    }

    /**
     * A comment. No-op. This single method satisfies both XMLDocumentHandler
     * and XMLDTDHandler, whose signatures coincide.
     */
    public void comment(XMLString text, Augmentations augs) throws XNIException {
    }

    /**
     * A processing instruction (target plus optional, unparsed data). No-op.
     * Shared by XMLDocumentHandler and XMLDTDHandler.
     */
    public void processingInstruction(String target, XMLString data,
                                      Augmentations augs) throws XNIException {
    }

    /** Start of a namespace prefix mapping (namespace processing only). No-op. */
    public void startPrefixMapping(String prefix, String uri, Augmentations augs)
        throws XNIException {
    }

    /** Start of an element. No-op. */
    public void startElement(QName element, XMLAttributes attributes,
                             Augmentations augs) throws XNIException {
    }

    /** An empty element. No-op. */
    public void emptyElement(QName element, XMLAttributes attributes,
                             Augmentations augs) throws XNIException {
    }

    /**
     * Start of a general entity. Not called for entity references inside
     * attribute values. No-op.
     */
    public void startGeneralEntity(String name,
                                   XMLResourceIdentifier identifier,
                                   String encoding,
                                   Augmentations augs) throws XNIException {
    }

    /**
     * TextDecl line in an external entity; immediately follows the start of
     * the entity. No-op. Shared by XMLDocumentHandler and XMLDTDHandler.
     */
    public void textDecl(String version, String encoding, Augmentations augs)
        throws XNIException {
    }

    /** End of a general entity. No-op. */
    public void endGeneralEntity(String name, Augmentations augs)
        throws XNIException {
    }

    /** Character content. No-op. */
    public void characters(XMLString text, Augmentations augs) throws XNIException {
    }

    /** Ignorable whitespace, as determined by the document source. No-op. */
    public void ignorableWhitespace(XMLString text, Augmentations augs)
        throws XNIException {
    }

    /** End of an element. No-op. */
    public void endElement(QName element, Augmentations augs) throws XNIException {
    }

    /** End of a namespace prefix mapping (namespace processing only). No-op. */
    public void endPrefixMapping(String prefix, Augmentations augs)
        throws XNIException {
    }

    /** Start of a CDATA section. No-op. */
    public void startCDATA(Augmentations augs) throws XNIException {
    }

    /** End of a CDATA section. No-op. */
    public void endCDATA(Augmentations augs) throws XNIException {
    }

    /** End of the document. No-op. */
    public void endDocument(Augmentations augs) throws XNIException {
    }

    //
    // XMLDTDHandler methods -- all no-ops.
    //

    /** Start of the DTD. No-op. */
    public void startDTD(XMLLocator locator, Augmentations augmentations)
        throws XNIException {
    }

    /** Start of a parameter entity (name begins with '%'). No-op. */
    public void startParameterEntity(String name,
                                     XMLResourceIdentifier identifier,
                                     String encoding,
                                     Augmentations augmentations)
        throws XNIException {
    }

    /** End of a parameter entity. No-op. */
    public void endParameterEntity(String name, Augmentations augmentations)
        throws XNIException {
    }

    /** Start of the DTD external subset. No-op. */
    public void startExternalSubset(XMLResourceIdentifier identifier,
                                    Augmentations augmentations)
        throws XNIException {
    }

    /** End of the DTD external subset. No-op. */
    public void endExternalSubset(Augmentations augmentations)
        throws XNIException {
    }

    /** An element declaration. No-op. */
    public void elementDecl(String name, String contentModel,
                            Augmentations augmentations)
        throws XNIException {
    }

    /** Start of an attribute list for the given element. No-op. */
    public void startAttlist(String elementName,
                             Augmentations augmentations) throws XNIException {
    }

    /**
     * An attribute declaration: element/attribute names, attribute type
     * ("CDATA", "ID", "NOTATION", ...), optional enumeration values, default
     * type ("#FIXED", "#IMPLIED", "#REQUIRED" or null) and the normalized and
     * non-normalized default values. No-op.
     */
    public void attributeDecl(String elementName, String attributeName,
                              String type, String[] enumeration,
                              String defaultType, XMLString defaultValue,
                              XMLString nonNormalizedDefaultValue,
                              Augmentations augmentations)
        throws XNIException {
    }

    /** End of an attribute list. No-op. */
    public void endAttlist(Augmentations augmentations) throws XNIException {
    }

    /**
     * An internal entity declaration, with both the normalized and the
     * non-normalized (references unexpanded) values. No-op.
     */
    public void internalEntityDecl(String name, XMLString text,
                                   XMLString nonNormalizedText,
                                   Augmentations augmentations)
        throws XNIException {
    }

    /** An external entity declaration. No-op. */
    public void externalEntityDecl(String name,
                                   XMLResourceIdentifier identifier,
                                   Augmentations augmentations)
        throws XNIException {
    }

    /** An unparsed entity declaration with its associated notation. No-op. */
    public void unparsedEntityDecl(String name,
                                   XMLResourceIdentifier identifier,
                                   String notation, Augmentations augmentations)
        throws XNIException {
    }

    /** A notation declaration. No-op. */
    public void notationDecl(String name, XMLResourceIdentifier identifier,
                             Augmentations augmentations) throws XNIException {
    }

    /**
     * Start of a conditional section; type is CONDITIONAL_INCLUDE or
     * CONDITIONAL_IGNORE. No-op.
     */
    public void startConditional(short type, Augmentations augmentations)
        throws XNIException {
    }

    /** Characters within an IGNORE conditional section. No-op. */
    public void ignoredCharacters(XMLString text, Augmentations augmentations)
        throws XNIException {
    }

    /** End of a conditional section. No-op. */
    public void endConditional(Augmentations augmentations) throws XNIException {
    }

    /** End of the DTD. No-op. */
    public void endDTD(Augmentations augmentations) throws XNIException {
    }

    //
    // XMLDTDContentModelHandler methods -- all no-ops.
    //

    /** Start of a content model for the given element. No-op. */
    public void startContentModel(String elementName, Augmentations augmentations)
        throws XNIException {
    }

    /** A content model of ANY. No-op. */
    public void any(Augmentations augmentations) throws XNIException {
    }

    /** A content model of EMPTY. No-op. */
    public void empty(Augmentations augmentations) throws XNIException {
    }

    /** Start of a mixed or children content-model group. No-op. */
    public void startGroup(Augmentations augmentations) throws XNIException {
    }

    /**
     * "#PCDATA" inside a group, signifying a mixed content model; first
     * callback after startGroup() in that case. No-op.
     */
    public void pcdata(Augmentations augmentations) throws XNIException {
    }

    /** A referenced element in a mixed or children content model. No-op. */
    public void element(String elementName, Augmentations augmentations)
        throws XNIException {
    }

    /**
     * The separator between choices or sequences (SEPARATOR_CHOICE /
     * SEPARATOR_SEQUENCE). No-op.
     */
    public void separator(short separator, Augmentations augmentations)
        throws XNIException {
    }

    /**
     * The occurrence count for the preceding element or group
     * (OCCURS_ZERO_OR_ONE / OCCURS_ZERO_OR_MORE / OCCURS_ONE_OR_MORE). No-op.
     */
    public void occurrence(short occurrence, Augmentations augmentations)
        throws XNIException {
    }

    /** End of a content-model group. No-op. */
    public void endGroup(Augmentations augmentations) throws XNIException {
    }

    /** End of a content model. No-op. */
    public void endContentModel(Augmentations augmentations) throws XNIException {
    }

    //
    // Pipeline wiring accessors (the only stateful part of this class).
    //

    /** Sets the document source. */
    public void setDocumentSource(XMLDocumentSource source) {
        fDocumentSource = source;
    }

    /** Returns the document source. */
    public XMLDocumentSource getDocumentSource() {
        return fDocumentSource;
    }

    /** Sets the source of DTD events for this handler. */
    public void setDTDSource(XMLDTDSource source) {
        fDTDSource = source;
    }

    /** Returns the source from which this handler derives its DTD events. */
    public XMLDTDSource getDTDSource() {
        return fDTDSource;
    }

    /** Sets the content-model source. */
    public void setDTDContentModelSource(XMLDTDContentModelSource source) {
        fCMSource = source;
    }

    /** Returns the content-model source. */
    public XMLDTDContentModelSource getDTDContentModelSource() {
        return fCMSource;
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidradev.ghidrascripteditor;
import java.awt.Dimension;
import java.io.File;
import java.util.*;
import org.eclipse.core.resources.*;
import org.eclipse.core.runtime.*;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jface.viewers.LabelProvider;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.window.Window;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.ui.*;
import org.eclipse.ui.dialogs.ElementListSelectionDialog;
import org.eclipse.ui.ide.IDE;
import ghidradev.EclipseMessageUtils;
import ghidradev.ghidraprojectcreator.utils.GhidraProjectUtils;
import ghidradev.ghidraprojectcreator.wizards.CreateGhidraScriptProjectWizard;
/**
 * Locates a script file (given by absolute file-system path) inside the open
 * Ghidra Eclipse projects and opens it in an editor. If the file matches in
 * several projects, a project whose name ends in "scripts" wins; otherwise
 * the user is asked to choose. If no project contains the file, the user is
 * offered the Ghidra scripting project creation wizard.
 */
public class OpenFileRunnable implements Runnable {

	// Absolute file-system path of the file to open.
	private final String filePath;

	/**
	 * @param filePath absolute file-system path of the script file to open
	 */
	public OpenFileRunnable(String filePath) {
		this.filePath = filePath;
	}

	@Override
	public void run() {
		List<IFile> projectFiles = findMatchingFiles(filePath);
		IFile[] filesToOpen = maybePromptUserForFilesToOpen(projectFiles);
		openFiles(filesToOpen);
	}

	/** Opens each chosen file; a null array means "nothing to do / user cancelled". */
	private void openFiles(IFile[] userFileChoices) {
		if (userFileChoices == null) {
			return; // user cancelled
		}
		for (IFile file : userFileChoices) {
			openFile(file);
		}
	}

	/** Opens a single file in an editor and brings the workbench window to front. */
	private void openFile(IFile file) {
		IWorkbenchPage page = EclipseMessageUtils.getWorkbenchPage();
		try {
			IDE.openEditor(page, file);
		}
		catch (PartInitException e) {
			EclipseMessageUtils.showErrorDialog("Unable to Open Script",
				"Couldn't open editor for " + filePath);
		}
		page.getWorkbenchWindow().getShell().forceActive();
	}

	/**
	 * Picks which of the matching files should be opened, prompting the user
	 * when there is more than one candidate and no preferred project.
	 *
	 * @param projectFiles candidate files; may be empty
	 * @return the files to open, or null if there is nothing to open or the
	 *         user cancelled the selection dialog
	 */
	private IFile[] maybePromptUserForFilesToOpen(List<IFile> projectFiles) {
		if (projectFiles.isEmpty()) {
			return null;
		}
		if (projectFiles.size() == 1) {
			return new IFile[] { projectFiles.get(0) };
		}

		// look for any project ending in 'scripts' and assume that is the preferred project
		for (IFile iFile : projectFiles) {
			IProject project = iFile.getProject();
			String projectName = project.getName();
			if (projectName.toLowerCase().endsWith("scripts")) {
				// BUGFIX: return the file that actually lives in the preferred
				// project; the original always returned projectFiles.get(0),
				// which may belong to a different project.
				return new IFile[] { iFile };
			}
		}

		IWorkbenchPage page = EclipseMessageUtils.getWorkbenchPage();
		ElementListSelectionDialog dialog = new ElementListSelectionDialog(
			page.getWorkbenchWindow().getShell(), new LabelProvider());
		dialog.setTitle("Choose a File");
		List<DisplayableIFile> displayableFiles = formatStrings(projectFiles);
		dialog.setMultipleSelection(true);
		dialog.setElements(displayableFiles.toArray(new DisplayableIFile[displayableFiles.size()]));
		dialog.setMessage("Select a file to open");

		Dimension size = calculatePreferredSizeInCharacters(displayableFiles);
		dialog.setSize(size.width, size.height);

		// BUGFIX: the original ignored the dialog's return code; cancelling the
		// dialog makes getResult() return null, which caused an NPE below.
		if (dialog.open() != Window.OK) {
			return null; // user cancelled
		}
		Object[] results = dialog.getResult();
		if (results == null) {
			return null;
		}
		IFile[] resultFiles = new IFile[results.length];
		for (int i = 0; i < results.length; i++) {
			resultFiles[i] = ((DisplayableIFile) results[i]).getFile();
		}
		return resultFiles;
	}

	/**
	 * Finds all workspace files matching the given path. If nothing is found
	 * at first, the projects are refreshed and the search repeated; if still
	 * nothing, the user is offered the scripting-project creation wizard.
	 */
	private List<IFile> findMatchingFiles(String path) {
		Collection<IJavaProject> javaProjects = GhidraProjectUtils.getGhidraProjects();
		List<IFile> projectFiles = findMatchingFilesInProjects(path, javaProjects);
		if (!projectFiles.isEmpty()) {
			// Found on the first pass -- the original performed a redundant
			// second scan here.
			return projectFiles;
		}

		// Nothing found; the workspace may be stale -- refresh and retry.
		try {
			for (IJavaProject javaProject : javaProjects) {
				javaProject.getProject().refreshLocal(IResource.DEPTH_INFINITE,
					new NullProgressMonitor());
			}
		}
		catch (CoreException e1) {
			EclipseMessageUtils.showErrorDialog("Unable to Open Script",
				"Unexpected Exception refreshing project");
			return new ArrayList<IFile>();
		}

		projectFiles = findMatchingFilesInProjects(path, javaProjects);
		if (projectFiles.isEmpty()) {
			boolean createProject = EclipseMessageUtils.showConfirmDialog("Unable to Open Script",
				"File does not exist in any Eclipse project in your workspace.\n\n" +
					"Would you like to create a new Ghidra Scripting project?");
			if (createProject) {
				INewWizard wizard = new CreateGhidraScriptProjectWizard();
				wizard.init(PlatformUI.getWorkbench(), new StructuredSelection());
				WizardDialog dialog = new WizardDialog(
					PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(), wizard);
				dialog.setBlockOnOpen(true);
				if (dialog.open() == Window.OK) {
					return findMatchingFilesInProjects(path,
						GhidraProjectUtils.getGhidraProjects());
				}
			}
			return new ArrayList<IFile>();
		}
		return projectFiles;
	}

	/**
	 * Computes a dialog size, in characters: width tracks the longest display
	 * string (+7 padding, capped at 100); height is at most 10 rows and
	 * shrinks for short lists.
	 */
	private Dimension calculatePreferredSizeInCharacters(List<DisplayableIFile> files) {
		int width = 0;
		int height = 10;
		for (DisplayableIFile file : files) {
			String displayString = file.getDisplayString();
			width = Math.max(width, displayString.length());
		}
		width = Math.min(width + 7, 100);
		height = Math.min(height, files.size() + 3);
		return new Dimension(width, height);
	}

	/** Searches each open project for a member whose location equals the given path. */
	private List<IFile> findMatchingFilesInProjects(String pathString,
			Collection<IJavaProject> javaProjects) {
		List<IFile> files = new ArrayList<IFile>();
		for (IJavaProject javaProject : javaProjects) {
			IProject project = javaProject.getProject();
			if (!project.isOpen()) {
				continue;
			}
			try {
				IPath path = findPathFromFolder(pathString, project);
				if (path != null) {
					files.add(project.getFile(path));
				}
			}
			catch (CoreException e) {
				EclipseMessageUtils.error("Unexpected exception accessing project members", e);
			}
		}
		return files;
	}

	/**
	 * Recursively walks the resource tree under {@code resource} looking for a
	 * member whose file-system location equals {@code pathString}.
	 *
	 * @return the member's project-relative path, or null if not found
	 */
	private IPath findPathFromFolder(String pathString, IResource resource) throws CoreException {
		if (!(resource instanceof IContainer)) {
			return null;
		}
		IContainer container = (IContainer) resource;
		for (IResource member : container.members()) {
			IPath location = member.getLocation();
			// compare as files in order to bypass path separator issues
			File fileForPath = new File(pathString);
			File fileForLocation = location.toFile();
			if (fileForLocation.equals(fileForPath)) {
				return member.getProjectRelativePath();
			}
			IPath pathFromFolder = findPathFromFolder(pathString, member);
			if (pathFromFolder != null) {
				return pathFromFolder;
			}
		}
		return null;
	}

	/** Wraps each file in a DisplayableIFile for the selection dialog. */
	private List<DisplayableIFile> formatStrings(List<IFile> projectFiles) {
		List<DisplayableIFile> list = new ArrayList<DisplayableIFile>();
		for (IFile file : projectFiles) {
			list.add(new DisplayableIFile(file));
		}
		return list;
	}

//==================================================================================================
// Inner Classes
//==================================================================================================

	/** Pairs an IFile with a "parent/path - filename" display string. */
	private class DisplayableIFile {
		private final IFile file;
		private final String displayString;

		private DisplayableIFile(IFile file) {
			this.file = file;
			// file.toString() observed here as slash-separated segments; the
			// first segment is skipped and the last is the file name.
			String[] strings = file.toString().split("/");
			StringBuilder parentPath = new StringBuilder();
			for (int i = 1; i < strings.length - 1; i++) {
				if (parentPath.length() > 0) {
					parentPath.append('/');
				}
				parentPath.append(strings[i]);
			}
			String name = strings[strings.length - 1];
			// BUGFIX: the original did format.substring(0, format.length() - 1)
			// unconditionally, which threw StringIndexOutOfBoundsException when
			// there were no parent segments.
			displayString = parentPath.length() == 0 ? name : parentPath + " - " + name;
		}

		IFile getFile() {
			return file;
		}

		String getDisplayString() {
			return displayString;
		}

		@Override
		public String toString() {
			return getDisplayString();
		}
	}
}
| |
package org.eclipse.jetty.servlets;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.URI;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.util.log.StdErrLog;
import org.junit.Assert;
import static org.hamcrest.Matchers.not;
/**
 * Test helper that drives raw HTTP/1.1 requests over a plain {@link Socket} so
 * that keep-alive/pipelined and chunked-transfer behavior can be inspected at
 * the byte level, without any client-side HTTP library in the way.
 */
public class PipelineHelper
{
    private static final Logger LOG = Log.getLogger(PipelineHelper.class);
    private URI uri;
    private SocketAddress endpoint;
    private Socket socket;
    private OutputStream outputStream;
    private InputStream inputStream;
    // value advertised in the Accept-Encoding request header (e.g. "gzip")
    private String encodingHeader;
    /**
     * @param uri the base URI (host/port) of the server under test
     * @param encodingHeader the Accept-Encoding header value to advertise
     */
    public PipelineHelper(URI uri, String encodingHeader)
    {
        // force DEBUG level when the default stderr logger is in use
        if (LOG instanceof StdErrLog)
        {
            ((StdErrLog)LOG).setLevel(StdErrLog.LEVEL_DEBUG);
        }
        this.uri = uri;
        this.endpoint = new InetSocketAddress(uri.getHost(),uri.getPort());
        this.encodingHeader = encodingHeader;
    }
    /**
     * Open the Socket to the destination endpoint and capture its I/O streams.
     *
     * @return the open java Socket.
     * @throws IOException if the connection cannot be established within 1000ms
     */
    public Socket connect() throws IOException
    {
        LOG.info("Connecting to endpoint: " + endpoint);
        socket = new Socket();
        socket.setTcpNoDelay(true);
        socket.connect(endpoint,1000);
        outputStream = socket.getOutputStream();
        inputStream = socket.getInputStream();
        return socket;
    }
    /**
     * Issue a HTTP/1.1 GET request with Connection:keep-alive set.
     *
     * @param path
     *            the path to GET
     * @param acceptGzipped
     *            to turn on acceptance of GZIP compressed responses
     * @param close
     *            true to send "Connection: close", false to send
     *            "Connection: keep-alive"
     * @throws IOException if the request cannot be written
     */
    public void issueGET(String path, boolean acceptGzipped, boolean close) throws IOException
    {
        LOG.debug("Issuing GET on " + path);
        StringBuilder req = new StringBuilder();
        req.append("GET ").append(uri.resolve(path).getPath()).append(" HTTP/1.1\r\n");
        req.append("Host: ").append(uri.getHost()).append(":").append(uri.getPort()).append("\r\n");
        req.append("User-Agent: Mozilla/5.0 (iPhone; CPU iPhone OS 5_0_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A405 Safari/7534.48.3\r\n");
        req.append("Accept: */*\r\n");
        req.append("Referer: http://mycompany.com/index.html\r\n");
        req.append("Accept-Language: en-us\r\n");
        if (acceptGzipped)
        {
            req.append("Accept-Encoding: " + encodingHeader + "\r\n");
        }
        req.append("Cookie: JSESSIONID=spqx8v8szylt1336t96vc6mw0\r\n");
        if ( close )
        {
            req.append("Connection: close\r\n");
        }
        else
        {
            req.append("Connection: keep-alive\r\n");
        }
        req.append("\r\n");
        LOG.debug("Request:" + req);
        // Send HTTP GET Request
        // NOTE(review): getBytes() uses the platform default charset; the
        // request text built above is plain ASCII, so this is safe in practice.
        byte buf[] = req.toString().getBytes();
        outputStream.write(buf,0,buf.length);
        outputStream.flush();
    }
    /**
     * Read the full HTTP response header section, up to and excluding the
     * blank line that terminates it.
     *
     * @return the raw header lines, each re-terminated with CRLF
     * @throws IOException if the header cannot be read within the 10s timeout
     */
    public String readResponseHeader() throws IOException
    {
        // Read Response Header
        socket.setSoTimeout(10000);
        LOG.debug("Reading http header");
        StringBuilder response = new StringBuilder();
        boolean foundEnd = false;
        String line;
        while (!foundEnd)
        {
            line = readLine();
            // System.out.printf("RESP: \"%s\"%n",line);
            if (line.length() == 0)
            {
                // an empty line marks the end of the header section
                foundEnd = true;
                LOG.debug("Got full http response header");
            }
            else
            {
                response.append(line).append("\r\n");
            }
        }
        return response.toString();
    }
    /**
     * Read a single CRLF-terminated line from the input stream, failing the
     * test on premature EOF.
     *
     * @return the line content without the CR/LF terminator
     * @throws IOException on read failure
     */
    public String readLine() throws IOException
    {
        StringBuilder line = new StringBuilder();
        boolean foundCR = false;
        boolean foundLF = false;
        int b;
        while (!(foundCR && foundLF))
        {
            b = inputStream.read();
            Assert.assertThat("Should not have hit EOL (yet) during chunk size read",b,not(-1));
            if (b == 0x0D)
            {
                foundCR = true;
            }
            else if (b == 0x0A)
            {
                foundLF = true;
            }
            else
            {
                // any other byte resets CRLF detection and joins the line
                foundCR = false;
                foundLF = false;
                line.append((char)b);
            }
        }
        return line.toString();
    }
    /**
     * Read a chunked-transfer-encoding chunk-size line, failing the test on
     * premature EOF or a non-hex byte.
     *
     * @return the chunk size, parsed as an uppercase hexadecimal number
     * @throws IOException on read failure
     */
    public long readChunkSize() throws IOException
    {
        StringBuilder chunkSize = new StringBuilder();
        String validHex = "0123456789ABCDEF";
        boolean foundCR = false;
        boolean foundLF = false;
        int b;
        while (!(foundCR && foundLF))
        {
            b = inputStream.read();
            Assert.assertThat("Should not have hit EOL (yet) during chunk size read",b,not(-1));
            if (b == 0x0D)
            {
                foundCR = true;
            }
            else if (b == 0x0A)
            {
                foundLF = true;
            }
            else
            {
                foundCR = false;
                foundLF = false;
                // Must be valid char
                char c = (char)b;
                if (validHex.indexOf(c) >= 0)
                {
                    chunkSize.append(c);
                }
                else
                {
                    Assert.fail(String.format("Encountered invalid chunk size byte 0x%X",b));
                }
            }
        }
        return Long.parseLong(chunkSize.toString(),16);
    }
    /**
     * Read exactly {@code size} bytes of response body, one byte at a time,
     * copying them to the given stream.
     *
     * @param stream destination for the body bytes
     * @param size number of bytes expected
     * @return the number of bytes read (always {@code size} on success)
     * @throws IOException on read failure
     */
    public int readBody(OutputStream stream, int size) throws IOException
    {
        int left = size;
        while (left > 0)
        {
            int val = inputStream.read();
            // deliberately slow the read down slightly, presumably to exercise
            // the server against a sluggish client — TODO confirm intent
            try
            {
                if (left % 10 == 0)
                    Thread.sleep(1);
            }
            catch (InterruptedException e)
            {
                e.printStackTrace();
            }
            if (val == (-1))
            {
                Assert.fail(String.format("Encountered an early EOL (expected another %,d bytes)",left));
            }
            stream.write(val);
            left--;
        }
        return size - left;
    }
    /**
     * Read exactly {@code size} bytes of response body into a byte array,
     * failing the test on premature EOF.
     *
     * @param size number of bytes expected
     * @return the body bytes
     * @throws IOException on read failure
     */
    public byte[] readResponseBody(int size) throws IOException
    {
        byte partial[] = new byte[size];
        int readBytes = 0;
        int bytesLeft = size;
        while (readBytes < size)
        {
            int len = inputStream.read(partial,readBytes,bytesLeft);
            Assert.assertThat("Read should not have hit EOL yet",len,not(-1));
            System.out.printf("Read %,d bytes%n",len);
            if (len > 0)
            {
                readBytes += len;
                bytesLeft -= len;
            }
        }
        return partial;
    }
    public OutputStream getOutputStream()
    {
        return outputStream;
    }
    public InputStream getInputStream()
    {
        return inputStream;
    }
    public SocketAddress getEndpoint()
    {
        return endpoint;
    }
    public Socket getSocket()
    {
        return socket;
    }
    /** Close the underlying socket. */
    public void disconnect() throws IOException
    {
        LOG.debug("disconnect");
        socket.close();
    }
    /**
     * Extract the Content-Length value from a raw response header.
     *
     * @param respHeader the raw header text
     * @return the content length, or -1 if absent or unparseable
     */
    public int getContentLength(String respHeader)
    {
        Pattern pat = Pattern.compile("Content-Length: ([0-9]*)",Pattern.CASE_INSENSITIVE);
        Matcher mat = pat.matcher(respHeader);
        if (mat.find())
        {
            try
            {
                return Integer.parseInt(mat.group(1));
            }
            catch (NumberFormatException e)
            {
                return -1;
            }
        }
        else
        {
            // Undefined content length
            return -1;
        }
    }
}
| |
package org.apache.lucene.search.suggest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.util.concurrent.Callable;
import org.apache.lucene.util.*;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.fst.FSTCompletionLookup;
import org.apache.lucene.search.suggest.fst.WFSTCompletionLookup;
import org.apache.lucene.search.suggest.jaspell.JaspellLookup;
import org.apache.lucene.search.suggest.tst.TSTLookup;
import org.junit.BeforeClass;
import org.junit.Ignore;
/**
 * Benchmarks tests for implementations of {@link Lookup} interface.
 */
@Ignore("COMMENT ME TO RUN BENCHMARKS!")
public class LookupBenchmarkTest extends LuceneTestCase {
  // Lookup implementations under benchmark; each is built and queried identically.
  @SuppressWarnings("unchecked")
  private final List<Class<? extends Lookup>> benchmarkClasses = Arrays.asList(
      JaspellLookup.class,
      TSTLookup.class,
      FSTCompletionLookup.class,
      WFSTCompletionLookup.class);
  // Measured rounds per benchmark, and leading warmup rounds excluded from the average.
  private final static int rounds = 15;
  private final static int warmup = 5;
  // Number of suggestions requested per lookup call.
  private final int num = 7;
  private final boolean onlyMorePopular = true;
  // Fixed seed so shuffles (and thus benchmark runs) are reproducible.
  private final static Random random = new Random(0xdeadbeef);
  /**
   * Input term/weight pairs.
   */
  private static TermFreq [] dictionaryInput;
  /**
   * Benchmark term/weight pairs (randomized order).
   */
  private static List<TermFreq> benchmarkInput;
  /**
   * Loads terms and frequencies from Wikipedia (cached).
   */
  @BeforeClass
  public static void setup() throws Exception {
    // Intentionally trips under -ea: timings are meaningless with assertions enabled.
    assert false : "disable assertions before running benchmarks!";
    List<TermFreq> input = readTop50KWiki();
    Collections.shuffle(input, random);
    LookupBenchmarkTest.dictionaryInput = input.toArray(new TermFreq [input.size()]);
    Collections.shuffle(input, random);
    LookupBenchmarkTest.benchmarkInput = input;
  }
  static final Charset UTF_8 = Charset.forName("UTF-8");
  /**
   * Collect the multilingual input for benchmarks/ tests.
   * Each line of the resource is expected to be "term|weight".
   */
  public static List<TermFreq> readTop50KWiki() throws Exception {
    List<TermFreq> input = new ArrayList<TermFreq>();
    URL resource = LookupBenchmarkTest.class.getResource("Top50KWiki.utf8");
    assert resource != null : "Resource missing: Top50KWiki.utf8";
    String line = null;
    // NOTE(review): reader is not closed in a finally block; a parse failure
    // below leaks the stream. Acceptable for a benchmark, but worth noting.
    BufferedReader br = new BufferedReader(new InputStreamReader(resource.openStream(), UTF_8));
    while ((line = br.readLine()) != null) {
      int tab = line.indexOf('|');
      assertTrue("No | separator?: " + line, tab >= 0);
      int weight = Integer.parseInt(line.substring(tab + 1));
      String key = line.substring(0, tab);
      input.add(new TermFreq(key, weight));
    }
    br.close();
    return input;
  }
  /**
   * Test construction time.
   */
  public void testConstructionTime() throws Exception {
    System.err.println("-- construction time");
    for (final Class<? extends Lookup> cls : benchmarkClasses) {
      BenchmarkResult result = measure(new Callable<Integer>() {
        public Integer call() throws Exception {
          final Lookup lookup = buildLookup(cls, dictionaryInput);
          // return something derived from the lookup so the build is observable
          return lookup.hashCode();
        }
      });
      System.err.println(
          String.format(Locale.ROOT, "%-15s input: %d, time[ms]: %s",
              cls.getSimpleName(),
              dictionaryInput.length,
              result.average.toString()));
    }
  }
  /**
   * Test memory required for the storage.
   */
  public void testStorageNeeds() throws Exception {
    System.err.println("-- RAM consumption");
    for (Class<? extends Lookup> cls : benchmarkClasses) {
      Lookup lookup = buildLookup(cls, dictionaryInput);
      System.err.println(
          String.format(Locale.ROOT, "%-15s size[B]:%,13d",
              lookup.getClass().getSimpleName(),
              RamUsageEstimator.sizeOf(lookup)));
    }
  }
  /**
   * Create {@link Lookup} instance and populate it.
   */
  private Lookup buildLookup(Class<? extends Lookup> cls, TermFreq[] input) throws Exception {
    Lookup lookup = cls.newInstance();
    lookup.build(new TermFreqArrayIterator(input));
    return lookup;
  }
  /**
   * Test performance of lookup on full hits.
   */
  public void testPerformanceOnFullHits() throws Exception {
    // prefix lengths exceed any term length, so the whole term is used
    final int minPrefixLen = 100;
    final int maxPrefixLen = 200;
    runPerformanceTest(minPrefixLen, maxPrefixLen, num, onlyMorePopular);
  }
  /**
   * Test performance of lookup on longer term prefixes (6-9 letters or shorter).
   */
  public void testPerformanceOnPrefixes6_9() throws Exception {
    final int minPrefixLen = 6;
    final int maxPrefixLen = 9;
    runPerformanceTest(minPrefixLen, maxPrefixLen, num, onlyMorePopular);
  }
  /**
   * Test performance of lookup on short term prefixes (2-4 letters or shorter).
   */
  public void testPerformanceOnPrefixes2_4() throws Exception {
    final int minPrefixLen = 2;
    final int maxPrefixLen = 4;
    runPerformanceTest(minPrefixLen, maxPrefixLen, num, onlyMorePopular);
  }
  /**
   * Run the actual benchmark.
   *
   * @param minPrefixLen minimum query prefix length (inclusive)
   * @param maxPrefixLen maximum query prefix length (inclusive)
   * @param num number of suggestions requested per query
   * @param onlyMorePopular passed through to {@link Lookup#lookup}
   */
  public void runPerformanceTest(final int minPrefixLen, final int maxPrefixLen,
      final int num, final boolean onlyMorePopular) throws Exception {
    System.err.println(String.format(Locale.ROOT,
        "-- prefixes: %d-%d, num: %d, onlyMorePopular: %s",
        minPrefixLen, maxPrefixLen, num, onlyMorePopular));
    for (Class<? extends Lookup> cls : benchmarkClasses) {
      final Lookup lookup = buildLookup(cls, dictionaryInput);
      // derive query prefixes of random length within [minPrefixLen, maxPrefixLen]
      final List<String> input = new ArrayList<String>(benchmarkInput.size());
      for (TermFreq tf : benchmarkInput) {
        String s = tf.term.utf8ToString();
        input.add(s.substring(0, Math.min(s.length(),
            minPrefixLen + random.nextInt(maxPrefixLen - minPrefixLen + 1))));
      }
      BenchmarkResult result = measure(new Callable<Integer>() {
        public Integer call() throws Exception {
          int v = 0;
          for (String term : input) {
            v += lookup.lookup(term, onlyMorePopular, num).size();
          }
          return v;
        }
      });
      System.err.println(
          String.format(Locale.ROOT, "%-15s queries: %d, time[ms]: %s, ~kQPS: %.0f",
              lookup.getClass().getSimpleName(),
              input.size(),
              result.average.toString(),
              input.size() / result.average.avg));
    }
  }
  /**
   * Do the measurements.
   * Runs warmup + rounds iterations; per-round times (ms) are handed to
   * {@link BenchmarkResult}, which discards the warmup portion.
   */
  private BenchmarkResult measure(Callable<Integer> callable) {
    final double NANOS_PER_MS = 1000000;
    try {
      List<Double> times = new ArrayList<Double>();
      for (int i = 0; i < warmup + rounds; i++) {
        final long start = System.nanoTime();
        // store the result in a volatile so the JIT cannot elide the work
        guard = callable.call().intValue();
        times.add((System.nanoTime() - start) / NANOS_PER_MS);
      }
      return new BenchmarkResult(times, warmup, rounds);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  /** Guard against opts. */
  @SuppressWarnings("unused")
  private static volatile int guard;
  private static class BenchmarkResult {
    /** Average time per round (ms). */
    public final Average average;
    public BenchmarkResult(List<Double> times, int warmup, int rounds) {
      // average only the measured rounds; the first `warmup` entries are discarded
      this.average = Average.from(times.subList(warmup, times.size()));
    }
  }
}
| |
package com.beef.dataorigin.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.DateUtil;
import org.apache.poi.ss.usermodel.FormulaEvaluator;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
/**
 * Static helpers for reading Excel (.xls / .xlsx) content via Apache POI.
 * Cell values are returned as plain Java objects: String, Boolean, Double,
 * java.util.Date, or null for blank/error cells.
 */
public class ExcelUtil {

	/**
	 * Lists the names of all sheets in the given Excel stream.
	 *
	 * @param inputExcel the Excel content (not closed by this method)
	 * @param isXLSX true for .xlsx (XSSF) content, false for .xls (HSSF)
	 * @return the sheet names in workbook order
	 * @throws IOException if the workbook cannot be read
	 */
	public static List<String> getAllSheetNameList(InputStream inputExcel,
			boolean isXLSX) throws IOException {
		Workbook workbook = createWorkbook(inputExcel, isXLSX);
		List<String> sheetNameList = new ArrayList<String>();
		int sheetCount = workbook.getNumberOfSheets();
		for (int i = 0; i < sheetCount; i++) {
			sheetNameList.add(workbook.getSheetName(i));
		}
		return sheetNameList;
	}

	/**
	 * Reads all rows of one sheet, from beginRow through the sheet's last row.
	 *
	 * @param inputExcel the Excel content (not closed by this method)
	 * @param isXLSX true for .xlsx (XSSF) content, false for .xls (HSSF)
	 * @param sheetIndex zero-based sheet index
	 * @param beginCol first column to read (inclusive)
	 * @param endCol last column to read (inclusive)
	 * @param beginRow first row to read (inclusive)
	 * @return Object could be String, Boolean, Double, java.util.Date
	 * @throws IOException if the workbook cannot be read
	 */
	public static List<List<Object>> readRows(InputStream inputExcel,
			boolean isXLSX, int sheetIndex, int beginCol, int endCol,
			int beginRow) throws IOException {
		Workbook workbook = createWorkbook(inputExcel, isXLSX);
		Sheet sheet = workbook.getSheetAt(sheetIndex);
		int endRow = sheet.getLastRowNum();
		return readRows(sheet, beginCol, endCol, beginRow, endRow);
	}

	/**
	 * Reads all rows, auto-detecting the last populated column by scanning the
	 * row at beginRow for the first empty cell (up to maxCol).
	 *
	 * @param inputExcel the Excel content (not closed by this method)
	 * @param isXLSX true for .xlsx (XSSF) content, false for .xls (HSSF)
	 * @param sheetIndex zero-based sheet index
	 * @param beginCol first column to read (inclusive)
	 * @param maxCol upper bound (exclusive) for the column scan
	 * @param beginRow first row to read (inclusive)
	 * @return Object could be String, Boolean, Double, java.util.Date
	 * @throws IOException if the workbook cannot be read
	 */
	public static List<List<Object>> readRowsAutoDetectEndCol(
			InputStream inputExcel, boolean isXLSX, int sheetIndex,
			int beginCol, int maxCol, int beginRow) throws IOException {
		Workbook workbook = createWorkbook(inputExcel, isXLSX);
		Sheet sheet = workbook.getSheetAt(sheetIndex);
		return readRowsAutoDetectEndCol(sheet, beginCol, maxCol, beginRow);
	}

	/**
	 * Reads all rows of the sheet, auto-detecting the last populated column by
	 * scanning the row at beginRow for the first empty cell (up to maxCol).
	 *
	 * @return Object could be String, Boolean, Double, java.util.Date
	 * @throws IOException declared for API compatibility with the stream overload
	 */
	public static List<List<Object>> readRowsAutoDetectEndCol(
			Sheet sheet,
			int beginCol, int maxCol, int beginRow) throws IOException {
		int endRow = sheet.getLastRowNum();
		int endCol = maxCol;
		Cell cell = null;
		Object cellVal = null;
		FormulaEvaluator evaluator = sheet.getWorkbook().getCreationHelper().createFormulaEvaluator();
		// NOTE(review): assumes the row at beginRow exists; Sheet.getRow returns
		// null for a missing row, which would NPE below — confirm with callers.
		Row row = sheet.getRow(beginRow);
		// column scan intentionally starts at 0 (not beginCol), matching the
		// original behavior
		for (int j = 0; j < maxCol; j++) {
			cell = row.getCell(j);
			cellVal = getCellValue(evaluator, cell);
			if (cellVal == null || (cellVal.getClass() == String.class && ((String) cellVal).length() == 0)) {
				endCol = j - 1;
				break;
			}
		}
		return readRows(sheet, beginCol, endCol, beginRow, endRow);
	}

	/**
	 * Reads one row's values up to (excluding) the first empty cell, bounded by
	 * maxCol.
	 *
	 * @return Object could be String, Boolean, Double, java.util.Date
	 */
	public static List<Object> readRowAutoDetectEndCol(Sheet sheet,
			int beginCol, int maxCol, int rowIndex) {
		FormulaEvaluator evaluator = sheet.getWorkbook().getCreationHelper().createFormulaEvaluator();
		Row row = sheet.getRow(rowIndex);
		Cell cell = null;
		Object cellVal = null;
		List<Object> cellValList = new ArrayList<Object>();
		for (int j = 0; j < maxCol; j++) {
			cell = row.getCell(j);
			cellVal = getCellValue(evaluator, cell);
			if (cellVal == null || (cellVal.getClass() == String.class && ((String) cellVal).length() == 0)) {
				break;
			}
			cellValList.add(cellVal);
		}
		return cellValList;
	}

	/**
	 * Opens the given file and builds a workbook from it, always closing the
	 * stream afterwards.
	 *
	 * @param excelFile the Excel file on disk
	 * @param isXLSX true for .xlsx (XSSF) content, false for .xls (HSSF)
	 * @throws IOException if the file cannot be opened or read
	 */
	public static Workbook createWorkbook(File excelFile, boolean isXLSX) throws IOException {
		InputStream inputExcel = null;
		try {
			inputExcel = new FileInputStream(excelFile);
			return createWorkbook(inputExcel, isXLSX);
		} finally {
			// Null-check fixes an NPE in the finally block: if the
			// FileInputStream constructor throws (file not found), the
			// unconditional close() masked the real exception.
			if (inputExcel != null) {
				inputExcel.close();
			}
		}
	}

	/**
	 * Builds a workbook of the appropriate POI flavor from a stream.
	 *
	 * @param inputExcel the Excel content (not closed by this method)
	 * @param isXLSX true for .xlsx (XSSF) content, false for .xls (HSSF)
	 * @throws IOException if the stream cannot be read as a workbook
	 */
	public static Workbook createWorkbook(InputStream inputExcel, boolean isXLSX) throws IOException {
		if (isXLSX) {
			return new XSSFWorkbook(inputExcel);
		} else {
			return new HSSFWorkbook(inputExcel);
		}
	}

	/**
	 * Include endCol and endRow.
	 *
	 * @param sheet the sheet to read
	 * @param beginCol first column (inclusive)
	 * @param endCol last column (inclusive)
	 * @param beginRow first row (inclusive)
	 * @param endRow last row (inclusive)
	 * @return Object could be String, Boolean, Double, java.util.Date
	 */
	public static List<List<Object>> readRows(
			Sheet sheet,
			int beginCol, int endCol, int beginRow, int endRow) {
		List<List<Object>> allRows = new ArrayList<List<Object>>();
		Row row = null;
		FormulaEvaluator evaluator = sheet.getWorkbook().getCreationHelper().createFormulaEvaluator();
		List<Object> cellValList = null;
		for (int i = beginRow; i <= endRow; i++) {
			row = sheet.getRow(i);
			if (row == null) {
				// missing rows are skipped entirely (not emitted as empty lists)
				continue;
			}
			// 1 row
			cellValList = readRow(evaluator, row, beginCol, endCol);
			allRows.add(cellValList);
		}
		return allRows;
	}

	/**
	 * Reads the cells of one row from beginCol to endCol (both inclusive).
	 * Missing cells become null entries so positional alignment is preserved.
	 */
	public static List<Object> readRow(
			FormulaEvaluator evaluator,
			Row row,
			int beginCol, int endCol) {
		List<Object> cellValList = new ArrayList<Object>();
		Cell cell = null;
		Object cellVal = null;
		for (int j = beginCol; j <= endCol; j++) {
			cell = row.getCell(j);
			if (cell == null) {
				cellValList.add(null);
			} else {
				cellVal = getCellValue(evaluator, cell);
				cellValList.add(cellVal);
			}
		}
		return cellValList;
	}

	/**
	 * Evaluates a cell (formulas included) to a plain Java value.
	 *
	 * @param evaluator evaluator obtained from the cell's workbook
	 * @param cell the cell to read; may be null
	 * @return Object could be String, Boolean, Double, java.util.Date; null for
	 *         blank/error cells or a null cell
	 */
	public static Object getCellValue(
			FormulaEvaluator evaluator,
			Cell cell) {
		if (cell == null) {
			return null;
		}
		// NOTE(review): evaluateFormulaCell returns -1 for non-formula cells in
		// some POI versions, which routes plain cells to the toString() fallback
		// below — verify against the POI version in use.
		int cellType = evaluator.evaluateFormulaCell(cell);
		if (cellType == Cell.CELL_TYPE_STRING) {
			return cell.getStringCellValue();
		} else if (cellType == Cell.CELL_TYPE_NUMERIC) {
			if (DateUtil.isCellDateFormatted(cell)) {
				return cell.getDateCellValue();
			} else {
				return cell.getNumericCellValue();
			}
		} else if (cellType == Cell.CELL_TYPE_BOOLEAN) {
			return cell.getBooleanCellValue();
		} else if (cellType == Cell.CELL_TYPE_BLANK) {
			return null;
		} else if (cellType == Cell.CELL_TYPE_ERROR) {
			return null;
		} else {
			return cell.toString();
		}
	}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.filestructurefinder;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure;
import org.supercsv.prefs.CsvPreference;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.xpack.ml.filestructurefinder.DelimitedFileStructureFinder.levenshteinFieldwiseCompareRows;
import static org.elasticsearch.xpack.ml.filestructurefinder.DelimitedFileStructureFinder.levenshteinDistance;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;
public class DelimitedFileStructureFinderTests extends FileStructureTestCase {
    // Factories under test: CSV (comma delimiter, min 2 columns) and
    // TSV (tab delimiter, min 3 columns); both use '"' quoting, no trimming.
    private FileStructureFinderFactory csvFactory = new DelimitedFileStructureFinderFactory(',', '"', 2, false);
    private FileStructureFinderFactory tsvFactory = new DelimitedFileStructureFinderFactory('\t', '"', 3, false);
    /**
     * A complete CSV sample with a header row should be detected as DELIMITED,
     * with the header supplying the column names and "time" chosen as the
     * timestamp field (ISO8601 format).
     */
    public void testCreateConfigsGivenCompleteCsv() throws Exception {
        String sample = "time,message\n" +
            "2018-05-17T13:41:23,hello\n" +
            "2018-05-17T13:41:32,hello again\n";
        assertTrue(csvFactory.canCreateFromSample(explanation, sample));
        String charset = randomFrom(POSSIBLE_CHARSETS);
        Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
        FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
            FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
        FileStructure structure = structureFinder.getStructure();
        assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
        assertEquals(charset, structure.getCharset());
        if (hasByteOrderMarker == null) {
            assertNull(structure.getHasByteOrderMarker());
        } else {
            assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
        }
        // exclude-lines pattern matches the (optionally quoted) header row
        assertEquals("^\"?time\"?,\"?message\"?", structure.getExcludeLinesPattern());
        assertNull(structure.getMultilineStartPattern());
        assertEquals(Character.valueOf(','), structure.getDelimiter());
        assertEquals(Character.valueOf('"'), structure.getQuote());
        assertTrue(structure.getHasHeaderRow());
        assertNull(structure.getShouldTrimFields());
        assertEquals(Arrays.asList("time", "message"), structure.getColumnNames());
        assertNull(structure.getGrokPattern());
        assertEquals("time", structure.getTimestampField());
        assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats());
    }
    /**
     * Same complete CSV sample, but with overridden column names: the override
     * should replace the header-derived names throughout, including the
     * timestamp field name.
     */
    public void testCreateConfigsGivenCompleteCsvAndColumnNamesOverride() throws Exception {
        FileStructureOverrides overrides = FileStructureOverrides.builder().setColumnNames(Arrays.asList("my_time", "my_message")).build();
        String sample = "time,message\n" +
            "2018-05-17T13:41:23,hello\n" +
            "2018-05-17T13:41:32,hello again\n";
        assertTrue(csvFactory.canCreateFromSample(explanation, sample));
        String charset = randomFrom(POSSIBLE_CHARSETS);
        Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
        FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
            FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER);
        FileStructure structure = structureFinder.getStructure();
        assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
        assertEquals(charset, structure.getCharset());
        if (hasByteOrderMarker == null) {
            assertNull(structure.getHasByteOrderMarker());
        } else {
            assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
        }
        // the exclude pattern still reflects the real header row in the file
        assertEquals("^\"?time\"?,\"?message\"?", structure.getExcludeLinesPattern());
        assertNull(structure.getMultilineStartPattern());
        assertEquals(Character.valueOf(','), structure.getDelimiter());
        assertEquals(Character.valueOf('"'), structure.getQuote());
        assertTrue(structure.getHasHeaderRow());
        assertNull(structure.getShouldTrimFields());
        assertEquals(Arrays.asList("my_time", "my_message"), structure.getColumnNames());
        assertNull(structure.getGrokPattern());
        assertEquals("my_time", structure.getTimestampField());
        assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats());
    }
    /**
     * Forcing hasHeaderRow=false on a sample that clearly has a header should
     * yield generated column names (column1, column2), no exclude pattern, and
     * no timestamp field.
     */
    public void testCreateConfigsGivenCompleteCsvAndHasHeaderRowOverride() throws Exception {
        // It's obvious the first row really should be a header row, so by overriding
        // detection with the wrong choice the results will be completely changed
        FileStructureOverrides overrides = FileStructureOverrides.builder().setHasHeaderRow(false).build();
        String sample = "time,message\n" +
            "2018-05-17T13:41:23,hello\n" +
            "2018-05-17T13:41:32,hello again\n";
        assertTrue(csvFactory.canCreateFromSample(explanation, sample));
        String charset = randomFrom(POSSIBLE_CHARSETS);
        Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
        FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
            FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER);
        FileStructure structure = structureFinder.getStructure();
        assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
        assertEquals(charset, structure.getCharset());
        if (hasByteOrderMarker == null) {
            assertNull(structure.getHasByteOrderMarker());
        } else {
            assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
        }
        // with no header row there is nothing to exclude
        assertNull(structure.getExcludeLinesPattern());
        assertNull(structure.getMultilineStartPattern());
        assertEquals(Character.valueOf(','), structure.getDelimiter());
        assertEquals(Character.valueOf('"'), structure.getQuote());
        assertFalse(structure.getHasHeaderRow());
        assertNull(structure.getShouldTrimFields());
        assertEquals(Arrays.asList("column1", "column2"), structure.getColumnNames());
        assertNull(structure.getGrokPattern());
        assertNull(structure.getTimestampField());
        assertNull(structure.getJodaTimestampFormats());
    }
    /**
     * A CSV sample whose last record is truncated mid-quote should still be
     * detected; the embedded newline in the quoted field forces a multiline
     * start pattern to be generated.
     */
    public void testCreateConfigsGivenCsvWithIncompleteLastRecord() throws Exception {
        String sample = "time,message,count\n" +
            "2018-05-17T13:41:23,\"hello\n" +
            "world\",1\n" +
            "2019-01-18T14:46:57,\"hello again\n"; // note that this last record is truncated
        assertTrue(csvFactory.canCreateFromSample(explanation, sample));
        String charset = randomFrom(POSSIBLE_CHARSETS);
        Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
        FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
            FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
        FileStructure structure = structureFinder.getStructure();
        assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
        assertEquals(charset, structure.getCharset());
        if (hasByteOrderMarker == null) {
            assertNull(structure.getHasByteOrderMarker());
        } else {
            assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
        }
        assertEquals("^\"?time\"?,\"?message\"?,\"?count\"?", structure.getExcludeLinesPattern());
        // records can span lines, so each record is anchored by a timestamp prefix
        assertEquals("^\"?\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}", structure.getMultilineStartPattern());
        assertEquals(Character.valueOf(','), structure.getDelimiter());
        assertEquals(Character.valueOf('"'), structure.getQuote());
        assertTrue(structure.getHasHeaderRow());
        assertNull(structure.getShouldTrimFields());
        assertEquals(Arrays.asList("time", "message", "count"), structure.getColumnNames());
        assertNull(structure.getGrokPattern());
        assertEquals("time", structure.getTimestampField());
        assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats());
    }
    /**
     * A CSV sample (NYC taxi data) whose header ends with two empty names: the
     * unnamed trailing columns should get generated names (column18, column19),
     * and the first timestamp-like column should be picked as timestamp field.
     */
    public void testCreateConfigsGivenCsvWithTrailingNulls() throws Exception {
        String sample = "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID," +
            "store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount," +
            "improvement_surcharge,total_amount,,\n" +
            "2,2016-12-31 15:15:01,2016-12-31 15:15:09,1,.00,1,N,264,264,2,1,0,0.5,0,0,0.3,1.8,,\n" +
            "1,2016-12-01 00:00:01,2016-12-01 00:10:22,1,1.60,1,N,163,143,2,9,0.5,0.5,0,0,0.3,10.3,,\n" +
            "1,2016-12-01 00:00:01,2016-12-01 00:11:01,1,1.40,1,N,164,229,1,9,0.5,0.5,2.05,0,0.3,12.35,,\n";
        assertTrue(csvFactory.canCreateFromSample(explanation, sample));
        String charset = randomFrom(POSSIBLE_CHARSETS);
        Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
        FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
            FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
        FileStructure structure = structureFinder.getStructure();
        assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
        assertEquals(charset, structure.getCharset());
        if (hasByteOrderMarker == null) {
            assertNull(structure.getHasByteOrderMarker());
        } else {
            assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
        }
        assertEquals("^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?," +
            "\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?," +
            "\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?,\"?\"?,\"?\"?",
            structure.getExcludeLinesPattern());
        assertNull(structure.getMultilineStartPattern());
        assertEquals(Character.valueOf(','), structure.getDelimiter());
        assertEquals(Character.valueOf('"'), structure.getQuote());
        assertTrue(structure.getHasHeaderRow());
        assertNull(structure.getShouldTrimFields());
        assertEquals(Arrays.asList("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime", "passenger_count", "trip_distance",
            "RatecodeID", "store_and_fwd_flag", "PULocationID", "DOLocationID", "payment_type", "fare_amount", "extra", "mta_tax",
            "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount", "column18", "column19"), structure.getColumnNames());
        assertNull(structure.getGrokPattern());
        assertEquals("tpep_pickup_datetime", structure.getTimestampField());
        assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats());
    }
/**
 * CSV whose data rows end in two empty trailing columns, analysed with an override
 * that forces the timestamp field to be the second timestamp-like column
 * ({@code tpep_dropoff_datetime}) rather than the first one the finder would pick.
 */
public void testCreateConfigsGivenCsvWithTrailingNullsAndOverriddenTimeField() throws Exception {
// Default timestamp field is the first field from the start of each row that contains a
// consistent timestamp format, so if we want the second we need an override
FileStructureOverrides overrides = FileStructureOverrides.builder().setTimestampField("tpep_dropoff_datetime").build();
String sample = "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID," +
"store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount," +
"improvement_surcharge,total_amount,,\n" +
"2,2016-12-31 15:15:01,2016-12-31 15:15:09,1,.00,1,N,264,264,2,1,0,0.5,0,0,0.3,1.8,,\n" +
"1,2016-12-01 00:00:01,2016-12-01 00:10:22,1,1.60,1,N,163,143,2,9,0.5,0.5,0,0,0.3,10.3,,\n" +
"1,2016-12-01 00:00:01,2016-12-01 00:11:01,1,1.40,1,N,164,229,1,9,0.5,0.5,2.05,0,0.3,12.35,,\n";
assertTrue(csvFactory.canCreateFromSample(explanation, sample));
// Charset and BOM flag are randomised; the finder should echo them back unchanged.
String charset = randomFrom(POSSIBLE_CHARSETS);
Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER);
FileStructure structure = structureFinder.getStructure();
assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
assertEquals(charset, structure.getCharset());
if (hasByteOrderMarker == null) {
assertNull(structure.getHasByteOrderMarker());
} else {
assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
}
// Exclude-lines pattern matches the raw header line, including the two empty trailing columns.
assertEquals("^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?," +
"\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?," +
"\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?,\"?\"?,\"?\"?",
structure.getExcludeLinesPattern());
assertNull(structure.getMultilineStartPattern());
assertEquals(Character.valueOf(','), structure.getDelimiter());
assertEquals(Character.valueOf('"'), structure.getQuote());
assertTrue(structure.getHasHeaderRow());
assertNull(structure.getShouldTrimFields());
// The two unnamed trailing columns get synthetic names based on their 1-based position.
assertEquals(Arrays.asList("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime", "passenger_count", "trip_distance",
"RatecodeID", "store_and_fwd_flag", "PULocationID", "DOLocationID", "payment_type", "fare_amount", "extra", "mta_tax",
"tip_amount", "tolls_amount", "improvement_surcharge", "total_amount", "column18", "column19"), structure.getColumnNames());
assertNull(structure.getGrokPattern());
// The override wins over the default (first) timestamp column.
assertEquals("tpep_dropoff_datetime", structure.getTimestampField());
assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats());
}
/**
 * CSV whose data rows have two more (empty) columns than the header line.
 * The finder should use only the header's 17 column names and ignore the
 * trailing nulls in the data rows.
 */
public void testCreateConfigsGivenCsvWithTrailingNullsExceptHeader() throws Exception {
String sample = "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID," +
"store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount," +
"improvement_surcharge,total_amount\n" +
"2,2016-12-31 15:15:01,2016-12-31 15:15:09,1,.00,1,N,264,264,2,1,0,0.5,0,0,0.3,1.8,,\n" +
"1,2016-12-01 00:00:01,2016-12-01 00:10:22,1,1.60,1,N,163,143,2,9,0.5,0.5,0,0,0.3,10.3,,\n" +
"1,2016-12-01 00:00:01,2016-12-01 00:11:01,1,1.40,1,N,164,229,1,9,0.5,0.5,2.05,0,0.3,12.35,,\n";
assertTrue(csvFactory.canCreateFromSample(explanation, sample));
String charset = randomFrom(POSSIBLE_CHARSETS);
Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
FileStructure structure = structureFinder.getStructure();
assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
assertEquals(charset, structure.getCharset());
if (hasByteOrderMarker == null) {
assertNull(structure.getHasByteOrderMarker());
} else {
assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
}
// Unlike the trailing-nulls-in-header case, the pattern ends at total_amount.
assertEquals("^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?," +
"\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?," +
"\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?",
structure.getExcludeLinesPattern());
assertNull(structure.getMultilineStartPattern());
assertEquals(Character.valueOf(','), structure.getDelimiter());
assertEquals(Character.valueOf('"'), structure.getQuote());
assertTrue(structure.getHasHeaderRow());
assertNull(structure.getShouldTrimFields());
// No synthetic column18/column19 here: the header defines exactly 17 columns.
assertEquals(Arrays.asList("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime", "passenger_count", "trip_distance",
"RatecodeID", "store_and_fwd_flag", "PULocationID", "DOLocationID", "payment_type", "fare_amount", "extra", "mta_tax",
"tip_amount", "tolls_amount", "improvement_surcharge", "total_amount"), structure.getColumnNames());
assertNull(structure.getGrokPattern());
assertEquals("tpep_pickup_datetime", structure.getTimestampField());
assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats());
}
/**
 * Same trailing-nulls-except-header sample as above, but with an override that
 * renames every column ({@code my_}-prefixed). The renamed names should flow
 * through to the column list and the detected timestamp field, while the
 * exclude-lines pattern still matches the raw (original) header text.
 */
public void testCreateConfigsGivenCsvWithTrailingNullsExceptHeaderAndColumnNamesOverride() throws Exception {
FileStructureOverrides overrides = FileStructureOverrides.builder()
.setColumnNames(Arrays.asList("my_VendorID", "my_tpep_pickup_datetime", "my_tpep_dropoff_datetime", "my_passenger_count",
"my_trip_distance", "my_RatecodeID", "my_store_and_fwd_flag", "my_PULocationID", "my_DOLocationID", "my_payment_type",
"my_fare_amount", "my_extra", "my_mta_tax", "my_tip_amount", "my_tolls_amount", "my_improvement_surcharge",
"my_total_amount")).build();
String sample = "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID," +
"store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount," +
"improvement_surcharge,total_amount\n" +
"2,2016-12-31 15:15:01,2016-12-31 15:15:09,1,.00,1,N,264,264,2,1,0,0.5,0,0,0.3,1.8,,\n" +
"1,2016-12-01 00:00:01,2016-12-01 00:10:22,1,1.60,1,N,163,143,2,9,0.5,0.5,0,0,0.3,10.3,,\n" +
"1,2016-12-01 00:00:01,2016-12-01 00:11:01,1,1.40,1,N,164,229,1,9,0.5,0.5,2.05,0,0.3,12.35,,\n";
assertTrue(csvFactory.canCreateFromSample(explanation, sample));
String charset = randomFrom(POSSIBLE_CHARSETS);
Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER);
FileStructure structure = structureFinder.getStructure();
assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
assertEquals(charset, structure.getCharset());
if (hasByteOrderMarker == null) {
assertNull(structure.getHasByteOrderMarker());
} else {
assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
}
// The exclude pattern works on raw text, so it uses the original (non-overridden) names.
assertEquals("^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?," +
"\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?," +
"\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?",
structure.getExcludeLinesPattern());
assertNull(structure.getMultilineStartPattern());
assertEquals(Character.valueOf(','), structure.getDelimiter());
assertEquals(Character.valueOf('"'), structure.getQuote());
assertTrue(structure.getHasHeaderRow());
assertNull(structure.getShouldTrimFields());
// The structure reports the overridden names, and the timestamp field uses its overridden name.
assertEquals(Arrays.asList("my_VendorID", "my_tpep_pickup_datetime", "my_tpep_dropoff_datetime", "my_passenger_count",
"my_trip_distance", "my_RatecodeID", "my_store_and_fwd_flag", "my_PULocationID", "my_DOLocationID", "my_payment_type",
"my_fare_amount", "my_extra", "my_mta_tax", "my_tip_amount", "my_tolls_amount", "my_improvement_surcharge", "my_total_amount"),
structure.getColumnNames());
assertNull(structure.getGrokPattern());
assertEquals("my_tpep_pickup_datetime", structure.getTimestampField());
assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats());
}
/**
 * Fully-quoted CSV where the timestamp is the last column and carries
 * microsecond precision; the finder should pick it up with an
 * {@code SSSSSS} fractional-seconds format.
 */
public void testCreateConfigsGivenCsvWithTimeLastColumn() throws Exception {
String sample = "\"pos_id\",\"trip_id\",\"latitude\",\"longitude\",\"altitude\",\"timestamp\"\n" +
"\"1\",\"3\",\"4703.7815\",\"1527.4713\",\"359.9\",\"2017-01-19 16:19:04.742113\"\n" +
"\"2\",\"3\",\"4703.7815\",\"1527.4714\",\"359.9\",\"2017-01-19 16:19:05.741890\"\n";
assertTrue(csvFactory.canCreateFromSample(explanation, sample));
String charset = randomFrom(POSSIBLE_CHARSETS);
Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
FileStructure structure = structureFinder.getStructure();
assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
assertEquals(charset, structure.getCharset());
if (hasByteOrderMarker == null) {
assertNull(structure.getHasByteOrderMarker());
} else {
assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
}
assertEquals("^\"?pos_id\"?,\"?trip_id\"?,\"?latitude\"?,\"?longitude\"?,\"?altitude\"?,\"?timestamp\"?",
structure.getExcludeLinesPattern());
assertNull(structure.getMultilineStartPattern());
assertEquals(Character.valueOf(','), structure.getDelimiter());
assertEquals(Character.valueOf('"'), structure.getQuote());
assertTrue(structure.getHasHeaderRow());
assertNull(structure.getShouldTrimFields());
assertEquals(Arrays.asList("pos_id", "trip_id", "latitude", "longitude", "altitude", "timestamp"), structure.getColumnNames());
assertNull(structure.getGrokPattern());
assertEquals("timestamp", structure.getTimestampField());
// Six fractional digits in the sample => six 'S' characters in the Joda format.
assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss.SSSSSS"), structure.getJodaTimestampFormats());
}
/**
 * Tab-separated sample whose timestamps look like syslog dates
 * ({@code MMM d[d] yyyy HH:mm:ss}) and whose {@code loc} column contains
 * an embedded comma inside quotes, exercising the TSV factory.
 */
public void testCreateConfigsGivenTsvWithSyslogLikeTimestamp() throws Exception {
String sample = "Latitude\tLongitude\tloc\tTimestamp\n" +
"25.78042\t18.441196\t\"25.7804200000,18.4411960000\"\tJun 30 2019 13:21:24\n" +
"25.743484\t18.443047\t\"25.7434840000,18.4430470000\"\tJun 30 2019 06:02:35\n" +
"25.744583\t18.442783\t\"25.7445830000,18.4427830000\"\tJun 30 2019 06:02:35\n" +
"25.754593\t18.431637\t\"25.7545930000,18.4316370000\"\tJul 1 2019 06:02:43\n" +
"25.768574\t18.433483\t\"25.7685740000,18.4334830000\"\tJul 1 2019 06:21:28\n" +
"25.757736\t18.438683\t\"25.7577360000,18.4386830000\"\tJul 1 2019 12:06:08\n" +
"25.76615\t18.436565\t\"25.7661500000,18.4365650000\"\tJul 1 2019 12:06:08\n" +
"25.76896\t18.43586\t\"25.7689600000,18.4358600000\"\tJul 1 2019 12:13:50\n" +
"25.76423\t18.43705\t\"25.7642300000,18.4370500000\"\tJul 1 2019 12:39:10\n";
assertTrue(tsvFactory.canCreateFromSample(explanation, sample));
String charset = randomFrom(POSSIBLE_CHARSETS);
Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
FileStructureFinder structureFinder = tsvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
FileStructure structure = structureFinder.getStructure();
assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
assertEquals(charset, structure.getCharset());
if (hasByteOrderMarker == null) {
assertNull(structure.getHasByteOrderMarker());
} else {
assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
}
assertEquals("^\"?Latitude\"?\\t\"?Longitude\"?\\t\"?loc\"?\\t\"?Timestamp\"?",
structure.getExcludeLinesPattern());
assertNull(structure.getMultilineStartPattern());
assertEquals(Character.valueOf('\t'), structure.getDelimiter());
assertEquals(Character.valueOf('"'), structure.getQuote());
assertTrue(structure.getHasHeaderRow());
assertNull(structure.getShouldTrimFields());
assertEquals(Arrays.asList("Latitude", "Longitude", "loc", "Timestamp"), structure.getColumnNames());
assertNull(structure.getGrokPattern());
assertEquals("Timestamp", structure.getTimestampField());
// NOTE(review): the last two expected formats are identical here, which looks suspicious —
// the original upstream test presumably expected a double-space "MMM  d" variant for
// space-padded single-digit days. Confirm against TimestampFormatFinder's output.
assertEquals(Arrays.asList("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss"),
structure.getJodaTimestampFormats());
}
/**
 * A header field containing a dot ({@code time.iso8601}) must be sanitised to
 * an underscore in the reported column names, while the exclude-lines pattern
 * still matches the raw header text (with the dot regex-escaped).
 */
public void testCreateConfigsGivenDotInFieldName() throws Exception {
String sample = "time.iso8601,message\n" +
"2018-05-17T13:41:23,hello\n" +
"2018-05-17T13:41:32,hello again\n";
assertTrue(csvFactory.canCreateFromSample(explanation, sample));
String charset = randomFrom(POSSIBLE_CHARSETS);
Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
FileStructure structure = structureFinder.getStructure();
assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
assertEquals(charset, structure.getCharset());
if (hasByteOrderMarker == null) {
assertNull(structure.getHasByteOrderMarker());
} else {
assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
}
// The exclude pattern needs to work on the raw text, so reflects the unmodified field names
assertEquals("^\"?time\\.iso8601\"?,\"?message\"?", structure.getExcludeLinesPattern());
assertNull(structure.getMultilineStartPattern());
assertEquals(Character.valueOf(','), structure.getDelimiter());
assertEquals(Character.valueOf('"'), structure.getQuote());
assertTrue(structure.getHasHeaderRow());
assertNull(structure.getShouldTrimFields());
// Dots are replaced with underscores in column names (and hence the timestamp field name).
assertEquals(Arrays.asList("time_iso8601", "message"), structure.getColumnNames());
assertNull(structure.getGrokPattern());
assertEquals("time_iso8601", structure.getTimestampField());
assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats());
}
/**
 * When the first line of the sample is a genuine column header row,
 * {@code findHeaderFromSample} should report it and return the column names.
 */
public void testFindHeaderFromSampleGivenHeaderInSample() throws IOException {
String sampleWithHeader = "time,airline,responsetime,sourcetype\n"
+ "2014-06-23 00:00:00Z,AAL,132.2046,farequote\n"
+ "2014-06-23 00:00:00Z,JZA,990.4628,farequote\n"
+ "2014-06-23 00:00:01Z,JBU,877.5927,farequote\n"
+ "2014-06-23 00:00:01Z,KLM,1355.4812,farequote\n";
Tuple<Boolean, String[]> detected = DelimitedFileStructureFinder.findHeaderFromSample(explanation,
DelimitedFileStructureFinder.readRows(sampleWithHeader, CsvPreference.EXCEL_PREFERENCE, NOOP_TIMEOUT_CHECKER).v1(),
FileStructureOverrides.EMPTY_OVERRIDES);
// v1 flags "header present"; v2 carries the header's field names.
assertTrue(detected.v1());
assertThat(detected.v2(), arrayContaining("time", "airline", "responsetime", "sourcetype"));
}
/**
 * When the sample contains only data rows, {@code findHeaderFromSample} should
 * report that no header was found and return empty placeholder names.
 */
public void testFindHeaderFromSampleGivenHeaderNotInSample() throws IOException {
String sampleWithoutHeader = "2014-06-23 00:00:00Z,AAL,132.2046,farequote\n"
+ "2014-06-23 00:00:00Z,JZA,990.4628,farequote\n"
+ "2014-06-23 00:00:01Z,JBU,877.5927,farequote\n"
+ "2014-06-23 00:00:01Z,KLM,1355.4812,farequote\n";
Tuple<Boolean, String[]> detected = DelimitedFileStructureFinder.findHeaderFromSample(explanation,
DelimitedFileStructureFinder.readRows(sampleWithoutHeader, CsvPreference.EXCEL_PREFERENCE, NOOP_TIMEOUT_CHECKER).v1(),
FileStructureOverrides.EMPTY_OVERRIDES);
// No header detected: the flag is false and each column name is an empty string.
assertFalse(detected.v1());
assertThat(detected.v2(), arrayContaining("", "", "", ""));
}
/**
 * Exhaustive pairwise check of {@code levenshteinDistance} over a small fixed
 * vocabulary including the empty string. The expected matrix is symmetric,
 * has a zero diagonal, and distance to "" equals the other word's length.
 */
public void testLevenshteinDistance() {
String[] words = { "cat", "dog", "mouse", "" };
int[][] expected = {
{ 0, 3, 5, 3 },
{ 3, 0, 4, 3 },
{ 5, 4, 0, 5 },
{ 3, 3, 5, 0 },
};
for (int i = 0; i < words.length; ++i) {
for (int j = 0; j < words.length; ++j) {
assertEquals(expected[i][j], levenshteinDistance(words[i], words[j]));
}
}
}
/**
 * Verifies {@code makeShortFieldMask}: a bit is set for a column only when that
 * column's value is shorter than the given limit in <em>every</em> sampled row.
 * Column max lengths here are 50, 20 and 80 respectively.
 */
public void testMakeShortFieldMask() {
List<List<String>> rows = new ArrayList<>();
rows.add(Arrays.asList(randomAlphaOfLength(5), randomAlphaOfLength(20), randomAlphaOfLength(5)));
rows.add(Arrays.asList(randomAlphaOfLength(50), randomAlphaOfLength(5), randomAlphaOfLength(5)));
rows.add(Arrays.asList(randomAlphaOfLength(5), randomAlphaOfLength(5), randomAlphaOfLength(5)));
rows.add(Arrays.asList(randomAlphaOfLength(5), randomAlphaOfLength(5), randomAlphaOfLength(80)));
// Limit 110: all columns are always shorter => all bits set.
BitSet shortFieldMask = DelimitedFileStructureFinder.makeShortFieldMask(rows, 110);
assertThat(shortFieldMask, equalTo(TimestampFormatFinder.stringToNumberPosBitSet("111")));
// Limit 80: the 80-char value in column 3 disqualifies it.
shortFieldMask = DelimitedFileStructureFinder.makeShortFieldMask(rows, 80);
assertThat(shortFieldMask, equalTo(TimestampFormatFinder.stringToNumberPosBitSet("11 ")));
// Limit 50: the 50-char value now also disqualifies column 1.
shortFieldMask = DelimitedFileStructureFinder.makeShortFieldMask(rows, 50);
assertThat(shortFieldMask, equalTo(TimestampFormatFinder.stringToNumberPosBitSet(" 1 ")));
// Limit 20: the 20-char value disqualifies column 2 too => empty mask.
shortFieldMask = DelimitedFileStructureFinder.makeShortFieldMask(rows, 20);
assertThat(shortFieldMask, equalTo(TimestampFormatFinder.stringToNumberPosBitSet("   ")));
}
/**
 * {@code levenshteinFieldwiseCompareRows} sums the per-column Levenshtein
 * distances between two rows of equal length.
 */
public void testLevenshteinCompareRows() {
List<String> catDog = Arrays.asList("cat", "dog");
List<String> catDogMouse = Arrays.asList("cat", "dog", "mouse");
assertEquals(0, levenshteinFieldwiseCompareRows(catDog, Arrays.asList("cat", "dog")));
assertEquals(3, levenshteinFieldwiseCompareRows(catDog, Arrays.asList("cat", "cat")));
assertEquals(6, levenshteinFieldwiseCompareRows(catDog, Arrays.asList("dog", "cat")));
assertEquals(8, levenshteinFieldwiseCompareRows(catDog, Arrays.asList("mouse", "cat")));
assertEquals(10, levenshteinFieldwiseCompareRows(catDogMouse, Arrays.asList("mouse", "dog", "cat")));
assertEquals(9, levenshteinFieldwiseCompareRows(catDogMouse, Arrays.asList("mouse", "mouse", "mouse")));
assertEquals(12, levenshteinFieldwiseCompareRows(catDogMouse, Arrays.asList("mouse", "cat", "dog")));
}
/**
 * Masked variant of the fieldwise comparison: only columns whose bit is set in
 * the mask contribute to the summed distance ('1' = compare, ' ' = skip).
 */
public void testLevenshteinCompareRowsWithMask() {
// Identical rows: distance is 0 no matter which columns are compared.
assertEquals(0, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("cat", "dog"),
TimestampFormatFinder.stringToNumberPosBitSet(randomFrom("  ", "1 ", " 1", "11"))));
// The only differing column (second) is masked out in both mask options.
assertEquals(0, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("cat", "cat"),
TimestampFormatFinder.stringToNumberPosBitSet(randomFrom("  ", "1 "))));
// Either single-column mask picks exactly one cat<->dog comparison (distance 3).
assertEquals(3, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("dog", "cat"),
TimestampFormatFinder.stringToNumberPosBitSet(randomFrom(" 1", "1 "))));
assertEquals(3, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("mouse", "cat"),
TimestampFormatFinder.stringToNumberPosBitSet(" 1")));
// Three-column rows with the first column masked out.
assertEquals(5, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog", "mouse"), Arrays.asList("mouse", "dog", "cat"),
TimestampFormatFinder.stringToNumberPosBitSet(" 11")));
assertEquals(4, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog", "mouse"), Arrays.asList("mouse", "mouse", "mouse"),
TimestampFormatFinder.stringToNumberPosBitSet(" 11")));
assertEquals(7, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog", "mouse"), Arrays.asList("mouse", "cat", "dog"),
TimestampFormatFinder.stringToNumberPosBitSet(" 11")));
}
/**
 * {@code lineHasUnescapedQuote} should flag only quotes that appear mid-field
 * (i.e. not at a field boundary and not doubled per the CSV escaping rules).
 * Checked for both the comma (Excel) and tab CSV preferences.
 */
public void testLineHasUnescapedQuote() {
// Comma-delimited: quoted fields, embedded delimiters and doubled quotes are all legal.
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,b,c", CsvPreference.EXCEL_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\",b,c", CsvPreference.EXCEL_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a,b\",c", CsvPreference.EXCEL_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a,b,c\"", CsvPreference.EXCEL_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,\"b\",c", CsvPreference.EXCEL_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,b,\"c\"", CsvPreference.EXCEL_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,\"b\"\"\",c", CsvPreference.EXCEL_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,b,\"c\"\"\"", CsvPreference.EXCEL_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"\"\"a\",b,c", CsvPreference.EXCEL_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\"\"\",b,c", CsvPreference.EXCEL_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a,\"\"b\",c", CsvPreference.EXCEL_PREFERENCE));
// A quote in the middle of an unquoted field is unescaped.
assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("between\"words,b,c", CsvPreference.EXCEL_PREFERENCE));
assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("x and \"y\",b,c", CsvPreference.EXCEL_PREFERENCE));
// Tab-delimited: the same rules apply with '\t' as the delimiter.
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\tb\tc", CsvPreference.TAB_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\tb\"\tc", CsvPreference.TAB_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\tb\tc\"", CsvPreference.TAB_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\t\"b\"\tc", CsvPreference.TAB_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"", CsvPreference.TAB_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\t\"b\"\"\"\tc", CsvPreference.TAB_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"\"\"", CsvPreference.TAB_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"\"\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\"\"\"\tb\tc", CsvPreference.TAB_PREFERENCE));
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\t\"\"b\"\tc", CsvPreference.TAB_PREFERENCE));
assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("between\"words\tb\tc", CsvPreference.TAB_PREFERENCE));
assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("x and \"y\"\tb\tc", CsvPreference.TAB_PREFERENCE));
}
/**
 * {@code findDuplicateNonEmptyValues} returns a value that occurs more than
 * once in the row, ignoring empty strings, or {@code null} when there is none.
 */
public void testRowContainsDuplicateNonEmptyValues() {
// Rows with no repeated non-empty value (including repeated empties) report nothing.
List<List<String>> cleanRows = Arrays.asList(
Collections.singletonList("a"),
Collections.singletonList(""),
Arrays.asList("a", "b", "c"),
Arrays.asList("a", "", ""),
Arrays.asList("", "a", ""));
for (List<String> row : cleanRows) {
assertNull(DelimitedFileStructureFinder.findDuplicateNonEmptyValues(row));
}
// Rows containing a repeated non-empty value: that value is returned.
assertEquals("a", DelimitedFileStructureFinder.findDuplicateNonEmptyValues(Arrays.asList("a", "b", "a")));
assertEquals("b", DelimitedFileStructureFinder.findDuplicateNonEmptyValues(Arrays.asList("a", "b", "b")));
}
/**
 * {@code makeCsvProcessorSettings} builds an ingest csv processor config.
 * Defaults (comma separator, double quote, no trim) must be omitted from the
 * settings map; non-default choices must be present as strings/booleans.
 */
public void testMakeCsvProcessorSettings() {
String field = randomAlphaOfLength(10);
List<String> targetFields = Arrays.asList(generateRandomStringArray(10, field.length() - 1, false, false));
char separator = randomFrom(',', ';', '\t', '|');
char quote = randomFrom('"', '\'');
boolean trim = randomBoolean();
Map<String, Object> settings = DelimitedFileStructureFinder.makeCsvProcessorSettings(field, targetFields, separator, quote, trim);
assertThat(settings.get("field"), equalTo(field));
assertThat(settings.get("target_fields"), equalTo(targetFields));
assertThat(settings.get("ignore_missing"), equalTo(false));
// Comma is the processor default, so it must not be written explicitly.
if (separator == ',') {
assertThat(settings, not(hasKey("separator")));
} else {
assertThat(settings.get("separator"), equalTo(String.valueOf(separator)));
}
// Double quote is the processor default, so it must not be written explicitly.
if (quote == '"') {
assertThat(settings, not(hasKey("quote")));
} else {
assertThat(settings.get("quote"), equalTo(String.valueOf(quote)));
}
// Trim defaults to false, so it only appears when enabled.
if (trim) {
assertThat(settings.get("trim"), equalTo(true));
} else {
assertThat(settings, not(hasKey("trim")));
}
}
/**
 * When the maximum lines-per-message seen in the sample is 1, no multi-line
 * start pattern is needed and {@code makeMultilineStartPattern} returns null,
 * regardless of whether a timestamp column exists.
 */
public void testMultilineStartPatternGivenNoMultiline() {
List<String> columnNames = Stream.generate(() -> randomAlphaOfLengthBetween(5, 10)).limit(10).collect(Collectors.toList());
String timeFieldName;
TimestampFormatFinder timeFieldFormat;
// Randomly include or omit a timestamp column — the result must be null either way.
if (randomBoolean()) {
timeFieldName = columnNames.get(randomIntBetween(0, columnNames.size() - 1));
timeFieldFormat = new TimestampFormatFinder(explanation, true, true, true, NOOP_TIMEOUT_CHECKER);
timeFieldFormat.addSample("2020-01-30T15:05:09");
} else {
timeFieldName = null;
timeFieldFormat = null;
}
Map<String, Object> mappings = new TreeMap<>();
for (String columnName : columnNames) {
if (columnName.equals(timeFieldName)) {
mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "date"));
} else {
mappings.put(columnName,
Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING,
randomFrom("boolean", "long", "double", "text", "keyword")));
}
}
// maxLinesPerMessage == 1 => no pattern needed.
assertNull(DelimitedFileStructureFinder.makeMultilineStartPattern(explanation, columnNames, 1, ",", "\"", mappings, timeFieldName,
timeFieldFormat));
assertThat(explanation, contains("Not creating a multi-line start pattern as no sampled message spanned multiple lines"));
}
/**
 * With multi-line messages and a timestamp column available, the start pattern
 * should skip the preceding columns with {@code .*?,} and then anchor on the
 * timestamp's "simple" regex (optionally quoted).
 */
public void testMultilineStartPatternFromTimeField() {
List<String> columnNames = Stream.generate(() -> randomAlphaOfLengthBetween(5, 10)).limit(10).collect(Collectors.toList());
// Anywhere except the last column (the pattern needs a following delimiter).
int timeFieldColumnIndex = randomIntBetween(0, columnNames.size() - 2);
String timeFieldName = columnNames.get(timeFieldColumnIndex);
TimestampFormatFinder timeFieldFormat = new TimestampFormatFinder(explanation, true, true, true, NOOP_TIMEOUT_CHECKER);
timeFieldFormat.addSample("2020-01-30T15:05:09");
Map<String, Object> mappings = new TreeMap<>();
for (String columnName : columnNames) {
if (columnName.equals(timeFieldName)) {
mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "date"));
} else {
mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, randomFrom("text", "keyword")));
}
}
// One ".*?," skip per column before the timestamp, then the timestamp's prefix regex.
String expected = "^" + Stream.generate(() -> ".*?,").limit(timeFieldColumnIndex).collect(Collectors.joining()) +
"\"?\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}";
assertEquals(expected, DelimitedFileStructureFinder.makeMultilineStartPattern(explanation, columnNames, 2, ",", "\"", mappings,
timeFieldName, timeFieldFormat));
assertThat(explanation, contains("Created a multi-line start pattern based on timestamp column [" + timeFieldName + "]"));
}
/**
 * Without a timestamp column, the multi-line start pattern falls back to the
 * first column with a structured mapping type (boolean/long/double), matching
 * that type's value pattern either bare or quoted.
 */
public void testMultilineStartPatternFromMappings() {
// Pick one structured type and its corresponding value regex in lockstep.
int randomIndex = randomIntBetween(0, 2);
String type = new String[]{ "boolean", "long", "double" }[randomIndex];
String expectedTypePattern =
new String[]{ "(?:true|false)", "[+-]?\\d+", "[+-]?(?:\\d+(?:\\.\\d+)?|\\.\\d+)(?:[eE][+-]?\\d+)?" }[randomIndex];
List<String> columnNames = Stream.generate(() -> randomAlphaOfLengthBetween(5, 10)).limit(10).collect(Collectors.toList());
int chosenFieldColumnIndex = randomIntBetween(0, columnNames.size() - 2);
String chosenField = columnNames.get(chosenFieldColumnIndex);
Map<String, Object> mappings = new TreeMap<>();
for (String columnName : columnNames) {
if (columnName.equals(chosenField)) {
mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, type));
} else {
mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, randomFrom("text", "keyword")));
}
}
// Skip earlier columns, then match the typed value bare or wrapped in quotes, plus delimiter.
String expected = "^" + Stream.generate(() -> ".*?,").limit(chosenFieldColumnIndex).collect(Collectors.joining()) +
"(?:" + expectedTypePattern + "|\"" + expectedTypePattern + "\"),";
assertEquals(expected, DelimitedFileStructureFinder.makeMultilineStartPattern(explanation, columnNames, 2, ",", "\"", mappings,
null, null));
assertThat(explanation, contains("Created a multi-line start pattern based on [" + type + "] column [" + chosenField + "]"));
}
/**
 * When every column maps to an unstructured string type (text/keyword) and no
 * timestamp column exists, there is nothing distinctive to anchor a multi-line
 * start pattern on, so {@code makeMultilineStartPattern} must return null.
 */
public void testMultilineStartPatternDeterminationTooHard() {
List<String> columnNames = new ArrayList<>();
Map<String, Object> mappings = new TreeMap<>();
for (int i = 0; i < 10; ++i) {
String columnName = randomAlphaOfLengthBetween(5, 10);
columnNames.add(columnName);
mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, randomFrom("text", "keyword")));
}
assertNull(DelimitedFileStructureFinder.makeMultilineStartPattern(explanation, columnNames, 2, ",", "\"", mappings, null, null));
assertThat(explanation, contains("Failed to create a suitable multi-line start pattern"));
}
/**
 * Builds a random but valid csv processor settings map: random field name,
 * random (shorter) target field names, a random separator/quote character and
 * a random trim flag.
 */
static Map<String, Object> randomCsvProcessorSettings() {
String field = randomAlphaOfLength(10);
List<String> targetFields = Arrays.asList(generateRandomStringArray(10, field.length() - 1, false, false));
char separator = randomFrom(',', ';', '\t', '|');
char quote = randomFrom('"', '\'');
boolean trim = randomBoolean();
return DelimitedFileStructureFinder.makeCsvProcessorSettings(field, targetFields, separator, quote, trim);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeShortValue;
import static org.elasticsearch.index.mapper.MapperBuilders.shortField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;
/**
*
*/
/**
 * Field mapper for values of type {@code short}.
 *
 * Shorts are indexed using Lucene's integer trie encoding (the field type is
 * built on {@link NumericType#INT}), so precision steps, range queries, fuzzy
 * queries and term statistics all go through the int-based Lucene APIs.
 */
public class ShortFieldMapper extends NumberFieldMapper {
    public static final String CONTENT_TYPE = "short";
    public static final int DEFAULT_PRECISION_STEP = 8;

    public static class Defaults extends NumberFieldMapper.Defaults {
        public static final MappedFieldType FIELD_TYPE = new ShortFieldType();
        static {
            // Frozen so the shared default instance can never be mutated.
            FIELD_TYPE.freeze();
        }
    }

    public static class Builder extends NumberFieldMapper.Builder<Builder, ShortFieldMapper> {
        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE, DEFAULT_PRECISION_STEP);
            builder = this;
        }

        @Override
        public ShortFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            ShortFieldMapper fieldMapper = new ShortFieldMapper(name, fieldType, docValues,
                    ignoreMalformed(context), coerce(context), fieldDataSettings,
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }

        @Override
        protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
            // Integer.MAX_VALUE is the sentinel for "no precision step".
            String name = precisionStep == Integer.MAX_VALUE ? "_short/max" : ("_short/" + precisionStep);
            return new NamedAnalyzer(name, new NumericIntegerAnalyzer(precisionStep));
        }

        @Override
        protected int maxPrecisionStep() {
            // Shorts are stored int-encoded, hence the 32-bit cap.
            return 32;
        }
    }

    public static class TypeParser implements Mapper.TypeParser {
        /**
         * Parses the mapping definition for a "short" field. Handles the
         * field-specific {@code null_value} property here and delegates all
         * shared numeric properties to {@code parseNumberField}.
         */
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            ShortFieldMapper.Builder builder = shortField(name);
            parseNumberField(builder, name, node, parserContext);
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = Strings.toUnderscoreCase(entry.getKey());
                Object propNode = entry.getValue();
                if (propName.equals("null_value")) {
                    if (propNode == null) {
                        throw new MapperParsingException("Property [null_value] cannot be null.");
                    }
                    builder.nullValue(nodeShortValue(propNode));
                    // remove so the generic parser does not flag it as unknown
                    iterator.remove();
                }
            }
            return builder;
        }
    }

    static final class ShortFieldType extends NumberFieldType {
        public ShortFieldType() {
            // shorts share the int trie encoding
            super(NumericType.INT);
        }

        protected ShortFieldType(ShortFieldType ref) {
            super(ref);
        }

        @Override
        public NumberFieldType clone() {
            return new ShortFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public Short nullValue() {
            return (Short)super.nullValue();
        }

        /**
         * Converts an arbitrary value (Number, encoded BytesRef, or string
         * representation) to a Short; returns null for null input.
         */
        @Override
        public Short value(Object value) {
            if (value == null) {
                return null;
            }
            if (value instanceof Number) {
                return ((Number) value).shortValue();
            }
            if (value instanceof BytesRef) {
                return Numbers.bytesToShort((BytesRef) value);
            }
            return Short.parseShort(value.toString());
        }

        @Override
        public BytesRef indexedValueForSearch(Object value) {
            BytesRefBuilder bytesRef = new BytesRefBuilder();
            NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // shift 0 because of exact match
            return bytesRef.get();
        }

        @Override
        public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
            // int-range query because shorts are indexed with the int encoding
            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
                lowerTerm == null ? null : (int)parseValue(lowerTerm),
                upperTerm == null ? null : (int)parseValue(upperTerm),
                includeLower, includeUpper);
        }

        @Override
        public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            // numeric "fuzziness" is an inclusive +/- range around the value
            short iValue = Short.parseShort(value);
            short iSim = fuzziness.asShort();
            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
                iValue - iSim,
                iValue + iSim,
                true, true);
        }

        @Override
        public FieldStats stats(Terms terms, int maxDoc) throws IOException {
            long minValue = NumericUtils.getMinInt(terms);
            long maxValue = NumericUtils.getMaxInt(terms);
            return new FieldStats.Long(
                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
            );
        }
    }

    protected ShortFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues,
                               Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                               @Nullable Settings fieldDataSettings,
                               Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, ignoreMalformed, coerce,
                fieldDataSettings, indexSettings, multiFields, copyTo);
    }

    @Override
    public ShortFieldType fieldType() {
        return (ShortFieldType) super.fieldType();
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("short");
    }

    // Lenient conversion used for query terms; unlike ShortFieldType#value
    // this never returns null and decodes BytesRef via its UTF-8 string form.
    private static short parseValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).shortValue();
        }
        if (value instanceof BytesRef) {
            return Short.parseShort(((BytesRef) value).utf8ToString());
        }
        return Short.parseShort(value.toString());
    }

    @Override
    protected boolean customBoost() {
        // boost may be supplied per-document via the object form parsed below
        return true;
    }

    /**
     * Extracts the short value for the current document — from an externally
     * set value, a plain token, or the {value, boost} object form — and adds
     * the indexed/stored field plus doc values as configured.
     */
    @Override
    protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
        short value;
        float boost = fieldType().boost();
        if (context.externalValueSet()) {
            // value supplied programmatically rather than parsed from the source
            Object externalValue = context.externalValue();
            if (externalValue == null) {
                if (fieldType().nullValue() == null) {
                    return;
                }
                value = fieldType().nullValue();
            } else if (externalValue instanceof String) {
                String sExternalValue = (String) externalValue;
                if (sExternalValue.length() == 0) {
                    // empty string is treated the same as null
                    if (fieldType().nullValue() == null) {
                        return;
                    }
                    value = fieldType().nullValue();
                } else {
                    value = Short.parseShort(sExternalValue);
                }
            } else {
                value = ((Number) externalValue).shortValue();
            }
            if (context.includeInAll(includeInAll, this)) {
                context.allEntries().addText(fieldType().names().fullName(), Short.toString(value), boost);
            }
        } else {
            XContentParser parser = context.parser();
            if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
                    (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
                // explicit null / empty string: substitute the configured null_value, if any
                if (fieldType().nullValue() == null) {
                    return;
                }
                value = fieldType().nullValue();
                if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
                    context.allEntries().addText(fieldType().names().fullName(), fieldType().nullValueAsString(), boost);
                }
            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                // object form: { "value": <short>, "boost": <float> }
                XContentParser.Token token;
                String currentFieldName = null;
                Short objValue = fieldType().nullValue();
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else {
                        if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
                            if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                                objValue = parser.shortValue(coerce.value());
                            }
                        } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) {
                            boost = parser.floatValue();
                        } else {
                            throw new IllegalArgumentException("unknown property [" + currentFieldName + "]");
                        }
                    }
                }
                if (objValue == null) {
                    // no value
                    return;
                }
                value = objValue;
            } else {
                // plain numeric/string token
                value = parser.shortValue(coerce.value());
                if (context.includeInAll(includeInAll, this)) {
                    context.allEntries().addText(fieldType().names().fullName(), parser.text(), boost);
                }
            }
        }
        if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
            CustomShortNumericField field = new CustomShortNumericField(value, fieldType());
            field.setBoost(boost);
            fields.add(field);
        }
        if (fieldType().hasDocValues()) {
            addDocValue(context, fields, value);
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    /**
     * Serializes the mapper-specific settings; non-default values are always
     * written, defaults only when {@code includeDefaults} is set.
     */
    @Override
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);
        if (includeDefaults || fieldType().numericPrecisionStep() != DEFAULT_PRECISION_STEP) {
            builder.field("precision_step", fieldType().numericPrecisionStep());
        }
        if (includeDefaults || fieldType().nullValue() != null) {
            builder.field("null_value", fieldType().nullValue());
        }
        if (includeInAll != null) {
            builder.field("include_in_all", includeInAll);
        } else if (includeDefaults) {
            builder.field("include_in_all", false);
        }
    }

    /** Lucene field that emits the short through the cached int token stream. */
    public static class CustomShortNumericField extends CustomNumericField {
        private final short number;

        public CustomShortNumericField(short number, NumberFieldType fieldType) {
            super(number, fieldType);
            this.number = number;
        }

        @Override
        public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) throws IOException {
            if (fieldType().indexOptions() != IndexOptions.NONE) {
                // int stream: shorts are indexed with the int trie encoding
                return getCachedStream().setIntValue(number);
            }
            return null;
        }

        @Override
        public String numericAsString() {
            return Short.toString(number);
        }
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.mvel.compiler.builder;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;
import org.drools.compiler.compiler.io.memory.MemoryFileSystem;
import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.drools.compiler.kie.builder.impl.KieBuilderImpl;
import org.drools.compiler.kie.builder.impl.KieFileSystemImpl;
import org.drools.compiler.kproject.models.KieBaseModelImpl;
import org.drools.core.util.FileManager;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.kie.api.KieBase;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.KieModule;
import org.kie.api.builder.KieRepository;
import org.kie.api.builder.Message.Level;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.model.KieBaseModel;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.conf.EqualityBehaviorOption;
import org.kie.api.conf.EventProcessingOption;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.util.maven.support.PomModel;
import org.kie.util.maven.support.ReleaseIdImpl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Integration tests for {@link KieBuilder}: building KieModules from in-memory
 * and on-disc file systems, module dependencies, and the various combinations
 * of missing or invalid pom.xml / kmodule.xml descriptors.
 */
public class KieBuilderTest {
    protected FileManager fileManager;

    @Before
    public void setUp() throws Exception {
        this.fileManager = new FileManager();
        this.fileManager.setUp();
    }

    @After
    public void tearDown() throws Exception {
        this.fileManager.tearDown();
    }

    @Test
    public void testInMemory() throws ClassNotFoundException, InterruptedException, IOException {
        String namespace = "org.kie.test";
        ReleaseId releaseId = KieServices.Factory.get().newReleaseId( namespace, "memory", "1.0" );
        KieModuleModel kProj = createKieProject( namespace );
        KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
        generateAll( kfs, namespace, releaseId, kProj );
        createAndTestKieContainer( releaseId, createKieBuilder( kfs ), namespace );
    }

    @Test
    public void testOnDisc() throws ClassNotFoundException, InterruptedException, IOException {
        String namespace = "org.kie.test";
        KieModuleModel kProj = createKieProject(namespace);
        ReleaseId releaseId = KieServices.Factory.get().newReleaseId(namespace, "memory", "1.0");
        KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
        generateAll(kfs, namespace, releaseId, kProj);
        // materialize the in-memory file system on disc before building
        MemoryFileSystem mfs = (( KieFileSystemImpl )kfs).asMemoryFileSystem();
        File file = fileManager.getRootDirectory() ;
        mfs.writeAsFs( file );
        createAndTestKieContainer(releaseId, createKieBuilder(kfs), namespace);
    }

    @Test
    public void testKieModuleDependencies() throws ClassNotFoundException, InterruptedException, IOException {
        KieServices ks = KieServices.Factory.get();

        // first module
        String namespace1 = "org.kie.test1";
        ReleaseId releaseId1 = KieServices.Factory.get().newReleaseId(namespace1, "memory", "1.0");
        KieModuleModel kProj1 = createKieProject(namespace1);
        KieFileSystem kfs1 = KieServices.Factory.get().newKieFileSystem();
        generateAll(kfs1, namespace1, releaseId1, kProj1);
        KieBuilder kb1 = createKieBuilder(kfs1);
        kb1.buildAll();
        if ( kb1.getResults().hasMessages(Level.ERROR) ) {
            fail("Unable to build KieJar\n" + kb1.getResults( ).toString() );
        }
        KieRepository kr = ks.getRepository();
        KieModule kModule1 = kr.getKieModule(releaseId1);
        assertNotNull( kModule1 );

        // second module includes the first module's kbase
        String namespace2 = "org.kie.test2";
        ReleaseId releaseId2 = KieServices.Factory.get().newReleaseId(namespace2, "memory", "1.0");
        KieModuleModel kProj2 = createKieProject(namespace2);
        KieBaseModelImpl kieBase2 = ( KieBaseModelImpl ) kProj2.getKieBaseModels().get( namespace2 );
        kieBase2.addInclude( namespace1 );
        KieFileSystem kfs2 = KieServices.Factory.get().newKieFileSystem();
        generateAll(kfs2, namespace2, releaseId2, kProj2);
        KieBuilder kb2 = createKieBuilder(kfs2);
        kb2.setDependencies( kModule1 );
        kb2.buildAll();
        if ( kb2.getResults().hasMessages(Level.ERROR) ) {
            fail("Unable to build KieJar\n" + kb2.getResults( ).toString() );
        }
        KieModule kModule2 = kr.getKieModule(releaseId2);
        assertNotNull( kModule2 );

        // rules from both modules must fire; firing order is not guaranteed
        KieContainer kContainer = ks.newKieContainer(releaseId2);
        KieBase kBase = kContainer.getKieBase( namespace2 );
        KieSession kSession = kBase.newKieSession();
        List<Object> list = new ArrayList<>();
        kSession.setGlobal( "list", list );
        kSession.fireAllRules();
        assertEquals( 2, list.size() );
        if ("org.kie.test1.Message".equals(list.get(0).getClass().getName())) {
            assertEquals( "org.kie.test2.Message", list.get(1).getClass().getName() );
        } else {
            assertEquals( "org.kie.test2.Message", list.get(0).getClass().getName() );
            assertEquals( "org.kie.test1.Message", list.get(1).getClass().getName() );
        }
    }

    @Test
    public void testNotExistingInclude() throws Exception {
        String drl = "package org.drools.mvel.compiler.integrationtests\n" +
                     "declare CancelFact\n" +
                     " cancel : boolean = true\n" +
                     "end\n" +
                     "rule R1 when\n" +
                     " $m : CancelFact( cancel == true )\n" +
                     "then\n" +
                     "end\n";
        KieServices ks = KieServices.Factory.get();
        KieFileSystem kfs = ks.newKieFileSystem().write( "src/main/resources/r1.drl", drl );
        KieModuleModel module = ks.newKieModuleModel();
        final String defaultBaseName = "defaultKBase";
        KieBaseModel defaultBase = module.newKieBaseModel(defaultBaseName)
                                         .addInclude( "notExistingKB1" )
                                         .addInclude( "notExistingKB2" );
        defaultBase.setDefault(true);
        defaultBase.addPackage( "*" );
        defaultBase.newKieSessionModel("defaultKSession").setDefault( true );
        kfs.writeKModuleXML( module.toXML() );
        KieBuilder kb = ks.newKieBuilder( kfs ).buildAll();
        // one message per unresolvable include
        assertEquals( 2, kb.getResults().getMessages().size() );
    }

    @Test
    public void testNoPomXml() throws ClassNotFoundException, InterruptedException, IOException {
        String namespace = "org.kie.test";
        KieModuleModel kProj = createKieProject( namespace );
        // without a pom.xml the module is registered under the default ReleaseId
        ReleaseId releaseId = KieServices.Factory.get().getRepository().getDefaultReleaseId();
        KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
        generateKProjectXML( kfs, namespace, kProj );
        generateMessageClass( kfs, namespace );
        generateRule( kfs, namespace );
        createAndTestKieContainer( releaseId, createKieBuilder( kfs ), namespace );
    }

    @Test
    public void testNoProjectXml() throws ClassNotFoundException, InterruptedException, IOException {
        String namespace = "org.kie.test";
        ReleaseId releaseId = KieServices.Factory.get().newReleaseId( namespace, "memory", "1.0" );
        KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
        generatePomXML( kfs, releaseId );
        generateMessageClass( kfs, namespace );
        generateRule( kfs, namespace );
        // no kmodule.xml -> use the default KieBase (null kbase name)
        createAndTestKieContainer(releaseId, createKieBuilder(kfs), null );
    }

    @Test
    public void testEmptyProjectXml() throws ClassNotFoundException, InterruptedException, IOException {
        String namespace = "org.kie.test";
        KieServices ks = KieServices.Factory.get();
        KieModuleModel kProj = ks.newKieModuleModel();
        ReleaseId releaseId = KieServices.Factory.get().newReleaseId(namespace, "memory", "1.0");
        KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
        generateKProjectXML( kfs, namespace, kProj );
        generatePomXML(kfs, releaseId);
        generateMessageClass( kfs, namespace );
        generateRule( kfs, namespace );
        // empty kmodule.xml declares no kbases -> use the default KieBase
        createAndTestKieContainer(releaseId, createKieBuilder(kfs), null );
    }

    @Test
    public void testNoPomAndProjectXml() throws ClassNotFoundException, InterruptedException, IOException {
        String namespace = "org.kie.test";
        ReleaseId releaseId = KieServices.Factory.get().getRepository().getDefaultReleaseId();
        KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
        generateMessageClass( kfs, namespace );
        generateRule( kfs, namespace );
        createAndTestKieContainer( releaseId, createKieBuilder( kfs ), null );
    }

    @Test
    public void testInvalidPomXmlGAV() throws ClassNotFoundException, InterruptedException, IOException {
        String namespace = "org.kie.test";
        // empty groupId/artifactId/version must be rejected
        ReleaseId releaseId = new ReleaseIdImpl( "", "", "" );
        KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
        generatePomXML( kfs, releaseId );
        generateMessageClass( kfs, namespace );
        generateRule( kfs, namespace );
        KieBuilder kieBuilder = createKieBuilder( kfs );
        kieBuilder.buildAll();
        assertTrue( kieBuilder.getResults().hasMessages( Level.ERROR ) );
    }

    @Test
    public void testInvalidPomXmlContent() throws ClassNotFoundException, InterruptedException, IOException {
        String namespace = "org.kie.test";
        KieModuleModel kProj = createKieProject(namespace);
        KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
        // garbage pom.xml content must produce a build error
        kfs.write( "pom.xml", "xxxx" );
        generateKProjectXML( kfs, namespace, kProj );
        generateMessageClass( kfs, namespace );
        generateRule( kfs, namespace );
        KieBuilder kieBuilder = createKieBuilder(kfs);
        kieBuilder.buildAll();
        assertTrue ( kieBuilder.getResults().hasMessages(Level.ERROR) );
    }

    @Test
    public void testInvalidProjectXml() throws ClassNotFoundException, InterruptedException, IOException {
        String namespace = "org.kie.test";
        ReleaseId releaseId = KieServices.Factory.get().newReleaseId( namespace, "memory", "1.0" );
        KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
        generatePomXML( kfs, releaseId );
        // garbage kmodule.xml content must produce a build error
        kfs.writeKModuleXML( "xxxx" );
        generateMessageClass( kfs, namespace );
        generateRule( kfs, namespace );
        KieBuilder kieBuilder = createKieBuilder(kfs);
        kieBuilder.buildAll();
        assertTrue( kieBuilder.getResults().hasMessages( Level.ERROR ) );
    }

    @Test
    public void testSetPomModelReuse() throws IOException {
        String namespace = "org.kie.test";
        ReleaseId releaseId = KieServices.Factory.get().newReleaseId( namespace,
                                                                      "pomModelReuse",
                                                                      "1.0" );
        String pom = KieBuilderImpl.generatePomXml( releaseId );
        KieFileSystem kfs = KieServices.Factory.get().newKieFileSystem();
        kfs.writePomXML( pom );

        // Create a KieBuilder instance
        KieBuilder kieBuilder1 = createKieBuilder( kfs );
        kieBuilder1.buildAll();

        // Get PomModel to re-use in second KieBuilder instance
        PomModel pomModel = ( (KieBuilderImpl) kieBuilder1 ).getPomModel();
        kfs.writePomXML( pom );

        // Create another KieBuilder instance with the same KieFileSystem, setting PomModel
        KieBuilder kieBuilder2 = createKieBuilder( kfs );
        ( (KieBuilderImpl) kieBuilder2 ).setPomModel( pomModel );
        kieBuilder2.buildAll();

        // Both KieModules must contain the identical pom.xml.
        // BUG FIX: the original closed reader1 twice and never closed reader2;
        // reading is now done in a helper using try-with-resources.
        InternalKieModule kieModule1 = (InternalKieModule) ( (KieBuilderImpl) kieBuilder1 ).getKieModuleIgnoringErrors();
        assertEquals( pom, readModulePom( kieModule1 ) );

        InternalKieModule kieModule2 = (InternalKieModule) ( (KieBuilderImpl) kieBuilder2 ).getKieModuleIgnoringErrors();
        assertEquals( pom, readModulePom( kieModule2 ) );
    }

    /**
     * Reads the generated pom.xml resource out of the given KieModule.
     * The reader is always closed, even if reading fails.
     */
    private static String readModulePom(InternalKieModule kieModule) throws IOException {
        try (Reader reader = kieModule.getResource( "META-INF/maven/org.kie.test/pomModelReuse/pom.xml" ).getReader()) {
            StringBuilder content = new StringBuilder();
            int charCode;
            while ( ( charCode = reader.read() ) != -1 ) {
                content.append( (char) charCode );
            }
            return content.toString();
        }
    }

    /**
     * Creates a KieModuleModel with a single KieBase named after the given
     * namespace, configured for equality behavior and stream event processing.
     */
    public KieModuleModel createKieProject(String namespace) {
        KieServices ks = KieServices.Factory.get();
        KieModuleModel kProj = ks.newKieModuleModel();
        kProj.newKieBaseModel(namespace)
             .setEqualsBehavior( EqualityBehaviorOption.EQUALITY )
             .setEventProcessingMode( EventProcessingOption.STREAM );
        return kProj;
    }

    /** Writes pom.xml, kmodule.xml, the Message class and a rule into the KieFileSystem. */
    public void generateAll(KieFileSystem kfs, String namespace, ReleaseId releaseId, KieModuleModel kProj) {
        generatePomXML(kfs, releaseId);
        generateKProjectXML( kfs, namespace, kProj );
        generateMessageClass( kfs, namespace );
        generateRule( kfs, namespace );
    }

    public void generatePomXML(KieFileSystem kfs, ReleaseId releaseId) {
        kfs.writePomXML( KieBuilderImpl.generatePomXml(releaseId) );
    }

    public void generateKProjectXML(KieFileSystem kfs, String namespace, KieModuleModel kProj) {
        kfs.writeKModuleXML( kProj.toXML() );
    }

    public void generateMessageClass(KieFileSystem kfs, String namespace) {
        kfs.write("src/main/java/" + namespace.replace('.', '/') + "/Message.java", getMessageClass( namespace ) );
    }

    public void generateRule(KieFileSystem kfs, String namespace) {
        kfs.write("src/main/resources/" + namespace.replace('.', '/') + "/rule1.drl", getRule(namespace, namespace, "r1") );
    }

    public KieBuilder createKieBuilder(KieFileSystem kfs) {
        KieServices ks = KieServices.Factory.get();
        return ks.newKieBuilder( kfs );
    }

    public KieBuilder createKieBuilder(File file) {
        KieServices ks = KieServices.Factory.get();
        return ks.newKieBuilder( file );
    }

    /**
     * Builds the module, registers it, creates a container/session, fires the
     * rules and asserts exactly one Message fact was inserted.
     *
     * @param kBaseName the KieBase to use, or null for the default KieBase
     */
    public void createAndTestKieContainer(ReleaseId releaseId, KieBuilder kb, String kBaseName) throws IOException,
            ClassNotFoundException,
            InterruptedException {
        KieServices ks = KieServices.Factory.get();
        kb.buildAll();
        if ( kb.getResults().hasMessages(Level.ERROR) ) {
            fail("Unable to build KieModule\n" + kb.getResults( ).toString() );
        }
        KieRepository kr = ks.getRepository();
        KieModule kJar = kr.getKieModule(releaseId);
        assertNotNull( kJar );

        KieContainer kContainer = ks.newKieContainer(releaseId);
        KieBase kBase = kBaseName != null ? kContainer.getKieBase( kBaseName ) : kContainer.getKieBase();
        KieSession kSession = kBase.newKieSession();
        List<Object> list = new ArrayList<>();
        kSession.setGlobal( "list", list );
        kSession.fireAllRules();
        assertEquals( 1, list.size() );
        assertEquals( "org.kie.test.Message", list.get(0).getClass().getName() );
    }

    /** Generates DRL source for a rule that inserts a new Message into the "list" global. */
    public String getRule(String namespace,
                          String messageNS,
                          String ruleName) {
        String s = "package " + namespace + "\n" +
                   "import " + messageNS + ".Message;\n"+
                   "global java.util.List list;\n" +
                   "rule " + ruleName + " when \n" +
                   "then \n" +
                   " Message msg = new Message('hello');" +
                   " list.add(msg); " +
                   "end \n" +
                   "";
        return s;
    }

    /** Generates Java source for a trivial Message bean in the given package. */
    public String getMessageClass(String namespace) {
        String s = "package " + namespace + ";\n" +
                   "import java.lang.*;\n" +
                   "public class Message {\n" +
                   " private String text; \n " +
                   " public Message(String text) { \n" +
                   " this.text = text; \n" +
                   " } \n" +
                   " \n" +
                   " public String getText() { \n" +
                   " return this.text;\n" +
                   " }\n" +
                   "}\n";
        return s;
    }
}
| |
package datawave.iterators;
import java.nio.charset.CharacterCodingException;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.hadoop.io.Text;
import org.apache.log4j.Logger;
/**
* This is an abstraction of the keys found in the protobuf edge table.
*
*/
public class EdgeKey {
private final EDGE_FORMAT format;
private final STATS_TYPE statsType;
// rowid parts
private final String sourceData;
private final String sinkData;
// colf parts
private final String type;
private final String sourceRelationship;
private final String sinkRelationship;
// colq
private final String sourceAttribute1;
private final String sinkAttribute1;
private final String attribute2;
private final String attribute3;
private final String yyyymmdd;
private final DATE_TYPE dateType;
// colv
private final Text colvis;
// other key parts
private final long timestamp;
private final boolean deleted;
private static final Logger log = Logger.getLogger(EdgeKey.class);
/**
 * Constructs a fully-populated, immutable edge key. Private by design:
 * use {@link EdgeKeyBuilder} instead of juggling this long positional
 * parameter list.
 *
 * NOTE(review): the parameter order is easy to get wrong at call sites —
 * the {@code family} argument populates the {@code type} field, and
 * {@code attribute3} comes BEFORE {@code attribute2}.
 */
private EdgeKey(EDGE_FORMAT format, STATS_TYPE statsType, String sourceData, String sinkData, String family, String sourceRelationship,
                String sinkRelationship, String sourceAttribute1, String sinkAttribute1, String yyyymmdd, String attribute3, String attribute2,
                Text colvis, long timestamp, boolean deleted, DATE_TYPE dateType) {
    this.format = format;
    this.statsType = statsType;
    this.sourceData = sourceData;
    this.sinkData = sinkData;
    // the column family carries the edge type
    this.type = family;
    this.sourceRelationship = sourceRelationship;
    this.sinkRelationship = sinkRelationship;
    this.sourceAttribute1 = sourceAttribute1;
    this.sinkAttribute1 = sinkAttribute1;
    this.yyyymmdd = yyyymmdd;
    this.attribute2 = attribute2;
    this.attribute3 = attribute3;
    this.dateType = dateType;
    this.colvis = colvis;
    this.timestamp = timestamp;
    this.deleted = deleted;
}
public static class EdgeKeyBuilder {
private EDGE_FORMAT format;
private STATS_TYPE statsType;
// rowid parts
private String sourceData;
private String sinkData;
// colf parts
private String type;
private String sourceRelationship;
private String sinkRelationship;
// colq
private String sourceAttribute1;
private String sinkAttribute1;
private String attribute2;
private String attribute3;
private String yyyymmdd;
private DATE_TYPE dateType;
// colv
private Text colvis;
// Other key parts
private long timestamp = Long.MAX_VALUE;
private boolean deleted = false;
private static final String EMPTY = "";
private boolean escape;
private boolean unescape;
/** Creates a builder with every field set to its default (see clearFields). */
private EdgeKeyBuilder() {
    colvis = new Text();
    clearFields();
}

/**
 * Creates a builder pre-populated from an existing {@link EdgeKey}, useful
 * for deriving a modified copy of a key. The column visibility is
 * defensively copied, and the escape/unescape flags are reset so that
 * build() performs no (re-)escaping unless explicitly requested.
 */
private EdgeKeyBuilder(EdgeKey key) {
    escape = false;
    format = key.getFormat();
    statsType = key.getStatsType();
    sourceData = key.getSourceData();
    sinkData = key.getSinkData();
    type = key.getType();
    unescape = false;
    sourceRelationship = key.getSourceRelationship();
    sinkRelationship = key.getSinkRelationship();
    sourceAttribute1 = key.getSourceAttribute1();
    sinkAttribute1 = key.getSinkAttribute1();
    attribute2 = key.getAttribute2();
    attribute3 = key.getAttribute3();
    yyyymmdd = key.getYyyymmdd();
    dateType = key.getDateType();
    // defensive copy: do not share the Text instance with the source key
    colvis = new Text(key.getColvis());
    timestamp = key.getTimestamp();
    deleted = key.isDeleted();
}
/**
 * Restores all builder state to default values so this builder instance can
 * be reused to assemble a fresh key.
 *
 * @return this builder, for call chaining
 */
public EdgeKeyBuilder clearFields() {
    // format / stats defaults
    this.format = EDGE_FORMAT.STANDARD;
    this.statsType = STATS_TYPE.ACTIVITY;
    // escaping and delete flags off by default
    this.escape = false;
    this.unescape = false;
    this.deleted = false;
    // all textual key parts start empty
    this.sourceData = EMPTY;
    this.sinkData = EMPTY;
    this.type = EMPTY;
    this.sourceRelationship = EMPTY;
    this.sinkRelationship = EMPTY;
    this.sourceAttribute1 = EMPTY;
    this.sinkAttribute1 = EMPTY;
    this.attribute2 = EMPTY;
    this.attribute3 = EMPTY;
    this.yyyymmdd = EMPTY;
    this.dateType = DATE_TYPE.OLD_EVENT;
    this.colvis.clear();
    this.timestamp = Long.MAX_VALUE;
    return this;
}
/**
 * Builds the edge key from the current builder state, applying Java-style
 * escaping or unescaping to the source/sink data when exactly one of
 * {@link #escape()} / {@link #unescape()} was requested. If the
 * escape/unescape step throws, the original (untransformed) data is kept
 * and the key is still built.
 *
 * NOTE(review): on success the (un)escaped values are written back into
 * this builder's {@code sourceData}/{@code sinkData} fields, so calling
 * build() more than once on the same builder with escaping enabled will
 * escape the data repeatedly.
 *
 * @return the built edge key.
 */
public EdgeKey build() {
    String tempSourceData = this.sourceData;
    String tempSinkData = this.sinkData;
    try {
        if (log.isTraceEnabled()) {
            log.trace("Attempting escape sequencing isEscape? " + escape + " isUnescape? " + unescape);
            log.trace("Values before attempt source data " + tempSourceData + ", sink data " + tempSinkData);
        }
        // the two flags are mutually exclusive; if both are set do nothing
        if (escape && !unescape) {
            tempSourceData = StringEscapeUtils.escapeJava(sourceData);
            tempSinkData = StringEscapeUtils.escapeJava(sinkData);
        } else if (unescape && !escape) {
            tempSourceData = StringEscapeUtils.unescapeJava(sourceData);
            tempSinkData = StringEscapeUtils.unescapeJava(sinkData);
        }
        // moving the assignment here since we want to rely on the original data
        // if for some reason either of the escape/unescape sequence fails
        this.sourceData = tempSourceData;
        this.sinkData = tempSinkData;
    } catch (Exception e) {
        log.error("Avoiding escape sequencing, due to : " + e);
    }
    // NOTE(review): EdgeKey's constructor takes attribute3 before attribute2 —
    // the argument order below intentionally matches that signature.
    return new EdgeKey(getFormat(), getStatsType(), getSourceData(), getSinkData(), getType(), getSourceRelationship(), getSinkRelationship(),
            getSourceAttribute1(), getSinkAttribute1(), getYyyymmdd(), getAttribute3(), getAttribute2(), getColvis(), getTimestamp(),
            isDeleted(), getDateType());
}
// ------------------------------------------------------------------
// Fluent accessors. String getters never return null: a null field is
// reported as the empty string. Setters return this builder so calls
// can be chained; FIX: setDateType previously returned void, breaking
// the fluent convention every other setter follows.
// ------------------------------------------------------------------

public EDGE_FORMAT getFormat() {
    return format;
}

public EdgeKeyBuilder setFormat(EDGE_FORMAT format) {
    this.format = format;
    return this;
}

public STATS_TYPE getStatsType() {
    return statsType;
}

public EdgeKeyBuilder setStatsType(STATS_TYPE type) {
    this.statsType = type;
    return this;
}

public String getSourceData() {
    return (null == sourceData) ? EMPTY : sourceData;
}

public EdgeKeyBuilder setSourceData(String sourceData) {
    this.sourceData = sourceData;
    return this;
}

public String getSinkData() {
    return (null == sinkData) ? EMPTY : sinkData;
}

public EdgeKeyBuilder setSinkData(String sinkData) {
    this.sinkData = sinkData;
    return this;
}

public String getType() {
    return (null == type) ? EMPTY : type;
}

public EdgeKeyBuilder setType(String type) {
    this.type = type;
    return this;
}

/** Requests Java-escaping of source/sink data at build time (clears unescape). */
public EdgeKeyBuilder escape() {
    this.escape = true;
    this.unescape = false;
    return this;
}

/** Requests Java-unescaping of source/sink data at build time (clears escape). */
public EdgeKeyBuilder unescape() {
    this.unescape = true;
    this.escape = false;
    return this;
}

public String getSourceRelationship() {
    return (null == sourceRelationship) ? EMPTY : sourceRelationship;
}

public EdgeKeyBuilder setSourceRelationship(String sourceRelationship) {
    this.sourceRelationship = sourceRelationship;
    return this;
}

public String getSinkRelationship() {
    return (null == sinkRelationship) ? EMPTY : sinkRelationship;
}

public EdgeKeyBuilder setSinkRelationship(String sinkRelationship) {
    this.sinkRelationship = sinkRelationship;
    return this;
}

public String getSourceAttribute1() {
    return (null == sourceAttribute1) ? EMPTY : sourceAttribute1;
}

public EdgeKeyBuilder setSourceAttribute1(String sourceAttribute1) {
    this.sourceAttribute1 = sourceAttribute1;
    return this;
}

public String getSinkAttribute1() {
    return (null == sinkAttribute1) ? EMPTY : sinkAttribute1;
}

public EdgeKeyBuilder setSinkAttribute1(String sinkAttribute1) {
    this.sinkAttribute1 = sinkAttribute1;
    return this;
}

public String getAttribute2() {
    return (null == attribute2) ? EMPTY : attribute2;
}

public EdgeKeyBuilder setAttribute2(String attribute2) {
    this.attribute2 = attribute2;
    return this;
}

public String getAttribute3() {
    return (null == attribute3) ? EMPTY : attribute3;
}

public EdgeKeyBuilder setAttribute3(String attribute3) {
    this.attribute3 = attribute3;
    return this;
}

public String getYyyymmdd() {
    return (null == yyyymmdd) ? EMPTY : yyyymmdd;
}

public DATE_TYPE getDateType() {
    return dateType;
}

public EdgeKeyBuilder setDateType(DATE_TYPE dateType) {
    this.dateType = dateType;
    return this;
}

public EdgeKeyBuilder setYyyymmdd(String yyyymmdd) {
    this.yyyymmdd = yyyymmdd;
    return this;
}

/** Returns the column visibility; a fresh empty Text when none is set. */
public Text getColvis() {
    return (null == colvis) ? new Text() : colvis;
}

public EdgeKeyBuilder setColvis(Text colvis) {
    // defensive copy so later mutation of the argument cannot leak in
    this.colvis = new Text(colvis);
    return this;
}

public EdgeKeyBuilder setColvis(ColumnVisibility colvis) {
    this.colvis = new Text(colvis.getExpression());
    return this;
}

public long getTimestamp() {
    return timestamp;
}

public EdgeKeyBuilder setTimestamp(long timestamp) {
    this.timestamp = timestamp;
    return this;
}

public boolean isDeleted() {
    return deleted;
}

public EdgeKeyBuilder setDeleted(boolean deleted) {
    this.deleted = deleted;
    return this;
}
/**
 * Consistent with equals(): every field compared there participates here.
 * 223 is an arbitrary odd prime; 1231/1237 are the conventional boolean
 * hash constants. The exact accumulation order is preserved so hash values
 * are stable for any code that relies on them.
 */
@Override
public int hashCode() {
final int prime = 223;
int result = 1;
result = prime * result + ((colvis == null) ? 0 : colvis.hashCode());
result = prime * result + (deleted ? 1231 : 1237);
result = prime * result + ((format == null) ? 0 : format.hashCode());
result = prime * result + ((type == null) ? 0 : type.hashCode());
result = prime * result + ((sinkData == null) ? 0 : sinkData.hashCode());
result = prime * result + ((sinkRelationship == null) ? 0 : sinkRelationship.hashCode());
result = prime * result + ((sinkAttribute1 == null) ? 0 : sinkAttribute1.hashCode());
result = prime * result + ((sourceData == null) ? 0 : sourceData.hashCode());
result = prime * result + ((sourceRelationship == null) ? 0 : sourceRelationship.hashCode());
result = prime * result + ((sourceAttribute1 == null) ? 0 : sourceAttribute1.hashCode());
result = prime * result + ((statsType == null) ? 0 : statsType.hashCode());
result = prime * result + (int) (timestamp ^ (timestamp >>> 32));
result = prime * result + ((attribute3 == null) ? 0 : attribute3.hashCode());
result = prime * result + ((attribute2 == null) ? 0 : attribute2.hashCode());
result = prime * result + ((yyyymmdd == null) ? 0 : yyyymmdd.hashCode());
result = prime * result + ((dateType == null) ? 0 : dateType.hashCode());
return result;
}
/**
 * Field-by-field equality, consistent with {@link #hashCode()}.
 *
 * <p>
 * Primitives and enum fields (format, statsType, dateType) are compared with
 * ==; all object fields use null-safe java.util.Objects.equals, which is
 * exactly equivalent to the hand-written null-check chains it replaces.
 * Objects is referenced fully qualified to avoid touching the file's imports.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    EdgeKeyBuilder other = (EdgeKeyBuilder) obj;
    return deleted == other.deleted
            && timestamp == other.timestamp
            && format == other.format
            && statsType == other.statsType
            && dateType == other.dateType
            && java.util.Objects.equals(colvis, other.colvis)
            && java.util.Objects.equals(type, other.type)
            && java.util.Objects.equals(sinkData, other.sinkData)
            && java.util.Objects.equals(sinkRelationship, other.sinkRelationship)
            && java.util.Objects.equals(sinkAttribute1, other.sinkAttribute1)
            && java.util.Objects.equals(sourceData, other.sourceData)
            && java.util.Objects.equals(sourceRelationship, other.sourceRelationship)
            && java.util.Objects.equals(sourceAttribute1, other.sourceAttribute1)
            && java.util.Objects.equals(attribute2, other.attribute2)
            && java.util.Objects.equals(attribute3, other.attribute3)
            && java.util.Objects.equals(yyyymmdd, other.yyyymmdd);
}
} // End of EdgeKeyBuilder
/** @return a fresh builder with no fields set */
public static EdgeKeyBuilder newBuilder() {
return new EdgeKeyBuilder();
}
/** @return a builder pre-populated from the given EdgeKey */
public static EdgeKeyBuilder newBuilder(EdgeKey edgeKey) {
return new EdgeKeyBuilder(edgeKey);
}
/** @return a fresh builder with only the edge format set */
public static EdgeKeyBuilder newBuilder(EdgeKey.EDGE_FORMAT format) {
EdgeKeyBuilder builder = EdgeKey.newBuilder();
return builder.setFormat(format);
}
// Generate a new key with the source and sink stuff swapped
// (data, relationship and attribute1 are exchanged; everything else is copied).
public static EdgeKey swapSourceSink(EdgeKey swap) {
EdgeKeyBuilder builder = newBuilder(swap);
builder.setSourceData(swap.getSinkData());
builder.setSinkData(swap.getSourceData());
builder.setSourceRelationship(swap.getSinkRelationship());
builder.setSinkRelationship(swap.getSourceRelationship());
builder.setSourceAttribute1(swap.getSinkAttribute1());
builder.setSinkAttribute1(swap.getSourceAttribute1());
return builder.build();
}
// Column family used for edge entries in the Datawave metadata table.
private static final String EDGE_METADATA_COLUMN = "edge";
// Column family marker identifying STATS edges.
private static final String STATS_COLF = "STATS";
// NOTE(review): getBytes() uses the platform default charset; "STATS" is
// ASCII so this is stable in practice, but an explicit charset would be
// safer — confirm before changing.
static final byte[] STATS_BYTES = STATS_COLF.getBytes();
// NOTE(review): SOURCE_INDEX/SINK_INDEX appear unused in this section of the file.
private static final int SOURCE_INDEX = 0;
private static final int SINK_INDEX = 1;
// Separator between pieces of the column family/qualifier.
public static final char COL_SEPARATOR = '/';
public static final String COL_SEPARATOR_STR = String.valueOf(COL_SEPARATOR);
public static final byte COL_SEPARATOR_BYTE = '/';
// Separator used inside a single piece (e.g. sourceRel-sinkRel).
public static final char COL_SUB_SEPARATOR = '-';
/**
 * The kinds of STATS edges that can appear in the edge table.
 */
public enum STATS_TYPE {
    DURATION, ACTIVITY, LINKS;

    /**
     * Resolves a stats label to its enum constant.
     *
     * @param statsLabel the name of a stats type, e.g. "DURATION"
     * @return the matching constant
     * @throws EnumConstantNotPresentException if the label matches no constant
     */
    public static STATS_TYPE getStatsType(String statsLabel) {
        for (STATS_TYPE candidate : values()) {
            if (candidate.name().equals(statsLabel)) {
                return candidate;
            }
        }
        throw new EnumConstantNotPresentException(STATS_TYPE.class, statsLabel);
    }

    /** @return the length of the longest constant name ("DURATION"/"ACTIVITY" are 8 chars) */
    public static int getMaxLength() {
        return 8;
    }
}
/**
 * Qualifies the date value that is encoded inside an edge key, identifying the
 * source "event" field from which that date was derived.
 *
 * <p>
 * The {@link DATE_TYPE#abbreviation} is written into the edge key as a
 * qualifier for the date value, enabling more flexible date-filtering logic in
 * clients.
 *
 * <p>
 * An "EVENT*" type means the date came from the raw source event's date field
 * (datawave.ingest.data.RawRecordContainer.getDate at ingest time), i.e. the
 * date portion of the Accumulo row id in DataWave's shard table. An
 * "ACTIVITY*" type means the date came from some other date value within the
 * raw record, not the event (shard) date.
 */
public enum DATE_TYPE {
    ACTIVITY_ONLY("C"), EVENT_ONLY("A"), ACTIVITY_AND_EVENT("B"), OLD_EVENT("");

    String abbreviation;

    DATE_TYPE(String character) {
        this.abbreviation = character;
    }

    /**
     * Looks up the DATE_TYPE whose abbreviation equals the given string.
     *
     * @param dateType single-character abbreviation ("A", "B", "C" or "")
     * @return the matching constant, or null when nothing matches
     */
    public static DATE_TYPE parse(String dateType) {
        for (DATE_TYPE candidate : values()) {
            if (candidate.abbreviation.equals(dateType)) {
                return candidate;
            }
        }
        // no match — callers treat null as "unqualified"
        return null;
    }

    /** @return the single-character abbreviation written into the key */
    @Override
    public String toString() {
        return abbreviation;
    }
}
/**
 * The physical layout of an edge table row: STANDARD rows split into two
 * pieces (source\0sink), STATS rows into one (source only).
 */
public enum EDGE_FORMAT {
    STANDARD(2), STATS(1), UNKNOWN(0);

    private final int splitLength;

    EDGE_FORMAT(int splitLength) {
        this.splitLength = splitLength;
    }

    /**
     * Maps the number of row pieces to a format.
     *
     * @param splitLength number of null-separated pieces in the row id
     * @return STANDARD for 2, STATS for 1, otherwise UNKNOWN
     */
    public static EDGE_FORMAT getFormatFromRow(int splitLength) {
        for (EDGE_FORMAT candidate : values()) {
            if (candidate.splitLength == splitLength) {
                return candidate;
            }
        }
        return UNKNOWN;
    }
}
/*
 * for determining what version of edge is being parsed as the table evolves need to be careful that we don't make it so we can't tell the difference
 * between an older edge and a newer edge
 *
 * also, it keeps the index numbering nightmare in a single place.
 *
 * indices assume colf and colq are in a single list.
 */
// Constructor column order for each constant below:
// (ncf, ncq, iType, iRelationship, iCategory, iYYYYMMDD, iAttribute2,
//  iAttribute3, iDateType, format, hasAttribute2, hasAttribute3)
// where ncf/ncq are the piece counts of colf/colq and the i* values are
// indices into the combined colf+colq piece list (-1 = piece not present).
public enum EDGE_VERSION {
/* 0 1 2 3 4 5 6 7 */
STATS_BASE(5, 1, 2, 3, 4, 5, -1, -1, -1, EDGE_FORMAT.STATS, false, false), /* STATS / STATTYPE / TYPE / RELATIONSHIP / CATEGORY : YYYYMMDD */
STATS_ATTRIBUTE2(6, 1, 2, 3, 4, 6, 5, -1, -1, EDGE_FORMAT.STATS, true, false), /*
* STATS / STATTYPE / TYPE / RELATIONSHIP / CATEGORY / ATTRIBUTE2 :
* YYYYMMDD
*/
BASE(3, 1, 0, 1, 2, 3, -1, -1, -1, EDGE_FORMAT.STANDARD, false, false), /* TYPE / RELATIONSHIP / CATEGORY : YYYYMMDD */
BASE_ATTRIBUTE2(4, 1, 0, 1, 2, 4, 3, -1, -1, EDGE_FORMAT.STANDARD, true, false), /* TYPE / RELATIONSHIP / CATEGORY / ATTRIBUTE2 : YYYYMMDD */
STATS_PROTOBUF(4, 4, 2, 3, 5, 4, 6, 7, -1, EDGE_FORMAT.STATS, true, true), /*
* STATS / STATTYPE / TYPE / RELATIONSHIP : YYYYMMDD / CATEGORY / ATTRIBUTE2
* / ATTRIBUTE3
*/
DATE_STATS_PROTOBUF(4, 5, 2, 3, 5, 4, 6, 7, 8, EDGE_FORMAT.STATS, true, true), /*
* STATS / STATTYPE / TYPE / RELATIONSHIP : YYYYMMDD / CATEGORY /
* ATTRIBUTE2 / ATTRIBUTE3 / DATETYPE
*/
PROTOBUF(2, 4, 0, 1, 3, 2, 4, 5, -1, EDGE_FORMAT.STANDARD, true, true), /* TYPE / RELATIONSHIP : YYYYMMDD / CATEGORY / ATTRIBUTE2 / ATTRIBUTE3 */
DATE_PROTOBUF(2, 5, 0, 1, 3, 2, 4, 5, 6, EDGE_FORMAT.STANDARD, true, true), /*
* TYPE / RELATIONSHIP : YYYYMMDD / CATEGORY / ATTRIBUTE2 / ATTRIBUTE3
* /DATETYPE
*/
UNKNOWN(-1, -1, -1, -1, -1, -1, -1, -1, -1, EDGE_FORMAT.STANDARD, false, false);
// number of pieces in the column family / column qualifier
private final int ncf;
private final int ncq;
// indices into the combined colf+colq piece list; -1 means "not present"
private final int iType;
private final int iRelationship;
private final int iCategory;
private final int iYYYYMMDD;
private final int iAttribute2;
private final int iAttribute3;
private final int iDateType;
private final EDGE_FORMAT format;
private final boolean hasAttribute2;
private final boolean hasAttribute3;
// the stats type always sits directly after the STATS marker
private final int iStatsType = 1;
private EDGE_VERSION(int ncf, int ncq, int iType, int iRelationship, int iCategory, int iYYYYMMDD, int iAttribute2, int iAttribute3, int iDateType,
EDGE_FORMAT format, boolean hasAttribute2, boolean hasAttribute3) {
this.ncf = ncf;
this.ncq = ncq;
this.iType = iType;
this.iRelationship = iRelationship;
this.iCategory = iCategory;
this.iYYYYMMDD = iYYYYMMDD;
this.iAttribute2 = iAttribute2;
this.iAttribute3 = iAttribute3;
this.iDateType = iDateType;
this.format = format;
this.hasAttribute2 = hasAttribute2;
this.hasAttribute3 = hasAttribute3;
}
public int getNumColfPieces() {
return ncf;
}
public int getNumColqPieces() {
return ncq;
}
public int getTotalColPieces() {
return ncf + ncq;
}
public int getTypeIndex() {
return iType;
}
public int getStatsTypeIndex() {
return iStatsType;
}
public int getRelationshipIndex() {
return iRelationship;
}
public int getCategoryIndex() {
return iCategory;
}
public int getYMDIndex() {
return iYYYYMMDD;
}
public int getAttribute2Index() {
return iAttribute2;
}
public int getAttribute3Index() {
return iAttribute3;
}
/** @return index of the date-type qualifier piece, or -1 for legacy versions */
public int getDateTypeIndex() {
return iDateType;
}
public EDGE_FORMAT getFormat() {
return format;
}
public boolean hasAttribute2() {
return hasAttribute2;
}
public boolean hasAttribute3() {
return hasAttribute3;
}
/** @return the stats type parsed from the piece list (only meaningful for STATS versions) */
public STATS_TYPE getStatsType(List<String> pieces) {
return STATS_TYPE.getStatsType(pieces.get(iStatsType));
}
// Identify the version purely from the total piece count, disambiguated by
// whether the first piece is the STATS marker.
public static EDGE_VERSION getEdgeVersion(List<String> pieces) {
int nPieces = pieces.size();
if (nPieces < 4) {
return UNKNOWN;
}
if (pieces.get(0).equals(STATS_COLF)) {
if (nPieces == 6) {
return STATS_BASE;
} else if (nPieces == 7) {
return STATS_ATTRIBUTE2;
} else if (nPieces == 8) {
return STATS_PROTOBUF;
} else if (nPieces == 9) {
return DATE_STATS_PROTOBUF;
}
} else {
if (nPieces == 4) {
return BASE;
} else if (nPieces == 5) {
return BASE_ATTRIBUTE2;
} else if (nPieces == 6) {
return PROTOBUF;
} else if (nPieces == 7) {
return DATE_PROTOBUF;
}
}
return UNKNOWN;
}
}
/**
 * An abstraction of the column family and qualifier pieces and parts of an
 * edge key: the '/'-separated tokens of the column family followed by those of
 * the column qualifier, exposed as a read-only List of Strings.
 */
public static class EdgeColumnParts extends AbstractList<String> implements List<String> {
    // Largest piece count across all EDGE_VERSIONs is 9 (DATE_STATS_PROTOBUF).
    private final String[] parts = new String[9];
    private int pLen = 0;

    /**
     * This constructor is preferred because it allows the client to reuse Text objects
     * to avoid constructing and destructing Text objects.
     *
     * @param colFam the column family
     * @param colQual the column qualifier
     */
    public EdgeColumnParts(Text colFam, Text colQual) {
        getParts(colFam);
        getParts(colQual);
    }

    /**
     * This constructor should be avoided when possible because it extracts the column
     * data from the key on every call.
     *
     * @param key an edge table key
     */
    public EdgeColumnParts(Key key) {
        getParts(key.getColumnFamilyData());
        getParts(key.getColumnQualifierData());
    }

    private void getParts(Text text) {
        getParts(text.getBytes(), text.getLength());
    }

    /**
     * Splits bytes[0..bLen) on '/' and appends each decoded token to {@link #parts}.
     *
     * @param bytes
     *            byte array holding the parts of the edge key
     * @param bLen
     *            number of bytes to use (important: the byte array may be reused so its length may not be correct)
     */
    private void getParts(byte[] bytes, int bLen) {
        try {
            int start = 0;
            for (int i = 0; i < bLen; i++) {
                if (bytes[i] == COL_SEPARATOR_BYTE) {
                    appendPart(Text.decode(bytes, start, i - start), bytes, bLen);
                    start = i + 1;
                }
            }
            // Fix: the trailing token also needs the capacity guard; previously an
            // overfull key raised ArrayIndexOutOfBoundsException here instead of
            // the intended diagnostic.
            appendPart(Text.decode(bytes, start, bLen - start), bytes, bLen);
        } catch (CharacterCodingException e) {
            throw new RuntimeException("Edge key column encoding exception", e);
        }
    }

    /** Stores one decoded token, failing with a descriptive error when the key has too many pieces. */
    private void appendPart(String part, byte[] bytes, int bLen) {
        if (pLen >= parts.length) {
            // Only the first bLen bytes are valid content (the backing array may be
            // reused), so don't dump the whole array into the message.
            throw new RuntimeException("Exceeded the possible number of parts (" + parts.length + ")." + " bytes as String: "
                    + new Text(Arrays.copyOf(bytes, bLen)) + " parts: " + Arrays.toString(parts));
        }
        parts[pLen++] = part;
    }

    private void getParts(ByteSequence byteSeq) {
        byte[] bytes = byteSeq.getBackingArray();
        getParts(bytes, byteSeq.length());
    }

    /** @return the number of pieces parsed from colf + colq */
    @Override
    public int size() {
        return pLen;
    }

    /** @return the piece at the given position */
    @Override
    public String get(int pos) {
        return parts[pos];
    }
}
/**
 * Parses edge table keys into the various fields encoded within the key. The source and sink are unescaped, ready to send to external clients.
 *
 * @param key
 *            a key from the Datawave edge table
 * @return an immutable EdgeKey object
 */
public static EdgeKey decode(Key key) {
return decode(key, EdgeKey.newBuilder().unescape());
}
/**
 * Decode the key leaving the row portion native to the accumulo key, leaving the source and sink as they are in the accumulo row. Iterators which create
 * new keys without the builders, can safely decode with this method and reseek to the same position, without knowledge of the underlying escaping encodings
 *
 * @param key
 *            a key from the Datawave edge table
 * @return an immutable EdgeKey object
 */
public static EdgeKey decodeForInternal(Key key) {
EdgeKeyBuilder builder = EdgeKey.newBuilder();
// Direct field access deliberately clears BOTH flags, unlike escape()/unescape()
// which set one and clear the other.
builder.unescape = false;
builder.escape = false;
return decode(key, builder);
}
// Shared decode implementation; instantiating the decoder here keeps this
// method static while delegating the actual parsing.
protected static EdgeKey decode(Key key, EdgeKeyBuilder builder) {
EdgeKeyDecoder edgeKeyDecoder = new EdgeKeyDecoder(); // to maintain method's static modifier
return edgeKeyDecoder.decode(key, builder);
}
/**
 * Serializes this EdgeKey into an accumulo Key using the piece layout of the
 * given version.
 *
 * @param version
 *            the wire-format version whose index table decides where each
 *            field is placed within colf/colq
 * @return the encoded accumulo key
 * @throws IllegalStateException if the edge format is neither STATS nor STANDARD
 */
private Key encode(EDGE_VERSION version) {
    // One slot per colf+colq piece; slots a version does not use keep the
    // literal placeholder text "null" (preserved from the original format).
    List<String> parts = new ArrayList<>(version.getTotalColPieces());
    for (int ii = 0; ii < version.getTotalColPieces(); ii++) {
        parts.add("null");
    }
    // Row id: STATS rows hold only the source; STANDARD rows are source\0sink.
    StringBuilder rowsb = new StringBuilder();
    if (this.format == EDGE_FORMAT.STATS) {
        rowsb.append(this.getSourceData());
        parts.set(0, STATS_COLF);
        parts.set(version.getStatsTypeIndex(), this.getStatsType().name());
    } else if (this.format == EDGE_FORMAT.STANDARD) {
        rowsb.append(this.getSourceData()).append("\0").append(this.getSinkData());
    } else {
        throw new IllegalStateException("Invalid Edge Type encountered: " + this.format);
    }
    Text rowid = new Text(rowsb.toString());
    // populate the parts array according to the version's index table
    parts.set(version.getTypeIndex(), this.getType());
    parts.set(version.getRelationshipIndex(), this.getRelationship());
    parts.set(version.getCategoryIndex(), this.getAttribute1());
    parts.set(version.getYMDIndex(), this.getYyyymmdd());
    if (version.hasAttribute2()) {
        parts.set(version.getAttribute2Index(), this.getAttribute2());
    }
    if (version.hasAttribute3()) {
        parts.set(version.getAttribute3Index(), this.getAttribute3());
    }
    if (version.getDateTypeIndex() >= 0) {
        parts.set(version.getDateTypeIndex(), this.getDateType().toString());
    }
    // serialize the colf: the first getNumColfPieces() parts, '/'-joined
    StringBuilder colsb = new StringBuilder(parts.get(0));
    for (int ii = 1; ii < version.getNumColfPieces(); ii++) {
        colsb.append(COL_SEPARATOR).append(parts.get(ii));
    }
    Text colf = new Text(colsb.toString());
    // serialize the colq: the remaining parts, '/'-joined
    colsb = new StringBuilder(parts.get(version.getNumColfPieces()));
    for (int ii = version.getNumColfPieces() + 1; ii < version.getTotalColPieces(); ii++) {
        colsb.append(COL_SEPARATOR).append(parts.get(ii));
    }
    Text colq = new Text(colsb.toString());
    Key key = new Key(rowid, colf, colq, new Text(this.getColvis()), this.getTimestamp());
    key.setDeleted(this.isDeleted());
    return key;
}
/**
 * Creates an edge table key from the various EdgeKey fields, selecting the
 * serialization version from this key's date type and edge format.
 *
 * @return a key for the Datawave edge table
 * @throws IllegalStateException when the edge format is UNKNOWN
 */
public Key encode() {
    // Keys without a date-type qualifier use the older protobuf layout.
    if (getDateType() == DATE_TYPE.OLD_EVENT) {
        return encodeLegacyProtobufKey();
    }
    EDGE_FORMAT fmt = getFormat();
    if (fmt == EDGE_FORMAT.STATS) {
        return encode(EDGE_VERSION.DATE_STATS_PROTOBUF);
    }
    if (fmt == EDGE_FORMAT.STANDARD) {
        return encode(EDGE_VERSION.DATE_PROTOBUF);
    }
    // EDGE_FORMAT.UNKNOWN
    throw new IllegalStateException("Can't encode unknown edge key format." + this);
}
/**
 * Encodes this key using the legacy protobuf layout (no date-type qualifier).
 *
 * @return a key for the Datawave edge table
 * @throws IllegalStateException if the edge format is UNKNOWN
 */
public Key encodeLegacyProtobufKey() {
    return encodeForFormat(EDGE_VERSION.STATS_PROTOBUF, EDGE_VERSION.PROTOBUF);
}

/**
 * Encodes this key using the legacy layout that carries attribute2 in the
 * column family.
 *
 * @return a key for the Datawave edge table
 * @throws IllegalStateException if the edge format is UNKNOWN
 */
public Key encodeLegacyAttribute2Key() {
    return encodeForFormat(EDGE_VERSION.STATS_ATTRIBUTE2, EDGE_VERSION.BASE_ATTRIBUTE2);
}

/**
 * Encodes this key using the oldest base layout (no attribute2/attribute3).
 *
 * @return a key for the Datawave edge table
 * @throws IllegalStateException if the edge format is UNKNOWN
 */
public Key encodeLegacyKey() {
    return encodeForFormat(EDGE_VERSION.STATS_BASE, EDGE_VERSION.BASE);
}

/**
 * Shared dispatch for the legacy encoders: pick the wire version based on
 * whether this is a STATS or a STANDARD key. Replaces three identical
 * if/else-if chains.
 *
 * @param statsVersion version used when this key's format is STATS
 * @param standardVersion version used when this key's format is STANDARD
 */
private Key encodeForFormat(EDGE_VERSION statsVersion, EDGE_VERSION standardVersion) {
    if (getFormat() == EDGE_FORMAT.STATS) {
        return encode(statsVersion);
    } else if (getFormat() == EDGE_FORMAT.STANDARD) {
        return encode(standardVersion);
    } else {
        // EDGE_FORMAT.UNKNOWN
        throw new IllegalStateException("Can't encode unknown edge key format." + this);
    }
}
/**
 * Creates the metadata table key entry for this EdgeKey: row is
 * "type/relationship", colf is the edge marker, colq is attribute1.
 *
 * @return a key object for use in the Datawave Metadata table
 */
public Key getMetadataKey() {
Text row = new Text(this.getType() + COL_SEPARATOR + this.getRelationship());
Text colf = new Text(EDGE_METADATA_COLUMN);
Text colq = new Text(this.getAttribute1());
return new Key(row, colf, colq, new Text(""), this.getTimestamp());
}
/**
 * Creates the metadata table key entry for a given edge table Key.
 *
 * @param key
 *            a key from the Datawave edge table
 * @return a key object for use in the Datawave Metadata table
 */
public static Key getMetadataKey(Key key) {
EdgeKey eKey = EdgeKey.decode(key);
return eKey.getMetadataKey();
}
// Getter nightmare below here
public EDGE_FORMAT getFormat() {
return format;
}
public STATS_TYPE getStatsType() {
return statsType;
}
public String getSourceData() {
return sourceData;
}
public String getSinkData() {
return sinkData;
}
public String getType() {
return type;
}
/**
 * @return the combined relationship piece: "sourceRel-sinkRel" for STANDARD
 *         edges, just the source relationship for STATS edges
 */
public String getRelationship() {
// return a relationship string based on the edge type.
if (this.getFormat() == EDGE_FORMAT.STANDARD) {
return getSourceRelationship() + COL_SUB_SEPARATOR + getSinkRelationship();
}
return getSourceRelationship();
}
public String getSourceRelationship() {
return sourceRelationship;
}
public String getSinkRelationship() {
return sinkRelationship;
}
/**
 * @return the combined attribute1 piece: "source-sink" for STANDARD edges and
 *         for LINKS stats edges, just the source attribute otherwise
 */
public String getAttribute1() {
// return a relationship string based on the edge type.
if ((this.getFormat() == EDGE_FORMAT.STANDARD) || ((getFormat() == EDGE_FORMAT.STATS) && this.getStatsType() == STATS_TYPE.LINKS)) {
return getSourceAttribute1() + COL_SUB_SEPARATOR + getSinkAttribute1();
}
return getSourceAttribute1();
}
public String getSourceAttribute1() {
return sourceAttribute1;
}
public String getSinkAttribute1() {
return sinkAttribute1;
}
public boolean hasAttribute2() {
return StringUtils.isNotBlank(attribute2);
}
public String getAttribute2() {
return attribute2;
}
public boolean hasAttribute3() {
return StringUtils.isNotBlank(attribute3);
}
public String getAttribute3() {
return attribute3;
}
public String getYyyymmdd() {
return yyyymmdd;
}
public DATE_TYPE getDateType() {
return dateType;
}
/**
 * @return the column visibility, or an empty Text when none has been set
 */
public Text getColvis() {
    // Fix: this previously evaluated new Text(colvis) in the null branch, which
    // throws NullPointerException exactly when colvis is null. Return an empty
    // Text instead, mirroring the builder's getColvis() null handling.
    return colvis == null ? new Text() : colvis;
}
public long getTimestamp() {
return timestamp;
}
public boolean isDeleted() {
return deleted;
}
/** @return true when this key uses the STATS edge format */
public boolean isStatsKey() {
return (this.getFormat() == EDGE_FORMAT.STATS);
}
/**
 * Consistent with equals(): every field compared there participates here.
 * 223 is an arbitrary odd prime; 1231/1237 are the conventional boolean
 * hash constants. The exact accumulation order is preserved so hash values
 * are stable for any code that relies on them.
 */
@Override
public int hashCode() {
final int prime = 223;
int result = 1;
result = prime * result + ((colvis == null) ? 0 : colvis.hashCode());
result = prime * result + (deleted ? 1231 : 1237);
result = prime * result + ((format == null) ? 0 : format.hashCode());
result = prime * result + ((type == null) ? 0 : type.hashCode());
result = prime * result + ((sinkData == null) ? 0 : sinkData.hashCode());
result = prime * result + ((sinkRelationship == null) ? 0 : sinkRelationship.hashCode());
result = prime * result + ((sinkAttribute1 == null) ? 0 : sinkAttribute1.hashCode());
result = prime * result + ((sourceData == null) ? 0 : sourceData.hashCode());
result = prime * result + ((sourceRelationship == null) ? 0 : sourceRelationship.hashCode());
result = prime * result + ((sourceAttribute1 == null) ? 0 : sourceAttribute1.hashCode());
result = prime * result + ((statsType == null) ? 0 : statsType.hashCode());
result = prime * result + (int) (timestamp ^ (timestamp >>> 32));
result = prime * result + ((attribute3 == null) ? 0 : attribute3.hashCode());
result = prime * result + ((attribute2 == null) ? 0 : attribute2.hashCode());
result = prime * result + ((yyyymmdd == null) ? 0 : yyyymmdd.hashCode());
result = prime * result + ((dateType == null) ? 0 : dateType.hashCode());
return result;
}
/**
 * Field-by-field equality, consistent with {@link #hashCode()}.
 *
 * <p>
 * Primitives and enum fields (format, statsType, dateType) are compared with
 * ==; all object fields use null-safe java.util.Objects.equals, which is
 * exactly equivalent to the hand-written null-check chains it replaces.
 * Objects is referenced fully qualified to avoid touching the file's imports.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    EdgeKey other = (EdgeKey) obj;
    return deleted == other.deleted
            && timestamp == other.timestamp
            && format == other.format
            && statsType == other.statsType
            && dateType == other.dateType
            && java.util.Objects.equals(colvis, other.colvis)
            && java.util.Objects.equals(type, other.type)
            && java.util.Objects.equals(sinkData, other.sinkData)
            && java.util.Objects.equals(sinkRelationship, other.sinkRelationship)
            && java.util.Objects.equals(sinkAttribute1, other.sinkAttribute1)
            && java.util.Objects.equals(sourceData, other.sourceData)
            && java.util.Objects.equals(sourceRelationship, other.sourceRelationship)
            && java.util.Objects.equals(sourceAttribute1, other.sourceAttribute1)
            && java.util.Objects.equals(attribute2, other.attribute2)
            && java.util.Objects.equals(attribute3, other.attribute3)
            && java.util.Objects.equals(yyyymmdd, other.yyyymmdd);
}
/**
 * Determine, as fast as possible, the date type of an edge key without fully
 * decoding it into an EdgeKey.
 *
 * <p>
 * Note: an edge can be both an event and an activity edge, so do not test for
 * an event edge by negating an activity check.
 *
 * @param key
 *            an accumulo edge table key
 * @return the date type encoded in the key, or {@link DATE_TYPE#OLD_EVENT} for
 *         legacy formats that carry no date-type qualifier
 */
public static DATE_TYPE getDateType(Key key) {
    EdgeColumnParts parts = new EdgeColumnParts(key);
    EDGE_VERSION version = EDGE_VERSION.getEdgeVersion(parts);
    // Derive the piece position from the version table instead of hard-coding
    // the DATE_STATS_PROTOBUF (8) and DATE_PROTOBUF (6) indices; every other
    // version reports -1 and carries no date-type qualifier.
    int dateTypeIndex = version.getDateTypeIndex();
    if (dateTypeIndex >= 0) {
        return DATE_TYPE.parse(parts.get(dateTypeIndex));
    }
    return DATE_TYPE.OLD_EVENT;
}
}
| |
/*
* Copyright 2014 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.datamapper.diagram.custom.configuration.operators;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
import org.wso2.developerstudio.datamapper.Element;
import org.wso2.developerstudio.datamapper.Operator;
import org.wso2.developerstudio.datamapper.OperatorLeftConnector;
import org.wso2.developerstudio.datamapper.OperatorRightConnector;
import org.wso2.developerstudio.datamapper.SchemaDataType;
import org.wso2.developerstudio.datamapper.TreeNode;
import org.wso2.developerstudio.datamapper.diagram.custom.configuration.function.AssignmentStatement;
/**
 * Transformer for the data-mapper's one-to-one (direct) mapping operator.
 * Also provides shared helpers for navigating the EMF mapping model
 * (input/output elements, tree hierarchies) that subclasses reuse.
 */
public class OneToOneTransformer implements OperatorsTransformer {
// Suffix appended to array tree-node names when building element paths.
private static final String INDEX = "[i]";
@Override
public AssignmentStatement transform(Operator operator) {
// A plain one-to-one operator generates no assignment of its own.
return null;
}
/**
 * Resolves the output-side parent TreeNode for this operator. Four cases,
 * keyed by the concrete types of the objects wired to the operator's
 * connectors: element->element, element->operator, operator->operator and
 * operator->element. When another operator is wired on the output side, the
 * lookup is delegated to that operator's registered transformer.
 *
 * @param operator
 *            this operator
 * @return the output parent tree node, or null if the wiring matches none of
 *         the four cases
 */
@Override
public TreeNode getOutputElementParent(Operator operator) {
EObject outputObject = getOutputObject(operator);
EObject inputObject = getInputObject(operator);
Element outputElement;
TreeNode outputParent;
Element inputElement;
// Case 1: element -> element
if (inputObject instanceof Element && outputObject instanceof Element) {
outputElement = (Element) outputObject;
outputParent = outputElement.getFieldParent();
inputElement = (Element) inputObject;
outputParent = getOutputArrayParent(inputElement, outputParent);
return outputParent;
}
// Case 2: element -> operator; delegate to the next operator's transformer
else if (inputObject instanceof Element && outputObject instanceof OperatorLeftConnector) {
Operator nextOperator = (Operator) getOperator(outputObject);
inputElement = (Element) inputObject;
OperatorsTransformer transformer = DataMapperTransformerRegistry.getInstance().getTransformer(nextOperator);
outputParent = transformer.getOutputElementParent(nextOperator);
outputParent = getOutputArrayParent(inputElement, outputParent);
return outputParent;
// Case 3: operator -> operator
} else if (inputObject instanceof OperatorRightConnector && outputObject instanceof OperatorLeftConnector) {
Operator nextOperator = (Operator) getOperator(outputObject);
OperatorsTransformer transformer = DataMapperTransformerRegistry.getInstance().getTransformer(nextOperator);
outputParent = transformer.getOutputElementParent(nextOperator);
return outputParent;
}
// Case 4: operator -> element
else if (inputObject instanceof OperatorRightConnector && outputObject instanceof Element) {
outputElement = (Element) outputObject;
outputParent = outputElement.getFieldParent();
return outputParent;
}
return null;
}
/**
 * When the input element's parent is an ARRAY tree node, the output parent
 * must also be an array: walk up the output tree until an ARRAY node (or the
 * root) is reached.
 *
 * @param inputElement
 *            input schema element
 * @param outputParent
 *            candidate output parent tree node
 * @return the (possibly promoted) output parent tree node
 */
private TreeNode getOutputArrayParent(Element inputElement, TreeNode outputParent) {
if (inputElement.getFieldParent().getSchemaDataType().equals(SchemaDataType.ARRAY) && !(outputParent.getSchemaDataType().equals(SchemaDataType.ARRAY))) {
while (outputParent.getFieldParent() != null && !(outputParent.getSchemaDataType().equals(SchemaDataType.ARRAY))) {
outputParent = outputParent.getFieldParent();
}
}
return outputParent;
}
@Override
public TreeNode getInputElementParent(Operator operator) {
return getInputElement(operator).getFieldParent();
}
/**
 * Traverses up from a given tree node to a given root tree node and builds a
 * dot-separated path. Array root nodes get the "[i]" index suffix.
 *
 * @param tree
 *            starting tree node
 * @param parent
 *            root tree node to stop at
 * @return path from the root tree node down to the given tree node
 */
protected String getTreeHierarchy(TreeNode tree, TreeNode parent) {
StringBuilder hierarchy = new StringBuilder();
while (!(tree.equals(parent))) {
hierarchy.insert(0, tree.getName());
hierarchy.insert(0, ".");
tree = tree.getFieldParent();
}
if (tree.getSchemaDataType().equals(SchemaDataType.ARRAY)) {
hierarchy.insert(0, (tree.getName() + getIndex()));
} else {
hierarchy.insert(0, tree.getName());
}
return hierarchy.toString();
}
/**
 * Retrieves the mapped element from the output schema.
 *
 * @param operator
 *            this operator
 * @return mapped element from the output schema
 */
protected Element getOutputElement(Operator operator) {
return operator.getBasicContainer().getRightContainer().getRightConnectors().get(0).getOutNode().getOutgoingLink().get(0).getInNode().getElementParent();
}
/**
 * Retrieves the mapped element from the input schema.
 *
 * @param operator
 *            this operator
 * @return mapped element from the input schema
 */
private Element getInputElement(Operator operator) {
return operator.getBasicContainer().getLeftContainer().getLeftConnectors().get(0).getInNode().getIncomingLink().get(0).getOutNode().getElementParent();
}
/**
 * Retrieves the object wired to this operator's input connector.
 *
 * @param operator
 *            this operator
 * @return the object wired to the input connector
 */
protected EObject getInputObject(Operator operator) {
EList<OperatorLeftConnector> leftConnectors = operator.getBasicContainer().getLeftContainer().getLeftConnectors();
EObject inputObject = leftConnectors.get(0).getInNode().getIncomingLink().get(0).getOutNode().eContainer();
return inputObject;
}
/**
 * Retrieves the object wired to this operator's output connector.
 *
 * @param operator
 *            this operator
 * @return the object wired to the output connector
 */
public EObject getOutputObject(Operator operator) {
EList<OperatorRightConnector> rightConnectors = operator.getBasicContainer().getRightContainer().getRightConnectors();
return rightConnectors.get(0).getOutNode().getOutgoingLink().get(0).getInNode().eContainer();
}
/** @return the "[i]" index suffix used for array path segments */
public static String getIndex() {
return INDEX;
}
/**
 * The operator sits three eContainer() hops above an in/out node EObject in
 * the EMF model; this helper hides that navigation.
 *
 * @param connector
 *            in or out node EObject
 * @return operator connected to the in/out node
 */
public Operator getOperator(EObject connector) {
return (Operator) connector.eContainer().eContainer().eContainer();
}
// NOTE(review): the misspelling "trasnform" is declared by the
// OperatorsTransformer interface (see @Override) and must keep its spelling
// here; fix it at the interface first if ever renamed.
@Override
public String trasnform(String statement, Operator operator, Operator nextOperator) {
// TODO Auto-generated method stub
return null;
}
}
| |
package it.finsiel.siged.mvc.presentation.action.protocollo;
import it.finsiel.siged.constant.Constants;
import it.finsiel.siged.exception.DataException;
import it.finsiel.siged.model.organizzazione.Utente;
import it.finsiel.siged.model.protocollo.ProtocolloIngresso;
import it.finsiel.siged.mvc.business.DocumentoDelegate;
import it.finsiel.siged.mvc.business.ProtocolloDelegate;
import it.finsiel.siged.mvc.presentation.actionform.protocollo.ScaricoForm;
import it.finsiel.siged.mvc.presentation.helper.ReportProtocolloView;
import it.finsiel.siged.mvc.vo.protocollo.AssegnatarioVO;
import it.finsiel.siged.mvc.vo.protocollo.DocumentoVO;
import it.finsiel.siged.mvc.vo.protocollo.ProtocolloVO;
import it.finsiel.siged.util.FileUtil;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import org.apache.struts.Globals;
import org.apache.struts.action.Action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;
import org.apache.struts.util.MessageResources;
/**
 * Struts action driving the "scarico" (protocol discharge) work list: lets a
 * user file assigned protocols to the records ("agli atti"), mark them in
 * progress, answer or refuse an assignment, search the list with optional
 * date/number/year bounds, and download an attached document.
 */
public class ScaricoAction extends Action {

    static Logger logger = Logger.getLogger(ScaricoAction.class.getName());

    // State codes passed to the delegate when updating or querying protocols.
    // NOTE(review): PROTOCOLLI_ASSEGNATI and PROTOCOLLI_AGLI_ATTI both map to
    // "A" — verify against the DB state codes that this duplication is intended.
    public final static String PROTOCOLLI_ASSEGNATI = "A";

    public final static String PROTOCOLLI_AGLI_ATTI = "A";

    public final static String PROTOCOLLI_IN_LAVORAZIONE = "N";

    public final static String PROTOCOLLI_IN_RISPOSTA = "R";

    /**
     * Dispatches on the pressed button (or request parameter) and performs the
     * matching protocol operation.
     *
     * @param mapping current action mapping
     * @param form expected to be a {@link ScaricoForm}
     * @param request HTTP request carrying the filter parameters
     * @param response written to directly only in the document-download branch
     * @return the forward to render, or <code>null</code> after streaming a
     *         document to the response
     * @throws Exception parse/delegate failures are left to the Struts handler
     */
    public ActionForward execute(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        ActionMessages errors = new ActionMessages();
        HttpSession session = request.getSession(true);
        ProtocolloDelegate delegate = ProtocolloDelegate.getInstance();
        ScaricoForm scarico = (ScaricoForm) form;
        // Optional search-range filters; null / 0 mean "no bound".
        Date dataProtocolloDa = null;
        Date dataProtocolloA = null;
        int numeroProtocolloDa = 0;
        int numeroProtocolloA = 0;
        int annoProtocolloDa = 0;
        int annoProtocolloA = 0;
        SimpleDateFormat df = new SimpleDateFormat("dd/MM/yyyy");
        // User/office type filter is currently fixed to "T"; the request
        // parameter "tipoUtenteUfficio" is intentionally ignored.
        String tipoUtenteUfficio = "T"; // tipo utente
        if (request.getParameter("dataRegistrazioneDa") != null
                && !"".equals(request.getParameter("dataRegistrazioneDa"))) {
            dataProtocolloDa = df.parse(request
                    .getParameter("dataRegistrazioneDa"));
        }
        if (request.getParameter("dataRegistrazioneA") != null
                && !"".equals(request.getParameter("dataRegistrazioneA"))) {
            dataProtocolloA = df.parse(request
                    .getParameter("dataRegistrazioneA"));
        }
        if (request.getParameter("numeroProtocolloDa") != null
                && !"".equals(request.getParameter("numeroProtocolloDa"))) {
            numeroProtocolloDa = Integer.parseInt(request
                    .getParameter("numeroProtocolloDa"));
        }
        if (request.getParameter("numeroProtocolloA") != null
                && !"".equals(request.getParameter("numeroProtocolloA"))) {
            numeroProtocolloA = Integer.parseInt(request
                    .getParameter("numeroProtocolloA"));
        }
        if (request.getParameter("annoProtocolloDa") != null
                && !"".equals(request.getParameter("annoProtocolloDa"))) {
            annoProtocolloDa = Integer.parseInt(request
                    .getParameter("annoProtocolloDa"));
        }
        if (request.getParameter("annoProtocolloA") != null
                && !"".equals(request.getParameter("annoProtocolloA"))) {
            annoProtocolloA = Integer.parseInt(request
                    .getParameter("annoProtocolloA"));
        }
        if (form == null) {
            logger.info(" Creating new ScaricoAction");
            form = new ScaricoForm();
            // FIX: keep the local reference in sync with the newly created
            // form; previously "scarico" stayed null here and every branch
            // below would throw a NullPointerException.
            scarico = (ScaricoForm) form;
            session.setAttribute(mapping.getAttribute(), form);
        }
        Utente utente = (Utente) session.getAttribute(Constants.UTENTE_KEY);
        if (scarico.getBtnAtti() != null) {
            // File the selected protocol to the records ("agli atti").
            scarico.setBtnAtti(null);
            String protocolloSelezionato = request
                    .getParameter("protocolloScarico");
            if (protocolloSelezionato != null) {
                ProtocolloVO protocolloVO = delegate.getProtocolloById(Integer
                        .parseInt(protocolloSelezionato));
                delegate.updateScarico(protocolloVO, PROTOCOLLI_AGLI_ATTI,
                        utente);
                scarico.setProtocolloScarico(null);
                scarico.removeProtocolliScarico();
                scarico.setMsgAssegnatarioCompetente(null);
                scarico.setProtocolliScarico(delegate.getProtocolliAssegnati(
                        utente, annoProtocolloDa, annoProtocolloA,
                        numeroProtocolloDa, numeroProtocolloA,
                        dataProtocolloDa, dataProtocolloA,
                        PROTOCOLLI_ASSEGNATI, scarico.getStatoProtocollo(),
                        tipoUtenteUfficio));
                // "errors" doubles as carrier for the success message.
                errors.add("operazione_ok", new ActionMessage("operazione_ok"));
            }
        } else if (scarico.getBtnLavorazione() != null) {
            // Mark the selected protocol as "in progress".
            scarico.setBtnLavorazione(null);
            String protocolloSelezionato = request
                    .getParameter("protocolloScarico");
            if (protocolloSelezionato != null) {
                ProtocolloVO protocolloVO = delegate.getProtocolloById(Integer
                        .parseInt(protocolloSelezionato));
                delegate.updateScarico(protocolloVO, PROTOCOLLI_IN_LAVORAZIONE,
                        utente);
                scarico.setProtocolloScarico(null);
                scarico.removeProtocolliScarico();
                scarico.setMsgAssegnatarioCompetente(null);
                scarico.setProtocolliScarico(delegate.getProtocolliAssegnati(
                        utente, annoProtocolloDa, annoProtocolloA,
                        numeroProtocolloDa, numeroProtocolloA,
                        dataProtocolloDa, dataProtocolloA,
                        PROTOCOLLI_ASSEGNATI, scarico.getStatoProtocollo(),
                        tipoUtenteUfficio));
                errors.add("operazione_ok", new ActionMessage("operazione_ok"));
            }
        } else if (scarico.getBtnRisposta() != null) {
            // Forward to the "create answer protocol" flow for the selected id.
            scarico.setBtnRisposta(null);
            if (scarico.getProtocolloScarico() != null) {
                Integer protocolloId = new Integer(scarico
                        .getProtocolloScarico());
                if (protocolloId != null) {
                    request.setAttribute("risposta", Boolean.TRUE);
                    request.setAttribute("protocolloId", protocolloId);
                    saveToken(request);
                    return (mapping.findForward("creaProtocolloRisposta"));
                }
            }
        } else if (scarico.getBtnRifiuta() != null) {
            // Refuse the assignment, optionally recording the competent
            // assignee's message, then reload the list.
            scarico.setBtnRifiuta(null);
            if (scarico.getProtocolloScarico() != null) {
                Integer protocolloId = new Integer(scarico
                        .getProtocolloScarico());
                if (protocolloId != null) {
                    ProtocolloIngresso pi = delegate
                            .getProtocolloIngressoById(Integer.parseInt(scarico
                                    .getProtocolloScarico()));
                    ProtocolloVO protocolloVO = pi.getProtocollo();
                    protocolloVO.setDataScarico(null);
                    if (scarico.getMsgAssegnatarioCompetente() != null
                            && !"".equals(scarico
                                    .getMsgAssegnatarioCompetente().trim())) {
                        aggiornaMsgRifiutoAssegnatari(scarico
                                .getMsgAssegnatarioCompetente(), pi);
                    }
                    delegate.rifiutaProtocollo(pi, "R", "F", utente);
                }
                scarico.setProtocolloScarico(null);
                scarico.removeProtocolliScarico();
                scarico.setMsgAssegnatarioCompetente(null);
                scarico.setProtocolliScarico(delegate.getProtocolliAssegnati(
                        utente, annoProtocolloDa, annoProtocolloA,
                        numeroProtocolloDa, numeroProtocolloA,
                        dataProtocolloDa, dataProtocolloA,
                        PROTOCOLLI_ASSEGNATI, scarico.getStatoProtocollo(),
                        tipoUtenteUfficio));
                errors.add("operazione_ok", new ActionMessage("operazione_ok"));
            }
        } else if (scarico.getBtnCerca() != null) {
            // Search; refuse to load result lists larger than the configured cap.
            scarico.setBtnCerca(null);
            scarico.setProtocolloScarico(null);
            scarico.setMsgAssegnatarioCompetente(null);
            MessageResources bundle = (MessageResources) request
                    .getAttribute(Globals.MESSAGES_KEY);
            int maxRighe = Integer.parseInt(bundle
                    .getMessage("protocollo.max.righe.lista"));
            int contaRighe = delegate.contaProtocolliAssegnati(utente,
                    annoProtocolloDa, annoProtocolloA, numeroProtocolloDa,
                    numeroProtocolloA, dataProtocolloDa, dataProtocolloA,
                    PROTOCOLLI_ASSEGNATI, scarico.getStatoProtocollo(),
                    tipoUtenteUfficio);
            if (contaRighe == 0) {
                errors.add("nessun_dato", new ActionMessage("nessun_dato", "",
                        ""));
            } else if (contaRighe <= maxRighe) {
                scarico.setProtocolliScarico(delegate.getProtocolliAssegnati(
                        utente, annoProtocolloDa, annoProtocolloA,
                        numeroProtocolloDa, numeroProtocolloA,
                        dataProtocolloDa, dataProtocolloA,
                        PROTOCOLLI_ASSEGNATI, scarico.getStatoProtocollo(),
                        tipoUtenteUfficio));
                return (mapping.findForward("input"));
            } else {
                errors.add("controllo.maxrighe", new ActionMessage(
                        "controllo.maxrighe", "" + contaRighe,
                        "protocolli scaricati/riassegnati", "" + maxRighe));
            }
        } else if (request.getParameter("btnRiassegna") != null) {
            request.setAttribute("protocolloId", new Integer(scarico
                    .getProtocolloScarico()));
            return (mapping.findForward("riassegnaProtocollo"));
        } else if (request.getParameter("protocolloSelezionato") != null) {
            request.setAttribute("protocolloId", new Integer(request
                    .getParameter("protocolloSelezionato")));
            return (mapping.findForward("visualizzaProtocolloIngresso"));
        } else if (request.getParameter("downloadDocprotocolloSelezionato") != null) {
            // Stream the protocol's document to the client; returning null
            // tells Struts the response has been fully handled here.
            Integer id = new Integer(Integer.parseInt(request
                    .getParameter("downloadDocprotocolloSelezionato")));
            ReportProtocolloView prot = (ReportProtocolloView) scarico
                    .getProtocolliScarico().get(id);
            InputStream is = null;
            OutputStream os = null;
            try {
                DocumentoVO doc = DocumentoDelegate.getInstance().getDocumento(
                        prot.getDocumentoId());
                if (doc != null) {
                    os = response.getOutputStream();
                    response.setContentType(doc.getContentType());
                    response.setHeader("Content-Disposition",
                            "attachment;filename=" + doc.getFileName());
                    response.setHeader("Cache-control", "");
                    if ((doc.getId() != null && !doc.isMustCreateNew())) {
                        DocumentoDelegate.getInstance().writeDocumentToStream(
                                doc.getId().intValue(), os);
                    } else {
                        is = new FileInputStream(doc.getPath());
                        FileUtil.writeFile(is, os);
                    }
                }
            } catch (FileNotFoundException e) {
                logger.error("", e);
                errors.add("download", new ActionMessage("error.notfound"));
            } catch (IOException e) {
                logger.error("", e);
                errors.add("download", new ActionMessage("error.cannot.read"));
            } catch (DataException e) {
                logger.error("", e);
                errors.add("download", new ActionMessage("error.cannot.read"));
            } finally {
                // Always release both streams, even on partial failure.
                FileUtil.closeIS(is);
                FileUtil.closeOS(os);
            }
            return null;
        }
        if (!errors.isEmpty()) {
            saveErrors(request, errors);
        }
        // NOTE(review): this clears any list the branches above just loaded
        // before forwarding — presumably the view reloads it; confirm.
        scarico.setProtocolliScarico(null);
        logger.info("Execute ScaricoAction");
        return (mapping.findForward("input"));
    }

    /**
     * Copies the competent assignee's refusal message onto both the competent
     * assignee and the protocol itself.
     *
     * @param msgAssegnatarioCompetente refusal message to record
     * @param protocollo inbound protocol whose assignees are scanned
     */
    private void aggiornaMsgRifiutoAssegnatari(
            String msgAssegnatarioCompetente, ProtocolloIngresso protocollo) {
        Collection assegnatari = protocollo.getAssegnatari();
        if (assegnatari != null) {
            for (Iterator i = assegnatari.iterator(); i.hasNext();) {
                AssegnatarioVO assegnatario = (AssegnatarioVO) i.next();
                if (assegnatario.isCompetente()) {
                    assegnatario
                            .setMsgAssegnatarioCompetente(msgAssegnatarioCompetente);
                    protocollo
                            .setMsgAssegnatarioCompetente(msgAssegnatarioCompetente);
                }
            }
        }
    }
}
| |
package varaha.text;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.util.Version;
/**
 * English stop words (plus a set of common first names) for text analysis,
 * exposed as a single case-insensitive Lucene {@link CharArraySet}.
 *
 * <p>Rewritten from double-brace initialization — an anonymous CharArraySet
 * subclass that bloats the class file and pins a hidden outer reference — to
 * plain array constants filled in a static initializer. Exact consecutive
 * duplicates in the original list (down, high, new, right, still) were
 * dropped; a set's contents are unchanged by that.
 */
public class StopWords {

    /** General English stop words, kept in the original list order. */
    private static final String[] WORDS = {
        "a", "about", "above", "across", "after", "again", "against", "all",
        "almost", "alone", "along", "already", "also", "although", "always",
        "among", "an", "and", "another", "any", "anybody", "anyone",
        "anything", "anywhere", "apos", "are", "area", "areas", "around",
        "as", "ask", "asked", "asking", "asks", "at", "away", "back",
        "backed", "backing", "backs", "be", "became", "because", "become",
        "becomes", "been", "before", "began", "behind", "being", "beings",
        "best", "better", "between", "big", "both", "but", "by", "came",
        "can", "cannot", "case", "cases", "certain", "certainly", "clear",
        "clearly", "come", "could", "did", "differ", "different",
        "differently", "do", "does", "done", "down", "downed", "downing",
        "downs", "during", "each", "early", "either", "end", "ended",
        "ending", "ends", "enough", "even", "evenly", "ever", "every",
        "everybody", "everyone", "everything", "everywhere", "face", "faces",
        "fact", "facts", "far", "felt", "few", "find", "finds", "first",
        "for", "four", "from", "full", "fully", "further", "furthered",
        "furthering", "furthers", "gave", "general", "generally", "get",
        "gets", "give", "given", "gives", "go", "going", "good", "goods",
        "got", "great", "greater", "greatest", "group", "grouped",
        "grouping", "groups", "had", "has", "have", "having", "he", "her",
        "here", "herself", "high", "higher", "highest", "him", "himself",
        "his", "how", "however", "i", "if", "important", "in", "interest",
        "interested", "interesting", "interests", "into", "is", "it", "its",
        "it's", "itself", "just", "keep", "keeps", "kind", "knew", "know",
        "known", "knows", "large", "largely", "last", "later", "latest",
        "least", "less", "let", "lets", "like", "likely", "long", "longer",
        "longest", "made", "make", "making", "man", "many", "may", "me",
        "member", "members", "men", "might", "more", "most", "mostly", "mr",
        "mrs", "much", "must", "my", "myself", "nbsp", "necessary", "need",
        "needed", "needing", "needs", "never", "new", "newer", "newest",
        "next", "no", "nobody", "non", "noone", "not", "nothing", "now",
        "nowhere", "number", "numbers", "of", "off", "often", "old",
        "older", "oldest", "on", "once", "one", "only", "open", "opened",
        "opening", "opens", "or", "order", "ordered", "ordering", "orders",
        "other", "others", "our", "out", "over", "part", "parted",
        "parting", "parts", "per", "perhaps", "place", "places", "point",
        "pointed", "pointing", "points", "possible", "present", "presented",
        "presenting", "presents", "problem", "problems", "put", "puts",
        "quite", "quot", "rather", "really", "right", "room", "rooms",
        "said", "same", "saw", "say", "says", "second", "seconds", "see",
        "seem", "seemed", "seeming", "seems", "sees", "several", "shall",
        "she", "should", "show", "showed", "showing", "shows", "side",
        "sides", "since", "small", "smaller", "smallest", "so", "some",
        "somebody", "someone", "something", "somewhere", "state", "states",
        "still", "such", "sure", "take", "taken", "than", "that", "the",
        "their", "them", "then", "there", "therefore", "these", "they",
        "thing", "things", "think", "thinks", "this", "those", "though",
        "thought", "thoughts", "three", "through", "thus", "to", "today",
        "together", "too", "took", "toward", "turn", "turned", "turning",
        "turns", "two", "under", "until", "up", "upon", "us", "use",
        "used", "uses", "very", "want", "wanted", "wanting", "wants",
        "was", "way", "ways", "we", "well", "wells", "went", "were",
        "what", "when", "where", "whether", "which", "while", "who",
        "whole", "whose", "why", "will", "with", "within", "without",
        "work", "worked", "working", "works", "would", "year", "years",
        "yet", "you", "young", "younger", "youngest", "your", "yours",
    };

    /** Common first names; you may not want these in general. */
    private static final String[] NAMES = {
        "alex", "alex's", "alexis", "alexis's", "amy", "amy's", "andre",
        "andre's", "andrea", "andrea's", "ann", "ann's", "anne", "anne's",
        "arthur", "arthur's", "betty", "betty's", "bill", "bill's",
        "carol", "carol's", "cathy", "cathy's", "celia", "celia's",
        "chris", "chris's", "cindy", "cindy's", "cynthia", "cynthia's",
        "dan", "dan's", "dave", "dave's", "debbi", "debbi's", "debbie",
        "debbie's", "don", "don's", "donna", "donna's", "doug", "doug's",
        "dylan", "dylan's", "edward", "edward's", "eileen", "eileen's",
        "ellen", "ellen's", "emily", "emily's", "eric", "eric's", "erica",
        "erica's", "eva", "eva's", "helen", "helen's", "jane", "jane's",
        "jeff", "jeff's", "jenni", "jenni's", "jenny", "jenny's", "jill",
        "jill's", "jim", "jim's", "joann", "joann's", "joe", "joe's",
        "judy", "judy's", "karen", "karen's", "kat", "kat's", "katt",
        "katt's", "kim", "kim's", "linda", "linda's", "lisa", "lisa's",
        "lloyd", "lloyd's", "lynn", "lynn's", "marcy", "marcy's", "mark",
        "mark's", "mary", "mary's", "melanie", "melanie's", "mona",
        "mona's", "nancy", "nancy's", "nicole", "nicole's", "nina",
        "nina's", "peggy", "peggy's", "renee", "renee's", "riki",
        "riki's", "rikki", "rikki's", "robert", "robert's", "ruth",
        "ruth's", "sally", "sally's", "sara", "sara's", "sarah",
        "sarah's", "sharon", "sharon's", "sue", "sue's", "teddy",
        "teddy's", "teresa", "teresa's", "terri", "terri's", "terry",
        "terry's", "tim", "tim's", "timmy", "timmy's", "tom", "tom's",
        "vicky", "vicky's", "zeke", "zeke's",
    };

    /** Case-insensitive (ignoreCase = true) union of WORDS and NAMES. */
    public static final CharArraySet ENGLISH_STOP_WORDS;

    static {
        ENGLISH_STOP_WORDS = new CharArraySet(Version.LUCENE_44,
                Arrays.asList(WORDS), true);
        ENGLISH_STOP_WORDS.addAll(Arrays.asList(NAMES));
    }
}
| |
///////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2001, Eric D. Friedman All Rights Reserved.
// Copyright (c) 2009, Rob Eden All Rights Reserved.
// Copyright (c) 2009, Jeff Randall All Rights Reserved.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
///////////////////////////////////////////////////////////////////////////////
package gnu.trove.map.hash;
//////////////////////////////////////////////////
// THIS IS A GENERATED CLASS. DO NOT HAND EDIT! //
//////////////////////////////////////////////////
import gnu.trove.map.TFloatByteMap;
import gnu.trove.function.TByteFunction;
import gnu.trove.procedure.*;
import gnu.trove.set.*;
import gnu.trove.iterator.*;
import gnu.trove.iterator.hash.*;
import gnu.trove.impl.hash.*;
import gnu.trove.impl.HashFunctions;
import gnu.trove.*;
import java.io.*;
import java.util.*;
/**
* An open addressed Map implementation for float keys and byte values.
*
* @author Eric D. Friedman
* @author Rob Eden
* @author Jeff Randall
* @version $Id: _K__V_HashMap.template,v 1.1.2.16 2010/03/02 04:09:50 robeden Exp $
*/
public class TFloatByteHashMap extends TFloatByteHash implements TFloatByteMap, Externalizable {
static final long serialVersionUID = 1L;
/** the values of the map; runs parallel to the inherited key array _set */
protected transient byte[] _values;
/**
* Creates a new <code>TFloatByteHashMap</code> instance with the default
* capacity and load factor.
*/
public TFloatByteHashMap() {
super();
}
/**
* Creates a new <code>TFloatByteHashMap</code> instance with a prime
* capacity equal to or greater than <tt>initialCapacity</tt> and
* with the default load factor.
*
* @param initialCapacity an <code>int</code> value
*/
public TFloatByteHashMap( int initialCapacity ) {
super( initialCapacity );
}
/**
* Creates a new <code>TFloatByteHashMap</code> instance with a prime
* capacity equal to or greater than <tt>initialCapacity</tt> and
* with the specified load factor.
*
* @param initialCapacity an <code>int</code> value
* @param loadFactor a <code>float</code> value
*/
public TFloatByteHashMap( int initialCapacity, float loadFactor ) {
super( initialCapacity, loadFactor );
}
/**
* Creates a new <code>TFloatByteHashMap</code> instance with a prime
* capacity equal to or greater than <tt>initialCapacity</tt> and
* with the specified load factor.
*
* @param initialCapacity an <code>int</code> value
* @param loadFactor a <code>float</code> value
* @param noEntryKey a <code>float</code> value that represents
* <tt>null</tt> for the Key set.
* @param noEntryValue a <code>byte</code> value that represents
* <tt>null</tt> for the Value set.
*/
public TFloatByteHashMap( int initialCapacity, float loadFactor,
float noEntryKey, byte noEntryValue ) {
super( initialCapacity, loadFactor, noEntryKey, noEntryValue );
}
/**
* Creates a new <code>TFloatByteHashMap</code> instance containing
* all of the entries in the map passed in.
*
* <p>If the arrays differ in length, only the first
* <tt>min(keys.length, values.length)</tt> pairs are inserted; later
* duplicate keys overwrite earlier ones.
*
* @param keys a <tt>float</tt> array containing the keys for the matching values.
* @param values a <tt>byte</tt> array containing the values.
*/
public TFloatByteHashMap( float[] keys, byte[] values ) {
super( Math.max( keys.length, values.length ) );
int size = Math.min( keys.length, values.length );
for ( int i = 0; i < size; i++ ) {
this.put( keys[i], values[i] );
}
}
/**
* Creates a new <code>TFloatByteHashMap</code> instance containing
* all of the entries in the map passed in.
*
* <p>When the source is itself a <code>TFloatByteHashMap</code>, its load
* factor and no-entry sentinels are copied first so the copy behaves
* identically; the backing arrays are pre-filled with the sentinels when
* they are non-zero, since fresh arrays only default to 0.
*
* @param map a <tt>TFloatByteMap</tt> that will be duplicated.
*/
public TFloatByteHashMap( TFloatByteMap map ) {
super( map.size() );
if ( map instanceof TFloatByteHashMap ) {
TFloatByteHashMap hashmap = ( TFloatByteHashMap ) map;
this._loadFactor = hashmap._loadFactor;
this.no_entry_key = hashmap.no_entry_key;
this.no_entry_value = hashmap.no_entry_value;
//noinspection RedundantCast
if ( this.no_entry_key != ( float ) 0 ) {
Arrays.fill( _set, this.no_entry_key );
}
//noinspection RedundantCast
if ( this.no_entry_value != ( byte ) 0 ) {
Arrays.fill( _values, this.no_entry_value );
}
// re-run setUp so the capacity honours the copied load factor
setUp( (int) Math.ceil( DEFAULT_CAPACITY / _loadFactor ) );
}
putAll( map );
}
/**
* initializes the hashtable to a prime capacity which is at least
* <tt>initialCapacity + 1</tt>.
*
* @param initialCapacity an <code>int</code> value
* @return the actual capacity chosen
*/
protected int setUp( int initialCapacity ) {
int capacity;
capacity = super.setUp( initialCapacity );
// allocate the value array in lock-step with the key/state arrays
_values = new byte[capacity];
return capacity;
}
/**
* rehashes the map to the new capacity.
*
* @param newCapacity an <code>int</code> value
*/
/** {@inheritDoc} */
protected void rehash( int newCapacity ) {
int oldCapacity = _set.length;
float oldKeys[] = _set;
byte oldVals[] = _values;
byte oldStates[] = _states;
_set = new float[newCapacity];
_values = new byte[newCapacity];
_states = new byte[newCapacity];
// re-insert every FULL slot; slot indices change with the capacity
for ( int i = oldCapacity; i-- > 0; ) {
if( oldStates[i] == FULL ) {
float o = oldKeys[i];
int index = insertKey( o );
_values[index] = oldVals[i];
}
}
}
/** {@inheritDoc} */
public byte put( float key, byte value ) {
int index = insertKey( key );
return doPut( key, value, index );
}
/** {@inheritDoc} */
public byte putIfAbsent( float key, byte value ) {
int index = insertKey( key );
// a negative index from insertKey means the key already exists
if (index < 0)
return _values[-index - 1];
return doPut( key, value, index );
}
// Shared tail of put/putIfAbsent: stores the value and, for brand-new
// mappings, runs the post-insert bookkeeping (size / free-slot tracking).
private byte doPut( float key, byte value, int index ) {
byte previous = no_entry_value;
boolean isNewMapping = true;
if ( index < 0 ) {
// existing key: decode the slot index and remember the old value
index = -index -1;
previous = _values[index];
isNewMapping = false;
}
_values[index] = value;
if (isNewMapping) {
postInsertHook( consumeFreeSlot );
}
return previous;
}
/** {@inheritDoc} */
public void putAll( Map<? extends Float, ? extends Byte> map ) {
ensureCapacity( map.size() );
// could optimize this for cases when map instanceof THashMap
for ( Map.Entry<? extends Float, ? extends Byte> entry : map.entrySet() ) {
this.put( entry.getKey().floatValue(), entry.getValue().byteValue() );
}
}
/** {@inheritDoc} */
public void putAll( TFloatByteMap map ) {
ensureCapacity( map.size() );
TFloatByteIterator iter = map.iterator();
while ( iter.hasNext() ) {
iter.advance();
this.put( iter.key(), iter.value() );
}
}
/** {@inheritDoc} */
public byte get( float key ) {
int index = index( key );
// index < 0 signals "absent": report the no-entry sentinel
return index < 0 ? no_entry_value : _values[index];
}
/** {@inheritDoc} */
public void clear() {
super.clear();
// reset every slot to the sentinel values and mark all slots FREE
Arrays.fill( _set, 0, _set.length, no_entry_key );
Arrays.fill( _values, 0, _values.length, no_entry_value );
Arrays.fill( _states, 0, _states.length, FREE );
}
/** {@inheritDoc} */
public boolean isEmpty() {
return 0 == _size;
}
/** {@inheritDoc} */
public byte remove( float key ) {
byte prev = no_entry_value;
int index = index( key );
if ( index >= 0 ) {
prev = _values[index];
removeAt( index ); // clear key,state; adjust size
}
return prev;
}
/** {@inheritDoc} */
protected void removeAt( int index ) {
_values[index] = no_entry_value;
super.removeAt( index ); // clear key, state; adjust size
}
/** {@inheritDoc} */
public TFloatSet keySet() {
// live view: mutations through the view write through to this map
return new TKeyView();
}
/** {@inheritDoc} */
public float[] keys() {
float[] keys = new float[size()];
float[] k = _set;
byte[] states = _states;
// copy only slots marked FULL; iteration order is unspecified
for ( int i = k.length, j = 0; i-- > 0; ) {
if ( states[i] == FULL ) {
keys[j++] = k[i];
}
}
return keys;
}
/** {@inheritDoc} */
public float[] keys( float[] array ) {
int size = size();
if ( array.length < size ) {
// caller's array too small: allocate a fresh one instead
array = new float[size];
}
float[] keys = _set;
byte[] states = _states;
for ( int i = keys.length, j = 0; i-- > 0; ) {
if ( states[i] == FULL ) {
array[j++] = keys[i];
}
}
return array;
}
/** {@inheritDoc} */
public TByteCollection valueCollection() {
// live view over the values, analogous to keySet()
return new TValueView();
}
/** {@inheritDoc} */
public byte[] values() {
byte[] vals = new byte[size()];
byte[] v = _values;
byte[] states = _states;
// copy only slots marked FULL; order matches keys()
for ( int i = v.length, j = 0; i-- > 0; ) {
if ( states[i] == FULL ) {
vals[j++] = v[i];
}
}
return vals;
}
/** {@inheritDoc} */
public byte[] values( byte[] array ) {
int size = size();
if ( array.length < size ) {
// caller's array too small: allocate a fresh one instead
array = new byte[size];
}
byte[] v = _values;
byte[] states = _states;
for ( int i = v.length, j = 0; i-- > 0; ) {
if ( states[i] == FULL ) {
array[j++] = v[i];
}
}
return array;
}
/** {@inheritDoc} */
public boolean containsValue( byte val ) {
// linear scan: values are not indexed
byte[] states = _states;
byte[] vals = _values;
for ( int i = vals.length; i-- > 0; ) {
if ( states[i] == FULL && val == vals[i] ) {
return true;
}
}
return false;
}
/** {@inheritDoc} */
public boolean containsKey( float key ) {
return contains( key );
}
/** {@inheritDoc} */
public TFloatByteIterator iterator() {
return new TFloatByteHashIterator( this );
}
/** {@inheritDoc} */
public boolean forEachKey( TFloatProcedure procedure ) {
return forEach( procedure );
}
/** {@inheritDoc} */
public boolean forEachValue( TByteProcedure procedure ) {
byte[] states = _states;
byte[] values = _values;
// stops early (returning false) as soon as the procedure vetoes
for ( int i = values.length; i-- > 0; ) {
if ( states[i] == FULL && ! procedure.execute( values[i] ) ) {
return false;
}
}
return true;
}
/** {@inheritDoc} */
public boolean forEachEntry( TFloatByteProcedure procedure ) {
byte[] states = _states;
float[] keys = _set;
byte[] values = _values;
for ( int i = keys.length; i-- > 0; ) {
if ( states[i] == FULL && ! procedure.execute( keys[i], values[i] ) ) {
return false;
}
}
return true;
}
/** {@inheritDoc} */
public void transformValues( TByteFunction function ) {
byte[] states = _states;
byte[] values = _values;
// replace each stored value with function(value), in place
for ( int i = values.length; i-- > 0; ) {
if ( states[i] == FULL ) {
values[i] = function.execute( values[i] );
}
}
}
/** {@inheritDoc} */
public boolean retainEntries( TFloatByteProcedure procedure ) {
boolean modified = false;
byte[] states = _states;
float[] keys = _set;
byte[] values = _values;
// Temporarily disable compaction. This is a fix for bug #1738760
tempDisableAutoCompaction();
try {
for ( int i = keys.length; i-- > 0; ) {
if ( states[i] == FULL && ! procedure.execute( keys[i], values[i] ) ) {
removeAt( i );
modified = true;
}
}
}
finally {
// re-enable and, if anything changed, allow a deferred compaction
reenableAutoCompaction( true );
}
return modified;
}
/** {@inheritDoc} */
public boolean increment( float key ) {
return adjustValue( key, ( byte ) 1 );
}
/** {@inheritDoc} */
public boolean adjustValue( float key, byte amount ) {
int index = index( key );
if (index < 0) {
// absent keys are not created; caller gets false
return false;
} else {
_values[index] += amount;
return true;
}
}
/**
 * {@inheritDoc}
 *
 * <p>Adjusts the value of an existing key by <tt>adjust_amount</tt>, or
 * inserts <tt>put_amount</tt> for a missing key; returns the value now
 * stored under the key.
 */
public byte adjustOrPutValue( float key, byte adjust_amount, byte put_amount ) {
int index = insertKey( key );
final boolean isNewMapping;
final byte newValue;
if ( index < 0 ) {
// negative index encodes an existing slot
index = -index -1;
newValue = ( _values[index] += adjust_amount );
isNewMapping = false;
} else {
newValue = ( _values[index] = put_amount );
isNewMapping = true;
}
// NOTE(review): removed the unused local "previousState" (a dead read of
// _states[index]) left over from the code-generation template; consider
// fixing _K__V_HashMap.template upstream as well.
if ( isNewMapping ) {
postInsertHook(consumeFreeSlot);
}
return newValue;
}
/** a view onto the keys of the map. */
protected class TKeyView implements TFloatSet {
/** {@inheritDoc} */
public TFloatIterator iterator() {
return new TFloatByteKeyHashIterator( TFloatByteHashMap.this );
}
/** {@inheritDoc} */
public float getNoEntryValue() {
return no_entry_key;
}
/** {@inheritDoc} */
public int size() {
// the view has exactly as many elements as the enclosing map
return _size;
}
/** {@inheritDoc} */
public boolean isEmpty() {
return 0 == _size;
}
/** {@inheritDoc} */
public boolean contains( float entry ) {
return TFloatByteHashMap.this.contains( entry );
}
/** {@inheritDoc} */
public float[] toArray() {
return TFloatByteHashMap.this.keys();
}
/** {@inheritDoc} */
public float[] toArray( float[] dest ) {
return TFloatByteHashMap.this.keys( dest );
}
/**
* Unsupported when operating upon a Key Set view of a TFloatByteMap
* <p/>
* {@inheritDoc}
*/
public boolean add( float entry ) {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
public boolean remove( float entry ) {
// NOTE(review): reports "removed" by comparing the evicted value to
// no_entry_value, so removing a key whose stored value equals the
// sentinel returns false — confirm this matches the set contract.
return no_entry_value != TFloatByteHashMap.this.remove( entry );
}
/** {@inheritDoc} */
public boolean containsAll( Collection<?> collection ) {
for ( Object element : collection ) {
if ( element instanceof Float ) {
float ele = ( ( Float ) element ).floatValue();
if ( ! TFloatByteHashMap.this.containsKey( ele ) ) {
return false;
}
} else {
// a non-Float element can never be a key of this map
return false;
}
}
return true;
}
/** {@inheritDoc} */
public boolean containsAll( TFloatCollection collection ) {
TFloatIterator iter = collection.iterator();
while ( iter.hasNext() ) {
if ( ! TFloatByteHashMap.this.containsKey( iter.next() ) ) {
return false;
}
}
return true;
}
/** {@inheritDoc} */
public boolean containsAll( float[] array ) {
for ( float element : array ) {
if ( ! TFloatByteHashMap.this.contains( element ) ) {
return false;
}
}
return true;
}
/**
* Unsupported when operating upon a Key Set view of a TFloatByteMap
* <p/>
* {@inheritDoc}
*/
public boolean addAll( Collection<? extends Float> collection ) {
throw new UnsupportedOperationException();
}
/**
* Unsupported when operating upon a Key Set view of a TFloatByteMap
* <p/>
* {@inheritDoc}
*/
public boolean addAll( TFloatCollection collection ) {
throw new UnsupportedOperationException();
}
/**
* Unsupported when operating upon a Key Set view of a TFloatByteMap
* <p/>
* {@inheritDoc}
*/
public boolean addAll( float[] array ) {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@SuppressWarnings({"SuspiciousMethodCalls"})
public boolean retainAll( Collection<?> collection ) {
boolean modified = false;
TFloatIterator iter = iterator();
while ( iter.hasNext() ) {
if ( ! collection.contains( Float.valueOf ( iter.next() ) ) ) {
iter.remove();
modified = true;
}
}
return modified;
}
/** {@inheritDoc} */
public boolean retainAll( TFloatCollection collection ) {
if ( this == collection ) {
// retaining against ourselves can never change anything
return false;
}
boolean modified = false;
TFloatIterator iter = iterator();
while ( iter.hasNext() ) {
if ( ! collection.contains( iter.next() ) ) {
iter.remove();
modified = true;
}
}
return modified;
}
/** {@inheritDoc} */
public boolean retainAll( float[] array ) {
boolean changed = false;
// NOTE(review): sorts the caller's array in place so binarySearch can
// be used — a visible side effect on the argument.
Arrays.sort( array );
float[] set = _set;
byte[] states = _states;
for ( int i = set.length; i-- > 0; ) {
if ( states[i] == FULL && ( Arrays.binarySearch( array, set[i] ) < 0) ) {
removeAt( i );
changed = true;
}
}
return changed;
}
/** {@inheritDoc} */
public boolean removeAll( Collection<?> collection ) {
boolean changed = false;
for ( Object element : collection ) {
// Non-Float elements are silently skipped: they cannot be keys here.
if ( element instanceof Float ) {
float c = ( ( Float ) element ).floatValue();
if ( remove( c ) ) {
changed = true;
}
}
}
return changed;
}
/** {@inheritDoc} */
public boolean removeAll( TFloatCollection collection ) {
if ( this == collection ) {
// Removing every element of ourselves is just a clear().
clear();
return true;
}
boolean changed = false;
TFloatIterator iter = collection.iterator();
while ( iter.hasNext() ) {
float element = iter.next();
if ( remove( element ) ) {
changed = true;
}
}
return changed;
}
/** {@inheritDoc} */
public boolean removeAll( float[] array ) {
boolean changed = false;
for ( int i = array.length; i-- > 0; ) {
if ( remove( array[i] ) ) {
changed = true;
}
}
return changed;
}
/** {@inheritDoc} */
public void clear() {
// Clearing the view clears the whole backing map (keys and values).
TFloatByteHashMap.this.clear();
}
/** {@inheritDoc} */
public boolean forEach( TFloatProcedure procedure ) {
return TFloatByteHashMap.this.forEachKey( procedure );
}
/**
* Two key-set views are equal when the other object is a {@code TFloatSet}
* of the same size containing every key stored in this map.
*/
@Override
public boolean equals( Object other ) {
if (! (other instanceof TFloatSet)) {
return false;
}
final TFloatSet that = ( TFloatSet ) other;
if ( that.size() != this.size() ) {
return false;
}
// Equal sizes + subset relation implies set equality.
for ( int i = _states.length; i-- > 0; ) {
if ( _states[i] == FULL ) {
if ( ! that.contains( _set[i] ) ) {
return false;
}
}
}
return true;
}
/**
* Sums the hash of every key currently stored in the backing map, so the
* result is order-independent and consistent with {@link #equals(Object)}.
*/
@Override
public int hashCode() {
int sum = 0;
for ( int idx = 0; idx < _states.length; idx++ ) {
if ( _states[idx] != FULL ) {
continue;
}
sum += HashFunctions.hash( _set[idx] );
}
return sum;
}
/**
* Renders the keys as {@code "{k1, k2, ...}"} in internal table order.
*/
@Override
public String toString() {
final StringBuilder buf = new StringBuilder( "{" );
forEachKey( new TFloatProcedure() {
// Tracks whether a separator is needed before the next key.
private boolean first = true;
public boolean execute( float key ) {
if ( first ) {
first = false;
} else {
buf.append( ", " );
}
buf.append( key );
return true;
}
} );
buf.append( "}" );
return buf.toString();
}
}
/**
* A live view onto the values of the map. Mutating operations (remove,
* retainAll, removeAll, clear) write through to the backing map; add
* operations are unsupported because a value cannot exist without a key.
*/
protected class TValueView implements TByteCollection {
/** {@inheritDoc} */
public TByteIterator iterator() {
return new TFloatByteValueHashIterator( TFloatByteHashMap.this );
}
/** {@inheritDoc} */
public byte getNoEntryValue() {
return no_entry_value;
}
/** {@inheritDoc} */
public int size() {
return _size;
}
/** {@inheritDoc} */
public boolean isEmpty() {
return 0 == _size;
}
/** {@inheritDoc} */
public boolean contains( byte entry ) {
// Linear scan of the value table via the backing map.
return TFloatByteHashMap.this.containsValue( entry );
}
/** {@inheritDoc} */
public byte[] toArray() {
return TFloatByteHashMap.this.values();
}
/** {@inheritDoc} */
public byte[] toArray( byte[] dest ) {
return TFloatByteHashMap.this.values( dest );
}
/**
* Unsupported when operating upon a Value view of a TFloatByteMap
* <p/>
* {@inheritDoc}
*/
public boolean add( byte entry ) {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
public boolean remove( byte entry ) {
byte[] values = _values;
float[] set = _set;
// Removes only the FIRST mapping (scanning from the end of the table)
// whose value equals entry; other mappings to the same value remain.
for ( int i = values.length; i-- > 0; ) {
if ( ( set[i] != FREE && set[i] != REMOVED ) && entry == values[i] ) {
removeAt( i );
return true;
}
}
return false;
}
/** {@inheritDoc} */
public boolean containsAll( Collection<?> collection ) {
for ( Object element : collection ) {
if ( element instanceof Byte ) {
byte ele = ( ( Byte ) element ).byteValue();
if ( ! TFloatByteHashMap.this.containsValue( ele ) ) {
return false;
}
} else {
// Any non-Byte element cannot be a value of this map.
return false;
}
}
return true;
}
/** {@inheritDoc} */
public boolean containsAll( TByteCollection collection ) {
TByteIterator iter = collection.iterator();
while ( iter.hasNext() ) {
if ( ! TFloatByteHashMap.this.containsValue( iter.next() ) ) {
return false;
}
}
return true;
}
/** {@inheritDoc} */
public boolean containsAll( byte[] array ) {
for ( byte element : array ) {
if ( ! TFloatByteHashMap.this.containsValue( element ) ) {
return false;
}
}
return true;
}
/**
* Unsupported when operating upon a Value view of a TFloatByteMap
* <p/>
* {@inheritDoc}
*/
public boolean addAll( Collection<? extends Byte> collection ) {
throw new UnsupportedOperationException();
}
/**
* Unsupported when operating upon a Value view of a TFloatByteMap
* <p/>
* {@inheritDoc}
*/
public boolean addAll( TByteCollection collection ) {
throw new UnsupportedOperationException();
}
/**
* Unsupported when operating upon a Value view of a TFloatByteMap
* <p/>
* {@inheritDoc}
*/
public boolean addAll( byte[] array ) {
throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@SuppressWarnings({"SuspiciousMethodCalls"})
public boolean retainAll( Collection<?> collection ) {
boolean modified = false;
TByteIterator iter = iterator();
while ( iter.hasNext() ) {
// Box each value so the generic Collection can answer contains().
if ( ! collection.contains( Byte.valueOf ( iter.next() ) ) ) {
iter.remove();
modified = true;
}
}
return modified;
}
/** {@inheritDoc} */
public boolean retainAll( TByteCollection collection ) {
if ( this == collection ) {
// Retaining against itself can never remove anything.
return false;
}
boolean modified = false;
TByteIterator iter = iterator();
while ( iter.hasNext() ) {
if ( ! collection.contains( iter.next() ) ) {
iter.remove();
modified = true;
}
}
return modified;
}
/** {@inheritDoc} */
public boolean retainAll( byte[] array ) {
boolean changed = false;
// NOTE: sorts the caller's array in place so binarySearch can be used below.
Arrays.sort( array );
byte[] values = _values;
byte[] states = _states;
// Iterate backwards so removeAt() cannot disturb unvisited slots.
for ( int i = values.length; i-- > 0; ) {
if ( states[i] == FULL && ( Arrays.binarySearch( array, values[i] ) < 0) ) {
removeAt( i );
changed = true;
}
}
return changed;
}
/** {@inheritDoc} */
public boolean removeAll( Collection<?> collection ) {
boolean changed = false;
for ( Object element : collection ) {
// Non-Byte elements are silently skipped: they cannot be values here.
if ( element instanceof Byte ) {
byte c = ( ( Byte ) element ).byteValue();
if ( remove( c ) ) {
changed = true;
}
}
}
return changed;
}
/** {@inheritDoc} */
public boolean removeAll( TByteCollection collection ) {
if ( this == collection ) {
// Removing every element of ourselves is just a clear().
clear();
return true;
}
boolean changed = false;
TByteIterator iter = collection.iterator();
while ( iter.hasNext() ) {
byte element = iter.next();
if ( remove( element ) ) {
changed = true;
}
}
return changed;
}
/** {@inheritDoc} */
public boolean removeAll( byte[] array ) {
boolean changed = false;
for ( int i = array.length; i-- > 0; ) {
if ( remove( array[i] ) ) {
changed = true;
}
}
return changed;
}
/** {@inheritDoc} */
public void clear() {
// Clearing the view clears the whole backing map (keys and values).
TFloatByteHashMap.this.clear();
}
/** {@inheritDoc} */
public boolean forEach( TByteProcedure procedure ) {
return TFloatByteHashMap.this.forEachValue( procedure );
}
/** {@inheritDoc} */
@Override
public String toString() {
final StringBuilder buf = new StringBuilder( "{" );
forEachValue( new TByteProcedure() {
// Tracks whether a separator is needed before the next value.
private boolean first = true;
public boolean execute( byte value ) {
if ( first ) {
first = false;
} else {
buf.append( ", " );
}
buf.append( value );
return true;
}
} );
buf.append( "}" );
return buf.toString();
}
}
/** Iterator over the keys of the enclosing map; fails fast on concurrent modification. */
class TFloatByteKeyHashIterator extends THashPrimitiveIterator implements TFloatIterator {
/**
* Creates an iterator over the specified map
*
* @param hash the <tt>TPrimitiveHash</tt> we will be iterating over.
*/
TFloatByteKeyHashIterator( TPrimitiveHash hash ) {
super( hash );
}
/** {@inheritDoc} */
public float next() {
moveToNextIndex();
return _set[_index];
}
/** {@inheritDoc} */
public void remove() {
if ( _expectedSize != _hash.size() ) {
throw new ConcurrentModificationException();
}
// Disable auto compaction during the remove. This is a workaround for bug 1642768.
try {
_hash.tempDisableAutoCompaction();
TFloatByteHashMap.this.removeAt( _index );
}
finally {
_hash.reenableAutoCompaction( false );
}
// Track the removal so the fail-fast check above stays accurate.
_expectedSize--;
}
}
/** Iterator over the values of the enclosing map; fails fast on concurrent modification. */
class TFloatByteValueHashIterator extends THashPrimitiveIterator implements TByteIterator {
/**
* Creates an iterator over the specified map
*
* @param hash the <tt>TPrimitiveHash</tt> we will be iterating over.
*/
TFloatByteValueHashIterator( TPrimitiveHash hash ) {
super( hash );
}
/** {@inheritDoc} */
public byte next() {
moveToNextIndex();
return _values[_index];
}
/** {@inheritDoc} */
public void remove() {
if ( _expectedSize != _hash.size() ) {
throw new ConcurrentModificationException();
}
// Disable auto compaction during the remove. This is a workaround for bug 1642768.
try {
_hash.tempDisableAutoCompaction();
TFloatByteHashMap.this.removeAt( _index );
}
finally {
_hash.reenableAutoCompaction( false );
}
// Track the removal so the fail-fast check above stays accurate.
_expectedSize--;
}
}
/** Entry iterator over the enclosing map (advance/key/value protocol); fails fast on concurrent modification. */
class TFloatByteHashIterator extends THashPrimitiveIterator implements TFloatByteIterator {
/**
* Creates an iterator over the specified map
*
* @param map the <tt>TFloatByteHashMap</tt> we will be iterating over.
*/
TFloatByteHashIterator( TFloatByteHashMap map ) {
super( map );
}
/** {@inheritDoc} */
public void advance() {
moveToNextIndex();
}
/** {@inheritDoc} */
public float key() {
return _set[_index];
}
/** {@inheritDoc} */
public byte value() {
return _values[_index];
}
/** Replaces the value at the current entry, returning the previous one. */
public byte setValue( byte val ) {
byte old = value();
_values[_index] = val;
return old;
}
/** {@inheritDoc} */
public void remove() {
if ( _expectedSize != _hash.size() ) {
throw new ConcurrentModificationException();
}
// Disable auto compaction during the remove. This is a workaround for bug 1642768.
try {
_hash.tempDisableAutoCompaction();
TFloatByteHashMap.this.removeAt( _index );
}
finally {
_hash.reenableAutoCompaction( false );
}
// Track the removal so the fail-fast check above stays accurate.
_expectedSize--;
}
}
/**
* {@inheritDoc}
* <p/>
* NOTE(review): two entries whose values differ are still treated as equal
* when either side's value equals its map's no-entry sentinel (see the
* three-way condition below). This makes equals() tolerant of sentinel
* values but arguably asymmetric — confirm against the intended
* TFloatByteMap contract before changing.
*/
@Override
public boolean equals( Object other ) {
if ( ! ( other instanceof TFloatByteMap ) ) {
return false;
}
TFloatByteMap that = ( TFloatByteMap ) other;
if ( that.size() != this.size() ) {
return false;
}
byte[] values = _values;
byte[] states = _states;
byte this_no_entry_value = getNoEntryValue();
byte that_no_entry_value = that.getNoEntryValue();
for ( int i = values.length; i-- > 0; ) {
if ( states[i] == FULL ) {
float key = _set[i];
// that.get(key) returns that_no_entry_value when the key is absent.
byte that_value = that.get( key );
byte this_value = values[i];
if ( ( this_value != that_value ) &&
( this_value != this_no_entry_value ) &&
( that_value != that_no_entry_value ) ) {
return false;
}
}
}
return true;
}
/**
* {@inheritDoc}
* <p/>
* Combines each entry as {@code hash(key) ^ hash(value)} and sums the
* results, so the hash is independent of internal table order.
*/
@Override
public int hashCode() {
final byte[] states = _states;
int result = 0;
for ( int idx = 0, len = _values.length; idx < len; idx++ ) {
if ( states[idx] == FULL ) {
result += HashFunctions.hash( _set[idx] ) ^ HashFunctions.hash( _values[idx] );
}
}
return result;
}
/**
* {@inheritDoc}
* <p/>
* Renders the map as {@code "{k1=v1, k2=v2, ...}"} in internal table order.
*/
@Override
public String toString() {
final StringBuilder sb = new StringBuilder( "{" );
forEachEntry( new TFloatByteProcedure() {
// Flipped to true once the first entry has been written.
private boolean wroteEntry = false;
public boolean execute( float key, byte value ) {
if ( wroteEntry ) {
sb.append( ", " );
} else {
wroteEntry = true;
}
sb.append( key );
sb.append( "=" );
sb.append( value );
return true;
}
});
sb.append( "}" );
return sb.toString();
}
/**
* {@inheritDoc}
* <p/>
* Wire format: version byte (0), superclass state, entry count, then
* (float key, byte value) pairs. readExternal must mirror this exactly.
*/
public void writeExternal(ObjectOutput out) throws IOException {
// VERSION
out.writeByte( 0 );
// SUPER
super.writeExternal( out );
// NUMBER OF ENTRIES
out.writeInt( _size );
// ENTRIES
for ( int i = _states.length; i-- > 0; ) {
if ( _states[i] == FULL ) {
out.writeFloat( _set[i] );
out.writeByte( _values[i] );
}
}
}
/**
* {@inheritDoc}
* <p/>
* Reads the format produced by {@link #writeExternal}: version byte,
* superclass state, entry count, then (float key, byte value) pairs.
*/
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
// VERSION
in.readByte();
// SUPER
super.readExternal( in );
// NUMBER OF ENTRIES
int size = in.readInt();
// Pre-size the tables for the incoming entry count.
setUp( size );
// ENTRIES
while (size-- > 0) {
float key = in.readFloat();
byte val = in.readByte();
put(key, val);
}
}
} // TFloatByteHashMap
| |
package io.dropwizard.health;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.health.HealthCheck;
import com.codahale.metrics.health.HealthCheckRegistryListener;
import io.dropwizard.util.Duration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* Coordinates scheduled health checks: reacts to checks being registered,
* tracks aggregate alive/ready state, exposes metrics, and fans state-change
* events out to registered {@link HealthStateListener}s.
* <p>
* Fix: {@code onStateChanged} previously notified every listener TWICE — once
* through the exception-guarded loop and then again through a second,
* unguarded {@code forEach} — so listeners received duplicate events and a
* throwing listener could abort notification of the rest. The duplicate loop
* has been removed.
*/
class HealthCheckManager implements HealthCheckRegistryListener, HealthStatusChecker, ShutdownNotifier,
        HealthStateListener, HealthStateAggregator {
    private static final Logger LOGGER = LoggerFactory.getLogger(HealthCheckManager.class);
    private final AtomicBoolean isAppAlive = new AtomicBoolean(true);
    private final AtomicBoolean isAppHealthy = new AtomicBoolean(false);
    // Counts of currently-unhealthy critical checks, per check type.
    private final AtomicInteger unhealthyCriticalHealthChecks = new AtomicInteger();
    private final AtomicInteger unhealthyCriticalAliveChecks = new AtomicInteger();
    @Nonnull
    private final HealthCheckScheduler scheduler;
    @Nonnull
    private final Map<String, HealthCheckConfiguration> configs;
    @Nonnull
    private final Collection<HealthStateListener> healthStateListeners;
    @Nonnull
    private final MetricRegistry metrics;
    private final Duration shutdownWaitPeriod;
    private final boolean initialOverallState;
    @Nonnull
    private final String aggregateHealthyName;
    @Nonnull
    private final String aggregateUnhealthyName;
    @Nonnull
    private Map<String, ScheduledHealthCheck> checks;
    private volatile boolean shuttingDown = false;

    /**
     * @param configs              configurations, keyed internally by check name
     * @param scheduler            schedules (re-)execution of checks
     * @param metrics              registry for aggregate and per-check counters/gauges
     * @param shutdownWaitPeriod   how long to keep serving after shutdown starts
     * @param initialOverallState  initial overall readiness before checks report
     * @param healthStateListeners listeners notified of state transitions
     */
    public HealthCheckManager(final List<HealthCheckConfiguration> configs,
                              final HealthCheckScheduler scheduler,
                              final MetricRegistry metrics,
                              final Duration shutdownWaitPeriod,
                              final boolean initialOverallState,
                              final Collection<HealthStateListener> healthStateListeners) {
        this.configs = configs.stream()
                .collect(Collectors.toMap(HealthCheckConfiguration::getName, Function.identity()));
        this.scheduler = Objects.requireNonNull(scheduler);
        this.metrics = Objects.requireNonNull(metrics);
        this.shutdownWaitPeriod = shutdownWaitPeriod;
        this.initialOverallState = initialOverallState;
        this.checks = new HashMap<>();
        this.healthStateListeners = Objects.requireNonNull(healthStateListeners);
        this.aggregateHealthyName = MetricRegistry.name("health", "aggregate", "healthy");
        this.aggregateUnhealthyName = MetricRegistry.name("health", "aggregate", "unhealthy");
        metrics.register(aggregateHealthyName, (Gauge<Long>) this::calculateNumberOfHealthyChecks);
        metrics.register(aggregateUnhealthyName, (Gauge<Long>) this::calculateNumberOfUnhealthyChecks);
    }

    // visible for testing
    void setChecks(final Map<String, ScheduledHealthCheck> checks) {
        this.checks = checks;
    }

    /** Wires up a newly registered health check if a configuration exists for it. */
    @Override
    public void onHealthCheckAdded(final String name, final HealthCheck healthCheck) {
        final HealthCheckConfiguration config = configs.get(name);
        if (config == null) {
            LOGGER.debug("ignoring registered health check that isn't configured: name={}", name);
            return;
        }
        final Schedule schedule = config.getSchedule();
        final HealthCheckType type = config.getType();
        // type of 'alive' implies 'critical'
        final boolean critical = (type == HealthCheckType.ALIVE) || config.isCritical();
        final boolean initialState = config.isInitialState();
        final State state = new State(name, schedule.getFailureAttempts(), schedule.getSuccessAttempts(), initialState, this);
        final Counter healthyCheckCounter = metrics.counter(MetricRegistry.name("health", name, "healthy"));
        final Counter unhealthyCheckCounter = metrics.counter(MetricRegistry.name("health", name, "unhealthy"));
        final ScheduledHealthCheck check = new ScheduledHealthCheck(name, type, critical, healthCheck, schedule, state,
                healthyCheckCounter, unhealthyCheckCounter);
        checks.put(name, check);
        // handle initial state of 'false' to ensure counts line up
        if (!initialState && critical) {
            handleCriticalHealthChange(name, type, false);
        }
        scheduler.scheduleInitial(check);
    }

    @Override
    public void onHealthCheckRemoved(final String name, final HealthCheck healthCheck) {
        scheduler.unschedule(name);
    }

    /**
     * Reacts to a check changing state: updates aggregate status, reschedules
     * the check, and notifies listeners exactly once each (exceptions from a
     * listener are logged and swallowed so other listeners still run).
     */
    @Override
    public void onStateChanged(final String name, final boolean isNowHealthy) {
        LOGGER.debug("health check changed state: name={} state={}", name, isNowHealthy);
        final ScheduledHealthCheck check = checks.get(name);
        if (check == null) {
            LOGGER.error("State changed for unconfigured health check: name={} state={}", name, isNowHealthy);
            return;
        }
        if (check.isCritical()) {
            handleCriticalHealthChange(check.getName(), check.getType(), isNowHealthy);
        } else {
            handleNonCriticalHealthChange(check.getName(), check.getType(), isNowHealthy);
        }
        scheduler.schedule(check, isNowHealthy);
        healthStateListeners.forEach(listener -> {
            try {
                listener.onStateChanged(name, isNowHealthy);
            } catch (final RuntimeException e) {
                LOGGER.warn("Exception thrown for healthCheckName: {} from Health State listener onStateChanged: {}",
                        name, listener, e);
                // swallow error
            }
        });
        // Fixed: a second, unguarded healthStateListeners.forEach(...) here caused
        // every listener to be notified twice per state change.
    }

    /** Seeds the overall readiness flag from the configured initial state. */
    protected void initializeAppHealth() {
        this.isAppHealthy.set(initialOverallState);
    }

    private long calculateNumberOfHealthyChecks() {
        return checks.values()
                .stream()
                .filter(ScheduledHealthCheck::isHealthy)
                .count();
    }

    private long calculateNumberOfUnhealthyChecks() {
        return checks.values()
                .stream()
                .filter(check -> !check.isHealthy())
                .count();
    }

    /** Adjusts the unhealthy-critical counters and aggregate flags for a critical check transition. */
    private void handleCriticalHealthChange(final String name, final HealthCheckType type, final boolean isNowHealthy) {
        if (isNowHealthy) {
            LOGGER.info("A critical dependency is now healthy: name={}, type={}", name, type);
            switch (type) {
                case ALIVE:
                    updateCriticalStatus(isAppAlive, unhealthyCriticalAliveChecks.decrementAndGet());
                    return;
                case READY:
                    if (!shuttingDown) {
                        updateCriticalStatus(isAppHealthy, unhealthyCriticalHealthChecks.decrementAndGet());
                    } else {
                        // Readiness must stay false during shutdown so the LB drains us.
                        LOGGER.info("Status change is ignored during shutdown: name={}, type={}", name, type);
                    }
                    return;
            }
        } else {
            LOGGER.error("A critical dependency is now unhealthy: name={}, type={}", name, type);
            switch (type) {
                case ALIVE:
                    updateCriticalStatus(isAppAlive, unhealthyCriticalAliveChecks.incrementAndGet());
                    return;
                case READY:
                    updateCriticalStatus(isAppHealthy, unhealthyCriticalHealthChecks.incrementAndGet());
                    return;
            }
        }
        LOGGER.warn("Unexpected health check type: type={}", type);
    }

    /** The aggregate flag is true only when no critical check of that type is unhealthy. */
    private void updateCriticalStatus(final AtomicBoolean status, final int count) {
        status.set(count == 0);
        LOGGER.debug("current status: unhealthy-critical={}", count);
    }

    private void handleNonCriticalHealthChange(final String name, final HealthCheckType type, final boolean isNowHealthy) {
        if (isNowHealthy) {
            LOGGER.info("A non-critical dependency is now healthy: name={}, type={}", name, type);
        } else {
            LOGGER.warn("A non-critical dependency is now unhealthy: name={}, type={}", name, type);
        }
    }

    String getAggregateHealthyName() {
        return aggregateHealthyName;
    }

    String getAggregateUnhealthyName() {
        return aggregateUnhealthyName;
    }

    @Override
    public boolean isHealthy() {
        return isAppAlive.get() && isAppHealthy.get();
    }

    /** For the ALIVE type only liveness is consulted; anything else falls back to overall health. */
    @Override
    public boolean isHealthy(@Nullable String type) {
        if (HealthCheckType.ALIVE.name().equalsIgnoreCase(type)) {
            return isAppAlive.get();
        } else {
            return isHealthy();
        }
    }

    @Override
    public void notifyShutdownStarted() throws Exception {
        shuttingDown = true;
        LOGGER.info("delayed shutdown: started (waiting {})", shutdownWaitPeriod);
        // set healthy to false to indicate to the load balancer that it should not be in rotation for requests
        isAppHealthy.set(false);
        // sleep for period of time to give time for load balancer to realize requests should not be sent anymore
        Thread.sleep(shutdownWaitPeriod.toMilliseconds());
        LOGGER.info("delayed shutdown: finished");
    }

    @Override
    public void onHealthyCheck(final String healthCheckName) {
        healthStateListeners.forEach(listener -> {
            try {
                listener.onHealthyCheck(healthCheckName);
            } catch (final RuntimeException e) {
                LOGGER.warn("Exception thrown for healthCheckName: {} from Health State listener onHealthyCheck: {}",
                        healthCheckName, listener, e);
                // swallow error
            }
        });
    }

    @Override
    public void onUnhealthyCheck(final String healthCheckName) {
        healthStateListeners.forEach(listener -> {
            try {
                listener.onUnhealthyCheck(healthCheckName);
            } catch (final RuntimeException e) {
                LOGGER.warn("Exception thrown for healthCheckName: {} from Health State listener onUnhealthyCheck: {}",
                        healthCheckName, listener, e);
                // swallow error
            }
        });
    }

    @Nonnull
    @Override
    public Collection<HealthStateView> healthStateViews() {
        return checks.values()
                .stream()
                .map(ScheduledHealthCheck::view)
                .collect(Collectors.toList());
    }

    @Nonnull
    @Override
    public Optional<HealthStateView> healthStateView(@Nonnull final String name) {
        return Optional.ofNullable(checks.get(name))
                .map(ScheduledHealthCheck::view);
    }
}
| |
// ========================================================================
// Copyright (c) 2004-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.util;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/* ------------------------------------------------------------ */
/** A multi valued Map.
* This Map specializes HashMap and provides methods
* that operate on multi valued items.
* <P>
* Implemented as a map of LazyList values
* @param <K> The key type of the map.
*
* @see LazyList
*
*/
public class MultiMap<K> implements ConcurrentMap<K,Object>, Serializable
{
    private static final long serialVersionUID = -6878723138353851005L;
    // Values are stored in LazyList form: null (empty), a single Object, or a List.
    Map<K,Object> _map;
    // Non-null only when this instance was constructed concurrent; then _cmap == _map.
    ConcurrentMap<K, Object> _cmap;
    public MultiMap()
    {
        _map=new HashMap<K, Object>();
    }
    public MultiMap(Map map)
    {
        if (map instanceof ConcurrentMap)
            _map=_cmap=new ConcurrentHashMap<K, Object>(map);
        else
            _map=new HashMap<K, Object>(map);
    }
    public MultiMap(MultiMap<K> map)
    {
        // Copy preserves the concurrency mode of the source MultiMap.
        if (map._cmap!=null)
            _map=_cmap=new ConcurrentHashMap<K, Object>(map._cmap);
        else
            _map=new HashMap<K,Object>(map._map);
    }
    public MultiMap(int capacity)
    {
        _map=new HashMap<K, Object>(capacity);
    }
    public MultiMap(boolean concurrent)
    {
        if (concurrent)
            _map=_cmap=new ConcurrentHashMap<K, Object>();
        else
            _map=new HashMap<K, Object>();
    }
    /* ------------------------------------------------------------ */
    /** Get multiple values.
     * Single valued entries are converted to singleton lists.
     * @param name The entry key.
     * @return Unmodifieable List of values.
     */
    public List<Object> getValues(Object name)
    {
        return LazyList.getList(_map.get(name),true);
    }
    /* ------------------------------------------------------------ */
    /** Get a value from a multiple value.
     * If the value is not a multivalue, then index 0 retrieves the
     * value or null.
     * @param name The entry key.
     * @param i Index of element to get.
     * @return Unmodifieable List of values.
     */
    public Object getValue(Object name,int i)
    {
        Object l=_map.get(name);
        if (i==0 && LazyList.size(l)==0)
            return null;
        return LazyList.get(l,i);
    }
    /* ------------------------------------------------------------ */
    /** Get value as String.
     * Single valued items are converted to a String with the toString()
     * Object method. Multi valued entries are converted to a comma separated
     * List. No quoting of commas within values is performed.
     * @param name The entry key.
     * @return String value.
     */
    public String getString(Object name)
    {
        Object l=_map.get(name);
        switch(LazyList.size(l))
        {
          case 0:
              return null;
          case 1:
              Object o=LazyList.get(l,0);
              return o==null?null:o.toString();
          default:
          {
              // Join all non-null values with commas; null elements are skipped.
              StringBuilder values=new StringBuilder(128);
              for (int i=0; i<LazyList.size(l); i++)
              {
                  Object e=LazyList.get(l,i);
                  if (e!=null)
                  {
                      if (values.length()>0)
                          values.append(',');
                      values.append(e.toString());
                  }
              }
              return values.toString();
          }
        }
    }
    /* ------------------------------------------------------------ */
    /** Get the value(s) for a key: null if absent, the single value, or an
     * unmodifiable List when multi-valued.
     */
    public Object get(Object name)
    {
        Object l=_map.get(name);
        switch(LazyList.size(l))
        {
          case 0:
              return null;
          case 1:
              Object o=LazyList.get(l,0);
              return o;
          default:
              return LazyList.getList(l,true);
        }
    }
    /* ------------------------------------------------------------ */
    /** Put and entry into the map.
     * @param name The entry key.
     * @param value The entry value.
     * @return The previous value or null.
     */
    public Object put(K name, Object value)
    {
        // Stored in LazyList representation; replaces any existing values.
        return _map.put(name,LazyList.add(null,value));
    }
    /* ------------------------------------------------------------ */
    /** Put multi valued entry.
     * @param name The entry key.
     * @param values The List of multiple values.
     * @return The previous value or null.
     */
    public Object putValues(K name, List values)
    {
        return _map.put(name,values);
    }
    /* ------------------------------------------------------------ */
    /** Put multi valued entry.
     * @param name The entry key.
     * @param values The String array of multiple values.
     * @return The previous value or null.
     */
    public Object putValues(K name, String[] values)
    {
        Object list=null;
        for (int i=0;i<values.length;i++)
            list=LazyList.add(list,values[i]);
        // NOTE(review): relies on put()'s LazyList.add(null, list) storing the
        // built list unchanged — confirm against LazyList semantics.
        return put(name,list);
    }
    /* ------------------------------------------------------------ */
    /** Add value to multi valued entry.
     * If the entry is single valued, it is converted to the first
     * value of a multi valued entry.
     * @param name The entry key.
     * @param value The entry value.
     */
    public void add(K name, Object value)
    {
        Object lo = _map.get(name);
        Object ln = LazyList.add(lo,value);
        // LazyList.add may return a new representation; only then re-store it.
        if (lo!=ln)
            _map.put(name,ln);
    }
    /* ------------------------------------------------------------ */
    /** Add values to multi valued entry.
     * If the entry is single valued, it is converted to the first
     * value of a multi valued entry.
     * @param name The entry key.
     * @param values The List of multiple values.
     */
    public void addValues(K name, List values)
    {
        Object lo = _map.get(name);
        Object ln = LazyList.addCollection(lo,values);
        if (lo!=ln)
            _map.put(name,ln);
    }
    /* ------------------------------------------------------------ */
    /** Add values to multi valued entry.
     * If the entry is single valued, it is converted to the first
     * value of a multi valued entry.
     * @param name The entry key.
     * @param values The String array of multiple values.
     */
    public void addValues(K name, String[] values)
    {
        Object lo = _map.get(name);
        Object ln = LazyList.addCollection(lo,Arrays.asList(values));
        if (lo!=ln)
            _map.put(name,ln);
    }
    /* ------------------------------------------------------------ */
    /** Remove value.
     * @param name The entry key.
     * @param value The entry value.
     * @return true if it was removed.
     */
    public boolean removeValue(K name,Object value)
    {
        Object lo = _map.get(name);
        Object ln=lo;
        int s=LazyList.size(lo);
        if (s>0)
        {
            ln=LazyList.remove(lo,value);
            if (ln==null)
                // Last value removed: drop the key entirely.
                _map.remove(name);
            else
                _map.put(name, ln);
        }
        // Removal happened iff the LazyList shrank.
        return LazyList.size(ln)!=s;
    }
    /* ------------------------------------------------------------ */
    /** Put all contents of map.
     * @param m Map
     */
    public void putAll(Map m)
    {
        Iterator i = m.entrySet().iterator();
        boolean multi=m instanceof MultiMap;
        while(i.hasNext())
        {
            Map.Entry entry = (Map.Entry)i.next();
            if (multi)
                // Source values are already LazyLists: clone to avoid sharing.
                _map.put((K)(entry.getKey()),LazyList.clone(entry.getValue()));
            else
                put((K)(entry.getKey()),entry.getValue());
        }
    }
    /* ------------------------------------------------------------ */
    /**
     * @return Map of String arrays
     */
    public Map toStringArrayMap()
    {
        HashMap map = new HashMap(_map.size()*3/2);
        Iterator i = _map.entrySet().iterator();
        while(i.hasNext())
        {
            Map.Entry entry = (Map.Entry)i.next();
            Object l = entry.getValue();
            String[] a = LazyList.toStringArray(l);
            // for (int j=a.length;j-->0;)
            //     if (a[j]==null)
            //         a[j]="";
            map.put(entry.getKey(),a);
        }
        return map;
    }
    @Override
    public String toString()
    {
        return _cmap==null?_map.toString():_cmap.toString();
    }
    public void clear()
    {
        _map.clear();
    }
    public boolean containsKey(Object key)
    {
        return _map.containsKey(key);
    }
    public boolean containsValue(Object value)
    {
        // NOTE(review): matches against the internal LazyList representation,
        // not individual values — a single stored value may or may not match.
        return _map.containsValue(value);
    }
    public Set<Entry<K, Object>> entrySet()
    {
        return _map.entrySet();
    }
    @Override
    public boolean equals(Object o)
    {
        return _map.equals(o);
    }
    @Override
    public int hashCode()
    {
        return _map.hashCode();
    }
    public boolean isEmpty()
    {
        return _map.isEmpty();
    }
    public Set<K> keySet()
    {
        return _map.keySet();
    }
    public Object remove(Object key)
    {
        return _map.remove(key);
    }
    public int size()
    {
        return _map.size();
    }
    public Collection<Object> values()
    {
        return _map.values();
    }
    // The ConcurrentMap operations below are only available when this instance
    // was constructed in concurrent mode; otherwise they throw.
    public Object putIfAbsent(K key, Object value)
    {
        if (_cmap==null)
            throw new UnsupportedOperationException();
        return _cmap.putIfAbsent(key,value);
    }
    public boolean remove(Object key, Object value)
    {
        if (_cmap==null)
            throw new UnsupportedOperationException();
        return _cmap.remove(key,value);
    }
    public boolean replace(K key, Object oldValue, Object newValue)
    {
        if (_cmap==null)
            throw new UnsupportedOperationException();
        return _cmap.replace(key,oldValue,newValue);
    }
    public Object replace(K key, Object value)
    {
        if (_cmap==null)
            throw new UnsupportedOperationException();
        return _cmap.replace(key,value);
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2020_05_01;
import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.arm.resources.models.Resource;
import com.microsoft.azure.arm.resources.models.GroupableResourceCore;
import com.microsoft.azure.arm.resources.models.HasResourceGroup;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.network.v2020_05_01.implementation.NetworkManager;
import java.util.List;
import com.microsoft.azure.management.network.v2020_05_01.implementation.VpnServerConfigurationInner;
/**
* Type representing VpnServerConfiguration.
*/
public interface VpnServerConfiguration extends HasInner<VpnServerConfigurationInner>, Resource, GroupableResourceCore<NetworkManager, VpnServerConfigurationInner>, HasResourceGroup, Refreshable<VpnServerConfiguration>, Updatable<VpnServerConfiguration.Update>, HasManager<NetworkManager> {
// Read-only accessors mirroring VpnServerConfigurationInner properties.
/**
 * @return the aadAuthenticationParameters value.
 */
AadAuthenticationParameters aadAuthenticationParameters();
/**
 * @return the etag value.
 */
String etag();
/**
 * @return the p2SVpnGateways value.
 */
List<P2SVpnGateway> p2SVpnGateways();
/**
 * @return the provisioningState value.
 */
String provisioningState();
/**
 * @return the radiusClientRootCertificates value.
 */
List<VpnServerConfigRadiusClientRootCertificate> radiusClientRootCertificates();
/**
 * @return the radiusServerAddress value.
 */
String radiusServerAddress();
/**
 * @return the radiusServerRootCertificates value.
 */
List<VpnServerConfigRadiusServerRootCertificate> radiusServerRootCertificates();
/**
 * @return the radiusServers value.
 */
List<RadiusServer> radiusServers();
/**
 * @return the radiusServerSecret value.
 */
String radiusServerSecret();
/**
 * @return the vpnAuthenticationTypes value.
 */
List<VpnAuthenticationType> vpnAuthenticationTypes();
/**
 * @return the vpnClientIpsecPolicies value.
 */
List<IpsecPolicy> vpnClientIpsecPolicies();
/**
 * @return the vpnClientRevokedCertificates value.
 */
List<VpnServerConfigVpnClientRevokedCertificate> vpnClientRevokedCertificates();
/**
 * @return the vpnClientRootCertificates value.
 */
List<VpnServerConfigVpnClientRootCertificate> vpnClientRootCertificates();
/**
 * @return the vpnProtocols value.
 */
List<VpnGatewayTunnelingProtocol> vpnProtocols();
/**
 * @return the vpnServerConfigurationName value.
 */
String vpnServerConfigurationName();
/**
 * @return the vpnServerConfigurationPropertiesEtag value.
 */
String vpnServerConfigurationPropertiesEtag();
/**
 * The entirety of the VpnServerConfiguration definition: region, then
 * resource group, then creatable with optional property stages.
 */
interface Definition extends DefinitionStages.Blank, DefinitionStages.WithGroup, DefinitionStages.WithCreate {
}
/**
* Grouping of VpnServerConfiguration definition stages.
*/
interface DefinitionStages {
/**
* The first stage of a VpnServerConfiguration definition.
*/
interface Blank extends GroupableResourceCore.DefinitionWithRegion<WithGroup> {
}
/**
* The stage of the VpnServerConfiguration definition allowing to specify the resource group.
*/
interface WithGroup extends GroupableResourceCore.DefinitionStages.WithGroup<WithCreate> {
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify AadAuthenticationParameters.
*/
interface WithAadAuthenticationParameters {
/**
* Specifies aadAuthenticationParameters.
* @param aadAuthenticationParameters The set of aad vpn authentication parameters
* @return the next definition stage
*/
WithCreate withAadAuthenticationParameters(AadAuthenticationParameters aadAuthenticationParameters);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify RadiusClientRootCertificates.
*/
interface WithRadiusClientRootCertificates {
/**
* Specifies radiusClientRootCertificates.
* @param radiusClientRootCertificates Radius client root certificate of VpnServerConfiguration
* @return the next definition stage
*/
WithCreate withRadiusClientRootCertificates(List<VpnServerConfigRadiusClientRootCertificate> radiusClientRootCertificates);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify RadiusServerAddress.
*/
interface WithRadiusServerAddress {
/**
* Specifies radiusServerAddress.
* @param radiusServerAddress The radius server address property of the VpnServerConfiguration resource for point to site client connection
* @return the next definition stage
*/
WithCreate withRadiusServerAddress(String radiusServerAddress);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify RadiusServerRootCertificates.
*/
interface WithRadiusServerRootCertificates {
/**
* Specifies radiusServerRootCertificates.
* @param radiusServerRootCertificates Radius Server root certificate of VpnServerConfiguration
* @return the next definition stage
*/
WithCreate withRadiusServerRootCertificates(List<VpnServerConfigRadiusServerRootCertificate> radiusServerRootCertificates);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify RadiusServers.
*/
interface WithRadiusServers {
/**
* Specifies radiusServers.
* @param radiusServers Multiple Radius Server configuration for VpnServerConfiguration
* @return the next definition stage
*/
WithCreate withRadiusServers(List<RadiusServer> radiusServers);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify RadiusServerSecret.
*/
interface WithRadiusServerSecret {
/**
* Specifies radiusServerSecret.
* @param radiusServerSecret The radius secret property of the VpnServerConfiguration resource for point to site client connection
* @return the next definition stage
*/
WithCreate withRadiusServerSecret(String radiusServerSecret);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify VpnAuthenticationTypes.
*/
interface WithVpnAuthenticationTypes {
/**
* Specifies vpnAuthenticationTypes.
* @param vpnAuthenticationTypes VPN authentication types for the VpnServerConfiguration
* @return the next definition stage
*/
WithCreate withVpnAuthenticationTypes(List<VpnAuthenticationType> vpnAuthenticationTypes);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify VpnClientIpsecPolicies.
*/
interface WithVpnClientIpsecPolicies {
/**
* Specifies vpnClientIpsecPolicies.
* @param vpnClientIpsecPolicies VpnClientIpsecPolicies for VpnServerConfiguration
* @return the next definition stage
*/
WithCreate withVpnClientIpsecPolicies(List<IpsecPolicy> vpnClientIpsecPolicies);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify VpnClientRevokedCertificates.
*/
interface WithVpnClientRevokedCertificates {
/**
* Specifies vpnClientRevokedCertificates.
* @param vpnClientRevokedCertificates VPN client revoked certificate of VpnServerConfiguration
* @return the next definition stage
*/
WithCreate withVpnClientRevokedCertificates(List<VpnServerConfigVpnClientRevokedCertificate> vpnClientRevokedCertificates);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify VpnClientRootCertificates.
*/
interface WithVpnClientRootCertificates {
/**
* Specifies vpnClientRootCertificates.
* @param vpnClientRootCertificates VPN client root certificate of VpnServerConfiguration
* @return the next definition stage
*/
WithCreate withVpnClientRootCertificates(List<VpnServerConfigVpnClientRootCertificate> vpnClientRootCertificates);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify VpnProtocols.
*/
interface WithVpnProtocols {
/**
* Specifies vpnProtocols.
* @param vpnProtocols VPN protocols for the VpnServerConfiguration
* @return the next definition stage
*/
WithCreate withVpnProtocols(List<VpnGatewayTunnelingProtocol> vpnProtocols);
}
/**
* The stage of the vpnserverconfiguration definition allowing to specify VpnServerConfigurationName.
*/
interface WithVpnServerConfigurationName {
/**
* Specifies vpnServerConfigurationName.
* @param vpnServerConfigurationName The name of the VpnServerConfiguration that is unique within a resource group
* @return the next definition stage
*/
WithCreate withVpnServerConfigurationName(String vpnServerConfigurationName);
}
/**
* The stage of the definition which contains all the minimum required inputs for
* the resource to be created (via {@link WithCreate#create()}), but also allows
* for any other optional settings to be specified.
*/
interface WithCreate extends Creatable<VpnServerConfiguration>, Resource.DefinitionWithTags<WithCreate>, DefinitionStages.WithAadAuthenticationParameters, DefinitionStages.WithRadiusClientRootCertificates, DefinitionStages.WithRadiusServerAddress, DefinitionStages.WithRadiusServerRootCertificates, DefinitionStages.WithRadiusServers, DefinitionStages.WithRadiusServerSecret, DefinitionStages.WithVpnAuthenticationTypes, DefinitionStages.WithVpnClientIpsecPolicies, DefinitionStages.WithVpnClientRevokedCertificates, DefinitionStages.WithVpnClientRootCertificates, DefinitionStages.WithVpnProtocols, DefinitionStages.WithVpnServerConfigurationName {
}
}
/**
* The template for a VpnServerConfiguration update operation, containing all the settings that can be modified.
*/
interface Update extends Appliable<VpnServerConfiguration>, Resource.UpdateWithTags<Update>, UpdateStages.WithAadAuthenticationParameters, UpdateStages.WithRadiusClientRootCertificates, UpdateStages.WithRadiusServerAddress, UpdateStages.WithRadiusServerRootCertificates, UpdateStages.WithRadiusServers, UpdateStages.WithRadiusServerSecret, UpdateStages.WithVpnAuthenticationTypes, UpdateStages.WithVpnClientIpsecPolicies, UpdateStages.WithVpnClientRevokedCertificates, UpdateStages.WithVpnClientRootCertificates, UpdateStages.WithVpnProtocols, UpdateStages.WithVpnServerConfigurationName {
}
/**
* Grouping of VpnServerConfiguration update stages.
*/
interface UpdateStages {
/**
* The stage of the vpnserverconfiguration update allowing to specify AadAuthenticationParameters.
*/
interface WithAadAuthenticationParameters {
/**
* Specifies aadAuthenticationParameters.
* @param aadAuthenticationParameters The set of aad vpn authentication parameters
* @return the next update stage
*/
Update withAadAuthenticationParameters(AadAuthenticationParameters aadAuthenticationParameters);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify RadiusClientRootCertificates.
*/
interface WithRadiusClientRootCertificates {
/**
* Specifies radiusClientRootCertificates.
* @param radiusClientRootCertificates Radius client root certificate of VpnServerConfiguration
* @return the next update stage
*/
Update withRadiusClientRootCertificates(List<VpnServerConfigRadiusClientRootCertificate> radiusClientRootCertificates);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify RadiusServerAddress.
*/
interface WithRadiusServerAddress {
/**
* Specifies radiusServerAddress.
* @param radiusServerAddress The radius server address property of the VpnServerConfiguration resource for point to site client connection
* @return the next update stage
*/
Update withRadiusServerAddress(String radiusServerAddress);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify RadiusServerRootCertificates.
*/
interface WithRadiusServerRootCertificates {
/**
* Specifies radiusServerRootCertificates.
* @param radiusServerRootCertificates Radius Server root certificate of VpnServerConfiguration
* @return the next update stage
*/
Update withRadiusServerRootCertificates(List<VpnServerConfigRadiusServerRootCertificate> radiusServerRootCertificates);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify RadiusServers.
*/
interface WithRadiusServers {
/**
* Specifies radiusServers.
* @param radiusServers Multiple Radius Server configuration for VpnServerConfiguration
* @return the next update stage
*/
Update withRadiusServers(List<RadiusServer> radiusServers);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify RadiusServerSecret.
*/
interface WithRadiusServerSecret {
/**
* Specifies radiusServerSecret.
* @param radiusServerSecret The radius secret property of the VpnServerConfiguration resource for point to site client connection
* @return the next update stage
*/
Update withRadiusServerSecret(String radiusServerSecret);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify VpnAuthenticationTypes.
*/
interface WithVpnAuthenticationTypes {
/**
* Specifies vpnAuthenticationTypes.
* @param vpnAuthenticationTypes VPN authentication types for the VpnServerConfiguration
* @return the next update stage
*/
Update withVpnAuthenticationTypes(List<VpnAuthenticationType> vpnAuthenticationTypes);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify VpnClientIpsecPolicies.
*/
interface WithVpnClientIpsecPolicies {
/**
* Specifies vpnClientIpsecPolicies.
* @param vpnClientIpsecPolicies VpnClientIpsecPolicies for VpnServerConfiguration
* @return the next update stage
*/
Update withVpnClientIpsecPolicies(List<IpsecPolicy> vpnClientIpsecPolicies);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify VpnClientRevokedCertificates.
*/
interface WithVpnClientRevokedCertificates {
/**
* Specifies vpnClientRevokedCertificates.
* @param vpnClientRevokedCertificates VPN client revoked certificate of VpnServerConfiguration
* @return the next update stage
*/
Update withVpnClientRevokedCertificates(List<VpnServerConfigVpnClientRevokedCertificate> vpnClientRevokedCertificates);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify VpnClientRootCertificates.
*/
interface WithVpnClientRootCertificates {
/**
* Specifies vpnClientRootCertificates.
* @param vpnClientRootCertificates VPN client root certificate of VpnServerConfiguration
* @return the next update stage
*/
Update withVpnClientRootCertificates(List<VpnServerConfigVpnClientRootCertificate> vpnClientRootCertificates);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify VpnProtocols.
*/
interface WithVpnProtocols {
/**
* Specifies vpnProtocols.
* @param vpnProtocols VPN protocols for the VpnServerConfiguration
* @return the next update stage
*/
Update withVpnProtocols(List<VpnGatewayTunnelingProtocol> vpnProtocols);
}
/**
* The stage of the vpnserverconfiguration update allowing to specify VpnServerConfigurationName.
*/
interface WithVpnServerConfigurationName {
/**
* Specifies vpnServerConfigurationName.
* @param vpnServerConfigurationName The name of the VpnServerConfiguration that is unique within a resource group
* @return the next update stage
*/
Update withVpnServerConfigurationName(String vpnServerConfigurationName);
}
}
}
| |
package com.ymsino.water.service.data.checkingFreezeData;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
 * JAXB-mapped bean for the {@code checkingFreezeDataReturn} complex type.
 * <p>
 * Every element in the schema is declared {@code minOccurs="0"}, so every
 * property here is nullable. The type carries identification fields
 * (charging unit, concentrator, meter, user and water-customer ids), the
 * freeze period ({@code freezeYear}/{@code freezeMonth}, creation timestamp)
 * and 31 numbered meter readings {@code meterReading1}..{@code meterReading31}
 * (presumably one per day of the month — confirm against the schema docs).
 * Field access is used ({@link XmlAccessType#FIELD}), so the field names and
 * the {@code propOrder} list below define the XML binding and must not change.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "checkingFreezeDataReturn", propOrder = { "chargingUnitId", "concHardwareId", "createTimestamp", "freezeMonth", "freezeYear", "id", "meterHardwareId", "meterReading1", "meterReading10", "meterReading11", "meterReading12", "meterReading13", "meterReading14", "meterReading15", "meterReading16", "meterReading17", "meterReading18", "meterReading19", "meterReading2", "meterReading20", "meterReading21", "meterReading22", "meterReading23", "meterReading24", "meterReading25", "meterReading26", "meterReading27", "meterReading28", "meterReading29", "meterReading3", "meterReading30", "meterReading31", "meterReading4", "meterReading5", "meterReading6", "meterReading7", "meterReading8", "meterReading9", "parentUnits", "userId", "waterCustomerId" })
public class CheckingFreezeDataReturn {

    // Identification and freeze-period fields (all nullable).
    protected String chargingUnitId;
    protected String concHardwareId;
    protected Long createTimestamp;
    protected String freezeMonth;
    protected String freezeYear;
    protected Long id;
    protected String meterHardwareId;

    // Numbered meter readings, in the schema's lexicographic order (all nullable).
    protected Float meterReading1;
    protected Float meterReading10;
    protected Float meterReading11;
    protected Float meterReading12;
    protected Float meterReading13;
    protected Float meterReading14;
    protected Float meterReading15;
    protected Float meterReading16;
    protected Float meterReading17;
    protected Float meterReading18;
    protected Float meterReading19;
    protected Float meterReading2;
    protected Float meterReading20;
    protected Float meterReading21;
    protected Float meterReading22;
    protected Float meterReading23;
    protected Float meterReading24;
    protected Float meterReading25;
    protected Float meterReading26;
    protected Float meterReading27;
    protected Float meterReading28;
    protected Float meterReading29;
    protected Float meterReading3;
    protected Float meterReading30;
    protected Float meterReading31;
    protected Float meterReading4;
    protected Float meterReading5;
    protected Float meterReading6;
    protected Float meterReading7;
    protected Float meterReading8;
    protected Float meterReading9;

    // Ownership / grouping fields (all nullable).
    protected String parentUnits;
    protected String userId;
    protected String waterCustomerId;

    /** @return the chargingUnitId, or {@code null} if unset */
    public String getChargingUnitId() { return chargingUnitId; }

    /** @param value the chargingUnitId to set (may be {@code null}) */
    public void setChargingUnitId(String value) { chargingUnitId = value; }

    /** @return the concHardwareId, or {@code null} if unset */
    public String getConcHardwareId() { return concHardwareId; }

    /** @param value the concHardwareId to set (may be {@code null}) */
    public void setConcHardwareId(String value) { concHardwareId = value; }

    /** @return the createTimestamp, or {@code null} if unset */
    public Long getCreateTimestamp() { return createTimestamp; }

    /** @param value the createTimestamp to set (may be {@code null}) */
    public void setCreateTimestamp(Long value) { createTimestamp = value; }

    /** @return the freezeMonth, or {@code null} if unset */
    public String getFreezeMonth() { return freezeMonth; }

    /** @param value the freezeMonth to set (may be {@code null}) */
    public void setFreezeMonth(String value) { freezeMonth = value; }

    /** @return the freezeYear, or {@code null} if unset */
    public String getFreezeYear() { return freezeYear; }

    /** @param value the freezeYear to set (may be {@code null}) */
    public void setFreezeYear(String value) { freezeYear = value; }

    /** @return the id, or {@code null} if unset */
    public Long getId() { return id; }

    /** @param value the id to set (may be {@code null}) */
    public void setId(Long value) { id = value; }

    /** @return the meterHardwareId, or {@code null} if unset */
    public String getMeterHardwareId() { return meterHardwareId; }

    /** @param value the meterHardwareId to set (may be {@code null}) */
    public void setMeterHardwareId(String value) { meterHardwareId = value; }

    /** @return the meterReading1 value, or {@code null} if unset */
    public Float getMeterReading1() { return meterReading1; }

    /** @param value the meterReading1 value to set (may be {@code null}) */
    public void setMeterReading1(Float value) { meterReading1 = value; }

    /** @return the meterReading10 value, or {@code null} if unset */
    public Float getMeterReading10() { return meterReading10; }

    /** @param value the meterReading10 value to set (may be {@code null}) */
    public void setMeterReading10(Float value) { meterReading10 = value; }

    /** @return the meterReading11 value, or {@code null} if unset */
    public Float getMeterReading11() { return meterReading11; }

    /** @param value the meterReading11 value to set (may be {@code null}) */
    public void setMeterReading11(Float value) { meterReading11 = value; }

    /** @return the meterReading12 value, or {@code null} if unset */
    public Float getMeterReading12() { return meterReading12; }

    /** @param value the meterReading12 value to set (may be {@code null}) */
    public void setMeterReading12(Float value) { meterReading12 = value; }

    /** @return the meterReading13 value, or {@code null} if unset */
    public Float getMeterReading13() { return meterReading13; }

    /** @param value the meterReading13 value to set (may be {@code null}) */
    public void setMeterReading13(Float value) { meterReading13 = value; }

    /** @return the meterReading14 value, or {@code null} if unset */
    public Float getMeterReading14() { return meterReading14; }

    /** @param value the meterReading14 value to set (may be {@code null}) */
    public void setMeterReading14(Float value) { meterReading14 = value; }

    /** @return the meterReading15 value, or {@code null} if unset */
    public Float getMeterReading15() { return meterReading15; }

    /** @param value the meterReading15 value to set (may be {@code null}) */
    public void setMeterReading15(Float value) { meterReading15 = value; }

    /** @return the meterReading16 value, or {@code null} if unset */
    public Float getMeterReading16() { return meterReading16; }

    /** @param value the meterReading16 value to set (may be {@code null}) */
    public void setMeterReading16(Float value) { meterReading16 = value; }

    /** @return the meterReading17 value, or {@code null} if unset */
    public Float getMeterReading17() { return meterReading17; }

    /** @param value the meterReading17 value to set (may be {@code null}) */
    public void setMeterReading17(Float value) { meterReading17 = value; }

    /** @return the meterReading18 value, or {@code null} if unset */
    public Float getMeterReading18() { return meterReading18; }

    /** @param value the meterReading18 value to set (may be {@code null}) */
    public void setMeterReading18(Float value) { meterReading18 = value; }

    /** @return the meterReading19 value, or {@code null} if unset */
    public Float getMeterReading19() { return meterReading19; }

    /** @param value the meterReading19 value to set (may be {@code null}) */
    public void setMeterReading19(Float value) { meterReading19 = value; }

    /** @return the meterReading2 value, or {@code null} if unset */
    public Float getMeterReading2() { return meterReading2; }

    /** @param value the meterReading2 value to set (may be {@code null}) */
    public void setMeterReading2(Float value) { meterReading2 = value; }

    /** @return the meterReading20 value, or {@code null} if unset */
    public Float getMeterReading20() { return meterReading20; }

    /** @param value the meterReading20 value to set (may be {@code null}) */
    public void setMeterReading20(Float value) { meterReading20 = value; }

    /** @return the meterReading21 value, or {@code null} if unset */
    public Float getMeterReading21() { return meterReading21; }

    /** @param value the meterReading21 value to set (may be {@code null}) */
    public void setMeterReading21(Float value) { meterReading21 = value; }

    /** @return the meterReading22 value, or {@code null} if unset */
    public Float getMeterReading22() { return meterReading22; }

    /** @param value the meterReading22 value to set (may be {@code null}) */
    public void setMeterReading22(Float value) { meterReading22 = value; }

    /** @return the meterReading23 value, or {@code null} if unset */
    public Float getMeterReading23() { return meterReading23; }

    /** @param value the meterReading23 value to set (may be {@code null}) */
    public void setMeterReading23(Float value) { meterReading23 = value; }

    /** @return the meterReading24 value, or {@code null} if unset */
    public Float getMeterReading24() { return meterReading24; }

    /** @param value the meterReading24 value to set (may be {@code null}) */
    public void setMeterReading24(Float value) { meterReading24 = value; }

    /** @return the meterReading25 value, or {@code null} if unset */
    public Float getMeterReading25() { return meterReading25; }

    /** @param value the meterReading25 value to set (may be {@code null}) */
    public void setMeterReading25(Float value) { meterReading25 = value; }

    /** @return the meterReading26 value, or {@code null} if unset */
    public Float getMeterReading26() { return meterReading26; }

    /** @param value the meterReading26 value to set (may be {@code null}) */
    public void setMeterReading26(Float value) { meterReading26 = value; }

    /** @return the meterReading27 value, or {@code null} if unset */
    public Float getMeterReading27() { return meterReading27; }

    /** @param value the meterReading27 value to set (may be {@code null}) */
    public void setMeterReading27(Float value) { meterReading27 = value; }

    /** @return the meterReading28 value, or {@code null} if unset */
    public Float getMeterReading28() { return meterReading28; }

    /** @param value the meterReading28 value to set (may be {@code null}) */
    public void setMeterReading28(Float value) { meterReading28 = value; }

    /** @return the meterReading29 value, or {@code null} if unset */
    public Float getMeterReading29() { return meterReading29; }

    /** @param value the meterReading29 value to set (may be {@code null}) */
    public void setMeterReading29(Float value) { meterReading29 = value; }

    /** @return the meterReading3 value, or {@code null} if unset */
    public Float getMeterReading3() { return meterReading3; }

    /** @param value the meterReading3 value to set (may be {@code null}) */
    public void setMeterReading3(Float value) { meterReading3 = value; }

    /** @return the meterReading30 value, or {@code null} if unset */
    public Float getMeterReading30() { return meterReading30; }

    /** @param value the meterReading30 value to set (may be {@code null}) */
    public void setMeterReading30(Float value) { meterReading30 = value; }

    /** @return the meterReading31 value, or {@code null} if unset */
    public Float getMeterReading31() { return meterReading31; }

    /** @param value the meterReading31 value to set (may be {@code null}) */
    public void setMeterReading31(Float value) { meterReading31 = value; }

    /** @return the meterReading4 value, or {@code null} if unset */
    public Float getMeterReading4() { return meterReading4; }

    /** @param value the meterReading4 value to set (may be {@code null}) */
    public void setMeterReading4(Float value) { meterReading4 = value; }

    /** @return the meterReading5 value, or {@code null} if unset */
    public Float getMeterReading5() { return meterReading5; }

    /** @param value the meterReading5 value to set (may be {@code null}) */
    public void setMeterReading5(Float value) { meterReading5 = value; }

    /** @return the meterReading6 value, or {@code null} if unset */
    public Float getMeterReading6() { return meterReading6; }

    /** @param value the meterReading6 value to set (may be {@code null}) */
    public void setMeterReading6(Float value) { meterReading6 = value; }

    /** @return the meterReading7 value, or {@code null} if unset */
    public Float getMeterReading7() { return meterReading7; }

    /** @param value the meterReading7 value to set (may be {@code null}) */
    public void setMeterReading7(Float value) { meterReading7 = value; }

    /** @return the meterReading8 value, or {@code null} if unset */
    public Float getMeterReading8() { return meterReading8; }

    /** @param value the meterReading8 value to set (may be {@code null}) */
    public void setMeterReading8(Float value) { meterReading8 = value; }

    /** @return the meterReading9 value, or {@code null} if unset */
    public Float getMeterReading9() { return meterReading9; }

    /** @param value the meterReading9 value to set (may be {@code null}) */
    public void setMeterReading9(Float value) { meterReading9 = value; }

    /** @return the parentUnits, or {@code null} if unset */
    public String getParentUnits() { return parentUnits; }

    /** @param value the parentUnits to set (may be {@code null}) */
    public void setParentUnits(String value) { parentUnits = value; }

    /** @return the userId, or {@code null} if unset */
    public String getUserId() { return userId; }

    /** @param value the userId to set (may be {@code null}) */
    public void setUserId(String value) { userId = value; }

    /** @return the waterCustomerId, or {@code null} if unset */
    public String getWaterCustomerId() { return waterCustomerId; }

    /** @param value the waterCustomerId to set (may be {@code null}) */
    public void setWaterCustomerId(String value) { waterCustomerId = value; }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.internal;
import org.apache.flink.annotation.Internal;
import org.apache.flink.table.api.AggregatedTable;
import org.apache.flink.table.api.ExplainDetail;
import org.apache.flink.table.api.FlatAggregateTable;
import org.apache.flink.table.api.GroupWindow;
import org.apache.flink.table.api.GroupWindowedTable;
import org.apache.flink.table.api.GroupedTable;
import org.apache.flink.table.api.OverWindow;
import org.apache.flink.table.api.OverWindowedTable;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.TablePipeline;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.api.WindowGroupedTable;
import org.apache.flink.table.catalog.ContextResolvedTable;
import org.apache.flink.table.catalog.FunctionLookup;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.SchemaTranslator;
import org.apache.flink.table.catalog.UnresolvedIdentifier;
import org.apache.flink.table.delegation.ExpressionParser;
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.table.expressions.UnresolvedReferenceExpression;
import org.apache.flink.table.expressions.resolver.LookupCallResolver;
import org.apache.flink.table.functions.TemporalTableFunction;
import org.apache.flink.table.functions.TemporalTableFunctionImpl;
import org.apache.flink.table.operations.JoinQueryOperation.JoinType;
import org.apache.flink.table.operations.QueryOperation;
import org.apache.flink.table.operations.SinkModifyOperation;
import org.apache.flink.table.operations.utils.OperationExpressionsUtils;
import org.apache.flink.table.operations.utils.OperationExpressionsUtils.CategorizedExpressions;
import org.apache.flink.table.operations.utils.OperationTreeBuilder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import static org.apache.flink.table.api.Expressions.lit;
/** Implementation for {@link Table}. */
@Internal
public class TableImpl implements Table {
// Monotonically increasing suffix for auto-generated table names (see toString()).
private static final AtomicInteger uniqueId = new AtomicInteger(0);
private final TableEnvironmentInternal tableEnvironment;
// Root of the relational operation tree this table represents.
private final QueryOperation operationTree;
// Builds new operation-tree nodes for every relational transformation below.
private final OperationTreeBuilder operationTreeBuilder;
// Resolves lookup calls (unregistered/inline functions) inside expressions.
private final LookupCallResolver lookupResolver;
// Lazily assigned in toString() when the table is auto-registered.
private String tableName = null;
public TableEnvironment getTableEnvironment() {
return tableEnvironment;
}
// Instances are only created through the static factory below and createTable(QueryOperation).
private TableImpl(
TableEnvironmentInternal tableEnvironment,
QueryOperation operationTree,
OperationTreeBuilder operationTreeBuilder,
LookupCallResolver lookupResolver) {
this.tableEnvironment = tableEnvironment;
this.operationTree = operationTree;
this.operationTreeBuilder = operationTreeBuilder;
this.lookupResolver = lookupResolver;
}
// Static factory used by the table environment to wrap an operation tree;
// wraps the function lookup in a LookupCallResolver once, up front.
public static TableImpl createTable(
TableEnvironmentInternal tableEnvironment,
QueryOperation operationTree,
OperationTreeBuilder operationTreeBuilder,
FunctionLookup functionLookup) {
return new TableImpl(
tableEnvironment,
operationTree,
operationTreeBuilder,
new LookupCallResolver(functionLookup));
}
@Override
public ResolvedSchema getResolvedSchema() {
return operationTree.getResolvedSchema();
}
@Override
public void printSchema() {
System.out.println(getResolvedSchema());
}
@Override
public QueryOperation getQueryOperation() {
return operationTree;
}
// String variant: parses a comma-separated expression list and delegates.
@Override
public Table select(String fields) {
return select(
ExpressionParser.INSTANCE.parseExpressionList(fields).toArray(new Expression[0]));
}
@Override
public Table select(Expression... fields) {
    // Resolve lookup calls first, then split the expressions into
    // aggregations vs. plain projections / window properties.
    final List<Expression> resolved = preprocessExpressions(fields);
    final CategorizedExpressions categorized =
            OperationExpressionsUtils.extractAggregationsAndProperties(resolved);
    if (!categorized.getWindowProperties().isEmpty()) {
        throw new ValidationException("Window properties can only be used on windowed tables.");
    }
    final List<Expression> aggregations = categorized.getAggregations();
    if (aggregations.isEmpty()) {
        // Pure projection: project directly on top of the current operation tree.
        return createTable(operationTreeBuilder.project(resolved, operationTree, false));
    }
    // Implicit global aggregation (no group keys), then project its results.
    final QueryOperation aggregated =
            operationTreeBuilder.aggregate(Collections.emptyList(), aggregations, operationTree);
    return createTable(
            operationTreeBuilder.project(categorized.getProjections(), aggregated, false));
}
// String variant: parses both attributes and delegates to the expression overload.
@Override
public TemporalTableFunction createTemporalTableFunction(
String timeAttribute, String primaryKey) {
return createTemporalTableFunction(
ExpressionParser.INSTANCE.parseExpression(timeAttribute),
ExpressionParser.INSTANCE.parseExpression(primaryKey));
}
@Override
public TemporalTableFunction createTemporalTableFunction(
Expression timeAttribute, Expression primaryKey) {
// Resolve both expressions against this table's operation tree before
// constructing the temporal table function.
Expression resolvedTimeAttribute =
operationTreeBuilder.resolveExpression(timeAttribute, operationTree);
Expression resolvedPrimaryKey =
operationTreeBuilder.resolveExpression(primaryKey, operationTree);
return TemporalTableFunctionImpl.create(
operationTree, resolvedTimeAttribute, resolvedPrimaryKey);
}
@Override
public Table as(String field, String... fields) {
    // Legacy syntax: a single comma-separated string renaming every column at
    // once — only taken when no varargs were given and the schema is wide.
    final boolean commaSeparatedList =
            fields.length == 0 && operationTree.getResolvedSchema().getColumnCount() > 1;
    final List<Expression> aliases;
    if (commaSeparatedList) {
        aliases = ExpressionParser.INSTANCE.parseExpressionList(field);
    } else {
        // One literal alias per argument.
        aliases = new ArrayList<>();
        aliases.add(lit(field));
        for (String alias : fields) {
            aliases.add(lit(alias));
        }
    }
    return createTable(operationTreeBuilder.alias(aliases, operationTree));
}
@Override
public Table as(Expression... fields) {
return createTable(operationTreeBuilder.alias(Arrays.asList(fields), operationTree));
}
@Override
public Table filter(String predicate) {
return filter(ExpressionParser.INSTANCE.parseExpression(predicate));
}
@Override
public Table filter(Expression predicate) {
// Resolve lookup calls in the predicate before building the filter node.
Expression resolvedCallPredicate = predicate.accept(lookupResolver);
return createTable(operationTreeBuilder.filter(resolvedCallPredicate, operationTree));
}
// where(...) is a pure alias of filter(...).
@Override
public Table where(String predicate) {
return filter(predicate);
}
@Override
public Table where(Expression predicate) {
return filter(predicate);
}
@Override
public GroupedTable groupBy(String fields) {
return new GroupedTableImpl(this, ExpressionParser.INSTANCE.parseExpressionList(fields));
}
@Override
public GroupedTable groupBy(Expression... fields) {
return new GroupedTableImpl(this, Arrays.asList(fields));
}
@Override
public Table distinct() {
return createTable(operationTreeBuilder.distinct(operationTree));
}
// ---- Joins: every overload funnels into joinInternal(...). String variants
// parse the predicate; predicate-less variants pass Optional.empty(). ----
@Override
public Table join(Table right) {
return joinInternal(right, Optional.empty(), JoinType.INNER);
}
@Override
public Table join(Table right, String joinPredicate) {
return join(right, ExpressionParser.INSTANCE.parseExpression(joinPredicate));
}
@Override
public Table join(Table right, Expression joinPredicate) {
return joinInternal(right, Optional.of(joinPredicate), JoinType.INNER);
}
@Override
public Table leftOuterJoin(Table right) {
return joinInternal(right, Optional.empty(), JoinType.LEFT_OUTER);
}
@Override
public Table leftOuterJoin(Table right, String joinPredicate) {
return leftOuterJoin(right, ExpressionParser.INSTANCE.parseExpression(joinPredicate));
}
@Override
public Table leftOuterJoin(Table right, Expression joinPredicate) {
return joinInternal(right, Optional.of(joinPredicate), JoinType.LEFT_OUTER);
}
@Override
public Table rightOuterJoin(Table right, String joinPredicate) {
return rightOuterJoin(right, ExpressionParser.INSTANCE.parseExpression(joinPredicate));
}
@Override
public Table rightOuterJoin(Table right, Expression joinPredicate) {
return joinInternal(right, Optional.of(joinPredicate), JoinType.RIGHT_OUTER);
}
@Override
public Table fullOuterJoin(Table right, String joinPredicate) {
return fullOuterJoin(right, ExpressionParser.INSTANCE.parseExpression(joinPredicate));
}
@Override
public Table fullOuterJoin(Table right, Expression joinPredicate) {
return joinInternal(right, Optional.of(joinPredicate), JoinType.FULL_OUTER);
}
// Common join path: verifies both tables share a TableEnvironment, then
// builds the join node (last flag: correlated = false).
private TableImpl joinInternal(
Table right, Optional<Expression> joinPredicate, JoinType joinType) {
verifyTableCompatible(right);
return createTable(
operationTreeBuilder.join(
this.operationTree,
right.getQueryOperation(),
joinType,
joinPredicate,
false));
}
private void verifyTableCompatible(Table right) {
// Binary operations (joins/set ops) are only defined between tables created
// by the same TableEnvironment instance (identity comparison).
// NOTE(review): despite what an older comment claimed, there is no null
// check here — a null or non-TableImpl argument fails with NPE/CCE rather
// than a ValidationException; confirm callers guarantee a TableImpl.
if (((TableImpl) right).getTableEnvironment() != this.tableEnvironment) {
throw new ValidationException(
"Only tables from the same TableEnvironment can be joined.");
}
}
// ---- Lateral joins against a table function call; funnel into
// joinLateralInternal(...). ----
@Override
public Table joinLateral(String tableFunctionCall) {
return joinLateral(ExpressionParser.INSTANCE.parseExpression(tableFunctionCall));
}
@Override
public Table joinLateral(Expression tableFunctionCall) {
return joinLateralInternal(tableFunctionCall, Optional.empty(), JoinType.INNER);
}
@Override
public Table joinLateral(String tableFunctionCall, String joinPredicate) {
return joinLateral(
ExpressionParser.INSTANCE.parseExpression(tableFunctionCall),
ExpressionParser.INSTANCE.parseExpression(joinPredicate));
}
@Override
public Table joinLateral(Expression tableFunctionCall, Expression joinPredicate) {
return joinLateralInternal(tableFunctionCall, Optional.of(joinPredicate), JoinType.INNER);
}
@Override
public Table leftOuterJoinLateral(String tableFunctionCall) {
return leftOuterJoinLateral(ExpressionParser.INSTANCE.parseExpression(tableFunctionCall));
}
@Override
public Table leftOuterJoinLateral(Expression tableFunctionCall) {
return joinLateralInternal(tableFunctionCall, Optional.empty(), JoinType.LEFT_OUTER);
}
@Override
public Table leftOuterJoinLateral(String tableFunctionCall, String joinPredicate) {
return leftOuterJoinLateral(
ExpressionParser.INSTANCE.parseExpression(tableFunctionCall),
ExpressionParser.INSTANCE.parseExpression(joinPredicate));
}
@Override
public Table leftOuterJoinLateral(Expression tableFunctionCall, Expression joinPredicate) {
return joinLateralInternal(
tableFunctionCall, Optional.of(joinPredicate), JoinType.LEFT_OUTER);
}
private TableImpl joinLateralInternal(
Expression callExpr, Optional<Expression> joinPredicate, JoinType joinType) {
// check join type: only INNER and LEFT_OUTER lateral joins are supported.
if (joinType != JoinType.INNER && joinType != JoinType.LEFT_OUTER) {
throw new ValidationException(
"Table functions are currently only supported for inner and left outer lateral joins.");
}
return createTable(
operationTreeBuilder.joinLateral(
this.operationTree, callExpr, joinType, joinPredicate));
}
// ---- Set operations: the trailing boolean selects the ALL variant
// (compare minus vs minusAll etc.); both sides must share a TableEnvironment. ----
@Override
public Table minus(Table right) {
verifyTableCompatible(right);
return createTable(
operationTreeBuilder.minus(operationTree, right.getQueryOperation(), false));
}
@Override
public Table minusAll(Table right) {
verifyTableCompatible(right);
return createTable(
operationTreeBuilder.minus(operationTree, right.getQueryOperation(), true));
}
@Override
public Table union(Table right) {
verifyTableCompatible(right);
return createTable(
operationTreeBuilder.union(operationTree, right.getQueryOperation(), false));
}
@Override
public Table unionAll(Table right) {
verifyTableCompatible(right);
return createTable(
operationTreeBuilder.union(operationTree, right.getQueryOperation(), true));
}
@Override
public Table intersect(Table right) {
verifyTableCompatible(right);
return createTable(
operationTreeBuilder.intersect(operationTree, right.getQueryOperation(), false));
}
@Override
public Table intersectAll(Table right) {
verifyTableCompatible(right);
return createTable(
operationTreeBuilder.intersect(operationTree, right.getQueryOperation(), true));
}
@Override
public Table orderBy(String fields) {
return createTable(
operationTreeBuilder.sort(
ExpressionParser.INSTANCE.parseExpressionList(fields), operationTree));
}
@Override
public Table orderBy(Expression... fields) {
return createTable(operationTreeBuilder.sort(Arrays.asList(fields), operationTree));
}
@Override
public Table offset(int offset) {
    // Validate eagerly, mirroring fetch(): a negative offset can never be
    // valid, so fail fast here instead of deep inside the operation builder.
    if (offset < 0) {
        throw new ValidationException("OFFSET count must be equal or larger than 0.");
    }
    return createTable(operationTreeBuilder.limitWithOffset(offset, operationTree));
}
@Override
public Table fetch(int fetch) {
// Eager validation: a negative fetch count can never be valid.
if (fetch < 0) {
throw new ValidationException("FETCH count must be equal or larger than 0.");
}
return createTable(operationTreeBuilder.limitWithFetch(fetch, operationTree));
}
@Override
public GroupWindowedTable window(GroupWindow groupWindow) {
return new GroupWindowedTableImpl(this, groupWindow);
}
@Override
public OverWindowedTable window(OverWindow... overWindows) {
    // Exactly one over window per windowing clause is supported so far.
    if (overWindows.length == 1) {
        return new OverWindowedTableImpl(this, Arrays.asList(overWindows));
    }
    throw new TableException("Currently, only a single over window is supported.");
}
// ---- Column manipulation: add / addOrReplace / rename / drop. ----
@Override
public Table addColumns(String fields) {
return addColumnsOperation(false, ExpressionParser.INSTANCE.parseExpressionList(fields));
}
@Override
public Table addColumns(Expression... fields) {
return addColumnsOperation(false, Arrays.asList(fields));
}
@Override
public Table addOrReplaceColumns(String fields) {
return addColumnsOperation(true, ExpressionParser.INSTANCE.parseExpressionList(fields));
}
@Override
public Table addOrReplaceColumns(Expression... fields) {
return addColumnsOperation(true, Arrays.asList(fields));
}
// Shared path for addColumns / addOrReplaceColumns; rejects aggregation
// expressions, which are not allowed as added columns.
private Table addColumnsOperation(boolean replaceIfExist, List<Expression> fields) {
List<Expression> expressionsWithResolvedCalls = preprocessExpressions(fields);
CategorizedExpressions extracted =
OperationExpressionsUtils.extractAggregationsAndProperties(
expressionsWithResolvedCalls);
List<Expression> aggNames = extracted.getAggregations();
if (!aggNames.isEmpty()) {
throw new ValidationException(
"The added field expression cannot be an aggregation, found: "
+ aggNames.get(0));
}
return createTable(
operationTreeBuilder.addColumns(
replaceIfExist, expressionsWithResolvedCalls, operationTree));
}
@Override
public Table renameColumns(String fields) {
return createTable(
operationTreeBuilder.renameColumns(
ExpressionParser.INSTANCE.parseExpressionList(fields), operationTree));
}
@Override
public Table renameColumns(Expression... fields) {
return createTable(
operationTreeBuilder.renameColumns(Arrays.asList(fields), operationTree));
}
@Override
public Table dropColumns(String fields) {
return createTable(
operationTreeBuilder.dropColumns(
ExpressionParser.INSTANCE.parseExpressionList(fields), operationTree));
}
@Override
public Table dropColumns(Expression... fields) {
return createTable(operationTreeBuilder.dropColumns(Arrays.asList(fields), operationTree));
}
// ---- Row-wise transforms and key-less aggregation entry points. ----
@Override
public Table map(String mapFunction) {
return map(ExpressionParser.INSTANCE.parseExpression(mapFunction));
}
@Override
public Table map(Expression mapFunction) {
return createTable(operationTreeBuilder.map(mapFunction, operationTree));
}
@Override
public Table flatMap(String tableFunction) {
return flatMap(ExpressionParser.INSTANCE.parseExpression(tableFunction));
}
@Override
public Table flatMap(Expression tableFunction) {
return createTable(operationTreeBuilder.flatMap(tableFunction, operationTree));
}
@Override
public AggregatedTable aggregate(String aggregateFunction) {
return aggregate(ExpressionParser.INSTANCE.parseExpression(aggregateFunction));
}
// groupBy() with no arguments yields a global (key-less) grouping.
@Override
public AggregatedTable aggregate(Expression aggregateFunction) {
return groupBy().aggregate(aggregateFunction);
}
@Override
public FlatAggregateTable flatAggregate(String tableAggregateFunction) {
return groupBy().flatAggregate(tableAggregateFunction);
}
@Override
public FlatAggregateTable flatAggregate(Expression tableAggregateFunction) {
return groupBy().flatAggregate(tableAggregateFunction);
}
// ---- insertInto: resolve the sink (by path or descriptor), then build a
// TablePipeline around a SinkModifyOperation. ----
@Override
public TablePipeline insertInto(String tablePath) {
return insertInto(tablePath, false);
}
@Override
public TablePipeline insertInto(String tablePath, boolean overwrite) {
// Parse and qualify the path, then look the table up in the catalog.
UnresolvedIdentifier unresolvedIdentifier =
tableEnvironment.getParser().parseIdentifier(tablePath);
ObjectIdentifier objectIdentifier =
tableEnvironment.getCatalogManager().qualifyIdentifier(unresolvedIdentifier);
ContextResolvedTable contextResolvedTable =
tableEnvironment.getCatalogManager().getTableOrError(objectIdentifier);
return insertInto(contextResolvedTable, overwrite);
}
@Override
public TablePipeline insertInto(TableDescriptor descriptor) {
return insertInto(descriptor, false);
}
@Override
public TablePipeline insertInto(TableDescriptor descriptor, boolean overwrite) {
// Derive a schema for the anonymous sink from this table's row type,
// merged with any schema declared on the descriptor.
final SchemaTranslator.ConsumingResult schemaTranslationResult =
SchemaTranslator.createConsumingResult(
tableEnvironment.getCatalogManager().getDataTypeFactory(),
getResolvedSchema().toSourceRowDataType(),
descriptor.getSchema().orElse(null),
false);
final TableDescriptor updatedDescriptor =
descriptor.toBuilder().schema(schemaTranslationResult.getSchema()).build();
final ResolvedCatalogTable resolvedCatalogBaseTable =
tableEnvironment
.getCatalogManager()
.resolveCatalogTable(updatedDescriptor.toCatalogTable());
return insertInto(ContextResolvedTable.anonymous(resolvedCatalogBaseTable), overwrite);
}
// Common sink path: no static partitions and no dynamic options (empty maps).
private TablePipeline insertInto(ContextResolvedTable contextResolvedTable, boolean overwrite) {
return new TablePipelineImpl(
tableEnvironment,
new SinkModifyOperation(
contextResolvedTable,
getQueryOperation(),
Collections.emptyMap(),
overwrite,
Collections.emptyMap()));
}
@Override
public TableResult execute() {
return tableEnvironment.executeInternal(getQueryOperation());
}
@Override
public String explain(ExplainDetail... extraDetails) {
return tableEnvironment.explainInternal(
Collections.singletonList(getQueryOperation()), extraDetails);
}
@Override
public String toString() {
if (tableName == null) {
// Auto-register this table under a generated unique name on first use.
// NOTE(review): the null-check/assignment pair is not synchronized;
// concurrent first calls could register twice — confirm callers are
// single-threaded here.
tableName = "UnnamedTable$" + uniqueId.getAndIncrement();
tableEnvironment.registerTable(tableName, this);
}
return tableName;
}
// Wraps a derived operation tree in a new TableImpl sharing this table's
// environment, builder and resolver.
private TableImpl createTable(QueryOperation operation) {
return new TableImpl(tableEnvironment, operation, operationTreeBuilder, lookupResolver);
}
private List<Expression> preprocessExpressions(List<Expression> expressions) {
return preprocessExpressions(expressions.toArray(new Expression[0]));
}
private List<Expression> preprocessExpressions(Expression[] expressions) {
    // Resolve lookup calls (unregistered functions) in every expression.
    final List<Expression> resolved = new ArrayList<>(expressions.length);
    for (Expression expression : expressions) {
        resolved.add(expression.accept(lookupResolver));
    }
    return resolved;
}
// Result of groupBy(...): holds the parent table plus the grouping keys.
private static final class GroupedTableImpl implements GroupedTable {
private final TableImpl table;
private final List<Expression> groupKeys;
private GroupedTableImpl(TableImpl table, List<Expression> groupKeys) {
this.table = table;
this.groupKeys = groupKeys;
}
@Override
public Table select(String fields) {
return select(
ExpressionParser.INSTANCE
.parseExpressionList(fields)
.toArray(new Expression[0]));
}
@Override
public Table select(Expression... fields) {
// Split into aggregations vs. projections; window properties are illegal
// on a non-windowed grouping.
List<Expression> expressionsWithResolvedCalls = table.preprocessExpressions(fields);
CategorizedExpressions extracted =
OperationExpressionsUtils.extractAggregationsAndProperties(
expressionsWithResolvedCalls);
if (!extracted.getWindowProperties().isEmpty()) {
throw new ValidationException(
"Window properties can only be used on windowed tables.");
}
// Aggregate by the group keys first, then project the requested columns.
return table.createTable(
table.operationTreeBuilder.project(
extracted.getProjections(),
table.operationTreeBuilder.aggregate(
groupKeys, extracted.getAggregations(), table.operationTree)));
}
@Override
public AggregatedTable aggregate(String aggregateFunction) {
return aggregate(ExpressionParser.INSTANCE.parseExpression(aggregateFunction));
}
@Override
public AggregatedTable aggregate(Expression aggregateFunction) {
return new AggregatedTableImpl(table, groupKeys, aggregateFunction);
}
@Override
public FlatAggregateTable flatAggregate(String tableAggFunction) {
return flatAggregate(ExpressionParser.INSTANCE.parseExpression(tableAggFunction));
}
@Override
public FlatAggregateTable flatAggregate(Expression tableAggFunction) {
return new FlatAggregateTableImpl(table, groupKeys, tableAggFunction);
}
}
// Result of groupBy(...).aggregate(...): a pending aggregation waiting for
// the final select(...) projection.
private static final class AggregatedTableImpl implements AggregatedTable {
private final TableImpl table;
private final List<Expression> groupKeys;
private final Expression aggregateFunction;
private AggregatedTableImpl(
TableImpl table, List<Expression> groupKeys, Expression aggregateFunction) {
this.table = table;
this.groupKeys = groupKeys;
this.aggregateFunction = aggregateFunction;
}
@Override
public Table select(String fields) {
return select(
ExpressionParser.INSTANCE
.parseExpressionList(fields)
.toArray(new Expression[0]));
}
@Override
public Table select(Expression... fields) {
// Build the aggregate node, then project the selected fields on top.
return table.createTable(
table.operationTreeBuilder.project(
Arrays.asList(fields),
table.operationTreeBuilder.aggregate(
groupKeys, aggregateFunction, table.operationTree)));
}
}
// Result of groupBy(...).flatAggregate(...): like AggregatedTableImpl but for
// table aggregate functions (emitting multiple rows per group).
private static final class FlatAggregateTableImpl implements FlatAggregateTable {
private final TableImpl table;
private final List<Expression> groupKey;
private final Expression tableAggregateFunction;
private FlatAggregateTableImpl(
TableImpl table, List<Expression> groupKey, Expression tableAggregateFunction) {
this.table = table;
this.groupKey = groupKey;
this.tableAggregateFunction = tableAggregateFunction;
}
@Override
public Table select(String fields) {
return table.createTable(
table.operationTreeBuilder.project(
ExpressionParser.INSTANCE.parseExpressionList(fields),
table.operationTreeBuilder.tableAggregate(
groupKey,
// resolve lookup calls inside the function expression
tableAggregateFunction.accept(table.lookupResolver),
table.operationTree)));
}
@Override
public Table select(Expression... fields) {
return table.createTable(
table.operationTreeBuilder.project(
Arrays.asList(fields),
table.operationTreeBuilder.tableAggregate(
groupKey,
tableAggregateFunction.accept(table.lookupResolver),
table.operationTree)));
}
}
// Result of window(GroupWindow): awaits a groupBy(...) that must reference
// the window alias exactly once.
private static final class GroupWindowedTableImpl implements GroupWindowedTable {
private final TableImpl table;
private final GroupWindow window;
private GroupWindowedTableImpl(TableImpl table, GroupWindow window) {
this.table = table;
this.window = window;
}
@Override
public WindowGroupedTable groupBy(String fields) {
return groupBy(
ExpressionParser.INSTANCE
.parseExpressionList(fields)
.toArray(new Expression[0]));
}
@Override
public WindowGroupedTable groupBy(Expression... fields) {
// Strip the window alias from the key list; exactly one occurrence of
// the alias must have been present.
List<Expression> fieldsWithoutWindow =
table.preprocessExpressions(fields).stream()
.filter(f -> !window.getAlias().equals(f))
.collect(Collectors.toList());
if (fields.length != fieldsWithoutWindow.size() + 1) {
throw new ValidationException("GroupBy must contain exactly one window alias.");
}
return new WindowGroupedTableImpl(table, fieldsWithoutWindow, window);
}
}
// Result of window(...).groupBy(...): group keys plus the window definition.
private static final class WindowGroupedTableImpl implements WindowGroupedTable {
private final TableImpl table;
private final List<Expression> groupKeys;
private final GroupWindow window;
private WindowGroupedTableImpl(
TableImpl table, List<Expression> groupKeys, GroupWindow window) {
this.table = table;
this.groupKeys = groupKeys;
this.window = window;
}
@Override
public Table select(String fields) {
return select(
ExpressionParser.INSTANCE
.parseExpressionList(fields)
.toArray(new Expression[0]));
}
@Override
public Table select(Expression... fields) {
List<Expression> expressionsWithResolvedCalls = table.preprocessExpressions(fields);
CategorizedExpressions extracted =
OperationExpressionsUtils.extractAggregationsAndProperties(
expressionsWithResolvedCalls);
// Window-aggregate by the keys and window, then project; window
// properties (start/end/rowtime) are forwarded to the aggregate node.
return table.createTable(
table.operationTreeBuilder.project(
extracted.getProjections(),
table.operationTreeBuilder.windowAggregate(
groupKeys,
window,
extracted.getWindowProperties(),
extracted.getAggregations(),
table.operationTree),
// required for proper resolution of the time attribute in multi-windows
true));
}
@Override
public AggregatedTable aggregate(String aggregateFunction) {
return aggregate(ExpressionParser.INSTANCE.parseExpression(aggregateFunction));
}
@Override
public AggregatedTable aggregate(Expression aggregateFunction) {
return new WindowAggregatedTableImpl(table, groupKeys, aggregateFunction, window);
}
@Override
public FlatAggregateTable flatAggregate(String tableAggregateFunction) {
return flatAggregate(ExpressionParser.INSTANCE.parseExpression(tableAggregateFunction));
}
@Override
public FlatAggregateTable flatAggregate(Expression tableAggregateFunction) {
return new WindowFlatAggregateTableImpl(
table, groupKeys, tableAggregateFunction, window);
}
}
// Result of window(...).groupBy(...).aggregate(...): pending windowed
// aggregation; select(...) must not contain further aggregates or "*".
private static final class WindowAggregatedTableImpl implements AggregatedTable {
private final TableImpl table;
private final List<Expression> groupKeys;
private final Expression aggregateFunction;
private final GroupWindow window;
private WindowAggregatedTableImpl(
TableImpl table,
List<Expression> groupKeys,
Expression aggregateFunction,
GroupWindow window) {
this.table = table;
this.groupKeys = groupKeys;
this.aggregateFunction = aggregateFunction;
this.window = window;
}
@Override
public Table select(String fields) {
return select(
ExpressionParser.INSTANCE
.parseExpressionList(fields)
.toArray(new Expression[0]));
}
@Override
public Table select(Expression... fields) {
List<Expression> expressionsWithResolvedCalls = table.preprocessExpressions(fields);
CategorizedExpressions extracted =
OperationExpressionsUtils.extractAggregationsAndProperties(
expressionsWithResolvedCalls);
// The aggregation already happened via aggregateFunction; reject more.
if (!extracted.getAggregations().isEmpty()) {
throw new ValidationException(
"Aggregate functions cannot be used in the select right "
+ "after the aggregate.");
}
// Reject a bare "*" projection — columns must be named explicitly here.
if (extracted.getProjections().stream()
.anyMatch(
p ->
(p instanceof UnresolvedReferenceExpression)
&& "*"
.equals(
((UnresolvedReferenceExpression) p)
.getName()))) {
throw new ValidationException("Can not use * for window aggregate!");
}
return table.createTable(
table.operationTreeBuilder.project(
extracted.getProjections(),
table.operationTreeBuilder.windowAggregate(
groupKeys,
window,
extracted.getWindowProperties(),
aggregateFunction,
table.operationTree)));
}
}
// Result of window(...).groupBy(...).flatAggregate(...): same constraints as
// WindowAggregatedTableImpl, but builds a windowTableAggregate node.
private static final class WindowFlatAggregateTableImpl implements FlatAggregateTable {
private final TableImpl table;
private final List<Expression> groupKeys;
private final Expression tableAggFunction;
private final GroupWindow window;
private WindowFlatAggregateTableImpl(
TableImpl table,
List<Expression> groupKeys,
Expression tableAggFunction,
GroupWindow window) {
this.table = table;
this.groupKeys = groupKeys;
this.tableAggFunction = tableAggFunction;
this.window = window;
}
@Override
public Table select(String fields) {
return select(
ExpressionParser.INSTANCE
.parseExpressionList(fields)
.toArray(new Expression[0]));
}
@Override
public Table select(Expression... fields) {
List<Expression> expressionsWithResolvedCalls = table.preprocessExpressions(fields);
CategorizedExpressions extracted =
OperationExpressionsUtils.extractAggregationsAndProperties(
expressionsWithResolvedCalls);
if (!extracted.getAggregations().isEmpty()) {
throw new ValidationException(
"Aggregate functions cannot be used in the select right "
+ "after the flatAggregate.");
}
if (extracted.getProjections().stream()
.anyMatch(
p ->
(p instanceof UnresolvedReferenceExpression)
&& "*"
.equals(
((UnresolvedReferenceExpression) p)
.getName()))) {
throw new ValidationException("Can not use * for window aggregate!");
}
return table.createTable(
table.operationTreeBuilder.project(
extracted.getProjections(),
table.operationTreeBuilder.windowTableAggregate(
groupKeys,
window,
extracted.getWindowProperties(),
tableAggFunction,
table.operationTree),
// required for proper resolution of the time attribute in multi-windows
true));
}
}
// Result of window(OverWindow...): select(...) projects with the over
// windows attached to the projection node.
private static final class OverWindowedTableImpl implements OverWindowedTable {
private final TableImpl table;
private final List<OverWindow> overWindows;
private OverWindowedTableImpl(TableImpl table, List<OverWindow> overWindows) {
this.table = table;
this.overWindows = overWindows;
}
@Override
public Table select(String fields) {
return table.createTable(
table.operationTreeBuilder.project(
ExpressionParser.INSTANCE.parseExpressionList(fields),
table.operationTree,
overWindows));
}
@Override
public Table select(Expression... fields) {
return table.createTable(
table.operationTreeBuilder.project(
Arrays.asList(fields), table.operationTree, overWindows));
}
}
}
| |
/*
* @(#)Console.java 1.11 06/06/12
*
* Copyright 2006 Sun Microsystems, Inc. All rights reserved.
* SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
package java.io;
import java.util.*;
import java.nio.charset.Charset;
import sun.nio.cs.StreamDecoder;
import sun.nio.cs.StreamEncoder;
/**
* Methods to access the character-based console device, if any, associated
* with the current Java virtual machine.
*
* <p> Whether a virtual machine has a console is dependent upon the
* underlying platform and also upon the manner in which the virtual
* machine is invoked. If the virtual machine is started from an
* interactive command line without redirecting the standard input and
* output streams then its console will exist and will typically be
* connected to the keyboard and display from which the virtual machine
* was launched. If the virtual machine is started automatically, for
* example by a background job scheduler, then it will typically not
* have a console.
* <p>
* If this virtual machine has a console then it is represented by a
* unique instance of this class which can be obtained by invoking the
* {@link java.lang.System#console()} method. If no console device is
* available then an invocation of that method will return <tt>null</tt>.
* <p>
* Read and write operations are synchronized to guarantee the atomic
* completion of critical operations; therefore invoking methods
* {@link #readLine()}, {@link #readPassword()}, {@link #format format()},
* {@link #printf printf()} as well as the read, format and write operations
* on the objects returned by {@link #reader()} and {@link #writer()} may
* block in multithreaded scenarios.
* <p>
* Invoking <tt>close()</tt> on the objects returned by the {@link #reader()}
* and the {@link #writer()} will not close the underlying stream of those
* objects.
* <p>
* The console-read methods return <tt>null</tt> when the end of the
* console input stream is reached, for example by typing control-D on
* Unix or control-Z on Windows. Subsequent read operations will succeed
* if additional characters are later entered on the console's input
* device.
* <p>
* Unless otherwise specified, passing a <tt>null</tt> argument to any method
* in this class will cause a {@link NullPointerException} to be thrown.
* <p>
* <b>Security note:</b>
* If an application needs to read a password or other secure data, it should
* use {@link #readPassword()} or {@link #readPassword(String, Object...)} and
* manually zero the returned character array after processing to minimize the
* lifetime of sensitive data in memory.
*
* <blockquote><pre>
* Console cons;
* char[] passwd;
* if ((cons = System.console()) != null &&
* (passwd = cons.readPassword("[%s]", "Password:")) != null) {
* ...
* java.util.Arrays.fill(passwd, ' ');
* }
* </pre></blockquote>
*
* @author Xueming Shen
* @version 1.11, 06/12/06
* @since 1.6
*/
public final class Console implements Flushable
{
    /**
     * Retrieves the unique {@link java.io.PrintWriter PrintWriter} object
     * associated with this console.
     *
     * @return The printwriter associated with this console
     */
    public PrintWriter writer() {
        return pw;
    }

    /**
     * Retrieves the unique {@link java.io.Reader Reader} object associated
     * with this console.
     * <p>
     * This method is intended to be used by sophisticated applications, for
     * example, a {@link java.util.Scanner} object which utilizes the rich
     * parsing/scanning functionality provided by the <tt>Scanner</tt>:
     * <blockquote><pre>
     * Console con = System.console();
     * if (con != null) {
     *     Scanner sc = new Scanner(con.reader());
     *     ...
     * }
     * </pre></blockquote>
     * <p>
     * For simple applications requiring only line-oriented reading, use
     * <tt>{@link #readLine}</tt>.
     * <p>
     * The bulk read operations {@link java.io.Reader#read(char[]) read(char[]) },
     * {@link java.io.Reader#read(char[], int, int) read(char[], int, int) } and
     * {@link java.io.Reader#read(java.nio.CharBuffer) read(java.nio.CharBuffer)}
     * on the returned object will not read in characters beyond the line
     * bound for each invocation, even if the destination buffer has space for
     * more characters. A line bound is considered to be any one of a line feed
     * (<tt>'\n'</tt>), a carriage return (<tt>'\r'</tt>), a carriage return
     * followed immediately by a linefeed, or an end of stream.
     *
     * @return The reader associated with this console
     */
    public Reader reader() {
        return reader;
    }

    /**
     * Writes a formatted string to this console's output stream using
     * the specified format string and arguments.
     *
     * @param fmt
     *         A format string as described in <a
     *         href="../util/Formatter.html#syntax">Format string syntax</a>
     *
     * @param args
     *         Arguments referenced by the format specifiers in the format
     *         string. If there are more arguments than format specifiers, the
     *         extra arguments are ignored. The number of arguments is
     *         variable and may be zero. The maximum number of arguments is
     *         limited by the maximum dimension of a Java array as defined by
     *         the <a href="http://java.sun.com/docs/books/vmspec/">Java
     *         Virtual Machine Specification</a>. The behaviour on a
     *         <tt>null</tt> argument depends on the <a
     *         href="../util/Formatter.html#syntax">conversion</a>.
     *
     * @throws IllegalFormatException
     *          If a format string contains an illegal syntax, a format
     *          specifier that is incompatible with the given arguments,
     *          insufficient arguments given the format string, or other
     *          illegal conditions. For specification of all possible
     *          formatting errors, see the <a
     *          href="../util/Formatter.html#detail">Details</a> section
     *          of the formatter class specification.
     *
     * @return This console
     */
    public Console format(String fmt, Object ...args) {
        // Formatter wraps 'out', which synchronizes on writeLock internally.
        formatter.format(fmt, args).flush();
        return this;
    }

    /**
     * A convenience method to write a formatted string to this console's
     * output stream using the specified format string and arguments.
     *
     * <p> An invocation of this method of the form <tt>con.printf(format,
     * args)</tt> behaves in exactly the same way as the invocation of
     * <pre>con.format(format, args)</pre>.
     *
     * @param format
     *         A format string as described in <a
     *         href="../util/Formatter.html#syntax">Format string syntax</a>.
     *
     * @param args
     *         Arguments referenced by the format specifiers in the format
     *         string. If there are more arguments than format specifiers, the
     *         extra arguments are ignored. The number of arguments is
     *         variable and may be zero. The maximum number of arguments is
     *         limited by the maximum dimension of a Java array as defined by
     *         the <a href="http://java.sun.com/docs/books/vmspec/">Java
     *         Virtual Machine Specification</a>. The behaviour on a
     *         <tt>null</tt> argument depends on the <a
     *         href="../util/Formatter.html#syntax">conversion</a>.
     *
     * @throws IllegalFormatException
     *          If a format string contains an illegal syntax, a format
     *          specifier that is incompatible with the given arguments,
     *          insufficient arguments given the format string, or other
     *          illegal conditions. For specification of all possible
     *          formatting errors, see the <a
     *          href="../util/Formatter.html#detail">Details</a> section of the
     *          formatter class specification.
     *
     * @return This console
     */
    public Console printf(String format, Object ... args) {
        return format(format, args);
    }

    /**
     * Provides a formatted prompt, then reads a single line of text from the
     * console.
     *
     * @param fmt
     *         A format string as described in <a
     *         href="../util/Formatter.html#syntax">Format string syntax</a>.
     *
     * @param args
     *         Arguments referenced by the format specifiers in the format
     *         string. If there are more arguments than format specifiers, the
     *         extra arguments are ignored. The maximum number of arguments is
     *         limited by the maximum dimension of a Java array as defined by
     *         the <a href="http://java.sun.com/docs/books/vmspec/">Java
     *         Virtual Machine Specification</a>.
     *
     * @throws IllegalFormatException
     *          If a format string contains an illegal syntax, a format
     *          specifier that is incompatible with the given arguments,
     *          insufficient arguments given the format string, or other
     *          illegal conditions. For specification of all possible
     *          formatting errors, see the <a
     *          href="../util/Formatter.html#detail">Details</a> section
     *          of the formatter class specification.
     *
     * @throws IOError
     *         If an I/O error occurs.
     *
     * @return  A string containing the line read from the console, not
     *          including any line-termination characters, or <tt>null</tt>
     *          if an end of stream has been reached.
     */
    public String readLine(String fmt, Object ... args) {
        String line = null;
        // Lock ordering: writeLock first, then readLock. This makes the
        // prompt-write plus line-read appear atomic relative to other
        // console operations (see the class javadoc on synchronization).
        synchronized (writeLock) {
            synchronized(readLock) {
                if (fmt.length() != 0)
                    pw.format(fmt, args);
                try {
                    char[] ca = readline(false);
                    if (ca != null)
                        line = new String(ca);
                } catch (IOException x) {
                    throw new IOError(x);
                }
            }
        }
        return line;
    }

    /**
     * Reads a single line of text from the console.
     *
     * @throws IOError
     *         If an I/O error occurs.
     *
     * @return  A string containing the line read from the console, not
     *          including any line-termination characters, or <tt>null</tt>
     *          if an end of stream has been reached.
     */
    public String readLine() {
        // Empty format string means "no prompt" in readLine(String, Object...).
        return readLine("");
    }

    /**
     * Provides a formatted prompt, then reads a password or passphrase from
     * the console with echoing disabled.
     *
     * @param fmt
     *         A format string as described in <a
     *         href="../util/Formatter.html#syntax">Format string syntax</a>
     *         for the prompt text.
     *
     * @param args
     *         Arguments referenced by the format specifiers in the format
     *         string. If there are more arguments than format specifiers, the
     *         extra arguments are ignored. The maximum number of arguments is
     *         limited by the maximum dimension of a Java array as defined by
     *         the <a href="http://java.sun.com/docs/books/vmspec/">Java
     *         Virtual Machine Specification</a>.
     *
     * @throws IllegalFormatException
     *          If a format string contains an illegal syntax, a format
     *          specifier that is incompatible with the given arguments,
     *          insufficient arguments given the format string, or other
     *          illegal conditions. For specification of all possible
     *          formatting errors, see the <a
     *          href="../util/Formatter.html#detail">Details</a>
     *          section of the formatter class specification.
     *
     * @throws IOError
     *         If an I/O error occurs.
     *
     * @return  A character array containing the password or passphrase read
     *          from the console, not including any line-termination characters,
     *          or <tt>null</tt> if an end of stream has been reached.
     */
    public char[] readPassword(String fmt, Object ... args) {
        char[] passwd = null;
        synchronized (writeLock) {
            synchronized(readLock) {
                if (fmt.length() != 0)
                    pw.format(fmt, args);
                try {
                    // Disable echo while the password is typed; readline(true)
                    // also blanks the shared buffer so the characters do not
                    // linger in memory longer than necessary.
                    echoOff = echo(false);
                    passwd = readline(true);
                } catch (IOException x) {
                    throw new IOError(x);
                } finally {
                    // Always attempt to restore echo, even if the read failed.
                    try {
                        echoOff = echo(true);
                    } catch (IOException xx) {}   // best effort; nothing more we can do here
                }
                // The user's Enter keystroke was not echoed, so emit the
                // newline ourselves to keep the display consistent.
                pw.println();
            }
        }
        return passwd;
    }

    /**
     * Reads a password or passphrase from the console with echoing disabled
     *
     * @throws IOError
     *         If an I/O error occurs.
     *
     * @return  A character array containing the password or passphrase read
     *          from the console, not including any line-termination characters,
     *          or <tt>null</tt> if an end of stream has been reached.
     */
    public char[] readPassword() {
        return readPassword("");
    }

    /**
     * Flushes the console and forces any buffered output to be written
     * immediately .
     */
    public void flush() {
        pw.flush();
    }

    // Guards all read operations; acquired after writeLock when both are needed.
    private Object readLock;
    // Guards all write/format operations.
    private Object writeLock;
    // Line-bounded reader over the console input (a LineReader; see below).
    private Reader reader;
    // Charset-encoding writer over the console output stream.
    private Writer out;
    // PrintWriter view of 'out' whose close() is a no-op (see constructor).
    private PrintWriter pw;
    // Used by format()/printf(); writes through 'out'.
    private Formatter formatter;
    // Charset for console I/O: from the native encoding() if resolvable,
    // otherwise the platform default (see constructor).
    private Charset cs;
    // Shared read buffer used by readline(); grown on demand by grow().
    private char[] rcb;
    // Name of the console charset, or null if unknown (native).
    private static native String encoding();
    // Switches console echo on/off. The boolean result is stored in echoOff;
    // presumably it reports the resulting/previous echo state — confirm
    // against the native implementation before relying on it.
    private static native boolean echo(boolean on) throws IOException;
    // Tracked so the shutdown hook (see static block) can restore echo.
    private static boolean echoOff;

    // Reads one line from the console into a freshly allocated array,
    // stripping the trailing line terminator (LF, CR, or CRLF). Returns null
    // at end of stream. When zeroOut is true, the portion of the shared
    // buffer that held the line is blanked afterwards (password hygiene).
    private char[] readline(boolean zeroOut) throws IOException {
        int len = reader.read(rcb, 0, rcb.length);
        if (len < 0)
            return null; //EOL
        if (rcb[len-1] == '\r')
            len--;        //remove CR at end;
        else if (rcb[len-1] == '\n') {
            len--;        //remove LF at end;
            if (len > 0 && rcb[len-1] == '\r')
                len--;    //remove the CR, if there is one
        }
        char[] b = new char[len];
        if (len > 0) {
            System.arraycopy(rcb, 0, b, 0, len);
            if (zeroOut) {
                Arrays.fill(rcb, 0, len, ' ');
            }
        }
        return b;
    }

    // Doubles the shared read buffer, preserving its contents. Caller must
    // hold readLock; LineReader.read relies on the returned reference.
    private char[] grow() {
        assert Thread.holdsLock(readLock);
        char[] t = new char[rcb.length * 2];
        System.arraycopy(rcb, 0, t, 0, rcb.length);
        rcb = t;
        return rcb;
    }

    // Reader that returns at most one line per read() invocation, bounded by
    // LF, CR, CRLF, or end of stream (see the reader() javadoc). When invoked
    // from Console.readline() — recognized by cbuf == rcb — it grows the
    // destination buffer via grow() instead of returning a partial line.
    class LineReader extends Reader {
        private Reader in;                 // decoded console input
        private char[] cb;                 // fill buffer for raw reads from 'in'
        private int nChars, nextChar;      // valid char count / next index in cb
        boolean leftoverLF;                // a CR was returned; a following LF may still be pending
        LineReader(Reader in) {
            this.in = in;
            cb = new char[1024];
            nextChar = nChars = 0;
            leftoverLF = false;
        }
        // Closing the reader must not close the underlying console stream
        // (see class javadoc), so this is deliberately a no-op.
        public void close () {}
        public boolean ready() throws IOException {
            //in.ready synchronizes on readLock already
            return in.ready();
        }
        public int read(char cbuf[], int offset, int length)
            throws IOException
        {
            int off = offset;
            int end = offset + length;
            if (offset < 0 || offset > cbuf.length || length < 0 ||
                end < 0 || end > cbuf.length) {
                throw new IndexOutOfBoundsException();
            }
            synchronized(readLock) {
                boolean eof = false;
                char c = 0;
                for (;;) {
                    if (nextChar >= nChars) {   //fill
                        int n = 0;
                        do {
                            n = in.read(cb, 0, cb.length);
                        } while (n == 0);
                        if (n > 0) {
                            nChars = n;
                            nextChar = 0;
                            if (n < cb.length &&
                                cb[n-1] != '\n' && cb[n-1] != '\r') {
                                /*
                                 * we're in canonical mode so each "fill" should
                                 * come back with an eol. if there is no LF or CR
                                 * at the end of the returned bytes we reached
                                 * an eof.
                                 */
                                eof = true;
                            }
                        } else { /*EOF*/
                            if (off - offset == 0)
                                return -1;
                            return off - offset;
                        }
                    }
                    if (leftoverLF && cbuf == rcb && cb[nextChar] == '\n') {
                        /*
                         * if invoked by our readline, skip the leftover, otherwise
                         * return the LF.
                         */
                        nextChar++;
                    }
                    leftoverLF = false;
                    while (nextChar < nChars) {
                        // Copy one char out and zero it in the fill buffer so
                        // consumed input (possibly a password) is not retained.
                        c = cbuf[off++] = cb[nextChar];
                        cb[nextChar++] = 0;
                        if (c == '\n') {
                            return off - offset;
                        } else if (c == '\r') {
                            if (off == end) {
                                /* no space left even the next is LF, so return
                                 * whatever we have if the invoker is not our
                                 * readLine()
                                 */
                                if (cbuf == rcb) {
                                    cbuf = grow();
                                    end = cbuf.length;
                                } else {
                                    leftoverLF = true;
                                    return off - offset;
                                }
                            }
                            if (nextChar == nChars && in.ready()) {
                                /*
                                 * we have a CR and we reached the end of
                                 * the read in buffer, fill to make sure we
                                 * don't miss a LF, if there is one, it's possible
                                 * that it got cut off during last round reading
                                 * simply because the read in buffer was full.
                                 */
                                nChars = in.read(cb, 0, cb.length);
                                nextChar = 0;
                            }
                            if (nextChar < nChars && cb[nextChar] == '\n') {
                                cbuf[off++] = '\n';
                                nextChar++;
                            }
                            return off - offset;
                        } else if (off == end) {
                            // Destination full mid-line: grow our own buffer,
                            // or hand back what we have to an external caller.
                            if (cbuf == rcb) {
                                cbuf = grow();
                                end = cbuf.length;
                            } else {
                                return off - offset;
                            }
                        }
                    }
                    if (eof)
                        return off - offset;
                }
            }
        }
    }

    // Set up JavaIOAccess in SharedSecrets
    static {
        sun.misc.SharedSecrets.setJavaIOAccess(new sun.misc.JavaIOAccess() {
            // Lazily creates the singleton Console, but only when the VM
            // actually has a tty attached.
            public Console console() {
                if (istty()) {
                    if (cons == null)
                        cons = new Console();
                    return cons;
                }
                return null;
            }

            // Add a shutdown hook to restore console's echo state should
            // it be necessary.
            public Runnable consoleRestoreHook() {
                return new Runnable() {
                    public void run() {
                        try {
                            if (echoOff) {
                                echo(true);
                            }
                        } catch (IOException x) {}   // best effort at shutdown
                    }
                };
            }

            public Charset charset() {
                // This method is called in sun.security.util.Password,
                // cons already exists when this method is called
                return cons.cs;
            }
        });
    }

    // Lazily initialized singleton instance (see static block above).
    private static Console cons;
    // Whether this VM has a console device attached (native).
    private native static boolean istty();

    // Private: instances are only created through the JavaIOAccess hook above.
    private Console() {
        readLock = new Object();
        writeLock = new Object();
        String csname = encoding();
        if (csname != null) {
            try {
                cs = Charset.forName(csname);
            } catch (Exception x) {}   // unknown name: fall through to the default
        }
        if (cs == null)
            cs = Charset.defaultCharset();
        // The encoder/decoder share the console locks so stream-level
        // operations synchronize with readLine()/readPassword().
        out = StreamEncoder.forOutputStreamWriter(
                  new FileOutputStream(FileDescriptor.out),
                  writeLock,
                  cs);
        // close() is overridden to a no-op: per the class javadoc, closing
        // the writer must not close the underlying console stream.
        pw = new PrintWriter(out, true) { public void close() {} };
        formatter = new Formatter(out);
        reader = new LineReader(StreamDecoder.forInputStreamReader(
                     new FileInputStream(FileDescriptor.in),
                     readLock,
                     cs));
        rcb = new char[1024];
    }
}
| |
/*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.context.annotation;
import java.util.Properties;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import jakarta.annotation.Resource;
import jakarta.ejb.EJB;
import org.junit.jupiter.api.Test;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.ObjectFactory;
import org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.DestructionAwareBeanPostProcessor;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.testfixture.beans.INestedTestBean;
import org.springframework.beans.testfixture.beans.ITestBean;
import org.springframework.beans.testfixture.beans.NestedTestBean;
import org.springframework.beans.testfixture.beans.TestBean;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.context.testfixture.jndi.ExpectedLookupTemplate;
import org.springframework.core.testfixture.io.SerializationTestUtils;
import org.springframework.jndi.support.SimpleJndiBeanFactory;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Juergen Hoeller
* @author Chris Beams
*/
public class CommonAnnotationBeanPostProcessorTests {
@Test
public void testPostConstructAndPreDestroy() {
	// The annotation post-processor alone must drive @PostConstruct/@PreDestroy.
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	beanFactory.addBeanPostProcessor(new CommonAnnotationBeanPostProcessor());
	beanFactory.registerBeanDefinition("annotatedBean", new RootBeanDefinition(AnnotatedInitDestroyBean.class));

	AnnotatedInitDestroyBean bean = (AnnotatedInitDestroyBean) beanFactory.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	beanFactory.destroySingletons();
	assertThat(bean.destroyCalled).isTrue();
}
@Test
public void testPostConstructAndPreDestroyWithPostProcessor() {
	// Lifecycle callbacks must still fire once with an extra post-processor registered first.
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	beanFactory.addBeanPostProcessor(new InitDestroyBeanPostProcessor());
	beanFactory.addBeanPostProcessor(new CommonAnnotationBeanPostProcessor());
	beanFactory.registerBeanDefinition("annotatedBean", new RootBeanDefinition(AnnotatedInitDestroyBean.class));

	AnnotatedInitDestroyBean bean = (AnnotatedInitDestroyBean) beanFactory.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	beanFactory.destroySingletons();
	assertThat(bean.destroyCalled).isTrue();
}
@Test
public void testPostConstructAndPreDestroyWithApplicationContextAndPostProcessor() {
	// Same lifecycle contract, but with the post-processors themselves
	// declared as bean definitions inside a full ApplicationContext.
	GenericApplicationContext context = new GenericApplicationContext();
	context.registerBeanDefinition("bpp1", new RootBeanDefinition(InitDestroyBeanPostProcessor.class));
	context.registerBeanDefinition("bpp2", new RootBeanDefinition(CommonAnnotationBeanPostProcessor.class));
	context.registerBeanDefinition("annotatedBean", new RootBeanDefinition(AnnotatedInitDestroyBean.class));
	context.refresh();

	AnnotatedInitDestroyBean bean = (AnnotatedInitDestroyBean) context.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	context.close();
	assertThat(bean.destroyCalled).isTrue();
}
@Test
public void testPostConstructAndPreDestroyWithManualConfiguration() {
	// A plain InitDestroyAnnotationBeanPostProcessor configured by hand with the
	// jakarta annotations must behave like CommonAnnotationBeanPostProcessor.
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	InitDestroyAnnotationBeanPostProcessor processor = new InitDestroyAnnotationBeanPostProcessor();
	processor.setInitAnnotationType(PostConstruct.class);
	processor.setDestroyAnnotationType(PreDestroy.class);
	beanFactory.addBeanPostProcessor(processor);
	beanFactory.registerBeanDefinition("annotatedBean", new RootBeanDefinition(AnnotatedInitDestroyBean.class));

	AnnotatedInitDestroyBean bean = (AnnotatedInitDestroyBean) beanFactory.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	beanFactory.destroySingletons();
	assertThat(bean.destroyCalled).isTrue();
}
@Test
public void testPostProcessorWithNullBean() {
	// A factory method returning null must not break the post-processor;
	// the container exposes the null result as an object whose toString() is "null".
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	beanFactory.addBeanPostProcessor(new CommonAnnotationBeanPostProcessor());
	RootBeanDefinition definition = new RootBeanDefinition(NullFactory.class);
	definition.setFactoryMethodName("create");
	beanFactory.registerBeanDefinition("bean", definition);

	assertThat(beanFactory.getBean("bean").toString()).isEqualTo("null");
	beanFactory.destroySingletons();
}
@Test
public void testSerialization() throws Exception {
	// A serialize/deserialize round trip must keep the processor functional.
	CommonAnnotationBeanPostProcessor original = new CommonAnnotationBeanPostProcessor();
	CommonAnnotationBeanPostProcessor deserialized = SerializationTestUtils.serializeAndDeserialize(original);

	AnnotatedInitDestroyBean bean = new AnnotatedInitDestroyBean();
	deserialized.postProcessBeforeDestruction(bean, "annotatedBean");
	assertThat(bean.destroyCalled).isTrue();
}
@Test
public void testSerializationWithManualConfiguration() throws Exception {
	// Manually configured annotation types must survive serialization as well.
	InitDestroyAnnotationBeanPostProcessor original = new InitDestroyAnnotationBeanPostProcessor();
	original.setInitAnnotationType(PostConstruct.class);
	original.setDestroyAnnotationType(PreDestroy.class);
	InitDestroyAnnotationBeanPostProcessor deserialized = SerializationTestUtils.serializeAndDeserialize(original);

	AnnotatedInitDestroyBean bean = new AnnotatedInitDestroyBean();
	deserialized.postProcessBeforeDestruction(bean, "annotatedBean");
	assertThat(bean.destroyCalled).isTrue();
}
@Test
public void testResourceInjection() {
	// @Resource fields/methods must be injected by name from the resource factory,
	// and all init/destroy callbacks must fire.
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	CommonAnnotationBeanPostProcessor processor = new CommonAnnotationBeanPostProcessor();
	processor.setResourceFactory(beanFactory);
	beanFactory.addBeanPostProcessor(processor);
	beanFactory.registerBeanDefinition("annotatedBean", new RootBeanDefinition(ResourceInjectionBean.class));
	TestBean expected1 = new TestBean();
	beanFactory.registerSingleton("testBean", expected1);
	TestBean expected2 = new TestBean();
	beanFactory.registerSingleton("testBean2", expected2);

	ResourceInjectionBean bean = (ResourceInjectionBean) beanFactory.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	assertThat(bean.init2Called).isTrue();
	assertThat(bean.init3Called).isTrue();
	assertThat(bean.getTestBean()).isSameAs(expected1);
	assertThat(bean.getTestBean2()).isSameAs(expected2);

	beanFactory.destroySingletons();
	assertThat(bean.destroyCalled).isTrue();
	assertThat(bean.destroy2Called).isTrue();
	assertThat(bean.destroy3Called).isTrue();
}
@Test
public void testResourceInjectionWithPrototypes() {
	// With everything prototype-scoped, each getBean must yield a fresh target
	// bean AND fresh injected dependencies; destroyBean must run the callbacks.
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	CommonAnnotationBeanPostProcessor processor = new CommonAnnotationBeanPostProcessor();
	processor.setResourceFactory(beanFactory);
	beanFactory.addBeanPostProcessor(processor);

	RootBeanDefinition annotatedDefinition = new RootBeanDefinition(ResourceInjectionBean.class);
	annotatedDefinition.setScope(BeanDefinition.SCOPE_PROTOTYPE);
	beanFactory.registerBeanDefinition("annotatedBean", annotatedDefinition);
	RootBeanDefinition testBeanDefinition = new RootBeanDefinition(TestBean.class);
	testBeanDefinition.setScope(BeanDefinition.SCOPE_PROTOTYPE);
	beanFactory.registerBeanDefinition("testBean", testBeanDefinition);
	RootBeanDefinition testBean2Definition = new RootBeanDefinition(TestBean.class);
	testBean2Definition.setScope(BeanDefinition.SCOPE_PROTOTYPE);
	beanFactory.registerBeanDefinition("testBean2", testBean2Definition);

	ResourceInjectionBean bean = (ResourceInjectionBean) beanFactory.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	assertThat(bean.init2Called).isTrue();
	assertThat(bean.init3Called).isTrue();
	TestBean injected1 = bean.getTestBean();
	TestBean injected2 = bean.getTestBean2();
	assertThat(injected1).isNotNull();
	assertThat(injected2).isNotNull();

	ResourceInjectionBean anotherBean = (ResourceInjectionBean) beanFactory.getBean("annotatedBean");
	assertThat(bean).isNotSameAs(anotherBean);
	assertThat(injected1).isNotSameAs(anotherBean.getTestBean());
	assertThat(injected2).isNotSameAs(anotherBean.getTestBean2());

	beanFactory.destroyBean("annotatedBean", bean);
	assertThat(bean.destroyCalled).isTrue();
	assertThat(bean.destroy2Called).isTrue();
	assertThat(bean.destroy3Called).isTrue();
}
@Test
public void testResourceInjectionWithResolvableDependencyType() {
	// @Resource by type must be satisfiable through registerResolvableDependency,
	// with the dependency resolved freshly for each prototype instance and
	// recorded in the bean's dependency set.
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	CommonAnnotationBeanPostProcessor processor = new CommonAnnotationBeanPostProcessor();
	processor.setBeanFactory(beanFactory);
	beanFactory.addBeanPostProcessor(processor);

	RootBeanDefinition annotatedDefinition = new RootBeanDefinition(ExtendedResourceInjectionBean.class);
	annotatedDefinition.setScope(BeanDefinition.SCOPE_PROTOTYPE);
	beanFactory.registerBeanDefinition("annotatedBean", annotatedDefinition);
	RootBeanDefinition testBeanDefinition = new RootBeanDefinition(TestBean.class);
	testBeanDefinition.setScope(BeanDefinition.SCOPE_PROTOTYPE);
	beanFactory.registerBeanDefinition("testBean4", testBeanDefinition);
	beanFactory.registerResolvableDependency(BeanFactory.class, beanFactory);
	beanFactory.registerResolvableDependency(INestedTestBean.class, (ObjectFactory<Object>) NestedTestBean::new);

	@SuppressWarnings("deprecation")
	org.springframework.beans.factory.config.PropertyPlaceholderConfigurer placeholderConfigurer = new org.springframework.beans.factory.config.PropertyPlaceholderConfigurer();
	Properties placeholders = new Properties();
	placeholders.setProperty("tb", "testBean4");
	placeholderConfigurer.setProperties(placeholders);
	placeholderConfigurer.postProcessBeanFactory(beanFactory);

	ExtendedResourceInjectionBean bean = (ExtendedResourceInjectionBean) beanFactory.getBean("annotatedBean");
	INestedTestBean nested = bean.getTestBean6();
	assertThat(nested).isNotNull();

	ExtendedResourceInjectionBean anotherBean = (ExtendedResourceInjectionBean) beanFactory.getBean("annotatedBean");
	assertThat(bean).isNotSameAs(anotherBean);
	assertThat(nested).isNotSameAs(anotherBean.getTestBean6());

	String[] dependencies = beanFactory.getDependenciesForBean("annotatedBean");
	assertThat(dependencies.length).isEqualTo(1);
	assertThat(dependencies[0]).isEqualTo("testBean4");
}
@Test
public void testResourceInjectionWithDefaultMethod() {
	// @Resource on an interface default method must be honored; the shared
	// counter tracks init (2 increments) and destroy (1 more) invocations.
	DefaultListableBeanFactory bf = new DefaultListableBeanFactory();
	CommonAnnotationBeanPostProcessor bpp = new CommonAnnotationBeanPostProcessor();
	bpp.setBeanFactory(bf);
	bf.addBeanPostProcessor(bpp);
	bf.registerBeanDefinition("annotatedBean", new RootBeanDefinition(DefaultMethodResourceInjectionBean.class));
	TestBean tb2 = new TestBean();
	bf.registerSingleton("testBean2", tb2);
	NestedTestBean tb7 = new NestedTestBean();
	bf.registerSingleton("testBean7", tb7);

	DefaultMethodResourceInjectionBean bean = (DefaultMethodResourceInjectionBean) bf.getBean("annotatedBean");
	assertThat(bean.getTestBean2()).isSameAs(tb2);
	// BUGFIX: was isSameAs(2)/isSameAs(3), i.e. reference identity on an
	// autoboxed Integer — it only passed thanks to the JVM's Integer cache.
	// The intent is value equality.
	assertThat(bean.counter).isEqualTo(2);
	bf.destroySingletons();
	assertThat(bean.counter).isEqualTo(3);
}
@Test
public void testResourceInjectionWithTwoProcessors() {
	// Registering the annotation post-processor twice must not double-inject
	// or double-invoke the lifecycle callbacks.
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	CommonAnnotationBeanPostProcessor firstProcessor = new CommonAnnotationBeanPostProcessor();
	firstProcessor.setResourceFactory(beanFactory);
	beanFactory.addBeanPostProcessor(firstProcessor);
	CommonAnnotationBeanPostProcessor secondProcessor = new CommonAnnotationBeanPostProcessor();
	secondProcessor.setResourceFactory(beanFactory);
	beanFactory.addBeanPostProcessor(secondProcessor);
	beanFactory.registerBeanDefinition("annotatedBean", new RootBeanDefinition(ResourceInjectionBean.class));
	TestBean expected1 = new TestBean();
	beanFactory.registerSingleton("testBean", expected1);
	TestBean expected2 = new TestBean();
	beanFactory.registerSingleton("testBean2", expected2);

	ResourceInjectionBean bean = (ResourceInjectionBean) beanFactory.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	assertThat(bean.init2Called).isTrue();
	assertThat(bean.getTestBean()).isSameAs(expected1);
	assertThat(bean.getTestBean2()).isSameAs(expected2);

	beanFactory.destroySingletons();
	assertThat(bean.destroyCalled).isTrue();
	assertThat(bean.destroy2Called).isTrue();
}
@Test
public void testResourceInjectionFromJndi() {
	// @Resource values must also be resolvable from a JNDI-backed resource factory.
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	CommonAnnotationBeanPostProcessor processor = new CommonAnnotationBeanPostProcessor();
	SimpleJndiBeanFactory jndiFactory = new SimpleJndiBeanFactory();
	ExpectedLookupTemplate jndiTemplate = new ExpectedLookupTemplate();
	TestBean expected1 = new TestBean();
	jndiTemplate.addObject("java:comp/env/testBean", expected1);
	TestBean expected2 = new TestBean();
	jndiTemplate.addObject("java:comp/env/testBean2", expected2);
	jndiFactory.setJndiTemplate(jndiTemplate);
	processor.setResourceFactory(jndiFactory);
	beanFactory.addBeanPostProcessor(processor);
	beanFactory.registerBeanDefinition("annotatedBean", new RootBeanDefinition(ResourceInjectionBean.class));

	ResourceInjectionBean bean = (ResourceInjectionBean) beanFactory.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	assertThat(bean.init2Called).isTrue();
	assertThat(bean.getTestBean()).isSameAs(expected1);
	assertThat(bean.getTestBean2()).isSameAs(expected2);

	beanFactory.destroySingletons();
	assertThat(bean.destroyCalled).isTrue();
	assertThat(bean.destroy2Called).isTrue();
}
@Test
public void testExtendedResourceInjection() {
	// Covers the full @Resource matrix: by-name, by-type, placeholder-resolved
	// names ("${tb}" -> testBean3), alias resolution ("testBean9" -> "xy"),
	// BeanFactory injection via resolvable dependency, and String conversion.
	DefaultListableBeanFactory bf = new DefaultListableBeanFactory();
	CommonAnnotationBeanPostProcessor bpp = new CommonAnnotationBeanPostProcessor();
	bpp.setBeanFactory(bf);
	bf.addBeanPostProcessor(bpp);
	bf.registerResolvableDependency(BeanFactory.class, bf);

	@SuppressWarnings("deprecation")
	org.springframework.beans.factory.config.PropertyPlaceholderConfigurer ppc = new org.springframework.beans.factory.config.PropertyPlaceholderConfigurer();
	Properties props = new Properties();
	props.setProperty("tb", "testBean3");
	ppc.setProperties(props);
	ppc.postProcessBeanFactory(bf);

	bf.registerBeanDefinition("annotatedBean", new RootBeanDefinition(ExtendedResourceInjectionBean.class));
	bf.registerBeanDefinition("annotatedBean2", new RootBeanDefinition(NamedResourceInjectionBean.class));
	bf.registerBeanDefinition("annotatedBean3", new RootBeanDefinition(ConvertedResourceInjectionBean.class));
	TestBean tb = new TestBean();
	bf.registerSingleton("testBean", tb);
	TestBean tb2 = new TestBean();
	bf.registerSingleton("testBean2", tb2);
	TestBean tb3 = new TestBean();
	bf.registerSingleton("testBean3", tb3);
	TestBean tb4 = new TestBean();
	bf.registerSingleton("testBean4", tb4);
	NestedTestBean tb6 = new NestedTestBean();
	bf.registerSingleton("value", "5");
	bf.registerSingleton("xy", tb6);
	bf.registerAlias("xy", "testBean9");

	ExtendedResourceInjectionBean bean = (ExtendedResourceInjectionBean) bf.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	assertThat(bean.init2Called).isTrue();
	assertThat(bean.getTestBean()).isSameAs(tb);
	assertThat(bean.getTestBean2()).isSameAs(tb2);
	assertThat(bean.getTestBean3()).isSameAs(tb4);
	assertThat(bean.getTestBean4()).isSameAs(tb3);
	assertThat(bean.testBean5).isSameAs(tb6);
	assertThat(bean.testBean6).isSameAs(tb6);
	assertThat(bean.beanFactory).isSameAs(bf);

	NamedResourceInjectionBean bean2 = (NamedResourceInjectionBean) bf.getBean("annotatedBean2");
	assertThat(bean2.testBean).isSameAs(tb6);

	ConvertedResourceInjectionBean bean3 = (ConvertedResourceInjectionBean) bf.getBean("annotatedBean3");
	// BUGFIX: was isSameAs(5) — reference identity on an autoboxed Integer,
	// which only passed thanks to the JVM's Integer cache. The intent is the
	// converted value, so assert equality.
	assertThat(bean3.value).isEqualTo(5);

	bf.destroySingletons();
	assertThat(bean.destroyCalled).isTrue();
	assertThat(bean.destroy2Called).isTrue();
}
@Test
public void testExtendedResourceInjectionWithOverriding() {
	// An explicit property value on the bean definition must take precedence
	// over @Resource injection (testBean2 -> tb5). Without the "xy" alias
	// registered, the by-name lookup "testBean9" must fail for annotatedBean2.
	DefaultListableBeanFactory bf = new DefaultListableBeanFactory();
	CommonAnnotationBeanPostProcessor bpp = new CommonAnnotationBeanPostProcessor();
	bpp.setBeanFactory(bf);
	bf.addBeanPostProcessor(bpp);
	bf.registerResolvableDependency(BeanFactory.class, bf);

	@SuppressWarnings("deprecation")
	org.springframework.beans.factory.config.PropertyPlaceholderConfigurer ppc = new org.springframework.beans.factory.config.PropertyPlaceholderConfigurer();
	Properties props = new Properties();
	props.setProperty("tb", "testBean3");
	ppc.setProperties(props);
	ppc.postProcessBeanFactory(bf);

	RootBeanDefinition annotatedBd = new RootBeanDefinition(ExtendedResourceInjectionBean.class);
	TestBean tb5 = new TestBean();
	annotatedBd.getPropertyValues().add("testBean2", tb5);
	bf.registerBeanDefinition("annotatedBean", annotatedBd);
	bf.registerBeanDefinition("annotatedBean2", new RootBeanDefinition(NamedResourceInjectionBean.class));
	TestBean tb = new TestBean();
	bf.registerSingleton("testBean", tb);
	TestBean tb2 = new TestBean();
	bf.registerSingleton("testBean2", tb2);
	TestBean tb3 = new TestBean();
	bf.registerSingleton("testBean3", tb3);
	TestBean tb4 = new TestBean();
	bf.registerSingleton("testBean4", tb4);
	NestedTestBean tb6 = new NestedTestBean();
	bf.registerSingleton("xy", tb6);

	ExtendedResourceInjectionBean bean = (ExtendedResourceInjectionBean) bf.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	assertThat(bean.init2Called).isTrue();
	assertThat(bean.getTestBean()).isSameAs(tb);
	assertThat(bean.getTestBean2()).isSameAs(tb5);  // definition override wins over @Resource
	assertThat(bean.getTestBean3()).isSameAs(tb4);
	assertThat(bean.getTestBean4()).isSameAs(tb3);
	assertThat(bean.testBean5).isSameAs(tb6);
	assertThat(bean.testBean6).isSameAs(tb6);
	assertThat(bean.beanFactory).isSameAs(bf);

	try {
		bf.getBean("annotatedBean2");
		// BUGFIX: previously a missing exception let the test pass silently;
		// reaching this line means the expected failure did not occur.
		// (AssertionError is not caught by the BeanCreationException handler.)
		throw new AssertionError("getBean(\"annotatedBean2\") should have failed: no bean named 'testBean9'");
	}
	catch (BeanCreationException ex) {
		boolean condition = ex.getRootCause() instanceof NoSuchBeanDefinitionException;
		assertThat(condition).isTrue();
		NoSuchBeanDefinitionException innerEx = (NoSuchBeanDefinitionException) ex.getRootCause();
		assertThat(innerEx.getBeanName()).isEqualTo("testBean9");
	}

	bf.destroySingletons();
	assertThat(bean.destroyCalled).isTrue();
	assertThat(bean.destroy2Called).isTrue();
}
@Test
public void testExtendedEjbInjection() {
	// Mirrors the extended @Resource scenario using @EJB injection points.
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	CommonAnnotationBeanPostProcessor processor = new CommonAnnotationBeanPostProcessor();
	processor.setBeanFactory(beanFactory);
	beanFactory.addBeanPostProcessor(processor);
	beanFactory.registerResolvableDependency(BeanFactory.class, beanFactory);
	beanFactory.registerBeanDefinition("annotatedBean", new RootBeanDefinition(ExtendedEjbInjectionBean.class));
	TestBean expected1 = new TestBean();
	beanFactory.registerSingleton("testBean", expected1);
	TestBean expected2 = new TestBean();
	beanFactory.registerSingleton("testBean2", expected2);
	TestBean expected3 = new TestBean();
	beanFactory.registerSingleton("testBean3", expected3);
	TestBean expected4 = new TestBean();
	beanFactory.registerSingleton("testBean4", expected4);
	NestedTestBean nested = new NestedTestBean();
	beanFactory.registerSingleton("xy", nested);
	beanFactory.registerAlias("xy", "testBean9");

	ExtendedEjbInjectionBean bean = (ExtendedEjbInjectionBean) beanFactory.getBean("annotatedBean");
	assertThat(bean.initCalled).isTrue();
	assertThat(bean.init2Called).isTrue();
	assertThat(bean.getTestBean()).isSameAs(expected1);
	assertThat(bean.getTestBean2()).isSameAs(expected2);
	assertThat(bean.getTestBean3()).isSameAs(expected4);
	assertThat(bean.getTestBean4()).isSameAs(expected3);
	assertThat(bean.testBean5).isSameAs(nested);
	assertThat(bean.testBean6).isSameAs(nested);
	assertThat(bean.beanFactory).isSameAs(beanFactory);

	beanFactory.destroySingletons();
	assertThat(bean.destroyCalled).isTrue();
	assertThat(bean.destroy2Called).isTrue();
}
@Test
public void testLazyResolutionWithResourceField() {
	// A @Lazy @Resource field receives a proxy: the target singleton must not
	// be created until the proxy is first dereferenced.
	DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
	CommonAnnotationBeanPostProcessor processor = new CommonAnnotationBeanPostProcessor();
	processor.setBeanFactory(beanFactory);
	beanFactory.addBeanPostProcessor(processor);
	beanFactory.registerBeanDefinition("annotatedBean", new RootBeanDefinition(LazyResourceFieldInjectionBean.class));
	beanFactory.registerBeanDefinition("testBean", new RootBeanDefinition(TestBean.class));

	LazyResourceFieldInjectionBean bean = (LazyResourceFieldInjectionBean) beanFactory.getBean("annotatedBean");
	assertThat(beanFactory.containsSingleton("testBean")).isFalse();
	bean.testBean.setName("notLazyAnymore");   // first proxy access triggers creation
	assertThat(beanFactory.containsSingleton("testBean")).isTrue();
	TestBean target = (TestBean) beanFactory.getBean("testBean");
	assertThat(target.getName()).isEqualTo("notLazyAnymore");
}
@Test
public void testLazyResolutionWithResourceMethod() {
    // Same scenario as the field variant, but injection goes through a setter.
    DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
    CommonAnnotationBeanPostProcessor processor = new CommonAnnotationBeanPostProcessor();
    processor.setBeanFactory(beanFactory);
    beanFactory.addBeanPostProcessor(processor);
    beanFactory.registerBeanDefinition("testBean", new RootBeanDefinition(TestBean.class));
    beanFactory.registerBeanDefinition("annotatedBean", new RootBeanDefinition(LazyResourceMethodInjectionBean.class));

    LazyResourceMethodInjectionBean bean =
            (LazyResourceMethodInjectionBean) beanFactory.getBean("annotatedBean");
    // Setter received a lazy proxy: the target singleton must not exist yet.
    assertThat(beanFactory.containsSingleton("testBean")).isFalse();
    // First use of the proxy materializes the target singleton.
    bean.testBean.setName("notLazyAnymore");
    assertThat(beanFactory.containsSingleton("testBean")).isTrue();
    TestBean target = (TestBean) beanFactory.getBean("testBean");
    assertThat(target.getName()).isEqualTo("notLazyAnymore");
}
@Test
public void testLazyResolutionWithCglibProxy() {
    // Dependency type is the concrete TestBean class (no interface), so the
    // lazy proxy must be a class-based proxy.
    DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory();
    CommonAnnotationBeanPostProcessor processor = new CommonAnnotationBeanPostProcessor();
    processor.setBeanFactory(beanFactory);
    beanFactory.addBeanPostProcessor(processor);
    beanFactory.registerBeanDefinition("testBean", new RootBeanDefinition(TestBean.class));
    beanFactory.registerBeanDefinition("annotatedBean", new RootBeanDefinition(LazyResourceCglibInjectionBean.class));

    LazyResourceCglibInjectionBean bean =
            (LazyResourceCglibInjectionBean) beanFactory.getBean("annotatedBean");
    // Proxy only: the target singleton must not exist yet.
    assertThat(beanFactory.containsSingleton("testBean")).isFalse();
    // First use of the proxy materializes the target singleton.
    bean.testBean.setName("notLazyAnymore");
    assertThat(beanFactory.containsSingleton("testBean")).isTrue();
    TestBean target = (TestBean) beanFactory.getBean("testBean");
    assertThat(target.getName()).isEqualTo("notLazyAnymore");
}
/**
 * Fixture with one private {@code @PostConstruct} and one private
 * {@code @PreDestroy} callback; each records that it ran and throws if the
 * container invokes it more than once.
 */
public static class AnnotatedInitDestroyBean {

    public boolean initCalled = false;
    public boolean destroyCalled = false;

    @PostConstruct
    private void init() {
        // Guard: the lifecycle callback must run exactly once.
        if (this.initCalled) {
            throw new IllegalStateException("Already called");
        }
        this.initCalled = true;
    }

    @PreDestroy
    private void destroy() {
        // Guard: the destruction callback must run exactly once.
        if (this.destroyCalled) {
            throw new IllegalStateException("Already called");
        }
        this.destroyCalled = true;
    }
}
/**
 * Post-processor asserting the relative ordering of lifecycle callbacks:
 * init has not run before initialization post-processing, has run after it,
 * and destroy has not yet run when destruction post-processing starts.
 */
public static class InitDestroyBeanPostProcessor implements DestructionAwareBeanPostProcessor {

    @Override
    public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
        if (bean instanceof AnnotatedInitDestroyBean) {
            AnnotatedInitDestroyBean target = (AnnotatedInitDestroyBean) bean;
            // @PostConstruct must not have fired yet.
            assertThat(target.initCalled).isFalse();
        }
        return bean;
    }

    @Override
    public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
        if (bean instanceof AnnotatedInitDestroyBean) {
            AnnotatedInitDestroyBean target = (AnnotatedInitDestroyBean) bean;
            // @PostConstruct must have fired by now.
            assertThat(target.initCalled).isTrue();
        }
        return bean;
    }

    @Override
    public void postProcessBeforeDestruction(Object bean, String beanName) throws BeansException {
        if (bean instanceof AnnotatedInitDestroyBean) {
            AnnotatedInitDestroyBean target = (AnnotatedInitDestroyBean) bean;
            // Destruction post-processing is expected before @PreDestroy fires.
            assertThat(target.destroyCalled).isFalse();
        }
    }

    @Override
    public boolean requiresDestruction(Object bean) {
        // Every bean takes part in destruction post-processing.
        return true;
    }
}
/**
 * Fixture combining {@code @Resource} field and setter injection with extra
 * {@code @PostConstruct}/{@code @PreDestroy} callbacks on top of
 * {@link AnnotatedInitDestroyBean}. The private init()/destroy() methods here
 * do not override the superclass' private methods of the same name, so both
 * pairs of callbacks are expected to run independently.
 */
public static class ResourceInjectionBean extends AnnotatedInitDestroyBean {

    public boolean init2Called = false;
    public boolean init3Called = false;
    public boolean destroy2Called = false;
    public boolean destroy3Called = false;

    // injected directly into the field
    @Resource
    private TestBean testBean;

    // injected through setTestBean2 below
    private TestBean testBean2;

    @PostConstruct
    protected void init2() {
        // Injection must be complete and the superclass callback already done.
        if (this.testBean == null || this.testBean2 == null) {
            throw new IllegalStateException("Resources not injected");
        }
        if (!this.initCalled) {
            throw new IllegalStateException("Superclass init method not called yet");
        }
        if (this.init2Called) {
            throw new IllegalStateException("Already called");
        }
        this.init2Called = true;
    }

    @PostConstruct
    private void init() {
        // Distinct from the superclass' private init(); tracked separately.
        if (this.init3Called) {
            throw new IllegalStateException("Already called");
        }
        this.init3Called = true;
    }

    @PreDestroy
    protected void destroy2() {
        // Subclass destroy callbacks are expected before the superclass one.
        if (this.destroyCalled) {
            throw new IllegalStateException("Superclass destroy called too soon");
        }
        if (this.destroy2Called) {
            throw new IllegalStateException("Already called");
        }
        this.destroy2Called = true;
    }

    @PreDestroy
    private void destroy() {
        // Distinct from the superclass' private destroy(); tracked separately.
        if (this.destroyCalled) {
            throw new IllegalStateException("Superclass destroy called too soon");
        }
        if (this.destroy3Called) {
            throw new IllegalStateException("Already called");
        }
        this.destroy3Called = true;
    }

    @Resource
    public void setTestBean2(TestBean testBean2) {
        // Setter injection must only happen once.
        if (this.testBean2 != null) {
            throw new IllegalStateException("Already called");
        }
        this.testBean2 = testBean2;
    }

    public TestBean getTestBean() {
        return testBean;
    }

    public TestBean getTestBean2() {
        return testBean2;
    }
}
/**
 * Package-visible, generically typed fixture: exercises {@code @Resource} on
 * non-public fields and methods, an explicit name/type attribute pair on
 * testBean3, a "${tb}" placeholder name on the testBean4 setter, and
 * injection of the {@link BeanFactory} itself.
 */
static class NonPublicResourceInjectionBean<B> extends ResourceInjectionBean {

    // explicit bean name plus concrete type, on a field declared by interface
    @Resource(name="testBean4", type=TestBean.class)
    protected ITestBean testBean3;

    // generically typed; injected through the private setter below
    private B testBean4;

    @Resource
    INestedTestBean testBean5;

    // injected through setTestBean6 below
    INestedTestBean testBean6;

    @Resource
    BeanFactory beanFactory;

    @Override
    @Resource
    public void setTestBean2(TestBean testBean2) {
        super.setTestBean2(testBean2);
    }

    // name given as a placeholder; the container is expected to resolve "${tb}"
    @Resource(name="${tb}", type=ITestBean.class)
    private void setTestBean4(B testBean4) {
        if (this.testBean4 != null) {
            throw new IllegalStateException("Already called");
        }
        this.testBean4 = testBean4;
    }

    @Resource
    public void setTestBean6(INestedTestBean testBean6) {
        if (this.testBean6 != null) {
            throw new IllegalStateException("Already called");
        }
        this.testBean6 = testBean6;
    }

    public ITestBean getTestBean3() {
        return testBean3;
    }

    public B getTestBean4() {
        return testBean4;
    }

    public INestedTestBean getTestBean5() {
        return testBean5;
    }

    public INestedTestBean getTestBean6() {
        return testBean6;
    }

    @Override
    @PostConstruct
    protected void init2() {
        // Also require the injections declared at this level.
        if (this.testBean3 == null || this.testBean4 == null) {
            throw new IllegalStateException("Resources not injected");
        }
        super.init2();
    }

    @Override
    @PreDestroy
    protected void destroy2() {
        super.destroy2();
    }
}
/**
 * Concrete subclass binding the generic parameter of
 * {@link NonPublicResourceInjectionBean} to {@link ITestBean}.
 */
public static class ExtendedResourceInjectionBean extends NonPublicResourceInjectionBean<ITestBean> {
}
/**
 * Interface carrying injection and lifecycle annotations on both an abstract
 * setter and on default methods; implementors count each default-method call
 * via {@link #increaseCounter()}.
 */
public interface InterfaceWithDefaultMethod {

    @Resource
    void setTestBean2(TestBean testBean2);

    @Resource
    default void setTestBean7(INestedTestBean testBean7) {
        increaseCounter();
    }

    @PostConstruct
    default void initDefault() {
        increaseCounter();
    }

    @PreDestroy
    default void destroyDefault() {
        increaseCounter();
    }

    // implemented by the fixture to record each invocation
    void increaseCounter();
}
/**
 * Fixture whose counter records how many of the interface's annotated
 * default methods were invoked by the container.
 */
public static class DefaultMethodResourceInjectionBean extends ResourceInjectionBean
        implements InterfaceWithDefaultMethod {

    // incremented once per invoked default method
    public int counter = 0;

    @Override
    public void increaseCounter() {
        counter++;
    }
}
/**
 * {@code @EJB} counterpart of the {@code @Resource} fixtures: exercises the
 * name / beanName / beanInterface attributes on fields and setters, mixed
 * with one plain {@code @Resource} injection of the {@link BeanFactory}.
 */
public static class ExtendedEjbInjectionBean extends ResourceInjectionBean {

    // name selects "testBean4"; beanInterface narrows the declared ITestBean type
    @EJB(name="testBean4", beanInterface=TestBean.class)
    protected ITestBean testBean3;

    // injected through the private setter below
    private ITestBean testBean4;

    @EJB
    private INestedTestBean testBean5;

    // injected through setTestBean6 below
    private INestedTestBean testBean6;

    @Resource
    private BeanFactory beanFactory;

    @Override
    @EJB
    public void setTestBean2(TestBean testBean2) {
        super.setTestBean2(testBean2);
    }

    // beanName variant of the name attribute
    @EJB(beanName="testBean3", beanInterface=ITestBean.class)
    private void setTestBean4(ITestBean testBean4) {
        if (this.testBean4 != null) {
            throw new IllegalStateException("Already called");
        }
        this.testBean4 = testBean4;
    }

    @EJB
    public void setTestBean6(INestedTestBean testBean6) {
        if (this.testBean6 != null) {
            throw new IllegalStateException("Already called");
        }
        this.testBean6 = testBean6;
    }

    public ITestBean getTestBean3() {
        return testBean3;
    }

    public ITestBean getTestBean4() {
        return testBean4;
    }

    @Override
    @PostConstruct
    protected void init2() {
        // Also require the injections declared at this level.
        if (this.testBean3 == null || this.testBean4 == null) {
            throw new IllegalStateException("Resources not injected");
        }
        super.init2();
    }

    @Override
    @PreDestroy
    protected void destroy2() {
        super.destroy2();
    }
}
/** Injects by explicit name; "testBean9" is registered as an alias in the tests. */
private static class NamedResourceInjectionBean {

    @Resource(name="testBean9")
    private INestedTestBean testBean;
}

/** Requires conversion of the named value to a primitive int. */
private static class ConvertedResourceInjectionBean {

    @Resource(name="value")
    private int value;
}

/** {@code @Lazy} on a {@code @Resource} field: target is proxied, not created eagerly. */
private static class LazyResourceFieldInjectionBean {

    @Resource @Lazy
    private ITestBean testBean;
}

/** {@code @Lazy} on a {@code @Resource} setter: same lazy-proxy contract as the field variant. */
private static class LazyResourceMethodInjectionBean {

    private ITestBean testBean;

    @Resource @Lazy
    public void setTestBean(ITestBean testBean) {
        this.testBean = testBean;
    }
}

/** Concrete (class, not interface) dependency type; used by the CGLIB proxy test. */
private static class LazyResourceCglibInjectionBean {

    private TestBean testBean;

    @Resource @Lazy
    public void setTestBean(TestBean testBean) {
        this.testBean = testBean;
    }
}

/** Static factory that deliberately returns null. */
@SuppressWarnings("unused")
private static class NullFactory {

    public static Object create() {
        return null;
    }
}
}
| |
/*
* Copyright (c) 2009-2013, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.docsubmission.configuration.jmx;
import gov.hhs.fha.nhinc.configuration.IConfiguration.serviceEnum;
import gov.hhs.fha.nhinc.docsubmission._20.entity.deferred.response.EntityDocSubmissionDeferredResponseSecured_g1;
import gov.hhs.fha.nhinc.docsubmission._20.entity.deferred.response.EntityDocSubmissionDeferredResponseUnsecured_g1;
import gov.hhs.fha.nhinc.docsubmission._20.nhin.deferred.response.NhinXDRResponse20;
import gov.hhs.fha.nhinc.docsubmission.inbound.deferred.response.InboundDocSubmissionDeferredResponse;
import gov.hhs.fha.nhinc.docsubmission.outbound.deferred.response.OutboundDocSubmissionDeferredResponse;
import javax.servlet.ServletContext;
/**
 * JMX MXBean for the Document Submission Deferred Response 2.0 web services:
 * reports whether the inbound/outbound orchestration is standard or
 * passthrough, and switches the implementations at runtime.
 *
 * <p>NOTE(review): the original javadoc said "Request20"; this class wires the
 * deferred <em>response</em> services.
 *
 * @author msw
 */
public class DocumentSubmissionDefResponse20WebServices extends AbstractDSDeferredRespWebServicesMXBean {

    /** Spring bean name of the NHIN deferred-response endpoint. */
    private static final String NHIN_DS_BEAN_NAME = "nhinXDRDeferredResponse_g1";

    /** Spring bean name of the unsecured entity deferred-response endpoint. */
    private static final String ENTITY_UNSECURED_DS_BEAN_NAME = "entityXDRDeferredResponseUnsecured_g1";

    /** Spring bean name of the secured entity deferred-response endpoint. */
    private static final String ENTITY_SECURED_DS_BEAN_NAME = "entityXDRDeferredResponseSecured_g1";

    private final serviceEnum serviceName = serviceEnum.DocumentSubmissionDeferredResponse;

    /**
     * Instantiates a new document submission deferred response 2.0 web services MXBean.
     *
     * @param sc the servlet context used to locate the Spring bean factory
     */
    public DocumentSubmissionDefResponse20WebServices(ServletContext sc) {
        super(sc);
    }

    /**
     * @return true if the inbound orchestration currently wired into the NHIN
     *         endpoint is the passthrough implementation
     * @see gov.hhs.fha.nhinc.configuration.jmx.WebServicesMXBean#isInboundPassthru()
     */
    @Override
    public boolean isInboundPassthru() {
        boolean isPassthru = false;
        NhinXDRResponse20 nhinDS = retrieveBean(NhinXDRResponse20.class, getNhinBeanName());
        // renamed from "outboundDS": this local holds the INBOUND orchestration
        InboundDocSubmissionDeferredResponse inboundDS = nhinDS.getInboundDocSubmission();
        if (compareClassName(inboundDS, DEFAULT_INBOUND_PASSTHRU_IMPL_CLASS_NAME)) {
            isPassthru = true;
        }
        return isPassthru;
    }

    /**
     * @return true if the outbound orchestration currently wired into the
     *         unsecured entity endpoint is the passthrough implementation
     * @see gov.hhs.fha.nhinc.configuration.jmx.WebServicesMXBean#isOutboundPassthru()
     */
    @Override
    public boolean isOutboundPassthru() {
        boolean isPassthru = false;
        EntityDocSubmissionDeferredResponseUnsecured_g1 entityDS = retrieveBean(
                EntityDocSubmissionDeferredResponseUnsecured_g1.class, getEntityUnsecuredBeanName());
        OutboundDocSubmissionDeferredResponse outboundDS = entityDS.getOutboundDocSubmission();
        if (compareClassName(outboundDS, DEFAULT_OUTBOUND_PASSTHRU_IMPL_CLASS_NAME)) {
            isPassthru = true;
        }
        return isPassthru;
    }

    /*
     * (non-Javadoc)
     *
     * @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#getNhinBeanName()
     */
    @Override
    protected String getNhinBeanName() {
        return NHIN_DS_BEAN_NAME;
    }

    /*
     * (non-Javadoc)
     *
     * @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#getEntityUnsecuredBeanName()
     */
    @Override
    protected String getEntityUnsecuredBeanName() {
        return ENTITY_UNSECURED_DS_BEAN_NAME;
    }

    /*
     * (non-Javadoc)
     *
     * @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#getEntitySecuredBeanName()
     */
    @Override
    protected String getEntitySecuredBeanName() {
        return ENTITY_SECURED_DS_BEAN_NAME;
    }

    /**
     * Wires the standard inbound orchestration into the NHIN endpoint.
     *
     * @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#configureInboundStdImpl()
     */
    @Override
    public void configureInboundStdImpl() throws InstantiationException, IllegalAccessException,
            ClassNotFoundException {
        NhinXDRResponse20 nhinDS = retrieveBean(NhinXDRResponse20.class, getNhinBeanName());
        InboundDocSubmissionDeferredResponse inboundDS = retrieveBean(InboundDocSubmissionDeferredResponse.class,
                getStandardInboundBeanName());
        nhinDS.setInboundDocSubmissionResponse(inboundDS);
    }

    /**
     * Wires the passthrough inbound orchestration into the NHIN endpoint.
     *
     * @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#configureInboundPtImpl()
     */
    @Override
    public void configureInboundPtImpl() throws InstantiationException, IllegalAccessException,
            ClassNotFoundException {
        NhinXDRResponse20 nhinDS = retrieveBean(NhinXDRResponse20.class, getNhinBeanName());
        InboundDocSubmissionDeferredResponse inboundDS = retrieveBean(InboundDocSubmissionDeferredResponse.class,
                getPassthroughInboundBeanName());
        nhinDS.setInboundDocSubmissionResponse(inboundDS);
    }

    /**
     * Wires the standard outbound orchestration into both entity endpoints.
     *
     * @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#configureOutboundStdImpl()
     */
    @Override
    public void configureOutboundStdImpl() throws InstantiationException, IllegalAccessException,
            ClassNotFoundException {
        OutboundDocSubmissionDeferredResponse outboundDS = retrieveBean(
                OutboundDocSubmissionDeferredResponse.class, getStandardOutboundBeanName());
        EntityDocSubmissionDeferredResponseUnsecured_g1 entityDSUnsecured = retrieveBean(
                EntityDocSubmissionDeferredResponseUnsecured_g1.class, getEntityUnsecuredBeanName());
        EntityDocSubmissionDeferredResponseSecured_g1 entityDSSecured = retrieveBean(
                EntityDocSubmissionDeferredResponseSecured_g1.class, getEntitySecuredBeanName());
        entityDSSecured.setOutboundDocSubmissionResponse(outboundDS);
        entityDSUnsecured.setOutboundDocSubmissionResponse(outboundDS);
    }

    /**
     * Wires the passthrough outbound orchestration into both entity endpoints.
     *
     * @see gov.hhs.fha.nhinc.configuration.jmx.AbstractWebServicesMXBean#configureOutboundPtImpl()
     */
    @Override
    public void configureOutboundPtImpl() throws InstantiationException, IllegalAccessException,
            ClassNotFoundException {
        OutboundDocSubmissionDeferredResponse outboundDS = retrieveBean(
                OutboundDocSubmissionDeferredResponse.class, getPassthroughOutboundBeanName());
        EntityDocSubmissionDeferredResponseUnsecured_g1 entityDSUnsecured = retrieveBean(
                EntityDocSubmissionDeferredResponseUnsecured_g1.class, getEntityUnsecuredBeanName());
        EntityDocSubmissionDeferredResponseSecured_g1 entityDSSecured = retrieveBean(
                EntityDocSubmissionDeferredResponseSecured_g1.class, getEntitySecuredBeanName());
        entityDSSecured.setOutboundDocSubmissionResponse(outboundDS);
        entityDSUnsecured.setOutboundDocSubmissionResponse(outboundDS);
    }

    /** @return the service this MXBean manages */
    public serviceEnum getServiceName() {
        return this.serviceName;
    }

    /**
     * @return true if the inbound orchestration currently wired into the NHIN
     *         endpoint is the standard implementation
     * @see gov.hhs.fha.nhinc.configuration.jmx.WebServicesMXBean#isInboundStandard()
     */
    @Override
    public boolean isInboundStandard() {
        boolean isStandard = false;
        NhinXDRResponse20 nhinDS = retrieveBean(NhinXDRResponse20.class, getNhinBeanName());
        // renamed from "outboundDS": this local holds the INBOUND orchestration
        InboundDocSubmissionDeferredResponse inboundDS = nhinDS.getInboundDocSubmission();
        if (compareClassName(inboundDS, DEFAULT_INBOUND_STANDARD_IMPL_CLASS_NAME)) {
            isStandard = true;
        }
        return isStandard;
    }

    /**
     * @return true if the outbound orchestration currently wired into the
     *         unsecured entity endpoint is the standard implementation
     * @see gov.hhs.fha.nhinc.configuration.jmx.WebServicesMXBean#isOutboundStandard()
     */
    @Override
    public boolean isOutboundStandard() {
        boolean isStandard = false;
        EntityDocSubmissionDeferredResponseUnsecured_g1 entityDS = retrieveBean(
                EntityDocSubmissionDeferredResponseUnsecured_g1.class, getEntityUnsecuredBeanName());
        OutboundDocSubmissionDeferredResponse outboundDS = entityDS.getOutboundDocSubmission();
        if (compareClassName(outboundDS, DEFAULT_OUTBOUND_STANDARD_IMPL_CLASS_NAME)) {
            isStandard = true;
        }
        return isStandard;
    }
}
| |
package hudson.util;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;
import static junit.framework.TestCase.fail;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for {@code Retrier}: success after N attempts, delay between
 * attempts, exhaustion without success, and exception handling (allowed by
 * type or inheritance, listener-modified results, and unallowed propagation).
 */
public class RetrierTest {

    // made static final: loggers are constants by convention
    private static final Logger LOG = Logger.getLogger(RetrierTest.class.getName());

    @Test
    public void performedAtThirdAttemptTest() throws Exception {
        final int SUCCESSFUL_ATTEMPT = 3;
        final String ACTION = "print";
        // capture Retrier's own log output for verification
        RingBufferLogHandler handler = new RingBufferLogHandler(20);
        Logger.getLogger(Retrier.class.getName()).addHandler(handler);
        Retrier<Boolean> r = new Retrier.Builder<>
                // Set the required params
                (
                        // action to perform
                        () -> {
                            LOG.info("action performed");
                            return true;
                        },
                        // check the result and return true if success
                        (currentAttempt, result) -> currentAttempt == SUCCESSFUL_ATTEMPT,
                        //name of the action
                        ACTION
                )
                // Set the optional parameters
                .withAttempts(SUCCESSFUL_ATTEMPT + 1)
                .withDelay(100)
                // Construct the object
                .build();

        // Begin the process
        Boolean finalResult = r.start();
        // null-safe truth check (idiomatic replacement of the null ternary)
        Assert.assertTrue(Boolean.TRUE.equals(finalResult));

        String text = Messages.Retrier_Success(ACTION, SUCCESSFUL_ATTEMPT);
        assertTrue(String.format("The log should contain '%s'", text), handler.getView().stream().anyMatch(m -> m.getMessage().contains(text)));
    }

    @Test
    public void sleepWorksTest() throws Exception {
        final int SUCCESSFUL_ATTEMPT = 2;
        final String ACTION = "print";
        final int SLEEP = 500;

        RingBufferLogHandler handler = new RingBufferLogHandler(20);
        Logger retrierLogger = Logger.getLogger(Retrier.class.getName());
        // save current level, just in case it's needed in other tests
        Level currentLogLevel = retrierLogger.getLevel();
        // the sleep message is logged at FINE
        retrierLogger.setLevel(Level.FINE);
        retrierLogger.addHandler(handler);

        Retrier<Boolean> r = new Retrier.Builder<>
                // Set the required params
                (
                        // action to perform
                        () -> {
                            LOG.info("action performed");
                            return true;
                        },
                        // check the result and return true if success
                        (currentAttempt, result) -> currentAttempt == SUCCESSFUL_ATTEMPT,
                        //name of the action
                        ACTION
                )
                // Set the optional parameters
                .withAttempts(SUCCESSFUL_ATTEMPT)
                // The time we want to wait between attempts. Let's set less time than default (1000) to have a faster
                // test
                .withDelay(SLEEP)
                // Construct the object
                .build();

        // Begin the process measuring how long it takes
        Instant start = Instant.now();
        Boolean finalResult = r.start();
        Instant finish = Instant.now();
        long timeElapsed = Duration.between(start, finish).toMillis();

        // Check delay works
        Assert.assertTrue(timeElapsed >= SLEEP);
        // Check result is true
        Assert.assertTrue(Boolean.TRUE.equals(finalResult));

        // Check the log tell us the sleep time
        String text = Messages.Retrier_Sleeping(SLEEP, ACTION);
        assertTrue(String.format("The log should contain '%s'", text), handler.getView().stream().anyMatch(m -> m.getMessage().contains(text)));

        // recover log level
        retrierLogger.setLevel(currentLogLevel);
    }

    @Test
    public void failedActionAfterThreeAttemptsTest() throws Exception {
        final int ATTEMPTS = 3;
        final String ACTION = "print";
        RingBufferLogHandler handler = new RingBufferLogHandler(20);
        Logger.getLogger(Retrier.class.getName()).addHandler(handler);
        Retrier<Boolean> r = new Retrier.Builder<>
                // Set the required params
                (
                        // action to perform: always fails
                        () -> {
                            LOG.info("action performed");
                            return false;
                        },
                        // check the result and return true if success
                        (currentAttempt, result) -> result,
                        //name of the action
                        ACTION
                )
                // Set the optional parameters
                .withAttempts(ATTEMPTS)
                .withDelay(100)
                // Construct the object
                .build();

        // Begin the process
        Boolean finalResult = r.start();
        Assert.assertFalse(Boolean.TRUE.equals(finalResult));

        String text = Messages.Retrier_NoSuccess(ACTION, ATTEMPTS);
        assertTrue(String.format("The log should contain '%s'", text), handler.getView().stream().anyMatch(m -> m.getMessage().contains(text)));
    }

    @Test
    public void failedActionWithExceptionAfterThreeAttemptsWithoutListenerTest() throws Exception {
        final int ATTEMPTS = 3;
        final String ACTION = "print";
        RingBufferLogHandler handler = new RingBufferLogHandler(20);
        Logger.getLogger(Retrier.class.getName()).addHandler(handler);
        Retrier<Boolean> r = new Retrier.Builder<>
                // Set the required params
                (
                        // action to perform
                        (Callable<Boolean>) () -> {
                            throw new IndexOutOfBoundsException("Exception allowed considered as failure");
                        },
                        // check the result and return true (boolean primitive type) if success
                        (currentAttempt, result) -> result == null ? false : result,
                        //name of the action
                        ACTION
                )
                // Set the optional parameters
                .withAttempts(ATTEMPTS)
                .withDelay(100)
                .withDuringActionExceptions(new Class[]{IndexOutOfBoundsException.class})
                // Construct the object
                .build();

        // Begin the process without catching the allowed exceptions
        Boolean finalResult = r.start();
        // with no listener, an allowed exception leaves the result null
        Assert.assertNull(finalResult);

        String textNoSuccess = Messages.Retrier_NoSuccess(ACTION, ATTEMPTS);
        assertTrue(String.format("The log should contain '%s'", textNoSuccess), handler.getView().stream().anyMatch(m -> m.getMessage().contains(textNoSuccess)));
        String testException = Messages.Retrier_ExceptionFailed(ATTEMPTS, ACTION);
        assertTrue(String.format("The log should contain '%s'", testException), handler.getView().stream().anyMatch(m -> m.getMessage().startsWith(testException)));
    }

    @Test
    public void failedActionWithAllowedExceptionWithListenerChangingResultTest() throws Exception {
        final int ATTEMPTS = 1;
        final String ACTION = "print";
        RingBufferLogHandler handler = new RingBufferLogHandler(20);
        Logger.getLogger(Retrier.class.getName()).addHandler(handler);
        Retrier<Boolean> r = new Retrier.Builder<>
                // Set the required params
                (
                        // action to perform
                        (Callable<Boolean>) () -> {
                            throw new IndexOutOfBoundsException("Exception allowed considered as failure");
                        },
                        // check the result and return true if success
                        (currentAttempt, result) -> result,
                        //name of the action
                        ACTION
                )
                // Set the optional parameters
                .withAttempts(ATTEMPTS)
                // Exceptions allowed
                .withDuringActionExceptions(new Class[]{IndexOutOfBoundsException.class})
                // Listener to call. It change the result to success
                .withDuringActionExceptionListener((attempt, exception) -> true)
                // Construct the object
                .build();

        // Begin the process catching the allowed exception
        Boolean finalResult = r.start();
        Assert.assertTrue(Boolean.TRUE.equals(finalResult));

        // The action was a success
        String textSuccess = Messages.Retrier_Success(ACTION, ATTEMPTS);
        assertTrue(String.format("The log should contain '%s'", textSuccess), handler.getView().stream().anyMatch(m -> m.getMessage().contains(textSuccess)));
        // And the message talking about the allowed raised is also there
        String testException = Messages.Retrier_ExceptionFailed(ATTEMPTS, ACTION);
        assertTrue(String.format("The log should contain '%s'", testException), handler.getView().stream().anyMatch(m -> m.getMessage().startsWith(testException)));
    }

    @Test
    public void failedActionWithAllowedExceptionByInheritanceTest() throws Exception {
        final int ATTEMPTS = 1;
        final String ACTION = "print";
        RingBufferLogHandler handler = new RingBufferLogHandler(20);
        Logger.getLogger(Retrier.class.getName()).addHandler(handler);
        Retrier<Boolean> r = new Retrier.Builder<>
                // Set the required params
                (
                        // action to perform
                        (Callable<Boolean>) () -> {
                            // This one is allowed because we allow IndexOutOfBoundsException (parent exception)
                            throw new ArrayIndexOutOfBoundsException("Unallowed exception breaks the process");
                        },
                        // check the result and return true if success
                        (currentAttempt, result) -> result,
                        //name of the action
                        ACTION
                )
                // Set the optional parameters
                .withAttempts(ATTEMPTS)
                // Exceptions allowed (not the one raised)
                .withDuringActionExceptions(new Class[]{IndexOutOfBoundsException.class})
                // Listener to call. It change the result to success
                .withDuringActionExceptionListener((attempt, exception) -> true)
                // Construct the object
                .build();

        // Begin the process catching the allowed exception
        Boolean finalResult = r.start();
        Assert.assertTrue(Boolean.TRUE.equals(finalResult));

        // The action was a success
        String textSuccess = Messages.Retrier_Success(ACTION, ATTEMPTS);
        assertTrue(String.format("The log should contain '%s'", textSuccess), handler.getView().stream().anyMatch(m -> m.getMessage().contains(textSuccess)));
        // And the message talking about the allowed raised is also there
        String testException = Messages.Retrier_ExceptionFailed(ATTEMPTS, ACTION);
        assertTrue(String.format("The log should contain '%s'", testException), handler.getView().stream().anyMatch(m -> m.getMessage().startsWith(testException)));
    }

    @Test
    public void failedActionWithUnAllowedExceptionTest() {
        final int ATTEMPTS = 1;
        final String ACTION = "print";
        RingBufferLogHandler handler = new RingBufferLogHandler(20);
        Logger.getLogger(Retrier.class.getName()).addHandler(handler);
        Retrier<Boolean> r = new Retrier.Builder<>
                // Set the required params
                (
                        // action to perform
                        (Callable<Boolean>) () -> {
                            // This one is not allowed, so it is raised out of the start method
                            throw new IOException("Unallowed exception breaks the process");
                        },
                        // check the result and return true if success
                        (currentAttempt, result) -> result,
                        //name of the action
                        ACTION
                )
                // Set the optional parameters
                .withAttempts(ATTEMPTS)
                // Exceptions allowed (not the one raised)
                .withDuringActionExceptions(new Class[]{IndexOutOfBoundsException.class})
                // Construct the object
                .build();

        // Begin the process that raises an unexpected exception
        try {
            r.start();
            fail("The process should be exited with an unexpected exception");
        } catch (IOException e) {
            String testFailure = Messages.Retrier_ExceptionThrown(ATTEMPTS, ACTION);
            assertTrue(String.format("The log should contain '%s'", testFailure), handler.getView().stream().anyMatch(m -> m.getMessage().contains(testFailure)));
        } catch (Exception e) {
            fail(String.format("Unexpected exception: %s", e));
        }
    }
}
| |
/*
* Copyright (c) 2016 SUGRA-SYM LLC (Nathan Wiehoff, Geoffrey Hibbert)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/*
* Impossibly dense clouds of rocks in space
*/
package celestial;
import celestial.Ship.Ship;
import com.jme3.asset.AssetManager;
import com.jme3.bullet.BulletAppState;
import com.jme3.bullet.collision.shapes.CollisionShape;
import com.jme3.bullet.control.RigidBodyControl;
import com.jme3.bullet.util.CollisionShapeFactory;
import com.jme3.material.Material;
import com.jme3.math.FastMath;
import com.jme3.math.Vector3f;
import com.jme3.renderer.queue.RenderQueue;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import entity.Entity;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Random;
import jmeplanet.PlanetAppState;
import lib.astral.Parser.Term;
import universe.Universe;
/**
*
* @author nwiehoff
*/
public class Field extends Celestial implements Serializable {
//location of the field
private Universe universe;
private Vector3f location = new Vector3f(0, 0, 0);
//seed to generate a block
private int seed;
private Term type;
//total volume of the field as an ellipsoid
private Vector3f size;
//materials and spatials
private transient Spatial asteroid;
private transient Material asteroidMat;
//density and zoning
private int count;
private int blockSize;
private int rockScale = 1;
private int diversity = 1;
//stores the position and rotation of each member of the block
private transient Block[] patterns;
private transient ArrayList<Block> zones;
private int step = 0;
//node for attatching blocks
private transient Node node;
private transient BulletAppState bulletAppState;
//mining
private boolean mineable = false;
private String resource;
/**
 * Creates an asteroid field.
 *
 * @param universe owning universe
 * @param name     display name of the field
 * @param field    config term providing blockSize, rockScale, diversity,
 *                 count, asset, and optional mineable/resource entries
 * @param seed     RNG seed for deterministic pattern generation
 * @param location center of the field in world space
 * @param bounds   ellipsoid radii defining the field's volume
 */
public Field(Universe universe, String name, Term field, int seed, Vector3f location, Vector3f bounds) {
    super(Float.POSITIVE_INFINITY, universe);
    this.universe = universe;
    this.seed = seed;
    size = bounds.clone();
    // BUG FIX: previously assigned bounds.clone(), silently ignoring the
    // location parameter and centering the field at its size vector.
    this.location = location.clone();
    setName(name);
    //extract terms
    blockSize = Integer.parseInt(field.getValue("blockSize"));
    rockScale = Integer.parseInt(field.getValue("rockScale"));
    diversity = Integer.parseInt(field.getValue("diversity"));
    count = Integer.parseInt(field.getValue("count"));
    String mineableRaw = field.getValue("mineable");
    if (mineableRaw != null) {
        mineable = Boolean.parseBoolean(mineableRaw);
        if (mineable) {
            // only mineable fields carry a resource type
            resource = field.getValue("resource");
        }
    }
    type = field;
}
/**
 * Rebuilds transient state: active zones, the per-diversity block patterns,
 * and the asteroid model/material loaded from {@code assets}.
 */
@Override
public void construct(AssetManager assets) {
    /*
     * Make asteroid dummies
     */
    zones = new ArrayList<>();
    // nameControl is inherited — presumably re-binds the entity label to this
    // celestial after (de)serialization; confirm in Celestial
    nameControl.setParent(this);
    generatePatterns();
    generateAsteroids(assets);
}
/**
 * Releases transient scene resources: tears down every generated block
 * pattern and drops the model/material references so they can be reloaded
 * by a later construct().
 */
@Override
public void deconstruct() {
    super.deconstruct();
    if (patterns != null) {
        for (Block pattern : patterns) {
            pattern.deconstructBlock();
        }
    }
    patterns = null;
    asteroid = null;
    asteroidMat = null;
}
/**
 * Loads the asteroid model and its lit material from the art asset named in
 * the field's config term, then builds every pre-generated block pattern.
 */
private void generateAsteroids(AssetManager assets) {
    // which art asset this field uses
    String assetName = type.getValue("asset");
    // model plus a lit material carrying the asset's diffuse texture
    asteroid = assets.loadModel("Models/" + assetName + "/Model.blend");
    asteroidMat = new Material(assets, "Common/MatDefs/Light/Lighting.j3md");
    asteroidMat.setTexture("DiffuseMap",
            assets.loadTexture("Models/" + assetName + "/tex.png"));
    asteroid.setShadowMode(RenderQueue.ShadowMode.CastAndReceive);
    asteroid.setMaterial(asteroidMat);
    // construct every pattern so blocks can be attached on demand
    for (Block pattern : patterns) {
        pattern.constructBlock();
    }
}
/**
 * Pre-generates one deterministic rock layout per level of diversity. Each
 * pattern seeds its own Random from (seed + index), so layouts are
 * reproducible across sessions.
 */
private void generatePatterns() {
    setBlocks(new Block[diversity]);
    for (int pattern = 0; pattern < diversity; pattern++) {
        Random random = new Random(seed + pattern);
        Vector3f[] positions = new Vector3f[count];
        Vector3f[] rotations = new Vector3f[count];
        for (int rock = 0; rock < count; rock++) {
            // uniform position inside the block cube [-blockSize, +blockSize)
            float px = (random.nextFloat() * getBlockSize() * 2.0f) - getBlockSize();
            float py = (random.nextFloat() * getBlockSize() * 2.0f) - getBlockSize();
            float pz = (random.nextFloat() * getBlockSize() * 2.0f) - getBlockSize();
            positions[rock] = new Vector3f(px, py, pz);
            // random orientation on each axis
            rotations[rock] = new Vector3f(
                    random.nextFloat() * FastMath.TWO_PI,
                    random.nextFloat() * FastMath.TWO_PI,
                    random.nextFloat() * FastMath.TWO_PI);
        }
        getBlocks()[pattern] = new Block(positions, rotations);
    }
}
/**
 * Tests whether a world-space point lies inside this field's ellipsoid.
 * Note the axis mapping: size.x/size.z/size.y pair with x/z/y.
 */
public boolean pointInsideField(Vector3f point) {
    // Translate into field-local coordinates first.
    Vector3f local = point.subtract(location);
    return pointInsideEllipse(local.x, local.y, local.z, size.x, size.z, size.y);
}
/** Standard ellipsoid containment: (x/l)^2 + (z/w)^2 + (y/h)^2 <= 1. */
private boolean pointInsideEllipse(float x, float y, float z, float l, float w, float h) {
    float termX = (x * x) / (l * l);
    float termZ = (z * z) / (w * w);
    float termY = (y * y) / (h * h);
    return termX + termZ + termY <= 1;
}
/**
 * Returns false when the player is within blockSize of a planet's surface
 * (jumpholes excluded), i.e. inside an asteroid exclusion zone.
 */
private boolean noExclusionZone() {
    for (int i = 0; i < universe.getPlayerShip().getCurrentSystem().getCelestials().size(); i++) {
        Entity candidate = universe.getPlayerShip().getCurrentSystem().getCelestials().get(i);
        if (!(candidate instanceof Planet) || candidate instanceof Jumphole) {
            continue;
        }
        Planet planet = (Planet) candidate;
        double distance = planet.getLocation().distance(universe.getPlayerShip().getPhysicsLocation());
        if (Math.max(distance - planet.getRadius(), 0) < blockSize) {
            //we are in an exclusion zone!
            return false;
        }
    }
    return true;
}
/**
 * Per-tick update: while the player is inside the field (and outside any
 * planet exclusion zone), keeps exactly the nearby blocks alive —
 * removing blocks that drifted out of range and spawning the next pattern
 * at the player's position when none is close enough.
 *
 * Bug fix: the original removed elements from 'zones' while iterating it
 * forward by index, which shifts later elements down and skips the entry
 * immediately after each removal (the exclusion-zone branch skipped every
 * other block). Iteration now runs backwards / removes after detaching.
 */
@Override
protected void alive() {
    Ship host = universe.getPlayerShip();
    try {
        if (noExclusionZone()) {
            if (pointInsideField(host.getPhysicsLocation())) {
                //calculate distance to block
                boolean inABlock = false;
                // Iterate backwards so removal does not skip elements.
                for (int a = zones.size() - 1; a >= 0; a--) {
                    Block localBlock = zones.get(a);
                    float dist = localBlock.getLocation().distance(host.getPhysicsLocation());
                    if (dist < getBlockSize() * 0.5) {
                        inABlock = true;
                    } else if (dist >= getBlockSize()) {
                        localBlock.remove(node);
                        zones.remove(a);
                    }
                }
                if (!inABlock) {
                    Block tmp = new Block(patterns[step]);
                    tmp.setLocation(host.getPhysicsLocation());
                    zones.add(tmp);
                    tmp.add(node);
                    //increment through cycle
                    step = (step + 1) % diversity;
                }
            }
            //else: out of field, leave existing blocks to age out
        } else {
            //remove asteroids, we are in an exclusion zone
            for (int a = 0; a < zones.size(); a++) {
                zones.get(a).remove(node);
            }
            zones.clear();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/** Returns the seed from which per-pattern RNGs are derived. */
public int getSeed() {
    return seed;
}

/** Sets the seed used by generatePatterns(). */
public void setSeed(int seed) {
    this.seed = seed;
}

/** Returns the field's ellipsoid extents (used by pointInsideField). */
public Vector3f getSize() {
    return size;
}

/** Sets the field's ellipsoid extents. */
public void setSize(Vector3f size) {
    this.size = size;
}

/** Returns the number of asteroids per block pattern. */
public int getCount() {
    return count;
}

/** Sets the number of asteroids per block pattern. */
public void setCount(int count) {
    this.count = count;
}
@Override
public Vector3f getLocation() {
    return location;
}

/**
 * Moves the field and shifts every live block by the same delta so the
 * already-spawned asteroids stay aligned with the field.
 */
@Override
public void setLocation(Vector3f location) {
    // offset = old - new; blocks are shifted by -offset, i.e. (new - old).
    Vector3f offset = this.location.subtract(location);
    if (zones != null) {
        for (int a = 0; a < zones.size(); a++) {
            zones.get(a).applyOffset(offset.mult(-1.0f));
        }
    }
    this.location = location;
}
/**
 * Returns the block size: scatter half-extent in generatePatterns() and
 * the spawn/removal distance threshold in alive().
 */
public int getBlockSize() {
    return blockSize;
}

/** Sets the block size. */
public void setBlockSize(int blockSize) {
    this.blockSize = blockSize;
}

/** Returns the upper bound (exclusive, +1) for random asteroid scale. */
public int getRockScale() {
    return rockScale;
}

/** Sets the asteroid scale bound. */
public void setRockScale(int rockScale) {
    this.rockScale = rockScale;
}

/** Returns the pre-generated block patterns. */
private Block[] getBlocks() {
    return patterns;
}

/** Replaces the pre-generated block patterns. */
private void setBlocks(Block[] blocks) {
    this.patterns = blocks;
}

/** True when this field yields a resource (see the "mineable" field value). */
public boolean isMineable() {
    return mineable;
}

public void setMineable(boolean mineable) {
    this.mineable = mineable;
}

/** Returns the resource name; only set when the field is mineable. */
public String getResource() {
    return resource;
}

public void setResource(String resource) {
    this.resource = resource;
}
/**
 * A pre-generated cluster of asteroid spatials that can be cloned and
 * placed around the player as a single unit.
 */
private class Block {
    // local-space position of each asteroid within the block
    private Vector3f[] map;
    // Euler rotation (radians) applied to each asteroid
    private Vector3f[] rot;
    private Spatial[] roids;
    //private Node block;
    //location
    private Vector3f location;

    public Block(Vector3f[] map, Vector3f[] rot) {
        this.map = map;
        this.rot = rot;
        roids = new Spatial[rot.length];
    }

    /**
     * Clone constructor: arrays are cloned shallowly (the Spatial
     * references are shared with the source pattern); location stays
     * unset until setLocation() is called.
     */
    public Block(Block toClone) {
        this.map = toClone.getMap().clone();
        this.rot = toClone.getRot().clone();
        this.roids = toClone.getRoids().clone();
    }

    /** Instantiates the asteroid spatials from the shared template model. */
    public void constructBlock() {
        //block = new Node();
        for (int a = 0; a < map.length; a++) {
            roids[a] = asteroid.clone();
            roids[a].setLocalTranslation(map[a].x, map[a].y, map[a].z);
            roids[a].rotate(rot[a].x, rot[a].y, rot[a].z);
            // NOTE(review): unseeded Random makes scales differ run-to-run,
            // unlike the seeded layout from generatePatterns() — confirm intended.
            roids[a].scale(new Random().nextInt(getRockScale()) + 1);
            CollisionShape hullShape = CollisionShapeFactory.createDynamicMeshShape(roids[a]);
            RigidBodyControl box = new RigidBodyControl(hullShape);
            //box.setMass(0);
            //box.setKinematic(false);
            roids[a].addControl(box);
            // NOTE(review): the same nameControl instance is added to every
            // asteroid; verify the control supports multiple spatials.
            roids[a].addControl(nameControl);
            System.out.println("Generating asteroid field - " + ((float) a / (float) map.length) * 100.0f);
        }
    }

    /** Drops the spatial references; does not detach them from any node. */
    public void deconstructBlock() {
        //block = null;
        roids = null;
    }

    public Spatial[] getRoids() {
        return roids;
    }

    public void setRoids(Spatial[] roids) {
        this.roids = roids;
    }

    public Vector3f[] getMap() {
        return map;
    }

    public void setMap(Vector3f[] map) {
        this.map = map;
    }

    public Vector3f[] getRot() {
        return rot;
    }

    public void setRot(Vector3f[] rot) {
        this.rot = rot;
    }

    public Vector3f getLocation() {
        return location;
    }

    public void setLocation(Vector3f location) {
        this.location = location;
    }

    /** Detaches every asteroid from the scene graph and the physics space. */
    private void remove(Node node) {
        for (int a = 0; a < roids.length; a++) {
            node.detachChild(roids[a]);
            bulletAppState.getPhysicsSpace().remove(roids[a]);
        }
    }

    /**
     * Attaches every asteroid at (location + map offset) and zeroes its
     * physics state so the rock spawns at rest.
     */
    private void add(Node node) {
        for (int a = 0; a < roids.length; a++) {
            node.attachChild(roids[a]);
            roids[a].getControl(RigidBodyControl.class).setPhysicsLocation(location.add(map[a]));
            roids[a].getControl(RigidBodyControl.class).setLinearVelocity(Vector3f.ZERO);
            roids[a].getControl(RigidBodyControl.class).setAngularVelocity(Vector3f.ZERO);
            roids[a].getControl(RigidBodyControl.class).clearForces();
            bulletAppState.getPhysicsSpace().add(roids[a]);
        }
    }

    /** Translates the whole block: each physics body plus the stored location. */
    public void applyOffset(Vector3f offset) {
        for (int a = 0; a < roids.length; a++) {
            roids[a].getControl(RigidBodyControl.class).activate();
            roids[a].getControl(RigidBodyControl.class).setPhysicsLocation(roids[a].getControl(RigidBodyControl.class).getPhysicsLocation().add(offset));
        }
        location.addLocal(offset);
    }
}
@Override
public void attach(Node node, BulletAppState physics, PlanetAppState planetAppState) {
    // Only the scene node and physics state are retained; blocks attach
    // lazily in alive(). planetAppState is unused by this entity.
    this.node = node;
    this.bulletAppState = physics;
}
@Override
public void detach(Node node, BulletAppState physics, PlanetAppState planetAppState) {
    // Intentionally empty: block spatials are removed individually by
    // alive(), not when the field itself is detached.
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.sql.tests;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.Encoder;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.ResultTypeQueryable;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.core.fs.Path;
import org.apache.flink.core.io.SimpleVersionedSerializer;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.checkpoint.ListCheckpointed;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.BucketAssigner;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.api.functions.sink.filesystem.bucketassigners.SimpleVersionedStringSerializer;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.OnCheckpointRollingPolicy;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.sources.DefinedFieldMapping;
import org.apache.flink.table.sources.DefinedRowtimeAttributes;
import org.apache.flink.table.sources.RowtimeAttributeDescriptor;
import org.apache.flink.table.sources.StreamTableSource;
import org.apache.flink.table.sources.tsextractors.ExistingField;
import org.apache.flink.table.sources.wmstrategies.BoundedOutOfOrderTimestamps;
import org.apache.flink.types.Row;
import java.io.PrintStream;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* End-to-end test for Stream SQL queries.
*
* <p>Includes the following SQL features:
* - OVER window aggregation
* - keyed and non-keyed GROUP BY TUMBLE aggregation
* - windowed INNER JOIN
* - TableSource with event-time attribute
*
* <p>The stream is bounded and will complete after about a minute.
* The result is always constant.
* The job is killed on the first attempt and restarted.
*
* <p>Parameters:
* -outputPath Sets the path to where the result data is written.
*/
public class StreamSQLTestProgram {

    /**
     * Builds the OVER -> TUMBLE -> JOIN -> final-aggregation SQL pipeline
     * over two generated tables, writes the result through a
     * StreamingFileSink, and executes the job. The KillMapper forces one
     * failure so the restart/checkpoint path is exercised.
     */
    public static void main(String[] args) throws Exception {
        ParameterTool params = ParameterTool.fromArgs(args);
        String outputPath = params.getRequired("outputPath");
        String planner = params.get("planner", "old");

        // Select the table planner; any other value leaves the builder's
        // default planner in place.
        final EnvironmentSettings.Builder builder = EnvironmentSettings.newInstance();
        builder.inStreamingMode();
        if (planner.equals("old")) {
            builder.useOldPlanner();
        } else if (planner.equals("blink")) {
            builder.useBlinkPlanner();
        }
        final EnvironmentSettings settings = builder.build();

        final StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        // Allow the job to recover after KillMapper kills the first attempt.
        sEnv.setRestartStrategy(RestartStrategies.fixedDelayRestart(
            3,
            Time.of(10, TimeUnit.SECONDS)
        ));
        sEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        sEnv.enableCheckpointing(4000);
        sEnv.getConfig().setAutoWatermarkInterval(1000);

        final StreamTableEnvironment tEnv = StreamTableEnvironment.create(sEnv, settings);

        // table1: dense stream (100 rows/key/s); table2: sparse (0.2 rows/key/s,
        // 5s offset) used as the probe side of the windowed join.
        tEnv.registerTableSource("table1", new GeneratorTableSource(10, 100, 60, 0));
        tEnv.registerTableSource("table2", new GeneratorTableSource(5, 0.2f, 60, 5));

        int overWindowSizeSeconds = 1;
        int tumbleWindowSizeSeconds = 10;

        // Per-key running count over a 1s range window.
        String overQuery = String.format(
            "SELECT " +
            " key, " +
            " rowtime, " +
            " COUNT(*) OVER (PARTITION BY key ORDER BY rowtime RANGE BETWEEN INTERVAL '%d' SECOND PRECEDING AND CURRENT ROW) AS cnt " +
            "FROM table1",
            overWindowSizeSeconds);

        // Tumbling aggregation; 'correct' is 1 only when the expected average
        // count (101) is observed, otherwise 99 poisons the final sum.
        String tumbleQuery = String.format(
            "SELECT " +
            " key, " +
            " CASE SUM(cnt) / COUNT(*) WHEN 101 THEN 1 ELSE 99 END AS correct, " +
            " TUMBLE_START(rowtime, INTERVAL '%d' SECOND) AS wStart, " +
            " TUMBLE_ROWTIME(rowtime, INTERVAL '%d' SECOND) AS rowtime " +
            "FROM (%s) " +
            "WHERE rowtime > TIMESTAMP '1970-01-01 00:00:01' " +
            "GROUP BY key, TUMBLE(rowtime, INTERVAL '%d' SECOND)",
            tumbleWindowSizeSeconds,
            tumbleWindowSizeSeconds,
            overQuery,
            tumbleWindowSizeSeconds);

        // Windowed inner join of the sparse table against the tumble result.
        String joinQuery = String.format(
            "SELECT " +
            " t1.key, " +
            " t2.rowtime AS rowtime, " +
            " t2.correct," +
            " t2.wStart " +
            "FROM table2 t1, (%s) t2 " +
            "WHERE " +
            " t1.key = t2.key AND " +
            " t1.rowtime BETWEEN t2.rowtime AND t2.rowtime + INTERVAL '%d' SECOND",
            tumbleQuery,
            tumbleWindowSizeSeconds);

        String finalAgg = String.format(
            "SELECT " +
            " SUM(correct) AS correct, " +
            " TUMBLE_START(rowtime, INTERVAL '20' SECOND) AS rowtime " +
            "FROM (%s) " +
            "GROUP BY TUMBLE(rowtime, INTERVAL '20' SECOND)",
            joinQuery);

        // get Table for SQL query
        Table result = tEnv.sqlQuery(finalAgg);
        // convert Table into append-only DataStream
        DataStream<Row> resultStream =
            tEnv.toAppendStream(result, Types.ROW(Types.INT, Types.SQL_TIMESTAMP));

        final StreamingFileSink<Row> sink = StreamingFileSink
            .forRowFormat(new Path(outputPath), (Encoder<Row>) (element, stream) -> {
                PrintStream out = new PrintStream(stream);
                out.println(element.toString());
            })
            .withBucketAssigner(new KeyBucketAssigner())
            // roll files only on checkpoints so output is exactly-once
            .withRollingPolicy(OnCheckpointRollingPolicy.build())
            .build();

        resultStream
            // inject a KillMapper that forwards all records but terminates the first execution attempt
            .map(new KillMapper()).setParallelism(1)
            // add sink function
            .addSink(sink).setParallelism(1);

        sEnv.execute();
    }

    /**
     * Use first field for buckets.
     */
    public static final class KeyBucketAssigner implements BucketAssigner<Row, String> {

        private static final long serialVersionUID = 987325769970523326L;

        @Override
        public String getBucketId(final Row element, final Context context) {
            return String.valueOf(element.getField(0));
        }

        @Override
        public SimpleVersionedSerializer<String> getSerializer() {
            return SimpleVersionedStringSerializer.INSTANCE;
        }
    }

    /**
     * TableSource for generated data.
     */
    public static class GeneratorTableSource
            implements StreamTableSource<Row>, DefinedRowtimeAttributes, DefinedFieldMapping {

        private final int numKeys;
        private final float recordsPerKeyAndSecond;
        private final int durationSeconds;
        private final int offsetSeconds;

        public GeneratorTableSource(int numKeys, float recordsPerKeyAndSecond, int durationSeconds, int offsetSeconds) {
            this.numKeys = numKeys;
            this.recordsPerKeyAndSecond = recordsPerKeyAndSecond;
            this.durationSeconds = durationSeconds;
            this.offsetSeconds = offsetSeconds;
        }

        @Override
        public DataStream<Row> getDataStream(StreamExecutionEnvironment execEnv) {
            return execEnv.addSource(new Generator(numKeys, recordsPerKeyAndSecond, durationSeconds, offsetSeconds));
        }

        // Physical type of the generated rows: (f0: key, f1: millis, f2: payload).
        @Override
        public TypeInformation<Row> getReturnType() {
            return Types.ROW(Types.INT, Types.LONG, Types.STRING);
        }

        @Override
        public TableSchema getTableSchema() {
            return new TableSchema(
                new String[] {"key", "rowtime", "payload"},
                new TypeInformation[] {Types.INT, Types.SQL_TIMESTAMP, Types.STRING});
        }

        @Override
        public String explainSource() {
            return "GeneratorTableSource";
        }

        // "rowtime" is derived from the physical "ts" field with a 100ms
        // bounded out-of-orderness watermark.
        @Override
        public List<RowtimeAttributeDescriptor> getRowtimeAttributeDescriptors() {
            return Collections.singletonList(
                new RowtimeAttributeDescriptor(
                    "rowtime",
                    new ExistingField("ts"),
                    new BoundedOutOfOrderTimestamps(100)));
        }

        @Override
        public Map<String, String> getFieldMapping() {
            Map<String, String> mapping = new HashMap<>();
            mapping.put("key", "f0");
            mapping.put("ts", "f1");
            mapping.put("payload", "f2");
            return mapping;
        }
    }

    /**
     * Data-generating source function.
     */
    public static class Generator implements SourceFunction<Row>, ResultTypeQueryable<Row>, ListCheckpointed<Long> {

        private final int numKeys;
        private final int offsetSeconds;

        private final int sleepMs;
        private final int durationMs;

        // logical event-time cursor in ms; checkpointed so restarts resume
        private long ms = 0;

        public Generator(int numKeys, float rowsPerKeyAndSecond, int durationSeconds, int offsetSeconds) {
            this.numKeys = numKeys;
            this.durationMs = durationSeconds * 1000;
            this.offsetSeconds = offsetSeconds;
            this.sleepMs = (int) (1000 / rowsPerKeyAndSecond);
        }

        @Override
        public void run(SourceContext<Row> ctx) throws Exception {
            // NOTE(review): offset is scaled by 2000 ms per "second" here —
            // double the nominal value. Confirm the factor is intentional.
            long offsetMS = offsetSeconds * 2000L;

            while (ms < durationMs) {
                synchronized (ctx.getCheckpointLock()) {
                    for (int i = 0; i < numKeys; i++) {
                        ctx.collect(Row.of(i, ms + offsetMS, "Some payload..."));
                    }
                    ms += sleepMs;
                }
                Thread.sleep(sleepMs);
            }
        }

        @Override
        public void cancel() { }

        @Override
        public TypeInformation<Row> getProducedType() {
            return Types.ROW(Types.INT, Types.LONG, Types.STRING);
        }

        @Override
        public List<Long> snapshotState(long checkpointId, long timestamp) {
            return Collections.singletonList(ms);
        }

        @Override
        public void restoreState(List<Long> state) {
            // sums restored sub-task states; with the source run at
            // parallelism 1 this restores the single saved cursor
            for (Long l : state) {
                ms += l;
            }
        }
    }

    /**
     * Kills the first execution attempt of an application when it receives the second record.
     */
    public static class KillMapper implements MapFunction<Row, Row>, ListCheckpointed<Integer>, ResultTypeQueryable {

        // counts all processed records of all previous execution attempts
        private int saveRecordCnt = 0;
        // counts all processed records of this execution attempt
        private int lostRecordCnt = 0;

        @Override
        public Row map(Row value) {

            // the both counts are the same only in the first execution attempt
            if (saveRecordCnt == 1 && lostRecordCnt == 1) {
                throw new RuntimeException("Kill this Job!");
            }

            // update checkpointed counter
            saveRecordCnt++;
            // update non-checkpointed counter
            lostRecordCnt++;

            // forward record
            return value;
        }

        @Override
        public TypeInformation getProducedType() {
            return Types.ROW(Types.INT, Types.SQL_TIMESTAMP);
        }

        @Override
        public List<Integer> snapshotState(long checkpointId, long timestamp) {
            return Collections.singletonList(saveRecordCnt);
        }

        @Override
        public void restoreState(List<Integer> state) {
            for (Integer i : state) {
                saveRecordCnt += i;
            }
        }
    }
}
| |
/**
* Copyright Microsoft Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microsoft.azure.storage.table;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Date;
import java.util.HashMap;
import java.util.Map.Entry;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.microsoft.azure.storage.Constants;
import com.microsoft.azure.storage.OperationContext;
import com.microsoft.azure.storage.StorageErrorCodeStrings;
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.core.JsonUtilities;
import com.microsoft.azure.storage.core.SR;
/**
* Reserved for internal use. A class used to read Table entities.
*/
final class TableDeserializer {

    /**
     * Used to create Json parsers and generators.
     */
    private static JsonFactory jsonFactory = new JsonFactory();

    /**
     * Reserved for internal use. Parses the operation response as a collection of entities. Reads entity data from the
     * specified input stream using the specified class type and optionally projects each entity result with the
     * specified resolver into an {@link ODataPayload} containing a collection of {@link TableResult} objects.
     *
     * @param inStream
     *            The <code>InputStream</code> to read the data to parse from.
     * @param clazzType
     *            The class type <code>T</code> implementing {@link TableEntity} for the entities returned. Set to
     *            <code>null</code> to ignore the returned entities and copy only response properties into the
     *            {@link TableResult} objects.
     * @param resolver
     *            An {@link EntityResolver} instance to project the entities into instances of type <code>R</code>. Set
     *            to <code>null</code> to return the entities as instances of the class type <code>T</code>.
     * @param options
     *            A {@link TableRequestOptions} object that specifies execution options such as retry policy and timeout
     *            settings for the operation.
     * @param opContext
     *            An {@link OperationContext} object used to track the execution of the operation.
     * @return
     *         An {@link ODataPayload} containing a collection of {@link TableResult} objects with the parsed operation
     *         response.
     * @throws InstantiationException
     *             if an error occurs while constructing the result.
     * @throws IllegalAccessException
     *             if an error occurs in reflection while parsing the result.
     * @throws StorageException
     *             if a storage service error occurs.
     * @throws IOException
     *             if an error occurs while accessing the stream.
     * @throws JsonParseException
     *             if an error occurs while parsing the stream.
     */
    @SuppressWarnings("unchecked")
    static <T extends TableEntity, R> ODataPayload<?> parseQueryResponse(final InputStream inStream,
            final TableRequestOptions options, final Class<T> clazzType, final EntityResolver<R> resolver,
            final OperationContext opContext) throws JsonParseException, IOException, InstantiationException,
            IllegalAccessException, StorageException {
        ODataPayload<T> corePayload = null;
        ODataPayload<R> resolvedPayload = null;
        ODataPayload<?> commonPayload = null;

        JsonParser parser = createJsonParserFromStream(inStream);

        try {
            // Exactly one of corePayload/resolvedPayload is populated,
            // depending on whether a resolver was supplied.
            if (resolver != null) {
                resolvedPayload = new ODataPayload<R>();
                commonPayload = resolvedPayload;
            }
            else {
                corePayload = new ODataPayload<T>();
                commonPayload = corePayload;
            }

            if (!parser.hasCurrentToken()) {
                parser.nextToken();
            }

            JsonUtilities.assertIsStartObjectJsonToken(parser);

            // move into data
            parser.nextToken();

            // if there is a clazz type and if JsonNoMetadata, create a classProperties dictionary to use for type inference once
            // instead of querying the cache many times
            HashMap<String, PropertyPair> classProperties = null;
            if (options.getTablePayloadFormat() == TablePayloadFormat.JsonNoMetadata && clazzType != null) {
                classProperties = PropertyPair.generatePropertyPairs(clazzType);
            }

            // Scan top-level fields for the OData "value" array; each element
            // is one entity object.
            while (parser.getCurrentToken() != null) {
                if (parser.getCurrentToken() == JsonToken.FIELD_NAME
                        && parser.getCurrentName().equals(ODataConstants.VALUE)) {
                    // move to start of array
                    parser.nextToken();

                    JsonUtilities.assertIsStartArrayJsonToken(parser);

                    // go to properties
                    parser.nextToken();

                    while (parser.getCurrentToken() == JsonToken.START_OBJECT) {
                        final TableResult res = parseJsonEntity(parser, clazzType, classProperties, resolver, options,
                                opContext);
                        if (corePayload != null) {
                            corePayload.tableResults.add(res);
                        }

                        if (resolver != null) {
                            resolvedPayload.results.add((R) res.getResult());
                        }
                        else {
                            corePayload.results.add((T) res.getResult());
                        }

                        parser.nextToken();
                    }

                    JsonUtilities.assertIsEndArrayJsonToken(parser);
                }

                parser.nextToken();
            }
        }
        finally {
            parser.close();
        }

        return commonPayload;
    }

    /**
     * Reserved for internal use. Parses the operation response as an entity. Reads entity data from the specified
     * <code>JsonParser</code> using the specified class type and optionally projects the entity result with the
     * specified resolver into a {@link TableResult} object.
     *
     * @param parser
     *            The <code>JsonParser</code> to read the data to parse from.
     * @param httpStatusCode
     *            The HTTP status code returned with the operation response.
     * @param clazzType
     *            The class type <code>T</code> implementing {@link TableEntity} for the entity returned. Set to
     *            <code>null</code> to ignore the returned entity and copy only response properties into the
     *            {@link TableResult} object.
     * @param resolver
     *            An {@link EntityResolver} instance to project the entity into an instance of type <code>R</code>. Set
     *            to <code>null</code> to return the entities as instance of the class type <code>T</code>.
     * @param options
     *            A {@link TableRequestOptions} object that specifies execution options such as retry policy and timeout
     *            settings for the operation.
     * @param opContext
     *            An {@link OperationContext} object used to track the execution of the operation.
     * @return
     *         A {@link TableResult} object with the parsed operation response.
     * @throws InstantiationException
     *             if an error occurs while constructing the result.
     * @throws IllegalAccessException
     *             if an error occurs in reflection while parsing the result.
     * @throws StorageException
     *             if a storage service error occurs.
     * @throws IOException
     *             if an error occurs while accessing the stream.
     * @throws JsonParseException
     *             if an error occurs while parsing the stream.
     */
    static <T extends TableEntity, R> TableResult parseSingleOpResponse(final InputStream inStream,
            final TableRequestOptions options, final int httpStatusCode, final Class<T> clazzType,
            final EntityResolver<R> resolver, final OperationContext opContext) throws JsonParseException, IOException,
            InstantiationException, IllegalAccessException, StorageException {
        JsonParser parser = createJsonParserFromStream(inStream);

        try {
            final TableResult res = parseJsonEntity(parser, clazzType,
                    null /*HashMap<String, PropertyPair> classProperties*/, resolver, options, opContext);
            res.setHttpStatusCode(httpStatusCode);
            return res;
        }
        finally {
            parser.close();
        }
    }

    /**
     * Reserved for internal use. Parses the operation response as an entity. Parses the result returned in the
     * specified stream in JSON format into a {@link TableResult} containing an entity of the specified class type
     * projected using the specified resolver.
     *
     * @param parser
     *            The <code>JsonParser</code> to read the data to parse from.
     * @param clazzType
     *            The class type <code>T</code> implementing {@link TableEntity} for the entity returned. Set to
     *            <code>null</code> to ignore the returned entity and copy only response properties into the
     *            {@link TableResult} object.
     * @param resolver
     *            An {@link EntityResolver} instance to project the entity into an instance of type <code>R</code>. Set
     *            to <code>null</code> to return the entity as an instance of the class type <code>T</code>.
     * @param options
     *            A {@link TableRequestOptions} object that specifies execution options such as retry policy and timeout
     *            settings for the operation.
     * @param opContext
     *            An {@link OperationContext} object used to track the execution of the operation.
     * @return
     *         A {@link TableResult} containing the parsed entity result of the operation.
     * @throws InstantiationException
     *             if an error occurs while constructing the result.
     * @throws IllegalAccessException
     *             if an error occurs in reflection while parsing the result.
     * @throws StorageException
     *             if a storage service error occurs.
     * @throws IOException
     *             if an error occurs while accessing the stream.
     * @throws JsonParseException
     *             if an error occurs while parsing the stream.
     */
    private static <T extends TableEntity, R> TableResult parseJsonEntity(final JsonParser parser,
            final Class<T> clazzType, HashMap<String, PropertyPair> classProperties, final EntityResolver<R> resolver,
            final TableRequestOptions options, final OperationContext opContext) throws JsonParseException,
            IOException, StorageException, InstantiationException, IllegalAccessException {
        final TableResult res = new TableResult();

        final HashMap<String, EntityProperty> properties = new HashMap<String, EntityProperty>();

        if (!parser.hasCurrentToken()) {
            parser.nextToken();
        }

        JsonUtilities.assertIsStartObjectJsonToken(parser);

        parser.nextToken();

        // get all metadata, if present ("odata."-prefixed fields come first)
        while (parser.getCurrentName().startsWith(ODataConstants.ODATA_PREFIX)) {
            final String name = parser.getCurrentName().substring(ODataConstants.ODATA_PREFIX.length());

            // get the value token
            parser.nextToken();

            if (name.equals(ODataConstants.ETAG)) {
                String etag = parser.getValueAsString();
                res.setEtag(etag);
            }

            // get the key token
            parser.nextToken();
        }

        if (resolver == null && clazzType == null) {
            return res;
        }

        // get object properties
        while (parser.getCurrentToken() != JsonToken.END_OBJECT) {
            String key = Constants.EMPTY_STRING;
            String val = Constants.EMPTY_STRING;
            EdmType edmType = null;

            // checks if this property is preceded by an OData property type annotation
            if (options.getTablePayloadFormat() != TablePayloadFormat.JsonNoMetadata
                    && parser.getCurrentName().endsWith(ODataConstants.ODATA_TYPE_SUFFIX)) {
                parser.nextToken();
                edmType = EdmType.parse(parser.getValueAsString());

                parser.nextValue();
                key = parser.getCurrentName();
                val = parser.getValueAsString();
            }
            else {
                key = parser.getCurrentName();

                parser.nextToken();
                val = parser.getValueAsString();
                // no annotation: infer the EDM type from the JSON token kind
                edmType = evaluateEdmType(parser.getCurrentToken(), parser.getValueAsString());
            }

            final EntityProperty newProp = new EntityProperty(val, edmType);
            newProp.setDateBackwardCompatibility(options.getDateBackwardCompatibility());
            properties.put(key, newProp);

            parser.nextToken();
        }

        String partitionKey = null;
        String rowKey = null;
        Date timestamp = null;
        String etag = null;

        // Remove core properties from map and set individually
        EntityProperty tempProp = properties.remove(TableConstants.PARTITION_KEY);
        if (tempProp != null) {
            partitionKey = tempProp.getValueAsString();
        }

        tempProp = properties.remove(TableConstants.ROW_KEY);
        if (tempProp != null) {
            rowKey = tempProp.getValueAsString();
        }

        tempProp = properties.remove(TableConstants.TIMESTAMP);
        if (tempProp != null) {
            tempProp.setDateBackwardCompatibility(false);
            timestamp = tempProp.getValueAsDate();

            // no odata.etag metadata: synthesize a weak etag from Timestamp
            if (res.getEtag() == null) {
                etag = getETagFromTimestamp(tempProp.getValueAsString());
                res.setEtag(etag);
            }
        }

        // do further processing for type if JsonNoMetadata by inferring type information via resolver or clazzType
        if (options.getTablePayloadFormat() == TablePayloadFormat.JsonNoMetadata
                && (options.getPropertyResolver() != null || clazzType != null)) {
            if (options.getPropertyResolver() != null) {
                for (final Entry<String, EntityProperty> p : properties.entrySet()) {
                    final String key = p.getKey();
                    final String value = p.getValue().getValueAsString();
                    EdmType edmType;

                    // try to use the property resolver to get the type
                    try {
                        edmType = options.getPropertyResolver().propertyResolver(partitionKey, rowKey, key, value);
                    }
                    catch (Exception e) {
                        throw new StorageException(StorageErrorCodeStrings.INTERNAL_ERROR, SR.CUSTOM_RESOLVER_THREW,
                                Constants.HeaderConstants.HTTP_UNUSED_306, null, e);
                    }

                    // try to create a new entity property using the returned type
                    try {
                        final EntityProperty newProp = new EntityProperty(value, edmType);
                        newProp.setDateBackwardCompatibility(options.getDateBackwardCompatibility());
                        properties.put(p.getKey(), newProp);
                    }
                    catch (IllegalArgumentException e) {
                        throw new StorageException(StorageErrorCodeStrings.INVALID_TYPE, String.format(
                                SR.FAILED_TO_PARSE_PROPERTY, key, value, edmType),
                                Constants.HeaderConstants.HTTP_UNUSED_306, null, e);
                    }
                }
            }
            else if (clazzType != null) {
                // fall back to the target class's reflected property types
                if (classProperties == null) {
                    classProperties = PropertyPair.generatePropertyPairs(clazzType);
                }
                for (final Entry<String, EntityProperty> p : properties.entrySet()) {
                    PropertyPair propPair = classProperties.get(p.getKey());
                    if (propPair != null) {
                        final EntityProperty newProp = new EntityProperty(p.getValue().getValueAsString(),
                                propPair.type);
                        newProp.setDateBackwardCompatibility(options.getDateBackwardCompatibility());
                        properties.put(p.getKey(), newProp);
                    }
                }
            }
        }

        // set the result properties, now that they are appropriately parsed
        res.setProperties(properties);

        // use resolver if provided, else create entity based on clazz type
        if (resolver != null) {
            res.setResult(resolver.resolve(partitionKey, rowKey, timestamp, res.getProperties(), res.getEtag()));
        }
        else if (clazzType != null) {
            // Generate new entity and return
            final T entity = clazzType.newInstance();
            entity.setEtag(res.getEtag());
            entity.setPartitionKey(partitionKey);
            entity.setRowKey(rowKey);
            entity.setTimestamp(timestamp);
            entity.readEntity(res.getProperties(), opContext);
            res.setResult(entity);
        }

        return res;
    }

    /** Builds a weak etag header value from a URL-encoded Timestamp string. */
    private static String getETagFromTimestamp(String timestampString) throws UnsupportedEncodingException {
        timestampString = URLEncoder.encode(timestampString, Constants.UTF8_CHARSET);
        return "W/\"datetime'" + timestampString + "'\"";
    }

    /**
     * Maps a JSON value token to the EDM type used when no annotation is
     * present; anything that is not a boolean or number is treated as STRING.
     */
    private static EdmType evaluateEdmType(JsonToken token, String value) {
        EdmType edmType = null;

        if (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE) {
            edmType = EdmType.BOOLEAN;
        }
        else if (token == JsonToken.VALUE_NUMBER_FLOAT) {
            edmType = EdmType.DOUBLE;
        }
        else if (token == JsonToken.VALUE_NUMBER_INT) {
            edmType = EdmType.INT32;
        }
        else {
            edmType = EdmType.STRING;
        }

        return edmType;
    }

    private static JsonParser createJsonParserFromStream(final InputStream streamRef) throws JsonParseException,
            IOException {
        JsonParser parser = jsonFactory.createParser(streamRef);
        return parser
                // allows handling of infinity, -infinity, and NaN for Doubles
                .enable(JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS)
                // don't close the stream and allow it to be drained completely
                // in ExecutionEngine to improve socket reuse
                .disable(JsonParser.Feature.AUTO_CLOSE_SOURCE);
    }
}
| |
/**
*
*/
package com.kant.design.patterns.interpretor;
import java.util.HashMap;
import java.util.Stack;
/**
* an operand can be an {@link Expression} {@link Variable} {@link Number}
*
* @author shaskant
*
*/
interface Operand {
/**
 * Evaluates this node against the variable bindings in {@code context}.
 * For composites this recursively evaluates both subtrees.
 */
double evaluate(HashMap<String, Integer> context);
/**
 * Prints this node (and any children) to stdout; {@code level} is the
 * current depth in the expression tree.
 */
void traverse(int level);
}
/**
*
* @author shaskant
*
*/
/**
 * Composite node of the expression tree: a binary operator applied to a
 * left and a right operand.
 *
 * @author shaskant
 */
class Expression implements Operand {
    /** The binary operator this node applies: '+', '-', '*' or '/'. */
    private char m_operator;
    /** Left and right operand subtrees (public, per the original design). */
    public Operand left, right;

    public Expression(char op) {
        m_operator = op;
    }

    /** In-order traversal: left subtree, then "&lt;level&gt;&lt;op&gt;&lt;level&gt; ", then right subtree. */
    public void traverse(int level) {
        left.traverse(level + 1);
        System.out.print("" + level + m_operator + level + " ");
        right.traverse(level + 1);
    }

    /**
     * Evaluates both subtrees and combines the results with the operator.
     * An unrecognized operator yields 0.0 (matching the original contract).
     */
    public double evaluate(HashMap<String, Integer> context) {
        final double lhs = left.evaluate(context);
        final double rhs = right.evaluate(context);
        switch (m_operator) {
            case '+':
                return lhs + rhs;
            case '-':
                return lhs - rhs;
            case '*':
                return lhs * rhs;
            case '/':
                return lhs / rhs;
            default:
                return 0.;
        }
    }
}
/**
*
* @author shaskant
*
*/
/**
 * Leaf node of the expression tree: a named variable resolved from the
 * evaluation context at evaluate() time.
 *
 * @author shaskant
 */
class Variable implements Operand {
    /** The variable's name, used as the lookup key in the context map. */
    private String m_name;

    public Variable(String name) {
        m_name = name;
    }

    /** Prints the variable name followed by a space. */
    public void traverse(int level) {
        System.out.print(m_name + " ");
    }

    /**
     * Looks the variable up in {@code context}.
     * <p>
     * Fixed: previously an unbound variable caused a bare, message-less
     * NullPointerException via auto-unboxing of {@code null}; now the same
     * exception type carries a diagnostic naming the missing variable.
     *
     * @throws NullPointerException if the variable is not bound in the context
     */
    public double evaluate(HashMap<String, Integer> context) {
        Integer bound = context.get(m_name);
        if (bound == null) {
            throw new NullPointerException("Variable '" + m_name + "' is not bound in the evaluation context");
        }
        return bound;
    }
}
/**
*
* @author shaskant
*
*/
/**
 * Leaf node of the expression tree: a numeric constant.
 *
 * @author shaskant
 */
class Number implements Operand {
    /** The constant value returned by every evaluation. */
    private double m_value;

    public Number(double value) {
        m_value = value;
    }

    /** Prints the constant followed by a space. */
    public void traverse(int level) {
        System.out.print(m_value + " ");
    }

    /**
     * Always returns the constant value; the context is ignored.
     * Fixed: the parameter was previously declared as a raw {@code HashMap},
     * losing the type safety declared by {@link Operand}.
     */
    public double evaluate(HashMap<String, Integer> context) {
        return m_value;
    }
}
/**
* Interpreter suggests modeling the domain with a recursive grammar. Each rule
* in the grammar is either a 'composite' (a rule that references other rules)
* or a terminal (a leaf node in a tree structure). Interpreter relies on the
* recursive traversal of the Composite pattern to interpret the 'sentences' it
* is asked to process
*
* @author shaskant
*
*/
/**
 * Interpreter suggests modeling the domain with a recursive grammar. Each rule
 * in the grammar is either a 'composite' (a rule that references other rules)
 * or a terminal (a leaf node in a tree structure). Interpreter relies on the
 * recursive traversal of the Composite pattern to interpret the 'sentences' it
 * is asked to process.
 *
 * @author shaskant
 */
public class InterpreterDemo {
    /** Token separator used by both the converter and the tree builder. */
    private static final String THE_SPACE = " ";

    /**
     * Returns true when the operator {@code a} (currently on the stack) should
     * be emitted before pushing {@code b}. Parentheses are special-cased:
     * '(' never pops, and ')' pops until its match.
     *
     * @param a operator on top of the stack
     * @param b incoming operator
     * @return true if {@code a} has precedence over {@code b}
     */
    public static boolean precedence(char a, char b) {
        String high = "*/", low = "+-";
        if (a == '(') {
            return false;
        }
        if (a == ')' && b == '(') {
            System.out.println(")-(");
            return false;
        }
        if (b == '(') {
            return false;
        }
        if (b == ')') {
            return true;
        }
        // Left-associative rules: pop when the stacked operator binds at least
        // as tightly as the incoming one.
        if (high.indexOf(a) > -1 && low.indexOf(b) > -1) {
            return true;
        }
        if (high.indexOf(a) > -1 && high.indexOf(b) > -1) {
            return true;
        }
        if (low.indexOf(a) > -1 && low.indexOf(b) > -1) {
            return true;
        }
        return false;
    }

    /**
     * Infix to postfix conversion via the classic shunting-yard algorithm.
     * The input must be space-separated; the output is also space-separated
     * so it can be re-tokenized by {@link #build_syntax_tree(String)}.
     *
     * @param expr a space-separated infix expression
     * @return the equivalent space-separated postfix expression
     */
    public static String convert_to_postfix(String expr) {
        Stack<Character> op_stack = new Stack<Character>();
        // StringBuilder instead of StringBuffer: this is single-threaded local
        // state, so the synchronized StringBuffer is unnecessary overhead.
        StringBuilder out = new StringBuilder();
        String opers = "+-*/()";
        char top_sym = '+';
        boolean empty;
        String[] tokens = expr.split(THE_SPACE);
        for (int i = 0; i < tokens.length; i++) {
            if (opers.indexOf(tokens[i].charAt(0)) == -1) {
                // Operand token: copy straight to the output.
                out.append(tokens[i]);
                out.append(THE_SPACE.charAt(0)); // always add space, necessary to tokenize
            } else {
                // Operator token: pop higher-precedence operators first.
                while (!(empty = op_stack.isEmpty())
                        && precedence(top_sym = op_stack.pop(),
                                tokens[i].charAt(0))) {
                    out.append(top_sym);
                    out.append(THE_SPACE.charAt(0)); // always add space, necessary to tokenize
                }
                if (!empty) {
                    op_stack.push(top_sym);
                }
                if (empty || tokens[i].charAt(0) != ')') {
                    op_stack.push(tokens[i].charAt(0));
                } else {
                    top_sym = op_stack.pop(); // discard the matching '('
                }
            }
        }
        // Flush any remaining operators.
        while (!op_stack.isEmpty()) {
            out.append(op_stack.pop());
            out.append(THE_SPACE.charAt(0));
        }
        return out.toString();
    }

    /**
     * Builds the expression tree for a space-separated postfix expression and
     * returns its root. Numeric tokens become {@link Number} leaves,
     * non-numeric operands become {@link Variable} leaves, and operators
     * become {@link Expression} composites.
     *
     * @param tree a space-separated postfix expression
     * @return root of the postfix tree
     */
    public static Operand build_syntax_tree(String tree) {
        Stack<Operand> stack = new Stack<Operand>();
        String opers = "+-*/"; // operators
        String[] tokens = tree.split(THE_SPACE);
        for (int i = 0; i < tokens.length; i++) {
            // If token is a number or variable
            if (opers.indexOf(tokens[i].charAt(0)) == -1) {
                Operand term = null;
                try {
                    term = new Number(Double.parseDouble(tokens[i]));
                } catch (NumberFormatException ex) {
                    term = new Variable(tokens[i]);
                }
                stack.push(term);
                // else if token is an operator
            } else {
                Expression expr = new Expression(tokens[i].charAt(0));
                expr.right = stack.pop();
                expr.left = stack.pop();
                stack.push(expr);
            }
        }
        return stack.pop();
    }

    /**
     * Demo entry point: converts a Celsius-to-Fahrenheit formula to postfix,
     * builds its tree and evaluates it for celsi = 0..100 step 10.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        String infixExpression = "celsi * 9 / 5 + thirty";
        System.out.println(infixExpression);
        String postfix = convert_to_postfix(infixExpression);
        System.out.println(postfix);
        Operand expr = build_syntax_tree(postfix);
        expr.traverse(1);
        System.out.println();
        HashMap<String, Integer> map = new HashMap<String, Integer>();
        map.put("thirty", 30);
        for (int i = 0; i <= 100; i += 10) {
            map.put("celsi", i);
            System.out.println("C is " + i + ", F is " + expr.evaluate(map));
        }
    }
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.uif.util;
import com.google.common.collect.Maps;
import org.apache.commons.lang.StringUtils;
import org.kuali.rice.krad.datadictionary.parse.BeanTag;
import org.kuali.rice.krad.datadictionary.parse.BeanTagAttribute;
import org.kuali.rice.krad.datadictionary.uif.UifDictionaryBeanBase;
import org.kuali.rice.krad.uif.UifConstants;
import org.kuali.rice.krad.util.KRADUtils;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
* This object represents a url in the Krad framework. The url can be set explicitly to a specific href or a
* controller
* plus a viewId can be provided (at very minimum). By default, the krad base bean config points the baseUrl property
* to 'krad.url' configuration property and the methodToCall to 'start', but these can be reset to any value as needed.
*
* <p>
* If href is not set, the generated value of href is constructed (in general) as follows:<br/>
* baseUrl + /controllerMapping + ? + methodToCall param + viewId param + other parameters
* <br/>
* with any necessary tokens to construct a valid url. If baseUrl is not provided, the url is not valid and a
* blank string is returned.
* </p>
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
@BeanTag(name = "url-bean", parent = "Uif-Url")
public class UrlInfo extends UifDictionaryBeanBase implements Serializable {
    private static final long serialVersionUID = 3195177614468120958L;

    // Explicit href (full override) and the value it was originally set to.
    private String href;
    private String originalHref;

    // Pieces used to generate the url when href is not explicitly set.
    private String baseUrl;
    private String controllerMapping;
    private String viewType;
    private String viewId;
    private String pageId;
    private String formKey;
    private String methodToCall;
    private Map<String, String> requestParameters;

    /**
     * Base constructor
     */
    public UrlInfo() {}

    /**
     * Constructor that initializes an href value
     *
     * @param href the href value
     */
    public UrlInfo(String href) {
        this.href = href;
        this.originalHref = href;
    }

    /**
     * Constructor that sets the base url construction properties
     *
     * @param baseUrl the baseUrl
     * @param controllerMapping the controllerMapping
     * @param viewId the id of the view
     * @param methodToCall the methodToCall
     */
    public UrlInfo(String baseUrl, String controllerMapping, String viewId, String methodToCall) {
        this.baseUrl = baseUrl;
        this.controllerMapping = controllerMapping;
        this.viewId = viewId;
        this.methodToCall = methodToCall;
    }

    /**
     * Generate the url based on properties of this object
     *
     * @return the generatedUrl, blank if not a valid url (no baseUrl value provided)
     */
    protected String generateUrl() {
        String generatedUrl = "";
        // Without a baseUrl there is nothing valid to generate.
        if (StringUtils.isBlank(baseUrl)) {
            return generatedUrl;
        }
        generatedUrl = baseUrl;
        // Append the controller mapping, inserting a slash only when it is missing.
        if (StringUtils.isNotBlank(controllerMapping) && !controllerMapping.startsWith("/")) {
            generatedUrl = generatedUrl + "/" + controllerMapping;
        } else if (StringUtils.isNotBlank(controllerMapping)) {
            generatedUrl = generatedUrl + controllerMapping;
        }
        // Collect the standard request parameters, then overlay any custom ones.
        Map<String, String> allRequestParameters = new HashMap<String, String>();
        if (StringUtils.isNotBlank(methodToCall)) {
            allRequestParameters.put(UifConstants.CONTROLLER_METHOD_DISPATCH_PARAMETER_NAME, methodToCall);
        }
        if (StringUtils.isNotBlank(viewId)) {
            allRequestParameters.put(UifConstants.UrlParams.VIEW_ID, viewId);
        }
        if (StringUtils.isNotBlank(pageId)) {
            allRequestParameters.put(UifConstants.UrlParams.PAGE_ID, pageId);
        }
        if (StringUtils.isNotBlank(formKey)) {
            allRequestParameters.put(UifConstants.UrlParams.FORM_KEY, formKey);
        }
        if (requestParameters != null) {
            allRequestParameters.putAll(requestParameters);
        }
        //add the request parameters
        generatedUrl = generatedUrl + KRADUtils.getRequestStringFromMap(allRequestParameters);
        return generatedUrl;
    }

    /**
     * Get the href value for this url object. This is the main call to this url object as it provides the full href
     * value represented by this object.
     *
     * <p>
     * If href has NOT been explicitly set to a value, the href is generated by
     * constructing pieces of the url set through the properties of this url object.
     * The generated value of href is constructed (in general) as follows:<br/>
     * baseUrl + /controllerMapping + ? + methodToCall param + viewId param + other parameters
     * <br/>
     * with any necessary tokens to construct a valid url. If baseUrl is not provided, the url is not valid and a
     * blank string is returned.
     * </p>
     *
     * @return THE href represented by this url object, or blank if not valid
     */
    @BeanTagAttribute(name = "href")
    public String getHref() {
        if (StringUtils.isBlank(this.href)) {
            // Lazily generate and cache the href from the configured pieces.
            this.href = generateUrl();
        }
        return href;
    }

    /**
     * Explicitly set the href value - if this is called with a value, all other properties of the url object are
     * ignored. This call is basically a full override. This also sets the orginalHref value.
     *
     * @param href
     */
    public void setHref(String href) {
        this.href = href;
        this.originalHref = href;
    }

    /**
     * The base url value (the value that comes before other properties). Default base bean value is set to use
     * 'krad.url' of the configuration properties.
     *
     * @return the baseUrl
     */
    @BeanTagAttribute(name = "baseUrl")
    public String getBaseUrl() {
        return baseUrl;
    }

    /**
     * Set the baseUrl
     *
     * @param baseUrl
     */
    public void setBaseUrl(String baseUrl) {
        this.baseUrl = baseUrl;
    }

    /**
     * The controllerMapping for the url (string that represents the controllerMapping path appended to baseUrl)
     *
     * @return the controllerMapping string
     */
    @BeanTagAttribute(name = "controllerMapping")
    public String getControllerMapping() {
        return controllerMapping;
    }

    /**
     * Set the controllerMapping
     *
     * @param controllerMapping
     */
    public void setControllerMapping(String controllerMapping) {
        this.controllerMapping = controllerMapping;
    }

    /**
     * The viewType representing the View's base type
     *
     * @return the viewType
     */
    @BeanTagAttribute(name = "viewType")
    public String getViewType() {
        return viewType;
    }

    /**
     * Set the viewType
     *
     * @param viewType
     */
    public void setViewType(String viewType) {
        this.viewType = viewType;
    }

    /**
     * ViewId representing the view by id to retrieve
     *
     * @return the viewId
     */
    @BeanTagAttribute(name = "viewId")
    public String getViewId() {
        return viewId;
    }

    /**
     * Set viewId
     *
     * @param viewId
     */
    public void setViewId(String viewId) {
        this.viewId = viewId;
    }

    /**
     * PageId representing the page of the view to retrieve by id
     *
     * @return the pageId
     */
    @BeanTagAttribute(name = "pageId")
    public String getPageId() {
        return pageId;
    }

    /**
     * Set pageId
     *
     * @param pageId
     */
    public void setPageId(String pageId) {
        this.pageId = pageId;
    }

    /**
     * FormKey representing the key of the form data to retrieve
     *
     * @return the formKey
     */
    @BeanTagAttribute(name = "formKey")
    public String getFormKey() {
        return formKey;
    }

    /**
     * Set the formKey
     *
     * @param formKey
     */
    public void setFormKey(String formKey) {
        this.formKey = formKey;
    }

    /**
     * MethodToCall representing the methodToCall on the controller (default base bean value is 'start')
     *
     * @return methodToCall on controller
     */
    @BeanTagAttribute(name = "methodToCall")
    public String getMethodToCall() {
        return methodToCall;
    }

    /**
     * Set the methodToCall
     *
     * @param methodToCall
     */
    public void setMethodToCall(String methodToCall) {
        this.methodToCall = methodToCall;
    }

    /**
     * Map of key value pairs that will be appended to the request parameters to pass in any custom data
     *
     * @return the requestParameters map
     */
    @BeanTagAttribute(name = "requestParameters", type = BeanTagAttribute.AttributeType.MAPVALUE)
    public Map<String, String> getRequestParameters() {
        return requestParameters;
    }

    /**
     * Set the requestParameters
     *
     * @param requestParameters
     */
    public void setRequestParameters(Map<String, String> requestParameters) {
        this.requestParameters = requestParameters;
    }

    /**
     * The original(set) href value. This is generally used to determine if the href was explicitly set and not
     * generated by this url object.
     *
     * @return the original(set) href value
     */
    public String getOriginalHref() {
        return originalHref;
    }

    /**
     * toString override returns the href value of url
     *
     * @return href value
     */
    @Override
    public String toString() {
        return this.getHref();
    }

    /**
     * Set the original href value directly, without affecting the current href.
     * (Fixed javadoc: this previously carried a copy-pasted toString description.)
     *
     * @param originalHref the original href value
     */
    public void setOriginalHref(String originalHref) {
        this.originalHref = originalHref;
    }

    @Override
    protected <T> void copyProperties(T dictionaryBaseBean) {
        super.copyProperties(dictionaryBaseBean);
        UrlInfo urlInfoCopy = (UrlInfo) dictionaryBaseBean;
        urlInfoCopy.setHref(this.href);
        urlInfoCopy.setOriginalHref(this.originalHref);
        urlInfoCopy.setBaseUrl(this.baseUrl);
        urlInfoCopy.setControllerMapping(this.controllerMapping);
        urlInfoCopy.setViewType(this.viewType);
        urlInfoCopy.setViewId(this.viewId);
        urlInfoCopy.setPageId(this.pageId);
        urlInfoCopy.setFormKey(this.formKey);
        urlInfoCopy.setMethodToCall(this.methodToCall);
        if (this.requestParameters != null) {
            Map<String, String> requestParametersCopy = Maps.newHashMapWithExpectedSize(this.requestParameters.size());
            // Fixed: iterate with a typed entry instead of a raw Map.Entry, which
            // forced redundant toString() calls on values that are already Strings.
            for (Map.Entry<String, String> requestParameter : requestParameters.entrySet()) {
                requestParametersCopy.put(requestParameter.getKey(), requestParameter.getValue());
            }
            urlInfoCopy.setRequestParameters(requestParametersCopy);
        }
    }
}
| |
package jvm.ncatz.netbour.pck_fragment.home.all;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.app.AlertDialog;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.nightonke.boommenu.BoomMenuButton;
import com.yalantis.contextmenu.lib.ContextMenuDialogFragment;
import com.yalantis.contextmenu.lib.MenuObject;
import com.yalantis.contextmenu.lib.MenuParams;
import com.yalantis.contextmenu.lib.interfaces.OnMenuItemClickListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnItemClick;
import de.cketti.mailto.EmailIntentBuilder;
import jvm.ncatz.netbour.R;
import jvm.ncatz.netbour.pck_adapter.AdpCommunity;
import jvm.ncatz.netbour.pck_adapter.IAdapter;
import jvm.ncatz.netbour.pck_interface.FrgBack;
import jvm.ncatz.netbour.pck_interface.FrgLists;
import jvm.ncatz.netbour.pck_interface.presenter.PresenterCommunity;
import jvm.ncatz.netbour.pck_pojo.PoCommunity;
import jvm.ncatz.netbour.pck_pojo.PoUser;
import jvm.ncatz.netbour.pck_presenter.PresenterCommunityImpl;
/**
 * Fragment that lists communities, supports sorting (postal code / flats),
 * deletion (admin only), editing and email reporting. Data arrives through
 * {@link PresenterCommunityImpl} backed by Firebase.
 */
public class FrgCommunity extends Fragment implements PresenterCommunity.ViewList, IAdapter, IAdapter.ICommunity, IAdapter.ICode {
    @BindView(R.id.fragListCommunity_list)
    ListView communityList;
    @BindView(R.id.fragListCommunity_empty)
    TextView communityEmpty;

    /** Opens the boom menu of the tapped row. */
    @OnItemClick(R.id.fragListCommunity_list)
    public void itemClick(View view) {
        BoomMenuButton bmb = (BoomMenuButton) view.findViewById(R.id.adapterCommunity_Menu);
        bmb.boom();
    }

    private AdpCommunity adpCommunity;
    private AlertDialog loading;
    private ContextMenuDialogFragment frg;
    // Host-activity callbacks; all set in onAttach and cleared in onDetach.
    private FrgBack callbackBack;
    private FrgLists callSnack;
    private ListCommunity callback;
    private PresenterCommunityImpl presenterCommunity;
    // Sort direction toggles: flipped on every sort request.
    private boolean flatsSort;
    private boolean postalSort;
    private int userCategory;
    private String[] to;

    /** Contract the hosting activity must implement. */
    public interface ListCommunity {
        void changeCode(String code);

        void deletedCommunity(PoCommunity item);

        void sendCommunity(PoCommunity item);
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        callbackBack = (FrgBack) context;
        callSnack = (FrgLists) context;
        callback = (ListCommunity) context;
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setRetainInstance(true);
        setHasOptionsMenu(true);
        loadingDialogCreate();
        flatsSort = false;
        postalSort = false;
        Bundle bundle = getArguments();
        if (bundle != null) {
            userCategory = bundle.getInt("userCategory");
            ArrayList<String> arrayList = bundle.getStringArrayList("adminEmails");
            List<PoCommunity> list = new ArrayList<>();
            adpCommunity = new AdpCommunity(getActivity(), list, this, this, this);
            presenterCommunity = new PresenterCommunityImpl(null, this);
            presenterCommunity.instanceFirebase();
            if (arrayList != null) {
                to = arrayList.toArray(new String[arrayList.size()]);
            }
        }
        createMenu();
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_list_community, container, false);
        ButterKnife.bind(this, view);
        return view;
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        communityList.setAdapter(adpCommunity);
        communityList.setDivider(null);
    }

    @Override
    public void onStart() {
        super.onStart();
        loadingDialogShow();
        if (callbackBack != null) {
            callbackBack.backFromForm();
        }
        if (presenterCommunity != null) {
            presenterCommunity.attachFirebase();
        }
    }

    @Override
    public void onStop() {
        super.onStop();
        if (presenterCommunity != null) {
            presenterCommunity.dettachFirebase();
        }
        loadingDialogHide();
    }

    @Override
    public void onDetach() {
        super.onDetach();
        callback = null;
        callSnack = null;
        callbackBack = null;
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.menu_list, menu);
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.sort_menu:
                frg.show(getActivity().getSupportFragmentManager(), "cmdf");
                break;
        }
        return super.onOptionsItemSelected(item);
    }

    /** Admins get a confirmation dialog; everyone else a "no permission" snack. */
    @Override
    public void deleteElement(PoCommunity community, int position) {
        if (community != null) {
            if (userCategory == PoUser.GROUP_ADMIN) {
                showDeleteDialog(community, position);
            } else {
                if (callSnack != null) {
                    callSnack.sendSnack(getString(R.string.no_permission));
                }
            }
        }
    }

    @Override
    public void deletedCommunity(PoCommunity item) {
        // Guard against the fragment being detached (matches sibling methods).
        if (callback != null) {
            callback.deletedCommunity(item);
        }
    }

    @Override
    public void editElement(PoCommunity community) {
        if (userCategory == PoUser.GROUP_ADMIN) {
            if (callback != null) {
                callback.sendCommunity(community);
            }
        } else {
            // Fixed bug: the null check previously tested 'callback' while the
            // call below dereferences 'callSnack', risking an NPE after detach.
            if (callSnack != null) {
                callSnack.sendSnack(getString(R.string.no_permission));
            }
        }
    }

    @Override
    public void reportElement() {
        sendEmail();
    }

    @Override
    public void returnList(List<PoCommunity> list) {
        communityList.setVisibility(View.VISIBLE);
        communityEmpty.setVisibility(View.GONE);
        loadingDialogHide();
        updateList(list);
    }

    @Override
    public void returnListEmpty() {
        communityList.setVisibility(View.GONE);
        communityEmpty.setVisibility(View.VISIBLE);
        List<PoCommunity> list = new ArrayList<>();
        loadingDialogHide();
        updateList(list);
    }

    @Override
    public void selectCode(int position) {
        PoCommunity com = adpCommunity.getItem(position);
        if (com != null) {
            if (callback != null) {
                callback.changeCode(com.getCode());
            }
        }
    }

    /** Builds the context sort menu (close / sort-by-postal / sort-by-flats). */
    private void createMenu() {
        int actionBarHeight;
        TypedArray styledAttributes = getContext().getTheme().obtainStyledAttributes(
                new int[]{android.R.attr.actionBarSize});
        actionBarHeight = (int) styledAttributes.getDimension(0, 0);
        styledAttributes.recycle();
        MenuObject close = new MenuObject();
        close.setResource(R.drawable.window_close);
        MenuObject postal = new MenuObject(getString(R.string.sort_postal));
        postal.setResource(R.drawable.sign_direction);
        MenuObject flats = new MenuObject(getString(R.string.sort_flats));
        flats.setResource(R.drawable.account_multiple_outline);
        List<MenuObject> menuObjects = new ArrayList<>();
        menuObjects.add(close);
        menuObjects.add(postal);
        menuObjects.add(flats);
        MenuParams menuParams = new MenuParams();
        menuParams.setActionBarSize(actionBarHeight);
        menuParams.setMenuObjects(menuObjects);
        menuParams.setClosableOutside(true);
        menuParams.setFitsSystemWindow(true);
        menuParams.setClipToPadding(false);
        menuParams.setAnimationDuration(50);
        frg = ContextMenuDialogFragment.newInstance(menuParams);
        frg.setItemClickListener(new OnMenuItemClickListener() {
            @Override
            public void onMenuItemClick(View clickedView, int position) {
                switch (position) {
                    case 0:
                        //Close
                        break;
                    case 1:
                        sortPostal(postalSort);
                        break;
                    case 2:
                        sortFlats(flatsSort);
                        break;
                }
            }
        });
    }

    private void deleteResponse(int position) {
        if (presenterCommunity != null) {
            presenterCommunity.deleteCommunity(adpCommunity.getItem(position));
        }
    }

    /** Builds the non-cancelable, transparent loading dialog (shown while Firebase loads). */
    private void loadingDialogCreate() {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        LayoutInflater inflater = getActivity().getLayoutInflater();
        View view = inflater.inflate(R.layout.loading_dialog, null);
        builder.setView(view);
        builder.setCancelable(false);
        loading = builder.create();
        loading.setCancelable(false);
        loading.setCanceledOnTouchOutside(false);
        if (loading.getWindow() != null) {
            loading.getWindow().setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
        }
    }

    public void loadingDialogHide() {
        if (loading != null) {
            loading.dismiss();
        }
    }

    public void loadingDialogShow() {
        if (loading != null) {
            loading.show();
        }
    }

    /** Launches an email intent to the admin addresses, or toasts when none exist. */
    private void sendEmail() {
        if (to != null && to.length > 0) {
            EmailIntentBuilder.from(getActivity())
                    .to(Arrays.asList(to))
                    .subject(getActivity().getString(R.string.report_community))
                    .start();
        } else {
            Toast.makeText(getActivity(), R.string.no_email_admin, Toast.LENGTH_SHORT).show();
        }
    }

    private void showDeleteDialog(PoCommunity community, final int position) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        builder.setTitle(R.string.dialog_title_delete);
        builder.setMessage(getString(R.string.dialog_message_delete)
                + " " + community.getCode()
                + getString(R.string.dialog_message_delete_two));
        builder.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                deleteResponse(position);
            }
        });
        builder.setNegativeButton(android.R.string.no, null);
        AlertDialog dialog = builder.create();
        dialog.show();
    }

    /** Sorts by number of flats; direction alternates on each call. */
    private void sortFlats(boolean flatsSort) {
        if (flatsSort) {
            adpCommunity.sort(new Comparator<PoCommunity>() {
                @Override
                public int compare(PoCommunity o1, PoCommunity o2) {
                    // Integer.compare avoids the overflow risk of subtraction.
                    return Integer.compare(o2.getFlats(), o1.getFlats());
                }
            });
        } else {
            adpCommunity.sort(new Comparator<PoCommunity>() {
                @Override
                public int compare(PoCommunity o1, PoCommunity o2) {
                    return Integer.compare(o1.getFlats(), o2.getFlats());
                }
            });
        }
        this.flatsSort = !flatsSort;
    }

    /** Sorts by postal code; direction alternates on each call. */
    private void sortPostal(boolean postalSort) {
        if (postalSort) {
            adpCommunity.sort(new Comparator<PoCommunity>() {
                @Override
                public int compare(PoCommunity o1, PoCommunity o2) {
                    return o2.getPostal().compareTo(o1.getPostal());
                }
            });
        } else {
            adpCommunity.sort(new Comparator<PoCommunity>() {
                @Override
                public int compare(PoCommunity o1, PoCommunity o2) {
                    return o1.getPostal().compareTo(o2.getPostal());
                }
            });
        }
        this.postalSort = !postalSort;
    }

    /** Replaces the adapter contents and re-sorts by community code, descending. */
    private void updateList(List<PoCommunity> list) {
        adpCommunity.clear();
        adpCommunity.addAll(list);
        adpCommunity.sort(new Comparator<PoCommunity>() {
            @Override
            public int compare(PoCommunity o1, PoCommunity o2) {
                return o2.getCode().compareTo(o1.getCode());
            }
        });
    }
}
| |
package com.destinationradiodenver.mobileConsole.view;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Resource;
import javax.ejb.SessionContext;
import javax.ejb.Stateful;
import javax.enterprise.context.Conversation;
import javax.enterprise.context.ConversationScoped;
import javax.faces.application.FacesMessage;
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.inject.Inject;
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import com.destinationradiodenver.mobileStreaming.web.entity.MobileProfile;
/**
* Backing bean for MobileProfile entities.
* <p>
* This class provides CRUD functionality for all MobileProfile entities. It focuses
* purely on Java EE 6 standards (e.g. <tt>@ConversationScoped</tt> for
* state management, <tt>PersistenceContext</tt> for persistence,
* <tt>CriteriaBuilder</tt> for searches) rather than introducing a CRUD framework or
* custom base class.
*/
@Named
@Stateful
@ConversationScoped
public class MobileProfileBean implements Serializable
{
   private static final long serialVersionUID = 1L;

   /*
    * Support creating and retrieving MobileProfile entities
    */

   // Identifier of the entity being viewed/edited; null means "create new".
   private Long id;

   public Long getId()
   {
      return this.id;
   }

   public void setId(Long id)
   {
      this.id = id;
   }

   // The entity currently bound to the create/view/edit pages.
   private MobileProfile mobileProfile;

   public MobileProfile getMobileProfile()
   {
      return this.mobileProfile;
   }

   @Inject
   private Conversation conversation;

   // EXTENDED scope keeps entities managed across the whole conversation,
   // so update() can merge/persist without re-fetching.
   @PersistenceContext(type = PersistenceContextType.EXTENDED)
   private EntityManager entityManager;

   /**
    * Begins a new conversation and navigates to the create page.
    *
    * @return the JSF navigation outcome for the create view
    */
   public String create()
   {
      this.conversation.begin();
      return "create?faces-redirect=true";
   }

   /**
    * Loads the entity for the current id on an initial (non-postback) GET,
    * starting the conversation if needed. With no id, the search "example"
    * instance is used as a blank entity.
    */
   public void retrieve()
   {
      if (FacesContext.getCurrentInstance().isPostback())
      {
         return;
      }

      if (this.conversation.isTransient())
      {
         this.conversation.begin();
      }

      if (this.id == null)
      {
         this.mobileProfile = this.example;
      }
      else
      {
         this.mobileProfile = findById(getId());
      }
   }

   public MobileProfile findById(Long id)
   {

      return this.entityManager.find(MobileProfile.class, id);
   }

   /*
    * Support updating and deleting MobileProfile entities
    */

   /**
    * Persists a new entity (id == null) or merges an existing one, ending the
    * conversation. On failure the exception message is surfaced as a faces
    * message and navigation stays on the current page (returns null).
    *
    * @return the JSF navigation outcome, or null on error
    */
   public String update()
   {
      this.conversation.end();

      try
      {
         if (this.id == null)
         {
            this.entityManager.persist(this.mobileProfile);
            return "search?faces-redirect=true";
         }
         else
         {
            this.entityManager.merge(this.mobileProfile);
            return "view?faces-redirect=true&id=" + this.mobileProfile.getId();
         }
      }
      catch (Exception e)
      {
         FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(e.getMessage()));
         return null;
      }
   }

   /**
    * Removes the entity for the current id and ends the conversation.
    *
    * @return the JSF navigation outcome, or null on error
    */
   public String delete()
   {
      this.conversation.end();

      try
      {
         this.entityManager.remove(findById(getId()));
         this.entityManager.flush();
         return "search?faces-redirect=true";
      }
      catch (Exception e)
      {
         FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(e.getMessage()));
         return null;
      }
   }

   /*
    * Support searching MobileProfile entities with pagination
    */

   // Zero-based page index, total row count and the rows of the current page.
   private int page;
   private long count;
   private List<MobileProfile> pageItems;

   // Query-by-example holder: non-blank/non-zero fields become predicates.
   private MobileProfile example = new MobileProfile();

   public int getPage()
   {
      return this.page;
   }

   public void setPage(int page)
   {
      this.page = page;
   }

   public int getPageSize()
   {
      return 10;
   }

   public MobileProfile getExample()
   {
      return this.example;
   }

   public void setExample(MobileProfile example)
   {
      this.example = example;
   }

   /** Resets pagination to the first page before a new search. */
   public void search()
   {
      this.page = 0;
   }

   /**
    * Runs two criteria queries: one to populate the total match count and one
    * to fetch the current page of results, both filtered by the example
    * entity's predicates.
    */
   public void paginate()
   {

      CriteriaBuilder builder = this.entityManager.getCriteriaBuilder();

      // Populate this.count

      CriteriaQuery<Long> countCriteria = builder.createQuery(Long.class);
      Root<MobileProfile> root = countCriteria.from(MobileProfile.class);
      countCriteria = countCriteria.select(builder.count(root)).where(getSearchPredicates(root));
      this.count = this.entityManager.createQuery(countCriteria).getSingleResult();

      // Populate this.pageItems

      CriteriaQuery<MobileProfile> criteria = builder.createQuery(MobileProfile.class);
      root = criteria.from(MobileProfile.class);
      TypedQuery<MobileProfile> query = this.entityManager.createQuery(criteria.select(root).where(getSearchPredicates(root)));
      query.setFirstResult(this.page * getPageSize()).setMaxResults(getPageSize());
      this.pageItems = query.getResultList();
   }

   /**
    * Builds the WHERE predicates from the example entity: LIKE on name
    * (when non-blank), equality on bandwidth/width/height (when non-zero).
    *
    * @param root the query root the predicates must be bound to
    * @return the predicates array (possibly empty)
    */
   private Predicate[] getSearchPredicates(Root<MobileProfile> root)
   {

      CriteriaBuilder builder = this.entityManager.getCriteriaBuilder();
      List<Predicate> predicatesList = new ArrayList<Predicate>();

      String name = this.example.getName();
      if (name != null && !"".equals(name))
      {
         predicatesList.add(builder.like(root.<String> get("name"), '%' + name + '%'));
      }
      int bandwidth = this.example.getBandwidth();
      if (bandwidth != 0)
      {
         predicatesList.add(builder.equal(root.get("bandwidth"), bandwidth));
      }
      int width = this.example.getWidth();
      if (width != 0)
      {
         predicatesList.add(builder.equal(root.get("width"), width));
      }
      int height = this.example.getHeight();
      if (height != 0)
      {
         predicatesList.add(builder.equal(root.get("height"), height));
      }

      return predicatesList.toArray(new Predicate[predicatesList.size()]);
   }

   public List<MobileProfile> getPageItems()
   {
      return this.pageItems;
   }

   public long getCount()
   {
      return this.count;
   }

   /*
    * Support listing and POSTing back MobileProfile entities (e.g. from inside an
    * HtmlSelectOneMenu)
    */

   public List<MobileProfile> getAll()
   {

      CriteriaQuery<MobileProfile> criteria = this.entityManager.getCriteriaBuilder().createQuery(MobileProfile.class);
      return this.entityManager.createQuery(criteria.select(criteria.from(MobileProfile.class))).getResultList();
   }

   @Resource
   private SessionContext sessionContext;

   /**
    * JSF converter translating between entity ids (as strings) and managed
    * MobileProfile instances. Lookups go through the EJB business proxy so
    * they run inside the bean's persistence context.
    *
    * @return the id&lt;-&gt;entity converter
    */
   public Converter getConverter()
   {

      final MobileProfileBean ejbProxy = this.sessionContext.getBusinessObject(MobileProfileBean.class);

      return new Converter()
      {

         @Override
         public Object getAsObject(FacesContext context, UIComponent component, String value)
         {

            return ejbProxy.findById(Long.valueOf(value));
         }

         @Override
         public String getAsString(FacesContext context, UIComponent component, Object value)
         {

            if (value == null)
            {
               return "";
            }

            return String.valueOf(((MobileProfile) value).getId());
         }
      };
   }

   /*
    * Support adding children to bidirectional, one-to-many tables
    */

   // Staging instance for "add child" forms; swapped out by getAdded().
   private MobileProfile add = new MobileProfile();

   public MobileProfile getAdd()
   {
      return this.add;
   }

   /**
    * Returns the staged child instance and replaces it with a fresh one, so
    * repeated adds each get their own entity.
    *
    * @return the previously staged instance
    */
   public MobileProfile getAdded()
   {
      MobileProfile added = this.add;
      this.add = new MobileProfile();
      return added;
   }
}
| |
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.artifact.messageprocessor.wizard;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.regex.Pattern;
import javax.xml.namespace.QName;
import org.apache.axiom.om.OMAttribute;
import org.apache.axiom.om.OMElement;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.PluginExecution;
import org.apache.maven.model.Repository;
import org.apache.maven.project.MavenProject;
import org.apache.synapse.config.xml.MessageProcessorSerializer;
import org.apache.synapse.message.processor.MessageProcessor;
import org.apache.synapse.message.processor.impl.forwarder.ScheduledMessageForwardingProcessor;
import org.apache.synapse.message.processor.impl.sampler.SamplingProcessor;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.eclipse.jface.dialogs.MessageDialog;
import org.wso2.developerstudio.eclipse.artifact.messageprocessor.Activator;
import org.wso2.developerstudio.eclipse.artifact.messageprocessor.model.MessageProcessorModel;
import org.wso2.developerstudio.eclipse.artifact.messageprocessor.util.MessageProcessorImageUtils;
import org.wso2.developerstudio.eclipse.capp.maven.utils.MavenConstants;
import org.wso2.developerstudio.eclipse.esb.project.artifact.ESBArtifact;
import org.wso2.developerstudio.eclipse.esb.project.artifact.ESBProjectArtifact;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
import org.wso2.developerstudio.eclipse.maven.util.MavenUtils;
import org.wso2.developerstudio.eclipse.platform.ui.editor.Openable;
import org.wso2.developerstudio.eclipse.platform.ui.startup.ESBGraphicalEditor;
import org.wso2.developerstudio.eclipse.platform.ui.wizard.AbstractWSO2ProjectCreationWizard;
import org.wso2.developerstudio.eclipse.utils.file.FileUtils;
import org.codehaus.plexus.util.xml.Xpp3Dom;
/**
 * Wizard that creates or imports a Synapse message-processor artifact inside an ESB project:
 * writes the processor XML, registers it in artifact.xml and wires the Maven plugin in pom.xml.
 */
public class MessageProcessorCreationWizard extends AbstractWSO2ProjectCreationWizard {
    private static IDeveloperStudioLog log = Logger.getLog(Activator.PLUGIN_ID);
    private MessageProcessorModel messageProcessorModel;
    private ESBProjectArtifact esbProjectArtifact;
    private IProject esbProject;
    private IFile artifactFile;
    // Files created during performFinish(); each existing one is opened in an editor afterwards.
    private List<File> fileLst = new ArrayList<File>();
    // Artifact version; overwritten with the project POM version in updatePom().
    private String version = "1.0.0";

    public MessageProcessorCreationWizard() {
        messageProcessorModel = new MessageProcessorModel();
        setModel(messageProcessorModel);
        setWindowTitle("New Message processor");
        setDefaultPageImageDescriptor(MessageProcessorImageUtils.getInstance().getImageDescriptor("message-processor.png"));
    }

    @Override
    public IResource getCreatedResource() {
        return null;
    }

    /**
     * Creates (or imports) the message-processor artifact, registers it in artifact.xml,
     * updates the project POM and opens the created files in the ESB editor.
     * Errors are logged; the wizard still closes (returns true) in that case.
     */
    @Override
    public boolean performFinish() {
        try {
            boolean isNewArtifact = true;
            esbProject = messageProcessorModel.getSaveLocation().getProject();
            IContainer location = esbProject
                    .getFolder("src/main/synapse-config/message-processors");
            File pomfile = esbProject.getFile("pom.xml").getLocation().toFile();
            if (!pomfile.exists()) {
                createPOM(pomfile);
            }
            esbProjectArtifact = new ESBProjectArtifact();
            esbProjectArtifact.fromFile(esbProject.getFile("artifact.xml").getLocation().toFile());
            updatePom();
            esbProject.refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor());
            String groupId = getMavenGroupId(pomfile) + ".message-processors";
            if (getModel().getSelectedOption().equals("create.processor")) {
                // Create a brand-new processor file from the wizard model.
                artifactFile = location.getFile(new Path(messageProcessorModel
                        .getMessageProcessorName() + ".xml"));
                File destFile = artifactFile.getLocation().toFile();
                FileUtils.createFile(destFile, getTemplateContent());
                fileLst.add(destFile);
                // artifact.xml stores the path with forward slashes regardless of platform.
                String relativePath = FileUtils.getRelativePath(
                        esbProject.getLocation().toFile(),
                        new File(location.getLocation().toFile(), messageProcessorModel
                                .getMessageProcessorName() + ".xml")).replaceAll(
                        Pattern.quote(File.separator), "/");
                esbProjectArtifact.addESBArtifact(createArtifact(
                        messageProcessorModel.getMessageProcessorName(), groupId, version,
                        relativePath));
                esbProjectArtifact.toFile();
            } else {
                // Import flow: ask before overwriting an already-existing file.
                IFile task = location.getFile(new Path(getModel().getImportFile().getName()));
                if (task.exists()) {
                    if (!MessageDialog.openQuestion(getShell(), "WARNING",
                            "Do you like to override existing project in the workspace")) {
                        return false;
                    }
                    isNewArtifact = false;
                }
                copyImportFile(location, isNewArtifact, groupId);
            }
            esbProject.refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor());
            for (File file : fileLst) {
                if (file.exists()) {
                    openEditor(file);
                }
            }
        } catch (CoreException e) {
            log.error("CoreException has occurred", e);
        } catch (Exception e) {
            log.error("An unexpected error has occurred", e);
        }
        return true;
    }

    @Override
    protected boolean isRequireProjectLocationSection() {
        return false;
    }

    @Override
    protected boolean isRequiredWorkingSet() {
        return false;
    }

    /** Builds an artifact.xml entry describing a synapse/message-processors artifact. */
    private ESBArtifact createArtifact(String name, String groupId, String version, String path) {
        ESBArtifact artifact = new ESBArtifact();
        artifact.setName(name);
        artifact.setVersion(version);
        artifact.setType("synapse/message-processors");
        artifact.setServerRole("EnterpriseServiceBus");
        artifact.setGroupId(groupId);
        artifact.setFile(path);
        return artifact;
    }

    /**
     * Ensures the project POM carries the wso2-esb-messageprocessor-plugin with a pom-gen
     * execution, plus the WSO2/global repositories where applicable. Also refreshes
     * {@link #version} from the POM.
     *
     * @throws Exception if the POM cannot be read or written
     */
    public void updatePom() throws Exception {
        File mavenProjectPomLocation = esbProject.getFile("pom.xml").getLocation().toFile();
        MavenProject mavenProject = MavenUtils.getMavenProject(mavenProjectPomLocation);
        version = mavenProject.getVersion();
        // Compare against the message-PROCESSOR plugin version constant; the original code
        // checked the message-store constant here, which looks like a copy-paste slip.
        boolean pluginExists = MavenUtils.checkOldPluginEntry(mavenProject, "org.wso2.maven",
                "wso2-esb-messageprocessor-plugin",
                MavenConstants.WSO2_ESB_MESSAGE_PROCESSOR_PLUGIN_VERSION);
        if (pluginExists) {
            return;
        }
        Plugin plugin = MavenUtils.createPluginEntry(mavenProject, "org.wso2.maven",
                "wso2-esb-messageprocessor-plugin",
                MavenConstants.WSO2_ESB_MESSAGE_PROCESSOR_PLUGIN_VERSION, true);
        PluginExecution pluginExecution = new PluginExecution();
        pluginExecution.addGoal("pom-gen");
        pluginExecution.setPhase("process-resources");
        pluginExecution.setId("task");
        Xpp3Dom configurationNode = MavenUtils.createMainConfigurationNode();
        Xpp3Dom artifactLocationNode = MavenUtils.createXpp3Node(configurationNode,
                "artifactLocation");
        artifactLocationNode.setValue(".");
        Xpp3Dom typeListNode = MavenUtils.createXpp3Node(configurationNode, "typeList");
        typeListNode.setValue("${artifact.types}");
        pluginExecution.setConfiguration(configurationNode);
        plugin.addExecution(pluginExecution);
        // Only add the WSO2 repository when the user has not disabled it in the preferences.
        String disableWSO2Repo = Platform.getPreferencesService()
                .getString("org.wso2.developerstudio.eclipse.platform.ui", DISABLE_WSO2_REPOSITORY,
                        null, null);
        if (disableWSO2Repo == null) {
            MavenUtils.updateMavenRepo(mavenProject);
        }
        Repository globalRepositoryFromPreference = getGlobalRepositoryFromPreference();
        if (globalRepositoryFromPreference != null) {
            mavenProject.getModel().addRepository(globalRepositoryFromPreference);
            mavenProject.getModel().addPluginRepository(globalRepositoryFromPreference);
        }
        MavenUtils.saveMavenProject(mavenProject, mavenProjectPomLocation);
    }

    /**
     * Serializes the message processor configured in the wizard model to XML, with each
     * element placed on its own line.
     */
    private String getTemplateContent() {
        Map<String, Object> messageProcessorParameters = null;
        String className = "";
        OMElement messageProcessorElement;
        MessageProcessor messageProcessor = new ScheduledMessageForwardingProcessor();
        String lineSeparator = System.getProperty("line.separator", "\n");
        if (messageProcessorModel.getMessageProcessorType().equals(
                "Scheduled Message Forwarding Processor")) {
            messageProcessorParameters = new HashMap<String, Object>();
            // TOOLS-2026: use the class from the new org.apache.synapse.message.processor.impl package.
            className = "org.apache.synapse.message.processor.impl.forwarder.ScheduledMessageForwardingProcessor";
            messageProcessorParameters.put("interval",
                    ((Integer) messageProcessorModel.getForwardingInterval()).toString());
            // Optional parameters are emitted only when the user supplied a value.
            if (StringUtils.isNotBlank(messageProcessorModel.getRetryInterval())) {
                messageProcessorParameters.put("client.retry.interval",
                        messageProcessorModel.getRetryInterval());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getDeliveryAttempts())) {
                messageProcessorParameters.put("max.delivery.attempts",
                        messageProcessorModel.getDeliveryAttempts());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getClientRepository())) {
                messageProcessorParameters.put("axis2.repo",
                        messageProcessorModel.getClientRepository());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getAxis2Configuration())) {
                messageProcessorParameters.put("axis2.config",
                        messageProcessorModel.getAxis2Configuration());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getReplySequenceName())) {
                messageProcessorParameters.put("message.processor.reply.sequence",
                        messageProcessorModel.getReplySequenceName());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getFaultSequenceName())) {
                messageProcessorParameters.put("message.processor.fault.sequence",
                        messageProcessorModel.getFaultSequenceName());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getConfigurationFilePath())) {
                messageProcessorParameters.put("quartz.conf",
                        messageProcessorModel.getConfigurationFilePath());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getCronExpression())) {
                messageProcessorParameters.put("cronExpression",
                        messageProcessorModel.getCronExpression());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getPinnedServers())) {
                messageProcessorParameters.put("pinnedServers",
                        messageProcessorModel.getPinnedServers());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getProcessorState())) {
                boolean isActive = messageProcessorModel.getProcessorState().equals("Activate");
                messageProcessorParameters.put("is.active", Boolean.toString(isActive));
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getNonRetryHttpStatusCodes())) {
                messageProcessorParameters.put("non.retry.status.codes",
                        messageProcessorModel.getNonRetryHttpStatusCodes());
            }
            messageProcessor = new ScheduledMessageForwardingProcessor();
            messageProcessor.setTargetEndpoint(messageProcessorModel.getEndpointName());
            messageProcessor.setName(messageProcessorModel.getMessageProcessorName());
            messageProcessor.setMessageStoreName(messageProcessorModel.getMessageStore());
            messageProcessor.setParameters(messageProcessorParameters);
        } else if (messageProcessorModel.getMessageProcessorType().equals(
                "Message Sampling Processor")) {
            messageProcessorParameters = new HashMap<String, Object>();
            // TOOLS-2026: use the class from the new org.apache.synapse.message.processor.impl package.
            className = "org.apache.synapse.message.processor.impl.sampler.SamplingProcessor";
            messageProcessorParameters.put("interval",
                    ((Integer) messageProcessorModel.getSamplingInterval()).toString());
            messageProcessorParameters.put("concurrency",
                    ((Integer) messageProcessorModel.getSamplingConcurrency()).toString());
            if (StringUtils.isNotBlank(messageProcessorModel.getConfigurationFilePath())) {
                messageProcessorParameters.put("quartz.conf",
                        messageProcessorModel.getConfigurationFilePath());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getCronExpression())) {
                messageProcessorParameters.put("cronExpression",
                        messageProcessorModel.getCronExpression());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getPinnedServers())) {
                messageProcessorParameters.put("pinnedServers",
                        messageProcessorModel.getPinnedServers());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getSequence())) {
                messageProcessorParameters.put("sequence", messageProcessorModel.getSequence());
            }
            if (StringUtils.isNotBlank(messageProcessorModel.getProcessorState())) {
                boolean isActive = messageProcessorModel.getProcessorState().equals("Activate");
                messageProcessorParameters.put("is.active", Boolean.toString(isActive));
            }
            messageProcessor = new SamplingProcessor();
            messageProcessor.setName(messageProcessorModel.getMessageProcessorName());
            messageProcessor.setMessageStoreName(messageProcessorModel.getMessageStore());
            messageProcessor.setParameters(messageProcessorParameters);
        } else if (messageProcessorModel.getMessageProcessorType().equals(
                "Custom Message Processor")) {
            messageProcessorParameters = new HashMap<String, Object>();
            // Serialized through a SamplingProcessor; its class attribute is replaced with the
            // user-supplied FQN further below.
            messageProcessor = new SamplingProcessor();
            className = messageProcessorModel.getClassFQN();
            messageProcessor.setName(messageProcessorModel.getMessageProcessorName());
            messageProcessor.setMessageStoreName(messageProcessorModel.getMessageStore());
            messageProcessor.setParameters(messageProcessorParameters);
            HashMap<String, String> parameters = messageProcessorModel
                    .getCustomProcessorParameters();
            for (Map.Entry<String, String> entry : parameters.entrySet()) {
                if (StringUtils.isNotBlank(entry.getKey())) {
                    messageProcessorParameters.put(entry.getKey(), entry.getValue());
                }
            }
        }
        messageProcessorElement = MessageProcessorSerializer.serializeMessageProcessor(null,
                messageProcessor);
        OMAttribute classAttr = messageProcessorElement.getAttribute(new QName("class"));
        if (messageProcessorModel.getMessageProcessorType().equals("Custom Message Processor")
                && classAttr != null) {
            classAttr.setAttributeValue(className);
        } else {
            messageProcessorElement.addAttribute("class", className, null);
        }
        // Break the single-line serialization into one element per line for readability.
        return messageProcessorElement.toString().replace("><", ">" + lineSeparator + "<");
    }

    /**
     * Copies an imported configuration into the project and, when the artifact is new,
     * registers each processor in artifact.xml.
     *
     * @param importLocation folder receiving the copied XML file(s)
     * @param isNewArtifact  whether artifact.xml entries should be created
     * @param groupId        Maven group id used for the artifact.xml entries
     * @throws IOException if the copy or the artifact.xml update fails
     */
    public void copyImportFile(IContainer importLocation, boolean isNewArtifact, String groupId)
            throws IOException {
        File importFile = getModel().getImportFile();
        File destFile = null;
        List<OMElement> selectedList = ((MessageProcessorModel) getModel())
                .getSelectedProcessorList();
        if (selectedList != null && selectedList.size() > 0) {
            // One file per processor the user selected from the imported configuration.
            for (OMElement element : selectedList) {
                String name = element.getAttributeValue(new QName("name"));
                destFile = new File(importLocation.getLocation().toFile(), name + ".xml");
                FileUtils.createFile(destFile, element.toString());
                fileLst.add(destFile);
                if (isNewArtifact) {
                    String relativePath = FileUtils.getRelativePath(importLocation.getProject()
                            .getLocation().toFile(), new File(
                            importLocation.getLocation().toFile(), name + ".xml"));
                    esbProjectArtifact.addESBArtifact(createArtifact(name, groupId, version,
                            relativePath));
                }
            }
        } else {
            // No selection list: copy the import file verbatim.
            destFile = new File(importLocation.getLocation().toFile(), importFile.getName());
            FileUtils.copy(importFile, destFile);
            fileLst.add(destFile);
            String name = importFile.getName().replaceAll(".xml$", "");
            if (isNewArtifact) {
                String relativePath = FileUtils.getRelativePath(importLocation.getProject()
                        .getLocation().toFile(), new File(importLocation.getLocation().toFile(),
                        name + ".xml"));
                esbProjectArtifact.addESBArtifact(createArtifact(name, groupId, version,
                        relativePath));
            }
        }
        try {
            esbProjectArtifact.toFile();
        } catch (Exception e) {
            throw new IOException(e);
        }
    }

    /** Opens the given processor file in the ESB graphical editor; failures are logged only. */
    @Override
    public void openEditor(File file) {
        try {
            refreshDistProjects();
            IFile resource = ResourcesPlugin.getWorkspace().getRoot()
                    .getFileForLocation(Path.fromOSString(file.getAbsolutePath()));
            String path = resource.getParent().getFullPath() + "/";
            String source = FileUtils.getContentAsString(file);
            Openable openable = ESBGraphicalEditor.getOpenable();
            openable.editorOpen(file.getName(), "messageProcessor", path + "messageProcessor_",
                    source);
        } catch (Exception e) {
            log.error("Cannot open the editor", e);
        }
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.testFramework;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.DumbServiceImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.RecursionManager;
import com.intellij.util.indexing.FileBasedIndex;
import com.intellij.util.indexing.UnindexedFilesUpdater;
import junit.framework.*;
import org.jetbrains.annotations.NotNull;
import org.junit.internal.MethodSorter;
import java.lang.reflect.*;
import static com.intellij.testFramework.TestIndexingModeSupporter.IndexingMode.DUMB_EMPTY_INDEX;
import static junit.framework.TestSuite.warning;
/**
* To run a test with needed {@link IndexingMode}, it's enough to make getIndexingMode return it and run the test with IDE's gutter action.
* To run all dumb mode completion tests, check JavaDoc of
* {@link com.intellij.java.codeInsight.completion.JavaCompletionTestSuite} or
* {@link com.jetbrains.php.slowTests.PhpDumbCompletionTestSuite}
*/
public interface TestIndexingModeSupporter {
  /** How indexes should be set up for a test: smart mode, or one of several dumb-mode flavors. */
  enum IndexingMode {
    SMART {
      @Override
      public void setUpTest(@NotNull Project project, @NotNull Disposable testRootDisposable) {}

      @Override
      public void tearDownTest(@NotNull Project project) {}
    }, DUMB_FULL_INDEX {
      @Override
      public void setUpTest(@NotNull Project project, @NotNull Disposable testRootDisposable) {
        indexEverythingAndBecomeDumb(project);
        RecursionManager.disableMissedCacheAssertions(testRootDisposable);
      }

      @Override
      public void ensureIndexingStatus(@NotNull Project project) {
        // Re-index everything and return to dumb mode — same steps as setUpTest
        // (previously this duplicated the body of indexEverythingAndBecomeDumb inline).
        indexEverythingAndBecomeDumb(project);
      }
    }, DUMB_RUNTIME_ONLY_INDEX {
      @Override
      public void setUpTest(@NotNull Project project, @NotNull Disposable testRootDisposable) {
        becomeDumb(project);
        RecursionManager.disableMissedCacheAssertions(testRootDisposable);
      }
    }, DUMB_EMPTY_INDEX {
      @Override
      public void setUpTest(@NotNull Project project, @NotNull Disposable testRootDisposable) {
        // Replace the application-level index with an empty stub before going dumb.
        ServiceContainerUtil
          .replaceService(ApplicationManager.getApplication(), FileBasedIndex.class, new EmptyFileBasedIndex(), testRootDisposable);
        becomeDumb(project);
        RecursionManager.disableMissedCacheAssertions(testRootDisposable);
      }
    };

    /** Prepares the project for this indexing mode before the test body runs. */
    public abstract void setUpTest(@NotNull Project project,
                                   @NotNull Disposable testRootDisposable);

    /** Restores smart mode after the test; overridden as a no-op for SMART. */
    public void tearDownTest(@NotNull Project project) {
      ApplicationManager.getApplication().invokeAndWait(() -> {
        DumbServiceImpl.getInstance(project).setDumb(false);
      });
    }

    /** Hook to re-establish this mode's indexing state mid-test; no-op by default. */
    public void ensureIndexingStatus(@NotNull Project project) {
    }

    private static void becomeDumb(@NotNull Project project) {
      ApplicationManager.getApplication().invokeAndWait(() -> {
        DumbServiceImpl.getInstance(project).setDumb(true);
      });
    }

    // Runs a full index update in smart mode, then switches the project to dumb mode.
    private static void indexEverythingAndBecomeDumb(@NotNull Project project) {
      DumbServiceImpl dumbService = DumbServiceImpl.getInstance(project);
      ApplicationManager.getApplication().invokeAndWait(() -> {
        dumbService.setDumb(false);
        dumbService.queueTask(new UnindexedFilesUpdater(project));
        dumbService.setDumb(true);
      });
    }
  }

  void setIndexingMode(@NotNull IndexingMode mode);

  @NotNull IndexingMode getIndexingMode();

  /**
   * Adds every public {@code test*} method of {@code aClass} to {@code parentSuite} under the
   * handler's indexing mode, skipping performance tests and anything the handler ignores.
   * Instantiation problems are reported as warning tests instead of failing the suite build.
   */
  static void addTest(@NotNull Class<? extends TestIndexingModeSupporter> aClass,
                      @NotNull TestIndexingModeSupporter.IndexingModeTestHandler handler,
                      @NotNull TestSuite parentSuite) {
    if (handler.shouldIgnore(aClass)) return;
    try {
      TestSuite suite = handler.createTestSuite();
      suite.setName(aClass.getSimpleName());
      boolean foundTests = false;
      Constructor<? extends TestIndexingModeSupporter> constructor = aClass.getConstructor();
      for (Method declaredMethod : MethodSorter.getDeclaredMethods(aClass)) {
        if (!Modifier.isPublic(declaredMethod.getModifiers())) continue;
        String methodName = declaredMethod.getName();
        if (!methodName.startsWith("test")) continue;
        if (TestFrameworkUtil.isPerformanceTest(methodName, aClass.getName())) continue;
        if (handler.shouldIgnore(declaredMethod)) continue;
        TestIndexingModeSupporter aCase = constructor.newInstance();
        aCase.setIndexingMode(handler.getIndexingMode());
        if (aCase instanceof TestCase) {
          TestCase testCase = (TestCase)aCase;
          testCase.setName(methodName);
          if (UsefulTestCase.IS_UNDER_TEAMCITY) {
            // Wrap so TeamCity log parsing reports the indexing mode in the test name.
            Test wrapper = IndexingModeTestHandler.wrapForTeamCity(testCase, handler.getIndexingMode());
            suite.addTest(wrapper);
          }
          else {
            suite.addTest(testCase);
          }
        }
        else {
          // The failed check is `instanceof TestCase`; the old message named TestSuite
          // (and was missing a space).
          parentSuite.addTest(warning(aClass.getName() + " is not a TestCase"));
        }
        foundTests = true;
      }
      if (foundTests) {
        parentSuite.addTest(suite);
      }
    }
    catch (NoSuchMethodException e) {
      parentSuite.addTest(warning("Failed to find default constructor for " + aClass.getName() + ", see log"));
      //noinspection CallToPrintStackTrace
      e.printStackTrace();
    }
    catch (IllegalAccessException | InstantiationException | InvocationTargetException e) {
      parentSuite.addTest(warning("Failed to instantiate " + aClass.getName() + ", see log"));
      //noinspection CallToPrintStackTrace
      e.printStackTrace();
    }
  }

  /** Pairs an {@link IndexingMode} with naming/filtering policy for a generated suite. */
  abstract class IndexingModeTestHandler {
    public final String myTestSuiteName;
    public final String myTestNamePrefix;
    private final IndexingMode myIndexingMode;

    protected IndexingModeTestHandler(@NotNull String testSuiteName,
                                      @NotNull String testNamePrefix,
                                      @NotNull IndexingMode mode) {
      myTestSuiteName = testSuiteName;
      myTestNamePrefix = testNamePrefix;
      myIndexingMode = mode;
    }

    public TestSuite createTestSuite() {
      return new NamedTestSuite(myTestNamePrefix);
    }

    /** Returns true when the class or method must not run under this handler's mode. */
    public abstract boolean shouldIgnore(@NotNull AnnotatedElement aClass);

    public @NotNull TestIndexingModeSupporter.IndexingMode getIndexingMode() {
      return myIndexingMode;
    }

    // The three predicates below form a strictness chain: each suite inherits the
    // exclusions of the less restrictive one and adds its own annotation.
    private static boolean shouldIgnoreInFullIndexSuite(@NotNull AnnotatedElement element) {
      return element.isAnnotationPresent(NeedsIndex.SmartMode.class);
    }

    private static boolean shouldIgnoreInRuntimeOnlyIndexSuite(@NotNull AnnotatedElement element) {
      return shouldIgnoreInFullIndexSuite(element) || element.isAnnotationPresent(NeedsIndex.Full.class);
    }

    private static boolean shouldIgnoreInEmptyIndexSuite(@NotNull AnnotatedElement element) {
      return shouldIgnoreInRuntimeOnlyIndexSuite(element) || element.isAnnotationPresent(NeedsIndex.ForStandardLibrary.class);
    }

    private static Test wrapForTeamCity(@NotNull TestCase testCase, @NotNull IndexingMode mode) {
      return new MyHackyJUnitTaskMirrorImpl.VmExitErrorTest(testCase, mode);
    }

    /** TestSuite whose name is always prefixed with the handler's test-name prefix. */
    private static final class NamedTestSuite extends TestSuite {
      private final String myPrefix;

      private NamedTestSuite(@NotNull String prefix) {
        myPrefix = prefix;
      }

      @Override
      public void setName(String name) {
        super.setName(myPrefix + name);
      }
    }

    /**
     * TeamCity prints log with {@code jetbrains.buildServer.ant.junit.AntJUnitFormatter3}
     * (see org.jetbrains.intellij.build.impl.TestingTaskImpl), which in TC sources
     * in {@code jetbrains.buildServer.ant.junit.JUnitUtil#getTestName} uses either className.methodName template or toString() value
     * in case it {@code startsWith(className + ".")} or {@code endsWith("JUnitTaskMirrorImpl$VmExitErrorTest")}
     * <p>
     * To test TeamCity output locally one needs to run tests from {@code tests_in_ultimate.gant} with provided environment variable
     * {@code TEAMCITY_VERSION} (otherwise output would be formatted
     * with {@link org.jetbrains.intellij.build.JUnitLiveTestProgressFormatter}) and provided system property {@code agent.home.dir},
     * with path of buildAgent directory in TeamCity installation. To get it unpack TeamCity archive and start TeamCity with
     * {@code ./bin/runAll.sh start}
     * <p>
     * Also these properties may be useful for debugging of tests, making them wait for remote debug connection:
     * {@code
     * -Dintellij.build.test.debug.port=<port>
     * -Dintellij.build.test.debug.suspend=true
     * }
     */
    private static class MyHackyJUnitTaskMirrorImpl {
      private static class VmExitErrorTest implements Test {
        private final TestCase myTestCase;
        private final IndexingMode myMode;

        private VmExitErrorTest(@NotNull TestCase testCase,
                                @NotNull IndexingMode mode) {
          myTestCase = testCase;
          myMode = mode;
        }

        @Override
        public int countTestCases() {
          return myTestCase.countTestCases();
        }

        @Override
        public void run(TestResult result) {
          result.startTest(this);
          Protectable p = new Protectable() {
            @Override
            public void protect() throws Throwable {
              myTestCase.runBare();
            }
          };
          result.runProtected(this, p);
          result.endTest(this);
        }

        @Override
        public String toString() {
          // See class javadoc: TeamCity parses this exact format out of the log.
          return myTestCase.getClass().getName() + "." + myTestCase.getName() + " with IndexingMode " + myMode.name();
        }
      }
    }
  }

  class FullIndexSuite extends TestIndexingModeSupporter.IndexingModeTestHandler {
    public FullIndexSuite() {
      super("Full index", "Full index ", IndexingMode.DUMB_FULL_INDEX);
    }

    @Override
    public boolean shouldIgnore(@NotNull AnnotatedElement aClass) {
      return IndexingModeTestHandler.shouldIgnoreInFullIndexSuite(aClass);
    }
  }

  class RuntimeOnlyIndexSuite extends TestIndexingModeSupporter.IndexingModeTestHandler {
    public RuntimeOnlyIndexSuite() {
      super("RuntimeOnlyIndex", "Runtime only index ", IndexingMode.DUMB_RUNTIME_ONLY_INDEX);
    }

    @Override
    public boolean shouldIgnore(@NotNull AnnotatedElement aClass) {
      return IndexingModeTestHandler.shouldIgnoreInRuntimeOnlyIndexSuite(aClass);
    }
  }

  class EmptyIndexSuite extends TestIndexingModeSupporter.IndexingModeTestHandler {
    public EmptyIndexSuite() {
      super("Empty index", "Empty index ", DUMB_EMPTY_INDEX);
    }

    @Override
    public boolean shouldIgnore(@NotNull AnnotatedElement aClass) {
      return IndexingModeTestHandler.shouldIgnoreInEmptyIndexSuite(aClass);
    }
  }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elastictranscoder.model;
import java.io.Serializable;
/**
* <p>
* The captions to be created, if any.
* </p>
*/
public class Captions implements Serializable, Cloneable {
/**
* A policy that determines how Elastic Transcoder handles the existence
* of multiple captions. <ul> <li><p><b>MergeOverride:</b> Elastic
* Transcoder transcodes both embedded and sidecar captions into outputs.
* If captions for a language are embedded in the input file and also
* appear in a sidecar file, Elastic Transcoder uses the sidecar captions
* and ignores the embedded captions for that language.</li>
* <li><p><b>MergeRetain:</b> Elastic Transcoder transcodes both embedded
* and sidecar captions into outputs. If captions for a language are
* embedded in the input file and also appear in a sidecar file, Elastic
* Transcoder uses the embedded captions and ignores the sidecar captions
* for that language. If <code>CaptionSources</code> is empty, Elastic
* Transcoder omits all sidecar captions from the output files.</li>
* <li><p><b>Override:</b> Elastic Transcoder transcodes only the sidecar
* captions that you specify in <code>CaptionSources</code>.</li> </ul>
* <p><code>MergePolicy</code> cannot be null.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>(^MergeOverride$)|(^MergeRetain$)|(^Override$)<br/>
*/
private String mergePolicy;
/**
* Source files for the input sidecar captions used during the
* transcoding process. To omit all sidecar captions, leave
* <code>CaptionSources</code> blank.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 20<br/>
*/
private com.amazonaws.internal.ListWithAutoConstructFlag<CaptionSource> captionSources;
/**
* The array of file formats for the output captions. If you leave this
* value blank, Elastic Transcoder returns an error.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 4<br/>
*/
private com.amazonaws.internal.ListWithAutoConstructFlag<CaptionFormat> captionFormats;
/**
* A policy that determines how Elastic Transcoder handles the existence
* of multiple captions. <ul> <li><p><b>MergeOverride:</b> Elastic
* Transcoder transcodes both embedded and sidecar captions into outputs.
* If captions for a language are embedded in the input file and also
* appear in a sidecar file, Elastic Transcoder uses the sidecar captions
* and ignores the embedded captions for that language.</li>
* <li><p><b>MergeRetain:</b> Elastic Transcoder transcodes both embedded
* and sidecar captions into outputs. If captions for a language are
* embedded in the input file and also appear in a sidecar file, Elastic
* Transcoder uses the embedded captions and ignores the sidecar captions
* for that language. If <code>CaptionSources</code> is empty, Elastic
* Transcoder omits all sidecar captions from the output files.</li>
* <li><p><b>Override:</b> Elastic Transcoder transcodes only the sidecar
* captions that you specify in <code>CaptionSources</code>.</li> </ul>
* <p><code>MergePolicy</code> cannot be null.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>(^MergeOverride$)|(^MergeRetain$)|(^Override$)<br/>
*
* @return A policy that determines how Elastic Transcoder handles the existence
* of multiple captions. <ul> <li><p><b>MergeOverride:</b> Elastic
* Transcoder transcodes both embedded and sidecar captions into outputs.
* If captions for a language are embedded in the input file and also
* appear in a sidecar file, Elastic Transcoder uses the sidecar captions
* and ignores the embedded captions for that language.</li>
* <li><p><b>MergeRetain:</b> Elastic Transcoder transcodes both embedded
* and sidecar captions into outputs. If captions for a language are
* embedded in the input file and also appear in a sidecar file, Elastic
* Transcoder uses the embedded captions and ignores the sidecar captions
* for that language. If <code>CaptionSources</code> is empty, Elastic
* Transcoder omits all sidecar captions from the output files.</li>
* <li><p><b>Override:</b> Elastic Transcoder transcodes only the sidecar
* captions that you specify in <code>CaptionSources</code>.</li> </ul>
* <p><code>MergePolicy</code> cannot be null.
*/
public String getMergePolicy() {
    // Plain accessor; allowed values are documented on the mergePolicy field above.
    return mergePolicy;
}
/**
* A policy that determines how Elastic Transcoder handles the existence
* of multiple captions. <ul> <li><p><b>MergeOverride:</b> Elastic
* Transcoder transcodes both embedded and sidecar captions into outputs.
* If captions for a language are embedded in the input file and also
* appear in a sidecar file, Elastic Transcoder uses the sidecar captions
* and ignores the embedded captions for that language.</li>
* <li><p><b>MergeRetain:</b> Elastic Transcoder transcodes both embedded
* and sidecar captions into outputs. If captions for a language are
* embedded in the input file and also appear in a sidecar file, Elastic
* Transcoder uses the embedded captions and ignores the sidecar captions
* for that language. If <code>CaptionSources</code> is empty, Elastic
* Transcoder omits all sidecar captions from the output files.</li>
* <li><p><b>Override:</b> Elastic Transcoder transcodes only the sidecar
* captions that you specify in <code>CaptionSources</code>.</li> </ul>
* <p><code>MergePolicy</code> cannot be null.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>(^MergeOverride$)|(^MergeRetain$)|(^Override$)<br/>
*
* @param mergePolicy A policy that determines how Elastic Transcoder handles the existence
* of multiple captions. <ul> <li><p><b>MergeOverride:</b> Elastic
* Transcoder transcodes both embedded and sidecar captions into outputs.
* If captions for a language are embedded in the input file and also
* appear in a sidecar file, Elastic Transcoder uses the sidecar captions
* and ignores the embedded captions for that language.</li>
* <li><p><b>MergeRetain:</b> Elastic Transcoder transcodes both embedded
* and sidecar captions into outputs. If captions for a language are
* embedded in the input file and also appear in a sidecar file, Elastic
* Transcoder uses the embedded captions and ignores the sidecar captions
* for that language. If <code>CaptionSources</code> is empty, Elastic
* Transcoder omits all sidecar captions from the output files.</li>
* <li><p><b>Override:</b> Elastic Transcoder transcodes only the sidecar
* captions that you specify in <code>CaptionSources</code>.</li> </ul>
* <p><code>MergePolicy</code> cannot be null.
*/
    public void setMergePolicy(String mergePolicy) {
        // Plain assignment; no local validation of the documented pattern
        // ((^MergeOverride$)|(^MergeRetain$)|(^Override$)) is performed here.
        this.mergePolicy = mergePolicy;
    }
/**
* A policy that determines how Elastic Transcoder handles the existence
* of multiple captions. <ul> <li><p><b>MergeOverride:</b> Elastic
* Transcoder transcodes both embedded and sidecar captions into outputs.
* If captions for a language are embedded in the input file and also
* appear in a sidecar file, Elastic Transcoder uses the sidecar captions
* and ignores the embedded captions for that language.</li>
* <li><p><b>MergeRetain:</b> Elastic Transcoder transcodes both embedded
* and sidecar captions into outputs. If captions for a language are
* embedded in the input file and also appear in a sidecar file, Elastic
* Transcoder uses the embedded captions and ignores the sidecar captions
* for that language. If <code>CaptionSources</code> is empty, Elastic
* Transcoder omits all sidecar captions from the output files.</li>
* <li><p><b>Override:</b> Elastic Transcoder transcodes only the sidecar
* captions that you specify in <code>CaptionSources</code>.</li> </ul>
* <p><code>MergePolicy</code> cannot be null.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>(^MergeOverride$)|(^MergeRetain$)|(^Override$)<br/>
*
* @param mergePolicy A policy that determines how Elastic Transcoder handles the existence
* of multiple captions. <ul> <li><p><b>MergeOverride:</b> Elastic
* Transcoder transcodes both embedded and sidecar captions into outputs.
* If captions for a language are embedded in the input file and also
* appear in a sidecar file, Elastic Transcoder uses the sidecar captions
* and ignores the embedded captions for that language.</li>
* <li><p><b>MergeRetain:</b> Elastic Transcoder transcodes both embedded
* and sidecar captions into outputs. If captions for a language are
* embedded in the input file and also appear in a sidecar file, Elastic
* Transcoder uses the embedded captions and ignores the sidecar captions
* for that language. If <code>CaptionSources</code> is empty, Elastic
* Transcoder omits all sidecar captions from the output files.</li>
* <li><p><b>Override:</b> Elastic Transcoder transcodes only the sidecar
* captions that you specify in <code>CaptionSources</code>.</li> </ul>
* <p><code>MergePolicy</code> cannot be null.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public Captions withMergePolicy(String mergePolicy) {
this.mergePolicy = mergePolicy;
return this;
}
/**
* Source files for the input sidecar captions used during the
* transcoding process. To omit all sidecar captions, leave
* <code>CaptionSources</code> blank.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 20<br/>
*
* @return Source files for the input sidecar captions used during the
* transcoding process. To omit all sidecar captions, leave
* <code>CaptionSources</code> blank.
*/
public java.util.List<CaptionSource> getCaptionSources() {
if (captionSources == null) {
captionSources = new com.amazonaws.internal.ListWithAutoConstructFlag<CaptionSource>();
captionSources.setAutoConstruct(true);
}
return captionSources;
}
/**
* Source files for the input sidecar captions used during the
* transcoding process. To omit all sidecar captions, leave
* <code>CaptionSources</code> blank.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 20<br/>
*
* @param captionSources Source files for the input sidecar captions used during the
* transcoding process. To omit all sidecar captions, leave
* <code>CaptionSources</code> blank.
*/
public void setCaptionSources(java.util.Collection<CaptionSource> captionSources) {
if (captionSources == null) {
this.captionSources = null;
return;
}
com.amazonaws.internal.ListWithAutoConstructFlag<CaptionSource> captionSourcesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<CaptionSource>(captionSources.size());
captionSourcesCopy.addAll(captionSources);
this.captionSources = captionSourcesCopy;
}
/**
* Source files for the input sidecar captions used during the
* transcoding process. To omit all sidecar captions, leave
* <code>CaptionSources</code> blank.
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setCaptionSources(java.util.Collection)} or {@link
* #withCaptionSources(java.util.Collection)} if you want to override the
* existing values.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 20<br/>
*
* @param captionSources Source files for the input sidecar captions used during the
* transcoding process. To omit all sidecar captions, leave
* <code>CaptionSources</code> blank.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public Captions withCaptionSources(CaptionSource... captionSources) {
if (getCaptionSources() == null) setCaptionSources(new java.util.ArrayList<CaptionSource>(captionSources.length));
for (CaptionSource value : captionSources) {
getCaptionSources().add(value);
}
return this;
}
/**
* Source files for the input sidecar captions used during the
* transcoding process. To omit all sidecar captions, leave
* <code>CaptionSources</code> blank.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 20<br/>
*
* @param captionSources Source files for the input sidecar captions used during the
* transcoding process. To omit all sidecar captions, leave
* <code>CaptionSources</code> blank.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public Captions withCaptionSources(java.util.Collection<CaptionSource> captionSources) {
if (captionSources == null) {
this.captionSources = null;
} else {
com.amazonaws.internal.ListWithAutoConstructFlag<CaptionSource> captionSourcesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<CaptionSource>(captionSources.size());
captionSourcesCopy.addAll(captionSources);
this.captionSources = captionSourcesCopy;
}
return this;
}
/**
* The array of file formats for the output captions. If you leave this
* value blank, Elastic Transcoder returns an error.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 4<br/>
*
* @return The array of file formats for the output captions. If you leave this
* value blank, Elastic Transcoder returns an error.
*/
public java.util.List<CaptionFormat> getCaptionFormats() {
if (captionFormats == null) {
captionFormats = new com.amazonaws.internal.ListWithAutoConstructFlag<CaptionFormat>();
captionFormats.setAutoConstruct(true);
}
return captionFormats;
}
/**
* The array of file formats for the output captions. If you leave this
* value blank, Elastic Transcoder returns an error.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 4<br/>
*
* @param captionFormats The array of file formats for the output captions. If you leave this
* value blank, Elastic Transcoder returns an error.
*/
public void setCaptionFormats(java.util.Collection<CaptionFormat> captionFormats) {
if (captionFormats == null) {
this.captionFormats = null;
return;
}
com.amazonaws.internal.ListWithAutoConstructFlag<CaptionFormat> captionFormatsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<CaptionFormat>(captionFormats.size());
captionFormatsCopy.addAll(captionFormats);
this.captionFormats = captionFormatsCopy;
}
/**
* The array of file formats for the output captions. If you leave this
* value blank, Elastic Transcoder returns an error.
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setCaptionFormats(java.util.Collection)} or {@link
* #withCaptionFormats(java.util.Collection)} if you want to override the
* existing values.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 4<br/>
*
* @param captionFormats The array of file formats for the output captions. If you leave this
* value blank, Elastic Transcoder returns an error.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public Captions withCaptionFormats(CaptionFormat... captionFormats) {
if (getCaptionFormats() == null) setCaptionFormats(new java.util.ArrayList<CaptionFormat>(captionFormats.length));
for (CaptionFormat value : captionFormats) {
getCaptionFormats().add(value);
}
return this;
}
/**
* The array of file formats for the output captions. If you leave this
* value blank, Elastic Transcoder returns an error.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 4<br/>
*
* @param captionFormats The array of file formats for the output captions. If you leave this
* value blank, Elastic Transcoder returns an error.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public Captions withCaptionFormats(java.util.Collection<CaptionFormat> captionFormats) {
if (captionFormats == null) {
this.captionFormats = null;
} else {
com.amazonaws.internal.ListWithAutoConstructFlag<CaptionFormat> captionFormatsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<CaptionFormat>(captionFormats.size());
captionFormatsCopy.addAll(captionFormats);
this.captionFormats = captionFormatsCopy;
}
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getMergePolicy() != null) sb.append("MergePolicy: " + getMergePolicy() + ",");
if (getCaptionSources() != null) sb.append("CaptionSources: " + getCaptionSources() + ",");
if (getCaptionFormats() != null) sb.append("CaptionFormats: " + getCaptionFormats() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getMergePolicy() == null) ? 0 : getMergePolicy().hashCode());
hashCode = prime * hashCode + ((getCaptionSources() == null) ? 0 : getCaptionSources().hashCode());
hashCode = prime * hashCode + ((getCaptionFormats() == null) ? 0 : getCaptionFormats().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof Captions == false) return false;
Captions other = (Captions)obj;
if (other.getMergePolicy() == null ^ this.getMergePolicy() == null) return false;
if (other.getMergePolicy() != null && other.getMergePolicy().equals(this.getMergePolicy()) == false) return false;
if (other.getCaptionSources() == null ^ this.getCaptionSources() == null) return false;
if (other.getCaptionSources() != null && other.getCaptionSources().equals(this.getCaptionSources()) == false) return false;
if (other.getCaptionFormats() == null ^ this.getCaptionFormats() == null) return false;
if (other.getCaptionFormats() != null && other.getCaptionFormats().equals(this.getCaptionFormats()) == false) return false;
return true;
}
@Override
public Captions clone() {
try {
return (Captions) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!",
e);
}
}
}
| |
package com.intellij.openapi.vcs.changes.committed;
import com.intellij.ide.CopyProvider;
import com.intellij.ide.DefaultTreeExpander;
import com.intellij.ide.TreeExpander;
import com.intellij.ide.actions.ContextHelpAction;
import com.intellij.ide.ui.SplitterProportionsDataImpl;
import com.intellij.ide.util.treeView.TreeState;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.keymap.Keymap;
import com.intellij.openapi.keymap.KeymapManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.ui.SplitterProportionsData;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vcs.VcsDataKeys;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ChangesUtil;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.changes.issueLinks.TreeLinkMouseListener;
import com.intellij.openapi.vcs.versionBrowser.CommittedChangeList;
import com.intellij.pom.Navigatable;
import com.intellij.ui.*;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.ui.treeStructure.actions.CollapseAllAction;
import com.intellij.ui.treeStructure.actions.ExpandAllAction;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.messages.Topic;
import com.intellij.util.ui.StatusText;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.util.*;
import java.util.List;
/**
* @author yole
*/
public class CommittedChangesTreeBrowser extends JPanel implements TypeSafeDataProvider, Disposable, DecoratorManager {
  // Border applied to the details viewer so it joins the tree panel visually.
  private static final Border RIGHT_BORDER = IdeBorderFactory.createBorder(SideBorder.TOP | SideBorder.LEFT);
  private final Project myProject;
  // Tree of committed change lists, grouped according to myGroupingStrategy.
  private final Tree myChangesTree;
  // Right-hand browser showing the changes contained in the selected list(s).
  private final RepositoryChangesBrowser myDetailsView;
  // All loaded change lists (unfiltered); filtering is applied in updateModel().
  private List<CommittedChangeList> myChangeLists;
  // Cached selection so the details view is refreshed only on a real change.
  private List<CommittedChangeList> mySelectedChangeLists;
  private ChangeListGroupingStrategy myGroupingStrategy = new DateChangeListGroupingStrategy();
  // Aggregates all active filtering strategies, keyed by CommittedChangesFilterKey.
  private final CompositeChangeListFilteringStrategy myFilteringStrategy = new CompositeChangeListFilteringStrategy();
  private final JPanel myLeftPanel;
  private final FilterChangeListener myFilterChangeListener = new FilterChangeListener();
  private final SplitterProportionsData mySplitterProportionsData = new SplitterProportionsDataImpl();
  private final CopyProvider myCopyProvider;
  private final TreeExpander myTreeExpander;
  private String myHelpId;
  // Published on the project message bus when items are (re)loaded; see setItems()/append().
  public static final Topic<CommittedChangesReloadListener> ITEMS_RELOADED = new Topic<CommittedChangesReloadListener>("ITEMS_RELOADED", CommittedChangesReloadListener.class);
  // Decorators consulted by the cell renderer; managed via install()/remove().
  private final List<CommittedChangeListDecorator> myDecorators;
  @NonNls public static final String ourHelpId = "reference.changesToolWindow.incoming";
  // Hosts filter UI components next to the tree; populated by setFilteringStrategy().
  private WiseSplitter myInnerSplitter;
  private final MessageBusConnection myConnection;
  // Tree state captured by reset() and restored by the next append().
  private TreeState myState;
  /**
   * Builds the browser UI (change-list tree on the left, details browser on
   * the right), wires selection and link listeners, registers shortcuts, and
   * subscribes to ITEMS_RELOADED on the project message bus.
   */
  public CommittedChangesTreeBrowser(final Project project, final List<CommittedChangeList> changeLists) {
    super(new BorderLayout());
    myProject = project;
    myDecorators = new LinkedList<CommittedChangeListDecorator>();
    myChangeLists = changeLists;
    myChangesTree = new ChangesBrowserTree();
    myChangesTree.setRootVisible(false);
    myChangesTree.setShowsRootHandles(true);
    myChangesTree.setCellRenderer(new CommittedChangeListRenderer(project, myDecorators));
    TreeUtil.expandAll(myChangesTree);
    // NOTE(review): expandable-items tooltips are disabled here, presumably to
    // avoid interference with the custom renderer/link listener — confirm.
    myChangesTree.getExpandableItemsHandler().setEnabled(false);
    myDetailsView = new RepositoryChangesBrowser(project, Collections.<CommittedChangeList>emptyList());
    myDetailsView.getViewer().setScrollPaneBorder(RIGHT_BORDER);
    // Keep the details view in sync with the tree selection.
    myChangesTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener() {
      public void valueChanged(TreeSelectionEvent e) {
        updateBySelectionChange();
      }
    });
    final TreeLinkMouseListener linkMouseListener = new TreeLinkMouseListener(new CommittedChangeListRenderer(project, myDecorators));
    linkMouseListener.installOn(myChangesTree);
    myLeftPanel = new JPanel(new BorderLayout());
    initSplitters();
    updateBySelectionChange();
    // Reuse the active keymap's Quick Documentation shortcut for the
    // "CommittedChanges.Details" action on this component.
    Keymap keymap = KeymapManager.getInstance().getActiveKeymap();
    CustomShortcutSet quickdocShortcuts = new CustomShortcutSet(keymap.getShortcuts(IdeActions.ACTION_QUICK_JAVADOC));
    EmptyAction.registerWithShortcutSet("CommittedChanges.Details", quickdocShortcuts, this);
    myCopyProvider = new TreeCopyProvider(myChangesTree);
    myTreeExpander = new DefaultTreeExpander(myChangesTree);
    myDetailsView.addToolbarAction(ActionManager.getInstance().getAction("Vcs.ShowTabbedFileHistory"));
    myHelpId = ourHelpId;
    // Make the diff action reachable while focus is in the tree as well.
    myDetailsView.getDiffAction().registerCustomShortcutSet(myDetailsView.getDiffAction().getShortcutSet(), myChangesTree);
    myConnection = myProject.getMessageBus().connect();
    myConnection.subscribe(ITEMS_RELOADED, new CommittedChangesReloadListener() {
      public void itemsReloaded() {
      }
      public void emptyRefresh() {
        updateGrouping();
      }
    });
  }
  /**
   * Lays out the nested splitters: an inner one hosting filter UIs next to
   * the tree, and the main one separating the left panel from the details
   * view; then restores persisted splitter proportions.
   */
  private void initSplitters() {
    final Splitter filterSplitter = new Splitter(false, 0.5f);
    filterSplitter.setSecondComponent(ScrollPaneFactory.createScrollPane(myChangesTree));
    myLeftPanel.add(filterSplitter, BorderLayout.CENTER);
    final Splitter mainSplitter = new Splitter(false, 0.7f);
    mainSplitter.setFirstComponent(myLeftPanel);
    mainSplitter.setSecondComponent(myDetailsView);
    add(mainSplitter, BorderLayout.CENTER);
    myInnerSplitter = new WiseSplitter(new Runnable() {
      public void run() {
        // Re-layout and refilter whenever the filter area changes.
        filterSplitter.doLayout();
        updateModel();
      }
    }, filterSplitter);
    Disposer.register(this, myInnerSplitter);
    mySplitterProportionsData.externalizeFromDimensionService("CommittedChanges.SplitterProportions");
    mySplitterProportionsData.restoreSplitterProportions(this);
  }
  /** Registers an additional filtering strategy and listens for its changes. */
  public void addFilter(final ChangeListFilteringStrategy strategy) {
    myFilteringStrategy.addStrategy(strategy.getKey(), strategy);
    strategy.addChangeListener(myFilterChangeListener);
  }
  /** Rebuilds the tree model on the EDT if the grouping strategy reports a change. */
  private void updateGrouping() {
    if (myGroupingStrategy.changedSinceApply()) {
      ApplicationManager.getApplication().invokeLater(new Runnable() {
        public void run() {
          updateModel();
        }
      }, ModalityState.NON_MODAL);
    }
  }
  /**
   * Builds a two-level tree model: group nodes named by the grouping strategy,
   * with change-list leaves underneath. Note: sorts the given list in place
   * using the strategy's comparator.
   */
  private TreeModel buildTreeModel(final List<CommittedChangeList> filteredChangeLists) {
    DefaultMutableTreeNode root = new DefaultMutableTreeNode();
    DefaultTreeModel model = new DefaultTreeModel(root);
    Collections.sort(filteredChangeLists, myGroupingStrategy.getComparator());
    myGroupingStrategy.beforeStart();
    DefaultMutableTreeNode lastGroupNode = null;
    String lastGroupName = null;
    for(CommittedChangeList list: filteredChangeLists) {
      String groupName = myGroupingStrategy.getGroupName(list);
      if (!Comparing.equal(groupName, lastGroupName)) {
        // The list is sorted, so a new group name always starts a new group node.
        lastGroupName = groupName;
        lastGroupNode = new DefaultMutableTreeNode(lastGroupName);
        root.add(lastGroupNode);
      }
      assert lastGroupNode != null;
      lastGroupNode.add(new DefaultMutableTreeNode(list));
    }
    return model;
  }
  /** Overrides the help topic shown by the context-help toolbar action. */
  public void setHelpId(final String helpId) {
    myHelpId = helpId;
  }
  /** Exposes the tree's "empty" placeholder text for customization. */
  public StatusText getEmptyText() {
    return myChangesTree.getEmptyText();
  }
  /**
   * Adds a toolbar above the tree and enlarges the details-view header so the
   * two header rows line up in height.
   */
  public void setToolBar(JComponent toolBar) {
    myLeftPanel.add(toolBar, BorderLayout.NORTH);
    Dimension prefSize = myDetailsView.getHeaderPanel().getPreferredSize();
    if (prefSize.height < toolBar.getPreferredSize().height) {
      prefSize.height = toolBar.getPreferredSize().height;
      myDetailsView.getHeaderPanel().setPreferredSize(prefSize);
    }
  }
  /** Disconnects from the message bus, persists splitter proportions, disposes the details view. */
  public void dispose() {
    myConnection.disconnect();
    mySplitterProportionsData.saveSplitterProportions(this);
    mySplitterProportionsData.externalizeToDimensionService("CommittedChanges.SplitterProportions");
    myDetailsView.dispose();
  }
  /**
   * Replaces the displayed change lists, re-bases the filters, notifies
   * ITEMS_RELOADED subscribers, and rebuilds the tree.
   */
  public void setItems(@NotNull List<CommittedChangeList> items, final CommittedChangesBrowserUseCase useCase) {
    myDetailsView.setUseCase(useCase);
    myChangeLists = items;
    myFilteringStrategy.setFilterBase(items);
    myProject.getMessageBus().syncPublisher(ITEMS_RELOADED).itemsReloaded();
    updateModel();
  }
  /** Rebuilds the tree from the filtered change lists, preserving the selection paths. */
  private void updateModel() {
    final List<CommittedChangeList> filteredChangeLists = myFilteringStrategy.filterChangeLists(myChangeLists);
    final TreePath[] paths = myChangesTree.getSelectionPaths();
    myChangesTree.setModel(buildTreeModel(filteredChangeLists));
    TreeUtil.expandAll(myChangesTree);
    myChangesTree.setSelectionPaths(paths);
  }
  /** Switches the grouping strategy and rebuilds the tree. */
  public void setGroupingStrategy(ChangeListGroupingStrategy strategy) {
    myGroupingStrategy = strategy;
    updateModel();
  }
  /**
   * Collects the CommittedChangeList nodes from the current tree selection
   * and, if the selection actually changed, pushes their combined changes
   * into the details view.
   */
  private void updateBySelectionChange() {
    List<CommittedChangeList> selection = new ArrayList<CommittedChangeList>();
    final TreePath[] selectionPaths = myChangesTree.getSelectionPaths();
    if (selectionPaths != null) {
      for(TreePath path: selectionPaths) {
        DefaultMutableTreeNode node = (DefaultMutableTreeNode) path.getLastPathComponent();
        if (node.getUserObject() instanceof CommittedChangeList) {
          selection.add((CommittedChangeList) node.getUserObject());
        }
      }
    }
    if (!selection.equals(mySelectedChangeLists)) {
      mySelectedChangeLists = selection;
      myDetailsView.setChangesToDisplay(collectChanges(mySelectedChangeLists, false));
    }
  }
  /**
   * Flattens the changes of the given change lists (oldest commit first) into
   * a single list, collapsing successive changes to the same file via
   * addOrReplaceChange(). Note: sorts the given list in place by commit date.
   */
  @NotNull
  public static List<Change> collectChanges(final List<? extends CommittedChangeList> selectedChangeLists, final boolean withMovedTrees) {
    List<Change> result = new ArrayList<Change>();
    Collections.sort(selectedChangeLists, new Comparator<CommittedChangeList>() {
      public int compare(final CommittedChangeList o1, final CommittedChangeList o2) {
        return o1.getCommitDate().compareTo(o2.getCommitDate());
      }
    });
    for(CommittedChangeList cl: selectedChangeLists) {
      final Collection<Change> changes = withMovedTrees ? cl.getChangesWithMovedTrees() : cl.getChanges();
      for(Change c: changes) {
        addOrReplaceChange(result, c);
      }
    }
    return result;
  }
  /**
   * Zips changes by removing duplicates (changes in the same file) and compounding the diff.
   * <b>NB:</b> changes must be given in the time-ascending order, i.e the first change in the list should be the oldest one.
   */
  @NotNull
  public static List<Change> zipChanges(@NotNull List<Change> changes) {
    final List<Change> result = new ArrayList<Change>();
    for (Change change : changes) {
      addOrReplaceChange(result, change);
    }
    return result;
  }
  /**
   * Adds change c to the accumulated list, or — if an earlier change's "after"
   * file matches c's "before" file — merges the two into a single change
   * spanning both revisions (dropping the pair entirely when it nets out to
   * no-op: created then deleted).
   */
  private static void addOrReplaceChange(final List<Change> changes, final Change c) {
    final ContentRevision beforeRev = c.getBeforeRevision();
    // todo!!! further improvements needed
    if (beforeRev != null) {
      final String beforeName = beforeRev.getFile().getName();
      final String beforeAbsolutePath = beforeRev.getFile().getIOFile().getAbsolutePath();
      for(Change oldChange: changes) {
        ContentRevision rev = oldChange.getAfterRevision();
        // first compare name, which is many times faster - to remove 99% not matching
        if (rev != null && (rev.getFile().getName().equals(beforeName)) && rev.getFile().getIOFile().getAbsolutePath().equals(beforeAbsolutePath)) {
          changes.remove(oldChange);
          if (oldChange.getBeforeRevision() != null || c.getAfterRevision() != null) {
            changes.add(new Change(oldChange.getBeforeRevision(), c.getAfterRevision()));
          }
          return;
        }
      }
    }
    changes.add(c);
  }
  /** Returns the change lists currently selected in the tree. */
  private List<CommittedChangeList> getSelectedChangeLists() {
    return TreeUtil.collectSelectedObjectsOfType(myChangesTree, CommittedChangeList.class);
  }
  /** Installs a context menu on the tree: the given group, auxiliary actions, and Copy. */
  public void setTableContextMenu(final ActionGroup group, final List<AnAction> auxiliaryActions) {
    DefaultActionGroup menuGroup = new DefaultActionGroup();
    menuGroup.add(group);
    for (AnAction action : auxiliaryActions) {
      menuGroup.add(action);
    }
    menuGroup.add(ActionManager.getInstance().getAction(IdeActions.ACTION_COPY));
    PopupHandler.installPopupHandler(myChangesTree, menuGroup, ActionPlaces.UNKNOWN, ActionManager.getInstance());
  }
  /** Unregisters the filtering strategy for the key and removes its filter UI. */
  public void removeFilteringStrategy(final CommittedChangesFilterKey key) {
    final ChangeListFilteringStrategy strategy = myFilteringStrategy.removeStrategy(key);
    if (strategy != null) {
      strategy.removeChangeListener(myFilterChangeListener);
    }
    myInnerSplitter.remove(key);
  }
  /**
   * Registers a filtering strategy and embeds its filter UI (if any) into the
   * inner splitter. Returns false when the splitter cannot take another filter.
   */
  public boolean setFilteringStrategy(final ChangeListFilteringStrategy filteringStrategy) {
    if (myInnerSplitter.canAdd()) {
      filteringStrategy.addChangeListener(myFilterChangeListener);
      final CommittedChangesFilterKey key = filteringStrategy.getKey();
      myFilteringStrategy.addStrategy(key, filteringStrategy);
      myFilteringStrategy.setFilterBase(myChangeLists);
      final JComponent filterUI = filteringStrategy.getFilterUI();
      if (filterUI != null) {
        myInnerSplitter.add(key, filterUI);
      }
      return true;
    }
    return false;
  }
  /**
   * Assembles the toolbar: lead group, filter/grouping selectors,
   * expand/collapse (with standard shortcuts), Copy, context help, then the
   * optional tail group and extra actions.
   */
  public ActionToolbar createGroupFilterToolbar(final Project project, final ActionGroup leadGroup, @Nullable final ActionGroup tailGroup,
                                                final List<AnAction> extra) {
    DefaultActionGroup toolbarGroup = new DefaultActionGroup();
    toolbarGroup.add(leadGroup);
    toolbarGroup.addSeparator();
    toolbarGroup.add(new SelectFilteringAction(project, this));
    toolbarGroup.add(new SelectGroupingAction(project, this));
    final ExpandAllAction expandAllAction = new ExpandAllAction(myChangesTree);
    final CollapseAllAction collapseAllAction = new CollapseAllAction(myChangesTree);
    expandAllAction.registerCustomShortcutSet(
      new CustomShortcutSet(KeymapManager.getInstance().getActiveKeymap().getShortcuts(IdeActions.ACTION_EXPAND_ALL)),
      myChangesTree);
    collapseAllAction.registerCustomShortcutSet(
      new CustomShortcutSet(KeymapManager.getInstance().getActiveKeymap().getShortcuts(IdeActions.ACTION_COLLAPSE_ALL)),
      myChangesTree);
    toolbarGroup.add(expandAllAction);
    toolbarGroup.add(collapseAllAction);
    toolbarGroup.add(ActionManager.getInstance().getAction(IdeActions.ACTION_COPY));
    toolbarGroup.add(new ContextHelpAction(myHelpId));
    if (tailGroup != null) {
      toolbarGroup.add(tailGroup);
    }
    for (AnAction anAction : extra) {
      toolbarGroup.add(anAction);
    }
    return ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, toolbarGroup, true);
  }
  /** Supplies context data (changes, change lists, navigatables, help id, …) for the selection. */
  public void calcData(DataKey key, DataSink sink) {
    if (key.equals(VcsDataKeys.CHANGES)) {
      final Collection<Change> changes = collectChanges(getSelectedChangeLists(), false);
      sink.put(VcsDataKeys.CHANGES, changes.toArray(new Change[changes.size()]));
    } else if (key.equals(VcsDataKeys.HAVE_SELECTED_CHANGES)) {
      final int count = myChangesTree.getSelectionCount();
      sink.put(VcsDataKeys.HAVE_SELECTED_CHANGES, count > 0 ? Boolean.TRUE : Boolean.FALSE);
    }
    else if (key.equals(VcsDataKeys.CHANGES_WITH_MOVED_CHILDREN)) {
      final Collection<Change> changes = collectChanges(getSelectedChangeLists(), true);
      sink.put(VcsDataKeys.CHANGES_WITH_MOVED_CHILDREN, changes.toArray(new Change[changes.size()]));
    }
    else if (key.equals(VcsDataKeys.CHANGE_LISTS)) {
      final List<CommittedChangeList> lists = getSelectedChangeLists();
      if (!lists.isEmpty()) {
        sink.put(VcsDataKeys.CHANGE_LISTS, lists.toArray(new CommittedChangeList[lists.size()]));
      }
    }
    else if (key.equals(CommonDataKeys.NAVIGATABLE_ARRAY)) {
      final Collection<Change> changes = collectChanges(getSelectedChangeLists(), false);
      Navigatable[] result = ChangesUtil.getNavigatableArray(myProject, ChangesUtil.getFilesFromChanges(changes));
      sink.put(CommonDataKeys.NAVIGATABLE_ARRAY, result);
    }
    else if (key.equals(PlatformDataKeys.HELP_ID)) {
      sink.put(PlatformDataKeys.HELP_ID, myHelpId);
    } else if (VcsDataKeys.SELECTED_CHANGES_IN_DETAILS.equals(key)) {
      final List<Change> selectedChanges = myDetailsView.getSelectedChanges();
      sink.put(VcsDataKeys.SELECTED_CHANGES_IN_DETAILS, selectedChanges.toArray(new Change[selectedChanges.size()]));
    }
  }
  /** Expander used by the expand-all/collapse-all platform actions. */
  public TreeExpander getTreeExpander() {
    return myTreeExpander;
  }
  /** Revalidates and repaints the tree, e.g. after decorators change. */
  public void repaintTree() {
    myChangesTree.revalidate();
    myChangesTree.repaint();
  }
  /** Adds a decorator and repaints so it takes effect immediately. */
  public void install(final CommittedChangeListDecorator decorator) {
    myDecorators.add(decorator);
    repaintTree();
  }
  /** Removes a decorator and repaints. */
  public void remove(final CommittedChangeListDecorator decorator) {
    myDecorators.remove(decorator);
    repaintTree();
  }
  /** Reports all loaded change lists to the listener on a pooled (background) thread. */
  public void reportLoadedLists(final CommittedChangeListsListener listener) {
    ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
      public void run() {
        listener.onBeforeStartReport();
        for (CommittedChangeList list : myChangeLists) {
          listener.report(list);
        }
        listener.onAfterEndReport();
      }
    });
  }
  // for appendable view
  /** Clears all lists and filters, remembering the tree state for the next append(). */
  public void reset() {
    myChangeLists.clear();
    myFilteringStrategy.resetFilterBase();
    myState = TreeState.createOn(myChangesTree, (DefaultMutableTreeNode)myChangesTree.getModel().getRoot());
    updateModel();
  }
  /**
   * Appends more change lists, rebuilds the tree, and restores the tree state
   * saved by reset() (or the current state when appending incrementally),
   * then notifies ITEMS_RELOADED subscribers.
   */
  public void append(final List<CommittedChangeList> list) {
    final TreeState state = myChangeLists.isEmpty() && myState != null ? myState :
                            TreeState.createOn(myChangesTree, (DefaultMutableTreeNode)myChangesTree.getModel().getRoot());
    state.setScrollToSelection(false);
    myChangeLists.addAll(list);
    myFilteringStrategy.appendFilterBase(list);
    myChangesTree.setModel(buildTreeModel(myFilteringStrategy.filterChangeLists(myChangeLists)));
    state.applyTo(myChangesTree, (DefaultMutableTreeNode)myChangesTree.getModel().getRoot());
    TreeUtil.expandAll(myChangesTree);
    myProject.getMessageBus().syncPublisher(ITEMS_RELOADED).itemsReloaded();
  }
  /** Runnable that opens the details popup for a single change list. */
  public static class MoreLauncher implements Runnable {
    private final Project myProject;
    private final CommittedChangeList myList;
    MoreLauncher(final Project project, final CommittedChangeList list) {
      myProject = project;
      myList = list;
    }
    public void run() {
      ChangeListDetailsAction.showDetailsPopup(myProject, myList);
    }
  }
  /** Rebuilds the model whenever any filtering strategy changes, marshalling to the EDT. */
  private class FilterChangeListener implements ChangeListener {
    public void stateChanged(ChangeEvent e) {
      if (ApplicationManager.getApplication().isDispatchThread()) {
        updateModel();
      } else {
        ApplicationManager.getApplication().invokeLater(new Runnable() {
          public void run() {
            updateModel();
          }
        });
      }
    }
  }
  /** Tree specialization that exposes copy/expander providers and delegates detail keys. */
  private class ChangesBrowserTree extends Tree implements TypeSafeDataProvider {
    public ChangesBrowserTree() {
      super(buildTreeModel(myFilteringStrategy.filterChangeLists(myChangeLists)));
    }
    @Override
    public boolean getScrollableTracksViewportWidth() {
      return true;
    }
    public void calcData(final DataKey key, final DataSink sink) {
      if (key.equals(PlatformDataKeys.COPY_PROVIDER)) {
        sink.put(PlatformDataKeys.COPY_PROVIDER, myCopyProvider);
      }
      else if (key.equals(PlatformDataKeys.TREE_EXPANDER)) {
        sink.put(PlatformDataKeys.TREE_EXPANDER, myTreeExpander);
      } else {
        // Delegate detail-view-related keys to the details browser.
        final String name = key.getName();
        if (VcsDataKeys.SELECTED_CHANGES.is(name) || VcsDataKeys.CHANGE_LEAD_SELECTION.is(name) ||
            CommittedChangesBrowserUseCase.DATA_KEY.is(name)) {
          final Object data = myDetailsView.getData(name);
          if (data != null) {
            sink.put(key, data);
          }
        }
      }
    }
  }
  /** Listener for ITEMS_RELOADED message-bus events. */
  public interface CommittedChangesReloadListener {
    void itemsReloaded();
    void emptyRefresh();
  }
  /** Toggles the tree's busy indicator; marshalled to the EDT via AbstractCalledLater. */
  public void setLoading(final boolean value) {
    new AbstractCalledLater(myProject, ModalityState.NON_MODAL) {
      public void run() {
        myChangesTree.setPaintBusy(value);
      }
    }.callMe();
  }
}
| |
/* $Id: QueueTracker.java 988245 2010-08-23 18:39:35Z kwright $ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.manifoldcf.crawler.interfaces;
import org.apache.manifoldcf.core.interfaces.*;
import java.io.*;
import java.util.*;
import java.util.regex.*;
/** This class attempts to provide document priorities in order to achieve as much balance as possible between documents having different bins.
* A document's priority assignment takes place at the time the document is added to the queue, and will be recalculated when a job is aborted, or
* when the crawler daemon is started. The document priorities are strictly obeyed when documents are chosen from the queue and handed to
* worker threads; higher-priority documents always have precedence, except due to deliberate priority adjustment specified by the job priority.
*
* The priority values themselves are logarithmic: 0.0 is the highest, and the larger the number, the lower the priority.
*
* The basis for the calculation for each document priority handed out by this module are:
*
* - number of documents having a given bin (tracked)
* - performance of a connection (gathered through statistics)
* - throttling that applies to each document bin
*
*
* The queuing prioritization model hooks into the document lifecycle in the following places:
* (1) When a document is added to the queue (and thus when its priority is handed out)
* (2) When documents that were *supposed* to be added to the queue turned out to already be there and already have an established priority,
* (in which case the priority that was handed out before is returned to the pool for reuse)
* (3) When a document is pulled from the database queue (which sets the current highest priority level that should not be exceeded in step (1))
*
* The assignment prioritization model is largely independent of the queuing prioritization model, and is used to select among documents that have
* been marked "active" as they are handed to worker threads. These events cause information to be logged:
* (1) When a document is handed to a worker thread
* (2) When the worker thread completes the document
*
*/
public class QueueTracker
{
  public static final String _rcsid = "@(#)$Id: QueueTracker.java 988245 2010-08-23 18:39:35Z kwright $";

  /** Factor by which bins are reduced */
  protected final static double binReductionFactor = 1.0;

  /** These are the accumulated performance averages for all connections etc. */
  protected final PerformanceStatistics performanceStatistics = new PerformanceStatistics();

  /** Bin counts for documents that are on the active queue but are not being
  * processed yet. Guarded by synchronizing on the map itself. */
  protected final Map<String,BinCount> queuedBinCounts = new HashMap<String,BinCount>();

  /** Bin counts for documents currently held by active worker threads.
  * Guarded by synchronizing on the map itself. */
  protected final Map<String,BinCount> activeBinCounts = new HashMap<String,BinCount>();

  /** Constructor */
  public QueueTracker()
  {
  }

  /** Add an access record to the queue tracker.  This happens when a document
  * is added to the in-memory queue, and allows us to keep track of that particular event so
  * we can schedule in a way that meets our distribution goals.
  *@param binNames are the set of bins, as returned from the connector in question, for
  * the document that is being queued.  These bins are considered global in nature.
  */
  public void addRecord(String[] binNames)
  {
    for (String binName : binNames)
    {
      synchronized (queuedBinCounts)
      {
        BinCount value = queuedBinCounts.get(binName);
        if (value == null)
        {
          value = new BinCount();
          queuedBinCounts.put(binName,value);
        }
        value.increment();
      }
    }
  }

  /** Note the time required to successfully complete a set of documents.  This allows this module to keep track of
  * the performance characteristics of each individual connection, so distribution across connections can be balanced
  * properly.
  *@param docCount number of documents completed
  *@param connectionName the connection the documents were fetched through
  *@param elapsedTime wall-clock milliseconds spent on the set
  */
  public void noteConnectionPerformance(int docCount, String connectionName, long elapsedTime)
  {
    performanceStatistics.noteDocumentsCompleted(connectionName,docCount,elapsedTime);
  }

  /** Obtain the current performance statistics object */
  public PerformanceStatistics getCurrentStatistics()
  {
    return performanceStatistics;
  }

  /** Note that we are beginning processing for a document with a particular set of bins.
  * This method is called when a worker thread starts work on a set of documents.
  * Effectively the document moves from "queued" to "active", so both bin-count
  * maps are adjusted.
  */
  public void beginProcessing(String[] binNames)
  {
    for (String binName : binNames)
    {
      // Decrement the queued bin count for this bin: the document is leaving the
      // queued state. (The comments on these two steps were previously swapped.)
      synchronized (queuedBinCounts)
      {
        BinCount value = queuedBinCounts.get(binName);
        if (value != null)
        {
          if (value.decrement())
            queuedBinCounts.remove(binName);
        }
      }
      // Increment the active bin count for this bin: the document is now being processed.
      synchronized (activeBinCounts)
      {
        BinCount value = activeBinCounts.get(binName);
        if (value == null)
        {
          value = new BinCount();
          activeBinCounts.put(binName,value);
        }
        value.increment();
      }
    }
  }

  /** Note that we have completed processing of a document with a given set of bins.
  * This method gets called when a Worker Thread has finished with a document;
  * the corresponding active bin counts are decremented (and dropped at zero).
  */
  public void endProcessing(String[] binNames)
  {
    for (String binName : binNames)
    {
      synchronized (activeBinCounts)
      {
        BinCount value = activeBinCounts.get(binName);
        if (value != null)
        {
          if (value.decrement())
            activeBinCounts.remove(binName);
        }
      }
    }
  }

  /** Calculate an assignment rating for a set of bins based on what's currently in use.
  * This rating is used to help determine which documents returned from a queueing query actually get made "active",
  * and which ones are skipped for the moment.
  *
  * The rating returned for each bin is 1 divided by one plus the active thread count for that bin; the higher the
  * rating, the better.  Per-bin ratings are combined by multiplication and normalized by taking the nth root
  * (n = number of bins).  All arithmetic is carried out in log space, so the returned value is the log of the rating.
  *
  * NOTE(review): the connection parameter is unused here even though the
  * original documentation claimed it reduced the rating via fetch-rate
  * throttling — confirm whether that logic was intentionally removed.
  *@param binNames bins of the candidate document
  *@param connection the repository connection (currently unused)
  *@return the log of the normalized rating; 0.0 for an empty bin set
  */
  public double calculateAssignmentRating(String[] binNames, IRepositoryConnection connection)
  {
    // BUG FIX: with zero bins the original divided 0.0 by 0 and returned NaN;
    // return the neutral log-rating instead.
    if (binNames.length == 0)
      return 0.0;
    // Work in log space
    double ratingLog = 0.0;
    for (String binName : binNames)
    {
      int count = 0;
      synchronized (activeBinCounts)
      {
        BinCount value = activeBinCounts.get(binName);
        if (value != null)
          count = value.getValue();
      }
      // rating *= (1.0 / (1.0 + (double)count))
      ratingLog -= Math.log(1.0 + (double)count);
    }
    // nth root in log space == divide by the number of bins
    return ratingLog/(double)binNames.length;
  }

  /** This is the class which allows a mutable integer count value to be saved in the bincount table.
  * Not thread-safe by itself; callers synchronize on the containing map.
  */
  protected static class BinCount
  {
    /** The count */
    protected int count = 0;

    /** Create a counter starting at zero. */
    public BinCount()
    {
    }

    /** Return an independent copy carrying the same count. */
    public BinCount duplicate()
    {
      BinCount rval = new BinCount();
      rval.count = this.count;
      return rval;
    }

    /** Increment the counter */
    public void increment()
    {
      count++;
    }

    /** Decrement the counter, returning true if it reached zero */
    public boolean decrement()
    {
      count--;
      return count == 0;
    }

    /** Set the counter value */
    public void setValue(int count)
    {
      this.count = count;
    }

    /** Get the counter value */
    public int getValue()
    {
      return count;
    }
  }
}
| |
package phenoscape.queries;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.DateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import junit.framework.Assert;
import org.apache.log4j.BasicConfigurator;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import phenoscape.queries.lib.CountTable;
import phenoscape.queries.lib.DistinctGeneAnnotationRecord;
import phenoscape.queries.lib.EntitySet;
import phenoscape.queries.lib.PermutedScoreSet;
import phenoscape.queries.lib.PhenotypeExpression;
import phenoscape.queries.lib.PhenotypeScoreTable;
import phenoscape.queries.lib.Profile;
import phenoscape.queries.lib.ProfileMap;
import phenoscape.queries.lib.ProfileScoreSet;
import phenoscape.queries.lib.TaxonPhenotypeLink;
import phenoscape.queries.lib.Utils;
import phenoscape.queries.lib.VariationTable;
public class TestPropTree1 extends PropTreeTest{
// Fixture state shared by every test; populated in setUp().
PhenotypeProfileAnalysis testAnalysis;  // the analysis object under test, bound to the unit-test KB
Utils u = new Utils();  // KB connection / query utility
StringWriter testWriter1;
StringWriter testWriter2;
StringWriter testWriter3;
StringWriter testWriter4;
Map<Integer,Integer> attMap;  // presumably maps quality node ids to attribute node ids — TODO confirm
TaxonomyTree t1;  // taxonomy tree rooted at UNITTESTROOT
int nodeIDofQuality;  // node id returned by u.getQualityNodeID()
Map<Integer,Integer> badTaxonQualities;  // collector for problem qualities seen while loading taxon profiles
Map<Integer,Integer> badGeneQualities;  // collector for problem qualities seen while loading gene profiles
/**
 * Opens the unit-test knowledge base, builds the analysis object under test,
 * and traverses the taxonomy rooted at UNITTESTROOT. Statement order matters:
 * the KB connection must exist before any lookups.
 */
@Before
public void setUp() throws Exception {
BasicConfigurator.configure(); //prevent complaints by log4j
u.openKBFromConnections(UNITTESTKB);
testAnalysis = new PhenotypeProfileAnalysis(u);
attMap = u.setupAttributes();
nodeIDofQuality = u.getQualityNodeID();
// NOTE(review): setupAttributes() is invoked a second time here, producing a
// separate map from attMap above — the original author flagged this too.
testAnalysis.attributeMap = u.setupAttributes(); // this is icky
PhenotypeExpression.getEQTop(u); //just to initialize early.
testAnalysis.attributeSet.addAll(testAnalysis.attributeMap.values());
testAnalysis.attributeSet.add(nodeIDofQuality);
badTaxonQualities = new HashMap<Integer,Integer>();
badGeneQualities = new HashMap<Integer,Integer>();
String taxonomyRoot = UNITTESTROOT;
t1 = new TaxonomyTree(taxonomyRoot,u);
t1.traverseOntologyTree(u);
}
/** Resolves an ontology-term UID to its node_id in the KB's node table. */
private static final String NODEQUERY = "SELECT n.node_id FROM node AS n WHERE n.uid = ?";
/**
 * Checks getTaxonPhenotypeLinksFromKB for each test taxon: taxa 1-3 and 7-9
 * have no direct phenotype links, taxa 4 and 10-14 have exactly one, and
 * taxa 5-6 have two. The original 14 copy-pasted lookup/assert sequences are
 * factored into the two private helpers below.
 */
@Test
public void TestGetTaxonPhenotypeLinksFromKB() throws Exception{
  PreparedStatement p = u.getPreparedStatement(NODEQUERY);
  checkTaxonPhenotypeLinkCount(p, TAXON1STR, 0);
  checkTaxonPhenotypeLinkCount(p, TAXON2STR, 0);
  checkTaxonPhenotypeLinkCount(p, TAXON3STR, 0);
  checkTaxonPhenotypeLinkCount(p, TAXON4STR, 1);
  checkTaxonPhenotypeLinkCount(p, TAXON5STR, 2);
  checkTaxonPhenotypeLinkCount(p, TAXON6STR, 2);
  checkTaxonPhenotypeLinkCount(p, TAXON7STR, 0);
  checkTaxonPhenotypeLinkCount(p, TAXON8STR, 0);
  checkTaxonPhenotypeLinkCount(p, TAXON9STR, 0);
  checkTaxonPhenotypeLinkCount(p, TAXON10STR, 1);
  checkTaxonPhenotypeLinkCount(p, TAXON11STR, 1);
  checkTaxonPhenotypeLinkCount(p, TAXON12STR, 1);
  checkTaxonPhenotypeLinkCount(p, TAXON13STR, 1);
  checkTaxonPhenotypeLinkCount(p, TAXON14STR, 1);
}

/** Resolves a taxon UID to its node_id via NODEQUERY; fails the test if absent. */
private int lookupTaxonNodeID(PreparedStatement p, String taxonUID) throws SQLException {
  p.setString(1, taxonUID);
  ResultSet r = p.executeQuery();
  if (r.next()) {
    return r.getInt(1);
  }
  fail("Couldn't find node for " + taxonUID);
  return -1; // unreachable: fail() always throws
}

/** Fetches the taxon's phenotype links and asserts the expected link count. */
private void checkTaxonPhenotypeLinkCount(PreparedStatement p, String taxonUID, int expectedCount) throws Exception {
  int taxonid = lookupTaxonNodeID(p, taxonUID);
  Collection<TaxonPhenotypeLink> c = testAnalysis.getTaxonPhenotypeLinksFromKB(u, taxonid);
  assertNotNull(c);
  if (expectedCount == 0) {
    assertTrue(c.isEmpty());
  } else {
    assertFalse(c.isEmpty());
    Assert.assertEquals(expectedCount, c.size());
  }
}
/**
 * Every taxon in the tree must have an entry (possibly empty) in the link
 * map, and the map must cover all 15 taxa in the unit-test KB.
 */
@Test
public void TestGetAllTaxonPhenotypeLinksFromKB() throws Exception{
  final Map<Integer,Set<TaxonPhenotypeLink>> linksByTaxon = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1, u);
  assertNotNull(linksByTaxon);
  Assert.assertEquals(15,linksByTaxon.size()); //this is just the number of taxa in the KB
  for (final Integer taxonNode : t1.getAllTaxa()) {
    assertNotNull(linksByTaxon.get(taxonNode));
  }
}
/** Loading taxon profiles from the KB should yield one profile per taxon (15). */
@Test
public void testLoadTaxonProfiles() throws SQLException{
  t1.traverseOntologyTree(u);
  final Map<Integer,Set<TaxonPhenotypeLink>> linksByTaxon = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1,u);
  final ProfileMap profiles = testAnalysis.loadTaxonProfiles(linksByTaxon,u, attMap, nodeIDofQuality, badTaxonQualities);
  assertFalse(profiles.isEmpty());
  Assert.assertEquals(15, profiles.domainSize()); //again, should be equal to the number of taxa
}
/**
 * After traversing the taxonomy, the variation table should record exactly
 * one varying entity ('opercle') and one varying attribute ('shape'), while
 * the profile map itself is not yet trimmed (still all 15 taxa).
 */
@Test
public void testTraverseTaxonomy() throws SQLException {
  t1.traverseOntologyTree(u);
  final Map<Integer,Set<TaxonPhenotypeLink>> linksByTaxon = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1,u);
  final ProfileMap profiles = testAnalysis.loadTaxonProfiles(linksByTaxon,u, attMap, nodeIDofQuality, badTaxonQualities);
  final VariationTable variation = new VariationTable(VariationTable.VariationType.TAXON);
  testAnalysis.traverseTaxonomy(t1, t1.getRootNodeID(), profiles, variation, u);
  // Exactly one entity should vary: 'opercle'.
  Assert.assertEquals(1, variation.getUsedEntities().size());
  final Iterator<Integer> entityIt = variation.getUsedEntities().iterator();
  assertTrue(entityIt.hasNext());
  Assert.assertEquals("opercle", u.getNodeName(entityIt.next().intValue()));
  // ...and exactly one attribute: 'shape'.
  Assert.assertEquals(1, variation.getUsedAttributes().size());
  final Iterator<Integer> attributeIt = variation.getUsedAttributes().iterator();
  assertTrue(attributeIt.hasNext());
  Assert.assertEquals("shape", u.getNodeName(attributeIt.next().intValue()));
  // The variation table 'knows' where the variation is, but profiles are not updated yet.
  assertFalse(profiles.isEmpty());
  Assert.assertEquals(15, profiles.domainSize());
}
/**
 * After flushing unvarying phenotypes, only the three varying taxa remain,
 * and each surviving profile is reduced to the single varying
 * entity/attribute pair (opercle / shape).
 */
@Test
public void testFlushUnvaryingPhenotypes() throws SQLException {
  t1.traverseOntologyTree(u);
  Map<Integer,Set<TaxonPhenotypeLink>> allLinks = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1,u);
  ProfileMap taxonProfiles = testAnalysis.loadTaxonProfiles(allLinks,u, attMap, nodeIDofQuality, badTaxonQualities);
  final VariationTable taxonVariation = new VariationTable(VariationTable.VariationType.TAXON);
  testAnalysis.traverseTaxonomy(t1, t1.getRootNodeID(), taxonProfiles, taxonVariation, u);
  assertFalse(taxonProfiles.isEmpty());
  Assert.assertEquals(15,taxonProfiles.domainSize()); //profiles before the flush includes all taxa
  testAnalysis.flushUnvaryingPhenotypes(taxonProfiles,taxonVariation,u);
  assertFalse(taxonProfiles.isEmpty());
  Assert.assertEquals(3,taxonProfiles.domainSize()); //profiles has now been trimmed to only those taxa with variation
  for (Integer taxon : taxonProfiles.domainSet()){
    Profile curProfile = taxonProfiles.getProfile(taxon);
    // CLEANUP: removed a stray no-op call to curProfile.getUsedEntities() whose
    // result was discarded, and the taxonUIDs set that was filled via
    // u.getNodeUID(taxon) but never read (both presumed side-effect free —
    // confirm getNodeUID is a pure lookup).
    Iterator<Integer> e_Itr = curProfile.getUsedEntities().iterator();
    assertTrue(e_Itr.hasNext());
    Integer ent = e_Itr.next();
    Assert.assertEquals("opercle", u.getNodeName(ent.intValue()));
    Iterator<Integer> a_Itr = curProfile.getUsedAttributes().iterator();
    assertTrue(a_Itr.hasNext());
    Integer att = a_Itr.next();
    Assert.assertEquals("shape", u.getNodeName(att.intValue()));
  }
}
/** The unit-test KB contains exactly 24 distinct gene annotations. */
@Test
public void testGetAllGeneAnnotationsFromKB() throws SQLException {
  final Collection<DistinctGeneAnnotationRecord> geneAnnotations = testAnalysis.getAllGeneAnnotationsFromKB(u);
  Assert.assertEquals(24, geneAnnotations.size());
}
/**
 * Builds the gene-side variation table and verifies both the total number of
 * distinct exhibitor genes (19) and a set of expected
 * gene/entity/attribute triples.
 */
@Test
public void testProcessGeneExpression() throws SQLException {
  initNames(u);
  // Guard: all node-id lookups from initNames must have succeeded first.
  Assert.assertFalse("failed to lookup entity opercle",opercleID==-1);
  Assert.assertFalse("failed to lookup entity pectoral fin",pectoralFinID==-1);
  Assert.assertFalse("failed to lookup entity dorsal region of cerebellum",dorsalRegionOfCerebellumID==-1);
  Assert.assertFalse("failed to lookup quality count",countID==-1);
  Assert.assertFalse("failed to lookup quality position",positionID==-1);
  Assert.assertFalse("failed to lookup quality shape",shapeID==-1);
  Assert.assertFalse("failed to lookup quality size",sizeID==-1);
  Assert.assertFalse("failed to lookup quality texture",textureID==-1);
  final VariationTable geneVariation = new VariationTable(VariationTable.VariationType.GENE);
  testAnalysis.processGeneExpression(geneVariation, u, null);
  // Union the exhibitor genes over every (entity, attribute) pair in the table.
  final Set<Integer> observedGenes = new HashSet<Integer>();
  for (final Integer attribute : geneVariation.getUsedAttributes()) {
    for (final Integer entity : geneVariation.getUsedEntities()) {
      if (geneVariation.hasExhibitorSet(entity, attribute)) {
        observedGenes.addAll(geneVariation.getExhibitorSet(entity, attribute));
      }
    }
  }
  assertEquals("Count of genes in variation table",19,observedGenes.size());
  // Spot-check the expected gene/entity/attribute triples.
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,alfID));
  Assert.assertTrue(geneVariation.geneExhibits(opercleID,shapeID,furinaID));
  Assert.assertTrue(geneVariation.geneExhibits(opercleID,shapeID,jag1bID));
  Assert.assertTrue(geneVariation.geneExhibits(opercleID,shapeID,edn1ID));
  Assert.assertTrue(geneVariation.geneExhibits(opercleID,countID,edn1ID));
  Assert.assertTrue(geneVariation.geneExhibits(dorsalRegionOfCerebellumID,shapeID,apcID));
  Assert.assertTrue(geneVariation.geneExhibits(dorsalRegionOfCerebellumID,sizeID,apcID));
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,sec24dID));
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,sec23aID));
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,shhaID));
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,lama5ID));
  Assert.assertTrue(geneVariation.geneExhibits(opercleID,positionID,fgf8aID));
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,henID));
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,rndID));
  Assert.assertTrue(geneVariation.geneExhibits(opercleID,countID,brpf1ID));
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,cyp26b1ID));
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,ugdhID));
  Assert.assertTrue(geneVariation.geneExhibits(opercleID,textureID,macf1ID));
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,fgf24ID));
  Assert.assertTrue(geneVariation.geneExhibits(pectoralFinID,sizeID,lofID));
}
/**
 * Smoke test: buildEQParents must run to completion after taxon profiles are
 * loaded, traversed, and flushed.
 * NOTE(review): this test makes no assertions about the resulting
 * phenotypeParentCache contents — the inspection loop that printed each
 * expression and its parents was left commented out (removed here). Consider
 * asserting expected cache contents once they are established.
 */
@Test
public void testBuildEQParents() throws SQLException {
t1.traverseOntologyTree(u);
Map<Integer,Set<TaxonPhenotypeLink>> allLinks = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1,u);
ProfileMap taxonProfiles = testAnalysis.loadTaxonProfiles(allLinks,u, attMap, nodeIDofQuality, badTaxonQualities);
final VariationTable taxonVariation = new VariationTable(VariationTable.VariationType.TAXON);
testAnalysis.traverseTaxonomy(t1, t1.getRootNodeID(), taxonProfiles, taxonVariation, u);
assertFalse(taxonProfiles.isEmpty());
Assert.assertEquals(15,taxonProfiles.domainSize()); //profiles before the flush includes all taxa
testAnalysis.flushUnvaryingPhenotypes(taxonProfiles,taxonVariation,u);
Map <Integer,Set<Integer>> entityParentCache = new HashMap<Integer,Set<Integer>>();
Map <Integer,Set<Integer>> entityChildCache = new HashMap<Integer,Set<Integer>>();
u.setupEntityParents(entityParentCache,entityChildCache);
Map <PhenotypeExpression,Set<PhenotypeExpression>> phenotypeParentCache = new HashMap<PhenotypeExpression,Set<PhenotypeExpression>>();
testAnalysis.buildEQParents(phenotypeParentCache,entityParentCache,u);
}
/**
 * Fills the phenotype count table from gene and taxon profiles and checks
 * each counted phenotype against the reference CountTableCheck.
 */
@Test
public void testFillCountTable() throws SQLException {
  t1.traverseOntologyTree(u);
  Map<Integer,Set<TaxonPhenotypeLink>> allLinks = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1,u);
  ProfileMap taxonProfiles = testAnalysis.loadTaxonProfiles(allLinks,u, attMap, nodeIDofQuality, badTaxonQualities);
  CountTableCheck countTableCheck = new CountTableCheck(u);
  // Loop-invariant: hoisted out of the phenotype loop below.
  Assert.assertNotNull(countTableCheck);
  VariationTable geneVariation = new VariationTable(VariationTable.VariationType.GENE);
  ProfileMap geneProfiles = testAnalysis.processGeneExpression(geneVariation, u, null);
  Map <Integer,Set<Integer>> entityParentCache = new HashMap<Integer,Set<Integer>>();
  Map <Integer,Set<Integer>> entityChildCache = new HashMap<Integer,Set<Integer>>();
  u.setupEntityParents(entityParentCache,entityChildCache);
  Map <PhenotypeExpression,Set<PhenotypeExpression>> phenotypeParentCache = new HashMap<PhenotypeExpression,Set<PhenotypeExpression>>();
  testAnalysis.buildEQParents(phenotypeParentCache,entityParentCache,u);
  CountTable<PhenotypeExpression> counts = testAnalysis.fillPhenotypeCountTable(geneProfiles, taxonProfiles, phenotypeParentCache, u, PhenotypeProfileAnalysis.GENEPHENOTYPECOUNTQUERY, PhenotypeProfileAnalysis.GENEQUALITYCOUNTQUERY, u.countDistinctGenePhenotypeAnnotations());
  for(PhenotypeExpression p : counts.getPhenotypes()){
    p.fillNames(u);
    final String fullName = p.getFullName(u);
    Assert.assertNotNull("Full phenotype name",fullName);
    // BUG FIX: the original used assertNotNull on the (auto)boxed result of
    // hasPhenotype(p), which can never be null and therefore never failed.
    // assertTrue actually verifies membership (assumes hasPhenotype returns
    // boolean — confirm against CountTableCheck).
    Assert.assertTrue("Count table does not contain: " + fullName,countTableCheck.hasPhenotype(p));
    Assert.assertNotNull("Raw count of "+ fullName + " is null?",counts.getRawCount(p));
    Assert.assertEquals(countTableCheck.get(p),counts.getRawCount(p));
  }
}
/**
 * Smoke test: builds taxon and gene profiles, parent caches, and the
 * phenotype count table, then verifies buildPhenotypeMatchCache completes
 * without error.
 */
@Test
public void testBuildPhenotypeMatchCache() throws SQLException {
  // Taxon-side profiles, trimmed to the varying phenotypes.
  t1.traverseOntologyTree(u);
  final Map<Integer,Set<TaxonPhenotypeLink>> linksByTaxon = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1,u);
  final ProfileMap taxonProfileMap = testAnalysis.loadTaxonProfiles(linksByTaxon,u, attMap, nodeIDofQuality, badTaxonQualities);
  testAnalysis.taxonProfiles= taxonProfileMap;
  final VariationTable taxonVariationTable = new VariationTable(VariationTable.VariationType.TAXON);
  testAnalysis.traverseTaxonomy(t1, t1.getRootNodeID(), taxonProfileMap, taxonVariationTable, u);
  assertFalse(taxonProfileMap.isEmpty());
  Assert.assertEquals(15,taxonProfileMap.domainSize()); //profiles before the flush includes all taxa
  testAnalysis.flushUnvaryingPhenotypes(taxonProfileMap,taxonVariationTable,u);
  // Gene-side profiles.
  final VariationTable geneVariationTable = new VariationTable(VariationTable.VariationType.GENE);
  final ProfileMap geneProfileMap = testAnalysis.processGeneExpression(geneVariationTable, u, null);
  testAnalysis.geneProfiles= geneProfileMap;
  // Parent caches and score table feeding the match-cache computation.
  final Map <PhenotypeExpression,Set<PhenotypeExpression>> phenotypeParents = new HashMap<PhenotypeExpression,Set<PhenotypeExpression>>();
  final Map <Integer,Set<Integer>> entityParents = new HashMap<Integer,Set<Integer>>();
  final Map <Integer,Set<Integer>> entityChildren = new HashMap<Integer,Set<Integer>>();
  u.setupEntityParents(entityParents,entityChildren);
  final PhenotypeScoreTable scores = new PhenotypeScoreTable();
  testAnalysis.buildEQParents(phenotypeParents,entityParents,u);
  final CountTable<PhenotypeExpression> phenotypeCounts = testAnalysis.fillPhenotypeCountTable(geneProfileMap, taxonProfileMap, phenotypeParents, u, PhenotypeProfileAnalysis.GENEPHENOTYPECOUNTQUERY, PhenotypeProfileAnalysis.GENEQUALITYCOUNTQUERY, u.countDistinctGenePhenotypeAnnotations());
  testAnalysis.buildPhenotypeMatchCache(phenotypeParents, scores, phenotypeCounts, u);
}
/**
 * NOTE(review): this test is a line-for-line duplicate of
 * testBuildPhenotypeMatchCache and never calls writePhenotypeMatchSummary
 * (or inspects any written output), so it does not exercise the behavior
 * its name advertises. It should either invoke the summary writer against
 * one of the StringWriter fixtures or be removed.
 */
@Test
public void testWritePhenotypeMatchSummary() throws SQLException {
t1.traverseOntologyTree(u);
Map<Integer,Set<TaxonPhenotypeLink>> allLinks = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1,u);
ProfileMap taxonProfiles = testAnalysis.loadTaxonProfiles(allLinks,u, attMap, nodeIDofQuality, badTaxonQualities);
testAnalysis.taxonProfiles= taxonProfiles;
final VariationTable taxonVariation = new VariationTable(VariationTable.VariationType.TAXON);
testAnalysis.traverseTaxonomy(t1, t1.getRootNodeID(), taxonProfiles, taxonVariation, u);
assertFalse(taxonProfiles.isEmpty());
Assert.assertEquals(15,taxonProfiles.domainSize()); //profiles before the flush includes all taxa
testAnalysis.flushUnvaryingPhenotypes(taxonProfiles,taxonVariation,u);
VariationTable geneVariation = new VariationTable(VariationTable.VariationType.GENE);
ProfileMap geneProfiles = testAnalysis.processGeneExpression(geneVariation, u, null);
testAnalysis.geneProfiles= geneProfiles;
Map <PhenotypeExpression,Set<PhenotypeExpression>> phenotypeParentCache = new HashMap<PhenotypeExpression,Set<PhenotypeExpression>>();
Map <Integer,Set<Integer>> entityParentCache = new HashMap<Integer,Set<Integer>>();
Map <Integer,Set<Integer>> entityChildCache = new HashMap<Integer,Set<Integer>>();
u.setupEntityParents(entityParentCache,entityChildCache);
PhenotypeScoreTable phenotypeScores = new PhenotypeScoreTable();
testAnalysis.buildEQParents(phenotypeParentCache,entityParentCache,u);
CountTable <PhenotypeExpression> counts = testAnalysis.fillPhenotypeCountTable(geneProfiles, taxonProfiles, phenotypeParentCache, u, PhenotypeProfileAnalysis.GENEPHENOTYPECOUNTQUERY, PhenotypeProfileAnalysis.GENEQUALITYCOUNTQUERY, u.countDistinctGenePhenotypeAnnotations());
testAnalysis.buildPhenotypeMatchCache(phenotypeParentCache, phenotypeScores, counts, u);
}
/**
 * Runs the full taxon/gene profile pipeline, then checks the max-IC score of
 * taxon order1's profile against gene alf's profile (expected 0.0, compared
 * via softCompare for floating-point tolerance).
 */
@Test
public void testCalcMaxIC() throws SQLException {
t1.traverseOntologyTree(u);
Map<Integer,Set<TaxonPhenotypeLink>> allLinks = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1,u);
ProfileMap taxonProfiles = testAnalysis.loadTaxonProfiles(allLinks,u, attMap, nodeIDofQuality, badTaxonQualities);
testAnalysis.taxonProfiles= taxonProfiles;
final VariationTable taxonVariation = new VariationTable(VariationTable.VariationType.TAXON);
testAnalysis.traverseTaxonomy(t1, t1.getRootNodeID(), taxonProfiles, taxonVariation, u);
assertFalse(taxonProfiles.isEmpty());
Assert.assertEquals(15,taxonProfiles.domainSize()); //profiles before the flush includes all taxa
testAnalysis.flushUnvaryingPhenotypes(taxonProfiles,taxonVariation,u);
VariationTable geneVariation = new VariationTable(VariationTable.VariationType.GENE);
ProfileMap geneProfiles = testAnalysis.processGeneExpression(geneVariation, u, null);
testAnalysis.geneProfiles= geneProfiles;
Map <PhenotypeExpression,Set<PhenotypeExpression>> phenotypeParentCache = new HashMap<PhenotypeExpression,Set<PhenotypeExpression>>();
Map <Integer,Set<Integer>> entityParentCache = new HashMap<Integer,Set<Integer>>();
Map <Integer,Set<Integer>> entityChildCache = new HashMap<Integer,Set<Integer>>();
u.setupEntityParents(entityParentCache,entityChildCache);
PhenotypeScoreTable phenotypeScores = new PhenotypeScoreTable();
testAnalysis.buildEQParents(phenotypeParentCache,entityParentCache,u);
CountTable<PhenotypeExpression> counts = testAnalysis.fillPhenotypeCountTable(geneProfiles, taxonProfiles, phenotypeParentCache, u, PhenotypeProfileAnalysis.GENEPHENOTYPECOUNTQUERY, PhenotypeProfileAnalysis.GENEQUALITYCOUNTQUERY, u.countDistinctGenePhenotypeAnnotations());
testAnalysis.buildPhenotypeMatchCache(phenotypeParentCache, phenotypeScores, counts, u);
initNames(u);
//test order1 against alf
double maxICScore = testAnalysis.calcMaxIC(taxonProfiles.getProfile(order1ID).getAllEAPhenotypes(),
geneProfiles.getProfile(alfID).getAllEAPhenotypes(),
phenotypeScores);
Assert.assertTrue("Matching order1 against alf; Expected " + 0.0 + "; found " + maxICScore,softCompare(maxICScore,0.0));
// NOTE(review): further profile-pair comparisons (order1/family1/genus1/genus2
// against apa, apc, cyp26b1 and jag1b, expecting IC1/IC3/IC4/IC13) were
// present here but commented out; they used the older taxonProfiles.get(...)
// accessor and presumably await verified IC constants — confirm and re-enable.
}
/**
 * Builds the complete scoring pipeline (entity annotations, taxon and gene
 * profiles, entity parent/child caches, count tables, the phenotype match
 * cache and permuted profile scores) from the test knowledge base, then
 * checks {@code matchOneProfilePair} on the order1/alf profile pair.
 *
 * @throws SQLException if a knowledge-base query fails
 * @throws IOException  if report/support I/O fails
 */
@Test
public void testMatchOneProfilePair() throws SQLException, IOException {
    t1.traverseOntologyTree(u);
    // Entity-level annotation set: taxon annotations first, then gene annotations;
    // the running totals are cross-checked against direct KB counts.
    testAnalysis.entityAnnotations = new EntitySet(u);
    testAnalysis.entityAnnotations.fillTaxonPhenotypeAnnotationsToEntities();
    final int ata = testAnalysis.entityAnnotations.annotationTotal();
    Assert.assertEquals(u.countAssertedTaxonPhenotypeAnnotations(), ata);
    testAnalysis.entityAnnotations.fillGenePhenotypeAnnotationsToEntities();
    final int tea = testAnalysis.entityAnnotations.annotationTotal();
    final int dga = u.countDistinctGenePhenotypeAnnotations();
    Assert.assertEquals(tea, ata + dga);
    testAnalysis.totalAnnotations = ata + dga;
    // Taxon profiles, with variation collected while traversing the taxonomy.
    Map<Integer, Set<TaxonPhenotypeLink>> allLinks = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1, u);
    ProfileMap taxonProfiles = testAnalysis.loadTaxonProfiles(allLinks, u, attMap, nodeIDofQuality, badTaxonQualities);
    testAnalysis.taxonProfiles = taxonProfiles;
    final VariationTable taxonVariation = new VariationTable(VariationTable.VariationType.TAXON);
    testAnalysis.traverseTaxonomy(t1, t1.getRootNodeID(), taxonProfiles, taxonVariation, u);
    assertFalse(taxonProfiles.isEmpty());
    Assert.assertEquals(15, taxonProfiles.domainSize()); // profiles before the flush include all taxa
    testAnalysis.flushUnvaryingPhenotypes(taxonProfiles, taxonVariation, u);
    // Gene profiles (no report writer in this test, hence the null argument).
    VariationTable geneVariation = new VariationTable(VariationTable.VariationType.GENE);
    ProfileMap geneProfiles = testAnalysis.processGeneExpression(geneVariation, u, null);
    testAnalysis.geneProfiles = geneProfiles;
    // Parent/child caches for entities and attribute-level EQ phenotypes.
    Map<PhenotypeExpression, Set<PhenotypeExpression>> phenotypeParentCache = new HashMap<PhenotypeExpression, Set<PhenotypeExpression>>();
    Map<Integer, Set<Integer>> entityParentCache = new HashMap<Integer, Set<Integer>>();
    Map<Integer, Set<Integer>> entityChildCache = new HashMap<Integer, Set<Integer>>();
    u.setupEntityParents(entityParentCache, entityChildCache);
    // NOTE(review): the three entity count tables below are never read in this
    // test (sumTable included); they are kept because the fill methods issue KB
    // queries and may have been intended as smoke checks -- confirm before
    // removing. Also note fillTaxonEntityCountTable is handed geneProfiles --
    // verify that is intentional and not a copy/paste of the previous line.
    CountTable<Integer> geneEntityCounts = testAnalysis.fillGeneEntityCountTable(testAnalysis.geneProfiles, entityParentCache, u, PhenotypeProfileAnalysis.GENEENTITYCOUNTQUERY, u.countDistinctGeneEntityPhenotypeAnnotations());
    CountTable<Integer> taxonEntityCounts = testAnalysis.fillTaxonEntityCountTable(testAnalysis.geneProfiles, entityParentCache, u, PhenotypeProfileAnalysis.TAXONENTITYCOUNTQUERY, u.countDistinctGeneEntityPhenotypeAnnotations());
    CountTable<Integer> sumTable = geneEntityCounts.addTable(taxonEntityCounts);
    // Phenotype score cache over attribute-level EQ parents.
    PhenotypeScoreTable phenotypeScores = new PhenotypeScoreTable();
    testAnalysis.buildEQParents(phenotypeParentCache, entityParentCache, u);
    CountTable<PhenotypeExpression> counts = testAnalysis.fillPhenotypeCountTable(geneProfiles, taxonProfiles, phenotypeParentCache, u, PhenotypeProfileAnalysis.GENEPHENOTYPECOUNTQUERY, PhenotypeProfileAnalysis.GENEQUALITYCOUNTQUERY, u.countDistinctGenePhenotypeAnnotations());
    testAnalysis.buildPhenotypeMatchCache(phenotypeParentCache, phenotypeScores, counts, u);
    // Permuted (null-distribution) profile scores.
    PermutedScoreSet s = new PermutedScoreSet(taxonProfiles, geneProfiles, entityParentCache, entityChildCache, phenotypeScores, u);
    s.setRandom(new Random());
    s.calcPermutedProfileScores();
    initNames(u);
    // Check order1 against alf; the expected median IC score is exactly 0.0.
    ProfileScoreSet pSet = testAnalysis.matchOneProfilePair(order1ID, alfID, s, phenotypeScores, entityParentCache, entityChildCache, testAnalysis.entityAnnotations, phenotypeParentCache, u);
    // Fix: JUnit 4's assertEquals(double, double) overload (no delta) is
    // deprecated and unconditionally fails with "Use assertEquals(expected,
    // actual, delta)"; the delta overload with 0.0 keeps exact equality.
    Assert.assertEquals(0.0, pSet.getMedianICScore(), 0.0);
    // A large battery of commented-out per-gene maxIC assertions (order1,
    // family1, genus1, genus2, genus3 against each gene) was removed as dead
    // code; recover it from version control if those checks are revived.
}
// Output targets for the five reports produced by testOutputFiles. Paths are
// relative to the module working directory and point into the PropTree1
// small-KB test area -- presumably the directory must already exist when the
// test runs; TODO confirm against the CI working directory.
private static final String TAXONREPORTFILENAME = "../../SmallKBTests/PropTree1/TaxonVariationReport.txt";
private static final String GENEREPORTFILENAME = "../../SmallKBTests/PropTree1/GeneVariationReport.txt";
private static final String PHENOTYPEMATCHREPORTFILENAME = "../../SmallKBTests/PropTree1/PhenotypeMatchReport.txt";
private static final String PROFILEMATCHREPORTFILENAME = "../../SmallKBTests/PropTree1/ProfileMatchReport.txt";
private static final String TAXONGENEMAXICSCOREFILENAME = "../../SmallKBTests/PropTree1/MaxICReport.txt";
/**
 * Runs the full PropTree1 analysis end to end and writes the five report
 * files (taxon variation, gene variation, phenotype match, profile match and
 * taxon/gene maxIC). The writers are now closed in a finally block so the
 * files are released even when the analysis throws; the original code leaked
 * all five writers on failure and closed several of them twice.
 *
 * @throws SQLException if a knowledge-base query fails
 * @throws IOException  if writing a report fails
 */
@Test
public void testOutputFiles() throws SQLException, IOException {
    File outFile1 = new File(TAXONREPORTFILENAME);
    File outFile2 = new File(GENEREPORTFILENAME);
    File outFile3 = new File(PHENOTYPEMATCHREPORTFILENAME);
    File outFile4 = new File(PROFILEMATCHREPORTFILENAME);
    File outFile5 = new File(TAXONGENEMAXICSCOREFILENAME);
    Writer taxonWriter = null;
    Writer geneWriter = null;
    Writer phenoWriter = null;
    Writer profileWriter = null;
    Writer w5 = null;
    // Time stamp written at the top of every report.
    DateFormat dateFormatter = DateFormat.getDateInstance(DateFormat.DEFAULT);
    Date today = new Date();
    DateFormat timeFormatter = DateFormat.getTimeInstance(DateFormat.DEFAULT);
    String timeStamp = dateFormatter.format(today) + " " + timeFormatter.format(today) + " on PropTree1";
    try {
        taxonWriter = new BufferedWriter(new FileWriter(outFile1));
        geneWriter = new BufferedWriter(new FileWriter(outFile2));
        phenoWriter = new BufferedWriter(new FileWriter(outFile3));
        profileWriter = new BufferedWriter(new FileWriter(outFile4));
        w5 = new BufferedWriter(new FileWriter(outFile5));
        u.writeOrDump(timeStamp, taxonWriter);
        u.writeOrDump(timeStamp, geneWriter);
        u.writeOrDump(timeStamp, phenoWriter);
        u.writeOrDump(timeStamp, profileWriter);
        u.writeOrDump(timeStamp, w5);
        u.writeOrDump("Starting analysis: " + timeStamp, null);
        PhenotypeExpression.getEQTop(u); // just to initialize early.
        // Entity-level annotation set: taxa first, then genes, cross-checked
        // against direct KB counts.
        testAnalysis.entityAnnotations = new EntitySet(u);
        testAnalysis.entityAnnotations.fillTaxonPhenotypeAnnotationsToEntities();
        int ata = testAnalysis.entityAnnotations.annotationTotal();
        Assert.assertEquals(u.countAssertedTaxonPhenotypeAnnotations(), ata);
        testAnalysis.entityAnnotations.fillGenePhenotypeAnnotationsToEntities();
        int tea = testAnalysis.entityAnnotations.annotationTotal();
        int dga = u.countDistinctGenePhenotypeAnnotations();
        Assert.assertEquals(tea, ata + dga);
        testAnalysis.totalAnnotations = ata + dga;
        // Process taxa annotations and report taxon-side statistics.
        Map<Integer, Set<TaxonPhenotypeLink>> allLinks = testAnalysis.getAllTaxonPhenotypeLinksFromKB(t1, u);
        testAnalysis.taxonProfiles = testAnalysis.loadTaxonProfiles(allLinks, u, attMap, nodeIDofQuality, badTaxonQualities);
        testAnalysis.countAnnotatedTaxa(t1, t1.getRootNodeID(), testAnalysis.taxonProfiles, u);
        int eaCount = testAnalysis.countEAAnnotations(testAnalysis.taxonProfiles, u);
        u.writeOrDump("Count of distinct taxon-phenotype assertions (EQ level): " + testAnalysis.taxonPhenotypeLinkCount, taxonWriter);
        u.writeOrDump("Count of distinct taxon-phenotype assertions (EA level; not filtered for variation): " + eaCount, taxonWriter);
        u.writeOrDump("Count of annotated taxa = " + testAnalysis.annotatedTaxa, taxonWriter);
        u.writeOrDump("Count of parents of annotated taxa = " + testAnalysis.parentsOfAnnotatedTaxa, taxonWriter);
        final VariationTable taxonVariation = new VariationTable(VariationTable.VariationType.TAXON);
        testAnalysis.traverseTaxonomy(t1, t1.getRootNodeID(), testAnalysis.taxonProfiles, taxonVariation, u);
        t1.report(u, taxonWriter);
        taxonVariation.variationReport(u, taxonWriter);
        u.writeOrDump("\nList of qualities that were placed under quality as an attribute by default\n", taxonWriter);
        for (Integer bad_id : badTaxonQualities.keySet()) {
            u.writeOrDump(u.getNodeName(bad_id) + " " + badTaxonQualities.get(bad_id), taxonWriter);
        }
        testAnalysis.flushUnvaryingPhenotypes(testAnalysis.taxonProfiles, taxonVariation, u);
        Assert.assertFalse(testAnalysis.taxonProfiles.isEmpty());
        // Process gene expression and report gene-side statistics.
        VariationTable geneVariation = new VariationTable(VariationTable.VariationType.GENE);
        testAnalysis.geneProfiles = testAnalysis.processGeneExpression(geneVariation, u, geneWriter);
        geneVariation.variationReport(u, geneWriter);
        u.writeOrDump("\nList of qualities that were placed under quality as an attribute by default\n", geneWriter);
        for (Integer bad_id : badGeneQualities.keySet()) {
            u.writeOrDump(u.getNodeName(bad_id) + " " + badGeneQualities.get(bad_id), geneWriter);
        }
        geneWriter.close();
        // Entity parent/child caches and count tables.
        Map<Integer, Set<Integer>> entityParentCache = new HashMap<Integer, Set<Integer>>();
        Map<Integer, Set<Integer>> entityChildCache = new HashMap<Integer, Set<Integer>>();
        u.setupEntityParents(entityParentCache, entityChildCache);
        // NOTE(review): the three count tables below are never read afterwards
        // (sumTable included), and fillTaxonEntityCountTable is handed
        // geneProfiles -- verify both points before relying on this section.
        CountTable<Integer> geneEntityCounts = testAnalysis.fillGeneEntityCountTable(testAnalysis.geneProfiles, entityParentCache, u, PhenotypeProfileAnalysis.GENEENTITYCOUNTQUERY, u.countDistinctGeneEntityPhenotypeAnnotations());
        CountTable<Integer> taxonEntityCounts = testAnalysis.fillTaxonEntityCountTable(testAnalysis.geneProfiles, entityParentCache, u, PhenotypeProfileAnalysis.TAXONENTITYCOUNTQUERY, u.countDistinctGeneEntityPhenotypeAnnotations());
        CountTable<Integer> sumTable = geneEntityCounts.addTable(taxonEntityCounts);
        /* phenotypeParentCache maps an attribute-level EQ to all its parents via inheres_in_part_of entity parents and is_a quality parents (cross product). */
        Map<PhenotypeExpression, Set<PhenotypeExpression>> phenotypeParentCache = new HashMap<PhenotypeExpression, Set<PhenotypeExpression>>();
        testAnalysis.buildEQParents(phenotypeParentCache, entityParentCache, u);
        CountTable<PhenotypeExpression> phenotypeCountsToUse = testAnalysis.fillPhenotypeCountTable(testAnalysis.geneProfiles, testAnalysis.taxonProfiles, phenotypeParentCache, u, GENEPHENOTYPECOUNTQUERY, GENEQUALITYCOUNTQUERY, u.countDistinctGenePhenotypeAnnotations());
        PhenotypeScoreTable phenotypeScores = new PhenotypeScoreTable();
        testAnalysis.buildPhenotypeMatchCache(phenotypeParentCache, phenotypeScores, phenotypeCountsToUse, u);
        taxonWriter.close();
        // Phenotype-level and taxon/gene maxIC summaries.
        testAnalysis.writePhenotypeMatchSummary(phenotypeScores, u, phenoWriter);
        phenoWriter.close();
        testAnalysis.writeTaxonGeneMaxICSummary(phenotypeScores, u, w5);
        w5.close();
        // Profile-level match report using permuted score distributions.
        PermutedScoreSet s = new PermutedScoreSet(testAnalysis.taxonProfiles, testAnalysis.geneProfiles, entityParentCache, entityChildCache, phenotypeScores, u);
        s.setRandom(new Random());
        s.calcPermutedProfileScores();
        testAnalysis.profileMatchReport(phenotypeScores, s, profileWriter, entityParentCache, entityChildCache, testAnalysis.entityAnnotations, phenotypeParentCache, u);
        profileWriter.close();
    } finally {
        // Safety net: re-closing an already-closed BufferedWriter is a no-op,
        // so the happy-path closes above are unaffected.
        closeQuietly(taxonWriter);
        closeQuietly(geneWriter);
        closeQuietly(phenoWriter);
        closeQuietly(profileWriter);
        closeQuietly(w5);
    }
}

/** Closes a report writer during cleanup, ignoring null and close-time errors. */
private static void closeQuietly(Writer w) {
    if (w != null) {
        try {
            w.close();
        } catch (IOException ignored) {
            // Best-effort close; an exception from the analysis takes precedence.
        }
    }
}
/** Releases the shared knowledge-base connection after each test. */
@After
public void tearDown() throws Exception {
    u.closeKB();
}
}
| |
package com.aiteu.reading.view.menu;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.VelocityTracker;
public class SlidingDrawer extends DraggableDrawer {
// Fix: the log tag previously read "OverlayDrawer", evidently copied from a
// sibling drawer class; it now names this class.
private static final String TAG = "SlidingDrawer";
/**
 * Package-private constructor for programmatic attachment to an activity
 * with an explicit drag mode (callers are outside this file -- presumably a
 * static attach helper in the parent class; TODO confirm).
 */
SlidingDrawer(Activity activity, int dragMode) {
    super(activity, dragMode);
}
/** Constructor for creating the drawer from code. */
public SlidingDrawer(Context context) {
    super(context);
}
/** Constructor used when the drawer is inflated from XML. */
public SlidingDrawer(Context context, AttributeSet attrs) {
    super(context, attrs);
}
/** Constructor used when the drawer is inflated from XML with a default style. */
public SlidingDrawer(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
}
@Override
protected void initDrawer(Context context, AttributeSet attrs, int defStyle) {
    super.initDrawer(context, attrs, defStyle);
    // The menu container is added first so it sits beneath the content
    // container in z-order; the content slides over/away from it.
    super.addView(mMenuContainer, -1, new LayoutParams(
            LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
    super.addView(mContentContainer, -1, new LayoutParams(
            LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
}
/**
 * Animates the drawer to its fully open offset. Start-edge positions
 * (LEFT/TOP) open toward a positive offset of {@code mMenuSize}; end-edge
 * positions (RIGHT/BOTTOM) toward {@code -mMenuSize}; any other position
 * leaves the target at 0, matching the previous switch's implicit default.
 *
 * @param animate whether the offset change is animated
 */
@Override
public void openMenu(boolean animate) {
    final Position position = getPosition();
    int target = 0;
    if (position == Position.LEFT || position == Position.TOP) {
        target = mMenuSize;
    } else if (position == Position.RIGHT || position == Position.BOTTOM) {
        target = -mMenuSize;
    }
    animateOffsetTo(target, 0, animate);
}
@Override
public void closeMenu(boolean animate) {
    // Fully closed is always offset 0, regardless of drawer position.
    animateOffsetTo(0, 0, animate);
}
/**
 * Repositions the content container (and, via {@link #offsetMenu(int)}, the
 * menu) for a new drawer offset. Uses view translation when the platform
 * supports it, otherwise falls back to raw top/left offsetting.
 */
@SuppressLint("NewApi")
@Override
protected void onOffsetPixelsChanged(int offsetPixels) {
    final Position position = getPosition();
    final boolean vertical = position == Position.TOP || position == Position.BOTTOM;
    if (USE_TRANSLATIONS) {
        if (vertical) {
            mContentContainer.setTranslationY(offsetPixels);
        } else {
            mContentContainer.setTranslationX(offsetPixels);
        }
    } else if (vertical) {
        mContentContainer.offsetTopAndBottom(offsetPixels - mContentContainer.getTop());
    } else {
        mContentContainer.offsetLeftAndRight(offsetPixels - mContentContainer.getLeft());
    }
    offsetMenu(offsetPixels);
    invalidate();
}
/**
 * Starts the "peek" scroll that nudges the drawer open by a third of the
 * menu size. End-edge positions (RIGHT/BOTTOM) peek in the negative
 * direction; every other position peeks positively, as before.
 */
@Override
protected void initPeekScroller() {
    final Position position = getPosition();
    final boolean reverse = position == Position.RIGHT || position == Position.BOTTOM;
    final int dx = (reverse ? -mMenuSize : mMenuSize) / 3;
    mPeekScroller.startScroll(0, 0, dx, 0, PEEK_DURATION);
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
    super.onSizeChanged(w, h, oldw, oldh);
    // Re-apply the current offset so content and menu positions survive a resize.
    onOffsetPixelsChanged((int) mOffsetPixels);
}
@Override
protected void drawOverlay(Canvas canvas) {
    final int width = getWidth();
    final int height = getHeight();
    final int offsetPixels = (int) mOffsetPixels;
    // Fraction of the menu that is open, in [0, 1]. NOTE(review): unlike
    // offsetMenu(), there is no guard against mMenuSize == 0 here, which
    // would make openRatio NaN -- confirm a zero-size menu cannot reach this.
    final float openRatio = Math.abs(mOffsetPixels) / mMenuSize;
    // Bound the overlay to the strip of content currently uncovered; for
    // RIGHT/BOTTOM the offset is negative, so width/height + offset is the edge.
    switch (getPosition()) {
    case LEFT:
        mMenuOverlay.setBounds(0, 0, offsetPixels, height);
        break;
    case RIGHT:
        mMenuOverlay.setBounds(width + offsetPixels, 0, width, height);
        break;
    case TOP:
        mMenuOverlay.setBounds(0, 0, width, offsetPixels);
        break;
    case BOTTOM:
        mMenuOverlay.setBounds(0, height + offsetPixels, width, height);
        break;
    }
    // The overlay fades out as the menu opens.
    mMenuOverlay
            .setAlpha((int) (MAX_MENU_OVERLAY_ALPHA * (1.f - openRatio)));
    mMenuOverlay.draw(canvas);
}
/**
 * Lays out the content at its current drawer offset (unless translations are
 * in use, in which case it sits at the origin and is translated elsewhere)
 * and pins the menu container to its anchoring edge at its natural size.
 */
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
    final int width = r - l;
    final int height = b - t;
    final Position position = getPosition();
    if (USE_TRANSLATIONS) {
        // Translation moves the content; layout stays at the origin.
        mContentContainer.layout(0, 0, width, height);
    } else {
        final int offset = (int) mOffsetPixels;
        final boolean horizontal = position == Position.LEFT || position == Position.RIGHT;
        if (horizontal) {
            mContentContainer.layout(offset, 0, width + offset, height);
        } else {
            mContentContainer.layout(0, offset, width, height + offset);
        }
    }
    if (position == Position.LEFT) {
        mMenuContainer.layout(0, 0, mMenuSize, height);
    } else if (position == Position.RIGHT) {
        mMenuContainer.layout(width - mMenuSize, 0, width, height);
    } else if (position == Position.TOP) {
        mMenuContainer.layout(0, 0, width, mMenuSize);
    } else if (position == Position.BOTTOM) {
        mMenuContainer.layout(0, height - mMenuSize, width, height);
    }
}
/**
 * Offsets the menu relative to its original position based on the position
 * of the content, producing a parallax-style trailing effect while the
 * drawer opens and closes.
 *
 * @param offsetPixels
 *            The number of pixels the content is offset.
 */
@SuppressLint("NewApi")
private void offsetMenu(int offsetPixels) {
    if (!mOffsetMenu || mMenuSize == 0) {
        return;
    }
    final int width = getWidth();
    final int height = getHeight();
    final int menuSize = mMenuSize;
    // NOTE(review): when mOffsetPixels == 0 this is 0f/0f == NaN and
    // (int) NaN == 0, so sign silently becomes 0 -- confirm that is the
    // intended closed-menu behavior rather than an accident.
    final int sign = (int) (mOffsetPixels / Math.abs(mOffsetPixels));
    final float openRatio = Math.abs(mOffsetPixels) / menuSize;
    // The menu trails the content by up to a quarter of its size, shrinking
    // to zero as the drawer reaches fully open.
    final int offset = (int) (-0.25f * ((1.0f - openRatio) * menuSize) * sign);
    switch (getPosition()) {
    case LEFT: {
        if (USE_TRANSLATIONS) {
            if (offsetPixels > 0) {
                mMenuContainer.setTranslationX(offset);
            } else {
                // Closed: park the menu fully off-screen.
                mMenuContainer.setTranslationX(-menuSize);
            }
        } else {
            mMenuContainer.offsetLeftAndRight(offset
                    - mMenuContainer.getLeft());
            mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE
                    : VISIBLE);
        }
        break;
    }
    case RIGHT: {
        if (USE_TRANSLATIONS) {
            // Offsets are negative on this edge, hence != 0 rather than > 0.
            if (offsetPixels != 0) {
                mMenuContainer.setTranslationX(offset);
            } else {
                mMenuContainer.setTranslationX(menuSize);
            }
        } else {
            final int oldOffset = mMenuContainer.getRight() - width;
            final int offsetBy = offset - oldOffset;
            mMenuContainer.offsetLeftAndRight(offsetBy);
            mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE
                    : VISIBLE);
        }
        break;
    }
    case TOP: {
        if (USE_TRANSLATIONS) {
            if (offsetPixels > 0) {
                mMenuContainer.setTranslationY(offset);
            } else {
                mMenuContainer.setTranslationY(-menuSize);
            }
        } else {
            mMenuContainer.offsetTopAndBottom(offset
                    - mMenuContainer.getTop());
            mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE
                    : VISIBLE);
        }
        break;
    }
    case BOTTOM: {
        if (USE_TRANSLATIONS) {
            if (offsetPixels != 0) {
                mMenuContainer.setTranslationY(offset);
            } else {
                mMenuContainer.setTranslationY(menuSize);
            }
        } else {
            final int oldOffset = mMenuContainer.getBottom() - height;
            final int offsetBy = offset - oldOffset;
            mMenuContainer.offsetTopAndBottom(offsetBy);
            mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE
                    : VISIBLE);
        }
        break;
    }
    }
}
/**
 * Measures the drawer: both containers span the full drawer size except for
 * the menu's anchored dimension, which is fixed at {@code mMenuSize}.
 * Requires exact measure specs (throws otherwise).
 *
 * Fix: the height measure specs were previously derived from
 * {@code widthMeasureSpec}. Because every child dimension passed to
 * getChildMeasureSpec is explicit and the parent mode is EXACTLY/AT_MOST,
 * the resulting specs happened to be identical, so this correction does not
 * change behavior -- it removes a latent bug and reads correctly.
 */
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    final int widthMode = MeasureSpec.getMode(widthMeasureSpec);
    final int heightMode = MeasureSpec.getMode(heightMeasureSpec);
    if (widthMode == MeasureSpec.UNSPECIFIED
            || heightMode == MeasureSpec.UNSPECIFIED) {
        throw new IllegalStateException("Must measure with an exact size");
    }
    final int width = MeasureSpec.getSize(widthMeasureSpec);
    final int height = MeasureSpec.getSize(heightMeasureSpec);
    // -1 is the "not yet initialized" sentinel; start fully open, unanimated.
    if (mOffsetPixels == -1)
        openMenu(false);
    int menuWidthMeasureSpec;
    int menuHeightMeasureSpec;
    switch (getPosition()) {
    case TOP:
    case BOTTOM:
        menuWidthMeasureSpec = getChildMeasureSpec(widthMeasureSpec, 0,
                width);
        menuHeightMeasureSpec = getChildMeasureSpec(heightMeasureSpec, 0,
                mMenuSize);
        break;
    default:
        // LEFT/RIGHT
        menuWidthMeasureSpec = getChildMeasureSpec(widthMeasureSpec, 0,
                mMenuSize);
        menuHeightMeasureSpec = getChildMeasureSpec(heightMeasureSpec, 0,
                height);
    }
    mMenuContainer.measure(menuWidthMeasureSpec, menuHeightMeasureSpec);
    final int contentWidthMeasureSpec = getChildMeasureSpec(
            widthMeasureSpec, 0, width);
    final int contentHeightMeasureSpec = getChildMeasureSpec(
            heightMeasureSpec, 0, height);
    mContentContainer.measure(contentWidthMeasureSpec,
            contentHeightMeasureSpec);
    setMeasuredDimension(width, height);
    updateTouchAreaSize();
}
/**
 * Reports whether the touch at (x, y) lands on the content view rather than
 * the menu, judged against the content container's edge for the current
 * drawer position. Unknown positions yield false, as before.
 */
private boolean isContentTouch(int x, int y) {
    switch (getPosition()) {
    case LEFT:
        return ViewHelper.getLeft(mContentContainer) < x;
    case RIGHT:
        return ViewHelper.getRight(mContentContainer) > x;
    case TOP:
        return ViewHelper.getTop(mContentContainer) < y;
    case BOTTOM:
        return ViewHelper.getBottom(mContentContainer) > y;
    default:
        return false;
    }
}
/**
 * Decides whether an ACTION_DOWN at (x, y) may begin a drag: either the
 * gesture starts inside the edge touch zone while the menu is closed, or on
 * the content area while the menu is visible.
 */
protected boolean onDownAllowDrag(int x, int y) {
    switch (getPosition()) {
    case LEFT:
        return (!mMenuVisible && mInitialMotionX <= mTouchSize)
                || (mMenuVisible && mInitialMotionX >= mOffsetPixels);
    case RIGHT:
        final int width = getWidth();
        final int initialMotionX = (int) mInitialMotionX;
        // For RIGHT/BOTTOM the offset is negative, so size + mOffsetPixels is
        // the content's exposed edge.
        return (!mMenuVisible && initialMotionX >= width - mTouchSize)
                || (mMenuVisible && initialMotionX <= width + mOffsetPixels);
    case TOP:
        return (!mMenuVisible && mInitialMotionY <= mTouchSize)
                || (mMenuVisible && mInitialMotionY >= mOffsetPixels);
    case BOTTOM:
        final int height = getHeight();
        return (!mMenuVisible && mInitialMotionY >= height - mTouchSize)
                || (mMenuVisible && mInitialMotionY <= height
                        + mOffsetPixels);
    }
    return false;
}
/**
 * Decides whether an ACTION_MOVE may begin a drag: like onDownAllowDrag, but
 * when the menu is closed the move must also head in the opening direction
 * (dx/dy sign depends on which edge the drawer is anchored to).
 */
protected boolean onMoveAllowDrag(int x, int y, float dx, float dy) {
    switch (getPosition()) {
    case LEFT:
        return (!mMenuVisible && mInitialMotionX <= mTouchSize && (dx > 0))
                || (mMenuVisible && x >= mOffsetPixels);
    case RIGHT:
        final int width = getWidth();
        return (!mMenuVisible && mInitialMotionX >= width - mTouchSize && (dx < 0))
                || (mMenuVisible && x <= width + mOffsetPixels);
    case TOP:
        return (!mMenuVisible && mInitialMotionY <= mTouchSize && (dy > 0))
                || (mMenuVisible && y >= mOffsetPixels);
    case BOTTOM:
        final int height = getHeight();
        return (!mMenuVisible && mInitialMotionY >= height - mTouchSize && (dy < 0))
                || (mMenuVisible && y <= height + mOffsetPixels);
    }
    return false;
}
/**
 * Applies a drag delta to the drawer offset, clamped to the valid range:
 * [0, mMenuSize] for start-edge positions (LEFT/TOP), [-mMenuSize, 0] for
 * end-edge positions (RIGHT/BOTTOM). Horizontal positions consume dx,
 * vertical positions dy; other positions are ignored, as before.
 */
protected void onMoveEvent(float dx, float dy) {
    final Position position = getPosition();
    final boolean horizontal = position == Position.LEFT || position == Position.RIGHT;
    final float delta = horizontal ? dx : dy;
    if (position == Position.LEFT || position == Position.TOP) {
        setOffsetPixels(Math.min(Math.max(mOffsetPixels + delta, 0), mMenuSize));
    } else if (position == Position.RIGHT || position == Position.BOTTOM) {
        setOffsetPixels(Math.max(Math.min(mOffsetPixels + delta, 0), -mMenuSize));
    }
}
/**
 * Finishes a gesture: when a drag was in progress, flings the drawer open or
 * closed based on the release velocity's sign; otherwise a tap on the
 * content area while the menu is visible closes the menu.
 */
protected void onUpEvent(int x, int y) {
    final int offsetPixels = (int) mOffsetPixels;
    switch (getPosition()) {
    case LEFT: {
        if (mIsDragging) {
            mVelocityTracker.computeCurrentVelocity(1000, mMaxVelocity);
            final int initialVelocity = (int) getXVelocity(mVelocityTracker);
            mLastMotionX = x;
            // Positive fling velocity opens toward mMenuSize, otherwise close.
            animateOffsetTo(initialVelocity > 0 ? mMenuSize : 0,
                    initialVelocity, true);
            // Close the menu when content is clicked while the menu is
            // visible.
        } else if (mMenuVisible && x > offsetPixels) {
            closeMenu();
        }
        break;
    }
    case TOP: {
        if (mIsDragging) {
            mVelocityTracker.computeCurrentVelocity(1000, mMaxVelocity);
            final int initialVelocity = (int) getYVelocity(mVelocityTracker);
            mLastMotionY = y;
            animateOffsetTo(initialVelocity > 0 ? mMenuSize : 0,
                    initialVelocity, true);
            // Close the menu when content is clicked while the menu is
            // visible.
        } else if (mMenuVisible && y > offsetPixels) {
            closeMenu();
        }
        break;
    }
    case RIGHT: {
        final int width = getWidth();
        if (mIsDragging) {
            mVelocityTracker.computeCurrentVelocity(1000, mMaxVelocity);
            final int initialVelocity = (int) getXVelocity(mVelocityTracker);
            mLastMotionX = x;
            // End-edge drawer: open target is the negative offset -mMenuSize.
            animateOffsetTo(initialVelocity > 0 ? 0 : -mMenuSize,
                    initialVelocity, true);
            // Close the menu when content is clicked while the menu is
            // visible.
        } else if (mMenuVisible && x < width + offsetPixels) {
            closeMenu();
        }
        break;
    }
    case BOTTOM: {
        if (mIsDragging) {
            mVelocityTracker.computeCurrentVelocity(1000, mMaxVelocity);
            final int initialVelocity = (int) getYVelocity(mVelocityTracker);
            mLastMotionY = y;
            animateOffsetTo(initialVelocity < 0 ? -mMenuSize : 0,
                    initialVelocity, true);
            // Close the menu when content is clicked while the menu is
            // visible.
        } else if (mMenuVisible && y < getHeight() + offsetPixels) {
            closeMenu();
        }
        break;
    }
    }
}
protected boolean checkTouchSlop(float dx, float dy) {
switch (getPosition()) {
case TOP:
case BOTTOM:
return Math.abs(dy) > mTouchSlop && Math.abs(dy) > Math.abs(dx);
default:
return Math.abs(dx) > mTouchSlop && Math.abs(dx) > Math.abs(dy);
}
}
    /**
     * Decides whether this view should steal the touch stream from its
     * children. Returns true once a drag has been detected, or when the
     * menu is visible and the touch lands on the content area; subsequent
     * events are then delivered to {@link #onTouchEvent(MotionEvent)}.
     */
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        final int action = ev.getAction() & MotionEvent.ACTION_MASK;
        // Gesture ended: reset drag state, release the velocity tracker and
        // settle the drawer to whichever state it is currently closest to.
        if (action == MotionEvent.ACTION_UP
                || action == MotionEvent.ACTION_CANCEL) {
            mActivePointerId = INVALID_POINTER;
            mIsDragging = false;
            if (mVelocityTracker != null) {
                mVelocityTracker.recycle();
                mVelocityTracker = null;
            }
            if (Math.abs(mOffsetPixels) > mMenuSize / 2) {
                openMenu();
            } else {
                closeMenu();
            }
            return false;
        }
        // A down near the closed position while the menu counts as visible:
        // snap fully closed and cancel any running animation/peek.
        if (action == MotionEvent.ACTION_DOWN && mMenuVisible
                && isCloseEnough()) {
            setOffsetPixels(0);
            stopAnimation();
            endPeek();
            setDrawerState(STATE_CLOSED);
            mIsDragging = false;
        }
        // Always intercept events over the content while menu is visible.
        if (mMenuVisible) {
            int index = 0;
            if (mActivePointerId != INVALID_POINTER) {
                index = ev.findPointerIndex(mActivePointerId);
                // Fall back to pointer 0 if the active pointer is gone.
                index = index == -1 ? 0 : index;
            }
            final int x = (int) ev.getX(index);
            final int y = (int) ev.getY(index);
            if (isContentTouch(x, y)) {
                return true;
            }
        }
        // With the menu closed, no drag in progress and touch mode NONE there
        // is nothing to intercept.
        if (!mMenuVisible && !mIsDragging && mTouchMode == TOUCH_MODE_NONE) {
            return false;
        }
        // Once dragging, intercept everything except a fresh down.
        if (action != MotionEvent.ACTION_DOWN && mIsDragging) {
            return true;
        }
        switch (action) {
            case MotionEvent.ACTION_DOWN: {
                // Record the gesture origin and active pointer.
                mLastMotionX = mInitialMotionX = ev.getX();
                mLastMotionY = mInitialMotionY = ev.getY();
                final boolean allowDrag = onDownAllowDrag((int) mLastMotionX,
                        (int) mLastMotionY);
                mActivePointerId = ev.getPointerId(0);
                if (allowDrag) {
                    setDrawerState(mMenuVisible ? STATE_OPEN : STATE_CLOSED);
                    stopAnimation();
                    endPeek();
                    mIsDragging = false;
                }
                break;
            }
            case MotionEvent.ACTION_MOVE: {
                final int activePointerId = mActivePointerId;
                if (activePointerId == INVALID_POINTER) {
                    // If we don't have a valid id, the touch down wasn't on
                    // content.
                    break;
                }
                final int pointerIndex = ev.findPointerIndex(activePointerId);
                if (pointerIndex == -1) {
                    // Active pointer vanished mid-gesture; abort the drag and
                    // close the menu.
                    mIsDragging = false;
                    mActivePointerId = INVALID_POINTER;
                    endDrag();
                    closeMenu(true);
                    return false;
                }
                final float x = ev.getX(pointerIndex);
                final float dx = x - mLastMotionX;
                final float y = ev.getY(pointerIndex);
                final float dy = y - mLastMotionY;
                if (checkTouchSlop(dx, dy)) {
                    // Let scrollable children consume the gesture when the
                    // listener says they can scroll in this direction.
                    if (mOnInterceptMoveEventListener != null
                            && (mTouchMode == TOUCH_MODE_FULLSCREEN || mMenuVisible)
                            && canChildrenScroll((int) dx, (int) dy, (int) x,
                                    (int) y)) {
                        endDrag(); // Release the velocity tracker
                        requestDisallowInterceptTouchEvent(true);
                        return false;
                    }
                    final boolean allowDrag = onMoveAllowDrag((int) x, (int) y, dx,
                            dy);
                    if (allowDrag) {
                        setDrawerState(STATE_DRAGGING);
                        mIsDragging = true;
                        mLastMotionX = x;
                        mLastMotionY = y;
                    }
                }
                break;
            }
            case MotionEvent.ACTION_POINTER_UP:
                // A secondary pointer lifted; re-sync the last motion
                // coordinates to the (possibly new) active pointer.
                onPointerUp(ev);
                mLastMotionX = ev.getX(ev.findPointerIndex(mActivePointerId));
                mLastMotionY = ev.getY(ev.findPointerIndex(mActivePointerId));
                break;
        }
        if (mVelocityTracker == null)
            mVelocityTracker = VelocityTracker.obtain();
        mVelocityTracker.addMovement(ev);
        return mIsDragging;
    }
    /**
     * Handles the touch stream once intercepted: detects the start of a drag
     * (touch-slop check), tracks movement to drive the drawer offset, and on
     * up/cancel delegates to {@link #onUpEvent(int, int)} to settle the
     * drawer.
     */
    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        // Nothing to handle while the menu is closed, no drag is active and
        // touch mode is NONE.
        if (!mMenuVisible && !mIsDragging && mTouchMode == TOUCH_MODE_NONE) {
            return false;
        }
        final int action = ev.getAction() & MotionEvent.ACTION_MASK;
        if (mVelocityTracker == null)
            mVelocityTracker = VelocityTracker.obtain();
        mVelocityTracker.addMovement(ev);
        switch (action) {
            case MotionEvent.ACTION_DOWN: {
                // Record the gesture origin and active pointer.
                mLastMotionX = mInitialMotionX = ev.getX();
                mLastMotionY = mInitialMotionY = ev.getY();
                final boolean allowDrag = onDownAllowDrag((int) mLastMotionX,
                        (int) mLastMotionY);
                mActivePointerId = ev.getPointerId(0);
                if (allowDrag) {
                    stopAnimation();
                    endPeek();
                    startLayerTranslation();
                }
                break;
            }
            case MotionEvent.ACTION_MOVE: {
                final int pointerIndex = ev.findPointerIndex(mActivePointerId);
                if (pointerIndex == -1) {
                    // Active pointer vanished mid-gesture; abort the drag and
                    // close the menu.
                    mIsDragging = false;
                    mActivePointerId = INVALID_POINTER;
                    endDrag();
                    closeMenu(true);
                    return false;
                }
                if (!mIsDragging) {
                    // Not dragging yet: see if this move crosses the touch slop.
                    final float x = ev.getX(pointerIndex);
                    final float dx = x - mLastMotionX;
                    final float y = ev.getY(pointerIndex);
                    final float dy = y - mLastMotionY;
                    if (checkTouchSlop(dx, dy)) {
                        final boolean allowDrag = onMoveAllowDrag((int) x, (int) y,
                                dx, dy);
                        if (allowDrag) {
                            setDrawerState(STATE_DRAGGING);
                            mIsDragging = true;
                            mLastMotionX = x;
                            mLastMotionY = y;
                        } else {
                            // Drag not allowed from here; re-anchor the gesture
                            // origin so later deltas are measured from this point.
                            mInitialMotionX = x;
                            mInitialMotionY = y;
                        }
                    }
                }
                if (mIsDragging) {
                    // Dragging: feed the delta into the drawer offset.
                    startLayerTranslation();
                    final float x = ev.getX(pointerIndex);
                    final float dx = x - mLastMotionX;
                    final float y = ev.getY(pointerIndex);
                    final float dy = y - mLastMotionY;
                    mLastMotionX = x;
                    mLastMotionY = y;
                    onMoveEvent(dx, dy);
                }
                break;
            }
            case MotionEvent.ACTION_CANCEL:
            case MotionEvent.ACTION_UP: {
                int index = ev.findPointerIndex(mActivePointerId);
                // Fall back to pointer 0 if the active pointer is gone.
                index = index == -1 ? 0 : index;
                final int x = (int) ev.getX(index);
                final int y = (int) ev.getY(index);
                onUpEvent(x, y);
                mActivePointerId = INVALID_POINTER;
                mIsDragging = false;
                break;
            }
            case MotionEvent.ACTION_POINTER_DOWN:
                // A secondary pointer went down; make it the active pointer.
                final int index = (ev.getAction() & MotionEvent.ACTION_POINTER_INDEX_MASK) >> MotionEvent.ACTION_POINTER_INDEX_SHIFT;
                mLastMotionX = ev.getX(index);
                mLastMotionY = ev.getY(index);
                mActivePointerId = ev.getPointerId(index);
                break;
            case MotionEvent.ACTION_POINTER_UP:
                // A secondary pointer lifted; re-sync the last motion
                // coordinates to the (possibly new) active pointer.
                onPointerUp(ev);
                mLastMotionX = ev.getX(ev.findPointerIndex(mActivePointerId));
                mLastMotionY = ev.getY(ev.findPointerIndex(mActivePointerId));
                break;
        }
        return true;
    }
private void onPointerUp(MotionEvent ev) {
final int pointerIndex = ev.getActionIndex();
final int pointerId = ev.getPointerId(pointerIndex);
if (pointerId == mActivePointerId) {
final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
mLastMotionX = ev.getX(newPointerIndex);
mActivePointerId = ev.getPointerId(newPointerIndex);
if (mVelocityTracker != null) {
mVelocityTracker.clear();
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.dbcp.hive;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.jdbc.HiveDriver;
import org.apache.nifi.annotation.behavior.RequiresInstanceClassLoading;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnDisabled;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.components.resource.ResourceCardinality;
import org.apache.nifi.components.resource.ResourceType;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.controller.ControllerServiceInitializationContext;
import org.apache.nifi.dbcp.DBCPValidator;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.hadoop.KerberosProperties;
import org.apache.nifi.hadoop.SecurityUtil;
import org.apache.nifi.kerberos.KerberosCredentialsService;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.security.krb.KerberosKeytabUser;
import org.apache.nifi.security.krb.KerberosLoginException;
import org.apache.nifi.security.krb.KerberosPasswordUser;
import org.apache.nifi.security.krb.KerberosUser;
import org.apache.nifi.util.hive.AuthenticationFailedException;
import org.apache.nifi.util.hive.HiveConfigurator;
import org.apache.nifi.util.hive.ValidationResources;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.security.PrivilegedExceptionAction;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
/**
* Implementation for Database Connection Pooling Service used for Apache Hive
* connections. Apache DBCP is used for connection pooling functionality.
*/
@RequiresInstanceClassLoading
@Tags({"hive", "dbcp", "jdbc", "database", "connection", "pooling", "store"})
@CapabilityDescription("Provides Database Connection Pooling Service for Apache Hive 3.x. Connections can be asked from pool and returned after usage.")
public class Hive3ConnectionPool extends AbstractControllerService implements Hive3DBCPService {
private static final String ALLOW_EXPLICIT_KEYTAB = "NIFI_ALLOW_EXPLICIT_KEYTAB";
/**
* Copied from {@link GenericObjectPoolConfig.DEFAULT_MIN_IDLE} in Commons-DBCP 2.6.0
*/
private static final String DEFAULT_MIN_IDLE = "0";
/**
* Copied from {@link GenericObjectPoolConfig.DEFAULT_MAX_IDLE} in Commons-DBCP 2.6.0
*/
private static final String DEFAULT_MAX_IDLE = "8";
/**
* Copied from private variable {@link BasicDataSource.maxConnLifetimeMillis} in Commons-DBCP 2.6.0
*/
private static final String DEFAULT_MAX_CONN_LIFETIME = "-1";
/**
* Copied from {@link GenericObjectPoolConfig.DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS} in Commons-DBCP 2.6.0
*/
private static final String DEFAULT_EVICTION_RUN_PERIOD = String.valueOf(-1L);
/**
* Copied from {@link GenericObjectPoolConfig.DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS} in Commons-DBCP 2.6.0
* and converted from 1800000L to "1800000 millis" to "30 mins"
*/
private static final String DEFAULT_MIN_EVICTABLE_IDLE_TIME = "30 mins";
/**
* Copied from {@link GenericObjectPoolConfig.DEFAULT_SOFT_MIN_EVICTABLE_IDLE_TIME_MILLIS} in Commons-DBCP 2.6.0
*/
private static final String DEFAULT_SOFT_MIN_EVICTABLE_IDLE_TIME = String.valueOf(-1L);
static final PropertyDescriptor DATABASE_URL = new PropertyDescriptor.Builder()
.name("hive-db-connect-url")
.displayName("Database Connection URL")
.description("A database connection URL used to connect to a database. May contain database system name, host, port, database name and some parameters."
+ " The exact syntax of a database connection URL is specified by the Hive documentation. For example, the server principal is often included "
+ "as a connection parameter when connecting to a secure Hive server.")
.defaultValue(null)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
static final PropertyDescriptor HIVE_CONFIGURATION_RESOURCES = new PropertyDescriptor.Builder()
.name("hive-config-resources")
.displayName("Hive Configuration Resources")
.description("A file or comma separated list of files which contains the Hive configuration (hive-site.xml, e.g.). Without this, Hadoop "
+ "will search the classpath for a 'hive-site.xml' file or will revert to a default configuration. Note that to enable authentication "
+ "with Kerberos e.g., the appropriate properties must be set in the configuration files. Please see the Hive documentation for more details.")
.required(false)
.identifiesExternalResource(ResourceCardinality.MULTIPLE, ResourceType.FILE)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
static final PropertyDescriptor DB_USER = new PropertyDescriptor.Builder()
.name("hive-db-user")
.displayName("Database User")
.description("Database user name")
.defaultValue(null)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
static final PropertyDescriptor DB_PASSWORD = new PropertyDescriptor.Builder()
.name("hive-db-password")
.displayName("Password")
.description("The password for the database user")
.defaultValue(null)
.required(false)
.sensitive(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
static final PropertyDescriptor MAX_WAIT_TIME = new PropertyDescriptor.Builder()
.name("hive-max-wait-time")
.displayName("Max Wait Time")
.description("The maximum amount of time that the pool will wait (when there are no available connections) "
+ " for a connection to be returned before failing, or -1 to wait indefinitely. ")
.defaultValue("500 millis")
.required(true)
.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
static final PropertyDescriptor MAX_TOTAL_CONNECTIONS = new PropertyDescriptor.Builder()
.name("hive-max-total-connections")
.displayName("Max Total Connections")
.description("The maximum number of active connections that can be allocated from this pool at the same time, "
+ "or negative for no limit.")
.defaultValue("8")
.required(true)
.addValidator(StandardValidators.INTEGER_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
static final PropertyDescriptor VALIDATION_QUERY = new PropertyDescriptor.Builder()
.name("Validation-query")
.displayName("Validation query")
.description("Validation query used to validate connections before returning them. "
+ "When a borrowed connection is invalid, it gets dropped and a new valid connection will be returned. "
+ "NOTE: Using validation may have a performance penalty.")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor MIN_IDLE = new PropertyDescriptor.Builder()
.displayName("Minimum Idle Connections")
.name("dbcp-min-idle-conns")
.description("The minimum number of connections that can remain idle in the pool, without extra ones being " +
"created, or zero to create none.")
.defaultValue(DEFAULT_MIN_IDLE)
.required(false)
.addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor MAX_IDLE = new PropertyDescriptor.Builder()
.displayName("Max Idle Connections")
.name("dbcp-max-idle-conns")
.description("The maximum number of connections that can remain idle in the pool, without extra ones being " +
"released, or negative for no limit.")
.defaultValue(DEFAULT_MAX_IDLE)
.required(false)
.addValidator(StandardValidators.INTEGER_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor MAX_CONN_LIFETIME = new PropertyDescriptor.Builder()
.displayName("Max Connection Lifetime")
.name("dbcp-max-conn-lifetime")
.description("The maximum lifetime in milliseconds of a connection. After this time is exceeded the " +
"connection will fail the next activation, passivation or validation test. A value of zero or less " +
"means the connection has an infinite lifetime.")
.defaultValue(DEFAULT_MAX_CONN_LIFETIME)
.required(false)
.addValidator(DBCPValidator.CUSTOM_TIME_PERIOD_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor EVICTION_RUN_PERIOD = new PropertyDescriptor.Builder()
.displayName("Time Between Eviction Runs")
.name("dbcp-time-between-eviction-runs")
.description("The number of milliseconds to sleep between runs of the idle connection evictor thread. When " +
"non-positive, no idle connection evictor thread will be run.")
.defaultValue(DEFAULT_EVICTION_RUN_PERIOD)
.required(false)
.addValidator(DBCPValidator.CUSTOM_TIME_PERIOD_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor MIN_EVICTABLE_IDLE_TIME = new PropertyDescriptor.Builder()
.displayName("Minimum Evictable Idle Time")
.name("dbcp-min-evictable-idle-time")
.description("The minimum amount of time a connection may sit idle in the pool before it is eligible for eviction.")
.defaultValue(DEFAULT_MIN_EVICTABLE_IDLE_TIME)
.required(false)
.addValidator(DBCPValidator.CUSTOM_TIME_PERIOD_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor SOFT_MIN_EVICTABLE_IDLE_TIME = new PropertyDescriptor.Builder()
.displayName("Soft Minimum Evictable Idle Time")
.name("dbcp-soft-min-evictable-idle-time")
.description("The minimum amount of time a connection may sit idle in the pool before it is eligible for " +
"eviction by the idle connection evictor, with the extra condition that at least a minimum number of" +
" idle connections remain in the pool. When the not-soft version of this option is set to a positive" +
" value, it is examined first by the idle connection evictor: when idle connections are visited by " +
"the evictor, idle time is first compared against it (without considering the number of idle " +
"connections in the pool) and then against this soft option, including the minimum idle connections " +
"constraint.")
.defaultValue(DEFAULT_SOFT_MIN_EVICTABLE_IDLE_TIME)
.required(false)
.addValidator(DBCPValidator.CUSTOM_TIME_PERIOD_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
private static final PropertyDescriptor KERBEROS_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder()
.name("kerberos-credentials-service")
.displayName("Kerberos Credentials Service")
.description("Specifies the Kerberos Credentials Controller Service that should be used for authenticating with Kerberos")
.identifiesControllerService(KerberosCredentialsService.class)
.required(false)
.build();
private List<PropertyDescriptor> properties;
private String connectionUrl = "unknown";
// Holder of cached Configuration information so validation does not reload the same config over and over
private final AtomicReference<ValidationResources> validationResourceHolder = new AtomicReference<>();
private volatile BasicDataSource dataSource;
private volatile HiveConfigurator hiveConfigurator = new HiveConfigurator();
private volatile UserGroupInformation ugi;
private final AtomicReference<KerberosUser> kerberosUserReference = new AtomicReference<>();
private volatile File kerberosConfigFile = null;
private volatile KerberosProperties kerberosProperties;
@Override
protected void init(final ControllerServiceInitializationContext context) {
List<PropertyDescriptor> props = new ArrayList<>();
props.add(DATABASE_URL);
props.add(HIVE_CONFIGURATION_RESOURCES);
props.add(DB_USER);
props.add(DB_PASSWORD);
props.add(MAX_WAIT_TIME);
props.add(MAX_TOTAL_CONNECTIONS);
props.add(VALIDATION_QUERY);
props.add(MIN_IDLE);
props.add(MAX_IDLE);
props.add(MAX_CONN_LIFETIME);
props.add(EVICTION_RUN_PERIOD);
props.add(MIN_EVICTABLE_IDLE_TIME);
props.add(SOFT_MIN_EVICTABLE_IDLE_TIME);
props.add(KERBEROS_CREDENTIALS_SERVICE);
kerberosConfigFile = context.getKerberosConfigurationFile();
kerberosProperties = new KerberosProperties(kerberosConfigFile);
props.add(kerberosProperties.getKerberosPrincipal());
props.add(kerberosProperties.getKerberosKeytab());
props.add(kerberosProperties.getKerberosPassword());
properties = props;
}
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
}
@Override
protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
boolean confFileProvided = validationContext.getProperty(HIVE_CONFIGURATION_RESOURCES).isSet();
final List<ValidationResult> problems = new ArrayList<>();
if (confFileProvided) {
final String explicitPrincipal = validationContext.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
final String explicitKeytab = validationContext.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
final String explicitPassword = validationContext.getProperty(kerberosProperties.getKerberosPassword()).getValue();
final KerberosCredentialsService credentialsService = validationContext.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
final String resolvedPrincipal;
final String resolvedKeytab;
if (credentialsService != null) {
resolvedPrincipal = credentialsService.getPrincipal();
resolvedKeytab = credentialsService.getKeytab();
} else {
resolvedPrincipal = explicitPrincipal;
resolvedKeytab = explicitKeytab;
}
final String configFiles = validationContext.getProperty(HIVE_CONFIGURATION_RESOURCES).evaluateAttributeExpressions().getValue();
problems.addAll(hiveConfigurator.validate(configFiles, resolvedPrincipal, resolvedKeytab, explicitPassword, validationResourceHolder, getLogger()));
if (credentialsService != null && (explicitPrincipal != null || explicitKeytab != null || explicitPassword != null)) {
problems.add(new ValidationResult.Builder()
.subject("Kerberos Credentials")
.valid(false)
.explanation("Cannot specify a Kerberos Credentials Service while also specifying a Kerberos Principal, Kerberos Keytab, or Kerberos Password")
.build());
}
if (!isAllowExplicitKeytab() && explicitKeytab != null) {
problems.add(new ValidationResult.Builder()
.subject("Kerberos Credentials")
.valid(false)
.explanation("The '" + ALLOW_EXPLICIT_KEYTAB + "' system environment variable is configured to forbid explicitly configuring Kerberos Keytab in processors. "
+ "The Kerberos Credentials Service should be used instead of setting the Kerberos Keytab or Kerberos Principal property.")
.build());
}
}
return problems;
}
/**
* Configures connection pool by creating an instance of the
* {@link BasicDataSource} based on configuration provided with
* {@link ConfigurationContext}.
* <p>
* This operation makes no guarantees that the actual connection could be
* made since the underlying system may still go off-line during normal
* operation of the connection pool.
* <p/>
* As of Apache NiFi 1.5.0, due to changes made to
* {@link SecurityUtil#loginKerberos(Configuration, String, String)}, which is used by this class invoking
* {@link HiveConfigurator#authenticate(Configuration, String, String)}
* to authenticate a principal with Kerberos, Hive controller services no longer use a separate thread to
* relogin, and instead call {@link UserGroupInformation#checkTGTAndReloginFromKeytab()} from
* {@link Hive3ConnectionPool#getConnection()}. The relogin request is performed in a synchronized block to prevent
* threads from requesting concurrent relogins. For more information, please read the documentation for
* {@link SecurityUtil#loginKerberos(Configuration, String, String)}.
* <p/>
* In previous versions of NiFi, a {@link org.apache.nifi.hadoop.KerberosTicketRenewer} was started by
* {@link HiveConfigurator#authenticate(Configuration, String, String, long)} when the Hive
* controller service was enabled. The use of a separate thread to explicitly relogin could cause race conditions
* with the implicit relogin attempts made by hadoop/Hive code on a thread that references the same
* {@link UserGroupInformation} instance. One of these threads could leave the
* {@link javax.security.auth.Subject} in {@link UserGroupInformation} to be cleared or in an unexpected state
* while the other thread is attempting to use the {@link javax.security.auth.Subject}, resulting in failed
* authentication attempts that would leave the Hive controller service in an unrecoverable state.
*
* @see SecurityUtil#loginKerberos(Configuration, String, String)
* @see HiveConfigurator#authenticate(Configuration, String, String)
* @see HiveConfigurator#authenticate(Configuration, String, String, long)
* @param context the configuration context
* @throws InitializationException if unable to create a database connection
*/
@OnEnabled
public void onConfigured(final ConfigurationContext context) throws InitializationException {
ComponentLog log = getLogger();
final String configFiles = context.getProperty(HIVE_CONFIGURATION_RESOURCES).evaluateAttributeExpressions().getValue();
final Configuration hiveConfig = hiveConfigurator.getConfigurationFromFiles(configFiles);
final String validationQuery = context.getProperty(VALIDATION_QUERY).evaluateAttributeExpressions().getValue();
// add any dynamic properties to the Hive configuration
for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
final PropertyDescriptor descriptor = entry.getKey();
if (descriptor.isDynamic()) {
hiveConfig.set(descriptor.getName(), context.getProperty(descriptor).evaluateAttributeExpressions().getValue());
}
}
final String drv = HiveDriver.class.getName();
if (SecurityUtil.isSecurityEnabled(hiveConfig)) {
final String explicitPrincipal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
final String explicitKeytab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
final String explicitPassword = context.getProperty(kerberosProperties.getKerberosPassword()).getValue();
final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
final String resolvedPrincipal;
final String resolvedKeytab;
if (credentialsService != null) {
resolvedPrincipal = credentialsService.getPrincipal();
resolvedKeytab = credentialsService.getKeytab();
} else {
resolvedPrincipal = explicitPrincipal;
resolvedKeytab = explicitKeytab;
}
if (resolvedKeytab != null) {
kerberosUserReference.set(new KerberosKeytabUser(resolvedPrincipal, resolvedKeytab));
log.info("Hive Security Enabled, logging in as principal {} with keytab {}", new Object[] {resolvedPrincipal, resolvedKeytab});
} else if (explicitPassword != null) {
kerberosUserReference.set(new KerberosPasswordUser(resolvedPrincipal, explicitPassword));
log.info("Hive Security Enabled, logging in as principal {} with password", new Object[] {resolvedPrincipal});
} else {
throw new InitializationException("Unable to authenticate with Kerberos, no keytab or password was provided");
}
try {
ugi = hiveConfigurator.authenticate(hiveConfig, kerberosUserReference.get());
} catch (AuthenticationFailedException ae) {
log.error(ae.getMessage(), ae);
throw new InitializationException(ae);
}
getLogger().info("Successfully logged in as principal " + resolvedPrincipal);
}
final String user = context.getProperty(DB_USER).evaluateAttributeExpressions().getValue();
final String passw = context.getProperty(DB_PASSWORD).evaluateAttributeExpressions().getValue();
final Long maxWaitMillis = context.getProperty(MAX_WAIT_TIME).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS);
final Integer maxTotal = context.getProperty(MAX_TOTAL_CONNECTIONS).evaluateAttributeExpressions().asInteger();
final Integer minIdle = context.getProperty(MIN_IDLE).evaluateAttributeExpressions().asInteger();
final Integer maxIdle = context.getProperty(MAX_IDLE).evaluateAttributeExpressions().asInteger();
final Long maxConnLifetimeMillis = extractMillisWithInfinite(context.getProperty(MAX_CONN_LIFETIME).evaluateAttributeExpressions());
final Long timeBetweenEvictionRunsMillis = extractMillisWithInfinite(context.getProperty(EVICTION_RUN_PERIOD).evaluateAttributeExpressions());
final Long minEvictableIdleTimeMillis = extractMillisWithInfinite(context.getProperty(MIN_EVICTABLE_IDLE_TIME).evaluateAttributeExpressions());
final Long softMinEvictableIdleTimeMillis = extractMillisWithInfinite(context.getProperty(SOFT_MIN_EVICTABLE_IDLE_TIME).evaluateAttributeExpressions());
dataSource = new BasicDataSource();
dataSource.setDriverClassName(drv);
connectionUrl = context.getProperty(DATABASE_URL).evaluateAttributeExpressions().getValue();
if (validationQuery != null && !validationQuery.isEmpty()) {
dataSource.setValidationQuery(validationQuery);
dataSource.setTestOnBorrow(true);
}
dataSource.setUrl(connectionUrl);
dataSource.setUsername(user);
dataSource.setPassword(passw);
dataSource.setMaxWaitMillis(maxWaitMillis);
dataSource.setMaxTotal(maxTotal);
dataSource.setMinIdle(minIdle);
dataSource.setMaxIdle(maxIdle);
dataSource.setMaxConnLifetimeMillis(maxConnLifetimeMillis);
dataSource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
dataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
dataSource.setSoftMinEvictableIdleTimeMillis(softMinEvictableIdleTimeMillis);
}
private Long extractMillisWithInfinite(PropertyValue prop) {
return "-1".equals(prop.getValue()) ? -1 : prop.asTimePeriod(TimeUnit.MILLISECONDS);
}
/**
* Shutdown pool, close all open connections.
*/
@OnDisabled
public void shutdown() {
try {
dataSource.close();
} catch (final SQLException e) {
throw new ProcessException(e);
}
}
@Override
public Connection getConnection() throws ProcessException {
try {
if (ugi != null) {
/*
* Explicitly check the TGT and relogin if necessary with the KerberosUser instance. No synchronization
* is necessary in the client code, since AbstractKerberosUser's checkTGTAndRelogin method is synchronized.
*/
getLogger().trace("getting UGI instance");
if (kerberosUserReference.get() != null) {
// if there's a KerberosUser associated with this UGI, check the TGT and relogin if it is close to expiring
KerberosUser kerberosUser = kerberosUserReference.get();
getLogger().debug("kerberosUser is " + kerberosUser);
try {
getLogger().debug("checking TGT on kerberosUser " + kerberosUser);
kerberosUser.checkTGTAndRelogin();
} catch (final KerberosLoginException e) {
throw new ProcessException("Unable to relogin with kerberos credentials for " + kerberosUser.getPrincipal(), e);
}
} else {
getLogger().debug("kerberosUser was null, will not refresh TGT with KerberosUser");
// no synchronization is needed for UserGroupInformation.checkTGTAndReloginFromKeytab; UGI handles the synchronization internally
ugi.checkTGTAndReloginFromKeytab();
}
try {
return ugi.doAs((PrivilegedExceptionAction<Connection>) () -> dataSource.getConnection());
} catch (UndeclaredThrowableException e) {
Throwable cause = e.getCause();
if (cause instanceof SQLException) {
throw (SQLException) cause;
} else {
throw e;
}
}
} else {
getLogger().info("Simple Authentication");
return dataSource.getConnection();
}
} catch (SQLException | IOException | InterruptedException e) {
getLogger().error("Error getting Hive connection", e);
throw new ProcessException(e);
}
}
@Override
public String toString() {
return "Hive3ConnectionPool[id=" + getIdentifier() + "]";
}
@Override
public String getConnectionURL() {
return connectionUrl;
}
/*
* Overridable by subclasses in the same package, mainly intended for testing purposes to allow verification without having to set environment variables.
*/
boolean isAllowExplicitKeytab() {
return Boolean.parseBoolean(System.getenv(ALLOW_EXPLICIT_KEYTAB));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.camel.CamelContext;
import org.apache.camel.NamedNode;
import org.apache.camel.StaticService;
import org.apache.camel.ThreadPoolRejectedPolicy;
import org.apache.camel.model.OptionalIdentifiedDefinition;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.ProcessorDefinitionHelper;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.spi.ExecutorServiceManager;
import org.apache.camel.spi.LifecycleStrategy;
import org.apache.camel.spi.ThreadPoolFactory;
import org.apache.camel.spi.ThreadPoolProfile;
import org.apache.camel.support.ServiceSupport;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.StopWatch;
import org.apache.camel.util.TimeUtils;
import org.apache.camel.util.URISupport;
import org.apache.camel.util.concurrent.CamelThreadFactory;
import org.apache.camel.util.concurrent.SizedScheduledExecutorService;
import org.apache.camel.util.concurrent.ThreadHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Default {@link org.apache.camel.spi.ExecutorServiceManager}.
 * <p>
 * Creates thread pools from {@link ThreadPoolProfile}s via a pluggable
 * {@link ThreadPoolFactory}, tracks every pool it creates so they can be
 * force-shutdown as a fail-safe when the {@link CamelContext} stops, and
 * notifies the registered {@link LifecycleStrategy}s (e.g. for JMX enlistment)
 * whenever a pool is created or removed.
 */
public class DefaultExecutorServiceManager extends ServiceSupport implements ExecutorServiceManager {
    private static final Logger LOG = LoggerFactory.getLogger(DefaultExecutorServiceManager.class);
    private final CamelContext camelContext;
    // pluggable factory actually instantiating the pools; replaceable via setter
    private ThreadPoolFactory threadPoolFactory = new DefaultThreadPoolFactory();
    // every pool created through this manager, used as fail-safe shutdown list
    private final List<ExecutorService> executorServices = new CopyOnWriteArrayList<>();
    // pattern for thread names; #camelId# is resolved eagerly, other placeholders on demand
    private String threadNamePattern;
    // how long (millis) to await termination during graceful shutdown
    private long shutdownAwaitTermination = 10000;
    private String defaultThreadPoolProfileId = "defaultThreadPoolProfile";
    private final Map<String, ThreadPoolProfile> threadPoolProfiles = new ConcurrentHashMap<>();
    // the built-in default profile; kept so custom default profiles can inherit its values
    private ThreadPoolProfile defaultProfile;
    /**
     * Creates the manager and registers the built-in default thread pool profile
     * (10 core / 20 max threads, 60s keep-alive, queue of 1000, CallerRuns policy).
     */
    public DefaultExecutorServiceManager(CamelContext camelContext) {
        this.camelContext = camelContext;
        defaultProfile = new ThreadPoolProfile(defaultThreadPoolProfileId);
        defaultProfile.setDefaultProfile(true);
        defaultProfile.setPoolSize(10);
        defaultProfile.setMaxPoolSize(20);
        defaultProfile.setKeepAliveTime(60L);
        defaultProfile.setTimeUnit(TimeUnit.SECONDS);
        defaultProfile.setMaxQueueSize(1000);
        defaultProfile.setAllowCoreThreadTimeOut(false);
        defaultProfile.setRejectedPolicy(ThreadPoolRejectedPolicy.CallerRuns);
        registerThreadPoolProfile(defaultProfile);
    }
    @Override
    public ThreadPoolFactory getThreadPoolFactory() {
        return threadPoolFactory;
    }
    @Override
    public void setThreadPoolFactory(ThreadPoolFactory threadPoolFactory) {
        this.threadPoolFactory = threadPoolFactory;
    }
    /**
     * Registers the profile under its (mandatory, non-empty) id, replacing any
     * previously registered profile with the same id.
     */
    @Override
    public void registerThreadPoolProfile(ThreadPoolProfile profile) {
        ObjectHelper.notNull(profile, "profile");
        ObjectHelper.notEmpty(profile.getId(), "id", profile);
        threadPoolProfiles.put(profile.getId(), profile);
    }
    @Override
    public ThreadPoolProfile getThreadPoolProfile(String id) {
        return threadPoolProfiles.get(id);
    }
    @Override
    public ThreadPoolProfile getDefaultThreadPoolProfile() {
        return getThreadPoolProfile(defaultThreadPoolProfileId);
    }
    /**
     * Replaces the default profile: the previous default is unregistered, the new
     * profile inherits any unset values from the built-in defaults, and lookups of
     * the default profile id from now on resolve to the new profile's id.
     */
    @Override
    public void setDefaultThreadPoolProfile(ThreadPoolProfile defaultThreadPoolProfile) {
        threadPoolProfiles.remove(defaultThreadPoolProfileId);
        defaultThreadPoolProfile.addDefaults(defaultProfile);
        LOG.info("Using custom DefaultThreadPoolProfile: {}", defaultThreadPoolProfile);
        this.defaultThreadPoolProfileId = defaultThreadPoolProfile.getId();
        defaultThreadPoolProfile.setDefaultProfile(true);
        registerThreadPoolProfile(defaultThreadPoolProfile);
    }
    @Override
    public String getThreadNamePattern() {
        return threadNamePattern;
    }
    @Override
    public void setThreadNamePattern(String threadNamePattern) {
        // must set camel id here in the pattern and let the other placeholders be resolved on demand
        this.threadNamePattern = threadNamePattern.replaceFirst("#camelId#", this.camelContext.getName());
    }
    @Override
    public long getShutdownAwaitTermination() {
        return shutdownAwaitTermination;
    }
    @Override
    public void setShutdownAwaitTermination(long shutdownAwaitTermination) {
        this.shutdownAwaitTermination = shutdownAwaitTermination;
    }
    @Override
    public String resolveThreadName(String name) {
        return ThreadHelper.resolveThreadName(threadNamePattern, name);
    }
    /**
     * Creates a single (daemon) thread, named per the thread name pattern, that is
     * not tracked by this manager.
     */
    @Override
    public Thread newThread(String name, Runnable runnable) {
        ThreadFactory factory = createThreadFactory(name, true);
        return factory.newThread(runnable);
    }
    @Override
    public ExecutorService newDefaultThreadPool(Object source, String name) {
        return newThreadPool(source, name, getDefaultThreadPoolProfile());
    }
    @Override
    public ScheduledExecutorService newDefaultScheduledThreadPool(Object source, String name) {
        return newScheduledThreadPool(source, name, getDefaultThreadPoolProfile());
    }
    /**
     * Creates a thread pool from the profile registered under the given id, or
     * returns {@code null} when no such profile exists.
     */
    @Override
    public ExecutorService newThreadPool(Object source, String name, String profileId) {
        ThreadPoolProfile profile = getThreadPoolProfile(profileId);
        if (profile != null) {
            return newThreadPool(source, name, profile);
        } else {
            // no profile with that id
            return null;
        }
    }
    /**
     * Creates a thread pool from the given profile. The profile is mutated in
     * place: any unset values are filled in from the default profile. The created
     * pool is tracked and the lifecycle strategies are notified.
     */
    @Override
    public ExecutorService newThreadPool(Object source, String name, ThreadPoolProfile profile) {
        String sanitizedName = URISupport.sanitizeUri(name);
        ObjectHelper.notNull(profile, "ThreadPoolProfile");
        ThreadPoolProfile defaultProfile = getDefaultThreadPoolProfile();
        profile.addDefaults(defaultProfile);
        ThreadFactory threadFactory = createThreadFactory(sanitizedName, true);
        ExecutorService executorService = threadPoolFactory.newThreadPool(profile, threadFactory);
        onThreadPoolCreated(executorService, source, profile.getId());
        if (LOG.isDebugEnabled()) {
            LOG.debug("Created new ThreadPool for source: {} with name: {}. -> {}", source, sanitizedName, executorService);
        }
        return executorService;
    }
    @Override
    public ExecutorService newThreadPool(Object source, String name, int poolSize, int maxPoolSize) {
        ThreadPoolProfile profile = new ThreadPoolProfile(name);
        profile.setPoolSize(poolSize);
        profile.setMaxPoolSize(maxPoolSize);
        return newThreadPool(source, name, profile);
    }
    @Override
    public ExecutorService newSingleThreadExecutor(Object source, String name) {
        return newFixedThreadPool(source, name, 1);
    }
    @Override
    public ExecutorService newCachedThreadPool(Object source, String name) {
        String sanitizedName = URISupport.sanitizeUri(name);
        ExecutorService answer = threadPoolFactory.newCachedThreadPool(createThreadFactory(sanitizedName, true));
        onThreadPoolCreated(answer, source, null);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Created new CachedThreadPool for source: {} with name: {}. -> {}", source, sanitizedName, answer);
        }
        return answer;
    }
    /**
     * Creates a fixed-size pool: core and max pool size equal, zero keep-alive.
     */
    @Override
    public ExecutorService newFixedThreadPool(Object source, String name, int poolSize) {
        ThreadPoolProfile profile = new ThreadPoolProfile(name);
        profile.setPoolSize(poolSize);
        profile.setMaxPoolSize(poolSize);
        profile.setKeepAliveTime(0L);
        return newThreadPool(source, name, profile);
    }
    @Override
    public ScheduledExecutorService newSingleThreadScheduledExecutor(Object source, String name) {
        return newScheduledThreadPool(source, name, 1);
    }
    /**
     * Creates a scheduled pool from the given profile (mutated in place with
     * defaults, like {@code newThreadPool}); the pool is tracked and the lifecycle
     * strategies are notified.
     */
    @Override
    public ScheduledExecutorService newScheduledThreadPool(Object source, String name, ThreadPoolProfile profile) {
        String sanitizedName = URISupport.sanitizeUri(name);
        profile.addDefaults(getDefaultThreadPoolProfile());
        ScheduledExecutorService answer = threadPoolFactory.newScheduledThreadPool(profile, createThreadFactory(sanitizedName, true));
        onThreadPoolCreated(answer, source, null);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Created new ScheduledThreadPool for source: {} with name: {} -> {}", source, sanitizedName, answer);
        }
        return answer;
    }
    @Override
    public ScheduledExecutorService newScheduledThreadPool(Object source, String name, String profileId) {
        ThreadPoolProfile profile = getThreadPoolProfile(profileId);
        if (profile != null) {
            return newScheduledThreadPool(source, name, profile);
        } else {
            // no profile with that id
            return null;
        }
    }
    @Override
    public ScheduledExecutorService newScheduledThreadPool(Object source, String name, int poolSize) {
        ThreadPoolProfile profile = new ThreadPoolProfile(name);
        profile.setPoolSize(poolSize);
        return newScheduledThreadPool(source, name, profile);
    }
    /** Shuts down immediately without awaiting termination (await of 0 millis). */
    @Override
    public void shutdown(ExecutorService executorService) {
        doShutdown(executorService, 0, false);
    }
    @Override
    public void shutdownGraceful(ExecutorService executorService) {
        doShutdown(executorService, getShutdownAwaitTermination(), false);
    }
    @Override
    public void shutdownGraceful(ExecutorService executorService, long shutdownAwaitTermination) {
        doShutdown(executorService, shutdownAwaitTermination, false);
    }
    /**
     * Shuts the pool down (graceful first, then forced), notifies lifecycle
     * strategies of the removal, and — unless {@code failSafe} — drops the pool
     * from the tracked list.
     *
     * @param executorService the pool to shutdown; no-op when {@code null}
     * @param shutdownAwaitTermination millis to await termination per attempt; 0 skips waiting
     * @param failSafe whether this is the fail-safe shutdown from {@link #doShutdown()}
     * @return whether a forced shutdown had to be performed (logged as WARN)
     */
    private boolean doShutdown(ExecutorService executorService, long shutdownAwaitTermination, boolean failSafe) {
        if (executorService == null) {
            return false;
        }
        boolean warned = false;
        // shutting down a thread pool is a 2 step process. First we try graceful, and if that fails, then we go more aggressively
        // and try shutting down again. In both cases we wait at most the given shutdown timeout value given
        // (total wait could then be 2 x shutdownAwaitTermination, but when we shutdown the 2nd time we are aggressive and thus
        // we ought to shutdown much faster)
        if (!executorService.isShutdown()) {
            StopWatch watch = new StopWatch();
            LOG.trace("Shutdown of ExecutorService: {} with await termination: {} millis", executorService, shutdownAwaitTermination);
            executorService.shutdown();
            if (shutdownAwaitTermination > 0) {
                try {
                    if (!awaitTermination(executorService, shutdownAwaitTermination)) {
                        warned = true;
                        LOG.warn("Forcing shutdown of ExecutorService: {} due first await termination elapsed.", executorService);
                        executorService.shutdownNow();
                        // we are now shutting down aggressively, so wait to see if we can completely shutdown or not
                        if (!awaitTermination(executorService, shutdownAwaitTermination)) {
                            LOG.warn("Cannot completely force shutdown of ExecutorService: {} due second await termination elapsed.", executorService);
                        }
                    }
                } catch (InterruptedException e) {
                    warned = true;
                    LOG.warn("Forcing shutdown of ExecutorService: {} due interrupted.", executorService);
                    // we were interrupted during shutdown, so force shutdown
                    executorService.shutdownNow();
                }
            }
            // if we logged at WARN level, then report at INFO level when we are complete so the end user can see this in the log
            if (warned) {
                LOG.info("Shutdown of ExecutorService: {} is shutdown: {} and terminated: {} took: {}.",
                        executorService, executorService.isShutdown(), executorService.isTerminated(), TimeUtils.printDuration(watch.taken()));
            } else if (LOG.isDebugEnabled()) {
                LOG.debug("Shutdown of ExecutorService: {} is shutdown: {} and terminated: {} took: {}.",
                        executorService, executorService.isShutdown(), executorService.isTerminated(), TimeUtils.printDuration(watch.taken()));
            }
        }
        // let lifecycle strategy be notified as well which can let it be managed in JMX as well
        ThreadPoolExecutor threadPool = null;
        if (executorService instanceof ThreadPoolExecutor) {
            threadPool = (ThreadPoolExecutor) executorService;
        } else if (executorService instanceof SizedScheduledExecutorService) {
            threadPool = ((SizedScheduledExecutorService) executorService).getScheduledThreadPoolExecutor();
        }
        if (threadPool != null) {
            for (LifecycleStrategy lifecycle : camelContext.getLifecycleStrategies()) {
                lifecycle.onThreadPoolRemove(camelContext, threadPool);
            }
        }
        // remove reference as its shutdown (do not remove if fail-safe)
        if (!failSafe) {
            executorServices.remove(executorService);
        }
        return warned;
    }
    @Override
    public List<Runnable> shutdownNow(ExecutorService executorService) {
        return doShutdownNow(executorService, false);
    }
    /**
     * Forces an immediate shutdown, notifies lifecycle strategies of the removal,
     * and — unless {@code failSafe} — drops the pool from the tracked list.
     *
     * @return the tasks that never commenced execution, or {@code null} when the
     *         pool was already shutdown
     */
    private List<Runnable> doShutdownNow(ExecutorService executorService, boolean failSafe) {
        ObjectHelper.notNull(executorService, "executorService");
        List<Runnable> answer = null;
        if (!executorService.isShutdown()) {
            if (failSafe) {
                // log as warn, as we shutdown as fail-safe, so end user should see more details in the log.
                LOG.warn("Forcing shutdown of ExecutorService: {}", executorService);
            } else {
                LOG.debug("Forcing shutdown of ExecutorService: {}", executorService);
            }
            answer = executorService.shutdownNow();
            if (LOG.isTraceEnabled()) {
                LOG.trace("Shutdown of ExecutorService: {} is shutdown: {} and terminated: {}.",
                        executorService, executorService.isShutdown(), executorService.isTerminated());
            }
        }
        // let lifecycle strategy be notified as well which can let it be managed in JMX as well
        ThreadPoolExecutor threadPool = null;
        if (executorService instanceof ThreadPoolExecutor) {
            threadPool = (ThreadPoolExecutor) executorService;
        } else if (executorService instanceof SizedScheduledExecutorService) {
            threadPool = ((SizedScheduledExecutorService) executorService).getScheduledThreadPoolExecutor();
        }
        if (threadPool != null) {
            for (LifecycleStrategy lifecycle : camelContext.getLifecycleStrategies()) {
                lifecycle.onThreadPoolRemove(camelContext, threadPool);
            }
        }
        // remove reference as its shutdown (do not remove if fail-safe)
        if (!failSafe) {
            executorServices.remove(executorService);
        }
        return answer;
    }
    /**
     * Awaits termination in intervals of at most 2 seconds, logging progress after
     * each elapsed interval so the end user can see the shutdown is ongoing.
     *
     * @return whether the pool terminated within the given time
     */
    @Override
    public boolean awaitTermination(ExecutorService executorService, long shutdownAwaitTermination) throws InterruptedException {
        // log progress every 2nd second so end user is aware of we are shutting down
        StopWatch watch = new StopWatch();
        long interval = Math.min(2000, shutdownAwaitTermination);
        boolean done = false;
        while (!done && interval > 0) {
            if (executorService.awaitTermination(interval, TimeUnit.MILLISECONDS)) {
                done = true;
            } else {
                LOG.info("Waited {} for ExecutorService: {} to terminate...", TimeUtils.printDuration(watch.taken()), executorService);
                // recalculate interval
                interval = Math.min(2000, shutdownAwaitTermination - watch.taken());
            }
        }
        return done;
    }
    /**
     * Strategy callback when a new {@link java.util.concurrent.ExecutorService} have been created.
     *
     * @param executorService the created {@link java.util.concurrent.ExecutorService}
     */
    protected void onNewExecutorService(ExecutorService executorService) {
        // noop
    }
    @Override
    protected void doStart() throws Exception {
        if (threadNamePattern == null) {
            // set default name pattern which includes the camel context name
            threadNamePattern = "Camel (" + camelContext.getName() + ") thread ##counter# - #name#";
        }
    }
    @Override
    protected void doStop() throws Exception {
        // noop
    }
    @Override
    protected void doShutdown() throws Exception {
        // shutdown all remainder executor services by looping and doing this aggressively
        // as by normal all threads pool should have been shutdown using proper lifecycle
        // by their EIPs, components etc. This is acting as a fail-safe during shutdown
        // of CamelContext itself.
        Set<ExecutorService> forced = new LinkedHashSet<>();
        if (!executorServices.isEmpty()) {
            // at first give a bit of time to shutdown nicely as the thread pool is most likely in the process of being shutdown also
            LOG.debug("Giving time for {} ExecutorService's to shutdown properly (acting as fail-safe)", executorServices.size());
            for (ExecutorService executorService : executorServices) {
                try {
                    boolean warned = doShutdown(executorService, getShutdownAwaitTermination(), true);
                    // remember the thread pools that was forced to shutdown (eg warned)
                    if (warned) {
                        forced.add(executorService);
                    }
                } catch (Throwable e) {
                    // only log if something goes wrong as we want to shutdown them all
                    LOG.warn("Error occurred during shutdown of ExecutorService: "
                            + executorService + ". This exception will be ignored.", e);
                }
            }
        }
        // log the thread pools which was forced to shutdown so it may help the user to identify a problem of his
        if (!forced.isEmpty()) {
            LOG.warn("Forced shutdown of {} ExecutorService's which has not been shutdown properly (acting as fail-safe)", forced.size());
            for (ExecutorService executorService : forced) {
                LOG.warn("  forced -> {}", executorService);
            }
        }
        forced.clear();
        // clear list
        executorServices.clear();
        // do not clear the default profile as we could potential be restarted
        Iterator<ThreadPoolProfile> it = threadPoolProfiles.values().iterator();
        while (it.hasNext()) {
            ThreadPoolProfile profile = it.next();
            if (!profile.isDefaultProfile()) {
                it.remove();
            }
        }
    }
    /**
     * Invoked when a new thread pool is created.
     * This implementation will invoke the {@link LifecycleStrategy#onThreadPoolAdd(org.apache.camel.CamelContext,
     * java.util.concurrent.ThreadPoolExecutor, String, String, String, String) LifecycleStrategy.onThreadPoolAdd} method,
     * which for example will enlist the thread pool in JMX management.
     *
     * @param executorService the thread pool
     * @param source the source to use the thread pool
     * @param threadPoolProfileId profile id, if the thread pool was created from a thread pool profile
     */
    private void onThreadPoolCreated(ExecutorService executorService, Object source, String threadPoolProfileId) {
        // add to internal list of thread pools
        executorServices.add(executorService);
        String id;
        String sourceId = null;
        String routeId = null;
        // extract id from source
        if (source instanceof NamedNode) {
            // NOTE(review): assumes every NamedNode is an OptionalIdentifiedDefinition — verify
            id = ((OptionalIdentifiedDefinition<?>) source).idOrCreate(this.camelContext.getNodeIdFactory());
            // and let source be the short name of the pattern
            sourceId = ((NamedNode) source).getShortName();
        } else if (source instanceof String) {
            id = (String) source;
        } else if (source != null) {
            if (source instanceof StaticService) {
                // the source is static service so its name would be unique
                id = source.getClass().getSimpleName();
            } else {
                // fallback and use the simple class name with hashcode for the id so its unique for this given source
                id = source.getClass().getSimpleName() + "(" + ObjectHelper.getIdentityHashCode(source) + ")";
            }
        } else {
            // no source, so fallback and use the simple class name from thread pool and its hashcode identity so its unique
            id = executorService.getClass().getSimpleName() + "(" + ObjectHelper.getIdentityHashCode(executorService) + ")";
        }
        // id is mandatory
        ObjectHelper.notEmpty(id, "id for thread pool " + executorService);
        // extract route id if possible
        if (source instanceof ProcessorDefinition) {
            RouteDefinition route = ProcessorDefinitionHelper.getRoute((ProcessorDefinition<?>) source);
            if (route != null) {
                routeId = route.idOrCreate(this.camelContext.getNodeIdFactory());
            }
        }
        // let lifecycle strategy be notified as well which can let it be managed in JMX as well
        ThreadPoolExecutor threadPool = null;
        if (executorService instanceof ThreadPoolExecutor) {
            threadPool = (ThreadPoolExecutor) executorService;
        } else if (executorService instanceof SizedScheduledExecutorService) {
            threadPool = ((SizedScheduledExecutorService) executorService).getScheduledThreadPoolExecutor();
        }
        if (threadPool != null) {
            for (LifecycleStrategy lifecycle : camelContext.getLifecycleStrategies()) {
                lifecycle.onThreadPoolAdd(camelContext, threadPool, id, sourceId, routeId, threadPoolProfileId);
            }
        }
        // now call strategy to allow custom logic
        onNewExecutorService(executorService);
    }
    /** Creates a {@link CamelThreadFactory} naming threads per the configured pattern. */
    private ThreadFactory createThreadFactory(String name, boolean isDaemon) {
        return new CamelThreadFactory(threadNamePattern, name, isDaemon);
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudformation.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result of a CloudFormation {@code ListExports} call: the page of exported
 * output values plus an optional pagination token.
 * <p>
 * NOTE(review): this class is code-generated by the AWS SDK generator
 * ({@code @Generated} below); do not hand-edit its structure — regenerate instead.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudformation-2010-05-15/ListExports" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListExportsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
    /**
     * <p>
     * The output for the <a>ListExports</a> action.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<Export> exports;
    /**
     * <p>
     * If the output exceeds 100 exported output values, a string that identifies the next page of exports. If there is
     * no additional page, this value is null.
     * </p>
     */
    private String nextToken;
    /**
     * <p>
     * The output for the <a>ListExports</a> action.
     * </p>
     *
     * @return The output for the <a>ListExports</a> action.
     */
    public java.util.List<Export> getExports() {
        // lazily initialize so the getter never returns null
        if (exports == null) {
            exports = new com.amazonaws.internal.SdkInternalList<Export>();
        }
        return exports;
    }
    /**
     * <p>
     * The output for the <a>ListExports</a> action.
     * </p>
     *
     * @param exports
     *        The output for the <a>ListExports</a> action.
     */
    public void setExports(java.util.Collection<Export> exports) {
        if (exports == null) {
            this.exports = null;
            return;
        }
        // defensive copy into the SDK's internal list type
        this.exports = new com.amazonaws.internal.SdkInternalList<Export>(exports);
    }
    /**
     * <p>
     * The output for the <a>ListExports</a> action.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setExports(java.util.Collection)} or {@link #withExports(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param exports
     *        The output for the <a>ListExports</a> action.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListExportsResult withExports(Export... exports) {
        if (this.exports == null) {
            setExports(new com.amazonaws.internal.SdkInternalList<Export>(exports.length));
        }
        for (Export ele : exports) {
            this.exports.add(ele);
        }
        return this;
    }
    /**
     * <p>
     * The output for the <a>ListExports</a> action.
     * </p>
     *
     * @param exports
     *        The output for the <a>ListExports</a> action.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListExportsResult withExports(java.util.Collection<Export> exports) {
        setExports(exports);
        return this;
    }
    /**
     * <p>
     * If the output exceeds 100 exported output values, a string that identifies the next page of exports. If there is
     * no additional page, this value is null.
     * </p>
     *
     * @param nextToken
     *        If the output exceeds 100 exported output values, a string that identifies the next page of exports. If
     *        there is no additional page, this value is null.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }
    /**
     * <p>
     * If the output exceeds 100 exported output values, a string that identifies the next page of exports. If there is
     * no additional page, this value is null.
     * </p>
     *
     * @return If the output exceeds 100 exported output values, a string that identifies the next page of exports. If
     *         there is no additional page, this value is null.
     */
    public String getNextToken() {
        return this.nextToken;
    }
    /**
     * <p>
     * If the output exceeds 100 exported output values, a string that identifies the next page of exports. If there is
     * no additional page, this value is null.
     * </p>
     *
     * @param nextToken
     *        If the output exceeds 100 exported output values, a string that identifies the next page of exports. If
     *        there is no additional page, this value is null.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListExportsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getExports() != null)
            sb.append("Exports: ").append(getExports()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof ListExportsResult == false)
            return false;
        ListExportsResult other = (ListExportsResult) obj;
        // generated null-safe field comparison: XOR detects exactly one side null
        if (other.getExports() == null ^ this.getExports() == null)
            return false;
        if (other.getExports() != null && other.getExports().equals(this.getExports()) == false)
            return false;
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getExports() == null) ? 0 : getExports().hashCode());
        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return hashCode;
    }
    @Override
    public ListExportsResult clone() {
        try {
            // shallow copy; this class is Cloneable so this cannot realistically fail
            return (ListExportsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
import junit.framework.TestCase;
/**
* Tests notifications of CacheLifecycleListener from GemFireCacheImpl.
*
* @author Kirk Lund
*/
@Category(IntegrationTest.class)
public class CacheLifecycleListenerJUnitTest {
    /** Verifies that adding and removing a {@code null} listener is a silent no-op (no NPE). */
    @Test
    public void testAddAndRemoveNull() {
        GemFireCacheImpl.addCacheLifecycleListener(null);
        GemFireCacheImpl.removeCacheLifecycleListener(null);
    }
    /** Verifies that removing a listener that was never registered does not throw. */
    @Test
    public void testRemoveNonExistent() {
        final List<CacheLifecycleCallback> cacheCreatedCallbacks = new ArrayList<CacheLifecycleCallback>();
        final List<CacheLifecycleCallback> cacheClosedCallbacks = new ArrayList<CacheLifecycleCallback>();
        final TestCacheLifecycleListener listener = new TestCacheLifecycleListener(cacheCreatedCallbacks, cacheClosedCallbacks);
        GemFireCacheImpl.removeCacheLifecycleListener(listener);
    }
    /**
     * Verifies the happy path: a registered listener receives exactly one create
     * callback when the cache is created and exactly one close callback when it is
     * closed, each carrying the cache instance.
     */
    @Test
    public void testCallbacks() {
        final List<CacheLifecycleCallback> cacheCreatedCallbacks = new ArrayList<CacheLifecycleCallback>();
        final List<CacheLifecycleCallback> cacheClosedCallbacks = new ArrayList<CacheLifecycleCallback>();
        final TestCacheLifecycleListener listener = new TestCacheLifecycleListener(cacheCreatedCallbacks, cacheClosedCallbacks);
        try {
            GemFireCacheImpl.addCacheLifecycleListener(listener);
            // assert no create callback
            assertTrue(cacheCreatedCallbacks.isEmpty());
            // assert no close callback
            assertTrue(cacheClosedCallbacks.isEmpty());
            final Properties props = new Properties();
            // loner distributed system (no multicast, no locators) so the test is self-contained
            props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
            props.setProperty(DistributionConfig.LOCATORS_NAME, "");
            final GemFireCacheImpl cache = (GemFireCacheImpl) new CacheFactory(props).create();
            try {
                // assert one create callback
                assertFalse(cacheCreatedCallbacks.isEmpty());
                assertEquals(1, cacheCreatedCallbacks.size());
                assertEquals(cache, cacheCreatedCallbacks.get(0).getCache());
                // assert no close callback
                assertTrue(cacheClosedCallbacks.isEmpty());
            } finally {
                cache.close();
            }
            // assert one create callback
            assertFalse(cacheCreatedCallbacks.isEmpty());
            assertEquals(1, cacheCreatedCallbacks.size());
            assertEquals(cache, cacheCreatedCallbacks.get(0).getCache());
            // assert one close callback
            assertFalse(cacheClosedCallbacks.isEmpty());
            assertEquals(1, cacheClosedCallbacks.size());
            assertEquals(cache, cacheClosedCallbacks.get(0).getCache());
        } finally {
            GemFireCacheImpl.removeCacheLifecycleListener(listener);
        }
    }
    /**
     * Verifies that a listener removed before the cache is created receives no
     * callbacks at all — neither on create nor on close.
     */
    @Test
    public void testRemoveBeforeCreate() {
        final List<CacheLifecycleCallback> cacheCreatedCallbacks = new ArrayList<CacheLifecycleCallback>();
        final List<CacheLifecycleCallback> cacheClosedCallbacks = new ArrayList<CacheLifecycleCallback>();
        final TestCacheLifecycleListener listener = new TestCacheLifecycleListener(cacheCreatedCallbacks, cacheClosedCallbacks);
        try {
            GemFireCacheImpl.addCacheLifecycleListener(listener);
            GemFireCacheImpl.removeCacheLifecycleListener(listener);
            // assert no create callback
            assertTrue(cacheCreatedCallbacks.isEmpty());
            // assert no close callback
            assertTrue(cacheClosedCallbacks.isEmpty());
            final Properties props = new Properties();
            // loner distributed system (no multicast, no locators) so the test is self-contained
            props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
            props.setProperty(DistributionConfig.LOCATORS_NAME, "");
            final GemFireCacheImpl cache = (GemFireCacheImpl) new CacheFactory(props).create();
            try {
                // assert no create callback
                assertTrue(cacheCreatedCallbacks.isEmpty());
                // assert no close callback
                assertTrue(cacheClosedCallbacks.isEmpty());
            } finally {
                cache.close();
            }
            // assert no create callback
            assertTrue(cacheCreatedCallbacks.isEmpty());
            // assert no close callback
            assertTrue(cacheClosedCallbacks.isEmpty());
        } finally {
            GemFireCacheImpl.removeCacheLifecycleListener(listener);
        }
    }
    /**
     * Verifies that a listener removed after cache creation but before close
     * receives the create callback yet not the close callback.
     */
    @Test
    public void testRemoveBeforeClose() {
        final List<CacheLifecycleCallback> cacheCreatedCallbacks = new ArrayList<CacheLifecycleCallback>();
        final List<CacheLifecycleCallback> cacheClosedCallbacks = new ArrayList<CacheLifecycleCallback>();
        final TestCacheLifecycleListener listener = new TestCacheLifecycleListener(cacheCreatedCallbacks, cacheClosedCallbacks);
        try {
            GemFireCacheImpl.addCacheLifecycleListener(listener);
            // assert no create callback
            assertTrue(cacheCreatedCallbacks.isEmpty());
            // assert no close callback
            assertTrue(cacheClosedCallbacks.isEmpty());
            final Properties props = new Properties();
            // loner distributed system (no multicast, no locators) so the test is self-contained
            props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
            props.setProperty(DistributionConfig.LOCATORS_NAME, "");
            final GemFireCacheImpl cache = (GemFireCacheImpl) new CacheFactory(props).create();
            try {
                // assert one create callback
                assertFalse(cacheCreatedCallbacks.isEmpty());
                assertEquals(1, cacheCreatedCallbacks.size());
                assertEquals(cache, cacheCreatedCallbacks.get(0).getCache());
                // assert no close callback
                assertTrue(cacheClosedCallbacks.isEmpty());
            } finally {
                // remove BEFORE close so the close callback must not be delivered
                GemFireCacheImpl.removeCacheLifecycleListener(listener);
                cache.close();
            }
            // assert one create callback
            assertFalse(cacheCreatedCallbacks.isEmpty());
            assertEquals(1, cacheCreatedCallbacks.size());
            assertEquals(cache, cacheCreatedCallbacks.get(0).getCache());
            // assert no close callback
            assertTrue(cacheClosedCallbacks.isEmpty());
        } finally {
            GemFireCacheImpl.removeCacheLifecycleListener(listener);
        }
    }
/**
 * Verifies that a single registered listener keeps receiving callbacks
 * across repeated cache create/close cycles: after two cycles it has
 * accumulated two create and two close notifications, in order.
 */
@Test
public void testCallbacksRepeat() {
    final List<CacheLifecycleCallback> cacheCreatedCallbacks = new ArrayList<CacheLifecycleCallback>();
    final List<CacheLifecycleCallback> cacheClosedCallbacks = new ArrayList<CacheLifecycleCallback>();
    final TestCacheLifecycleListener listener = new TestCacheLifecycleListener(cacheCreatedCallbacks, cacheClosedCallbacks);
    try {
        GemFireCacheImpl.addCacheLifecycleListener(listener);
        // assert no create callback
        assertTrue(cacheCreatedCallbacks.isEmpty());
        // assert no close callback
        assertTrue(cacheClosedCallbacks.isEmpty());
        // Loner member: no multicast port, no locators.
        final Properties props = new Properties();
        props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
        props.setProperty(DistributionConfig.LOCATORS_NAME, "");
        // First cycle: create and close cache1.
        final GemFireCacheImpl cache1 = (GemFireCacheImpl) new CacheFactory(props).create();
        try {
            // assert one create callback
            assertFalse(cacheCreatedCallbacks.isEmpty());
            assertEquals(1, cacheCreatedCallbacks.size());
            assertEquals(cache1, cacheCreatedCallbacks.get(0).getCache());
            // assert no close callback
            assertTrue(cacheClosedCallbacks.isEmpty());
        } finally {
            cache1.close();
        }
        // assert one create callback
        assertFalse(cacheCreatedCallbacks.isEmpty());
        assertEquals(1, cacheCreatedCallbacks.size());
        assertEquals(cache1, cacheCreatedCallbacks.get(0).getCache());
        // assert one close callback
        assertFalse(cacheClosedCallbacks.isEmpty());
        assertEquals(1, cacheClosedCallbacks.size());
        assertEquals(cache1, cacheClosedCallbacks.get(0).getCache());
        // Second cycle: the same listener must also see cache2's lifecycle.
        final GemFireCacheImpl cache2 = (GemFireCacheImpl) new CacheFactory(props).create();
        try {
            // assert two create callback
            assertFalse(cacheCreatedCallbacks.isEmpty());
            assertEquals(2, cacheCreatedCallbacks.size());
            assertEquals(cache1, cacheCreatedCallbacks.get(0).getCache());
            assertEquals(cache2, cacheCreatedCallbacks.get(1).getCache());
            // assert one close callback
            assertFalse(cacheClosedCallbacks.isEmpty());
            assertEquals(1, cacheClosedCallbacks.size());
            assertEquals(cache1, cacheClosedCallbacks.get(0).getCache());
        } finally {
            cache2.close();
        }
        // assert two create callbacks
        assertFalse(cacheCreatedCallbacks.isEmpty());
        assertEquals(2, cacheCreatedCallbacks.size());
        assertEquals(cache1, cacheCreatedCallbacks.get(0).getCache());
        assertEquals(cache2, cacheCreatedCallbacks.get(1).getCache());
        // assert two close callbacks
        assertFalse(cacheClosedCallbacks.isEmpty());
        assertEquals(2, cacheClosedCallbacks.size());
        assertEquals(cache1, cacheClosedCallbacks.get(0).getCache());
        assertEquals(cache2, cacheClosedCallbacks.get(1).getCache());
    } finally {
        GemFireCacheImpl.removeCacheLifecycleListener(listener);
    }
}
/**
 * Verifies that a listener registered AFTER the cache has been created
 * misses the create callback but still receives the close callback.
 */
@Test
public void testAddAfterCreate() {
    final List<CacheLifecycleCallback> cacheCreatedCallbacks = new ArrayList<CacheLifecycleCallback>();
    final List<CacheLifecycleCallback> cacheClosedCallbacks = new ArrayList<CacheLifecycleCallback>();
    final TestCacheLifecycleListener listener = new TestCacheLifecycleListener(cacheCreatedCallbacks, cacheClosedCallbacks);
    // assert no create callback
    assertTrue(cacheCreatedCallbacks.isEmpty());
    // assert no close callback
    assertTrue(cacheClosedCallbacks.isEmpty());
    // Loner member: no multicast port, no locators. The cache is created
    // while the listener is still unregistered.
    final Properties props = new Properties();
    props.setProperty(DistributionConfig.MCAST_PORT_NAME, "0");
    props.setProperty(DistributionConfig.LOCATORS_NAME, "");
    final GemFireCacheImpl cache = (GemFireCacheImpl) new CacheFactory(props).create();
    try {
        try {
            // assert no create callback
            assertTrue(cacheCreatedCallbacks.isEmpty());
            // assert no close callback
            assertTrue(cacheClosedCallbacks.isEmpty());
            // Register only now, between creation and close.
            GemFireCacheImpl.addCacheLifecycleListener(listener);
        } finally {
            cache.close();
        }
        // assert no create callback
        assertTrue(cacheCreatedCallbacks.isEmpty());
        // assert one close callback
        assertFalse(cacheClosedCallbacks.isEmpty());
        assertEquals(1, cacheClosedCallbacks.size());
        assertEquals(cache, cacheClosedCallbacks.get(0).getCache());
    } finally {
        GemFireCacheImpl.removeCacheLifecycleListener(listener);
    }
}
/**
 * Records a single lifecycle notification: the cache instance that was
 * delivered by the callback, plus the wall-clock time of receipt.
 */
static final class CacheLifecycleCallback {
    private final GemFireCacheImpl cache;
    // Captured for potential debugging of callback ordering; none of the
    // tests in this file read it.
    private final long timeStamp;
    CacheLifecycleCallback(GemFireCacheImpl cache) {
        this.cache = cache;
        this.timeStamp = System.currentTimeMillis();
    }
    /** @return the cache instance this callback was fired for */
    GemFireCacheImpl getCache() {
        return this.cache;
    }
}
/**
 * Test double for {@code CacheLifecycleListener} that simply records every
 * create and close notification into the externally supplied lists, so the
 * tests can assert on ordering and counts.
 */
static final class TestCacheLifecycleListener implements CacheLifecycleListener {
    private final List<CacheLifecycleCallback> created;
    private final List<CacheLifecycleCallback> closed;

    TestCacheLifecycleListener(List<CacheLifecycleCallback> cacheCreatedCallbacks,
            List<CacheLifecycleCallback> cacheClosedCallbacks) {
        this.created = cacheCreatedCallbacks;
        this.closed = cacheClosedCallbacks;
    }

    @Override
    public void cacheCreated(GemFireCacheImpl cache) {
        created.add(new CacheLifecycleCallback(cache));
    }

    @Override
    public void cacheClosed(GemFireCacheImpl cache) {
        closed.add(new CacheLifecycleCallback(cache));
    }
}
}
| |
/**
* Copyright 2006-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.generator.internal;
import static org.mybatis.generator.internal.util.StringUtility.stringHasValue;
import static org.mybatis.generator.internal.util.messages.Messages.getString;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import org.mybatis.generator.api.CommentGenerator;
import org.mybatis.generator.api.FullyQualifiedTable;
import org.mybatis.generator.api.JavaFormatter;
import org.mybatis.generator.api.Plugin;
import org.mybatis.generator.api.IntrospectedColumn;
import org.mybatis.generator.api.IntrospectedTable;
import org.mybatis.generator.api.JavaTypeResolver;
import org.mybatis.generator.api.XmlFormatter;
import org.mybatis.generator.api.dom.DefaultJavaFormatter;
import org.mybatis.generator.api.dom.DefaultXmlFormatter;
import org.mybatis.generator.codegen.ibatis2.IntrospectedTableIbatis2Java2Impl;
import org.mybatis.generator.codegen.ibatis2.IntrospectedTableIbatis2Java5Impl;
import org.mybatis.generator.codegen.mybatis3.IntrospectedTableMyBatis3Impl;
import org.mybatis.generator.codegen.mybatis3.IntrospectedTableMyBatis3SimpleImpl;
import org.mybatis.generator.config.CommentGeneratorConfiguration;
import org.mybatis.generator.config.Context;
import org.mybatis.generator.config.PluginConfiguration;
import org.mybatis.generator.config.JavaTypeResolverConfiguration;
import org.mybatis.generator.config.PropertyRegistry;
import org.mybatis.generator.config.TableConfiguration;
import org.mybatis.generator.internal.types.JavaTypeResolverDefaultImpl;
import org.mybatis.generator.internal.util.StringUtility;
/**
* This class creates the different objects needed by the generator.
*
* @author Jeff Butler
*/
/**
 * This class creates the different objects needed by the generator.
 *
 * <p>All creation is reflection based. "Internal" objects (plugins, comment
 * generators, type resolvers, formatters, introspected tables/columns) are
 * loaded from the context class loader or this class' own loader; "external"
 * objects (JDBC drivers, root classes, root interfaces) may additionally come
 * from client supplied class loaders registered via
 * {@link #addExternalClassLoader(ClassLoader)}.
 *
 * @author Jeff Butler
 */
public class ObjectFactory {

    /**
     * Class loaders searched for "external" classes - classes that do not
     * depend on any of the generator's types (JDBC drivers, root classes,
     * root interfaces).
     *
     * <p>NOTE(review): reads iterate these lists without synchronization
     * while the add methods are synchronized; registration is presumably
     * expected to complete before generation starts - confirm before relying
     * on concurrent registration.
     */
    private static List<ClassLoader> externalClassLoaders = new ArrayList<ClassLoader>();

    /** Class loaders searched when resolving resources (e.g. properties files). */
    private static List<ClassLoader> resourceClassLoaders = new ArrayList<ClassLoader>();

    /**
     * Utility class. No instances allowed.
     */
    private ObjectFactory() {
        super();
    }

    /**
     * Adds a custom classloader to the collection of classloaders searched for resources. Currently, this is only used
     * when searching for properties files that may be referenced in the configuration file.
     *
     * @param classLoader
     *            the class loader
     */
    public static synchronized void addResourceClassLoader(
            ClassLoader classLoader) {
        ObjectFactory.resourceClassLoaders.add(classLoader);
    }

    /**
     * Adds a custom classloader to the collection of classloaders searched for "external" classes. These are classes
     * that do not depend on any of the generator's classes or interfaces. Examples are JDBC drivers, root classes, root
     * interfaces, etc.
     *
     * @param classLoader
     *            the class loader
     */
    public static synchronized void addExternalClassLoader(
            ClassLoader classLoader) {
        ObjectFactory.externalClassLoaders.add(classLoader);
    }

    /**
     * Returns a class loaded from the registered external classloaders, falling back to
     * {@link #internalClassForName(String)} when none of them can resolve it. This is
     * appropriate for JDBC drivers, model root classes, etc. It is not appropriate for any class that extends one of
     * the supplied classes or interfaces.
     *
     * @param type
     *            the fully qualified class name
     * @return the Class loaded from the external classloader
     * @throws ClassNotFoundException
     *             if no registered or internal class loader can load the class
     */
    public static Class<?> externalClassForName(String type)
            throws ClassNotFoundException {
        for (ClassLoader classLoader : externalClassLoaders) {
            try {
                return Class.forName(type, true, classLoader);
            } catch (Throwable ignored) {
                // deliberately broad: a bad registered loader must not stop
                // the search - fall through to the next loader / fallback
            }
        }
        return internalClassForName(type);
    }

    /**
     * Instantiates (no-arg constructor) a class resolved through the external class loaders.
     *
     * @param type
     *            the fully qualified class name
     * @return a new instance of the class
     * @throws RuntimeException
     *             wrapping the underlying failure when the class cannot be
     *             loaded or instantiated
     */
    public static Object createExternalObject(String type) {
        try {
            return externalClassForName(type).newInstance();
        } catch (Exception e) {
            throw new RuntimeException(getString(
                    "RuntimeError.6", type), e); //$NON-NLS-1$
        }
    }

    /**
     * Returns a class loaded from the thread context class loader, falling
     * back to this class' own loader.
     *
     * @param type
     *            the fully qualified class name
     * @return the class
     * @throws ClassNotFoundException
     *             if neither loader can load the class
     */
    public static Class<?> internalClassForName(String type)
            throws ClassNotFoundException {
        Class<?> clazz = null;
        try {
            ClassLoader cl = Thread.currentThread().getContextClassLoader();
            clazz = Class.forName(type, true, cl);
        } catch (Exception e) {
            // ignore - fall back to this class' own loader below
        }
        if (clazz == null) {
            clazz = Class.forName(type, true, ObjectFactory.class.getClassLoader());
        }
        return clazz;
    }

    /**
     * Resolves a resource by searching the registered resource class loaders
     * first, then the thread context class loader, then this class' loader.
     *
     * @param resource
     *            the resource name
     * @return the resource URL, or {@code null} if not found anywhere
     */
    public static URL getResource(String resource) {
        URL url;
        for (ClassLoader classLoader : resourceClassLoaders) {
            url = classLoader.getResource(resource);
            if (url != null) {
                return url;
            }
        }
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        url = cl.getResource(resource);
        if (url == null) {
            url = ObjectFactory.class.getClassLoader().getResource(resource);
        }
        return url;
    }

    /**
     * Instantiates (no-arg constructor) a class resolved through {@link #internalClassForName(String)}.
     *
     * @param type
     *            the fully qualified class name
     * @return a new instance of the class
     * @throws RuntimeException
     *             wrapping the underlying failure when the class cannot be
     *             loaded or instantiated
     */
    public static Object createInternalObject(String type) {
        try {
            return internalClassForName(type).newInstance();
        } catch (Exception e) {
            throw new RuntimeException(getString(
                    "RuntimeError.6", type), e); //$NON-NLS-1$
        }
    }

    /**
     * Creates the configured {@link JavaTypeResolver} for the context, or the
     * default implementation when none (or "DEFAULT") is configured.
     *
     * @param context
     *            the context
     * @param warnings
     *            list that the resolver will add warnings to
     * @return the java type resolver
     */
    public static JavaTypeResolver createJavaTypeResolver(Context context,
            List<String> warnings) {
        JavaTypeResolverConfiguration config = context
                .getJavaTypeResolverConfiguration();
        String type;
        if (config != null && config.getConfigurationType() != null) {
            type = config.getConfigurationType();
            if ("DEFAULT".equalsIgnoreCase(type)) { //$NON-NLS-1$
                type = JavaTypeResolverDefaultImpl.class.getName();
            }
        } else {
            type = JavaTypeResolverDefaultImpl.class.getName();
        }
        JavaTypeResolver answer = (JavaTypeResolver) createInternalObject(type);
        answer.setWarnings(warnings);
        if (config != null) {
            answer.addConfigurationProperties(config.getProperties());
        }
        answer.setContext(context);
        return answer;
    }

    /**
     * Creates and configures the {@link Plugin} named by the plugin configuration.
     *
     * @param context
     *            the context
     * @param pluginConfiguration
     *            the plugin configuration
     * @return the plugin
     */
    public static Plugin createPlugin(Context context,
            PluginConfiguration pluginConfiguration) {
        Plugin plugin = (Plugin) createInternalObject(pluginConfiguration
                .getConfigurationType());
        plugin.setContext(context);
        plugin.setProperties(pluginConfiguration.getProperties());
        return plugin;
    }

    /**
     * Creates the configured {@link CommentGenerator} for the context, or the
     * default implementation when none is configured.
     *
     * @param context
     *            the context
     * @return the comment generator
     */
    public static CommentGenerator createCommentGenerator(Context context) {
        CommentGeneratorConfiguration config = context
                .getCommentGeneratorConfiguration();
        String type;
        if (config == null || config.getConfigurationType() == null) {
            type = DefaultCommentGenerator.class.getName();
        } else {
            type = config.getConfigurationType();
        }
        CommentGenerator answer = (CommentGenerator) createInternalObject(type);
        if (config != null) {
            answer.addConfigurationProperties(config.getProperties());
        }
        return answer;
    }

    /**
     * Creates the {@link JavaFormatter} configured via the
     * {@code javaFormatter} context property, or the default implementation.
     *
     * @param context
     *            the context
     * @return the java formatter
     */
    public static JavaFormatter createJavaFormatter(Context context) {
        String type = context.getProperty(PropertyRegistry.CONTEXT_JAVA_FORMATTER);
        if (!stringHasValue(type)) {
            type = DefaultJavaFormatter.class.getName();
        }
        JavaFormatter answer = (JavaFormatter) createInternalObject(type);
        answer.setContext(context);
        return answer;
    }

    /**
     * Creates the {@link XmlFormatter} configured via the
     * {@code xmlFormatter} context property, or the default implementation.
     *
     * @param context
     *            the context
     * @return the xml formatter
     */
    public static XmlFormatter createXmlFormatter(Context context) {
        String type = context.getProperty(PropertyRegistry.CONTEXT_XML_FORMATTER);
        if (!stringHasValue(type)) {
            type = DefaultXmlFormatter.class.getName();
        }
        XmlFormatter answer = (XmlFormatter) createInternalObject(type);
        answer.setContext(context);
        return answer;
    }

    /**
     * Creates an {@link IntrospectedTable} bound to the given table and table
     * configuration, using the implementation selected by the context's
     * target runtime.
     *
     * @param tableConfiguration
     *            the table configuration
     * @param table
     *            the table
     * @param context
     *            the context
     * @return the introspected table
     */
    public static IntrospectedTable createIntrospectedTable(
            TableConfiguration tableConfiguration, FullyQualifiedTable table,
            Context context) {
        IntrospectedTable answer = createIntrospectedTableForValidation(context);
        answer.setFullyQualifiedTable(table);
        answer.setTableConfiguration(tableConfiguration);
        return answer;
    }

    /**
     * This method creates an introspected table implementation that is only usable for validation (i.e. for a context
     * to determine if the target is ibatis2 or mybatis3).
     *
     * <p>Unknown runtime names fall through and are treated as fully
     * qualified class names of a custom implementation; note that both
     * "Ibatis3" and "MyBatis3" select the MyBatis3 implementation.
     *
     * @param context
     *            the context
     * @return the introspected table
     */
    public static IntrospectedTable createIntrospectedTableForValidation(Context context) {
        String type = context.getTargetRuntime();
        if (!stringHasValue(type)) {
            type = IntrospectedTableMyBatis3Impl.class.getName();
        } else if ("Ibatis2Java2".equalsIgnoreCase(type)) { //$NON-NLS-1$
            type = IntrospectedTableIbatis2Java2Impl.class.getName();
        } else if ("Ibatis2Java5".equalsIgnoreCase(type)) { //$NON-NLS-1$
            type = IntrospectedTableIbatis2Java5Impl.class.getName();
        } else if ("Ibatis3".equalsIgnoreCase(type)) { //$NON-NLS-1$
            type = IntrospectedTableMyBatis3Impl.class.getName();
        } else if ("MyBatis3".equalsIgnoreCase(type)) { //$NON-NLS-1$
            type = IntrospectedTableMyBatis3Impl.class.getName();
        } else if ("MyBatis3Simple".equalsIgnoreCase(type)) { //$NON-NLS-1$
            type = IntrospectedTableMyBatis3SimpleImpl.class.getName();
        }
        IntrospectedTable answer = (IntrospectedTable) createInternalObject(type);
        answer.setContext(context);
        return answer;
    }

    /**
     * Creates an {@link IntrospectedColumn} using the implementation
     * configured in the context, or the base class when none is configured.
     *
     * @param context
     *            the context
     * @return the introspected column
     */
    public static IntrospectedColumn createIntrospectedColumn(Context context) {
        String type = context.getIntrospectedColumnImpl();
        if (!stringHasValue(type)) {
            type = IntrospectedColumn.class.getName();
        }
        IntrospectedColumn answer = (IntrospectedColumn) createInternalObject(type);
        answer.setContext(context);
        return answer;
    }
}
| |
/*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.hraven.mapreduce;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Mapper;
import com.twitter.hraven.Constants;
import com.twitter.hraven.datasource.JobHistoryRawService;
import com.twitter.hraven.etl.JobFile;
/**
* Used to read records for the processFile (referring to a JobFile). Reads said
* file into the RAW HBase table.
*/
/**
 * Used to read records for the processFile (referring to a JobFile). Reads said
 * file into the RAW HBase table.
 */
public class JobFileRawLoaderMapper extends
    Mapper<JobFile, FileStatus, ImmutableBytesWritable, Put> {

  /** All records are emitted with this empty key; the Put carries the row key. */
  private static final ImmutableBytesWritable EMPTY = new ImmutableBytesWritable();
  private static final Log LOG = LogFactory.getLog(JobFileRawLoaderMapper.class);

  /** Number of job files loaded by this mapper so far (used in log messages). */
  private long keyCount = 0;

  /** When true, also emit a "processed" marker so files get re-picked-up. */
  private boolean forceReprocess = false;

  /**
   * Used to read the files from.
   */
  private FileSystem hdfs;

  /**
   * Job configuration for this job.
   */
  private Configuration myConf;

  /**
   * Service for storing and retrieving job history and conf blobs.
   */
  private JobHistoryRawService rawService = null;

  /**
   * @return the key class for the job output data.
   */
  public static Class<?> getOutputKeyClass() {
    return ImmutableBytesWritable.class;
  }

  /**
   * @return the value class for the job output data.
   */
  public static Class<?> getOutputValueClass() {
    return Put.class;
  }

  @Override
  protected void setup(Context context) throws java.io.IOException,
      InterruptedException {
    myConf = context.getConfiguration();
    hdfs = FileSystem.get(myConf);
    rawService = new JobHistoryRawService(myConf);
    forceReprocess = myConf.getBoolean(Constants.FORCE_REPROCESS_CONF_KEY,
        false);
    LOG.info("forceReprocess=" + forceReprocess);
    keyCount = 0;
  }

  @Override
  protected void map(JobFile jobFile, FileStatus fileStatus, Context context)
      throws IOException, InterruptedException {
    boolean exists = hdfs.exists(fileStatus.getPath());
    if (exists) {
      // Collects the puts to be emitted for this input file.
      List<Put> puts = new LinkedList<Put>();
      // Determine if we need to process this file.
      if (jobFile.isJobConfFile()) {
        keyCount++;
        addPutsForFile(puts, jobFile, fileStatus, "conf",
            Constants.JOBCONF_FILENAME_COL_BYTES,
            Constants.JOBCONF_COL_BYTES,
            Constants.JOBCONF_LAST_MODIFIED_COL_BYTES);
      } else if (jobFile.isJobHistoryFile()) {
        keyCount++;
        addPutsForFile(puts, jobFile, fileStatus, "history",
            Constants.JOBHISTORY_FILENAME_COL_BYTES,
            Constants.JOBHISTORY_COL_BYTES,
            Constants.JOBHISTORY_LAST_MODIFIED_COL_BYTES);
      } else {
        // Was System.out.println; mapper output should go through the logger.
        LOG.info("Skipping Key: " + jobFile.getFilename());
      }
      for (Put put : puts) {
        // Key is ignored, value is a Put
        context.write(EMPTY, put);
      }
    } else {
      // Was System.err.println; log and keep processing remaining files.
      // TODO: have better error handling.
      LOG.error("Unable to find file: " + fileStatus.getPath());
    }
  }

  /**
   * Adds the filename put, the raw-contents put and (when
   * {@link #forceReprocess} is set) the "processed" marker put for one job
   * file. Shared by the job-conf and job-history branches of
   * {@link #map(JobFile, FileStatus, org.apache.hadoop.mapreduce.Mapper.Context)}.
   *
   * @param puts
   *          to add puts to
   * @param jobFile
   *          the job file being loaded
   * @param fileStatus
   *          status of the file in HDFS
   * @param fileKind
   *          short label ("conf" or "history") used only for logging
   * @param filenameColumn
   *          column for the filename (used to re-create the JobHistory URL)
   * @param rawColumn
   *          column for the raw file contents
   * @param lastModifiedColumn
   *          column for the last-modified timestamp
   * @throws IOException
   *           when reading the file fails
   */
  private void addPutsForFile(List<Put> puts, JobFile jobFile,
      FileStatus fileStatus, String fileKind, byte[] filenameColumn,
      byte[] rawColumn, byte[] lastModifiedColumn) throws IOException {
    byte[] rowKey = getRowKeyBytes(jobFile);
    addFileNamePut(puts, rowKey, filenameColumn, jobFile.getFilename());
    addRawPut(puts, rowKey, rawColumn, lastModifiedColumn, fileStatus);
    if (forceReprocess) {
      // Indicate that the RAW was reloaded so that it can be
      // picked up in the new process scan.
      puts.add(rawService.getJobProcessedSuccessPut(rowKey, false));
    }
    LOG.info("Loaded " + fileKind + " file (" + keyCount + ") size: "
        + fileStatus.getLen() + " = " + jobFile.getFilename());
  }

  /**
   * @param jobFile
   * @return the byte representation of the rowkey for the raw table.
   */
  private byte[] getRowKeyBytes(JobFile jobFile) {
    // This is the cluster for which we are processing files.
    String cluster = myConf.get(Constants.CLUSTER_JOB_CONF_KEY);
    return rawService.getRowKey(cluster, jobFile.getJobid());
  }

  /**
   * @param puts
   *          to add puts to
   * @param rowKey
   *          for the raw table
   * @param filenameColumn
   *          which filename this is (could be for the jobConf of jobHistory
   *          file).
   * @param filename
   *          the name of the file.
   */
  private void addFileNamePut(List<Put> puts, byte[] rowKey,
      byte[] filenameColumn, String filename) {
    Put put = new Put(rowKey);
    put.add(Constants.INFO_FAM_BYTES, filenameColumn, Bytes.toBytes(filename));
    puts.add(put);
  }

  /**
   * Call {@link #readJobFile(FileStatus)} and add the raw bytes and the last
   * modified millis to {@code puts}
   *
   * @param puts
   *          to add puts to.
   * @param rowKey
   *          to identify the row in the raw table.
   * @param rawColumn
   *          where to add the raw data in
   * @param fileStatus
   *          Referring to the jobFile to load.
   * @throws IOException
   */
  private void addRawPut(List<Put> puts, byte[] rowKey, byte[] rawColumn,
      byte[] lastModificationColumn, FileStatus fileStatus) throws IOException {
    byte[] rawBytes = readJobFile(fileStatus);
    Put raw = new Put(rowKey);
    byte[] rawLastModifiedMillis = Bytes.toBytes(fileStatus
        .getModificationTime());
    raw.add(Constants.RAW_FAM_BYTES, rawColumn, rawBytes);
    raw.add(Constants.INFO_FAM_BYTES, lastModificationColumn,
        rawLastModifiedMillis);
    puts.add(raw);
  }

  /**
   * Get the raw bytes for this JobFile.
   *
   * @return the contents of the job file.
   * @throws IOException
   *           when bad things happen during reading, or when the file is too
   *           large to be held in a single byte array
   */
  private byte[] readJobFile(FileStatus fileStatus) throws IOException {
    long fileLength = fileStatus.getLen();
    if (fileLength > Integer.MAX_VALUE) {
      // The raw cell is materialized as one byte[]; the previous narrowing
      // cast would have silently truncated files larger than 2 GB.
      throw new IOException("Job file too large to load: "
          + fileStatus.getPath() + " (" + fileLength + " bytes)");
    }
    byte[] rawBytes = new byte[(int) fileLength];
    FSDataInputStream fsdis = null;
    try {
      fsdis = hdfs.open(fileStatus.getPath());
      IOUtils.readFully(fsdis, rawBytes, 0, rawBytes.length);
    } finally {
      IOUtils.closeStream(fsdis);
    }
    return rawBytes;
  }

  @Override
  protected void cleanup(Context context) throws IOException,
      InterruptedException {
    if (rawService != null) {
      rawService.close();
    }
  }
}
| |
/* Copyright (c) 2009, Nathan Freitas, Orbot / The Guardian Project - http://openideals.com/guardian */
/* See LICENSE for licensing information */
package org.torproject.android.service;
import android.util.Log;
import java.io.*;
import java.util.StringTokenizer;
public class TorServiceUtils implements TorServiceConstants {

	/**
	 * Check if we have root access
	 * @return boolean true if we have root
	 */
	public static boolean checkRootAccess() {
		StringBuilder log = new StringBuilder();
		try {
			// Run an empty script just to check root access
			String[] cmd = {"exit 0"};
			int exitCode = TorServiceUtils.doShellCommand(cmd, log, true, true);
			if (exitCode == 0) {
				return true;
			}
		} catch (IOException e) {
			//this means that there is no root to be had (normally)
			Log.e(com.xyrotech.MarzecPro.MarzecPro.TAG, "Error checking for root access", e);
		}
		catch (Exception e) {
			Log.e(com.xyrotech.MarzecPro.MarzecPro.TAG, "Error checking for root access", e);
			//this means that there is no root to be had (normally)
		}
		Log.i(com.xyrotech.MarzecPro.MarzecPro.TAG, "Could not acquire root permissions");
		return false;
	}

	/**
	 * Find the process id of a running command, first via 'pidof' and then,
	 * if that fails or yields nothing, via 'ps'.
	 *
	 * @param command the command (path) whose process id is wanted
	 * @return the process id, or -1 if it could not be determined
	 */
	public static int findProcessId(String command)
	{
		int procId = -1;
		try
		{
			procId = findProcessIdWithPidOf(command);
			if (procId == -1)
				procId = findProcessIdWithPS(command);
		}
		catch (Exception e)
		{
			// 'pidof' may not exist on this device - retry with 'ps' only
			try
			{
				procId = findProcessIdWithPS(command);
			}
			catch (Exception e2)
			{
				Log.w(TAG,"Unable to get proc id for: " + command,e2);
			}
		}
		return procId;
	}

	/**
	 * Find a process id using the 'pidof' shell command.
	 *
	 * @param command the command whose base name is passed to pidof
	 * @return the first parseable pid from the output, or -1 if none
	 * @throws Exception if the shell command cannot be executed
	 */
	public static int findProcessIdWithPidOf(String command) throws Exception
	{
		int procId = -1;
		String baseName = new File(command).getName();
		//fix contributed by mikos on 2010.12.10
		Process procPs = Runtime.getRuntime().exec(new String[] {SHELL_CMD_PIDOF, baseName});
		BufferedReader reader = new BufferedReader(new InputStreamReader(procPs.getInputStream()));
		try
		{
			String line = null;
			while ((line = reader.readLine())!=null)
			{
				try
				{
					//this line should just be the process id
					procId = Integer.parseInt(line.trim());
					break;
				}
				catch (NumberFormatException e)
				{
					Log.e(com.xyrotech.MarzecPro.MarzecPro.TAG, "unable to parse process pid: " + line, e);
				}
			}
		}
		finally
		{
			// BUG FIX: the reader (and the process' stdout) was never closed
			reader.close();
		}
		return procId;
	}

	/**
	 * Find a process id using the 'ps' shell command.
	 *
	 * @param command the command to look for in the ps output
	 * @return the pid column of the first matching line, or -1 if none
	 * @throws Exception if the shell command cannot be executed or parsed
	 */
	public static int findProcessIdWithPS(String command) throws Exception
	{
		int procId = -1;
		Process procPs = Runtime.getRuntime().exec(SHELL_CMD_PS);
		BufferedReader reader = new BufferedReader(new InputStreamReader(procPs.getInputStream()));
		try
		{
			String line = null;
			while ((line = reader.readLine())!=null)
			{
				if (line.indexOf(' ' + command)!=-1)
				{
					StringTokenizer st = new StringTokenizer(line," ");
					st.nextToken(); //proc owner
					procId = Integer.parseInt(st.nextToken().trim());
					break;
				}
			}
		}
		finally
		{
			// BUG FIX: the reader (and the process' stdout) was never closed
			reader.close();
		}
		return procId;
	}

	/**
	 * Run the given shell commands through 'su' (root) or 'sh'.
	 *
	 * @param cmds the commands to execute, one per line
	 * @param log optional buffer for stdout/stderr/exit code; may be null
	 * @param runAsRoot when true the commands are piped into 'su'
	 * @param waitFor when true, consume output and wait for the exit code
	 * @return the process exit code, or -1 when waitFor is false
	 * @throws Exception if the shell cannot be started or written to
	 */
	public static int doShellCommand(String[] cmds, StringBuilder log, boolean runAsRoot, boolean waitFor) throws Exception
	{
		Log.i(com.xyrotech.MarzecPro.MarzecPro.TAG, "executing shell cmds: " + cmds[0] + "; runAsRoot=" + runAsRoot);
		int exitCode = -1;
		Process proc = null;
		if (runAsRoot)
			proc = Runtime.getRuntime().exec("su");
		else
			proc = Runtime.getRuntime().exec("sh");
		OutputStreamWriter out = new OutputStreamWriter(proc.getOutputStream());
		try
		{
			for (int i = 0; i < cmds.length; i++)
			{
				out.write(cmds[i]);
				out.write("\n");
			}
			out.flush();
			out.write("exit\n");
			out.flush();
			if (waitFor)
			{
				final char buf[] = new char[10];
				// Consume the "stdout"
				InputStreamReader reader = new InputStreamReader(proc.getInputStream());
				int read=0;
				while ((read=reader.read(buf)) != -1) {
					if (log != null) log.append(buf, 0, read);
				}
				// Consume the "stderr"
				reader = new InputStreamReader(proc.getErrorStream());
				while ((read=reader.read(buf)) != -1) {
					if (log != null) log.append(buf, 0, read);
				}
				exitCode = proc.waitFor();
				// BUG FIX: these appends previously dereferenced 'log'
				// without the null check used everywhere else (NPE when
				// callers passed log == null with waitFor == true)
				if (log != null)
				{
					log.append("process exit code: ");
					log.append(exitCode);
					log.append("\n");
				}
				Log.i(com.xyrotech.MarzecPro.MarzecPro.TAG, "command process exit value: " + exitCode);
			}
		}
		finally
		{
			// BUG FIX: the shell's stdin writer was never closed
			out.close();
		}
		return exitCode;
	}
}
| |
package apoc.redis;
import io.lettuce.core.Range;
import io.lettuce.core.ScriptOutputType;
import io.lettuce.core.api.StatefulRedisConnection;
import io.lettuce.core.api.sync.RedisCommands;
import io.lettuce.core.codec.ByteArrayCodec;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
public class ByteArrayRedisConnection extends RedisConnection<byte[]> {
private final RedisCommands<byte[], byte[]> commands;
public ByteArrayRedisConnection(String uri, RedisConfig config) {
super(uri, config);
StatefulRedisConnection<byte[], byte[]> connection = this.client.connect(new ByteArrayCodec());
this.commands = connection.sync();
}
// -- String
@Override
public byte[] get(byte[] key) {
return this.commands.get(key);
}
@Override
public byte[] getSet(byte[] key, byte[] value) {
return this.commands.setGet(key, value);
}
@Override
public long append(byte[] key, byte[] value) {
return this.commands.append(key, value);
}
// -- Hashes
@Override
public long incrby(byte[] key, long amount) {
return this.commands.incrby(key, amount);
}
@Override
public long hdel(byte[] key, List<Object> fields) {
return this.commands.hdel(key, toBytesArray(fields));
}
@Override
public boolean hexists(byte[] key, byte[] field) {
return this.commands.hexists(key, field);
}
@Override
public byte[] hget(byte[] key, byte[] field) {
return this.commands.hget(key, field);
}
@Override
public long hincrby(byte[] key, byte[] field, long amount) {
return this.commands.hincrby(key, field, amount);
}
@Override
public boolean hset(byte[] key, byte[] field, byte[] value) {
return this.commands.hset(key, field, value);
}
@Override
public Map<String, Object> hgetall(byte[] key) {
return this.commands.hgetall(key)
.entrySet()
.stream()
.collect(Collectors.toMap(e -> new String(e.getKey()), Map.Entry::getValue));
}
// -- Lists
@Override
public long push(byte[] key, List<Object> values) {
return this.conf.isRight()
? this.commands.rpush(key, toBytesArray(values))
: this.commands.lpush(key, toBytesArray(values));
}
@Override
public byte[] pop(byte[] key) {
return this.conf.isRight()
? this.commands.rpop(key)
: this.commands.lpop(key);
}
@Override
public List<Object> lrange(byte[] key, long start, long stop) {
return new ArrayList<>(this.commands.lrange(key, start, stop));
}
// -- Sets
@Override
public long sadd(byte[] key, List<Object> members) {
return this.commands.sadd(key, toBytesArray(members));
}
@Override
public byte[] spop(byte[] key) {
return this.commands.spop(key);
}
@Override
public long scard(byte[] key) {
return this.commands.scard(key);
}
@Override
public List<Object> smembers(byte[] key) {
return new ArrayList<>(this.commands.smembers(key));
}
@Override
public List<Object> sunion(List<Object> keys) {
return new ArrayList<>(this.commands.sunion(toBytesArray(keys)));
}
// -- Sorted Sets
@Override
public long zadd(byte[] key, Object... scoresAndMembers) {
return this.commands.zadd(key, scoresAndMembers);
}
@Override
public long zcard(byte[] key) {
return this.commands.zcard(key);
}
@Override
public List<Object> zrangebyscore(byte[] source, long min, long max) {
return new ArrayList<>(this.commands.zrangebyscore(source, Range.create(min, max)));
}
@Override
public long zrem(byte[] source, List<Object> members) {
return this.commands.zrem(source, toBytesArray(members));
}
// -- Script
@Override
public byte[] eval(String script, ScriptOutputType outputType, List<Object> keys, List<Object> values) {
return this.commands.eval(script, outputType, toBytesArray(keys), toBytesArray(values));
}
// -- Key
@Override
public boolean copy(byte[] source, byte[] destination) {
return this.commands.copy(source, destination);
}
@Override
public long exists(List<Object> key) {
return this.commands.exists(toBytesArray(key));
}
@Override
public boolean pexpire(byte[] key, long time, boolean isExpireAt) {
return isExpireAt
? this.commands.pexpireat(key, time)
: this.commands.pexpire(key, time);
}
@Override
public boolean persist(byte[] key) {
return this.commands.persist(key);
}
@Override
public long pttl(byte[] key) {
return this.commands.pttl(key);
}
// -- Server
@Override
public String info() {
return this.commands.info();
}
@Override
public String configSet(String parameter, String value) {
return this.commands.configSet(parameter, value);
}
/**
 * CONFIG GET — reads server configuration parameters matching {@code parameter}.
 * The driver's map is wrapped so callers cannot mutate it.
 *
 * @param parameter configuration parameter name (or pattern)
 * @return an unmodifiable view of the parameter/value map
 */
@Override
public Map<String, Object> configGet(String parameter) {
    Map<String, Object> config = commands.configGet(parameter);
    return Collections.unmodifiableMap(config);
}
/**
 * Converts a list whose elements are all {@code byte[]} into a {@code byte[][]}.
 * Throws {@link ClassCastException} if any element is not a {@code byte[]},
 * matching the behavior of the cast applied per element.
 *
 * @param fields list of raw byte-array values
 * @return an array with the elements in list order
 */
private byte[][] toBytesArray(List<Object> fields) {
    byte[][] result = new byte[fields.size()][];
    for (int i = 0; i < result.length; i++) {
        result[i] = (byte[]) fields.get(i);
    }
    return result;
}
}
| |
/*
* JasperReports - Free Java Reporting Library.
* Copyright (C) 2001 - 2014 TIBCO Software Inc. All rights reserved.
* http://www.jaspersoft.com
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is part of JasperReports.
*
* JasperReports is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JasperReports is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
*/
package net.sf.jasperreports.engine.xml;
import java.io.IOException;
import net.sf.jasperreports.engine.JRConditionalStyle;
import net.sf.jasperreports.engine.JRConstants;
import net.sf.jasperreports.engine.JRExpression;
import net.sf.jasperreports.engine.JRLineBox;
import net.sf.jasperreports.engine.JRParagraph;
import net.sf.jasperreports.engine.JRPen;
import net.sf.jasperreports.engine.JRPropertiesUtil;
import net.sf.jasperreports.engine.JRStyle;
import net.sf.jasperreports.engine.JRStyleContainer;
import net.sf.jasperreports.engine.TabStop;
import net.sf.jasperreports.engine.util.JRXmlWriteHelper;
import net.sf.jasperreports.engine.util.VersionComparator;
import net.sf.jasperreports.engine.util.XmlNamespace;
/**
* Base XML writer.
*
* @author Lucian Chirita (lucianc@users.sourceforge.net)
* @version $Id: JRXmlBaseWriter.java 7199 2014-08-27 13:58:10Z teodord $
*/
public abstract class JRXmlBaseWriter
{
	/**
	 * Property that specifies the JasperReports version associated with this report. Report elements/attributes newer than
	 * this version are neglected by the JRXML writers when a report template is generated. If not set, all elements/attributes
	 * will be printed out.
	 *
	 * @see JRXmlWriter
	 */
	public static final String PROPERTY_REPORT_VERSION = JRPropertiesUtil.PROPERTY_PREFIX + "report.version";

	// Helper that performs the actual XML output; configured via useWriter().
	protected JRXmlWriteHelper writer;
	// Target JasperReports version for the generated JRXML; attributes introduced
	// after this version are skipped (see isNewerVersionOrEqual/isOlderVersionThan).
	protected String version;
	// Compares dotted version strings such as "4.0.2".
	protected VersionComparator versionComparator = new VersionComparator();

	/**
	 * Sets the XML write helper and the target version.
	 *
	 * @param writer the XML write helper
	 * @param version the JasperReports version to target, used to filter out
	 * newer elements/attributes; presumably null means "write everything" — see
	 * {@link #PROPERTY_REPORT_VERSION}
	 */
	protected void useWriter(JRXmlWriteHelper writer, String version)
	{
		this.writer = writer;
		this.version = version;
	}

	/**
	 * Writes a style.
	 * <p>
	 * Note: the attribute emission order below determines the attribute order in
	 * the produced JRXML; do not reorder these calls. Only the style's "own"
	 * (explicitly set) values are written, via the getOwn*/isOwn* accessors.
	 *
	 * @param style the style to write.
	 * @throws IOException
	 */
	protected void writeStyle(JRStyle style) throws IOException
	{
		writer.startElement(JRXmlConstants.ELEMENT_style);
		writer.addEncodedAttribute(JRXmlConstants.ATTRIBUTE_name, style.getName());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_isDefault, style.isDefault(), false);
		writeStyleReferenceAttr(style);
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_mode, style.getOwnModeValue());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_forecolor, style.getOwnForecolor());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_backcolor, style.getOwnBackcolor());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_fill, style.getOwnFillValue());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_radius, style.getOwnRadius());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_scaleImage, style.getOwnScaleImageValue());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_hAlign, style.getOwnHorizontalAlignmentValue());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_vAlign, style.getOwnVerticalAlignmentValue());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_rotation, style.getOwnRotationValue());
		if (isOlderVersionThan(JRConstants.VERSION_4_0_2))
		{
			// Since 4.0.2 line spacing lives on the <paragraph> element (see
			// writeParagraph); for older target versions it is still emitted
			// as a style attribute.
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_lineSpacing, style.getParagraph().getLineSpacing());
		}
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_markup, style.getOwnMarkup());
		writer.addEncodedAttribute(JRXmlConstants.ATTRIBUTE_pattern, style.getOwnPattern());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_isBlankWhenNull, style.isOwnBlankWhenNull());
		writer.addEncodedAttribute(JRXmlConstants.ATTRIBUTE_fontName, style.getOwnFontName());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_fontSize, style.getOwnFontsize(), true);
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_isBold, style.isOwnBold());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_isItalic, style.isOwnItalic());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_isUnderline, style.isOwnUnderline());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_isStrikeThrough, style.isOwnStrikeThrough());
		writer.addEncodedAttribute(JRXmlConstants.ATTRIBUTE_pdfFontName, style.getOwnPdfFontName());
		writer.addEncodedAttribute(JRXmlConstants.ATTRIBUTE_pdfEncoding, style.getOwnPdfEncoding());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_isPdfEmbedded, style.isOwnPdfEmbedded());
		writePen(style.getLinePen());
		writeBox(style.getLineBox());
		writeParagraph(style.getParagraph());
		if (toWriteConditionalStyles())
		{
			// Conditional styles are written as nested <conditionalStyle> elements;
			// a conditional style itself never nests further conditional styles.
			JRConditionalStyle[] conditionalStyles = style.getConditionalStyles();
			if (!(style instanceof JRConditionalStyle) && conditionalStyles != null)
			{
				for (int i = 0; i < conditionalStyles.length; i++)
				{
					writeConditionalStyle(conditionalStyles[i]);
				}
			}
		}
		writer.closeElement();
	}

	/**
	 * Writes the style reference attribute for the given container, preferring a
	 * resolved style's name over a plain name reference. Conditional styles get
	 * no style attribute.
	 *
	 * @param styleContainer the element whose style reference is written
	 */
	public void writeStyleReferenceAttr(JRStyleContainer styleContainer)
	{
		if (!(styleContainer instanceof JRConditionalStyle))
		{
			if (styleContainer.getStyle() != null)
			{
				writer.addEncodedAttribute(JRXmlConstants.ATTRIBUTE_style, styleContainer.getStyle().getName());
			}
			else if (styleContainer.getStyleNameReference() != null)
			{
				writer.addEncodedAttribute(JRXmlConstants.ATTRIBUTE_style, styleContainer.getStyleNameReference());
			}
		}
	}

	/**
	 * Decides whether conditional styles are to be written.
	 *
	 * @return whether conditional styles are to be written
	 */
	protected abstract boolean toWriteConditionalStyles();

	/**
	 * Writes a conditional style: its condition expression followed by the style
	 * definition itself (via {@link #writeStyle(JRStyle)}).
	 *
	 * @param style the conditional style
	 * @throws IOException
	 */
	protected void writeConditionalStyle(JRConditionalStyle style) throws IOException
	{
		writer.startElement(JRXmlConstants.ELEMENT_conditionalStyle);
		writeExpression(JRXmlConstants.ELEMENT_conditionExpression, style.getConditionExpression(), false);
		writeStyle(style);
		writer.closeElement();
	}

	/**
	 * Writes a pen under the default {@code <pen>} element name.
	 *
	 * @param pen the pen to write
	 * @throws IOException
	 */
	protected void writePen(JRPen pen) throws IOException
	{
		writePen(JRXmlConstants.ELEMENT_pen, pen);
	}

	/**
	 * Writes a pen under the given element name. Only the pen's "own" values are
	 * emitted; the element is skipped entirely when it ends up empty
	 * (closeElement(true)).
	 *
	 * @param element the XML element name to use
	 * @param pen the pen to write
	 * @throws IOException
	 */
	private void writePen(String element, JRPen pen) throws IOException
	{
		writer.startElement(element);
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_lineWidth, pen.getOwnLineWidth());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_lineStyle, pen.getOwnLineStyleValue());
		writer.addAttribute(JRXmlConstants.ATTRIBUTE_lineColor, pen.getOwnLineColor());
		writer.closeElement(true);
	}

	/**
	 * Writes a line box in the default namespace.
	 *
	 * @param box the box to write; may be null, in which case nothing is written
	 * @throws IOException
	 */
	public void writeBox(JRLineBox box) throws IOException
	{
		writeBox(box, null);
	}

	/**
	 * Writes a line box: paddings as attributes, then the shared pen and the four
	 * side pens as child elements. Skipped entirely when {@code box} is null.
	 *
	 * @param box the box to write; may be null
	 * @param namespace the XML namespace for the {@code <box>} element; may be null
	 * @throws IOException
	 */
	public void writeBox(JRLineBox box, XmlNamespace namespace) throws IOException
	{
		if (box != null)
		{
			writer.startElement(JRXmlConstants.ELEMENT_box, namespace);
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_padding, box.getOwnPadding());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_topPadding, box.getOwnTopPadding());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_leftPadding, box.getOwnLeftPadding());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_bottomPadding, box.getOwnBottomPadding());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_rightPadding, box.getOwnRightPadding());
			writePen(JRXmlConstants.ELEMENT_pen, box.getPen());
			writePen(JRXmlConstants.ELEMENT_topPen, box.getTopPen());
			writePen(JRXmlConstants.ELEMENT_leftPen, box.getLeftPen());
			writePen(JRXmlConstants.ELEMENT_bottomPen, box.getBottomPen());
			writePen(JRXmlConstants.ELEMENT_rightPen, box.getRightPen());
			writer.closeElement(true);
		}
	}

	/**
	 * Writes a paragraph in the default namespace.
	 *
	 * @param paragraph the paragraph to write; may be null
	 * @throws IOException
	 */
	public void writeParagraph(JRParagraph paragraph) throws IOException
	{
		writeParagraph(paragraph, null);
	}

	/**
	 * Writes a paragraph element with its spacing/indent attributes and any tab
	 * stops. Only emitted when the target version is at least 4.0.2, the version
	 * that introduced the {@code <paragraph>} element (older targets get
	 * lineSpacing as a style attribute instead — see writeStyle).
	 *
	 * @param paragraph the paragraph to write; may be null
	 * @param namespace the XML namespace for the element; may be null
	 * @throws IOException
	 */
	public void writeParagraph(JRParagraph paragraph, XmlNamespace namespace) throws IOException
	{
		if (paragraph != null && isNewerVersionOrEqual(JRConstants.VERSION_4_0_2))
		{
			writer.startElement(JRXmlConstants.ELEMENT_paragraph, namespace);
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_lineSpacing, paragraph.getOwnLineSpacing());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_lineSpacingSize, paragraph.getOwnLineSpacingSize());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_firstLineIndent, paragraph.getOwnFirstLineIndent());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_leftIndent, paragraph.getOwnLeftIndent());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_rightIndent, paragraph.getOwnRightIndent());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_spacingBefore, paragraph.getOwnSpacingBefore());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_spacingAfter, paragraph.getOwnSpacingAfter());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_tabStopWidth, paragraph.getOwnTabStopWidth());
			/* */
			TabStop[] tabStops = paragraph.getTabStops();
			if (tabStops != null && tabStops.length > 0)
			{
				for(int i = 0; i < tabStops.length; i++)
				{
					writeTabStop(tabStops[i]);
				}
			}
			writer.closeElement(true);
		}
	}

	/**
	 * Writes a single tab stop (position and alignment). Skipped when null.
	 *
	 * @param tabStop the tab stop to write; may be null
	 * @throws IOException
	 */
	public void writeTabStop(TabStop tabStop) throws IOException
	{
		if (tabStop != null)
		{
			writer.startElement(JRXmlConstants.ELEMENT_tabStop);
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_position, tabStop.getPosition());
			writer.addAttribute(JRXmlConstants.ATTRIBUTE_alignment, tabStop.getAlignment());
			writer.closeElement(true);
		}
	}

	/**
	 * Returns true when the target version is greater than or equal to the given
	 * version, i.e. the feature introduced in {@code oldVersion} may be written.
	 *
	 * @param oldVersion the version that introduced the feature
	 * @return whether the target version is at least {@code oldVersion}
	 */
	protected boolean isNewerVersionOrEqual(String oldVersion)
	{
		return versionComparator.compare(version, oldVersion) >= 0;
	}

	/**
	 * Returns true when the target version is strictly older than the given
	 * version. Note: the parameter shadows the {@link #version} field; the field
	 * is referenced explicitly as {@code this.version}.
	 *
	 * @param version the version to compare the target version against
	 * @return whether the target version is older than {@code version}
	 */
	protected boolean isOlderVersionThan(String version)
	{
		return versionComparator.compare(this.version, version) < 0;
	}

	/**
	 * Writes an expression, using the modern form (no class attribute) for target
	 * versions 4.1.1 and newer, and the deprecated form carrying the expression
	 * class for older targets — hence the deprecation suppression.
	 *
	 * @param name the XML element name for the expression
	 * @param expression the expression to write
	 * @param writeClass whether to write the expression class (pre-4.1.1 form only)
	 * @throws IOException
	 */
	@SuppressWarnings("deprecation")
	protected void writeExpression(String name, JRExpression expression, boolean writeClass) throws IOException
	{
		if(isNewerVersionOrEqual(JRConstants.VERSION_4_1_1))
		{
			writer.writeExpression(name, expression);
		}
		else
		{
			writer.writeExpression(name, expression, writeClass);
		}
	}
}
| |
package info.novatec.testit.webtester.junit.runner.internal;
import static org.assertj.core.api.Assertions.assertThat;
import java.lang.reflect.Field;
import org.apache.commons.lang.IllegalClassException;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import info.novatec.testit.webtester.config.Configuration;
import info.novatec.testit.webtester.config.builders.DefaultConfigurationBuilder;
import info.novatec.testit.webtester.junit.annotations.ConfigurationValue;
@RunWith(Enclosed.class)
/**
 * Tests for {@code ConfigurationValueInjector}, organized as nested test groups
 * run via JUnit's {@link Enclosed} runner. Each group covers one aspect:
 * type support ({@code canInjectValue}), static-field injection
 * ({@code injectStatics}) and instance-field injection ({@code inject}).
 * Note: {@code UnmappedFieldTypes} and {@code NonInjectableFields} exist twice,
 * once under {@code InjectStatics} and once under {@code Inject} — they are
 * distinct classes in different enclosing scopes.
 */
@RunWith(Enclosed.class)
public class ConfigurationValueInjectorTest {

    // Shared configuration; the expected values asserted below
    // (e.g. "test.string" -> "foo bar", "test.integer" -> 1) presumably come
    // from the default test configuration sources — see DefaultConfigurationBuilder.
    static Configuration configuration = new DefaultConfigurationBuilder().build();

    public static abstract class AbstractConfigurationValueInjectorTest {
        // "cut" = class under test; a fresh injector per test instance.
        ConfigurationValueInjector cut = new ConfigurationValueInjector();
    }

    /** Verifies which field types the injector reports as injectable. */
    public static class CanInjectValue extends AbstractConfigurationValueInjectorTest {
        @Test
        public void canInjectStringValues() throws NoSuchFieldException {
            assertThat(cut.canInjectValue(field("stringValue"))).isTrue();
        }
        @Test
        public void canInjectIntegerValues() throws NoSuchFieldException {
            assertThat(cut.canInjectValue(field("integerValue"))).isTrue();
        }
        @Test
        public void canInjectLongValues() throws NoSuchFieldException {
            assertThat(cut.canInjectValue(field("longValue"))).isTrue();
        }
        @Test
        public void canInjectFloatValues() throws NoSuchFieldException {
            assertThat(cut.canInjectValue(field("floatValue"))).isTrue();
        }
        @Test
        public void canInjectDoubleValues() throws NoSuchFieldException {
            assertThat(cut.canInjectValue(field("doubleValue"))).isTrue();
        }
        @Test
        public void canInjectBooleanValues() throws NoSuchFieldException {
            assertThat(cut.canInjectValue(field("booleanValue"))).isTrue();
        }
        @Test
        public void cantInjectObjectValues() throws NoSuchFieldException {
            // Object has no registered conversion, so it must be rejected.
            assertThat(cut.canInjectValue(field("objectValue"))).isFalse();
        }
        // Resolves a declared field of the fixture class by name.
        Field field(String fieldName) throws NoSuchFieldException {
            return CanInjectClass.class.getDeclaredField(fieldName);
        }
        // Fixture holding one field per type the injector is probed with.
        public static class CanInjectClass {
            String stringValue;
            Integer integerValue;
            Long longValue;
            Float floatValue;
            Double doubleValue;
            Boolean booleanValue;
            Object objectValue;
        }
    }

    /** Groups all tests around {@code injectStatics(...)}. */
    @RunWith(Enclosed.class)
    public static class InjectStatics {
        public static class StaticFieldInjection {
            // Inject once for the whole group; the tests below only read results.
            @BeforeClass
            public static void injectStaticValues() {
                new ConfigurationValueInjector().injectStatics(configuration, TestClassForStaticFields.class);
            }
            @Test
            public void stringValuesCanBeInjected() {
                assertThat(TestClassForStaticFields.stringValue).isEqualTo("foo bar");
            }
            @Test
            public void integerValuesCanBeInjected() {
                assertThat(TestClassForStaticFields.integerValue).isEqualTo(1);
            }
            @Test
            public void longValuesCanBeInjected() {
                assertThat(TestClassForStaticFields.longValue).isEqualTo(2L);
            }
            @Test
            public void floatValuesCanBeInjected() {
                assertThat(TestClassForStaticFields.floatValue).isEqualTo(1.0f);
            }
            @Test
            public void doubleValuesCanBeInjected() {
                assertThat(TestClassForStaticFields.doubleValue).isEqualTo(2.0d);
            }
            @Test
            public void booleanValuesCanBeInjected() {
                assertThat(TestClassForStaticFields.booleanValue).isEqualTo(true);
            }
            // Fixture with one annotated static field per supported type.
            public static class TestClassForStaticFields {
                @ConfigurationValue("test.string")
                static String stringValue;
                @ConfigurationValue("test.integer")
                static Integer integerValue;
                @ConfigurationValue("test.long")
                static Long longValue;
                @ConfigurationValue("test.float")
                static Float floatValue;
                @ConfigurationValue("test.double")
                static Double doubleValue;
                @ConfigurationValue("test.boolean")
                static Boolean booleanValue;
            }
        }
        public static class UnmappedFieldTypes extends AbstractConfigurationValueInjectorTest {
            // An annotated field of an unsupported type must fail loudly.
            @Test(expected = IllegalClassException.class)
            public void unmappedFieldTypeOfStaticFieldThrowsException() {
                cut.injectStatics(configuration, UnmappedStaticClass.class);
            }
            public static class UnmappedStaticClass {
                @ConfigurationValue("unknownClass")
                static Object stringValue;
            }
        }
        public static class NonInjectableFields extends AbstractConfigurationValueInjectorTest {
            @Test
            public void nonConfigurationStaticValueFieldsAreIgnored() {
                cut.injectStatics(configuration, NonInjectableStaticClass.class);
                assertThat(NonInjectableStaticClass.classString).isEqualTo("foo bar");
                // The unannotated field must be left untouched.
                assertThat(NonInjectableStaticClass.notInjectedClassValue).isNull();
            }
            public static class NonInjectableStaticClass {
                @ConfigurationValue("test.string")
                static String classString;
                static String notInjectedClassValue;
            }
        }
    }

    /** Groups all tests around instance-level {@code inject(...)}. */
    @RunWith(Enclosed.class)
    public static class Inject {
        public static class InstanceFieldInjection extends AbstractConfigurationValueInjectorTest {
            @Test
            public void stringValuesCanBeInjected() {
                TestClassForInstanceFields instance = new TestClassForInstanceFields();
                cut.inject(configuration, instance);
                assertThat(instance.stringValue).isEqualTo("foo bar");
            }
            @Test
            public void integerValuesCanBeInjected() {
                TestClassForInstanceFields instance = new TestClassForInstanceFields();
                cut.inject(configuration, instance);
                assertThat(instance.integerValue).isEqualTo(1);
            }
            @Test
            public void longValuesCanBeInjected() {
                TestClassForInstanceFields instance = new TestClassForInstanceFields();
                cut.inject(configuration, instance);
                assertThat(instance.longValue).isEqualTo(2L);
            }
            @Test
            public void floatValuesCanBeInjected() {
                TestClassForInstanceFields instance = new TestClassForInstanceFields();
                cut.inject(configuration, instance);
                assertThat(instance.floatValue).isEqualTo(1.0f);
            }
            @Test
            public void doubleValuesCanBeInjected() {
                TestClassForInstanceFields instance = new TestClassForInstanceFields();
                cut.inject(configuration, instance);
                assertThat(instance.doubleValue).isEqualTo(2.0d);
            }
            @Test
            public void booleanValuesCanBeInjected() {
                TestClassForInstanceFields instance = new TestClassForInstanceFields();
                cut.inject(configuration, instance);
                assertThat(instance.booleanValue).isEqualTo(true);
            }
            // Fixture with one annotated instance field per supported type.
            public static class TestClassForInstanceFields {
                @ConfigurationValue("test.string")
                String stringValue;
                @ConfigurationValue("test.integer")
                Integer integerValue;
                @ConfigurationValue("test.long")
                Long longValue;
                @ConfigurationValue("test.float")
                Float floatValue;
                @ConfigurationValue("test.double")
                Double doubleValue;
                @ConfigurationValue("test.boolean")
                Boolean booleanValue;
            }
        }
        public static class UnmappedFieldTypes extends AbstractConfigurationValueInjectorTest {
            // An annotated field of an unsupported type must fail loudly.
            @Test(expected = IllegalClassException.class)
            public void unmappedFieldTypeOfInstanceFieldThrowsException() {
                UnmappedInstanceClass instance = new UnmappedInstanceClass();
                cut.inject(configuration, instance);
            }
            public static class UnmappedInstanceClass {
                @ConfigurationValue("unknownClass")
                Object stringValue;
            }
        }
        public static class NonInjectableFields extends AbstractConfigurationValueInjectorTest {
            @Test
            public void nonConfigurationInstanceValueFieldsAreIgnored() {
                NonInjectableInstanceClass instance = new NonInjectableInstanceClass();
                cut.inject(configuration, instance);
                assertThat(instance.instanceString).isEqualTo("foo bar");
                // The unannotated field must be left untouched.
                assertThat(instance.notInjectedInstanceValue).isNull();
            }
            public static class NonInjectableInstanceClass {
                @ConfigurationValue("test.string")
                String instanceString;
                String notInjectedInstanceValue;
            }
        }
    }
}
| |
/*
* Copyright 2004, 2005, 2006 Acegi Technology Pty Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.authentication.dao;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.security.authentication.AccountExpiredException;
import org.springframework.security.authentication.AuthenticationServiceException;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.CredentialsExpiredException;
import org.springframework.security.authentication.DisabledException;
import org.springframework.security.authentication.InternalAuthenticationServiceException;
import org.springframework.security.authentication.LockedException;
import org.springframework.security.authentication.TestingAuthenticationToken;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.core.userdetails.cache.EhCacheBasedUserCache;
import org.springframework.security.core.userdetails.cache.NullUserCache;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.NoOpPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
/**
* Tests {@link DaoAuthenticationProvider}.
*
* @author Ben Alex
* @author Rob Winch
*/
public class DaoAuthenticationProviderTests {
// Authorities granted to the test user by the mock UserDetailsService
// implementations in this class; asserted e.g. in testAuthenticates().
private static final List<GrantedAuthority> ROLES_12 = AuthorityUtils.createAuthorityList(
		"ROLE_ONE", "ROLE_TWO");
// ~ Methods
// ========================================================================================================
@Test
public void testAuthenticateFailsForIncorrectPasswordCase() {
	// Password comparison is case-sensitive: "KOala" must not match "koala".
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(new UsernamePasswordAuthenticationToken("rod", "KOala"));
		fail("Should have thrown BadCredentialsException");
	}
	catch (BadCredentialsException expected) {
	}
}
@Test
public void testReceivedBadCredentialsWhenCredentialsNotProvided() {
	// Test related to SEC-434: a null credential must be rejected as bad
	// credentials rather than causing an NPE or succeeding.
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
	provider.setUserCache(new MockUserCache());
	UsernamePasswordAuthenticationToken authenticationToken = new UsernamePasswordAuthenticationToken(
			"rod", null);
	try {
		provider.authenticate(authenticationToken);
		// Fixed typo ("BadCredenialsException") and aligned the message with
		// the "Should have thrown ..." convention used throughout this class.
		fail("Should have thrown BadCredentialsException");
	}
	catch (BadCredentialsException expected) {
	}
}
@Test
public void testAuthenticateFailsIfAccountExpired() {
	// Correct password, but the mock account is expired -> AccountExpiredException.
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(
			new MockAuthenticationDaoUserPeterAccountExpired());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(new UsernamePasswordAuthenticationToken("peter", "opal"));
		fail("Should have thrown AccountExpiredException");
	}
	catch (AccountExpiredException expected) {
	}
}
@Test
public void testAuthenticateFailsIfAccountLocked() {
	// Correct password, but the mock account is locked -> LockedException.
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoUserPeterAccountLocked());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(new UsernamePasswordAuthenticationToken("peter", "opal"));
		fail("Should have thrown LockedException");
	}
	catch (LockedException expected) {
	}
}
@Test
public void testAuthenticateFailsIfCredentialsExpired() {
	// Phase 1: correct password but expired credentials -> CredentialsExpiredException.
	UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
			"peter", "opal");
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(
			new MockAuthenticationDaoUserPeterCredentialsExpired());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(token);
		fail("Should have thrown CredentialsExpiredException");
	}
	catch (CredentialsExpiredException expected) {
	}
	// Check that wrong password causes BadCredentialsException, rather than
	// CredentialsExpiredException
	token = new UsernamePasswordAuthenticationToken("peter", "wrong_password");
	try {
		provider.authenticate(token);
		fail("Should have thrown BadCredentialsException");
	}
	catch (BadCredentialsException expected) {
	}
}
@Test
public void testAuthenticateFailsIfUserDisabled() {
	// Correct password, but the mock account is disabled -> DisabledException.
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoUserPeter());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(new UsernamePasswordAuthenticationToken("peter", "opal"));
		fail("Should have thrown DisabledException");
	}
	catch (DisabledException expected) {
	}
}
@Test
public void testAuthenticateFailsWhenAuthenticationDaoHasBackendFailure() {
	// A repository failure in the UserDetailsService must surface as an
	// InternalAuthenticationServiceException, not leak the backend exception.
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoSimulateBackendError());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(new UsernamePasswordAuthenticationToken("rod", "koala"));
		fail("Should have thrown InternalAuthenticationServiceException");
	}
	catch (InternalAuthenticationServiceException expected) {
	}
}
@Test
public void testAuthenticateFailsWithEmptyUsername() {
	// NOTE(review): despite the test name, the principal here is null rather
	// than the empty string — confirm whether both cases should be covered.
	UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
			null, "koala");
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(token);
		fail("Should have thrown BadCredentialsException");
	}
	catch (BadCredentialsException expected) {
	}
}
@Test
public void testAuthenticateFailsWithInvalidPassword() {
	// Known user, wrong password -> BadCredentialsException.
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(new UsernamePasswordAuthenticationToken("rod", "INVALID_PASSWORD"));
		fail("Should have thrown BadCredentialsException");
	}
	catch (BadCredentialsException expected) {
	}
}
@Test
public void testAuthenticateFailsWithInvalidUsernameAndHideUserNotFoundExceptionFalse() {
	// With hideUserNotFoundExceptions disabled, an unknown user must surface
	// as UsernameNotFoundException instead of being masked as bad credentials.
	UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
			"INVALID_USER", "koala");
	DaoAuthenticationProvider provider = createProvider();
	provider.setHideUserNotFoundExceptions(false); // we want
	// UsernameNotFoundExceptions
	provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(token);
		fail("Should have thrown UsernameNotFoundException");
	}
	catch (UsernameNotFoundException expected) {
	}
}
@Test
public void testAuthenticateFailsWithInvalidUsernameAndHideUserNotFoundExceptionsWithDefaultOfTrue() {
	// By default user-not-found is hidden behind BadCredentialsException so
	// callers cannot probe for valid usernames.
	DaoAuthenticationProvider provider = createProvider();
	assertThat(provider.isHideUserNotFoundExceptions()).isTrue();
	provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(new UsernamePasswordAuthenticationToken("INVALID_USER", "koala"));
		fail("Should have thrown BadCredentialsException");
	}
	catch (BadCredentialsException expected) {
	}
}
@Test
public void testAuthenticateFailsWithMixedCaseUsernameIfDefaultChanged() {
	// Username lookup is case-sensitive: "RoD" must not resolve to "rod".
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
	provider.setUserCache(new MockUserCache());
	try {
		provider.authenticate(new UsernamePasswordAuthenticationToken("RoD", "koala"));
		fail("Should have thrown BadCredentialsException");
	}
	catch (BadCredentialsException expected) {
	}
}
@Test
public void testAuthenticates() {
	// Happy path: verifies principal type, credentials, authorities and that
	// the token details are carried over to the result.
	UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
			"rod", "koala");
	token.setDetails("192.168.0.1");
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
	provider.setUserCache(new MockUserCache());
	Authentication result = provider.authenticate(token);
	if (!(result instanceof UsernamePasswordAuthenticationToken)) {
		fail("Should have returned instance of UsernamePasswordAuthenticationToken");
	}
	UsernamePasswordAuthenticationToken castResult = (UsernamePasswordAuthenticationToken) result;
	assertThat(castResult.getPrincipal().getClass()).isEqualTo(User.class);
	assertThat(castResult.getCredentials()).isEqualTo("koala");
	assertThat(
			AuthorityUtils.authorityListToSet(castResult.getAuthorities())).contains(
			"ROLE_ONE", "ROLE_TWO");
	assertThat(castResult.getDetails()).isEqualTo("192.168.0.1");
}
@Test
public void testAuthenticatesASecondTime() {
	// Re-authenticating with an already-authenticated token (whose principal
	// is a UserDetails) must succeed and preserve the credentials.
	UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
			"rod", "koala");
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
	provider.setUserCache(new MockUserCache());
	Authentication result = provider.authenticate(token);
	if (!(result instanceof UsernamePasswordAuthenticationToken)) {
		fail("Should have returned instance of UsernamePasswordAuthenticationToken");
	}
	// Now try to authenticate with the previous result (with its UserDetails)
	Authentication result2 = provider.authenticate(result);
	if (!(result2 instanceof UsernamePasswordAuthenticationToken)) {
		fail("Should have returned instance of UsernamePasswordAuthenticationToken");
	}
	assertThat(result2.getCredentials()).isEqualTo(result.getCredentials());
}
@Test
public void testAuthenticatesWithForcePrincipalAsString() {
	// With forcePrincipalAsString enabled the resulting principal is the plain
	// username String instead of a UserDetails instance.
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
	provider.setUserCache(new MockUserCache());
	provider.setForcePrincipalAsString(true);
	Authentication result = provider.authenticate(
			new UsernamePasswordAuthenticationToken("rod", "koala"));
	if (!(result instanceof UsernamePasswordAuthenticationToken)) {
		fail("Should have returned instance of UsernamePasswordAuthenticationToken");
	}
	UsernamePasswordAuthenticationToken castResult = (UsernamePasswordAuthenticationToken) result;
	assertThat(castResult.getPrincipal().getClass()).isEqualTo(String.class);
	assertThat(castResult.getPrincipal()).isEqualTo("rod");
}
@Test
public void testDetectsNullBeingReturnedFromAuthenticationDao() {
	// A UserDetailsService returning null violates its contract; the provider
	// must detect this and raise AuthenticationServiceException with a clear message.
	UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
			"rod", "koala");
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(new MockAuthenticationDaoReturnsNull());
	try {
		provider.authenticate(token);
		fail("Should have thrown AuthenticationServiceException");
	}
	catch (AuthenticationServiceException expected) {
		assertThat(
				"UserDetailsService returned null, which is an interface contract violation").isEqualTo(
				expected.getMessage());
	}
}
@Test
public void testGettersSetters() {
	// Simple round-trips through the provider's configuration properties.
	DaoAuthenticationProvider provider = new DaoAuthenticationProvider();

	provider.setPasswordEncoder(new BCryptPasswordEncoder());
	assertThat(provider.getPasswordEncoder().getClass()).isEqualTo(
			BCryptPasswordEncoder.class);

	provider.setUserCache(new EhCacheBasedUserCache());
	assertThat(provider.getUserCache().getClass()).isEqualTo(
			EhCacheBasedUserCache.class);

	// forcePrincipalAsString defaults to false and is togglable.
	assertThat(provider.isForcePrincipalAsString()).isFalse();
	provider.setForcePrincipalAsString(true);
	assertThat(provider.isForcePrincipalAsString()).isTrue();
}
@Test
public void testGoesBackToAuthenticationDaoToObtainLatestPasswordIfCachedPasswordSeemsIncorrect() {
	// Verifies the stale-cache recovery path: when the cached password no
	// longer matches, the provider re-queries the DAO and refreshes the cache.
	UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
			"rod", "koala");
	MockAuthenticationDaoUserrod authenticationDao = new MockAuthenticationDaoUserrod();
	MockUserCache cache = new MockUserCache();
	DaoAuthenticationProvider provider = createProvider();
	provider.setUserDetailsService(authenticationDao);
	provider.setUserCache(cache);
	// This will work, as password still "koala"
	provider.authenticate(token);
	// Check "rod = koala" ended up in the cache
	assertThat(cache.getUserFromCache("rod").getPassword()).isEqualTo("koala");
	// Now change the password the AuthenticationDao will return
	authenticationDao.setPassword("easternLongNeckTurtle");
	// Now try authentication again, with the new password
	token = new UsernamePasswordAuthenticationToken("rod", "easternLongNeckTurtle");
	provider.authenticate(token);
	// To get this far, the new password was accepted
	// Check the cache was updated
	assertThat(cache.getUserFromCache("rod").getPassword()).isEqualTo(
			"easternLongNeckTurtle");
}
@Test
public void testStartupFailsIfNoAuthenticationDao() throws Exception {
	// afterPropertiesSet() must reject a provider without a UserDetailsService.
	DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
	try {
		provider.afterPropertiesSet();
		fail("Should have thrown IllegalArgumentException");
	}
	catch (IllegalArgumentException expected) {
	}
}
@Test
public void testStartupFailsIfNoUserCacheSet() throws Exception {
    DaoAuthenticationProvider provider = createProvider();
    provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
    // A NullUserCache is installed by default ...
    assertThat(provider.getUserCache().getClass()).isEqualTo(NullUserCache.class);
    // ... but explicitly clearing it must make startup validation fail.
    provider.setUserCache(null);
    try {
        provider.afterPropertiesSet();
        fail("Should have thrown IllegalArgumentException");
    }
    catch (IllegalArgumentException expected) {
        // expected: a null cache is rejected
    }
}
@Test
public void testStartupSuccess() throws Exception {
    DaoAuthenticationProvider provider = createProvider();
    UserDetailsService dao = new MockAuthenticationDaoUserrod();
    provider.setUserCache(new MockUserCache());
    provider.setUserDetailsService(dao);
    assertThat(provider.getUserDetailsService()).isEqualTo(dao);
    // A fully configured provider passes its own startup validation.
    provider.afterPropertiesSet();
}
@Test
public void testSupports() {
    DaoAuthenticationProvider provider = createProvider();
    assertThat(provider.supports(UsernamePasswordAuthenticationToken.class)).isTrue();
    // Assert the negative directly instead of negating inside assertThat():
    // on failure this reports the actual boolean instead of its negation.
    assertThat(provider.supports(TestingAuthenticationToken.class)).isFalse();
}
// SEC-2056
@Test
public void testUserNotFoundEncodesPassword() throws Exception {
    PasswordEncoder encoder = mock(PasswordEncoder.class);
    when(encoder.encode(anyString())).thenReturn("koala");

    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    provider.setHideUserNotFoundExceptions(false);
    provider.setPasswordEncoder(encoder);
    provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
    provider.afterPropertiesSet();

    UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
            "missing", "koala");
    try {
        provider.authenticate(token);
        fail("Expected Exception");
    }
    catch (UsernameNotFoundException success) {
        // expected: "missing" is not a known user
    }
    // ensure encoder invoked w/ non-null strings since PasswordEncoder impls may fail
    // if encoded password is null
    verify(encoder).matches(isA(String.class), isA(String.class));
}
@Test
public void testUserNotFoundBCryptPasswordEncoder() {
    PasswordEncoder encoder = new BCryptPasswordEncoder();
    UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
            "missing", "koala");
    // Store a real BCrypt hash for the known user so the lookup path has a
    // plausible encoded password available.
    MockAuthenticationDaoUserrod dao = new MockAuthenticationDaoUserrod();
    dao.password = encoder.encode((CharSequence) token.getCredentials());

    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    provider.setHideUserNotFoundExceptions(false);
    provider.setPasswordEncoder(encoder);
    provider.setUserDetailsService(dao);
    try {
        provider.authenticate(token);
        fail("Expected Exception");
    }
    catch (UsernameNotFoundException success) {
        // expected: "missing" is not a known user
    }
}
@Test
public void testUserNotFoundDefaultEncoder() {
    DaoAuthenticationProvider provider = createProvider();
    provider.setHideUserNotFoundExceptions(false);
    provider.setUserDetailsService(new MockAuthenticationDaoUserrod());

    UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
            "missing", null);
    try {
        provider.authenticate(token);
        fail("Expected Exception");
    }
    catch (UsernameNotFoundException success) {
        // expected: unknown user surfaces directly (hiding disabled)
    }
}
/**
 * This is an explicit test for SEC-2056. It is intentionally ignored since this test
 * is not deterministic and {@link #testUserNotFoundEncodesPassword()} ensures that
 * SEC-2056 is fixed.
 */
public void IGNOREtestSec2056() {
    UsernamePasswordAuthenticationToken foundUser = new UsernamePasswordAuthenticationToken(
            "rod", "koala");
    UsernamePasswordAuthenticationToken notFoundUser = new UsernamePasswordAuthenticationToken(
            "notFound", "koala");
    PasswordEncoder encoder = new BCryptPasswordEncoder(10, new SecureRandom());
    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    provider.setHideUserNotFoundExceptions(false);
    provider.setPasswordEncoder(encoder);
    MockAuthenticationDaoUserrod userDetailsService = new MockAuthenticationDaoUserrod();
    userDetailsService.password = encoder.encode((CharSequence) foundUser.getCredentials());
    provider.setUserDetailsService(userDetailsService);

    final int sampleSize = 100;

    // Time a batch of successful authentications.
    List<Long> foundTimes = new ArrayList<Long>(sampleSize);
    for (int i = 0; i < sampleSize; i++) {
        long start = System.currentTimeMillis();
        provider.authenticate(foundUser);
        foundTimes.add(System.currentTimeMillis() - start);
    }

    // Time a batch of failed (unknown-user) authentications.
    List<Long> notFoundTimes = new ArrayList<Long>(sampleSize);
    for (int i = 0; i < sampleSize; i++) {
        long start = System.currentTimeMillis();
        try {
            provider.authenticate(notFoundUser);
            fail("Expected Exception");
        }
        catch (UsernameNotFoundException success) {
            // expected on every iteration
        }
        notFoundTimes.add(System.currentTimeMillis() - start);
    }

    // SEC-2056: the two paths should be statistically indistinguishable.
    double userFoundAvg = avg(foundTimes);
    double userNotFoundAvg = avg(notFoundTimes);
    assertThat(Math.abs(userNotFoundAvg - userFoundAvg) <= 3).withFailMessage(
            "User not found average " + userNotFoundAvg
                    + " should be within 3ms of user found average "
                    + userFoundAvg).isTrue();
}
/**
 * Arithmetic mean of the supplied timings (milliseconds).
 *
 * @param counts the samples to average; must be non-empty
 * @return the mean as a double
 */
private double avg(List<Long> counts) {
    long sum = 0;
    for (Long time : counts) {
        sum += time;
    }
    // FIX: cast before dividing — the original performed integer division
    // (long / int), silently truncating the fractional milliseconds that the
    // 3ms-tolerance comparison in IGNOREtestSec2056 depends on.
    return (double) sum / counts.size();
}
@Test
public void testUserNotFoundNullCredentials() {
    PasswordEncoder encoder = mock(PasswordEncoder.class);
    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    provider.setHideUserNotFoundExceptions(false);
    provider.setPasswordEncoder(encoder);
    provider.setUserDetailsService(new MockAuthenticationDaoUserrod());

    UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
            "missing", null);
    try {
        provider.authenticate(token);
        fail("Expected Exception");
    }
    catch (UsernameNotFoundException success) {
        // expected: unknown user
    }
    // With null credentials there is nothing to compare, so the encoder must
    // never be consulted.
    verify(encoder, times(0)).matches(anyString(), anyString());
}
// ~ Inner Classes
// ==================================================================================================
// UserDetailsService stub whose lookup "succeeds" with a null result,
// regardless of the requested username.
private class MockAuthenticationDaoReturnsNull implements UserDetailsService {
    public UserDetails loadUserByUsername(String username) {
        return null;
    }
}
// UserDetailsService stub that simulates a backing-store failure by always
// throwing DataRetrievalFailureException.
private class MockAuthenticationDaoSimulateBackendError
        implements UserDetailsService {
    public UserDetails loadUserByUsername(String username) {
        throw new DataRetrievalFailureException(
                "This mock simulator is designed to fail");
    }
}
// Knows exactly one user, "rod". The password is mutable so tests can
// simulate a password change in the backing store.
private class MockAuthenticationDaoUserrod implements UserDetailsService {
    private String password = "koala";

    public UserDetails loadUserByUsername(String username) {
        if (!"rod".equals(username)) {
            throw new UsernameNotFoundException("Could not find: " + username);
        }
        return new User("rod", password, true, true, true, true, ROLES_12);
    }

    public void setPassword(String password) {
        this.password = password;
    }
}
// Knows exactly one user, "rod", whose stored password carries an embedded
// salt marker.
private class MockAuthenticationDaoUserrodWithSalt implements UserDetailsService {
    public UserDetails loadUserByUsername(String username) {
        if (!"rod".equals(username)) {
            throw new UsernameNotFoundException("Could not find: " + username);
        }
        return new User("rod", "koala{SYSTEM_SALT_VALUE}", true, true, true, true,
                ROLES_12);
    }
}
// Knows exactly one user, "peter", created with one status flag cleared
// (first boolean false) so the provider's status checks can be exercised.
private class MockAuthenticationDaoUserPeter implements UserDetailsService {
    public UserDetails loadUserByUsername(String username) {
        if (!"peter".equals(username)) {
            throw new UsernameNotFoundException("Could not find: " + username);
        }
        return new User("peter", "opal", false, true, true, true, ROLES_12);
    }
}
// Knows exactly one user, "peter"; per the class name the cleared flag
// (second boolean) simulates an expired account.
private class MockAuthenticationDaoUserPeterAccountExpired
        implements UserDetailsService {
    public UserDetails loadUserByUsername(String username) {
        if (!"peter".equals(username)) {
            throw new UsernameNotFoundException("Could not find: " + username);
        }
        return new User("peter", "opal", true, false, true, true, ROLES_12);
    }
}
// Knows exactly one user, "peter"; per the class name the cleared flag
// (last boolean) simulates a locked account.
private class MockAuthenticationDaoUserPeterAccountLocked
        implements UserDetailsService {
    public UserDetails loadUserByUsername(String username) {
        if (!"peter".equals(username)) {
            throw new UsernameNotFoundException("Could not find: " + username);
        }
        return new User("peter", "opal", true, true, true, false, ROLES_12);
    }
}
// Knows exactly one user, "peter"; per the class name the cleared flag
// (third boolean) simulates expired credentials.
private class MockAuthenticationDaoUserPeterCredentialsExpired
        implements UserDetailsService {
    public UserDetails loadUserByUsername(String username) {
        if (!"peter".equals(username)) {
            throw new UsernameNotFoundException("Could not find: " + username);
        }
        return new User("peter", "opal", true, true, false, true, ROLES_12);
    }
}
/**
 * Shared factory for the provider under test, configured with a pass-through
 * encoder so fixtures can use plain-text passwords.
 */
private DaoAuthenticationProvider createProvider() {
    DaoAuthenticationProvider dao = new DaoAuthenticationProvider();
    dao.setPasswordEncoder(NoOpPasswordEncoder.getInstance());
    return dao;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.buffer;
import io.netty.util.internal.PlatformDependent;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
import java.nio.charset.Charset;
import org.apache.drill.exec.memory.Accountor;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.util.AssertionUtil;
/**
 * Drill's little-endian, memory-accounted wrapper around a Netty {@link ByteBuf}.
 * Primitive reads/writes go straight to the wrapped buffer's memory address via
 * {@link PlatformDependent}; bulk operations delegate to the wrapped buffer.
 * A "root" buffer owns its allocation and reports to an {@link Accountor};
 * slices/duplicates share the root's memory and do no accounting of their own.
 */
public final class DrillBuf extends AbstractByteBuf {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillBuf.class);

  // Index bounds checks are only performed when assertion-based checking is on.
  private static final boolean BOUNDS_CHECKING_ENABLED = AssertionUtil.BOUNDS_CHECKING_ENABLED;

  private final ByteBuf b;            // the wrapped buffer (or parent DrillBuf for slices)
  private final long addr;            // absolute memory address of this buf's first byte
  private final int offset;           // byte offset of this view into the wrapped buffer
  private final boolean rootBuffer;   // true when this buf owns the allocation (not a slice)
  private volatile BufferAllocator allocator;
  private volatile Accountor acct;    // memory accounting target; null for slices
  private volatile int length;        // accounted capacity in bytes
  private OperatorContext context;    // optional; enables reallocIfNeeded()
  private FragmentContext fContext;   // optional fallback for reallocIfNeeded()

  // Root buffer backed by an allocator-provided little-endian buffer.
  public DrillBuf(BufferAllocator allocator, Accountor a, UnsafeDirectLittleEndian b) {
    super(b.maxCapacity());
    this.b = b;
    this.addr = b.memoryAddress();
    this.acct = a;
    this.length = b.capacity();
    this.offset = 0;
    this.rootBuffer = true;
    this.allocator = allocator;
  }

  // Root buffer wrapping an externally supplied direct ByteBuffer; uses fake
  // allocator/accountor objects since the memory is not Drill-managed.
  private DrillBuf(ByteBuffer bb) {
    super(bb.remaining());
    UnpooledUnsafeDirectByteBuf bytebuf = new UnpooledUnsafeDirectByteBuf(UnpooledByteBufAllocator.DEFAULT, bb, bb.remaining());
    this.acct = FakeAllocator.FAKE_ACCOUNTOR;
    this.addr = bytebuf.memoryAddress();
    this.allocator = FakeAllocator.FAKE_ALLOCATOR;
    this.b = bytebuf;
    this.length = bytebuf.capacity();
    this.offset = 0;
    this.rootBuffer = true;
    // Expose the ByteBuffer's readable bytes as already-written content.
    this.writerIndex(bb.remaining());
  }

  // Zero-length root buffer (see getEmpty()).
  private DrillBuf(BufferAllocator allocator, Accountor a) {
    super(0);
    this.b = new EmptyByteBuf(allocator.getUnderlyingAllocator()).order(ByteOrder.LITTLE_ENDIAN);
    this.allocator = allocator;
    this.acct = a;
    this.length = 0;
    this.addr = 0;
    this.rootBuffer = true;
    this.offset = 0;
  }

  // Slice constructor: shares the parent's memory starting at `index`.
  // Slices carry no accountor (acct == null) and are never root buffers.
  private DrillBuf(DrillBuf buffer, int index, int length) {
    super(length);
    if (index < 0 || index > buffer.capacity() - length) {
      throw new IndexOutOfBoundsException(buffer.toString() + ".slice(" + index + ", " + length + ')');
    }
    this.length = length;
    writerIndex(length);
    this.b = buffer;
    this.addr = buffer.memoryAddress() + index;
    this.offset = index;
    this.acct = null;
    this.length = length; // NOTE(review): redundant re-assignment, kept as-is
    this.rootBuffer = false;
    this.allocator = buffer.allocator;
  }

  public void setOperatorContext(OperatorContext c) {
    this.context = c;
  }

  public void setFragmentContext(FragmentContext c) {
    this.fContext = c;
  }

  public BufferAllocator getAllocator() {
    return allocator;
  }

  // Returns this buffer if it is already large enough; otherwise asks the
  // operator (or fragment) context for a replacement of at least `size` bytes.
  // Only usable when one of the contexts has been set.
  public DrillBuf reallocIfNeeded(int size) {
    if (this.capacity() >= size) {
      return this;
    }
    if (context != null) {
      return context.replace(this, size);
    } else if(fContext != null) {
      return fContext.replace(this, size);
    } else {
      throw new UnsupportedOperationException("Realloc is only available in the context of an operator's UDFs");
    }
  }

  @Override
  public int refCnt() {
    return b.refCnt(); // reference counting is delegated to the wrapped buffer
  }

  // Absolute address of byte `index` within this view.
  private long addr(int index) {
    return addr + index;
  }

  private final void checkIndexD(int index) {
    ensureAccessible();
    if (index < 0 || index >= capacity()) {
      throw new IndexOutOfBoundsException(String.format(
          "index: %d (expected: range(0, %d))", index, capacity()));
    }
  }

  private final void checkIndexD(int index, int fieldLength) {
    ensureAccessible();
    if (fieldLength < 0) {
      throw new IllegalArgumentException("length: " + fieldLength + " (expected: >= 0)");
    }
    if (index < 0 || index > capacity() - fieldLength) {
      throw new IndexOutOfBoundsException(String.format(
          "index: %d, length: %d (expected: range(0, %d))", index, fieldLength, capacity()));
    }
  }

  /**
   * Allows a function to determine whether not reading a particular string of bytes is valid.
   *
   * Will throw an exception if the memory is not readable for some reason. Only doesn't something in the
   * case that AssertionUtil.BOUNDS_CHECKING_ENABLED is true.
   *
   * @param start The starting position of the bytes to be read.
   * @param end The exclusive endpoint of the bytes to be read.
   */
  public void checkBytes(int start, int end){
    if (BOUNDS_CHECKING_ENABLED) {
      checkIndexD(start, end - start);
    }
  }

  // Bounds check for a `width`-byte access at `index`; no-op unless enabled.
  private void chk(int index, int width) {
    if (BOUNDS_CHECKING_ENABLED) {
      checkIndexD(index, width);
    }
  }

  private void chk(int index) {
    if (BOUNDS_CHECKING_ENABLED) {
      checkIndexD(index);
    }
  }

  // Writability check before a `width`-byte write; no-op unless enabled.
  private void ensure(int width) {
    if (BOUNDS_CHECKING_ENABLED) {
      ensureWritable(width);
    }
  }

  // Moves the accounted bytes of a root buffer to another Accountor.
  // Slices have no accounting of their own and cannot be transferred.
  public boolean transferAccounting(Accountor target) {
    if (rootBuffer) {
      boolean outcome = acct.transferTo(target, this, length);
      acct = target;
      return outcome;
    } else {
      throw new UnsupportedOperationException();
    }
  }

  // On final release of a root buffer, return the accounted bytes.
  @Override
  public synchronized boolean release() {
    if (b.release() && rootBuffer) {
      acct.release(this, length);
      return true;
    }
    return false;
  }

  @Override
  public synchronized boolean release(int decrement) {
    if (b.release(decrement) && rootBuffer) {
      acct.release(this, length);
      return true;
    }
    return false;
  }

  @Override
  public int capacity() {
    return length;
  }

  // Capacity may only shrink (releasing the freed bytes back to the
  // accountor), and only on root buffers.
  @Override
  public synchronized ByteBuf capacity(int newCapacity) {
    if (rootBuffer) {
      if (newCapacity == length) {
        return this;
      } else if (newCapacity < length) {
        b.capacity(newCapacity);
        int diff = length - b.capacity();
        acct.releasePartial(this, diff);
        this.length = length - diff;
        return this;
      } else {
        throw new UnsupportedOperationException("Accounting byte buf doesn't support increasing allocations.");
      }
    } else {
      // NOTE(review): "doen't" typo in this message left untouched (runtime string).
      throw new UnsupportedOperationException("Non root bufs doen't support changing allocations.");
    }
  }

  @Override
  public int maxCapacity() {
    return length; // max capacity tracks current accounted length
  }

  @Override
  public ByteBufAllocator alloc() {
    return b.alloc();
  }

  @Override
  public ByteOrder order() {
    return ByteOrder.LITTLE_ENDIAN; // DrillBufs are always little-endian
  }

  @Override
  public ByteBuf order(ByteOrder endianness) {
    // if(endianness != ByteOrder.LITTLE_ENDIAN) throw new
    // UnsupportedOperationException("Drill buffers only support little endian.");
    return this;
  }

  @Override
  public ByteBuf unwrap() {
    return b;
  }

  @Override
  public boolean isDirect() {
    return true;
  }

  @Override
  public ByteBuf readBytes(int length) {
    throw new UnsupportedOperationException();
  }

  @Override
  public ByteBuf readSlice(int length) {
    ByteBuf slice = slice(readerIndex(), length);
    readerIndex(readerIndex() + length);
    return slice;
  }

  @Override
  public ByteBuf copy() {
    throw new UnsupportedOperationException();
  }

  @Override
  public ByteBuf copy(int index, int length) {
    throw new UnsupportedOperationException();
  }

  @Override
  public ByteBuf slice() {
    return slice(readerIndex(), readableBytes());
  }

  // Slices share memory with this buffer; note no retain() is performed here.
  @Override
  public DrillBuf slice(int index, int length) {
    DrillBuf buf = new DrillBuf(this, index, length);
    buf.writerIndex = length;
    return buf;
  }

  @Override
  public DrillBuf duplicate() {
    return new DrillBuf(this, 0, length);
  }

  @Override
  public int nioBufferCount() {
    return 1;
  }

  @Override
  public ByteBuffer nioBuffer() {
    return nioBuffer(readerIndex(), readableBytes());
  }

  @Override
  public ByteBuffer nioBuffer(int index, int length) {
    return b.nioBuffer(offset + index, length);
  }

  @Override
  public ByteBuffer internalNioBuffer(int index, int length) {
    return b.internalNioBuffer(offset + index, length);
  }

  @Override
  public ByteBuffer[] nioBuffers() {
    return new ByteBuffer[]{nioBuffer()};
  }

  @Override
  public ByteBuffer[] nioBuffers(int index, int length) {
    return new ByteBuffer[]{nioBuffer(index, length)};
  }

  @Override
  public boolean hasArray() {
    return b.hasArray();
  }

  @Override
  public byte[] array() {
    return b.array();
  }

  @Override
  public int arrayOffset() {
    return b.arrayOffset();
  }

  @Override
  public boolean hasMemoryAddress() {
    return true;
  }

  @Override
  public long memoryAddress() {
    return this.addr;
  }

  @Override
  public String toString(Charset charset) {
    return toString(readerIndex, readableBytes(), charset);
  }

  @Override
  public String toString(int index, int length, Charset charset) {
    if (length == 0) {
      return "";
    }
    ByteBuffer nioBuffer;
    if (nioBufferCount() == 1) {
      nioBuffer = nioBuffer(index, length);
    } else {
      nioBuffer = ByteBuffer.allocate(length);
      getBytes(index, nioBuffer);
      nioBuffer.flip();
    }
    return ByteBufUtil.decodeString(nioBuffer, charset);
  }

  // Identity hash/equals: two DrillBufs are equal only if they are the same object.
  @Override
  public int hashCode() {
    return System.identityHashCode(this);
  }

  @Override
  public boolean equals(Object obj) {
    // identity equals only.
    return this == obj;
  }

  @Override
  public ByteBuf retain(int increment) {
    b.retain(increment);
    return this;
  }

  @Override
  public ByteBuf retain() {
    b.retain();
    return this;
  }

  // ---- direct-memory primitive accessors (little-endian via PlatformDependent) ----

  @Override
  public long getLong(int index) {
    chk(index, 8);
    long v = PlatformDependent.getLong(addr(index));
    return v;
  }

  @Override
  public float getFloat(int index) {
    return Float.intBitsToFloat(getInt(index));
  }

  @Override
  public double getDouble(int index) {
    return Double.longBitsToDouble(getLong(index));
  }

  @Override
  public char getChar(int index) {
    return (char) getShort(index);
  }

  @Override
  public long getUnsignedInt(int index) {
    return getInt(index) & 0xFFFFFFFFL;
  }

  @Override
  public int getInt(int index) {
    chk(index, 4);
    int v = PlatformDependent.getInt(addr(index));
    return v;
  }

  @Override
  public int getUnsignedShort(int index) {
    return getShort(index) & 0xFFFF;
  }

  @Override
  public short getShort(int index) {
    chk(index, 2);
    short v = PlatformDependent.getShort(addr(index));
    return v;
  }

  @Override
  public ByteBuf setShort(int index, int value) {
    chk(index, 2);
    PlatformDependent.putShort(addr(index), (short) value);
    return this;
  }

  @Override
  public ByteBuf setInt(int index, int value) {
    chk(index, 4);
    PlatformDependent.putInt(addr(index), value);
    return this;
  }

  @Override
  public ByteBuf setLong(int index, long value) {
    chk(index, 8);
    PlatformDependent.putLong(addr(index), value);
    return this;
  }

  @Override
  public ByteBuf setChar(int index, int value) {
    chk(index, 2);
    PlatformDependent.putShort(addr(index), (short) value);
    return this;
  }

  @Override
  public ByteBuf setFloat(int index, float value) {
    chk(index, 4);
    PlatformDependent.putInt(addr(index), Float.floatToRawIntBits(value));
    return this;
  }

  @Override
  public ByteBuf setDouble(int index, double value) {
    chk(index, 8);
    PlatformDependent.putLong(addr(index), Double.doubleToRawLongBits(value));
    return this;
  }

  // ---- writerIndex-relative writes ----

  @Override
  public ByteBuf writeShort(int value) {
    ensure(2);
    PlatformDependent.putShort(addr(writerIndex), (short) value);
    writerIndex += 2;
    return this;
  }

  @Override
  public ByteBuf writeInt(int value) {
    ensure(4);
    PlatformDependent.putInt(addr(writerIndex), value);
    writerIndex += 4;
    return this;
  }

  @Override
  public ByteBuf writeLong(long value) {
    ensure(8);
    PlatformDependent.putLong(addr(writerIndex), value);
    writerIndex += 8;
    return this;
  }

  @Override
  public ByteBuf writeChar(int value) {
    ensure(2);
    PlatformDependent.putShort(addr(writerIndex), (short) value);
    writerIndex += 2;
    return this;
  }

  @Override
  public ByteBuf writeFloat(float value) {
    ensure(4);
    PlatformDependent.putInt(addr(writerIndex), Float.floatToRawIntBits(value));
    writerIndex += 4;
    return this;
  }

  @Override
  public ByteBuf writeDouble(double value) {
    ensure(8);
    PlatformDependent.putLong(addr(writerIndex), Double.doubleToRawLongBits(value));
    writerIndex += 8;
    return this;
  }

  // ---- bulk transfers: delegated to the wrapped buffer, shifted by `offset` ----

  @Override
  public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
    b.getBytes(index + offset, dst, dstIndex, length);
    return this;
  }

  @Override
  public ByteBuf getBytes(int index, ByteBuffer dst) {
    b.getBytes(index + offset, dst);
    return this;
  }

  @Override
  public ByteBuf setByte(int index, int value) {
    chk(index, 1);
    PlatformDependent.putByte(addr(index), (byte) value);
    return this;
  }

  @Override
  protected byte _getByte(int index) {
    return getByte(index);
  }

  @Override
  protected short _getShort(int index) {
    return getShort(index);
  }

  @Override
  protected int _getInt(int index) {
    return getInt(index);
  }

  @Override
  protected long _getLong(int index) {
    return getLong(index);
  }

  @Override
  protected void _setByte(int index, int value) {
    setByte(index, value);
  }

  @Override
  protected void _setShort(int index, int value) {
    setShort(index, value);
  }

  @Override
  protected void _setMedium(int index, int value) {
    setMedium(index, value);
  }

  @Override
  protected void _setInt(int index, int value) {
    setInt(index, value);
  }

  @Override
  protected void _setLong(int index, long value) {
    setLong(index, value);
  }

  @Override
  public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
    b.getBytes(index + offset, dst, dstIndex, length);
    return this;
  }

  @Override
  public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
    b.getBytes(index + offset, out, length);
    return this;
  }

  // 24-bit big-endian-style assembly of three consecutive bytes.
  @Override
  protected int _getUnsignedMedium(int index) {
    long addr = addr(index);
    return (PlatformDependent.getByte(addr) & 0xff) << 16 |
        (PlatformDependent.getByte(addr + 1) & 0xff) << 8 |
        PlatformDependent.getByte(addr + 2) & 0xff;
  }

  @Override
  public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
    return b.getBytes(index + offset, out, length);
  }

  @Override
  public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
    b.setBytes(index + offset, src, srcIndex, length);
    return this;
  }

  @Override
  public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
    b.setBytes(index + offset, src, srcIndex, length);
    return this;
  }

  @Override
  public ByteBuf setBytes(int index, ByteBuffer src) {
    b.setBytes(index + offset, src);
    return this;
  }

  @Override
  public int setBytes(int index, InputStream in, int length) throws IOException {
    return b.setBytes(index + offset, in, length);
  }

  @Override
  public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
    return b.setBytes(index + offset, in, length);
  }

  @Override
  public byte getByte(int index) {
    chk(index, 1);
    return PlatformDependent.getByte(addr(index));
  }

  // Factory for a zero-length, accountor-attached buffer.
  public static DrillBuf getEmpty(BufferAllocator allocator, Accountor a) {
    return new DrillBuf(allocator, a);
  }

  public boolean isRootBuffer() {
    return rootBuffer;
  }

  // Wraps an existing direct ByteBuffer without Drill-managed accounting.
  public static DrillBuf wrapByteBuffer(ByteBuffer b) {
    if (!b.isDirect()) {
      throw new IllegalStateException("DrillBufs can only refer to direct memory.");
    } else {
      return new DrillBuf(b);
    }
  }
}
| |
/*
* Copyright 2002-2009 Andy Clark, Marc Guillemot
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cyberneko.html.filters;
import org.apache.xerces.xni.*;
import java.util.Hashtable;
/**
* This class is a document filter capable of removing specified
* elements from the processing stream. There are two options for
* processing document elements:
* <ul>
* <li>specifying those elements which should be accepted and,
* optionally, which attributes of that element should be
* kept; and
* <li>specifying those elements whose tags and content should be
* completely removed from the event stream.
* </ul>
* <p>
* The first option allows the application to specify which elements
* appearing in the event stream should be accepted and, therefore,
* passed on to the next stage in the pipeline. All elements
* <em>not</em> in the list of acceptable elements have their start
* and end tags stripped from the event stream <em>unless</em> those
* elements appear in the list of elements to be removed.
* <p>
* The second option allows the application to specify which elements
* should be completely removed from the event stream. When an element
* appears that is to be removed, the element's start and end tag as
* well as all of that element's content is removed from the event
* stream.
* <p>
* A common use of this filter would be to only allow rich-text
* and linking elements as well as the character content to pass
* through the filter — all other elements would be stripped.
* The following code shows how to configure this filter to perform
* this task:
* <pre>
* ElementRemover remover = new ElementRemover();
* remover.acceptElement("b", null);
* remover.acceptElement("i", null);
* remover.acceptElement("u", null);
* remover.acceptElement("a", new String[] { "href" });
* </pre>
* <p>
* However, this would still allow the text content of other
* elements to pass through, which may not be desirable. In order
* to further "clean" the input, the {@code removeElement}
* option can be used. The following piece of code adds the ability
* to completely remove any <SCRIPT> tags and content
* from the stream.
* <pre>
* remover.removeElement("script");
* </pre>
* <p>
* <strong>Note:</strong>
* All text and accepted element children of a stripped element is
* retained. To completely remove an element's content, use the
* {@code removeElement} method.
* <p>
* <strong>Note:</strong>
* Care should be taken when using this filter because the output
* may not be a well-balanced tree. Specifically, if the application
* removes the <HTML> element (with or without retaining its
* children), the resulting document event stream will no longer be
* well-formed.
*
* @author Andy Clark
* @version $Id: ElementRemover.java,v 1.5 2005/02/14 03:56:54 andyc Exp $
*/
public class ElementRemover
extends DefaultFilter {
//
// Constants
//

/**
 * A "null" object.
 */
// Sentinel value stored in the hashtables (Hashtable cannot store null values);
// for accepted elements it means "keep no attributes".
protected static final Object NULL = new Object();

//
// Data
//

// information

/**
 * Accepted elements.
 */
// Key: lower-cased element name. Value: NULL, or a String[] of attribute names to keep.
protected Hashtable fAcceptedElements = new Hashtable();

/**
 * Removed elements.
 */
// Key: lower-cased element name. Value: always NULL (presence marker only).
protected Hashtable fRemovedElements = new Hashtable();

// state

/**
 * The element depth.
 */
// Incremented on every start/empty element, decremented on every end element.
protected int fElementDepth;

/**
 * The element depth at element removal.
 */
// Integer.MAX_VALUE whenever processing is NOT inside a removed subtree
// (see startDocument and endElement).
protected int fRemovalElementDepth;
//
// Public methods
//

/**
 * Specifies that the given element should be accepted and, optionally,
 * which attributes of that element should be kept.
 *
 * @param element The element to accept.
 * @param attributes The list of attributes to be kept or null if no
 *                   attributes should be kept for this element.
 *
 * @see #removeElement
 */
public void acceptElement(String element, String[] attributes) {
    Object key = element.toLowerCase();
    Object value = NULL;
    if (attributes != null) {
        // Store a lower-cased copy so attribute-name matching is
        // case-insensitive regardless of how the caller capitalized them.
        String[] newarray = new String[attributes.length];
        for (int i = 0; i < attributes.length; i++) {
            newarray[i] = attributes[i].toLowerCase();
        }
        // FIX: previously assigned `attributes` here, discarding the
        // lower-cased copy and storing the caller's original array.
        value = newarray;
    }
    fAcceptedElements.put(key, value);
} // acceptElement(String,String[])
/**
 * Specifies that the given element should be completely removed. If an
 * element is encountered during processing that is on the remove list,
 * the element's start and end tags as well as all of content contained
 * within the element will be removed from the processing stream.
 *
 * @param element The element to completely remove.
 */
public void removeElement(String element) {
    // Keys are lower-cased; the stored value is only a presence marker.
    fRemovedElements.put(element.toLowerCase(), NULL);
} // removeElement(String)
//
// XMLDocumentHandler methods
//

// since Xerces-J 2.2.0

/**
 * {@inheritDoc}
 *
 * Start document: resets depth bookkeeping before delegating.
 */
public void startDocument(XMLLocator locator, String encoding,
                          NamespaceContext nscontext, Augmentations augs)
    throws XNIException {
    // MAX_VALUE means "not currently inside a removed subtree".
    fRemovalElementDepth = Integer.MAX_VALUE;
    fElementDepth = 0;
    super.startDocument(locator, encoding, nscontext, augs);
} // startDocument(XMLLocator,String,NamespaceContext,Augmentations)
// old methods

/**
 * {@inheritDoc}
 *
 * Start document.
 */
// Pre-Xerces-2.2.0 signature; forwards to the namespace-aware overload with
// a null NamespaceContext.
public void startDocument(XMLLocator locator, String encoding, Augmentations augs)
    throws XNIException {
    startDocument(locator, encoding, null, augs);
} // startDocument(XMLLocator,String,Augmentations)
/**
 * {@inheritDoc}
 *
 * Start prefix mapping.
 */
public void startPrefixMapping(String prefix, String uri, Augmentations augs)
    throws XNIException {
    if (fElementDepth > fRemovalElementDepth) {
        return; // event originates inside a removed subtree
    }
    super.startPrefixMapping(prefix, uri, augs);
} // startPrefixMapping(String,String,Augmentations)
/**
 * {@inheritDoc}
 *
 * Start element.
 */
public void startElement(QName element, XMLAttributes attributes, Augmentations augs)
    throws XNIException {
    boolean insideRemovedSubtree = fElementDepth > fRemovalElementDepth;
    // Preserve the original short-circuit: handleOpenTag is only consulted
    // when not already inside a removed subtree.
    if (!insideRemovedSubtree && handleOpenTag(element, attributes)) {
        super.startElement(element, attributes, augs);
    }
    fElementDepth++;
} // startElement(QName,XMLAttributes,Augmentations)
/**
 * {@inheritDoc}
 *
 * Empty element.
 */
public void emptyElement(QName element, XMLAttributes attributes, Augmentations augs)
    throws XNIException {
    boolean insideRemovedSubtree = fElementDepth > fRemovalElementDepth;
    // Same gating as startElement; empty elements do not change the depth.
    if (!insideRemovedSubtree && handleOpenTag(element, attributes)) {
        super.emptyElement(element, attributes, augs);
    }
} // emptyElement(QName,XMLAttributes,Augmentations)
/**
 * {@inheritDoc}
 *
 * Comment.
 */
public void comment(XMLString text, Augmentations augs)
    throws XNIException {
    if (fElementDepth > fRemovalElementDepth) {
        return; // comment lies inside a removed subtree
    }
    super.comment(text, augs);
} // comment(XMLString,Augmentations)
/**
 * {@inheritDoc}
 *
 * Processing instruction.
 */
public void processingInstruction(String target, XMLString data, Augmentations augs)
    throws XNIException {
    if (fElementDepth > fRemovalElementDepth) {
        return; // PI lies inside a removed subtree
    }
    super.processingInstruction(target, data, augs);
} // processingInstruction(String,XMLString,Augmentations)
/**
 * {@inheritDoc}
 *
 * Characters.
 */
public void characters(XMLString text, Augmentations augs)
    throws XNIException {
    if (fElementDepth > fRemovalElementDepth) {
        return; // text lies inside a removed subtree
    }
    super.characters(text, augs);
} // characters(XMLString,Augmentations)
/**
 * {@inheritDoc}
 *
 * Ignorable whitespace.
 */
public void ignorableWhitespace(XMLString text, Augmentations augs)
    throws XNIException {
    if (fElementDepth > fRemovalElementDepth) {
        return; // whitespace lies inside a removed subtree
    }
    super.ignorableWhitespace(text, augs);
} // ignorableWhitespace(XMLString,Augmentations)
/**
 * {@inheritDoc}
 *
 * Start general entity.
 */
public void startGeneralEntity(String name, XMLResourceIdentifier id, String encoding, Augmentations augs)
    throws XNIException {
    if (fElementDepth > fRemovalElementDepth) {
        return; // entity starts inside a removed subtree
    }
    super.startGeneralEntity(name, id, encoding, augs);
} // startGeneralEntity(String,XMLResourceIdentifier,String,Augmentations)
/**
 * {@inheritDoc}
 *
 * Text declaration.
 */
public void textDecl(String version, String encoding, Augmentations augs)
    throws XNIException {
    if (fElementDepth > fRemovalElementDepth) {
        return; // declaration lies inside a removed subtree
    }
    super.textDecl(version, encoding, augs);
} // textDecl(String,String,Augmentations)
/**
 * {@inheritDoc}
 *
 * End general entity.
 */
public void endGeneralEntity(String name, Augmentations augs)
    throws XNIException {
    if (fElementDepth > fRemovalElementDepth) {
        return; // entity ends inside a removed subtree
    }
    super.endGeneralEntity(name, augs);
} // endGeneralEntity(String,Augmentations)
/**
 * {@inheritDoc}
 *
 * Start CDATA section.
 */
public void startCDATA(Augmentations augs) throws XNIException {
    if (fElementDepth > fRemovalElementDepth) {
        return; // CDATA starts inside a removed subtree
    }
    super.startCDATA(augs);
} // startCDATA(Augmentations)
/**
* {@inheritDoc}
*
* End CDATA section.
*/
public void endCDATA(Augmentations augs) throws XNIException {
if (fElementDepth <= fRemovalElementDepth) {
super.endCDATA(augs);
}
} // endCDATA(Augmentations)
/**
* {@inheritDoc}
*
* End element.
*/
public void endElement(QName element, Augmentations augs)
throws XNIException {
if (fElementDepth <= fRemovalElementDepth && elementAccepted(element.rawname)) {
super.endElement(element, augs);
}
fElementDepth--;
if (fElementDepth == fRemovalElementDepth) {
fRemovalElementDepth = Integer.MAX_VALUE;
}
} // endElement(QName,Augmentations)
/**
* {@inheritDoc}
*
* End prefix mapping.
*/
public void endPrefixMapping(String prefix, Augmentations augs)
throws XNIException {
if (fElementDepth <= fRemovalElementDepth) {
super.endPrefixMapping(prefix, augs);
}
} // endPrefixMapping(String,Augmentations)
//
// Protected methods
//
/**
* Should the element be accepted
*
* @param element Name of element to test
* @return true if the specified element is accepted.
*/
protected boolean elementAccepted(String element) {
Object key = element.toLowerCase();
return fAcceptedElements.containsKey(key);
} // elementAccepted(String):boolean
/**
* <p>elementRemoved.</p>
*
* @param element Name of element to test
* @return true if the specified element should be removed.
*/
protected boolean elementRemoved(String element) {
Object key = element.toLowerCase();
return fRemovedElements.containsKey(key);
} // elementRemoved(String):boolean
/**
* Handles an open tag.
*
* @param element QName of element
* @param attributes element attributes
* @return true if element is the element is accepted
*/
protected boolean handleOpenTag(QName element, XMLAttributes attributes) {
if (elementAccepted(element.rawname)) {
Object key = element.rawname.toLowerCase();
Object value = fAcceptedElements.get(key);
if (value != NULL) {
String[] anames = (String[]) value;
int attributeCount = attributes.getLength();
LOOP:
for (int i = 0; i < attributeCount; i++) {
String aname = attributes.getQName(i).toLowerCase();
for (int j = 0; j < anames.length; j++) {
if (anames[j].equals(aname)) {
continue LOOP;
}
}
attributes.removeAttributeAt(i--);
attributeCount--;
}
} else {
attributes.removeAllAttributes();
}
return true;
} else if (elementRemoved(element.rawname)) {
fRemovalElementDepth = fElementDepth;
}
return false;
} // handleOpenTag(QName,XMLAttributes):boolean
} // class DefaultFilter
| |
//
// This file was generated by the JavaTM Architecture for XML Binding (JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.06.04 at 07:58:30 PM BST
//
package pubmed.openAccess.jaxb.generated;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlMixed;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import elsevier.jaxb.math.mathml.Math;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <choice maxOccurs="unbounded" minOccurs="0">
* <element ref="{}email"/>
* <element ref="{}ext-link"/>
* <element ref="{}uri"/>
* <element ref="{}inline-supplementary-material"/>
* <element ref="{}related-article"/>
* <element ref="{}related-object"/>
* <element ref="{}hr"/>
* <element ref="{}bold"/>
* <element ref="{}italic"/>
* <element ref="{}monospace"/>
* <element ref="{}overline"/>
* <element ref="{}overline-start"/>
* <element ref="{}overline-end"/>
* <element ref="{}roman"/>
* <element ref="{}sans-serif"/>
* <element ref="{}sc"/>
* <element ref="{}strike"/>
* <element ref="{}underline"/>
* <element ref="{}underline-start"/>
* <element ref="{}underline-end"/>
* <element ref="{}alternatives"/>
* <element ref="{}inline-graphic"/>
* <element ref="{}private-char"/>
* <element ref="{}chem-struct"/>
* <element ref="{}inline-formula"/>
* <element ref="{}tex-math"/>
* <element ref="{http://www.w3.org/1998/Math/MathML}math"/>
* <element ref="{}abbrev"/>
* <element ref="{}milestone-end"/>
* <element ref="{}milestone-start"/>
* <element ref="{}named-content"/>
* <element ref="{}styled-content"/>
* <element ref="{}fn"/>
* <element ref="{}target"/>
* <element ref="{}xref"/>
* <element ref="{}sub"/>
* <element ref="{}sup"/>
* <element ref="{}x"/>
* </choice>
* <attribute name="content-type" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"content"
})
@XmlRootElement(name = "conf-num")
public class ConfNum {
@XmlElementRefs({
@XmlElementRef(name = "related-article", type = RelatedArticle.class),
@XmlElementRef(name = "hr", type = Hr.class),
@XmlElementRef(name = "underline-start", type = UnderlineStart.class),
@XmlElementRef(name = "inline-formula", type = InlineFormula.class),
@XmlElementRef(name = "xref", type = Xref.class),
@XmlElementRef(name = "roman", type = Roman.class),
@XmlElementRef(name = "fn", type = Fn.class),
@XmlElementRef(name = "italic", type = Italic.class),
@XmlElementRef(name = "related-object", type = RelatedObject.class),
@XmlElementRef(name = "private-char", type = PrivateChar.class),
@XmlElementRef(name = "overline", type = Overline.class),
@XmlElementRef(name = "email", type = Email.class),
@XmlElementRef(name = "alternatives", type = Alternatives.class),
@XmlElementRef(name = "inline-graphic", type = InlineGraphic.class),
@XmlElementRef(name = "chem-struct", type = ChemStruct.class),
@XmlElementRef(name = "x", type = X.class),
@XmlElementRef(name = "uri", type = Uri.class),
@XmlElementRef(name = "styled-content", type = StyledContent.class),
@XmlElementRef(name = "strike", type = Strike.class),
@XmlElementRef(name = "overline-end", type = OverlineEnd.class),
@XmlElementRef(name = "inline-supplementary-material", type = InlineSupplementaryMaterial.class),
@XmlElementRef(name = "milestone-start", type = MilestoneStart.class),
@XmlElementRef(name = "sup", type = Sup.class),
@XmlElementRef(name = "abbrev", type = Abbrev.class),
@XmlElementRef(name = "target", type = Target.class),
@XmlElementRef(name = "monospace", type = Monospace.class),
@XmlElementRef(name = "milestone-end", type = MilestoneEnd.class),
@XmlElementRef(name = "ext-link", type = ExtLink.class),
@XmlElementRef(name = "underline-end", type = UnderlineEnd.class),
@XmlElementRef(name = "bold", type = Bold.class),
@XmlElementRef(name = "sub", type = Sub.class),
@XmlElementRef(name = "sans-serif", type = SansSerif.class),
@XmlElementRef(name = "math", namespace = "http://www.w3.org/1998/Math/MathML", type = Math.class),
@XmlElementRef(name = "named-content", type = NamedContent.class),
@XmlElementRef(name = "underline", type = Underline.class),
@XmlElementRef(name = "sc", type = Sc.class),
@XmlElementRef(name = "tex-math", type = TexMath.class),
@XmlElementRef(name = "overline-start", type = OverlineStart.class)
})
@XmlMixed
protected List<Object> content;
@XmlAttribute(name = "content-type")
@XmlSchemaType(name = "anySimpleType")
protected String contentType;
/**
* Gets the value of the content property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the content property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getContent().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link RelatedArticle }
* {@link UnderlineStart }
* {@link Hr }
* {@link Xref }
* {@link InlineFormula }
* {@link String }
* {@link Roman }
* {@link Italic }
* {@link Fn }
* {@link PrivateChar }
* {@link RelatedObject }
* {@link Overline }
* {@link Email }
* {@link Alternatives }
* {@link ChemStruct }
* {@link InlineGraphic }
* {@link Uri }
* {@link X }
* {@link StyledContent }
* {@link OverlineEnd }
* {@link Strike }
* {@link InlineSupplementaryMaterial }
* {@link MilestoneStart }
* {@link Monospace }
* {@link Target }
* {@link Abbrev }
* {@link Sup }
* {@link MilestoneEnd }
* {@link ExtLink }
* {@link UnderlineEnd }
* {@link Bold }
* {@link SansSerif }
* {@link Sub }
* {@link Underline }
* {@link NamedContent }
* {@link Math }
* {@link Sc }
* {@link TexMath }
* {@link OverlineStart }
*
*
*/
public List<Object> getContent() {
if (content == null) {
content = new ArrayList<Object>();
}
return this.content;
}
/**
* Gets the value of the contentType property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getContentType() {
return contentType;
}
/**
* Sets the value of the contentType property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setContentType(String value) {
this.contentType = value;
}
}
| |
/*
* Copyright (c) 2015, Ericsson AB.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or other
* materials provided with the distribution.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*/
package com.ericsson.research.owr.sdk;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import com.ericsson.research.owr.AudioRenderer;
import com.ericsson.research.owr.CaptureSourcesCallback;
import com.ericsson.research.owr.DataChannel;
import com.ericsson.research.owr.MediaSource;
import com.ericsson.research.owr.MediaType;
import com.ericsson.research.owr.DataChannelReadyState;
import com.ericsson.research.owr.Owr;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
public class RtcSessionTest extends OwrTestCase {
private static final String TAG = "RtcSessionTest";
private MediaSource mVideoSource;
private MediaSource mAudioSource;
public void testStuff() {
RtcConfig config = RtcConfigs.defaultConfig("stun.example.com");
final RtcSession session = RtcSessions.create(config);
session.setOnLocalCandidateListener(new RtcSession.OnLocalCandidateListener() {
@Override
public void onLocalCandidate(final RtcCandidate candidate) {
}
});
session.stop();
}
public void testCall() {
RtcConfig config = RtcConfigs.defaultConfig(Collections.<RtcConfig.HelperServer>emptyList());
final RtcSession out = RtcSessions.create(config);
final RtcSession in = RtcSessions.create(config);
out.setOnLocalCandidateListener(new RtcSession.OnLocalCandidateListener() {
@Override
public void onLocalCandidate(final RtcCandidate candidate) {
assertSame(Looper.getMainLooper(), Looper.myLooper());
Log.e(TAG, "LOCAL CANDIDATE out -> in: " + RtcCandidates.toJsep(candidate));
in.addRemoteCandidate(candidate);
}
});
in.setOnLocalCandidateListener(new RtcSession.OnLocalCandidateListener() {
@Override
public void onLocalCandidate(final RtcCandidate candidate) {
assertSame(Looper.getMainLooper(), Looper.myLooper());
Log.e(TAG, "LOCAL CANDIDATE in -> out: " + RtcCandidates.toJsep(candidate));
out.addRemoteCandidate(candidate);
}
});
final StreamSetMock streamSetMockOut = new StreamSetMock("initiator", Arrays.asList(
video("video1", true, true),
video("video2", true, true),
audio("audio1", true, true)
));
final StreamSetMock streamSetMockIn = new StreamSetMock("peer", Arrays.asList(
video("video1", true, true),
audio("audio1", true, true),
audio("audio2", true, true)
));
TestUtils.synchronous().timeout(5).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
Owr.getCaptureSources(EnumSet.of(MediaType.VIDEO, MediaType.AUDIO), new CaptureSourcesCallback() {
@Override
public void onCaptureSourcesCallback(final List<MediaSource> list) {
Log.d(TAG, "got capture sources: " + list.size());
for (MediaSource source : list) {
Log.d(TAG, "iterate source " + source.getName());
if (source.getMediaType().contains(MediaType.VIDEO) && mVideoSource == null) {
Log.d(TAG, "got video source: " + source + " " + source.getName());
mVideoSource = source;
} else if (source.getMediaType().contains(MediaType.AUDIO) && mAudioSource == null) {
Log.d(TAG, "got audio source: " + source);
mAudioSource = source;
}
}
latch.countDown();
}
});
}
});
streamSetMockOut.getMediaStream("audio1").setMediaSource(mAudioSource);
streamSetMockIn.getMediaStream("audio1").setMediaSource(mAudioSource);
streamSetMockOut.getMediaStream("video1").setMediaSource(mVideoSource);
streamSetMockIn.getMediaStream("video1").setMediaSource(mVideoSource);
TestUtils.synchronous().timeout(30).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
out.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
assertSame(Looper.getMainLooper(), Looper.myLooper());
Log.w(TAG, "OFFER: " + SessionDescriptions.toJsep(localDescription));
try {
in.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
in.start(streamSetMockIn);
}
});
in.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
assertSame(Looper.getMainLooper(), Looper.myLooper());
try {
out.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
Log.w(TAG, "ANSWER: " + SessionDescriptions.toJsep(localDescription));
latch.countDown();
}
});
out.start(streamSetMockOut);
Log.d(TAG, "waiting for call start");
}
});
assertEquals(StreamMode.SEND_RECEIVE, streamSetMockOut.getMediaStream("video1").getStreamMode());
assertEquals(StreamMode.SEND_RECEIVE, streamSetMockOut.getMediaStream("audio1").getStreamMode());
assertEquals(StreamMode.SEND_RECEIVE, streamSetMockIn.getMediaStream("video1").getStreamMode());
assertEquals(StreamMode.SEND_RECEIVE, streamSetMockIn.getMediaStream("audio1").getStreamMode());
assertEquals(StreamMode.INACTIVE, streamSetMockOut.getMediaStream("video2").getStreamMode());
assertEquals(StreamMode.INACTIVE, streamSetMockIn.getMediaStream("audio2").getStreamMode());
TestUtils.synchronous().timeout(15).latchCount(4).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
streamSetMockOut.getMediaStream("audio1").waitForRemoteSource(latch);
streamSetMockIn.getMediaStream("audio1").waitForRemoteSource(latch);
streamSetMockOut.getMediaStream("video1").waitForRemoteSource(latch);
streamSetMockIn.getMediaStream("video1").waitForRemoteSource(latch);
Log.d(TAG, "waiting for remote sources");
}
});
Log.d(TAG, "got all remote sources");
Log.d(TAG, "testing stop");
out.stop();
in.stop();
try {
Thread.sleep(200); // wait a bit for stop to complete
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
Log.d(TAG, "stopped");
}
public void testEmptyCall() {
RtcConfig config = RtcConfigs.defaultConfig(Collections.<RtcConfig.HelperServer>emptyList());
final RtcSession out = RtcSessions.create(config);
final RtcSession in = RtcSessions.create(config);
final StreamSetMock streamSetMockOut = new StreamSetMock("initiator", Arrays.asList(
video("video1", false, false),
audio("audio1", false, false)
));
final StreamSetMock streamSetMockIn = new StreamSetMock("peer", Arrays.asList(
video("video1", false, false),
audio("audio1", false, false)
));
TestUtils.synchronous().timeout(5).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
Owr.getCaptureSources(EnumSet.of(MediaType.VIDEO, MediaType.AUDIO), new CaptureSourcesCallback() {
@Override
public void onCaptureSourcesCallback(final List<MediaSource> list) {
Log.d(TAG, "got capture sources: " + list.size());
for (MediaSource source : list) {
Log.d(TAG, "iterate source " + source.getName());
if (source.getMediaType().contains(MediaType.VIDEO) && mVideoSource == null) {
Log.d(TAG, "got video source: " + source + " " + source.getName());
mVideoSource = source;
} else if (source.getMediaType().contains(MediaType.AUDIO) && mAudioSource == null) {
Log.d(TAG, "got audio source: " + source);
mAudioSource = source;
}
}
latch.countDown();
}
});
}
});
TestUtils.synchronous().timeout(30).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
out.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
Log.w(TAG, "OFFER: " + SessionDescriptions.toJsep(localDescription));
try {
in.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
in.start(streamSetMockIn);
}
});
in.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
try {
out.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
Log.w(TAG, "ANSWER: " + SessionDescriptions.toJsep(localDescription));
latch.countDown();
}
});
out.start(streamSetMockOut);
Log.d(TAG, "waiting for call start");
}
});
assertEquals(StreamMode.INACTIVE, streamSetMockOut.getMediaStream("video1").getStreamMode());
assertEquals(StreamMode.INACTIVE, streamSetMockOut.getMediaStream("audio1").getStreamMode());
assertEquals(StreamMode.INACTIVE, streamSetMockIn.getMediaStream("video1").getStreamMode());
assertEquals(StreamMode.INACTIVE, streamSetMockIn.getMediaStream("audio1").getStreamMode());
out.stop();
in.stop();
}
public void testJsepCall() {
RtcConfig config = RtcConfigs.defaultConfig(Collections.<RtcConfig.HelperServer>emptyList());
final RtcSession out = RtcSessions.create(config);
final RtcSession in = RtcSessions.create(config);
out.setOnLocalCandidateListener(new RtcSession.OnLocalCandidateListener() {
@Override
public void onLocalCandidate(final RtcCandidate candidate) {
assertSame(Looper.getMainLooper(), Looper.myLooper());
JSONObject json = RtcCandidates.toJsep(candidate);
Log.e(TAG, "LOCAL CANDIDATE out -> in: " + json);
in.addRemoteCandidate(RtcCandidates.fromJsep(json));
}
});
in.setOnLocalCandidateListener(new RtcSession.OnLocalCandidateListener() {
@Override
public void onLocalCandidate(final RtcCandidate candidate) {
assertSame(Looper.getMainLooper(), Looper.myLooper());
JSONObject json = RtcCandidates.toJsep(candidate);
Log.e(TAG, "LOCAL CANDIDATE in -> out: " + json);
out.addRemoteCandidate(RtcCandidates.fromJsep(json));
}
});
final StreamSetMock streamSetMockOut = new StreamSetMock("initiator", Arrays.asList(
video("video1", true, true),
audio("audio1", true, true)
));
final StreamSetMock streamSetMockIn = new StreamSetMock("peer", Arrays.asList(
video("video1", true, true),
audio("audio1", true, true)
));
TestUtils.synchronous().timeout(30).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
out.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
assertSame(Looper.getMainLooper(), Looper.myLooper());
JSONObject jsepOffer = SessionDescriptions.toJsep(localDescription);
try {
in.setRemoteDescription(SessionDescriptions.fromJsep(jsepOffer));
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
in.start(streamSetMockIn);
}
});
in.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
assertSame(Looper.getMainLooper(), Looper.myLooper());
JSONObject jsepAnswer = SessionDescriptions.toJsep(localDescription);
try {
out.setRemoteDescription(SessionDescriptions.fromJsep(jsepAnswer));
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
latch.countDown();
}
});
out.start(streamSetMockOut);
Log.d(TAG, "waiting for call start");
}
});
assertEquals(StreamMode.SEND_RECEIVE, streamSetMockOut.getMediaStream("video1").getStreamMode());
assertEquals(StreamMode.SEND_RECEIVE, streamSetMockOut.getMediaStream("audio1").getStreamMode());
assertEquals(StreamMode.SEND_RECEIVE, streamSetMockIn.getMediaStream("video1").getStreamMode());
assertEquals(StreamMode.SEND_RECEIVE, streamSetMockIn.getMediaStream("audio1").getStreamMode());
out.stop();
in.stop();
Log.d(TAG, "stopped");
}
public void testStopAndStartWithNew() {
RtcConfig config = RtcConfigs.defaultConfig(Collections.<RtcConfig.HelperServer>emptyList());
final RtcSession out = RtcSessions.create(config);
final RtcSession in = RtcSessions.create(config);
final RtcSession in2 = RtcSessions.create(config);
final StreamSetMock streamSetMock = new StreamSetMock("simple", Collections.singletonList(
audio("audio1", true, true)
));
final SessionDescription[] outOffer = new SessionDescription[1];
TestUtils.synchronous().timeout(30).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
out.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
outOffer[0] = localDescription;
try {
in.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
in.start(streamSetMock);
}
});
in.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
in.stop();
latch.countDown();
}
});
out.start(streamSetMock);
Log.d(TAG, "waiting for 1/2 call start");
}
});
TestUtils.synchronous().timeout(30).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
try {
in2.setRemoteDescription(outOffer[0]);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
in2.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
try {
out.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
in2.stop();
latch.countDown();
}
});
in2.start(streamSetMock);
Log.d(TAG, "waiting rest 1/2 of call start");
}
});
out.stop();
try {
Thread.sleep(200);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
public void testImmediateStop() {
RtcConfig config = RtcConfigs.defaultConfig(Collections.<RtcConfig.HelperServer>emptyList());
final RtcSession session1 = RtcSessions.create(config);
final RtcSession session2 = RtcSessions.create(config);
final StreamSetMock streamSetMock = new StreamSetMock("simple", Collections.singletonList(
audio("audio1", true, true)
));
TestUtils.synchronous().timeout(30).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
session1.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
throw new RuntimeException("should not be reached");
}
});
session1.start(streamSetMock);
session1.stop();
session2.start(streamSetMock);
session2.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
try {
// Setting the remote description just after starting should be ignored
session1.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
session2.stop();
latch.countDown();
}
});
Log.d(TAG, "waiting for call start");
}
});
try {
Thread.sleep(200);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
public void testDatachannel() {
RtcConfig config = RtcConfigs.defaultConfig(Collections.<RtcConfig.HelperServer>emptyList());
final RtcSession out = RtcSessions.create(config);
final RtcSession in = RtcSessions.create(config);
out.setOnLocalCandidateListener(new RtcSession.OnLocalCandidateListener() {
@Override
public void onLocalCandidate(final RtcCandidate candidate) {
assertSame(Looper.getMainLooper(), Looper.myLooper());
Log.e(TAG, "LOCAL CANDIDATE out -> in: " + RtcCandidates.toJsep(candidate));
in.addRemoteCandidate(candidate);
}
});
in.setOnLocalCandidateListener(new RtcSession.OnLocalCandidateListener() {
@Override
public void onLocalCandidate(final RtcCandidate candidate) {
assertSame(Looper.getMainLooper(), Looper.myLooper());
Log.e(TAG, "LOCAL CANDIDATE in -> out: " + RtcCandidates.toJsep(candidate));
out.addRemoteCandidate(candidate);
}
});
final StreamSetMock streamSetOut = new StreamSetMock("initiator", Collections.singletonList(data()));
final StreamSetMock streamSetIn = new StreamSetMock("peer", Collections.singletonList(data()));
TestUtils.synchronous().timeout(30).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
out.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
Log.w(TAG, "OFFER: " + SessionDescriptions.toJsep(localDescription));
try {
in.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
in.start(streamSetIn);
}
});
in.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
try {
out.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
Log.w(TAG, "ANSWER: " + SessionDescriptions.toJsep(localDescription));
latch.countDown();
}
});
out.start(streamSetOut);
Log.d(TAG, "waiting for call start");
}
});
TestUtils.synchronous().latchCount(2).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
Log.d(TAG, "waiting for streams to become active");
streamSetOut.getDataStream().waitUntilActive(latch);
streamSetIn.getDataStream().waitUntilActive(latch);
}
});
assertSame(StreamMode.SEND_RECEIVE, streamSetOut.getDataStream().getStreamMode());
assertSame(StreamMode.SEND_RECEIVE, streamSetIn.getDataStream().getStreamMode());
Log.d(TAG, "streams are active");
/* final DataChannel channelOut1 = new DataChannel(true, -1, 10, "UTPE", false, (short) 1, "test");
DataChannel.ReadyStateChangeListener readyStateChangeListener = new DataChannel.ReadyStateChangeListener() {
@Override
public void onReadyStateChanged(final DataChannelReadyState dataChannelReadyState) {
Log.d(TAG, "DATACHANNEL: ready state changed: " + dataChannelReadyState);
}
};
channelOut1.addReadyStateChangeListener(readyStateChangeListener);
streamSetOut.getDataStream().addDataChannel(channelOut1);
TestUtils.synchronous().run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
streamSetIn.getDataStream().waitForDataChannels(latch);
}
});
final DataChannel channelIn1 = streamSetIn.getDataStream().getReceivedDataChannels().get(0);
channelIn1.addReadyStateChangeListener(readyStateChangeListener);
runDataChannelMessageTest("requested", channelOut1, channelIn1);
final DataChannel channelOut2 = new DataChannel(false, 1000, -1, "TEST", true, (short) 3, "test2");
final DataChannel channelIn2 = new DataChannel(false, 1000, -1, "TEST", true, (short) 3, "test2");
channelOut2.addReadyStateChangeListener(readyStateChangeListener);
channelIn2.addReadyStateChangeListener(readyStateChangeListener);
streamSetOut.getDataStream().addDataChannel(channelOut2);
streamSetIn.getDataStream().addDataChannel(channelIn2);
runDataChannelMessageTest("pre-negotiated", channelOut2, channelIn2);
out.stop();
in.stop();*/
}
private void runDataChannelMessageTest(final String label, final DataChannel left, final DataChannel right) {
final Handler handler = new Handler(Looper.getMainLooper());
TestUtils.synchronous().latchCount(4).run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
DataChannel.OnDataListener latchCounter = new DataChannel.OnDataListener() {
@Override
public void onData(final String string) {
Log.d(TAG, "[" + label + "] got string data: " + string);
if ("message".equals(string)) {
latch.countDown();
}
}
};
DataChannel.OnBinaryDataListener binaryLatchCounter = new DataChannel.OnBinaryDataListener() {
@Override
public void onBinaryData(final byte[] bytes) {
String string = new String(bytes);
Log.d(TAG, "[" + label + "] got binary data: " + string);
if ("message".equals(string)) {
latch.countDown();
}
}
};
left.addOnDataListener(latchCounter);
left.addOnBinaryDataListener(binaryLatchCounter);
right.addOnDataListener(latchCounter);
right.addOnBinaryDataListener(binaryLatchCounter);
handler.postDelayed(new Runnable() {
@Override
public void run() {
Log.d(TAG, "[" + label + "] sending 'message' left -> right");
left.send("message");
left.sendBinary("message".getBytes());
Log.d(TAG, "[" + label + "] sending 'message' right -> left");
right.send("message");
right.sendBinary("message".getBytes());
}
}, 100);
}
});
}
/**
 * Exercises addRemoteCandidate() in every session state: before the remote
 * description is set, after it, from within the local-description callback,
 * during startup, after startup and after stop(). None of the calls may
 * throw; calls made in states where candidates cannot be used are expected
 * to be ignored.
 */
public void testAddingCandidates() {
final RtcCandidate candidate = new RtcCandidateImpl(0, null, "a", "b", "1", RtcCandidate.ComponentType.RTP,
RtcCandidate.TransportType.UDP, 0, "1.2.3.4", 2, RtcCandidate.CandidateType.HOST, null, 0);
final SessionDescription remoteDescription = new SessionDescriptionImpl(SessionDescription.Type.OFFER, "1", Collections.<StreamDescription>emptyList());
RtcConfig config = RtcConfigs.defaultConfig(Collections.<RtcConfig.HelperServer>emptyList());
// Session 1: the remote description is set BEFORE start().
final RtcSession session1 = RtcSessions.create(config);
TestUtils.synchronous().run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
// Before the remote description is known.
session1.addRemoteCandidate(candidate);
try {
session1.setRemoteDescription(remoteDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
// After the remote description, before start().
session1.addRemoteCandidate(candidate);
session1.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
// From within the local-description callback.
session1.addRemoteCandidate(candidate);
latch.countDown();
}
});
session1.start(new StreamSetMock("empty", Collections.<StreamConfig>emptyList()));
// While the session is starting up.
session1.addRemoteCandidate(candidate);
}
});
// After startup has completed, and after stop().
session1.addRemoteCandidate(candidate);
session1.stop();
session1.addRemoteCandidate(candidate);
// Session 2: the remote description is set AFTER start(), from inside the
// local-description callback.
final RtcSession session2 = RtcSessions.create(config);
TestUtils.synchronous().run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
session2.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
try {
// Candidates straddling the setRemoteDescription() call.
session2.addRemoteCandidate(candidate);
session2.setRemoteDescription(remoteDescription);
session2.addRemoteCandidate(candidate);
latch.countDown();
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
}
});
// Before start() and during startup.
session2.addRemoteCandidate(candidate);
session2.start(new StreamSetMock("empty", Collections.<StreamConfig>emptyList()));
session2.addRemoteCandidate(candidate);
}
});
// After startup and after stop().
session2.addRemoteCandidate(candidate);
session2.stop();
session2.addRemoteCandidate(candidate);
}
/**
 * Verifies the session's contract for invalid calls: null arguments must
 * fail fast with a NullPointerException, while calls made in the wrong
 * state (start() while started, setRemoteDescription() during startup or
 * after stop()) must be silently ignored rather than throw.
 */
public void testInvalidCalls() {
RtcConfig config = RtcConfigs.defaultConfig(Collections.<RtcConfig.HelperServer>emptyList());
final RtcSession session = RtcSessions.create(config);
final StreamSetMock streamSetMock = new StreamSetMock("empty", Collections.<StreamConfig>emptyList());
try {
session.setRemoteDescription(null); // invalid argument
throw new RuntimeException("should not be reached");
} catch (NullPointerException e) {
// expected
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
try {
session.start(null);
throw new RuntimeException("should not be reached");
} catch (NullPointerException e) {
// expected
}
TestUtils.synchronous().run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
session.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
// should be ignored, since we've already started
session.start(streamSetMock);
session.stop(); // STOPPING HERE
latch.countDown();
}
});
session.start(streamSetMock);
try {
// setting the remote description during startup should be ignored
session.setRemoteDescription(new SessionDescriptionImpl(null, null, null));
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
// should be ignored, since we've already started
session.start(streamSetMock);
}
});
try {
// setting the remote description when stopped should be ignored
session.setRemoteDescription(new SessionDescriptionImpl(null, null, null));
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
// setting the local description listener when stopped should be ignored
session.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
}
});
session.start(streamSetMock);
// should be fine to call even when stopped
session.addRemoteCandidate(RtcCandidates.fromSdpAttribute("candidate:1 1 UDP 123 1.1.1.1 1 typ host"));
}
/**
 * Runs the offer/answer handshake with two stream sets that only partially
 * overlap: the initiator offers three video and two audio streams, while the
 * peer answers with two video and three audio streams.
 */
public void testStreamsetCombinations() {
    // Outbound (initiator) side: video1..3 + audio1..2.
    final List<StreamConfig> initiatorConfigs = Arrays.asList(
            video("video1", true, true),
            video("video2", true, true),
            video("video3", true, true),
            audio("audio1", true, true),
            audio("audio2", true, true));
    // Inbound (peer) side: video1..2 + audio1..3.
    final List<StreamConfig> peerConfigs = Arrays.asList(
            video("video1", true, true),
            video("video2", true, true),
            audio("audio1", true, true),
            audio("audio2", true, true),
            audio("audio3", true, true));
    runStreamSetTest(new StreamSetMock("peer", peerConfigs),
            new StreamSetMock("initiator", initiatorConfigs));
}
/**
 * Runs a full offer/answer handshake between two sessions: "out" starts as
 * the offerer, its local description is handed to "in", which then starts
 * and produces the answer that is fed back to "out". The latch is released
 * once the answer has been applied to the offerer.
 *
 * @param inbound  the stream set for the answering session
 * @param outbound the stream set for the offering session
 */
private void runStreamSetTest(final StreamSetMock inbound, final StreamSetMock outbound) {
RtcConfig config = RtcConfigs.defaultConfig(Collections.<RtcConfig.HelperServer>emptyList());
final RtcSession out = RtcSessions.create(config);
final RtcSession in = RtcSessions.create(config);
TestUtils.synchronous().run(new TestUtils.SynchronousBlock() {
@Override
public void run(final CountDownLatch latch) {
// Offer path: out's local description becomes in's remote description.
out.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
try {
in.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
in.start(inbound);
}
});
// Answer path: in's local description becomes out's remote description.
in.setOnLocalDescriptionListener(new RtcSession.OnLocalDescriptionListener() {
@Override
public void onLocalDescription(final SessionDescription localDescription) {
try {
out.setRemoteDescription(localDescription);
} catch (InvalidDescriptionException e) {
throw new RuntimeException(e);
}
latch.countDown();
}
});
out.start(outbound);
}
});
}
/** Builds a {@link StreamConfig} for a video stream with the given id and send/receive wishes. */
private static StreamConfig video(String id, boolean wantSend, boolean wantReceive) {
    final MediaType type = MediaType.VIDEO;
    return new StreamConfig(id, wantSend, wantReceive, type);
}
/** Builds a {@link StreamConfig} for an audio stream with the given id and send/receive wishes. */
private static StreamConfig audio(String id, boolean wantSend, boolean wantReceive) {
    final MediaType type = MediaType.AUDIO;
    return new StreamConfig(id, wantSend, wantReceive, type);
}
/** Builds a {@link StreamConfig} describing a data stream (no media type). */
private static StreamConfig data() {
    final StreamConfig dataConfig = new StreamConfig();
    return dataConfig;
}
/**
 * Plain holder describing a single stream used to build mock stream sets.
 * A null mediaType marks a data stream (built via the no-arg constructor).
 */
private static class StreamConfig {
private String id; // stream identifier, e.g. "video1"
private boolean wantSend; // whether the stream wishes to send media
private boolean wantReceive; // whether the stream wishes to receive media
private MediaType mediaType; // AUDIO or VIDEO; null for data streams
// Media (audio/video) stream config.
private StreamConfig(String id, boolean wantSend, boolean wantReceive, MediaType mediaType) {
this.id = id;
this.wantSend = wantSend;
this.wantReceive = wantReceive;
this.mediaType = mediaType;
}
// Data stream config: no id, no send/receive wishes, no media type.
private StreamConfig() {
this.mediaType = null;
}
}
/**
 * {@link StreamSet} implementation backed by mock streams, built from a list
 * of {@link StreamConfig}s. Configs with a media type become
 * {@link MediaStreamMock}s; configs without one become {@link DataStreamMock}s.
 * The label only tags log output so interleaved logs from two sets can be
 * told apart.
 */
private class StreamSetMock extends StreamSet {
private final ArrayList<Stream> mStreams;
private final String mLabel;
// NOTE(review): stored but never read in this chunk — presumably consumed
// by the StreamSet base class or session logic; confirm before removing.
private final boolean mAcceptUnusedStreams;
public StreamSetMock(final String label, boolean acceptUnusedStreams, List<StreamConfig> configs) {
mLabel = label;
mAcceptUnusedStreams = acceptUnusedStreams;
mStreams = new ArrayList<>(configs.size());
for (StreamConfig config : configs) {
if (config.mediaType != null) {
mStreams.add(new MediaStreamMock(config));
} else {
mStreams.add(new DataStreamMock());
}
}
}
// Convenience constructor: unused streams are not accepted.
public StreamSetMock(final String label, List<StreamConfig> configs) {
this(label, false, configs);
}
// Returns the media stream mock with the given id, or null if absent.
public MediaStreamMock getMediaStream(String id) {
for (Stream stream : mStreams) {
if (stream instanceof MediaStreamMock) {
MediaStreamMock mock = (MediaStreamMock) stream;
if (id.equals(mock.getId())) {
return mock;
}
}
}
return null;
}
// Returns the first data stream mock, or null if there is none.
public DataStreamMock getDataStream() {
for (Stream stream : mStreams) {
if (stream instanceof DataStreamMock) {
return (DataStreamMock) stream;
}
}
return null;
}
@Override
protected List<Stream> getStreams() {
return mStreams;
}
/**
 * Mock data stream that records received data channels and the configured
 * stream mode, letting tests wait for either via CountDownLatches.
 */
private class DataStreamMock extends DataStream {
private StreamMode mStreamMode = null;
private List<DataChannel> mReceivedDataChannels = new LinkedList<>();
private CountDownLatch mDataChannelLatch = null;
private CountDownLatch mModeSetLatch = null;
private DataChannelDelegate mDataChannelDelegate;
// Arms the latch and immediately counts it down once per channel that
// already arrived, so a late caller does not miss earlier channels.
public synchronized void waitForDataChannels(CountDownLatch latch) {
mDataChannelLatch = latch;
for (DataChannel ignored : mReceivedDataChannels) {
latch.countDown();
}
}
public StreamMode getStreamMode() {
return mStreamMode;
}
public List<DataChannel> getReceivedDataChannels() {
return mReceivedDataChannels;
}
// Forwards a locally created channel to the delegate; assumes the
// delegate has already been set via setDataChannelDelegate().
public void addDataChannel(DataChannel dataChannel) {
mDataChannelDelegate.addDataChannel(dataChannel);
}
@Override
protected synchronized boolean onDataChannelReceived(final DataChannel dataChannel) {
Log.v(TAG, "[" + mLabel + "] data channel received: " + dataChannel);
mReceivedDataChannels.add(dataChannel);
if (mDataChannelLatch != null) {
mDataChannelLatch.countDown();
}
return true; // accept every incoming channel
}
@Override
protected void setDataChannelDelegate(final DataChannelDelegate dataChannelDelegate) {
mDataChannelDelegate = dataChannelDelegate;
Log.v(TAG, "[" + mLabel + "] data channel delegate set: " + dataChannelDelegate);
}
@Override
public void setStreamMode(final StreamMode mode) {
mStreamMode = mode;
if (mModeSetLatch != null) {
mModeSetLatch.countDown();
mModeSetLatch = null;
}
}
// Arms the latch to fire when a stream mode is set; fires immediately
// if a mode was already set.
public void waitUntilActive(final CountDownLatch latch) {
mModeSetLatch = latch;
if (mStreamMode != null) {
mModeSetLatch.countDown();
mModeSetLatch = null;
}
}
}
/**
 * Mock media stream configured from a {@link StreamConfig}. Records the
 * remote source, source delegate and stream mode handed to it, and lets
 * tests wait for the remote source via a CountDownLatch.
 */
private class MediaStreamMock extends MediaStream {
private MediaSource mRemoteSource;
private MediaSourceDelegate mMediaSourceDelegate;
private StreamMode mStreamMode = null;
private MediaSource mLocalSource;
private final StreamConfig mConfig;
private CountDownLatch mCountDownLatch = null;
public MediaStreamMock(final StreamConfig config) {
mConfig = config;
}
public boolean haveRemoteSource() {
return mRemoteSource != null;
}
public boolean haveMediaSourceDelegate() {
return mMediaSourceDelegate != null;
}
public MediaSourceDelegate getMediaSourceDelegate() {
return mMediaSourceDelegate;
}
public MediaSource getRemoteSource() {
return mRemoteSource;
}
public StreamMode getStreamMode() {
return mStreamMode;
}
// Arms the latch to fire when the remote source arrives; fires
// immediately if it is already here.
public synchronized void waitForRemoteSource(final CountDownLatch remoteSourceLatch) {
if (haveRemoteSource()) {
Log.d(TAG, "[" + mLabel + "] already had remote source for " + getId());
remoteSourceLatch.countDown();
return;
}
Log.d(TAG, "[" + mLabel + "] waiting for remote source for " + getId());
mCountDownLatch = remoteSourceLatch;
}
@Override
protected String getId() {
return mConfig.id;
}
@Override
protected MediaType getMediaType() {
return mConfig.mediaType;
}
@Override
protected boolean wantSend() {
return mConfig.wantSend;
}
@Override
protected boolean wantReceive() {
return mConfig.wantReceive;
}
@Override
protected synchronized void onRemoteMediaSource(final MediaSource mediaSource) {
Log.v(TAG, "[" + mLabel + "] got remote source for " + getId() + " : " + mediaSource);
mRemoteSource = mediaSource;
if (mCountDownLatch != null) {
mCountDownLatch.countDown();
}
}
@Override
protected void setMediaSourceDelegate(final MediaSourceDelegate mediaSourceDelegate) {
mMediaSourceDelegate = mediaSourceDelegate;
// If a local source was stored before the delegate arrived, hand it
// over now; otherwise it is delivered when setMediaSource() is called.
if (mLocalSource != null && mediaSourceDelegate != null) {
Log.v(TAG, "[" + mLabel + "] local source set for " + getId() + " : " + mLocalSource);
mediaSourceDelegate.setMediaSource(mLocalSource);
} else {
Log.v(TAG, "[" + mLabel + "] local source not set for " + getId() + " : " + mediaSourceDelegate);
}
}
@Override
public void setStreamMode(final StreamMode mode) {
mStreamMode = mode;
}
// Stores the local source; it is pushed to the delegate only when the
// delegate is (later) set via setMediaSourceDelegate().
public void setMediaSource(final MediaSource mediaSource) {
Log.v(TAG, "[" + mLabel + "] local source stored for " + getId() + " : " + mediaSource);
mLocalSource = mediaSource;
}
}
}
}
| |
package ch.spacebase.openclassic.api.pkg.task;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import ch.spacebase.openclassic.api.Color;
import ch.spacebase.openclassic.api.OpenClassic;
import ch.spacebase.openclassic.api.command.Sender;
import ch.spacebase.openclassic.api.config.Configuration;
/**
 * A task that installs a package.
 * <p>
 * Looks the package up in the cached source indexes, downloads its zip
 * archive into a temp file, extracts it into the plugins directory, records
 * the installed files/dirs in the installed-package registry, and finally
 * loads any plugins the package asks to enable. Progress and errors are
 * reported to the optional executor.
 */
public class PackageInstallTask implements Runnable {

    /** Name of the package to install. */
    private final String name;
    /** Receiver of progress/error messages; may be null for a silent install. */
    private final Sender executor;

    public PackageInstallTask(String name, Sender executor) {
        this.name = name;
        this.executor = executor;
    }

    @Override
    public void run() {
        Configuration pkgs = OpenClassic.getGame().getPackageManager().getInstalled();
        if(pkgs.getNode(this.name) != null) {
            if(this.executor != null) this.executor.sendMessage(Color.RED + "This package is already installed!");
            return;
        }

        if(this.executor != null) this.executor.sendMessage(Color.AQUA + "Locating package...");
        File cache = new File(OpenClassic.getGame().getDirectory(), "source-cache");
        if(!cache.exists()) cache.mkdirs();

        // Scan every cached source index until one knows about this package.
        String url = "";
        String version = "";
        String desc = "";
        String depends = "";
        String enable = "";
        File[] cachedSources = cache.listFiles();
        if(cachedSources != null) { // listFiles() returns null on I/O error
            for(File file : cachedSources) {
                Configuration source = new Configuration(file);
                source.load();
                if(source.getNode(this.name) == null) continue;
                url = source.getString(this.name + ".url");
                version = source.getString(this.name + ".latest");
                desc = source.getString(this.name + ".desc");
                depends = source.getString(this.name + ".depends");
                enable = source.getString(this.name + ".enable");
                break;
            }
        }

        if(version == null || version.equals("") || url == null || url.equals("")) {
            if(this.executor != null) executor.sendMessage(Color.RED + "The package " + this.name + " was not found in any of your sources.");
            return;
        }

        // Every declared dependency must already be installed.
        if(depends != null && !depends.equals("")) {
            for(String depend : depends.split(",")) {
                if(pkgs.getNode(depend) == null) {
                    if(this.executor != null) executor.sendMessage(Color.RED + "The dependency " + depend + " is not installed. Please install it before installing this package.");
                    return;
                }
            }
        }

        if(this.executor != null) this.executor.sendMessage(Color.AQUA + "Downloading package...");
        File file = new File(OpenClassic.getGame().getDirectory(), "temp/" + this.name + "-" + version + ".zip");
        if(file.exists()) {
            file.delete();
        }
        if(!file.getParentFile().exists()) {
            file.getParentFile().mkdirs();
        }

        try {
            file.createNewFile();
        } catch (IOException e) {
            if(this.executor != null) this.executor.sendMessage(Color.RED + "Failed to create package file!");
            OpenClassic.getLogger().severe("Failed to create file \"" + file.getName() + "\" when attempting to download a file!");
            e.printStackTrace();
            return;
        }

        ReadableByteChannel rbc = null;
        FileOutputStream fos = null;
        try {
            URL u = new URL(url + "/" + version + ".zip");
            rbc = Channels.newChannel(u.openStream());
            fos = new FileOutputStream(file);
            // Pull the stream to disk in chunks until EOF. transferFrom() on a
            // stream-backed channel may transfer less than requested, so loop
            // instead of relying on a (separately fetched) content length.
            long position = 0;
            long transferred;
            while((transferred = fos.getChannel().transferFrom(rbc, position, 1 << 16)) > 0) {
                position += transferred;
            }
        } catch(Exception e) {
            if(this.executor != null) this.executor.sendMessage(Color.RED + "Failed to download package.");
            OpenClassic.getLogger().severe("Failed to download file from \"" + url + "/" + version + ".zip\"!");
            e.printStackTrace();
            return;
        } finally {
            try {
                if(rbc != null) rbc.close();
                if(fos != null) fos.close();
            } catch(IOException e) {
                OpenClassic.getLogger().warning("Failed to close stream after downloading file!");
                e.printStackTrace();
            }
        }

        if(this.executor != null) this.executor.sendMessage(Color.AQUA + "Installing package...");
        List<String> files = new ArrayList<String>();
        List<String> dirs = new ArrayList<String>();
        ZipFile zfile = null;
        try {
            zfile = new ZipFile(file);
            File root = new File(OpenClassic.getGame().getDirectory(), "plugins");
            String rootPath = root.getCanonicalPath() + File.separator;
            Enumeration<? extends ZipEntry> entries = zfile.entries();
            while(entries.hasMoreElements()) {
                ZipEntry entry = entries.nextElement();
                File dest = new File(OpenClassic.getGame().getDirectory(), "plugins/" + entry.getName());
                // SECURITY: reject entries that would escape the plugins
                // directory via "../" components ("zip slip").
                if(!dest.getCanonicalPath().startsWith(rootPath)) {
                    OpenClassic.getLogger().warning("Skipping zip entry \"" + entry.getName() + "\" because it escapes the plugins directory!");
                    continue;
                }
                if(entry.isDirectory()) {
                    // Create the directory under plugins/, consistent with the
                    // path recorded below and with where files are written.
                    dest.mkdirs();
                    dirs.add("plugins/" + entry.getName());
                } else {
                    // Close the per-entry streams before moving to the next
                    // entry so no file handles leak.
                    InputStream is = null;
                    DataOutputStream out = null;
                    try {
                        is = zfile.getInputStream(entry);
                        out = new DataOutputStream(new FileOutputStream(dest));
                        byte[] buffer = new byte[1024];
                        int len;
                        while((len = is.read(buffer)) >= 0) {
                            out.write(buffer, 0, len);
                        }
                    } finally {
                        if(is != null) is.close();
                        if(out != null) out.close();
                    }
                    files.add("plugins/" + entry.getName());
                }
            }
        } catch (IOException e) {
            if(this.executor != null) this.executor.sendMessage(Color.RED + "Failed to install package.");
            OpenClassic.getLogger().severe("Failed to unzip file \"" + file.getName() + "\"!");
            e.printStackTrace();
            return;
        } finally {
            try {
                if(zfile != null) zfile.close();
            } catch(IOException e) {
                OpenClassic.getLogger().warning("Failed to close stream after unzipping file!");
                e.printStackTrace();
            }
        }

        // Register the package's contents so it can be listed/uninstalled later.
        pkgs.setValue(this.name + ".url", url);
        pkgs.setValue(this.name + ".version", version);
        pkgs.setValue(this.name + ".desc", desc);
        pkgs.setValue(this.name + ".files", files);
        pkgs.setValue(this.name + ".dirs", dirs);
        pkgs.setValue(this.name + ".plugins", enable);
        pkgs.save();

        // Load any plugins the package asks to enable.
        if(enable != null && !enable.equals("")) {
            for(String plugin : enable.split(",")) {
                OpenClassic.getGame().getPluginManager().loadPlugin(new File(OpenClassic.getGame().getDirectory(), "plugins/" + plugin + ".jar"));
            }
        }

        if(this.executor != null) this.executor.sendMessage(Color.GREEN + "The package \"" + this.name + "\" has been installed successfully!");
    }
}
| |
/*
* Copyright 2010-2014 Ning, Inc.
* Copyright 2014-2020 Groupon, Inc
* Copyright 2020-2021 Equinix, Inc
* Copyright 2014-2021 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.plugin.analytics.reports;
import java.util.Collection;
import java.util.LinkedList;
import javax.annotation.Nullable;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.jooq.Condition;
import org.jooq.DSLContext;
import org.jooq.Field;
import org.jooq.Query;
import org.jooq.Record;
import org.jooq.SQLDialect;
import org.jooq.SelectConditionStep;
import org.jooq.SelectSelectStep;
import org.jooq.impl.DSL;
import org.killbill.billing.plugin.analytics.api.core.AnalyticsConfiguration;
import org.killbill.billing.plugin.analytics.reports.sql.Cases;
import org.killbill.billing.plugin.analytics.reports.sql.Filters;
import org.killbill.billing.plugin.analytics.reports.sql.JooqSettings;
import org.killbill.billing.plugin.analytics.reports.sql.MetricExpressionParser;
import org.killbill.billing.plugin.dao.PluginDao.DBEngine;
import com.bpodgursky.jbool_expressions.And;
import com.bpodgursky.jbool_expressions.Expression;
import com.bpodgursky.jbool_expressions.Variable;
import com.google.common.collect.ImmutableList;
import static org.killbill.billing.plugin.analytics.reports.ReportsUserApi.DAY_COLUMN_NAME;
import static org.killbill.billing.plugin.analytics.reports.ReportsUserApi.TS_COLUMN_NAME;
public class SqlReportDataExtractor {

    // Exactly one of tableName / sourceQuery is non-null: tableName for
    // specification-driven reports, sourceQuery for raw-query reports.
    private final String tableName;
    private final String sourceQuery;
    private final ReportSpecification reportSpecification;
    private final DateTime startDate;
    private final DateTime endDate;
    private final DSLContext context;
    private final Long tenantRecordId;

    private Collection<Field<Object>> dimensions = ImmutableList.<Field<Object>>of();
    private Collection<Field<Object>> metrics = ImmutableList.<Field<Object>>of();
    private Expression<String> filters = null;
    private Condition condition = null;
    // True once any metric uses an aggregate function; the generated
    // statement then needs a GROUP BY over the dimensions.
    private boolean shouldGroupBy = false;

    /**
     * Builds an extractor whose SQL is generated with jOOQ from the report
     * specification against the given table.
     */
    public SqlReportDataExtractor(final String tableName,
                                  final ReportSpecification reportSpecification,
                                  @Nullable final DateTime startDate,
                                  @Nullable final DateTime endDate,
                                  final DBEngine dbEngine,
                                  final Long tenantRecordId) {
        this.tableName = tableName;
        this.reportSpecification = reportSpecification;
        this.startDate = startDate;
        this.endDate = endDate;
        this.tenantRecordId = tenantRecordId;
        this.sourceQuery = null;
        this.context = buildDslContext(dbEngine);
        setup();
    }

    /**
     * Builds an extractor around a raw source query. The TENANT_RECORD_ID,
     * START_DATE and END_DATE placeholders are substituted, then template
     * variables are bound when explicitly enabled in the configuration.
     */
    public SqlReportDataExtractor(final String sourceQuery,
                                  final ReportSpecification reportSpecification,
                                  @Nullable final DateTime startDate,
                                  @Nullable final DateTime endDate,
                                  final AnalyticsConfiguration analyticsConfiguration,
                                  final DBEngine dbEngine,
                                  final Long tenantRecordId) {
        this.tableName = null;
        this.reportSpecification = null;
        this.startDate = startDate;
        this.endDate = endDate;
        this.tenantRecordId = tenantRecordId;
        this.context = buildDslContext(dbEngine);

        // Default (safe) replacements. String#replace is used instead of
        // replaceAll so neither the placeholder nor the substituted value is
        // interpreted as a regex / replacement pattern.
        String query = sourceQuery.replace("TENANT_RECORD_ID", String.valueOf(tenantRecordId));
        // Unbounded dates fall back to a very wide window.
        query = query.replace("START_DATE", startDate != null ? startDate.toString() : "2000-01-01T00:00:00.000Z");
        query = query.replace("END_DATE", endDate != null ? endDate.toString() : "2030-01-01T00:00:00.000Z");

        // Custom replacements
        final Query parsedQuery = context.parser().parseQuery(query);
        // We use jOOQ to provide some amount of escaping. Not 100% sure how secure that is though,
        // hence the explicit config required for trusted environments.
        if (analyticsConfiguration.enableTemplateVariables) {
            // Intentionally reads the constructor *parameter*: the
            // reportSpecification field is left null in raw-query mode.
            for (final String variableName : reportSpecification.getVariableValues().keySet()) {
                parsedQuery.bind(variableName, reportSpecification.getVariableValues().get(variableName));
            }
        }
        this.sourceQuery = parsedQuery.getSQL();
    }

    /**
     * Returns the SQL to execute: the pre-rendered source query in raw-query
     * mode, or a generated SELECT in specification mode.
     */
    @Override
    public String toString() {
        if (this.sourceQuery != null) {
            return this.sourceQuery;
        }

        // Generate "select *" when no dimension or metric was specified (the
        // single dimension is then the implicit day column added in setup()).
        final SelectSelectStep<? extends Record> initialSelect = dimensions.size() == 1 && metrics.isEmpty() ? context.select()
                : context.select(dimensions)
                         .select(metrics);
        SelectConditionStep<? extends Record> statement = initialSelect.from(tableName)
                                                                       .where();
        // User filters and CASE-statement conditions collected during setup().
        if (filters != null) {
            statement = statement.and(Filters.of(filters));
        }
        if (condition != null) {
            statement = statement.and(condition);
        }
        // Always scope the query to the tenant. Assign the result, consistent
        // with the other and() calls above (the original discarded it).
        statement = statement.and(DSL.fieldByName("tenant_record_id").eq(tenantRecordId));
        if (shouldGroupBy) {
            return statement.groupBy(dimensions)
                            .getSQL();
        } else {
            return statement.getSQL();
        }
    }

    private void setup() {
        setupDimensions();
        setupMetrics();
        setupFilters();
    }

    // Builds the dimension column list, collecting any CASE conditions.
    private void setupDimensions() {
        dimensions = new LinkedList<Field<Object>>();

        // Add the special "day" column if needed
        if (!reportSpecification.getDimensions().contains(DAY_COLUMN_NAME) && !reportSpecification.getDimensions().contains(TS_COLUMN_NAME)) {
            dimensions.add(DSL.fieldByName(DAY_COLUMN_NAME));
        }

        // Add all other dimensions, potentially building case statements as we go
        for (final String dimensionWithGrouping : reportSpecification.getDimensionsWithGrouping()) {
            final Cases.FieldWithMetadata fieldWithMetadata = Cases.of(dimensionWithGrouping);
            dimensions.add(fieldWithMetadata.getField());
            if (fieldWithMetadata.getCondition() != null) {
                condition = condition == null ? fieldWithMetadata.getCondition() : condition.and(fieldWithMetadata.getCondition());
            }
        }
    }

    // Builds the metric column list; any aggregate function forces GROUP BY.
    private void setupMetrics() {
        metrics = new LinkedList<Field<Object>>();
        for (final String metric : reportSpecification.getMetrics()) {
            final MetricExpressionParser.FieldWithMetadata fieldWithMetadata = MetricExpressionParser.parse(metric);
            metrics.add(fieldWithMetadata.getField());
            shouldGroupBy = shouldGroupBy || fieldWithMetadata.hasAggregateFunction();
        }
    }

    // Combines the user filter expression with the start/end date bounds.
    private void setupFilters() {
        filters = reportSpecification.getFilterExpression();

        // Deal with dates (as yet another, specific, filter). When the bound
        // falls exactly on a UTC day boundary and the ts column isn't among
        // the dimensions, compare against the coarser day column instead.
        if (startDate != null) {
            final Variable<String> dateCheck;
            if (!reportSpecification.getDimensions().contains(TS_COLUMN_NAME) && startDate.compareTo(startDate.toLocalDate().toDateTimeAtStartOfDay(DateTimeZone.UTC)) == 0) {
                dateCheck = Variable.of(String.format("%s>=%s", DAY_COLUMN_NAME, startDate.toLocalDate()));
            } else {
                dateCheck = Variable.of(String.format("%s>=%s", TS_COLUMN_NAME, startDate));
            }
            filters = filters == null ? dateCheck : And.of(filters, dateCheck);
        }
        if (endDate != null) {
            final Variable<String> dateCheck;
            if (!reportSpecification.getDimensions().contains(TS_COLUMN_NAME) && endDate.compareTo(endDate.toLocalDate().toDateTimeAtStartOfDay(DateTimeZone.UTC)) == 0) {
                dateCheck = Variable.of(String.format("%s<=%s", DAY_COLUMN_NAME, endDate.toLocalDate()));
            } else {
                dateCheck = Variable.of(String.format("%s<=%s", TS_COLUMN_NAME, endDate));
            }
            filters = filters == null ? dateCheck : And.of(filters, dateCheck);
        }
    }

    private DSLContext buildDslContext(final DBEngine dbEngine) {
        final SQLDialect sqlDialect = sqlDialectFromDbEngine(dbEngine);
        return DSL.using(sqlDialect, JooqSettings.defaults(sqlDialect));
    }

    // Renamed from SQLDialectFromDBEngine to lowerCamelCase (private, so the
    // rename is safe).
    private static SQLDialect sqlDialectFromDbEngine(final DBEngine dbEngine) {
        switch (dbEngine) {
            case H2:
                return SQLDialect.H2;
            case MYSQL:
                // NOTE(review): MYSQL maps to the MARIADB dialect — looks
                // intentional; confirm against the jOOQ edition in use.
                return SQLDialect.MARIADB;
            case POSTGRESQL:
                return SQLDialect.POSTGRES;
            default:
                throw new IllegalArgumentException("Unsupported DB engine: " + dbEngine);
        }
    }
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.naming.NamingException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Addressing;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.net.DNS;
/**
* A base for {@link TableInputFormat}s. Receives a {@link HTable}, an
* {@link Scan} instance that defines the input columns etc. Subclasses may use
* other TableRecordReader implementations.
* <p>
* An example of a subclass:
* <pre>
* class ExampleTIF extends TableInputFormatBase implements JobConfigurable {
*
* public void configure(JobConf job) {
* HTable exampleTable = new HTable(HBaseConfiguration.create(job),
* Bytes.toBytes("exampleTable"));
* // mandatory
* setHTable(exampleTable);
* Text[] inputColumns = new byte [][] { Bytes.toBytes("cf1:columnA"),
* Bytes.toBytes("cf2") };
* // mandatory
* setInputColumns(inputColumns);
* RowFilterInterface exampleFilter = new RegExpRowFilter("keyPrefix.*");
* // optional
* setRowFilter(exampleFilter);
* }
*
* public void validateInput(JobConf job) throws IOException {
* }
* }
* </pre>
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class TableInputFormatBase
extends InputFormat<ImmutableBytesWritable, Result> {
final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
/** Holds the details for the internal scanner. */
private Scan scan = null;
/** The table to scan. */
private HTable table = null;
/** The reader scanning the table, can be a custom one. */
private TableRecordReader tableRecordReader = null;
/** The reverse DNS lookup cache mapping: IPAddress => HostName */
private HashMap<InetAddress, String> reverseDNSCacheMap =
new HashMap<InetAddress, String>();
/** The NameServer address */
private String nameServer = null;
/**
 * Builds the {@link RecordReader} for one split. Uses the provided
 * TableRecordReader when one was set, otherwise the default implementation,
 * and narrows the configured scan to the split's row range.
 *
 * @param split The split to work with.
 * @param context The current context.
 * @return The newly created record reader.
 * @throws IOException When creating the reader fails.
 * @see org.apache.hadoop.mapreduce.InputFormat#createRecordReader(
 *   org.apache.hadoop.mapreduce.InputSplit,
 *   org.apache.hadoop.mapreduce.TaskAttemptContext)
 */
@Override
public RecordReader<ImmutableBytesWritable, Result> createRecordReader(
    InputSplit split, TaskAttemptContext context)
throws IOException {
  // A null table means initialization failed earlier; surface that now.
  if (this.table == null) {
    throw new IOException("Cannot create a record reader because of a" +
        " previous error. Please look at the previous logs lines from" +
        " the task's full log for more details.");
  }
  final TableSplit tableSplit = (TableSplit) split;
  final TableRecordReader reader =
      (this.tableRecordReader == null) ? new TableRecordReader() : this.tableRecordReader;
  // Bound the scan by the split's start/end rows.
  final Scan boundedScan = new Scan(this.scan);
  boundedScan.setStartRow(tableSplit.getStartRow());
  boundedScan.setStopRow(tableSplit.getEndRow());
  reader.setScan(boundedScan);
  reader.setHTable(this.table);
  return reader;
}
/**
 * Calculates the splits that will serve as input for the map tasks: one
 * split per region of the table, clipped to the scan's start/stop rows.
 * Regions entirely outside the scan range, or excluded by
 * {@code includeRegionInSplit}, produce no split.
 *
 * @param context The current job context.
 * @return The list of input splits.
 * @throws IOException When creating the list of splits fails.
 * @see org.apache.hadoop.mapreduce.InputFormat#getSplits(
 *   org.apache.hadoop.mapreduce.JobContext)
 */
@Override
public List<InputSplit> getSplits(JobContext context) throws IOException {
if (table == null) {
throw new IOException("No table was provided.");
}
// Get the name server address and the default value is null.
this.nameServer =
context.getConfiguration().get("hbase.nameserver.address", null);
Pair<byte[][], byte[][]> keys = table.getStartEndKeys();
if (keys == null || keys.getFirst() == null ||
keys.getFirst().length == 0) {
// No start/end keys: the table has a single region. Emit exactly one
// split covering the whole table, located on that region's host.
HRegionLocation regLoc = table.getRegionLocation(HConstants.EMPTY_BYTE_ARRAY, false);
if (null == regLoc) {
throw new IOException("Expecting at least one region.");
}
List<InputSplit> splits = new ArrayList<InputSplit>(1);
InputSplit split = new TableSplit(table.getName(),
HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, regLoc
.getHostnamePort().split(Addressing.HOSTNAME_PORT_SEPARATOR)[0]);
splits.add(split);
return splits;
}
List<InputSplit> splits = new ArrayList<InputSplit>(keys.getFirst().length);
for (int i = 0; i < keys.getFirst().length; i++) {
// Allow subclasses to exclude whole regions from the job.
if ( !includeRegionInSplit(keys.getFirst()[i], keys.getSecond()[i])) {
continue;
}
HRegionLocation location = table.getRegionLocation(keys.getFirst()[i], false);
// The below InetSocketAddress creation does a name resolution.
InetSocketAddress isa = new InetSocketAddress(location.getHostname(), location.getPort());
if (isa.isUnresolved()) {
LOG.warn("Failed resolve " + isa);
}
InetAddress regionAddress = isa.getAddress();
String regionLocation;
try {
// Prefer the host name from the (cached) reverse DNS lookup; fall
// back to the location's own host name if the lookup fails.
regionLocation = reverseDNS(regionAddress);
} catch (NamingException e) {
LOG.error("Cannot resolve the host name for " + regionAddress + " because of " + e);
regionLocation = location.getHostname();
}
byte[] startRow = scan.getStartRow();
byte[] stopRow = scan.getStopRow();
// determine if the given start and stop keys fall into the region;
// empty keys act as unbounded range ends
if ((startRow.length == 0 || keys.getSecond()[i].length == 0 ||
Bytes.compareTo(startRow, keys.getSecond()[i]) < 0) &&
(stopRow.length == 0 ||
Bytes.compareTo(stopRow, keys.getFirst()[i]) > 0)) {
// Clip the split to the intersection of scan range and region.
byte[] splitStart = startRow.length == 0 ||
Bytes.compareTo(keys.getFirst()[i], startRow) >= 0 ?
keys.getFirst()[i] : startRow;
byte[] splitStop = (stopRow.length == 0 ||
Bytes.compareTo(keys.getSecond()[i], stopRow) <= 0) &&
keys.getSecond()[i].length > 0 ?
keys.getSecond()[i] : stopRow;
InputSplit split = new TableSplit(table.getName(),
splitStart, splitStop, regionLocation);
splits.add(split);
if (LOG.isDebugEnabled()) {
LOG.debug("getSplits: split -> " + i + " -> " + split);
}
}
}
return splits;
}
/**
 * Resolves the host name for the given IP address via reverse DNS,
 * caching successful lookups in {@code reverseDNSCacheMap} so each
 * region server address is only resolved once.
 *
 * @param ipAddress The address to resolve.
 * @return The host name derived from the DNS pointer record.
 * @throws NamingException When the reverse lookup fails.
 */
private String reverseDNS(InetAddress ipAddress) throws NamingException {
    String cached = this.reverseDNSCacheMap.get(ipAddress);
    if (cached != null) {
        return cached;
    }
    String resolved = Strings.domainNamePointerToHostName(
        DNS.reverseDns(ipAddress, this.nameServer));
    this.reverseDNSCacheMap.put(ipAddress, resolved);
    return resolved;
}
/**
*
*
* Test if the given region is to be included in the InputSplit while splitting
* the regions of a table.
* <p>
* This optimization is effective when there is a specific reasoning to exclude an entire region from the M-R job,
* (and hence, not contributing to the InputSplit), given the start and end keys of the same. <br>
* Useful when we need to remember the last-processed top record and revisit the [last, current) interval for M-R processing,
* continuously. In addition to reducing InputSplits, reduces the load on the region server as well, due to the ordering of the keys.
* <br>
* <br>
* Note: It is possible that <code>endKey.length() == 0 </code> , for the last (recent) region.
* <br>
* Override this method, if you want to bulk exclude regions altogether from M-R. By default, no region is excluded( i.e. all regions are included).
*
*
* @param startKey Start key of the region
* @param endKey End key of the region
* @return true, if this region needs to be included as part of the input (default).
*
*/
protected boolean includeRegionInSplit(final byte[] startKey, final byte[] endKey) {
    // Default policy: include every region; subclasses override to filter.
    return true;
}
/**
* Allows subclasses to get the {@link HTable}.
*/
/**
 * Allows subclasses to access the {@link HTable} this input format reads from.
 *
 * @return The table set via {@link #setHTable(HTable)}, or {@code null} if unset.
 */
protected HTable getHTable() {
    return this.table;
}
/**
* Allows subclasses to set the {@link HTable}.
*
* @param table The table to get the data from.
*/
/**
 * Allows subclasses to set the {@link HTable} the splits and records
 * are read from.
 *
 * @param table The table to get the data from.
 */
protected void setHTable(HTable table) {
    this.table = table;
}
/**
* Gets the scan defining the actual details like columns etc.
*
* @return The internal scan instance.
*/
/**
 * Gets the scan defining the actual details like columns etc.
 * <p>
 * Lazily creates a default {@link Scan} on first access, so callers
 * never see {@code null}.
 *
 * @return The internal scan instance, never {@code null}.
 */
public Scan getScan() {
    // Always-brace style: the original single-statement `if` was unbraced.
    if (this.scan == null) {
        this.scan = new Scan();
    }
    return scan;
}
/**
* Sets the scan defining the actual details like columns etc.
*
* @param scan The scan to set.
*/
/**
 * Sets the scan defining the actual details like columns etc.
 *
 * @param scan The scan to set.
 */
public void setScan(Scan scan) {
    this.scan = scan;
}
/**
* Allows subclasses to set the {@link TableRecordReader}.
*
* @param tableRecordReader A different {@link TableRecordReader}
* implementation.
*/
/**
 * Allows subclasses to substitute their own {@link TableRecordReader}
 * implementation for the default one.
 *
 * @param tableRecordReader A different {@link TableRecordReader}
 *          implementation.
 */
protected void setTableRecordReader(TableRecordReader tableRecordReader) {
    this.tableRecordReader = tableRecordReader;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.bindy.fixed.ordinal;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import org.apache.camel.EndpointInject;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.dataformat.bindy.annotation.DataField;
import org.apache.camel.dataformat.bindy.annotation.FixedLengthRecord;
import org.apache.camel.model.dataformat.BindyDataFormat;
import org.apache.camel.model.dataformat.BindyType;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
/**
* This test validates that fixed length records can be defined and processed using ordinal 'pos' values, and
* lengths declared for each field. Strict position calculations in FixedLength records is not necessary. The
* records will be marshalled using the relative the order of the 'pos' values.
*/
public class BindySimpleFixedLengthOrdinalPosTest extends CamelTestSupport {

    public static final String URI_DIRECT_MARSHALL = "direct:marshall";
    public static final String URI_DIRECT_UNMARSHALL = "direct:unmarshall";
    public static final String URI_MOCK_MARSHALL_RESULT = "mock:marshall-result";
    public static final String URI_MOCK_UNMARSHALL_RESULT = "mock:unmarshall-result";

    // One complete 75-char record plus CRLF terminator. Field widths follow the
    // @DataField lengths declared on Order below:
    // orderNr(2) clientNr(2) firstName(9) lastName(5) instrumentCode(4)
    // instrumentNumber(10) orderType(3) instrumentType(5) amount(12)
    // currency(3) orderDate(10) comment(10)
    // NOTE(review): padding reconstructed from the declared lengths — the
    // original literal's exact run of spaces should be confirmed against VCS.
    private static final String TEST_RECORD = "10A9  PaulineM    ISINXD12345678BUYShare000002500.45USD01-08-2009Hello     \r\n";

    @EndpointInject(uri = URI_MOCK_MARSHALL_RESULT)
    private MockEndpoint marshallResult;

    @EndpointInject(uri = URI_MOCK_UNMARSHALL_RESULT)
    private MockEndpoint unmarshallResult;

    // *************************************************************************
    // TESTS
    // *************************************************************************

    /**
     * Unmarshalls TEST_RECORD and verifies the resulting Order model.
     * String fields keep their fixed-width padding: Bindy does not trim them.
     */
    @Test
    public void testUnmarshallMessage() throws Exception {
        unmarshallResult.expectedMessageCount(1);
        template.sendBody(URI_DIRECT_UNMARSHALL, TEST_RECORD);
        unmarshallResult.assertIsSatisfied();

        // check the model
        BindySimpleFixedLengthOrdinalPosTest.Order order =
            (BindySimpleFixedLengthOrdinalPosTest.Order) unmarshallResult.getReceivedExchanges().get(0).getIn().getBody();
        assertEquals(10, order.getOrderNr());
        // the field is not trimmed: 9-wide firstName keeps its leading pad,
        // 5-wide lastName keeps its trailing pad
        assertEquals("  Pauline", order.getFirstName());
        assertEquals("M    ", order.getLastName());
        assertEquals("Hello     ", order.getComment());
    }

    /**
     * Marshals a fully populated Order and expects the exact fixed-length
     * TEST_RECORD back, proving fields are emitted in 'pos' order with the
     * declared widths, alignment and padding characters.
     */
    @Test
    public void testMarshallMessage() throws Exception {
        BindySimpleFixedLengthOrdinalPosTest.Order order = new Order();
        order.setOrderNr(10);
        order.setOrderType("BUY");
        order.setClientNr("A9");
        order.setFirstName("Pauline");
        order.setLastName("M");
        order.setAmount(new BigDecimal("2500.45"));
        order.setInstrumentCode("ISIN");
        order.setInstrumentNumber("XD12345678");
        order.setInstrumentType("Share");
        order.setCurrency("USD");
        // Calendar months are zero-based: (2009, 7, 1) is 1 August 2009,
        // which matches the "01-08-2009" (dd-MM-yyyy) slice of TEST_RECORD.
        Calendar calendar = new GregorianCalendar();
        calendar.set(2009, 7, 1);
        order.setOrderDate(calendar.getTime());
        order.setComment("Hello");

        marshallResult.expectedMessageCount(1);
        marshallResult.expectedBodiesReceived(Arrays.asList(new String[] {TEST_RECORD}));
        template.sendBody(URI_DIRECT_MARSHALL, order);
        marshallResult.assertIsSatisfied();
    }

    // *************************************************************************
    // ROUTES
    // *************************************************************************

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        RouteBuilder routeBuilder = new RouteBuilder() {

            @Override
            public void configure() throws Exception {
                // Marshal route configures Bindy explicitly via BindyDataFormat...
                BindyDataFormat bindy = new BindyDataFormat();
                bindy.setClassType(BindySimpleFixedLengthOrdinalPosTest.Order.class);
                bindy.setLocale("en");
                bindy.setType(BindyType.Fixed);

                from(URI_DIRECT_MARSHALL)
                    .marshal(bindy)
                    .to(URI_MOCK_MARSHALL_RESULT);

                // ...while the unmarshal route uses the fluent DSL shorthand.
                from(URI_DIRECT_UNMARSHALL)
                    .unmarshal().bindy(BindyType.Fixed, BindySimpleFixedLengthOrdinalPosTest.Order.class)
                    .to(URI_MOCK_UNMARSHALL_RESULT);
            }
        };
        return routeBuilder;
    }

    // *************************************************************************
    // DATA MODEL
    // *************************************************************************

    /**
     * Fixed-length record model. Fields are laid out by the relative order of
     * their 'pos' values with the declared widths; no absolute offsets are
     * used (that relative ordering is what this test exercises).
     */
    @FixedLengthRecord()
    public static class Order {

        @DataField(pos = 1, length = 2)
        private int orderNr;

        @DataField(pos = 2, length = 2)
        private String clientNr;

        @DataField(pos = 3, length = 9)
        private String firstName;

        @DataField(pos = 4, length = 5, align = "L")
        private String lastName;

        @DataField(pos = 5, length = 4)
        private String instrumentCode;

        @DataField(pos = 6, length = 10)
        private String instrumentNumber;

        @DataField(pos = 7, length = 3)
        private String orderType;

        @DataField(pos = 8, length = 5)
        private String instrumentType;

        // Right-aligned, zero-padded to 12 chars with 2 decimal digits,
        // e.g. "000002500.45".
        @DataField(pos = 9, precision = 2, length = 12, paddingChar = '0')
        private BigDecimal amount;

        @DataField(pos = 10, length = 3)
        private String currency;

        @DataField(pos = 11, length = 10, pattern = "dd-MM-yyyy")
        private Date orderDate;

        // Left-aligned and space-padded to 10 chars.
        @DataField(pos = 12, length = 10, align = "L", paddingChar = ' ')
        private String comment;

        public int getOrderNr() {
            return orderNr;
        }

        public void setOrderNr(int orderNr) {
            this.orderNr = orderNr;
        }

        public String getClientNr() {
            return clientNr;
        }

        public void setClientNr(String clientNr) {
            this.clientNr = clientNr;
        }

        public String getFirstName() {
            return firstName;
        }

        public void setFirstName(String firstName) {
            this.firstName = firstName;
        }

        public String getLastName() {
            return lastName;
        }

        public void setLastName(String lastName) {
            this.lastName = lastName;
        }

        public String getInstrumentCode() {
            return instrumentCode;
        }

        public void setInstrumentCode(String instrumentCode) {
            this.instrumentCode = instrumentCode;
        }

        public String getInstrumentNumber() {
            return instrumentNumber;
        }

        public void setInstrumentNumber(String instrumentNumber) {
            this.instrumentNumber = instrumentNumber;
        }

        public String getOrderType() {
            return orderType;
        }

        public void setOrderType(String orderType) {
            this.orderType = orderType;
        }

        public String getInstrumentType() {
            return instrumentType;
        }

        public void setInstrumentType(String instrumentType) {
            this.instrumentType = instrumentType;
        }

        public BigDecimal getAmount() {
            return amount;
        }

        public void setAmount(BigDecimal amount) {
            this.amount = amount;
        }

        public String getCurrency() {
            return currency;
        }

        public void setCurrency(String currency) {
            this.currency = currency;
        }

        public Date getOrderDate() {
            return orderDate;
        }

        public void setOrderDate(Date orderDate) {
            this.orderDate = orderDate;
        }

        public String getComment() {
            return comment;
        }

        public void setComment(String comment) {
            this.comment = comment;
        }

        @Override
        public String toString() {
            return "Model : " + Order.class.getName() + " : " + this.orderNr + ", " + this.orderType + ", " + String.valueOf(this.amount) + ", " + this.instrumentCode + ", "
                + this.instrumentNumber + ", " + this.instrumentType + ", " + this.currency + ", " + this.clientNr + ", " + this.firstName + ", " + this.lastName + ", "
                + String.valueOf(this.orderDate);
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.