gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.client.RestTestResponse;
import org.elasticsearch.test.rest.parser.RestTestParseException;
import org.elasticsearch.test.rest.parser.RestTestSuiteParser;
import org.elasticsearch.test.rest.section.DoSection;
import org.elasticsearch.test.rest.section.ExecutableSection;
import org.elasticsearch.test.rest.section.RestTestSuite;
import org.elasticsearch.test.rest.section.SkipSection;
import org.elasticsearch.test.rest.section.TestSection;
import org.elasticsearch.test.rest.spec.RestApi;
import org.elasticsearch.test.rest.spec.RestSpec;
import org.elasticsearch.test.rest.support.FileUtils;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.sort;
/**
* Runs the clients test suite against an elasticsearch cluster.
*/
public abstract class ESRestTestCase extends ESTestCase {
    /**
     * Property that allows to control which REST tests get run. Supports comma separated list of tests
     * or directories that contain tests e.g. -Dtests.rest.suite=index,get,create/10_with_id
     */
    public static final String REST_TESTS_SUITE = "tests.rest.suite";
    /**
     * Property that allows to blacklist some of the REST tests based on a comma separated list of globs
     * e.g. -Dtests.rest.blacklist=get/10_basic/*
     */
    public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist";
    /**
     * Property that allows to control whether spec validation is enabled or not (default true).
     */
    public static final String REST_TESTS_VALIDATE_SPEC = "tests.rest.validate_spec";
    /**
     * Property that allows to control where the REST spec files need to be loaded from
     */
    public static final String REST_TESTS_SPEC = "tests.rest.spec";
    /** Property that controls whether REST tests packaged inside a jar may be loaded (default: true). */
    public static final String REST_LOAD_PACKAGED_TESTS = "tests.rest.load_packaged";
    // Default classpath locations for the yaml test suites and the REST API spec files.
    private static final String DEFAULT_TESTS_PATH = "/rest-api-spec/test";
    private static final String DEFAULT_SPEC_PATH = "/rest-api-spec/api";
    /**
     * This separator pattern matches ',' except it is preceded by a '\'.
     * This allows us to support ',' within paths when it is escaped with a slash.
     *
     * For example, the path string "/a/b/c\,d/e/f,/foo/bar,/baz" is separated to "/a/b/c\,d/e/f", "/foo/bar" and "/baz".
     *
     * For reference, this regular expression feature is known as zero-width negative look-behind.
     *
     */
    private static final String PATHS_SEPARATOR = "(?<!\\\\),";
    // Globs parsed from REST_TESTS_BLACKLIST; a test matching any of them is skipped in reset().
    private final List<BlacklistedPathPatternMatcher> blacklistPathMatchers = new ArrayList<>();
    // Addresses of the externally started cluster, parsed from the mandatory "tests.rest.cluster" property.
    private final URL[] clusterUrls;
    // Shared by all tests of the suite: created in initExecutionContext(), torn down in close().
    private static RestTestExecutionContext restTestExecutionContext;
    private static RestTestExecutionContext adminExecutionContext;
    // The single yaml test section this test instance executes.
    private final RestTestCandidate testCandidate;

    /**
     * Creates a test case for a single REST test candidate (one test section of a yaml suite).
     * Reads the blacklist and cluster-address system properties and fails fast when
     * "tests.rest.cluster" is not set.
     */
    public ESRestTestCase(RestTestCandidate testCandidate) {
        this.testCandidate = testCandidate;
        String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null);
        for (String entry : blacklist) {
            this.blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry));
        }
        String cluster = System.getProperty("tests.rest.cluster");
        if (cluster == null) {
            throw new RuntimeException("Must specify tests.rest.cluster for rest tests");
        }
        String[] stringUrls = cluster.split(",");
        clusterUrls = new URL[stringUrls.length];
        int i = 0;
        try {
            for (String stringUrl : stringUrls) {
                // addresses are given as host:port; plain http is assumed
                clusterUrls[i++] = new URL("http://" + stringUrl);
            }
        } catch (IOException e) {
            throw new RuntimeException("Failed to parse cluster addresses for rest test", e);
        }
    }

    /** On test failure, dumps the stash so the captured response values can be inspected in the logs. */
    @Override
    protected void afterIfFailed(List<Throwable> errors) {
        logger.info("Stash dump on failure [{}]", XContentHelper.toString(restTestExecutionContext.stash()));
        super.afterIfFailed(errors);
    }

    /**
     * Builds the randomized-runner parameters: one {@link RestTestCandidate} per entry, restricted
     * to the shard identified by {@code id} out of {@code count} shards.
     */
    public static Iterable<Object[]> createParameters(int id, int count) throws IOException, RestTestParseException {
        //parse tests only if rest test group is enabled, otherwise rest tests might not even be available on file system
        List<RestTestCandidate> restTestCandidates = collectTestCandidates(id, count);
        List<Object[]> objects = new ArrayList<>();
        for (RestTestCandidate restTestCandidate : restTestCandidates) {
            objects.add(new Object[]{restTestCandidate});
        }
        return objects;
    }

    /**
     * Parses all reachable yaml suites and returns the test sections assigned to shard {@code id}
     * (out of {@code count}), sorted by test path so ordering is repeatable before shuffling.
     */
    private static List<RestTestCandidate> collectTestCandidates(int id, int count) throws RestTestParseException, IOException {
        List<RestTestCandidate> testCandidates = new ArrayList<>();
        FileSystem fileSystem = getFileSystem();
        // don't make a try-with, getFileSystem returns null
        // ... and you can't close() the default filesystem
        try {
            String[] paths = resolvePathsProperty(REST_TESTS_SUITE, DEFAULT_TESTS_PATH);
            Map<String, Set<Path>> yamlSuites = FileUtils.findYamlSuites(fileSystem, DEFAULT_TESTS_PATH, paths);
            RestTestSuiteParser restTestSuiteParser = new RestTestSuiteParser();
            //yaml suites are grouped by directory (effectively by api)
            for (String api : yamlSuites.keySet()) {
                List<Path> yamlFiles = new ArrayList<>(yamlSuites.get(api));
                for (Path yamlFile : yamlFiles) {
                    // sharding key: api name + file name, so a whole suite file lands on one shard
                    String key = api + yamlFile.getFileName().toString();
                    if (mustExecute(key, id, count)) {
                        RestTestSuite restTestSuite = restTestSuiteParser.parse(api, yamlFile);
                        for (TestSection testSection : restTestSuite.getTestSections()) {
                            testCandidates.add(new RestTestCandidate(restTestSuite, testSection));
                        }
                    }
                }
            }
        } finally {
            IOUtils.close(fileSystem);
        }
        //sort the candidates so they will always be in the same order before being shuffled, for repeatability
        Collections.sort(testCandidates, new Comparator<RestTestCandidate>() {
            @Override
            public int compare(RestTestCandidate o1, RestTestCandidate o2) {
                return o1.getTestPath().compareTo(o2.getTestPath());
            }
        });
        return testCandidates;
    }

    /**
     * Deterministically assigns a suite key to exactly one shard.
     * The cast to long before Math.abs guards against Integer.MIN_VALUE, whose int abs is negative.
     */
    private static boolean mustExecute(String test, int id, int count) {
        int hash = (int) (Math.abs((long)test.hashCode()) % count);
        return hash == id;
    }

    /**
     * Resolves a comma-separated path list from a system property, honoring escaped commas
     * (see {@link #PATHS_SEPARATOR}). Returns {@code defaultValue} (or an empty array when
     * that is null) if the property is unset or empty.
     */
    private static String[] resolvePathsProperty(String propertyName, String defaultValue) {
        String property = System.getProperty(propertyName);
        if (!Strings.hasLength(property)) {
            return defaultValue == null ? Strings.EMPTY_ARRAY : new String[]{defaultValue};
        } else {
            return property.split(PATHS_SEPARATOR);
        }
    }

    /**
     * Returns a new FileSystem to read REST resources, or null if they
     * are available from classpath.
     */
    @SuppressForbidden(reason = "proper use of URL, hack around a JDK bug")
    static FileSystem getFileSystem() throws IOException {
        // REST suite handling is currently complicated, with lots of filtering and so on
        // For now, to work embedded in a jar, return a ZipFileSystem over the jar contents.
        URL codeLocation = FileUtils.class.getProtectionDomain().getCodeSource().getLocation();
        boolean loadPackaged = RandomizedTest.systemPropertyAsBoolean(REST_LOAD_PACKAGED_TESTS, true);
        if (codeLocation.getFile().endsWith(".jar") && loadPackaged) {
            try {
                // hack around a bug in the zipfilesystem implementation before java 9,
                // its checkWritable was incorrect and it won't work without write permissions.
                // if we add the permission, it will open jars r/w, which is too scary! so copy to a safe r-w location.
                Path tmp = Files.createTempFile(null, ".jar");
                try (InputStream in = codeLocation.openStream()) {
                    Files.copy(in, tmp, StandardCopyOption.REPLACE_EXISTING);
                }
                return FileSystems.newFileSystem(new URI("jar:" + tmp.toUri()), Collections.<String,Object>emptyMap());
            } catch (URISyntaxException e) {
                throw new IOException("couldn't open zipfilesystem: ", e);
            }
        } else {
            return null;
        }
    }

    /**
     * Parses (and optionally validates) the REST spec once per suite and creates the two shared
     * execution contexts (regular and admin).
     */
    @BeforeClass
    public static void initExecutionContext() throws IOException {
        String[] specPaths = resolvePathsProperty(REST_TESTS_SPEC, DEFAULT_SPEC_PATH);
        RestSpec restSpec = null;
        FileSystem fileSystem = getFileSystem();
        // don't make a try-with, getFileSystem returns null
        // ... and you can't close() the default filesystem
        try {
            restSpec = RestSpec.parseFrom(fileSystem, DEFAULT_SPEC_PATH, specPaths);
        } finally {
            IOUtils.close(fileSystem);
        }
        validateSpec(restSpec);
        restTestExecutionContext = new RestTestExecutionContext(restSpec);
        adminExecutionContext = new RestTestExecutionContext(restSpec);
    }

    /** Returns the context used for administrative calls (cleanup), initialized per-test in reset(). */
    protected RestTestExecutionContext getAdminExecutionContext() {
        return adminExecutionContext;
    }

    /**
     * Sanity-checks the parsed spec: every API that accepts a body on GET must also support POST
     * (clients that can't send a body with GET fall back to POST). Controlled by
     * {@link #REST_TESTS_VALIDATE_SPEC}.
     */
    private static void validateSpec(RestSpec restSpec) {
        boolean validateSpec = RandomizedTest.systemPropertyAsBoolean(REST_TESTS_VALIDATE_SPEC, true);
        if (validateSpec) {
            StringBuilder errorMessage = new StringBuilder();
            for (RestApi restApi : restSpec.getApis()) {
                if (restApi.getMethods().contains("GET") && restApi.isBodySupported()) {
                    if (!restApi.getMethods().contains("POST")) {
                        errorMessage.append("\n- ").append(restApi.getName()).append(" supports GET with a body but doesn't support POST");
                    }
                }
            }
            if (errorMessage.length() > 0) {
                throw new IllegalArgumentException(errorMessage.toString());
            }
        }
    }

    /** Removes indices, index templates and snapshot repositories so tests don't leak state into each other. */
    @After
    public void wipeCluster() throws Exception {
        // wipe indices
        Map<String, String> deleteIndicesArgs = new HashMap<>();
        deleteIndicesArgs.put("index", "*");
        try {
            adminExecutionContext.callApi("indices.delete", deleteIndicesArgs, Collections.emptyList(), Collections.emptyMap());
        } catch (ResponseException e) {
            // 404 here just means we had no indexes
            if (e.getResponse().getStatusLine().getStatusCode() != 404) {
                throw e;
            }
        }
        // wipe index templates
        Map<String, String> deleteTemplatesArgs = new HashMap<>();
        deleteTemplatesArgs.put("name", "*");
        adminExecutionContext.callApi("indices.delete_template", deleteTemplatesArgs, Collections.emptyList(), Collections.emptyMap());
        // wipe snapshots
        Map<String, String> deleteSnapshotsArgs = new HashMap<>();
        deleteSnapshotsArgs.put("repository", "*");
        adminExecutionContext.callApi("snapshot.delete_repository", deleteSnapshotsArgs, Collections.emptyList(), Collections.emptyMap());
    }

    /**
     * Logs a message if there are still running tasks. The reasoning is that any tasks still running are state that is trying to bleed into
     * other tests.
     */
    @After
    public void logIfThereAreRunningTasks() throws InterruptedException, IOException {
        RestTestResponse tasks = adminExecutionContext.callApi("tasks.list", emptyMap(), emptyList(), emptyMap());
        Set<String> runningTasks = runningTasks(tasks);
        // Ignore the task list API - it doesn't count against us
        runningTasks.remove(ListTasksAction.NAME);
        runningTasks.remove(ListTasksAction.NAME + "[n]");
        if (runningTasks.isEmpty()) {
            return;
        }
        List<String> stillRunning = new ArrayList<>(runningTasks);
        sort(stillRunning);
        logger.info("There are still tasks running after this test that might break subsequent tests {}.", stillRunning);
        /*
         * This isn't a higher level log or outright failure because some of these tasks are run by the cluster in the background. If we
         * could determine that some tasks are run by the user we'd fail the tests if those tasks were running and ignore any background
         * tasks.
         */
    }

    /** Releases the shared execution contexts once the whole suite is done. */
    @AfterClass
    public static void close() {
        if (restTestExecutionContext != null) {
            restTestExecutionContext.close();
            adminExecutionContext.close();
            restTestExecutionContext = null;
            adminExecutionContext = null;
        }
    }

    /**
     * Used to obtain settings for the REST client that is used to send REST requests.
     */
    protected Settings restClientSettings() {
        return Settings.EMPTY;
    }

    /** Returns the REST client settings used for admin actions like cleaning up after the test has completed. */
    protected Settings restAdminSettings() {
        return restClientSettings(); // default to the same client settings
    }

    /**
     * Per-test setup: initializes both execution contexts against the external cluster and skips
     * the test when it is blacklisted or disabled by a skip section (suite-level or section-level).
     */
    @Before
    public void reset() throws IOException {
        // admin context must be available for @After always, regardless of whether the test was blacklisted
        adminExecutionContext.initClient(clusterUrls, restAdminSettings());
        adminExecutionContext.clear();
        //skip test if it matches one of the blacklist globs
        for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) {
            String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName();
            assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher
                    .isSuffixMatch(testPath));
        }
        //The client needs non static info to get initialized, therefore it can't be initialized in the before class
        restTestExecutionContext.initClient(clusterUrls, restClientSettings());
        restTestExecutionContext.clear();
        //skip test if the whole suite (yaml file) is disabled
        assumeFalse(buildSkipMessage(testCandidate.getSuitePath(), testCandidate.getSetupSection().getSkipSection()),
                testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext.esVersion()));
        //skip test if test section is disabled
        assumeFalse(buildSkipMessage(testCandidate.getTestPath(), testCandidate.getTestSection().getSkipSection()),
                testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext.esVersion()));
    }

    /** Builds the human-readable skip reason, distinguishing version-based from feature-based skips. */
    private static String buildSkipMessage(String description, SkipSection skipSection) {
        StringBuilder messageBuilder = new StringBuilder();
        if (skipSection.isVersionCheck()) {
            messageBuilder.append("[").append(description).append("] skipped, reason: [").append(skipSection.getReason()).append("] ");
        } else {
            messageBuilder.append("[").append(description).append("] skipped, reason: features ")
                    .append(skipSection.getFeatures()).append(" not supported");
        }
        return messageBuilder.toString();
    }

    /**
     * Runs the setup section (if any) and then every executable section of the test candidate
     * against the shared execution context.
     */
    public void test() throws IOException {
        //let's check that there is something to run, otherwise there might be a problem with the test section
        if (testCandidate.getTestSection().getExecutableSections().size() == 0) {
            throw new IllegalArgumentException("No executable sections loaded for [" + testCandidate.getTestPath() + "]");
        }
        if (!testCandidate.getSetupSection().isEmpty()) {
            logger.debug("start setup test [{}]", testCandidate.getTestPath());
            for (DoSection doSection : testCandidate.getSetupSection().getDoSections()) {
                doSection.execute(restTestExecutionContext);
            }
            logger.debug("end setup test [{}]", testCandidate.getTestPath());
        }
        // clear the stash between setup and the actual test sections
        restTestExecutionContext.clear();
        for (ExecutableSection executableSection : testCandidate.getTestSection().getExecutableSections()) {
            executableSection.execute(restTestExecutionContext);
        }
    }

    /**
     * Extracts the set of task action names currently running from a tasks.list response.
     * NOTE(review): assumes every node entry in the response contains a "tasks" map — confirm
     * against the tasks.list response format; a node without tasks would cause an NPE here.
     */
    @SuppressWarnings("unchecked")
    public Set<String> runningTasks(RestTestResponse response) throws IOException {
        Set<String> runningTasks = new HashSet<>();
        Map<String, Object> nodes = (Map<String, Object>) response.evaluate("nodes");
        for (Map.Entry<String, Object> node : nodes.entrySet()) {
            Map<String, Object> nodeInfo = (Map<String, Object>) node.getValue();
            Map<String, Object> nodeTasks = (Map<String, Object>) nodeInfo.get("tasks");
            for (Map.Entry<String, Object> taskAndName : nodeTasks.entrySet()) {
                Map<String, Object> task = (Map<String, Object>) taskAndName.getValue();
                runningTasks.add(task.get("action").toString());
            }
        }
        return runningTasks;
    }
}
| |
/* Copyright (c) 2009, Nathan Freitas, Orbot / The Guardian Project - http://openideals.com/guardian */
/* See LICENSE for licensing information */
package info.guardianproject.onionkit.ui;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URLEncoder;
import java.util.StringTokenizer;
import android.util.Log;
public class TorServiceUtils {
    private final static String TAG = "TorUtils";
    // various console cmds
    public final static String SHELL_CMD_CHMOD = "chmod";
    public final static String SHELL_CMD_KILL = "kill -9";
    public final static String SHELL_CMD_RM = "rm";
    public final static String SHELL_CMD_PS = "ps";
    public final static String SHELL_CMD_PIDOF = "pidof";
    public final static String CHMOD_EXE_VALUE = "700";

    /**
     * Best-effort check for root availability: looks for the Superuser app, the su binary, and
     * finally tries `which su` through a shell. Returns false (and logs) when no root is found
     * or any of the probes fail.
     */
    public static boolean isRootPossible()
    {
        StringBuilder log = new StringBuilder();
        try {
            // Check if Superuser.apk exists
            File fileSU = new File("/system/app/Superuser.apk");
            if (fileSU.exists())
                return true;
            fileSU = new File("/system/app/superuser.apk");
            if (fileSU.exists())
                return true;
            fileSU = new File("/system/bin/su");
            if (fileSU.exists())
            {
                String[] cmd = {
                        "su"
                };
                int exitCode = TorServiceUtils.doShellCommand(cmd, log, false, true);
                // su binary exists; usable only if invoking it succeeds
                return exitCode == 0;
            }
            // Check for 'su' binary anywhere on the shell's PATH
            String[] cmd = {
                    "which su"
            };
            int exitCode = TorServiceUtils.doShellCommand(cmd, log, false, true);
            if (exitCode == 0) {
                Log.d(TAG, "root exists, but not sure about permissions");
                return true;
            }
        } catch (Exception e) {
            // this means that there is no root to be had (normally);
            // a single catch replaces the original duplicate IOException/Exception handlers
            Log.e(TAG, "Error checking for root access", e);
        }
        Log.e(TAG, "Could not acquire root permissions");
        return false;
    }

    /**
     * Finds the pid of a running process, preferring `pidof` and falling back to parsing `ps`
     * output. Returns -1 when the process is not found or both probes fail.
     */
    public static int findProcessId(String command)
    {
        int procId = -1;
        try
        {
            procId = findProcessIdWithPidOf(command);
            if (procId == -1)
                procId = findProcessIdWithPS(command);
        } catch (Exception e)
        {
            try
            {
                // pidof failed entirely (e.g. not present on this device); try ps instead
                procId = findProcessIdWithPS(command);
            } catch (Exception e2)
            {
                Log.e(TAG, "Unable to get proc id for command: " + URLEncoder.encode(command), e2);
            }
        }
        return procId;
    }

    /**
     * Finds a pid via the `pidof` shell command, matching on the command's base name.
     * Returns -1 when no numeric pid is printed.
     *
     * @throws Exception if the pidof process cannot be started or its output cannot be read
     */
    public static int findProcessIdWithPidOf(String command) throws Exception
    {
        int procId = -1;
        Runtime r = Runtime.getRuntime();
        String baseName = new File(command).getName();
        // fix contributed my mikos on 2010.12.10
        Process procPs = r.exec(new String[] {
                SHELL_CMD_PIDOF, baseName
        });
        BufferedReader reader = new BufferedReader(new InputStreamReader(procPs.getInputStream()));
        try
        {
            String line;
            while ((line = reader.readLine()) != null)
            {
                try
                {
                    // this line should just be the process id
                    procId = Integer.parseInt(line.trim());
                    break;
                } catch (NumberFormatException e)
                {
                    Log.e("TorServiceUtils", "unable to parse process pid: " + line, e);
                }
            }
        } finally
        {
            // fixes a reader/stream leak in the original implementation
            reader.close();
        }
        return procId;
    }

    /**
     * Finds a pid by scanning `ps` output for a line containing " command".
     * Returns -1 when no matching line is found.
     *
     * @throws Exception if ps cannot be started, its output cannot be read,
     *                   or a matched line has an unexpected format
     */
    public static int findProcessIdWithPS(String command) throws Exception
    {
        int procId = -1;
        Runtime r = Runtime.getRuntime();
        Process procPs = r.exec(SHELL_CMD_PS);
        BufferedReader reader = new BufferedReader(new InputStreamReader(procPs.getInputStream()));
        try
        {
            String line;
            while ((line = reader.readLine()) != null)
            {
                // leading space avoids matching substrings of other command names
                if (line.indexOf(' ' + command) != -1)
                {
                    StringTokenizer st = new StringTokenizer(line, " ");
                    st.nextToken(); // proc owner
                    procId = Integer.parseInt(st.nextToken().trim());
                    break;
                }
            }
        } finally
        {
            // fixes a reader/stream leak in the original implementation
            reader.close();
        }
        return procId;
    }

    /**
     * Runs the given commands through a shell ("su" when runAsRoot, "sh" otherwise).
     * When waitFor is true, stdout and stderr are appended to {@code log} (if non-null)
     * and the shell's exit code is returned; otherwise -1 is returned immediately.
     *
     * @throws Exception if the shell cannot be started or its streams fail
     */
    public static int doShellCommand(String[] cmds, StringBuilder log, boolean runAsRoot,
            boolean waitFor) throws Exception
    {
        int exitCode = -1;
        Process proc;
        if (runAsRoot)
            proc = Runtime.getRuntime().exec("su");
        else
            proc = Runtime.getRuntime().exec("sh");
        OutputStreamWriter out = new OutputStreamWriter(proc.getOutputStream());
        try
        {
            for (int i = 0; i < cmds.length; i++)
            {
                out.write(cmds[i]);
                out.write("\n");
            }
            out.flush();
            out.write("exit\n");
            out.flush();
        } finally
        {
            // fixes a writer leak; closing stdin also signals EOF to the shell
            out.close();
        }
        if (waitFor)
        {
            final char buf[] = new char[10];
            // Consume the "stdout"
            InputStreamReader reader = new InputStreamReader(proc.getInputStream());
            try
            {
                int read;
                while ((read = reader.read(buf)) != -1) {
                    if (log != null)
                        log.append(buf, 0, read);
                }
            } finally
            {
                reader.close();
            }
            // Consume the "stderr"
            reader = new InputStreamReader(proc.getErrorStream());
            try
            {
                int read;
                while ((read = reader.read(buf)) != -1) {
                    if (log != null)
                        log.append(buf, 0, read);
                }
            } finally
            {
                reader.close();
            }
            exitCode = proc.waitFor();
        }
        return exitCode;
    }
}
| |
// PathVisio,
// a tool for data visualization and analysis using Biological Pathways
// Copyright 2006-2011 BiGCaT Bioinformatics
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.pathvisio.desktop;
import java.util.Iterator;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.swing.JEditorPane;
import javax.swing.SwingUtilities;
import javax.swing.text.html.HTMLEditorKit;
import org.bridgedb.Xref;
import org.pathvisio.core.ApplicationEvent;
import org.pathvisio.core.Engine;
import org.pathvisio.core.Engine.ApplicationEventListener;
import org.pathvisio.core.model.PathwayElement;
import org.pathvisio.core.model.PathwayElementEvent;
import org.pathvisio.core.model.PathwayElementListener;
import org.pathvisio.core.model.StaticProperty;
import org.pathvisio.core.view.Graphics;
import org.pathvisio.core.view.SelectionBox.SelectionEvent;
import org.pathvisio.core.view.SelectionBox.SelectionListener;
import org.pathvisio.core.view.VPathway;
import org.pathvisio.core.view.VPathwayElement;
import org.pathvisio.gui.DataPaneTextProvider;
/**
* The backpage panel for the Swing version of PathVisio. This pane shows
* annotation information from the individual Databases when a datanode or
* interaction is clicked.
* <p>
* BackpagePane listens to selection events and other event types to update its
* contents when necessary.
* <p>
* It uses a BackpageTextProvider to generate the html content, which has to be
* inserted at construction time. Backpage generation may take a noticable
* amount of time, therefore this task is always done in a background thread.
* <p>
* It is the responsibility of the instantiator to also call the dispose()
* method, otherwise the background thread is not killed.
*/
public class DataPane extends JEditorPane implements ApplicationEventListener,
        SelectionListener, PathwayElementListener {
    // Produces the html annotation content shown in this pane.
    private final DataPaneTextProvider dpt;
    private Engine engine;
    // Single background thread that builds the (potentially slow) backpage html off the EDT.
    private ExecutorService executor;

    /**
     * Creates the pane, registers it as listener on the engine and the currently active
     * pathway (if any), and installs an editor kit that works around a JDK html-parser bug.
     */
    public DataPane(DataPaneTextProvider dpt, Engine engine) {
        super();
        engine.addApplicationEventListener(this);
        VPathway vp = engine.getActiveVPathway();
        if (vp != null)
            vp.addSelectionListener(this);
        this.engine = engine;
        setEditable(false);
        setContentType("text/html");
        this.dpt = dpt;
        executor = Executors.newSingleThreadExecutor();
        // Workaround for #1313
        // Cause is java bug:
        // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6993691
        setEditorKit(new HTMLEditorKit() {
            protected Parser getParser() {
                try {
                    Class c = Class
                            .forName("javax.swing.text.html.parser.ParserDelegator");
                    Parser defaultParser = (Parser) c.newInstance();
                    return defaultParser;
                } catch (Throwable e) {
                    // intentionally ignored: fall back to a null parser when the
                    // delegator class is unavailable
                }
                return null;
            }
        });
    }

    // The pathway element currently shown, or null when the pane is empty.
    private PathwayElement input;

    /**
     * Switches the pane to a new pathway element. Detaches the listener from the previous
     * input, attaches to the new one, and kicks off a background query for its annotation html.
     * Passing null clears the pane.
     */
    public void setInput(final PathwayElement e) {
        // System.err.println("===== SetInput Called ==== " + e);
        if (e == input)
            return; // Don't set same input twice
        // Remove pathwaylistener from old input
        if (input != null)
            input.removeListener(this);
        if (e == null) {
            input = null;
            setText(dpt.getAnnotationHTML(null));
        } else {
            input = e;
            input.addListener(this);
            doQuery();
        }
    }

    /**
     * Generates the annotation html for the current input on the background executor and
     * publishes it to the pane on the EDT.
     * NOTE(review): `input` is read on the background thread without synchronization — a
     * concurrent setInput() could race with this read; confirm whether that is acceptable here.
     */
    private void doQuery() {
        setText("Loading");
        currRef = input.getXref();
        executor.execute(new Runnable() {
            public void run() {
                if (input == null)
                    return;
                final String txt = dpt.getAnnotationHTML(input);
                SwingUtilities.invokeLater(new Runnable() {
                    public void run() {
                        setText(txt);
                        setCaretPosition(0); // scroll to top.
                    }
                });
            }
        });
    }

    /**
     * Updates the pane on selection changes: shows the first Graphics element of a new
     * selection, and clears the pane when the selection becomes empty.
     */
    public void selectionEvent(SelectionEvent e) {
        switch (e.type) {
        case SelectionEvent.OBJECT_ADDED:
            // Just take the first DataNode in the selection
            Iterator<VPathwayElement> it = e.selection.iterator();
            while (it.hasNext()) {
                VPathwayElement o = it.next();
                // works for all Graphics object
                // the backpage checks and gives the correct error if
                // it's not a datanode or line
                if (o instanceof Graphics) {
                    setInput(((Graphics) o).getPathwayElement());
                    break;
                }
            }
            break;
        case SelectionEvent.OBJECT_REMOVED:
            if (e.selection.size() != 0)
                break;
            // deliberate fall-through: an emptied selection clears the pane
        case SelectionEvent.SELECTION_CLEARED:
            setInput(null);
            break;
        }
    }

    /**
     * Tracks pathway lifecycle: registers on newly created pathways and clears the pane
     * (and deregisters) when a pathway is disposed.
     */
    public void applicationEvent(ApplicationEvent e) {
        switch (e.getType()) {
        case VPATHWAY_CREATED:
            ((VPathway) e.getSource()).addSelectionListener(this);
            break;
        case VPATHWAY_DISPOSED:
            ((VPathway) e.getSource()).removeSelectionListener(this);
            // remove content of backpage when pathway is closed
            input = null;
            setText(dpt.getAnnotationHTML(null));
            break;
        }
    }

    // Xref of the input at the time of the last query; used to suppress redundant re-queries.
    Xref currRef;

    /**
     * Re-runs the annotation query when the shown element's identifier or data source changed
     * and the resulting Xref differs from the one last queried.
     */
    public void gmmlObjectModified(PathwayElementEvent e) {
        PathwayElement pe = e.getModifiedPathwayElement();
        if (input != null
                && (e.affectsProperty(StaticProperty.GENEID) || e
                        .affectsProperty(StaticProperty.DATASOURCE))) {
            // new ref combines the modified element's id with the current input's data source
            Xref nref = new Xref(pe.getElementID(), input.getDataSource());
            if (!nref.equals(currRef)) {
                doQuery();
            }
        }
    }

    // Guards against double-dispose; see class javadoc: callers must invoke dispose().
    private boolean disposed = false;

    /**
     * Deregisters all listeners and shuts down the background thread. Must be called exactly
     * once by the instantiator, otherwise the executor thread is never stopped.
     */
    public void dispose() {
        assert (!disposed);
        engine.removeApplicationEventListener(this);
        VPathway vpwy = engine.getActiveVPathway();
        if (vpwy != null)
            vpwy.removeSelectionListener(this);
        executor.shutdown();
        disposed = true;
    }
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.channel.v1.stub;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListChannelPartnerLinksPagedResponse;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListCustomersPagedResponse;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListEntitlementsPagedResponse;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListOffersPagedResponse;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListProductsPagedResponse;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListPurchasableOffersPagedResponse;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListPurchasableSkusPagedResponse;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListSkusPagedResponse;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListSubscribersPagedResponse;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListTransferableOffersPagedResponse;
import static com.google.cloud.channel.v1.CloudChannelServiceClient.ListTransferableSkusPagedResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.channel.v1.ActivateEntitlementRequest;
import com.google.cloud.channel.v1.CancelEntitlementRequest;
import com.google.cloud.channel.v1.ChangeOfferRequest;
import com.google.cloud.channel.v1.ChangeParametersRequest;
import com.google.cloud.channel.v1.ChangeRenewalSettingsRequest;
import com.google.cloud.channel.v1.ChannelPartnerLink;
import com.google.cloud.channel.v1.CheckCloudIdentityAccountsExistRequest;
import com.google.cloud.channel.v1.CheckCloudIdentityAccountsExistResponse;
import com.google.cloud.channel.v1.CreateChannelPartnerLinkRequest;
import com.google.cloud.channel.v1.CreateCustomerRequest;
import com.google.cloud.channel.v1.CreateEntitlementRequest;
import com.google.cloud.channel.v1.Customer;
import com.google.cloud.channel.v1.DeleteCustomerRequest;
import com.google.cloud.channel.v1.Entitlement;
import com.google.cloud.channel.v1.GetChannelPartnerLinkRequest;
import com.google.cloud.channel.v1.GetCustomerRequest;
import com.google.cloud.channel.v1.GetEntitlementRequest;
import com.google.cloud.channel.v1.ImportCustomerRequest;
import com.google.cloud.channel.v1.ListChannelPartnerLinksRequest;
import com.google.cloud.channel.v1.ListChannelPartnerLinksResponse;
import com.google.cloud.channel.v1.ListCustomersRequest;
import com.google.cloud.channel.v1.ListCustomersResponse;
import com.google.cloud.channel.v1.ListEntitlementsRequest;
import com.google.cloud.channel.v1.ListEntitlementsResponse;
import com.google.cloud.channel.v1.ListOffersRequest;
import com.google.cloud.channel.v1.ListOffersResponse;
import com.google.cloud.channel.v1.ListProductsRequest;
import com.google.cloud.channel.v1.ListProductsResponse;
import com.google.cloud.channel.v1.ListPurchasableOffersRequest;
import com.google.cloud.channel.v1.ListPurchasableOffersResponse;
import com.google.cloud.channel.v1.ListPurchasableSkusRequest;
import com.google.cloud.channel.v1.ListPurchasableSkusResponse;
import com.google.cloud.channel.v1.ListSkusRequest;
import com.google.cloud.channel.v1.ListSkusResponse;
import com.google.cloud.channel.v1.ListSubscribersRequest;
import com.google.cloud.channel.v1.ListSubscribersResponse;
import com.google.cloud.channel.v1.ListTransferableOffersRequest;
import com.google.cloud.channel.v1.ListTransferableOffersResponse;
import com.google.cloud.channel.v1.ListTransferableSkusRequest;
import com.google.cloud.channel.v1.ListTransferableSkusResponse;
import com.google.cloud.channel.v1.LookupOfferRequest;
import com.google.cloud.channel.v1.Offer;
import com.google.cloud.channel.v1.OperationMetadata;
import com.google.cloud.channel.v1.ProvisionCloudIdentityRequest;
import com.google.cloud.channel.v1.RegisterSubscriberRequest;
import com.google.cloud.channel.v1.RegisterSubscriberResponse;
import com.google.cloud.channel.v1.StartPaidServiceRequest;
import com.google.cloud.channel.v1.SuspendEntitlementRequest;
import com.google.cloud.channel.v1.TransferEntitlementsRequest;
import com.google.cloud.channel.v1.TransferEntitlementsResponse;
import com.google.cloud.channel.v1.TransferEntitlementsToGoogleRequest;
import com.google.cloud.channel.v1.UnregisterSubscriberRequest;
import com.google.cloud.channel.v1.UnregisterSubscriberResponse;
import com.google.cloud.channel.v1.UpdateChannelPartnerLinkRequest;
import com.google.cloud.channel.v1.UpdateCustomerRequest;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.OperationsStub;
import com.google.protobuf.Empty;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Base stub class for the CloudChannelService service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@Generated("by gapic-generator-java")
public abstract class CloudChannelServiceStub implements BackgroundResource {
  // Every callable accessor below throws UnsupportedOperationException until a
  // subclass overrides it; only the RPCs a concrete stub supports are implemented.
  public OperationsStub getOperationsStub() {
    throw new UnsupportedOperationException("Not implemented: getOperationsStub()");
  }
  // ---- Customer management ----
  public UnaryCallable<ListCustomersRequest, ListCustomersPagedResponse>
      listCustomersPagedCallable() {
    throw new UnsupportedOperationException("Not implemented: listCustomersPagedCallable()");
  }
  public UnaryCallable<ListCustomersRequest, ListCustomersResponse> listCustomersCallable() {
    throw new UnsupportedOperationException("Not implemented: listCustomersCallable()");
  }
  public UnaryCallable<GetCustomerRequest, Customer> getCustomerCallable() {
    throw new UnsupportedOperationException("Not implemented: getCustomerCallable()");
  }
  public UnaryCallable<
          CheckCloudIdentityAccountsExistRequest, CheckCloudIdentityAccountsExistResponse>
      checkCloudIdentityAccountsExistCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: checkCloudIdentityAccountsExistCallable()");
  }
  public UnaryCallable<CreateCustomerRequest, Customer> createCustomerCallable() {
    throw new UnsupportedOperationException("Not implemented: createCustomerCallable()");
  }
  public UnaryCallable<UpdateCustomerRequest, Customer> updateCustomerCallable() {
    throw new UnsupportedOperationException("Not implemented: updateCustomerCallable()");
  }
  public UnaryCallable<DeleteCustomerRequest, Empty> deleteCustomerCallable() {
    throw new UnsupportedOperationException("Not implemented: deleteCustomerCallable()");
  }
  public UnaryCallable<ImportCustomerRequest, Customer> importCustomerCallable() {
    throw new UnsupportedOperationException("Not implemented: importCustomerCallable()");
  }
  // Long-running operation: paired Operation-returning and typed OperationCallable variants.
  public OperationCallable<ProvisionCloudIdentityRequest, Customer, OperationMetadata>
      provisionCloudIdentityOperationCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: provisionCloudIdentityOperationCallable()");
  }
  public UnaryCallable<ProvisionCloudIdentityRequest, Operation> provisionCloudIdentityCallable() {
    throw new UnsupportedOperationException("Not implemented: provisionCloudIdentityCallable()");
  }
  // ---- Entitlement management ----
  public UnaryCallable<ListEntitlementsRequest, ListEntitlementsPagedResponse>
      listEntitlementsPagedCallable() {
    throw new UnsupportedOperationException("Not implemented: listEntitlementsPagedCallable()");
  }
  public UnaryCallable<ListEntitlementsRequest, ListEntitlementsResponse>
      listEntitlementsCallable() {
    throw new UnsupportedOperationException("Not implemented: listEntitlementsCallable()");
  }
  public UnaryCallable<ListTransferableSkusRequest, ListTransferableSkusPagedResponse>
      listTransferableSkusPagedCallable() {
    throw new UnsupportedOperationException("Not implemented: listTransferableSkusPagedCallable()");
  }
  public UnaryCallable<ListTransferableSkusRequest, ListTransferableSkusResponse>
      listTransferableSkusCallable() {
    throw new UnsupportedOperationException("Not implemented: listTransferableSkusCallable()");
  }
  public UnaryCallable<ListTransferableOffersRequest, ListTransferableOffersPagedResponse>
      listTransferableOffersPagedCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: listTransferableOffersPagedCallable()");
  }
  public UnaryCallable<ListTransferableOffersRequest, ListTransferableOffersResponse>
      listTransferableOffersCallable() {
    throw new UnsupportedOperationException("Not implemented: listTransferableOffersCallable()");
  }
  public UnaryCallable<GetEntitlementRequest, Entitlement> getEntitlementCallable() {
    throw new UnsupportedOperationException("Not implemented: getEntitlementCallable()");
  }
  public OperationCallable<CreateEntitlementRequest, Entitlement, OperationMetadata>
      createEntitlementOperationCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: createEntitlementOperationCallable()");
  }
  public UnaryCallable<CreateEntitlementRequest, Operation> createEntitlementCallable() {
    throw new UnsupportedOperationException("Not implemented: createEntitlementCallable()");
  }
  public OperationCallable<ChangeParametersRequest, Entitlement, OperationMetadata>
      changeParametersOperationCallable() {
    throw new UnsupportedOperationException("Not implemented: changeParametersOperationCallable()");
  }
  public UnaryCallable<ChangeParametersRequest, Operation> changeParametersCallable() {
    throw new UnsupportedOperationException("Not implemented: changeParametersCallable()");
  }
  public OperationCallable<ChangeRenewalSettingsRequest, Entitlement, OperationMetadata>
      changeRenewalSettingsOperationCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: changeRenewalSettingsOperationCallable()");
  }
  public UnaryCallable<ChangeRenewalSettingsRequest, Operation> changeRenewalSettingsCallable() {
    throw new UnsupportedOperationException("Not implemented: changeRenewalSettingsCallable()");
  }
  public OperationCallable<ChangeOfferRequest, Entitlement, OperationMetadata>
      changeOfferOperationCallable() {
    throw new UnsupportedOperationException("Not implemented: changeOfferOperationCallable()");
  }
  public UnaryCallable<ChangeOfferRequest, Operation> changeOfferCallable() {
    throw new UnsupportedOperationException("Not implemented: changeOfferCallable()");
  }
  public OperationCallable<StartPaidServiceRequest, Entitlement, OperationMetadata>
      startPaidServiceOperationCallable() {
    throw new UnsupportedOperationException("Not implemented: startPaidServiceOperationCallable()");
  }
  public UnaryCallable<StartPaidServiceRequest, Operation> startPaidServiceCallable() {
    throw new UnsupportedOperationException("Not implemented: startPaidServiceCallable()");
  }
  public OperationCallable<SuspendEntitlementRequest, Entitlement, OperationMetadata>
      suspendEntitlementOperationCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: suspendEntitlementOperationCallable()");
  }
  public UnaryCallable<SuspendEntitlementRequest, Operation> suspendEntitlementCallable() {
    throw new UnsupportedOperationException("Not implemented: suspendEntitlementCallable()");
  }
  public OperationCallable<CancelEntitlementRequest, Empty, OperationMetadata>
      cancelEntitlementOperationCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: cancelEntitlementOperationCallable()");
  }
  public UnaryCallable<CancelEntitlementRequest, Operation> cancelEntitlementCallable() {
    throw new UnsupportedOperationException("Not implemented: cancelEntitlementCallable()");
  }
  public OperationCallable<ActivateEntitlementRequest, Entitlement, OperationMetadata>
      activateEntitlementOperationCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: activateEntitlementOperationCallable()");
  }
  public UnaryCallable<ActivateEntitlementRequest, Operation> activateEntitlementCallable() {
    throw new UnsupportedOperationException("Not implemented: activateEntitlementCallable()");
  }
  public OperationCallable<
          TransferEntitlementsRequest, TransferEntitlementsResponse, OperationMetadata>
      transferEntitlementsOperationCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: transferEntitlementsOperationCallable()");
  }
  public UnaryCallable<TransferEntitlementsRequest, Operation> transferEntitlementsCallable() {
    throw new UnsupportedOperationException("Not implemented: transferEntitlementsCallable()");
  }
  public OperationCallable<TransferEntitlementsToGoogleRequest, Empty, OperationMetadata>
      transferEntitlementsToGoogleOperationCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: transferEntitlementsToGoogleOperationCallable()");
  }
  public UnaryCallable<TransferEntitlementsToGoogleRequest, Operation>
      transferEntitlementsToGoogleCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: transferEntitlementsToGoogleCallable()");
  }
  // ---- Channel partner link management ----
  public UnaryCallable<ListChannelPartnerLinksRequest, ListChannelPartnerLinksPagedResponse>
      listChannelPartnerLinksPagedCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: listChannelPartnerLinksPagedCallable()");
  }
  public UnaryCallable<ListChannelPartnerLinksRequest, ListChannelPartnerLinksResponse>
      listChannelPartnerLinksCallable() {
    throw new UnsupportedOperationException("Not implemented: listChannelPartnerLinksCallable()");
  }
  public UnaryCallable<GetChannelPartnerLinkRequest, ChannelPartnerLink>
      getChannelPartnerLinkCallable() {
    throw new UnsupportedOperationException("Not implemented: getChannelPartnerLinkCallable()");
  }
  public UnaryCallable<CreateChannelPartnerLinkRequest, ChannelPartnerLink>
      createChannelPartnerLinkCallable() {
    throw new UnsupportedOperationException("Not implemented: createChannelPartnerLinkCallable()");
  }
  public UnaryCallable<UpdateChannelPartnerLinkRequest, ChannelPartnerLink>
      updateChannelPartnerLinkCallable() {
    throw new UnsupportedOperationException("Not implemented: updateChannelPartnerLinkCallable()");
  }
  // ---- Catalog (products, SKUs, offers) ----
  public UnaryCallable<LookupOfferRequest, Offer> lookupOfferCallable() {
    throw new UnsupportedOperationException("Not implemented: lookupOfferCallable()");
  }
  public UnaryCallable<ListProductsRequest, ListProductsPagedResponse> listProductsPagedCallable() {
    throw new UnsupportedOperationException("Not implemented: listProductsPagedCallable()");
  }
  public UnaryCallable<ListProductsRequest, ListProductsResponse> listProductsCallable() {
    throw new UnsupportedOperationException("Not implemented: listProductsCallable()");
  }
  public UnaryCallable<ListSkusRequest, ListSkusPagedResponse> listSkusPagedCallable() {
    throw new UnsupportedOperationException("Not implemented: listSkusPagedCallable()");
  }
  public UnaryCallable<ListSkusRequest, ListSkusResponse> listSkusCallable() {
    throw new UnsupportedOperationException("Not implemented: listSkusCallable()");
  }
  public UnaryCallable<ListOffersRequest, ListOffersPagedResponse> listOffersPagedCallable() {
    throw new UnsupportedOperationException("Not implemented: listOffersPagedCallable()");
  }
  public UnaryCallable<ListOffersRequest, ListOffersResponse> listOffersCallable() {
    throw new UnsupportedOperationException("Not implemented: listOffersCallable()");
  }
  public UnaryCallable<ListPurchasableSkusRequest, ListPurchasableSkusPagedResponse>
      listPurchasableSkusPagedCallable() {
    throw new UnsupportedOperationException("Not implemented: listPurchasableSkusPagedCallable()");
  }
  public UnaryCallable<ListPurchasableSkusRequest, ListPurchasableSkusResponse>
      listPurchasableSkusCallable() {
    throw new UnsupportedOperationException("Not implemented: listPurchasableSkusCallable()");
  }
  public UnaryCallable<ListPurchasableOffersRequest, ListPurchasableOffersPagedResponse>
      listPurchasableOffersPagedCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: listPurchasableOffersPagedCallable()");
  }
  public UnaryCallable<ListPurchasableOffersRequest, ListPurchasableOffersResponse>
      listPurchasableOffersCallable() {
    throw new UnsupportedOperationException("Not implemented: listPurchasableOffersCallable()");
  }
  // ---- Pub/Sub subscriber management ----
  public UnaryCallable<RegisterSubscriberRequest, RegisterSubscriberResponse>
      registerSubscriberCallable() {
    throw new UnsupportedOperationException("Not implemented: registerSubscriberCallable()");
  }
  public UnaryCallable<UnregisterSubscriberRequest, UnregisterSubscriberResponse>
      unregisterSubscriberCallable() {
    throw new UnsupportedOperationException("Not implemented: unregisterSubscriberCallable()");
  }
  public UnaryCallable<ListSubscribersRequest, ListSubscribersPagedResponse>
      listSubscribersPagedCallable() {
    throw new UnsupportedOperationException("Not implemented: listSubscribersPagedCallable()");
  }
  public UnaryCallable<ListSubscribersRequest, ListSubscribersResponse> listSubscribersCallable() {
    throw new UnsupportedOperationException("Not implemented: listSubscribersCallable()");
  }
  @Override
  public abstract void close();
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package org.apache.logging.log4j.nosql.appender;
import java.util.Map;
import org.apache.logging.log4j.Marker;
import org.apache.logging.log4j.ThreadContext;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AppenderLoggingException;
import org.apache.logging.log4j.core.appender.ManagerFactory;
import org.apache.logging.log4j.core.appender.db.AbstractDatabaseManager;
import org.apache.logging.log4j.core.util.Closer;
/**
 * An {@link AbstractDatabaseManager} implementation for all NoSQL databases.
 *
 * @param <W> A type parameter for reassuring the compiler that all operations are using the same {@link NoSqlObject}.
 */
public final class NoSqlDatabaseManager<W> extends AbstractDatabaseManager {
    private static final NoSQLDatabaseManagerFactory FACTORY = new NoSQLDatabaseManagerFactory();
    // Supplies the NoSQL client/connection used for every write.
    private final NoSqlProvider<NoSqlConnection<W, ? extends NoSqlObject<W>>> provider;
    // Current connection; obtained in connectAndStart(), closed in shutdownInternal().
    private NoSqlConnection<W, ? extends NoSqlObject<W>> connection;
    private NoSqlDatabaseManager(final String name, final int bufferSize,
            final NoSqlProvider<NoSqlConnection<W, ? extends NoSqlObject<W>>> provider) {
        super(name, bufferSize);
        this.provider = provider;
    }
    @Override
    protected void startupInternal() {
        // nothing to see here
    }
    @Override
    protected void shutdownInternal() {
        // NoSQL doesn't use transactions, so all we need to do here is simply close the client
        Closer.closeSilently(this.connection);
    }
    /**
     * Obtains a connection from the provider before writes begin.
     *
     * @throws AppenderLoggingException if the provider fails to supply a connection
     */
    @Override
    protected void connectAndStart() {
        try {
            this.connection = this.provider.getConnection();
        } catch (final Exception e) {
            throw new AppenderLoggingException("Failed to get connection from NoSQL connection provider.", e);
        }
    }
    /**
     * Converts the log event into a NoSQL document (level, logger, message, source,
     * marker, thread info, timestamps, exception chain, context map and context stack)
     * and inserts it via the current connection.
     *
     * @throws AppenderLoggingException if the manager is not running or not connected
     */
    @Override
    protected void writeInternal(final LogEvent event) {
        if (!this.isRunning() || this.connection == null || this.connection.isClosed()) {
            throw new AppenderLoggingException(
                    "Cannot write logging event; NoSQL manager not connected to the database.");
        }
        final NoSqlObject<W> entity = this.connection.createObject();
        entity.set("level", event.getLevel());
        entity.set("loggerName", event.getLoggerName());
        // Store the formatted message text; null when the event carries no message.
        entity.set("message", event.getMessage() == null ? null : event.getMessage().getFormattedMessage());
        final StackTraceElement source = event.getSource();
        if (source == null) {
            entity.set("source", (Object) null);
        } else {
            entity.set("source", this.convertStackTraceElement(source));
        }
        final Marker marker = event.getMarker();
        if (marker == null) {
            entity.set("marker", (Object) null);
        } else {
            entity.set("marker", buildMarkerEntity(marker));
        }
        entity.set("threadId", event.getThreadId());
        entity.set("threadName", event.getThreadName());
        entity.set("threadPriority", event.getThreadPriority());
        entity.set("millis", event.getTimeMillis());
        entity.set("date", new java.util.Date(event.getTimeMillis()));
        @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
        Throwable thrown = event.getThrown();
        if (thrown == null) {
            entity.set("thrown", (Object) null);
        } else {
            // Flatten the cause chain into nested "cause" sub-documents.
            final NoSqlObject<W> originalExceptionEntity = this.connection.createObject();
            NoSqlObject<W> exceptionEntity = originalExceptionEntity;
            exceptionEntity.set("type", thrown.getClass().getName());
            exceptionEntity.set("message", thrown.getMessage());
            exceptionEntity.set("stackTrace", this.convertStackTrace(thrown.getStackTrace()));
            while (thrown.getCause() != null) {
                thrown = thrown.getCause();
                final NoSqlObject<W> causingExceptionEntity = this.connection.createObject();
                causingExceptionEntity.set("type", thrown.getClass().getName());
                causingExceptionEntity.set("message", thrown.getMessage());
                causingExceptionEntity.set("stackTrace", this.convertStackTrace(thrown.getStackTrace()));
                exceptionEntity.set("cause", causingExceptionEntity);
                exceptionEntity = causingExceptionEntity;
            }
            entity.set("thrown", originalExceptionEntity);
        }
        final Map<String, String> contextMap = event.getContextMap();
        if (contextMap == null) {
            entity.set("contextMap", (Object) null);
        } else {
            // One field per MDC entry.
            final NoSqlObject<W> contextMapEntity = this.connection.createObject();
            for (final Map.Entry<String, String> entry : contextMap.entrySet()) {
                contextMapEntity.set(entry.getKey(), entry.getValue());
            }
            entity.set("contextMap", contextMapEntity);
        }
        final ThreadContext.ContextStack contextStack = event.getContextStack();
        if (contextStack == null) {
            entity.set("contextStack", (Object) null);
        } else {
            entity.set("contextStack", contextStack.asList().toArray());
        }
        this.connection.insertObject(entity);
    }
    /**
     * Recursively converts a marker and its parent markers into a sub-document.
     */
    private NoSqlObject<W> buildMarkerEntity(final Marker marker) {
        final NoSqlObject<W> entity = this.connection.createObject();
        entity.set("name", marker.getName());
        final Marker[] parents = marker.getParents();
        if (parents != null) {
            @SuppressWarnings("unchecked")
            final NoSqlObject<W>[] parentEntities = new NoSqlObject[parents.length];
            for (int i = 0; i < parents.length; i++) {
                parentEntities[i] = buildMarkerEntity(parents[i]);
            }
            entity.set("parents", parentEntities);
        }
        return entity;
    }
    @Override
    protected void commitAndClose() {
        // all NoSQL drivers auto-commit (since NoSQL doesn't generally use the concept of transactions).
        // also, all our NoSQL drivers use internal connection pooling and provide clients, not connections.
        // thus, we should not be closing the client until shutdown as NoSQL is very different from SQL.
        // see LOG4J2-591 and LOG4J2-676
    }
    /**
     * Converts each stack frame into a sub-document via {@link #convertStackTraceElement}.
     */
    private NoSqlObject<W>[] convertStackTrace(final StackTraceElement[] stackTrace) {
        final NoSqlObject<W>[] stackTraceEntities = this.connection.createList(stackTrace.length);
        for (int i = 0; i < stackTrace.length; i++) {
            stackTraceEntities[i] = this.convertStackTraceElement(stackTrace[i]);
        }
        return stackTraceEntities;
    }
    /**
     * Converts one stack frame (class, method, file, line) into a sub-document.
     */
    private NoSqlObject<W> convertStackTraceElement(final StackTraceElement element) {
        final NoSqlObject<W> elementEntity = this.connection.createObject();
        elementEntity.set("className", element.getClassName());
        elementEntity.set("methodName", element.getMethodName());
        elementEntity.set("fileName", element.getFileName());
        elementEntity.set("lineNumber", element.getLineNumber());
        return elementEntity;
    }
    /**
     * Creates a NoSQL manager for use within the {@link NoSqlAppender}, or returns a suitable one if it already exists.
     *
     * @param name The name of the manager, which should include connection details and hashed passwords where possible.
     * @param bufferSize The size of the log event buffer.
     * @param provider A provider instance which will be used to obtain connections to the chosen NoSQL database.
     * @return a new or existing NoSQL manager as applicable.
     */
    public static NoSqlDatabaseManager<?> getNoSqlDatabaseManager(final String name, final int bufferSize,
            final NoSqlProvider<?> provider) {
        return AbstractDatabaseManager.getManager(name, new FactoryData(bufferSize, provider), FACTORY);
    }
    /**
     * Encapsulates data that {@link NoSQLDatabaseManagerFactory} uses to create managers.
     */
    private static final class FactoryData extends AbstractDatabaseManager.AbstractFactoryData {
        private final NoSqlProvider<?> provider;
        protected FactoryData(final int bufferSize, final NoSqlProvider<?> provider) {
            super(bufferSize);
            this.provider = provider;
        }
    }
    /**
     * Creates managers.
     */
    private static final class NoSQLDatabaseManagerFactory implements
            ManagerFactory<NoSqlDatabaseManager<?>, FactoryData> {
        @Override
        @SuppressWarnings("unchecked")
        public NoSqlDatabaseManager<?> createManager(final String name, final FactoryData data) {
            return new NoSqlDatabaseManager(name, data.getBufferSize(), data.provider);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.util;
import org.apache.beam.sdk.transforms.PTransform;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;

import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Utilities for working with JSON and other human-readable string formats.
 */
public class StringUtils {
  /**
   * Converts the given array of bytes into a legal JSON string.
   *
   * <p>Uses a simple strategy of converting each byte to a single char,
   * except for non-printable chars, non-ASCII chars, and '%', '\',
   * and '"', which are encoded as three chars in '%xx' format, where
   * 'xx' is the hexadecimal encoding of the byte.
   */
  public static String byteArrayToJsonString(byte[] bytes) {
    StringBuilder sb = new StringBuilder(bytes.length * 2);
    for (byte b : bytes) {
      if (b >= 32 && b < 127) {
        // A printable ascii character.
        char c = (char) b;
        if (c != '%' && c != '\\' && c != '\"') {
          // Not an escape prefix or special character, either.
          // Send through unchanged.
          sb.append(c);
          continue;
        }
      }
      // Send through escaped. Use '%xx' format.
      sb.append(String.format("%%%02x", b));
    }
    return sb.toString();
  }

  /**
   * Converts the given string, encoded using {@link #byteArrayToJsonString},
   * into a byte array.
   *
   * @param string the encoded string to decode
   * @return the decoded bytes
   * @throws IllegalArgumentException if the argument string is not legal
   */
  public static byte[] jsonStringToByteArray(String string) {
    // Decode directly into a byte buffer instead of boxing every byte into a
    // List<Byte> and copying it out element-by-element at the end.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream(string.length());
    for (int i = 0; i < string.length(); ) {
      char c = string.charAt(i);
      byte b;
      if (c == '%') {
        // Escaped. Expect '%xx' format.
        try {
          b = (byte) Integer.parseInt(string.substring(i + 1, i + 3), 16);
        } catch (IndexOutOfBoundsException | NumberFormatException exn) {
          throw new IllegalArgumentException(
              "not in legal encoded format; "
              + "substring [" + i + ".." + (i + 2) + "] not in format \"%xx\"",
              exn);
        }
        i += 3;
      } else {
        // Send through unchanged.
        b = (byte) c;
        i++;
      }
      bytes.write(b);
    }
    return bytes.toByteArray();
  }

  // Suffixes stripped from each name component by simplifyNameComponent.
  private static final String[] STANDARD_NAME_SUFFIXES =
      new String[]{"DoFn", "Fn"};

  /**
   * Pattern to match a non-anonymous inner class.
   * Eg, matches "Foo$Bar", or even "Foo$1$Bar", but not "Foo$1" or "Foo$1$2".
   */
  private static final Pattern NAMED_INNER_CLASS =
      Pattern.compile(".+\\$(?<INNER>[^0-9].*)");

  // Matches an anonymous-class segment such as "$1$" so it can be elided.
  private static final String ANONYMOUS_CLASS_REGEX = "\\$[0-9]+\\$";

  /**
   * Returns a simple name for a class.
   *
   * <p>Note: this is non-invertible - the name may be simplified to an
   * extent that it cannot be mapped back to the original class.
   *
   * <p>This can be used to generate human-readable names. It
   * removes the package and outer classes from the name,
   * and removes common suffixes.
   *
   * <p>Examples:
   * <ul>
   *   <li>{@code some.package.Word.SummaryDoFn} -> "Summary"
   *   <li>{@code another.package.PairingFn} -> "Pairing"
   * </ul>
   *
   * @throws IllegalArgumentException if the class is anonymous
   */
  public static String approximateSimpleName(Class<?> clazz) {
    return approximateSimpleName(clazz, /* dropOuterClassNames */ true);
  }

  /**
   * Returns a name for a PTransform class.
   *
   * <p>This can be used to generate human-readable transform names. It
   * removes the package from the name, and removes common suffixes.
   *
   * <p>It is different than approximateSimpleName:
   * <ul>
   *   <li>1. It keeps the outer classes names.
   *   <li>2. It removes the common transform inner class: "Bound".
   * </ul>
   *
   * <p>Examples:
   * <ul>
   *   <li>{@code some.package.Word.Summary} -> "Word.Summary"
   *   <li>{@code another.package.Pairing.Bound} -> "Pairing"
   * </ul>
   */
  public static String approximatePTransformName(Class<?> clazz) {
    Preconditions.checkArgument(PTransform.class.isAssignableFrom(clazz));
    return approximateSimpleName(clazz, /* dropOuterClassNames */ false)
        .replaceFirst("\\.Bound$", "");
  }

  /**
   * Calculate the Levenshtein distance between two strings.
   *
   * <p>The Levenshtein distance between two words is the minimum number of single-character edits
   * (i.e. insertions, deletions or substitutions) required to change one string into the other.
   */
  public static int getLevenshteinDistance(final String s, final String t) {
    Preconditions.checkNotNull(s);
    Preconditions.checkNotNull(t);
    // base cases
    if (s.equals(t)) {
      return 0;
    }
    if (s.length() == 0) {
      return t.length();
    }
    if (t.length() == 0) {
      return s.length();
    }
    // create two work arrays to store integer distances
    final int[] v0 = new int[t.length() + 1];
    final int[] v1 = new int[t.length() + 1];
    // initialize v0 (the previous row of distances)
    // this row is A[0][i]: edit distance for an empty s
    // the distance is just the number of characters to delete from t
    for (int i = 0; i < v0.length; i++) {
      v0[i] = i;
    }
    for (int i = 0; i < s.length(); i++) {
      // calculate v1 (current row distances) from the previous row v0
      // first element of v1 is A[i+1][0]
      // edit distance is delete (i+1) chars from s to match empty t
      v1[0] = i + 1;
      // use formula to fill in the rest of the row
      for (int j = 0; j < t.length(); j++) {
        int cost = (s.charAt(i) == t.charAt(j)) ? 0 : 1;
        v1[j + 1] = Math.min(Math.min(v1[j] + 1, v0[j + 1] + 1), v0[j] + cost);
      }
      // copy v1 (current row) to v0 (previous row) for next iteration
      System.arraycopy(v1, 0, v0, 0, v0.length);
    }
    return v1[t.length()];
  }

  /**
   * Shared implementation of the two public name-simplification entry points.
   *
   * @param dropOuterClassNames whether enclosing class names are removed entirely
   *     (true) or kept with anonymous segments collapsed (false)
   * @throws IllegalArgumentException if the class is anonymous
   */
  private static String approximateSimpleName(Class<?> clazz, boolean dropOuterClassNames) {
    Preconditions.checkArgument(!clazz.isAnonymousClass(),
        "Attempted to get simple name of anonymous class");
    String fullName = clazz.getName();
    String shortName = fullName.substring(fullName.lastIndexOf('.') + 1);
    // Drop common suffixes for each named component.
    String[] names = shortName.split("\\$");
    for (int i = 0; i < names.length; i++) {
      names[i] = simplifyNameComponent(names[i]);
    }
    shortName = Joiner.on('$').join(names);
    if (dropOuterClassNames) {
      // Simplify inner class name by dropping outer class prefixes.
      Matcher m = NAMED_INNER_CLASS.matcher(shortName);
      if (m.matches()) {
        shortName = m.group("INNER");
      }
    } else {
      // Dropping anonymous outer classes
      shortName = shortName.replaceAll(ANONYMOUS_CLASS_REGEX, ".");
      shortName = shortName.replaceAll("\\$", ".");
    }
    return shortName;
  }

  /** Strips the first matching standard suffix, unless that would leave an empty name. */
  private static String simplifyNameComponent(String name) {
    for (String suffix : STANDARD_NAME_SUFFIXES) {
      if (name.endsWith(suffix) && name.length() > suffix.length()) {
        return name.substring(0, name.length() - suffix.length());
      }
    }
    return name;
  }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.theme;
import org.jboss.logging.Logger;
import org.keycloak.Config;
import org.keycloak.common.Version;
import org.keycloak.models.KeycloakSession;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Locale;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class ExtendingThemeManager implements ThemeProvider {
    private static final Logger log = Logger.getLogger(ExtendingThemeManager.class);
    private final KeycloakSession session;
    // Shared cache of fully-resolved themes keyed by (name, type); null disables caching.
    private final ConcurrentHashMap<ExtendingThemeManagerFactory.ThemeKey, Theme> themeCache;
    // Delegate providers, lazily discovered and sorted in getProviders().
    private List<ThemeProvider> providers;
    // Theme name used when getTheme() is called with a null name.
    private String defaultTheme;
    /**
     * @param session active Keycloak session used to discover theme providers
     * @param themeCache shared theme cache, or {@code null} to disable caching
     */
    public ExtendingThemeManager(KeycloakSession session, ConcurrentHashMap<ExtendingThemeManagerFactory.ThemeKey, Theme> themeCache) {
        this.session = session;
        this.themeCache = themeCache;
        // Falls back to the (lower-cased) product name when no default theme is configured.
        this.defaultTheme = Config.scope("theme").get("default", Version.NAME.toLowerCase());
    }
private List<ThemeProvider> getProviders() {
if (providers == null) {
providers = new LinkedList();
for (ThemeProvider p : session.getAllProviders(ThemeProvider.class)) {
if (!(p instanceof ExtendingThemeManager)) {
if (!p.getClass().equals(ExtendingThemeManager.class)) {
providers.add(p);
}
}
}
Collections.sort(providers, new Comparator<ThemeProvider>() {
@Override
public int compare(ThemeProvider o1, ThemeProvider o2) {
return o2.getProviderPriority() - o1.getProviderPriority();
}
});
}
return providers;
}
    /**
     * Priority of this aggregating provider within the provider ordering.
     */
    @Override
    public int getProviderPriority() {
        return 0;
    }
@Override
public Theme getTheme(String name, Theme.Type type) throws IOException {
if (name == null) {
name = defaultTheme;
}
if (themeCache != null) {
ExtendingThemeManagerFactory.ThemeKey key = ExtendingThemeManagerFactory.ThemeKey.get(name, type);
Theme theme = themeCache.get(key);
if (theme == null) {
theme = loadTheme(name, type);
if (theme == null) {
theme = loadTheme("keycloak", type);
if (theme == null) {
theme = loadTheme("base", type);
}
log.errorv("Failed to find {0} theme {1}, using built-in themes", type, name);
} else if (themeCache.putIfAbsent(key, theme) != null) {
theme = themeCache.get(key);
}
}
return theme;
} else {
return loadTheme(name, type);
}
}
    /**
     * Loads a theme and, when it declares an import or a parent, flattens the whole
     * inheritance chain (child first) into a single {@link ExtendingTheme}.
     *
     * @return the resolved theme, or {@code null} if no provider has it
     */
    private Theme loadTheme(String name, Theme.Type type) throws IOException {
        Theme theme = findTheme(name, type);
        if (theme != null && (theme.getParentName() != null || theme.getImportName() != null)) {
            List<Theme> themes = new LinkedList<>();
            themes.add(theme);
            if (theme.getImportName() != null) {
                // Import is declared as "TYPE/name".
                String[] s = theme.getImportName().split("/");
                themes.add(findTheme(s[1], Theme.Type.valueOf(s[0].toUpperCase())));
            }
            if (theme.getParentName() != null) {
                // Walk the parent chain, collecting each ancestor (and its imports).
                // NOTE(review): findTheme may return null if a declared parent is missing,
                // which would NPE on theme.getImportName()/getParentName() below — confirm
                // parents are validated upstream. There is also no cycle detection; a theme
                // that (transitively) names itself as parent would loop forever.
                for (String parentName = theme.getParentName(); parentName != null; parentName = theme.getParentName()) {
                    theme = findTheme(parentName, type);
                    themes.add(theme);
                    if (theme.getImportName() != null) {
                        String[] s = theme.getImportName().split("/");
                        themes.add(findTheme(s[1], Theme.Type.valueOf(s[0].toUpperCase())));
                    }
                }
            }
            return new ExtendingTheme(themes);
        } else {
            return theme;
        }
    }
@Override
public Set<String> nameSet(Theme.Type type) {
Set<String> themes = new HashSet<String>();
for (ThemeProvider p : getProviders()) {
themes.addAll(p.nameSet(type));
}
return themes;
}
@Override
public boolean hasTheme(String name, Theme.Type type) {
for (ThemeProvider p : getProviders()) {
if (p.hasTheme(name, type)) {
return true;
}
}
return false;
}
    @Override
    public void close() {
        // Drop the cached provider list; it is lazily rebuilt on next use.
        providers = null;
    }
private Theme findTheme(String name, Theme.Type type) {
for (ThemeProvider p : getProviders()) {
if (p.hasTheme(name, type)) {
try {
return p.getTheme(name, type);
} catch (IOException e) {
log.errorv(e, p.getClass() + " failed to load theme, type={0}, name={1}", type, name);
}
}
}
return null;
}
    /**
     * A theme composed of a chain of themes; lookups fall through the chain in order
     * until a match is found.
     */
    public static class ExtendingTheme implements Theme {
        // Resolution order: index 0 is the requested theme, ancestors/imports follow.
        private List<Theme> themes;
        private Properties properties;
        // Cached message bundles: bundle name -> locale -> merged properties.
        private ConcurrentHashMap<String, ConcurrentHashMap<Locale, Properties>> messages = new ConcurrentHashMap<>();
        public ExtendingTheme(List<Theme> themes) {
            this.themes = themes;
        }
@Override
public String getName() {
return themes.get(0).getName();
}
@Override
public String getParentName() {
return themes.get(0).getParentName();
}
@Override
public String getImportName() {
return themes.get(0).getImportName();
}
@Override
public Type getType() {
return themes.get(0).getType();
}
@Override
public URL getTemplate(String name) throws IOException {
for (Theme t : themes) {
URL template = t.getTemplate(name);
if (template != null) {
return template;
}
}
return null;
}
@Override
public InputStream getTemplateAsStream(String name) throws IOException {
for (Theme t : themes) {
InputStream template = t.getTemplateAsStream(name);
if (template != null) {
return template;
}
}
return null;
}
@Override
public URL getResource(String path) throws IOException {
for (Theme t : themes) {
URL resource = t.getResource(path);
if (resource != null) {
return resource;
}
}
return null;
}
@Override
public InputStream getResourceAsStream(String path) throws IOException {
for (Theme t : themes) {
InputStream resource = t.getResourceAsStream(path);
if (resource != null) {
return resource;
}
}
return null;
}
@Override
public Properties getMessages(Locale locale) throws IOException {
return getMessages("messages", locale);
}
@Override
public Properties getMessages(String baseBundlename, Locale locale) throws IOException {
if (messages.get(baseBundlename) == null || messages.get(baseBundlename).get(locale) == null) {
Properties messages = new Properties();
if (!Locale.ENGLISH.equals(locale)) {
messages.putAll(getMessages(baseBundlename, Locale.ENGLISH));
}
ListIterator<Theme> itr = themes.listIterator(themes.size());
while (itr.hasPrevious()) {
Properties m = itr.previous().getMessages(baseBundlename, locale);
if (m != null) {
messages.putAll(m);
}
}
this.messages.putIfAbsent(baseBundlename, new ConcurrentHashMap<Locale, Properties>());
this.messages.get(baseBundlename).putIfAbsent(locale, messages);
return messages;
} else {
return messages.get(baseBundlename).get(locale);
}
}
@Override
public Properties getProperties() throws IOException {
if (properties == null) {
Properties properties = new Properties();
ListIterator<Theme> itr = themes.listIterator(themes.size());
while (itr.hasPrevious()) {
Properties p = itr.previous().getProperties();
if (p != null) {
properties.putAll(p);
}
}
this.properties = properties;
return properties;
} else {
return properties;
}
}
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.rebase;
import com.google.common.annotations.VisibleForTesting;
import com.intellij.dvcs.DvcsUtil;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationListener;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.VcsNotifier;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.Function;
import com.intellij.util.ThreeState;
import com.intellij.util.containers.MultiMap;
import git4idea.GitUtil;
import git4idea.branch.GitRebaseParams;
import git4idea.commands.Git;
import git4idea.commands.GitCommandResult;
import git4idea.commands.GitLineHandlerListener;
import git4idea.commands.GitUntrackedFilesOverwrittenByOperationDetector;
import git4idea.merge.GitConflictResolver;
import git4idea.rebase.GitSuccessfulRebase.SuccessType;
import git4idea.repo.GitRepository;
import git4idea.repo.GitRepositoryManager;
import git4idea.stash.GitChangesSaver;
import git4idea.util.GitFreezingProcess;
import git4idea.util.GitUntrackedFilesHelper;
import org.jetbrains.annotations.CalledInBackground;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.event.HyperlinkEvent;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static com.intellij.dvcs.DvcsUtil.getShortRepositoryName;
import static com.intellij.openapi.vfs.VfsUtilCore.toVirtualFileArray;
import static com.intellij.util.ObjectUtils.assertNotNull;
import static com.intellij.util.ObjectUtils.notNull;
import static com.intellij.util.containers.ContainerUtil.*;
import static com.intellij.util.containers.ContainerUtilRt.newArrayList;
import static com.intellij.util.containers.ContainerUtilRt.newLinkedHashSet;
import static git4idea.GitUtil.getRootsFromRepositories;
import static java.util.Collections.singleton;
/**
 * Drives a (possibly multi-root) {@code git rebase} operation described by a {@link GitRebaseSpec}:
 * saves local changes beforehand, runs {@code git rebase} (or {@code --continue}/{@code --skip})
 * per repository, reacts to conflicts, dirty trees, untracked-file collisions and fatal errors,
 * and shows the resulting notifications with retry/abort/resolve links.
 */
public class GitRebaseProcess {
  private static final Logger LOG = Logger.getInstance(GitRebaseProcess.class);
  @NotNull private final Project myProject;
  @NotNull private final Git myGit;
  @NotNull private final ChangeListManager myChangeListManager;
  @NotNull private final VcsNotifier myNotifier;
  @NotNull private final GitRepositoryManager myRepositoryManager;
  // Full description of the rebase being run: parameters, per-root statuses, the saver.
  @NotNull private final GitRebaseSpec myRebaseSpec;
  // Non-null when the user explicitly chose how to resume (continue vs. skip) an ongoing rebase.
  @Nullable private final GitRebaseResumeMode myCustomMode;
  // Saver used to stash/shelve local changes before rebasing and restore them afterwards.
  @NotNull private final GitChangesSaver mySaver;
  @NotNull private final ProgressManager myProgressManager;
  public GitRebaseProcess(@NotNull Project project, @NotNull GitRebaseSpec rebaseSpec, @Nullable GitRebaseResumeMode customMode) {
    myProject = project;
    myRebaseSpec = rebaseSpec;
    myCustomMode = customMode;
    mySaver = rebaseSpec.getSaver();
    myGit = ServiceManager.getService(Git.class);
    myChangeListManager = ChangeListManager.getInstance(myProject);
    myNotifier = VcsNotifier.getInstance(myProject);
    myRepositoryManager = GitUtil.getRepositoryManager(myProject);
    myProgressManager = ProgressManager.getInstance();
  }
  /**
   * Entry point: runs the rebase inside a "freezing" process so other VCS operations
   * don't interfere while the working trees are being rewritten.
   */
  public void rebase() {
    new GitFreezingProcess(myProject, "rebase", new Runnable() {
      public void run() {
        doRebase();
      }
    }).execute();
  }
  /**
   * Given a GitRebaseSpec this method either starts, or continues the ongoing rebase in multiple repositories.
   * <ul>
   * <li>It does nothing with "already successfully rebased repositories" (the ones which have {@link GitRebaseStatus} == SUCCESSFUL,
   * and just remembers them to use in the resulting notification.</li>
   * <li>If there is a repository with rebase in progress, it calls `git rebase --continue` (or `--skip`).
   * It is assumed that there is only one such repository.</li>
   * <li>For all remaining repositories rebase on which didn't start yet, it calls {@code git rebase <original parameters>}</li>
   * </ul>
   */
  private void doRebase() {
    LOG.info("Started rebase");
    LOG.debug("Started rebase with the following spec: " + myRebaseSpec);
    Map<GitRepository, GitRebaseStatus> statuses = newLinkedHashMap(myRebaseSpec.getStatuses());
    Collection<GitRepository> toRefresh = newLinkedHashSet();
    List<GitRepository> repositoriesToRebase = myRebaseSpec.getIncompleteRepositories();
    AccessToken token = DvcsUtil.workingTreeChangeStarted(myProject);
    try {
      if (!saveDirtyRootsInitially(repositoriesToRebase)) return;
      GitRepository failed = null;
      for (GitRepository repository : repositoriesToRebase) {
        // For the root with the ongoing rebase, resume instead of starting anew.
        GitRebaseResumeMode customMode = null;
        if (repository == myRebaseSpec.getOngoingRebase()) {
          customMode = myCustomMode == null ? GitRebaseResumeMode.CONTINUE : myCustomMode;
        }
        GitRebaseStatus rebaseStatus = rebaseSingleRoot(repository, customMode, getSuccessfulRepositories(statuses));
        repository.update(); // make the repo state info actual ASAP
        statuses.put(repository, rebaseStatus);
        if (shouldBeRefreshed(rebaseStatus)) {
          toRefresh.add(repository);
        }
        // Stop at the first failed/suspended root; remaining roots stay incomplete in the spec.
        if (rebaseStatus.getType() != GitRebaseStatus.Type.SUCCESS) {
          failed = repository;
          break;
        }
      }
      if (failed == null) {
        LOG.debug("Rebase completed successfully.");
        // Restore the local changes saved before the rebase started.
        mySaver.load();
      }
      refresh(toRefresh);
      if (failed == null) {
        notifySuccess(getSuccessfulRepositories(statuses), getSkippedCommits(statuses));
      }
      saveUpdatedSpec(statuses);
    }
    catch (ProcessCanceledException pce) {
      throw pce;
    }
    catch(Throwable e) {
      // On unexpected errors, forget the ongoing-rebase state so the IDE isn't stuck.
      myRepositoryManager.setOngoingRebaseSpec(null);
      ExceptionUtil.rethrowUnchecked(e);
    }
    finally {
      DvcsUtil.workingTreeChangeFinished(myProject, token);
    }
  }
  /**
   * Persists the updated per-root statuses so a suspended rebase can be resumed later,
   * or clears the ongoing-rebase state if nothing is left to resume.
   */
  private void saveUpdatedSpec(@NotNull Map<GitRepository, GitRebaseStatus> statuses) {
    if (myRebaseSpec.shouldBeSaved()) {
      GitRebaseSpec newRebaseInfo = myRebaseSpec.cloneWithNewStatuses(statuses);
      myRepositoryManager.setOngoingRebaseSpec(newRebaseInfo);
    }
    else {
      myRepositoryManager.setOngoingRebaseSpec(null);
    }
  }
  /**
   * Rebases one repository, retrying in a loop on recoverable situations
   * (dirty tree -> stash-on-demand, "no changes" -> {@code --skip},
   * resolved conflicts -> {@code --continue}) until the rebase finishes, suspends or fails.
   *
   * @param customMode     null to start a fresh rebase, otherwise how to resume the ongoing one
   * @param alreadyRebased roots already rebased successfully, mentioned in error notifications
   */
  @NotNull
  private GitRebaseStatus rebaseSingleRoot(@NotNull GitRepository repository,
                                           @Nullable GitRebaseResumeMode customMode,
                                           @NotNull Map<GitRepository, GitSuccessfulRebase> alreadyRebased) {
    VirtualFile root = repository.getRoot();
    String repoName = getShortRepositoryName(repository);
    LOG.info("Rebasing root " + repoName + ", mode: " + notNull(customMode, "standard"));
    Collection<GitRebaseUtils.CommitInfo> skippedCommits = newArrayList();
    MultiMap<GitRepository, GitRebaseUtils.CommitInfo> allSkippedCommits = getSkippedCommits(alreadyRebased);
    boolean retryWhenDirty = false;
    while (true) {
      GitRebaseProblemDetector rebaseDetector = new GitRebaseProblemDetector();
      GitUntrackedFilesOverwrittenByOperationDetector untrackedDetector = new GitUntrackedFilesOverwrittenByOperationDetector(root);
      GitRebaseLineListener progressListener = new GitRebaseLineListener();
      GitCommandResult result = callRebase(repository, customMode, rebaseDetector, untrackedDetector, progressListener);
      // Resuming, or having applied at least one commit, means HEAD has already moved.
      boolean somethingRebased = customMode != null || progressListener.getResult().current > 1;
      if (result.success()) {
        if (rebaseDetector.hasStoppedForEditing()) {
          showStoppedForEditingMessage(repository);
          return new GitRebaseStatus(GitRebaseStatus.Type.SUSPENDED, skippedCommits);
        }
        LOG.debug("Successfully rebased " + repoName);
        return GitSuccessfulRebase.parseFromOutput(result.getOutput(), skippedCommits);
      }
      else if (result.cancelled()) {
        LOG.info("Rebase was cancelled");
        throw new ProcessCanceledException();
      }
      else if (rebaseDetector.isDirtyTree() && customMode == null && !retryWhenDirty) {
        // if the initial dirty tree check doesn't find all local changes, we are still ready to stash-on-demand,
        // but only once per repository (if the error happens again, that means that the previous stash attempt failed for some reason),
        // and not in the case of --continue (where all local changes are expected to be committed) or --skip.
        LOG.debug("Dirty tree detected in " + repoName);
        String saveError = saveLocalChanges(singleton(repository.getRoot()));
        if (saveError == null) {
          retryWhenDirty = true; // try same repository again
        }
        else {
          LOG.warn("Couldn't " + mySaver.getOperationName() + " root " + repository.getRoot() + ": " + saveError);
          showFatalError(saveError, repository, somethingRebased, alreadyRebased.keySet(), allSkippedCommits);
          GitRebaseStatus.Type type = somethingRebased ? GitRebaseStatus.Type.SUSPENDED : GitRebaseStatus.Type.ERROR;
          return new GitRebaseStatus(type, skippedCommits);
        }
      }
      else if (untrackedDetector.wasMessageDetected()) {
        LOG.info("Untracked files detected in " + repoName);
        showUntrackedFilesError(untrackedDetector.getRelativeFilePaths(), repository, somethingRebased, alreadyRebased.keySet(),
                                allSkippedCommits);
        GitRebaseStatus.Type type = somethingRebased ? GitRebaseStatus.Type.SUSPENDED : GitRebaseStatus.Type.ERROR;
        return new GitRebaseStatus(type, skippedCommits);
      }
      else if (rebaseDetector.isNoChangeError()) {
        // The commit being applied is already upstream: remember it as skipped and retry with --skip.
        LOG.info("'No changes' situation detected in " + repoName);
        GitRebaseUtils.CommitInfo currentRebaseCommit = GitRebaseUtils.getCurrentRebaseCommit(myProject, root);
        if (currentRebaseCommit != null) skippedCommits.add(currentRebaseCommit);
        customMode = GitRebaseResumeMode.SKIP;
      }
      else if (rebaseDetector.isMergeConflict()) {
        LOG.info("Merge conflict in " + repoName);
        ResolveConflictResult resolveResult = showConflictResolver(repository, false);
        if (resolveResult == ResolveConflictResult.ALL_RESOLVED) {
          customMode = GitRebaseResumeMode.CONTINUE;
        }
        else if (resolveResult == ResolveConflictResult.NOTHING_TO_MERGE) {
          // the output is the same for the cases:
          // (1) "unresolved conflicts"
          // (2) "manual editing of a file not followed by `git add`
          // => we check if there are any unresolved conflicts, and if not, then it is the case #2 which we are not handling
          LOG.info("Unmerged changes while rebasing root " + repoName + ": " + result.getErrorOutputAsJoinedString());
          showFatalError(result.getErrorOutputAsHtmlString(), repository, somethingRebased, alreadyRebased.keySet(), allSkippedCommits);
          GitRebaseStatus.Type type = somethingRebased ? GitRebaseStatus.Type.SUSPENDED : GitRebaseStatus.Type.ERROR;
          return new GitRebaseStatus(type, skippedCommits);
        }
        else {
          notifyNotAllConflictsResolved(repository, allSkippedCommits);
          return new GitRebaseStatus(GitRebaseStatus.Type.SUSPENDED, skippedCommits);
        }
      }
      else {
        LOG.info("Error rebasing root " + repoName + ": " + result.getErrorOutputAsJoinedString());
        showFatalError(result.getErrorOutputAsHtmlString(), repository, somethingRebased, alreadyRebased.keySet(), allSkippedCommits);
        GitRebaseStatus.Type type = somethingRebased ? GitRebaseStatus.Type.SUSPENDED : GitRebaseStatus.Type.ERROR;
        return new GitRebaseStatus(type, skippedCommits);
      }
    }
  }
  /** Dispatches to {@code git rebase}, {@code git rebase --skip} or {@code git rebase --continue}. */
  @NotNull
  private GitCommandResult callRebase(@NotNull GitRepository repository,
                                      @Nullable GitRebaseResumeMode mode,
                                      @NotNull GitLineHandlerListener... listeners) {
    if (mode == null) {
      GitRebaseParams params = assertNotNull(myRebaseSpec.getParams());
      return myGit.rebase(repository, params, listeners);
    }
    else if (mode == GitRebaseResumeMode.SKIP) {
      return myGit.rebaseSkip(repository, listeners);
    }
    else {
      LOG.assertTrue(mode == GitRebaseResumeMode.CONTINUE, "Unexpected rebase mode: " + mode);
      return myGit.rebaseContinue(repository, listeners);
    }
  }
  /** Overridable for tests: returns the repositories that have local changes. */
  @VisibleForTesting
  @NotNull
  protected Collection<GitRepository> getDirtyRoots(@NotNull Collection<GitRepository> repositories) {
    return findRootsWithLocalChanges(repositories);
  }
  // No VFS refresh needed when the rebase succeeded without touching anything (up-to-date).
  private static boolean shouldBeRefreshed(@NotNull GitRebaseStatus rebaseStatus) {
    return rebaseStatus.getType() != GitRebaseStatus.Type.SUCCESS ||
           ((GitSuccessfulRebase)rebaseStatus).getSuccessType() != SuccessType.UP_TO_DATE;
  }
  private static void refresh(@NotNull Collection<GitRepository> repositories) {
    GitUtil.updateRepositories(repositories);
    // TODO use --diff-stat, and refresh only what's needed
    VfsUtil.markDirtyAndRefresh(false, true, false, toVirtualFileArray(getRootsFromRepositories(repositories)));
  }
  /**
   * Saves local changes in all dirty roots before the rebase starts.
   *
   * @return true on success; false if saving failed (an error notification is shown and rebase must not start)
   */
  private boolean saveDirtyRootsInitially(@NotNull List<GitRepository> repositories) {
    Collection<GitRepository> repositoriesToSave = filter(repositories, new Condition<GitRepository>() {
      @Override
      public boolean value(GitRepository repository) {
        return !repository.equals(myRebaseSpec.getOngoingRebase()); // no need to save anything when --continue/--skip is to be called
      }
    });
    if (repositoriesToSave.isEmpty()) return true;
    Collection<VirtualFile> rootsToSave = getRootsFromRepositories(getDirtyRoots(repositoriesToSave));
    String error = saveLocalChanges(rootsToSave);
    if (error != null) {
      myNotifier.notifyError("Rebase not Started", error);
      return false;
    }
    return true;
  }
  /** @return null on success, otherwise an HTML error message explaining why saving failed */
  @Nullable
  private String saveLocalChanges(@NotNull Collection<VirtualFile> rootsToSave) {
    try {
      mySaver.saveLocalChanges(rootsToSave);
      return null;
    }
    catch (VcsException e) {
      LOG.warn(e);
      return "Couldn't " + mySaver.getSaverName() + " local uncommitted changes:<br/>" + e.getMessage();
    }
  }
  private Collection<GitRepository> findRootsWithLocalChanges(@NotNull Collection<GitRepository> repositories) {
    return filter(repositories, new Condition<GitRepository>() {
      @Override
      public boolean value(GitRepository repository) {
        // ThreeState.UNSURE counts as dirty: better to save needlessly than to fail the rebase.
        return myChangeListManager.haveChangesUnder(repository.getRoot()) != ThreeState.NO;
      }
    });
  }
  /**
   * Shows the success notification, summarizing what was rebased onto what,
   * and listing any commits skipped along the way (with clickable links).
   */
  private void notifySuccess(@NotNull Map<GitRepository, GitSuccessfulRebase> successful,
                             final MultiMap<GitRepository, GitRebaseUtils.CommitInfo> skippedCommits) {
    String rebasedBranch = getCommonCurrentBranchNameIfAllTheSame(myRebaseSpec.getAllRepositories());
    List<SuccessType> successTypes = map(successful.values(),
                                         new Function<GitSuccessfulRebase, SuccessType>() {
                                           @Override
                                           public SuccessType fun(@NotNull GitSuccessfulRebase rebase) {
                                             return rebase.getSuccessType();
                                           }
                                         });
    SuccessType commonType = getItemIfAllTheSame(successTypes, SuccessType.REBASED);
    GitRebaseParams params = myRebaseSpec.getParams();
    String baseBranch = params == null ? null : notNull(params.getNewBase(), params.getUpstream());
    if ("HEAD".equals(baseBranch)) {
      // "HEAD" is not informative: show the actual initial branch name if it was the same everywhere.
      baseBranch = getItemIfAllTheSame(myRebaseSpec.getInitialBranchNames().values(), baseBranch);
    }
    String message = commonType.formatMessage(rebasedBranch, baseBranch, params != null && params.getBranch() != null);
    message += mentionSkippedCommits(skippedCommits);
    myNotifier.notifyMinorInfo("Rebase Successful", message, new NotificationListener.Adapter() {
      @Override
      protected void hyperlinkActivated(@NotNull Notification notification, @NotNull HyperlinkEvent e) {
        handlePossibleCommitLinks(e.getDescription(), skippedCommits);
      }
    });
  }
  @Nullable
  private static String getCommonCurrentBranchNameIfAllTheSame(@NotNull Collection<GitRepository> repositories) {
    return getItemIfAllTheSame(map(repositories, new Function<GitRepository, String>() {
      @Override
      public String fun(@NotNull GitRepository repository) {
        return repository.getCurrentBranchName();
      }
    }), null);
  }
  /** @return the single distinct item of the collection, or {@code defaultItem} if items differ */
  @Contract("_, !null -> !null")
  private static <T> T getItemIfAllTheSame(@NotNull Collection<T> collection, @Nullable T defaultItem) {
    return newHashSet(collection).size() == 1 ? getFirstItem(collection) : defaultItem;
  }
  private void notifyNotAllConflictsResolved(@NotNull GitRepository conflictingRepository,
                                             MultiMap<GitRepository, GitRebaseUtils.CommitInfo> skippedCommits) {
    String description = "You have to <a href='resolve'>resolve</a> the conflicts and <a href='continue'>continue</a> rebase.<br/>" +
                         "If you want to start from the beginning, you can <a href='abort'>abort</a> rebase.";
    description += GitRebaseUtils.mentionLocalChangesRemainingInStash(mySaver);
    myNotifier.notifyImportantWarning("Rebase Suspended", description,
                                      new RebaseNotificationListener(conflictingRepository, skippedCommits));
  }
  /** Opens the merge dialog for the conflicting root and reports how the session ended. */
  @NotNull
  private ResolveConflictResult showConflictResolver(@NotNull GitRepository conflicting, boolean calledFromNotification) {
    GitConflictResolver.Params params = new GitConflictResolver.Params().setReverse(true);
    RebaseConflictResolver conflictResolver = new RebaseConflictResolver(myProject, myGit, conflicting, params, calledFromNotification);
    boolean allResolved = conflictResolver.merge();
    if (conflictResolver.myWasNothingToMerge) return ResolveConflictResult.NOTHING_TO_MERGE;
    if (allResolved) return ResolveConflictResult.ALL_RESOLVED;
    return ResolveConflictResult.UNRESOLVED_REMAIN;
  }
  private void showStoppedForEditingMessage(@NotNull GitRepository repository) {
    String description = "Once you are satisfied with your changes you may <a href='continue'>continue</a>";
    myNotifier.notifyImportantInfo("Rebase Stopped for Editing", description,
                                   new RebaseNotificationListener(repository, MultiMap.<GitRepository, GitRebaseUtils.CommitInfo>empty()));
  }
  private void showFatalError(@NotNull final String error,
                              @NotNull final GitRepository currentRepository,
                              boolean somethingWasRebased,
                              @NotNull final Collection<GitRepository> successful,
                              @NotNull MultiMap<GitRepository, GitRebaseUtils.CommitInfo> skippedCommits) {
    // Prefix with the root name only when it is ambiguous (multi-root project).
    String repo = myRepositoryManager.moreThanOneRoot() ? getShortRepositoryName(currentRepository) + ": " : "";
    String description = repo + error + "<br/>" +
                         mentionRetryAndAbort(somethingWasRebased, successful) +
                         mentionSkippedCommits(skippedCommits) +
                         GitRebaseUtils.mentionLocalChangesRemainingInStash(mySaver);
    String title = myRebaseSpec.getOngoingRebase() == null ? "Rebase Failed" : "Continue Rebase Failed";
    myNotifier.notifyError(title, description, new RebaseNotificationListener(currentRepository, skippedCommits));
  }
  private void showUntrackedFilesError(@NotNull Set<String> untrackedPaths,
                                       @NotNull GitRepository currentRepository,
                                       boolean somethingWasRebased,
                                       @NotNull Collection<GitRepository> successful,
                                       MultiMap<GitRepository, GitRebaseUtils.CommitInfo> skippedCommits) {
    String message = GitUntrackedFilesHelper.createUntrackedFilesOverwrittenDescription("rebase", true) +
                     mentionRetryAndAbort(somethingWasRebased, successful) +
                     mentionSkippedCommits(skippedCommits) +
                     GitRebaseUtils.mentionLocalChangesRemainingInStash(mySaver);
    GitUntrackedFilesHelper.notifyUntrackedFilesOverwrittenBy(myProject, currentRepository.getRoot(), untrackedPaths, "rebase", message);
  }
  @NotNull
  private static String mentionRetryAndAbort(boolean somethingWasRebased, @NotNull Collection<GitRepository> successful) {
    return somethingWasRebased || !successful.isEmpty()
           ? "You can <a href='retry'>retry</a> or <a href='abort'>abort</a> rebase."
           : "<a href='retry'>Retry.</a>";
  }
  /** Formats the list of skipped commits as HTML links (hash href -> commit details popup). */
  @NotNull
  private static String mentionSkippedCommits(@NotNull MultiMap<GitRepository, GitRebaseUtils.CommitInfo> skippedCommits) {
    if (skippedCommits.isEmpty()) return "";
    String message = "<br/>";
    if (skippedCommits.values().size() == 1) {
      message += "The following commit was skipped during rebase:<br/>";
    }
    else {
      message += "The following commits were skipped during rebase:<br/>";
    }
    message += StringUtil.join(skippedCommits.values(), new Function<GitRebaseUtils.CommitInfo, String>() {
      @Override
      public String fun(@NotNull GitRebaseUtils.CommitInfo commitInfo) {
        String commitMessage = StringUtil.shortenPathWithEllipsis(commitInfo.subject, 72, true);
        String hash = commitInfo.revision.asString();
        String shortHash = DvcsUtil.getShortHash(commitInfo.revision.asString());
        return String.format("<a href='%s'>%s</a> %s", hash, shortHash, commitMessage);
      }
    }, "<br/>");
    return message;
  }
  @NotNull
  private static MultiMap<GitRepository, GitRebaseUtils.CommitInfo> getSkippedCommits(@NotNull Map<GitRepository, ? extends GitRebaseStatus> statuses) {
    MultiMap<GitRepository, GitRebaseUtils.CommitInfo> map = MultiMap.create();
    for (GitRepository repository : statuses.keySet()) {
      map.put(repository, statuses.get(repository).getSkippedCommits());
    }
    return map;
  }
  /** Filters the statuses map down to the roots whose rebase already succeeded. */
  @NotNull
  private static Map<GitRepository, GitSuccessfulRebase> getSuccessfulRepositories(@NotNull Map<GitRepository, GitRebaseStatus> statuses) {
    Map<GitRepository, GitSuccessfulRebase> map = newLinkedHashMap();
    for (GitRepository repository : statuses.keySet()) {
      GitRebaseStatus status = statuses.get(repository);
      if (status instanceof GitSuccessfulRebase) map.put(repository, (GitSuccessfulRebase)status);
    }
    return map;
  }
  /**
   * Conflict resolver which, when invoked from a notification link, automatically
   * continues the rebase once all conflicts are merged.
   */
  private class RebaseConflictResolver extends GitConflictResolver {
    private final boolean myCalledFromNotification;
    // Set when merge() found no unmerged files, i.e. the "conflict" was something else.
    private boolean myWasNothingToMerge;
    RebaseConflictResolver(@NotNull Project project,
                           @NotNull Git git,
                           @NotNull GitRepository repository,
                           @NotNull Params params, boolean calledFromNotification) {
      super(project, git, singleton(repository.getRoot()), params);
      myCalledFromNotification = calledFromNotification;
    }
    @Override
    protected void notifyUnresolvedRemain() {
      // will be handled in the common notification
    }
    @CalledInBackground
    @Override
    protected boolean proceedAfterAllMerged() throws VcsException {
      if (myCalledFromNotification) {
        retry(GitRebaseUtils.CONTINUE_PROGRESS_TITLE);
      }
      return true;
    }
    @Override
    protected boolean proceedIfNothingToMerge() throws VcsException {
      myWasNothingToMerge = true;
      return true;
    }
  }
  /** Outcome of a conflict-resolving session. */
  private enum ResolveConflictResult {
    ALL_RESOLVED,
    NOTHING_TO_MERGE,
    UNRESOLVED_REMAIN
  }
  /** Handles the abort/continue/retry/resolve/stash/commit-hash links in rebase notifications. */
  private class RebaseNotificationListener extends NotificationListener.Adapter {
    @NotNull private final GitRepository myCurrentRepository;
    @NotNull private final MultiMap<GitRepository, GitRebaseUtils.CommitInfo> mySkippedCommits;
    RebaseNotificationListener(@NotNull GitRepository currentRepository,
                               @NotNull MultiMap<GitRepository, GitRebaseUtils.CommitInfo> skippedCommits) {
      myCurrentRepository = currentRepository;
      mySkippedCommits = skippedCommits;
    }
    @Override
    protected void hyperlinkActivated(@NotNull Notification notification, @NotNull final HyperlinkEvent e) {
      final String href = e.getDescription();
      if ("abort".equals(href)) {
        abort();
      }
      else if ("continue".equals(href)) {
        retry(GitRebaseUtils.CONTINUE_PROGRESS_TITLE);
      }
      else if ("retry".equals(href)) {
        retry("Retry Rebase Process...");
      }
      else if ("resolve".equals(href)) {
        showConflictResolver(myCurrentRepository, true);
      }
      else if ("stash".equals(href)) {
        mySaver.showSavedChanges();
      }
      else {
        // Any other href is assumed to be a skipped-commit hash.
        handlePossibleCommitLinks(href, mySkippedCommits);
      }
    }
  }
  private void abort() {
    myProgressManager.run(new Task.Backgroundable(myProject, "Aborting Rebase Process...") {
      @Override
      public void run(@NotNull ProgressIndicator indicator) {
        GitRebaseUtils.abort(myProject, indicator);
      }
    });
  }
  private void retry(@NotNull final String processTitle) {
    myProgressManager.run(new Task.Backgroundable(myProject, processTitle, true) {
      @Override
      public void run(@NotNull ProgressIndicator indicator) {
        GitRebaseUtils.continueRebase(myProject);
      }
    });
  }
  /** If the href matches a skipped commit's hash, shows that commit's changed files. */
  private void handlePossibleCommitLinks(@NotNull String href, MultiMap<GitRepository, GitRebaseUtils.CommitInfo> skippedCommits) {
    GitRepository repository = findRootBySkippedCommit(href, skippedCommits);
    if (repository != null) {
      GitUtil.showSubmittedFiles(myProject, href, repository.getRoot(), true, false);
    }
  }
  @Nullable
  private static GitRepository findRootBySkippedCommit(@NotNull final String hash,
                                                       final MultiMap<GitRepository, GitRebaseUtils.CommitInfo> skippedCommits) {
    return find(skippedCommits.keySet(), new Condition<GitRepository>() {
      @Override
      public boolean value(GitRepository repository) {
        return exists(skippedCommits.get(repository), new Condition<GitRebaseUtils.CommitInfo>() {
          @Override
          public boolean value(GitRebaseUtils.CommitInfo info) {
            return info.revision.asString().equals(hash);
          }
        });
      }
    });
  }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.inventory.impl.tinkerpop;
import static org.hawkular.inventory.impl.tinkerpop.Constants.Property.__metric_data_type;
import static org.hawkular.inventory.impl.tinkerpop.Constants.Property.__structuredDataIndex;
import static org.hawkular.inventory.impl.tinkerpop.Constants.Property.__structuredDataKey;
import static org.hawkular.inventory.impl.tinkerpop.Constants.Property.__structuredDataType;
import static org.hawkular.inventory.impl.tinkerpop.Constants.Property.__structuredDataValue;
import static org.hawkular.inventory.impl.tinkerpop.Constants.Property.__unit;
import java.util.Arrays;
import org.hawkular.inventory.api.model.AbstractElement;
import org.hawkular.inventory.api.model.DataEntity;
import org.hawkular.inventory.api.model.ElementTypeVisitor;
import org.hawkular.inventory.api.model.ElementVisitor;
import org.hawkular.inventory.api.model.Environment;
import org.hawkular.inventory.api.model.Feed;
import org.hawkular.inventory.api.model.Metric;
import org.hawkular.inventory.api.model.MetricType;
import org.hawkular.inventory.api.model.OperationType;
import org.hawkular.inventory.api.model.Relationship;
import org.hawkular.inventory.api.model.Resource;
import org.hawkular.inventory.api.model.ResourceType;
import org.hawkular.inventory.api.model.StructuredData;
import org.hawkular.inventory.api.model.Tenant;
/**
* @author Lukas Krejci
* @since 0.0.1
*/
final class Constants {
/**
* The vertices in the graph have certain well-known properties.
*/
enum Property {
/**
* This is the name of the property that we use to store the type of the entity represented by the vertex
*/
__type,
/**
* This is the name of the property that we use to store the user-defined ID of the entity represented by the
* vertex. These ID are required to be unique only amongst their "siblings" as determined by the "contains"
* hierarchy.
*/
__eid,
/**
* Present on metric type, this is the name of the propety that we use to store the unit of the metric type
* represented by the vertex.
*/
__unit,
/**
* Property used on metric type to distinguish type of metric e.g. gauge, counter...
*/
__metric_data_type,
/**
* Property used to store the canonical path of an element.
*/
__cp,
/**
* The type of the data stored by the structured data vertex
*/
__structuredDataType,
/**
* The key using which a structured data value is stored in a map.
*/
__structuredDataKey,
/**
* The index on which a structured data value is stored in a list.
*/
__structuredDataIndex,
/**
* The name of the property on the structured data vertex that holds the primitive value of that vertex.
* List and maps don't hold the value directly but instead have edges going out to the child vertices.
*/
__structuredDataValue;
public static String mapUserDefined(String property) {
if (AbstractElement.ID_PROPERTY.equals(property)) {
return __eid.name();
} else {
return property;
}
}
}
/**
 * The type of entities known to Hawkular.
 */
enum Type {
    tenant(Tenant.class), environment(Environment.class), feed(Feed.class),
    resourceType(ResourceType.class), metricType(MetricType.class, __unit, __metric_data_type),
    operationType(OperationType.class), resource(Resource.class), metric(Metric.class),
    relationship(Relationship.class), dataEntity(DataEntity.class),
    structuredData(StructuredData.class, __structuredDataType, __structuredDataValue, __structuredDataIndex,
            __structuredDataKey);

    // Names of the properties explicitly mapped to entity-class fields. Always contains
    // __type, __eid and __cp in the first three slots (see the constructor below).
    private final String[] mappedProperties;
    // The API entity class this enum member represents.
    private final Class<?> entityType;

    /**
     * @param entityType the entity class represented by this member
     * @param mappedProperties type-specific mapped properties in addition to the three
     *            (__type, __eid, __cp) that are common to every entity type
     */
    Type(Class<?> entityType, Property... mappedProperties) {
        this.entityType = entityType;
        // Reserve 3 extra slots for the properties shared by all entity types.
        this.mappedProperties = new String[mappedProperties.length + 3];
        Arrays.setAll(this.mappedProperties, i -> {
            switch (i) {
                case 0:
                    return Property.__type.name();
                case 1:
                    return Property.__eid.name();
                case 2:
                    return Property.__cp.name();
                default:
                    // Remaining slots are filled from the type-specific properties.
                    return mappedProperties[i - 3].name();
            }
        });
    }

    /**
     * Determines the {@link Type} of the given entity instance via its visitor.
     *
     * @param e the element to inspect
     * @return the matching type, or {@code null} for elements the visitor does not know
     */
    public static Type of(AbstractElement<?, ?> e) {
        return e.accept(new ElementVisitor<Type, Void>() {
            @Override
            public Type visitRelationship(Relationship relationship, Void parameter) {
                return Type.relationship;
            }

            @Override
            public Type visitTenant(Tenant tenant, Void parameter) {
                return Type.tenant;
            }

            @Override
            public Type visitEnvironment(Environment environment, Void parameter) {
                return Type.environment;
            }

            @Override
            public Type visitFeed(Feed feed, Void parameter) {
                return Type.feed;
            }

            @Override
            public Type visitMetric(Metric metric, Void parameter) {
                return Type.metric;
            }

            @Override
            public Type visitMetricType(MetricType definition, Void parameter) {
                return Type.metricType;
            }

            @Override
            public Type visitResource(Resource resource, Void parameter) {
                return Type.resource;
            }

            @Override
            public Type visitResourceType(ResourceType type, Void parameter) {
                return Type.resourceType;
            }

            @Override
            public Type visitData(DataEntity data, Void parameter) {
                return Type.dataEntity;
            }

            @Override
            public Type visitOperationType(OperationType operationType, Void parameter) {
                return Type.operationType;
            }

            @Override
            public Type visitUnknown(Object entity, Void parameter) {
                // Unlike of(Class), unknown instances yield null rather than throwing.
                return null;
            }
        }, null);
    }

    /**
     * Determines the {@link Type} corresponding to the given entity class.
     *
     * @param ec the entity class
     * @return the matching type
     * @throws IllegalArgumentException if the class maps to no known type
     */
    public static Type of(Class<?> ec) {
        return ElementTypeVisitor.accept(ec, new ElementTypeVisitor<Type, Void>() {
            @Override
            public Type visitTenant(Void parameter) {
                return tenant;
            }

            @Override
            public Type visitEnvironment(Void parameter) {
                return environment;
            }

            @Override
            public Type visitFeed(Void parameter) {
                return feed;
            }

            @Override
            public Type visitMetric(Void parameter) {
                return metric;
            }

            @Override
            public Type visitMetricType(Void parameter) {
                return metricType;
            }

            @Override
            public Type visitResource(Void parameter) {
                return resource;
            }

            @Override
            public Type visitResourceType(Void parameter) {
                return resourceType;
            }

            @Override
            public Type visitRelationship(Void parameter) {
                return relationship;
            }

            @Override
            public Type visitData(Void parameter) {
                return dataEntity;
            }

            @Override
            public Type visitOperationType(Void parameter) {
                return operationType;
            }

            @Override
            public Type visitUnknown(Void parameter) {
                // StructuredData has no dedicated visit method, so it is handled here.
                if (StructuredData.class.equals(ec)) {
                    return structuredData;
                }
                throw new IllegalArgumentException("Unsupported entity class " + ec);
            }
        }, null);
    }

    /** @return the API entity class represented by this type */
    public Class<?> getEntityType() {
        return entityType;
    }

    /**
     * @return list of properties that are explicitly mapped to entity class properties.
     *         NOTE(review): this returns the internal array without a defensive copy, so
     *         callers could mutate it — confirm call sites treat it as read-only.
     */
    public String[] getMappedProperties() {
        return mappedProperties;
    }
}
// Private constructor: this is a pure constants holder and must never be instantiated.
private Constants() {
    //no instances, thank you
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.vector;
import java.util.Collections;
import java.util.Iterator;
import com.google.common.base.Preconditions;
import com.google.common.collect.ObjectArrays;
import io.netty.buffer.DrillBuf;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.exception.SchemaChangeRuntimeException;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.proto.UserBitShared;
import org.apache.drill.exec.record.MaterializedField;
/**
 * Base implementation of a repeated (list-like) value vector: an offsets vector delimits,
 * per record, a contiguous run of entries inside a single inner data vector.
 * offsets[i]..offsets[i+1] bracket the inner values of record i, so the offsets vector
 * always holds (recordCount + 1) entries once populated.
 */
public abstract class BaseRepeatedValueVector extends BaseValueVector implements RepeatedValueVector {

  /** Placeholder inner vector used until a real one is installed via {@link #addOrGetVector}. */
  public final static ValueVector DEFAULT_DATA_VECTOR = ZeroVector.INSTANCE;
  public final static String OFFSETS_VECTOR_NAME = "offsets";
  public final static String DATA_VECTOR_NAME = "data";

  // Shared field definition for every offsets vector: one required UINT4 per boundary.
  private final static MaterializedField offsetsField =
    MaterializedField.create(OFFSETS_VECTOR_NAME, Types.required(TypeProtos.MinorType.UINT4));

  /** Record boundaries; entry i is the inner-vector start index of record i. */
  protected final UInt4Vector offsets;
  /** The inner vector holding the actual values; {@link #DEFAULT_DATA_VECTOR} until set. */
  protected ValueVector vector;

  /** Creates the vector with the placeholder inner data vector. */
  protected BaseRepeatedValueVector(MaterializedField field, BufferAllocator allocator) {
    this(field, allocator, DEFAULT_DATA_VECTOR);
  }

  /**
   * Creates the vector with an explicit (non-null) inner data vector.
   *
   * @param vector the inner data vector; must not be null
   */
  protected BaseRepeatedValueVector(MaterializedField field, BufferAllocator allocator, ValueVector vector) {
    super(field, allocator);
    this.offsets = new UInt4Vector(offsetsField, allocator);
    this.vector = Preconditions.checkNotNull(vector, "data vector cannot be null");
  }

  /**
   * Allocates both child vectors; returns false (without throwing) if allocation fails.
   * The offsets vector is zeroed so that an unwritten vector reads as all-empty records.
   */
  @Override
  public boolean allocateNewSafe() {
    if (!offsets.allocateNewSafe()) {
      return false;
    }
    offsets.zeroVector();
    return vector.allocateNewSafe();
  }

  @Override
  public UInt4Vector getOffsetVector() {
    return offsets;
  }

  @Override
  public ValueVector getDataVector() {
    return vector;
  }

  /**
   * Sizes the children for the given record count. The offsets vector needs one extra
   * slot (the trailing end boundary); the inner vector is sized using the default
   * expected repetition count per record.
   */
  @Override
  public void setInitialCapacity(int numRecords) {
    offsets.setInitialCapacity(numRecords + 1);
    // NOTE(review): this product can overflow int for very large numRecords — confirm
    // upstream limits record counts before relying on it.
    vector.setInitialCapacity(numRecords * RepeatedValueVector.DEFAULT_REPEAT_PER_RECORD);
  }

  /**
   * Number of records this vector can hold, limited by whichever child is smaller.
   * One offsets slot is reserved for the trailing boundary, hence the -1.
   */
  @Override
  public int getValueCapacity() {
    final int offsetValueCapacity = offsets.getValueCapacity() - 1;
    if (vector == DEFAULT_DATA_VECTOR) {
      return offsetValueCapacity;
    }
    return Math.min(vector.getValueCapacity(), offsetValueCapacity);
  }

  /** Serialized metadata: record count, total inner-value count, and the child's metadata. */
  @Override
  protected UserBitShared.SerializedField.Builder getMetadataBuilder() {
    return super.getMetadataBuilder()
      .setGroupCount(getAccessor().getValueCount())
      .setValueCount(getAccessor().getInnerValueCount())
      .addChild(vector.getMetadata());
  }

  /** Combined byte size of both children, or 0 when the vector holds no records. */
  @Override
  public int getBufferSize() {
    if (getAccessor().getValueCount() == 0) {
      return 0;
    }
    return offsets.getBufferSize() + vector.getBufferSize();
  }

  /** Iterates over the single inner data vector (the offsets vector is not exposed here). */
  @Override
  public Iterator<ValueVector> iterator() {
    return Collections.singleton(getDataVector()).iterator();
  }

  @Override
  public void clear() {
    offsets.clear();
    vector.clear();
    super.clear();
  }

  /**
   * Returns the underlying buffers of both children, offsets first.
   * When {@code clear} is true the buffers are retained for the caller and this vector
   * is cleared, i.e. ownership of the buffers transfers to the caller.
   */
  @Override
  public DrillBuf[] getBuffers(boolean clear) {
    final DrillBuf[] buffers = ObjectArrays.concat(offsets.getBuffers(false), vector.getBuffers(false), DrillBuf.class);
    if (clear) {
      // Retain on behalf of the caller before clear() drops this vector's references.
      for (DrillBuf buffer:buffers) {
        buffer.retain();
      }
      clear();
    }
    return buffers;
  }

  /**
   * Returns 1 if inner vector is explicitly set via #addOrGetVector else 0
   *
   * @see {@link ContainerVectorLike#size}
   */
  @Override
  public int size() {
    return vector == DEFAULT_DATA_VECTOR ? 0:1;
  }

  /**
   * Lazily creates (or returns) the inner data vector for the requested type.
   * Throws {@link SchemaChangeRuntimeException} if an inner vector of a different
   * type has already been installed.
   */
  @Override
  public <T extends ValueVector> AddOrGetResult<T> addOrGetVector(VectorDescriptor descriptor) {
    boolean created = false;
    // LATE means the type is not yet known, so keep the placeholder in that case.
    if (vector == DEFAULT_DATA_VECTOR && descriptor.getType().getMinorType() != TypeProtos.MinorType.LATE) {
      final MaterializedField field = descriptor.withName(DATA_VECTOR_NAME).getField();
      vector = TypeHelper.getNewVector(field, allocator);
      // returned vector must have the same field
      assert field.equals(vector.getField());
      getField().addChild(field);
      created = true;
    }

    final TypeProtos.MajorType actual = vector.getField().getType();
    if (!actual.equals(descriptor.getType())) {
      final String msg = String.format("Inner vector type mismatch. Requested type: [%s], actual type: [%s]",
        descriptor.getType(), actual);
      throw new SchemaChangeRuntimeException(msg);
    }

    return new AddOrGetResult<>((T)vector, created);
  }

  /** Read-side base: exposes record count and per-record inner-value ranges. */
  public abstract class BaseRepeatedAccessor extends BaseValueVector.BaseAccessor implements RepeatedAccessor {

    /** Record count = offsets entries - 1 (never negative, even before population). */
    @Override
    public int getValueCount() {
      return Math.max(offsets.getAccessor().getValueCount() - 1, 0);
    }

    /** Total number of values across all records, i.e. the inner vector's count. */
    @Override
    public int getInnerValueCount() {
      return vector.getAccessor().getValueCount();
    }

    /** Number of inner values belonging to record {@code index}. */
    @Override
    public int getInnerValueCountAt(int index) {
      return offsets.getAccessor().get(index+1) - offsets.getAccessor().get(index);
    }

    /** Repeated values are never null at the record level. */
    @Override
    public boolean isNull(int index) {
      return false;
    }

    @Override
    public boolean isEmpty(int index) {
      return false;
    }
  }

  /** Write-side base: maintains the offsets entries as records are written. */
  public abstract class BaseRepeatedMutator extends BaseValueVector.BaseMutator implements RepeatedMutator {

    /** Opens record {@code index}: its end boundary starts equal to its start boundary. */
    @Override
    public void startNewValue(int index) {
      offsets.getMutator().setSafe(index+1, offsets.getAccessor().get(index));
      setValueCount(index+1);
    }

    /** Sets the record count and derives the inner vector's value count from offsets. */
    @Override
    public void setValueCount(int valueCount) {
      // TODO: populate offset end points
      offsets.getMutator().setValueCount(valueCount == 0 ? 0 : valueCount+1);
      final int childValueCount = valueCount == 0 ? 0 : offsets.getAccessor().get(valueCount);
      vector.getMutator().setValueCount(childValueCount);
    }
  }
}
| |
/* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.util.io.impl;
import java.io.IOException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.NoSuchElementException;
import net.sf.mmm.util.exception.api.NlsIllegalArgumentException;
import net.sf.mmm.util.io.api.BufferExceedException;
import net.sf.mmm.util.io.api.ByteArray;
import net.sf.mmm.util.io.api.spi.DetectorStreamBuffer;
import net.sf.mmm.util.io.api.spi.DetectorStreamProcessor;
import net.sf.mmm.util.io.base.AbstractByteArray;
import net.sf.mmm.util.io.base.ByteArrayImpl;
import net.sf.mmm.util.pool.api.ByteArrayPool;
/**
* This is the implementation of the {@link DetectorStreamBuffer} interface. <br>
* It is based on the idea that each {@link DetectorStreamProcessor} in the chain has its own
* {@link DetectorStreamBuffer} instance. Therefore it holds the according {@link DetectorStreamProcessor} building a
* pair of buffer+processor. Further it holds an instance of the predecessor and thereby represents the chain itself.
*
* @author Joerg Hohwiller (hohwille at users.sourceforge.net)
*/
public class DetectorStreamBufferImpl implements DetectorStreamBuffer {
/** The actual processor served by this buffer. */
private DetectorStreamProcessor processor;
/** The successor in the chain or {@code null} if this is the last. */
private DetectorStreamBufferImpl chainSuccessor;
private long streamPosition;
/** @see #seek(long, SeekMode) */
private long seekCount;
/** @see #seek(long, SeekMode) */
private SeekMode seekMode;
private ByteArray currentByteArray;
/** The current {@link ByteArray} to work on. */
private byte[] currentArray;
/** The start-index in {@link #currentArray}. */
private int currentArrayMin;
/** The {@link ByteArray#getCurrentIndex() index} in {@link #currentArray}. */
private int currentArrayIndex;
/**
* The {@link ByteArray#getMaximumIndex() maximum index} in {@link #currentArray}.
*/
private int currentArrayMax;
/**
* The {@link java.util.Queue} of available {@link ByteArray}s that have NOT yet been processed.
*/
private final LinkedList<ByteArray> arrayQueue;
private final ByteArray currentArrayView;
/** The {@link ByteArrayPool}. */
private ByteArrayPool byteArrayPool;
/**
* The constructor.
*
* @param processor is the {@link DetectorStreamProcessor} served by this buffer.
* @param successor is the successor in the chain or {@code null} if this is the last buffer/processor pair in the
* chain.
* @param byteArrayPool is the {@link ByteArrayPool} to use.
*/
public DetectorStreamBufferImpl(DetectorStreamProcessor processor, DetectorStreamBufferImpl successor,
ByteArrayPool byteArrayPool) {
super();
this.arrayQueue = new LinkedList<>();
this.chainSuccessor = successor;
this.byteArrayPool = byteArrayPool;
this.processor = processor;
this.currentArrayView = new CurrentByteArray();
}
@Override
public long skip(long byteCount) {
seek(byteCount, SeekMode.SKIP);
return byteCount;
}
@Override
public long skip() {
if (this.currentArray == null) {
return 0;
}
int bytesAvailable = this.currentArrayMax - this.currentArrayIndex + 1;
if (this.chainSuccessor != null) {
this.chainSuccessor
.append(this.currentByteArray.createSubArray(this.currentArrayIndex, this.currentArrayMax));
}
release(this.currentByteArray);
this.currentArray = null;
Iterator<ByteArray> arrayIterator = this.arrayQueue.iterator();
while (arrayIterator.hasNext()) {
ByteArray array = arrayIterator.next();
arrayIterator.remove();
bytesAvailable = bytesAvailable + array.getBytesAvailable();
if (this.chainSuccessor == null) {
release(array);
} else {
this.chainSuccessor.append(array);
}
}
return bytesAvailable;
}
@Override
public ByteArray getByteArray(int index) {
if (index == 0) {
return this.currentArrayView;
} else {
return this.arrayQueue.get(index - 1);
}
}
@Override
public int getByteArrayCount() {
int arrayCount = this.arrayQueue.size();
if (this.currentArray != null) {
arrayCount++;
}
return arrayCount;
}
@Override
public int getBytesAvailable() {
if (this.currentArray == null) {
return 0;
}
int bytesAvailable = this.currentArrayMax - this.currentArrayIndex + 1;
for (ByteArray array : this.arrayQueue) {
bytesAvailable = bytesAvailable + array.getBytesAvailable();
}
return bytesAvailable;
}
@Override
public boolean hasNext() {
if (this.currentArray == null) {
boolean okay = nextArray();
if (!okay) {
return false;
}
}
return true;
}
/**
* This method is called when a {@link ByteArray} is wiped out of the chain.
*
* @param byteArray is the array to release.
*/
protected void release(ByteArray byteArray) {
if (byteArray instanceof PooledByteArray) {
PooledByteArray pooledArray = (PooledByteArray) byteArray;
if (pooledArray.release()) {
this.byteArrayPool.release(byteArray.getBytes());
}
}
}
/**
* This method switches over to the next internal {@link #getByteArray(int) byte-array}.
*
* @return {@code true} if a new buffer is available, {@code false} if the buffer queue is empty.
*/
private boolean nextArray() {
if (this.currentArray != null) {
if ((this.currentArrayMin < this.currentArrayMax) && (this.chainSuccessor != null)
&& (this.seekMode != SeekMode.REMOVE)) {
ByteArray subArray = this.currentByteArray.createSubArray(this.currentArrayMin, this.currentArrayMax);
this.chainSuccessor.append(subArray);
}
release(this.currentByteArray);
}
if (this.arrayQueue.isEmpty()) {
this.currentArray = null;
return false;
} else {
ByteArray nextArray = this.arrayQueue.remove();
int offsetMin = 0;
int offsetIndex = 0;
while (this.seekCount > 0) {
long bytesAvailable = nextArray.getBytesAvailable();
if (this.seekCount >= bytesAvailable) {
this.seekCount = this.seekCount - bytesAvailable;
this.streamPosition = this.streamPosition + bytesAvailable;
if ((this.chainSuccessor != null) && (this.seekMode == SeekMode.SKIP)) {
this.chainSuccessor.append(nextArray);
} else {
release(this.currentByteArray);
}
if (this.arrayQueue.isEmpty()) {
this.currentArray = null;
return false;
}
nextArray = this.arrayQueue.remove();
} else {
offsetIndex = (int) this.seekCount;
if (this.seekMode == SeekMode.REMOVE) {
offsetMin = offsetIndex;
}
this.streamPosition = this.streamPosition + this.seekCount;
this.seekCount = 0;
this.seekMode = null;
// break;
}
}
this.currentByteArray = nextArray;
this.currentArray = nextArray.getBytes();
int currentIndex = nextArray.getCurrentIndex();
this.currentArrayMin = currentIndex + offsetMin;
this.currentArrayIndex = currentIndex + offsetIndex;
this.currentArrayMax = nextArray.getMaximumIndex();
if (this.currentArrayIndex > this.currentArrayMax) {
// array already empty...
return nextArray();
}
return true;
}
}
@Override
public byte next() throws NoSuchElementException {
if (this.currentArray == null) {
throw new NoSuchElementException();
}
byte result = this.currentArray[this.currentArrayIndex++];
this.streamPosition++;
if (this.currentArrayIndex > this.currentArrayMax) {
nextArray();
}
return result;
}
@Override
public byte peek() throws NoSuchElementException {
if (this.currentArray == null) {
throw new NoSuchElementException();
}
byte result = this.currentArray[this.currentArrayIndex];
return result;
}
@Override
public void insert(byte... data) {
insert(new ByteArrayImpl(data));
}
@Override
public void insert(ByteArray data) {
if (this.currentArray != null) {
int max = this.currentArrayIndex - 1;
if (this.currentArrayMin <= max) {
this.chainSuccessor.append(new ByteArrayImpl(this.currentArray, this.currentArrayMin, max));
}
this.currentArrayMin = this.currentArrayIndex;
}
this.chainSuccessor.append(data);
}
/**
* This method {@link #remove(long) removes} or {@link #skip(long) skips} the given number of bytes.
*
* @param byteCount is the number of bytes to seek.
* @param mode is the {@link SeekMode}.
*/
protected void seek(long byteCount, SeekMode mode) {
if (this.seekMode == null) {
this.seekMode = mode;
} else if (this.seekMode != mode) {
// TODO: add dedicated exception or constructor to
// IllegalStateException...
throw new NlsIllegalArgumentException("remove and skip can NOT be mixed!");
}
this.seekCount = this.seekCount + byteCount;
if (this.currentArray != null) {
// if removeCount was > 0 before, then currentArray had been null.
if (mode == SeekMode.REMOVE) {
if (this.currentArrayMin < this.currentArrayIndex) {
// there are bytes that have been consumed before remove was
// invoked...
ByteArray subArray = this.currentByteArray.createSubArray(this.currentArrayMin,
this.currentArrayIndex - 1);
this.chainSuccessor.append(subArray);
this.currentArrayMin = this.currentArrayIndex;
}
}
long currentBytesAvailable = this.currentArrayMax - this.currentArrayIndex + 1;
if (currentBytesAvailable > this.seekCount) {
this.currentArrayIndex = (int) (this.currentArrayIndex + this.seekCount);
this.seekCount = 0;
this.seekMode = null;
if (mode == SeekMode.REMOVE) {
this.currentArrayMin = this.currentArrayIndex;
}
} else {
this.seekCount = this.seekCount - currentBytesAvailable;
nextArray();
if (this.seekCount == 0) {
this.seekMode = null;
}
}
}
}
@Override
public void remove(long byteCount) {
seek(byteCount, SeekMode.REMOVE);
}
@Override
public long getStreamPosition() {
return this.streamPosition;
}
/**
* This method queues the given {@link ByteArray} at the end of this buffer.
*
* @param nextArray is the {@link ByteArray} to append.
*/
protected void append(ByteArray nextArray) {
this.arrayQueue.add(nextArray);
if (this.currentArray == null) {
nextArray();
}
}
/**
* @see DetectorStreamProcessor#process(DetectorStreamBuffer, Map, boolean)
*
* @param metadata is the {@link Map} with the meta-data.
* @param eos - {@code true} if the end of the stream has been reached and the given {@code buffer} has to be
* @throws IOException in case of an Input/Output error. Should only be used internally.
*/
public void process(Map<String, Object> metadata, boolean eos) throws IOException {
this.processor.process(this, metadata, eos);
if (this.chainSuccessor != null) {
this.chainSuccessor.process(metadata, eos);
}
}
@Override
public int fill(byte[] buffer, int offset, int length) {
if (offset >= buffer.length) {
throw new BufferExceedException(offset, buffer.length);
}
if ((length + offset) > buffer.length) {
throw new BufferExceedException(length, buffer.length - offset);
}
if (!hasNext()) {
// buffer is empty...
return -1;
}
int bufferIndex = offset;
int bytesLeft = length;
while (bytesLeft > 0) {
int count = this.currentArrayMax - this.currentArrayIndex + 1;
if (count > bytesLeft) {
count = bytesLeft;
bytesLeft = 0;
} else {
bytesLeft = bytesLeft - count;
}
System.arraycopy(this.currentArray, this.currentArrayIndex, buffer, bufferIndex, count);
bufferIndex = bufferIndex + count;
this.currentArrayIndex = this.currentArrayIndex + count;
if (this.currentArrayIndex > this.currentArrayMax) {
boolean bufferLeft = nextArray();
if (!bufferLeft) {
break;
}
}
}
return (length - bytesLeft);
}
/**
* This inner class is a view on the current {@link ByteArray}.
*
* @see DetectorStreamBufferImpl#getByteArray(int)
*/
protected class CurrentByteArray extends AbstractByteArray {
@Override
public byte[] getBytes() {
return DetectorStreamBufferImpl.this.currentArray;
}
@Override
public int getBytesAvailable() {
return DetectorStreamBufferImpl.this.currentArrayMax - DetectorStreamBufferImpl.this.currentArrayIndex + 1;
}
@Override
public int getMinimumIndex() {
return DetectorStreamBufferImpl.this.currentArrayIndex;
}
@Override
public int getCurrentIndex() {
return DetectorStreamBufferImpl.this.currentArrayIndex;
}
@Override
public int getMaximumIndex() {
return DetectorStreamBufferImpl.this.currentArrayMax;
}
}
/**
* Enum with available modes for a {@link DetectorStreamBufferImpl#seek(long, SeekMode) seek}.
*/
protected static enum SeekMode {
/** @see DetectorStreamBufferImpl#skip(long) */
SKIP,
/** @see DetectorStreamBufferImpl#remove(long) */
REMOVE
}
}
| |
import java.io.PrintStream;
import java.io.IOException;
import java.util.Random;
import java.util.List;
import java.util.Vector;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
//import edu.cornell.cs.blog.JNIBlog;
import java.nio.ByteBuffer;
/**
 * Command-line HDFS micro-benchmark / test harness: append, overwrite, sequential and
 * random writes, snapshot timing, timestamped "PMU" streams and snapshot analysis.
 * Several test bodies are deliberately commented out but kept for reference.
 */
public class FileTester extends Configured implements Tool {

  /** Entry point; delegates to {@link #run(String[])} via Hadoop's {@link ToolRunner}. */
  public static void main(String[] args){
    int res;
    try{
      res = ToolRunner.run(new Configuration(), new FileTester(), args);
      System.exit(res);
    }catch(Exception e){
      System.out.println(e);
      e.printStackTrace();
    }
  }

  /** Overwrite test — body currently disabled; kept for reference. */
  void overwriteFile(FileSystem fs, String path, String... args)
    throws Exception{
/*  if(args.length != 2){
      throw new IOException("Please specify the pos and data");
    }
    FSDataOutputStream fsos = fs.append(new Path(path));
    fsos.seek(Long.parseLong(args[0]));
    System.out.println("seek done");
    PrintStream ps = new PrintStream(fsos);
    ps.print(args[1]);
    System.out.println("write done");
    ps.close();
    fsos.close();
    System.out.println("close done");
*/ }

  /**
   * Appends {@code args[0]} to the file at {@code path}.
   *
   * @throws IOException if no data argument is given or the append fails.
   */
  void appendFile(FileSystem fs, String path, String... args)
    throws IOException{
    if(args.length != 1){
      throw new IOException("Please specify the data");
    }
    FSDataOutputStream fsos = fs.append(new Path(path));
    PrintStream ps = new PrintStream(fsos);
    ps.print(args[0]);
    ps.close();
    fsos.close();
  }

  /**
   * Writes 1 GiB of 'a' bytes to {@code path} in chunks of {@code bfsz} bytes.
   * Assumes bfsz divides 2^30 evenly; otherwise slightly less than 1 GiB is written.
   */
  void write1g(FileSystem fs, String path, int bfsz)
    throws IOException{
    FSDataOutputStream fsos = fs.create(new Path(path));
    byte [] buf = new byte[bfsz];
    int i;
    for(i=0;i<bfsz;i++)buf[i]=(byte)'a';
    for(i=0;i<((1<<30)/bfsz);i++)
      fsos.write(buf,0,bfsz);
    fsos.close();
  }

  // Note: please set the block size to 1MB
  /** Random-write test — body currently disabled; kept for reference. */
  void randomWrite(FileSystem fs, String path)
    throws IOException{
/*  Path p = new Path(path);
    byte [] buf = new byte[4096];
    int i;
    // 0) Initialize write 4KB for 1024 times.
    for(i=0;i<4096;i++)buf[i]='I';
    FSDataOutputStream fsos = fs.create(new Path(path));
    for(i=0;i<1024;i++)fsos.write(buf,0,4096);
//    fsos.close();
    // 1) write 4K at 0; 4K at 1044480; 4K at 100000
    for(i=0;i<4096;i++)buf[i]='1';
    fsos.seek(0);fsos.write(buf,0,4096);
    fsos.seek(1044480);fsos.write(buf,0,4096);
    fsos.seek(100000);fsos.write(buf,0,4096);
//    fsos.close();
    // 2) write cross blocks
    // from 1048000 -> 1049000
    for(i=0;i<4096;i++)buf[i]='2';
    fsos.seek(1048000);fsos.write(buf,0,1000);
    // from 2097000 to 3146000
    fsos.seek(2097000);
    for(int j=0;j<1049;j++)fsos.write(buf,0,1000);
    fsos.close();
*/ }

  /** Random-overwrite timing test — body currently disabled; kept for reference. */
  void multitest(FileSystem fs, String path, int num)
    throws IOException{
/*
    Path p = new Path(path);
    int flen = (int)fs.getFileStatus(new Path(path)).getLen();
    Random rand = new Random(System.currentTimeMillis());
    String [] buf = new String[num];
    int [] pos = new int[num];
    System.out.print("Preparing randome write info:"
      + "\nfile = " + path
      + "\nwriteSize = 256"
      + "\ncount = " + num
      + "\n...");
    for(int i=0;i<num;i++){
      byte tbuf[] = new byte[256];
      pos[i] = rand.nextInt(flen-256);
      rand.nextBytes(tbuf);
      for(int j=0;j<256;j++)
        tbuf[j] = (byte)(tbuf[j]%5+'5');
      buf[i] = new String(tbuf);
    }
    System.out.print("...done"
      + "\nPerforming test...");
    long tsStart = System.nanoTime();
    FSDataOutputStream fsos = fs.append(new Path(path));
    for(int i=0;i<num;i++){
      fsos.seek(pos[i]);
      if(buf[i].length()!=256)
        System.out.println(buf[i].length());
      fsos.writeBytes(buf[i]);
    }
    fsos.close();
    long tsEnd = System.nanoTime();
    System.out.println("...done");
    System.out.println("Total Time = "
      + (tsEnd-tsStart)/1000 + "." + (tsEnd-tsStart)%1000
      + " microseconds");
*/ }

  /**
   * Creates {@code count} snapshots of {@code path}, one every {@code msInt} milliseconds,
   * printing the per-snapshot creation latency in milliseconds.
   */
  void snapshot(FileSystem fs,String path, long msInt, int count)
    throws IOException{
    long st = System.currentTimeMillis();//JNIBlog.readLocalRTC();
    int curc = 0;
    while(curc < count){
      long sts = System.nanoTime();
      fs.createSnapshot(new Path(path),""+curc);
      long ets = System.nanoTime();
      System.out.println((double)(ets-sts)/1000000);
      curc++;
      // Wait (in 1 ms hops) until the next snapshot is due.
      while(/*JNIBlog.readLocalRTC()*/System.currentTimeMillis()<(st+curc*msInt)){
        try{Thread.sleep(1);}catch(InterruptedException ie){}
      }
    }
  }

/*
  void read(FileSystem fs,String path)
  throws IOException{
    //read one
    read("R1",fs,path);
    //read two
    read("R2",fs,path);
  }*/

  /** Sequentially reads the whole file, printing throughput in bytes per nanosecond. */
  void read(FileSystem fs,String path)
    throws IOException{
    long start_ts,end_ts,len=0;
    int nRead;
    ByteBuffer bb = ByteBuffer.allocate(65536);
    FSDataInputStream fsis = fs.open(new Path(path));
    for(int i=0;i<1;i++){
      fsis.seek(0);
      len=0;
      start_ts = System.nanoTime();
      while((nRead=fsis.read(bb))>0){
        len+=nRead;
        bb.clear();
      };
      end_ts = System.nanoTime();
      System.out.println(((double)len/(end_ts - start_ts)));
    }
    fsis.close();
  }

  /** Writes "&lt;ts&gt; " at the beginning of {@code buf} (buf must be large enough). */
  void addTimestamp(long ts,byte[] buf){
    StringBuffer sb = new StringBuffer();
    sb.append(ts);
    sb.append(" ");
    byte bs[] = sb.toString().getBytes();
    System.arraycopy(bs,0,buf,0,bs.length);
  }

  /**
   * For {@code dur} seconds, writes a timestamped record of {@code bufsz} bytes roughly
   * every 33 ms, hflush()ing each record. Note this busy-polls the clock between records.
   */
  void timewrite(FileSystem fs,String path,int bufsz, int dur)
    throws IOException{
    byte buf[] = new byte[bufsz];
    for(int i=0;i<bufsz;i++)buf[i]='P';
    buf[bufsz-1]='\n';
    FSDataOutputStream fsos = fs.create(new Path(path));
    long end = System.currentTimeMillis()+dur*1000;
    long cur = System.currentTimeMillis()+33;//JNIBlog.readLocalRTC()+33;
    while(System.currentTimeMillis() < end){
      //write a packet
      if(/*JNIBlog.readLocalRTC()*/ System.currentTimeMillis() >= cur){
        addTimestamp(cur,buf);
        fsos.write(buf,0,bufsz);
        fsos.hflush();
        cur += 33;
      }
    }
    fsos.close();
  }

  /** Like {@link #timewrite} but writes to {@code path/pmu<pmuid>}. */
  void pmuwrite(FileSystem fs,String path,
    int pmuid, int recordsize, int dur)
    throws IOException{
    byte buf[] = new byte[recordsize];
    for(int i=0;i<recordsize;i++)buf[i]='P';
    buf[recordsize-1]='\n';
    FSDataOutputStream fsos = fs.create(new Path(path+"/pmu"+pmuid));
    long end = System.currentTimeMillis()+dur*1000;
    long cur = System.currentTimeMillis()+33;//JNIBlog.readLocalRTC()+33;
    while(System.currentTimeMillis() < end){
      //write a packet
      if(/*JNIBlog.readLocalRTC()*/System.currentTimeMillis() >= cur){
        addTimestamp(cur,buf);
        fsos.write(buf,0,recordsize);
        fsos.hflush();
        cur += 33;
      }
    }
    fsos.close();
  }

  /** Lists the snapshot directory and parses each snapshot name as a Long. */
  private List<Long> getSnapshots(FileSystem fs)
    throws IOException{
    List<Long> lRet = new Vector<Long>(256);
    for(FileStatus stat: fs.listStatus(new Path("/.snapshot"))){
      Long snapts = Long.parseLong(stat.getPath().getName());
      lRet.add(snapts);
    }
    return lRet;
  }

  /** A file name together with the first and last timestamps found inside it. */
  class FileTuple{
    String name; // file name
    long sts;    // start time stamp
    long ets;    // end time stamp
  }

  /**
   * Reads the first timestamp of a record file.
   * Assumes records start with a 13-digit epoch-millis timestamp (see addTimestamp).
   *
   * @return the timestamp, or -1 if the file is too short.
   */
  private long readFirstTs(FileSystem fs,Path path)
    throws IOException{
    FSDataInputStream fsis = fs.open(path);
    long lRet = -1;
    if(fsis.available()>13){
      byte buf[] = new byte[13];
      fsis.readFully(0,buf);
      lRet = Long.parseLong(new String(buf));
    }
    fsis.close();
    return lRet;
  }

  /**
   * Reads the timestamp of the last complete record by scanning a growing window at the
   * end of the file for the last newline.
   *
   * @return the timestamp, or -1 if the file is missing, unreadable or holds no record.
   */
  private long readLastTs(FileSystem fs,Path path)
    throws IOException{
    long lRet = -1;
    if(!fs.exists(path)) return -1;
    FSDataInputStream fsis = null;
    try{
      fsis = fs.open(path);
      int back = 64;
      int flen = fsis.available();
      if(flen > 0)
        while(true){
          int try_pos = Math.max(flen - back, 0);
          byte buf[] = new byte[flen - try_pos];
          fsis.readFully(try_pos,buf);
          // Mask a trailing newline so lastIndexOf finds the previous record boundary.
          if(buf[buf.length-1]=='\n')
            buf[buf.length-1]='P';
          String line = new String(buf);
          int pts = line.lastIndexOf('\n');
          if(pts == -1 && try_pos != 0){
            // BUGFIX: no newline in the window yet — widen the window before retrying.
            // The old code retried with the same 64-byte window forever (infinite loop).
            back *= 2;
            continue;
          }
          if(pts != -1 && pts+14 <= line.length())
            lRet = Long.parseLong(line.substring(pts+1,pts+14));
          break;
        }
    }catch(IOException ioe){
      return -1;
    }finally{
      if(fsis!=null)fsis.close();
    }
    return lRet;
  }

  /** Builds a {@link FileTuple} (name + first/last timestamp) for every file in path. */
  private List<FileTuple> getFiles(FileSystem fs, String path)
    throws IOException{
    List<FileTuple> lRet = new Vector<FileTuple>(32);
    for(FileStatus stat: fs.listStatus(new Path(path))){
      FileTuple ft = new FileTuple();
      ft.name = stat.getPath().getName();
      ft.sts = readFirstTs(fs,stat.getPath());
      ft.ets = readLastTs(fs,stat.getPath());
      lRet.add(ft);
    }
    return lRet;
  }

  /**
   * For every (snapshot, file) pair, prints the lag between the snapshot timestamp and
   * the last record captured in that snapshot. Assumes snapshot names are timestamps.
   */
  void analyzesnap(FileSystem fs, String path)
    throws IOException{
    // STEP 1: get snapshot/timestamp list
    List<Long> lSnap = getSnapshots(fs);
    // STEP 2: get the real start/end timestamp for each file.
    List<FileTuple> lFile = getFiles(fs,path);
    // STEP 3: spit data
    for(long snap: lSnap){
      for(FileTuple ft: lFile){
        if(ft.sts > snap)continue; // file did not exist yet at snapshot time
        Path p = new Path("/.snapshot/"+snap+path+"/"+ft.name);
        long delta = snap - ft.sts;
        long ets = readLastTs(fs,p);
        if(ets!=-1){
          delta = snap - ets;
          if(snap > ft.ets){
            // Snapshot taken after the file stopped growing.
            delta = ft.ets - ets;
            if(delta <= 0) continue;
          }
        }
        System.out.println(ft.name+" "+snap+" "+delta);
      }
    }
  }

  /** Dispatches the sub-command given in args[0]; prints usage if no args were given. */
  @Override
  public int run(String[] args) throws Exception{
    Configuration conf = this.getConf();
    FileSystem fs = FileSystem.get(conf);
    if(args.length < 1){
      System.out.println("args: <cmd:=append|overwrite>\n"+
        "\tappend <file> <data>\n"+
        "\toverwrite <file> <pos> <data>\n"+
        "\tmultitest <file> <num>\n"+
        "\twrite1g <file> <bfsz>\n"+ //set buffersize
        "\trandomwrite <file>\n"+
        "\tsnapshot <path> <interval_ms> <number>\n"+
        "\ttimewrite <path> <bfsz> <duration>\n"+
        "\tpmuwrite <path> <pmuid> <recordsize> <duration>\n"+
        "\tread <path>\n"+
        "\tanalyzesnap <path>\n"+
        "\tr108");
      return -1;
    }
    if("append".equals(args[0]))
      this.appendFile(fs,args[1],args[2]);
    else if("overwrite".equals(args[0]))
      this.overwriteFile(fs,args[1],args[2],args[3]);
    else if("multitest".equals(args[0]))
      this.multitest(fs,args[1],Integer.parseInt(args[2]));
    else if("write1g".equals(args[0]))
      this.write1g(fs,args[1],Integer.parseInt(args[2]));
    else if("randomwrite".equals(args[0]))
      this.randomWrite(fs,args[1]);
    else if("snapshot".equals(args[0]))
      this.snapshot(fs,args[1],Long.parseLong(args[2]),Integer.parseInt(args[3]));
    else if("read".equals(args[0]))
      this.read(fs,args[1]);
    else if("timewrite".equals(args[0]))
      this.timewrite(fs,args[1],Integer.parseInt(args[2]),Integer.parseInt(args[3]));
    else if("pmuwrite".equals(args[0]))
      this.pmuwrite(fs,args[1],Integer.parseInt(args[2]),Integer.parseInt(args[3]),Integer.parseInt(args[4]));
    else if("analyzesnap".equals(args[0]))
      this.analyzesnap(fs,args[1]);
    else
      throw new Exception("invalid command:"+args[0]);
    return 0;
  }
}
| |
/*
* Copyright 2020-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.kubevirtnode.api;
import com.google.common.base.MoreObjects;
import org.apache.commons.lang.StringUtils;
import org.onlab.osgi.DefaultServiceDirectory;
import org.onlab.packet.IpAddress;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Port;
import org.onosproject.net.PortNumber;
import org.onosproject.net.device.DeviceService;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import static com.google.common.base.Preconditions.checkArgument;
import static org.onosproject.kubevirtnode.api.Constants.DEFAULT_CLUSTER_NAME;
import static org.onosproject.kubevirtnode.api.Constants.GENEVE;
import static org.onosproject.kubevirtnode.api.Constants.GRE;
import static org.onosproject.kubevirtnode.api.Constants.INTEGRATION_TO_PHYSICAL_PREFIX;
import static org.onosproject.kubevirtnode.api.Constants.VXLAN;
import static org.onosproject.net.AnnotationKeys.PORT_NAME;
/**
 * Representation of a KubeVirt node.
 *
 * <p>Instances are immutable; the {@code update*} methods return a modified
 * copy rather than mutating this object.
 */
public class DefaultKubevirtNode implements KubevirtNode {

    // Guava's Preconditions.checkArgument substitutes message arguments via
    // "%s" placeholders; the previous bare "%" never rendered the field name
    // in the failure message.
    private static final String NOT_NULL_MSG = "Node %s cannot be null";
    private static final String OVSDB = "ovsdb:";
    // OVS limits interface names to 15 characters (IFNAMSIZ - 1).
    private static final int PORT_NAME_MAX_LENGTH = 15;

    private final String clusterName;
    private final String hostname;
    private final Type type;
    private final DeviceId intgBridge;
    private final DeviceId tunBridge;
    private final IpAddress managementIp;
    private final IpAddress dataIp;
    private final KubevirtNodeState state;
    private final Collection<KubevirtPhyInterface> phyIntfs;
    private final String gatewayBridgeName;

    /**
     * A default constructor of kubevirt node.
     *
     * @param clusterName       clusterName
     * @param hostname          hostname
     * @param type              node type
     * @param intgBridge        integration bridge
     * @param tunBridge         tunnel bridge
     * @param managementIp      management IP address
     * @param dataIp            data IP address
     * @param state             node state
     * @param phyIntfs          physical interfaces
     * @param gatewayBridgeName gateway bridge name
     */
    protected DefaultKubevirtNode(String clusterName, String hostname, Type type,
                                  DeviceId intgBridge, DeviceId tunBridge,
                                  IpAddress managementIp, IpAddress dataIp,
                                  KubevirtNodeState state,
                                  Collection<KubevirtPhyInterface> phyIntfs,
                                  String gatewayBridgeName) {
        this.clusterName = clusterName;
        this.hostname = hostname;
        this.type = type;
        this.intgBridge = intgBridge;
        this.tunBridge = tunBridge;
        this.managementIp = managementIp;
        this.dataIp = dataIp;
        this.state = state;
        this.phyIntfs = phyIntfs;
        this.gatewayBridgeName = gatewayBridgeName;
    }

    @Override
    public String clusterName() {
        return clusterName;
    }

    @Override
    public String hostname() {
        return hostname;
    }

    @Override
    public Type type() {
        return type;
    }

    @Override
    public DeviceId ovsdb() {
        // The OVSDB device is addressed as "ovsdb:<management IP>".
        return DeviceId.deviceId(OVSDB + managementIp().toString());
    }

    @Override
    public DeviceId intgBridge() {
        return intgBridge;
    }

    @Override
    public DeviceId tunBridge() {
        return tunBridge;
    }

    @Override
    public IpAddress managementIp() {
        return managementIp;
    }

    @Override
    public IpAddress dataIp() {
        return dataIp;
    }

    @Override
    public KubevirtNodeState state() {
        return state;
    }

    @Override
    public KubevirtNode updateState(KubevirtNodeState newState) {
        // Copy-on-write: rebuild the node with only the state replaced.
        return new Builder()
                .hostname(hostname)
                .clusterName(clusterName)
                .type(type)
                .intgBridge(intgBridge)
                .tunBridge(tunBridge)
                .managementIp(managementIp)
                .dataIp(dataIp)
                .state(newState)
                .phyIntfs(phyIntfs)
                .gatewayBridgeName(gatewayBridgeName)
                .build();
    }

    @Override
    public KubevirtNode updateIntgBridge(DeviceId deviceId) {
        // Copy-on-write: rebuild the node with only the integration bridge replaced.
        return new Builder()
                .hostname(hostname)
                .clusterName(clusterName)
                .type(type)
                .intgBridge(deviceId)
                .tunBridge(tunBridge)
                .managementIp(managementIp)
                .dataIp(dataIp)
                .state(state)
                .phyIntfs(phyIntfs)
                .gatewayBridgeName(gatewayBridgeName)
                .build();
    }

    @Override
    public KubevirtNode updateTunBridge(DeviceId deviceId) {
        // Copy-on-write: rebuild the node with only the tunnel bridge replaced.
        return new Builder()
                .hostname(hostname)
                .clusterName(clusterName)
                .type(type)
                .intgBridge(intgBridge)
                .tunBridge(deviceId)
                .managementIp(managementIp)
                .dataIp(dataIp)
                .state(state)
                .phyIntfs(phyIntfs)
                .gatewayBridgeName(gatewayBridgeName)
                .build();
    }

    @Override
    public Collection<KubevirtPhyInterface> phyIntfs() {
        // Never return null to callers; an absent collection reads as empty.
        if (phyIntfs == null) {
            return new ArrayList<>();
        }
        return phyIntfs;
    }

    @Override
    public Set<PortNumber> physPatchPorts() {
        Set<PortNumber> portNumbers = new HashSet<>();
        for (KubevirtPhyInterface phyIntf : this.phyIntfs()) {
            // Patch port names follow "<prefix><network>", truncated to the
            // OVS port-name limit.
            String portName = structurePortName(
                    INTEGRATION_TO_PHYSICAL_PREFIX + phyIntf.network());
            PortNumber portNumber = patchPort(portName);
            if (portNumber != null) {
                portNumbers.add(portNumber);
            }
        }
        return portNumbers;
    }

    @Override
    public PortNumber vxlanPort() {
        return tunnelPort(VXLAN);
    }

    @Override
    public PortNumber grePort() {
        return tunnelPort(GRE);
    }

    @Override
    public PortNumber genevePort() {
        return tunnelPort(GENEVE);
    }

    @Override
    public String gatewayBridgeName() {
        return gatewayBridgeName;
    }

    /**
     * Looks up the enabled port on the tunnel bridge whose port name matches
     * the given tunnel type.
     *
     * @param tunnelType tunnel type (port name) to search for
     * @return matching port number, or null if the node has no data IP or no
     *         matching enabled port exists
     */
    private PortNumber tunnelPort(String tunnelType) {
        // A node without a data IP cannot terminate tunnels.
        if (dataIp == null) {
            return null;
        }
        DeviceService deviceService = DefaultServiceDirectory.getService(DeviceService.class);
        Port port = deviceService.getPorts(tunBridge).stream()
                .filter(p -> p.isEnabled() &&
                        Objects.equals(p.annotations().value(PORT_NAME), tunnelType))
                .findAny().orElse(null);
        return port != null ? port.number() : null;
    }

    /**
     * Looks up the enabled port on the integration bridge with the given name.
     *
     * @param portName port name to search for
     * @return matching port number, or null if no matching enabled port exists
     */
    private PortNumber patchPort(String portName) {
        DeviceService deviceService = DefaultServiceDirectory.getService(DeviceService.class);
        Port port = deviceService.getPorts(intgBridge).stream()
                .filter(p -> p.isEnabled() &&
                        Objects.equals(p.annotations().value(PORT_NAME), portName))
                .findAny().orElse(null);
        return port != null ? port.number() : null;
    }

    /**
     * Re-structures the OVS port name.
     * The length of OVS port name should be not large than 15.
     *
     * @param portName original port name
     * @return re-structured OVS port name
     */
    private String structurePortName(String portName) {
        // The size of OVS port name should not be larger than 15
        if (portName.length() > PORT_NAME_MAX_LENGTH) {
            return StringUtils.substring(portName, 0, PORT_NAME_MAX_LENGTH);
        }
        return portName;
    }

    /**
     * Returns new builder instance.
     *
     * @return kubevirt node builder
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * Returns new builder instance with the given node as a default value.
     *
     * @param node kubevirt node
     * @return kubevirt node builder
     */
    public static Builder from(KubevirtNode node) {
        return new Builder()
                .hostname(node.hostname())
                .clusterName(node.clusterName())
                .type(node.type())
                .intgBridge(node.intgBridge())
                .tunBridge(node.tunBridge())
                .managementIp(node.managementIp())
                .dataIp(node.dataIp())
                .state(node.state())
                .phyIntfs(node.phyIntfs())
                .gatewayBridgeName(node.gatewayBridgeName());
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        DefaultKubevirtNode that = (DefaultKubevirtNode) o;
        // NOTE(review): state, phyIntfs and gatewayBridgeName are deliberately
        // excluded here and in hashCode() — presumably so a node compares equal
        // across state transitions; confirm before adding fields.
        return clusterName.equals(that.clusterName) &&
                hostname.equals(that.hostname) &&
                type == that.type &&
                intgBridge.equals(that.intgBridge) &&
                tunBridge.equals(that.tunBridge) &&
                managementIp.equals(that.managementIp) &&
                dataIp.equals(that.dataIp);
    }

    @Override
    public int hashCode() {
        // Must stay consistent with equals(): same field subset.
        return Objects.hash(clusterName, hostname, type, intgBridge, tunBridge,
                managementIp, dataIp);
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("clusterName", clusterName)
                .add("hostname", hostname)
                .add("type", type)
                .add("intgBridge", intgBridge)
                .add("tunBridge", tunBridge)
                .add("managementIp", managementIp)
                .add("dataIp", dataIp)
                .add("state", state)
                .add("phyIntfs", phyIntfs)
                .add("gatewayBridgeName", gatewayBridgeName)
                .toString();
    }

    /**
     * Builder of {@link DefaultKubevirtNode}. hostname, type, state and
     * management IP are mandatory; clusterName defaults to
     * {@code DEFAULT_CLUSTER_NAME} when empty.
     */
    public static final class Builder implements KubevirtNode.Builder {

        private String clusterName;
        private String hostname;
        private Type type;
        private DeviceId intgBridge;
        private DeviceId tunBridge;
        private IpAddress managementIp;
        private IpAddress dataIp;
        private KubevirtNodeState state;
        private Collection<KubevirtPhyInterface> phyIntfs;
        private String gatewayBridgeName;

        // private constructor not intended to use from external
        private Builder() {
        }

        @Override
        public KubevirtNode build() {
            checkArgument(hostname != null, NOT_NULL_MSG, "hostname");
            checkArgument(type != null, NOT_NULL_MSG, "type");
            checkArgument(state != null, NOT_NULL_MSG, "state");
            checkArgument(managementIp != null, NOT_NULL_MSG, "management IP");

            if (StringUtils.isEmpty(clusterName)) {
                clusterName = DEFAULT_CLUSTER_NAME;
            }

            return new DefaultKubevirtNode(
                    clusterName,
                    hostname,
                    type,
                    intgBridge,
                    tunBridge,
                    managementIp,
                    dataIp,
                    state,
                    phyIntfs,
                    gatewayBridgeName
            );
        }

        @Override
        public Builder clusterName(String clusterName) {
            this.clusterName = clusterName;
            return this;
        }

        @Override
        public Builder hostname(String hostname) {
            this.hostname = hostname;
            return this;
        }

        @Override
        public Builder type(Type type) {
            this.type = type;
            return this;
        }

        @Override
        public Builder intgBridge(DeviceId deviceId) {
            this.intgBridge = deviceId;
            return this;
        }

        @Override
        public Builder tunBridge(DeviceId deviceId) {
            this.tunBridge = deviceId;
            return this;
        }

        @Override
        public Builder managementIp(IpAddress managementIp) {
            this.managementIp = managementIp;
            return this;
        }

        @Override
        public Builder dataIp(IpAddress dataIp) {
            this.dataIp = dataIp;
            return this;
        }

        @Override
        public Builder phyIntfs(Collection<KubevirtPhyInterface> phyIntfs) {
            this.phyIntfs = phyIntfs;
            return this;
        }

        @Override
        public Builder state(KubevirtNodeState state) {
            this.state = state;
            return this;
        }

        @Override
        public Builder gatewayBridgeName(String gatewayBridgeName) {
            this.gatewayBridgeName = gatewayBridgeName;
            return this;
        }
    }
}
| |
/*
* (c) Copyright 2003, 2004, 2005 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
* All rights reserved.
*
*
*/
//=======================================================================
// Package
package com.hp.hpl.jena.db.impl;
//=======================================================================
// Imports
import java.sql.*;
import java.util.*;
import com.hp.hpl.jena.util.iterator.*;
import com.hp.hpl.jena.shared.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
//=======================================================================
/**
 * Iterates over an SQL result set returning each row as an ArrayList of
 * objects. The returned array is shared at each iteration so calling next() or even hasNext()
 * changes the array contents. When the iterator terminates the resources
 * are cleaned up and the underlying SQL PreparedStatement is returned to
 * the SQLCache pool from whence it came.
 *
 * <p>Override the extractRow, getRow, and remove methods in subclasses
 * to return an object collection derived from the row contents instead
 * of the raw row contents.
 *
 * @author <a href="mailto:der@hplb.hpl.hp.com">Dave Reynolds</a>
 * @version $Revision: 1.1 $ on $Date: 2009/06/29 08:55:37 $
 */
public abstract class ResultSetIterator<T> implements ExtendedIterator<T>{

    // T is the type of the object each row is converted into by subclasses.

    /** The ResultSet being iterated over */
    protected ResultSet m_resultSet;

    /** The originating SQLcache to return the statement to, can be null */
    protected SQLCache m_sqlCache;

    /** The source Statement to be cleaned up when the iterator finishes - return it to cache or close it if no cache */
    protected PreparedStatement m_statement;

    /** If true, clean/close the prepared statement when iterator is closed */
    protected boolean m_statementClean = true;

    /** The name of the original operation that led to this statement, can be null if SQLCache is null */
    protected String m_opname;

    /** The contents of the current row */
    protected T m_row;

    /** The number of columns in this result set */
    protected int m_nCols;

    /** Flag that the iteration has finished */
    protected boolean m_finished = false;

    /** Flag if we have prefetched the next row but not yet returned it */
    protected boolean m_prefetched = false;

    private static Logger logger = LoggerFactory.getLogger( ResultSetIterator.class );

    /**
     * Create an empty iterator.
     * Needs to be initialized by reset
     * before it can be accessed. Useful to allow generic functions like
     * {@link SQLCache#runSQLQuery runSQLQuery}
     * to return different iterator types to the client.
     */
    public ResultSetIterator() {
        m_finished = true; // Prevent reading until reset
    }

    /**
     * Iterate over the results of a PreparedStatement generated by an SQLCache
     * @param resultSet the result set being iterated over
     * @param sourceStatement The source Statement to be cleaned up when the iterator finishes - return it to cache or close it if no cache
     * @param cache The originating SQLcache to return the statement to, can be null
     * @param opname The name of the original operation that led to this statement, can be null if SQLCache is null
     */
    public ResultSetIterator(ResultSet resultSet, PreparedStatement sourceStatement, SQLCache cache, String opname) {
        m_resultSet = resultSet;
        m_sqlCache = cache;
        m_statement = sourceStatement;
        m_opname = opname;
    }

    /**
     * Iterate over the results of a PreparedStatement, close the statement when finished.
     * @param resultSet the result set being iterated over
     * @param sourceStatement The source Statement to be closed when the iterator finishes
     */
    public ResultSetIterator(ResultSet resultSet, PreparedStatement sourceStatement) {
        m_resultSet = resultSet;
        m_statement = sourceStatement;
    }

    /**
     * Reset an existing iterator to scan a new result set.
     * @param resultSet the result set being iterated over
     * @param sourceStatement The source Statement to be cleaned up when the iterator finishes - return it to cache or close it if no cache
     * @param cache The originating SQLcache to return the statement to, can be null
     * @param opname The name of the original operation that led to this statement, can be null if SQLCache is null
     */
    public void reset(ResultSet resultSet, PreparedStatement sourceStatement, SQLCache cache, String opname) {
        m_resultSet = resultSet;
        m_sqlCache = cache;
        m_statement = sourceStatement;
        m_opname = opname;
        m_finished = false;
        m_prefetched = false;
        m_row = null;
        m_statementClean = true;
    }

    /**
     * Reset an existing iterator to scan a new result set.
     * @param resultSet the result set being iterated over
     * @param sourceStatement The source Statement to be cleaned up when the iterator finishes - return it to cache or close it if no cache
     * note: the sourceStatement is not closed or returned when the iterator is closed.
     */
    public void reset(ResultSet resultSet, PreparedStatement sourceStatement) {
        m_resultSet = resultSet;
        m_sqlCache = null;
        m_statement = sourceStatement;
        m_opname = null;
        m_finished = false;
        m_prefetched = false;
        m_row = null;
        m_statementClean = false;
    }

    /**
     * Test if there is a next result to return
     */
    public boolean hasNext() {
        // Prefetch the next row so we know whether one exists; next() will
        // consume the prefetched row.
        if (!m_finished && !m_prefetched) moveForward();
        return !m_finished;
    }

    public T removeNext()
        { cantRemove(); return null; }

    /**
     * Return the current row
     */
    public T next() {
        if (!m_finished && !m_prefetched) moveForward();
        m_prefetched = false;
        if (m_finished) {
            throw new NoSuchElementException();
        }
        return getRow() ;
    }

    /**
     * Delete the current row entry
     */
    public void remove() {
        cantRemove();
    }

    protected void cantRemove() {
        throw new UnsupportedOperationException("ResultSetIterator can't remove database rows");
    }

    /**
     * Move forward one row. Sets the m_finished flag if there is no more to fetch
     */
    protected void moveForward() {
        try {
            if (!m_finished && m_resultSet.next()) {
                extractRow();
                m_prefetched = true;
            } else {
                close();
            }
        } catch (Exception e) {
            // TODO do we need this catch at all?
            logger.warn("Problem in iterator over db result set, op = " + m_opname, e);
            // Release the result set and statement before propagating,
            // otherwise a failure mid-iteration leaks the open cursor and
            // its prepared statement.
            try {
                close();
            } catch (Exception cleanupFailure) {
                logger.warn("Error while closing result set iterator after failure", cleanupFailure);
            }
            // Added by kers for debugging
            throw new JenaException( e );
        }
    }

    /**
     * Extract the current row
     * Override in subclasses.
     */
    protected abstract void extractRow() throws Exception ;
    // A typical implementation looks like:
    // {
    //     if (m_row == null) {
    //         m_nCols = m_resultSet.getMetaData().getColumnCount();
    //         m_row = new ArrayList<Object>(m_nCols);
    //         for (int i = 0; i < m_nCols; i++) m_row.add(null);
    //     }
    //     for (int i = 0; i < m_nCols; i++) {
    //         m_row.set(i, m_resultSet.getObject(i+1));
    //     }
    // }

    /**
     * Return the current row,should have already been extracted.
     * Override in subclasses.
     */
    protected T getRow() {
        return m_row;
    }

    /**
     * Clean up the allocated resources - result set and statement.
     * If we know of an SQLCache return the statement there, otherwise close it.
     */
    public void close() {
        if (!m_finished) {
            if (m_resultSet != null) {
                try {
                    m_resultSet.close();
                    m_resultSet = null;
                } catch (SQLException e) {
                    logger.warn("Error while finalizing result set iterator", e);
                }
            }
            // Guard against a null statement (e.g. a partially reset iterator)
            // so cleanup never throws NPE.
            if (m_statementClean && m_statement != null) {
                if (m_sqlCache != null && m_opname != null) {
                    m_sqlCache.returnPreparedSQLStatement(m_statement);
                } else {
                    try {
                        m_statement.close();
                    } catch (SQLException e) {
                        logger.warn("Error while finalizing result set iterator", e);
                    }
                }
            }
        }
        m_finished = true;
    }

    /**
     * Get a singleton result (single column from single row) and close the iterator.
     * This may be too specialized but seems to come up a lot - rethink.
     * NOTE(review): assumes T is (or casts to) a List - confirm against subclasses.
     */
    public Object getSingleton() throws SQLException {
        List<?> row = (List<?>)next();
        close();
        return row.get(0);
    }

    /**
     * Clean up the database cursor. Normally the client should read to the end
     * or explicitly close but....
     */
    @Override
    protected void finalize() throws SQLException {
        if (!m_finished && m_resultSet != null) close();
    }

    /**
        return a new iterator which delivers all the elements of this iterator and
        then all the elements of the other iterator. Does not copy either iterator;
        they are consumed as the result iterator is consumed.
    */
    public <X extends T> ExtendedIterator<T> andThen( Iterator<X> other ) {
        return NiceIterator.andThen(this, other);
    }

    public Set<T> toSet() {
        return NiceIterator.asSet( this );
    }

    public List<T> toList() {
        return NiceIterator.asList( this );
    }

    /* (non-Javadoc)
     * @see com.hp.hpl.jena.util.iterator.ExtendedIterator#filterKeep(com.hp.hpl.jena.util.iterator.Filter)
     */
    public ExtendedIterator<T> filterKeep(Filter<T> f) {
        return new FilterIterator<T>( f, this );
    }

    /* (non-Javadoc)
     * @see com.hp.hpl.jena.util.iterator.ExtendedIterator#filterDrop(com.hp.hpl.jena.util.iterator.Filter)
     */
    public ExtendedIterator<T> filterDrop(final Filter<T> f)
    {
        // Keep exactly the elements the given filter rejects.
        Filter<T> notF = new Filter<T>() {
            @Override
            public boolean accept(T x)
            {
                return !f.accept(x) ;
            }
        } ;
        return new FilterIterator<T>(notF, this) ;
    }

    /* (non-Javadoc)
     * @see com.hp.hpl.jena.util.iterator.ExtendedIterator#mapWith(com.hp.hpl.jena.util.iterator.Map1)
     */
    public <X> ExtendedIterator<X> mapWith(Map1<T,X> map1) {
        return new Map1Iterator<T,X>( map1, this );
    }

} // End class
/*
* (c) Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
| |
/*
* Titan Robotics Framework Library
* Copyright (c) 2015 Titan Robotics Club (http://www.titanrobotics.net)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package trclib;
/**
* This class implements a platform independent accelerometer. Typically, this
* class is extended by a platform dependent accelerometer class. The platform
* dependent accelerometer class must implement the abstract methods required
* by this class. The abstract methods allow this class to get raw data for each
* accelerometer axis. Depending on the options specified in the constructor,
* this class may create an integrator. The platform dependent accelerometer
* can specify how many axes it supports by setting the HAS_AXIS options. If it
* does not provide velocity or distance data, it can set the INTEGRATE and
* DOUBLE_INTEGRATE options and let the built-in integrator handle it.
*/
public abstract class TrcAccelerometer extends TrcSensor implements TrcSensorDataSource
{
//
// Accelerometer data types.
//
public enum DataType
{
ACCELERATION,
VELOCITY,
DISTANCE
} //enum DataType
    /**
     * This abstract method returns the raw data of the specified type for the x-axis.
     * Implemented by the platform dependent accelerometer class.
     *
     * @param dataType specifies the data type.
     * @return raw data of the specified type for the x-axis.
     */
    public abstract SensorData getRawXData(DataType dataType);

    /**
     * This abstract method returns the raw data of the specified type for the y-axis.
     * Implemented by the platform dependent accelerometer class.
     *
     * @param dataType specifies the data type.
     * @return raw data of the specified type for the y-axis.
     */
    public abstract SensorData getRawYData(DataType dataType);

    /**
     * This abstract method returns the raw data of the specified type for the z-axis.
     * Implemented by the platform dependent accelerometer class.
     *
     * @param dataType specifies the data type.
     * @return raw data of the specified type for the z-axis.
     */
    public abstract SensorData getRawZData(DataType dataType);
    //
    // Accelerometer options. These bit flags may be OR'd together and passed
    // to the constructor.
    //
    public static final int ACCEL_HAS_X_AXIS        = (1 << 0);     // supports x-axis.
    public static final int ACCEL_HAS_Y_AXIS        = (1 << 1);     // supports y-axis.
    public static final int ACCEL_HAS_Z_AXIS        = (1 << 2);     // supports z-axis.
    public static final int ACCEL_INTEGRATE         = (1 << 3);     // integrate acceleration to get velocity.
    public static final int ACCEL_DOUBLE_INTEGRATE  = (1 << 4);     // double integrate to get distance.

    private static final String moduleName = "TrcAccelerometer";
    private static final boolean debugEnabled = false;
    private TrcDbgTrace dbgTrace = null;

    private final String instanceName;
    // Created only when ACCEL_INTEGRATE or ACCEL_DOUBLE_INTEGRATE is set.
    private TrcDataIntegrator dataIntegrator = null;
    // Per-axis indices into the sensor data arrays; -1 means the axis is absent.
    private int xIndex = -1;
    private int yIndex = -1;
    private int zIndex = -1;
    /**
     * Constructor: Creates an instance of the object.
     *
     * @param instanceName specifies the instance name.
     * @param numAxes specifies the number of axes of the accelerometer.
     * @param options specifies the accelerometer options. Multiple options can be OR'd together.
     *                ACCEL_HAS_X_AXIS - supports x-axis.
     *                ACCEL_HAS_Y_AXIS - supports y-axis.
     *                ACCEL_HAS_Z_AXIS - supports z-axis.
     *                ACCEL_INTEGRATE - do integration on all axes to get velocities.
     *                ACCEL_DOUBLE_INTEGRATE - do double integration on all axes to get distances.
     * @param filters specifies an array of filter objects one for each supported axis.
     *                It is assumed that the order of the filters in the array is x, y
     *                and then z. If an axis is specified in the options but no filter
     *                will be used on that axis, the corresponding element in the array
     *                should be set to null. If no filter is used at all, filters can
     *                be set to null.
     * @throws IllegalArgumentException if numAxes disagrees with the axis bits in options.
     */
    public TrcAccelerometer(final String instanceName, int numAxes, int options, TrcFilter[] filters)
    {
        super(instanceName, numAxes, filters);

        if (debugEnabled)
        {
            dbgTrace = new TrcDbgTrace(moduleName + "." + instanceName,
                                       false,
                                       TrcDbgTrace.TraceLevel.API,
                                       TrcDbgTrace.MsgLevel.INFO);
        }

        //
        // Count the number of axes and set up the indices for each axis.
        // Indices are assigned in x, y, z order for whichever axes exist.
        //
        int axisCount = 0;
        if ((options & ACCEL_HAS_X_AXIS) != 0)
        {
            xIndex = axisCount;
            axisCount++;
        }

        if ((options & ACCEL_HAS_Y_AXIS) != 0)
        {
            yIndex = axisCount;
            axisCount++;
        }

        if ((options & ACCEL_HAS_Z_AXIS) != 0)
        {
            zIndex = axisCount;
            axisCount++;
        }

        // The caller-supplied axis count must agree with the axis option bits.
        if (axisCount != numAxes)
        {
            throw new IllegalArgumentException(
                    "numAxes doesn't match the number of axes in options");
        }

        this.instanceName = instanceName;

        //
        // Create the data integrator. Data integrator needs data providers to
        // provide processed acceleration data for each axis.
        //
        if ((options & (ACCEL_INTEGRATE | ACCEL_DOUBLE_INTEGRATE)) != 0)
        {
            dataIntegrator = new TrcDataIntegrator(
                    instanceName, this, DataType.ACCELERATION,
                    (options & ACCEL_DOUBLE_INTEGRATE) != 0);
            if ((options & ACCEL_DOUBLE_INTEGRATE) != 0)
            {
                dataIntegrator.setUnwindIntegratedData(true);
            }
        }
    }   //TrcAccelerometer
    /**
     * Constructor: Creates an instance of the object with no axis filters.
     *
     * @param instanceName specifies the instance name.
     * @param numAxes specifies the number of axes of the accelerometer.
     * @param options specifies the accelerometer options. Multiple options can be OR'd together.
     *                ACCEL_HAS_X_AXIS - supports x-axis.
     *                ACCEL_HAS_Y_AXIS - supports y-axis.
     *                ACCEL_HAS_Z_AXIS - supports z-axis.
     *                ACCEL_INTEGRATE - do integration on all axes to get velocities.
     *                ACCEL_DOUBLE_INTEGRATE - do double integration on all axes to get distances.
     */
    public TrcAccelerometer(final String instanceName, int numAxes, int options)
    {
        this(instanceName, numAxes, options, null);
    }   //TrcAccelerometer
    /**
     * This method returns the instance name.
     *
     * @return instance name.
     */
    public String toString()
    {
        return instanceName;
    }   //toString
/**
* This method enables/disables the processing of accelerometer data. It is not
* automatically enabled when the TrcAccelerometer object is created. You need
* to explicitly enable the it before data processing will start. As part of
* enabling the accelerometer, calibrate() is also called. calibrate() may be
* overridden by the platform dependent accelerometer if it is capable of doing
* its own. Otherwise, calibrate will call the built-in calibrator to do the
* calibration.
* Enabling/disabling data processing for the gyro involves enabling/disabling
* the integrator if it exist.
*
* @param enabled specifies true if enabling, false otherwise.
*/
public void setEnabled(boolean enabled)
{
final String funcName = "setEnabled";
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API,
"enabled=%s", Boolean.toString(enabled));
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
}
//
// Enable/disable integrator.
//
if (dataIntegrator != null)
{
dataIntegrator.setEnabled(enabled);
}
} //setEnabled
/**
* This method inverts the x-axis. This is useful if the orientation of
* the accelerometer x-axis is such that the data goes the wrong direction.
*
* @param inverted specifies true to invert x-axis, false otherwise.
*/
public void setXInverted(boolean inverted)
{
final String funcName = "setXInverted";
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API,
"inverted=%s", Boolean.toString(inverted));
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
}
setInverted(xIndex, inverted);
} //setXInverted
/**
* This method inverts the y-axis. This is useful if the orientation of
* the accelerometer y-axis is such that the data goes the wrong direction.
*
* @param inverted specifies true to invert y-axis, false otherwise.
*/
public void setYInverted(boolean inverted)
{
final String funcName = "setYInverted";
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API,
"inverted=%s", Boolean.toString(inverted));
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
}
setInverted(yIndex, inverted);
} //setYInverted
/**
* This method inverts the z-axis. This is useful if the orientation of
* the accelerometer z-axis is such that the data goes the wrong direction.
*
* @param inverted specifies true to invert z-axis, false otherwise.
*/
public void setZInverted(boolean inverted)
{
final String funcName = "setZInverted";
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API,
"inverted=%s", Boolean.toString(inverted));
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
}
setInverted(zIndex, inverted);
} //setZInverted
/**
* This method sets the scale factor for the data of the x-axis.
*
* @param scale specifies the x scale factor.
*/
public void setXScale(double scale)
{
final String funcName = "setXScale";
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API, "scale=%f", scale);
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
}
setScale(xIndex, scale);
} //setXScale
/**
* This method sets the scale factor for the data of the y-axis.
*
* @param scale specifies the y scale factor.
*/
public void setYScale(double scale)
{
final String funcName = "setYScale";
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API, "scale=%f", scale);
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
}
setScale(yIndex, scale);
} //setYScale
/**
* This method sets the scale factor for the data of the z-axis.
*
* @param scale specifies the z scale factor.
*/
public void setZScale(double scale)
{
final String funcName = "setZScale";
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API, "scale=%f", scale);
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
}
setScale(zIndex, scale);
} //setZScale
/**
* This method returns the acceleration on the x-axis.
*
* @return X acceleration.
*/
public SensorData getXAcceleration()
{
final String funcName = "getXAcceleration";
SensorData data = getData(xIndex, DataType.ACCELERATION);
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
"=(timestamp=%.3f,value=%f)", data.timestamp, data.value);
}
return data;
} //getXAcceleration
/**
* This method returns the acceleration on the y-axis.
*
* @return Y acceleration.
*/
public SensorData getYAcceleration()
{
final String funcName = "getYAcceleration";
SensorData data = getData(yIndex, DataType.ACCELERATION);
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
"=(timestamp=%.3f,value=%f)", data.timestamp, data.value);
}
return data;
} //getYAcceleration
/**
* This method returns the acceleration on the z-axis.
*
* @return Z acceleration.
*/
public SensorData getZAcceleration()
{
final String funcName = "getZAcceleration";
SensorData data = getData(zIndex, DataType.ACCELERATION);
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
"=(timestamp=%.3f,value=%f)", data.timestamp, data.value);
}
return data;
} //getZAcceleration
/**
* This method returns the velocity of the x-axis. If there is an integrator,
* we call the integrator to get the velocity else we call the platform dependent
* accelerometer to get the raw velocity value.
*
* @return X velocity.
*/
public SensorData getXVelocity()
{
final String funcName = "getXVelocity";
SensorData data = null;
if (dataIntegrator != null)
{
data = dataIntegrator.getIntegratedData(xIndex);
}
else
{
data = getRawXData(DataType.VELOCITY);
}
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
"=(timestamp=%.3f,value=%f)", data.timestamp, data.value);
}
return data;
} //getXVelocity
/**
* This method returns the velocity of the y-axis. If there is an integrator,
* we call the integrator to get the velocity else we call the platform dependent
* accelerometer to get the raw velocity value.
*
* @return Y velocity.
*/
public SensorData getYVelocity()
{
final String funcName = "getYVelocity";
SensorData data = null;
if (dataIntegrator != null)
{
data = dataIntegrator.getIntegratedData(yIndex);
}
else
{
data = getRawYData(DataType.VELOCITY);
}
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
"=(timestamp=%.3f,value=%f)", data.timestamp, data.value);
}
return data;
} //getYVelocity
/**
* This method returns the velocity of the z-axis. If there is an integrator,
* we call the integrator to get the velocity else we call the platform dependent
* accelerometer to get the raw velocity value.
*
* @return Z velocity.
*/
public SensorData getZVelocity()
{
final String funcName = "getZVelocity";
SensorData data = null;
if (dataIntegrator != null)
{
data = dataIntegrator.getIntegratedData(zIndex);
}
else
{
data = getRawZData(DataType.VELOCITY);
}
if (debugEnabled)
{
dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
"=(timestamp=%.3f,value=%f)", data.timestamp, data.value);
}
return data;
} //getZVelocity
/**
 * This method returns the distance of the x-axis. When a data integrator is
 * attached, the double-integrated (distance) value is returned; otherwise the
 * raw distance value is read from the platform dependent accelerometer.
 *
 * @return X distance.
 */
public SensorData getXDistance()
{
    final String funcName = "getXDistance";
    final SensorData data =
            dataIntegrator != null?
                    dataIntegrator.getDoubleIntegratedData(xIndex):
                    getRawXData(DataType.DISTANCE);

    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
                "=(timestamp=%.3f,value=%f)", data.timestamp, data.value);
    }

    return data;
}   //getXDistance
/**
 * This method returns the distance of the y-axis. When a data integrator is
 * attached, the double-integrated (distance) value is returned; otherwise the
 * raw distance value is read from the platform dependent accelerometer.
 *
 * @return Y distance.
 */
public SensorData getYDistance()
{
    final String funcName = "getYDistance";
    final SensorData data =
            dataIntegrator != null?
                    dataIntegrator.getDoubleIntegratedData(yIndex):
                    getRawYData(DataType.DISTANCE);

    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
                "=(timestamp=%.3f,value=%f)", data.timestamp, data.value);
    }

    return data;
}   //getYDistance
/**
 * This method returns the distance of the z-axis. When a data integrator is
 * attached, the double-integrated (distance) value is returned; otherwise the
 * raw distance value is read from the platform dependent accelerometer.
 *
 * @return Z distance.
 */
public SensorData getZDistance()
{
    final String funcName = "getZDistance";
    final SensorData data =
            dataIntegrator != null?
                    dataIntegrator.getDoubleIntegratedData(zIndex):
                    getRawZData(DataType.DISTANCE);

    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
                "=(timestamp=%.3f,value=%f)", data.timestamp, data.value);
    }

    return data;
}   //getZDistance
//
// The following methods can be overridden by a platform dependent accelerometer class.
//
/**
 * This method resets the integrator on the x-axis. It is a no-op when no
 * data integrator is attached.
 */
public void resetXIntegrator()
{
    final String funcName = "resetXIntegrator";

    if (dataIntegrator != null)
    {
        dataIntegrator.reset(xIndex);
    }

    // Trace after the reset so the log reflects the completed operation.
    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
    }
}   //resetXIntegrator
/**
 * This method resets the integrator on the y-axis. It is a no-op when no
 * data integrator is attached.
 */
public void resetYIntegrator()
{
    final String funcName = "resetYIntegrator";

    if (dataIntegrator != null)
    {
        dataIntegrator.reset(yIndex);
    }

    // Trace after the reset so the log reflects the completed operation.
    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
    }
}   //resetYIntegrator
/**
 * This method resets the integrator on the z-axis. It is a no-op when no
 * data integrator is attached.
 */
public void resetZIntegrator()
{
    final String funcName = "resetZIntegrator";

    if (dataIntegrator != null)
    {
        dataIntegrator.reset(zIndex);
    }

    // Trace after the reset so the log reflects the completed operation.
    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
    }
}   //resetZIntegrator
//
// Implements TrcSensor abstract methods.
//
/**
 * This abstract method returns the raw sensor data for the specified axis and type.
 *
 * @param index specifies the axis index (xIndex, yIndex or zIndex).
 * @param dataType specifies the data type (must be a {@code DataType}).
 * @return raw data for the specified axis, or null if the index matches no axis.
 */
@Override
public SensorData getRawData(int index, Object dataType)
{
    final String funcName = "getRawData";
    SensorData data = null;

    if (index == xIndex)
    {
        data = getRawXData((DataType)dataType);
    }
    else if (index == yIndex)
    {
        data = getRawYData((DataType)dataType);
    }
    else if (index == zIndex)
    {
        data = getRawZData((DataType)dataType);
    }

    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.CALLBK, "index=%d", index);
        //
        // BUGFIX: the exit-trace format string was missing its closing ")" and
        // dereferenced data unconditionally, throwing a NullPointerException
        // whenever index matched no axis while tracing was enabled.
        //
        if (data != null)
        {
            dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.CALLBK,
                    "=(timestamp=%.3f,value=%f)", data.timestamp, data.value);
        }
        else
        {
            dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.CALLBK, "=null");
        }
    }

    return data;
}   //getRawData
//
// Implements TrcSensorDataSource interface.
//
/**
 * This method returns the sensor data of the specified index.
 * Index mapping: 0-2 = X/Y/Z acceleration, 3-5 = X/Y/Z velocity,
 * 6-8 = X/Y/Z distance.
 *
 * @param index specifies the data index.
 * @return sensor data of the specified index, or null if the index is out of range.
 */
@Override
public TrcSensor.SensorData getSensorData(int index)
{
    final String funcName = "getSensorData";
    TrcSensor.SensorData data = null;

    switch (index)
    {
        case 0:
            data = getXAcceleration();
            break;

        case 1:
            data = getYAcceleration();
            break;

        case 2:
            data = getZAcceleration();
            break;

        case 3:
            data = getXVelocity();
            break;

        case 4:
            data = getYVelocity();
            break;

        case 5:
            data = getZVelocity();
            break;

        case 6:
            data = getXDistance();
            break;

        case 7:
            data = getYDistance();
            break;

        case 8:
            data = getZDistance();
            break;
    }

    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API, "index=%d", index);
        //
        // BUGFIX: data stays null for an out-of-range index; the exit trace
        // used to dereference it unconditionally and throw a NullPointerException.
        //
        if (data != null)
        {
            dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API,
                    "=(time=%.3f,value=%f)", data.timestamp, data.value);
        }
        else
        {
            dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API, "=null");
        }
    }

    return data;
}   //getSensorData
} //class TrcAccelerometer
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.09.25 at 09:53:49 PM BST
//
package org.opencb.biodata.formats.protein.uniprot.v201504jaxb;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlElementDecl;
import javax.xml.bind.annotation.XmlRegistry;
import javax.xml.namespace.QName;
/**
 * JAXB factory for the Java content and element interfaces generated in the
 * {@code org.opencb.biodata.formats.protein.uniprot.v201504jaxb} package.
 *
 * <p>An ObjectFactory allows you to programmatically construct new instances
 * of the Java representation for XML content. The Java representation of XML
 * content can consist of schema derived interfaces and classes representing
 * the binding of schema type definitions, element declarations and model
 * groups. A factory method for each of these is provided below.
 */
@XmlRegistry
public class ObjectFactory {

    /** Qualified name of the {@code copyright} element in the UniProt namespace. */
    private final static QName _Copyright_QNAME = new QName("http://uniprot.org/uniprot", "copyright");

    /**
     * Creates a new ObjectFactory that can be used to create new instances of
     * schema derived classes for this package.
     */
    public ObjectFactory() {
    }

    /** Creates an instance of {@link SourceDataType}. */
    public SourceDataType createSourceDataType() {
        return new SourceDataType();
    }

    /** Creates an instance of {@link IsoformType}. */
    public IsoformType createIsoformType() {
        return new IsoformType();
    }

    /** Creates an instance of {@link CommentType}. */
    public CommentType createCommentType() {
        return new CommentType();
    }

    /** Creates an instance of {@link CommentType.Conflict}. */
    public CommentType.Conflict createCommentTypeConflict() {
        return new CommentType.Conflict();
    }

    /** Creates an instance of {@link OrganismType}. */
    public OrganismType createOrganismType() {
        return new OrganismType();
    }

    /** Creates an instance of {@link ProteinType}. */
    public ProteinType createProteinType() {
        return new ProteinType();
    }

    /** Creates an instance of {@link Entry}. */
    public Entry createEntry() {
        return new Entry();
    }

    /** Creates an instance of {@link GeneType}. */
    public GeneType createGeneType() {
        return new GeneType();
    }

    /** Creates an instance of {@link GeneLocationType}. */
    public GeneLocationType createGeneLocationType() {
        return new GeneLocationType();
    }

    /** Creates an instance of {@link ReferenceType}. */
    public ReferenceType createReferenceType() {
        return new ReferenceType();
    }

    /** Creates an instance of {@link DbReferenceType}. */
    public DbReferenceType createDbReferenceType() {
        return new DbReferenceType();
    }

    /** Creates an instance of {@link ProteinExistenceType}. */
    public ProteinExistenceType createProteinExistenceType() {
        return new ProteinExistenceType();
    }

    /** Creates an instance of {@link KeywordType}. */
    public KeywordType createKeywordType() {
        return new KeywordType();
    }

    /** Creates an instance of {@link FeatureType}. */
    public FeatureType createFeatureType() {
        return new FeatureType();
    }

    /** Creates an instance of {@link EvidenceType}. */
    public EvidenceType createEvidenceType() {
        return new EvidenceType();
    }

    /** Creates an instance of {@link SequenceType}. */
    public SequenceType createSequenceType() {
        return new SequenceType();
    }

    /** Creates an instance of {@link Uniprot}. */
    public Uniprot createUniprot() {
        return new Uniprot();
    }

    /** Creates an instance of {@link StatusType}. */
    public StatusType createStatusType() {
        return new StatusType();
    }

    /** Creates an instance of {@link PositionType}. */
    public PositionType createPositionType() {
        return new PositionType();
    }

    /** Creates an instance of {@link ConsortiumType}. */
    public ConsortiumType createConsortiumType() {
        return new ConsortiumType();
    }

    /** Creates an instance of {@link GeneNameType}. */
    public GeneNameType createGeneNameType() {
        return new GeneNameType();
    }

    /** Creates an instance of {@link LocationType}. */
    public LocationType createLocationType() {
        return new LocationType();
    }

    /** Creates an instance of {@link CitationType}. */
    public CitationType createCitationType() {
        return new CitationType();
    }

    /** Creates an instance of {@link PropertyType}. */
    public PropertyType createPropertyType() {
        return new PropertyType();
    }

    /** Creates an instance of {@link CofactorType}. */
    public CofactorType createCofactorType() {
        return new CofactorType();
    }

    /** Creates an instance of {@link EvidencedStringType}. */
    public EvidencedStringType createEvidencedStringType() {
        return new EvidencedStringType();
    }

    /** Creates an instance of {@link PersonType}. */
    public PersonType createPersonType() {
        return new PersonType();
    }

    /** Creates an instance of {@link ImportedFromType}. */
    public ImportedFromType createImportedFromType() {
        return new ImportedFromType();
    }

    /** Creates an instance of {@link EventType}. */
    public EventType createEventType() {
        return new EventType();
    }

    /** Creates an instance of {@link InteractantType}. */
    public InteractantType createInteractantType() {
        return new InteractantType();
    }

    /** Creates an instance of {@link NameListType}. */
    public NameListType createNameListType() {
        return new NameListType();
    }

    /** Creates an instance of {@link SourceType}. */
    public SourceType createSourceType() {
        return new SourceType();
    }

    /** Creates an instance of {@link MoleculeType}. */
    public MoleculeType createMoleculeType() {
        return new MoleculeType();
    }

    /** Creates an instance of {@link OrganismNameType}. */
    public OrganismNameType createOrganismNameType() {
        return new OrganismNameType();
    }

    /** Creates an instance of {@link SubcellularLocationType}. */
    public SubcellularLocationType createSubcellularLocationType() {
        return new SubcellularLocationType();
    }

    /** Creates an instance of {@link SourceDataType.Strain}. */
    public SourceDataType.Strain createSourceDataTypeStrain() {
        return new SourceDataType.Strain();
    }

    /** Creates an instance of {@link SourceDataType.Plasmid}. */
    public SourceDataType.Plasmid createSourceDataTypePlasmid() {
        return new SourceDataType.Plasmid();
    }

    /** Creates an instance of {@link SourceDataType.Transposon}. */
    public SourceDataType.Transposon createSourceDataTypeTransposon() {
        return new SourceDataType.Transposon();
    }

    /** Creates an instance of {@link SourceDataType.Tissue}. */
    public SourceDataType.Tissue createSourceDataTypeTissue() {
        return new SourceDataType.Tissue();
    }

    /** Creates an instance of {@link IsoformType.Name}. */
    public IsoformType.Name createIsoformTypeName() {
        return new IsoformType.Name();
    }

    /** Creates an instance of {@link IsoformType.Sequence}. */
    public IsoformType.Sequence createIsoformTypeSequence() {
        return new IsoformType.Sequence();
    }

    /** Creates an instance of {@link CommentType.Absorption}. */
    public CommentType.Absorption createCommentTypeAbsorption() {
        return new CommentType.Absorption();
    }

    /** Creates an instance of {@link CommentType.Kinetics}. */
    public CommentType.Kinetics createCommentTypeKinetics() {
        return new CommentType.Kinetics();
    }

    /** Creates an instance of {@link CommentType.PhDependence}. */
    public CommentType.PhDependence createCommentTypePhDependence() {
        return new CommentType.PhDependence();
    }

    /** Creates an instance of {@link CommentType.RedoxPotential}. */
    public CommentType.RedoxPotential createCommentTypeRedoxPotential() {
        return new CommentType.RedoxPotential();
    }

    /** Creates an instance of {@link CommentType.TemperatureDependence}. */
    public CommentType.TemperatureDependence createCommentTypeTemperatureDependence() {
        return new CommentType.TemperatureDependence();
    }

    /** Creates an instance of {@link CommentType.Link}. */
    public CommentType.Link createCommentTypeLink() {
        return new CommentType.Link();
    }

    /** Creates an instance of {@link CommentType.Disease}. */
    public CommentType.Disease createCommentTypeDisease() {
        return new CommentType.Disease();
    }

    /** Creates an instance of {@link CommentType.Conflict.Sequence}. */
    public CommentType.Conflict.Sequence createCommentTypeConflictSequence() {
        return new CommentType.Conflict.Sequence();
    }

    /** Creates an instance of {@link OrganismType.Lineage}. */
    public OrganismType.Lineage createOrganismTypeLineage() {
        return new OrganismType.Lineage();
    }

    /** Creates an instance of {@link ProteinType.RecommendedName}. */
    public ProteinType.RecommendedName createProteinTypeRecommendedName() {
        return new ProteinType.RecommendedName();
    }

    /** Creates an instance of {@link ProteinType.AlternativeName}. */
    public ProteinType.AlternativeName createProteinTypeAlternativeName() {
        return new ProteinType.AlternativeName();
    }

    /** Creates an instance of {@link ProteinType.SubmittedName}. */
    public ProteinType.SubmittedName createProteinTypeSubmittedName() {
        return new ProteinType.SubmittedName();
    }

    /** Creates an instance of {@link ProteinType.Domain}. */
    public ProteinType.Domain createProteinTypeDomain() {
        return new ProteinType.Domain();
    }

    /** Creates an instance of {@link ProteinType.Component}. */
    public ProteinType.Component createProteinTypeComponent() {
        return new ProteinType.Component();
    }

    /** Creates an instance of {@link JAXBElement}{@code <}{@link String}{@code >} for the {@code copyright} element. */
    @XmlElementDecl(namespace = "http://uniprot.org/uniprot", name = "copyright")
    public JAXBElement<String> createCopyright(String value) {
        return new JAXBElement<String>(_Copyright_QNAME, String.class, null, value);
    }

}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.cameraview;
import android.app.Activity;
import android.content.Context;
import android.graphics.Rect;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.os.Build;
import android.os.HandlerThread;
import android.os.Handler;
import android.os.Parcel;
import android.os.Parcelable;
import androidx.annotation.IntDef;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.os.ParcelableCompat;
import androidx.core.os.ParcelableCompatCreatorCallbacks;
import androidx.core.view.ViewCompat;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.widget.FrameLayout;
import android.graphics.SurfaceTexture;
import com.facebook.react.bridge.ReadableMap;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
public class CameraView extends FrameLayout {
    /** The camera device faces the opposite direction as the device's screen. */
    public static final int FACING_BACK = Constants.FACING_BACK;

    /** The camera device faces the same direction as the device's screen. */
    public static final int FACING_FRONT = Constants.FACING_FRONT;

    /** Direction the camera faces relative to device screen. */
    @IntDef({FACING_BACK, FACING_FRONT})
    @Retention(RetentionPolicy.SOURCE)
    public @interface Facing {
    }

    /** Flash will not be fired. */
    public static final int FLASH_OFF = Constants.FLASH_OFF;

    /** Flash will always be fired during snapshot. */
    public static final int FLASH_ON = Constants.FLASH_ON;

    /** Constant emission of light during preview, auto-focus and snapshot. */
    public static final int FLASH_TORCH = Constants.FLASH_TORCH;

    /** Flash will be fired automatically when required. */
    public static final int FLASH_AUTO = Constants.FLASH_AUTO;

    /** Flash will be fired in red-eye reduction mode. */
    public static final int FLASH_RED_EYE = Constants.FLASH_RED_EYE;

    /** The mode for the camera device's flash control. */
    @Retention(RetentionPolicy.SOURCE)
    @IntDef({FLASH_OFF, FLASH_ON, FLASH_TORCH, FLASH_AUTO, FLASH_RED_EYE})
    public @interface Flash {
    }

    // Active camera implementation (Camera1 or Camera2 variant).
    CameraViewImpl mImpl;

    // Fans implementation callbacks out to registered Callback listeners.
    private final CallbackBridge mCallbacks;

    // Whether the view resizes itself to preserve the camera aspect ratio.
    private boolean mAdjustViewBounds;

    private Context mContext;

    private final DisplayOrientationDetector mDisplayOrientationDetector;

    // Background thread/handler for heavy non-UI camera work.
    protected HandlerThread mBgThread;
    protected Handler mBgHandler;
    /** Creates a CameraView with no XML attributes. */
    public CameraView(Context context, boolean fallbackToOldApi) {
        this(context, null, fallbackToOldApi);
    }

    public CameraView(Context context, AttributeSet attrs, boolean fallbackToOldApi) {
        this(context, attrs, 0, fallbackToOldApi);
    }

    @SuppressWarnings("WrongConstant")
    public CameraView(Context context, AttributeSet attrs, int defStyleAttr, boolean fallbackToOldApi) {
        super(context, attrs, defStyleAttr);
        // Background handler for heavy non-UI work.
        mBgThread = new HandlerThread("RNCamera-Handler-Thread");
        mBgThread.start();
        mBgHandler = new Handler(mBgThread.getLooper());
        // In the IDE layout preview there is no camera; skip all camera setup.
        if (isInEditMode()){
            mCallbacks = null;
            mDisplayOrientationDetector = null;
            return;
        }
        mAdjustViewBounds = true;
        mContext = context;
        // Internal setup
        final PreviewImpl preview = createPreviewImpl(context);
        mCallbacks = new CallbackBridge();
        // Camera1 is used when explicitly requested, on pre-Lollipop devices
        // (API < 21), or when the Camera2 HAL reports the legacy implementation.
        if (fallbackToOldApi || Build.VERSION.SDK_INT < 21 || Camera2.isLegacy(context)) {
            mImpl = new Camera1(mCallbacks, preview, mBgHandler);
        } else if (Build.VERSION.SDK_INT < 23) {
            mImpl = new Camera2(mCallbacks, preview, context, mBgHandler);
        } else {
            mImpl = new Camera2Api23(mCallbacks, preview, context, mBgHandler);
        }
        // Display orientation detector: forwards orientation changes to the
        // camera implementation so the preview/output stay upright.
        mDisplayOrientationDetector = new DisplayOrientationDetector(context) {
            @Override
            public void onDisplayOrientationChanged(int displayOrientation, int deviceOrientation) {
                mImpl.setDisplayOrientation(displayOrientation);
                mImpl.setDeviceOrientation(deviceOrientation);
            }
        };
    }
public void cleanup(){
if(mBgThread != null){
if(Build.VERSION.SDK_INT < 18){
mBgThread.quit();
}
else{
mBgThread.quitSafely();
}
mBgThread = null;
}
}
@NonNull
private PreviewImpl createPreviewImpl(Context context) {
PreviewImpl preview;
if (Build.VERSION.SDK_INT < 14) {
preview = new SurfaceViewPreview(context, this);
} else {
preview = new TextureViewPreview(context, this);
}
return preview;
}
    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        // Start listening for display orientation changes (no camera exists in
        // the IDE layout preview, so skip there).
        if (!isInEditMode()) {
            mDisplayOrientationDetector.enable(ViewCompat.getDisplay(this));
        }
    }

    @Override
    protected void onDetachedFromWindow() {
        // Stop listening for display orientation changes before detaching.
        if (!isInEditMode()) {
            mDisplayOrientationDetector.disable();
        }
        super.onDetachedFromWindow();
    }
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
if (isInEditMode()){
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
return;
}
// Handle android:adjustViewBounds
if (mAdjustViewBounds) {
if (!isCameraOpened()) {
mCallbacks.reserveRequestLayoutOnOpen();
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
return;
}
final int widthMode = MeasureSpec.getMode(widthMeasureSpec);
final int heightMode = MeasureSpec.getMode(heightMeasureSpec);
if (widthMode == MeasureSpec.EXACTLY && heightMode != MeasureSpec.EXACTLY) {
final AspectRatio ratio = getAspectRatio();
assert ratio != null;
int height = (int) (MeasureSpec.getSize(widthMeasureSpec) * ratio.toFloat());
if (heightMode == MeasureSpec.AT_MOST) {
height = Math.min(height, MeasureSpec.getSize(heightMeasureSpec));
}
super.onMeasure(widthMeasureSpec,
MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
} else if (widthMode != MeasureSpec.EXACTLY && heightMode == MeasureSpec.EXACTLY) {
final AspectRatio ratio = getAspectRatio();
assert ratio != null;
int width = (int) (MeasureSpec.getSize(heightMeasureSpec) * ratio.toFloat());
if (widthMode == MeasureSpec.AT_MOST) {
width = Math.min(width, MeasureSpec.getSize(widthMeasureSpec));
}
super.onMeasure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
heightMeasureSpec);
} else {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
} else {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
// Measure the TextureView
int width = getMeasuredWidth();
int height = getMeasuredHeight();
AspectRatio ratio = getAspectRatio();
if (mDisplayOrientationDetector.getLastKnownDisplayOrientation() % 180 == 0) {
ratio = ratio.inverse();
}
assert ratio != null;
if (height < width * ratio.getY() / ratio.getX()) {
mImpl.getView().measure(
MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
MeasureSpec.makeMeasureSpec(width * ratio.getY() / ratio.getX(),
MeasureSpec.EXACTLY));
} else {
mImpl.getView().measure(
MeasureSpec.makeMeasureSpec(height * ratio.getX() / ratio.getY(),
MeasureSpec.EXACTLY),
MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
}
}
    /**
     * Captures the full camera configuration (facing, camera id, ratio, focus,
     * flash, exposure, zoom, white balance, sounds, scanning, picture size) so
     * it survives configuration changes and view re-creation.
     */
    @Override
    protected Parcelable onSaveInstanceState() {
        SavedState state = new SavedState(super.onSaveInstanceState());
        state.facing = getFacing();
        state.cameraId = getCameraId();
        state.ratio = getAspectRatio();
        state.autoFocus = getAutoFocus();
        state.flash = getFlash();
        state.exposure = getExposureCompensation();
        state.focusDepth = getFocusDepth();
        state.zoom = getZoom();
        state.whiteBalance = getWhiteBalance();
        state.playSoundOnCapture = getPlaySoundOnCapture();
        state.playSoundOnRecord = getPlaySoundOnRecord();
        state.scanning = getScanning();
        state.pictureSize = getPictureSize();
        return state;
    }
    /**
     * Restores the camera configuration saved by {@link #onSaveInstanceState()},
     * applying each setting to the current camera implementation.
     */
    @Override
    protected void onRestoreInstanceState(Parcelable state) {
        if (!(state instanceof SavedState)) {
            super.onRestoreInstanceState(state);
            return;
        }
        SavedState ss = (SavedState) state;
        super.onRestoreInstanceState(ss.getSuperState());
        setFacing(ss.facing);
        setCameraId(ss.cameraId);
        setAspectRatio(ss.ratio);
        setAutoFocus(ss.autoFocus);
        setFlash(ss.flash);
        setExposureCompensation(ss.exposure);
        setFocusDepth(ss.focusDepth);
        setZoom(ss.zoom);
        setWhiteBalance(ss.whiteBalance);
        setPlaySoundOnCapture(ss.playSoundOnCapture);
        setPlaySoundOnRecord(ss.playSoundOnRecord);
        setScanning(ss.scanning);
        setPictureSize(ss.pictureSize);
    }
    /**
     * Switches between the Camera1 and Camera2 implementations at runtime,
     * re-using the current preview surface. If the camera was open, it is
     * stopped before the swap and restarted afterwards. No-op on pre-Lollipop
     * devices (API < 21) where Camera2 is unavailable.
     *
     * @param useCamera2 {@code true} to use the Camera2 API when supported.
     */
    public void setUsingCamera2Api(boolean useCamera2) {
        if (Build.VERSION.SDK_INT < 21) {
            return;
        }
        boolean wasOpened = isCameraOpened();
        Parcelable state = onSaveInstanceState();
        if (useCamera2 && !Camera2.isLegacy(mContext)) {
            if (wasOpened) {
                stop();
            }
            if (Build.VERSION.SDK_INT < 23) {
                mImpl = new Camera2(mCallbacks, mImpl.mPreview, mContext, mBgHandler);
            } else {
                mImpl = new Camera2Api23(mCallbacks, mImpl.mPreview, mContext, mBgHandler);
            }
            onRestoreInstanceState(state);
        } else {
            // Already on Camera1; nothing to do.
            if (mImpl instanceof Camera1) {
                return;
            }
            if (wasOpened) {
                stop();
            }
            // NOTE(review): the saved state is NOT restored on this Camera1
            // path, unlike the Camera2 path above — confirm whether
            // onRestoreInstanceState(state) should be called here too.
            mImpl = new Camera1(mCallbacks, mImpl.mPreview, mBgHandler);
        }
        if (wasOpened) {
            start();
        }
    }
/**
* Open a camera device and start showing camera preview. This is typically called from
* {@link Activity#onResume()}.
*/
public void start() {
mImpl.start();
// this fallback is no longer needed and was too buggy/slow
// if (!mImpl.start()) {
// if (mImpl.getView() != null) {
// this.removeView(mImpl.getView());
// }
// //store the state and restore this state after fall back to Camera1
// Parcelable state = onSaveInstanceState();
// // Camera2 uses legacy hardware layer; fall back to Camera1
// mImpl = new Camera1(mCallbacks, createPreviewImpl(getContext()), mBgHandler);
// onRestoreInstanceState(state);
// mImpl.start();
// }
}
    /**
     * Stop camera preview and close the device. This is typically called from
     * {@link Activity#onPause()}.
     */
    public void stop() {
        mImpl.stop();
    }

    /**
     * @return {@code true} if the camera is opened.
     */
    public boolean isCameraOpened() {
        return mImpl.isCameraOpened();
    }

    /**
     * Add a new callback. Callbacks are notified of camera open/close,
     * pictures, recordings and preview frames.
     *
     * @param callback The {@link Callback} to add.
     * @see #removeCallback(Callback)
     */
    public void addCallback(@NonNull Callback callback) {
        mCallbacks.add(callback);
    }

    /**
     * Remove a previously added callback.
     *
     * @param callback The {@link Callback} to remove.
     * @see #addCallback(Callback)
     */
    public void removeCallback(@NonNull Callback callback) {
        mCallbacks.remove(callback);
    }
    /**
     * @param adjustViewBounds {@code true} if you want the CameraView to adjust its bounds to
     *                         preserve the aspect ratio of camera.
     * @see #getAdjustViewBounds()
     */
    public void setAdjustViewBounds(boolean adjustViewBounds) {
        // Only trigger a layout pass when the value actually changes.
        if (mAdjustViewBounds != adjustViewBounds) {
            mAdjustViewBounds = adjustViewBounds;
            requestLayout();
        }
    }

    /**
     * @return True when this CameraView is adjusting its bounds to preserve the aspect ratio of
     * camera.
     * @see #setAdjustViewBounds(boolean)
     */
    public boolean getAdjustViewBounds() {
        return mAdjustViewBounds;
    }
public View getView() {
if (mImpl != null) {
return mImpl.getView();
}
return null;
}
/**
* Chooses camera by the direction it faces.
*
* @param facing The camera facing. Must be either {@link #FACING_BACK} or
* {@link #FACING_FRONT}.
*/
public void setFacing(@Facing int facing) {
mImpl.setFacing(facing);
}
/**
* Gets the direction that the current camera faces.
*
* @return The camera facing.
*/
@Facing
public int getFacing() {
//noinspection WrongConstant
return mImpl.getFacing();
}
/**
* Chooses camera by its camera iD
*
* @param id The camera ID
*/
public void setCameraId(String id) {
mImpl.setCameraId(id);
}
/**
* Gets the currently set camera ID
*
* @return The camera facing.
*/
public String getCameraId() {
return mImpl.getCameraId();
}
/**
* Gets all the aspect ratios supported by the current camera.
*/
public Set<AspectRatio> getSupportedAspectRatios() {
return mImpl.getSupportedAspectRatios();
}
/**
* Gets all the camera IDs supported by the phone as a String
*/
public List<Properties> getCameraIds() {
return mImpl.getCameraIds();
}
/**
* Sets the aspect ratio of camera.
*
* @param ratio The {@link AspectRatio} to be set.
*/
public void setAspectRatio(@NonNull AspectRatio ratio) {
if (mImpl.setAspectRatio(ratio)) {
requestLayout();
}
}
/**
* Gets the current aspect ratio of camera.
*
* @return The current {@link AspectRatio}. Can be {@code null} if no camera is opened yet.
*/
@Nullable
public AspectRatio getAspectRatio() {
return mImpl.getAspectRatio();
}
/**
* Gets all the picture sizes for particular ratio supported by the current camera.
*
* @param ratio {@link AspectRatio} for which the available image sizes will be returned.
*/
public SortedSet<Size> getAvailablePictureSizes(@NonNull AspectRatio ratio) {
return mImpl.getAvailablePictureSizes(ratio);
}
/**
* Sets the size of taken pictures.
*
* @param size The {@link Size} to be set.
*/
public void setPictureSize(@NonNull Size size) {
mImpl.setPictureSize(size);
}
/**
* Gets the size of pictures that will be taken.
*/
public Size getPictureSize() {
return mImpl.getPictureSize();
}
/**
* Enables or disables the continuous auto-focus mode. When the current camera doesn't support
* auto-focus, calling this method will be ignored.
*
* @param autoFocus {@code true} to enable continuous auto-focus mode. {@code false} to
* disable it.
*/
public void setAutoFocus(boolean autoFocus) {
mImpl.setAutoFocus(autoFocus);
}
/**
* Returns whether the continuous auto-focus mode is enabled.
*
* @return {@code true} if the continuous auto-focus mode is enabled. {@code false} if it is
* disabled, or if it is not supported by the current camera.
*/
public boolean getAutoFocus() {
return mImpl.getAutoFocus();
}
/**
* Sets the flash mode.
*
* @param flash The desired flash mode.
*/
public void setFlash(@Flash int flash) {
mImpl.setFlash(flash);
}
public ArrayList<int[]> getSupportedPreviewFpsRange() {
return mImpl.getSupportedPreviewFpsRange();
}
/**
* Gets the current flash mode.
*
* @return The current flash mode.
*/
@Flash
public int getFlash() {
//noinspection WrongConstant
return mImpl.getFlash();
}
public void setExposureCompensation(float exposure) {
mImpl.setExposureCompensation(exposure);
}
public float getExposureCompensation() {
return mImpl.getExposureCompensation();
}
/**
* Gets the camera orientation relative to the devices native orientation.
*
* @return The orientation of the camera.
*/
public int getCameraOrientation() {
return mImpl.getCameraOrientation();
}
/**
* Sets the auto focus point.
*
* @param x sets the x coordinate for camera auto focus
* @param y sets the y coordinate for camera auto focus
*/
public void setAutoFocusPointOfInterest(float x, float y) {
mImpl.setFocusArea(x, y);
}
public void setFocusDepth(float value) {
mImpl.setFocusDepth(value);
}
public float getFocusDepth() { return mImpl.getFocusDepth(); }
public void setZoom(float zoom) {
mImpl.setZoom(zoom);
}
public float getZoom() {
return mImpl.getZoom();
}
public void setWhiteBalance(int whiteBalance) {
mImpl.setWhiteBalance(whiteBalance);
}
public int getWhiteBalance() {
return mImpl.getWhiteBalance();
}
public void setPlaySoundOnCapture(boolean playSoundOnCapture) {
mImpl.setPlaySoundOnCapture(playSoundOnCapture);
}
public boolean getPlaySoundOnCapture() {
return mImpl.getPlaySoundOnCapture();
}
public void setPlaySoundOnRecord(boolean playSoundOnRecord) {
mImpl.setPlaySoundOnRecord(playSoundOnRecord);
}
public boolean getPlaySoundOnRecord() {
return mImpl.getPlaySoundOnRecord();
}
public void setScanning(boolean isScanning) { mImpl.setScanning(isScanning);}
public boolean getScanning() { return mImpl.getScanning(); }
/**
* Take a picture. The result will be returned to
* {@link Callback#onPictureTaken(CameraView, byte[], int)}.
*/
public void takePicture(ReadableMap options) {
mImpl.takePicture(options);
}
/**
* Record a video and save it to file. The result will be returned to
* {@link Callback#onVideoRecorded(CameraView, String, int, int)}.
* @param path Path to file that video will be saved to.
* @param maxDuration Maximum duration of the recording, in seconds.
* @param maxFileSize Maximum recording file size, in bytes.
* @param profile Quality profile of the recording.
*
* fires {@link Callback#onRecordingStart(CameraView, String, int, int)} and {@link Callback#onRecordingEnd(CameraView)}.
*/
public boolean record(String path, int maxDuration, int maxFileSize,
boolean recordAudio, CamcorderProfile profile, int orientation, int fps) {
return mImpl.record(path, maxDuration, maxFileSize, recordAudio, profile, orientation, fps);
}
public void stopRecording() {
mImpl.stopRecording();
}
public void pauseRecording() {
mImpl.pauseRecording();
}
public void resumeRecording() {
mImpl.resumeRecording();
}
public void resumePreview() {
mImpl.resumePreview();
}
public void pausePreview() {
mImpl.pausePreview();
}
public void setPreviewTexture(SurfaceTexture surfaceTexture) {
mImpl.setPreviewTexture(surfaceTexture);
}
public Size getPreviewSize() {
return mImpl.getPreviewSize();
}
    /**
     * Fans events received from the camera implementation out to every
     * registered {@link Callback}, and defers a {@code requestLayout()} to the
     * moment the camera opens when one was reserved during measurement.
     */
    private class CallbackBridge implements CameraViewImpl.Callback {

        private final ArrayList<Callback> mCallbacks = new ArrayList<>();

        // Set while the view is measured before the camera opens; triggers a
        // layout pass in onCameraOpened() once the aspect ratio is known.
        private boolean mRequestLayoutOnOpen;

        CallbackBridge() {
        }

        public void add(Callback callback) {
            mCallbacks.add(callback);
        }

        public void remove(Callback callback) {
            mCallbacks.remove(callback);
        }

        @Override
        public void onCameraOpened() {
            if (mRequestLayoutOnOpen) {
                mRequestLayoutOnOpen = false;
                requestLayout();
            }
            for (Callback callback : mCallbacks) {
                callback.onCameraOpened(CameraView.this);
            }
        }

        @Override
        public void onCameraClosed() {
            for (Callback callback : mCallbacks) {
                callback.onCameraClosed(CameraView.this);
            }
        }

        @Override
        public void onPictureTaken(byte[] data, int deviceOrientation) {
            for (Callback callback : mCallbacks) {
                callback.onPictureTaken(CameraView.this, data, deviceOrientation);
            }
        }

        @Override
        public void onRecordingStart(String path, int videoOrientation, int deviceOrientation) {
            for (Callback callback : mCallbacks) {
                callback.onRecordingStart(CameraView.this, path, videoOrientation, deviceOrientation);
            }
        }

        @Override
        public void onRecordingEnd() {
            for (Callback callback : mCallbacks) {
                callback.onRecordingEnd(CameraView.this);
            }
        }

        @Override
        public void onVideoRecorded(String path, int videoOrientation, int deviceOrientation) {
            for (Callback callback : mCallbacks) {
                callback.onVideoRecorded(CameraView.this, path, videoOrientation, deviceOrientation);
            }
        }

        @Override
        public void onFramePreview(byte[] data, int width, int height, int orientation) {
            for (Callback callback : mCallbacks) {
                callback.onFramePreview(CameraView.this, data, width, height, orientation);
            }
        }

        @Override
        public void onMountError() {
            for (Callback callback : mCallbacks) {
                callback.onMountError(CameraView.this);
            }
        }

        public void reserveRequestLayoutOnOpen() {
            mRequestLayoutOnOpen = true;
        }
    }
/**
 * Parcelable view state persisted across configuration changes.
 * INVARIANT: the field order in the Parcel constructor and in
 * {@link #writeToParcel(Parcel, int)} must match exactly — change both together.
 */
protected static class SavedState extends BaseSavedState {
@Facing
int facing;
String cameraId;
AspectRatio ratio;
boolean autoFocus;
@Flash
int flash;
float exposure;
float focusDepth;
float zoom;
int whiteBalance;
boolean playSoundOnCapture;
boolean playSoundOnRecord;
boolean scanning;
Size pictureSize;
@SuppressWarnings("WrongConstant")
public SavedState(Parcel source, ClassLoader loader) {
super(source);
// Read order mirrors writeToParcel(); booleans are stored as single bytes.
facing = source.readInt();
cameraId = source.readString();
ratio = source.readParcelable(loader);
autoFocus = source.readByte() != 0;
flash = source.readInt();
exposure = source.readFloat();
focusDepth = source.readFloat();
zoom = source.readFloat();
whiteBalance = source.readInt();
playSoundOnCapture = source.readByte() != 0;
playSoundOnRecord = source.readByte() != 0;
scanning = source.readByte() != 0;
pictureSize = source.readParcelable(loader);
}
public SavedState(Parcelable superState) {
super(superState);
}
@Override
public void writeToParcel(Parcel out, int flags) {
super.writeToParcel(out, flags);
out.writeInt(facing);
out.writeString(cameraId);
// NOTE(review): ratio is written with flags 0 while pictureSize forwards the caller's
// flags — presumably intentional, but worth confirming for consistency.
out.writeParcelable(ratio, 0);
out.writeByte((byte) (autoFocus ? 1 : 0));
out.writeInt(flash);
out.writeFloat(exposure);
out.writeFloat(focusDepth);
out.writeFloat(zoom);
out.writeInt(whiteBalance);
out.writeByte((byte) (playSoundOnCapture ? 1 : 0));
out.writeByte((byte) (playSoundOnRecord ? 1 : 0));
out.writeByte((byte) (scanning ? 1 : 0));
out.writeParcelable(pictureSize, flags);
}
public static final Creator<SavedState> CREATOR
= ParcelableCompat.newCreator(new ParcelableCompatCreatorCallbacks<SavedState>() {
@Override
public SavedState createFromParcel(Parcel in, ClassLoader loader) {
return new SavedState(in, loader);
}
@Override
public SavedState[] newArray(int size) {
return new SavedState[size];
}
});
}
/**
 * Callback for monitoring events about {@link CameraView}.
 * All methods are no-op by default; override only what you need.
 */
@SuppressWarnings("UnusedParameters")
public abstract static class Callback {
/**
 * Called when camera is opened.
 *
 * @param cameraView The associated {@link CameraView}.
 */
public void onCameraOpened(CameraView cameraView) {}
/**
 * Called when camera is closed.
 *
 * @param cameraView The associated {@link CameraView}.
 */
public void onCameraClosed(CameraView cameraView) {}
/**
 * Called when a picture is taken.
 *
 * @param cameraView The associated {@link CameraView}.
 * @param data JPEG data.
 * @param deviceOrientation Device orientation at capture time.
 */
public void onPictureTaken(CameraView cameraView, byte[] data, int deviceOrientation) {}
/**
 * Called when a video recording starts.
 *
 * @param cameraView The associated {@link CameraView}.
 * @param path Path to the recorded video file.
 * @param videoOrientation Orientation of the recorded video.
 * @param deviceOrientation Device orientation when recording started.
 */
public void onRecordingStart(CameraView cameraView, String path, int videoOrientation, int deviceOrientation) {}
/**
 * Called when a video recording ends, but before video is saved/processed.
 *
 * @param cameraView The associated {@link CameraView}.
 */
public void onRecordingEnd(CameraView cameraView){}
/**
 * Called when a video is recorded.
 *
 * @param cameraView The associated {@link CameraView}.
 * @param path Path to the recorded video file.
 * @param videoOrientation Orientation of the recorded video.
 * @param deviceOrientation Device orientation when the video was recorded.
 */
public void onVideoRecorded(CameraView cameraView, String path, int videoOrientation, int deviceOrientation) {}
/** Called for each preview frame while scanning mode is enabled (see setScanning). */
public void onFramePreview(CameraView cameraView, byte[] data, int width, int height, int orientation) {}
/** Called when the camera fails to mount/open. */
public void onMountError(CameraView cameraView) {}
}
}
| |
/*L
* Copyright Washington University in St. Louis
* Copyright SemanticBits
* Copyright Persistent Systems
* Copyright Krishagni
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/catissue-advanced-query/LICENSE.txt for details.
*/
package edu.wustl.query.flex.dag;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import edu.common.dynamicextensions.domaininterface.AttributeInterface;
import edu.wustl.common.querysuite.factory.QueryObjectFactory;
import edu.wustl.common.querysuite.queryobject.ArithmeticOperator;
import edu.wustl.common.querysuite.queryobject.IConnector;
import edu.wustl.common.querysuite.queryobject.ICustomFormula;
import edu.wustl.common.querysuite.queryobject.IDateOffsetAttribute;
import edu.wustl.common.querysuite.queryobject.IDateOffsetLiteral;
import edu.wustl.common.querysuite.queryobject.IExpression;
import edu.wustl.common.querysuite.queryobject.IExpressionAttribute;
import edu.wustl.common.querysuite.queryobject.ILiteral;
import edu.wustl.common.querysuite.queryobject.ITerm;
import edu.wustl.common.querysuite.queryobject.RelationalOperator;
import edu.wustl.common.querysuite.queryobject.TimeInterval;
import edu.wustl.common.util.Utility;
import edu.wustl.query.util.global.AQConstants;
/**
 * Holder/builder for the components of a single-node temporal query: it collects
 * the LHS/RHS literals, date-offset attributes, operators and time intervals, and
 * assembles them into {@link ITerm}s via {@link QueryObjectFactory}.
 *
 * NOTE(review): the class name looks like a typo of "SignalNodeTemporalQuery";
 * renaming would break existing callers, so it is left as-is.
 */
public class SingalNodeTemporalQuery
{
// Date-offset attribute used when the selected attribute is numeric (see createExpressions()).
private IDateOffsetAttribute dateOffsetAttr = null;
// Expression attribute used when the selected attribute is of Date type.
private IExpressionAttribute attributeIExpression = null;
private AttributeInterface attributeById = null;
// Offset literals created when a numeric value + time interval is supplied.
private IDateOffsetLiteral lhsDateOffSetLiteral = null;
private IDateOffsetLiteral rhsDateOffSetLiteral = null;
// Date literals created when a DatePicker value is supplied.
private ILiteral lhsDateLiteral = null;
private ILiteral rhsDateLiteral = null;
private ITerm lhsTerm = null;
private ITerm rhsTerm = null;
private IConnector iCon = null;
private ICustomFormula customFormula = null;
private IExpression entityIExpression = null;
private int entityExpressionId = 0;
private ArithmeticOperator arithOp = null;
private RelationalOperator relOp = null;
private String attributeType = null;
private TimeInterval rhsTimeInterval = null;
private TimeInterval lhsTimeInterval = null;
private TimeInterval qAttrInterval = null;
// Lazily created in generateRhsTimeValue(); SimpleDateFormat is not thread-safe,
// but this object appears to be used per-request — TODO confirm.
private SimpleDateFormat formatter;
public TimeInterval getQAttrInterval()
{
return qAttrInterval;
}
public void setQAttrInterval(TimeInterval attrInterval)
{
qAttrInterval = attrInterval;
}
/**
 * @return Returns the arithOp.
 */
public ArithmeticOperator getArithOp()
{
return arithOp;
}
/**
 * @param arithOp The arithOp to set.
 */
public void setArithOp(ArithmeticOperator arithOp)
{
this.arithOp = arithOp;
}
/**
 * @return Returns the attributeIExpression.
 */
public IExpressionAttribute getAttributeIExpression()
{
return attributeIExpression;
}
/**
 * @param attributeIExpression The attributeIExpression to set.
 */
public void setAttributeIExpression(IExpressionAttribute attributeIExpression)
{
this.attributeIExpression = attributeIExpression;
}
/**
 * @return Returns the attributeType.
 */
public String getAttributeType()
{
return attributeType;
}
/**
 * @param attributeType The attributeType to set.
 */
public void setAttributeType(String attributeType)
{
this.attributeType = attributeType;
}
/**
 * @return Returns the customFormula.
 */
public ICustomFormula getCustomFormula()
{
return customFormula;
}
/**
 * @param customFormula The customFormula to set.
 */
public void setCustomFormula(ICustomFormula customFormula)
{
this.customFormula = customFormula;
}
/**
 * @return Returns the lhsDateLiteral.
 */
public ILiteral getLhsDateLiteral()
{
return lhsDateLiteral;
}
/**
 * @param lhsDateLiteral The lhsDateLiteral to set.
 */
public void setLhsDateLiteral(ILiteral lhsDateLiteral)
{
this.lhsDateLiteral = lhsDateLiteral;
}
/**
 * @return Returns the rhsDateLiteral.
 */
public ILiteral getRhsDateLiteral()
{
return rhsDateLiteral;
}
/**
 * @param rhsDateLiteral The rhsDateLiteral to set.
 */
public void setRhsDateLiteral(ILiteral rhsDateLiteral)
{
this.rhsDateLiteral = rhsDateLiteral;
}
/**
 * @return Returns the dateOffsetAttr.
 */
public IDateOffsetAttribute getDateOffsetAttr()
{
return dateOffsetAttr;
}
/**
 * @param dateOffsetAttr The dateOffsetAttr to set.
 */
public void setDateOffsetAttr(IDateOffsetAttribute dateOffsetAttr)
{
this.dateOffsetAttr = dateOffsetAttr;
}
/**
 * @return Returns the lhsDateOffSetLiteral.
 */
public IDateOffsetLiteral getLhsDateOffSetLiteral()
{
return lhsDateOffSetLiteral;
}
/**
 * @param lhsDateOffSetLiteral The lhsDateOffSetLiteral to set.
 */
public void setLhsDateOffSetLiteral(IDateOffsetLiteral lhsDateOffSetLiteral)
{
this.lhsDateOffSetLiteral = lhsDateOffSetLiteral;
}
/**
 * @return Returns the rhsDateOffSetLiteral.
 */
public IDateOffsetLiteral getRhsDateOffSetLiteral()
{
return rhsDateOffSetLiteral;
}
/**
 * @param rhsDateOffSetLiteral The rhsDateOffSetLiteral to set.
 */
public void setRhsDateOffSetLiteral(IDateOffsetLiteral rhsDateOffSetLiteral)
{
this.rhsDateOffSetLiteral = rhsDateOffSetLiteral;
}
/**
 * @return Returns the entityIExpression.
 */
public IExpression getEntityIExpression()
{
return entityIExpression;
}
/**
 * @param entityIExpression The entityIExpression to set.
 */
public void setEntityIExpression(IExpression entityIExpression)
{
this.entityIExpression = entityIExpression;
}
/**
 * @return Returns the iCon.
 */
public IConnector getICon()
{
return iCon;
}
/**
 * @param con The iCon to set.
 */
public void setICon(IConnector con)
{
iCon = con;
}
/**
 * @return Returns the lhsTerm.
 */
public ITerm getLhsTerm()
{
return lhsTerm;
}
/**
 * @param lhsTerm The lhsTerm to set.
 */
public void setLhsTerm(ITerm lhsTerm)
{
this.lhsTerm = lhsTerm;
}
/**
 * @return Returns the lhsTimeInterval.
 */
public TimeInterval getLhsTimeInterval()
{
return lhsTimeInterval;
}
/**
 * @param lhsTimeInterval The lhsTimeInterval to set.
 */
public void setLhsTimeInterval(TimeInterval lhsTimeInterval)
{
this.lhsTimeInterval = lhsTimeInterval;
}
/**
 * @return Returns the relOp.
 */
public RelationalOperator getRelOp()
{
return relOp;
}
/**
 * @param relOp The relOp to set.
 */
public void setRelOp(RelationalOperator relOp)
{
this.relOp = relOp;
}
/**
 * @return Returns the rhsTerm.
 */
public ITerm getRhsTerm()
{
return rhsTerm;
}
/**
 * @param rhsTerm The rhsTerm to set.
 */
public void setRhsTerm(ITerm rhsTerm)
{
this.rhsTerm = rhsTerm;
}
/**
 * @return Returns the rhsTimeInterval.
 */
public TimeInterval getRhsTimeInterval()
{
return rhsTimeInterval;
}
/**
 * @param rhsTimeInterval The rhsTimeInterval to set.
 */
public void setRhsTimeInterval(TimeInterval rhsTimeInterval)
{
this.rhsTimeInterval = rhsTimeInterval;
}
/**
 * @return Returns the entityExpressionId.
 */
public int getEntityExpressionId()
{
return entityExpressionId;
}
/**
 * @param entityExpressionId The entityExpressionId to set.
 */
public void setEntityExpressionId(int entityExpressionId)
{
this.entityExpressionId = entityExpressionId;
}
/**
 * @return Returns the attributeById.
 */
public AttributeInterface getAttributeById()
{
return attributeById;
}
/**
 * @param attributeById The attributeById to set.
 */
public void setAttributeById(AttributeInterface attributeById)
{
this.attributeById = attributeById;
}
/**
 * Builds only the LHS term: first operand is either the date-picker literal
 * (if present) or the offset literal, followed by the attribute operand.
 */
public void createOnlyLHS()
{
lhsTerm = QueryObjectFactory.createTerm();
//Updating lhsTerm
if (lhsDateLiteral == null)
{
//If DatePicker doesn't exist on LHS
lhsTerm.addOperand(lhsDateOffSetLiteral);
addSecondLhsOperand();
}
else
{
//If DatePicker exists on LHS
lhsTerm.addOperand(lhsDateLiteral);
addSecondLhsOperand();
}
}
/**
 * Create LHS and RHS terms. LHS is built exactly as in {@link #createOnlyLHS()};
 * RHS gets a single operand (date-picker literal if present, else offset literal).
 */
public void createLHSAndRHS()
{
lhsTerm = QueryObjectFactory.createTerm();
rhsTerm = QueryObjectFactory.createTerm();
//Updating lhsTerm
if (lhsDateLiteral == null)
{
//If DatePicker doesn't exist on LHS
lhsTerm.addOperand(lhsDateOffSetLiteral);
addSecondLhsOperand();
}
else
{
//If DatePicker exists on LHS
lhsTerm.addOperand(lhsDateLiteral);
addSecondLhsOperand();
}
//Updating rhsTerm
if (rhsDateLiteral == null)
{
//If no DatePicker, use the offset literal
rhsTerm.addOperand(rhsDateOffSetLiteral);
}
else
{
//If DatePicker on RHS
rhsTerm.addOperand(rhsDateLiteral);
}
}
/**
 * Add second LHS operand, joined to the first with {@code iCon}.
 * Uses the date-offset attribute for numeric attributes, otherwise the
 * expression attribute (set up by {@link #createExpressions()}).
 */
private void addSecondLhsOperand()
{
if (attributeIExpression == null)
{
//If attribute selected is Int type
lhsTerm.addOperand(iCon, dateOffsetAttr);
}
else
{
//If attribute selected is of type Date
lhsTerm.addOperand(iCon, attributeIExpression);
}
}
/**
 * Creates either an expression attribute (Date-typed attribute) or a
 * date-offset attribute (numeric attribute, interval defaults to Day when
 * no qAttrInterval has been set).
 */
public void createExpressions()
{
if (attributeType.equals(AQConstants.DATE_TYPE))
{
//Means Attribute is of Date type , then it is Expression attribute
attributeIExpression = QueryObjectFactory.createExpressionAttribute(entityIExpression,
attributeById,false);
}
else
{
//It will be DateOffSetLiteral
// dateOffsetAttr = QueryObjectFactory.createDateOffsetAttribute(entityIExpression,attributeById,TimeInterval.Day);
if (qAttrInterval == null)
{
dateOffsetAttr = QueryObjectFactory.createDateOffsetAttribute(entityIExpression,
attributeById, TimeInterval.Day);
}
else
{
dateOffsetAttr = QueryObjectFactory.createDateOffsetAttribute(entityIExpression,
attributeById, qAttrInterval);
}
}
}
/**
 * Create RHS Date Offset Literal (no value, interval only).
 * @param rhsTimeInterval rhsTimeInterval (the UI's "null" marker string is skipped)
 */
public void createRhsDateOffSetLiteral(String rhsTimeInterval)
{
if ((!rhsTimeInterval.equals(DAGConstant.NULL_STRING)))
{
this.rhsTimeInterval = getTimeInterval(rhsTimeInterval);
rhsDateOffSetLiteral = QueryObjectFactory.createDateOffsetLiteral(this.rhsTimeInterval);
}
}
/**
 * Create RHS literals: an offset literal when both value and interval are
 * supplied, otherwise falls back to parsing the value as a date.
 * @param rhsTimeValue rhsTimeValue
 * @param rhsTimeInterval rhsTimeInterval
 */
public void createRightLiterals(String rhsTimeValue, String rhsTimeInterval)
{
if ((!rhsTimeValue.equals(DAGConstant.NULL_STRING))
&& (!rhsTimeInterval.equals(DAGConstant.NULL_STRING)))
{
//TextInput and Time Interval both present on RHS: create dateOffSetLiteral
this.rhsTimeInterval = getTimeInterval(rhsTimeInterval);
rhsDateOffSetLiteral = QueryObjectFactory.createDateOffsetLiteral(rhsTimeValue,
this.rhsTimeInterval);
}
else
{
generateRhsTimeValue(rhsTimeValue, rhsTimeInterval);
}
}
/**
 * Parses rhsTimeValue as a date (pattern depends on whether the attribute is
 * DateTime) and stores it as rhsDateLiteral; runs only when a value is present
 * but no interval is.
 * NOTE(review): a ParseException is only printed, leaving rhsDateLiteral null —
 * callers see a silently missing literal; consider propagating or logging.
 * @param rhsTimeValue rhsTimeValue
 * @param rhsTimeInterval rhsTimeInterval
 */
private void generateRhsTimeValue(String rhsTimeValue, String rhsTimeInterval)
{
if ((!rhsTimeValue.equals(DAGConstant.NULL_STRING))
&& (rhsTimeInterval.equals(DAGConstant.NULL_STRING)))
{
//Value present, interval absent: treat the value as a concrete date
Date date = null;
String pattern = "";
try
{
//Date date = Utility.parseDate(rhsTimeValue, "MM/dd/yyyy HH:MM:SS");
if ("DateTime".equals(attributeType))
{
pattern = "MM/dd/yyyy HH:mm:ss";
}
else
{
pattern = "MM/dd/yyyy";
}
formatter = new SimpleDateFormat(pattern);
date = formatter.parse(rhsTimeValue);
rhsDateLiteral = QueryObjectFactory.createDateLiteral(new java.sql.Date(date
.getTime()));
}
catch (ParseException e)
{
e.printStackTrace();
}
}
}
/**
 * Create LHS Literals: offset literal when both value and interval are present,
 * otherwise falls back to date parsing via {@link #dateFormatter(String, String)}.
 * @param lhsTimeValue lhsTimeValue
 * @param lhsTimeInterval lhsTimeInterval
 */
public void createLeftLiterals(String lhsTimeValue, String lhsTimeInterval)
{
if ((!lhsTimeValue.equals(DAGConstant.NULL_STRING))
&& (!lhsTimeInterval.equals(DAGConstant.NULL_STRING)))
{
//TextInput and Time Interval both present on LHS: create dateOffSetLiteral
this.lhsTimeInterval = getTimeInterval(lhsTimeInterval);
lhsDateOffSetLiteral = QueryObjectFactory.createDateOffsetLiteral(lhsTimeValue,
this.lhsTimeInterval);
}
else
{
dateFormatter(lhsTimeValue, lhsTimeInterval);
}
}
/**
 * Parses lhsTimeValue as an MM/dd/yyyy date and stores it as lhsDateLiteral.
 * NOTE(review): unlike the RHS path this always uses MM/dd/yyyy (no DateTime
 * variant) and also swallows ParseException — presumably intentional; verify.
 * @param lhsTimeValue lhsTimeValue
 * @param lhsTimeInterval lhsTimeInterval
 */
private void dateFormatter(String lhsTimeValue, String lhsTimeInterval)
{
if ((!lhsTimeValue.equals(DAGConstant.NULL_STRING))
&& (lhsTimeInterval.equals(DAGConstant.NULL_STRING)))
{
//DatePicker on LHS: create only a Date Literal
try
{
Date date = Utility.parseDate(lhsTimeValue, "MM/dd/yyyy");
lhsDateLiteral = QueryObjectFactory.createDateLiteral(new java.sql.Date(date
.getTime()));
}
catch (ParseException e)
{
e.printStackTrace();
}
}
}
/**
 * Maps a UI interval label to the enum constant; the UI sends the plural form
 * (e.g. "Days" for TimeInterval.Day), hence the name() + "s" comparison.
 * @param timeIntervalValue timeIntervalValue
 * @return the matching TimeInterval, or null if no constant matches
 */
public TimeInterval getTimeInterval(String timeIntervalValue)
{
TimeInterval timeInterval = null;
for (TimeInterval time : TimeInterval.values())
{
if (timeIntervalValue.equals(time.name() + "s"))
{
timeInterval = time;
break;
}
}
return timeInterval;
}
}
| |
package it.er.dao;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Deque;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.bson.Document;
import org.springframework.stereotype.Repository;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCursor;
import it.er.basic.BaseNoSql;
import it.er.object.Metatag;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
@Repository("textContent")
/**
 * MongoDB-backed DAO for text content. Titles are normalised into an "idtext"
 * slug (see {@link #slugify(String)}) which also serves as the relation id for
 * metatag documents.
 */
public class TextContentNoSqlDAO extends BaseNoSql implements TextContent,InitializingBean{
private static final int noLimit = -1;
/** @return the sentinel limit value meaning "no limit" for the read methods. */
public static int getNolimit() {
return noLimit;
}
private static final Logger log = LogManager.getLogger(TextContentNoSqlDAO.class);
@Autowired
public void setMdbServiceParam(MdbServiceNSParam mdbServiceParam) {
this.mdbServiceParam = mdbServiceParam;
}
@Override
public void afterPropertiesSet() throws Exception {
super.propSet();
}
/**
 * Normalises a title into the "idtext" slug: lower-case, strip characters
 * outside the kept set, collapse runs of spaces into a single '-'.
 * Returns null for a null title.
 * NOTE(review): "A-z" (not "A-Za-z") also admits the characters [ \ ] ^ _ ` —
 * kept verbatim because ids already stored in Mongo depend on this exact regex.
 */
private static String slugify(String title) {
if (title == null) {
return null;
}
String idText = title.toLowerCase();
idText = idText.replaceAll("([^\\x20^\\x2D^A-z0-9])+","");
return idText.replaceAll("(\\x20)+", "-");
}
/**
 * Maps a Mongo document onto a {@link Text} bean; absent fields map to null.
 * Creation date is recovered from the ObjectId timestamp.
 */
private static Text mapText(Document d) {
Text t = new Text();
t.setArchive(d.containsKey("archive")?d.getBoolean("archive"):null);
t.setBody(d.containsKey("body")?d.getString("body"):null);
t.setCreateDate(d.containsKey("_id")?d.getObjectId("_id").getTimestamp():null);
t.setIdText(d.containsKey("idtext")?d.getString("idtext"):null);
t.setSitename(d.containsKey("sitename")?d.getString("sitename"):null);
t.setTagname(d.containsKey("tagname")?d.getString("tagname"):null);
t.setTextIdUser(d.containsKey("textIdUser")?d.getString("textIdUser"):null);
t.setTitle(d.containsKey("title")?d.getString("title"):null);
t.setOrder(d.containsKey("order")?d.getString("order"):null);
return t;
}
/**
 * Inserts a single text plus its metatags.
 * @return 0 on failure or when the inserted document cannot be read back,
 *         otherwise 1 + the metatag insert count.
 */
@Override
public int insert(Text t,String lang, Metatag meta) throws Exception {
int r = 0;
try {
String idText = slugify(t.getTitle());
ArrayList<String> a = new ArrayList<String>();
String collection = getSpecificColllection(BaseNoSql.textCollection, a, lang);
this.getMdbDatabase().getCollection(collection)
.insertOne(new Document().append("body", t.getBody())
.append("archive",t.isArchive())
.append("textIdUser",t.getTextIdUser())
.append("title",t.getTitle())
.append("order", t.getOrder())
.append("idtext",idText)
.append("sitename",t.getSitename())
.append("tagname",t.getTagname()));
// Verify the document is readable back by its slug before attaching metatags.
List<String> inserted = this.getIdsViewByTitle(t.getTagname(), t.getTitle(), lang);
String relId = "0";
if (inserted == null || inserted.size() == 0)
return 0;
else if (inserted.size() >= 1){
relId = idText;
}
/*
 * tmp multi id
 * //TO DO
 */
// One for the text document itself, plus however many metatags were stored.
r = r + 1 + this.insertMetatag(a, collection, lang, meta, relId);
} catch (Exception e){
log.error(e.getMessage(), e);
r = 0;
}
return r;
}
/**
 * Inserts a series header followed by each of its texts (tagged with the
 * series column name as "argument") and their metatags.
 * @return 0 on failure or when any inserted text cannot be read back,
 *         otherwise the accumulated insert count.
 */
@Override
public int insertSeries(TextSeries s,String colname,String lang, Metatag meta) throws Exception {
int r = 0;
try {
ArrayList<String> a = new ArrayList<String>();
a.add(colname);
String collection = getSpecificColllection(BaseNoSql.textCollection, a, lang);
// Series header document.
this.getMdbDatabase().getCollection(collection)
.insertOne(new Document().append("title", s.getTitle())
.append("subtitle", s.getSubtitle()));
Iterator<Text> i = s.getSerie().iterator();
while (i.hasNext()){
Text t = i.next();
String idText = slugify(t.getTitle());
this.getMdbDatabase().getCollection(collection).insertOne(
new Document().append("body", t.getBody())
.append("archive",t.isArchive())
.append("textIdUser",t.getTextIdUser())
.append("title",t.getTitle())
.append("order", t.getOrder())
.append("idtext",idText)
.append("sitename",t.getSitename())
.append("tagname",t.getTagname())
.append("argument", colname));
r++;
List<String> inserted = this.getIdsSeriesViewByTitle(t.getTagname(),colname, t.getTitle(), lang);
String relId = "0";
if (inserted == null || inserted.size() == 0)
return 0;
else if (inserted.size() >= 1){
relId = idText;
}
/*
 * tmp multi id
 * //TO DO
 */
r = r + 1 + this.insertMetatag(a, collection, lang, meta,relId);
}
} catch (Exception e){
log.error(e.getMessage(), e);
r = 0;
}
return r;
}
/** Bulk insert — not implemented; always returns null. */
@Override
public int[] insert(List<Text> t,String lang) throws Exception{
// TODO Auto-generated method stub
return null;
}
/** @return the "idtext" slugs of all documents with the given tagname. */
@Override
public List<String> getIdsView(String tagname,String lang) throws Exception{
String collection = getSpecificColllection(BaseNoSql.textCollection, null, lang);
FindIterable<Document> fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname));
List<String> l = new ArrayList<String>();
MongoCursor<Document> c = fid.iterator();
while (c.hasNext()){
Document d = c.next();
l.add(d.getString("idtext"));
}
return l;
}
/**
 * Reads a page of texts for a tagname (optionally scoped by argument) as a
 * FIFO {@link Deque}, preserving cursor order.
 */
@Override
public Deque<Text> getTextListShort(int limit, int start,String tagname,String argument,String lang) throws Exception{
ArrayList<String> a = new ArrayList<String>();
if (argument != null && !argument.isEmpty())
a.add(argument);
String collection = getSpecificColllection(BaseNoSql.textCollection, a, lang);
FindIterable<Document> fid = null;
if (limit == TextContentNoSqlDAO.getNolimit())
fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname)).skip(start);
else
fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname)).limit(limit).skip(start);
MongoCursor<Document> c = fid.iterator();
Deque<Text> dN = new LinkedList<Text>();
while (c.hasNext()){
dN.offer(mapText(c.next()));
}
return dN;
}
/** Reads a page of texts for a tagname (optionally scoped by argument) as a List. */
@Override
public List<Text> readTextList(int limit, int start,String tagname,String argument,String lang) throws Exception{
ArrayList<String> a = new ArrayList<String>();
if (argument != null && !argument.isEmpty())
a.add(argument);
String collection = getSpecificColllection(BaseNoSql.textCollection, a, lang);
FindIterable<Document> fid = null;
if (limit == TextContentNoSqlDAO.getNolimit())
fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname)).skip(start);
else
fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname)).limit(limit).skip(start);
MongoCursor<Document> c = fid.iterator();
List<Text> lN = new LinkedList<Text>();
while (c.hasNext()){
lN.add(mapText(c.next()));
}
return lN;
}
/** Reads a page of series texts (matched on tagname AND argument == colname). */
@Override
public List<Text> readTextSeriesList(int limit, int start,String tagname,String colname,String lang) throws Exception{
ArrayList<String> a = new ArrayList<String>();
if (colname != null && !colname.isEmpty())
a.add(colname);
String collection = getSpecificColllection(BaseNoSql.textCollection, a, lang);
FindIterable<Document> fid = null;
if (limit == TextContentNoSqlDAO.getNolimit())
fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname).append("argument", colname)).skip(start);
else
fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname).append("argument", colname)).limit(limit).skip(start);
MongoCursor<Document> c = fid.iterator();
List<Text> lN = new LinkedList<Text>();
while (c.hasNext()){
lN.add(mapText(c.next()));
}
return lN;
}
/** Not implemented; always returns null. */
@Override
public List<String> getIdsSeriesView(String tagname, String colname,
String lang) throws Exception {
// TODO Auto-generated method stub
return null;
}
/** @return the "idtext" slugs matching the given tagname and (slugified) title. */
@Override
public List<String> getIdsViewByTitle(String tagname,String title, String lang)
throws Exception {
String collection = getSpecificColllection(BaseNoSql.textCollection, null, lang);
String idText = slugify(title);
FindIterable<Document> fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname)
.append("idtext", idText));
List<String> l = new ArrayList<String>();
MongoCursor<Document> c = fid.iterator();
while (c.hasNext()){
Document d = c.next();
l.add(d.getString("idtext"));
}
return l;
}
/** @return the "idtext" slugs matching tagname, series colname and (slugified) title. */
@Override
public List<String> getIdsSeriesViewByTitle(String tagname, String colname,
String title,String lang) throws Exception {
ArrayList<String> a = new ArrayList<String>();
if (colname != null && !colname.isEmpty())
a.add(colname);
String idText = slugify(title);
String collection = getSpecificColllection(BaseNoSql.textCollection, a, lang);
FindIterable<Document> fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname)
.append("argument", colname)
.append("idtext", idText));
List<String> l = new ArrayList<String>();
MongoCursor<Document> c = fid.iterator();
while (c.hasNext()){
Document d = c.next();
l.add(d.getString("idtext"));
}
return l;
}
/** Reads the metatags whose relId equals the slug of the given title. */
@SuppressWarnings("unchecked")
@Override
public List<Metatag> readMetatagByTitle(String colname,
String title, String lang) throws Exception {
ArrayList<String> a = new ArrayList<String>();
FindIterable<Document> fid = null;
String idText = slugify(title);
if (colname != null && !colname.isEmpty())
a.add(colname);
String collection = getSpecificColllection(BaseNoSql.metatagCollection, a, lang);
fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("relId", idText));
MongoCursor<Document> c = fid.iterator();
List<Metatag> lM = new LinkedList<Metatag>();
while (c.hasNext()){
Document d = c.next();
Metatag m = new Metatag();
m.setTitle(d.containsKey("title")?d.getString("title"):null);
m.setType(d.containsKey("type")?d.getString("type"):null);
m.setURL(d.containsKey("url")?d.getString("url"):null);
m.setId(d.containsKey("relId")?d.getString("relId"):null);
m.setImage(d.containsKey("image")?d.getString("image"):null);
m.setSitename(d.containsKey("sitename")?d.getString("sitename"):null);
m.setAdmin(d.containsKey("admin")?d.getString("admin"):null);
m.setDescription(d.containsKey("description")?d.getString("description"):null);
m.setAuthor(d.containsKey("author")?d.getString("author"):null);
m.setLang(d.containsKey("lang")?d.getString("lang"):null);
if (d.containsKey("what")){
// "what" holds the keyword list; rewrap so it can be fetched as an ArrayList.
Document k = new Document("what",d.get("what"));
m.setKey((ArrayList<String>) k.get("what", ArrayList.class));
}
lM.add(m);
}
return lM;
}
/** Reads a page of texts matching tagname AND the slug of the given title. */
@Override
public List<Text> readTextList(int limit, int start, String tagname, String argument,
String title, String lang) throws Exception {
ArrayList<String> a = new ArrayList<String>();
if (argument != null && !argument.isEmpty())
a.add(argument);
String idText = slugify(title);
String collection = getSpecificColllection(BaseNoSql.textCollection, a, lang);
FindIterable<Document> fid = null;
if (limit == TextContentNoSqlDAO.getNolimit())
fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname).append("idtext", idText)).skip(start);
else
fid = this.getMdbDatabase().getCollection(collection)
.find(new Document("tagname",tagname).append("idtext", idText)).limit(limit).skip(start);
MongoCursor<Document> c = fid.iterator();
List<Text> lN = new LinkedList<Text>();
while (c.hasNext()){
lN.add(mapText(c.next()));
}
return lN;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.connector.informationSchema;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorMetadata;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.SchemaTablePrefix;
import com.facebook.presto.spi.TableHandle;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import static com.facebook.presto.connector.informationSchema.InformationSchemaColumnHandle.toInformationSchemaColumnHandles;
import static com.facebook.presto.metadata.MetadataUtil.SchemaMetadataBuilder.schemaMetadataBuilder;
import static com.facebook.presto.metadata.MetadataUtil.TableMetadataBuilder.tableMetadataBuilder;
import static com.facebook.presto.metadata.MetadataUtil.findColumnMetadata;
import static com.facebook.presto.metadata.MetadataUtil.schemaNameGetter;
import static com.facebook.presto.spi.ColumnType.LONG;
import static com.facebook.presto.spi.ColumnType.STRING;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Predicates.compose;
import static com.google.common.base.Predicates.equalTo;
import static com.google.common.collect.Iterables.filter;
/**
 * Connector metadata for the synthetic {@code information_schema} catalog.
 * Every table served by this connector is declared statically in {@link #TABLES};
 * no metadata is ever discovered at runtime, so all lookups are simple map reads.
 */
public class InformationSchemaMetadata
        implements ConnectorMetadata
{
    public static final String INFORMATION_SCHEMA = "information_schema";

    public static final SchemaTableName TABLE_COLUMNS = new SchemaTableName(INFORMATION_SCHEMA, "columns");
    public static final SchemaTableName TABLE_TABLES = new SchemaTableName(INFORMATION_SCHEMA, "tables");
    public static final SchemaTableName TABLE_SCHEMATA = new SchemaTableName(INFORMATION_SCHEMA, "schemata");
    public static final SchemaTableName TABLE_INTERNAL_FUNCTIONS = new SchemaTableName(INFORMATION_SCHEMA, "__internal_functions__");
    public static final SchemaTableName TABLE_INTERNAL_PARTITIONS = new SchemaTableName(INFORMATION_SCHEMA, "__internal_partitions__");

    // Fixed schemas of every information_schema table. Column names and their
    // order are part of the SQL-visible contract and must not change.
    public static final Map<SchemaTableName, ConnectorTableMetadata> TABLES = schemaMetadataBuilder()
            .table(tableMetadataBuilder(TABLE_COLUMNS)
                    .column("table_catalog", STRING)
                    .column("table_schema", STRING)
                    .column("table_name", STRING)
                    .column("column_name", STRING)
                    .column("ordinal_position", LONG)
                    .column("column_default", STRING)
                    .column("is_nullable", STRING)
                    .column("data_type", STRING)
                    .column("is_partition_key", STRING)
                    .build())
            .table(tableMetadataBuilder(TABLE_TABLES)
                    .column("table_catalog", STRING)
                    .column("table_schema", STRING)
                    .column("table_name", STRING)
                    .column("table_type", STRING)
                    .build())
            .table(tableMetadataBuilder(TABLE_SCHEMATA)
                    .column("catalog_name", STRING)
                    .column("schema_name", STRING)
                    .build())
            .table(tableMetadataBuilder(TABLE_INTERNAL_FUNCTIONS)
                    .column("function_name", STRING)
                    .column("argument_types", STRING)
                    .column("return_type", STRING)
                    .column("function_type", STRING)
                    .column("description", STRING)
                    .build())
            .table(tableMetadataBuilder(TABLE_INTERNAL_PARTITIONS)
                    .column("table_catalog", STRING)
                    .column("table_schema", STRING)
                    .column("table_name", STRING)
                    .column("partition_number", LONG)
                    .column("partition_key", STRING)
                    .column("partition_value", STRING)
                    .build())
            .build();

    // Catalog this metadata instance answers for; handles from other catalogs are rejected.
    private final String catalogName;

    public InformationSchemaMetadata(String catalogName)
    {
        this.catalogName = catalogName;
    }

    /**
     * A handle is ours when it is an information-schema handle for our catalog
     * and names one of the statically declared tables.
     */
    @Override
    public boolean canHandle(TableHandle tableHandle)
    {
        if (tableHandle instanceof InformationSchemaTableHandle) {
            InformationSchemaTableHandle informationHandle = (InformationSchemaTableHandle) tableHandle;
            return informationHandle.getCatalogName().equals(catalogName) && TABLES.containsKey(informationHandle.getSchemaTableName());
        }
        return false;
    }

    /**
     * Validates and narrows a generic handle; throws {@link IllegalArgumentException}
     * on a foreign handle, a mismatched catalog, or an unknown table.
     */
    private InformationSchemaTableHandle checkTableHandle(TableHandle tableHandle)
    {
        checkNotNull(tableHandle, "tableHandle is null");
        checkArgument(tableHandle instanceof InformationSchemaTableHandle, "tableHandle is not an information schema table handle");
        InformationSchemaTableHandle informationHandle = (InformationSchemaTableHandle) tableHandle;
        checkArgument(informationHandle.getCatalogName().equals(catalogName), "invalid table handle: expected catalog %s but got %s", catalogName, informationHandle.getCatalogName());
        checkArgument(TABLES.containsKey(informationHandle.getSchemaTableName()), "table %s does not exist", informationHandle.getSchemaTableName());
        return informationHandle;
    }

    /** The only schema this connector exposes is {@code information_schema}. */
    @Override
    public List<String> listSchemaNames()
    {
        return ImmutableList.of(INFORMATION_SCHEMA);
    }

    /** Returns a handle for a declared table, or null when the name is unknown. */
    @Override
    public TableHandle getTableHandle(SchemaTableName tableName)
    {
        return TABLES.containsKey(tableName)
                ? new InformationSchemaTableHandle(catalogName, tableName.getSchemaName(), tableName.getTableName())
                : null;
    }

    @Override
    public ConnectorTableMetadata getTableMetadata(TableHandle tableHandle)
    {
        return TABLES.get(checkTableHandle(tableHandle).getSchemaTableName());
    }

    /** Lists all declared tables, optionally restricted to one schema name. */
    @Override
    public List<SchemaTableName> listTables(final String schemaNameOrNull)
    {
        if (schemaNameOrNull == null) {
            return ImmutableList.copyOf(TABLES.keySet());
        }
        ImmutableList.Builder<SchemaTableName> matching = ImmutableList.builder();
        for (SchemaTableName tableName : filter(TABLES.keySet(), compose(equalTo(schemaNameOrNull), schemaNameGetter()))) {
            matching.add(tableName);
        }
        return matching.build();
    }

    /** Returns a column handle, or null when the table has no such column. */
    @Override
    public ColumnHandle getColumnHandle(TableHandle tableHandle, String columnName)
    {
        ConnectorTableMetadata tableMetadata = TABLES.get(checkTableHandle(tableHandle).getSchemaTableName());
        ColumnMetadata column = findColumnMetadata(tableMetadata, columnName);
        return (column == null) ? null : new InformationSchemaColumnHandle(columnName);
    }

    @Override
    public ColumnMetadata getColumnMetadata(TableHandle tableHandle, ColumnHandle columnHandle)
    {
        ConnectorTableMetadata tableMetadata = TABLES.get(checkTableHandle(tableHandle).getSchemaTableName());
        checkArgument(columnHandle instanceof InformationSchemaColumnHandle, "columnHandle is not an instance of InformationSchemaColumnHandle");
        String columnName = ((InformationSchemaColumnHandle) columnHandle).getColumnName();
        ColumnMetadata columnMetadata = findColumnMetadata(tableMetadata, columnName);
        checkArgument(columnMetadata != null, "Column %s on table %s does not exist", columnName, tableMetadata.getTable());
        return columnMetadata;
    }

    @Override
    public Map<String, ColumnHandle> getColumnHandles(TableHandle tableHandle)
    {
        return toInformationSchemaColumnHandles(TABLES.get(checkTableHandle(tableHandle).getSchemaTableName()));
    }

    /** Column listings for every declared table matching the prefix. */
    @Override
    public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(SchemaTablePrefix prefix)
    {
        checkNotNull(prefix, "prefix is null");
        ImmutableMap.Builder<SchemaTableName, List<ColumnMetadata>> columns = ImmutableMap.builder();
        for (Entry<SchemaTableName, ConnectorTableMetadata> entry : TABLES.entrySet()) {
            if (prefix.matches(entry.getKey())) {
                columns.put(entry.getKey(), entry.getValue().getColumns());
            }
        }
        return columns.build();
    }

    /** information_schema is read-only: DDL is not supported. */
    @Override
    public TableHandle createTable(ConnectorTableMetadata tableMetadata)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public void dropTable(TableHandle tableHandle)
    {
        throw new UnsupportedOperationException();
    }

    static List<ColumnMetadata> informationSchemaTableColumns(SchemaTableName tableName)
    {
        checkArgument(TABLES.containsKey(tableName), "table does not exist: %s", tableName);
        return TABLES.get(tableName).getColumns();
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.vcs.log.data;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.CommonProcessors;
import com.intellij.util.Function;
import com.intellij.util.io.*;
import com.intellij.vcs.log.*;
import com.intellij.vcs.log.impl.FatalErrorHandler;
import com.intellij.vcs.log.impl.HashImpl;
import com.intellij.vcs.log.impl.VcsRefImpl;
import com.intellij.vcs.log.util.PersistentUtil;
import com.intellij.vcs.log.util.StorageId;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Predicate;
/**
* Supports the int <-> Hash and int <-> VcsRef persistent mappings.
*/
public final class VcsLogStorageImpl implements Disposable, VcsLogStorage {
  // NOTE(review): logger category is the interface VcsLogStorage.class, not VcsLogStorageImpl.class — confirm intentional.
  @NotNull private static final Logger LOG = Logger.getInstance(VcsLogStorage.class);
  // On-disk storage subdirectory/file names.
  @NotNull @NonNls private static final String HASHES_STORAGE = "hashes";
  @NotNull @NonNls private static final String REFS_STORAGE = "refs";
  @NotNull @NonNls private static final String STORAGE = "storage";
  // No-op storage: index lookups return 0, reverse lookups throw.
  @NotNull public static final VcsLogStorage EMPTY = new EmptyLogStorage();
  // Bump VERSION to invalidate both enumerators; REFS_VERSION only the refs one.
  public static final int VERSION = 8;
  public static final int NO_INDEX = -1;
  private static final int REFS_VERSION = 2;
  // int <-> CommitId persistent mapping.
  @NotNull private final MyPersistentBTreeEnumerator myCommitIdEnumerator;
  // int <-> VcsRef persistent mapping.
  @NotNull private final PersistentEnumerator<VcsRef> myRefsEnumerator;
  // Sink for IO failures; methods report there and return a fallback instead of throwing.
  @NotNull private final FatalErrorHandler myExceptionReporter;
  private volatile boolean myDisposed = false;
  /**
   * Opens (or resets, when broken) the two persistent enumerators backing this storage
   * and registers this instance for disposal under {@code parent}.
   *
   * @throws IOException if the storages cannot be opened even after a reset attempt
   */
  public VcsLogStorageImpl(@NotNull Project project,
                           @NotNull Map<VirtualFile, VcsLogProvider> logProviders,
                           @NotNull FatalErrorHandler exceptionReporter,
                           @NotNull Disposable parent) throws IOException {
    myExceptionReporter = exceptionReporter;
    // Roots are sorted by path so the root->int numbering in the key descriptor is stable across sessions.
    List<VirtualFile> roots = StreamEx.ofKeys(logProviders).sortedBy(VirtualFile::getPath).toList();
    String logId = PersistentUtil.calcLogId(project, logProviders);
    MyCommitIdKeyDescriptor commitIdKeyDescriptor = new MyCommitIdKeyDescriptor(roots);
    StorageId hashesStorageId = new StorageId(project.getName(), HASHES_STORAGE, logId, VERSION);
    StorageLockContext storageLockContext = new StorageLockContext();
    // openCleanOrResetBroken deletes and recreates the storage file if opening fails.
    myCommitIdEnumerator = IOUtil.openCleanOrResetBroken(() -> new MyPersistentBTreeEnumerator(hashesStorageId, commitIdKeyDescriptor,
                                                                                              storageLockContext),
                                                         hashesStorageId.getStorageFile(STORAGE).toFile());
    VcsRefKeyDescriptor refsKeyDescriptor = new VcsRefKeyDescriptor(logProviders, commitIdKeyDescriptor);
    StorageId refsStorageId = new StorageId(project.getName(), REFS_STORAGE, logId, VERSION + REFS_VERSION);
    myRefsEnumerator = IOUtil.openCleanOrResetBroken(() -> new PersistentEnumerator<>(refsStorageId.getStorageFile(STORAGE),
                                                                                     refsKeyDescriptor, Page.PAGE_SIZE,
                                                                                     storageLockContext, refsStorageId.getVersion()),
                                                     refsStorageId.getStorageFile(STORAGE).toFile());
    Disposer.register(parent, this);
  }
  /**
   * Adapter from commit index to its Hash; returns null when the index is unknown.
   */
  @NotNull
  public static Function<Integer, Hash> createHashGetter(@NotNull VcsLogStorage storage) {
    return commitIndex -> {
      CommitId commitId = storage.getCommitId(commitIndex);
      if (commitId == null) return null;
      return commitId.getHash();
    };
  }
  @Nullable
  private CommitId doGetCommitId(int index) throws IOException {
    return myCommitIdEnumerator.valueOf(index);
  }
  // enumerate() both looks up an existing id and assigns a new one on first sight.
  private int getOrPut(@NotNull Hash hash, @NotNull VirtualFile root) throws IOException {
    return myCommitIdEnumerator.enumerate(new CommitId(hash, root));
  }
  /**
   * Returns the persistent index for the commit, assigning one if needed;
   * {@link #NO_INDEX} when storage IO fails (failure is reported, not thrown).
   */
  @Override
  public int getCommitIndex(@NotNull Hash hash, @NotNull VirtualFile root) {
    checkDisposed();
    try {
      return getOrPut(hash, root);
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
    }
    return NO_INDEX;
  }
  /**
   * Reverse lookup: index -> CommitId. Returns null (and reports) when the index
   * is unknown or storage IO fails.
   */
  @Override
  @Nullable
  public CommitId getCommitId(int commitIndex) {
    checkDisposed();
    try {
      CommitId commitId = doGetCommitId(commitIndex);
      if (commitId == null) {
        myExceptionReporter.consume(this, new RuntimeException("Unknown commit index: " + commitIndex));
      }
      return commitId;
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
    }
    return null;
  }
  /** True when the commit was previously enumerated; false on IO failure (reported). */
  @Override
  public boolean containsCommit(@NotNull CommitId id) {
    checkDisposed();
    try {
      return myCommitIdEnumerator.contains(id);
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
    }
    return false;
  }
  /**
   * Iterates all stored commits until {@code consumer} returns false.
   * FindProcessor stops on the first accepted element, hence the negation.
   */
  @Override
  public void iterateCommits(@NotNull Predicate<? super CommitId> consumer) {
    checkDisposed();
    try {
      myCommitIdEnumerator.iterateData(new CommonProcessors.FindProcessor<>() {
        @Override
        protected boolean accept(CommitId commitId) {
          return !consumer.test(commitId);
        }
      });
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
    }
  }
  /** Index for a reference, assigning one if needed; {@link #NO_INDEX} on IO failure. */
  @Override
  public int getRefIndex(@NotNull VcsRef ref) {
    checkDisposed();
    try {
      return myRefsEnumerator.enumerate(ref);
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
    }
    return NO_INDEX;
  }
  /** Reverse lookup: ref index -> VcsRef, or null on IO failure (reported). */
  @Nullable
  @Override
  public VcsRef getVcsRef(int refIndex) {
    checkDisposed();
    try {
      return myRefsEnumerator.valueOf(refIndex);
    }
    catch (IOException e) {
      myExceptionReporter.consume(this, e);
      return null;
    }
  }
  /** Forces both enumerators to persist pending data to disk. */
  @Override
  public void flush() {
    checkDisposed();
    myCommitIdEnumerator.force();
    myRefsEnumerator.force();
  }
  @Override
  public void dispose() {
    try {
      // Flip the flag first so concurrent callers fail fast in checkDisposed().
      myDisposed = true;
      myCommitIdEnumerator.close();
      myRefsEnumerator.close();
    }
    catch (IOException e) {
      LOG.warn(e);
    }
  }
  // After disposal any access is treated as a cancelled operation rather than an error.
  private void checkDisposed() {
    if (myDisposed) throw new ProcessCanceledException();
  }
  /**
   * Serializes CommitId as (hash bytes, root ordinal). Root ordinals come from the
   * sorted root list, so the list must be identical between write and read sessions.
   */
  private static class MyCommitIdKeyDescriptor implements KeyDescriptor<CommitId> {
    @NotNull private final List<? extends VirtualFile> myRoots;
    @NotNull private final Object2IntMap<VirtualFile> myRootsReversed;
    MyCommitIdKeyDescriptor(@NotNull List<? extends VirtualFile> roots) {
      myRoots = roots;
      myRootsReversed = new Object2IntOpenHashMap<>();
      for (int i = 0; i < roots.size(); i++) {
        myRootsReversed.put(roots.get(i), i);
      }
    }
    @Override
    public void save(@NotNull DataOutput out, CommitId value) throws IOException {
      ((HashImpl)value.getHash()).write(out);
      out.writeInt(myRootsReversed.getInt(value.getRoot()));
    }
    @Override
    public CommitId read(@NotNull DataInput in) throws IOException {
      Hash hash = HashImpl.read(in);
      VirtualFile root = myRoots.get(in.readInt());
      if (root == null) return null;
      return new CommitId(hash, root);
    }
    @Override
    public int getHashCode(CommitId value) {
      return value.hashCode();
    }
    @Override
    public boolean isEqual(CommitId val1, CommitId val2) {
      return Objects.equals(val1, val2);
    }
  }
  /** Stub storage used before a real one is available; reverse lookups are illegal. */
  private static class EmptyLogStorage implements VcsLogStorage {
    @Override
    public int getCommitIndex(@NotNull Hash hash, @NotNull VirtualFile root) {
      return 0;
    }
    @NotNull
    @Override
    public CommitId getCommitId(int commitIndex) {
      throw new UnsupportedOperationException("Illegal access to empty hash map by index " + commitIndex);
    }
    @Override
    public boolean containsCommit(@NotNull CommitId id) {
      return false;
    }
    @Override
    public void iterateCommits(@NotNull Predicate<? super CommitId> consumer) {
    }
    @Override
    public int getRefIndex(@NotNull VcsRef ref) {
      return 0;
    }
    @Nullable
    @Override
    public VcsRef getVcsRef(int refIndex) {
      throw new UnsupportedOperationException("Illegal access to empty ref map by index " + refIndex);
    }
    @Override
    public void flush() {
    }
  }
  /**
   * Serializes VcsRef as (commit id, name, provider-specific ref type). The ref type
   * is delegated to the VcsLogProvider of the ref's root.
   */
  private static class VcsRefKeyDescriptor implements KeyDescriptor<VcsRef> {
    @NotNull private final Map<VirtualFile, VcsLogProvider> myLogProviders;
    @NotNull private final KeyDescriptor<CommitId> myCommitIdKeyDescriptor;
    VcsRefKeyDescriptor(@NotNull Map<VirtualFile, VcsLogProvider> logProviders,
                        @NotNull KeyDescriptor<CommitId> commitIdKeyDescriptor) {
      myLogProviders = logProviders;
      myCommitIdKeyDescriptor = commitIdKeyDescriptor;
    }
    @Override
    public int getHashCode(@NotNull VcsRef value) {
      return value.hashCode();
    }
    @Override
    public boolean isEqual(@NotNull VcsRef val1, @NotNull VcsRef val2) {
      return val1.equals(val2);
    }
    @Override
    public void save(@NotNull DataOutput out, @NotNull VcsRef value) throws IOException {
      myCommitIdKeyDescriptor.save(out, new CommitId(value.getCommitHash(), value.getRoot()));
      IOUtil.writeUTF(out, value.getName());
      myLogProviders.get(value.getRoot()).getReferenceManager().serialize(out, value.getType());
    }
    @Override
    public VcsRef read(@NotNull DataInput in) throws IOException {
      CommitId commitId = myCommitIdKeyDescriptor.read(in);
      if (commitId == null) throw new IOException("Can not read commit id for reference");
      String name = IOUtil.readUTF(in);
      VcsRefType type = myLogProviders.get(commitId.getRoot()).getReferenceManager().deserialize(in);
      return new VcsRefImpl(commitId.getHash(), name, type, commitId.getRoot());
    }
  }
  /** BTree enumerator with a containment check that avoids assigning a new id. */
  private static final class MyPersistentBTreeEnumerator extends PersistentBTreeEnumerator<CommitId> {
    MyPersistentBTreeEnumerator(@NotNull StorageId storageId, @NotNull KeyDescriptor<CommitId> commitIdKeyDescriptor,
                                @Nullable StorageLockContext storageLockContext) throws IOException {
      super(storageId.getStorageFile(STORAGE), commitIdKeyDescriptor, Page.PAGE_SIZE, storageLockContext,
            storageId.getVersion());
    }
    // tryEnumerate does a read-only lookup, unlike enumerate which inserts.
    public boolean contains(@NotNull CommitId id) throws IOException {
      return tryEnumerate(id) != NULL_ID;
    }
  }
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.server.handlers.resource;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import io.undertow.UndertowLogger;
import io.undertow.io.IoCallback;
import io.undertow.predicate.Predicate;
import io.undertow.predicate.Predicates;
import io.undertow.server.HandlerWrapper;
import io.undertow.server.HttpHandler;
import io.undertow.server.HttpServerExchange;
import io.undertow.server.handlers.ResponseCodeHandler;
import io.undertow.server.handlers.builder.HandlerBuilder;
import io.undertow.server.handlers.cache.ResponseCache;
import io.undertow.server.handlers.encoding.ContentEncodedResource;
import io.undertow.server.handlers.encoding.ContentEncodedResourceManager;
import io.undertow.util.ByteRange;
import io.undertow.util.CanonicalPathUtils;
import io.undertow.util.DateUtils;
import io.undertow.util.ETag;
import io.undertow.util.ETagUtils;
import io.undertow.util.Headers;
import io.undertow.util.HttpString;
import io.undertow.util.Methods;
import io.undertow.util.MimeMappings;
import io.undertow.util.RedirectBuilder;
import io.undertow.util.StatusCodes;
/**
* @author Stuart Douglas
*/
/**
 * Serves static resources from a {@link ResourceManager}/{@link ResourceSupplier}:
 * GET/POST serve content, HEAD serves headers only, other known HTTP methods get
 * 405 (with an Allow header) and unknown methods get 501. Handles welcome files,
 * optional directory listings, conditional requests (ETag / modification date),
 * byte ranges, caching headers and pre-encoded content.
 *
 * @author Stuart Douglas
 */
public class ResourceHandler implements HttpHandler {
    /**
     * Set of methods prescribed by HTTP 1.1. If request method is not one of those, handler will
     * return NOT_IMPLEMENTED.
     */
    private static final Set<HttpString> KNOWN_METHODS = new HashSet<>();
    static {
        KNOWN_METHODS.add(Methods.OPTIONS);
        KNOWN_METHODS.add(Methods.GET);
        KNOWN_METHODS.add(Methods.HEAD);
        KNOWN_METHODS.add(Methods.POST);
        KNOWN_METHODS.add(Methods.PUT);
        KNOWN_METHODS.add(Methods.DELETE);
        KNOWN_METHODS.add(Methods.TRACE);
        KNOWN_METHODS.add(Methods.CONNECT);
    }
    // Candidate index files tried, in order, when a directory is requested.
    private final List<String> welcomeFiles = new CopyOnWriteArrayList<>(new String[]{"index.html", "index.htm", "default.html", "default.htm"});
    /**
     * If directory listing is enabled.
     */
    private volatile boolean directoryListingEnabled = false;
    /**
     * If the canonical version of paths should be passed into the resource manager.
     */
    private volatile boolean canonicalizePaths = true;
    /**
     * The mime mappings that are used to determine the content type.
     */
    private volatile MimeMappings mimeMappings = MimeMappings.DEFAULT;
    // Requests matching this predicate may receive caching headers / cached responses.
    private volatile Predicate cachable = Predicates.truePredicate();
    // Requests failing this predicate are rejected with 403.
    private volatile Predicate allowed = Predicates.truePredicate();
    private volatile ResourceSupplier resourceSupplier;
    // Kept alongside resourceSupplier for the getter; null when a raw supplier was set.
    private volatile ResourceManager resourceManager;
    /**
     * If this is set this will be the maximum time (in seconds) the client will cache the resource.
     * <p>
     * Note: Do not set this for private resources, as it will cause a Cache-Control: public
     * to be sent.
     * <p>
     * TODO: make this more flexible
     * <p>
     * This will only be used if the {@link #cachable} predicate returns true
     */
    private volatile Integer cacheTime;
    private volatile ContentEncodedResourceManager contentEncodedResourceManager;
    /**
     * Handler that is called if no resource is found
     */
    private final HttpHandler next;
    public ResourceHandler(ResourceManager resourceManager) {
        this(resourceManager, ResponseCodeHandler.HANDLE_404);
    }
    public ResourceHandler(ResourceManager resourceManager, HttpHandler next) {
        this.resourceSupplier = new DefaultResourceSupplier(resourceManager);
        this.resourceManager = resourceManager;
        this.next = next;
    }
    public ResourceHandler(ResourceSupplier resourceSupplier) {
        this(resourceSupplier, ResponseCodeHandler.HANDLE_404);
    }
    public ResourceHandler(ResourceSupplier resourceSupplier, HttpHandler next) {
        this.resourceSupplier = resourceSupplier;
        this.next = next;
    }
    /**
     * You should use {@link #ResourceHandler(ResourceManager)} instead.
     */
    @Deprecated
    public ResourceHandler() {
        this.next = ResponseCodeHandler.HANDLE_404;
    }
    @Override
    public void handleRequest(final HttpServerExchange exchange) throws Exception {
        if (exchange.getRequestMethod().equals(Methods.GET) ||
                exchange.getRequestMethod().equals(Methods.POST)) {
            serveResource(exchange, true);
        } else if (exchange.getRequestMethod().equals(Methods.HEAD)) {
            serveResource(exchange, false);
        } else {
            // Known-but-unsupported methods get 405 + Allow; unknown methods get 501.
            if (KNOWN_METHODS.contains(exchange.getRequestMethod())) {
                exchange.setStatusCode(StatusCodes.METHOD_NOT_ALLOWED);
                exchange.getResponseHeaders().add(Headers.ALLOW,
                        String.join(", ", Methods.GET_STRING, Methods.HEAD_STRING, Methods.POST_STRING));
            } else {
                exchange.setStatusCode(StatusCodes.NOT_IMPLEMENTED);
            }
            exchange.endExchange();
        }
    }
    /**
     * Resolves and serves the requested resource.
     *
     * @param exchange    the current exchange
     * @param sendContent true to send the body (GET/POST), false for headers only (HEAD)
     */
    private void serveResource(final HttpServerExchange exchange, final boolean sendContent) throws Exception {
        if (DirectoryUtils.sendRequestedBlobs(exchange)) {
            return;
        }
        if (!allowed.resolve(exchange)) {
            exchange.setStatusCode(StatusCodes.FORBIDDEN);
            exchange.endExchange();
            return;
        }
        ResponseCache cache = exchange.getAttachment(ResponseCache.ATTACHMENT_KEY);
        final boolean cachable = this.cachable.resolve(exchange);
        //we set caching headers before we try and serve from the cache
        if (cachable && cacheTime != null) {
            exchange.getResponseHeaders().put(Headers.CACHE_CONTROL, "public, max-age=" + cacheTime);
            long date = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(cacheTime);
            String dateHeader = DateUtils.toDateString(new Date(date));
            exchange.getResponseHeaders().put(Headers.EXPIRES, dateHeader);
        }
        if (cache != null && cachable) {
            if (cache.tryServeResponse()) {
                return;
            }
        }
        //we now dispatch to a worker thread
        //as resource manager methods are potentially blocking
        HttpHandler dispatchTask = new HttpHandler() {
            @Override
            public void handleRequest(HttpServerExchange exchange) throws Exception {
                Resource resource = null;
                try {
                    if (File.separatorChar == '/' || !exchange.getRelativePath().contains(File.separator)) {
                        //we don't process resources that contain the separator character if this is not /
                        //this prevents attacks where people use windows path separators in file URLs
                        resource = resourceSupplier.getResource(exchange, canonicalize(exchange.getRelativePath()));
                    }
                } catch (IOException e) {
                    clearCacheHeaders(exchange);
                    UndertowLogger.REQUEST_IO_LOGGER.ioException(e);
                    exchange.setStatusCode(StatusCodes.INTERNAL_SERVER_ERROR);
                    exchange.endExchange();
                    return;
                }
                if (resource == null) {
                    clearCacheHeaders(exchange);
                    //usually a 404 handler
                    next.handleRequest(exchange);
                    return;
                }
                if (resource.isDirectory()) {
                    // Directories: try welcome files, then listing, then 403;
                    // redirect to the trailing-slash form first so relative links work.
                    Resource indexResource;
                    try {
                        indexResource = getIndexFiles(exchange, resourceSupplier, resource.getPath(), welcomeFiles);
                    } catch (IOException e) {
                        UndertowLogger.REQUEST_IO_LOGGER.ioException(e);
                        exchange.setStatusCode(StatusCodes.INTERNAL_SERVER_ERROR);
                        exchange.endExchange();
                        return;
                    }
                    if (indexResource == null) {
                        if (directoryListingEnabled) {
                            DirectoryUtils.renderDirectoryListing(exchange, resource);
                            return;
                        } else {
                            exchange.setStatusCode(StatusCodes.FORBIDDEN);
                            exchange.endExchange();
                            return;
                        }
                    } else if (!exchange.getRequestPath().endsWith("/")) {
                        exchange.setStatusCode(StatusCodes.FOUND);
                        exchange.getResponseHeaders().put(Headers.LOCATION, RedirectBuilder.redirect(exchange, exchange.getRelativePath() + "/", true));
                        exchange.endExchange();
                        return;
                    }
                    resource = indexResource;
                } else if(exchange.getRelativePath().endsWith("/")) {
                    //UNDERTOW-432
                    exchange.setStatusCode(StatusCodes.NOT_FOUND);
                    exchange.endExchange();
                    return;
                }
                final ETag etag = resource.getETag();
                final Date lastModified = resource.getLastModified();
                // Conditional request handling: If-Match/If-Unmodified-Since -> 412,
                // If-None-Match/If-Modified-Since -> 304.
                if (!ETagUtils.handleIfMatch(exchange, etag, false) ||
                        !DateUtils.handleIfUnmodifiedSince(exchange, lastModified)) {
                    exchange.setStatusCode(StatusCodes.PRECONDITION_FAILED);
                    exchange.endExchange();
                    return;
                }
                if (!ETagUtils.handleIfNoneMatch(exchange, etag, true) ||
                        !DateUtils.handleIfModifiedSince(exchange, lastModified)) {
                    exchange.setStatusCode(StatusCodes.NOT_MODIFIED);
                    exchange.endExchange();
                    return;
                }
                final ContentEncodedResourceManager contentEncodedResourceManager = ResourceHandler.this.contentEncodedResourceManager;
                Long contentLength = resource.getContentLength();
                if (contentLength != null && !exchange.getResponseHeaders().contains(Headers.TRANSFER_ENCODING)) {
                    exchange.setResponseContentLength(contentLength);
                }
                // Byte-range support only when the resource supports it and content is not encoded.
                ByteRange.RangeResponseResult rangeResponse = null;
                long start = -1, end = -1;
                if(resource instanceof RangeAwareResource && ((RangeAwareResource)resource).isRangeSupported() && contentLength != null && contentEncodedResourceManager == null) {
                    exchange.getResponseHeaders().put(Headers.ACCEPT_RANGES, "bytes");
                    //TODO: figure out what to do with the content encoded resource manager
                    ByteRange range = ByteRange.parse(exchange.getRequestHeaders().getFirst(Headers.RANGE));
                    if(range != null && range.getRanges() == 1 && resource.getContentLength() != null) {
                        rangeResponse = range.getResponseResult(resource.getContentLength(), exchange.getRequestHeaders().getFirst(Headers.IF_RANGE), resource.getLastModified(), resource.getETag() == null ? null : resource.getETag().getTag());
                        if(rangeResponse != null){
                            start = rangeResponse.getStart();
                            end = rangeResponse.getEnd();
                            exchange.setStatusCode(rangeResponse.getStatusCode());
                            exchange.getResponseHeaders().put(Headers.CONTENT_RANGE, rangeResponse.getContentRange());
                            long length = rangeResponse.getContentLength();
                            exchange.setResponseContentLength(length);
                            if(rangeResponse.getStatusCode() == StatusCodes.REQUEST_RANGE_NOT_SATISFIABLE) {
                                return;
                            }
                        }
                    }
                }
                //we are going to proceed. Set the appropriate headers
                if (!exchange.getResponseHeaders().contains(Headers.CONTENT_TYPE)) {
                    final String contentType = resource.getContentType(mimeMappings);
                    if (contentType != null) {
                        exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, contentType);
                    } else {
                        exchange.getResponseHeaders().put(Headers.CONTENT_TYPE, "application/octet-stream");
                    }
                }
                if (lastModified != null) {
                    exchange.getResponseHeaders().put(Headers.LAST_MODIFIED, resource.getLastModifiedString());
                }
                if (etag != null) {
                    exchange.getResponseHeaders().put(Headers.ETAG, etag.toString());
                }
                if (contentEncodedResourceManager != null) {
                    try {
                        ContentEncodedResource encoded = contentEncodedResourceManager.getResource(resource, exchange);
                        if (encoded != null) {
                            exchange.getResponseHeaders().put(Headers.CONTENT_ENCODING, encoded.getContentEncoding());
                            exchange.getResponseHeaders().put(Headers.CONTENT_LENGTH, encoded.getResource().getContentLength());
                            encoded.getResource().serve(exchange.getResponseSender(), exchange, IoCallback.END_EXCHANGE);
                            return;
                        }
                    } catch (IOException e) {
                        //TODO: should this be fatal
                        UndertowLogger.REQUEST_IO_LOGGER.ioException(e);
                        exchange.setStatusCode(StatusCodes.INTERNAL_SERVER_ERROR);
                        exchange.endExchange();
                        return;
                    }
                }
                if (!sendContent) {
                    exchange.endExchange();
                } else if(rangeResponse != null) {
                    ((RangeAwareResource)resource).serveRange(exchange.getResponseSender(), exchange, start, end, IoCallback.END_EXCHANGE);
                } else {
                    resource.serve(exchange.getResponseSender(), exchange, IoCallback.END_EXCHANGE);
                }
            }
        };
        if(exchange.isInIoThread()) {
            exchange.dispatch(dispatchTask);
        } else {
            dispatchTask.handleRequest(exchange);
        }
    }
    // Removes caching headers set optimistically before the resource lookup failed.
    private void clearCacheHeaders(HttpServerExchange exchange) {
        exchange.getResponseHeaders().remove(Headers.CACHE_CONTROL);
        exchange.getResponseHeaders().remove(Headers.EXPIRES);
    }
    /**
     * Returns the first welcome file that exists under {@code base}, or null if none do.
     */
    private Resource getIndexFiles(HttpServerExchange exchange, ResourceSupplier resourceSupplier, final String base, List<String> possible) throws IOException {
        String realBase;
        if (base.endsWith("/")) {
            realBase = base;
        } else {
            realBase = base + "/";
        }
        for (String possibility : possible) {
            Resource index = resourceSupplier.getResource(exchange, canonicalize(realBase + possibility));
            if (index != null) {
                return index;
            }
        }
        return null;
    }
    private String canonicalize(String s) {
        if(canonicalizePaths) {
            return CanonicalPathUtils.canonicalize(s);
        }
        return s;
    }
    public boolean isDirectoryListingEnabled() {
        return directoryListingEnabled;
    }
    public ResourceHandler setDirectoryListingEnabled(final boolean directoryListingEnabled) {
        this.directoryListingEnabled = directoryListingEnabled;
        return this;
    }
    public ResourceHandler addWelcomeFiles(String... files) {
        this.welcomeFiles.addAll(Arrays.asList(files));
        return this;
    }
    public ResourceHandler setWelcomeFiles(String... files) {
        this.welcomeFiles.clear();
        this.welcomeFiles.addAll(Arrays.asList(files));
        return this;
    }
    public MimeMappings getMimeMappings() {
        return mimeMappings;
    }
    public ResourceHandler setMimeMappings(final MimeMappings mimeMappings) {
        this.mimeMappings = mimeMappings;
        return this;
    }
    public Predicate getCachable() {
        return cachable;
    }
    public ResourceHandler setCachable(final Predicate cachable) {
        this.cachable = cachable;
        return this;
    }
    public Predicate getAllowed() {
        return allowed;
    }
    public ResourceHandler setAllowed(final Predicate allowed) {
        this.allowed = allowed;
        return this;
    }
    public ResourceSupplier getResourceSupplier() {
        return resourceSupplier;
    }
    public ResourceHandler setResourceSupplier(final ResourceSupplier resourceSupplier) {
        this.resourceSupplier = resourceSupplier;
        // A raw supplier has no backing manager to expose.
        this.resourceManager = null;
        return this;
    }
    public ResourceManager getResourceManager() {
        return resourceManager;
    }
    public ResourceHandler setResourceManager(final ResourceManager resourceManager) {
        this.resourceManager = resourceManager;
        this.resourceSupplier = new DefaultResourceSupplier(resourceManager);
        return this;
    }
    public Integer getCacheTime() {
        return cacheTime;
    }
    public ResourceHandler setCacheTime(final Integer cacheTime) {
        this.cacheTime = cacheTime;
        return this;
    }
    public ContentEncodedResourceManager getContentEncodedResourceManager() {
        return contentEncodedResourceManager;
    }
    public ResourceHandler setContentEncodedResourceManager(ContentEncodedResourceManager contentEncodedResourceManager) {
        this.contentEncodedResourceManager = contentEncodedResourceManager;
        return this;
    }
    public boolean isCanonicalizePaths() {
        return canonicalizePaths;
    }
    /**
     * If this handler should use canonicalized paths.
     *
     * WARNING: If this is not true and {@link io.undertow.server.handlers.CanonicalPathHandler} is not installed in
     * the handler chain then is may be possible to perform a directory traversal attack. If you set this to false make
     * sure you have some kind of check in place to control the path.
     * @param canonicalizePaths If paths should be canonicalized
     */
    public void setCanonicalizePaths(boolean canonicalizePaths) {
        this.canonicalizePaths = canonicalizePaths;
    }
    /**
     * Builds a {@code resource} handler from handler-DSL config; requires "location",
     * optionally "allow-listing".
     */
    public static class Builder implements HandlerBuilder {
        @Override
        public String name() {
            return "resource";
        }
        @Override
        public Map<String, Class<?>> parameters() {
            Map<String, Class<?>> params = new HashMap<>();
            params.put("location", String.class);
            params.put("allow-listing", boolean.class);
            return params;
        }
        @Override
        public Set<String> requiredParameters() {
            return Collections.singleton("location");
        }
        @Override
        public String defaultParameter() {
            return "location";
        }
        @Override
        public HandlerWrapper build(Map<String, Object> config) {
            return new Wrapper((String)config.get("location"), (Boolean) config.get("allow-listing"));
        }
    }
    private static class Wrapper implements HandlerWrapper {
        private final String location;
        private final boolean allowDirectoryListing;
        private Wrapper(String location, boolean allowDirectoryListing) {
            this.location = location;
            this.allowDirectoryListing = allowDirectoryListing;
        }
        @Override
        public HttpHandler wrap(HttpHandler handler) {
            ResourceManager rm = new PathResourceManager(Paths.get(location), 1024);
            ResourceHandler resourceHandler = new ResourceHandler(rm);
            resourceHandler.setDirectoryListingEnabled(allowDirectoryListing);
            return resourceHandler;
        }
    }
}
| |
/*
* BasicPlayer.
*
* JavaZOOM : jlgui@javazoom.net
* http://www.javazoom.net
*
*-----------------------------------------------------------------------
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published
* by the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*----------------------------------------------------------------------
*/
package javazoom.jlgui.basicplayer;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.FloatControl;
import javax.sound.sampled.Line;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.UnsupportedAudioFileException;
import javazoom.spi.PropertiesContainer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.tritonus.share.sampled.TAudioFormat;
import org.tritonus.share.sampled.file.TAudioFileFormat;
/**
 * BasicPlayer is a threaded simple player class based on JavaSound API.
 * It has been successfully tested under J2SE 1.3.x, 1.4.x and 1.5.x.
 */
@SuppressWarnings(value={"deprecation","unchecked"})
public class BasicPlayer implements BasicController, Runnable
{
    // Size (bytes) of the buffer used to shuttle decoded audio to the line.
    public static int EXTERNAL_BUFFER_SIZE = 4000 * 4;
    // Tolerance (bytes) when skipping through the stream during a seek.
    public static int SKIP_INACCURACY_SIZE = 1200;
    // Playback thread; recreated on each startPlayback().
    protected Thread m_thread = null;
    // Source being played: a File, URL or InputStream (see the open() overloads).
    protected Object m_dataSource;
    // Original (encoded) stream, kept for progress/position reporting.
    protected AudioInputStream m_encodedaudioInputStream;
    // Total encoded length in bytes, or -1 when unknown.
    protected int encodedLength = -1;
    // Decoded (PCM) stream actually written to the line.
    protected AudioInputStream m_audioInputStream;
    protected AudioFileFormat m_audioFileFormat;
    protected SourceDataLine m_line;
    protected FloatControl m_gainControl;
    protected FloatControl m_panControl;
    // Mixer selected by name via setMixerName(), or null for the system default.
    protected String m_mixerName = null;
    private int m_lineCurrentBufferSize = -1;
    private int lineBufferSize = -1;
    // Sleep between buffer writes in the playback loop; -1 disables sleeping.
    private long threadSleep = -1;
    protected boolean mute;
    // True on Windows; lines are only close()d on Windows (closing crashes on Mac).
    protected boolean windows;
    private static Log log = LogFactory.getLog(BasicPlayer.class);
    // Lines queued for deferred close (see freeAudioResources()).
    private ArrayList<SourceDataLine> lineas;
    /**
     * These variables are used to distinguish stopped, paused, playing states.
     * We need them to control Thread.
     */
    public static final int UNKNOWN = -1;
    public static final int PLAYING = 0;
    public static final int PAUSED = 1;
    public static final int STOPPED = 2;
    public static final int OPENED = 3;
    public static final int SEEKING = 4;
    protected int m_status = UNKNOWN;
    // Listeners to be notified.
    private Collection m_listeners = null;
    // Shared empty map handed to listeners when no stream properties exist.
    private Map empty_map = new HashMap();
    /**
     * Constructs a Basic Player with no data source and an empty listener list.
     */
    public BasicPlayer()
    {
        lineas = new ArrayList<SourceDataLine>();
        String osName = System.getProperty("os.name");
        if(osName.startsWith("Windows")) {
            windows = true;
        }else {
            windows = false;
        }
        // NOTE(review): the OS detection above is immediately discarded by this
        // unconditional assignment, so "windows" is always false after
        // construction. Looks like leftover debugging -- confirm before removing.
        windows = false;
        mute = false;
        m_dataSource = null;
        m_listeners = new ArrayList();
        reset();
    }
    // Convenience overload: full reset (status goes back to UNKNOWN).
    protected void reset(){
        reset(false);
    }
protected void reset(boolean seeking)
{
if(!seeking) {
m_status = UNKNOWN;
}
if (m_audioInputStream != null)
{
synchronized (m_audioInputStream)
{
closeStream();
}
}
m_audioInputStream = null;
m_audioFileFormat = null;
m_encodedaudioInputStream = null;
encodedLength = -1;
if(seeking) {
synchronized (m_line) {
if(m_line != null){
m_line.stop();
if(windows) {
//En mac peta
m_line.close();
}
lineas.add(m_line);
m_line = null;
}
}
}
if (m_line != null) {
synchronized (m_line) {
m_line.stop();
if(windows) {
//En mac peta
m_line.close();
}
lineas.add(m_line);
m_line = null;
}
}
m_gainControl = null;
m_panControl = null;
}
    /**
     * Add listener to be notified.
     * @param bpl listener to register for player events.
     */
    public void addBasicPlayerListener(BasicPlayerListener bpl)
    {
        m_listeners.add(bpl);
    }
    /**
     * Return registered listeners.
     * @return the live internal listener collection (callers can mutate it).
     */
    public Collection getListeners()
    {
        return m_listeners;
    }
    /**
     * Remove registered listener.
     * @param bpl listener to unregister; no-op if it was never added.
     */
    public void removeBasicPlayerListener(BasicPlayerListener bpl)
    {
        if (m_listeners != null)
        {
            m_listeners.remove(bpl);
        }
    }
    /**
     * Set SourceDataLine buffer size. It affects audio latency.
     * (the delay between line.write(data) and real sound).
     * Minimum value should be over 10000 bytes.
     * Takes effect the next time the line is opened.
     * @param size -1 means maximum buffer size available.
     */
    public void setLineBufferSize(int size)
    {
        lineBufferSize = size;
    }
    /**
     * Return the requested SourceDataLine buffer size.
     * @return -1 means maximum buffer size.
     */
    public int getLineBufferSize()
    {
        return lineBufferSize;
    }
    /**
     * Return SourceDataLine current buffer size.
     * @return the buffer size actually used when the line was opened, or -1.
     */
    public int getLineCurrentBufferSize()
    {
        return m_lineCurrentBufferSize;
    }
    /**
     * Set thread sleep time between buffer writes (lowers CPU usage).
     * Default is -1 (no sleep time).
     * @param time in milliseconds.
     */
    public void setSleepTime(long time)
    {
        threadSleep = time;
    }
    /**
     * Return thread sleep time in milliseconds.
     * @return -1 means no sleep time.
     */
    public long getSleepTime()
    {
        return threadSleep;
    }
    /**
     * Returns BasicPlayer status.
     * @return one of UNKNOWN, PLAYING, PAUSED, STOPPED, OPENED, SEEKING.
     */
    public int getStatus()
    {
        return m_status;
    }
    /**
     * Open file to play: the file becomes the data source and the audio
     * stream/format are initialized. Any running playback thread is killed first.
     */
    public void open(File file) throws BasicPlayerException
    {
        if(m_thread != null) {
            // NOTE(review): Thread.stop() is deprecated and unsafe; replacing it
            // with interrupt() would change shutdown semantics -- needs review.
            m_thread.stop();
        }
        //log.info("open(" + file + ")");
        if (file != null)
        {
            m_dataSource = file;
            initAudioInputStream();
        }
    }
    /**
     * Open URL to play: the URL becomes the data source and the audio
     * stream/format are initialized. Any running playback thread is killed first.
     */
    public void open(URL url) throws BasicPlayerException
    {
        if(m_thread != null) {
            // NOTE(review): Thread.stop() is deprecated and unsafe -- see open(File).
            m_thread.stop();
        }
        //log.info("open(" + url + ")");
        if (url != null)
        {
            m_dataSource = url;
            initAudioInputStream();
        }
    }
    /**
     * Open inputstream to play: the stream becomes the data source and the audio
     * stream/format are initialized. Any running playback thread is killed first.
     */
    public void open(InputStream inputStream) throws BasicPlayerException
    {
        if(m_thread != null) {
            // NOTE(review): Thread.stop() is deprecated and unsafe -- see open(File).
            m_thread.stop();
        }
        //log.info("open(" + inputStream + ")");
        if (inputStream != null)
        {
            m_dataSource = inputStream;
            initAudioInputStream();
        }
    }
    // Convenience overload: initialize without preserving seek state.
    protected void initAudioInputStream() throws BasicPlayerException {
        initAudioInputStream(false);
    }
    /**
     * Inits AudioInputStream and AudioFileFormat from the data source, creates
     * the line, and (unless re-initializing for a seek) notifies listeners of
     * the OPENING/OPENED events together with the stream's properties.
     *
     * @param resetBusqueda true when re-initializing as part of a seek
     *                      ("busqueda" = search/seek); suppresses listener
     *                      notification and preserves player status.
     * @throws BasicPlayerException wrapping any line/format/IO failure.
     */
    protected void initAudioInputStream(boolean resetBusqueda) throws BasicPlayerException
    {
        try
        {
            if(resetBusqueda) {
                reset(true);
            } else {
                reset();
            }
            notifyEvent(BasicPlayerEvent.OPENING, getEncodedStreamPosition(), -1, m_dataSource);
            // Dispatch on the concrete type of the data source set by open(...).
            if (m_dataSource instanceof URL)
            {
                initAudioInputStream((URL) m_dataSource);
            }
            else if (m_dataSource instanceof File)
            {
                initAudioInputStream((File) m_dataSource);
            }
            else if (m_dataSource instanceof InputStream)
            {
                initAudioInputStream((InputStream) m_dataSource);
            }
            createLine();
            if(!resetBusqueda) {
                // Notify listeners with AudioFileFormat properties.
                Map properties = null;
                if (m_audioFileFormat instanceof TAudioFileFormat)
                {
                    // Tritonus SPI compliant audio file format.
                    properties = ((TAudioFileFormat) m_audioFileFormat).properties();
                    // Clone the Map because it is not mutable.
                    properties = deepCopy(properties);
                }
                else properties = new HashMap();
                // Add JavaSound properties.
                if (m_audioFileFormat.getByteLength() > 0) properties.put("audio.length.bytes", new Integer(m_audioFileFormat.getByteLength()));
                if (m_audioFileFormat.getFrameLength() > 0) properties.put("audio.length.frames", new Integer(m_audioFileFormat.getFrameLength()));
                if (m_audioFileFormat.getType() != null) properties.put("audio.type", (m_audioFileFormat.getType().toString()));
                // Audio format.
                AudioFormat audioFormat = m_audioFileFormat.getFormat();
                if (audioFormat.getFrameRate() > 0) properties.put("audio.framerate.fps", new Float(audioFormat.getFrameRate()));
                if (audioFormat.getFrameSize() > 0) properties.put("audio.framesize.bytes", new Integer(audioFormat.getFrameSize()));
                if (audioFormat.getSampleRate() > 0) properties.put("audio.samplerate.hz", new Float(audioFormat.getSampleRate()));
                if (audioFormat.getSampleSizeInBits() > 0) properties.put("audio.samplesize.bits", new Integer(audioFormat.getSampleSizeInBits()));
                if (audioFormat.getChannels() > 0) properties.put("audio.channels", new Integer(audioFormat.getChannels()));
                if (audioFormat instanceof TAudioFormat)
                {
                    // Tritonus SPI compliant audio format.
                    Map addproperties = ((TAudioFormat) audioFormat).properties();
                    properties.putAll(addproperties);
                }
                // Add SourceDataLine
                properties.put("basicplayer.sourcedataline", m_line);
                Iterator it = m_listeners.iterator();
                while (it.hasNext())
                {
                    BasicPlayerListener bpl = (BasicPlayerListener) it.next();
                    bpl.opened(m_dataSource, properties);
                }
                m_status = OPENED;
                notifyEvent(BasicPlayerEvent.OPENED, getEncodedStreamPosition(), -1, null);
            }
            // NOTE(review): redundant when !resetBusqueda (already set above),
            // but required for the seek path; kept as-is.
            m_status = OPENED;
        }
        catch (LineUnavailableException e)
        {
            throw new BasicPlayerException(e);
        }
        catch (UnsupportedAudioFileException e)
        {
            throw new BasicPlayerException(e);
        }
        catch (IOException e)
        {
            throw new BasicPlayerException(e);
        }
    }
    /**
     * Inits Audio ressources from file.
     */
    protected void initAudioInputStream(File file) throws UnsupportedAudioFileException, IOException
    {
        m_audioInputStream = AudioSystem.getAudioInputStream(file);
        m_audioFileFormat = AudioSystem.getAudioFileFormat(file);
    }
    /**
     * Inits Audio ressources from URL.
     */
    protected void initAudioInputStream(URL url) throws UnsupportedAudioFileException, IOException
    {
        m_audioInputStream = AudioSystem.getAudioInputStream(url);
        m_audioFileFormat = AudioSystem.getAudioFileFormat(url);
    }
    /**
     * Inits Audio ressources from InputStream.
     * NOTE(review): the stream must support mark/reset for getAudioFileFormat
     * to succeed on most SPIs -- confirm with callers.
     */
    protected void initAudioInputStream(InputStream inputStream) throws UnsupportedAudioFileException, IOException
    {
        m_audioInputStream = AudioSystem.getAudioInputStream(inputStream);
        m_audioFileFormat = AudioSystem.getAudioFileFormat(inputStream);
    }
/**
* Inits Audio ressources from AudioSystem.<br>
*/
protected void initLine() throws LineUnavailableException
{
//log.info("initLine()");
if (m_line == null) createLine();
if (!m_line.isOpen())
{
openLine();
}
else
{
AudioFormat lineAudioFormat = m_line.getFormat();
AudioFormat audioInputStreamFormat = m_audioInputStream == null ? null : m_audioInputStream.getFormat();
if (!lineAudioFormat.equals(audioInputStreamFormat))
{
m_line.close();
openLine();
}
}
}
    /**
     * Inits a DataLine.<br>
     *
     * We check if the line supports Gain and Pan controls.
     *
     * From the AudioInputStream, i.e. from the sound file, we
     * fetch information about the format of the audio data. These
     * information include the sampling frequency, the number of
     * channels and the size of the samples. There information
     * are needed to ask JavaSound for a suitable output line
     * for this audio file.
     * Furthermore, we have to give JavaSound a hint about how
     * big the internal buffer for the line should be. Here,
     * we say AudioSystem.NOT_SPECIFIED, signaling that we don't
     * care about the exact size. JavaSound will use some default
     * value for the buffer size.
     */
    protected void createLine() throws LineUnavailableException
    {
        //log.info("Create Line");
        if (m_line == null)
        {
            AudioFormat sourceFormat = m_audioInputStream.getFormat();
            //log.info("Create Line : Source format : " + sourceFormat.toString());
            // Normalize the sample size: anything other than 8-bit becomes 16-bit
            // PCM (including unspecified sizes and U-law/A-law encodings).
            int nSampleSizeInBits = sourceFormat.getSampleSizeInBits();
            if (nSampleSizeInBits <= 0) nSampleSizeInBits = 16;
            if ((sourceFormat.getEncoding() == AudioFormat.Encoding.ULAW) || (sourceFormat.getEncoding() == AudioFormat.Encoding.ALAW)) nSampleSizeInBits = 16;
            if (nSampleSizeInBits != 8) nSampleSizeInBits = 16;
            AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sourceFormat.getSampleRate(), nSampleSizeInBits, sourceFormat.getChannels(), sourceFormat.getChannels() * (nSampleSizeInBits / 8), sourceFormat.getSampleRate(), false);
            //log.info("Create Line : Target format: " + targetFormat);
            // Keep a reference on encoded stream to progress notification.
            m_encodedaudioInputStream = m_audioInputStream;
            try
            {
                // Get total length in bytes of the encoded stream.
                encodedLength = m_encodedaudioInputStream.available();
            }
            catch (IOException e)
            {
                log.error("Cannot get m_encodedaudioInputStream.available()", e);
            }
            // Create decoded stream.
            m_audioInputStream = AudioSystem.getAudioInputStream(targetFormat, m_audioInputStream);
            AudioFormat audioFormat = m_audioInputStream.getFormat();
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
            // Use the explicitly selected mixer when one was set; otherwise fall
            // back to the system default line and clear the stale mixer name.
            Mixer mixer = getMixer(m_mixerName);
            if (mixer != null)
            {
                //log.info("Mixer : "+mixer.getMixerInfo().toString());
                m_line = (SourceDataLine) mixer.getLine(info);
            }
            else
            {
                m_line = (SourceDataLine) AudioSystem.getLine(info);
                m_mixerName = null;
            }
            //log.info("Line : " + m_line.toString());
            //log.debug("Line Info : " + m_line.getLineInfo().toString());
            //log.debug("Line AudioFormat: " + m_line.getFormat().toString());
        }
    }
    /**
     * Opens the line with the decoded stream's format and caches references to
     * the gain and pan controls when the line supports them.
     */
    protected void openLine() throws LineUnavailableException
    {
        if (m_line != null)
        {
            AudioFormat audioFormat = m_audioInputStream.getFormat();
            // Use the requested buffer size, or the line's default when unset (<= 0).
            int buffersize = lineBufferSize;
            if (buffersize <= 0) buffersize = m_line.getBufferSize();
            m_lineCurrentBufferSize = buffersize;
            m_line.open(audioFormat, buffersize);
            //log.info("Open Line : BufferSize=" + buffersize);
            /*-- Display supported controls --*/
            /* Control[] c = m_line.getControls();
             for (int p = 0; p < c.length; p++)
             {
             //log.debug("Controls : " + c[p].toString());
             }*/
            /*-- Is Gain Control supported ? --*/
            if (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN))
            {
                m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
                //log.info("Master Gain Control : [" + m_gainControl.getMinimum() + "," + m_gainControl.getMaximum() + "] " + m_gainControl.getPrecision());
            }
            /*-- Is Pan control supported ? --*/
            if (m_line.isControlSupported(FloatControl.Type.PAN))
            {
                m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
                //log.info("Pan Control : [" + m_panControl.getMinimum() + "," + m_panControl.getMaximum() + "] " + m_panControl.getPrecision());
            }
        }
    }
/**
* Stops the playback.<br>
*
* Player Status = STOPPED.<br>
* Thread should free Audio ressources.
*/
protected void stopPlayback()
{
if ((m_status == PLAYING) || (m_status == PAUSED))
{
if (m_line != null)
{
m_line.flush();
m_line.stop();
if(windows) {
//En mac peta
m_line.close();
lineas.add(m_line);
}
}
m_status = STOPPED;
notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null);
synchronized (m_audioInputStream)
{
closeStream();
}
//log.info("stopPlayback() completed");
}
}
/**
* Pauses the playback.<br>
*
* Player Status = PAUSED.
*/
protected void pausePlayback()
{
if (m_line != null)
{
if (m_status == PLAYING)
{
m_line.flush();
m_line.stop();
m_status = PAUSED;
//log.info("pausePlayback() completed");
notifyEvent(BasicPlayerEvent.PAUSED, getEncodedStreamPosition(), -1, null);
}
}
}
/**
* Resumes the playback.<br>
*
* Player Status = PLAYING.
*/
protected void resumePlayback()
{
if (m_line != null)
{
if (m_status == PAUSED)
{
m_line.flush();
m_line.start();
m_status = PLAYING;
if(m_thread != null) {
m_thread.interrupt();
}
//log.info("resumePlayback() completed");
notifyEvent(BasicPlayerEvent.RESUMED, getEncodedStreamPosition(), -1, null);
}
}
}
    /**
     * Starts playback: reopens the stream when STOPPED, then (from OPENED)
     * opens the line, spawns the playback thread and fires PLAYING.
     */
    protected void startPlayback() throws BasicPlayerException
    {
        if(m_thread != null) {
            // NOTE(review): Thread.stop() is deprecated and unsafe -- see open(File).
            m_thread.stop();
        }
        if (m_status == STOPPED) initAudioInputStream();
        if (m_status == OPENED)
        {
            //log.info("startPlayback called");
            // Defensive wait for a previous playback thread that is still alive.
            if ((m_thread != null && m_thread.isAlive()))
            {
                //log.info("WARNING: old thread still running!!");
                int cnt = 0;
                while (m_status != OPENED)
                {
                    try
                    {
                        if (m_thread != null)
                        {
                            //log.info("Waiting ... " + cnt);
                            cnt++;
                            Thread.sleep(1000);
                            // After ~2s, interrupt the old thread to unblock it.
                            if (cnt > 2)
                            {
                                m_thread.interrupt();
                            }
                        }
                    }
                    catch (InterruptedException e)
                    {
                        throw new BasicPlayerException(BasicPlayerException.WAITERROR, e);
                    }
                }
            }
            // Open SourceDataLine.
            try
            {
                initLine();
            }
            catch (LineUnavailableException e)
            {
                throw new BasicPlayerException(BasicPlayerException.CANNOTINITLINE, e);
            }
            //log.info("Creating new thread");
            m_thread = new Thread(this, "BasicPlayer");
            m_thread.start();
            if (m_line != null)
            {
                m_line.start();
                m_status = PLAYING;
                notifyEvent(BasicPlayerEvent.PLAYING, getEncodedStreamPosition(), -1, null);
            }
        }
    }
private void freeAudioResources(int nBytesRead) {
// Free audio resources.
synchronized (m_line) {
if (m_line != null) {
m_line.drain();
m_line.stop();
if(windows) {
//En mac peta
m_line.close();
}
lineas.add(m_line);
m_line = null;
}
}
// Notification of "End Of Media"
if (nBytesRead == -1)
{
notifyEvent(BasicPlayerEvent.EOM, getEncodedStreamPosition(), -1, null);
}
// Close stream.
closeStream();
while (lineas.size()>0) {
lineas.remove(0).close();
}
}
    /**
     * Main loop.
     *
     * Player Status == STOPPED || SEEKING => End of Thread + Freeing Audio Ressources.<br>
     * Player Status == PLAYING => Audio stream data sent to Audio line.<br>
     * Player Status == PAUSED => Waiting for another status.
     */
    public void run()
    {
        int nBytesRead = 1;
        byte[] abData = new byte[EXTERNAL_BUFFER_SIZE];
        // Main play/pause loop.
        while ((nBytesRead != -1) && (m_status != STOPPED) && (m_status != UNKNOWN)) {
            if (m_status == PLAYING) {
                // Play.
                try {
                    // NOTE(review): synchronizing on m_audioInputStream can NPE if
                    // the stream is nulled concurrently (e.g. by reset()); the null
                    // check inside the block does not protect the monitor-enter.
                    synchronized (m_audioInputStream) {
                        if( m_audioInputStream != null) {
                            nBytesRead = m_audioInputStream.read(abData, 0, abData.length);
                        }
                    }
                    if (nBytesRead >= 0) {
                        //synchronized (m_line) {
                        if(m_line!= null) {
                            m_line.write(abData, 0, nBytesRead);
                        }
                        //}
                        //int nBytesWritten = m_line.write(abData, 0, nBytesRead);
                        int nEncodedBytes = getEncodedStreamPosition();
                        Iterator it = m_listeners.iterator();
                        Map properties;
                        // Skip progress notification while a seek is in flight.
                        if( m_status != SEEKING && m_audioInputStream != null) {
                            synchronized (m_audioInputStream) {
                                if (m_audioInputStream instanceof PropertiesContainer )
                                {
                                    properties = ((PropertiesContainer) m_audioInputStream).properties();
                                }else {
                                    properties = empty_map;
                                }
                            }
                            // Report position (encoded bytes + line microseconds) to listeners.
                            while (it.hasNext())
                            {
                                long tiempo = 0;
                                if(m_line != null) {
                                    tiempo = m_line.getMicrosecondPosition();
                                }
                                BasicPlayerListener bpl = (BasicPlayerListener) it.next();
                                bpl.progress(nEncodedBytes, tiempo, null, properties);
                            }
                        }
                    }
                }
                catch (IOException e) {
                    //log.error("Thread cannot run()", e);
                    m_status = STOPPED;
                    notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null);
                }
                // Nice CPU usage.
                if (threadSleep > 0) {
                    try {
                        Thread.sleep(threadSleep);
                    } catch (InterruptedException e) {
                        //log.error("Thread cannot sleep(" + threadSleep + ")", e);
                    }
                }
            } else if (m_status == PAUSED){ // Pause
                // Sleep until resumePlayback() interrupts or status changes.
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    //log.error("Thread cannot sleep(1000)", e);
                }
            } else if( m_status == SEEKING) {
                // Park while skipBytes() rebuilds the stream; it interrupts us when done.
                try {
                    while(m_status == SEEKING) {
                        Thread.sleep(1000);
                    }
                } catch (InterruptedException e) {
                    //log.error("Thread cannot sleep(1000)", e);
                }
            }
        }
        freeAudioResources(nBytesRead);
        m_status = STOPPED;
        notifyEvent(BasicPlayerEvent.STOPPED, getEncodedStreamPosition(), -1, null);
        //log.info("Thread completed");
    }
    /**
     * Skip bytes in the File inputstream.
     * It will skip N frames matching to bytes, so it will never skip given bytes length exactly.
     * @param bytes target byte offset from the start of the (encoded) stream.
     * @return value>0 for File and value=0 for URL and InputStream
     * @throws BasicPlayerException
     */
    protected long skipBytes(long bytes) throws BasicPlayerException
    {
        long totalSkipped = 0;
        // Seeking is only supported for File sources (stream is reopened from 0).
        if (m_dataSource instanceof File)
        {
            //log.info("Bytes to skip : " + bytes);
            int previousStatus = m_status;
            // SEEKING parks the playback thread (see run()) while we rebuild.
            m_status = SEEKING;
            long skipped = 0;
            try
            {
                //synchronized (m_audioInputStream)
                {
                    notifyEvent(BasicPlayerEvent.SEEKING, getEncodedStreamPosition(), -1, null);
                    // Reopen the stream from the beginning, preserving status.
                    initAudioInputStream(true);
                    if (m_audioInputStream != null)
                    {
                        // Loop until bytes are really skipped.
                        while (totalSkipped < (bytes - SKIP_INACCURACY_SIZE))
                        {
                            skipped = m_audioInputStream.skip(bytes - totalSkipped);
                            if (skipped == 0) {
                                // No forward progress: stop to avoid an endless loop.
                                break;
                            }
                            totalSkipped = totalSkipped + skipped;
                            //log.info("Skipped : " + totalSkipped + "/" + bytes);
                            if (totalSkipped == -1) throw new BasicPlayerException(BasicPlayerException.SKIPNOTSUPPORTED);
                        }
                    }
                }
                notifyEvent(BasicPlayerEvent.SEEKED, getEncodedStreamPosition(), -1, null);
                // m_status = OPENED;
                //System.out.println("4");
                try
                {
                    initLine();
                }
                catch (LineUnavailableException e)
                {
                    throw new BasicPlayerException(BasicPlayerException.CANNOTINITLINE, e);
                }
                //System.out.println("5");
                if (m_line != null)
                {
                    m_line.start();
                }
                // Restore the pre-seek status and wake the parked playback thread.
                m_status = previousStatus;
                /*
                 if (previousStatus == PLAYING) startPlayback();
                 else if (previousStatus == PAUSED)
                 {
                 startPlayback();
                 pausePlayback();
                 }*/
                //System.out.println("6")
                m_thread.interrupt();
                //System.out.println("7");
            }
            catch (IOException e)
            {
                throw new BasicPlayerException(e);
            }
        }
        return totalSkipped;
    }
    /**
     * Notify listeners about a BasicPlayerEvent, asynchronously on a dedicated
     * launcher thread (listeners are snapshotted to avoid concurrent mutation).
     * @param code event code.
     * @param position in the stream when the event occurs.
     */
    protected void notifyEvent(int code, int position, double value, Object description)
    {
        BasicPlayerEventLauncher trigger = new BasicPlayerEventLauncher(code, position, value, description, new ArrayList(m_listeners), this);
        trigger.start();
    }
protected int getEncodedStreamPosition()
{
int nEncodedBytes = -1;
if (m_dataSource instanceof File)
{
try
{
if (m_encodedaudioInputStream != null)
{
nEncodedBytes = encodedLength - m_encodedaudioInputStream.available();
}
}
catch (IOException e)
{
////log.debug("Cannot get m_encodedaudioInputStream.available()",e);
}
}
return nEncodedBytes;
}
protected void closeStream()
{
// Close stream.
try
{
if (m_audioInputStream != null)
{
m_audioInputStream.close();
//log.info("Stream closed");
}
}
catch (IOException e)
{
//log.info("Cannot close stream", e);
}
}
/**
* Returns true if Gain control is supported.
*/
public boolean hasGainControl()
{
if (m_gainControl == null)
{
// Try to get Gain control again (to support J2SE 1.5)
if ( (m_line != null) && (m_line.isControlSupported(FloatControl.Type.MASTER_GAIN))) m_gainControl = (FloatControl) m_line.getControl(FloatControl.Type.MASTER_GAIN);
}
return m_gainControl != null;
}
/**
* Returns Gain value.
*/
public float getGainValue()
{
if (hasGainControl())
{
return m_gainControl.getValue();
}
else
{
return 0.0F;
}
}
/**
* Gets max Gain value.
*/
public float getMaximumGain()
{
if (hasGainControl())
{
return m_gainControl.getMaximum();
}
else
{
return 0.0F;
}
}
/**
* Gets min Gain value.
*/
public float getMinimumGain()
{
if (hasGainControl())
{
return m_gainControl.getMinimum();
}
else
{
return 0.0F;
}
}
/**
* Returns true if Pan control is supported.
*/
public boolean hasPanControl()
{
if (m_panControl == null)
{
// Try to get Pan control again (to support J2SE 1.5)
if ((m_line != null)&& (m_line.isControlSupported(FloatControl.Type.PAN))) m_panControl = (FloatControl) m_line.getControl(FloatControl.Type.PAN);
}
return m_panControl != null;
}
/**
* Returns Pan precision.
*/
public float getPrecision()
{
if (hasPanControl())
{
return m_panControl.getPrecision();
}
else
{
return 0.0F;
}
}
/**
* Returns Pan value.
*/
public float getPan()
{
if (hasPanControl())
{
return m_panControl.getValue();
}
else
{
return 0.0F;
}
}
/**
* Deep copy of a Map.
* @param src
* @return
*/
protected Map deepCopy(Map src)
{
HashMap map = new HashMap();
if (src != null)
{
Iterator it = src.keySet().iterator();
while (it.hasNext())
{
Object key = it.next();
Object value = src.get(key);
map.put(key, value);
}
}
return map;
}
/**
* @see javazoom.jlgui.basicplayer.BasicController#seek(long)
*/
public long seek(long bytes) throws BasicPlayerException
{
if(m_status == PLAYING) {
return skipBytes(bytes);
}
return 0;
}
    /**
     * Starts playback (delegates to startPlayback()).
     * @see javazoom.jlgui.basicplayer.BasicController#play()
     */
    public void play() throws BasicPlayerException
    {
        startPlayback();
    }
    /**
     * Stops playback (delegates to stopPlayback()).
     * @see javazoom.jlgui.basicplayer.BasicController#stop()
     */
    public void stop() throws BasicPlayerException
    {
        stopPlayback();
    }
    /**
     * Pauses playback (delegates to pausePlayback()).
     * @see javazoom.jlgui.basicplayer.BasicController#pause()
     */
    public void pause() throws BasicPlayerException
    {
        pausePlayback();
    }
    /**
     * Resumes playback (delegates to resumePlayback()).
     * @see javazoom.jlgui.basicplayer.BasicController#resume()
     */
    public void resume() throws BasicPlayerException
    {
        resumePlayback();
    }
/**
* Sets Pan value.
* Line should be opened before calling this method.
* Linear scale : -1.0 <--> +1.0
*/
public void setPan(double fPan) throws BasicPlayerException
{
if (hasPanControl())
{
//log.debug("Pan : " + fPan);
m_panControl.setValue((float) fPan);
notifyEvent(BasicPlayerEvent.PAN, getEncodedStreamPosition(), fPan, null);
}
else throw new BasicPlayerException(BasicPlayerException.PANCONTROLNOTSUPPORTED);
}
    /**
     * Sets Gain value.
     * Line should be opened before calling this method.
     * Linear scale 0.0 <--> 1.0
     * Threshold Coef. : 1/2 to avoid saturation.
     */
    public void setGain(double fGain) throws BasicPlayerException
    {
        if (hasGainControl())
        {
            double minGainDB = getMinimumGain();
            // Usable amplification range in dB; the 10/20 factor halves the
            // maximum to avoid saturation (see "Threshold Coef." above).
            double ampGainDB = ((10.0f / 20.0f) * getMaximumGain()) - getMinimumGain();
            double cste = Math.log(10.0) / 20;
            // Map the linear 0..1 fGain onto the control's logarithmic dB scale.
            double valueDB = minGainDB + (1 / cste) * Math.log(1 + (Math.exp(cste * ampGainDB) - 1) * fGain);
            //log.debug("Gain : " + valueDB);
            m_gainControl.setValue((float) valueDB);
            if(!mute) {
                notifyEvent(BasicPlayerEvent.GAIN, getEncodedStreamPosition(), fGain, null);
            }
        }
        else throw new BasicPlayerException(BasicPlayerException.GAINCONTROLNOTSUPPORTED);
    }
public List getMixers()
{
ArrayList mixers = new ArrayList();
Mixer.Info[] mInfos = AudioSystem.getMixerInfo();
if (mInfos != null)
{
for (int i = 0; i < mInfos.length; i++)
{
Line.Info lineInfo = new Line.Info(SourceDataLine.class);
Mixer mixer = AudioSystem.getMixer(mInfos[i]);
if (mixer.isLineSupported(lineInfo))
{
mixers.add(mInfos[i].getName());
}
}
}
return mixers;
}
public Mixer getMixer(String name)
{
Mixer mixer = null;
if (name != null)
{
Mixer.Info[] mInfos = AudioSystem.getMixerInfo();
if (mInfos != null)
{
for (int i = 0; i < mInfos.length; i++)
{
if (mInfos[i].getName().equals(name))
{
mixer = AudioSystem.getMixer(mInfos[i]);
break;
}
}
}
}
return mixer;
}
    // Returns the selected mixer name, or null when the system default is used.
    public String getMixerName()
    {
        return m_mixerName;
    }
    // Selects the mixer by display name for the next line creation (null = default).
    public void setMixerName(String name)
    {
        m_mixerName = name;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller.internal;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.ambari.server.ServiceNotFoundException;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
import org.apache.ambari.server.controller.jmx.JMXHostProvider;
import org.apache.ambari.server.controller.jmx.JMXPropertyProvider;
import org.apache.ambari.server.controller.metrics.MetricHostProvider;
import org.apache.ambari.server.controller.metrics.MetricPropertyProviderFactory;
import org.apache.ambari.server.controller.metrics.MetricsPropertyProvider;
import org.apache.ambari.server.controller.metrics.MetricsServiceProvider;
import org.apache.ambari.server.controller.metrics.RestMetricsPropertyProvider;
import org.apache.ambari.server.controller.metrics.timeline.cache.TimelineMetricCacheProvider;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.PropertyProvider;
import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.spi.SystemException;
import org.apache.ambari.server.controller.utilities.StreamProvider;
import org.apache.ambari.server.security.authorization.AuthorizationException;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.UriInfo;
import org.apache.ambari.server.state.stack.Metric;
import org.apache.ambari.server.state.stack.MetricDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.google.inject.Injector;
/**
 * This class analyzes a service's metrics to determine if additional
 * metrics should be fetched. It's okay to maintain state here since these
 * are done per-request.
 */
public class StackDefinedPropertyProvider implements PropertyProvider {
    private static final Logger LOG = LoggerFactory.getLogger(StackDefinedPropertyProvider.class);
    // Static Guice-injected collaborators, populated via init(Injector).
    @Inject
    private static Clusters clusters = null;
    @Inject
    private static AmbariMetaInfo metaInfo = null;
    @Inject
    private static Injector injector = null;
    /**
     * A factory used to retrieve Guice-injected instances of a metric
     * {@link PropertyProvider}.
     */
    @Inject
    private static MetricPropertyProviderFactory metricPropertyProviderFactory;
    // Per-instance configuration supplied through the constructor.
    private Resource.Type type = null;
    private String clusterNamePropertyId = null;
    private String hostNamePropertyId = null;
    private String componentNamePropertyId = null;
    private String resourceStatePropertyId = null;
    private ComponentSSLConfiguration sslConfig = null;
    private URLStreamProvider streamProvider = null;
    private JMXHostProvider jmxHostProvider;
    // Fallback providers used when no stack-defined metrics apply.
    private PropertyProvider defaultJmx = null;
    private PropertyProvider defaultGanglia = null;
    private final MetricHostProvider metricHostProvider;
    private final MetricsServiceProvider metricsServiceProvider;
    private TimelineMetricCacheProvider cacheProvider;
    /**
     * PropertyHelper/AbstractPropertyProvider expect map of maps,
     * that's why we wrap metrics into map
     */
    public static final String WRAPPED_METRICS_KEY = "WRAPPED_METRICS_KEY";
    // Populates the static collaborators from the Guice injector. Must be
    // invoked once at startup before any instance is constructed (the
    // constructor reads the static "injector" field).
    @Inject
    public static void init(Injector injector) {
        clusters = injector.getInstance(Clusters.class);
        metaInfo = injector.getInstance(AmbariMetaInfo.class);
        metricPropertyProviderFactory = injector.getInstance(MetricPropertyProviderFactory.class);
        StackDefinedPropertyProvider.injector = injector;
    }
public StackDefinedPropertyProvider(Resource.Type type,
JMXHostProvider jmxHostProvider,
MetricHostProvider metricHostProvider,
MetricsServiceProvider serviceProvider,
URLStreamProvider streamProvider,
String clusterPropertyId,
String hostPropertyId,
String componentPropertyId,
String resourceStatePropertyId,
PropertyProvider defaultJmxPropertyProvider,
PropertyProvider defaultGangliaPropertyProvider) {
this.metricHostProvider = metricHostProvider;
metricsServiceProvider = serviceProvider;
if (null == clusterPropertyId) {
throw new NullPointerException("Cluster name property id cannot be null");
}
if (null == componentPropertyId) {
throw new NullPointerException("Component name property id cannot be null");
}
this.type = type;
clusterNamePropertyId = clusterPropertyId;
hostNamePropertyId = hostPropertyId;
componentNamePropertyId = componentPropertyId;
this.resourceStatePropertyId = resourceStatePropertyId;
this.jmxHostProvider = jmxHostProvider;
sslConfig = ComponentSSLConfiguration.instance();
this.streamProvider = streamProvider;
defaultJmx = defaultJmxPropertyProvider;
defaultGanglia = defaultGangliaPropertyProvider;
cacheProvider = injector.getInstance(TimelineMetricCacheProvider.class);
}
@Override
public Set<Resource> populateResources(Set<Resource> resources,
Request request, Predicate predicate) throws SystemException {
// only arrange for one instance of Ganglia and JMX instantiation
Map<String, Map<String, PropertyInfo>> gangliaMap = new HashMap<>();
Map<String, Map<String, PropertyInfo>> jmxMap = new HashMap<>();
List<PropertyProvider> additional = new ArrayList<>();
Map<String, String> overriddenHosts = new HashMap<>();
Map<String, UriInfo> overriddenJmxUris = new HashMap<>();
try {
for (Resource r : resources) {
String clusterName = r.getPropertyValue(clusterNamePropertyId).toString();
String componentName = r.getPropertyValue(componentNamePropertyId).toString();
Cluster cluster = clusters.getCluster(clusterName);
Service service = null;
try {
service = cluster.getServiceByComponentName(componentName);
} catch (ServiceNotFoundException e) {
LOG.debug("Could not load component {}", componentName);
continue;
}
StackId stack = service.getDesiredStackId();
List<MetricDefinition> defs = metaInfo.getMetrics(
stack.getStackName(), stack.getStackVersion(), service.getName(), componentName, type.name());
if (null == defs || 0 == defs.size()) {
continue;
}
for (MetricDefinition m : defs) {
if (m.getType().equals("ganglia")) {
gangliaMap.put(componentName, getPropertyInfo(m));
m.getOverriddenHosts().ifPresent(host -> overriddenHosts.put(componentName, host));
} else if (m.getType().equals("jmx")) {
jmxMap.put(componentName, getPropertyInfo(m));
m.getJmxSourceUri().ifPresent(uri -> overriddenJmxUris.put(componentName, uri));
} else {
PropertyProvider pp = getDelegate(m,
streamProvider, metricHostProvider,
clusterNamePropertyId, hostNamePropertyId,
componentNamePropertyId, resourceStatePropertyId,
componentName);
if (pp == null) {
pp = getDelegate(m);
}
if (pp != null) {
additional.add(pp);
}
}
}
}
if (gangliaMap.size() > 0) {
PropertyProvider propertyProvider =
MetricsPropertyProvider.createInstance(type, gangliaMap,
streamProvider, sslConfig,
cacheProvider,
metricHostProvider(overriddenHosts),
metricsServiceProvider, clusterNamePropertyId,
hostNamePropertyId, componentNamePropertyId);
propertyProvider.populateResources(resources, request, predicate);
} else {
defaultGanglia.populateResources(resources, request, predicate);
}
if (jmxMap.size() > 0) {
JMXPropertyProvider jpp = metricPropertyProviderFactory.createJMXPropertyProvider(jmxMap,
streamProvider,
jmxHostProvider(overriddenJmxUris, jmxHostProvider, injector.getInstance(ConfigHelper.class)), metricHostProvider,
clusterNamePropertyId, hostNamePropertyId,
componentNamePropertyId, resourceStatePropertyId);
jpp.populateResources(resources, request, predicate);
} else {
defaultJmx.populateResources(resources, request, predicate);
}
for (PropertyProvider pp : additional) {
pp.populateResources(resources, request, predicate);
}
} catch (AuthorizationException e) {
// Need to rethrow the catched 'AuthorizationException'.
throw e;
} catch (Exception e) {
e.printStackTrace();
LOG.error("Error loading deferred resources", e);
throw new SystemException("Error loading deferred resources", e);
}
return resources;
}
private JMXHostProvider jmxHostProvider(Map<String, UriInfo> overriddenJmxUris, JMXHostProvider defaultProvider, ConfigHelper configHelper) {
return overriddenJmxUris.isEmpty() ? defaultProvider : new ConfigBasedJmxHostProvider(overriddenJmxUris, defaultProvider, configHelper);
}
private MetricHostProvider metricHostProvider(Map<String, String> overriddenHosts) {
return new OverriddenMetricsHostProvider(overriddenHosts, metricHostProvider, injector.getInstance(ConfigHelper.class));
}
@Override
public Set<String> checkPropertyIds(Set<String> propertyIds) {
return Collections.emptySet();
}
/**
* @param def the metric definition
* @return the converted Map required for JMX or Ganglia execution.
* Format: <metric name, property info>
*/
public static Map<String, PropertyInfo> getPropertyInfo(MetricDefinition def) {
Map<String, PropertyInfo> defs = new HashMap<>();
for (Entry<String, Metric> entry : def.getMetrics().entrySet()) {
Metric metric = entry.getValue();
if (metric.getName() != null) {
PropertyInfo propertyInfo = new PropertyInfo(metric.getName(),
metric.isTemporal(), metric.isPointInTime());
propertyInfo.setAmsHostMetric(metric.isAmsHostMetric());
propertyInfo.setUnit(metric.getUnit());
defs.put(entry.getKey(), propertyInfo);
}
}
return defs;
}
/**
* @param definition metric definition for a component and resource type combination
* @return the custom property provider
*/
private PropertyProvider getDelegate(MetricDefinition definition) {
try {
Class<?> clz = Class.forName(definition.getType());
// singleton/factory
try {
Method m = clz.getMethod("getInstance", Map.class, Map.class);
Object o = m.invoke(null, definition.getProperties(), definition.getMetrics());
return PropertyProvider.class.cast(o);
} catch (Exception e) {
LOG.info("Could not load singleton or factory method for type '" +
definition.getType());
}
// try maps constructor
try {
Constructor<?> ct = clz.getConstructor(Map.class, Map.class);
Object o = ct.newInstance(definition.getProperties(), definition.getMetrics());
return PropertyProvider.class.cast(o);
} catch (Exception e) {
LOG.info("Could not find contructor for type '" +
definition.getType());
}
// just new instance
return PropertyProvider.class.cast(clz.newInstance());
} catch (Exception e) {
LOG.error("Could not load class " + definition.getType());
return null;
}
}
/**
*
* @param definition the metric definition for a component
* @param streamProvider the stream provider
* @param metricsHostProvider the metrics host provider
* @param clusterNamePropertyId the cluster name property id
* @param hostNamePropertyId the host name property id
* @param componentNamePropertyId the component name property id
* @param statePropertyId the state property id
* @return the custom property provider
*/
private PropertyProvider getDelegate(MetricDefinition definition,
StreamProvider streamProvider,
MetricHostProvider metricsHostProvider,
String clusterNamePropertyId,
String hostNamePropertyId,
String componentNamePropertyId,
String statePropertyId,
String componentName) {
Map<String, PropertyInfo> metrics = getPropertyInfo(definition);
HashMap<String, Map<String, PropertyInfo>> componentMetrics =
new HashMap<>();
componentMetrics.put(WRAPPED_METRICS_KEY, metrics);
try {
Class<?> clz = Class.forName(definition.getType());
// use a Factory for the REST provider
if (clz.equals(RestMetricsPropertyProvider.class)) {
return metricPropertyProviderFactory.createRESTMetricsPropertyProvider(
definition.getProperties(), componentMetrics, streamProvider, metricsHostProvider,
clusterNamePropertyId, hostNamePropertyId, componentNamePropertyId, statePropertyId,
componentName);
}
try {
/*
* Warning: this branch is already used, that's why please adjust
* all implementations when modifying constructor interface
*/
Constructor<?> ct = clz.getConstructor(Map.class,
Map.class, StreamProvider.class, MetricHostProvider.class,
String.class, String.class, String.class, String.class, String.class);
Object o = ct.newInstance(
injector,
definition.getProperties(), componentMetrics,
streamProvider, metricsHostProvider,
clusterNamePropertyId, hostNamePropertyId,
componentNamePropertyId, statePropertyId, componentName);
return PropertyProvider.class.cast(o);
} catch (Exception e) {
LOG.info("Could not find contructor for type '" +
definition.getType());
}
// just new instance
return PropertyProvider.class.cast(clz.newInstance());
} catch (Exception e) {
LOG.error("Could not load class " + definition.getType());
return null;
}
}
}
| |
/*
* Waltz - Enterprise Architecture
* Copyright (C) 2016, 2017, 2018, 2019 Waltz open source project
* See README.md for more information
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific
*
*/
package com.khartec.waltz.jobs.generators;
import com.khartec.waltz.common.DateTimeUtilities;
import com.khartec.waltz.data.measurable.MeasurableDao;
import com.khartec.waltz.data.measurable.MeasurableIdSelectorFactory;
import com.khartec.waltz.data.measurable_rating.MeasurableRatingDao;
import com.khartec.waltz.data.roadmap.RoadmapDao;
import com.khartec.waltz.data.scenario.ScenarioAxisItemDao;
import com.khartec.waltz.data.scenario.ScenarioDao;
import com.khartec.waltz.model.AxisOrientation;
import com.khartec.waltz.model.EntityKind;
import com.khartec.waltz.model.EntityReference;
import com.khartec.waltz.model.IdSelectionOptions;
import com.khartec.waltz.model.measurable.Measurable;
import com.khartec.waltz.model.measurable_rating.MeasurableRating;
import com.khartec.waltz.model.roadmap.Roadmap;
import com.khartec.waltz.model.scenario.Scenario;
import com.khartec.waltz.model.scenario.ScenarioAxisItem;
import com.khartec.waltz.schema.tables.records.ScenarioRatingItemRecord;
import com.khartec.waltz.service.measurable_rating.MeasurableRatingService;
import org.jooq.DSLContext;
import org.jooq.Record1;
import org.jooq.SelectConditionStep;
import org.jooq.impl.DSL;
import org.springframework.context.ApplicationContext;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static com.khartec.waltz.common.ListUtilities.filter;
import static com.khartec.waltz.common.MapUtilities.groupBy;
import static com.khartec.waltz.common.ObjectUtilities.any;
import static com.khartec.waltz.common.RandomUtilities.randomIntBetween;
import static com.khartec.waltz.common.RandomUtilities.randomPick;
import static com.khartec.waltz.common.SetUtilities.asSet;
import static com.khartec.waltz.model.EntityReference.mkRef;
import static com.khartec.waltz.model.IdSelectionOptions.mkOpts;
import static com.khartec.waltz.schema.Tables.*;
import static com.khartec.waltz.schema.tables.Roadmap.ROADMAP;
import static org.jooq.lambda.tuple.Tuple.tuple;
/**
 * Sample data generator that creates (and can remove) demo roadmaps, each
 * with a "Current" scenario populated with randomly picked axis items and
 * rating cells.
 */
public class RoadmapGenerator implements SampleDataGenerator {

    private final Set<String> roadmapNames = asSet("Test Roadmap", "Another Roadmap");

    /**
     * Creates one roadmap per name in {@link #roadmapNames}.
     *
     * @param ctx the Spring application context used to resolve DAOs/services
     * @return always {@code null} (this generator reports no counts)
     */
    @Override
    public Map<String, Integer> create(ApplicationContext ctx) {
        roadmapNames.forEach(roadmapName -> mkRoadmap(ctx, roadmapName));
        return null;
    }

    /**
     * Removes the sample roadmaps (and their scenarios) by name.
     *
     * @return always {@code false}
     */
    @Override
    public boolean remove(ApplicationContext ctx) {
        roadmapNames.forEach(roadmapName -> removeRoadmap(ctx, roadmapName));
        return false;
    }

    // -- HELPERS ---

    /**
     * Creates a single roadmap, using the first available rating scheme and the
     * CAPABILITY/PRODUCT measurable categories as column/row axes. Does nothing
     * if any of the required reference data is missing.
     */
    private void mkRoadmap(ApplicationContext ctx, String roadmapName) {
        DSLContext dsl = getDsl(ctx);

        Long schemeId = dsl
                .select(RATING_SCHEME.ID)
                .from(RATING_SCHEME)
                .fetchAny(RATING_SCHEME.ID);

        Long colTypeId = findCategoryId(dsl, "CAPABILITY");
        Long rowTypeId = findCategoryId(dsl, "PRODUCT");

        // cannot build a roadmap without a rating scheme and both axis categories
        if (any(Objects::isNull, schemeId, colTypeId, rowTypeId)) {
            return;
        }

        long roadmapId = getRoadmapDao(ctx).createRoadmap(
                roadmapName,
                schemeId,
                mkRef(EntityKind.MEASURABLE_CATEGORY, colTypeId),
                mkRef(EntityKind.MEASURABLE_CATEGORY, rowTypeId),
                "admin");

        mkScenario(ctx, roadmapId);
    }

    /** Adds a "Current" scenario to the given roadmap and fills it with data. */
    private void mkScenario(ApplicationContext ctx, long roadmapId) {
        ScenarioDao scenarioDao = getScenarioDao(ctx);
        RoadmapDao roadmapDao = getRoadmapDao(ctx);

        Roadmap roadmap = roadmapDao.getById(roadmapId);
        Scenario scenario = scenarioDao.add(roadmapId, "Current", "admin");

        mkAxisItems(ctx, scenario, roadmap);
        populateScenario(ctx, scenario);
    }

    /**
     * Generates a random number of rating cells for the scenario by pairing
     * random column/row axis items and attaching a measurable rating.
     */
    private void populateScenario(ApplicationContext ctx, Scenario scenario) {
        ScenarioAxisItemDao scenarioAxisItemDao = getScenarioAxisItemDao(ctx);
        MeasurableRatingService measurableRatingService = getMeasurableRatingService(ctx);

        scenario.id().ifPresent(scenarioId -> {
            Collection<ScenarioAxisItem> axisItems = scenarioAxisItemDao.findForScenarioId(scenarioId);
            Map<AxisOrientation, Collection<ScenarioAxisItem>> byOrientation = groupBy(ScenarioAxisItem::axisOrientation, axisItems);

            IdSelectionOptions options = mkOpts(scenario.entityReference());

            Map<Long, Collection<MeasurableRating>> ratingsByMeasurableId = groupBy(
                    MeasurableRating::measurableId,
                    measurableRatingService.findByMeasurableIdSelector(options));

            List<ScenarioRatingItemRecord> scenarioRatingItems = IntStream
                    .range(0, randomIntBetween(10, 300))
                    .mapToObj(i -> tuple(
                            randomPick(byOrientation.get(AxisOrientation.COLUMN)),
                            randomPick(byOrientation.get(AxisOrientation.ROW))))
                    .map(t -> t
                            .map1(d -> d.domainItem().id())
                            .map2(d -> d.domainItem().id()))
                    .map(t -> t.concat(tuple(
                            randomPick(ratingsByMeasurableId.get(t.v1)),
                            randomPick(ratingsByMeasurableId.get(t.v2)))))
                    .map(t -> {
                        // prefer the column-side rating, fall back to the row-side one
                        // NOTE(review): both picks may be null if neither measurable has
                        // ratings — TODO confirm randomPick/ratings coverage upstream
                        MeasurableRating rating = t.v3 != null
                                ? t.v3
                                : t.v4;
                        ScenarioRatingItemRecord record = getDsl(ctx).newRecord(SCENARIO_RATING_ITEM);
                        record.setScenarioId(scenarioId);
                        record.setColumnId(t.v1);
                        record.setColumnKind(EntityKind.MEASURABLE.name());
                        record.setRowId(t.v2);
                        record.setRowKind(EntityKind.MEASURABLE.name());
                        record.setDomainItemId(rating.entityReference().id());
                        record.setDomainItemKind(rating.entityReference().kind().name());
                        record.setRating(String.valueOf(rating.rating()));
                        record.setLastUpdatedBy("admin");
                        record.setLastUpdatedAt(DateTimeUtilities.nowUtcTimestamp());
                        return record;
                    })
                    .collect(Collectors.toList());

            getDsl(ctx)
                    .batchInsert(scenarioRatingItems)
                    .execute();
        });
    }

    /** Picks random row/column measurables and registers them as axis items. */
    private void mkAxisItems(ApplicationContext ctx,
                             Scenario scenario,
                             Roadmap roadmap) {
        scenario.id()
                .ifPresent(scenarioId -> {
                    List<Measurable> rowCategories = pickAxisItems(ctx, roadmap.rowType(), randomIntBetween(5, 10));
                    List<Measurable> colCategories = pickAxisItems(ctx, roadmap.columnType(), randomIntBetween(2, 7));

                    addAxisItems(ctx, rowCategories, scenarioId, AxisOrientation.ROW);
                    addAxisItems(ctx, colCategories, scenarioId, AxisOrientation.COLUMN);
                });
    }

    /** Registers each measurable as an axis item with the given orientation. */
    private void addAxisItems(ApplicationContext ctx,
                              List<Measurable> categories,
                              Long scenarioId,
                              AxisOrientation row) {
        ScenarioAxisItemDao scenarioAxisItemDao = getScenarioAxisItemDao(ctx);
        categories.forEach(c -> scenarioAxisItemDao.add(
                scenarioId,
                row, // orientation
                c.entityReference(),
                10));
    }

    /** Randomly picks {@code howMany} concrete measurables from the category. */
    private List<Measurable> pickAxisItems(ApplicationContext ctx, EntityReference type, int howMany) {
        MeasurableDao measurableDao = getMeasurableDao(ctx);
        List<Measurable> concreteChoices = filter(
                Measurable::concrete,
                measurableDao.findByCategoryId(type.id()));
        return randomPick(concreteChoices, howMany);
    }

    /**
     * Deletes a roadmap (by name) and all dependent scenario axis items and
     * rating items. No-op if no roadmap with that name exists.
     */
    private void removeRoadmap(ApplicationContext ctx, String roadmapName) {
        DSLContext dsl = getDsl(ctx);
        dsl.select(ROADMAP.ID)
                .from(ROADMAP)
                .where(ROADMAP.NAME.eq(roadmapName))
                .fetchOptional(ROADMAP.ID)
                .ifPresent(rId -> {
                    SelectConditionStep<Record1<Long>> scenarioIds = DSL
                            .select(SCENARIO.ID)
                            .from(SCENARIO)
                            .where(SCENARIO.ROADMAP_ID.eq(rId));

                    // delete children first to satisfy FK constraints
                    dsl.deleteFrom(SCENARIO_AXIS_ITEM)
                            .where(SCENARIO_AXIS_ITEM.SCENARIO_ID.in(scenarioIds))
                            .execute();

                    dsl.deleteFrom(SCENARIO_RATING_ITEM)
                            .where(SCENARIO_RATING_ITEM.SCENARIO_ID.in(scenarioIds))
                            .execute();

                    dsl.deleteFrom(ROADMAP)
                            .where(ROADMAP.ID.eq(rId))
                            .execute();
                });
    }

    /** @return the id of the measurable category with the given external id, or null */
    private Long findCategoryId(DSLContext dsl, String extId) {
        return dsl
                .select(MEASURABLE_CATEGORY.ID)
                .from(MEASURABLE_CATEGORY)
                .where(MEASURABLE_CATEGORY.EXTERNAL_ID.eq(extId))
                .fetchAny(MEASURABLE_CATEGORY.ID);
    }

    private ScenarioAxisItemDao getScenarioAxisItemDao(ApplicationContext ctx) {
        return ctx.getBean(ScenarioAxisItemDao.class);
    }

    private ScenarioDao getScenarioDao(ApplicationContext ctx) {
        return ctx.getBean(ScenarioDao.class);
    }

    private RoadmapDao getRoadmapDao(ApplicationContext ctx) {
        return ctx.getBean(RoadmapDao.class);
    }

    private MeasurableDao getMeasurableDao(ApplicationContext ctx) {
        return ctx.getBean(MeasurableDao.class);
    }

    private MeasurableRatingService getMeasurableRatingService(ApplicationContext ctx) {
        return ctx.getBean(MeasurableRatingService.class);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.servlets.post.impl.operations;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.servlet.http.HttpServletResponse;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.request.RequestParameter;
import org.apache.sling.api.resource.PersistenceException;
import org.apache.sling.jcr.contentloader.ContentImportListener;
import org.apache.sling.jcr.contentloader.ContentImporter;
import org.apache.sling.jcr.contentloader.ImportOptions;
import org.apache.sling.servlets.post.Modification;
import org.apache.sling.servlets.post.ModificationType;
import org.apache.sling.servlets.post.PostResponse;
import org.apache.sling.servlets.post.SlingPostConstants;
import org.apache.sling.servlets.post.VersioningConfiguration;
import org.apache.sling.servlets.post.impl.helper.RequestProperty;
/**
 * The <code>ImportOperation</code> class implements the
 * {@link org.apache.sling.servlets.post.SlingPostConstants#OPERATION_IMPORT}
 * import operation for the Sling default POST servlet.
 */
public class ImportOperation extends AbstractCreateOperation {

    /**
     * Reference to the content importer service
     */
    private ContentImporter contentImporter;

    public void setContentImporter(ContentImporter importer) {
        this.contentImporter = importer;
    }

    /**
     * @return the request parameter's string value, or {@code null} if the
     *         parameter is absent
     */
    private String getRequestParamAsString(SlingHttpServletRequest request, String key) {
        RequestParameter requestParameter = request.getRequestParameter(key);
        if (requestParameter == null) {
            return null;
        }
        return requestParameter.getString();
    }

    /**
     * Runs the import: ensures the target node exists, resolves the import
     * options and content stream from the request, delegates to the
     * {@link ContentImporter} and records the resulting modifications.
     */
    @Override
    protected void doRun(SlingHttpServletRequest request, PostResponse response, final List<Modification> changes)
            throws RepositoryException {
        ContentImporter importer = contentImporter;
        if (importer == null) {
            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                "Missing content importer for import");
            return;
        }
        Map<String, RequestProperty> reqProperties = collectContent(request,
            response);

        VersioningConfiguration versioningConfiguration = getVersioningConfiguration(request);

        // do not change order unless you have a very good reason.
        Session session = request.getResourceResolver().adaptTo(Session.class);

        try {
            processCreate(request.getResourceResolver(), reqProperties, response, changes, versioningConfiguration);
        } catch ( final PersistenceException pe) {
            if ( pe.getCause() instanceof RepositoryException ) {
                throw (RepositoryException)pe.getCause();
            }
            throw new RepositoryException(pe);
        }

        // the import requires direct JCR access; fail cleanly if the resolver
        // could not be adapted to a session (previously this caused an NPE)
        if (session == null) {
            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                "Missing JCR session for import");
            return;
        }

        String path = response.getPath();
        Node node = null;
        try {
            node = (Node) session.getItem(path);
        } catch ( RepositoryException e ) {
            log.warn(e.getMessage(),e);
            // was not able to resolve the node
        } catch ( ClassCastException e) {
            log.warn(e.getMessage(),e);
            // it was not a node
        }
        if (node == null) {
            response.setStatus(HttpServletResponse.SC_NOT_FOUND,
                "Missing target node " + path + " for import");
            return;
        }

        String contentType = getRequestParamAsString(request, SlingPostConstants.RP_CONTENT_TYPE);
        if (contentType == null) {
            response.setStatus(HttpServletResponse.SC_PRECONDITION_FAILED,
                "Required :contentType parameter is missing");
            return;
        }

        //import options passed as request parameters.
        final boolean replace = "true".equalsIgnoreCase(getRequestParamAsString(request, SlingPostConstants.RP_REPLACE));
        final boolean replaceProperties = "true".equalsIgnoreCase(getRequestParamAsString(request, SlingPostConstants.RP_REPLACE_PROPERTIES));
        final boolean checkin = "true".equalsIgnoreCase(getRequestParamAsString(request, SlingPostConstants.RP_CHECKIN));
        final boolean autoCheckout = "true".equalsIgnoreCase(getRequestParamAsString(request, SlingPostConstants.RP_AUTO_CHECKOUT));

        String basePath = getItemPath(request);
        // reuse the already-adapted session rather than adapting a second time
        basePath = removeAndValidateWorkspace(basePath, session);
        if (basePath.endsWith("/")) {
            //remove the trailing slash
            basePath = basePath.substring(0, basePath.length() - 1);
        }

        // default to creating content
        response.setCreateRequest(true);

        final String targetName;
        //check if a name was posted to use as the name of the imported root node
        if (getRequestParamAsString(request, SlingPostConstants.RP_NODE_NAME) != null) {
            // exact name
            targetName = getRequestParamAsString(request, SlingPostConstants.RP_NODE_NAME);
            if (targetName.length() > 0 && node.hasNode(targetName)) {
                if (replace) {
                    response.setCreateRequest(false);
                } else {
                    response.setStatus(
                        HttpServletResponse.SC_PRECONDITION_FAILED,
                        "Cannot import " + path + "/" + targetName
                            + ": node exists");
                    return;
                }
            }
        } else if (getRequestParamAsString(request, SlingPostConstants.RP_NODE_NAME_HINT) != null) {
            // node name hint only
            String nodePath = generateName(request, basePath);
            String name = nodePath.substring(nodePath.lastIndexOf('/') + 1);
            targetName = name;
        } else {
            // no name posted, so the import won't create a root node
            targetName = "";
        }
        // the importer derives the serialization format from this suffix
        final String contentRootName = targetName + "." + contentType;

        try {
            InputStream contentStream = null;
            RequestParameter contentParameter = request.getRequestParameter(SlingPostConstants.RP_CONTENT);
            if (contentParameter != null) {
                contentStream = contentParameter.getInputStream();
            } else {
                RequestParameter contentFile = request.getRequestParameter(SlingPostConstants.RP_CONTENT_FILE);
                if (contentFile != null) {
                    contentStream = contentFile.getInputStream();
                }
            }

            if (contentStream == null) {
                response.setStatus(HttpServletResponse.SC_PRECONDITION_FAILED,
                    "Missing content for import");
                return;
            } else {
                importer.importContent(node, contentRootName, contentStream,
                    new ImportOptions() {

                        @Override
                        public boolean isCheckin() {
                            return checkin;
                        }

                        @Override
                        public boolean isAutoCheckout() {
                            return autoCheckout;
                        }

                        @Override
                        public boolean isIgnoredImportProvider(
                                String extension) {
                            // this probably isn't important in this context.
                            return false;
                        }

                        @Override
                        public boolean isOverwrite() {
                            return replace;
                        }

                        /* (non-Javadoc)
                         * @see org.apache.sling.jcr.contentloader.ImportOptions#isPropertyOverwrite()
                         */
                        @Override
                        public boolean isPropertyOverwrite() {
                            return replaceProperties;
                        }
                    },
                    new ContentImportListener() {

                        public void onReorder(String orderedPath, String beforeSibling) {
                            changes.add(Modification.onOrder(orderedPath, beforeSibling));
                        }

                        public void onMove(String srcPath, String destPath) {
                            changes.add(Modification.onMoved(srcPath, destPath));
                        }

                        public void onModify(String srcPath) {
                            changes.add(Modification.onModified(srcPath));
                        }

                        public void onDelete(String srcPath) {
                            changes.add(Modification.onDeleted(srcPath));
                        }

                        public void onCreate(String srcPath) {
                            changes.add(Modification.onCreated(srcPath));
                        }

                        public void onCopy(String srcPath, String destPath) {
                            changes.add(Modification.onCopied(srcPath, destPath));
                        }

                        public void onCheckin(String srcPath) {
                            changes.add(Modification.onCheckin(srcPath));
                        }

                        public void onCheckout(String srcPath) {
                            changes.add(Modification.onCheckout(srcPath));
                        }
                    });
            }

            if (!changes.isEmpty()) {
                //fill in the data for the response report
                Modification modification = changes.get(0);
                if (modification.getType() == ModificationType.CREATE) {
                    String importedPath = modification.getSource();
                    response.setLocation(externalizePath(request, importedPath));
                    response.setPath(importedPath);
                    int lastSlashIndex = importedPath.lastIndexOf('/');
                    if (lastSlashIndex != -1) {
                        String parentPath = importedPath.substring(0, lastSlashIndex);
                        response.setParentLocation(externalizePath(request, parentPath));
                    }
                }
            }
        } catch (IOException e) {
            throw new RepositoryException(e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.test;
import org.apache.calcite.sql.parser.SqlParserImplFactory;
import org.apache.calcite.sql.parser.SqlParserTest;
import org.apache.calcite.sql.parser.ddl.SqlDdlParserImpl;
import org.junit.Test;
/**
* Tests SQL parser extensions for DDL.
*
* <p>Remaining tasks:
* <ul>
*
* <li>"create table x (a int) as values 1, 2" should fail validation;
* data type not allowed in "create table ... as".
*
* <li>"create table x (a int, b int as (a + 1)) stored"
* should not allow b to be specified in insert;
* should generate check constraint on b;
* should populate b in insert as if it had a default
*
* <li>"create table as select" should store constraints
* deduced by planner
*
* <li>during CREATE VIEW, check for a table and a materialized view
* with the same name (they have the same namespace)
*
* </ul>
*/
public class ServerParserTest extends SqlParserTest {
@Override protected SqlParserImplFactory parserImplFactory() {
return SqlDdlParserImpl.FACTORY;
}
@Override public void testGenerateKeyWords() {
// by design, method only works in base class; no-ops in this sub-class
}
@Test public void testCreateSchema() {
sql("create schema x")
.ok("CREATE SCHEMA `X`");
}
@Test public void testCreateOrReplaceSchema() {
sql("create or replace schema x")
.ok("CREATE OR REPLACE SCHEMA `X`");
}
@Test public void testCreateForeignSchema() {
final String sql = "create or replace foreign schema x\n"
+ "type 'jdbc'\n"
+ "options (\n"
+ " aBoolean true,\n"
+ " anInteger -45,\n"
+ " aDate DATE '1970-03-21',\n"
+ " \"quoted.id\" TIMESTAMP '1970-03-21 12:4:56.78',\n"
+ " aString 'foo''bar')";
final String expected = "CREATE OR REPLACE FOREIGN SCHEMA `X` TYPE 'jdbc' "
+ "OPTIONS (`ABOOLEAN` TRUE,"
+ " `ANINTEGER` -45,"
+ " `ADATE` DATE '1970-03-21',"
+ " `quoted.id` TIMESTAMP '1970-03-21 12:04:56.78',"
+ " `ASTRING` 'foo''bar')";
sql(sql).ok(expected);
}
@Test public void testCreateForeignSchema2() {
final String sql = "create or replace foreign schema x\n"
+ "library 'com.example.ExampleSchemaFactory'\n"
+ "options ()";
final String expected = "CREATE OR REPLACE FOREIGN SCHEMA `X` "
+ "LIBRARY 'com.example.ExampleSchemaFactory' "
+ "OPTIONS ()";
sql(sql).ok(expected);
}
@Test public void testCreateTypeWithAttributeList() {
sql("create type x.mytype1 as (i int not null, j varchar(5) null)")
.ok("CREATE TYPE `X`.`MYTYPE1` AS (`I` INTEGER NOT NULL, `J` VARCHAR(5))");
}
@Test public void testCreateTypeWithBaseType() {
sql("create type mytype1 as varchar(5)")
.ok("CREATE TYPE `MYTYPE1` AS VARCHAR(5)");
}
@Test public void testCreateOrReplaceTypeWith() {
sql("create or replace type mytype1 as varchar(5)")
.ok("CREATE OR REPLACE TYPE `MYTYPE1` AS VARCHAR(5)");
}
@Test public void testCreateTable() {
sql("create table x (i int not null, j varchar(5) null)")
.ok("CREATE TABLE `X` (`I` INTEGER NOT NULL, `J` VARCHAR(5))");
}
@Test public void testCreateTableAsSelect() {
final String expected = "CREATE TABLE `X` AS\n"
+ "SELECT *\n"
+ "FROM `EMP`";
sql("create table x as select * from emp")
.ok(expected);
}
@Test public void testCreateTableIfNotExistsAsSelect() {
final String expected = "CREATE TABLE IF NOT EXISTS `X`.`Y` AS\n"
+ "SELECT *\n"
+ "FROM `EMP`";
sql("create table if not exists x.y as select * from emp")
.ok(expected);
}
@Test public void testCreateTableAsValues() {
final String expected = "CREATE TABLE `X` AS\n"
+ "VALUES (ROW(1)),\n"
+ "(ROW(2))";
sql("create table x as values 1, 2")
.ok(expected);
}
@Test public void testCreateTableAsSelectColumnList() {
final String expected = "CREATE TABLE `X` (`A`, `B`) AS\n"
+ "SELECT *\n"
+ "FROM `EMP`";
sql("create table x (a, b) as select * from emp")
.ok(expected);
}
@Test public void testCreateTableCheck() {
final String expected = "CREATE TABLE `X` (`I` INTEGER NOT NULL,"
+ " CONSTRAINT `C1` CHECK (`I` < 10), `J` INTEGER)";
sql("create table x (i int not null, constraint c1 check (i < 10), j int)")
.ok(expected);
}
@Test public void testCreateTableVirtualColumn() {
final String sql = "create table if not exists x (\n"
+ " i int not null,\n"
+ " j int generated always as (i + 1) stored,\n"
+ " k int as (j + 1) virtual,\n"
+ " m int as (k + 1))";
final String expected = "CREATE TABLE IF NOT EXISTS `X` "
+ "(`I` INTEGER NOT NULL,"
+ " `J` INTEGER AS (`I` + 1) STORED,"
+ " `K` INTEGER AS (`J` + 1) VIRTUAL,"
+ " `M` INTEGER AS (`K` + 1) VIRTUAL)";
sql(sql).ok(expected);
}
@Test public void testCreateView() {
final String sql = "create or replace view v as\n"
+ "select * from (values (1, '2'), (3, '45')) as t (x, y)";
final String expected = "CREATE OR REPLACE VIEW `V` AS\n"
+ "SELECT *\n"
+ "FROM (VALUES (ROW(1, '2')),\n"
+ "(ROW(3, '45'))) AS `T` (`X`, `Y`)";
sql(sql).ok(expected);
}
@Test public void testCreateMaterializedView() {
final String sql = "create materialized view mv (d, v) as\n"
+ "select deptno, count(*) from emp\n"
+ "group by deptno order by deptno desc";
final String expected = "CREATE MATERIALIZED VIEW `MV` (`D`, `V`) AS\n"
+ "SELECT `DEPTNO`, COUNT(*)\n"
+ "FROM `EMP`\n"
+ "GROUP BY `DEPTNO`\n"
+ "ORDER BY `DEPTNO` DESC";
sql(sql).ok(expected);
}
/** Verifies CREATE MATERIALIZED VIEW IF NOT EXISTS (no column list). */
@Test public void testCreateMaterializedView2() {
final String sql = "create materialized view if not exists mv as\n"
+ "select deptno, count(*) from emp\n"
+ "group by deptno order by deptno desc";
final String expected = "CREATE MATERIALIZED VIEW IF NOT EXISTS `MV` AS\n"
+ "SELECT `DEPTNO`, COUNT(*)\n"
+ "FROM `EMP`\n"
+ "GROUP BY `DEPTNO`\n"
+ "ORDER BY `DEPTNO` DESC";
sql(sql).ok(expected);
}
// "OR REPLACE" is allowed by the parser, but the validator will give an
// error later
/** Note that the unparsed output deliberately drops the "OR REPLACE" keywords. */
@Test public void testCreateOrReplaceMaterializedView() {
final String sql = "create or replace materialized view mv as\n"
+ "select * from emp";
final String expected = "CREATE MATERIALIZED VIEW `MV` AS\n"
+ "SELECT *\n"
+ "FROM `EMP`";
sql(sql).ok(expected);
}
/** Round-trips a plain DROP SCHEMA statement through the parser. */
@Test public void testDropSchema() {
    final String input = "drop schema x";
    final String expected = "DROP SCHEMA `X`";
    sql(input).ok(expected);
}
/** Verifies DROP SCHEMA with the IF EXISTS modifier. */
@Test public void testDropSchemaIfExists() {
sql("drop schema if exists x")
.ok("DROP SCHEMA IF EXISTS `X`");
}
/** Verifies DROP FOREIGN SCHEMA. */
@Test public void testDropForeignSchema() {
sql("drop foreign schema x")
.ok("DROP FOREIGN SCHEMA `X`");
}
/** Round-trips a plain DROP TYPE statement through the parser. */
@Test public void testDropType() {
    final String input = "drop type X";
    final String expected = "DROP TYPE `X`";
    sql(input).ok(expected);
}
/** Verifies DROP TYPE with the IF EXISTS modifier. */
@Test public void testDropTypeIfExists() {
sql("drop type if exists X")
.ok("DROP TYPE IF EXISTS `X`");
}
/** IF EXISTS must precede the name; a trailing "if exists" is a parse error (carets mark the bad token). */
@Test public void testDropTypeTrailingIfExistsFails() {
sql("drop type X ^if^ exists")
.fails("(?s)Encountered \"if\" at.*");
}
/** Round-trips a plain DROP TABLE statement through the parser. */
@Test public void testDropTable() {
    final String input = "drop table x";
    final String expected = "DROP TABLE `X`";
    sql(input).ok(expected);
}
/** Verifies DROP TABLE with a schema-qualified (compound) name. */
@Test public void testDropTableComposite() {
sql("drop table x.y")
.ok("DROP TABLE `X`.`Y`");
}
/** Verifies DROP TABLE with the IF EXISTS modifier. */
@Test public void testDropTableIfExists() {
sql("drop table if exists x")
.ok("DROP TABLE IF EXISTS `X`");
}
/** Round-trips a plain DROP VIEW statement through the parser. */
@Test public void testDropView() {
    final String input = "drop view x";
    final String expected = "DROP VIEW `X`";
    sql(input).ok(expected);
}
/** Verifies DROP MATERIALIZED VIEW. */
@Test public void testDropMaterializedView() {
sql("drop materialized view x")
.ok("DROP MATERIALIZED VIEW `X`");
}
/** Verifies DROP MATERIALIZED VIEW with the IF EXISTS modifier. */
@Test public void testDropMaterializedViewIfExists() {
sql("drop materialized view if exists x")
.ok("DROP MATERIALIZED VIEW IF EXISTS `X`");
}
}
// End ServerParserTest.java
| |
package org.ovirt.engine.api.restapi.types;
import org.ovirt.engine.api.model.Cluster;
import org.ovirt.engine.api.model.DataCenter;
import org.ovirt.engine.api.model.Quota;
import org.ovirt.engine.api.model.QuotaClusterLimit;
import org.ovirt.engine.api.model.QuotaStorageLimit;
import org.ovirt.engine.api.model.StorageDomain;
import org.ovirt.engine.api.restapi.utils.GuidUtils;
import org.ovirt.engine.core.common.businessentities.QuotaStorage;
import org.ovirt.engine.core.common.businessentities.QuotaVdsGroup;
import org.ovirt.engine.core.compat.Guid;
/**
 * Maps between the REST API {@link Quota} model family (including the
 * {@link QuotaStorageLimit} and {@link QuotaClusterLimit} sub-resources) and
 * the engine business entity
 * {@code org.ovirt.engine.core.common.businessentities.Quota}.
 */
public class QuotaMapper {
/**
 * Copies fields that are set on the REST model onto the business entity.
 * Fields the model does not set are left untouched on {@code template},
 * which supports partial updates.
 */
@Mapping(from = Quota.class, to = org.ovirt.engine.core.common.businessentities.Quota.class)
public static org.ovirt.engine.core.common.businessentities.Quota map(Quota model, org.ovirt.engine.core.common.businessentities.Quota template) {
// Reuse the caller-supplied entity when present, otherwise start fresh.
org.ovirt.engine.core.common.businessentities.Quota entity = (template==null) ? new org.ovirt.engine.core.common.businessentities.Quota() : template;
if (model.isSetId()) {
entity.setId(GuidUtils.asGuid(model.getId()));
}
if (model.isSetName()) {
entity.setQuotaName(model.getName());
}
if (model.isSetDescription()) {
entity.setDescription(model.getDescription());
}
// The REST "data center" corresponds to the entity's storage pool.
if (model.isSetDataCenter()) {
entity.setStoragePoolId(GuidUtils.asGuid(model.getDataCenter().getId()));
}
if (model.isSetClusterHardLimitPct()) {
entity.setGraceVdsGroupPercentage(model.getClusterHardLimitPct());
}
if (model.isSetStorageHardLimitPct()) {
entity.setGraceStoragePercentage(model.getStorageHardLimitPct());
}
if (model.isSetClusterSoftLimitPct()) {
entity.setThresholdVdsGroupPercentage(model.getClusterSoftLimitPct());
}
if (model.isSetStorageSoftLimitPct()) {
entity.setThresholdStoragePercentage(model.getStorageSoftLimitPct());
}
return entity;
}
/**
 * Inverse mapping: populates a REST model from the business entity. The
 * four percentage fields are copied unconditionally; the rest only when
 * non-null on the entity.
 */
@Mapping(from = org.ovirt.engine.core.common.businessentities.Quota.class, to = Quota.class)
public static Quota map(org.ovirt.engine.core.common.businessentities.Quota template, Quota model) {
Quota ret = (model==null) ? new Quota() : model;
if (template.getId()!=null) {
ret.setId(template.getId().toString());
}
if (template.getQuotaName()!=null) {
ret.setName(template.getQuotaName());
}
if (template.getDescription()!=null) {
ret.setDescription(template.getDescription());
}
if (template.getStoragePoolId()!=null) {
if (ret.getDataCenter()==null) {
ret.setDataCenter(new DataCenter());
}
ret.getDataCenter().setId(template.getStoragePoolId().toString());
}
ret.setClusterHardLimitPct(template.getGraceVdsGroupPercentage());
ret.setStorageHardLimitPct(template.getGraceStoragePercentage());
ret.setClusterSoftLimitPct(template.getThresholdVdsGroupPercentage());
ret.setStorageSoftLimitPct(template.getThresholdStoragePercentage());
return ret;
}
/**
 * Fills a {@link QuotaStorageLimit} model from the entity. The model's own
 * id selects which limit is meant: an id equal to the quota's id denotes
 * the quota-global storage limit, otherwise the id is matched against the
 * per-storage-domain limits.
 */
@Mapping(from = org.ovirt.engine.core.common.businessentities.Quota.class, to = QuotaStorageLimit.class)
public static QuotaStorageLimit map(org.ovirt.engine.core.common.businessentities.Quota entity,
QuotaStorageLimit template) {
QuotaStorageLimit model = template != null ? template : new QuotaStorageLimit();
// NOTE(review): assumes model.getId() is set by the caller — TODO confirm.
Guid guid = GuidUtils.asGuid(model.getId());
// global
if (guid.equals(entity.getId())) {
map(model, entity.getGlobalQuotaStorage(), null, entity.getStoragePoolId().toString(), entity.getId()
.toString());
} else { // specific
if (entity.getQuotaStorages() != null) {
for (QuotaStorage quotaStorage : entity.getQuotaStorages()) {
if (quotaStorage.getStorageId() != null && quotaStorage.getStorageId().equals(guid)) {
map(model, quotaStorage, quotaStorage.getStorageId().toString(), entity.getStoragePoolId()
.toString(), entity.getId().toString());
}
}
}
}
return model;
}
/**
 * Helper: copies a single {@link QuotaStorage} limit into the model.
 *
 * @param storageDomainId null for the quota-global limit; otherwise the
 *        storage domain the limit applies to.
 */
private static void map(QuotaStorageLimit model,
QuotaStorage quotaStorage,
String storageDomainId,
String dataCenterId,
String quotaId) {
model.setQuota(new Quota());
model.getQuota().setId(quotaId);
model.getQuota().setDataCenter(new DataCenter());
model.getQuota().getDataCenter().setId(dataCenterId);
if (storageDomainId != null) {
model.setStorageDomain(new StorageDomain());
model.getStorageDomain().setId(storageDomainId);
}
if (quotaStorage.getStorageSizeGB() != null) {
model.setLimit(quotaStorage.getStorageSizeGB());
}
if (quotaStorage.getStorageSizeGBUsage() != null) {
model.setUsage(quotaStorage.getStorageSizeGBUsage());
}
}
/**
 * Applies a {@link QuotaStorageLimit} model onto the entity, either as the
 * global storage limit or as a per-storage-domain limit.
 */
@Mapping(from = QuotaStorageLimit.class, to = org.ovirt.engine.core.common.businessentities.Quota.class)
public static org.ovirt.engine.core.common.businessentities.Quota map(QuotaStorageLimit model,
org.ovirt.engine.core.common.businessentities.Quota template) {
org.ovirt.engine.core.common.businessentities.Quota entity =
template != null ? template : new org.ovirt.engine.core.common.businessentities.Quota();
QuotaStorage quotaStorage = new QuotaStorage();
if (model.isSetLimit()) {
quotaStorage.setStorageSizeGB(model.getLimit());
}
// specific SD
if(model.isSetStorageDomain() && model.getStorageDomain().isSetId()) {
quotaStorage.setStorageId(GuidUtils.asGuid(model.getStorageDomain().getId()));
// NOTE(review): assumes getQuotaStorages() is non-null here (the entity
// presumably initializes it), unlike the null-checked read above — confirm.
entity.getQuotaStorages().add(quotaStorage);
} else { // global
entity.setGlobalQuotaStorage(quotaStorage);
}
return entity;
}
/**
 * Fills a {@link QuotaClusterLimit} model from the entity; same global-vs-
 * specific id-dispatch scheme as the storage-limit mapping above.
 */
@Mapping(from = org.ovirt.engine.core.common.businessentities.Quota.class, to = QuotaClusterLimit.class)
public static QuotaClusterLimit map(org.ovirt.engine.core.common.businessentities.Quota entity,
QuotaClusterLimit template) {
QuotaClusterLimit model = template != null ? template : new QuotaClusterLimit();
// NOTE(review): assumes model.getId() is set by the caller — TODO confirm.
Guid guid = GuidUtils.asGuid(model.getId());
// global
if (guid.equals(entity.getId())) {
map(model, entity.getGlobalQuotaVdsGroup(), null, entity.getStoragePoolId().toString(), entity.getId()
.toString());
} else { // specific
if (entity.getQuotaVdsGroups() != null) {
for (QuotaVdsGroup quotaCluster : entity.getQuotaVdsGroups()) {
if (quotaCluster.getVdsGroupId() != null && quotaCluster.getVdsGroupId().equals(guid)) {
map(model, quotaCluster, quotaCluster.getVdsGroupId().toString(), entity.getStoragePoolId()
.toString(), entity.getId().toString());
}
}
}
}
return model;
}
/**
 * Helper: copies a single {@link QuotaVdsGroup} (cluster) limit into the
 * model, converting memory figures from MB to GB for display.
 */
private static void map(QuotaClusterLimit template,
QuotaVdsGroup quotaCluster,
String clusterId,
String dataCenterId,
String quotaId) {
template.setQuota(new Quota());
template.getQuota().setId(quotaId);
template.getQuota().setDataCenter(new DataCenter());
template.getQuota().getDataCenter().setId(dataCenterId);
if (clusterId != null) {
template.setCluster(new Cluster());
template.getCluster().setId(clusterId);
}
if (quotaCluster.getMemSizeMB() != null) {
// show GB instead of MB (ignore -1)
// -1 is passed through unconverted; it appears to be an
// "unlimited" sentinel — TODO confirm against the engine contract.
double value = quotaCluster.getMemSizeMB() == -1 ? quotaCluster.getMemSizeMB().doubleValue()
: quotaCluster.getMemSizeMB().doubleValue() / 1024.0;
template.setMemoryLimit(value);
}
if (quotaCluster.getMemSizeMBUsage() != null) {
template.setMemoryUsage(quotaCluster.getMemSizeMBUsage() / 1024.0);
}
if (quotaCluster.getVirtualCpu() != null) {
template.setVcpuLimit(quotaCluster.getVirtualCpu());
}
if (quotaCluster.getVirtualCpuUsage() != null) {
template.setVcpuUsage(quotaCluster.getVirtualCpuUsage());
}
}
/**
 * Applies a {@link QuotaClusterLimit} model onto the entity, converting the
 * memory limit back from GB to MB (negative means the -1 sentinel).
 */
@Mapping(from = QuotaClusterLimit.class, to = org.ovirt.engine.core.common.businessentities.Quota.class)
public static org.ovirt.engine.core.common.businessentities.Quota map(QuotaClusterLimit model,
org.ovirt.engine.core.common.businessentities.Quota template) {
org.ovirt.engine.core.common.businessentities.Quota entity =
template != null ? template : new org.ovirt.engine.core.common.businessentities.Quota();
QuotaVdsGroup quotaCluster = new QuotaVdsGroup();
if (model.isSetVcpuLimit()) {
quotaCluster.setVirtualCpu(model.getVcpuLimit());
}
if (model.isSetMemoryLimit()) {
double limit = model.getMemoryLimit();
quotaCluster.setMemSizeMB( (limit < 0) ? -1 : (long) (limit * 1024) );
}
// specific cluster
if (model.isSetCluster() && model.getCluster().isSetId()) {
quotaCluster.setVdsGroupId(GuidUtils.asGuid(model.getCluster().getId()));
// NOTE(review): assumes getQuotaVdsGroups() is non-null here, unlike the
// null-checked read in the entity-to-model direction — confirm.
entity.getQuotaVdsGroups().add(quotaCluster);
} else { // global
entity.setGlobalQuotaVdsGroup(quotaCluster);
}
return entity;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.kafka.pubsub;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.regex.Pattern;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.TopicPartition;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.provenance.ProvenanceReporter;
import org.apache.nifi.processors.kafka.pubsub.ConsumerPool.PoolStats;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link ConsumerPool}: verifies lease reuse, flow-file
 * creation counts, and pool statistics using a mocked Kafka {@link Consumer}
 * and mocked NiFi session/reporter.
 */
public class ConsumerPoolTest {
private Consumer<byte[], byte[]> consumer = null;
private ProcessSession mockSession = null;
private ProcessContext mockContext = Mockito.mock(ProcessContext.class);
private ProvenanceReporter mockReporter = null;
// Pool without a demarcator: one flow file per record.
private ConsumerPool testPool = null;
// Pool with a demarcator: records are concatenated into one flow file.
private ConsumerPool testDemarcatedPool = null;
private ComponentLog logger = null;
@Before
@SuppressWarnings("unchecked")
public void setup() {
consumer = mock(Consumer.class);
logger = mock(ComponentLog.class);
mockSession = mock(ProcessSession.class);
mockReporter = mock(ProvenanceReporter.class);
when(mockSession.getProvenanceReporter()).thenReturn(mockReporter);
// Both pools override the factory so that the shared mocked consumer is
// handed out instead of a real KafkaConsumer.
testPool = new ConsumerPool(
1,
null,
Collections.emptyMap(),
Collections.singletonList("nifi"),
100L,
"utf-8",
"ssl",
"localhost",
logger,
true,
StandardCharsets.UTF_8,
null) {
@Override
protected Consumer<byte[], byte[]> createKafkaConsumer() {
return consumer;
}
};
testDemarcatedPool = new ConsumerPool(
1,
"--demarcator--".getBytes(StandardCharsets.UTF_8),
Collections.emptyMap(),
Collections.singletonList("nifi"),
100L,
"utf-8",
"ssl",
"localhost",
logger,
true,
StandardCharsets.UTF_8,
Pattern.compile(".*")) {
@Override
protected Consumer<byte[], byte[]> createKafkaConsumer() {
return consumer;
}
};
}
/** Repeated empty polls: the single pooled consumer is reused for every lease. */
@Test
public void validatePoolSimpleCreateClose() throws Exception {
when(consumer.poll(anyLong())).thenReturn(createConsumerRecords("nifi", 0, 0L, new byte[][]{}));
try (final ConsumerLease lease = testPool.obtainConsumer(mockSession, mockContext)) {
lease.poll();
}
try (final ConsumerLease lease = testPool.obtainConsumer(mockSession, mockContext)) {
lease.poll();
}
try (final ConsumerLease lease = testPool.obtainConsumer(mockSession, mockContext)) {
lease.poll();
}
try (final ConsumerLease lease = testPool.obtainConsumer(mockSession, mockContext)) {
lease.poll();
}
testPool.close();
verify(mockSession, times(0)).create();
verify(mockSession, times(0)).commit();
final PoolStats stats = testPool.getPoolStats();
assertEquals(1, stats.consumerCreatedCount);
assertEquals(1, stats.consumerClosedCount);
assertEquals(4, stats.leasesObtainedCount);
}
/** Without a demarcator, three records should yield three session.create() calls. */
@Test
@SuppressWarnings("unchecked")
public void validatePoolSimpleCreatePollClose() throws Exception {
final byte[][] firstPassValues = new byte[][]{
"Hello-1".getBytes(StandardCharsets.UTF_8),
"Hello-2".getBytes(StandardCharsets.UTF_8),
"Hello-3".getBytes(StandardCharsets.UTF_8)
};
final ConsumerRecords<byte[], byte[]> firstRecs = createConsumerRecords("foo", 1, 1L, firstPassValues);
when(consumer.poll(anyLong())).thenReturn(firstRecs, createConsumerRecords("nifi", 0, 0L, new byte[][]{}));
try (final ConsumerLease lease = testPool.obtainConsumer(mockSession, mockContext)) {
lease.poll();
lease.commit();
}
testPool.close();
verify(mockSession, times(3)).create();
verify(mockSession, times(1)).commit();
final PoolStats stats = testPool.getPoolStats();
assertEquals(1, stats.consumerCreatedCount);
assertEquals(1, stats.consumerClosedCount);
assertEquals(1, stats.leasesObtainedCount);
}
/** Many leases with many empty polls each: still exactly one consumer created/closed. */
@Test
public void validatePoolSimpleBatchCreateClose() throws Exception {
when(consumer.poll(anyLong())).thenReturn(createConsumerRecords("nifi", 0, 0L, new byte[][]{}));
for (int i = 0; i < 100; i++) {
try (final ConsumerLease lease = testPool.obtainConsumer(mockSession, mockContext)) {
for (int j = 0; j < 100; j++) {
lease.poll();
}
}
}
testPool.close();
verify(mockSession, times(0)).create();
verify(mockSession, times(0)).commit();
final PoolStats stats = testPool.getPoolStats();
assertEquals(1, stats.consumerCreatedCount);
assertEquals(1, stats.consumerClosedCount);
assertEquals(100, stats.leasesObtainedCount);
}
/** With a demarcator, three records are batched into a single created flow file. */
@Test
@SuppressWarnings("unchecked")
public void validatePoolBatchCreatePollClose() throws Exception {
final byte[][] firstPassValues = new byte[][]{
"Hello-1".getBytes(StandardCharsets.UTF_8),
"Hello-2".getBytes(StandardCharsets.UTF_8),
"Hello-3".getBytes(StandardCharsets.UTF_8)
};
final ConsumerRecords<byte[], byte[]> firstRecs = createConsumerRecords("foo", 1, 1L, firstPassValues);
when(consumer.poll(anyLong())).thenReturn(firstRecs, createConsumerRecords("nifi", 0, 0L, new byte[][]{}));
try (final ConsumerLease lease = testDemarcatedPool.obtainConsumer(mockSession, mockContext)) {
lease.poll();
lease.commit();
}
testDemarcatedPool.close();
verify(mockSession, times(1)).create();
verify(mockSession, times(1)).commit();
final PoolStats stats = testDemarcatedPool.getPoolStats();
assertEquals(1, stats.consumerCreatedCount);
assertEquals(1, stats.consumerClosedCount);
assertEquals(1, stats.leasesObtainedCount);
}
/** A KafkaException from poll() propagates to the caller and the consumer is still closed. */
@Test
public void validatePoolConsumerFails() throws Exception {
when(consumer.poll(anyLong())).thenThrow(new KafkaException("oops"));
try (final ConsumerLease lease = testPool.obtainConsumer(mockSession, mockContext)) {
try {
lease.poll();
fail();
} catch (final KafkaException ke) {
// expected — the pool must surface the failure, not swallow it
}
}
testPool.close();
verify(mockSession, times(0)).create();
verify(mockSession, times(0)).commit();
final PoolStats stats = testPool.getPoolStats();
assertEquals(1, stats.consumerCreatedCount);
assertEquals(1, stats.consumerClosedCount);
assertEquals(1, stats.leasesObtainedCount);
}
/**
 * Builds a {@link ConsumerRecords} fixture for a single topic-partition with
 * consecutive offsets starting at {@code startingOffset}; keys are random UUIDs.
 */
@SuppressWarnings({"rawtypes", "unchecked"})
static ConsumerRecords<byte[], byte[]> createConsumerRecords(final String topic, final int partition, final long startingOffset, final byte[][] rawRecords) {
final Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> map = new HashMap<>();
final TopicPartition tPart = new TopicPartition(topic, partition);
final List<ConsumerRecord<byte[], byte[]>> records = new ArrayList<>();
long offset = startingOffset;
for (final byte[] rawRecord : rawRecords) {
final ConsumerRecord<byte[], byte[]> rec = new ConsumerRecord(topic, partition, offset++, UUID.randomUUID().toString().getBytes(), rawRecord);
records.add(rec);
}
map.put(tPart, records);
return new ConsumerRecords(map);
}
}
| |
/* @formatter:off */
/******************************************************************************
* ELEC5616
* Computer and Network Security, The University of Sydney
*
* PACKAGE: StealthNet
* FILENAME: ProxyThread.java
* AUTHORS: Joshua Spence
* DESCRIPTION: The thread to be spawned for each StealthNet proxy
* connection.
*
*****************************************************************************/
/* @formatter:on */
package StealthNet;
/* Import Libraries ********************************************************* */
import java.io.IOException;
import java.math.BigInteger;
import java.net.Socket;
import java.net.SocketException;
import java.util.Random;
/* StealthNet.ProxyThread Class Definition ********************************** */
/**
* Represents a {@link Thread} within the operating system for communications
* between the StealthNet {@link Proxy} and a StealthNet peer.
*
* <p> A new instance is created for each {@link Client} such that multiple
* {@link Client}s can be active concurrently. This class receives
* {@link EncryptedPacket}s from one peer and forwards them to the other peer.
* To simulate various security attacks, the {@link EncryptedPacket}s may be
* altered/retransmitted/dropped.
*
* <p>A {@link ProxyThread} is "paired" with another {@link ProxyThread} such
* that when one {@link Thread} ends, the paired {@link Thread} can be ended as
* well. This is because for a single {@link Client}<=>{@link Server}
* connection, for example, two {@link ProxyThread} will be created - one to
* handle the {@link Client}-> {@link Server} communications and another to
* handle the {@link Server}-> {@link Client} communications.
*
* @author Joshua Spence
* @see Proxy
*/
public class ProxyThread extends Thread {
    /* Debug options. */
    private static final boolean DEBUG_GENERAL = Debug.isDebug("StealthNet.ProxyThread.General");
    private static final boolean DEBUG_ERROR_TRACE = Debug.isDebug("StealthNet.ProxyThread.ErrorTrace") || Debug.isDebug("ErrorTrace");

    /* How malicious should we be? */

    /** True to enable simulated security attacks. */
    public static final boolean isMalicious = System.getProperty("StealthNet.Proxy.Malicious", "false").equals("true");

    /** Don't perform any malicious activity for the first X packets. */
    private static final long noMaliciousPacketCount = 5;

    /**
     * Probability (as an integer out of 100) of a replay attack after the first
     * X packets.
     */
    private static final int replayProbability = 25;

    /**
     * Probability (as an integer out of 100) of a corruption attack after the
     * first X packets.
     */
    private static final int corruptionProbability = 25;

    /** {@link ProxyComms} class to handle communications to/from one peer. */
    private ProxyComms stealthCommsSource = null;

    /** {@link ProxyComms} class to handle communications to/from the other peer. */
    private ProxyComms stealthCommsDestination = null;

    /** Paired {@link ProxyThread} (to be killed when this thread terminates). */
    private ProxyThread pairedThread;

    /**
     * Boolean to indicate that this thread should be stopped. Set by this
     * thread's "paired" thread.
     */
    private boolean shouldStop = false;

    /**
     * Constructor.
     *
     * @param sourceSocket The {@link Socket} on which the {@link Proxy} has
     *        accepted a connection.
     * @param destinationSocket The {@link Socket} that the {@link Proxy} is
     *        retransmitting {@link EncryptedPacket}s on.
     */
    public ProxyThread(final Socket sourceSocket, final Socket destinationSocket) {
        /* Thread constructor. */
        super("StealthNet.ProxyThread");
        if (DEBUG_GENERAL)
            System.out.println("Creating a ProxyThread.");

        /* Create a new ProxyComms instance and accept sessions. */
        stealthCommsSource = new ProxyComms();
        stealthCommsSource.acceptSession(sourceSocket);
        stealthCommsDestination = new ProxyComms();
        stealthCommsDestination.acceptSession(destinationSocket);
    }

    /**
     * Set a the paired {@link ProxyThread} to be terminated when this
     * {@link Thread} terminates.
     *
     * @param thread The ProxyThread to be paired with this thread.
     */
    public void setPairedThread(final ProxyThread thread) {
        pairedThread = thread;
    }

    /**
     * Set whether or not a {@link ProxyThread} should stop executing. This
     * should be set by this {@link Thread} "paired" {@link Thread} when the
     * paired {@link Thread} itself wishes to terminate.
     *
     * @param stop True if this {@link Thread} should stop executing, otherwise
     *        false.
     */
    private synchronized void setShouldStop(final boolean stop) {
        shouldStop = stop;
    }

    /**
     * Checks if a thread should stop executing. This should be called within
     * this thread to provide thread-safe access to the `shouldStop' boolean
     * variable, which may be set by this thread's paired thread.
     *
     * @return True if the thread should stop executing, otherwise false.
     */
    private synchronized boolean getShouldStop() {
        return shouldStop;
    }

    /**
     * Cleans up before destroying the class.
     *
     * NOTE(review): finalize() has been deprecated since Java 9 and is not
     * guaranteed to run; the run() method already performs this cleanup on
     * every exit path. Retained for interface compatibility.
     *
     * @throws IOException
     */
    @Override
    protected void finalize() throws IOException {
        if (stealthCommsSource != null)
            stealthCommsSource.terminateSession();
        if (stealthCommsDestination != null)
            stealthCommsDestination.terminateSession();
    }

    /**
     * The main function for the {@link ProxyThread} class. This function
     * forwards {@link EncryptedPacket}s from source to destination.
     *
     * <p> If <code>isMalicious</code> is true, then the {@link Proxy} will
     * attempt to randomly simulate various security attacks. After the initial
     * <code>noMaliciousPacketCount</code> received {@link EncryptedPacket}s
     * (during which no malicious activity will occur - to allow the
     * communicating parties to perform various security protocols), the
     * {@link Proxy} will randomly simulate a security attack with some
     * probability based on a pseudo-random number generator.
     */
    @Override
    public void run() {
        if (DEBUG_GENERAL)
            System.out.println("Running ProxyThread... (Thread ID is " + getId() + ")");

        String packetString = "";
        final Random rnd = new Random();
        BigInteger pcktCounter = BigInteger.ZERO;

        try {
            while (packetString != null && !getShouldStop()) {
                /* Receive a StealthNet packet. */
                packetString = stealthCommsSource.recvString();
                if (packetString == null)
                    break;

                /* Increment the packet counter. */
                pcktCounter = pcktCounter.add(BigInteger.ONE);

                /*
                 * Decide whether or not to corrupt a message.
                 *
                 * BUG FIX: the original used `rnd.nextInt() % 100 <
                 * probability`. Since nextInt() returns negative values half
                 * the time and any negative remainder satisfies `< 25`, the
                 * attack actually fired ~62% of the time. nextInt(100) yields
                 * a uniform value in [0, 100), giving the intended 25%.
                 */
                if (isMalicious && pcktCounter.compareTo(BigInteger.valueOf(noMaliciousPacketCount)) > 0 && rnd.nextInt(100) < corruptionProbability) {
                    if (DEBUG_GENERAL)
                        System.out.println("Corrupting packet...");

                    /* Simply reverse the packet string. */
                    stealthCommsDestination.sendString(new StringBuilder(packetString).reverse().toString());
                } else
                    stealthCommsDestination.sendString(packetString);

                /* Decide whether or not to replay a message (same fix as above). */
                if (isMalicious && pcktCounter.compareTo(BigInteger.valueOf(noMaliciousPacketCount)) > 0 && rnd.nextInt(100) < replayProbability) {
                    if (DEBUG_GENERAL)
                        System.out.println("Replaying last packet...");
                    stealthCommsDestination.sendString(packetString);
                }
            }
        } catch (final SocketException e) {
            /*
             * This is a fairly "clean" exit which can, but hopefully won't,
             * occur.
             */
            System.out.println("Session terminated.");
        } catch (final IOException e) {
            System.out.println("Session terminated.");
            if (DEBUG_ERROR_TRACE)
                e.printStackTrace();
        } catch (final Exception e) {
            System.err.println("Error running proxy thread.");
            if (DEBUG_ERROR_TRACE)
                e.printStackTrace();
        }

        /* Clean up. */
        if (stealthCommsSource != null) {
            stealthCommsSource.terminateSession();
            stealthCommsSource = null;
        }
        if (stealthCommsDestination != null) {
            stealthCommsDestination.terminateSession();
            stealthCommsDestination = null;
        }

        /* Kill the paired thread. */
        if (pairedThread != null && !pairedThread.getShouldStop()) {
            if (DEBUG_GENERAL)
                System.out.println("Killing paired thread " + pairedThread.getId() + ".");
            pairedThread.setShouldStop(true);
        }
    }
}
/******************************************************************************
* END OF FILE: ProxyThread.java
*****************************************************************************/
| |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2019 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.big.data.impl.shim.mapreduce;
import com.google.common.annotations.VisibleForTesting;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.plugins.LifecyclePluginType;
import org.pentaho.di.core.plugins.PluginInterface;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.hadoop.PluginPropertiesUtil;
import org.pentaho.hadoop.shim.api.cluster.NamedCluster;
import org.pentaho.hadoop.shim.api.mapreduce.MapReduceExecutionException;
import org.pentaho.hadoop.shim.api.mapreduce.MapReduceJarInfo;
import org.pentaho.hadoop.shim.api.mapreduce.MapReduceJobBuilder;
import org.pentaho.hadoop.shim.api.mapreduce.MapReduceJobSimple;
import org.pentaho.hadoop.shim.api.mapreduce.MapReduceService;
import org.pentaho.hadoop.shim.api.mapreduce.PentahoMapReduceJobBuilder;
import org.pentaho.hadoop.shim.spi.HadoopShim;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;
/**
* Created by bryan on 12/1/15.
*/
public class MapReduceServiceImpl implements MapReduceService {
/** Bundle-lookup class for i18n messages via {@link BaseMessages}. */
public static final Class<?> PKG = MapReduceServiceImpl.class;
/** Hadoop cluster this service submits MapReduce work against. */
private final NamedCluster namedCluster;
/** Shim providing access to the concrete Hadoop implementation. */
private final HadoopShim hadoopShim;
/** Executor that runs submitted jobs asynchronously. */
private final ExecutorService executorService;
/** Visitor services applied when building Pentaho MapReduce jobs; mutable via addTransformationVisitorService(). */
private final List<TransformationVisitorService> visitorServices = new ArrayList<>();
private final PluginPropertiesUtil pluginPropertiesUtil;
private final PluginRegistry pluginRegistry;
private static final Logger logger = LoggerFactory.getLogger( MapReduceServiceImpl.class );
/**
 * Convenience constructor that uses the singleton {@link PluginRegistry}
 * and a fresh {@link PluginPropertiesUtil}.
 */
public MapReduceServiceImpl( NamedCluster namedCluster, HadoopShim hadoopShim,
ExecutorService executorService, List<TransformationVisitorService> visitorServices ) {
this( namedCluster, hadoopShim, executorService, new PluginPropertiesUtil(),
PluginRegistry.getInstance(), visitorServices );
}
/**
 * Full constructor; collaborators are injectable for testing. The visitor
 * services are defensively copied into this instance's own list.
 */
public MapReduceServiceImpl( NamedCluster namedCluster, HadoopShim hadoopShim,
ExecutorService executorService,
PluginPropertiesUtil pluginPropertiesUtil, PluginRegistry pluginRegistry,
List<TransformationVisitorService> visitorServices ) {
this.namedCluster = namedCluster;
this.hadoopShim = hadoopShim;
this.executorService = executorService;
this.pluginPropertiesUtil = pluginPropertiesUtil;
this.pluginRegistry = pluginRegistry;
this.visitorServices.addAll( visitorServices );
}
/**
 * Runs a plain (non-Pentaho) MapReduce driver class from the given jar
 * asynchronously on this service's executor.
 *
 * @param resolvedJarUrl  jar containing the driver
 * @param driverClass     driver class name; may be empty, in which case the
 *                        class is located via the jar manifest or by scanning
 *                        for a unique main() (see locateDriverClass)
 * @param commandLineArgs arguments passed to the driver's main()
 * @throws MapReduceExecutionException if the driver class cannot be resolved
 */
@Override
public MapReduceJobSimple executeSimple( URL resolvedJarUrl, String driverClass, final String commandLineArgs )
throws MapReduceExecutionException {
final Class<?> mainClass = locateDriverClass( driverClass, resolvedJarUrl, hadoopShim );
return new FutureMapReduceJobSimpleImpl( executorService, mainClass, commandLineArgs );
}
/** Creates a builder for a generic MapReduce job targeting this service's cluster. */
@Override
public MapReduceJobBuilder createJobBuilder( final LogChannelInterface log, VariableSpace variableSpace ) {
return new MapReduceJobBuilderImpl( namedCluster, hadoopShim, log, variableSpace );
}
/**
 * Creates a builder for a Pentaho MapReduce job, configured from the
 * HadoopSpoonPlugin's plugin properties.
 *
 * @throws IOException if the plugin properties cannot be loaded (a
 *         {@link KettleFileException} is wrapped, preserving the cause)
 */
@Override
public PentahoMapReduceJobBuilder createPentahoMapReduceJobBuilder( LogChannelInterface log,
VariableSpace variableSpace )
throws IOException {
PluginInterface pluginInterface =
pluginRegistry.findPluginWithId( LifecyclePluginType.class, "HadoopSpoonPlugin" );
Properties pmrProperties;
try {
pmrProperties = pluginPropertiesUtil.loadPluginProperties( pluginInterface );
return new PentahoMapReduceJobBuilderImpl( namedCluster, hadoopShim, log, variableSpace, pluginInterface,
pmrProperties, visitorServices );
} catch ( KettleFileException e ) {
throw new IOException( e );
}
}
/**
 * Inspects a jar and reports (a) every class with a static main() and
 * (b) the manifest's Main-Class, if resolvable.
 */
@Override
public MapReduceJarInfo getJarInfo( URL resolvedJarUrl ) throws IOException, ClassNotFoundException {
ClassLoader classLoader = getClass().getClassLoader();
List<Class<?>> classesInJarWithMain = getClassesInJarWithMain( resolvedJarUrl.toExternalForm(), classLoader );
List<String> classNamesInJarWithMain = new ArrayList<>( classesInJarWithMain.size() );
for ( Class<?> aClass : classesInJarWithMain ) {
classNamesInJarWithMain.add( aClass.getCanonicalName() );
}
// Snapshot as unmodifiable so the returned MapReduceJarInfo is immutable.
final List<String> finalClassNamesInJarWithMain = Collections.unmodifiableList( classNamesInJarWithMain );
Class<?> mainClassFromManifest = null;
try {
mainClassFromManifest = getMainClassFromManifest( resolvedJarUrl, classLoader );
} catch ( Exception e ) {
// Ignore: a missing/unloadable manifest Main-Class is reported as null,
// not as a failure of the whole inspection.
}
final String mainClassName = mainClassFromManifest != null ? mainClassFromManifest.getCanonicalName() : null;
return new MapReduceJarInfo() {
@Override
public List<String> getClassesWithMain() {
return finalClassNamesInJarWithMain;
}
@Override
public String getMainClass() {
return mainClassName;
}
};
}
/** Registers an additional visitor service used by subsequently built Pentaho MapReduce jobs. */
public void addTransformationVisitorService( TransformationVisitorService service ) {
visitorServices.add( service );
}
@VisibleForTesting
Class<?> locateDriverClass( String driverClass, final URL resolvedJarUrl, final HadoopShim shim )
throws MapReduceExecutionException {
try {
if ( Utils.isEmpty( driverClass ) ) {
Class<?> mainClass = getMainClassFromManifest( resolvedJarUrl, shim.getClass().getClassLoader() );
if ( mainClass == null ) {
List<Class<?>> mainClasses =
getClassesInJarWithMain( resolvedJarUrl.toExternalForm(), shim.getClass().getClassLoader() );
if ( mainClasses.size() == 1 ) {
return mainClasses.get( 0 );
} else if ( mainClasses.isEmpty() ) {
throw new MapReduceExecutionException(
BaseMessages.getString( PKG, "MapReduceServiceImpl.DriverClassNotSpecified" ) );
} else {
throw new MapReduceExecutionException(
BaseMessages.getString( PKG, "MapReduceServiceImpl.MultipleDriverClasses" ) );
}
}
return mainClass;
} else {
return getClassByName( driverClass, resolvedJarUrl, shim.getClass().getClassLoader() );
}
} catch ( MapReduceExecutionException mrEx ) {
throw mrEx;
} catch ( Exception e ) {
throw new MapReduceExecutionException( e );
}
}
private List<Class<?>> getClassesInJarWithMain( String jarUrl, ClassLoader parentClassloader )
throws MalformedURLException {
ArrayList<Class<?>> mainClasses = new ArrayList<Class<?>>();
List<Class<?>> allClasses = getClassesInJar( jarUrl, parentClassloader );
for ( Class<?> clazz : allClasses ) {
try {
Method mainMethod = clazz.getMethod( "main", new Class[] { String[].class } );
if ( Modifier.isStatic( mainMethod.getModifiers() ) ) {
mainClasses.add( clazz );
}
} catch ( Throwable ignored ) {
// Ignore classes without main() methods
}
}
return mainClasses;
}
private Class<?> getMainClassFromManifest( URL jarUrl, ClassLoader parentClassLoader )
throws IOException, ClassNotFoundException {
JarFile jarFile = getJarFile( jarUrl, parentClassLoader );
try {
Manifest manifest = jarFile.getManifest();
String className = manifest == null ? null : manifest.getMainAttributes().getValue( "Main-Class" );
return loadClassByName( className, jarUrl, parentClassLoader );
} finally {
jarFile.close();
}
}
private JarFile getJarFile( final URL jarUrl, final ClassLoader parentClassLoader ) throws IOException {
if ( jarUrl == null || parentClassLoader == null ) {
throw new NullPointerException();
}
JarFile jarFile;
try {
jarFile = new JarFile( new File( jarUrl.toURI() ) );
} catch ( URISyntaxException ex ) {
throw new IOException( "Error locating jar: " + jarUrl );
} catch ( IOException ex ) {
throw new IOException( "Error opening job jar: " + jarUrl, ex );
}
return jarFile;
}
private Class<?> loadClassByName( final String className, final URL jarUrl, final ClassLoader parentClassLoader )
throws ClassNotFoundException {
if ( className != null ) {
try ( URLClassLoader cl = new URLClassLoader( new URL[] { jarUrl }, parentClassLoader ) ) {
return cl.loadClass( className.replace( "/", "." ) );
} catch ( IOException e ) {
logger.error( e.getMessage(), e );
}
}
return null;
}
private Class<?> getClassByName( String className, URL jarUrl, ClassLoader parentClassLoader )
throws IOException, ClassNotFoundException {
JarFile jarFile = getJarFile( jarUrl, parentClassLoader );
try {
return loadClassByName( className, jarUrl, parentClassLoader );
} finally {
jarFile.close();
}
}
private List<Class<?>> getClassesInJar( String jarUrl, ClassLoader parentClassloader )
throws MalformedURLException {
ArrayList<Class<?>> classes = new ArrayList<Class<?>>();
URL url = new URL( jarUrl );
URL[] urls = new URL[] { url };
try ( URLClassLoader loader = new URLClassLoader( urls, getClass().getClassLoader() );
JarInputStream jarFile = new JarInputStream( new FileInputStream( new File( url.toURI() ) ) ) ) {
while ( true ) {
JarEntry jarEntry = jarFile.getNextJarEntry();
if ( jarEntry == null ) {
break;
}
if ( jarEntry.getName().endsWith( ".class" ) ) {
String className =
jarEntry.getName().substring( 0, jarEntry.getName().indexOf( ".class" ) ).replace( "/", "\\." );
classes.add( loader.loadClass( className ) );
}
}
} catch ( IOException e ) {
} catch ( ClassNotFoundException e ) {
} catch ( URISyntaxException e ) {
}
return classes;
}
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.spark.datavec;
import lombok.val;
import org.apache.commons.io.FilenameUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.input.PortableDataStream;
import org.datavec.api.io.labels.ParentPathLabelGenerator;
import org.datavec.api.records.reader.SequenceRecordReader;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader;
import org.datavec.api.split.FileSplit;
import org.datavec.api.split.InputSplit;
import org.datavec.api.split.NumberedFileInputSplit;
import org.datavec.api.writable.Writable;
import org.datavec.image.recordreader.ImageRecordReader;
import org.datavec.spark.functions.SequenceRecordReaderFunction;
import org.datavec.spark.functions.pairdata.*;
import org.datavec.spark.transform.misc.StringToWritablesFunction;
import org.datavec.spark.util.DataVecSparkUtil;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.deeplearning4j.datasets.datavec.SequenceRecordReaderDataSetIterator;
import org.deeplearning4j.spark.BaseSparkTest;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.io.ClassPathResource;
import scala.Tuple2;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.*;
/**
 * Tests for the DataVec-to-DataSet conversion functions used by dl4j-spark.
 * Each test converts records via Spark and via the equivalent local (non-Spark)
 * iterator, then checks the two result sets match pairwise (order not withstanding).
 */
public class TestDataVecDataSetFunctions extends BaseSparkTest {

    @Rule
    public TemporaryFolder testDir = new TemporaryFolder();

    @Test
    public void testDataVecDataSetFunction() throws Exception {
        JavaSparkContext sc = getContext();

        File f = testDir.newFolder();
        ClassPathResource cpr = new ClassPathResource("dl4j-spark/imagetest/");
        cpr.copyDirectory(f);

        //Test Spark record reader functionality vs. local
        List<String> labelsList = Arrays.asList("0", "1"); //Need this for Spark: can't infer without init call

        String path = f.getPath() + "/*";

        JavaPairRDD<String, PortableDataStream> origData = sc.binaryFiles(path);
        assertEquals(4, origData.count()); //4 images

        ImageRecordReader rr = new ImageRecordReader(28, 28, 1, new ParentPathLabelGenerator());
        rr.setLabels(labelsList);
        org.datavec.spark.functions.RecordReaderFunction rrf = new org.datavec.spark.functions.RecordReaderFunction(rr);
        JavaRDD<List<Writable>> rdd = origData.map(rrf);
        JavaRDD<DataSet> data = rdd.map(new DataVecDataSetFunction(1, 2, false));
        List<DataSet> collected = data.collect();

        //Load normally (i.e., not via Spark), and check that we get the same results (order not withstanding)
        InputSplit is = new FileSplit(f, new String[] {"bmp"}, true);
        ImageRecordReader irr = new ImageRecordReader(28, 28, 1, new ParentPathLabelGenerator());
        irr.initialize(is);

        RecordReaderDataSetIterator iter = new RecordReaderDataSetIterator(irr, 1, 1, 2);
        List<DataSet> listLocal = new ArrayList<>(4);
        while (iter.hasNext()) {
            listLocal.add(iter.next());
        }

        //Compare:
        assertEquals(4, collected.size());
        assertEquals(4, listLocal.size());

        //Check that results are the same (order not withstanding)
        boolean[] found = new boolean[4];
        for (int i = 0; i < 4; i++) {
            int foundIndex = -1;
            DataSet ds = collected.get(i);
            for (int j = 0; j < 4; j++) {
                if (ds.equals(listLocal.get(j))) {
                    if (foundIndex != -1)
                        fail(); //Already found this value -> suggests this spark value equals two or more of local version? (Shouldn't happen)
                    foundIndex = j;
                    if (found[foundIndex])
                        fail(); //One of the other spark values was equal to this one -> suggests duplicates in Spark list
                    found[foundIndex] = true; //mark this one as seen before
                }
            }
        }
        int count = 0;
        for (boolean b : found)
            if (b)
                count++;
        assertEquals(4, count); //Expect all 4 and exactly 4 pairwise matches between spark and local versions
    }

    @Test
    public void testDataVecDataSetFunctionMultiLabelRegression() throws Exception {
        JavaSparkContext sc = getContext();

        //Each line i is "10i, 10i+1, ..., 10i+5": columns 0-2 become features, 3-5 become regression labels
        List<String> stringData = new ArrayList<>();
        int n = 6;
        for (int i = 0; i < 10; i++) {
            StringBuilder sb = new StringBuilder();
            boolean first = true;
            for (int j = 0; j < n; j++) {
                if (!first)
                    sb.append(",");
                sb.append(10 * i + j);
                first = false;
            }
            stringData.add(sb.toString());
        }

        JavaRDD<String> stringList = sc.parallelize(stringData);
        JavaRDD<List<Writable>> writables = stringList.map(new StringToWritablesFunction(new CSVRecordReader()));
        JavaRDD<DataSet> dataSets = writables.map(new DataVecDataSetFunction(3, 5, -1, true, null, null));

        List<DataSet> ds = dataSets.collect();
        assertEquals(10, ds.size());

        boolean[] seen = new boolean[10];
        for (DataSet d : ds) {
            INDArray f = d.getFeatures();
            INDArray l = d.getLabels();
            assertEquals(3, f.length());
            assertEquals(3, l.length());

            int exampleIdx = ((int) f.getDouble(0)) / 10;
            seen[exampleIdx] = true;

            for (int j = 0; j < 3; j++) {
                assertEquals(10 * exampleIdx + j, (int) f.getDouble(j));
                assertEquals(10 * exampleIdx + j + 3, (int) l.getDouble(j));
            }
        }

        //All 10 examples must appear exactly once
        int seenCount = 0;
        for (boolean b : seen)
            if (b)
                seenCount++;
        assertEquals(10, seenCount);
    }

    @Test
    public void testDataVecSequenceDataSetFunction() throws Exception {
        JavaSparkContext sc = getContext();
        //Test Spark record reader functionality vs. local

        File dir = testDir.newFolder();
        ClassPathResource cpr = new ClassPathResource("dl4j-spark/csvsequence/");
        cpr.copyDirectory(dir);

        JavaPairRDD<String, PortableDataStream> origData = sc.binaryFiles(dir.getAbsolutePath());
        assertEquals(3, origData.count()); //3 CSV sequences

        SequenceRecordReader seqRR = new CSVSequenceRecordReader(1, ",");
        SequenceRecordReaderFunction rrf = new SequenceRecordReaderFunction(seqRR);
        JavaRDD<List<List<Writable>>> rdd = origData.map(rrf);
        JavaRDD<DataSet> data = rdd.map(new DataVecSequenceDataSetFunction(2, -1, true, null, null));
        List<DataSet> collected = data.collect();

        //Load normally (i.e., not via Spark), and check that we get the same results (order not withstanding)
        InputSplit is = new FileSplit(dir, new String[] {"txt"}, true);
        SequenceRecordReader seqRR2 = new CSVSequenceRecordReader(1, ",");
        seqRR2.initialize(is);

        SequenceRecordReaderDataSetIterator iter = new SequenceRecordReaderDataSetIterator(seqRR2, 1, -1, 2, true);
        List<DataSet> listLocal = new ArrayList<>(3);
        while (iter.hasNext()) {
            listLocal.add(iter.next());
        }

        //Compare:
        assertEquals(3, collected.size());
        assertEquals(3, listLocal.size());

        //Check that results are the same (order not withstanding)
        boolean[] found = new boolean[3];
        for (int i = 0; i < 3; i++) {
            int foundIndex = -1;
            DataSet ds = collected.get(i);
            for (int j = 0; j < 3; j++) {
                if (ds.equals(listLocal.get(j))) {
                    if (foundIndex != -1)
                        fail(); //Already found this value -> suggests this spark value equals two or more of local version? (Shouldn't happen)
                    foundIndex = j;
                    if (found[foundIndex])
                        fail(); //One of the other spark values was equal to this one -> suggests duplicates in Spark list
                    found[foundIndex] = true; //mark this one as seen before
                }
            }
        }
        int count = 0;
        for (boolean b : found)
            if (b)
                count++;
        assertEquals(3, count); //Expect all 3 and exactly 3 pairwise matches between spark and local versions
    }

    @Test
    public void testDataVecSequencePairDataSetFunction() throws Exception {
        JavaSparkContext sc = getContext();

        File f = testDir.newFolder();
        ClassPathResource cpr = new ClassPathResource("dl4j-spark/csvsequence/");
        cpr.copyDirectory(f);

        String path = f.getAbsolutePath() + "/*";

        PathToKeyConverter pathConverter = new PathToKeyConverterFilename();
        JavaPairRDD<Text, BytesPairWritable> toWrite =
                        DataVecSparkUtil.combineFilesForSequenceFile(sc, path, path, pathConverter);

        Path p = testDir.newFolder("dl4j_testSeqPairFn").toPath();
        p.toFile().deleteOnExit();
        String outPath = p.toString() + "/out";
        new File(outPath).deleteOnExit();
        toWrite.saveAsNewAPIHadoopFile(outPath, Text.class, BytesPairWritable.class, SequenceFileOutputFormat.class);

        //Load from sequence file:
        JavaPairRDD<Text, BytesPairWritable> fromSeq = sc.sequenceFile(outPath, Text.class, BytesPairWritable.class);

        SequenceRecordReader srr1 = new CSVSequenceRecordReader(1, ",");
        SequenceRecordReader srr2 = new CSVSequenceRecordReader(1, ",");
        PairSequenceRecordReaderBytesFunction psrbf = new PairSequenceRecordReaderBytesFunction(srr1, srr2);

        JavaRDD<Tuple2<List<List<Writable>>, List<List<Writable>>>> writables = fromSeq.map(psrbf);

        //Map to DataSet:
        DataVecSequencePairDataSetFunction pairFn = new DataVecSequencePairDataSetFunction();
        JavaRDD<DataSet> data = writables.map(pairFn);
        List<DataSet> sparkData = data.collect();

        //Now: do the same thing locally (SequenceRecordReaderDataSetIterator) and compare
        String featuresPath = FilenameUtils.concat(f.getAbsolutePath(), "csvsequence_%d.txt");

        SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
        SequenceRecordReader labelReader = new CSVSequenceRecordReader(1, ",");
        featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
        labelReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));

        SequenceRecordReaderDataSetIterator iter =
                        new SequenceRecordReaderDataSetIterator(featureReader, labelReader, 1, -1, true);

        List<DataSet> localData = new ArrayList<>(3);
        while (iter.hasNext())
            localData.add(iter.next());

        assertEquals(3, sparkData.size());
        assertEquals(3, localData.size());

        for (int i = 0; i < 3; i++) {
            //Check shapes etc. data sets order may differ for spark vs. local
            DataSet dsSpark = sparkData.get(i);
            DataSet dsLocal = localData.get(i);

            assertNull(dsSpark.getFeaturesMaskArray());
            assertNull(dsSpark.getLabelsMaskArray());

            INDArray fSpark = dsSpark.getFeatures();
            INDArray fLocal = dsLocal.getFeatures();
            INDArray lSpark = dsSpark.getLabels();
            INDArray lLocal = dsLocal.getLabels();

            val s = new long[] {1, 3, 4}; //1 example, 3 values, 4 time steps
            assertArrayEquals(s, fSpark.shape());
            assertArrayEquals(s, fLocal.shape());
            assertArrayEquals(s, lSpark.shape());
            assertArrayEquals(s, lLocal.shape());
        }

        //Check that results are the same (order not withstanding)
        boolean[] found = new boolean[3];
        for (int i = 0; i < 3; i++) {
            int foundIndex = -1;
            DataSet ds = sparkData.get(i);
            for (int j = 0; j < 3; j++) {
                if (ds.equals(localData.get(j))) {
                    if (foundIndex != -1)
                        fail(); //Already found this value -> suggests this spark value equals two or more of local version? (Shouldn't happen)
                    foundIndex = j;
                    if (found[foundIndex])
                        fail(); //One of the other spark values was equal to this one -> suggests duplicates in Spark list
                    found[foundIndex] = true; //mark this one as seen before
                }
            }
        }
        int count = 0;
        for (boolean b : found)
            if (b)
                count++;
        assertEquals(3, count); //Expect all 3 and exactly 3 pairwise matches between spark and local versions
    }

    @Test
    public void testDataVecSequencePairDataSetFunctionVariableLength() throws Exception {
        //Same sort of test as testDataVecSequencePairDataSetFunction() but with variable length time series (labels shorter, align end)

        File dirFeatures = testDir.newFolder();
        ClassPathResource cpr = new ClassPathResource("dl4j-spark/csvsequence/");
        cpr.copyDirectory(dirFeatures);

        File dirLabels = testDir.newFolder();
        ClassPathResource cpr2 = new ClassPathResource("dl4j-spark/csvsequencelabels/");
        cpr2.copyDirectory(dirLabels);

        PathToKeyConverter pathConverter = new PathToKeyConverterNumber(); //Extract a number from the file name
        JavaPairRDD<Text, BytesPairWritable> toWrite =
                        DataVecSparkUtil.combineFilesForSequenceFile(sc, dirFeatures.getAbsolutePath(), dirLabels.getAbsolutePath(), pathConverter);

        Path p = testDir.newFolder("dl4j_testSeqPairFnVarLength").toPath();
        p.toFile().deleteOnExit();
        String outPath = p.toFile().getAbsolutePath() + "/out";
        new File(outPath).deleteOnExit();
        toWrite.saveAsNewAPIHadoopFile(outPath, Text.class, BytesPairWritable.class, SequenceFileOutputFormat.class);

        //Load from sequence file:
        JavaPairRDD<Text, BytesPairWritable> fromSeq = sc.sequenceFile(outPath, Text.class, BytesPairWritable.class);

        SequenceRecordReader srr1 = new CSVSequenceRecordReader(1, ",");
        SequenceRecordReader srr2 = new CSVSequenceRecordReader(1, ",");
        PairSequenceRecordReaderBytesFunction psrbf = new PairSequenceRecordReaderBytesFunction(srr1, srr2);

        JavaRDD<Tuple2<List<List<Writable>>, List<List<Writable>>>> writables = fromSeq.map(psrbf);

        //Map to DataSet:
        DataVecSequencePairDataSetFunction pairFn = new DataVecSequencePairDataSetFunction(4, false,
                        DataVecSequencePairDataSetFunction.AlignmentMode.ALIGN_END);
        JavaRDD<DataSet> data = writables.map(pairFn);
        List<DataSet> sparkData = data.collect();

        //Now: do the same thing locally (SequenceRecordReaderDataSetIterator) and compare
        String featuresPath = FilenameUtils.concat(dirFeatures.getAbsolutePath(), "csvsequence_%d.txt");
        String labelsPath = FilenameUtils.concat(dirLabels.getAbsolutePath(), "csvsequencelabelsShort_%d.txt");

        SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
        SequenceRecordReader labelReader = new CSVSequenceRecordReader(1, ",");
        featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
        labelReader.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));

        SequenceRecordReaderDataSetIterator iter = new SequenceRecordReaderDataSetIterator(featureReader, labelReader,
                        1, 4, false, SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_END);

        List<DataSet> localData = new ArrayList<>(3);
        while (iter.hasNext())
            localData.add(iter.next());

        assertEquals(3, sparkData.size());
        assertEquals(3, localData.size());

        val fShapeExp = new long[] {1, 3, 4}; //1 example, 3 values, 4 time steps
        val lShapeExp = new long[] {1, 4, 4}; //1 example, 4 values/classes, 4 time steps (after padding)
        for (int i = 0; i < 3; i++) {
            //Check shapes etc. data sets order may differ for spark vs. local
            DataSet dsSpark = sparkData.get(i);
            DataSet dsLocal = localData.get(i);

            assertNotNull(dsSpark.getLabelsMaskArray()); //Expect mask array for labels

            INDArray fSpark = dsSpark.getFeatures();
            INDArray fLocal = dsLocal.getFeatures();
            INDArray lSpark = dsSpark.getLabels();
            INDArray lLocal = dsLocal.getLabels();

            assertArrayEquals(fShapeExp, fSpark.shape());
            assertArrayEquals(fShapeExp, fLocal.shape());
            assertArrayEquals(lShapeExp, lSpark.shape());
            assertArrayEquals(lShapeExp, lLocal.shape());
        }

        //Check that results are the same (order not withstanding)
        boolean[] found = new boolean[3];
        for (int i = 0; i < 3; i++) {
            int foundIndex = -1;
            DataSet ds = sparkData.get(i);
            for (int j = 0; j < 3; j++) {
                if (dataSetsEqual(ds, localData.get(j))) {
                    if (foundIndex != -1)
                        fail(); //Already found this value -> suggests this spark value equals two or more of local version? (Shouldn't happen)
                    foundIndex = j;
                    if (found[foundIndex])
                        fail(); //One of the other spark values was equal to this one -> suggests duplicates in Spark list
                    found[foundIndex] = true; //mark this one as seen before
                }
            }
        }
        int count = 0;
        for (boolean b : found) {
            if (b) {
                count++;
            }
        }
        assertEquals(3, count); //Expect all 3 and exactly 3 pairwise matches between spark and local versions

        //-------------------------------------------------
        //NOW: test same thing, but for align start...
        DataVecSequencePairDataSetFunction pairFnAlignStart = new DataVecSequencePairDataSetFunction(4, false,
                        DataVecSequencePairDataSetFunction.AlignmentMode.ALIGN_START);
        JavaRDD<DataSet> rddDataAlignStart = writables.map(pairFnAlignStart);
        List<DataSet> sparkDataAlignStart = rddDataAlignStart.collect();

        featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2)); //re-initialize to reset
        labelReader.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));
        SequenceRecordReaderDataSetIterator iterAlignStart = new SequenceRecordReaderDataSetIterator(featureReader,
                        labelReader, 1, 4, false, SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_START);

        List<DataSet> localDataAlignStart = new ArrayList<>(3);
        while (iterAlignStart.hasNext())
            localDataAlignStart.add(iterAlignStart.next());

        assertEquals(3, sparkDataAlignStart.size());
        assertEquals(3, localDataAlignStart.size());

        for (int i = 0; i < 3; i++) {
            //Check shapes etc. data sets order may differ for spark vs. local
            DataSet dsSpark = sparkDataAlignStart.get(i);
            DataSet dsLocal = localDataAlignStart.get(i);

            assertNotNull(dsSpark.getLabelsMaskArray()); //Expect mask array for labels

            INDArray fSpark = dsSpark.getFeatures();
            INDArray fLocal = dsLocal.getFeatures();
            INDArray lSpark = dsSpark.getLabels();
            INDArray lLocal = dsLocal.getLabels();

            assertArrayEquals(fShapeExp, fSpark.shape());
            assertArrayEquals(fShapeExp, fLocal.shape());
            assertArrayEquals(lShapeExp, lSpark.shape());
            assertArrayEquals(lShapeExp, lLocal.shape());
        }

        //Check that results are the same (order not withstanding)
        //BUG FIX: previously this loop compared sparkData/localData (the ALIGN_END lists,
        //already verified above) instead of the ALIGN_START results, so the ALIGN_START
        //outputs were never actually checked against the local iterator.
        found = new boolean[3];
        for (int i = 0; i < 3; i++) {
            int foundIndex = -1;
            DataSet ds = sparkDataAlignStart.get(i);
            for (int j = 0; j < 3; j++) {
                if (dataSetsEqual(ds, localDataAlignStart.get(j))) {
                    if (foundIndex != -1)
                        fail(); //Already found this value -> suggests this spark value equals two or more of local version? (Shouldn't happen)
                    foundIndex = j;
                    if (found[foundIndex])
                        fail(); //One of the other spark values was equal to this one -> suggests duplicates in Spark list
                    found[foundIndex] = true; //mark this one as seen before
                }
            }
        }
        count = 0;
        for (boolean b : found)
            if (b)
                count++;
        assertEquals(3, count); //Expect all 3 and exactly 3 pairwise matches between spark and local versions
    }

    /**
     * Equality for DataSets that treats a null mask and an all-ones mask as equivalent
     * (Spark and local paths may differ in whether they materialize a trivial mask).
     */
    private static boolean dataSetsEqual(DataSet d1, DataSet d2) {

        if (!d1.getFeatures().equals(d2.getFeatures())) {
            return false;
        }
        if (d1.getLabels() == null && d2.getLabels() != null || d1.getLabels() != null && d2.getLabels() == null) {
            return false;
        }
        if (d1.getLabels() != null && !d1.getLabels().equals(d2.getLabels())) {
            return false;
        }

        //BUG FIX: compare the feature MASK arrays; previously the features themselves were
        //passed to masksEqual (a redundant re-check), so feature masks were never compared.
        return masksEqual(d1.getFeaturesMaskArray(), d2.getFeaturesMaskArray())
                        && masksEqual(d1.getLabelsMaskArray(), d2.getLabelsMaskArray());
    }

    /**
     * Mask equality where a null mask is considered equal to an all-ones mask of the
     * other mask's shape (a null mask means "no padding", which an all-ones mask also encodes).
     */
    private static boolean masksEqual(INDArray m1, INDArray m2) {
        if (m1 == null && m2 == null) {
            return true;
        }
        if (m1 != null && m2 != null) {
            return m1.equals(m2);
        }
        //One is null, other is not. Null and ones mask arrays are equal though
        if (m1 != null && !m1.equals(Nd4j.ones(m1.shape()))) {
            return false;
        }
        if (m2 != null && !m2.equals(Nd4j.ones(m2.shape()))) {
            return false;
        }
        return true;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by http://code.google.com/p/protostuff/ ... DO NOT EDIT!
// Generated from protobuf
package org.apache.drill.exec.proto;
public final class SchemaBitData
{
    // Generated protostuff schema wrapper for BitData.BitClientHandshake (fields: rpcVersion=1, channel=2).
    // NOTE: generated code ("DO NOT EDIT" header above) -- comments only; do not alter logic here.
    public static final class BitClientHandshake
    {
        // Shared, stateless schema instance used to serialize (write) messages.
        public static final org.apache.drill.exec.proto.SchemaBitData.BitClientHandshake.MessageSchema WRITE =
            new org.apache.drill.exec.proto.SchemaBitData.BitClientHandshake.MessageSchema();
        // Shared, stateless schema instance used to deserialize (merge) into builders.
        public static final org.apache.drill.exec.proto.SchemaBitData.BitClientHandshake.BuilderSchema MERGE =
            new org.apache.drill.exec.proto.SchemaBitData.BitClientHandshake.BuilderSchema();
        
        // Write-side schema: knows how to emit an immutable BitClientHandshake message.
        public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.BitData.BitClientHandshake>
        {
            // Emits only the fields that are set, in protobuf field-number order.
            public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.BitData.BitClientHandshake message) throws java.io.IOException
            {
                if(message.hasRpcVersion())
                    output.writeInt32(1, message.getRpcVersion(), false);
                if(message.hasChannel())
                    output.writeEnum(2, message.getChannel().getNumber(), false);
            }
            // Delegates initialization checking to the protobuf message itself.
            public boolean isInitialized(org.apache.drill.exec.proto.BitData.BitClientHandshake message)
            {
                return message.isInitialized();
            }
            public java.lang.String getFieldName(int number)
            {
                return org.apache.drill.exec.proto.SchemaBitData.BitClientHandshake.getFieldName(number);
            }
            public int getFieldNumber(java.lang.String name)
            {
                return org.apache.drill.exec.proto.SchemaBitData.BitClientHandshake.getFieldNumber(name);
            }
            public java.lang.Class<org.apache.drill.exec.proto.BitData.BitClientHandshake> typeClass()
            {
                return org.apache.drill.exec.proto.BitData.BitClientHandshake.class;
            }
            public java.lang.String messageName()
            {
                return org.apache.drill.exec.proto.BitData.BitClientHandshake.class.getSimpleName();
            }
            public java.lang.String messageFullName()
            {
                return org.apache.drill.exec.proto.BitData.BitClientHandshake.class.getName();
            }
            //unused
            public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.BitData.BitClientHandshake message) throws java.io.IOException {}
            public org.apache.drill.exec.proto.BitData.BitClientHandshake newMessage() { return null; }
        }
        // Read-side schema: populates a BitClientHandshake.Builder from a protostuff Input.
        public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.BitData.BitClientHandshake.Builder>
        {
            // Reads field numbers until 0 (end of message); unknown fields are skipped.
            public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.BitData.BitClientHandshake.Builder builder) throws java.io.IOException
            {
                for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
                {
                    switch(number)
                    {
                        case 0:
                            return;
                        case 1:
                            builder.setRpcVersion(input.readInt32());
                            break;
                        case 2:
                            builder.setChannel(org.apache.drill.exec.proto.UserBitShared.RpcChannel.valueOf(input.readEnum()));
                            break;
                        default:
                            input.handleUnknownField(number, this);
                    }
                }
            }
            public boolean isInitialized(org.apache.drill.exec.proto.BitData.BitClientHandshake.Builder builder)
            {
                return builder.isInitialized();
            }
            // Supplies a fresh builder for each message being merged.
            public org.apache.drill.exec.proto.BitData.BitClientHandshake.Builder newMessage()
            {
                return org.apache.drill.exec.proto.BitData.BitClientHandshake.newBuilder();
            }
            public java.lang.String getFieldName(int number)
            {
                return org.apache.drill.exec.proto.SchemaBitData.BitClientHandshake.getFieldName(number);
            }
            public int getFieldNumber(java.lang.String name)
            {
                return org.apache.drill.exec.proto.SchemaBitData.BitClientHandshake.getFieldNumber(name);
            }
            public java.lang.Class<org.apache.drill.exec.proto.BitData.BitClientHandshake.Builder> typeClass()
            {
                return org.apache.drill.exec.proto.BitData.BitClientHandshake.Builder.class;
            }
            public java.lang.String messageName()
            {
                return org.apache.drill.exec.proto.BitData.BitClientHandshake.class.getSimpleName();
            }
            public java.lang.String messageFullName()
            {
                return org.apache.drill.exec.proto.BitData.BitClientHandshake.class.getName();
            }
            //unused
            public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.BitData.BitClientHandshake.Builder builder) throws java.io.IOException {}
        }
        // Maps a protobuf field number to its JSON/name form; null for unknown numbers.
        public static java.lang.String getFieldName(int number)
        {
            switch(number)
            {
                case 1: return "rpcVersion";
                case 2: return "channel";
                default: return null;
            }
        }
        // Maps a field name back to its protobuf field number; 0 for unknown names.
        public static int getFieldNumber(java.lang.String name)
        {
            java.lang.Integer number = fieldMap.get(name);
            return number == null ? 0 : number.intValue();
        }
        // Reverse lookup table for getFieldNumber; populated once at class load.
        private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
        static
        {
            fieldMap.put("rpcVersion", 1);
            fieldMap.put("channel", 2);
        }
    }
public static final class BitServerHandshake
{
public static final org.apache.drill.exec.proto.SchemaBitData.BitServerHandshake.MessageSchema WRITE =
new org.apache.drill.exec.proto.SchemaBitData.BitServerHandshake.MessageSchema();
public static final org.apache.drill.exec.proto.SchemaBitData.BitServerHandshake.BuilderSchema MERGE =
new org.apache.drill.exec.proto.SchemaBitData.BitServerHandshake.BuilderSchema();
public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.BitData.BitServerHandshake>
{
public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.BitData.BitServerHandshake message) throws java.io.IOException
{
if(message.hasRpcVersion())
output.writeInt32(1, message.getRpcVersion(), false);
for(String authenticationMechanisms : message.getAuthenticationMechanismsList())
output.writeString(2, authenticationMechanisms, true);
}
public boolean isInitialized(org.apache.drill.exec.proto.BitData.BitServerHandshake message)
{
return message.isInitialized();
}
public java.lang.String getFieldName(int number)
{
return org.apache.drill.exec.proto.SchemaBitData.BitServerHandshake.getFieldName(number);
}
public int getFieldNumber(java.lang.String name)
{
return org.apache.drill.exec.proto.SchemaBitData.BitServerHandshake.getFieldNumber(name);
}
public java.lang.Class<org.apache.drill.exec.proto.BitData.BitServerHandshake> typeClass()
{
return org.apache.drill.exec.proto.BitData.BitServerHandshake.class;
}
public java.lang.String messageName()
{
return org.apache.drill.exec.proto.BitData.BitServerHandshake.class.getSimpleName();
}
public java.lang.String messageFullName()
{
return org.apache.drill.exec.proto.BitData.BitServerHandshake.class.getName();
}
//unused
public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.BitData.BitServerHandshake message) throws java.io.IOException {}
public org.apache.drill.exec.proto.BitData.BitServerHandshake newMessage() { return null; }
}
// protostuff Schema bound to the protobuf Builder: used when DESERIALIZING a
// BitServerHandshake (the MessageSchema counterpart handles serialization).
// NOTE: generated code - regenerate from the .proto rather than hand-editing.
public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.BitData.BitServerHandshake.Builder>
{
public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.BitData.BitServerHandshake.Builder builder) throws java.io.IOException
{
// Read tag/value pairs until field number 0, which marks end of message.
for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
{
switch(number)
{
case 0:
return;
case 1:
builder.setRpcVersion(input.readInt32());
break;
case 2:
// repeated field: each occurrence appends one mechanism
builder.addAuthenticationMechanisms(input.readString());
break;
default:
// forward-compatibility: skip fields added by newer protocol versions
input.handleUnknownField(number, this);
}
}
}
public boolean isInitialized(org.apache.drill.exec.proto.BitData.BitServerHandshake.Builder builder)
{
return builder.isInitialized();
}
public org.apache.drill.exec.proto.BitData.BitServerHandshake.Builder newMessage()
{
return org.apache.drill.exec.proto.BitData.BitServerHandshake.newBuilder();
}
public java.lang.String getFieldName(int number)
{
return org.apache.drill.exec.proto.SchemaBitData.BitServerHandshake.getFieldName(number);
}
public int getFieldNumber(java.lang.String name)
{
return org.apache.drill.exec.proto.SchemaBitData.BitServerHandshake.getFieldNumber(name);
}
public java.lang.Class<org.apache.drill.exec.proto.BitData.BitServerHandshake.Builder> typeClass()
{
return org.apache.drill.exec.proto.BitData.BitServerHandshake.Builder.class;
}
public java.lang.String messageName()
{
return org.apache.drill.exec.proto.BitData.BitServerHandshake.class.getSimpleName();
}
public java.lang.String messageFullName()
{
return org.apache.drill.exec.proto.BitData.BitServerHandshake.class.getName();
}
//unused
// Builder schemas only merge; writeTo is required by the interface but a no-op.
public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.BitData.BitServerHandshake.Builder builder) throws java.io.IOException {}
}
// Maps a protobuf field number to its camelCase field name; null if unknown.
public static java.lang.String getFieldName(int number)
{
switch(number)
{
case 1: return "rpcVersion";
case 2: return "authenticationMechanisms";
default: return null;
}
}
// Reverse of getFieldName(): field name to field number; 0 if unknown
// (0 is never a valid protobuf field number).
public static int getFieldNumber(java.lang.String name)
{
java.lang.Integer number = fieldMap.get(name);
return number == null ? 0 : number.intValue();
}
// Lookup table backing getFieldNumber(); entries must stay in sync with the
// switch in getFieldName() (both generated from the same .proto definition).
private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
static
{
fieldMap.put("rpcVersion", 1);
fieldMap.put("authenticationMechanisms", 2);
}
}
// Generated protostuff (de)serialization support for
// org.apache.drill.exec.proto.BitData.FragmentRecordBatch.
// WRITE serializes built protobuf messages; MERGE populates protobuf Builders.
// NOTE: generated code - regenerate from the .proto rather than hand-editing.
public static final class FragmentRecordBatch
{
// Singleton schema for the write (serialization) path.
public static final org.apache.drill.exec.proto.SchemaBitData.FragmentRecordBatch.MessageSchema WRITE =
new org.apache.drill.exec.proto.SchemaBitData.FragmentRecordBatch.MessageSchema();
// Singleton schema for the read (deserialization) path.
public static final org.apache.drill.exec.proto.SchemaBitData.FragmentRecordBatch.BuilderSchema MERGE =
new org.apache.drill.exec.proto.SchemaBitData.FragmentRecordBatch.BuilderSchema();
// Serializes a fully-built FragmentRecordBatch message.
public static class MessageSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.BitData.FragmentRecordBatch>
{
public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.BitData.FragmentRecordBatch message) throws java.io.IOException
{
// Optional fields are written only when set; numbers match the .proto tags.
if(message.hasQueryId())
output.writeObject(1, message.getQueryId(), org.apache.drill.exec.proto.SchemaUserBitShared.QueryId.WRITE, false);
if(message.hasReceivingMajorFragmentId())
output.writeInt32(2, message.getReceivingMajorFragmentId(), false);
// repeated field: one write per element (last arg true = repeated)
for(int receivingMinorFragmentId : message.getReceivingMinorFragmentIdList())
output.writeInt32(3, receivingMinorFragmentId, true);
if(message.hasSendingMajorFragmentId())
output.writeInt32(4, message.getSendingMajorFragmentId(), false);
if(message.hasSendingMinorFragmentId())
output.writeInt32(5, message.getSendingMinorFragmentId(), false);
if(message.hasDef())
output.writeObject(6, message.getDef(), org.apache.drill.exec.proto.SchemaUserBitShared.RecordBatchDef.WRITE, false);
if(message.hasIsLastBatch())
output.writeBool(7, message.getIsLastBatch(), false);
}
public boolean isInitialized(org.apache.drill.exec.proto.BitData.FragmentRecordBatch message)
{
return message.isInitialized();
}
public java.lang.String getFieldName(int number)
{
return org.apache.drill.exec.proto.SchemaBitData.FragmentRecordBatch.getFieldName(number);
}
public int getFieldNumber(java.lang.String name)
{
return org.apache.drill.exec.proto.SchemaBitData.FragmentRecordBatch.getFieldNumber(name);
}
public java.lang.Class<org.apache.drill.exec.proto.BitData.FragmentRecordBatch> typeClass()
{
return org.apache.drill.exec.proto.BitData.FragmentRecordBatch.class;
}
public java.lang.String messageName()
{
return org.apache.drill.exec.proto.BitData.FragmentRecordBatch.class.getSimpleName();
}
public java.lang.String messageFullName()
{
return org.apache.drill.exec.proto.BitData.FragmentRecordBatch.class.getName();
}
//unused
// Write-side schemas never deserialize or instantiate; interface no-ops.
public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.BitData.FragmentRecordBatch message) throws java.io.IOException {}
public org.apache.drill.exec.proto.BitData.FragmentRecordBatch newMessage() { return null; }
}
// Deserializes into a FragmentRecordBatch.Builder.
public static class BuilderSchema implements com.dyuproject.protostuff.Schema<org.apache.drill.exec.proto.BitData.FragmentRecordBatch.Builder>
{
public void mergeFrom(com.dyuproject.protostuff.Input input, org.apache.drill.exec.proto.BitData.FragmentRecordBatch.Builder builder) throws java.io.IOException
{
// Read tag/value pairs until field number 0, which marks end of message.
for(int number = input.readFieldNumber(this);; number = input.readFieldNumber(this))
{
switch(number)
{
case 0:
return;
case 1:
builder.setQueryId(input.mergeObject(org.apache.drill.exec.proto.UserBitShared.QueryId.newBuilder(), org.apache.drill.exec.proto.SchemaUserBitShared.QueryId.MERGE));
break;
case 2:
builder.setReceivingMajorFragmentId(input.readInt32());
break;
case 3:
// repeated field: each occurrence appends one id
builder.addReceivingMinorFragmentId(input.readInt32());
break;
case 4:
builder.setSendingMajorFragmentId(input.readInt32());
break;
case 5:
builder.setSendingMinorFragmentId(input.readInt32());
break;
case 6:
builder.setDef(input.mergeObject(org.apache.drill.exec.proto.UserBitShared.RecordBatchDef.newBuilder(), org.apache.drill.exec.proto.SchemaUserBitShared.RecordBatchDef.MERGE));
break;
case 7:
builder.setIsLastBatch(input.readBool());
break;
default:
// forward-compatibility: skip fields added by newer protocol versions
input.handleUnknownField(number, this);
}
}
}
public boolean isInitialized(org.apache.drill.exec.proto.BitData.FragmentRecordBatch.Builder builder)
{
return builder.isInitialized();
}
public org.apache.drill.exec.proto.BitData.FragmentRecordBatch.Builder newMessage()
{
return org.apache.drill.exec.proto.BitData.FragmentRecordBatch.newBuilder();
}
public java.lang.String getFieldName(int number)
{
return org.apache.drill.exec.proto.SchemaBitData.FragmentRecordBatch.getFieldName(number);
}
public int getFieldNumber(java.lang.String name)
{
return org.apache.drill.exec.proto.SchemaBitData.FragmentRecordBatch.getFieldNumber(name);
}
public java.lang.Class<org.apache.drill.exec.proto.BitData.FragmentRecordBatch.Builder> typeClass()
{
return org.apache.drill.exec.proto.BitData.FragmentRecordBatch.Builder.class;
}
public java.lang.String messageName()
{
return org.apache.drill.exec.proto.BitData.FragmentRecordBatch.class.getSimpleName();
}
public java.lang.String messageFullName()
{
return org.apache.drill.exec.proto.BitData.FragmentRecordBatch.class.getName();
}
//unused
// Builder schemas only merge; writeTo is required by the interface but a no-op.
public void writeTo(com.dyuproject.protostuff.Output output, org.apache.drill.exec.proto.BitData.FragmentRecordBatch.Builder builder) throws java.io.IOException {}
}
// Maps a protobuf field number to its camelCase field name; null if unknown.
public static java.lang.String getFieldName(int number)
{
switch(number)
{
case 1: return "queryId";
case 2: return "receivingMajorFragmentId";
case 3: return "receivingMinorFragmentId";
case 4: return "sendingMajorFragmentId";
case 5: return "sendingMinorFragmentId";
case 6: return "def";
case 7: return "isLastBatch";
default: return null;
}
}
// Reverse of getFieldName(): field name to field number; 0 if unknown.
public static int getFieldNumber(java.lang.String name)
{
java.lang.Integer number = fieldMap.get(name);
return number == null ? 0 : number.intValue();
}
// Lookup table backing getFieldNumber(); must stay in sync with getFieldName().
private static final java.util.HashMap<java.lang.String,java.lang.Integer> fieldMap = new java.util.HashMap<java.lang.String,java.lang.Integer>();
static
{
fieldMap.put("queryId", 1);
fieldMap.put("receivingMajorFragmentId", 2);
fieldMap.put("receivingMinorFragmentId", 3);
fieldMap.put("sendingMajorFragmentId", 4);
fieldMap.put("sendingMinorFragmentId", 5);
fieldMap.put("def", 6);
fieldMap.put("isLastBatch", 7);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.fit;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
import com.nimbusds.jose.JWSAlgorithm;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.BasicAttribute;
import javax.naming.directory.BasicAttributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.ModificationItem;
import javax.sql.DataSource;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.cxf.jaxrs.client.WebClient;
import org.apache.syncope.client.lib.AnonymousAuthenticationHandler;
import org.apache.syncope.client.lib.SyncopeClient;
import org.apache.syncope.client.lib.SyncopeClientFactoryBean;
import org.apache.syncope.common.keymaster.client.api.ConfParamOps;
import org.apache.syncope.common.keymaster.client.api.DomainOps;
import org.apache.syncope.common.keymaster.client.api.ServiceOps;
import org.apache.syncope.common.keymaster.client.self.SelfKeymasterClientContext;
import org.apache.syncope.common.keymaster.client.zookeeper.ZookeeperKeymasterClientContext;
import org.apache.syncope.common.lib.AnyOperations;
import org.apache.syncope.common.lib.request.AnyObjectUR;
import org.apache.syncope.common.lib.request.AttrPatch;
import org.apache.syncope.common.lib.request.GroupUR;
import org.apache.syncope.common.lib.request.UserUR;
import org.apache.syncope.common.lib.audit.AuditEntry;
import org.apache.syncope.common.lib.policy.PolicyTO;
import org.apache.syncope.common.lib.request.AnyObjectCR;
import org.apache.syncope.common.lib.request.GroupCR;
import org.apache.syncope.common.lib.request.UserCR;
import org.apache.syncope.common.lib.to.SchemaTO;
import org.apache.syncope.common.lib.to.AnyObjectTO;
import org.apache.syncope.common.lib.Attr;
import org.apache.syncope.common.lib.policy.AccessPolicyTO;
import org.apache.syncope.common.lib.policy.DefaultAttrReleasePolicyConf;
import org.apache.syncope.common.lib.policy.AttrReleasePolicyTO;
import org.apache.syncope.common.lib.policy.DefaultAccessPolicyConf;
import org.apache.syncope.common.lib.policy.DefaultAuthPolicyConf;
import org.apache.syncope.common.lib.to.ConnInstanceTO;
import org.apache.syncope.common.lib.to.ResourceTO;
import org.apache.syncope.common.lib.to.GroupTO;
import org.apache.syncope.common.lib.to.MembershipTO;
import org.apache.syncope.common.lib.to.NotificationTO;
import org.apache.syncope.common.lib.to.ProvisioningResult;
import org.apache.syncope.common.lib.to.ReportTO;
import org.apache.syncope.common.lib.to.RoleTO;
import org.apache.syncope.common.lib.to.UserTO;
import org.apache.syncope.common.lib.to.AuthModuleTO;
import org.apache.syncope.common.lib.policy.AuthPolicyTO;
import org.apache.syncope.common.lib.to.ClientAppTO;
import org.apache.syncope.common.lib.to.OIDCRPClientAppTO;
import org.apache.syncope.common.lib.to.SAML2SPClientAppTO;
import org.apache.syncope.common.lib.types.AnyTypeKind;
import org.apache.syncope.common.lib.types.ClientAppType;
import org.apache.syncope.common.lib.types.OIDCGrantType;
import org.apache.syncope.common.lib.types.OIDCResponseType;
import org.apache.syncope.common.lib.types.OIDCSubjectType;
import org.apache.syncope.common.lib.types.PatchOperation;
import org.apache.syncope.common.lib.types.PolicyType;
import org.apache.syncope.common.lib.types.SAML2SPNameId;
import org.apache.syncope.common.lib.types.SchemaType;
import org.apache.syncope.common.lib.types.TraceLevel;
import org.apache.syncope.common.rest.api.RESTHeaders;
import org.apache.syncope.common.rest.api.batch.BatchPayloadParser;
import org.apache.syncope.common.rest.api.batch.BatchResponseItem;
import org.apache.syncope.common.rest.api.beans.AuditQuery;
import org.apache.syncope.common.rest.api.service.AnyObjectService;
import org.apache.syncope.common.rest.api.service.AnyTypeClassService;
import org.apache.syncope.common.rest.api.service.AnyTypeService;
import org.apache.syncope.common.rest.api.service.ApplicationService;
import org.apache.syncope.common.rest.api.service.AuditService;
import org.apache.syncope.common.rest.api.service.AuthModuleService;
import org.apache.syncope.common.rest.api.service.AuthProfileService;
import org.apache.syncope.common.rest.api.service.CamelRouteService;
import org.apache.syncope.common.rest.api.service.ClientAppService;
import org.apache.syncope.common.rest.api.service.ConnectorService;
import org.apache.syncope.common.rest.api.service.DynRealmService;
import org.apache.syncope.common.rest.api.service.NotificationService;
import org.apache.syncope.common.rest.api.service.wa.GoogleMfaAuthAccountService;
import org.apache.syncope.common.rest.api.service.wa.GoogleMfaAuthTokenService;
import org.apache.syncope.common.rest.api.service.PolicyService;
import org.apache.syncope.common.rest.api.service.ReportService;
import org.apache.syncope.common.rest.api.service.ResourceService;
import org.apache.syncope.common.rest.api.service.GroupService;
import org.apache.syncope.common.rest.api.service.ImplementationService;
import org.apache.syncope.common.rest.api.service.MailTemplateService;
import org.apache.syncope.common.rest.api.service.RealmService;
import org.apache.syncope.common.rest.api.service.ReconciliationService;
import org.apache.syncope.common.rest.api.service.RelationshipTypeService;
import org.apache.syncope.common.rest.api.service.RemediationService;
import org.apache.syncope.common.rest.api.service.ReportTemplateService;
import org.apache.syncope.common.rest.api.service.RoleService;
import org.apache.syncope.common.rest.api.service.SCIMConfService;
import org.apache.syncope.common.rest.api.service.SchemaService;
import org.apache.syncope.common.rest.api.service.SecurityQuestionService;
import org.apache.syncope.common.rest.api.service.SyncopeService;
import org.apache.syncope.common.rest.api.service.TaskService;
import org.apache.syncope.common.rest.api.service.UserSelfService;
import org.apache.syncope.common.rest.api.service.UserService;
import org.apache.syncope.common.rest.api.service.UserRequestService;
import org.apache.syncope.common.rest.api.service.BpmnProcessService;
import org.apache.syncope.common.rest.api.service.OIDCC4UIProviderService;
import org.apache.syncope.common.rest.api.service.OIDCC4UIService;
import org.apache.syncope.common.rest.api.service.OIDCJWKSService;
import org.apache.syncope.common.rest.api.service.SAML2IdPEntityService;
import org.apache.syncope.common.rest.api.service.SAML2SP4UIIdPService;
import org.apache.syncope.common.rest.api.service.SAML2SP4UIService;
import org.apache.syncope.common.rest.api.service.SAML2SPEntityService;
import org.apache.syncope.common.rest.api.service.SRARouteService;
import org.apache.syncope.common.rest.api.service.DelegationService;
import org.apache.syncope.common.rest.api.service.UserWorkflowTaskService;
import org.apache.syncope.common.rest.api.service.wa.ImpersonationService;
import org.apache.syncope.common.rest.api.service.wa.U2FRegistrationService;
import org.apache.syncope.common.rest.api.service.wa.WAConfigService;
import org.apache.syncope.common.rest.api.service.wa.WebAuthnRegistrationService;
import org.apache.syncope.fit.AbstractITCase.KeymasterInitializer;
import org.apache.syncope.fit.core.CoreITContext;
import org.apache.syncope.fit.core.UserITCase;
import org.junit.jupiter.api.BeforeAll;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import org.springframework.test.context.support.TestPropertySourceUtils;
@SpringJUnitConfig(
classes = { CoreITContext.class, SelfKeymasterClientContext.class, ZookeeperKeymasterClientContext.class },
initializers = KeymasterInitializer.class)
@TestPropertySource("classpath:test.properties")
public abstract class AbstractITCase {
/**
 * Points the Keymaster client either at Zookeeper or at the Self Keymaster REST
 * endpoint, depending on the active Spring profiles, and injects the anonymous
 * credentials used to authenticate against it.
 */
static class KeymasterInitializer implements ApplicationContextInitializer<ConfigurableApplicationContext> {

    @Override
    public void initialize(final ConfigurableApplicationContext ctx) {
        String profiles = ctx.getEnvironment().getProperty("springActiveProfiles");
        // "springActiveProfiles" may be unset when running outside the Maven
        // profiles: guard against NPE and default to the Self Keymaster address.
        if (profiles != null && profiles.contains("zookeeper")) {
            TestPropertySourceUtils.addInlinedPropertiesToEnvironment(
                    ctx, "keymaster.address=127.0.0.1:2181");
        } else {
            TestPropertySourceUtils.addInlinedPropertiesToEnvironment(
                    ctx, "keymaster.address=http://localhost:9080/syncope/rest/keymaster");
        }
        TestPropertySourceUtils.addInlinedPropertiesToEnvironment(
                ctx, "keymaster.username=" + ANONYMOUS_UNAME);
        TestPropertySourceUtils.addInlinedPropertiesToEnvironment(
                ctx, "keymaster.password=" + ANONYMOUS_KEY);
    }
}
protected static final Logger LOG = LoggerFactory.getLogger(AbstractITCase.class);
// Shared (thread-safe once configured) mappers for the supported content types.
protected static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
protected static final XmlMapper XML_MAPPER = new XmlMapper();
protected static final YAMLMapper YAML_MAPPER = new YAMLMapper();
// Admin credentials and endpoints of the deployed Core / build-tools webapps.
protected static final String ADMIN_UNAME = "admin";
protected static final String ADMIN_PWD = "password";
protected static final String ADDRESS = "http://localhost:9080/syncope/rest";
protected static final String BUILD_TOOLS_ADDRESS = "http://localhost:9080/syncope-fit-build-tools/cxf";
// System property allowing tests to force a specific REST content type.
protected static final String ENV_KEY_CONTENT_TYPE = "jaxrsContentType";
// Keys of the external resources pre-loaded by the test fixtures.
protected static final String RESOURCE_NAME_WS1 = "ws-target-resource-1";
protected static final String RESOURCE_NAME_WS2 = "ws-target-resource-2";
protected static final String RESOURCE_NAME_LDAP = "resource-ldap";
protected static final String RESOURCE_NAME_LDAP_ORGUNIT = "resource-ldap-orgunit";
protected static final String RESOURCE_NAME_TESTDB = "resource-testdb";
protected static final String RESOURCE_NAME_TESTDB2 = "resource-testdb2";
protected static final String RESOURCE_NAME_CSV = "resource-csv";
protected static final String RESOURCE_NAME_DBPULL = "resource-db-pull";
protected static final String RESOURCE_NAME_DBVIRATTR = "resource-db-virattr";
protected static final String RESOURCE_NAME_NOPROPAGATION = "ws-target-resource-nopropagation";
protected static final String RESOURCE_NAME_NOPROPAGATION2 = "ws-target-resource-nopropagation2";
protected static final String RESOURCE_NAME_NOPROPAGATION3 = "ws-target-resource-nopropagation3";
protected static final String RESOURCE_NAME_NOPROPAGATION4 = "ws-target-resource-nopropagation4";
protected static final String RESOURCE_NAME_RESETSYNCTOKEN = "ws-target-resource-update-resetsynctoken";
protected static final String RESOURCE_NAME_TIMEOUT = "ws-target-resource-timeout";
protected static final String RESOURCE_NAME_MAPPINGS1 = "ws-target-resource-list-mappings-1";
protected static final String RESOURCE_NAME_MAPPINGS2 = "ws-target-resource-list-mappings-2";
protected static final String RESOURCE_NAME_CREATE_SINGLE = "ws-target-resource-create-single";
protected static final String RESOURCE_NAME_CREATE_WRONG = "ws-target-resource-create-wrong";
protected static final String RESOURCE_NAME_DELETE = "ws-target-resource-delete";
protected static final String RESOURCE_NAME_UPDATE = "ws-target-resource-update";
protected static final String RESOURCE_NAME_CREATE_NONE = "ws-target-resource-create-none";
protected static final String RESOURCE_NAME_DBSCRIPTED = "resource-db-scripted";
protected static final String RESOURCE_NAME_REST = "rest-target-resource";
// Credentials of the embedded LDAP test server.
protected static final String RESOURCE_LDAP_ADMIN_DN = "uid=admin,ou=system";
protected static final String RESOURCE_LDAP_ADMIN_PWD = "secret";
protected static final String PRINTER = "PRINTER";
// Upper bound, in seconds, used by await()-style polling in subclasses.
protected static final int MAX_WAIT_SECONDS = 50;
// Security settings loaded from /core.properties by securitySetup().
protected static String ANONYMOUS_UNAME;
protected static String ANONYMOUS_KEY;
protected static String JWS_KEY;
protected static String JWT_ISSUER;
protected static JWSAlgorithm JWS_ALGORITHM;
// Clients initialized by the @BeforeAll methods below.
// NOTE(review): "anonymusClient" is a typo for "anonymousClient", but renaming
// would break subclasses referencing the protected field - kept as is.
protected static SyncopeClientFactoryBean clientFactory;
protected static SyncopeClient adminClient;
protected static SyncopeClient anonymusClient;
// Service proxies obtained from adminClient (or anonymusClient for WA ones).
protected static SyncopeService syncopeService;
protected static ApplicationService applicationService;
protected static AnyTypeClassService anyTypeClassService;
protected static AnyTypeService anyTypeService;
protected static RelationshipTypeService relationshipTypeService;
protected static RealmService realmService;
protected static AnyObjectService anyObjectService;
protected static RoleService roleService;
protected static DynRealmService dynRealmService;
protected static UserService userService;
protected static UserSelfService userSelfService;
protected static UserRequestService userRequestService;
protected static UserWorkflowTaskService userWorkflowTaskService;
protected static GroupService groupService;
protected static ResourceService resourceService;
protected static ConnectorService connectorService;
protected static AuditService auditService;
protected static ReportTemplateService reportTemplateService;
protected static ReportService reportService;
protected static TaskService taskService;
protected static ReconciliationService reconciliationService;
protected static BpmnProcessService bpmnProcessService;
protected static MailTemplateService mailTemplateService;
protected static NotificationService notificationService;
protected static SchemaService schemaService;
protected static PolicyService policyService;
protected static AuthModuleService authModuleService;
protected static SecurityQuestionService securityQuestionService;
protected static ImplementationService implementationService;
protected static RemediationService remediationService;
protected static DelegationService delegationService;
protected static SRARouteService sraRouteService;
protected static CamelRouteService camelRouteService;
protected static SAML2SP4UIService saml2SP4UIService;
protected static SAML2SP4UIIdPService saml2SP4UIIdPService;
protected static OIDCC4UIService oidcClientService;
protected static OIDCC4UIProviderService oidcProviderService;
protected static SCIMConfService scimConfService;
protected static ClientAppService clientAppService;
protected static AuthProfileService authProfileService;
protected static SAML2SPEntityService saml2SPEntityService;
protected static SAML2IdPEntityService saml2IdPEntityService;
protected static OIDCJWKSService oidcJWKSService;
protected static WAConfigService waConfigService;
protected static GoogleMfaAuthTokenService googleMfaAuthTokenService;
protected static GoogleMfaAuthAccountService googleMfaAuthAccountService;
protected static U2FRegistrationService u2fRegistrationService;
protected static WebAuthnRegistrationService webAuthnRegistrationService;
protected static ImpersonationService impersonationService;
/**
 * Loads the security settings from {@code /core.properties} and builds the
 * anonymous client plus the WA services that require anonymous authentication.
 */
@BeforeAll
public static void securitySetup() {
    // JUnit 5 does not define the relative execution order of multiple
    // @BeforeAll methods within the same class: make sure restSetup() has
    // already built clientFactory before the anonymous client is created below.
    if (clientFactory == null) {
        restSetup();
    }
    try (InputStream propStream = AbstractITCase.class.getResourceAsStream("/core.properties")) {
        // getResourceAsStream() returns null when the resource is missing:
        // fail with a clear message instead of an opaque NPE from load().
        if (propStream == null) {
            throw new IOException("Could not find /core.properties in classpath");
        }
        Properties props = new Properties();
        props.load(propStream);

        ANONYMOUS_UNAME = props.getProperty("security.anonymousUser");
        ANONYMOUS_KEY = props.getProperty("security.anonymousKey");
        JWT_ISSUER = props.getProperty("security.jwtIssuer");
        JWS_ALGORITHM = JWSAlgorithm.parse(props.getProperty("security.jwsAlgorithm"));
        JWS_KEY = props.getProperty("security.jwsKey");
    } catch (Exception e) {
        LOG.error("Could not read core.properties", e);
    }
    // Without these values no authenticated call can succeed: stop early.
    assertNotNull(ANONYMOUS_UNAME);
    assertNotNull(ANONYMOUS_KEY);
    assertNotNull(JWS_KEY);
    assertNotNull(JWT_ISSUER);

    anonymusClient = clientFactory.create(new AnonymousAuthenticationHandler(ANONYMOUS_UNAME, ANONYMOUS_KEY));
    googleMfaAuthTokenService = anonymusClient.getService(GoogleMfaAuthTokenService.class);
    googleMfaAuthAccountService = anonymusClient.getService(GoogleMfaAuthAccountService.class);
    u2fRegistrationService = anonymusClient.getService(U2FRegistrationService.class);
    webAuthnRegistrationService = anonymusClient.getService(WebAuthnRegistrationService.class);
    impersonationService = anonymusClient.getService(ImpersonationService.class);
}
/**
 * Builds the shared client factory and admin client, then resolves every
 * service proxy used by the integration tests.
 */
@BeforeAll
public static void restSetup() {
    clientFactory = new SyncopeClientFactoryBean().setAddress(ADDRESS);

    // Allow the build to force a specific REST content type via system property.
    String requestedContentType = System.getProperty(ENV_KEY_CONTENT_TYPE);
    if (StringUtils.isNotBlank(requestedContentType)) {
        clientFactory.setContentType(requestedContentType);
    }
    LOG.info("Performing IT with content type {}", clientFactory.getContentType().getMediaType());

    adminClient = clientFactory.create(ADMIN_UNAME, ADMIN_PWD);

    // Platform and type-management services.
    syncopeService = adminClient.getService(SyncopeService.class);
    applicationService = adminClient.getService(ApplicationService.class);
    anyTypeClassService = adminClient.getService(AnyTypeClassService.class);
    anyTypeService = adminClient.getService(AnyTypeService.class);
    relationshipTypeService = adminClient.getService(RelationshipTypeService.class);
    realmService = adminClient.getService(RealmService.class);

    // Identity-management services.
    anyObjectService = adminClient.getService(AnyObjectService.class);
    roleService = adminClient.getService(RoleService.class);
    dynRealmService = adminClient.getService(DynRealmService.class);
    userService = adminClient.getService(UserService.class);
    userSelfService = adminClient.getService(UserSelfService.class);
    userRequestService = adminClient.getService(UserRequestService.class);
    userWorkflowTaskService = adminClient.getService(UserWorkflowTaskService.class);
    groupService = adminClient.getService(GroupService.class);

    // Provisioning, auditing and reporting services.
    resourceService = adminClient.getService(ResourceService.class);
    connectorService = adminClient.getService(ConnectorService.class);
    auditService = adminClient.getService(AuditService.class);
    reportTemplateService = adminClient.getService(ReportTemplateService.class);
    reportService = adminClient.getService(ReportService.class);
    taskService = adminClient.getService(TaskService.class);
    reconciliationService = adminClient.getService(ReconciliationService.class);
    policyService = adminClient.getService(PolicyService.class);
    bpmnProcessService = adminClient.getService(BpmnProcessService.class);
    mailTemplateService = adminClient.getService(MailTemplateService.class);
    notificationService = adminClient.getService(NotificationService.class);
    schemaService = adminClient.getService(SchemaService.class);
    securityQuestionService = adminClient.getService(SecurityQuestionService.class);
    implementationService = adminClient.getService(ImplementationService.class);
    remediationService = adminClient.getService(RemediationService.class);
    delegationService = adminClient.getService(DelegationService.class);
    sraRouteService = adminClient.getService(SRARouteService.class);
    camelRouteService = adminClient.getService(CamelRouteService.class);

    // SSO / client-application services.
    saml2SP4UIService = adminClient.getService(SAML2SP4UIService.class);
    saml2SP4UIIdPService = adminClient.getService(SAML2SP4UIIdPService.class);
    oidcClientService = adminClient.getService(OIDCC4UIService.class);
    oidcProviderService = adminClient.getService(OIDCC4UIProviderService.class);
    scimConfService = adminClient.getService(SCIMConfService.class);
    clientAppService = adminClient.getService(ClientAppService.class);
    authModuleService = adminClient.getService(AuthModuleService.class);
    saml2SPEntityService = adminClient.getService(SAML2SPEntityService.class);
    saml2IdPEntityService = adminClient.getService(SAML2IdPEntityService.class);
    authProfileService = adminClient.getService(AuthProfileService.class);
    oidcJWKSService = adminClient.getService(OIDCJWKSService.class);
    waConfigService = adminClient.getService(WAConfigService.class);
}
/**
 * Returns a short random identifier: the first 8 hex characters of a random
 * UUID (the portion before the first dash), handy for unique test names.
 */
protected static String getUUIDString() {
    String randomUuid = UUID.randomUUID().toString();
    return randomUuid.substring(0, 8);
}
/**
 * Convenience factory: builds a single-valued attribute for the given schema.
 */
protected static Attr attr(final String schema, final String value) {
    Attr.Builder builder = new Attr.Builder(schema);
    builder.value(value);
    return builder.build();
}
/**
 * Builds a patch that adds or replaces the given single-valued attribute.
 */
protected static AttrPatch attrAddReplacePatch(final String schema, final String value) {
    Attr payload = attr(schema, value);
    return new AttrPatch.Builder(payload).operation(PatchOperation.ADD_REPLACE).build();
}
/**
 * Follows the Location header returned by a create operation, propagating the
 * current domain and JWT so the GET is authorized like the originating call.
 *
 * @param location URI to fetch (typically Response#getLocation())
 * @param serviceClass service whose underlying CXF client configuration is reused
 * @param resultClass expected entity type
 * @return the entity read from the given location
 */
protected static <T> T getObject(final URI location, final Class<?> serviceClass, final Class<T> resultClass) {
    WebClient webClient = WebClient.fromClient(WebClient.client(adminClient.getService(serviceClass)));
    webClient.accept(clientFactory.getContentType().getMediaType()).to(location.toASCIIString(), false);
    // WebClient#header mutates and returns the same instance, so these calls
    // are equivalent to the usual fluent chain.
    webClient.header(RESTHeaders.DOMAIN, adminClient.getDomain());
    webClient.header(HttpHeaders.AUTHORIZATION, "Bearer " + adminClient.getJWT());
    return webClient.get(resultClass);
}
// Keymaster facilities and the test datasource, injected from the Spring
// test context configured via @SpringJUnitConfig on this class.
@Autowired
protected ConfParamOps confParamOps;
@Autowired
protected ServiceOps serviceOps;
@Autowired
protected DomainOps domainOps;
@Autowired
protected DataSource testDataSource;
/**
 * Creates the given schema and reads it back from the returned Location.
 * If creation did not answer 201 and the response maps to an exception,
 * that exception is rethrown.
 */
@SuppressWarnings("unchecked")
protected <T extends SchemaTO> T createSchema(final SchemaType type, final T schemaTO) {
    Response response = schemaService.create(type, schemaTO);

    boolean created = response.getStatusInfo().getStatusCode() == Response.Status.CREATED.getStatusCode();
    if (!created) {
        Exception mapped = clientFactory.getExceptionMapper().fromResponse(response);
        if (mapped != null) {
            throw (RuntimeException) mapped;
        }
    }

    return (T) getObject(response.getLocation(), SchemaService.class, schemaTO.getClass());
}
/**
 * Creates the given role and reads the persisted instance back from the
 * returned Location; rethrows any exception mapped from a non-201 response.
 */
protected RoleTO createRole(final RoleTO roleTO) {
    Response response = roleService.create(roleTO);

    int status = response.getStatusInfo().getStatusCode();
    if (status != Response.Status.CREATED.getStatusCode()) {
        Exception mapped = clientFactory.getExceptionMapper().fromResponse(response);
        if (mapped != null) {
            throw (RuntimeException) mapped;
        }
    }

    return getObject(response.getLocation(), RoleService.class, RoleTO.class);
}
/**
 * Creates the given report, asserting a 201 answer, and reads the persisted
 * instance back from the returned Location.
 */
protected ReportTO createReport(final ReportTO report) {
    Response created = reportService.create(report);
    assertEquals(Response.Status.CREATED.getStatusCode(), created.getStatusInfo().getStatusCode());
    return getObject(created.getLocation(), ReportService.class, ReportTO.class);
}
/**
 * Creates a notification for the user-create event, then creates a user that
 * triggers it.
 * Relies on fixture groups identified by hard-coded keys below; the creation
 * order (notification first, then user) is what makes the task fire.
 *
 * @return pair of (notification key, created username)
 */
protected Pair<String, String> createNotificationTask(
final boolean active,
final boolean includeAbout,
final TraceLevel traceLevel,
final String sender,
final String subject,
final String... staticRecipients) {

// 1. Create notification
NotificationTO notification = new NotificationTO();
notification.setTraceLevel(traceLevel);
notification.getEvents().add("[LOGIC]:[UserLogic]:[]:[create]:[SUCCESS]");

if (includeAbout) {
// scope the notification to members of a fixture group
notification.getAbouts().put(AnyTypeKind.USER.name(),
SyncopeClient.getUserSearchConditionBuilder().
inGroups("bf825fe1-7320-4a54-bd64-143b5c18ab97").query());
}

notification.setRecipientsFIQL(SyncopeClient.getUserSearchConditionBuilder().
inGroups("f779c0d4-633b-4be5-8f57-32eb478a3ca5").query());
notification.setSelfAsRecipient(true);
notification.setRecipientAttrName("email");
if (staticRecipients != null) {
notification.getStaticRecipients().addAll(List.of(staticRecipients));
}

notification.setSender(sender);
notification.setSubject(subject);
notification.setTemplate("optin");
notification.setActive(active);

Response response = notificationService.create(notification);
notification = getObject(response.getLocation(), NotificationService.class, NotificationTO.class);
assertNotNull(notification);

// 2. create user
// membership of the "about" group above makes this creation match the event
UserCR req = UserITCase.getUniqueSample("notificationtest@syncope.apache.org");
req.getMemberships().add(new MembershipTO.Builder("bf825fe1-7320-4a54-bd64-143b5c18ab97").build());

UserTO userTO = createUser(req).getEntity();
assertNotNull(userTO);
return Pair.of(notification.getKey(), req.getUsername());
}
/**
 * Creates the given user and returns the provisioning result from the
 * response entity; rethrows any exception mapped from a non-201 response.
 */
protected ProvisioningResult<UserTO> createUser(final UserCR req) {
    Response response = userService.create(req);

    if (Response.Status.CREATED.getStatusCode() != response.getStatusInfo().getStatusCode()) {
        Exception mapped = clientFactory.getExceptionMapper().fromResponse(response);
        if (mapped != null) {
            throw (RuntimeException) mapped;
        }
    }

    return response.readEntity(new GenericType<ProvisioningResult<UserTO>>() {
    });
}
/**
 * Applies the given user update request and returns the provisioning outcome.
 */
protected ProvisioningResult<UserTO> updateUser(final UserUR req) {
        Response response = userService.update(req);
        return response.readEntity(new GenericType<>() {
        });
    }
/**
 * Updates the user by diffing the given state against what is currently stored.
 */
protected ProvisioningResult<UserTO> updateUser(final UserTO userTO) {
        // Read the persisted user, then submit only the differences.
        UserTO current = userService.read(userTO.getKey());
        return userService.update(AnyOperations.diff(userTO, current, false)).
                readEntity(new GenericType<>() {
                });
    }
/**
 * Deletes the user with the given key and returns the provisioning outcome.
 */
protected ProvisioningResult<UserTO> deleteUser(final String key) {
        Response response = userService.delete(key);
        return response.readEntity(new GenericType<>() {
        });
    }
/**
 * Creates the given any object via REST, rethrowing any mapped service error.
 */
protected ProvisioningResult<AnyObjectTO> createAnyObject(final AnyObjectCR req) {
        Response response = anyObjectService.create(req);
        if (Response.Status.CREATED.getStatusCode() != response.getStatusInfo().getStatusCode()) {
            // Translate the error payload back into the originating exception, if any.
            Exception mapped = clientFactory.getExceptionMapper().fromResponse(response);
            if (mapped != null) {
                throw (RuntimeException) mapped;
            }
        }
        return response.readEntity(new GenericType<>() {
        });
    }
/**
 * Applies the given any-object update request and returns the provisioning outcome.
 */
protected ProvisioningResult<AnyObjectTO> updateAnyObject(final AnyObjectUR req) {
        Response response = anyObjectService.update(req);
        return response.readEntity(new GenericType<>() {
        });
    }
/**
 * Deletes the any object with the given key and returns the provisioning outcome.
 */
protected ProvisioningResult<AnyObjectTO> deleteAnyObject(final String key) {
        Response response = anyObjectService.delete(key);
        return response.readEntity(new GenericType<>() {
        });
    }
/**
 * Creates the given group via REST, rethrowing any mapped service error.
 */
protected ProvisioningResult<GroupTO> createGroup(final GroupCR req) {
        Response response = groupService.create(req);
        if (Response.Status.CREATED.getStatusCode() != response.getStatusInfo().getStatusCode()) {
            // Translate the error payload back into the originating exception, if any.
            Exception mapped = clientFactory.getExceptionMapper().fromResponse(response);
            if (mapped != null) {
                throw (RuntimeException) mapped;
            }
        }
        return response.readEntity(new GenericType<>() {
        });
    }
/**
 * Applies the given group update request and returns the provisioning outcome.
 */
protected ProvisioningResult<GroupTO> updateGroup(final GroupUR req) {
        Response response = groupService.update(req);
        return response.readEntity(new GenericType<>() {
        });
    }
/**
 * Deletes the group with the given key and returns the provisioning outcome.
 */
protected ProvisioningResult<GroupTO> deleteGroup(final String key) {
        Response response = groupService.delete(key);
        return response.readEntity(new GenericType<>() {
        });
    }
@SuppressWarnings("unchecked")
protected <T extends PolicyTO> T createPolicy(final PolicyType type, final T policy) {
        // Create the policy, surfacing any mapped service error as unchecked.
        Response response = policyService.create(type, policy);
        if (Response.Status.CREATED.getStatusCode() != response.getStatusInfo().getStatusCode()) {
            Exception mapped = clientFactory.getExceptionMapper().fromResponse(response);
            if (mapped != null) {
                throw (RuntimeException) mapped;
            }
        }
        // Read the stored policy back via the Location header.
        return (T) getObject(response.getLocation(), PolicyService.class, policy.getClass());
    }
@SuppressWarnings("unchecked")
protected AuthModuleTO createAuthModule(final AuthModuleTO authModule) {
        // Create the auth module, surfacing any mapped service error as unchecked.
        Response response = authModuleService.create(authModule);
        if (Response.Status.CREATED.getStatusCode() != response.getStatusInfo().getStatusCode()) {
            Exception mapped = clientFactory.getExceptionMapper().fromResponse(response);
            if (mapped != null) {
                throw (RuntimeException) mapped;
            }
        }
        // Read the stored auth module back via the Location header.
        return getObject(response.getLocation(), AuthModuleService.class, authModule.getClass());
    }
/**
 * Creates the given external resource, rethrowing any mapped service error.
 */
protected ResourceTO createResource(final ResourceTO resourceTO) {
        Response response = resourceService.create(resourceTO);
        if (Response.Status.CREATED.getStatusCode() != response.getStatusInfo().getStatusCode()) {
            Exception mapped = clientFactory.getExceptionMapper().fromResponse(response);
            if (mapped != null) {
                throw (RuntimeException) mapped;
            }
        }
        // Read the stored resource back via the Location header.
        return getObject(response.getLocation(), ResourceService.class, ResourceTO.class);
    }
/**
 * Splits a (HTTP 200) multipart batch response into its individual items.
 */
protected List<BatchResponseItem> parseBatchResponse(final Response response) throws IOException {
        assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
        InputStream payload = (InputStream) response.getEntity();
        return BatchPayloadParser.parse(payload, response.getMediaType(), new BatchResponseItem());
    }
@SuppressWarnings({ "unchecked", "rawtypes", "UseOfObsoleteCollectionType" })
protected InitialDirContext getLdapResourceDirContext(final String bindDn, final String bindPwd)
            throws NamingException {

        // Derive host / port / default credentials from the LDAP connector configuration.
        ResourceTO ldapRes = resourceService.read(RESOURCE_NAME_LDAP);
        ConnInstanceTO ldapConn = connectorService.read(ldapRes.getConnector(), Locale.ENGLISH.getLanguage());

        Properties props = new Properties();
        props.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
        props.put(Context.PROVIDER_URL, "ldap://" + ldapConn.getConf("host").get().getValues().get(0)
                + ':' + ldapConn.getConf("port").get().getValues().get(0) + '/');
        props.put(Context.SECURITY_AUTHENTICATION, "simple");
        // Fall back to the connector's own principal / credentials when no explicit bind is given.
        props.put(Context.SECURITY_PRINCIPAL, bindDn != null
                ? bindDn
                : ldapConn.getConf("principal").get().getValues().get(0));
        props.put(Context.SECURITY_CREDENTIALS, bindPwd != null
                ? bindPwd
                : ldapConn.getConf("credentials").get().getValues().get(0));

        return new InitialDirContext(props);
    }
/**
 * Looks up the given DN on the LDAP resource; returns {@code null} on any failure.
 */
protected Object getLdapRemoteObject(final String bindDn, final String bindPwd, final String objectDn) {
        InitialDirContext ctx = null;
        try {
            ctx = getLdapResourceDirContext(bindDn, bindPwd);
            return ctx.lookup(objectDn);
        } catch (Exception e) {
            LOG.error("Could not fetch {}", objectDn, e);
            return null;
        } finally {
            if (ctx != null) {
                try {
                    ctx.close();
                } catch (NamingException ignored) {
                    // best-effort close
                }
            }
        }
    }
/**
 * Creates an entry on the LDAP resource from (DN, attributes); rethrows naming failures.
 */
protected void createLdapRemoteObject(
        final String bindDn,
        final String bindPwd,
        final Pair<String, Set<Attribute>> entryAttrs) throws NamingException {

        InitialDirContext ctx = null;
        try {
            ctx = getLdapResourceDirContext(bindDn, bindPwd);

            BasicAttributes entry = new BasicAttributes();
            entryAttrs.getRight().forEach(entry::put);

            ctx.createSubcontext(entryAttrs.getLeft(), entry);
        } catch (NamingException e) {
            LOG.error("While creating {} with {}", entryAttrs.getLeft(), entryAttrs.getRight(), e);
            throw e;
        } finally {
            if (ctx != null) {
                try {
                    ctx.close();
                } catch (NamingException ignored) {
                    // best-effort close
                }
            }
        }
    }
/**
 * Replaces the given attributes on an existing LDAP entry; failures are only logged.
 */
protected void updateLdapRemoteObject(
        final String bindDn,
        final String bindPwd,
        final String objectDn,
        final Map<String, String> attributes) {

        InitialDirContext ctx = null;
        try {
            ctx = getLdapResourceDirContext(bindDn, bindPwd);

            List<ModificationItem> mods = new ArrayList<>();
            attributes.forEach((name, value) -> mods.add(
                    new ModificationItem(DirContext.REPLACE_ATTRIBUTE, new BasicAttribute(name, value))));

            ctx.modifyAttributes(objectDn, mods.toArray(new ModificationItem[0]));
        } catch (Exception e) {
            LOG.error("While updating {} with {}", objectDn, attributes, e);
        } finally {
            if (ctx != null) {
                try {
                    ctx.close();
                } catch (NamingException ignored) {
                    // best-effort close
                }
            }
        }
    }
/**
 * Removes the entry with the given DN from the LDAP resource; failures are only logged.
 */
protected void removeLdapRemoteObject(
        final String bindDn,
        final String bindPwd,
        final String objectDn) {

        InitialDirContext ctx = null;
        try {
            ctx = getLdapResourceDirContext(bindDn, bindPwd);
            ctx.destroySubcontext(objectDn);
        } catch (Exception e) {
            LOG.error("While removing {}", objectDn, e);
        } finally {
            if (ctx != null) {
                try {
                    ctx.close();
                } catch (NamingException ignored) {
                    // best-effort close
                }
            }
        }
    }
/**
 * Polls the given SQL query (1s interval, up to {@code maxWaitSeconds}) until
 * it yields a non-null value, which is then returned.
 */
protected <T> T queryForObject(
            final JdbcTemplate jdbcTemplate,
            final int maxWaitSeconds,
            final String sql, final Class<T> requiredType, final Object... args) {

        AtomicReference<T> holder = new AtomicReference<>();
        await().atMost(maxWaitSeconds, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).until(() -> {
            try {
                holder.set(jdbcTemplate.queryForObject(sql, requiredType, args));
            } catch (Exception e) {
                // not (yet) available: keep polling
                return false;
            }
            return holder.get() != null;
        });
        return holder.get();
    }
/**
 * Builds an OIDC RP client app backed by freshly created auth and access policies.
 */
protected OIDCRPClientAppTO buildOIDCRP() {
        // Auth policy backing the client app
        AuthPolicyTO authPolicy = new AuthPolicyTO();
        authPolicy.setKey("AuthPolicyTest_" + getUUIDString());
        authPolicy.setName("Authentication Policy");
        authPolicy = createPolicy(PolicyType.AUTH, authPolicy);
        assertNotNull(authPolicy);

        // Access policy backing the client app
        AccessPolicyTO accessPolicy = new AccessPolicyTO();
        accessPolicy.setKey("AccessPolicyTest_" + getUUIDString());
        accessPolicy.setName("Access policy");
        accessPolicy = createPolicy(PolicyType.ACCESS, accessPolicy);
        assertNotNull(accessPolicy);

        OIDCRPClientAppTO clientApp = new OIDCRPClientAppTO();
        clientApp.setName("ExampleRP_" + getUUIDString());
        clientApp.setClientAppId(UUID.randomUUID().getMostSignificantBits() & Long.MAX_VALUE);
        clientApp.setDescription("Example OIDC RP application");
        clientApp.setClientId("clientId_" + getUUIDString());
        clientApp.setClientSecret("secret");
        clientApp.setSubjectType(OIDCSubjectType.PUBLIC);
        clientApp.getSupportedGrantTypes().add(OIDCGrantType.authorization_code);
        clientApp.getSupportedResponseTypes().add(OIDCResponseType.CODE);
        clientApp.setAuthPolicy(authPolicy.getKey());
        clientApp.setAccessPolicy(accessPolicy.getKey());
        return clientApp;
    }
/**
 * Builds a SAML 2.0 SP client app backed by freshly created auth and access policies.
 */
protected SAML2SPClientAppTO buildSAML2SP() {
        // Auth policy backing the client app
        AuthPolicyTO authPolicy = new AuthPolicyTO();
        authPolicy.setKey("AuthPolicyTest_" + getUUIDString());
        authPolicy.setName("Authentication Policy");
        authPolicy = createPolicy(PolicyType.AUTH, authPolicy);
        assertNotNull(authPolicy);

        // Access policy backing the client app
        AccessPolicyTO accessPolicy = new AccessPolicyTO();
        accessPolicy.setKey("AccessPolicyTest_" + getUUIDString());
        accessPolicy.setName("Access policy");
        accessPolicy = createPolicy(PolicyType.ACCESS, accessPolicy);
        assertNotNull(accessPolicy);

        SAML2SPClientAppTO clientApp = new SAML2SPClientAppTO();
        clientApp.setName("ExampleSAML2SP_" + getUUIDString());
        clientApp.setClientAppId(UUID.randomUUID().getMostSignificantBits() & Long.MAX_VALUE);
        clientApp.setDescription("Example SAML 2.0 service provider");
        clientApp.setEntityId("SAML2SPEntityId_" + getUUIDString());
        clientApp.setMetadataLocation("file:./test.xml");
        clientApp.setRequiredNameIdFormat(SAML2SPNameId.EMAIL_ADDRESS);
        clientApp.setEncryptionOptional(true);
        clientApp.setEncryptAssertions(true);
        clientApp.setAuthPolicy(authPolicy.getKey());
        clientApp.setAccessPolicy(accessPolicy.getKey());
        return clientApp;
    }
@SuppressWarnings("unchecked")
protected <T extends ClientAppTO> T createClientApp(final ClientAppType type, final T clientAppTO) {
        // Create the client app, surfacing any mapped service error as unchecked.
        Response response = clientAppService.create(type, clientAppTO);
        if (Response.Status.CREATED.getStatusCode() != response.getStatusInfo().getStatusCode()) {
            Exception mapped = clientFactory.getExceptionMapper().fromResponse(response);
            if (mapped != null) {
                throw (RuntimeException) mapped;
            }
        }
        // Read the stored client app back via the Location header.
        return (T) getObject(response.getLocation(), ClientAppService.class, clientAppTO.getClass());
    }
/**
 * Builds (without persisting) an auth policy referencing the given auth module.
 */
protected AuthPolicyTO buildAuthPolicyTO(final String authModuleKey) {
        DefaultAuthPolicyConf conf = new DefaultAuthPolicyConf();
        conf.getAuthModules().add(authModuleKey);

        AuthPolicyTO policy = new AuthPolicyTO();
        policy.setName("Test Authentication policy");
        policy.setConf(conf);
        return policy;
    }
/**
 * Builds (without persisting) an active attribute release policy for cn / givenName.
 */
protected AttrReleasePolicyTO buildAttrReleasePolicyTO() {
        DefaultAttrReleasePolicyConf conf = new DefaultAttrReleasePolicyConf();
        conf.getAllowedAttrs().addAll(List.of("cn", "givenName"));
        conf.getIncludeOnlyAttrs().add("cn");

        AttrReleasePolicyTO policy = new AttrReleasePolicyTO();
        policy.setName("Test Attribute Release policy");
        policy.setStatus(Boolean.TRUE);
        policy.setConf(conf);
        return policy;
    }
/**
 * Builds (without persisting) an enabled access policy requiring a cn attribute.
 */
protected AccessPolicyTO buildAccessPolicyTO() {
        DefaultAccessPolicyConf conf = new DefaultAccessPolicyConf();
        conf.getRequiredAttrs().add(new Attr.Builder("cn").values("admin", "Admin", "TheAdmin").build());

        AccessPolicyTO policy = new AccessPolicyTO();
        policy.setName("Test Access policy");
        policy.setEnabled(true);
        policy.setConf(conf);
        return policy;
    }
/**
 * Polls the audit service (1s interval, up to {@code maxWaitSeconds} attempts)
 * until the given query returns at least one entry, or the wait is exhausted.
 *
 * @param query audit query to run
 * @param maxWaitSeconds maximum number of one-second polling attempts
 * @return the (possibly empty) audit entries found
 */
protected List<AuditEntry> query(final AuditQuery query, final int maxWaitSeconds) {
        List<AuditEntry> results = List.of();
        boolean interrupted = false;
        int attempts = 0;
        do {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // FIX: the original swallowed the interruption with the flag cleared,
                // turning the sleep into a busy loop; restore the interrupt status
                // and stop polling after one final search.
                Thread.currentThread().interrupt();
                interrupted = true;
            }
            results = auditService.search(query).getResult();
            attempts++;
        } while (!interrupted && results.isEmpty() && attempts < maxWaitSeconds);
        return results;
    }
}
| |
package org.dita.dost.reader;
import com.google.common.collect.ImmutableMap;
import org.dita.dost.TestUtils;
import org.dita.dost.log.DITAOTJavaLogger;
import org.dita.dost.util.Job;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.dita.dost.util.Constants.INPUT_DIR_URI;
import static org.dita.dost.util.URLUtils.toURI;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
public class ChunkMapReaderTest {
// Test fixture root for this class, as resolved by the DITA-OT test utilities.
final File resourceDir = TestUtils.getResourceDir(ChunkMapReaderTest.class);
// Source fixtures (maps and topics) live under <resourceDir>/src.
final File srcDir = new File(resourceDir, "src");
// Per-test scratch directory; created in setUp(), so null until then.
File tempDir = null;
@Before
public void setUp() throws Exception {
        // Fresh scratch directory per test, pre-creating the layout the fixtures expect.
        tempDir = TestUtils.createTempDir(getClass());
        for (final String dir : Arrays.asList("maps", "topics", "maps" + File.separator + "topics")) {
            new File(tempDir, dir).mkdirs();
        }
    }
@Test
public void testRead() throws IOException {
        // Stage the generated map plus every referenced topic in the scratch dir.
        final Job job = new Job(tempDir);
        job.setProperty(INPUT_DIR_URI, srcDir.toURI().toString());

        final File genMap = new File(tempDir, "maps" + File.separator + "gen.ditamap");
        TestUtils.copy(new File(srcDir, "gen.ditamap"), genMap);
        job.add(new Job.FileInfo.Builder().uri(toURI("maps/gen.ditamap")).build());
        for (final String srcFile : getSrcFiles()) {
            TestUtils.copy(new File(srcDir, "topic.dita"), new File(tempDir.toURI().resolve(srcFile)));
            job.add(new Job.FileInfo.Builder().uri(toURI(srcFile)).build());
        }

        final ChunkMapReader reader = new ChunkMapReader();
        reader.setLogger(new TestUtils.TestLogger());
        reader.setJob(job);
        reader.read(genMap);

        assertEquals(getActChangeTable(), reader.getChangeTable());
        assertEquals(getActConflictTable(), reader.getConflicTable());
    }
@Test
public void testMissingSource() throws IOException, URISyntaxException {
        final Job job = createJob("missing.ditamap", "2.dita");

        final ChunkMapReader reader = new ChunkMapReader();
        reader.setLogger(new TestUtils.TestLogger());
        reader.setJob(job);
        reader.read(new File(tempDir, "missing.ditamap"));

        // Only the existing topic appears in the change table.
        final Map<URI, URI> expChange = ImmutableMap.<URI, URI>builder()
                .put(prefixTemp("2.dita"), prefixTemp("2.dita"))
                .build();
        assertEquals(expChange, reader.getChangeTable());
        assertEquals(Collections.emptyMap(), reader.getConflicTable());
        // The unresolvable topic must not have been registered with the job.
        assertNull(job.getFileInfo(new URI("missing.dita")));
    }
@Test
public void testChunkFullMap() throws IOException {
        final Job job = createJob("map.ditamap", "1.dita", "2.dita", "3.dita");

        final ChunkMapReader reader = new ChunkMapReader();
        reader.setLogger(new DITAOTJavaLogger());
        reader.setJob(job);
        reader.read(new File(tempDir, "map.ditamap"));

        // All three topics collapse into a single map.dita chunk.
        final Map<URI, URI> expChange = ImmutableMap.<URI, URI>builder()
                .put(prefixTemp("map.dita"), prefixTemp("map.dita"))
                .put(prefixTemp("1.dita"), prefixTemp("map.dita#topic_qft_qwn_hv"))
                .put(prefixTemp("1.dita#topic_qft_qwn_hv"), prefixTemp("map.dita#topic_qft_qwn_hv"))
                .put(prefixTemp("2.dita"), prefixTemp("map.dita#unique_0"))
                .put(prefixTemp("2.dita#topic_qft_qwn_hv"), prefixTemp("map.dita#unique_0"))
                .put(prefixTemp("3.dita"), prefixTemp("map.dita#unique_1"))
                .put(prefixTemp("3.dita#topic_qft_qwn_hv"), prefixTemp("map.dita#unique_1"))
                .build();
        assertEquals(expChange, reader.getChangeTable());
        assertEquals(Collections.emptyMap(), reader.getConflicTable());
    }
@Test
public void testExistingGeneratedFile() throws IOException, URISyntaxException {
        // Chunk0.dita already exists, forcing the generated names to be renumbered.
        final Job job = createJob("conflict.ditamap", "2.dita", "Chunk0.dita");

        final ChunkMapReader reader = new ChunkMapReader();
        reader.setLogger(new DITAOTJavaLogger());
        reader.setJob(job);
        reader.read(new File(tempDir, "conflict.ditamap"));

        final Map<URI, URI> expChange = ImmutableMap.<URI, URI>builder()
                .put(prefixTemp("Chunk0.dita"), prefixTemp("Chunk0.dita"))
                .put(prefixTemp("Chunk2.dita"), prefixTemp("Chunk2.dita"))
                .put(prefixTemp("Chunk1.dita"), prefixTemp("Chunk2.dita#Chunk1"))
                .put(prefixTemp("Chunk1.dita#Chunk1"), prefixTemp("Chunk2.dita#Chunk1"))
                .put(prefixTemp("2.dita"), prefixTemp("Chunk2.dita#topic_qft_qwn_hv"))
                .put(prefixTemp("2.dita#topic_qft_qwn_hv"), prefixTemp("Chunk2.dita#topic_qft_qwn_hv"))
                .build();
        assertEquals(expChange, reader.getChangeTable());

        final Map<URI, URI> expConflict = ImmutableMap.<URI, URI>builder()
                .put(prefixTemp("Chunk2.dita"), prefixTemp("Chunk1.dita"))
                .build();
        assertEquals(expConflict, reader.getConflicTable());
    }
/**
 * Stages the named map plus one copy of the canonical topic per given topic
 * path into the scratch directory, registering everything with a new Job.
 */
private Job createJob(final String map, final String... topics) throws IOException {
        final Job job = new Job(tempDir);
        job.setProperty(INPUT_DIR_URI, srcDir.toURI().toString());

        TestUtils.copy(new File(srcDir, map), new File(tempDir, map));
        job.add(new Job.FileInfo.Builder().uri(toURI(map)).build());

        for (final String topic : topics) {
            TestUtils.copy(new File(srcDir, "topic.dita"), new File(tempDir.toURI().resolve(topic)));
            job.add(new Job.FileInfo.Builder().uri(toURI(topic)).build());
        }
        return job;
    }
// Relative paths (against the scratch directory) of every topic file that
// gen.ditamap references; testRead() stages a copy of topic.dita at each
// before parsing the chunk map.
private List<String> getSrcFiles() {
        return Arrays.asList(
                "maps/0.dita",
                "maps/2.dita",
                "maps/3.dita",
                "maps/5.dita",
                "maps/6.dita",
                "maps/8.dita",
                "maps/9.dita",
                "maps/11.dita",
                "maps/12.dita",
                "maps/13.dita",
                "maps/14.dita",
                "16.dita",
                "maps/17.dita",
                "19.dita",
                "maps/20.dita",
                "22.dita",
                "maps/23.dita",
                "25.dita",
                "maps/26.dita",
                "27.dita",
                "maps/28.dita",
                "maps/topics/30.dita",
                "maps/31.dita",
                "maps/topics/33.dita",
                "maps/34.dita",
                "maps/topics/36.dita",
                "maps/37.dita",
                "maps/topics/39.dita",
                "maps/40.dita",
                "maps/topics/41.dita",
                "maps/42.dita",
                "topics/44.dita",
                "maps/45.dita",
                "topics/47.dita",
                "maps/48.dita",
                "topics/50.dita",
                "maps/51.dita",
                "topics/53.dita",
                "maps/54.dita",
                "topics/55.dita",
                "56.dita",
                "maps/58.dita",
                "59.dita",
                "maps/61.dita",
                "62.dita",
                "maps/64.dita",
                "65.dita",
                "maps/67.dita",
                "68.dita",
                "maps/69.dita",
                "70.dita",
                "72.dita",
                "73.dita",
                "75.dita",
                "76.dita",
                "78.dita",
                "79.dita",
                "81.dita",
                "82.dita",
                "83.dita",
                "84.dita",
                "maps/topics/86.dita",
                "87.dita",
                "maps/topics/89.dita",
                "90.dita",
                "maps/topics/92.dita",
                "93.dita",
                "maps/topics/95.dita",
                "96.dita",
                "maps/topics/97.dita",
                "98.dita",
                "topics/100.dita",
                "101.dita",
                "topics/103.dita",
                "104.dita",
                "topics/106.dita",
                "107.dita",
                "topics/109.dita",
                "110.dita",
                "topics/111.dita",
                "maps/topics/112.dita",
                "maps/114.dita",
                "maps/topics/115.dita",
                "maps/117.dita",
                "maps/topics/118.dita",
                "maps/120.dita",
                "maps/topics/121.dita",
                "maps/123.dita",
                "maps/topics/124.dita",
                "maps/125.dita",
                "maps/topics/126.dita",
                "128.dita",
                "maps/topics/129.dita",
                "131.dita",
                "maps/topics/132.dita",
                "134.dita",
                "maps/topics/135.dita",
                "137.dita",
                "maps/topics/138.dita",
                "139.dita",
                "maps/topics/140.dita",
                "maps/topics/142.dita",
                "maps/topics/143.dita",
                "maps/topics/145.dita",
                "maps/topics/146.dita",
                "maps/topics/148.dita",
                "maps/topics/149.dita",
                "maps/topics/151.dita",
                "maps/topics/152.dita",
                "maps/topics/153.dita",
                "maps/topics/154.dita",
                "topics/156.dita",
                "maps/topics/157.dita",
                "topics/159.dita",
                "maps/topics/160.dita",
                "topics/162.dita",
                "maps/topics/163.dita",
                "topics/165.dita",
                "maps/topics/166.dita",
                "topics/167.dita",
                "topics/168.dita",
                "maps/170.dita",
                "topics/171.dita",
                "maps/173.dita",
                "topics/174.dita",
                "maps/176.dita",
                "topics/177.dita",
                "maps/179.dita",
                "topics/180.dita",
                "maps/181.dita",
                "topics/182.dita",
                "184.dita",
                "topics/185.dita",
                "187.dita",
                "topics/188.dita",
                "190.dita",
                "topics/191.dita",
                "193.dita",
                "topics/194.dita",
                "195.dita",
                "topics/196.dita",
                "maps/topics/198.dita",
                "topics/199.dita",
                "maps/topics/201.dita",
                "topics/202.dita",
                "maps/topics/204.dita",
                "topics/205.dita",
                "maps/topics/207.dita",
                "topics/208.dita",
                "maps/topics/209.dita",
                "topics/210.dita",
                "topics/212.dita",
                "topics/213.dita",
                "topics/215.dita",
                "topics/216.dita",
                "topics/218.dita",
                "topics/219.dita",
                "topics/221.dita",
                "topics/222.dita",
                "topics/223.dita");
    }
/**
 * Expected conflict table for gen.ditamap: generated chunk file mapped to the
 * pre-existing file it collided with.
 */
private Map<URI, URI> getActConflictTable() {
        return ImmutableMap.<URI, URI>builder()
                .put(prefixTemp("maps/Chunk82.dita"), prefixTemp("topics/194.dita"))
                .put(prefixTemp("maps/Chunk88.dita"), prefixTemp("topics/208.dita"))
                .put(prefixTemp("maps/Chunk34.dita"), prefixTemp("82.dita"))
                .put(prefixTemp("maps/Chunk70.dita"), prefixTemp("maps/topics/166.dita"))
                .put(prefixTemp("maps/Chunk64.dita"), prefixTemp("maps/topics/152.dita"))
                .put(prefixTemp("maps/Chunk10.dita"), prefixTemp("maps/26.dita"))
                .put(prefixTemp("maps/Chunk94.dita"), prefixTemp("topics/222.dita"))
                .put(prefixTemp("maps/Chunk46.dita"), prefixTemp("110.dita"))
                .put(prefixTemp("maps/Chunk40.dita"), prefixTemp("96.dita"))
                .put(prefixTemp("maps/Chunk16.dita"), prefixTemp("maps/40.dita"))
                .put(prefixTemp("maps/Chunk4.dita"), prefixTemp("maps/12.dita"))
                .put(prefixTemp("maps/Chunk76.dita"), prefixTemp("topics/180.dita"))
                .put(prefixTemp("maps/Chunk22.dita"), prefixTemp("maps/54.dita"))
                .put(prefixTemp("maps/Chunk58.dita"), prefixTemp("maps/topics/138.dita"))
                .put(prefixTemp("maps/Chunk52.dita"), prefixTemp("maps/topics/124.dita"))
                .put(prefixTemp("maps/Chunk28.dita"), prefixTemp("68.dita"))
                .build();
    }
/**
 * Builds the expected URI rewrite table for the chunk action: every
 * pre-chunking URI (listed both with and without its topic fragment) is
 * mapped to the post-chunking URI, including the fragment it should point
 * at inside the chunked file. All paths are resolved against the temporary
 * test directory via {@link #prefixTemp}.
 *
 * <p>Entries come in groups of five that share one chunk target:
 * the target mapped to itself, a first source topic mapped (bare and with
 * its {@code #topic_qft_qwn_hv} fragment) to {@code target#topic_qft_qwn_hv},
 * and a second source topic mapped the same way to a generated
 * {@code target#unique_N} fragment.
 *
 * @return immutable original-URI to rewritten-URI mapping
 */
private Map<URI, URI> getActChangeTable() {
final ImmutableMap.Builder<URI, URI> b = ImmutableMap.builder();
b.put(prefixTemp("maps/1.dita"), prefixTemp("maps/1.dita"));
b.put(prefixTemp("maps/0.dita"), prefixTemp("maps/1.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/0.dita#topic_qft_qwn_hv"), prefixTemp("maps/1.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/2.dita"), prefixTemp("maps/1.dita#unique_0"));
b.put(prefixTemp("maps/2.dita#topic_qft_qwn_hv"), prefixTemp("maps/1.dita#unique_0"));
b.put(prefixTemp("4.dita"), prefixTemp("4.dita"));
b.put(prefixTemp("maps/3.dita"), prefixTemp("4.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/3.dita#topic_qft_qwn_hv"), prefixTemp("4.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/5.dita"), prefixTemp("4.dita#unique_1"));
b.put(prefixTemp("maps/5.dita#topic_qft_qwn_hv"), prefixTemp("4.dita#unique_1"));
b.put(prefixTemp("maps/topics/7.dita"), prefixTemp("maps/topics/7.dita"));
b.put(prefixTemp("maps/6.dita"), prefixTemp("maps/topics/7.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/6.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/7.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/8.dita"), prefixTemp("maps/topics/7.dita#unique_2"));
b.put(prefixTemp("maps/8.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/7.dita#unique_2"));
b.put(prefixTemp("topics/10.dita"), prefixTemp("topics/10.dita"));
b.put(prefixTemp("maps/9.dita"), prefixTemp("topics/10.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/9.dita#topic_qft_qwn_hv"), prefixTemp("topics/10.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/11.dita"), prefixTemp("topics/10.dita#unique_3"));
b.put(prefixTemp("maps/11.dita#topic_qft_qwn_hv"), prefixTemp("topics/10.dita#unique_3"));
b.put(prefixTemp("maps/Chunk4.dita"), prefixTemp("maps/Chunk4.dita"));
b.put(prefixTemp("maps/12.dita"), prefixTemp("maps/Chunk4.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/12.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk4.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/13.dita"), prefixTemp("maps/Chunk4.dita#unique_5"));
b.put(prefixTemp("maps/13.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk4.dita#unique_5"));
b.put(prefixTemp("maps/15.dita"), prefixTemp("maps/15.dita"));
b.put(prefixTemp("maps/14.dita"), prefixTemp("maps/15.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/14.dita#topic_qft_qwn_hv"), prefixTemp("maps/15.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("16.dita"), prefixTemp("maps/15.dita#unique_6"));
b.put(prefixTemp("16.dita#topic_qft_qwn_hv"), prefixTemp("maps/15.dita#unique_6"));
b.put(prefixTemp("18.dita"), prefixTemp("18.dita"));
b.put(prefixTemp("maps/17.dita"), prefixTemp("18.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/17.dita#topic_qft_qwn_hv"), prefixTemp("18.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("19.dita"), prefixTemp("18.dita#unique_7"));
b.put(prefixTemp("19.dita#topic_qft_qwn_hv"), prefixTemp("18.dita#unique_7"));
b.put(prefixTemp("maps/topics/21.dita"), prefixTemp("maps/topics/21.dita"));
b.put(prefixTemp("maps/20.dita"), prefixTemp("maps/topics/21.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/20.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/21.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("22.dita"), prefixTemp("maps/topics/21.dita#unique_8"));
b.put(prefixTemp("22.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/21.dita#unique_8"));
b.put(prefixTemp("topics/24.dita"), prefixTemp("topics/24.dita"));
b.put(prefixTemp("maps/23.dita"), prefixTemp("topics/24.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/23.dita#topic_qft_qwn_hv"), prefixTemp("topics/24.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("25.dita"), prefixTemp("topics/24.dita#unique_9"));
b.put(prefixTemp("25.dita#topic_qft_qwn_hv"), prefixTemp("topics/24.dita#unique_9"));
b.put(prefixTemp("maps/Chunk10.dita"), prefixTemp("maps/Chunk10.dita"));
b.put(prefixTemp("maps/26.dita"), prefixTemp("maps/Chunk10.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/26.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk10.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("27.dita"), prefixTemp("maps/Chunk10.dita#unique_11"));
b.put(prefixTemp("27.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk10.dita#unique_11"));
b.put(prefixTemp("maps/29.dita"), prefixTemp("maps/29.dita"));
b.put(prefixTemp("maps/28.dita"), prefixTemp("maps/29.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/28.dita#topic_qft_qwn_hv"), prefixTemp("maps/29.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/30.dita"), prefixTemp("maps/29.dita#unique_12"));
b.put(prefixTemp("maps/topics/30.dita#topic_qft_qwn_hv"), prefixTemp("maps/29.dita#unique_12"));
b.put(prefixTemp("32.dita"), prefixTemp("32.dita"));
b.put(prefixTemp("maps/31.dita"), prefixTemp("32.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/31.dita#topic_qft_qwn_hv"), prefixTemp("32.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/33.dita"), prefixTemp("32.dita#unique_13"));
b.put(prefixTemp("maps/topics/33.dita#topic_qft_qwn_hv"), prefixTemp("32.dita#unique_13"));
b.put(prefixTemp("maps/topics/35.dita"), prefixTemp("maps/topics/35.dita"));
b.put(prefixTemp("maps/34.dita"), prefixTemp("maps/topics/35.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/34.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/35.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/36.dita"), prefixTemp("maps/topics/35.dita#unique_14"));
b.put(prefixTemp("maps/topics/36.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/35.dita#unique_14"));
b.put(prefixTemp("topics/38.dita"), prefixTemp("topics/38.dita"));
b.put(prefixTemp("maps/37.dita"), prefixTemp("topics/38.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/37.dita#topic_qft_qwn_hv"), prefixTemp("topics/38.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/39.dita"), prefixTemp("topics/38.dita#unique_15"));
b.put(prefixTemp("maps/topics/39.dita#topic_qft_qwn_hv"), prefixTemp("topics/38.dita#unique_15"));
b.put(prefixTemp("maps/Chunk16.dita"), prefixTemp("maps/Chunk16.dita"));
b.put(prefixTemp("maps/40.dita"), prefixTemp("maps/Chunk16.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/40.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk16.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/41.dita"), prefixTemp("maps/Chunk16.dita#unique_17"));
b.put(prefixTemp("maps/topics/41.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk16.dita#unique_17"));
b.put(prefixTemp("maps/43.dita"), prefixTemp("maps/43.dita"));
b.put(prefixTemp("maps/42.dita"), prefixTemp("maps/43.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/42.dita#topic_qft_qwn_hv"), prefixTemp("maps/43.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/44.dita"), prefixTemp("maps/43.dita#unique_18"));
b.put(prefixTemp("topics/44.dita#topic_qft_qwn_hv"), prefixTemp("maps/43.dita#unique_18"));
b.put(prefixTemp("46.dita"), prefixTemp("46.dita"));
b.put(prefixTemp("maps/45.dita"), prefixTemp("46.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/45.dita#topic_qft_qwn_hv"), prefixTemp("46.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/47.dita"), prefixTemp("46.dita#unique_19"));
b.put(prefixTemp("topics/47.dita#topic_qft_qwn_hv"), prefixTemp("46.dita#unique_19"));
b.put(prefixTemp("maps/topics/49.dita"), prefixTemp("maps/topics/49.dita"));
b.put(prefixTemp("maps/48.dita"), prefixTemp("maps/topics/49.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/48.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/49.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/50.dita"), prefixTemp("maps/topics/49.dita#unique_20"));
b.put(prefixTemp("topics/50.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/49.dita#unique_20"));
b.put(prefixTemp("topics/52.dita"), prefixTemp("topics/52.dita"));
b.put(prefixTemp("maps/51.dita"), prefixTemp("topics/52.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/51.dita#topic_qft_qwn_hv"), prefixTemp("topics/52.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/53.dita"), prefixTemp("topics/52.dita#unique_21"));
b.put(prefixTemp("topics/53.dita#topic_qft_qwn_hv"), prefixTemp("topics/52.dita#unique_21"));
b.put(prefixTemp("maps/Chunk22.dita"), prefixTemp("maps/Chunk22.dita"));
b.put(prefixTemp("maps/54.dita"), prefixTemp("maps/Chunk22.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/54.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk22.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/55.dita"), prefixTemp("maps/Chunk22.dita#unique_23"));
b.put(prefixTemp("topics/55.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk22.dita#unique_23"));
b.put(prefixTemp("maps/57.dita"), prefixTemp("maps/57.dita"));
b.put(prefixTemp("56.dita"), prefixTemp("maps/57.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("56.dita#topic_qft_qwn_hv"), prefixTemp("maps/57.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/58.dita"), prefixTemp("maps/57.dita#unique_24"));
b.put(prefixTemp("maps/58.dita#topic_qft_qwn_hv"), prefixTemp("maps/57.dita#unique_24"));
b.put(prefixTemp("60.dita"), prefixTemp("60.dita"));
b.put(prefixTemp("59.dita"), prefixTemp("60.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("59.dita#topic_qft_qwn_hv"), prefixTemp("60.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/61.dita"), prefixTemp("60.dita#unique_25"));
b.put(prefixTemp("maps/61.dita#topic_qft_qwn_hv"), prefixTemp("60.dita#unique_25"));
b.put(prefixTemp("maps/topics/63.dita"), prefixTemp("maps/topics/63.dita"));
b.put(prefixTemp("62.dita"), prefixTemp("maps/topics/63.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("62.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/63.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/64.dita"), prefixTemp("maps/topics/63.dita#unique_26"));
b.put(prefixTemp("maps/64.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/63.dita#unique_26"));
b.put(prefixTemp("topics/66.dita"), prefixTemp("topics/66.dita"));
b.put(prefixTemp("65.dita"), prefixTemp("topics/66.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("65.dita#topic_qft_qwn_hv"), prefixTemp("topics/66.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/67.dita"), prefixTemp("topics/66.dita#unique_27"));
b.put(prefixTemp("maps/67.dita#topic_qft_qwn_hv"), prefixTemp("topics/66.dita#unique_27"));
b.put(prefixTemp("maps/Chunk28.dita"), prefixTemp("maps/Chunk28.dita"));
b.put(prefixTemp("68.dita"), prefixTemp("maps/Chunk28.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("68.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk28.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/69.dita"), prefixTemp("maps/Chunk28.dita#unique_29"));
b.put(prefixTemp("maps/69.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk28.dita#unique_29"));
b.put(prefixTemp("maps/71.dita"), prefixTemp("maps/71.dita"));
b.put(prefixTemp("70.dita"), prefixTemp("maps/71.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("70.dita#topic_qft_qwn_hv"), prefixTemp("maps/71.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("72.dita"), prefixTemp("maps/71.dita#unique_30"));
b.put(prefixTemp("72.dita#topic_qft_qwn_hv"), prefixTemp("maps/71.dita#unique_30"));
b.put(prefixTemp("74.dita"), prefixTemp("74.dita"));
b.put(prefixTemp("73.dita"), prefixTemp("74.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("73.dita#topic_qft_qwn_hv"), prefixTemp("74.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("75.dita"), prefixTemp("74.dita#unique_31"));
b.put(prefixTemp("75.dita#topic_qft_qwn_hv"), prefixTemp("74.dita#unique_31"));
b.put(prefixTemp("maps/topics/77.dita"), prefixTemp("maps/topics/77.dita"));
b.put(prefixTemp("76.dita"), prefixTemp("maps/topics/77.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("76.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/77.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("78.dita"), prefixTemp("maps/topics/77.dita#unique_32"));
b.put(prefixTemp("78.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/77.dita#unique_32"));
b.put(prefixTemp("topics/80.dita"), prefixTemp("topics/80.dita"));
b.put(prefixTemp("79.dita"), prefixTemp("topics/80.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("79.dita#topic_qft_qwn_hv"), prefixTemp("topics/80.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("81.dita"), prefixTemp("topics/80.dita#unique_33"));
b.put(prefixTemp("81.dita#topic_qft_qwn_hv"), prefixTemp("topics/80.dita#unique_33"));
b.put(prefixTemp("maps/Chunk34.dita"), prefixTemp("maps/Chunk34.dita"));
b.put(prefixTemp("82.dita"), prefixTemp("maps/Chunk34.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("82.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk34.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("83.dita"), prefixTemp("maps/Chunk34.dita#unique_35"));
b.put(prefixTemp("83.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk34.dita#unique_35"));
b.put(prefixTemp("maps/85.dita"), prefixTemp("maps/85.dita"));
b.put(prefixTemp("84.dita"), prefixTemp("maps/85.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("84.dita#topic_qft_qwn_hv"), prefixTemp("maps/85.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/86.dita"), prefixTemp("maps/85.dita#unique_36"));
b.put(prefixTemp("maps/topics/86.dita#topic_qft_qwn_hv"), prefixTemp("maps/85.dita#unique_36"));
b.put(prefixTemp("88.dita"), prefixTemp("88.dita"));
b.put(prefixTemp("87.dita"), prefixTemp("88.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("87.dita#topic_qft_qwn_hv"), prefixTemp("88.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/89.dita"), prefixTemp("88.dita#unique_37"));
b.put(prefixTemp("maps/topics/89.dita#topic_qft_qwn_hv"), prefixTemp("88.dita#unique_37"));
b.put(prefixTemp("maps/topics/91.dita"), prefixTemp("maps/topics/91.dita"));
b.put(prefixTemp("90.dita"), prefixTemp("maps/topics/91.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("90.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/91.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/92.dita"), prefixTemp("maps/topics/91.dita#unique_38"));
b.put(prefixTemp("maps/topics/92.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/91.dita#unique_38"));
b.put(prefixTemp("topics/94.dita"), prefixTemp("topics/94.dita"));
b.put(prefixTemp("93.dita"), prefixTemp("topics/94.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("93.dita#topic_qft_qwn_hv"), prefixTemp("topics/94.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/95.dita"), prefixTemp("topics/94.dita#unique_39"));
b.put(prefixTemp("maps/topics/95.dita#topic_qft_qwn_hv"), prefixTemp("topics/94.dita#unique_39"));
b.put(prefixTemp("maps/Chunk40.dita"), prefixTemp("maps/Chunk40.dita"));
b.put(prefixTemp("96.dita"), prefixTemp("maps/Chunk40.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("96.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk40.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/97.dita"), prefixTemp("maps/Chunk40.dita#unique_41"));
b.put(prefixTemp("maps/topics/97.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk40.dita#unique_41"));
b.put(prefixTemp("maps/99.dita"), prefixTemp("maps/99.dita"));
b.put(prefixTemp("98.dita"), prefixTemp("maps/99.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("98.dita#topic_qft_qwn_hv"), prefixTemp("maps/99.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/100.dita"), prefixTemp("maps/99.dita#unique_42"));
b.put(prefixTemp("topics/100.dita#topic_qft_qwn_hv"), prefixTemp("maps/99.dita#unique_42"));
b.put(prefixTemp("102.dita"), prefixTemp("102.dita"));
b.put(prefixTemp("101.dita"), prefixTemp("102.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("101.dita#topic_qft_qwn_hv"), prefixTemp("102.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/103.dita"), prefixTemp("102.dita#unique_43"));
b.put(prefixTemp("topics/103.dita#topic_qft_qwn_hv"), prefixTemp("102.dita#unique_43"));
b.put(prefixTemp("maps/topics/105.dita"), prefixTemp("maps/topics/105.dita"));
b.put(prefixTemp("104.dita"), prefixTemp("maps/topics/105.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("104.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/105.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/106.dita"), prefixTemp("maps/topics/105.dita#unique_44"));
b.put(prefixTemp("topics/106.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/105.dita#unique_44"));
b.put(prefixTemp("topics/108.dita"), prefixTemp("topics/108.dita"));
b.put(prefixTemp("107.dita"), prefixTemp("topics/108.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("107.dita#topic_qft_qwn_hv"), prefixTemp("topics/108.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/109.dita"), prefixTemp("topics/108.dita#unique_45"));
b.put(prefixTemp("topics/109.dita#topic_qft_qwn_hv"), prefixTemp("topics/108.dita#unique_45"));
b.put(prefixTemp("maps/Chunk46.dita"), prefixTemp("maps/Chunk46.dita"));
b.put(prefixTemp("110.dita"), prefixTemp("maps/Chunk46.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("110.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk46.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/111.dita"), prefixTemp("maps/Chunk46.dita#unique_47"));
b.put(prefixTemp("topics/111.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk46.dita#unique_47"));
b.put(prefixTemp("maps/113.dita"), prefixTemp("maps/113.dita"));
b.put(prefixTemp("maps/topics/112.dita"), prefixTemp("maps/113.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/112.dita#topic_qft_qwn_hv"), prefixTemp("maps/113.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/114.dita"), prefixTemp("maps/113.dita#unique_48"));
b.put(prefixTemp("maps/114.dita#topic_qft_qwn_hv"), prefixTemp("maps/113.dita#unique_48"));
b.put(prefixTemp("116.dita"), prefixTemp("116.dita"));
b.put(prefixTemp("maps/topics/115.dita"), prefixTemp("116.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/115.dita#topic_qft_qwn_hv"), prefixTemp("116.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/117.dita"), prefixTemp("116.dita#unique_49"));
b.put(prefixTemp("maps/117.dita#topic_qft_qwn_hv"), prefixTemp("116.dita#unique_49"));
b.put(prefixTemp("maps/topics/119.dita"), prefixTemp("maps/topics/119.dita"));
b.put(prefixTemp("maps/topics/118.dita"), prefixTemp("maps/topics/119.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/118.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/119.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/120.dita"), prefixTemp("maps/topics/119.dita#unique_50"));
b.put(prefixTemp("maps/120.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/119.dita#unique_50"));
b.put(prefixTemp("topics/122.dita"), prefixTemp("topics/122.dita"));
b.put(prefixTemp("maps/topics/121.dita"), prefixTemp("topics/122.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/121.dita#topic_qft_qwn_hv"), prefixTemp("topics/122.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/123.dita"), prefixTemp("topics/122.dita#unique_51"));
b.put(prefixTemp("maps/123.dita#topic_qft_qwn_hv"), prefixTemp("topics/122.dita#unique_51"));
b.put(prefixTemp("maps/Chunk52.dita"), prefixTemp("maps/Chunk52.dita"));
b.put(prefixTemp("maps/topics/124.dita"), prefixTemp("maps/Chunk52.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/124.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk52.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/125.dita"), prefixTemp("maps/Chunk52.dita#unique_53"));
b.put(prefixTemp("maps/125.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk52.dita#unique_53"));
b.put(prefixTemp("maps/127.dita"), prefixTemp("maps/127.dita"));
b.put(prefixTemp("maps/topics/126.dita"), prefixTemp("maps/127.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/126.dita#topic_qft_qwn_hv"), prefixTemp("maps/127.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("128.dita"), prefixTemp("maps/127.dita#unique_54"));
b.put(prefixTemp("128.dita#topic_qft_qwn_hv"), prefixTemp("maps/127.dita#unique_54"));
b.put(prefixTemp("130.dita"), prefixTemp("130.dita"));
b.put(prefixTemp("maps/topics/129.dita"), prefixTemp("130.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/129.dita#topic_qft_qwn_hv"), prefixTemp("130.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("131.dita"), prefixTemp("130.dita#unique_55"));
b.put(prefixTemp("131.dita#topic_qft_qwn_hv"), prefixTemp("130.dita#unique_55"));
b.put(prefixTemp("maps/topics/133.dita"), prefixTemp("maps/topics/133.dita"));
b.put(prefixTemp("maps/topics/132.dita"), prefixTemp("maps/topics/133.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/132.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/133.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("134.dita"), prefixTemp("maps/topics/133.dita#unique_56"));
b.put(prefixTemp("134.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/133.dita#unique_56"));
b.put(prefixTemp("topics/136.dita"), prefixTemp("topics/136.dita"));
b.put(prefixTemp("maps/topics/135.dita"), prefixTemp("topics/136.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/135.dita#topic_qft_qwn_hv"), prefixTemp("topics/136.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("137.dita"), prefixTemp("topics/136.dita#unique_57"));
b.put(prefixTemp("137.dita#topic_qft_qwn_hv"), prefixTemp("topics/136.dita#unique_57"));
b.put(prefixTemp("maps/Chunk58.dita"), prefixTemp("maps/Chunk58.dita"));
b.put(prefixTemp("maps/topics/138.dita"), prefixTemp("maps/Chunk58.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/138.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk58.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("139.dita"), prefixTemp("maps/Chunk58.dita#unique_59"));
b.put(prefixTemp("139.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk58.dita#unique_59"));
b.put(prefixTemp("maps/141.dita"), prefixTemp("maps/141.dita"));
b.put(prefixTemp("maps/topics/140.dita"), prefixTemp("maps/141.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/140.dita#topic_qft_qwn_hv"), prefixTemp("maps/141.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/142.dita"), prefixTemp("maps/141.dita#unique_60"));
b.put(prefixTemp("maps/topics/142.dita#topic_qft_qwn_hv"), prefixTemp("maps/141.dita#unique_60"));
b.put(prefixTemp("144.dita"), prefixTemp("144.dita"));
b.put(prefixTemp("maps/topics/143.dita"), prefixTemp("144.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/143.dita#topic_qft_qwn_hv"), prefixTemp("144.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/145.dita"), prefixTemp("144.dita#unique_61"));
b.put(prefixTemp("maps/topics/145.dita#topic_qft_qwn_hv"), prefixTemp("144.dita#unique_61"));
b.put(prefixTemp("maps/topics/147.dita"), prefixTemp("maps/topics/147.dita"));
b.put(prefixTemp("maps/topics/146.dita"), prefixTemp("maps/topics/147.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/146.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/147.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/148.dita"), prefixTemp("maps/topics/147.dita#unique_62"));
b.put(prefixTemp("maps/topics/148.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/147.dita#unique_62"));
b.put(prefixTemp("topics/150.dita"), prefixTemp("topics/150.dita"));
b.put(prefixTemp("maps/topics/149.dita"), prefixTemp("topics/150.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/149.dita#topic_qft_qwn_hv"), prefixTemp("topics/150.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/151.dita"), prefixTemp("topics/150.dita#unique_63"));
b.put(prefixTemp("maps/topics/151.dita#topic_qft_qwn_hv"), prefixTemp("topics/150.dita#unique_63"));
b.put(prefixTemp("maps/Chunk64.dita"), prefixTemp("maps/Chunk64.dita"));
b.put(prefixTemp("maps/topics/152.dita"), prefixTemp("maps/Chunk64.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/152.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk64.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/153.dita"), prefixTemp("maps/Chunk64.dita#unique_65"));
b.put(prefixTemp("maps/topics/153.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk64.dita#unique_65"));
b.put(prefixTemp("maps/155.dita"), prefixTemp("maps/155.dita"));
b.put(prefixTemp("maps/topics/154.dita"), prefixTemp("maps/155.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/154.dita#topic_qft_qwn_hv"), prefixTemp("maps/155.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/156.dita"), prefixTemp("maps/155.dita#unique_66"));
b.put(prefixTemp("topics/156.dita#topic_qft_qwn_hv"), prefixTemp("maps/155.dita#unique_66"));
b.put(prefixTemp("158.dita"), prefixTemp("158.dita"));
b.put(prefixTemp("maps/topics/157.dita"), prefixTemp("158.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/157.dita#topic_qft_qwn_hv"), prefixTemp("158.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/159.dita"), prefixTemp("158.dita#unique_67"));
b.put(prefixTemp("topics/159.dita#topic_qft_qwn_hv"), prefixTemp("158.dita#unique_67"));
b.put(prefixTemp("maps/topics/161.dita"), prefixTemp("maps/topics/161.dita"));
b.put(prefixTemp("maps/topics/160.dita"), prefixTemp("maps/topics/161.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/160.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/161.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/162.dita"), prefixTemp("maps/topics/161.dita#unique_68"));
b.put(prefixTemp("topics/162.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/161.dita#unique_68"));
b.put(prefixTemp("topics/164.dita"), prefixTemp("topics/164.dita"));
b.put(prefixTemp("maps/topics/163.dita"), prefixTemp("topics/164.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/163.dita#topic_qft_qwn_hv"), prefixTemp("topics/164.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/165.dita"), prefixTemp("topics/164.dita#unique_69"));
b.put(prefixTemp("topics/165.dita#topic_qft_qwn_hv"), prefixTemp("topics/164.dita#unique_69"));
b.put(prefixTemp("maps/Chunk70.dita"), prefixTemp("maps/Chunk70.dita"));
b.put(prefixTemp("maps/topics/166.dita"), prefixTemp("maps/Chunk70.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/166.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk70.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/167.dita"), prefixTemp("maps/Chunk70.dita#unique_71"));
b.put(prefixTemp("topics/167.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk70.dita#unique_71"));
b.put(prefixTemp("maps/169.dita"), prefixTemp("maps/169.dita"));
b.put(prefixTemp("topics/168.dita"), prefixTemp("maps/169.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/168.dita#topic_qft_qwn_hv"), prefixTemp("maps/169.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/170.dita"), prefixTemp("maps/169.dita#unique_72"));
b.put(prefixTemp("maps/170.dita#topic_qft_qwn_hv"), prefixTemp("maps/169.dita#unique_72"));
b.put(prefixTemp("172.dita"), prefixTemp("172.dita"));
b.put(prefixTemp("topics/171.dita"), prefixTemp("172.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/171.dita#topic_qft_qwn_hv"), prefixTemp("172.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/173.dita"), prefixTemp("172.dita#unique_73"));
b.put(prefixTemp("maps/173.dita#topic_qft_qwn_hv"), prefixTemp("172.dita#unique_73"));
b.put(prefixTemp("maps/topics/175.dita"), prefixTemp("maps/topics/175.dita"));
b.put(prefixTemp("topics/174.dita"), prefixTemp("maps/topics/175.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/174.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/175.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/176.dita"), prefixTemp("maps/topics/175.dita#unique_74"));
b.put(prefixTemp("maps/176.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/175.dita#unique_74"));
b.put(prefixTemp("topics/178.dita"), prefixTemp("topics/178.dita"));
b.put(prefixTemp("topics/177.dita"), prefixTemp("topics/178.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/177.dita#topic_qft_qwn_hv"), prefixTemp("topics/178.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/179.dita"), prefixTemp("topics/178.dita#unique_75"));
b.put(prefixTemp("maps/179.dita#topic_qft_qwn_hv"), prefixTemp("topics/178.dita#unique_75"));
b.put(prefixTemp("maps/Chunk76.dita"), prefixTemp("maps/Chunk76.dita"));
b.put(prefixTemp("topics/180.dita"), prefixTemp("maps/Chunk76.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/180.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk76.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/181.dita"), prefixTemp("maps/Chunk76.dita#unique_77"));
b.put(prefixTemp("maps/181.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk76.dita#unique_77"));
b.put(prefixTemp("maps/183.dita"), prefixTemp("maps/183.dita"));
b.put(prefixTemp("topics/182.dita"), prefixTemp("maps/183.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/182.dita#topic_qft_qwn_hv"), prefixTemp("maps/183.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("184.dita"), prefixTemp("maps/183.dita#unique_78"));
b.put(prefixTemp("184.dita#topic_qft_qwn_hv"), prefixTemp("maps/183.dita#unique_78"));
b.put(prefixTemp("186.dita"), prefixTemp("186.dita"));
b.put(prefixTemp("topics/185.dita"), prefixTemp("186.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/185.dita#topic_qft_qwn_hv"), prefixTemp("186.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("187.dita"), prefixTemp("186.dita#unique_79"));
b.put(prefixTemp("187.dita#topic_qft_qwn_hv"), prefixTemp("186.dita#unique_79"));
b.put(prefixTemp("maps/topics/189.dita"), prefixTemp("maps/topics/189.dita"));
b.put(prefixTemp("topics/188.dita"), prefixTemp("maps/topics/189.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/188.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/189.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("190.dita"), prefixTemp("maps/topics/189.dita#unique_80"));
b.put(prefixTemp("190.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/189.dita#unique_80"));
b.put(prefixTemp("topics/192.dita"), prefixTemp("topics/192.dita"));
b.put(prefixTemp("topics/191.dita"), prefixTemp("topics/192.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/191.dita#topic_qft_qwn_hv"), prefixTemp("topics/192.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("193.dita"), prefixTemp("topics/192.dita#unique_81"));
b.put(prefixTemp("193.dita#topic_qft_qwn_hv"), prefixTemp("topics/192.dita#unique_81"));
b.put(prefixTemp("maps/Chunk82.dita"), prefixTemp("maps/Chunk82.dita"));
b.put(prefixTemp("topics/194.dita"), prefixTemp("maps/Chunk82.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/194.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk82.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("195.dita"), prefixTemp("maps/Chunk82.dita#unique_83"));
b.put(prefixTemp("195.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk82.dita#unique_83"));
b.put(prefixTemp("maps/197.dita"), prefixTemp("maps/197.dita"));
b.put(prefixTemp("topics/196.dita"), prefixTemp("maps/197.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/196.dita#topic_qft_qwn_hv"), prefixTemp("maps/197.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/198.dita"), prefixTemp("maps/197.dita#unique_84"));
b.put(prefixTemp("maps/topics/198.dita#topic_qft_qwn_hv"), prefixTemp("maps/197.dita#unique_84"));
b.put(prefixTemp("200.dita"), prefixTemp("200.dita"));
b.put(prefixTemp("topics/199.dita"), prefixTemp("200.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/199.dita#topic_qft_qwn_hv"), prefixTemp("200.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/201.dita"), prefixTemp("200.dita#unique_85"));
b.put(prefixTemp("maps/topics/201.dita#topic_qft_qwn_hv"), prefixTemp("200.dita#unique_85"));
b.put(prefixTemp("maps/topics/203.dita"), prefixTemp("maps/topics/203.dita"));
b.put(prefixTemp("topics/202.dita"), prefixTemp("maps/topics/203.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/202.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/203.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/204.dita"), prefixTemp("maps/topics/203.dita#unique_86"));
b.put(prefixTemp("maps/topics/204.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/203.dita#unique_86"));
b.put(prefixTemp("topics/206.dita"), prefixTemp("topics/206.dita"));
b.put(prefixTemp("topics/205.dita"), prefixTemp("topics/206.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/205.dita#topic_qft_qwn_hv"), prefixTemp("topics/206.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/207.dita"), prefixTemp("topics/206.dita#unique_87"));
b.put(prefixTemp("maps/topics/207.dita#topic_qft_qwn_hv"), prefixTemp("topics/206.dita#unique_87"));
b.put(prefixTemp("maps/Chunk88.dita"), prefixTemp("maps/Chunk88.dita"));
b.put(prefixTemp("topics/208.dita"), prefixTemp("maps/Chunk88.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/208.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk88.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("maps/topics/209.dita"), prefixTemp("maps/Chunk88.dita#unique_89"));
b.put(prefixTemp("maps/topics/209.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk88.dita#unique_89"));
b.put(prefixTemp("maps/211.dita"), prefixTemp("maps/211.dita"));
b.put(prefixTemp("topics/210.dita"), prefixTemp("maps/211.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/210.dita#topic_qft_qwn_hv"), prefixTemp("maps/211.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/212.dita"), prefixTemp("maps/211.dita#unique_90"));
b.put(prefixTemp("topics/212.dita#topic_qft_qwn_hv"), prefixTemp("maps/211.dita#unique_90"));
b.put(prefixTemp("214.dita"), prefixTemp("214.dita"));
b.put(prefixTemp("topics/213.dita"), prefixTemp("214.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/213.dita#topic_qft_qwn_hv"), prefixTemp("214.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/215.dita"), prefixTemp("214.dita#unique_91"));
b.put(prefixTemp("topics/215.dita#topic_qft_qwn_hv"), prefixTemp("214.dita#unique_91"));
b.put(prefixTemp("maps/topics/217.dita"), prefixTemp("maps/topics/217.dita"));
b.put(prefixTemp("topics/216.dita"), prefixTemp("maps/topics/217.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/216.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/217.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/218.dita"), prefixTemp("maps/topics/217.dita#unique_92"));
b.put(prefixTemp("topics/218.dita#topic_qft_qwn_hv"), prefixTemp("maps/topics/217.dita#unique_92"));
b.put(prefixTemp("topics/220.dita"), prefixTemp("topics/220.dita"));
b.put(prefixTemp("topics/219.dita"), prefixTemp("topics/220.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/219.dita#topic_qft_qwn_hv"), prefixTemp("topics/220.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/221.dita"), prefixTemp("topics/220.dita#unique_93"));
b.put(prefixTemp("topics/221.dita#topic_qft_qwn_hv"), prefixTemp("topics/220.dita#unique_93"));
b.put(prefixTemp("maps/Chunk94.dita"), prefixTemp("maps/Chunk94.dita"));
b.put(prefixTemp("topics/222.dita"), prefixTemp("maps/Chunk94.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/222.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk94.dita#topic_qft_qwn_hv"));
b.put(prefixTemp("topics/223.dita"), prefixTemp("maps/Chunk94.dita#unique_95"));
b.put(prefixTemp("topics/223.dita#topic_qft_qwn_hv"), prefixTemp("maps/Chunk94.dita#unique_95"));
return b.build();
}
/**
 * Resolves the given relative path against the temporary test directory
 * and returns the resulting absolute URI.
 */
private URI prefixTemp(final String s) {
    final URI base = tempDir.toURI();
    return base.resolve(s);
}
/**
 * Removes the temporary working directory after each test so that later
 * tests start from a clean filesystem state.
 */
@After
public void teardown() throws IOException {
TestUtils.forceDelete(tempDir);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.standard;
import org.apache.commons.lang3.SystemUtils;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.json.JsonRecordSetWriter;
import org.apache.nifi.json.JsonTreeReader;
import org.apache.nifi.json.JsonTreeRowRecordReader;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.schema.access.SchemaAccessUtils;
import org.apache.nifi.schema.access.SchemaNotFoundException;
import org.apache.nifi.serialization.MalformedRecordException;
import org.apache.nifi.serialization.RecordReader;
import org.apache.nifi.serialization.RecordReaderFactory;
import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.DataType;
import org.apache.nifi.serialization.record.MockRecordWriter;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Unit tests for the ForkRecord processor, exercising both the EXTRACT and
 * the SPLIT modes against JSON inputs containing nested (and nested-nested)
 * arrays of records.
 */
public class TestForkRecord {

    private final String dateFormat = RecordFieldType.DATE.getDefaultFormat();
    private final String timeFormat = RecordFieldType.TIME.getDefaultFormat();
    private final String timestampFormat = RecordFieldType.TIMESTAMP.getDefaultFormat();

    /**
     * Builds the flat "parent" fields shared by every record schema used in
     * these tests: id plus name/address-related string fields.
     */
    private List<RecordField> getDefaultFields() {
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("id", RecordFieldType.INT.getDataType()));
        fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("address", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("city", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("state", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("zipCode", RecordFieldType.STRING.getDataType()));
        fields.add(new RecordField("country", RecordFieldType.STRING.getDataType()));
        return fields;
    }

    /** Schema of a bank account: id + balance. */
    private RecordSchema getAccountSchema() {
        final List<RecordField> accountFields = new ArrayList<>();
        accountFields.add(new RecordField("id", RecordFieldType.INT.getDataType()));
        accountFields.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
        return new SimpleRecordSchema(accountFields);
    }

    /** Schema of a bank account that also carries a nested array of transactions. */
    private RecordSchema getAccountWithTransactionSchema() {
        final List<RecordField> accountFields = new ArrayList<>();
        accountFields.add(new RecordField("id", RecordFieldType.INT.getDataType()));
        accountFields.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
        final DataType transactionRecordType = RecordFieldType.RECORD.getRecordDataType(getTransactionSchema());
        final DataType transactionsType = RecordFieldType.ARRAY.getArrayDataType(transactionRecordType);
        accountFields.add(new RecordField("transactions", transactionsType));
        return new SimpleRecordSchema(accountFields);
    }

    /** Schema of a single transaction: id + amount. */
    private RecordSchema getTransactionSchema() {
        final List<RecordField> transactionFields = new ArrayList<>();
        transactionFields.add(new RecordField("id", RecordFieldType.INT.getDataType()));
        transactionFields.add(new RecordField("amount", RecordFieldType.DOUBLE.getDataType()));
        return new SimpleRecordSchema(transactionFields);
    }

    // Pretty-printed JSON output is not portable: these tests compare against
    // fixed fixture files and fail on Windows, so the suite only runs on *nix.
    @BeforeClass
    public static void setUpSuite() {
        Assume.assumeTrue("Test only runs on *nix", !SystemUtils.IS_OS_WINDOWS);
    }

    /** EXTRACT mode on a simple nested array, without copying parent fields. */
    @Test
    public void testForkExtractSimpleWithoutParentFields() throws IOException, MalformedRecordException, InitializationException {
        TestRunner runner = TestRunners.newTestRunner(new ForkRecord());
        final DataType accountRecordType = RecordFieldType.RECORD.getRecordDataType(getAccountSchema());
        final DataType accountsType = RecordFieldType.ARRAY.getArrayDataType(accountRecordType);
        final List<RecordField> fields = getDefaultFields();
        fields.add(new RecordField("accounts", accountsType));
        final RecordSchema schema = new SimpleRecordSchema(fields);
        final JsonRecordReader readerService = new JsonRecordReader(schema);
        final MockRecordWriter writerService = new CustomRecordWriter("header", false, getAccountSchema());
        runner.addControllerService("reader", readerService);
        runner.enableControllerService(readerService);
        runner.addControllerService("writer", writerService);
        runner.enableControllerService(writerService);
        runner.setProperty(ForkRecord.RECORD_READER, "reader");
        runner.setProperty(ForkRecord.RECORD_WRITER, "writer");
        runner.setProperty(ForkRecord.MODE, ForkRecord.MODE_EXTRACT);
        runner.setProperty("my-path", "/accounts");
        runner.enqueue(new File("src/test/resources/TestForkRecord/single-element-nested-array.json").toPath());
        runner.run(1);
        runner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
        runner.assertTransferCount(ForkRecord.REL_FORK, 1);
        final MockFlowFile mff = runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0);
        mff.assertAttributeEquals("record.count", "2");
        mff.assertContentEquals("header\n42,4750.89\n43,48212.38\n");
    }

    /** EXTRACT mode on a simple nested array, with parent fields appended. */
    @Test
    public void testForkExtractSimpleWithParentFields() throws IOException, MalformedRecordException, InitializationException {
        TestRunner runner = TestRunners.newTestRunner(new ForkRecord());
        final DataType accountRecordType = RecordFieldType.RECORD.getRecordDataType(getAccountSchema());
        final DataType accountsType = RecordFieldType.ARRAY.getArrayDataType(accountRecordType);
        final List<RecordField> fields = getDefaultFields();
        fields.add(new RecordField("accounts", accountsType));
        final RecordSchema schema = new SimpleRecordSchema(fields);
        final List<RecordField> fieldsWrite = getDefaultFields();
        fieldsWrite.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
        final RecordSchema schemaWrite = new SimpleRecordSchema(fieldsWrite);
        final JsonRecordReader readerService = new JsonRecordReader(schema);
        final MockRecordWriter writerService = new CustomRecordWriter("header", false, schemaWrite);
        runner.addControllerService("reader", readerService);
        runner.enableControllerService(readerService);
        runner.addControllerService("writer", writerService);
        runner.enableControllerService(writerService);
        runner.setProperty(ForkRecord.RECORD_READER, "reader");
        runner.setProperty(ForkRecord.RECORD_WRITER, "writer");
        runner.setProperty(ForkRecord.MODE, ForkRecord.MODE_EXTRACT);
        runner.setProperty(ForkRecord.INCLUDE_PARENT_FIELDS, "true");
        runner.setProperty("my-path", "/accounts");
        runner.enqueue(new File("src/test/resources/TestForkRecord/single-element-nested-array.json").toPath());
        runner.run(1);
        runner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
        runner.assertTransferCount(ForkRecord.REL_FORK, 1);
        final MockFlowFile mff = runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0);
        mff.assertAttributeEquals("record.count", "2");
        mff.assertContentEquals("header\n42,4750.89,John Doe,123 My Street,My City,MS,11111,USA\n43,48212.38,John Doe,123 My Street,My City,MS,11111,USA\n");
    }

    /** A fork path pointing at a non-array field must produce zero records. */
    @Test
    public void testForkExtractNotAnArray() throws IOException, MalformedRecordException, InitializationException {
        TestRunner runner = TestRunners.newTestRunner(new ForkRecord());
        final DataType accountRecordType = RecordFieldType.RECORD.getRecordDataType(getAccountSchema());
        final DataType accountsType = RecordFieldType.ARRAY.getArrayDataType(accountRecordType);
        final List<RecordField> fields = getDefaultFields();
        fields.add(new RecordField("accounts", accountsType));
        final RecordSchema schema = new SimpleRecordSchema(fields);
        final List<RecordField> fieldsWrite = getDefaultFields();
        fieldsWrite.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
        final RecordSchema schemaWrite = new SimpleRecordSchema(fieldsWrite);
        final JsonRecordReader readerService = new JsonRecordReader(schema);
        final MockRecordWriter writerService = new CustomRecordWriter("header", false, schemaWrite);
        runner.addControllerService("reader", readerService);
        runner.enableControllerService(readerService);
        runner.addControllerService("writer", writerService);
        runner.enableControllerService(writerService);
        runner.setProperty(ForkRecord.RECORD_READER, "reader");
        runner.setProperty(ForkRecord.RECORD_WRITER, "writer");
        runner.setProperty(ForkRecord.MODE, ForkRecord.MODE_EXTRACT);
        runner.setProperty(ForkRecord.INCLUDE_PARENT_FIELDS, "true");
        runner.setProperty("my-path", "/country");
        runner.enqueue(new File("src/test/resources/TestForkRecord/single-element-nested-array.json").toPath());
        runner.run(1);
        runner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
        runner.assertTransferCount(ForkRecord.REL_FORK, 1);
        final MockFlowFile mff = runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0);
        mff.assertAttributeEquals("record.count", "0");
    }

    /** A fork path pointing at an array of strings (not records) yields zero records. */
    @Test
    public void testForkExtractNotAnArrayOfRecords() throws IOException, MalformedRecordException, InitializationException {
        TestRunner runner = TestRunners.newTestRunner(new ForkRecord());
        final DataType accountRecordType = RecordFieldType.STRING.getDataType();
        final DataType accountsType = RecordFieldType.ARRAY.getArrayDataType(accountRecordType);
        final List<RecordField> fields = getDefaultFields();
        fields.add(new RecordField("accounts", accountsType));
        final RecordSchema schema = new SimpleRecordSchema(fields);
        final List<RecordField> fieldsWrite = getDefaultFields();
        fieldsWrite.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
        final RecordSchema schemaWrite = new SimpleRecordSchema(fieldsWrite);
        final JsonRecordReader readerService = new JsonRecordReader(schema);
        final MockRecordWriter writerService = new CustomRecordWriter("header", false, schemaWrite);
        runner.addControllerService("reader", readerService);
        runner.enableControllerService(readerService);
        runner.addControllerService("writer", writerService);
        runner.enableControllerService(writerService);
        runner.setProperty(ForkRecord.RECORD_READER, "reader");
        runner.setProperty(ForkRecord.RECORD_WRITER, "writer");
        runner.setProperty(ForkRecord.MODE, ForkRecord.MODE_EXTRACT);
        runner.setProperty(ForkRecord.INCLUDE_PARENT_FIELDS, "true");
        runner.setProperty("my-path", "/accounts");
        runner.enqueue(new File("src/test/resources/TestForkRecord/single-element-nested-array-strings.json").toPath());
        runner.run(1);
        runner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
        runner.assertTransferCount(ForkRecord.REL_FORK, 1);
        final MockFlowFile mff = runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0);
        mff.assertAttributeEquals("record.count", "0");
    }

    /** EXTRACT mode across two nesting levels, with parent fields (including balance) appended. */
    @Test
    public void testForkExtractComplexWithParentFields() throws IOException, MalformedRecordException, InitializationException {
        TestRunner runner = TestRunners.newTestRunner(new ForkRecord());
        final DataType accountRecordType = RecordFieldType.RECORD.getRecordDataType(getAccountWithTransactionSchema());
        final DataType accountsType = RecordFieldType.ARRAY.getArrayDataType(accountRecordType);
        final List<RecordField> fields = getDefaultFields();
        fields.add(new RecordField("accounts", accountsType));
        final RecordSchema schema = new SimpleRecordSchema(fields);
        final List<RecordField> fieldsWrite = getDefaultFields();
        fieldsWrite.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
        fieldsWrite.add(new RecordField("amount", RecordFieldType.DOUBLE.getDataType()));
        final RecordSchema schemaWrite = new SimpleRecordSchema(fieldsWrite);
        final JsonRecordReader readerService = new JsonRecordReader(schema);
        final MockRecordWriter writerService = new CustomRecordWriter("header", false, schemaWrite);
        runner.addControllerService("reader", readerService);
        runner.enableControllerService(readerService);
        runner.addControllerService("writer", writerService);
        runner.enableControllerService(writerService);
        runner.setProperty(ForkRecord.RECORD_READER, "reader");
        runner.setProperty(ForkRecord.RECORD_WRITER, "writer");
        runner.setProperty(ForkRecord.MODE, ForkRecord.MODE_EXTRACT);
        runner.setProperty(ForkRecord.INCLUDE_PARENT_FIELDS, "true");
        runner.setProperty("my-path", "/accounts[*]/transactions");
        runner.enqueue(new File("src/test/resources/TestForkRecord/single-element-nested-nested-array.json").toPath());
        runner.run(1);
        runner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
        runner.assertTransferCount(ForkRecord.REL_FORK, 1);
        final MockFlowFile mff = runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0);
        mff.assertAttributeEquals("record.count", "4");
        mff.assertContentEquals("header\n5,150.31,John Doe,123 My Street,My City,MS,11111,USA,4750.89\n6,-15.31,John Doe,123 My Street,My City,MS,11111,USA,4750.89\n"
            + "7,36.78,John Doe,123 My Street,My City,MS,11111,USA,48212.38\n8,-21.34,John Doe,123 My Street,My City,MS,11111,USA,48212.38\n");
    }

    /** EXTRACT mode across two nesting levels, without parent fields. */
    @Test
    public void testForkExtractComplexWithoutParentFields() throws IOException, MalformedRecordException, InitializationException {
        TestRunner runner = TestRunners.newTestRunner(new ForkRecord());
        final DataType accountRecordType = RecordFieldType.RECORD.getRecordDataType(getAccountWithTransactionSchema());
        final DataType accountsType = RecordFieldType.ARRAY.getArrayDataType(accountRecordType);
        final List<RecordField> fields = getDefaultFields();
        fields.add(new RecordField("accounts", accountsType));
        final RecordSchema schema = new SimpleRecordSchema(fields);
        final List<RecordField> fieldsWrite = new ArrayList<>();
        fieldsWrite.add(new RecordField("id", RecordFieldType.INT.getDataType()));
        fieldsWrite.add(new RecordField("amount", RecordFieldType.DOUBLE.getDataType()));
        final RecordSchema schemaWrite = new SimpleRecordSchema(fieldsWrite);
        final JsonRecordReader readerService = new JsonRecordReader(schema);
        final MockRecordWriter writerService = new CustomRecordWriter("header", false, schemaWrite);
        runner.addControllerService("reader", readerService);
        runner.enableControllerService(readerService);
        runner.addControllerService("writer", writerService);
        runner.enableControllerService(writerService);
        runner.setProperty(ForkRecord.RECORD_READER, "reader");
        runner.setProperty(ForkRecord.RECORD_WRITER, "writer");
        runner.setProperty(ForkRecord.MODE, ForkRecord.MODE_EXTRACT);
        runner.setProperty(ForkRecord.INCLUDE_PARENT_FIELDS, "false");
        runner.setProperty("my-path", "/accounts[*]/transactions");
        runner.enqueue(new File("src/test/resources/TestForkRecord/single-element-nested-nested-array.json").toPath());
        runner.run(1);
        runner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
        runner.assertTransferCount(ForkRecord.REL_FORK, 1);
        final MockFlowFile mff = runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0);
        mff.assertAttributeEquals("record.count", "4");
        mff.assertContentEquals("header\n5,150.31\n6,-15.31\n7,36.78\n8,-21.34\n");
    }

    /** Same as the complex parent-fields case but the input contains a null transactions array. */
    @Test
    public void testForkExtractComplexWithParentFieldsAndNull() throws IOException, MalformedRecordException, InitializationException {
        TestRunner runner = TestRunners.newTestRunner(new ForkRecord());
        final DataType accountRecordType = RecordFieldType.RECORD.getRecordDataType(getAccountWithTransactionSchema());
        final DataType accountsType = RecordFieldType.ARRAY.getArrayDataType(accountRecordType);
        final List<RecordField> fields = getDefaultFields();
        fields.add(new RecordField("accounts", accountsType));
        final RecordSchema schema = new SimpleRecordSchema(fields);
        final List<RecordField> fieldsWrite = getDefaultFields();
        fieldsWrite.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
        fieldsWrite.add(new RecordField("amount", RecordFieldType.DOUBLE.getDataType()));
        final RecordSchema schemaWrite = new SimpleRecordSchema(fieldsWrite);
        final JsonRecordReader readerService = new JsonRecordReader(schema);
        final MockRecordWriter writerService = new CustomRecordWriter("header", false, schemaWrite);
        runner.addControllerService("reader", readerService);
        runner.enableControllerService(readerService);
        runner.addControllerService("writer", writerService);
        runner.enableControllerService(writerService);
        runner.setProperty(ForkRecord.RECORD_READER, "reader");
        runner.setProperty(ForkRecord.RECORD_WRITER, "writer");
        runner.setProperty(ForkRecord.MODE, ForkRecord.MODE_EXTRACT);
        runner.setProperty(ForkRecord.INCLUDE_PARENT_FIELDS, "true");
        runner.setProperty("my-path", "/accounts[*]/transactions");
        runner.enqueue(new File("src/test/resources/TestForkRecord/two-elements-nested-nested-array-null.json").toPath());
        runner.run(1);
        runner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
        runner.assertTransferCount(ForkRecord.REL_FORK, 1);
        final MockFlowFile mff = runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0);
        mff.assertAttributeEquals("record.count", "4");
        mff.assertContentEquals("header\n5,150.31,John Doe,123 My Street,My City,MS,11111,USA,4750.89\n6,-15.31,John Doe,123 My Street,My City,MS,11111,USA,4750.89\n"
            + "7,36.78,John Doe,123 My Street,My City,MS,11111,USA,48212.38\n8,-21.34,John Doe,123 My Street,My City,MS,11111,USA,48212.38\n");
    }

    /** SPLIT mode against two different fork paths of the same complex input. */
    @Test
    public void testSplitMode() throws InitializationException, IOException {
        String expectedOutput = null;
        final TestRunner runner = TestRunners.newTestRunner(new ForkRecord());
        final JsonTreeReader jsonReader = new JsonTreeReader();
        runner.addControllerService("record-reader", jsonReader);
        // Fixture files are stored as UTF-8; decode them explicitly so the
        // comparison does not depend on the platform default charset.
        final String inputSchemaText = new String(Files.readAllBytes(Paths.get("src/test/resources/TestForkRecord/schema/schema.avsc")), StandardCharsets.UTF_8);
        final String outputSchemaText = new String(Files.readAllBytes(Paths.get("src/test/resources/TestForkRecord/schema/schema.avsc")), StandardCharsets.UTF_8);
        runner.setProperty(jsonReader, SchemaAccessUtils.SCHEMA_ACCESS_STRATEGY, SchemaAccessUtils.SCHEMA_TEXT_PROPERTY);
        runner.setProperty(jsonReader, SchemaAccessUtils.SCHEMA_TEXT, inputSchemaText);
        runner.enableControllerService(jsonReader);
        runner.setProperty(ForkRecord.RECORD_READER, "record-reader");
        final JsonRecordSetWriter jsonWriter = new JsonRecordSetWriter();
        runner.addControllerService("record-writer", jsonWriter);
        runner.setProperty(jsonWriter, SchemaAccessUtils.SCHEMA_ACCESS_STRATEGY, SchemaAccessUtils.SCHEMA_TEXT_PROPERTY);
        runner.setProperty(jsonWriter, SchemaAccessUtils.SCHEMA_TEXT, outputSchemaText);
        runner.setProperty(jsonWriter, "Pretty Print JSON", "true");
        runner.setProperty(jsonWriter, "Schema Write Strategy", "full-schema-attribute");
        runner.enableControllerService(jsonWriter);
        runner.setProperty(ForkRecord.RECORD_WRITER, "record-writer");
        runner.setProperty(ForkRecord.MODE, ForkRecord.MODE_SPLIT);
        runner.setProperty("my-path", "/address");
        runner.enqueue(Paths.get("src/test/resources/TestForkRecord/input/complex-input-json.json"));
        runner.run();
        runner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
        runner.assertTransferCount(ForkRecord.REL_FORK, 1);
        expectedOutput = new String(Files.readAllBytes(Paths.get("src/test/resources/TestForkRecord/output/split-address.json")), StandardCharsets.UTF_8);
        runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0).assertContentEquals(expectedOutput);
        runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0).assertAttributeEquals("record.count", "5");
        runner.clearTransferState();
        runner.setProperty("my-path", "/bankAccounts[*]/last5Transactions");
        runner.enqueue(Paths.get("src/test/resources/TestForkRecord/input/complex-input-json.json"));
        runner.run();
        runner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
        runner.assertTransferCount(ForkRecord.REL_FORK, 1);
        expectedOutput = new String(Files.readAllBytes(Paths.get("src/test/resources/TestForkRecord/output/split-transactions.json")), StandardCharsets.UTF_8);
        runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0).assertContentEquals(expectedOutput);
        runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0).assertAttributeEquals("record.count", "6");
    }

    /** EXTRACT mode end-to-end with JSON reader/writer services and Avro schema text. */
    @Test
    public void testExtractMode() throws InitializationException, IOException {
        String expectedOutput = null;
        final TestRunner runner = TestRunners.newTestRunner(new ForkRecord());
        final JsonTreeReader jsonReader = new JsonTreeReader();
        runner.addControllerService("record-reader", jsonReader);
        // Decode fixtures as UTF-8 explicitly (see testSplitMode).
        final String inputSchemaText = new String(Files.readAllBytes(Paths.get("src/test/resources/TestForkRecord/schema/schema.avsc")), StandardCharsets.UTF_8);
        final String outputSchemaText = new String(Files.readAllBytes(Paths.get("src/test/resources/TestForkRecord/schema/extract-schema.avsc")), StandardCharsets.UTF_8);
        runner.setProperty(jsonReader, SchemaAccessUtils.SCHEMA_ACCESS_STRATEGY, SchemaAccessUtils.SCHEMA_TEXT_PROPERTY);
        runner.setProperty(jsonReader, SchemaAccessUtils.SCHEMA_TEXT, inputSchemaText);
        runner.enableControllerService(jsonReader);
        runner.setProperty(ForkRecord.RECORD_READER, "record-reader");
        final JsonRecordSetWriter jsonWriter = new JsonRecordSetWriter();
        runner.addControllerService("record-writer", jsonWriter);
        runner.setProperty(jsonWriter, SchemaAccessUtils.SCHEMA_ACCESS_STRATEGY, SchemaAccessUtils.SCHEMA_TEXT_PROPERTY);
        runner.setProperty(jsonWriter, SchemaAccessUtils.SCHEMA_TEXT, outputSchemaText);
        runner.setProperty(jsonWriter, "Pretty Print JSON", "true");
        runner.setProperty(jsonWriter, "Schema Write Strategy", "full-schema-attribute");
        runner.enableControllerService(jsonWriter);
        runner.setProperty(ForkRecord.RECORD_WRITER, "record-writer");
        runner.setProperty(ForkRecord.MODE, ForkRecord.MODE_EXTRACT);
        runner.setProperty(ForkRecord.INCLUDE_PARENT_FIELDS, "true");
        runner.setProperty("my-path", "/bankAccounts[*]/last5Transactions");
        runner.enqueue(Paths.get("src/test/resources/TestForkRecord/input/complex-input-json.json"));
        runner.run();
        runner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
        runner.assertTransferCount(ForkRecord.REL_FORK, 1);
        expectedOutput = new String(Files.readAllBytes(Paths.get("src/test/resources/TestForkRecord/output/extract-transactions.json")), StandardCharsets.UTF_8);
        runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0).assertContentEquals(expectedOutput);
        runner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0).assertAttributeEquals("record.count", "6");
    }

    /**
     * RecordReaderFactory producing a JsonTreeRowRecordReader with a fixed
     * schema. Intentionally a non-static inner class: it reads the outer
     * test's dateFormat/timeFormat/timestampFormat fields.
     */
    private class JsonRecordReader extends AbstractControllerService implements RecordReaderFactory {
        final RecordSchema schema;

        public JsonRecordReader(RecordSchema schema) {
            this.schema = schema;
        }

        @Override
        public RecordReader createRecordReader(FlowFile flowFile, InputStream in, ComponentLog logger) throws MalformedRecordException, IOException, SchemaNotFoundException {
            return new JsonTreeRowRecordReader(in, logger, schema, dateFormat, timeFormat, timestampFormat);
        }

        @Override
        public RecordReader createRecordReader(Map<String, String> variables, InputStream in, long inputLength, ComponentLog logger)
                throws MalformedRecordException, IOException, SchemaNotFoundException {
            return new JsonTreeRowRecordReader(in, logger, schema, dateFormat, timeFormat, timestampFormat);
        }
    }

    /**
     * MockRecordWriter that reports a fixed write schema instead of echoing
     * the read schema. Static: it needs no reference to the enclosing test.
     */
    private static class CustomRecordWriter extends MockRecordWriter {
        final RecordSchema schema;

        public CustomRecordWriter(final String header, final boolean quoteValues, RecordSchema schema) {
            super(header, quoteValues);
            this.schema = schema;
        }

        @Override
        public RecordSchema getSchema(Map<String, String> variables, RecordSchema readSchema) throws SchemaNotFoundException, IOException {
            return this.schema;
        }
    }
}
| |
package org.fortiss.smg.actuatorclient.enocean.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.fortiss.smg.actuatorclient.enocean.impl.model.Actor;
import org.fortiss.smg.actuatorclient.enocean.impl.persistence.WrapperPersistor;
import org.fortiss.smg.actuatormaster.api.IActuatorClient;
import org.fortiss.smg.actuatormaster.api.IActuatorMaster;
import org.fortiss.smg.actuatormaster.api.events.DeviceEvent;
import org.fortiss.smg.containermanager.api.devices.DeviceContainer;
import org.fortiss.smg.containermanager.api.devices.DeviceId;
import org.fortiss.smg.smgschemas.commands.DoubleCommand;
import org.slf4j.LoggerFactory;
/**
 * EnOcean actuator client. Keeps the list of known EnOcean devices,
 * announces them to the actuator master, and dispatches incoming commands
 * to the matching actor via the {@link EnOceanLooper}.
 */
public class ActuatorClientImpl implements IActuatorClient {
    private static final org.slf4j.Logger logger = LoggerFactory
            .getLogger(ActuatorClientImpl.class);
    private IActuatorMaster master;
    private String clientId;
    // Only non-null if periodic polling is (re-)enabled in activate().
    private ScheduledExecutorService executor;
    private int pollFrequency;
    private String host;
    private int port;
    private String wrapperTag;
    // private EnOceanCommunicator communicator;
    ArrayList<String> enOceanDeviceIds = new ArrayList<String>();
    ArrayList<DeviceContainer> devices = new ArrayList<DeviceContainer>();
    private EnOceanLooper looper;

    public ActuatorClientImpl(String host, int port, String wrapperTag, int pollFreq) {
        this.wrapperTag = wrapperTag;
        this.pollFrequency = pollFreq;
        this.port = port;
        this.host = host;
    }

    /**
     * Creates the EnOcean looper and restores the persisted device wrappers.
     * NOTE(review): the method name carries a historical typo ("Encoean")
     * but is kept unchanged because external callers depend on it.
     */
    public void connectToEncoean() {
        looper = new EnOceanLooper(this);
        WrapperPersistor persistor = new WrapperPersistor(this);
        persistor.createWrappersFromPersistency();
    }

    /** Announces all currently known devices to the master. */
    public synchronized void activate() {
        sendNewDeviceEvents();
        // Periodic polling is currently disabled. If re-enabled, assign the
        // executor field so deactivate() shuts it down again:
        // executor = Executors.newSingleThreadScheduledExecutor();
        // executor.scheduleAtFixedRate(looper, 0, getPollFrequency(),
        // TimeUnit.SECONDS);
    }

    /**
     * Shuts down the polling executor (if one was ever started), waiting up
     * to one minute for running tasks to finish.
     */
    public synchronized void deactivate() {
        // activate() currently never creates the executor (scheduling is
        // commented out), so guard against a NullPointerException here.
        if (executor == null) {
            return;
        }
        executor.shutdown();
        try {
            executor.awaitTermination(1, TimeUnit.MINUTES);
        } catch (InterruptedException e) {
            // Restore the interrupt status for callers before propagating.
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }

    public String getClientId() {
        return this.clientId;
    }

    public IActuatorMaster getMaster() {
        return this.master;
    }

    public int getPollFrequency() {
        return this.pollFrequency;
    }

    public String getWrapperTag() {
        return this.wrapperTag;
    }

    public String getHost() {
        return this.host;
    }

    public int getPort() {
        return this.port;
    }

    @Override
    public boolean isComponentAlive() {
        return true;
    }

    public List<DeviceContainer> getDeviceSpecs() {
        return devices;
    }

    public EnOceanLooper getLooperForTest() {
        return this.looper;
    }

    /** Fully-qualified class names of all available actor strategies. */
    public List<String> getAvailableActorStrategies() {
        List<String> classNames = new ArrayList<String>();
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.actor.FortissBlindActorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.actor.Light1030ActorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.actor.Light5070ActorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.actor.HeatingActorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.actor.SteckdosenleisteActorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.actor.GenericPushRelease1030SwitchActorStrategy");
        return classNames;
    }

    /** Fully-qualified class names of all available sensor strategies. */
    public List<String> getAvailableSensorStrategies() {
        List<String> classNames = new ArrayList<String>();
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.sensor.BSCsOTSSensorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.sensor.FAH60SensorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.sensor.FBH55SensorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.sensor.FT4DoubleRockerBooleanSensorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.sensor.FT4SingleRockerStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.sensor.FTKSensorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.sensor.FTR55DSensorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.sensor.FWZ12SensorStrategy");
        classNames.add("org.fortiss.smg.actuatorclient.enocean.impl.model.strategies.sensor.SystestSensorStrategy");
        return classNames;
    }

    /**
     * Forwards a double command to the actor registered for the given
     * device. Only binary devices are handled; any non-zero value is
     * interpreted as {@code true}.
     */
    @Override
    public void onDoubleCommand(DoubleCommand command, DeviceId dev) {
        logger.debug("Received Doublecommand for {}", dev.getDevid());
        for (DeviceContainer device : devices) {
            if (device.getDeviceId().equals(dev) && device.isBinary()) {
                final boolean valueBool = command.getValue() != 0.0;
                this.looper.getActor(dev.toString()).setBoolean(valueBool, dev.toString(), 0, "", true, "");
            }
        }
    }

    /**
     * Sends a DeviceEvent for every known device to the actuator master.
     * Timeouts are logged and the remaining devices are still attempted.
     */
    private void sendNewDeviceEvents() {
        for (DeviceContainer dev : devices) {
            try {
                logger.debug("Dev:{} -> {}", dev.getDeviceId().getDevid(), clientId);
                master.sendDeviceEvent(new DeviceEvent(dev), this.clientId);
            } catch (TimeoutException e) {
                logger.debug("Failed to send {} to master", dev.getDeviceId());
            }
        }
    }

    public void setMaster(IActuatorMaster master) {
        this.master = master;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public void setPollFrequency(int pollFrequency) {
        this.pollFrequency = pollFrequency;
    }

    public void setWrapperTag(String wrapperTag) {
        this.wrapperTag = wrapperTag;
    }

    public ArrayList<String> getEnOceanDeviceIds() {
        return enOceanDeviceIds;
    }
}
| |
/**
*/
package CIM.IEC61970.Informative.InfERPSupport;
import CIM.IEC61968.Common.ActivityRecord;
import CIM.IEC61968.Common.ElectronicAddress;
import CIM.IEC61968.Common.Status;
import CIM.IEC61968.Common.TelephoneNumber;
import CIM.IEC61968.Customers.Customer;
import CIM.IEC61968.Customers.ServiceLocation;
import CIM.IEC61970.Core.IdentifiedObject;
import CIM.IEC61970.Informative.InfCommon.Craft;
import CIM.IEC61970.Informative.InfCommon.Skill;
import CIM.IEC61970.Informative.InfLocations.PersonPropertyRole;
import CIM.IEC61970.Informative.InfOperations.CallBack;
import CIM.IEC61970.Informative.InfOperations.ChangeItem;
import CIM.IEC61970.Informative.InfOperations.ErpPersonScheduleStepRole;
import CIM.IEC61970.Informative.InfWork.Appointment;
import CIM.IEC61970.Informative.InfWork.Crew;
import CIM.IEC61970.Informative.InfWork.LaborItem;
import CIM.IEC61970.Meas.MeasurementValue;
import org.eclipse.emf.common.util.EList;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Erp Person</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* </p>
* <ul>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getCallBacks <em>Call Backs</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getServiceLocation <em>Service Location</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getDocumentRoles <em>Document Roles</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getCategory <em>Category</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getFirstName <em>First Name</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getSwitchingStepRoles <em>Switching Step Roles</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getLastName <em>Last Name</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getChangeItems <em>Change Items</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getMName <em>MName</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getSkills <em>Skills</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getElectronicAddress <em>Electronic Address</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getGovernmentID <em>Government ID</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getSpecialNeed <em>Special Need</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getErpOrganisationRoles <em>Erp Organisation Roles</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getStatus <em>Status</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getPrefix <em>Prefix</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getLaborItems <em>Labor Items</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getCrafts <em>Crafts</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getMobilePhone <em>Mobile Phone</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getErpCompetency <em>Erp Competency</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getLandPropertyRoles <em>Land Property Roles</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getLandlinePhone <em>Landline Phone</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getMeasurementValues <em>Measurement Values</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getActivityRecords <em>Activity Records</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getCrews <em>Crews</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getAppointments <em>Appointments</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getErpPersonnel <em>Erp Personnel</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getSuffix <em>Suffix</em>}</li>
* <li>{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getCustomerData <em>Customer Data</em>}</li>
* </ul>
*
* @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson()
* @model annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='General purpose information for name and other information to contact people.'"
* annotation="http://langdale.com.au/2005/UML Profile\040documentation='General purpose information for name and other information to contact people.'"
* annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='General purpose information for name and other information to contact people.' Profile\040documentation='General purpose information for name and other information to contact people.'"
* @generated
*/
public interface ErpPerson extends IdentifiedObject {
// NOTE: EMF-generated interface (all members carry @generated). Do not
// hand-edit signatures or Javadoc outside begin/end-user-doc regions;
// regenerate from the CIM model instead.
/**
 * Returns the value of the '<em><b>Call Backs</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfOperations.CallBack}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfOperations.CallBack#getErpPersons <em>Erp Persons</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Call Backs</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Call Backs</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_CallBacks()
 * @see CIM.IEC61970.Informative.InfOperations.CallBack#getErpPersons
 * @model opposite="ErpPersons"
 * @generated
 */
EList<CallBack> getCallBacks();
/**
 * Returns the value of the '<em><b>Service Location</b></em>' reference.
 * It is bidirectional and its opposite is '{@link CIM.IEC61968.Customers.ServiceLocation#getErpPersons <em>Erp Persons</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Service Location</em>' reference isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Service Location</em>' reference.
 * @see #setServiceLocation(ServiceLocation)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_ServiceLocation()
 * @see CIM.IEC61968.Customers.ServiceLocation#getErpPersons
 * @model opposite="ErpPersons"
 * @generated
 */
ServiceLocation getServiceLocation();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getServiceLocation <em>Service Location</em>}' reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Service Location</em>' reference.
 * @see #getServiceLocation()
 * @generated
 */
void setServiceLocation(ServiceLocation value);
/**
 * Returns the value of the '<em><b>Document Roles</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfERPSupport.DocErpPersonRole}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfERPSupport.DocErpPersonRole#getErpPerson <em>Erp Person</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Document Roles</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Document Roles</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_DocumentRoles()
 * @see CIM.IEC61970.Informative.InfERPSupport.DocErpPersonRole#getErpPerson
 * @model opposite="ErpPerson"
 * @generated
 */
EList<DocErpPersonRole> getDocumentRoles();
/**
 * Returns the value of the '<em><b>Category</b></em>' attribute.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Category</em>' attribute isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Category</em>' attribute.
 * @see #setCategory(String)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_Category()
 * @model required="true"
 *        annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='Category of this person relative to utility operations, classified according to the utility\'s corporate standards and practices. Examples include employee, contractor, agent, not affiliated, etc.\nNote that this field is not used to indicate whether this person is a customer of the utility. Often an employee or contractor is also a customer. Customer information is gained with relationship to Organisation and CustomerData. In similar fashion, this field does not indicate the various roles this person may fill as part of utility operations.'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='Category of this person relative to utility operations, classified according to the utility\'s corporate standards and practices. Examples include employee, contractor, agent, not affiliated, etc.\nNote that this field is not used to indicate whether this person is a customer of the utility. Often an employee or contractor is also a customer. Customer information is gained with relationship to Organisation and CustomerData. In similar fashion, this field does not indicate the various roles this person may fill as part of utility operations.'"
 * @generated
 */
String getCategory();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getCategory <em>Category</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Category</em>' attribute.
 * @see #getCategory()
 * @generated
 */
void setCategory(String value);
/**
 * Returns the value of the '<em><b>First Name</b></em>' attribute.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>First Name</em>' attribute isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>First Name</em>' attribute.
 * @see #setFirstName(String)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_FirstName()
 * @model required="true"
 *        annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='Person\'s first name.'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='Person\'s first name.'"
 * @generated
 */
String getFirstName();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getFirstName <em>First Name</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>First Name</em>' attribute.
 * @see #getFirstName()
 * @generated
 */
void setFirstName(String value);
/**
 * Returns the value of the '<em><b>Switching Step Roles</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfOperations.ErpPersonScheduleStepRole}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfOperations.ErpPersonScheduleStepRole#getErpPerson <em>Erp Person</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Switching Step Roles</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Switching Step Roles</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_SwitchingStepRoles()
 * @see CIM.IEC61970.Informative.InfOperations.ErpPersonScheduleStepRole#getErpPerson
 * @model opposite="ErpPerson"
 * @generated
 */
EList<ErpPersonScheduleStepRole> getSwitchingStepRoles();
/**
 * Returns the value of the '<em><b>Last Name</b></em>' attribute.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Last Name</em>' attribute isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Last Name</em>' attribute.
 * @see #setLastName(String)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_LastName()
 * @model required="true"
 *        annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='Person\'s last (family, sir) name.'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='Person\'s last (family, sir) name.'"
 * @generated
 */
String getLastName();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getLastName <em>Last Name</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Last Name</em>' attribute.
 * @see #getLastName()
 * @generated
 */
void setLastName(String value);
/**
 * Returns the value of the '<em><b>Change Items</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfOperations.ChangeItem}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfOperations.ChangeItem#getErpPerson <em>Erp Person</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Change Items</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Change Items</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_ChangeItems()
 * @see CIM.IEC61970.Informative.InfOperations.ChangeItem#getErpPerson
 * @model opposite="ErpPerson"
 * @generated
 */
EList<ChangeItem> getChangeItems();
/**
 * Returns the value of the '<em><b>MName</b></em>' attribute.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>MName</em>' attribute isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>MName</em>' attribute.
 * @see #setMName(String)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_MName()
 * @model required="true"
 *        annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='Middle name(s) or initial(s).'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='Middle name(s) or initial(s).'"
 * @generated
 */
String getMName();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getMName <em>MName</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>MName</em>' attribute.
 * @see #getMName()
 * @generated
 */
void setMName(String value);
/**
 * Returns the value of the '<em><b>Skills</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfCommon.Skill}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfCommon.Skill#getErpPerson <em>Erp Person</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Skills</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Skills</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_Skills()
 * @see CIM.IEC61970.Informative.InfCommon.Skill#getErpPerson
 * @model opposite="ErpPerson"
 * @generated
 */
EList<Skill> getSkills();
/**
 * Returns the value of the '<em><b>Electronic Address</b></em>' reference.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Electronic Address</em>' reference isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Electronic Address</em>' reference.
 * @see #setElectronicAddress(ElectronicAddress)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_ElectronicAddress()
 * @model annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='Electronic address.'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='Electronic address.'"
 * @generated
 */
ElectronicAddress getElectronicAddress();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getElectronicAddress <em>Electronic Address</em>}' reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Electronic Address</em>' reference.
 * @see #getElectronicAddress()
 * @generated
 */
void setElectronicAddress(ElectronicAddress value);
/**
 * Returns the value of the '<em><b>Government ID</b></em>' attribute.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Government ID</em>' attribute isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Government ID</em>' attribute.
 * @see #setGovernmentID(String)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_GovernmentID()
 * @model required="true"
 *        annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='Unique identifier for person relative to its governing authority, for example a federal tax identifier (such as a Social Security number in the United States).'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='Unique identifier for person relative to its governing authority, for example a federal tax identifier (such as a Social Security number in the United States).'"
 * @generated
 */
String getGovernmentID();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getGovernmentID <em>Government ID</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Government ID</em>' attribute.
 * @see #getGovernmentID()
 * @generated
 */
void setGovernmentID(String value);
/**
 * Returns the value of the '<em><b>Special Need</b></em>' attribute.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Special Need</em>' attribute isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Special Need</em>' attribute.
 * @see #setSpecialNeed(String)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_SpecialNeed()
 * @model required="true"
 *        annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='Special service needs for the person (contact) are described; examples include life support, etc.'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='Special service needs for the person (contact) are described; examples include life support, etc.'"
 * @generated
 */
String getSpecialNeed();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getSpecialNeed <em>Special Need</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Special Need</em>' attribute.
 * @see #getSpecialNeed()
 * @generated
 */
void setSpecialNeed(String value);
/**
 * Returns the value of the '<em><b>Erp Organisation Roles</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfERPSupport.OrgErpPersonRole}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfERPSupport.OrgErpPersonRole#getErpPerson <em>Erp Person</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Erp Organisation Roles</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Erp Organisation Roles</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_ErpOrganisationRoles()
 * @see CIM.IEC61970.Informative.InfERPSupport.OrgErpPersonRole#getErpPerson
 * @model opposite="ErpPerson"
 * @generated
 */
EList<OrgErpPersonRole> getErpOrganisationRoles();
/**
 * Returns the value of the '<em><b>Status</b></em>' reference.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Status</em>' reference isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Status</em>' reference.
 * @see #setStatus(Status)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_Status()
 * @model
 * @generated
 */
Status getStatus();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getStatus <em>Status</em>}' reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Status</em>' reference.
 * @see #getStatus()
 * @generated
 */
void setStatus(Status value);
/**
 * Returns the value of the '<em><b>Prefix</b></em>' attribute.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Prefix</em>' attribute isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Prefix</em>' attribute.
 * @see #setPrefix(String)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_Prefix()
 * @model required="true"
 *        annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='A prefix or title for the person\'s name, such as Miss, Mister, Doctor, etc.'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='A prefix or title for the person\'s name, such as Miss, Mister, Doctor, etc.'"
 * @generated
 */
String getPrefix();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getPrefix <em>Prefix</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Prefix</em>' attribute.
 * @see #getPrefix()
 * @generated
 */
void setPrefix(String value);
/**
 * Returns the value of the '<em><b>Labor Items</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfWork.LaborItem}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfWork.LaborItem#getErpPersons <em>Erp Persons</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Labor Items</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Labor Items</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_LaborItems()
 * @see CIM.IEC61970.Informative.InfWork.LaborItem#getErpPersons
 * @model opposite="ErpPersons"
 * @generated
 */
EList<LaborItem> getLaborItems();
/**
 * Returns the value of the '<em><b>Crafts</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfCommon.Craft}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfCommon.Craft#getErpPersons <em>Erp Persons</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Crafts</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Crafts</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_Crafts()
 * @see CIM.IEC61970.Informative.InfCommon.Craft#getErpPersons
 * @model opposite="ErpPersons"
 * @generated
 */
EList<Craft> getCrafts();
/**
 * Returns the value of the '<em><b>Mobile Phone</b></em>' reference.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Mobile Phone</em>' reference isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Mobile Phone</em>' reference.
 * @see #setMobilePhone(TelephoneNumber)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_MobilePhone()
 * @model annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='Mobile phone number.'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='Mobile phone number.'"
 * @generated
 */
TelephoneNumber getMobilePhone();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getMobilePhone <em>Mobile Phone</em>}' reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Mobile Phone</em>' reference.
 * @see #getMobilePhone()
 * @generated
 */
void setMobilePhone(TelephoneNumber value);
/**
 * Returns the value of the '<em><b>Erp Competency</b></em>' reference.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfERPSupport.ErpCompetency#getErpPersons <em>Erp Persons</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Erp Competency</em>' reference isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Erp Competency</em>' reference.
 * @see #setErpCompetency(ErpCompetency)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_ErpCompetency()
 * @see CIM.IEC61970.Informative.InfERPSupport.ErpCompetency#getErpPersons
 * @model opposite="ErpPersons"
 * @generated
 */
ErpCompetency getErpCompetency();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getErpCompetency <em>Erp Competency</em>}' reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Erp Competency</em>' reference.
 * @see #getErpCompetency()
 * @generated
 */
void setErpCompetency(ErpCompetency value);
/**
 * Returns the value of the '<em><b>Land Property Roles</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfLocations.PersonPropertyRole}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfLocations.PersonPropertyRole#getErpPerson <em>Erp Person</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Land Property Roles</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Land Property Roles</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_LandPropertyRoles()
 * @see CIM.IEC61970.Informative.InfLocations.PersonPropertyRole#getErpPerson
 * @model opposite="ErpPerson"
 * @generated
 */
EList<PersonPropertyRole> getLandPropertyRoles();
/**
 * Returns the value of the '<em><b>Landline Phone</b></em>' reference.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Landline Phone</em>' reference isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Landline Phone</em>' reference.
 * @see #setLandlinePhone(TelephoneNumber)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_LandlinePhone()
 * @model annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='Landline phone number.'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='Landline phone number.'"
 * @generated
 */
TelephoneNumber getLandlinePhone();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getLandlinePhone <em>Landline Phone</em>}' reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Landline Phone</em>' reference.
 * @see #getLandlinePhone()
 * @generated
 */
void setLandlinePhone(TelephoneNumber value);
/**
 * Returns the value of the '<em><b>Measurement Values</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Meas.MeasurementValue}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Meas.MeasurementValue#getErpPerson <em>Erp Person</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Measurement Values</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Measurement Values</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_MeasurementValues()
 * @see CIM.IEC61970.Meas.MeasurementValue#getErpPerson
 * @model opposite="ErpPerson"
 * @generated
 */
EList<MeasurementValue> getMeasurementValues();
/**
 * Returns the value of the '<em><b>Activity Records</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61968.Common.ActivityRecord}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61968.Common.ActivityRecord#getErpPersons <em>Erp Persons</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Activity Records</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Activity Records</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_ActivityRecords()
 * @see CIM.IEC61968.Common.ActivityRecord#getErpPersons
 * @model opposite="ErpPersons"
 * @generated
 */
EList<ActivityRecord> getActivityRecords();
/**
 * Returns the value of the '<em><b>Crews</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfWork.Crew}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfWork.Crew#getCrewMembers <em>Crew Members</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Crews</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Crews</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_Crews()
 * @see CIM.IEC61970.Informative.InfWork.Crew#getCrewMembers
 * @model opposite="CrewMembers"
 *        annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='All Crews to which this ErpPerson belongs.'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='All Crews to which this ErpPerson belongs.'"
 * @generated
 */
EList<Crew> getCrews();
/**
 * Returns the value of the '<em><b>Appointments</b></em>' reference list.
 * The list contents are of type {@link CIM.IEC61970.Informative.InfWork.Appointment}.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfWork.Appointment#getErpPersons <em>Erp Persons</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Appointments</em>' reference list isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Appointments</em>' reference list.
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_Appointments()
 * @see CIM.IEC61970.Informative.InfWork.Appointment#getErpPersons
 * @model opposite="ErpPersons"
 * @generated
 */
EList<Appointment> getAppointments();
/**
 * Returns the value of the '<em><b>Erp Personnel</b></em>' reference.
 * It is bidirectional and its opposite is '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPersonnel#getErpPersons <em>Erp Persons</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Erp Personnel</em>' reference isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Erp Personnel</em>' reference.
 * @see #setErpPersonnel(ErpPersonnel)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_ErpPersonnel()
 * @see CIM.IEC61970.Informative.InfERPSupport.ErpPersonnel#getErpPersons
 * @model opposite="ErpPersons"
 * @generated
 */
ErpPersonnel getErpPersonnel();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getErpPersonnel <em>Erp Personnel</em>}' reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Erp Personnel</em>' reference.
 * @see #getErpPersonnel()
 * @generated
 */
void setErpPersonnel(ErpPersonnel value);
/**
 * Returns the value of the '<em><b>Suffix</b></em>' attribute.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Suffix</em>' attribute isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Suffix</em>' attribute.
 * @see #setSuffix(String)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_Suffix()
 * @model required="true"
 *        annotation="http://iec.ch/TC57/2009/CIM-schema-cim14 Documentation='A suffix for the person\'s name, such as II, III, etc.'"
 *        annotation="http://www.eclipse.org/emf/2002/GenModel Documentation='A suffix for the person\'s name, such as II, III, etc.'"
 * @generated
 */
String getSuffix();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getSuffix <em>Suffix</em>}' attribute.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Suffix</em>' attribute.
 * @see #getSuffix()
 * @generated
 */
void setSuffix(String value);
/**
 * Returns the value of the '<em><b>Customer Data</b></em>' reference.
 * It is bidirectional and its opposite is '{@link CIM.IEC61968.Customers.Customer#getErpPersons <em>Erp Persons</em>}'.
 * <!-- begin-user-doc -->
 * <p>
 * If the meaning of the '<em>Customer Data</em>' reference isn't clear,
 * there really should be more of a description here...
 * </p>
 * <!-- end-user-doc -->
 * @return the value of the '<em>Customer Data</em>' reference.
 * @see #setCustomerData(Customer)
 * @see CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage#getErpPerson_CustomerData()
 * @see CIM.IEC61968.Customers.Customer#getErpPersons
 * @model opposite="ErpPersons"
 * @generated
 */
Customer getCustomerData();
/**
 * Sets the value of the '{@link CIM.IEC61970.Informative.InfERPSupport.ErpPerson#getCustomerData <em>Customer Data</em>}' reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param value the new value of the '<em>Customer Data</em>' reference.
 * @see #getCustomerData()
 * @generated
 */
void setCustomerData(Customer value);
} // ErpPerson
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import com.google.common.collect.Iterators;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.cql3.functions.Function;
import org.apache.cassandra.cql3.functions.Functions;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.db.Cell;
import org.apache.cassandra.db.CounterCell;
import org.apache.cassandra.db.ExpiringCell;
import org.apache.cassandra.db.context.CounterContext;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.marshal.LongType;
import org.apache.cassandra.db.marshal.UserType;
import org.apache.cassandra.db.marshal.UTF8Type;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.utils.ByteBufferUtil;
/**
 * Represents the selection clause of a CQL3 SELECT statement: which columns must be
 * fetched from the storage engine ({@link #getColumnsList}) and how the fetched values
 * are turned into result rows ({@link #handleRow}).
 *
 * Two concrete strategies exist: {@link SimpleSelection} (plain column references only,
 * rows are returned as fetched) and {@link SelectionWithFunctions} (selectors such as
 * writetime()/ttl(), field selection on user types, or function calls are evaluated
 * per row).
 */
public abstract class Selection
{
    // Columns (in fetch order) whose values are needed to evaluate this selection.
    // May grow via addColumnForOrdering() for ORDER BY columns not in the select list.
    private final List<ColumnDefinition> columnsList;
    // Names/types describing the columns of the ResultSet returned to the client.
    private final ResultSet.Metadata metadata;
    // Whether cell timestamps must be collected (some selector uses writetime()).
    private final boolean collectTimestamps;
    // Whether cell TTLs must be collected (some selector uses ttl()).
    private final boolean collectTTLs;

    /**
     * @param columnsList       columns to fetch, in order.
     * @param metadata          result-set column specifications (one per selected item).
     * @param collectTimestamps true if writetime() is used anywhere in the selection.
     * @param collectTTLs       true if ttl() is used anywhere in the selection.
     */
    protected Selection(List<ColumnDefinition> columnsList, List<ColumnSpecification> metadata, boolean collectTimestamps, boolean collectTTLs)
    {
        this.columnsList = columnsList;
        this.metadata = new ResultSet.Metadata(metadata);
        this.collectTimestamps = collectTimestamps;
        this.collectTTLs = collectTTLs;
    }

    // Overridden by SimpleSelection when the selection was built from a 'SELECT *'.
    public boolean isWildcard()
    {
        return false;
    }

    /** @return the metadata describing the result set produced by this selection. */
    public ResultSet.Metadata getResultMetadata()
    {
        return metadata;
    }

    /** Builds the selection corresponding to 'SELECT *': every column, in select order. */
    public static Selection wildcard(CFMetaData cfm)
    {
        List<ColumnDefinition> all = new ArrayList<ColumnDefinition>(cfm.allColumns().size());
        Iterators.addAll(all, cfm.allColumnsInSelectOrder());
        return new SimpleSelection(all, true);
    }

    /** Builds a selection over an explicit list of columns (no functions involved). */
    public static Selection forColumns(List<ColumnDefinition> columnsList)
    {
        return new SimpleSelection(columnsList, false);
    }

    /**
     * Registers an extra column that must be fetched for ORDER BY but is not part of
     * the user-visible result (it is added to the metadata as non-serialized).
     *
     * @return the index of the column in the fetched row.
     */
    public int addColumnForOrdering(ColumnDefinition c)
    {
        columnsList.add(c);
        metadata.addNonSerializedColumn(c);
        return columnsList.size() - 1;
    }

    // A selection "uses functions" as soon as any selector is not a bare column
    // reference (writetime/ttl, field selection and function calls all count).
    private static boolean isUsingFunction(List<RawSelector> rawSelectors)
    {
        for (RawSelector rawSelector : rawSelectors)
        {
            if (!(rawSelector.selectable instanceof ColumnIdentifier))
                return true;
        }
        return false;
    }

    // Returns the index of def in l, appending it first if not yet present, so each
    // column is fetched only once however many selectors reference it.
    private static int addAndGetIndex(ColumnDefinition def, List<ColumnDefinition> l)
    {
        int idx = l.indexOf(def);
        if (idx < 0)
        {
            idx = l.size();
            l.add(def);
        }
        return idx;
    }

    /**
     * Converts one raw (parsed) selector into an executable Selector, collecting the
     * columns it needs into {@code defs} and, when {@code metadata} is non-null (i.e.
     * for top-level selectors only, not nested arguments), the corresponding result
     * column specification.
     *
     * @throws InvalidRequestException on unknown columns, misuse of writetime()/ttl(),
     *         invalid field selection or unknown functions.
     */
    private static Selector makeSelector(CFMetaData cfm, RawSelector raw, List<ColumnDefinition> defs, List<ColumnSpecification> metadata) throws InvalidRequestException
    {
        if (raw.selectable instanceof ColumnIdentifier)
        {
            // Plain column reference.
            ColumnDefinition def = cfm.getColumnDefinition((ColumnIdentifier)raw.selectable);
            if (def == null)
                throw new InvalidRequestException(String.format("Undefined name %s in selection clause", raw.selectable));
            if (metadata != null)
                metadata.add(raw.alias == null ? def : makeAliasSpec(cfm, def.type, raw.alias));
            return new SimpleSelector(def.name.toString(), addAndGetIndex(def, defs), def.type);
        }
        else if (raw.selectable instanceof Selectable.WritetimeOrTTL)
        {
            // writetime(col) / ttl(col): only valid on regular, non-collection columns.
            Selectable.WritetimeOrTTL tot = (Selectable.WritetimeOrTTL)raw.selectable;
            ColumnDefinition def = cfm.getColumnDefinition(tot.id);
            if (def == null)
                throw new InvalidRequestException(String.format("Undefined name %s in selection clause", tot.id));
            if (def.isPrimaryKeyColumn())
                throw new InvalidRequestException(String.format("Cannot use selection function %s on PRIMARY KEY part %s", tot.isWritetime ? "writeTime" : "ttl", def.name));
            if (def.type.isCollection())
                throw new InvalidRequestException(String.format("Cannot use selection function %s on collections", tot.isWritetime ? "writeTime" : "ttl"));
            if (metadata != null)
                metadata.add(makeWritetimeOrTTLSpec(cfm, tot, raw.alias));
            return new WritetimeOrTTLSelector(def.name.toString(), addAndGetIndex(def, defs), tot.isWritetime);
        }
        else if (raw.selectable instanceof Selectable.WithFieldSelection)
        {
            // udtColumn.field: the selected expression must be of a user type and the
            // field name must be one of that type's declared fields.
            Selectable.WithFieldSelection withField = (Selectable.WithFieldSelection)raw.selectable;
            Selector selected = makeSelector(cfm, new RawSelector(withField.selected, null), defs, null);
            AbstractType<?> type = selected.getType();
            if (!(type instanceof UserType))
                throw new InvalidRequestException(String.format("Invalid field selection: %s of type %s is not a user type", withField.selected, type.asCQL3Type()));
            UserType ut = (UserType)type;
            for (int i = 0; i < ut.types.size(); i++)
            {
                if (!ut.columnNames.get(i).equals(withField.field.bytes))
                    continue;
                if (metadata != null)
                    metadata.add(makeFieldSelectSpec(cfm, withField, ut.types.get(i), raw.alias));
                return new FieldSelector(ut, i, selected);
            }
            throw new InvalidRequestException(String.format("%s of type %s has no field %s", withField.selected, type.asCQL3Type(), withField.field));
        }
        else
        {
            // fun(arg, ...): recursively build argument selectors (without metadata,
            // since only the outermost selector contributes a result column).
            Selectable.WithFunction withFun = (Selectable.WithFunction)raw.selectable;
            List<Selector> args = new ArrayList<Selector>(withFun.args.size());
            for (Selectable rawArg : withFun.args)
                args.add(makeSelector(cfm, new RawSelector(rawArg, null), defs, null));
            AbstractType<?> returnType = Functions.getReturnType(withFun.functionName, cfm.ksName, cfm.cfName);
            if (returnType == null)
                throw new InvalidRequestException(String.format("Unknown function '%s'", withFun.functionName));
            ColumnSpecification spec = makeFunctionSpec(cfm, withFun, returnType, raw.alias);
            Function fun = Functions.get(cfm.ksName, withFun.functionName, args, spec);
            if (metadata != null)
                metadata.add(spec);
            return new FunctionSelector(fun, args);
        }
    }

    // Result column spec for writetime(col) (bigint) or ttl(col) (int).
    private static ColumnSpecification makeWritetimeOrTTLSpec(CFMetaData cfm, Selectable.WritetimeOrTTL tot, ColumnIdentifier alias)
    {
        return new ColumnSpecification(cfm.ksName,
                                       cfm.cfName,
                                       alias == null ? new ColumnIdentifier(tot.toString(), true) : alias,
                                       tot.isWritetime ? LongType.instance : Int32Type.instance);
    }

    // Result column spec for a user-type field selection, typed as the field's type.
    private static ColumnSpecification makeFieldSelectSpec(CFMetaData cfm, Selectable.WithFieldSelection s, AbstractType<?> type, ColumnIdentifier alias)
    {
        return new ColumnSpecification(cfm.ksName,
                                       cfm.cfName,
                                       alias == null ? new ColumnIdentifier(s.toString(), true) : alias,
                                       type);
    }

    // Result column spec for a function call, typed as the function's return type.
    private static ColumnSpecification makeFunctionSpec(CFMetaData cfm,
                                                        Selectable.WithFunction fun,
                                                        AbstractType<?> returnType,
                                                        ColumnIdentifier alias) throws InvalidRequestException
    {
        if (returnType == null)
            throw new InvalidRequestException(String.format("Unknown function %s called in selection clause", fun.functionName));
        return new ColumnSpecification(cfm.ksName,
                                       cfm.cfName,
                                       alias == null ? new ColumnIdentifier(fun.toString(), true) : alias,
                                       returnType);
    }

    // Same column, presented under the user-provided alias.
    private static ColumnSpecification makeAliasSpec(CFMetaData cfm, AbstractType<?> type, ColumnIdentifier alias)
    {
        return new ColumnSpecification(cfm.ksName, cfm.cfName, alias, type);
    }

    /**
     * Builds a Selection from the parsed select list. Uses the cheaper
     * {@link SimpleSelection} when every selector is a bare column reference, and the
     * selector-evaluating {@link SelectionWithFunctions} otherwise.
     */
    public static Selection fromSelectors(CFMetaData cfm, List<RawSelector> rawSelectors) throws InvalidRequestException
    {
        boolean usesFunction = isUsingFunction(rawSelectors);
        if (usesFunction)
        {
            List<ColumnDefinition> defs = new ArrayList<ColumnDefinition>();
            List<ColumnSpecification> metadata = new ArrayList<ColumnSpecification>(rawSelectors.size());
            List<Selector> selectors = new ArrayList<Selector>(rawSelectors.size());
            boolean collectTimestamps = false;
            boolean collectTTLs = false;
            for (RawSelector rawSelector : rawSelectors)
            {
                Selector selector = makeSelector(cfm, rawSelector, defs, metadata);
                selectors.add(selector);
                // Only fetch timestamps/TTLs when actually requested by some selector.
                if (selector instanceof WritetimeOrTTLSelector)
                {
                    collectTimestamps |= ((WritetimeOrTTLSelector)selector).isWritetime;
                    collectTTLs |= !((WritetimeOrTTLSelector)selector).isWritetime;
                }
            }
            return new SelectionWithFunctions(defs, metadata, selectors, collectTimestamps, collectTTLs);
        }
        else
        {
            List<ColumnDefinition> defs = new ArrayList<ColumnDefinition>(rawSelectors.size());
            List<ColumnSpecification> metadata = new ArrayList<ColumnSpecification>(rawSelectors.size());
            for (RawSelector rawSelector : rawSelectors)
            {
                // Guaranteed by isUsingFunction() returning false above.
                assert rawSelector.selectable instanceof ColumnIdentifier;
                ColumnDefinition def = cfm.getColumnDefinition((ColumnIdentifier)rawSelector.selectable);
                if (def == null)
                    throw new InvalidRequestException(String.format("Undefined name %s in selection clause", rawSelector.selectable));
                defs.add(def);
                metadata.add(rawSelector.alias == null ? def : makeAliasSpec(cfm, def.type, rawSelector.alias));
            }
            return new SimpleSelection(defs, metadata, false);
        }
    }

    /**
     * Turns the current (fully accumulated) row of the builder into the list of values
     * to return to the client for that row.
     */
    protected abstract List<ByteBuffer> handleRow(ResultSetBuilder rs) throws InvalidRequestException;

    /**
     * @return the list of CQL3 columns value this SelectionClause needs.
     */
    public List<ColumnDefinition> getColumnsList()
    {
        return columnsList;
    }

    /**
     * @param now query timestamp in milliseconds, used for liveness and TTL computation.
     */
    public ResultSetBuilder resultSetBuilder(long now)
    {
        return new ResultSetBuilder(now);
    }

    // Extracts a cell's value; counter cells store a counter context whose total must
    // be computed rather than returned raw.
    private static ByteBuffer value(Cell c)
    {
        return (c instanceof CounterCell)
             ? ByteBufferUtil.bytes(CounterContext.instance().total(c.value()))
             : c.value();
    }

    /**
     * Accumulates fetched cells into CQL3 rows. Callers add cell values one by one,
     * call newRow() at each row boundary, and finish with build().
     */
    public class ResultSetBuilder
    {
        private final ResultSet resultSet;

        /*
         * We'll build CQL3 row one by one.
         * The currentRow is the values for the (CQL3) columns we've fetched.
         * We also collect timestamps and ttls for the case where the writetime and
         * ttl functions are used. Note that we might collect timestamp and/or ttls
         * we don't care about, but since the array below are allocated just once,
         * it doesn't matter performance wise.
         */
        List<ByteBuffer> current;
        final long[] timestamps;  // null unless some selector uses writetime()
        final int[] ttls;         // null unless some selector uses ttl()
        final long now;           // query time (ms), used to decide cell liveness

        private ResultSetBuilder(long now)
        {
            this.resultSet = new ResultSet(getResultMetadata(), new ArrayList<List<ByteBuffer>>());
            this.timestamps = collectTimestamps ? new long[columnsList.size()] : null;
            this.ttls = collectTTLs ? new int[columnsList.size()] : null;
            this.now = now;
        }

        /** Adds an already-serialized value (no timestamp/TTL information available). */
        public void add(ByteBuffer v)
        {
            current.add(v);
        }

        /**
         * Adds a cell's value to the current row; dead cells contribute null. The
         * cell's timestamp/TTL are recorded at the same index when collected
         * (-1 marks "dead" for timestamps and "no TTL" for ttls).
         */
        public void add(Cell c)
        {
            current.add(isDead(c) ? null : value(c));
            if (timestamps != null)
            {
                timestamps[current.size() - 1] = isDead(c) ? -1 : c.timestamp();
            }
            if (ttls != null)
            {
                int ttl = -1;
                if (!isDead(c) && c instanceof ExpiringCell)
                    ttl = c.getLocalDeletionTime() - (int) (now / 1000);
                ttls[current.size() - 1] = ttl;
            }
        }

        // A missing cell or one deleted/expired as of 'now' counts as dead.
        private boolean isDead(Cell c)
        {
            return c == null || c.isMarkedForDelete(now);
        }

        /** Flushes the current row (if any) into the result set and starts a new one. */
        public void newRow() throws InvalidRequestException
        {
            if (current != null)
                resultSet.addRow(handleRow(this));
            current = new ArrayList<ByteBuffer>(columnsList.size());
        }

        /** Flushes any pending row and returns the completed result set. */
        public ResultSet build() throws InvalidRequestException
        {
            if (current != null)
            {
                resultSet.addRow(handleRow(this));
                current = null;
            }
            return resultSet;
        }
    }

    // Special cased selection for when no function is used (this save some allocations).
    private static class SimpleSelection extends Selection
    {
        private final boolean isWildcard;

        public SimpleSelection(List<ColumnDefinition> columnsList, boolean isWildcard)
        {
            // ColumnDefinition extends ColumnSpecification, so the columns themselves
            // double as the result metadata here.
            this(columnsList, new ArrayList<ColumnSpecification>(columnsList), isWildcard);
        }

        public SimpleSelection(List<ColumnDefinition> columnsList, List<ColumnSpecification> metadata, boolean isWildcard)
        {
            /*
             * In theory, even a simple selection could have multiple time the same column, so we
             * could filter those duplicate out of columnsList. But since we're very unlikely to
             * get much duplicate in practice, it's more efficient not to bother.
             */
            super(columnsList, metadata, false, false);
            this.isWildcard = isWildcard;
        }

        // No selectors to evaluate: the fetched row is the result row.
        protected List<ByteBuffer> handleRow(ResultSetBuilder rs)
        {
            return rs.current;
        }

        @Override
        public boolean isWildcard()
        {
            return isWildcard;
        }
    }

    /**
     * One item of the select list when functions are involved: computes its value
     * from the row accumulated in the ResultSetBuilder.
     */
    private static abstract class Selector implements AssignementTestable
    {
        public abstract ByteBuffer compute(ResultSetBuilder rs) throws InvalidRequestException;

        public abstract AbstractType<?> getType();

        // Assignable iff the CQL3 types match exactly.
        public boolean isAssignableTo(String keyspace, ColumnSpecification receiver)
        {
            return getType().asCQL3Type().equals(receiver.type.asCQL3Type());
        }
    }

    // Selector for a bare column reference: returns the fetched value at a fixed index.
    private static class SimpleSelector extends Selector
    {
        private final String columnName;
        private final int idx;  // index of the column in the fetched row
        private final AbstractType<?> type;

        public SimpleSelector(String columnName, int idx, AbstractType<?> type)
        {
            this.columnName = columnName;
            this.idx = idx;
            this.type = type;
        }

        public ByteBuffer compute(ResultSetBuilder rs)
        {
            return rs.current.get(idx);
        }

        public AbstractType<?> getType()
        {
            return type;
        }

        @Override
        public String toString()
        {
            return columnName;
        }
    }

    // Selector for a function call: evaluates argument selectors, then the function.
    private static class FunctionSelector extends Selector
    {
        private final Function fun;
        private final List<Selector> argSelectors;

        public FunctionSelector(Function fun, List<Selector> argSelectors)
        {
            this.fun = fun;
            this.argSelectors = argSelectors;
        }

        public ByteBuffer compute(ResultSetBuilder rs) throws InvalidRequestException
        {
            List<ByteBuffer> args = new ArrayList<ByteBuffer>(argSelectors.size());
            for (Selector s : argSelectors)
                args.add(s.compute(rs));
            return fun.execute(args);
        }

        public AbstractType<?> getType()
        {
            return fun.returnType();
        }

        @Override
        public String toString()
        {
            StringBuilder sb = new StringBuilder();
            sb.append(fun.name()).append("(");
            for (int i = 0; i < argSelectors.size(); i++)
            {
                if (i > 0)
                    sb.append(", ");
                sb.append(argSelectors.get(i));
            }
            return sb.append(")").toString();
        }
    }

    // Selector for udtValue.field: splits the serialized user type and picks one field.
    private static class FieldSelector extends Selector
    {
        private final UserType type;
        private final int field;  // index of the field within the user type
        private final Selector selected;

        public FieldSelector(UserType type, int field, Selector selected)
        {
            this.type = type;
            this.field = field;
            this.selected = selected;
        }

        public ByteBuffer compute(ResultSetBuilder rs) throws InvalidRequestException
        {
            ByteBuffer[] buffers = type.split(selected.compute(rs));
            // A value serialized with fewer trailing fields yields null for the missing ones.
            return field < buffers.length ? buffers[field] : null;
        }

        public AbstractType<?> getType()
        {
            return type.types.get(field);
        }

        @Override
        public String toString()
        {
            return String.format("%s.%s", selected, UTF8Type.instance.getString(type.columnNames.get(field)));
        }
    }

    // Selector for writetime(col)/ttl(col), read from the builder's side arrays.
    private static class WritetimeOrTTLSelector extends Selector
    {
        private final String columnName;
        private final int idx;  // index of the column in the fetched row
        private final boolean isWritetime;  // true for writetime(), false for ttl()

        public WritetimeOrTTLSelector(String columnName, int idx, boolean isWritetime)
        {
            this.columnName = columnName;
            this.idx = idx;
            this.isWritetime = isWritetime;
        }

        public ByteBuffer compute(ResultSetBuilder rs)
        {
            if (isWritetime)
            {
                // -1 marks a dead cell (see ResultSetBuilder.add), returned as null.
                long ts = rs.timestamps[idx];
                return ts >= 0 ? ByteBufferUtil.bytes(ts) : null;
            }
            // Non-positive TTL means none was set (or the cell is dead).
            int ttl = rs.ttls[idx];
            return ttl > 0 ? ByteBufferUtil.bytes(ttl) : null;
        }

        public AbstractType<?> getType()
        {
            return isWritetime ? LongType.instance : Int32Type.instance;
        }

        @Override
        public String toString()
        {
            return columnName;
        }
    }

    // Selection whose result rows are produced by evaluating one selector per item.
    private static class SelectionWithFunctions extends Selection
    {
        private final List<Selector> selectors;

        public SelectionWithFunctions(List<ColumnDefinition> columnsList, List<ColumnSpecification> metadata, List<Selector> selectors, boolean collectTimestamps, boolean collectTTLs)
        {
            super(columnsList, metadata, collectTimestamps, collectTTLs);
            this.selectors = selectors;
        }

        protected List<ByteBuffer> handleRow(ResultSetBuilder rs) throws InvalidRequestException
        {
            List<ByteBuffer> result = new ArrayList<ByteBuffer>();
            for (Selector selector : selectors)
            {
                result.add(selector.compute(rs));
            }
            return result;
        }
    }
}
| |
/*
* Copyright 2010 Lorenzo Carrara
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* This code is mainly adapated from Xerces 2.6.0 and Jena 2.6.2
* Xerces copyright and license:
* Copyright (c) 1999-2003 The Apache Software Foundation. All rights reserved.
* License http://www.apache.org/licenses/LICENSE-2.0
*
* Jena copyright and license:
* Copyright 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Specific source classes:
*
* Xerces:
* org.apache.xerces.impl.dv.XSSimpleType
* org.apache.xerces.impl.dv.xs.XSSimpleTypeDecl
*/
package it.polimi.dei.dbgroup.pedigree.androjena.xsd.impl;
import it.polimi.dei.dbgroup.pedigree.androjena.xsd.XSDBuiltinType;
import it.polimi.dei.dbgroup.pedigree.androjena.xsd.XSDBuiltinTypeFormatException;
import java.util.regex.Pattern;
import org.apache.xerces.util.XMLChar;
/**
 * Implementation of an XML Schema built-in simple type, adapted from Xerces'
 * XSSimpleTypeDecl. Each instance knows its base type, a validator that parses and
 * compares values, and the constraining facets (pattern, whitespace, min/max
 * inclusive, minLength, special patterns such as NMTOKEN/Name/NCName) that apply
 * when parsing lexical content.
 */
public class XSDBuiltinTypeImpl implements XSDBuiltinType {
	// The root of the built-in type hierarchy: xsd:anySimpleType (no base type).
	public static final XSDBuiltinTypeImpl ANY_SIMPLE_TYPE = new XSDBuiltinTypeImpl(null, "anySimpleType", TypeValidator.ANY_SIMPLE_TYPE);
	private static final String XSD_NAMESPACE = "http://www.w3.org/2001/XMLSchema";
	private String mName;
	// Base type, or null for the root type; facets are inherited from it.
	private XSDBuiltinTypeImpl mBase;
	// Performs lexical-to-value conversion, comparison and extra per-type checks.
	private TypeValidator mValidator;
	// Bitmask of the XSDFacets.* flags currently in effect for this type.
	private short mDefinedFacets = 0;
	// Facet values; only meaningful when the corresponding bit is set in mDefinedFacets.
	private Object mMaxInclusive = null;
	private Object mMinInclusive = null;
	private short mWhitespace = XSDFacets.WS_PRESERVE;
	private Pattern mPattern = null;
	private String mPatternStr = null;
	private int mMinLength = -1;
	private short mSpecialPattern = XSDFacets.SPECIAL_PATTERN_NONE;
	// Display names indexed by the special-pattern constant, used in error messages.
	// NOTE(review): assumes SPECIAL_PATTERN_NONE..SPECIAL_PATTERN_NCNAME are 0..3 —
	// confirm against XSDFacets.
	private static final String[] SPECIAL_PATTERN_STRING = {
		"NONE", "NMTOKEN", "Name", "NCName"
	};

	/**
	 * Creates a type derived from {@code base}, inheriting the base's facets.
	 *
	 * @param base base type, or null for the root type.
	 * @param name local name of the type (namespace is always the XSD namespace).
	 * @param validator value-space validator for this type.
	 */
	public XSDBuiltinTypeImpl(XSDBuiltinTypeImpl base, String name,
			TypeValidator validator) {
		this.mBase = base;
		this.mName = name;
		this.mValidator = validator;
		inheritFacetsFromBase();
	}

	/** Derives a type that reuses its base's validator (facet-only restriction). */
	public XSDBuiltinTypeImpl(XSDBuiltinTypeImpl base, String name) {
		this(base, name, base.mValidator);
	}

	/** Null-safe value equality: null is never equal to anything. */
	public boolean isEqual(Object value1, Object value2) {
		if (value1 == null) {
			return false;
		}
		return value1.equals(value2);
	}

	/**
	 * Applies the facets selected by the {@code actualFacets} bitmask from
	 * {@code facets} to this type and records them in mDefinedFacets.
	 * Bound facet values are parsed with the BASE type's rules.
	 *
	 * @throws XSDBuiltinTypeFormatException if a bound value cannot be parsed.
	 */
	public void applyFacets(XSDFacets facets, short actualFacets)
			throws XSDBuiltinTypeFormatException {
		if ((actualFacets & XSDFacets.MAX_INCLUSIVE) != 0) {
			mMaxInclusive = mBase.parse(facets.maxInclusive);
		}
		if ((actualFacets & XSDFacets.MIN_INCLUSIVE) != 0) {
			mMinInclusive = mBase.parse(facets.minInclusive);
		}
		if ((actualFacets & XSDFacets.WHITESPACE) != 0) {
			mWhitespace = facets.whitespace;
		}
		if ((actualFacets & XSDFacets.PATTERN) != 0) {
			mPatternStr = facets.pattern;
			mPattern = Pattern.compile(mPatternStr);
		}
		if ((actualFacets & XSDFacets.MIN_LENGTH) != 0) {
			mMinLength = facets.minLength;
		}
		if((actualFacets & XSDFacets.SPECIAL_PATTERN) != 0) {
			mSpecialPattern = facets.specialPattern;
		}
		mDefinedFacets |= actualFacets;
	}

	// Variant used during built-in type construction where the facet values are
	// known-good; a format failure there is a programming error, not user input.
	void applyFacetsInternal(XSDFacets facets, short actualFacets)
	{
		try
		{
			applyFacets(facets, actualFacets);
		}
		catch(XSDBuiltinTypeFormatException ex)
		{
			throw new RuntimeException("internal error", ex);
		}
	}

	public String getName() {
		return mName;
	}

	public String getNamespace() {
		return XSD_NAMESPACE;
	}

	public XSDBuiltinType getBaseType() {
		return mBase;
	}

	/**
	 * Parses lexical content into this type's value space: whitespace-normalizes,
	 * checks the pattern and special-pattern facets against the normalized string,
	 * converts to the actual value, then checks the remaining facets and the
	 * validator's extra rules.
	 *
	 * @throws XSDBuiltinTypeFormatException if any check fails.
	 */
	public Object parse(String lexicalContent)
			throws XSDBuiltinTypeFormatException {
		String normalized = normalize(lexicalContent);
		if (isFacetDefined(XSDFacets.PATTERN)) {
			if (!mPattern.matcher(normalized).matches())
				throw new XSDBuiltinTypeFormatException(lexicalContent, this,
						"lexical content does not match type pattern: "
								+ mPatternStr);
		}
		if(isFacetDefined(XSDFacets.SPECIAL_PATTERN))
		{
			if(mSpecialPattern != XSDFacets.SPECIAL_PATTERN_NONE)
			{
				boolean seenErr = false;
				if (mSpecialPattern == XSDFacets.SPECIAL_PATTERN_NMTOKEN) {
					// PATTERN "\\c+"
					seenErr = !XMLChar.isValidNmtoken(normalized);
				}
				else if (mSpecialPattern == XSDFacets.SPECIAL_PATTERN_NAME) {
					// PATTERN "\\i\\c*"
					seenErr = !XMLChar.isValidName(normalized);
				}
				else if (mSpecialPattern == XSDFacets.SPECIAL_PATTERN_NCNAME) {
					// PATTERN "[\\i-[:]][\\c-[:]]*"
					seenErr = !XMLChar.isValidNCName(normalized);
				}
				if (seenErr) {
					throw new XSDBuiltinTypeFormatException(normalized, this, "normalized value is not a valid " + SPECIAL_PATTERN_STRING[mSpecialPattern]);
				}
			}
		}
		Object actualValue = mValidator.getActualValue(normalized);
		checkFacets(normalized, actualValue);
		mValidator.checkExtraRules(actualValue);
		return actualValue;
	}

	/**
	 * Whitespace-normalizes the lexical content. Without a pattern facet the
	 * validator's normalization type (none/trim) short-circuits; otherwise the
	 * whitespace facet applies: WS_PRESERVE returns the input unchanged,
	 * WS_REPLACE maps tab/LF/CR to space, and collapse additionally merges runs
	 * of whitespace and drops leading/trailing whitespace (Xerces semantics).
	 */
	private String normalize(String lexicalContent) {
		if (lexicalContent == null)
			return null;
		if (!isFacetDefined(XSDFacets.PATTERN)) {
			short normType = mValidator.getNormalizationType();
			if (normType == TypeValidator.NORMALIZE_NONE) {
				return lexicalContent;
			} else if (normType == TypeValidator.NORMALIZE_TRIM) {
				return XMLChar.trim(lexicalContent);
			}
		}
		// lexicalContent is non-null here; the guard is kept from the original code.
		int len = lexicalContent == null ? 0 : lexicalContent.length();
		if (len == 0 || mWhitespace == XSDFacets.WS_PRESERVE)
			return lexicalContent;
		StringBuffer sb = new StringBuffer();
		if (mWhitespace == XSDFacets.WS_REPLACE) {
			char ch;
			// when it's replace, just replace #x9, #xa, #xd by #x20
			for (int i = 0; i < len; i++) {
				ch = lexicalContent.charAt(i);
				if (ch != 0x9 && ch != 0xa && ch != 0xd)
					sb.append(ch);
				else
					sb.append((char) 0x20);
			}
		} else {
			char ch;
			int i;
			boolean isLeading = true;
			// when it's collapse
			for (i = 0; i < len; i++) {
				ch = lexicalContent.charAt(i);
				// append real characters, so we passed leading ws
				if (ch != 0x9 && ch != 0xa && ch != 0xd && ch != 0x20) {
					sb.append(ch);
					isLeading = false;
				} else {
					// for whitespaces, we skip all following ws
					for (; i < len - 1; i++) {
						ch = lexicalContent.charAt(i + 1);
						if (ch != 0x9 && ch != 0xa && ch != 0xd && ch != 0x20)
							break;
					}
					// if it's not a leading or tailing ws, then append a space
					if (i < len - 1 && !isLeading)
						sb.append((char) 0x20);
				}
			}
		}
		return sb.toString();
	}

	/**
	 * Checks the value-space facets (minLength, minInclusive, maxInclusive)
	 * against the parsed value. Skipped entirely when only whitespace (or no
	 * facet at all) is defined. minLength is not checked for QName values,
	 * mirroring the original Xerces behavior.
	 */
	private void checkFacets(String normalized, Object actualValue) throws XSDBuiltinTypeFormatException
	{
		if(mDefinedFacets == 0 || mDefinedFacets == XSDFacets.WHITESPACE) return;
		if(mValidator != TypeValidator.QNAME && isFacetDefined(XSDFacets.MIN_LENGTH))
		{
			int length = mValidator.getDataLength(actualValue);
			if(length < mMinLength) throw new XSDBuiltinTypeFormatException(normalized, this, "data length is less than specified minimum length (" + length + " < " + mMinLength + ")");
		}
		int compare;
		if(isFacetDefined(XSDFacets.MAX_INCLUSIVE))
		{
			compare = mValidator.compare(actualValue, mMaxInclusive);
			if(compare != TypeValidator.LESS_THAN && compare != TypeValidator.EQUAL) throw new XSDBuiltinTypeFormatException(normalized, this, "data is greater than specified maximum (inclusive)");
		}
		if(isFacetDefined(XSDFacets.MIN_INCLUSIVE))
		{
			compare = mValidator.compare(actualValue, mMinInclusive);
			if(compare != TypeValidator.GREATER_THAN && compare != TypeValidator.EQUAL) throw new XSDBuiltinTypeFormatException(normalized, this, "data is less than specified minimum (inclusive)");
		}
	}

	// True when the given facet bit is set in mDefinedFacets.
	private boolean isFacetDefined(short facet) {
		return (mDefinedFacets & facet) != 0;
	}

	// Copies the base type's facet state; derived types start as unrestricted
	// copies of their base and then apply their own facets on top.
	private void inheritFacetsFromBase() {
		if (mBase != null) {
			mDefinedFacets = mBase.mDefinedFacets;
			mMaxInclusive = mBase.mMaxInclusive;
			mMinInclusive = mBase.mMinInclusive;
			mPattern = mBase.mPattern;
			mPatternStr = mBase.mPatternStr;
			mWhitespace = mBase.mWhitespace;
			mMinLength = mBase.mMinLength;
			mSpecialPattern = mBase.mSpecialPattern;
		}
	}
}
| |
/*
Copyright 2015 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.Gui.MainWindow.Implementations;
import java.awt.Window;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JTree;
import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.CUtilityFunctions;
import com.google.security.zynamics.binnavi.Database.Exceptions.CPartialLoadException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntDeleteException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException;
import com.google.security.zynamics.binnavi.Database.Exceptions.LoadCancelledException;
import com.google.security.zynamics.binnavi.Database.Interfaces.IDatabase;
import com.google.security.zynamics.binnavi.Gui.ErrorDialog.CNaviErrorDialog;
import com.google.security.zynamics.binnavi.Gui.Loaders.CProjectLoader;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Updaters.INodeSelectionUpdater;
import com.google.security.zynamics.binnavi.Gui.MainWindow.ProjectTree.Updaters.ITreeUpdater;
import com.google.security.zynamics.binnavi.Gui.Progress.CDefaultProgressOperation;
import com.google.security.zynamics.binnavi.disassembly.INaviAddressSpace;
import com.google.security.zynamics.binnavi.disassembly.INaviProject;
import com.google.security.zynamics.binnavi.disassembly.AddressSpaces.CAddressSpace;
import com.google.security.zynamics.binnavi.disassembly.views.INaviView;
import com.google.security.zynamics.zylib.gui.CMessageBox;
/**
* Contains helper functions for working with projects.
*/
public final class CProjectFunctions {
  /**
   * You are not supposed to instantiate this class; it only provides static helper
   * functions for working with projects.
   */
  private CProjectFunctions() {
  }
/**
* Creates a new default address space in a project.
*
* @param parent Parent window used for dialogs.
* @param newProject Project where the address space is created.
*
* @throws CouldntSaveDataException Thrown if the address space could not be created.
*/
private static void createDefaultAddressSpace(final JFrame parent, final INaviProject newProject)
throws CouldntSaveDataException {
final CAddressSpace addressSpace =
newProject.getContent().createAddressSpace("Default address space");
try {
addressSpace.load();
} catch (final CouldntLoadDataException exception) {
CUtilityFunctions.logException(exception);
final String innerMessage = "E00139: " + "Default address space could not be loaded";
final String innerDescription =
CUtilityFunctions
.createDescription(
"The default address space of the new project could not be loaded.",
new String[] {"There was a problem with the database connection."},
new String[] {"The new project was created but its default address space could not be loaded."});
CNaviErrorDialog.show(parent, innerMessage, innerDescription, exception);
} catch (final LoadCancelledException e) {
// Do nothing
}
}
  /**
   * Adds a new address space with a default name ("New Address Space") to a given
   * project. The work runs on a background thread with a two-step progress display
   * (create, then load); on success the project tree is updated through the given
   * updater.
   *
   * @param parent Parent window used for dialogs.
   * @param project The project where the new address space is added.
   * @param updater Updates the project tree after the execution is complete.
   */
  public static void addAddressSpace(final Window parent, final INaviProject project,
      final INodeSelectionUpdater updater) {
    new Thread() {
      @Override
      public void run() {
        final CDefaultProgressOperation operation = new CDefaultProgressOperation("", false, true);
        operation.getProgressPanel().setMaximum(2);
        // NOTE(review): operation.stop() is only reached on the success path below;
        // on the exception paths the progress operation appears to remain active.
        // Confirm whether stop() must also be called on failure (e.g. via finally).
        try {
          operation.getProgressPanel().setText("Creating new address space");
          operation.getProgressPanel().next();
          final CAddressSpace addressSpace =
              project.getContent().createAddressSpace("New Address Space");
          operation.getProgressPanel().setText("Loading new address space");
          addressSpace.load();
          operation.getProgressPanel().next();
          operation.stop();
          // Select the freshly created address space in the project tree.
          updater.setObject(addressSpace);
          updater.update();
        } catch (final CouldntSaveDataException exception) {
          // Creation failed: nothing was added to the project.
          CUtilityFunctions.logException(exception);
          final String innerMessage = "E00136: " + "Could not add address space";
          final String innerDescription =
              CUtilityFunctions.createDescription(String.format(
                  "It was not possible to add a new address space to the project '%s'.", project
                      .getConfiguration().getName()),
                  new String[] {"There was a problem with the database connection."},
                  new String[] {"The address space was not created."});
          CNaviErrorDialog.show(parent, innerMessage, innerDescription, exception);
        } catch (final CouldntLoadDataException exception) {
          // Creation succeeded but the subsequent load failed.
          CUtilityFunctions.logException(exception);
          final String innerMessage = "E00137: " + "Could not load the new address space";
          final String innerDescription =
              CUtilityFunctions.createDescription(String.format(
                  "The new address space in project '%s' was created but it could not be loaded.",
                  project.getConfiguration().getName()),
                  new String[] {"There was a problem with the database connection."},
                  new String[] {"The address space was created but not loaded."});
          CNaviErrorDialog.show(parent, innerMessage, innerDescription, exception);
        } catch (final LoadCancelledException e) {
          // Do nothing: the user cancelled loading deliberately.
        }
      }
    }.start();
  }
  /**
   * Copies a view to a project.
   *
   * <p>If the source view is not yet loaded, it is loaded on demand first; load
   * failures are reported to the user via an error dialog (or silently ignored
   * for user-cancelled loads) and abort the copy.
   *
   * @param parent Parent window used for dialogs.
   * @param project The project to where the view is copied.
   * @param view The view to copy to the project.
   */
  public static void copyView(final JFrame parent, final INaviProject project, final INaviView view) {
    Preconditions.checkNotNull(parent, "IE01835: Parent argument can not be null");
    Preconditions.checkNotNull(project, "IE01836: Project argument can not be null");
    Preconditions.checkNotNull(view, "IE01837: View argument can not be null");
    // The view must be loaded before it can be copied; load it on demand.
    if (!view.isLoaded()) {
      try {
        view.load();
      } catch (final CouldntLoadDataException e) {
        CUtilityFunctions.logException(e);
        final String innerMessage =
            "E00138: " + "View could not be copied because it could not be loaded";
        final String innerDescription =
            CUtilityFunctions.createDescription(
                String.format("The view '%s' could not be copied.", view.getName()),
                new String[] {"There was a problem with the database connection."},
                new String[] {"The new view was not created."});
        CNaviErrorDialog.show(parent, innerMessage, innerDescription, e);
        return;
      } catch (final CPartialLoadException e) {
        // TODO: This
        // NOTE(review): partial-load failures are only logged, with no user
        // feedback — confirm whether a dialog should be shown here too.
        CUtilityFunctions.logException(e);
        return;
      } catch (final LoadCancelledException e) {
        // User cancelled the load; abort the copy without reporting an error.
        return;
      }
    }
    // Create the copy in the target project, then release the source view.
    project.getContent().createView(view, view.getName(), view.getConfiguration().getDescription());
    view.close();
    // NOTE(review): view.save() is invoked after view.close(); confirm that
    // saving a closed view is intentional and not a call-ordering bug.
    try {
      view.save();
    } catch (final CouldntSaveDataException exception) {
      CUtilityFunctions.logException(exception);
      final String innerMessage = "E00206: " + "Could not save view";
      final String innerDescription =
          CUtilityFunctions.createDescription(
              String.format("The function '%s' could not be saved.", view.getName()),
              new String[] {"There was a problem with the database connection."},
              new String[] {"The graph remains unsaved."});
      CNaviErrorDialog.show(parent, innerMessage, innerDescription, exception);
    }
  }
  /**
   * Creates a new project.
   *
   * <p>The work happens on a freshly spawned background thread so the UI stays
   * responsive; progress is reported through a three-step progress operation
   * (create, load, populate/update). Database failures are shown to the user
   * in an error dialog.
   *
   * @param parent Parent window used for dialogs.
   * @param database Database where the project is created.
   * @param updater Responsible for updating the project tree after the project was created.
   */
  public static void createProject(final JFrame parent, final IDatabase database,
      final INodeSelectionUpdater updater) {
    new Thread() {
      @Override
      public void run() {
        try {
          final CDefaultProgressOperation operation =
              new CDefaultProgressOperation("", false, true);
          operation.getProgressPanel().setMaximum(3);
          operation.getProgressPanel().setText("Creating new project");
          // Step 1: create the project in the database.
          final INaviProject newProject = database.getContent().addProject("New Project");
          operation.getProgressPanel().next();
          // Step 2: load the new project; a user-cancelled load is deliberately
          // ignored so the project still ends up in the tree.
          try {
            newProject.load();
          } catch (final LoadCancelledException e) {
            // Do nothing
          }
          operation.getProgressPanel().next();
          // Step 3: give the project a default address space and select it in
          // the project tree.
          createDefaultAddressSpace(parent, newProject);
          updater.setObject(newProject);
          updater.update();
          operation.getProgressPanel().next();
          operation.stop();
        } catch (final CouldntSaveDataException exception) {
          CUtilityFunctions.logException(exception);
          final String innerMessage = "E00140: " + "New project could not be created";
          final String innerDescription =
              CUtilityFunctions.createDescription(
                  "It was not possible to create a new project in the selected database.",
                  new String[] {"There was a problem with the database connection."},
                  new String[] {"No new project was created in the selected database."});
          CNaviErrorDialog.show(parent, innerMessage, innerDescription, exception);
        } catch (final CouldntLoadDataException exception) {
          CUtilityFunctions.logException(exception);
          final String innerMessage = "E00141: " + "New project could not be loaded";
          final String innerDescription =
              CUtilityFunctions.createDescription("The new project could not be loaded.",
                  new String[] {"There was a problem with the database connection."},
                  new String[] {"The new project was created but it could not be loaded."});
          CNaviErrorDialog.show(parent, innerMessage, innerDescription, exception);
        }
      }
    }.start();
  }
/**
* Opens one or more projects.
*
* @param projectTree Project tree of the main window.
* @param projects The projects to load.
*/
public static void openProjects(final JTree projectTree, final INaviProject[] projects) {
for (final INaviProject project : projects) {
CProjectLoader.loadProject(projectTree, project);
}
}
  /**
   * Removes an address spaces from a project.
   *
   * <p>Asks the user for confirmation once for the whole batch, then deletes
   * each address space on its own background thread. Before deletion, a loaded
   * address space is closed first; since close() can be refused (the error text
   * below mentions a veto by BinNavi or a plugin), the loaded state is checked
   * again afterwards and the deletion is skipped with an error dialog if the
   * close did not take effect.
   *
   * @param parent Parent window used for dialogs.
   * @param project The project the address space belongs to.
   * @param addressSpaces The address spaces to be removed from the project.
   * @param updater Updates the project tree after the address space was removed.
   */
  public static void removeAddressSpace(final JFrame parent, final INaviProject project,
      final INaviAddressSpace[] addressSpaces, final ITreeUpdater updater) {
    if (CMessageBox.showYesNoQuestion(parent, String.format(
        "Do you really want to delete the following address spaces from the project?\n\n%s",
        CNameListGenerators.getNameList(addressSpaces))) == JOptionPane.YES_OPTION) {
      for (final INaviAddressSpace addressSpace : addressSpaces) {
        // One worker thread per address space; each gets its own progress operation.
        new Thread() {
          @Override
          public void run() {
            final CDefaultProgressOperation operation =
                new CDefaultProgressOperation("", false, true);
            operation.getProgressPanel().setMaximum(1);
            operation.getProgressPanel().setText(
                "Removing address space" + ": " + addressSpace.getConfiguration().getName());
            operation.getProgressPanel().next();
            if (addressSpace.isLoaded()) {
              addressSpace.close();
            }
            // Re-check: close() may have been vetoed, in which case the address
            // space is still loaded and must not be deleted.
            if (addressSpace.isLoaded()) {
              final String innerMessage = "E00123: " + "Address space could not be deleted";
              final String innerDescription =
                  CUtilityFunctions.createDescription(String.format(
                      "BinNavi could not delete the address space '%s'.", addressSpace
                          .getConfiguration().getName()),
                      new String[] {"BinNavi or one of the active plugins vetoed the deletion "
                          + "operation."},
                      new String[] {"The address space can not be deleted until the delete "
                          + "operation is not vetoed anymore."});
              CNaviErrorDialog.show(parent, innerMessage, innerDescription);
            } else {
              try {
                project.getContent().removeAddressSpace(addressSpace);
                updater.update();
              } catch (final CouldntDeleteException exception) {
                CUtilityFunctions.logException(exception);
                final String innerMessage = "E00143: " + "Address space could not be deleted";
                final String innerDescription =
                    CUtilityFunctions.createDescription(
                        "The selected address space could not be deleted.",
                        new String[] {"There was a problem with the database connection."},
                        new String[] {"The address space was not deleted."});
                CNaviErrorDialog.show(parent, innerMessage, innerDescription, exception);
              }
            }
            operation.stop();
          }
        }.start();
      }
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht;
import java.io.Externalizable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.GridDirectCollection;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.distributed.GridDistributedLockRequest;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.GridLeanMap;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.apache.ignite.transactions.TransactionIsolation;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * DHT lock request.
 *
 * <p>Extends {@link GridDistributedLockRequest} with the near keys to lock,
 * reader-invalidation and preload flags, owner-mapped versions, topology
 * version and access TTL. The class participates in Ignite's direct
 * marshalling: {@link #writeTo} and {@link #readFrom} serialize the fields
 * added here as states 20-29 on top of the superclass states.
 */
public class GridDhtLockRequest extends GridDistributedLockRequest {
    /** */
    private static final long serialVersionUID = 0L;
    /** Near keys to lock. */
    @GridToStringInclude
    @GridDirectCollection(KeyCacheObject.class)
    private List<KeyCacheObject> nearKeys;
    /** Invalidate reader flags. */
    private BitSet invalidateEntries;
    /** Mini future ID. */
    private IgniteUuid miniId;
    /** Owner mapped version, if any. */
    @GridToStringInclude
    @GridDirectTransient
    private Map<KeyCacheObject, GridCacheVersion> owned;
    /** Array of keys from {@link #owned}. Used during marshalling and unmarshalling. */
    @GridToStringExclude
    private KeyCacheObject[] ownedKeys;
    /** Array of values from {@link #owned}. Used during marshalling and unmarshalling. */
    @GridToStringExclude
    private GridCacheVersion[] ownedValues;
    /** Topology version. */
    private AffinityTopologyVersion topVer;
    /** Subject ID. */
    private UUID subjId;
    /** Task name hash. */
    private int taskNameHash;
    /** Indexes of keys needed to be preloaded. */
    private BitSet preloadKeys;
    /** TTL for read operation. */
    private long accessTtl;
    /**
     * Empty constructor required for {@link Externalizable}.
     */
    public GridDhtLockRequest() {
        // No-op.
    }
    /**
     * @param cacheId Cache ID.
     * @param nodeId Node ID.
     * @param nearXidVer Near transaction ID.
     * @param threadId Thread ID.
     * @param futId Future ID.
     * @param miniId Mini future ID.
     * @param lockVer Cache version.
     * @param topVer Topology version.
     * @param isInTx {@code True} if implicit transaction lock.
     * @param isRead Indicates whether implicit lock is for read or write operation.
     * @param isolation Transaction isolation.
     * @param isInvalidate Invalidation flag.
     * @param timeout Lock timeout.
     * @param dhtCnt DHT count.
     * @param nearCnt Near count.
     * @param txSize Expected transaction size.
     * @param subjId Subject ID.
     * @param taskNameHash Task name hash code.
     * @param accessTtl TTL for read operation.
     * @param skipStore Skip store flag.
     */
    public GridDhtLockRequest(
        int cacheId,
        UUID nodeId,
        GridCacheVersion nearXidVer,
        long threadId,
        IgniteUuid futId,
        IgniteUuid miniId,
        GridCacheVersion lockVer,
        @NotNull AffinityTopologyVersion topVer,
        boolean isInTx,
        boolean isRead,
        TransactionIsolation isolation,
        boolean isInvalidate,
        long timeout,
        int dhtCnt,
        int nearCnt,
        int txSize,
        @Nullable UUID subjId,
        int taskNameHash,
        long accessTtl,
        boolean skipStore
    ) {
        // Key count passed to the superclass falls back to the near count when
        // there are no DHT keys.
        super(cacheId,
            nodeId,
            nearXidVer,
            threadId,
            futId,
            lockVer,
            isInTx,
            isRead,
            isolation,
            isInvalidate,
            timeout,
            dhtCnt == 0 ? nearCnt : dhtCnt,
            txSize,
            skipStore);
        this.topVer = topVer;
        nearKeys = nearCnt == 0 ? Collections.<KeyCacheObject>emptyList() : new ArrayList<KeyCacheObject>(nearCnt);
        invalidateEntries = new BitSet(dhtCnt == 0 ? nearCnt : dhtCnt);
        assert miniId != null;
        this.miniId = miniId;
        this.subjId = subjId;
        this.taskNameHash = taskNameHash;
        this.accessTtl = accessTtl;
    }
    /** {@inheritDoc} */
    @Override public boolean allowForStartup() {
        return true;
    }
    /**
     * @return Near node ID.
     */
    public UUID nearNodeId() {
        return nodeId();
    }
    /**
     * @return Subject ID.
     */
    public UUID subjectId() {
        return subjId;
    }
    /**
     * @return Task name hash.
     */
    public int taskNameHash() {
        return taskNameHash;
    }
    /**
     * @return Topology version.
     */
    @Override public AffinityTopologyVersion topologyVersion() {
        return topVer;
    }
    /**
     * Adds a Near key.
     *
     * @param key Key.
     * @param ctx Context. Unused here; kept for signature symmetry with {@link #addDhtKey}.
     * @throws IgniteCheckedException If failed.
     */
    public void addNearKey(KeyCacheObject key, GridCacheSharedContext ctx)
        throws IgniteCheckedException {
        nearKeys.add(key);
    }
    /**
     * @return Near keys. Never {@code null}; empty list when none were added.
     */
    public List<KeyCacheObject> nearKeys() {
        return nearKeys == null ? Collections.<KeyCacheObject>emptyList() : nearKeys;
    }
    /**
     * Adds a DHT key.
     *
     * @param key Key.
     * @param invalidateEntry Flag indicating whether node should attempt to invalidate reader.
     * @param ctx Context.
     * @throws IgniteCheckedException If failed.
     */
    public void addDhtKey(
        KeyCacheObject key,
        boolean invalidateEntry,
        GridCacheContext ctx
    ) throws IgniteCheckedException {
        // 'idx' is the running key counter maintained by the superclass; the
        // invalidation flag is recorded at the position this key will occupy.
        invalidateEntries.set(idx, invalidateEntry);
        addKeyBytes(key, false, null, ctx);
    }
    /**
     * Marks last added key for preloading.
     */
    public void markLastKeyForPreload() {
        assert idx > 0;
        // Lazily created: most requests carry no preload flags at all.
        if (preloadKeys == null)
            preloadKeys = new BitSet();
        preloadKeys.set(idx - 1, true);
    }
    /**
     * @param idx Key index.
     * @return {@code True} if need to preload key with given index.
     */
    public boolean needPreloadKey(int idx) {
        return preloadKeys != null && preloadKeys.get(idx);
    }
    /**
     * Sets owner and its mapped version.
     *
     * @param key Key.
     * @param ownerMapped Owner mapped version.
     */
    public void owned(KeyCacheObject key, GridCacheVersion ownerMapped) {
        // Lazily created lean map: the owned mapping is usually small or absent.
        if (owned == null)
            owned = new GridLeanMap<>(3);
        owned.put(key, ownerMapped);
    }
    /**
     * @param key Key.
     * @return Owner and its mapped versions.
     */
    @Nullable public GridCacheVersion owned(KeyCacheObject key) {
        return owned == null ? null : owned.get(key);
    }
    /**
     * @param idx Entry index to check.
     * @return {@code True} if near entry should be invalidated.
     */
    public boolean invalidateNearEntry(int idx) {
        return invalidateEntries.get(idx);
    }
    /**
     * @return Mini ID.
     */
    public IgniteUuid miniId() {
        return miniId;
    }
    /**
     * @return TTL for read operation.
     */
    public long accessTtl() {
        return accessTtl;
    }
    /**
     * {@inheritDoc}
     *
     * <p>Additionally flattens the transient {@link #owned} map into the
     * {@link #ownedKeys}/{@link #ownedValues} parallel arrays so it survives
     * direct marshalling.
     *
     * @param ctx Shared cache context.
     */
    @Override public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException {
        super.prepareMarshal(ctx);
        prepareMarshalCacheObjects(nearKeys, ctx.cacheContext(cacheId));
        if (owned != null) {
            ownedKeys = new KeyCacheObject[owned.size()];
            ownedValues = new GridCacheVersion[ownedKeys.length];
            int i = 0;
            for (Map.Entry<KeyCacheObject, GridCacheVersion> entry : owned.entrySet()) {
                ownedKeys[i] = entry.getKey();
                ownedValues[i] = entry.getValue();
                i++;
            }
        }
    }
    /** {@inheritDoc} */
    @Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException {
        super.finishUnmarshal(ctx, ldr);
        finishUnmarshalCacheObjects(nearKeys, ctx.cacheContext(cacheId), ldr);
        // Rebuild the transient 'owned' map from the marshalled parallel arrays,
        // then drop the arrays.
        if (ownedKeys != null) {
            owned = new GridLeanMap<>(ownedKeys.length);
            for (int i = 0; i < ownedKeys.length; i++) {
                ownedKeys[i].finishUnmarshal(ctx.cacheContext(cacheId).cacheObjectContext(), ldr);
                owned.put(ownedKeys[i], ownedValues[i]);
            }
            ownedKeys = null;
            ownedValues = null;
        }
    }
    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);
        if (!super.writeTo(buf, writer))
            return false;
        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;
            writer.onHeaderWritten();
        }
        // Deliberate fall-through: each case writes one field, advances the
        // writer state and continues with the next field. Returning false
        // signals a full buffer; writing resumes at the recorded state.
        switch (writer.state()) {
            case 20:
                if (!writer.writeLong("accessTtl", accessTtl))
                    return false;
                writer.incrementState();
            case 21:
                if (!writer.writeBitSet("invalidateEntries", invalidateEntries))
                    return false;
                writer.incrementState();
            case 22:
                if (!writer.writeIgniteUuid("miniId", miniId))
                    return false;
                writer.incrementState();
            case 23:
                if (!writer.writeCollection("nearKeys", nearKeys, MessageCollectionItemType.MSG))
                    return false;
                writer.incrementState();
            case 24:
                if (!writer.writeObjectArray("ownedKeys", ownedKeys, MessageCollectionItemType.MSG))
                    return false;
                writer.incrementState();
            case 25:
                if (!writer.writeObjectArray("ownedValues", ownedValues, MessageCollectionItemType.MSG))
                    return false;
                writer.incrementState();
            case 26:
                if (!writer.writeBitSet("preloadKeys", preloadKeys))
                    return false;
                writer.incrementState();
            case 27:
                if (!writer.writeUuid("subjId", subjId))
                    return false;
                writer.incrementState();
            case 28:
                if (!writer.writeInt("taskNameHash", taskNameHash))
                    return false;
                writer.incrementState();
            case 29:
                if (!writer.writeMessage("topVer", topVer))
                    return false;
                writer.incrementState();
        }
        return true;
    }
    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);
        if (!reader.beforeMessageRead())
            return false;
        if (!super.readFrom(buf, reader))
            return false;
        // Mirror of writeTo: deliberate fall-through, one field per state,
        // resumable on partial reads.
        switch (reader.state()) {
            case 20:
                accessTtl = reader.readLong("accessTtl");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 21:
                invalidateEntries = reader.readBitSet("invalidateEntries");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 22:
                miniId = reader.readIgniteUuid("miniId");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 23:
                nearKeys = reader.readCollection("nearKeys", MessageCollectionItemType.MSG);
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 24:
                ownedKeys = reader.readObjectArray("ownedKeys", MessageCollectionItemType.MSG, KeyCacheObject.class);
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 25:
                ownedValues = reader.readObjectArray("ownedValues", MessageCollectionItemType.MSG, GridCacheVersion.class);
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 26:
                preloadKeys = reader.readBitSet("preloadKeys");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 27:
                subjId = reader.readUuid("subjId");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 28:
                taskNameHash = reader.readInt("taskNameHash");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
            case 29:
                topVer = reader.readMessage("topVer");
                if (!reader.isLastRead())
                    return false;
                reader.incrementState();
        }
        return reader.afterMessageRead(GridDhtLockRequest.class);
    }
    /** {@inheritDoc} */
    @Override public byte directType() {
        return 30;
    }
    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 30;
    }
    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridDhtLockRequest.class, this, "super", super.toString());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.ahc.AhcComponent;
/**
 * To call external HTTP services using Async Http Client.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface AhcComponentBuilderFactory {
    /**
     * AHC (camel-ahc)
     * To call external HTTP services using Async Http Client.
     *
     * Category: http
     * Since: 2.8
     * Maven coordinates: org.apache.camel:camel-ahc
     */
    static AhcComponentBuilder ahc() {
        return new AhcComponentBuilderImpl();
    }
    /**
     * Builder for the AHC component.
     */
    interface AhcComponentBuilder extends ComponentBuilder<AhcComponent> {
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default AhcComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether to allow java serialization when a request uses
         * context-type=application/x-java-serialized-object This is by default
         * turned off. If you enable this then be aware that Java will
         * deserialize the incoming data from the request to Java and that can
         * be a potential security risk.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AhcComponentBuilder allowJavaSerializedObject(
                boolean allowJavaSerializedObject) {
            doSetProperty("allowJavaSerializedObject", allowJavaSerializedObject);
            return this;
        }
        /**
         * Whether the component should use basic property binding (Camel 2.x)
         * or the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AhcComponentBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * To use a custom AhcBinding which allows to control how to bind
         * between AHC and Camel.
         *
         * The option is a:
         * <code>org.apache.camel.component.ahc.AhcBinding</code> type.
         *
         * Group: advanced
         */
        default AhcComponentBuilder binding(
                org.apache.camel.component.ahc.AhcBinding binding) {
            doSetProperty("binding", binding);
            return this;
        }
        /**
         * To use a custom AsyncHttpClient.
         *
         * The option is a: <code>org.asynchttpclient.AsyncHttpClient</code>
         * type.
         *
         * Group: advanced
         */
        default AhcComponentBuilder client(
                org.asynchttpclient.AsyncHttpClient client) {
            doSetProperty("client", client);
            return this;
        }
        /**
         * To configure the AsyncHttpClient to use a custom
         * com.ning.http.client.AsyncHttpClientConfig instance.
         *
         * The option is a:
         * <code>org.asynchttpclient.AsyncHttpClientConfig</code> type.
         *
         * Group: advanced
         */
        default AhcComponentBuilder clientConfig(
                org.asynchttpclient.AsyncHttpClientConfig clientConfig) {
            doSetProperty("clientConfig", clientConfig);
            return this;
        }
        /**
         * To use a custom org.apache.camel.spi.HeaderFilterStrategy to filter
         * header to and from Camel message.
         *
         * The option is a:
         * <code>org.apache.camel.spi.HeaderFilterStrategy</code> type.
         *
         * Group: filter
         */
        default AhcComponentBuilder headerFilterStrategy(
                org.apache.camel.spi.HeaderFilterStrategy headerFilterStrategy) {
            doSetProperty("headerFilterStrategy", headerFilterStrategy);
            return this;
        }
        /**
         * Reference to a org.apache.camel.support.jsse.SSLContextParameters in
         * the Registry. Note that configuring this option will override any
         * SSL/TLS configuration options provided through the clientConfig
         * option at the endpoint or component level.
         *
         * The option is a:
         * <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
         *
         * Group: security
         */
        default AhcComponentBuilder sslContextParameters(
                org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
            doSetProperty("sslContextParameters", sslContextParameters);
            return this;
        }
        /**
         * Enable usage of global SSL context parameters.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: security
         */
        default AhcComponentBuilder useGlobalSslContextParameters(
                boolean useGlobalSslContextParameters) {
            doSetProperty("useGlobalSslContextParameters", useGlobalSslContextParameters);
            return this;
        }
    }
    class AhcComponentBuilderImpl
            extends
                AbstractComponentBuilder<AhcComponent>
            implements
                AhcComponentBuilder {
        @Override
        protected AhcComponent buildConcreteComponent() {
            return new AhcComponent();
        }
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            // Property names correspond 1:1 to the builder methods above;
            // returns false for unknown names so the caller can report them.
            switch (name) {
            case "lazyStartProducer": ((AhcComponent) component).setLazyStartProducer((boolean) value); return true;
            case "allowJavaSerializedObject": ((AhcComponent) component).setAllowJavaSerializedObject((boolean) value); return true;
            case "basicPropertyBinding": ((AhcComponent) component).setBasicPropertyBinding((boolean) value); return true;
            case "binding": ((AhcComponent) component).setBinding((org.apache.camel.component.ahc.AhcBinding) value); return true;
            case "client": ((AhcComponent) component).setClient((org.asynchttpclient.AsyncHttpClient) value); return true;
            case "clientConfig": ((AhcComponent) component).setClientConfig((org.asynchttpclient.AsyncHttpClientConfig) value); return true;
            case "headerFilterStrategy": ((AhcComponent) component).setHeaderFilterStrategy((org.apache.camel.spi.HeaderFilterStrategy) value); return true;
            case "sslContextParameters": ((AhcComponent) component).setSslContextParameters((org.apache.camel.support.jsse.SSLContextParameters) value); return true;
            case "useGlobalSslContextParameters": ((AhcComponent) component).setUseGlobalSslContextParameters((boolean) value); return true;
            default: return false;
            }
        }
    }
}
| |
/*
* Copyright 2007 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.newtypes.DeclaredTypeRegistry;
import com.google.javascript.jscomp.newtypes.JSType;
import com.google.javascript.jscomp.newtypes.QualifiedName;
import com.google.javascript.jscomp.newtypes.RawNominalType;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.jstype.FunctionType;
import com.google.javascript.rhino.jstype.JSTypeNative;
import com.google.javascript.rhino.jstype.JSTypeRegistry;
import com.google.javascript.rhino.jstype.ObjectType;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**
* This describes the Closure-specific JavaScript coding conventions.
*
*/
public final class ClosureCodingConvention extends CodingConventions.Proxy {
private static final long serialVersionUID = 1L;
static final DiagnosticType OBJECTLIT_EXPECTED = DiagnosticType.warning(
"JSC_REFLECT_OBJECTLIT_EXPECTED",
"Object literal expected as second argument");
private final Set<String> indirectlyDeclaredProperties;
  /** Creates the convention, delegating to the default coding convention. */
  public ClosureCodingConvention() {
    this(CodingConventions.getDefault());
  }
  /**
   * Creates the convention as a proxy around {@code wrapped}, extending the
   * wrapped convention's indirectly declared properties with "superClass_"
   * (added by goog.inherits, see applySubclassRelationship) plus "instance_"
   * and "getInstance" (presumably the singleton-getter pattern — confirm).
   *
   * @param wrapped The convention to delegate to.
   */
  public ClosureCodingConvention(CodingConvention wrapped) {
    super(wrapped);
    ImmutableSet.Builder<String> props = ImmutableSet.builder();
    props.add(
        "superClass_",
        "instance_",
        "getInstance");
    props.addAll(wrapped.getIndirectlyDeclaredProperties());
    indirectlyDeclaredProperties = props.build();
  }
  /**
   * Closure's goog.inherits adds a {@code superClass_} property to the
   * subclass, and a {@code constructor} property.
   *
   * <p>Both properties are declared on the type system here, on top of
   * whatever the wrapped convention does, but only for INHERITS-style
   * relationships.
   */
  @Override
  public void applySubclassRelationship(FunctionType parentCtor,
      FunctionType childCtor, SubclassType type) {
    super.applySubclassRelationship(parentCtor, childCtor, type);
    if (type == SubclassType.INHERITS) {
      childCtor.defineDeclaredProperty("superClass_",
          parentCtor.getPrototype(), childCtor.getSource());
      childCtor.getPrototype().defineDeclaredProperty("constructor",
          // Notice that constructor functions do not need to be covariant
          // on the superclass.
          // So if G extends F, new G() and new F() can accept completely
          // different argument types, but G.prototype.constructor needs
          // to be covariant on F.prototype.constructor.
          // To get around this, we just turn off type-checking on arguments
          // and return types of G.prototype.constructor.
          childCtor.forgetParameterAndReturnTypes(),
          childCtor.getSource());
    }
  }
  /**
   * {@inheritDoc}
   *
   * <p>Understands several different inheritance patterns that occur in
   * Google code (various uses of {@code inherits} and {@code mixin}).
   *
   * @return The subclass relationship defined by {@code callNode}, or null if
   *     the call does not define one (or the operands are not simple names).
   */
  @Override
  public SubclassRelationship getClassesDefinedByCall(Node callNode) {
    // Give the wrapped convention first crack at the call.
    SubclassRelationship relationship =
        super.getClassesDefinedByCall(callNode);
    if (relationship != null) {
      return relationship;
    }
    Node callName = callNode.getFirstChild();
    SubclassType type = typeofClassDefiningName(callName);
    if (type != null) {
      Node subclass = null;
      Node superclass = callNode.getLastChild();
      // There are six possible syntaxes for a class-defining method:
      // SubClass.inherits(SuperClass)
      // goog.inherits(SubClass, SuperClass)
      // goog$inherits(SubClass, SuperClass)
      // SubClass.mixin(SuperClass.prototype)
      // goog.mixin(SubClass.prototype, SuperClass.prototype)
      // goog$mixin(SubClass.prototype, SuperClass.prototype)
      // The one-argument form (callee is a getprop, call has 2 children:
      // callee + single argument) is the deprecated SubClass.method(...) style.
      boolean isDeprecatedCall = callNode.getChildCount() == 2 &&
          callName.isGetProp();
      if (isDeprecatedCall) {
        // SubClass.inherits(SuperClass)
        subclass = callName.getFirstChild();
      } else if (callNode.getChildCount() == 3) {
        // goog.inherits(SubClass, SuperClass)
        subclass = callName.getNext();
      } else {
        return null;
      }
      if (type == SubclassType.MIXIN) {
        // Only consider mixins that mix two prototypes as related to
        // inheritance.
        if (!endsWithPrototype(superclass)) {
          return null;
        }
        if (!isDeprecatedCall) {
          if (!endsWithPrototype(subclass)) {
            return null;
          }
          // Strip off the prototype from the name.
          subclass = subclass.getFirstChild();
        }
        superclass = superclass.getFirstChild();
      }
      // bail out if either of the side of the "inherits"
      // isn't a real class name. This prevents us from
      // doing something weird in cases like:
      // goog.inherits(MySubClass, cond ? SuperClass1 : BaseClass2)
      if (subclass != null &&
          subclass.isUnscopedQualifiedName() &&
          superclass.isUnscopedQualifiedName()) {
        return new SubclassRelationship(type, subclass, superclass);
      }
    }
    return null;
  }
@Override
public boolean isClassFactoryCall(Node callNode) {
return callNode.getFirstChild().matchesQualifiedName("goog.defineClass");
}
/**
* Determines whether the given node is a class-defining name, like
* "inherits" or "mixin."
* @return The type of class-defining name, or null.
*/
private static SubclassType typeofClassDefiningName(Node callName) {
// Check if the method name matches one of the class-defining methods.
String methodName = null;
if (callName.isGetProp()) {
methodName = callName.getLastChild().getString();
} else if (callName.isName()) {
String name = callName.getString();
int dollarIndex = name.lastIndexOf('$');
if (dollarIndex != -1) {
methodName = name.substring(dollarIndex + 1);
}
}
if (methodName != null) {
if (methodName.equals("inherits")) {
return SubclassType.INHERITS;
} else if (methodName.equals("mixin")) {
return SubclassType.MIXIN;
}
}
return null;
}
@Override
public boolean isSuperClassReference(String propertyName) {
return "superClass_".equals(propertyName) ||
super.isSuperClassReference(propertyName);
}
/**
* Given a qualified name node, returns whether "prototype" is at the end.
* For example:
* a.b.c => false
* a.b.c.prototype => true
*/
private static boolean endsWithPrototype(Node qualifiedName) {
return qualifiedName.isGetProp() &&
qualifiedName.getLastChild().getString().equals("prototype");
}
/**
 * @return Whether the node indicates that the file represents a "module", a file whose top level
 * declarations are not in global scope.
 */
@Override
public boolean extractIsModuleFile(Node node, Node parent) {
  // A file is a module exactly when it contains a goog.module('...') call.
  return extractClassNameIfGoog(node, parent, "goog.module") != null;
}
/**
 * Extracts X from goog.provide('X'), if the applied Node is goog.
 *
 * @return The extracted class name, or null.
 */
@Override
public String extractClassNameIfProvide(Node node, Node parent) {
  // goog.module declares a namespace just like goog.provide, so fall back to
  // it when this call is not a goog.provide.
  String ns = extractClassNameIfGoog(node, parent, "goog.provide");
  return (ns != null) ? ns : extractClassNameIfGoog(node, parent, "goog.module");
}
/**
 * Extracts X from goog.require('X'), if the applied Node is goog.
 *
 * @return The extracted class name, or null.
 */
@Override
public String extractClassNameIfRequire(Node node, Node parent) {
  // Delegates to the shared goog.* call matcher with the "goog.require" name.
  return extractClassNameIfGoog(node, parent, "goog.require");
}
/**
 * If {@code node} is a call of the form functionName('X') appearing as an
 * expression statement, returns 'X'; otherwise returns null.
 *
 * @param functionName the qualified callee name to match, e.g. "goog.provide"
 */
private static String extractClassNameIfGoog(Node node, Node parent,
    String functionName){
  // The call must be a bare expression statement, e.g. goog.provide('X');
  if (!NodeUtil.isExprCall(parent)) {
    return null;
  }
  Node callee = node.getFirstChild();
  if (callee == null
      || !callee.isGetProp()
      || !callee.matchesQualifiedName(functionName)) {
    return null;
  }
  // The single expected argument is the namespace string literal.
  Node arg = callee.getNext();
  return (arg != null && arg.isString()) ? arg.getString() : null;
}
/**
 * Use closure's implementation.
 * @return closure's function name for exporting properties.
 */
@Override
public String getExportPropertyFunction() {
  // Matches goog.exportProperty(owner, 'name', value) calls.
  return "goog.exportProperty";
}
/**
 * Use closure's implementation.
 * @return closure's function name for exporting symbols.
 */
@Override
public String getExportSymbolFunction() {
  // Matches goog.exportSymbol('name', value) calls.
  return "goog.exportSymbol";
}
/**
 * Recognizes Closure calls that declare type names without defining them:
 * goog.addDependency(file, [provides], [requires]) and
 * goog.forwardDeclare('foo.bar').
 *
 * @return the declared type names, or the superclass result if none match.
 */
@Override
public List<String> identifyTypeDeclarationCall(Node n) {
  Node callName = n.getFirstChild();
  // goog.addDependency('file.js', ['name', ...], [...]): the second argument
  // (an array literal) lists the declared type names.
  if (n.getChildCount() >= 3
      && callName.matchesQualifiedName("goog.addDependency")) {
    Node typeArray = callName.getNext().getNext();
    if (typeArray.isArrayLit()) {
      List<String> declared = new ArrayList<>();
      Node entry = typeArray.getFirstChild();
      while (entry != null) {
        if (entry.isString()) {
          declared.add(entry.getString());
        }
        entry = entry.getNext();
      }
      return declared;
    }
  }
  // goog.forwardDeclare('foo.bar') declares exactly one type name.
  if (n.getChildCount() == 2
      && callName.matchesQualifiedName("goog.forwardDeclare")) {
    Node typeDeclaration = n.getSecondChild();
    if (typeDeclaration.isString()) {
      return ImmutableList.of(typeDeclaration.getString());
    }
  }
  return super.identifyTypeDeclarationCall(n);
}
/** @return closure's name for the always-throwing abstract method stub. */
@Override
public String getAbstractMethodName() {
  return "goog.abstractMethod";
}
/**
 * Returns the qualified class name passed to goog.addSingletonGetter(Class),
 * or defers to the superclass for other call shapes.
 */
@Override
public String getSingletonGetterClassName(Node callNode) {
  Node target = callNode.getFirstChild();
  // Match both the original name and the post-CollapseProperties spelling.
  boolean isSingletonGetter =
      callNode.getChildCount() == 2
          && (target.matchesQualifiedName("goog.addSingletonGetter")
              || target.matchesQualifiedName("goog$addSingletonGetter"));
  if (isSingletonGetter) {
    return target.getNext().getQualifiedName();
  }
  return super.getSingletonGetterClassName(callNode);
}
/**
 * Declares the properties goog.addSingletonGetter adds on the constructor:
 * a "getInstance" method and an "instance_" backing field.
 */
@Override
public void applySingletonGetterOld(FunctionType functionType,
    FunctionType getterType, ObjectType objectType) {
  functionType.defineDeclaredProperty("getInstance", getterType,
      functionType.getSource());
  functionType.defineDeclaredProperty("instance_", objectType,
      functionType.getSource());
}
/**
 * New-type-inference counterpart of {@code applySingletonGetterOld}: records
 * "getInstance" and "instance_" as constant constructor properties.
 */
@Override
public void applySingletonGetterNew(
    RawNominalType rawType, JSType getInstanceType, JSType instanceType) {
  rawType.addCtorProperty("getInstance", null, getInstanceType, true);
  rawType.addCtorProperty("instance_", null, instanceType, true);
}
/** @return closure's alias for the global object. */
@Override
public String getGlobalObject() {
  return "goog.global";
}
// Closure-library predicates whose boolean result narrows the type of their
// argument; consulted by isPropertyTestFunction below.
private final Set<String> propertyTestFunctions = ImmutableSet.of(
    "goog.isDef", "goog.isNull", "goog.isDefAndNotNull",
    "goog.isString", "goog.isNumber", "goog.isBoolean",
    "goog.isFunction", "goog.isArray", "goog.isArrayLike", "goog.isObject");
/**
 * Returns whether {@code call} invokes one of closure's type-test predicates
 * (goog.isDef, goog.isString, ...) or a test recognized by the superclass.
 */
@Override
public boolean isPropertyTestFunction(Node call) {
  Preconditions.checkArgument(call.isCall());
  // getQualifiedName() may be null for non-name callees; the immutable set
  // simply reports false for null.
  String calleeName = call.getFirstChild().getQualifiedName();
  if (propertyTestFunctions.contains(calleeName)) {
    return true;
  }
  return super.isPropertyTestFunction(call);
}
/**
 * Treats calls to goog.asserts.fail as terminating control flow, using the
 * shared default matcher.
 */
@Override
public boolean isFunctionCallThatAlwaysThrows(Node n) {
  return CodingConventions.defaultIsFunctionCallThatAlwaysThrows(
      n, "goog.asserts.fail");
}
/**
 * Recognizes goog.reflect.object(Type, {...}) as an object-literal cast.
 * Defers to the superclass first, and reports OBJECTLIT_EXPECTED when the
 * second argument is not an object literal.
 */
@Override
public ObjectLiteralCast getObjectLiteralCast(Node callNode) {
  Preconditions.checkArgument(callNode.isCall());
  // The proxy convention handled by the superclass takes precedence.
  ObjectLiteralCast fromSuper = super.getObjectLiteralCast(callNode);
  if (fromSuper != null) {
    return fromSuper;
  }
  Node target = callNode.getFirstChild();
  boolean isReflectObject = target.matchesQualifiedName("goog.reflect.object")
      && callNode.getChildCount() == 3;
  if (!isReflectObject) {
    return null;
  }
  Node typeNode = target.getNext();
  if (!typeNode.isQualifiedName()) {
    return null;
  }
  // The second argument must be an object literal for the cast to apply.
  Node literal = typeNode.getNext();
  if (!literal.isObjectLit()) {
    return new ObjectLiteralCast(null, null, OBJECTLIT_EXPECTED);
  }
  return new ObjectLiteralCast(typeNode.getQualifiedName(), literal, null);
}
@Override
public boolean isPrivate(String name) {
  // This convention never infers privacy from a name: always false.
  return false;
}
/**
 * Returns the goog.asserts.* functions this convention understands. Each
 * spec maps an assertion function to the type its first argument is
 * guaranteed to have after the assertion passes.
 */
@Override
public Collection<AssertionFunctionSpec> getAssertionFunctions() {
  return ImmutableList.of(
      new AssertionFunctionSpec("goog.asserts.assert", JSType.TRUTHY),
      new AssertionFunctionSpec("goog.asserts.assertNumber",
          JSType.NUMBER, JSTypeNative.NUMBER_TYPE),
      new AssertionFunctionSpec("goog.asserts.assertString",
          JSType.STRING, JSTypeNative.STRING_TYPE),
      new AssertionFunctionSpec("goog.asserts.assertObject",
          JSType.TOP_OBJECT, JSTypeNative.OBJECT_TYPE),
      // These three narrow to named types rather than natives.
      new AssertFunctionByTypeName("goog.asserts.assertFunction", "Function"),
      new AssertFunctionByTypeName("goog.asserts.assertArray", "Array"),
      new AssertFunctionByTypeName("goog.asserts.assertElement", "Element"),
      new AssertInstanceofSpec("goog.asserts.assertInstanceof")
  );
}
/**
 * Recognizes goog.bind(fn, self, args...) and goog.partial(fn, args...)
 * (including their collapsed goog$... spellings) and describes them as Bind
 * operations; anything else is delegated to the superclass.
 */
@Override
public Bind describeFunctionBind(
    Node n, boolean callerChecksTypes, boolean iCheckTypes) {
  if (!n.isCall()) {
    return null;
  }
  Node target = n.getFirstChild();
  if (target.isQualifiedName()) {
    boolean isBind = target.matchesQualifiedName("goog.bind")
        || target.matchesQualifiedName("goog$bind");
    boolean isPartial = !isBind
        && (target.matchesQualifiedName("goog.partial")
            || target.matchesQualifiedName("goog$partial"));
    if (isBind || isPartial) {
      Node fn = target.getNext();
      if (fn == null) {
        return null;
      }
      // goog.bind carries a this-value as its second argument; goog.partial
      // binds no this-value, so its extra arguments start right after fn.
      Node thisValue = isBind ? safeNext(fn) : null;
      Node parameters = isBind ? safeNext(thisValue) : safeNext(fn);
      return new Bind(fn, thisValue, parameters);
    }
  }
  return super.describeFunctionBind(n, callerChecksTypes, iCheckTypes);
}
/**
 * Recognizes goog.reflect.cache(cacheObj, key, valueFn[, keyFn]) calls and
 * describes their argument roles; other calls defer to the superclass.
 */
@Override
public Cache describeCachingCall(Node node) {
  if (!node.isCall()) {
    return null;
  }
  Node target = node.getFirstChild();
  boolean isReflectCache = target.isQualifiedName()
      && (target.matchesQualifiedName("goog.reflect.cache")
          || target.matchesQualifiedName("goog$reflect$cache"));
  if (isReflectCache) {
    // Three required arguments plus an optional key function.
    int argCount = node.getChildCount() - 1;
    if (argCount >= 3 && argCount <= 4) {
      Node cacheObj = target.getNext();
      Node keyNode = cacheObj.getNext();
      Node valueFn = keyNode.getNext();
      Node keyFn = valueFn.getNext();
      return new Cache(cacheObj, keyNode, valueFn, keyFn);
    }
  }
  return super.describeCachingCall(node);
}
@Override
public Collection<String> getIndirectlyDeclaredProperties() {
  // Backed by a field initialized elsewhere in this class.
  return indirectlyDeclaredProperties;
}
/** Null-tolerant variant of {@link Node#getNext}: returns null for null. */
private static Node safeNext(Node n) {
  return (n == null) ? null : n.getNext();
}
/**
 * A function that will throw an exception if the value is not
 * an instanceof a specific type.
 */
public static class AssertInstanceofSpec extends AssertionFunctionSpec {
  public AssertInstanceofSpec(String functionName) {
    // Without a resolvable constructor argument the assertion narrows to Object.
    super(functionName, JSType.TOP_OBJECT, JSTypeNative.OBJECT_TYPE);
  }

  /**
   * Returns the type for a type assertion, or null if the function asserts
   * that the node must not be null or undefined.
   */
  @Override
  public com.google.javascript.rhino.jstype.JSType
      getAssertedOldType(Node call, JSTypeRegistry registry) {
    // assertInstanceof(value, Ctor, ...): the node after the second child is
    // the constructor whose instance type the assertion guarantees.
    if (call.getChildCount() > 2) {
      Node constructor = call.getSecondChild().getNext();
      if (constructor != null) {
        com.google.javascript.rhino.jstype.JSType ownerType =
            constructor.getJSType();
        if (ownerType != null
            && ownerType.isFunctionType()
            && ownerType.isConstructor()) {
          FunctionType functionType = ((FunctionType) ownerType);
          return functionType.getInstanceType();
        }
      }
    }
    // Constructor missing or untyped: fall back to the unknown type.
    return registry.getNativeType(JSTypeNative.UNKNOWN_TYPE);
  }

  @Override
  public JSType getAssertedNewType(Node call, DeclaredTypeRegistry scope) {
    if (call.getChildCount() > 2) {
      Node constructor = call.getSecondChild().getNext();
      if (constructor != null && constructor.isQualifiedName()) {
        QualifiedName qname = QualifiedName.fromNode(constructor);
        // Resolve the leftmost name in the scope, then walk the remaining
        // property chain to reach the constructor itself.
        JSType functionType = scope.getDeclaredTypeOf(qname.getLeftmostName());
        if (functionType != null) {
          if (!qname.isIdentifier()) {
            functionType = functionType.getProp(qname.getAllButLeftmost());
          }
          com.google.javascript.jscomp.newtypes.FunctionType ctorType =
              functionType.getFunTypeIfSingletonObj();
          if (ctorType != null && ctorType.isUniqueConstructor()) {
            return ctorType.getInstanceTypeOfCtor();
          }
        }
      }
    }
    return JSType.UNKNOWN;
  }
}
/**
* A function that will throw an exception when the value is not an
* instanceof the given type name, for instance "Element".
*/
public static class AssertFunctionByTypeName extends AssertionFunctionSpec {
private final String typeName;
public AssertFunctionByTypeName(String functionName, String typeName) {
super(functionName, null);
this.typeName = typeName;
}
@Override
public com.google.javascript.rhino.jstype.JSType
getAssertedOldType(Node call, JSTypeRegistry registry) {
return registry.getType(typeName);
}
@Override
public JSType getAssertedNewType(Node call, DeclaredTypeRegistry scope) {
JSType result = scope.getDeclaredTypeOf(typeName)
.getFunTypeIfSingletonObj().getInstanceTypeOfCtor();
return result;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.upgrade;
import static junit.framework.Assert.assertEquals;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMockBuilder;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.newCapture;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManager;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.H2DatabaseCleaner;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.stack.OsFamily;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.easymock.EasyMockRule;
import org.easymock.EasyMockSupport;
import org.easymock.Mock;
import org.easymock.MockType;
import org.easymock.TestSubject;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import com.google.inject.AbstractModule;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.Provider;
/**
 * {@link org.apache.ambari.server.upgrade.UpgradeCatalog212} unit tests.
 */
public class UpgradeCatalog212Test {
  // Table and column manipulated by the pre-DML topology cleanup under test.
  private static final String TOPOLOGY_REQUEST_TABLE = "topology_request";
  private static final String TOPOLOGY_REQUEST_CLUSTER_NAME_COLUMN = "cluster_name";

  // In-memory Guice injector; created in setUp(), torn down in tearDown().
  private Injector injector;

  @Rule
  public EasyMockRule mocks = new EasyMockRule(this);

  @Mock(type = MockType.STRICT)
  private Provider<EntityManager> entityManagerProvider;

  @Mock(type = MockType.NICE)
  private EntityManager entityManager;

  @Mock(type = MockType.NICE)
  private DBAccessor dbAccessor;

  @Mock
  private Injector mockedInjector;

  @Mock(type = MockType.NICE)
  private Connection connection;

  @Mock
  private Statement statement;

  @Mock
  private ResultSet resultSet;

  // Catalog under test; the annotated mocks above are injected by EasyMockRule.
  @TestSubject
  private UpgradeCatalog212 testSubject = new UpgradeCatalog212(
      EasyMock.createNiceMock(Injector.class));

  @Before
  public void setUp() {
    reset(entityManagerProvider);
    expect(entityManagerProvider.get()).andReturn(entityManager).anyTimes();
    replay(entityManagerProvider);

    injector = Guice.createInjector(new InMemoryDefaultTestModule());
    injector.getInstance(GuiceJpaInitializer.class);

    // inject AmbariMetaInfo to ensure that stacks get populated in the DB
    injector.getInstance(AmbariMetaInfo.class);
  }

  @After
  public void tearDown() throws AmbariException, SQLException {
    if (injector != null) {
      H2DatabaseCleaner.clearDatabaseAndStopPersistenceService(injector);
    }
  }

  // NOTE(review): "Finilize" mirrors the typo in the production method name.
  @Test
  public void testFinilizeTopologyDDL() throws Exception {
    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
    // Expect the legacy cluster_name column to be dropped and cluster_id to
    // become the non-null FK to clusters.
    dbAccessor.dropColumn(eq("topology_request"), eq("cluster_name"));
    dbAccessor.setColumnNullable(eq("topology_request"), eq("cluster_id"), eq(false));
    dbAccessor.addFKConstraint(eq("topology_request"), eq("FK_topology_request_cluster_id"), eq("cluster_id"),
        eq("clusters"), eq("cluster_id"), eq(false));
    replay(dbAccessor);

    Module module = new Module() {
      @Override
      public void configure(Binder binder) {
        binder.bind(DBAccessor.class).toInstance(dbAccessor);
        binder.bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
      }
    };

    Injector injector = Guice.createInjector(module);
    UpgradeCatalog212 upgradeCatalog212 = injector.getInstance(UpgradeCatalog212.class);
    upgradeCatalog212.finilizeTopologyDDL();

    verify(dbAccessor);
  }

  @Test
  public void testExecuteDDLUpdates() throws Exception {
    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
    Configuration configuration = createNiceMock(Configuration.class);
    Connection connection = createNiceMock(Connection.class);
    Statement statement = createNiceMock(Statement.class);
    ResultSet resultSet = createNiceMock(ResultSet.class);
    expect(configuration.getDatabaseUrl()).andReturn(Configuration.JDBC_IN_MEMORY_URL).anyTimes();
    dbAccessor.getConnection();
    expectLastCall().andReturn(connection).anyTimes();
    connection.createStatement();
    expectLastCall().andReturn(statement).anyTimes();
    statement.executeQuery(anyObject(String.class));
    expectLastCall().andReturn(resultSet).anyTimes();

    // Create DDL sections with their own capture groups
    HostRoleCommandDDL hostRoleCommandDDL = new HostRoleCommandDDL();

    // Execute any DDL schema changes
    hostRoleCommandDDL.execute(dbAccessor);

    // Replay sections
    replay(dbAccessor, configuration, resultSet, connection, statement);

    AbstractUpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor);
    Class<?> c = AbstractUpgradeCatalog.class;
    Field f = c.getDeclaredField("configuration");
    // Private field: injected via reflection so the catalog sees our mock.
    f.setAccessible(true);
    f.set(upgradeCatalog, configuration);

    upgradeCatalog.executeDDLUpdates();
    verify(dbAccessor, configuration, resultSet, connection, statement);

    // Verify sections
    hostRoleCommandDDL.verify(dbAccessor);
  }

  @Test
  public void testExecuteDMLUpdates() throws Exception {
    // Partial-mock the catalog so we only verify which helpers DML calls.
    Method addMissingConfigs = UpgradeCatalog212.class.getDeclaredMethod("addMissingConfigs");
    Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");

    UpgradeCatalog212 upgradeCatalog212 = createMockBuilder(UpgradeCatalog212.class)
        .addMockedMethod(addNewConfigurationsFromXml)
        .addMockedMethod(addMissingConfigs)
        .createMock();

    upgradeCatalog212.addNewConfigurationsFromXml();
    expectLastCall().once();
    upgradeCatalog212.addMissingConfigs();
    expectLastCall().once();

    replay(upgradeCatalog212);

    upgradeCatalog212.executeDMLUpdates();

    verify(upgradeCatalog212);
  }

  // NOTE(review): "Adn" in the name looks like a typo for "And"; kept as-is.
  @Test
  public void testUpdateHBaseAdnClusterConfigs() throws Exception {
    EasyMockSupport easyMockSupport = new EasyMockSupport();
    final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
    final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);

    // Input config values before the upgrade logic runs.
    final Map<String, String> propertiesHbaseSite = new HashMap<String, String>() {
      {
        put("hbase.bucketcache.size", "1024m");
      }
    };

    final Map<String, String> propertiesHbaseEnv = new HashMap<String, String>() {
      {
        put("override_hbase_uid", "false");
      }
    };

    final Config mockHbaseEnv = easyMockSupport.createNiceMock(Config.class);
    expect(mockHbaseEnv.getProperties()).andReturn(propertiesHbaseEnv).once();
    final Config mockHbaseSite = easyMockSupport.createNiceMock(Config.class);
    expect(mockHbaseSite.getProperties()).andReturn(propertiesHbaseSite).once();
    final Config mockClusterEnv = easyMockSupport.createNiceMock(Config.class);

    // Expected values after the upgrade: uid flag moves to cluster-env and
    // the bucketcache size loses its "m" suffix.
    final Map<String, String> propertiesExpectedHbaseEnv = new HashMap<>();
    final Map<String, String> propertiesExpectedClusterEnv = new HashMap<String, String>() {{
      put("override_uid", "false");
    }};
    final Map<String, String> propertiesExpectedHbaseSite = new HashMap<String, String>() {{
      put("hbase.bucketcache.size", "1024");
    }};

    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
        bind(ConfigHelper.class).toInstance(mockConfigHelper);
        bind(Clusters.class).toInstance(mockClusters);
        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
      }
    });

    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
      put("normal", mockClusterExpected);
    }}).once();
    expect(mockClusterExpected.getDesiredConfigByType("cluster-env")).andReturn(mockClusterEnv).atLeastOnce();
    expect(mockClusterExpected.getDesiredConfigByType("hbase-env")).andReturn(mockHbaseEnv).atLeastOnce();
    expect(mockClusterExpected.getDesiredConfigByType("hbase-site")).andReturn(mockHbaseSite).atLeastOnce();
    expect(mockClusterEnv.getProperties()).andReturn(propertiesExpectedClusterEnv).atLeastOnce();
    expect(mockHbaseEnv.getProperties()).andReturn(propertiesExpectedHbaseEnv).atLeastOnce();
    expect(mockHbaseSite.getProperties()).andReturn(propertiesExpectedHbaseSite).atLeastOnce();

    easyMockSupport.replayAll();
    mockInjector.getInstance(UpgradeCatalog212.class).updateHbaseAndClusterConfigurations();
    easyMockSupport.verifyAll();
  }

  @Test
  public void testUpdateHiveConfigs() throws Exception {
    EasyMockSupport easyMockSupport = new EasyMockSupport();
    final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
    final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
    final Config mockHiveSite = easyMockSupport.createNiceMock(Config.class);

    final Map<String, String> propertiesExpectedHiveSite = new HashMap<String, String>() {{
      put("hive.heapsize", "512");
      put("hive.server2.custom.authentication.class", "");
    }};

    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
        bind(ConfigHelper.class).toInstance(mockConfigHelper);
        bind(Clusters.class).toInstance(mockClusters);
        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
      }
    });

    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
      put("normal", mockClusterExpected);
    }}).once();
    expect(mockClusterExpected.getDesiredConfigByType("hive-site")).andReturn(mockHiveSite).atLeastOnce();
    expect(mockHiveSite.getProperties()).andReturn(propertiesExpectedHiveSite).atLeastOnce();
    // The hive-config update is stack-version dependent.
    StackId stackId = new StackId("HDP-2.2");
    expect(mockClusterExpected.getCurrentStackVersion()).andReturn(stackId).atLeastOnce();

    easyMockSupport.replayAll();
    mockInjector.getInstance(UpgradeCatalog212.class).updateHiveConfigs();
    easyMockSupport.verifyAll();
  }

  @Test
  public void testUpdateOozieConfigs() throws Exception {
    EasyMockSupport easyMockSupport = new EasyMockSupport();
    final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
    final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
    final Config mockOozieEnv = easyMockSupport.createNiceMock(Config.class);

    final Map<String, String> propertiesExpectedOozieEnv = new HashMap<String, String>() {{
      put("oozie_hostname", "");
      put("oozie_database", "123");
    }};

    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
        bind(ConfigHelper.class).toInstance(mockConfigHelper);
        bind(Clusters.class).toInstance(mockClusters);
        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
      }
    });

    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
      put("normal", mockClusterExpected);
    }}).once();
    expect(mockClusterExpected.getDesiredConfigByType("oozie-env")).andReturn(mockOozieEnv).atLeastOnce();
    expect(mockOozieEnv.getProperties()).andReturn(propertiesExpectedOozieEnv).atLeastOnce();

    easyMockSupport.replayAll();
    mockInjector.getInstance(UpgradeCatalog212.class).updateOozieConfigs();
    easyMockSupport.verifyAll();
  }

  @Test
  public void testUpdateHiveEnvContent() throws Exception {
    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
      }
    });
    // The update is expected to strip the HADOOP_HEAPSIZE/HADOOP_CLIENT_OPTS
    // exports while leaving the rest of the hive-env content untouched.
    String testContent = " if [ \"$SERVICE\" = \"cli\" ]; then\n" +
        "   if [ -z \"$DEBUG\" ]; then\n" +
        "     export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit\"\n" +
        "   else\n" +
        "     export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit\"\n" +
        "   fi\n" +
        " fi\n" +
        "\n" +
        "export HADOOP_HEAPSIZE=\"{{hive_heapsize}}\"\n" +
        "export HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n" +
        "\n" +
        "# Set HADOOP_HOME to point to a specific hadoop install directory\n" +
        "HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n";
    String expectedResult = " if [ \"$SERVICE\" = \"cli\" ]; then\n" +
        "   if [ -z \"$DEBUG\" ]; then\n" +
        "     export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit\"\n" +
        "   else\n" +
        "     export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit\"\n" +
        "   fi\n" +
        " fi\n" +
        "\n" +
        "\n" +
        "\n" +
        "# Set HADOOP_HOME to point to a specific hadoop install directory\n" +
        "HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n";
    assertEquals(expectedResult, mockInjector.getInstance(UpgradeCatalog212.class).updateHiveEnvContent(testContent));
  }

  /**
   * Builds a catalog instance wired to the given mock DBAccessor.
   *
   * @param dbAccessor mock accessor bound into a fresh injector
   * @return the catalog under test, resolved through Guice
   */
  private AbstractUpgradeCatalog getUpgradeCatalog(final DBAccessor dbAccessor) {
    Module module = new Module() {
      @Override
      public void configure(Binder binder) {
        binder.bind(DBAccessor.class).toInstance(dbAccessor);
        binder.bind(EntityManager.class).toInstance(entityManager);
        binder.bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
      }
    };
    Injector injector = Guice.createInjector(module);
    return injector.getInstance(UpgradeCatalog212.class);
  }

  @Test
  public void testGetSourceVersion() {
    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
    UpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor);
    Assert.assertEquals("2.1.1", upgradeCatalog.getSourceVersion());
  }

  @Test
  public void testGetTargetVersion() throws Exception {
    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
    UpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor);
    Assert.assertEquals("2.1.2", upgradeCatalog.getTargetVersion());
  }

  /**
   * Verify alert changes
   */
  class HostRoleCommandDDL implements SectionDDL {
    // One capture per table touched by this DDL section.
    HashMap<String, Capture<DBColumnInfo>> captures;

    public HostRoleCommandDDL() {
      captures = new HashMap<>();

      Capture<DBAccessor.DBColumnInfo> hostRoleCommandAutoSkipColumnCapture = new Capture<>();

      captures.put("host_role_command", hostRoleCommandAutoSkipColumnCapture);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void execute(DBAccessor dbAccessor) throws SQLException {
      Capture<DBColumnInfo> hostRoleCommandAuotSkipColumnCapture = captures.get(
          "host_role_command");

      dbAccessor.addColumn(eq("host_role_command"),
          capture(hostRoleCommandAuotSkipColumnCapture));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void verify(DBAccessor dbAccessor) throws SQLException {
      verifyHostRoleCommandSkipCapture(captures.get("host_role_command"));
    }

    private void verifyHostRoleCommandSkipCapture(
        Capture<DBAccessor.DBColumnInfo> hostRoleCommandAuotSkipColumnCapture) {
      DBColumnInfo clusterIdColumn = hostRoleCommandAuotSkipColumnCapture.getValue();
      Assert.assertEquals(Integer.class, clusterIdColumn.getType());
      Assert.assertEquals("auto_skip_on_failure", clusterIdColumn.getName());
    }
  }

  @Test
  public void testShouldSkipPreDMLLogicIfClusterNameColumnDoesNotExist() throws Exception {
    // GIVEN
    reset(dbAccessor);
    Capture<String> tableNameCaptor = newCapture();
    Capture<String> columnNameCaptor = newCapture();

    // the column used by the logic is already deleted
    // this could happen as a result of previously running the update
    expect(dbAccessor.tableHasColumn(capture(tableNameCaptor), capture(columnNameCaptor))).andReturn(false);
    replay(dbAccessor);

    // WHEN
    testSubject.executePreDMLUpdates();

    // THEN
    Assert.assertNotNull("The table name hasn't been captured", tableNameCaptor.getValue());
    Assert.assertEquals("The table name is not as expected", TOPOLOGY_REQUEST_TABLE, tableNameCaptor.getValue());

    Assert.assertNotNull("The column name hasn't been captured", columnNameCaptor.getValue());
    Assert.assertEquals("The column name is not as expected", TOPOLOGY_REQUEST_CLUSTER_NAME_COLUMN,
        columnNameCaptor.getValue());
  }

  @Test
  public void testShouldPerformPreDMLLogicIfClusterNameColumnExists() throws Exception {
    // GIVEN
    reset(dbAccessor);
    expect(dbAccessor.getConnection()).andReturn(connection).anyTimes();
    expect(connection.createStatement()).andReturn(statement);

    Capture<String> tableNameCaptor = newCapture();
    Capture<String> columnNameCaptor = newCapture();

    // the column exists, so the query path must run and close its resources
    expect(dbAccessor.tableHasColumn(capture(tableNameCaptor), capture(columnNameCaptor))).andReturn(true);
    expect(statement.executeQuery(anyString())).andReturn(resultSet);
    statement.close();

    expect(resultSet.next()).andReturn(false);
    resultSet.close();
    replay(dbAccessor, connection, statement, resultSet);

    // WHEN
    testSubject.executePreDMLUpdates();

    // THEN
    Assert.assertNotNull("The table name hasn't been captured", tableNameCaptor.getValue());
    Assert.assertEquals("The table name is not as expected", TOPOLOGY_REQUEST_TABLE, tableNameCaptor.getValue());

    Assert.assertNotNull("The column name hasn't been captured", columnNameCaptor.getValue());
    Assert.assertEquals("The column name is not as expected", TOPOLOGY_REQUEST_CLUSTER_NAME_COLUMN,
        columnNameCaptor.getValue());

    verify(dbAccessor, statement, resultSet);
  }
}
| |
package com.reucon.openfire.plugin.archive.impl;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.TreeMap;
import java.util.HashSet;
import java.util.List;
import org.jivesoftware.database.DbConnectionManager;
import org.jivesoftware.openfire.archive.ConversationManager;
import org.jivesoftware.util.JiveConstants;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.util.Log;
import org.xmpp.packet.JID;
import com.reucon.openfire.plugin.archive.ArchivedMessageConsumer;
import com.reucon.openfire.plugin.archive.PersistenceManager;
import com.reucon.openfire.plugin.archive.model.ArchivedMessage;
import com.reucon.openfire.plugin.archive.model.ArchivedMessage.Direction;
import com.reucon.openfire.plugin.archive.model.Conversation;
import com.reucon.openfire.plugin.archive.model.Participant;
import com.reucon.openfire.plugin.archive.xep0059.XmppResultSet;
/**
* Manages database persistence.
*/
public class JdbcPersistenceManager implements PersistenceManager {

    /** Default page size used when an XEP-0059 result set does not specify a max. */
    public static final int DEFAULT_MAX = 1000;

    /**
     * Selects all archived messages of one conversation, restricted to the given
     * participant (bareJID) and ordered chronologically by sent date.
     * Parameters: 1 = conversationID, 2 = bareJID.
     */
    public static final String SELECT_MESSAGES_BY_CONVERSATION = "SELECT DISTINCT " + "ofConversation.conversationID, " + "ofConversation.room, "
            + "ofConversation.isExternal, " + "ofConversation.startDate, " + "ofConversation.lastActivity, " + "ofConversation.messageCount, "
            + "ofConParticipant.joinedDate, " + "ofConParticipant.leftDate, " + "ofConParticipant.bareJID, " + "ofConParticipant.jidResource, "
            + "ofConParticipant.nickname, " + "ofMessageArchive.fromJID, " + "ofMessageArchive.toJID, " + "ofMessageArchive.sentDate, "
            + "ofMessageArchive.body " + "FROM ofConversation "
            + "INNER JOIN ofConParticipant ON ofConversation.conversationID = ofConParticipant.conversationID "
            + "INNER JOIN ofMessageArchive ON ofConParticipant.conversationID = ofMessageArchive.conversationID "
            + "WHERE ofConversation.conversationID = ? AND ofConParticipant.bareJID = ? ORDER BY ofMessageArchive.sentDate";
    // Legacy query kept for reference (pre-Openfire-schema storage):
    // public static final String SELECT_MESSAGES_BY_CONVERSATION =
    // "SELECT messageId,time,direction,type,subject,body "
    // + "FROM archiveMessages WHERE conversationId = ? ORDER BY time";

    /**
     * Selects conversation rows joined with their participants. The inner
     * UNION ALL sub-select exposes both fromJID and toJID under the alias
     * "toJID" so that either side of a message can match the "with" filter.
     * Additional WHERE/ORDER/limit clauses are appended at runtime.
     */
    public static final String SELECT_CONVERSATIONS = "SELECT DISTINCT " + "ofConversation.conversationID, " + "ofConversation.room, "
            + "ofConversation.isExternal, " + "ofConversation.startDate, " + "ofConversation.lastActivity, " + "ofConversation.messageCount, "
            + "ofConParticipant.joinedDate, " + "ofConParticipant.leftDate, " + "ofConParticipant.bareJID, " + "ofConParticipant.jidResource, "
            + "ofConParticipant.nickname, "
            + "ofConParticipant.bareJID as fromJID, "
            + "ofMessageArchive.toJID "
            + "FROM ofConversation "
            + "INNER JOIN ofConParticipant ON ofConversation.conversationID = ofConParticipant.conversationID "
            + "INNER JOIN (SELECT conversationID, toJID FROM ofMessageArchive "
            + "union all "
            + "SELECT conversationID, fromJID as toJID FROM ofMessageArchive) ofMessageArchive ON ofConParticipant.conversationID = ofMessageArchive.conversationID";
    // Legacy query kept for reference:
    // public static final String SELECT_CONVERSATIONS =
    // "SELECT c.conversationId,c.startTime,c.endTime,c.ownerJid,c.ownerResource,c.withJid,c.withResource,"
    // + " c.subject,c.thread " + "FROM archiveConversations AS c";

    /**
     * Counts distinct conversations; uses the same join structure as
     * {@link #SELECT_CONVERSATIONS} so the same WHERE fragments can be reused.
     */
    public static final String COUNT_CONVERSATIONS = "SELECT COUNT(DISTINCT ofConversation.conversationID) FROM ofConversation "
            + "INNER JOIN ofConParticipant ON ofConversation.conversationID = ofConParticipant.conversationID "
            + "INNER JOIN (SELECT conversationID, toJID FROM ofMessageArchive "
            + "union all "
            + "SELECT conversationID, fromJID as toJID FROM ofMessageArchive) ofMessageArchive ON ofConParticipant.conversationID = ofMessageArchive.conversationID";
    // Legacy query kept for reference:
    // public static final String COUNT_CONVERSATIONS =
    // "SELECT count(*) FROM archiveConversations AS c";

    // Column aliases used to build WHERE/ORDER BY fragments at runtime.
    public static final String CONVERSATION_ID = "ofConversation.conversationID";
    // public static final String CONVERSATION_ID = "c.conversationId";
    public static final String CONVERSATION_START_TIME = "ofConversation.startDate";
    // public static final String CONVERSATION_START_TIME = "c.startTime";
    public static final String CONVERSATION_END_TIME = "ofConversation.lastActivity";
    // public static final String CONVERSATION_END_TIME = "c.endTime";
    public static final String CONVERSATION_OWNER_JID = "ofConParticipant.bareJID";
    // public static final String CONVERSATION_OWNER_JID = "c.ownerJid";
    public static final String CONVERSATION_WITH_JID = "ofMessageArchive.toJID";
    // public static final String CONVERSATION_WITH_JID = "c.withJid";

    /**
     * Selects conversations (with messages) whose lastActivity is newer than a
     * given timestamp. Parameter: 1 = cutoff time in milliseconds.
     */
    public static final String SELECT_ACTIVE_CONVERSATIONS = "SELECT DISTINCT " + "ofConversation.conversationID, " + "ofConversation.room, "
            + "ofConversation.isExternal, " + "ofConversation.startDate, " + "ofConversation.lastActivity, " + "ofConversation.messageCount, "
            + "ofConParticipant.joinedDate, " + "ofConParticipant.leftDate, " + "ofConParticipant.bareJID, " + "ofConParticipant.jidResource, "
            + "ofConParticipant.nickname, " + "ofMessageArchive.fromJID, " + "ofMessageArchive.toJID, " + "ofMessageArchive.sentDate, "
            + "ofMessageArchive.body " + "FROM ofConversation "
            + "INNER JOIN ofConParticipant ON ofConversation.conversationID = ofConParticipant.conversationID "
            + "INNER JOIN ofMessageArchive ON ofConParticipant.conversationID = ofMessageArchive.conversationID "
            + "WHERE ofConversation.lastActivity > ?";
    // Legacy query kept for reference:
    // public static final String SELECT_ACTIVE_CONVERSATIONS =
    // "SELECT c.conversationId,c.startTime,c.endTime,c.ownerJid,c.ownerResource,withJid,c.withResource,"
    // + " c.subject,c.thread "
    // + "FROM archiveConversations AS c WHERE c.endTime > ?";

    /**
     * Selects the participants of one conversation. Parameter: 1 = conversationID.
     */
    public static final String SELECT_PARTICIPANTS_BY_CONVERSATION = "SELECT DISTINCT " + "ofConversation.conversationID, "
            + "ofConversation.startDate, " + "ofConversation.lastActivity, " + "ofConParticipant.bareJID " + "FROM ofConversation "
            + "INNER JOIN ofConParticipant ON ofConversation.conversationID = ofConParticipant.conversationID "
            + "INNER JOIN ofMessageArchive ON ofConParticipant.conversationID = ofMessageArchive.conversationID "
            + "WHERE ofConversation.conversationID = ? ORDER BY ofConversation.startDate";
    // Legacy query kept for reference:
    // public static final String SELECT_PARTICIPANTS_BY_CONVERSATION =
    // "SELECT participantId,startTime,endTime,jid FROM archiveParticipants WHERE conversationId =? ORDER BY startTime";

    /** No-op: this manager reads the Openfire archive tables and never writes. */
    public boolean createMessage(ArchivedMessage message) {
        /* read only */
        return false;
    }

    /** No-op: nothing is iterated; always reports zero processed messages. */
    public int processAllMessages(ArchivedMessageConsumer callback) {
        return 0;
    }

    /** No-op: this manager is read only. */
    public boolean createConversation(Conversation conversation) {
        /* read only */
        return false;
    }

    /** No-op: this manager is read only. */
    public boolean updateConversationEnd(Conversation conversation) {
        /* read only */
        return false;
    }

    /** No-op: this manager is read only. */
    public boolean createParticipant(Participant participant, Long conversationId) {
        return false;
    }

    /**
     * Not implemented: always returns an empty list regardless of the filters.
     */
    public List<Conversation> findConversations(String[] participants, Date startDate, Date endDate) {
        final List<Conversation> conversations = new ArrayList<Conversation>();
        return conversations;
    }

    /**
     * Clamps a requested start date against the "conversation.maxRetrievable"
     * retention window (configured in days, converted to milliseconds here).
     *
     * Returns the oldest allowed date when the requested start is null or older
     * than the window; returns the requested date unchanged when no window is
     * configured (maxRetrievable <= 0).
     *
     * NOTE(review): when a retention window is active and startDate already lies
     * inside it, this method returns null rather than startDate, which makes the
     * caller drop its start-date filter entirely — confirm this is intended.
     */
    public Date getAuditedStartDate(Date startDate) {
        long maxRetrievable = JiveGlobals.getIntProperty("conversation.maxRetrievable", ConversationManager.DEFAULT_MAX_RETRIEVABLE)
                * JiveConstants.DAY;
        Date result = null;
        if (maxRetrievable > 0) {
            Date now = new Date();
            Date maxRetrievableDate = new Date(now.getTime() - maxRetrievable);
            if (startDate == null) {
                result = maxRetrievableDate;
            } else if (startDate.before(maxRetrievableDate)) {
                result = maxRetrievableDate;
            }
        } else {
            result = startDate;
        }
        return result;
    }

    /**
     * Finds conversations matching the given filters, with optional XEP-0059
     * paging.
     *
     * All filters are optional (null = no restriction). When xmppResultSet is
     * given, the total count and the page window (index / after / before / max)
     * are computed first, then applied either as SQL Server ROW_NUMBER()
     * pagination or as LIMIT/OFFSET for other databases; first/last keys of the
     * returned page are written back into the result set.
     *
     * Returns the matching conversations ordered by conversation id (results
     * are accumulated in a TreeMap keyed by id); empty on SQL errors, which are
     * logged but not rethrown.
     */
    public Collection<Conversation> findConversations(Date startDate, Date endDate, String ownerJid, String withJid, XmppResultSet xmppResultSet) {
        final TreeMap<Long, Conversation> conversations;
        final StringBuilder querySB;
        final StringBuilder whereSB;
        final StringBuilder limitSB;
        conversations = new TreeMap<Long, Conversation>();
        querySB = new StringBuilder(SELECT_CONVERSATIONS);
        whereSB = new StringBuilder();
        limitSB = new StringBuilder();
        // Enforce the retention window before building the WHERE clause.
        startDate = getAuditedStartDate(startDate);
        if (startDate != null) {
            appendWhere(whereSB, CONVERSATION_START_TIME, " >= ?");
        }
        if (endDate != null) {
            appendWhere(whereSB, CONVERSATION_END_TIME, " <= ?");
        }
        if (ownerJid != null) {
            appendWhere(whereSB, CONVERSATION_OWNER_JID, " = ?");
        }
        if (withJid != null) {
            appendWhere(whereSB, CONVERSATION_WITH_JID, " = ?");
        }
        if (xmppResultSet != null) {
            Integer firstIndex = null;
            int max = xmppResultSet.getMax() != null ? xmppResultSet.getMax() : DEFAULT_MAX;
            xmppResultSet.setCount(countConversations(startDate, endDate, ownerJid, withJid, whereSB.toString()));
            if (xmppResultSet.getIndex() != null) {
                firstIndex = xmppResultSet.getIndex();
            } else if (xmppResultSet.getAfter() != null) {
                // Page starts right after the referenced conversation id.
                firstIndex = countConversationsBefore(startDate, endDate, ownerJid, withJid, xmppResultSet.getAfter(), whereSB.toString());
                firstIndex += 1;
            } else if (xmppResultSet.getBefore() != null) {
                // Page ends right before the referenced conversation id.
                firstIndex = countConversationsBefore(startDate, endDate, ownerJid, withJid, xmppResultSet.getBefore(), whereSB.toString());
                firstIndex -= max;
                if (firstIndex < 0) {
                    firstIndex = 0;
                }
            }
            firstIndex = firstIndex != null ? firstIndex : 0;
            if (DbConnectionManager.getDatabaseType() == DbConnectionManager.DatabaseType.sqlserver) {
                // Completed later as "... WHERE RowNum BETWEEN x AND y".
                limitSB.append(" BETWEEN ").append(firstIndex + 1);
                limitSB.append(" AND ").append(firstIndex + max);
            } else {
                limitSB.append(" LIMIT ").append(max);
                limitSB.append(" OFFSET ").append(firstIndex);
            }
            xmppResultSet.setFirstIndex(firstIndex);
        }
        if (whereSB.length() != 0) {
            querySB.append(" WHERE ").append(whereSB);
        }
        if (DbConnectionManager.getDatabaseType() == DbConnectionManager.DatabaseType.sqlserver) {
            // SQL Server has no LIMIT/OFFSET; wrap the query in a ROW_NUMBER() window.
            querySB.insert(0, "SELECT * FROM (SELECT *, ROW_NUMBER() OVER (ORDER BY " + CONVERSATION_ID + ") AS RowNum FROM ( ");
            querySB.append(") ofConversation ) t2 WHERE RowNum");
        } else {
            querySB.append(" ORDER BY ").append(CONVERSATION_ID);
        }
        querySB.append(limitSB);
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = DbConnectionManager.getConnection();
            pstmt = con.prepareStatement(querySB.toString());
            bindConversationParameters(startDate, endDate, ownerJid, withJid, pstmt);
            rs = pstmt.executeQuery();
            Log.debug("findConversations: SELECT_CONVERSATIONS: " + pstmt.toString());
            while (rs.next()) {
                Conversation conv = extractConversation(rs);
                conversations.put(conv.getId(), conv);
            }
        } catch (SQLException sqle) {
            Log.error("Error selecting conversations", sqle);
        } finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        if (xmppResultSet != null && conversations.size() > 0) {
            // TreeMap iteration order gives us the lowest/highest ids of the page.
            xmppResultSet.setFirst(conversations.firstKey());
            xmppResultSet.setLast(conversations.lastKey());
        }
        return conversations.values();
    }

    /**
     * Appends the given fragments to a WHERE-clause builder, prefixing " AND "
     * when the builder already contains a condition.
     */
    private void appendWhere(StringBuilder sb, String... fragments) {
        if (sb.length() != 0) {
            sb.append(" AND ");
        }
        for (String fragment : fragments) {
            sb.append(fragment);
        }
    }

    /**
     * Counts conversations matching the prebuilt WHERE clause; the bound
     * parameters must correspond to the fragments in whereClause. Returns 0 on
     * SQL errors (logged, not rethrown).
     */
    private int countConversations(Date startDate, Date endDate, String ownerJid, String withJid, String whereClause) {
        StringBuilder querySB;
        querySB = new StringBuilder(COUNT_CONVERSATIONS);
        if (whereClause != null && whereClause.length() != 0) {
            querySB.append(" WHERE ").append(whereClause);
        }
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = DbConnectionManager.getConnection();
            pstmt = con.prepareStatement(querySB.toString());
            bindConversationParameters(startDate, endDate, ownerJid, withJid, pstmt);
            rs = pstmt.executeQuery();
            if (rs.next()) {
                return rs.getInt(1);
            } else {
                return 0;
            }
        } catch (SQLException sqle) {
            Log.error("Error counting conversations", sqle);
            return 0;
        } finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
    }

    /**
     * Counts conversations matching the prebuilt WHERE clause whose id is
     * smaller than {@code before}; used to translate XEP-0059 before/after
     * anchors into offsets. Returns 0 on SQL errors (logged, not rethrown).
     */
    private int countConversationsBefore(Date startDate, Date endDate, String ownerJid, String withJid, Long before, String whereClause) {
        StringBuilder querySB;
        querySB = new StringBuilder(COUNT_CONVERSATIONS);
        querySB.append(" WHERE ");
        if (whereClause != null && whereClause.length() != 0) {
            querySB.append(whereClause);
            querySB.append(" AND ");
        }
        querySB.append(CONVERSATION_ID).append(" < ?");
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            int parameterIndex;
            con = DbConnectionManager.getConnection();
            pstmt = con.prepareStatement(querySB.toString());
            // The id bound goes last, after all filter parameters.
            parameterIndex = bindConversationParameters(startDate, endDate, ownerJid, withJid, pstmt);
            pstmt.setLong(parameterIndex, before);
            rs = pstmt.executeQuery();
            if (rs.next()) {
                return rs.getInt(1);
            } else {
                return 0;
            }
        } catch (SQLException sqle) {
            Log.error("Error counting conversations", sqle);
            return 0;
        } finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
    }

    /**
     * Binds the non-null filter values, in the same fixed order the WHERE
     * fragments are appended (start, end, owner, with). Returns the next free
     * parameter index so callers can bind additional parameters after these.
     */
    private int bindConversationParameters(Date startDate, Date endDate, String ownerJid, String withJid, PreparedStatement pstmt)
            throws SQLException {
        int parameterIndex = 1;
        if (startDate != null) {
            pstmt.setLong(parameterIndex++, dateToMillis(startDate));
        }
        if (endDate != null) {
            pstmt.setLong(parameterIndex++, dateToMillis(endDate));
        }
        if (ownerJid != null) {
            pstmt.setString(parameterIndex++, ownerJid);
        }
        if (withJid != null) {
            pstmt.setString(parameterIndex++, withJid);
        }
        return parameterIndex;
    }

    /**
     * Returns conversations whose last activity is within the given timeout
     * (the 60 * 1000 factor converts the timeout from minutes to milliseconds).
     * Empty on SQL errors (logged, not rethrown).
     */
    public Collection<Conversation> getActiveConversations(int conversationTimeout) {
        final Collection<Conversation> conversations;
        final long now = System.currentTimeMillis();
        conversations = new ArrayList<Conversation>();
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = DbConnectionManager.getConnection();
            pstmt = con.prepareStatement(SELECT_ACTIVE_CONVERSATIONS);
            pstmt.setLong(1, now - conversationTimeout * 60L * 1000L);
            rs = pstmt.executeQuery();
            while (rs.next()) {
                conversations.add(extractConversation(rs));
            }
        } catch (SQLException sqle) {
            Log.error("Error selecting conversations", sqle);
        } finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        return conversations;
    }

    /**
     * Loads the conversations with the given ids, ordered by last activity.
     * The IN clause is built with one '?' placeholder per id, so values are
     * still bound safely. Returns an empty list for an empty id collection or
     * on SQL errors (logged, not rethrown).
     */
    public List<Conversation> getConversations(Collection<Long> conversationIds) {
        final List<Conversation> conversations;
        final StringBuilder querySB;
        conversations = new ArrayList<Conversation>();
        if (conversationIds.isEmpty()) {
            return conversations;
        }
        querySB = new StringBuilder(SELECT_CONVERSATIONS);
        querySB.append(" WHERE ");
        querySB.append(CONVERSATION_ID);
        querySB.append(" IN ( ");
        for (int i = 0; i < conversationIds.size(); i++) {
            if (i == 0) {
                querySB.append("?");
            } else {
                querySB.append(",?");
            }
        }
        querySB.append(" )");
        querySB.append(" ORDER BY ").append(CONVERSATION_END_TIME);
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = DbConnectionManager.getConnection();
            pstmt = con.prepareStatement(querySB.toString());
            int i = 0;
            for (Long id : conversationIds) {
                pstmt.setLong(++i, id);
            }
            rs = pstmt.executeQuery();
            while (rs.next()) {
                conversations.add(extractConversation(rs));
            }
        } catch (SQLException sqle) {
            Log.error("Error selecting conversations", sqle);
        } finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        return conversations;
    }

    /** Looks a conversation up by owner/with/start; see the private overload. */
    public Conversation getConversation(String ownerJid, String withJid, Date start) {
        return getConversation(null, ownerJid, withJid, start);
    }

    /** Looks a conversation up by its id; see the private overload. */
    public Conversation getConversation(Long conversationId) {
        return getConversation(conversationId, null, null, null);
    }

    /**
     * Loads a single conversation together with its participants and messages.
     *
     * Looks it up either by id (when conversationId is non-null) or by
     * owner JID plus optional with-JID and start date. Returns null when no row
     * matches; on SQL errors the error is logged and whatever was loaded so far
     * (possibly null) is returned.
     */
    private Conversation getConversation(Long conversationId, String ownerJid, String withJid, Date start) {
        Conversation conversation = null;
        StringBuilder querySB;
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        querySB = new StringBuilder(SELECT_CONVERSATIONS);
        querySB.append(" WHERE ");
        if (conversationId != null) {
            querySB.append(CONVERSATION_ID).append(" = ? ");
        } else {
            querySB.append(CONVERSATION_OWNER_JID).append(" = ?");
            if (withJid != null) {
                querySB.append(" AND ");
                querySB.append(CONVERSATION_WITH_JID).append(" = ? ");
            }
            if (start != null) {
                querySB.append(" AND ");
                querySB.append(CONVERSATION_START_TIME).append(" = ? ");
            }
        }
        try {
            con = DbConnectionManager.getConnection();
            pstmt = con.prepareStatement(querySB.toString());
            int i = 1;
            if (conversationId != null) {
                pstmt.setLong(1, conversationId);
            } else {
                // Bind in the same order the clauses were appended above.
                pstmt.setString(i++, ownerJid);
                if (withJid != null) {
                    pstmt.setString(i++, withJid);
                }
                if (start != null) {
                    pstmt.setLong(i++, dateToMillis(start));
                }
            }
            rs = pstmt.executeQuery();
            Log.debug("getConversation: SELECT_CONVERSATIONS: " + pstmt.toString());
            if (rs.next()) {
                conversation = extractConversation(rs);
            } else {
                return null;
            }
            // Second query: attach the participants.
            rs.close();
            pstmt.close();
            pstmt = con.prepareStatement(SELECT_PARTICIPANTS_BY_CONVERSATION);
            pstmt.setLong(1, conversation.getId());
            rs = pstmt.executeQuery();
            Log.debug("getConversation: SELECT_PARTICIPANTS_BY_CONVERSATION: " + pstmt.toString());
            while (rs.next()) {
                for (Participant participant : extractParticipant(rs)) {
                    conversation.addParticipant(participant);
                }
            }
            // Third query: attach the messages, scoped to the owner JID.
            rs.close();
            pstmt.close();
            pstmt = con.prepareStatement(SELECT_MESSAGES_BY_CONVERSATION);
            pstmt.setLong(1, conversation.getId());
            pstmt.setString(2, conversation.getOwnerJid());
            rs = pstmt.executeQuery();
            Log.debug("getConversation: SELECT_MESSAGES_BY_CONVERSATION: " + pstmt.toString());
            while (rs.next()) {
                ArchivedMessage message;
                message = extractMessage(rs);
                message.setConversation(conversation);
                conversation.addMessage(message);
            }
        } catch (SQLException sqle) {
            Log.error("Error selecting conversation", sqle);
        } finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        return conversation;
    }

    /**
     * Derives the "with" JID of the current row: the room (for group chats,
     * when non-empty), otherwise whichever of fromJID/toJID is NOT the owner's
     * bare JID. Returns null when any of the three JID columns is null.
     */
    private String getWithJidConversations(ResultSet rs) throws SQLException {
        String bareJid = rs.getString("bareJID");
        String fromJid = rs.getString("fromJID");
        String toJid = rs.getString("toJID");
        String room = rs.getString("room");
        String result = null;
        if (bareJid != null && fromJid != null && toJid != null) {
            if (room != null && !room.equals("")) {
                result = room;
            } else if (fromJid.contains(bareJid)) {
                result = toJid;
            } else {
                result = fromJid;
            }
        }
        return result;
    }

    /**
     * Derives the message direction for the current row relative to the archive
     * owner (bareJID): {@code to} when the owner sent it, {@code from} when the
     * owner received it. Returns null when any JID column is null.
     */
    private Direction getDirection(ResultSet rs) throws SQLException {
        Direction direction = null;
        String bareJid = rs.getString("bareJID");
        String fromJid = rs.getString("fromJID");
        String toJid = rs.getString("toJID");
        if (bareJid != null && fromJid != null && toJid != null) {
            if (bareJid.equals(fromJid)) {
                /*
                 * if message from me to withJid then it is to the withJid participant
                 */
                direction = Direction.to;
            } else {
                /*
                 * if message to me from withJid then it is from the withJid participant
                 */
                direction = Direction.from;
            }
        }
        return direction;
    }

    /**
     * Builds a Conversation from the current row. The conversation id doubles
     * as the thread value; resources and subject are not stored in this schema
     * and are left null.
     */
    private Conversation extractConversation(ResultSet rs) throws SQLException {
        final Conversation conversation;
        long id = rs.getLong("conversationID");
        Date startDate = millisToDate(rs.getLong("startDate"));
        String ownerJid = rs.getString("bareJID");
        String ownerResource = null;
        String withJid = getWithJidConversations(rs);
        String withResource = null;
        String subject = null;
        String thread = String.valueOf(id);
        conversation = new Conversation(startDate, ownerJid, ownerResource, withJid, withResource, subject, thread);
        conversation.setId(id);
        return conversation;
    }

    /**
     * Builds the participant(s) found in the current row. At most one
     * participant is produced per row; the conversation's start date and last
     * activity serve as the participant's join/leave interval.
     */
    private Collection<Participant> extractParticipant(ResultSet rs) throws SQLException {
        Collection<Participant> participants = new HashSet<Participant>();
        Date startDate = millisToDate(rs.getLong("startDate"));
        String participantJid = rs.getString("bareJID");
        Date endDate = millisToDate(rs.getLong("lastActivity"));
        if (participantJid != null) {
            Participant participant = new Participant(startDate, participantJid);
            participant.setEnd(endDate);
            participants.add(participant);
        }
        // String withJid = getWithJid(rs);
        // if (withJid != null) {
        // Participant participant = new Participant(startDate, participantJid);
        // participant.setEnd(endDate);
        // participants.add(participant);
        // }
        return participants;
    }

    /**
     * Builds an ArchivedMessage from the current row.
     *
     * NOTE(review): 'type', 'subject' and 'bareJid' are computed but never used.
     * NOTE(review): withJid is only populated for incoming (Direction.from)
     * messages; outgoing messages carry a null "with" JID — confirm intended.
     */
    private ArchivedMessage extractMessage(ResultSet rs) throws SQLException {
        final ArchivedMessage message;
        Date time = millisToDate(rs.getLong("sentDate"));
        Direction direction = getDirection(rs);
        String type = null;
        String subject = null;
        String body = rs.getString("body");
        String bareJid = rs.getString("bareJID");
        JID withJid = null;
        if (Direction.from == direction) {
            withJid = new JID(rs.getString("fromJID"));
        }
        message = new ArchivedMessage(time, direction, null, withJid);
        // message.setId(id);
        // message.setSubject(subject);
        message.setBody(body);
        return message;
    }

    /** Null-safe Date -> epoch-milliseconds conversion. */
    private Long dateToMillis(Date date) {
        return date == null ? null : date.getTime();
    }

    /** Null-safe epoch-milliseconds -> Date conversion. */
    private Date millisToDate(Long millis) {
        return millis == null ? null : new Date(millis);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.rewriter.rules;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.IsomorphismUtilities;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.AssignPOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.OneToOneExchangePOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.ReplicatePOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.StreamProjectPOperator;
import org.apache.hyracks.algebricks.core.rewriter.base.HeuristicOptimizer;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
public class ExtractCommonOperatorsRule implements IAlgebraicRewriteRule {
// Maps each operator reference to the list of its parent references, filled by topDownMaterialization.
private final HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> childrenToParents = new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();
// Root operators (WRITE / WRITE_RESULT / DISTRIBUTE_RESULT) collected during rewritePre.
private final List<Mutable<ILogicalOperator>> roots = new ArrayList<Mutable<ILogicalOperator>>();
// Groups of operator references considered candidates for sharing; rebuilt on every fixpoint pass.
private final List<List<Mutable<ILogicalOperator>>> equivalenceClasses = new ArrayList<List<Mutable<ILogicalOperator>>>();
// Per-operator bitmap of which of its inputs are covered by candidates (multi-input operators only).
private final HashMap<Mutable<ILogicalOperator>, BitSet> opToCandidateInputs = new HashMap<Mutable<ILogicalOperator>, BitSet>();
// Cluster bookkeeping used when computing materialization flags.
private final HashMap<Mutable<ILogicalOperator>, MutableInt> clusterMap = new HashMap<Mutable<ILogicalOperator>, MutableInt>();
private final HashMap<Integer, BitSet> clusterWaitForMap = new HashMap<Integer, BitSet>();
// Monotonically increasing cluster id; reset between fixpoint passes.
private int lastUsedClusterId = 0;
@Override
public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    // Collect plan roots (sinks); the actual rewriting happens in rewritePost.
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    if (op.getOperatorTag() != LogicalOperatorTag.WRITE && op.getOperatorTag() != LogicalOperatorTag.WRITE_RESULT
            && op.getOperatorTag() != LogicalOperatorTag.DISTRIBUTE_RESULT) {
        return false;
    }
    // BUG FIX: the original checked roots.contains(op), comparing a bare operator
    // against the MutableObject wrappers stored in 'roots'; MutableObject.equals
    // only matches other MutableObjects, so the check could never succeed and the
    // same root could be added repeatedly. Wrap the operator before the lookup so
    // equality is evaluated on the wrapped values.
    Mutable<ILogicalOperator> wrappedOp = new MutableObject<ILogicalOperator>(op);
    if (!roots.contains(wrappedOp)) {
        roots.add(wrappedOp);
    }
    // This visitor never changes the plan itself.
    return false;
}
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    // Only fire once we are back at a plan root (sink operator).
    AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
    if (op.getOperatorTag() != LogicalOperatorTag.WRITE && op.getOperatorTag() != LogicalOperatorTag.WRITE_RESULT
            && op.getOperatorTag() != LogicalOperatorTag.DISTRIBUTE_RESULT) {
        return false;
    }
    if (roots.isEmpty()) {
        return false;
    }
    boolean rewritten = false;
    boolean progress;
    // Apply the rewriting repeatedly until a fixpoint is reached.
    do {
        progress = false;
        topDownMaterialization(roots);
        genCandidates(context);
        removeTrivialShare();
        if (!equivalenceClasses.isEmpty()) {
            progress = rewrite(context);
        }
        rewritten |= progress;
        // Reset all per-pass state before the next iteration.
        equivalenceClasses.clear();
        childrenToParents.clear();
        opToCandidateInputs.clear();
        clusterMap.clear();
        clusterWaitForMap.clear();
        lastUsedClusterId = 0;
    } while (progress);
    roots.clear();
    return rewritten;
}
/**
 * Drops candidates rooted at an empty tuple source (directly or behind an
 * exchange) — sharing those gains nothing — and then discards every
 * equivalence class left with fewer than two members.
 */
private void removeTrivialShare() {
    for (List<Mutable<ILogicalOperator>> candidates : equivalenceClasses) {
        // Walk backwards so removals do not shift the indices still to visit.
        for (int pos = candidates.size() - 1; pos >= 0; pos--) {
            AbstractLogicalOperator candidateOp = (AbstractLogicalOperator) candidates.get(pos).getValue();
            if (candidateOp.getOperatorTag() == LogicalOperatorTag.EXCHANGE) {
                // Look through the exchange at the operator it feeds from.
                candidateOp = (AbstractLogicalOperator) candidateOp.getInputs().get(0).getValue();
            }
            if (candidateOp.getOperatorTag() == LogicalOperatorTag.EMPTYTUPLESOURCE) {
                candidates.remove(pos);
            }
        }
    }
    for (int classPos = equivalenceClasses.size() - 1; classPos >= 0; classPos--) {
        if (equivalenceClasses.get(classPos).size() < 2) {
            equivalenceClasses.remove(classPos);
        }
    }
}
/**
 * Rewrites every equivalence class in turn.
 *
 * @return true when at least one class produced a rewrite
 */
private boolean rewrite(IOptimizationContext context) throws AlgebricksException {
    boolean anyRewritten = false;
    for (List<Mutable<ILogicalOperator>> equivalentOps : equivalenceClasses) {
        // |= keeps the flag sticky while still processing every class.
        anyRewritten |= rewriteForOneEquivalentClass(equivalentOps, context);
    }
    return anyRewritten;
}
/**
 * Rewrites one equivalence class: repeatedly pulls a candidate out of the
 * class, gathers all members isomorphic to it into a group, and replaces the
 * group by a single shared subtree fed through a ReplicateOperator. Each
 * non-candidate member is re-expressed as exchange -> assign (renaming the
 * candidate's live variables to the member's own) -> project.
 *
 * @param members operator references of one equivalence class; consumed
 *                (emptied) by this method
 * @return true when at least one group was rewritten
 */
private boolean rewriteForOneEquivalentClass(List<Mutable<ILogicalOperator>> members, IOptimizationContext context)
        throws AlgebricksException {
    List<Mutable<ILogicalOperator>> group = new ArrayList<Mutable<ILogicalOperator>>();
    boolean rewritten = false;
    while (members.size() > 0) {
        group.clear();
        // Pick the last member as the representative candidate and collect
        // every remaining member that is operator-isomorphic to it.
        Mutable<ILogicalOperator> candidate = members.remove(members.size() - 1);
        group.add(candidate);
        for (int i = members.size() - 1; i >= 0; i--) {
            Mutable<ILogicalOperator> peer = members.get(i);
            if (IsomorphismUtilities.isOperatorIsomorphic(candidate.getValue(), peer.getValue())) {
                group.add(peer);
                members.remove(i);
            }
        }
        // Decide, per group member, whether its replicate output must be
        // materialized (computed from the cluster bookkeeping fields).
        boolean[] materializationFlags = computeMaterilizationFlags(group);
        if (group.isEmpty()) {
            continue;
        }
        candidate = group.get(0);
        // The shared subtree is fed into a replicate operator with one output
        // branch per group member.
        ReplicateOperator rop = new ReplicateOperator(group.size(), materializationFlags);
        rop.setPhysicalOperator(new ReplicatePOperator());
        Mutable<ILogicalOperator> ropRef = new MutableObject<ILogicalOperator>(rop);
        AbstractLogicalOperator aopCandidate = (AbstractLogicalOperator) candidate.getValue();
        List<Mutable<ILogicalOperator>> originalCandidateParents = childrenToParents.get(candidate);
        rop.setExecutionMode(((AbstractLogicalOperator) candidate.getValue()).getExecutionMode());
        if (aopCandidate.getOperatorTag() == LogicalOperatorTag.EXCHANGE) {
            rop.getInputs().add(candidate);
        } else {
            // Insert a one-to-one exchange between the candidate and the replicate.
            AbstractLogicalOperator beforeExchange = new ExchangeOperator();
            beforeExchange.setPhysicalOperator(new OneToOneExchangePOperator());
            beforeExchange.setExecutionMode(rop.getExecutionMode());
            Mutable<ILogicalOperator> beforeExchangeRef = new MutableObject<ILogicalOperator>(beforeExchange);
            beforeExchange.getInputs().add(candidate);
            context.computeAndSetTypeEnvironmentForOperator(beforeExchange);
            rop.getInputs().add(beforeExchangeRef);
        }
        context.computeAndSetTypeEnvironmentForOperator(rop);
        // Re-wire the candidate's original parents to consume the replicate
        // output instead of the candidate directly.
        for (Mutable<ILogicalOperator> parentRef : originalCandidateParents) {
            AbstractLogicalOperator parent = (AbstractLogicalOperator) parentRef.getValue();
            int index = parent.getInputs().indexOf(candidate);
            if (parent.getOperatorTag() == LogicalOperatorTag.EXCHANGE) {
                parent.getInputs().set(index, ropRef);
                rop.getOutputs().add(parentRef);
            } else {
                // Non-exchange parents get a fresh one-to-one exchange in between.
                AbstractLogicalOperator exchange = new ExchangeOperator();
                exchange.setPhysicalOperator(new OneToOneExchangePOperator());
                exchange.setExecutionMode(rop.getExecutionMode());
                MutableObject<ILogicalOperator> exchangeRef = new MutableObject<ILogicalOperator>(exchange);
                exchange.getInputs().add(ropRef);
                rop.getOutputs().add(exchangeRef);
                context.computeAndSetTypeEnvironmentForOperator(exchange);
                parent.getInputs().set(index, exchangeRef);
                context.computeAndSetTypeEnvironmentForOperator(parent);
            }
        }
        // Prepare one variable-reference expression per live variable of the
        // candidate; shared by the assign operators built below.
        List<LogicalVariable> liveVarsNew = new ArrayList<LogicalVariable>();
        VariableUtilities.getLiveVariables(candidate.getValue(), liveVarsNew);
        ArrayList<Mutable<ILogicalExpression>> assignExprs = new ArrayList<Mutable<ILogicalExpression>>();
        for (LogicalVariable liveVar : liveVarsNew) {
            assignExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(liveVar)));
        }
        // Replace every other group member by exchange -> assign -> project
        // over the shared replicate output.
        for (Mutable<ILogicalOperator> ref : group) {
            if (ref.equals(candidate)) {
                continue;
            }
            // Map the candidate's live variables back to this member's own
            // variables so downstream operators keep seeing their names.
            ArrayList<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
            Map<LogicalVariable, LogicalVariable> variableMappingBack = new HashMap<LogicalVariable, LogicalVariable>();
            IsomorphismUtilities.mapVariablesTopDown(ref.getValue(), candidate.getValue(), variableMappingBack);
            for (int i = 0; i < liveVarsNew.size(); i++) {
                liveVars.add(variableMappingBack.get(liveVarsNew.get(i)));
            }
            AbstractLogicalOperator assignOperator = new AssignOperator(liveVars, assignExprs);
            assignOperator.setExecutionMode(rop.getExecutionMode());
            assignOperator.setPhysicalOperator(new AssignPOperator());
            AbstractLogicalOperator projectOperator = new ProjectOperator(liveVars);
            projectOperator.setPhysicalOperator(new StreamProjectPOperator());
            projectOperator.setExecutionMode(rop.getExecutionMode());
            AbstractLogicalOperator exchOp = new ExchangeOperator();
            exchOp.setPhysicalOperator(new OneToOneExchangePOperator());
            exchOp.setExecutionMode(rop.getExecutionMode());
            exchOp.getInputs().add(ropRef);
            MutableObject<ILogicalOperator> exchOpRef = new MutableObject<ILogicalOperator>(exchOp);
            rop.getOutputs().add(exchOpRef);
            assignOperator.getInputs().add(exchOpRef);
            projectOperator.getInputs().add(new MutableObject<ILogicalOperator>(assignOperator));
            // set the types
            context.computeAndSetTypeEnvironmentForOperator(exchOp);
            context.computeAndSetTypeEnvironmentForOperator(assignOperator);
            context.computeAndSetTypeEnvironmentForOperator(projectOperator);
            // Splice the new chain in under each of the member's parents;
            // PROJECT parents attach below the project step (assign only).
            List<Mutable<ILogicalOperator>> parentOpList = childrenToParents.get(ref);
            for (Mutable<ILogicalOperator> parentOpRef : parentOpList) {
                AbstractLogicalOperator parentOp = (AbstractLogicalOperator) parentOpRef.getValue();
                int index = parentOp.getInputs().indexOf(ref);
                ILogicalOperator childOp = parentOp.getOperatorTag() == LogicalOperatorTag.PROJECT ? assignOperator
                        : projectOperator;
                if (!HeuristicOptimizer.isHyracksOp(parentOp.getPhysicalOperator().getOperatorTag())) {
                    parentOp.getInputs().set(index, new MutableObject<ILogicalOperator>(childOp));
                } else {
                    // If the parent operator is a hyracks operator,
                    // an extra one-to-one exchange is needed.
                    AbstractLogicalOperator exchg = new ExchangeOperator();
                    exchg.setPhysicalOperator(new OneToOneExchangePOperator());
                    exchg.setExecutionMode(childOp.getExecutionMode());
                    exchg.getInputs().add(new MutableObject<ILogicalOperator>(childOp));
                    parentOp.getInputs().set(index, new MutableObject<ILogicalOperator>(exchg));
                    context.computeAndSetTypeEnvironmentForOperator(exchg);
                }
                context.computeAndSetTypeEnvironmentForOperator(parentOp);
            }
        }
        rewritten = true;
    }
    return rewritten;
}
/**
 * Grows the equivalence classes level by level, starting from the leaf
 * classes built by topDownMaterialization: on each pass the current classes
 * are snapshotted, then each class is replaced by the parents of its members
 * (via candidatesGrow) and pruned into isomorphism groups. When growing a
 * level leaves no classes, the previous snapshot is restored and pruned once
 * more, so the largest usable classes survive.
 */
private void genCandidates(IOptimizationContext context) throws AlgebricksException {
    List<List<Mutable<ILogicalOperator>>> previousEquivalenceClasses = new ArrayList<List<Mutable<ILogicalOperator>>>();
    while (equivalenceClasses.size() > 0) {
        // Snapshot the current classes so we can fall back after overshooting.
        previousEquivalenceClasses.clear();
        for (List<Mutable<ILogicalOperator>> candidates : equivalenceClasses) {
            List<Mutable<ILogicalOperator>> candidatesCopy = new ArrayList<Mutable<ILogicalOperator>>();
            candidatesCopy.addAll(candidates);
            previousEquivalenceClasses.add(candidatesCopy);
        }
        // Collect the parent references of every current candidate; note the
        // list accumulates across classes within this pass.
        List<Mutable<ILogicalOperator>> currentLevelOpRefs = new ArrayList<Mutable<ILogicalOperator>>();
        for (List<Mutable<ILogicalOperator>> candidates : equivalenceClasses) {
            if (candidates.size() > 0) {
                for (Mutable<ILogicalOperator> opRef : candidates) {
                    List<Mutable<ILogicalOperator>> refs = childrenToParents.get(opRef);
                    if (refs != null) {
                        currentLevelOpRefs.addAll(refs);
                    }
                }
            }
            if (currentLevelOpRefs.size() == 0) {
                continue;
            }
            // Replace this class's members by their qualifying parents.
            candidatesGrow(currentLevelOpRefs, candidates);
        }
        if (currentLevelOpRefs.size() == 0) {
            // No class had any parents left: we reached the roots.
            break;
        }
        prune(context);
    }
    if (equivalenceClasses.size() < 1 && previousEquivalenceClasses.size() > 0) {
        // Growing destroyed all classes; restore the last good snapshot.
        equivalenceClasses.addAll(previousEquivalenceClasses);
        prune(context);
    }
}
/**
 * Recursively walks the plan from the given operators downwards, recording
 * each child's parents in childrenToParents. Operators with no inputs
 * (leaves) are accumulated into the first equivalence class, which seeds the
 * candidate generation.
 */
private void topDownMaterialization(List<Mutable<ILogicalOperator>> tops) {
    List<Mutable<ILogicalOperator>> leafCandidates = new ArrayList<Mutable<ILogicalOperator>>();
    List<Mutable<ILogicalOperator>> nextLevel = new ArrayList<Mutable<ILogicalOperator>>();
    for (Mutable<ILogicalOperator> parentRef : tops) {
        List<Mutable<ILogicalOperator>> inputs = parentRef.getValue().getInputs();
        for (Mutable<ILogicalOperator> childRef : inputs) {
            List<Mutable<ILogicalOperator>> parents = childrenToParents.get(childRef);
            if (parents == null) {
                // First encounter of this child: register it and schedule it
                // for the next recursion level.
                parents = new ArrayList<Mutable<ILogicalOperator>>();
                childrenToParents.put(childRef, parents);
                nextLevel.add(childRef);
            }
            parents.add(parentRef);
        }
        if (inputs.isEmpty()) {
            leafCandidates.add(parentRef);
        }
    }
    // All leaves share the single seed equivalence class.
    if (equivalenceClasses.isEmpty()) {
        equivalenceClasses.add(leafCandidates);
    } else {
        equivalenceClasses.get(0).addAll(leafCandidates);
    }
    if (!nextLevel.isEmpty()) {
        topDownMaterialization(nextLevel);
    }
}
/**
 * Replaces the current candidate set with those operators from {@code opList}
 * whose inputs are (all) covered by the previous candidate set.
 * Single-input operators qualify as soon as their one input matches a previous
 * candidate; multi-input operators qualify only once every input position has
 * been marked in the per-operator bitmap kept in {@code opToCandidateInputs}
 * (the bitmap persists across calls, so coverage can accumulate over rounds).
 *
 * @param opList     parent operators to test for candidacy
 * @param candidates in/out: previous candidates on entry, new candidates on exit
 */
private void candidatesGrow(List<Mutable<ILogicalOperator>> opList, List<Mutable<ILogicalOperator>> candidates) {
// snapshot the old candidates, then reuse the list for the new generation
List<Mutable<ILogicalOperator>> previousCandidates = new ArrayList<Mutable<ILogicalOperator>>();
previousCandidates.addAll(candidates);
candidates.clear();
boolean validCandidate = false;
for (Mutable<ILogicalOperator> op : opList) {
List<Mutable<ILogicalOperator>> inputs = op.getValue().getInputs();
for (int i = 0; i < inputs.size(); i++) {
Mutable<ILogicalOperator> inputRef = inputs.get(i);
// reset per input; after the loop the flag reflects the LAST input checked.
// NOTE(review): for multi-input ops this relies on the bitmap reaching full
// cardinality on that last matching input — confirm this ordering is intended.
validCandidate = false;
for (Mutable<ILogicalOperator> candidate : previousCandidates) {
// if current input is in candidates
if (inputRef.getValue().equals(candidate.getValue())) {
if (inputs.size() == 1) {
// sole input matched: immediately valid
validCandidate = true;
} else {
// multi-input: mark this input position and check full coverage
BitSet candidateInputBitMap = opToCandidateInputs.get(op);
if (candidateInputBitMap == null) {
candidateInputBitMap = new BitSet(inputs.size());
opToCandidateInputs.put(op, candidateInputBitMap);
}
candidateInputBitMap.set(i);
if (candidateInputBitMap.cardinality() == inputs.size()) {
validCandidate = true;
}
}
break;
}
}
}
if (!validCandidate) {
continue;
}
// avoid duplicate entries in the new candidate list
if (!candidates.contains(op)) {
candidates.add(op);
}
}
}
/**
 * Rebuilds {@code equivalenceClasses} by partitioning each existing class into
 * groups of pairwise-isomorphic operators. Only groups with at least two
 * members survive as equivalence classes; operators isomorphic to nothing
 * else are removed from their class.
 *
 * @param context the optimization context (isomorphism checks may throw)
 * @throws AlgebricksException propagated from the isomorphism test
 */
private void prune(IOptimizationContext context) throws AlgebricksException {
// work on a deep-ish copy of the class lists so we can rebuild equivalenceClasses
List<List<Mutable<ILogicalOperator>>> previousEquivalenceClasses = new ArrayList<List<Mutable<ILogicalOperator>>>();
for (List<Mutable<ILogicalOperator>> candidates : equivalenceClasses) {
List<Mutable<ILogicalOperator>> candidatesCopy = new ArrayList<Mutable<ILogicalOperator>>();
candidatesCopy.addAll(candidates);
previousEquivalenceClasses.add(candidatesCopy);
}
equivalenceClasses.clear();
for (List<Mutable<ILogicalOperator>> candidates : previousEquivalenceClasses) {
// reserved[i] == true means candidate i belongs to some isomorphic group
boolean[] reserved = new boolean[candidates.size()];
for (int i = 0; i < reserved.length; i++) {
reserved[i] = false;
}
for (int i = candidates.size() - 1; i >= 0; i--) {
if (reserved[i] == false) {
// seed a new group with candidate i, then collect all isomorphic peers below it
List<Mutable<ILogicalOperator>> equivalentClass = new ArrayList<Mutable<ILogicalOperator>>();
ILogicalOperator candidate = candidates.get(i).getValue();
equivalentClass.add(candidates.get(i));
for (int j = i - 1; j >= 0; j--) {
ILogicalOperator peer = candidates.get(j).getValue();
if (IsomorphismUtilities.isOperatorIsomorphic(candidate, peer)) {
reserved[i] = true;
reserved[j] = true;
equivalentClass.add(candidates.get(j));
}
}
if (equivalentClass.size() > 1) {
equivalenceClasses.add(equivalentClass);
// restore original (ascending-index) order within the group
Collections.reverse(equivalentClass);
}
}
}
// drop candidates that matched nothing (still unreserved)
for (int i = candidates.size() - 1; i >= 0; i--) {
if (!reserved[i]) {
candidates.remove(i);
}
}
}
}
/**
 * Computes, per candidate in {@code group}, whether sharing it would require a
 * materialization (because another group member lives in a cluster that blocks
 * on this one). Candidates that would need materialization are dropped from
 * the group when the subtree is not expensive enough to be worth it; groups
 * reduced below two members are cleared entirely.
 *
 * NOTE(review): method name keeps the original misspelling ("Materilization")
 * for caller compatibility. The returned flag array is only index-aligned with
 * {@code group} when {@code worthMaterialization} is true — in that case no
 * removals occur; otherwise an all-false array sized to the shrunken group is
 * returned, so the two cases stay consistent.
 *
 * @param group equivalence class of sharable operators (mutated in place)
 * @return per-candidate materialization flags (all false when not worth it)
 */
private boolean[] computeMaterilizationFlags(List<Mutable<ILogicalOperator>> group) {
// (re)assign cluster ids over the whole plan before analysing the group
lastUsedClusterId = 0;
for (Mutable<ILogicalOperator> root : roots) {
computeClusters(null, root, new MutableInt(++lastUsedClusterId));
}
boolean[] materializationFlags = new boolean[group.size()];
// cost decision is made once, from the first member's subtree
boolean worthMaterialization = worthMaterialization(group.get(0));
boolean requiresMaterialization;
// get clusterIds for each candidate in the group
List<Integer> groupClusterIds = new ArrayList<Integer>(group.size());
for (int i = 0; i < group.size(); i++) {
groupClusterIds.add(clusterMap.get(group.get(i)).getValue());
}
// iterate backwards so removal does not disturb unvisited indices
for (int i = group.size() - 1; i >= 0; i--) {
requiresMaterialization = requiresMaterialization(groupClusterIds, i);
if (requiresMaterialization && !worthMaterialization) {
group.remove(i);
groupClusterIds.remove(i);
}
materializationFlags[i] = requiresMaterialization;
}
// a group of fewer than two members has nothing left to share
if (group.size() < 2) {
group.clear();
}
// if does not worth materialization, the flags for the remaining candidates should be false
return worthMaterialization ? materializationFlags : new boolean[group.size()];
}
/**
 * Decides whether the candidate at {@code index} must be materialized: true
 * when any other member of the group belongs to a cluster that (transitively)
 * blocks the candidate's own cluster.
 *
 * @param groupClusterIds cluster id of each group member, index-aligned
 * @param index           position of the candidate under test
 * @return true if sharing this candidate needs a materialization
 */
private boolean requiresMaterialization(List<Integer> groupClusterIds, int index) {
    BitSet blockers = new BitSet();
    getAllBlockingClusterIds(groupClusterIds.get(index), blockers);
    if (blockers.isEmpty()) {
        return false;
    }
    for (int member = 0; member < groupClusterIds.size(); member++) {
        // the candidate cannot block itself
        if (member != index && blockers.get(groupClusterIds.get(member))) {
            return true;
        }
    }
    return false;
}
/**
 * Accumulates into {@code blockingClusters} every cluster id that the given
 * cluster transitively waits for, following {@code clusterWaitForMap}.
 * NOTE(review): assumes the wait-for relation is acyclic — a cycle would
 * recurse forever; confirm the cluster builder guarantees this.
 *
 * @param clusterId        cluster whose blockers are collected
 * @param blockingClusters out-parameter receiving all transitive blockers
 */
private void getAllBlockingClusterIds(int clusterId, BitSet blockingClusters) {
    BitSet directBlockers = clusterWaitForMap.get(clusterId);
    if (directBlockers == null) {
        return;
    }
    for (int id = directBlockers.nextSetBit(0); id >= 0; id = directBlockers.nextSetBit(id + 1)) {
        getAllBlockingClusterIds(id, blockingClusters);
    }
    blockingClusters.or(directBlockers);
}
/**
 * Recursively assigns a cluster id to every operator reachable from
 * {@code opRef}, using the physical operator's input/output dependency labels.
 * A 1 -&gt; 0 edge (blocking) starts a fresh cluster for the input subtree and
 * records a wait-for dependency; other edges keep the current cluster, merging
 * clusters when an operator was already tagged with a different id.
 *
 * @param parentRef        the parent through which this operator was reached
 *                         (used to pick the matching replicate output); may be null for roots
 * @param opRef            operator being clustered
 * @param currentClusterId mutable id shared by the cluster being grown
 */
private void computeClusters(Mutable<ILogicalOperator> parentRef, Mutable<ILogicalOperator> opRef,
MutableInt currentClusterId) {
// only replicate operator has multiple outputs
int outputIndex = 0;
if (opRef.getValue().getOperatorTag() == LogicalOperatorTag.REPLICATE) {
ReplicateOperator rop = (ReplicateOperator) opRef.getValue();
List<Mutable<ILogicalOperator>> outputs = rop.getOutputs();
// find which replicate output feeds parentRef
for (outputIndex = 0; outputIndex < outputs.size(); outputIndex++) {
if (outputs.get(outputIndex).equals(parentRef)) {
break;
}
}
}
AbstractLogicalOperator aop = (AbstractLogicalOperator) opRef.getValue();
// labels.first = input labels, labels.second = output labels
Pair<int[], int[]> labels = aop.getPhysicalOperator().getInputOutputDependencyLabels(opRef.getValue());
List<Mutable<ILogicalOperator>> inputs = opRef.getValue().getInputs();
for (int i = 0; i < inputs.size(); i++) {
Mutable<ILogicalOperator> inputRef = inputs.get(i);
if (labels.second[outputIndex] == 1 && labels.first[i] == 0) { // 1 -> 0
// NOTE(review): multi-output operators (length != 1) are skipped here —
// confirm blocking edges from replicate outputs need no new cluster.
if (labels.second.length == 1) {
clusterMap.put(opRef, currentClusterId);
// start a new cluster
MutableInt newClusterId = new MutableInt(++lastUsedClusterId);
computeClusters(opRef, inputRef, newClusterId);
// the current cluster must wait for the newly started one
BitSet waitForList = clusterWaitForMap.get(currentClusterId.getValue());
if (waitForList == null) {
waitForList = new BitSet();
clusterWaitForMap.put(currentClusterId.getValue(), waitForList);
}
waitForList.set(newClusterId.getValue());
}
} else { // 0 -> 0 and 1 -> 1
MutableInt prevClusterId = clusterMap.get(opRef);
if (prevClusterId == null || prevClusterId.getValue().equals(currentClusterId.getValue())) {
clusterMap.put(opRef, currentClusterId);
computeClusters(opRef, inputRef, currentClusterId);
} else {
// merge prevClusterId and currentClusterId: update all the map entries that has currentClusterId to prevClusterId
for (BitSet bs : clusterWaitForMap.values()) {
if (bs.get(currentClusterId.getValue())) {
bs.clear(currentClusterId.getValue());
bs.set(prevClusterId.getValue());
}
}
// redirect the shared MutableInt so callers holding it see the merged id
currentClusterId.setValue(prevClusterId.getValue());
}
}
}
}
/**
 * Returns true when the subtree rooted at {@code candidate} contains at least
 * one physical operator that is more expensive than a materialization,
 * i.e. sharing via materialization would pay off.
 *
 * @param candidate root of the subtree to inspect
 * @return true if any operator in the subtree is expensive enough
 */
protected boolean worthMaterialization(Mutable<ILogicalOperator> candidate) {
    AbstractLogicalOperator op = (AbstractLogicalOperator) candidate.getValue();
    if (op.getPhysicalOperator().expensiveThanMaterialization()) {
        return true;
    }
    // otherwise, any expensive descendant also justifies materialization
    for (Mutable<ILogicalOperator> inputRef : candidate.getValue().getInputs()) {
        if (worthMaterialization(inputRef)) {
            return true;
        }
    }
    return false;
}
}
| |
package net.sf.jabref.gui.cleanup;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.swing.DefaultListCellRenderer;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JTextArea;
import javax.swing.ListSelectionModel;
import javax.swing.UIManager;
import javax.swing.event.ListDataEvent;
import javax.swing.event.ListDataListener;
import net.sf.jabref.JabRefGUI;
import net.sf.jabref.logic.cleanup.Cleanups;
import net.sf.jabref.logic.l10n.Localization;
import net.sf.jabref.model.cleanup.FieldFormatterCleanup;
import net.sf.jabref.model.cleanup.FieldFormatterCleanups;
import net.sf.jabref.model.cleanup.Formatter;
import net.sf.jabref.model.database.BibDatabaseContext;
import net.sf.jabref.model.entry.BibEntry;
import net.sf.jabref.model.entry.InternalBibtexFields;
import net.sf.jabref.model.metadata.MetaData;
import com.jgoodies.forms.builder.FormBuilder;
import com.jgoodies.forms.layout.FormLayout;
/**
 * Swing panel for configuring a list of {@link FieldFormatterCleanup} actions
 * (field + formatter pairs), with an enable checkbox, add/remove/reset/recommend
 * buttons and a description area. Values are loaded from and stored to a
 * {@link MetaData} instance as "save actions".
 */
public class FieldFormatterCleanupsPanel extends JPanel {
// label prefix shown in the description area
private static final String DESCRIPTION = Localization.lang("Description") + ": ";
private final JCheckBox cleanupEnabled;
// the cleanups the panel was last populated with (used by hasChanged())
private FieldFormatterCleanups fieldFormatterCleanups;
private JList<?> actionsList;
private JComboBox<?> formattersCombobox;
private JComboBox<String> selectFieldCombobox;
private JButton addButton;
private JTextArea descriptionAreaText;
private JButton removeButton;
private JButton resetButton;
private JButton recommendButton;
// formatters restored by the Reset button
private final FieldFormatterCleanups defaultFormatters;
/**
 * @param description       label for the enable checkbox
 * @param defaultFormatters cleanups restored by the Reset button; must not be null
 */
public FieldFormatterCleanupsPanel(String description, FieldFormatterCleanups defaultFormatters) {
this.defaultFormatters = Objects.requireNonNull(defaultFormatters);
cleanupEnabled = new JCheckBox(description);
}
/** Populates the panel from the save actions stored in the given MetaData. */
public void setValues(MetaData metaData) {
Objects.requireNonNull(metaData);
Optional<FieldFormatterCleanups> saveActions = metaData.getSaveActions();
setValues(saveActions.orElse(Cleanups.DEFAULT_SAVE_ACTIONS));
}
/** Rebuilds the whole panel UI to show the given cleanups. */
public void setValues(FieldFormatterCleanups formatterCleanups) {
fieldFormatterCleanups = formatterCleanups;
// first clear existing content
this.removeAll();
List<FieldFormatterCleanup> configuredActions = fieldFormatterCleanups.getConfiguredActions();
//The copy is necessary because the original List is unmodifiable
List<FieldFormatterCleanup> actionsToDisplay = new ArrayList<>(configuredActions);
buildLayout(actionsToDisplay);
}
// Creates all widgets and lays them out; called once per setValues().
private void buildLayout(List<FieldFormatterCleanup> actionsToDisplay) {
FormBuilder builder = FormBuilder.create().layout(new FormLayout(
"left:pref, 13dlu, left:pref:grow, 4dlu, pref, 4dlu, pref",
"pref, 2dlu, pref, 2dlu, pref, 4dlu, pref, 2dlu, pref, 2dlu, pref, 2dlu, pref, 2dlu, pref, 2dlu, fill:pref:grow, 2dlu"));
builder.add(cleanupEnabled).xyw(1, 1, 7);
actionsList = new JList<>(new CleanupActionsListModel(actionsToDisplay));
actionsList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
// show the formatter description of the hovered row as tooltip
actionsList.addMouseMotionListener(new MouseMotionAdapter() {
@Override
public void mouseMoved(MouseEvent e) {
super.mouseMoved(e);
CleanupActionsListModel m = (CleanupActionsListModel) actionsList.getModel();
int index = actionsList.locationToIndex(e.getPoint());
if (index > -1) {
actionsList.setToolTipText(m.getElementAt(index).getFormatter().getDescription());
}
}
});
// keep a sensible selection after an entry was removed
actionsList.getModel().addListDataListener(new ListDataListener() {
@Override
public void intervalRemoved(ListDataEvent e) {
//index0 is sufficient, because of SingleSelection
if (e.getIndex0() == 0) {
//when an item gets deleted, the next one becomes the new 0
actionsList.setSelectedIndex(e.getIndex0());
}
if (e.getIndex0() > 0) {
actionsList.setSelectedIndex(e.getIndex0() - 1);
}
}
@Override
public void intervalAdded(ListDataEvent e) {
//empty, not needed
}
@Override
public void contentsChanged(ListDataEvent e) {
//empty, not needed
}
});
builder.add(actionsList).xyw(3, 5, 5);
resetButton = new JButton(Localization.lang("Reset"));
resetButton.addActionListener(e -> ((CleanupActionsListModel) actionsList.getModel()).reset(defaultFormatters));
// the recommended set depends on whether the current database is BibLaTeX
BibDatabaseContext databaseContext = JabRefGUI.getMainFrame().getCurrentBasePanel().getDatabaseContext();
recommendButton = new JButton(Localization.lang("Recommended for %0", databaseContext.getMode().getFormattedName()));
boolean isBibLaTeX = databaseContext.isBiblatexMode();
recommendButton.addActionListener(e -> {
if (isBibLaTeX) {
((CleanupActionsListModel) actionsList.getModel()).reset(Cleanups.RECOMMEND_BIBLATEX_ACTIONS);
} else {
((CleanupActionsListModel) actionsList.getModel()).reset(Cleanups.RECOMMEND_BIBTEX_ACTIONS);
}
});
removeButton = new JButton(Localization.lang("Remove selected"));
removeButton.addActionListener(
e -> ((CleanupActionsListModel) actionsList.getModel()).removeAtIndex(actionsList.getSelectedIndex()));
builder.add(removeButton).xy(7, 11);
builder.add(resetButton).xy(3, 11);
builder.add(recommendButton).xy(5, 11);
builder.add(getSelectorPanel()).xyw(3, 15, 5);
makeDescriptionTextAreaLikeJLabel();
builder.add(descriptionAreaText).xyw(3, 17, 5);
this.setLayout(new BorderLayout());
this.add(builder.getPanel(), BorderLayout.WEST);
updateDescription();
// make sure the layout is set according to the checkbox
cleanupEnabled.addActionListener(new EnablementStatusListener(fieldFormatterCleanups.isEnabled()));
cleanupEnabled.setSelected(fieldFormatterCleanups.isEnabled());
}
/**
* Create a TextArea that looks and behaves like a JLabel. Has the advantage of supporting multiline text and word wrap.
*/
private void makeDescriptionTextAreaLikeJLabel() {
descriptionAreaText = new JTextArea(DESCRIPTION);
descriptionAreaText.setLineWrap(true);
descriptionAreaText.setWrapStyleWord(true);
descriptionAreaText.setColumns(6);
descriptionAreaText.setEditable(false);
descriptionAreaText.setOpaque(false);
descriptionAreaText.setFocusable(false);
descriptionAreaText.setCursor(null);
descriptionAreaText.setFont(UIManager.getFont("Label.font"));
}
// Refreshes the description area from the currently selected formatter.
private void updateDescription() {
FieldFormatterCleanup formatterCleanup = getFieldFormatterCleanup();
if (formatterCleanup != null) {
descriptionAreaText.setText(DESCRIPTION + formatterCleanup.getFormatter().getDescription());
} else {
Formatter selectedFormatter = getFieldFormatter();
if (selectedFormatter != null) {
descriptionAreaText.setText(DESCRIPTION + selectedFormatter.getDescription());
} else {
descriptionAreaText.setText(DESCRIPTION);
}
}
}
/**
* This panel contains the two comboboxes and the Add button
* @return Returns the created JPanel
*/
private JPanel getSelectorPanel() {
FormBuilder builder = FormBuilder.create()
.layout(new FormLayout("left:pref:grow, 4dlu, left:pref:grow, 4dlu, pref:grow, 4dlu, right:pref",
"pref, 2dlu, pref:grow, 2dlu"));
// field selector: all known field names plus the BibTeX key, sorted
List<String> fieldNames = InternalBibtexFields.getAllPublicAndInteralFieldNames();
fieldNames.add(BibEntry.KEY_FIELD);
Collections.sort(fieldNames);
String[] allPlusKey = fieldNames.toArray(new String[fieldNames.size()]);
selectFieldCombobox = new JComboBox<>(allPlusKey);
selectFieldCombobox.setEditable(true);
builder.add(selectFieldCombobox).xy(1, 1);
List<String> formatterNames = Cleanups.getAvailableFormatters().stream()
.map(Formatter::getName).collect(Collectors.toList());
List<String> formatterDescriptions = Cleanups.getAvailableFormatters().stream()
.map(Formatter::getDescription).collect(Collectors.toList());
formattersCombobox = new JComboBox<>(formatterNames.toArray());
// tooltip per entry: the formatter's description (same index as the name list)
formattersCombobox.setRenderer(new DefaultListCellRenderer() {
@Override
public Component getListCellRendererComponent(JList<?> list, Object value, int index, boolean isSelected,
boolean cellHasFocus) {
if ((-1 < index) && (index < formatterDescriptions.size()) && (value != null)) {
setToolTipText(formatterDescriptions.get(index));
}
return super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
}
});
formattersCombobox.addItemListener(e -> updateDescription());
builder.add(formattersCombobox).xy(3, 1);
addButton = new JButton(Localization.lang("Add"));
addButton.addActionListener(e -> {
FieldFormatterCleanup newAction = getFieldFormatterCleanup();
if (newAction == null) {
return;
}
((CleanupActionsListModel) actionsList.getModel()).addCleanupAction(newAction);
});
builder.add(addButton).xy(5, 1);
return builder.getPanel();
}
/** Writes the configured cleanups back to the given MetaData (clearing them if empty). */
public void storeSettings(MetaData metaData) {
Objects.requireNonNull(metaData);
FieldFormatterCleanups formatterCleanups = getFormatterCleanups();
// if all actions have been removed, remove the save actions from the MetaData
if (formatterCleanups.getConfiguredActions().isEmpty()) {
metaData.clearSaveActions();
return;
}
metaData.setSaveActions(formatterCleanups);
}
/** @return the cleanups as currently configured in the UI */
public FieldFormatterCleanups getFormatterCleanups() {
List<FieldFormatterCleanup> actions = ((CleanupActionsListModel) actionsList.getModel()).getAllActions();
return new FieldFormatterCleanups(cleanupEnabled.isSelected(), actions);
}
/** @return true if the UI state differs from the values last set via setValues() */
public boolean hasChanged() {
return !fieldFormatterCleanups.equals(getFormatterCleanups());
}
/** @return true if the UI state equals the application default save actions */
public boolean isDefaultSaveActions() {
return Cleanups.DEFAULT_SAVE_ACTIONS.equals(getFormatterCleanups());
}
// Builds a cleanup from the two combobox selections.
// NOTE(review): getSelectedItem() may be null when nothing is selected — confirm
// the comboboxes always have a selection before this is called.
private FieldFormatterCleanup getFieldFormatterCleanup() {
Formatter selectedFormatter = getFieldFormatter();
String fieldKey = selectFieldCombobox.getSelectedItem().toString();
return new FieldFormatterCleanup(fieldKey, selectedFormatter);
}
// Resolves the selected formatter name to its Formatter instance (null if unknown).
private Formatter getFieldFormatter() {
Formatter selectedFormatter = null;
String selectedFormatterName = formattersCombobox.getSelectedItem().toString();
for (Formatter formatter : Cleanups.getAvailableFormatters()) {
if (formatter.getName().equals(selectedFormatterName)) {
selectedFormatter = formatter;
break;
}
}
return selectedFormatter;
}
/** Enables/disables all editing widgets to follow the enable checkbox. */
class EnablementStatusListener implements ActionListener {
public EnablementStatusListener(boolean initialStatus) {
setStatus(initialStatus);
}
@Override
public void actionPerformed(ActionEvent e) {
boolean enablementStatus = cleanupEnabled.isSelected();
setStatus(enablementStatus);
}
private void setStatus(boolean status) {
actionsList.setEnabled(status);
selectFieldCombobox.setEnabled(status);
formattersCombobox.setEnabled(status);
addButton.setEnabled(status);
removeButton.setEnabled(status);
resetButton.setEnabled(status);
recommendButton.setEnabled(status);
}
}
}
| |
package crazypants.enderio.material;
import java.util.ArrayList;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraftforge.oredict.OreDictionary;
import net.minecraftforge.oredict.ShapelessOreRecipe;
import com.enderio.core.common.util.OreDictionaryHelper;
import cpw.mods.fml.common.registry.GameRegistry;
import crazypants.enderio.EnderIO;
import crazypants.enderio.config.Config;
import crazypants.enderio.machine.obelisk.weather.TileWeatherObelisk.WeatherTask;
import static com.enderio.core.common.util.OreDictionaryHelper.*;
import static crazypants.enderio.EnderIO.itemBasicCapacitor;
import static crazypants.enderio.material.Alloy.*;
import static crazypants.enderio.material.Material.*;
import static crazypants.util.RecipeUtil.*;
/**
 * Registers EnderIO materials in the Forge ore dictionary and adds the mod's
 * crafting and smelting recipes for basic materials, alloys and components.
 */
public class MaterialRecipes {
    /**
     * Registers ore dictionary entries that depend on materials supplied by
     * other mods (copper, tin, ender pearl dust, enderium base).
     */
    public static void registerDependantOresInDictionary() {
        if (hasCopper()) {
            OreDictionary.registerOre("dustCopper", new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_COPPER.ordinal()));
        }
        if (hasTin()) {
            OreDictionary.registerOre("dustTin", new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_TIN.ordinal()));
        }
        if (hasEnderPearlDust()) {
            OreDictionary.registerOre("dustEnderPearl", new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_ENDER.ordinal()));
        }
        //Enderium Base
        if (OreDictionaryHelper.hasEnderium()) {
            OreDictionary.registerOre("ingotEnderiumBase", new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.INGOT_ENDERIUM_BASE.ordinal()));
        }
    }

    /**
     * Registers EnderIO's own dusts, alloys, nuggets, glass variants and skulls
     * in the ore dictionary, plus a few vanilla compatibility entries.
     */
    public static void registerOresInDictionary() {
        //Ore Dictionary Registeration
        OreDictionary.registerOre("dustCoal", new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_COAL.ordinal()));
        OreDictionary.registerOre("dustIron", new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_IRON.ordinal()));
        OreDictionary.registerOre("dustGold", new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_GOLD.ordinal()));
        OreDictionary.registerOre("dustObsidian", new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_OBSIDIAN.ordinal()));
        // every alloy registers its ingot and its storage block form
        for (Alloy alloy : Alloy.values()) {
            OreDictionary.registerOre(alloy.oreIngot, new ItemStack(EnderIO.itemAlloy, 1, alloy.ordinal()));
            OreDictionary.registerOre(alloy.oreBlock, new ItemStack(EnderIO.blockIngotStorage, 1, alloy.ordinal()));
        }
        OreDictionary.registerOre("nuggetPulsatingIron", new ItemStack(EnderIO.itemMaterial, 1, Material.PHASED_IRON_NUGGET.ordinal()));
        OreDictionary.registerOre("nuggetVibrantAlloy", new ItemStack(EnderIO.itemMaterial, 1, Material.VIBRANT_NUGGET.ordinal()));
        OreDictionary.registerOre("glass", Blocks.glass);
        OreDictionary.registerOre("stickWood", Items.stick);
        OreDictionary.registerOre("woodStick", Items.stick);
        OreDictionary.registerOre("sand", new ItemStack(Blocks.sand, 1, OreDictionary.WILDCARD_VALUE));
        OreDictionary.registerOre("ingotIron", Items.iron_ingot);
        OreDictionary.registerOre("ingotGold", Items.gold_ingot);
        ItemStack pureGlass = new ItemStack(EnderIO.blockFusedQuartz, 1, BlockFusedQuartz.Type.GLASS.ordinal());
        OreDictionary.registerOre("glass", pureGlass);
        OreDictionary.registerOre("blockGlass", pureGlass);
        OreDictionary.registerOre("blockGlassHardened", new ItemStack(EnderIO.blockFusedQuartz, 1, BlockFusedQuartz.Type.FUSED_QUARTZ.ordinal()));
        //Skulls
        ItemStack skull = new ItemStack(Items.skull, 1, OreDictionary.WILDCARD_VALUE);
        OreDictionary.registerOre("itemSkull", skull);
        OreDictionary.registerOre("itemSkull", new ItemStack(EnderIO.blockEndermanSkull));
        //Glass stuff for compatability
        addShaped(new ItemStack(Blocks.glass_pane, 16, 0), " ", "eee", "eee", 'e', pureGlass);
        addShapeless(new ItemStack(Blocks.glass), pureGlass);
        addShaped(new ItemStack(Items.glass_bottle, 3, 0), " ", "g g", " g ", 'g', pureGlass);
        Material.registerOres(EnderIO.itemMaterial);
        MachinePart.registerOres(EnderIO.itemMachinePart);
    }

    /**
     * Adds all crafting and smelting recipes: binder, nuggets, crystals,
     * grinding balls, capacitors, chassis, gears, food and alloy blocks.
     * Several recipes vary with the {@link Config} flags (hard recipes,
     * alternate binder, steel chassis, mod metals).
     */
    public static void addRecipes() {
        //Common Ingredients
        String conduitBinder = CONDUIT_BINDER.oreDict;
        ItemStack fusedQuartzFrame = new ItemStack(EnderIO.itemFusedQuartzFrame, 1, 0);
        String energeticAlloy = ENERGETIC_ALLOY.oreIngot;
        String phasedGold = PHASED_GOLD.oreIngot;
        String phasedIron = PHASED_IRON.oreIngot;
        String darkSteel = DARK_STEEL.oreIngot;
        ItemStack capacitor = new ItemStack(itemBasicCapacitor, 1, 0);
        //Conduit Binder
        ItemStack cbc = BINDER_COMPOSITE.getStack(8);
        if (Config.useAlternateBinderRecipe) {
            addShaped(cbc, "gcg", "sgs", "gcg", 'g', Blocks.gravel, 's', Blocks.sand, 'c', Items.clay_ball);
        } else {
            addShaped(cbc, "ggg", "scs", "ggg", 'g', Blocks.gravel, 's', Blocks.sand, 'c', Items.clay_ball);
        }
        GameRegistry.addSmelting(BINDER_COMPOSITE.getStack(), CONDUIT_BINDER.getStack(4), 0);
        //Nuggets: ingot <-> 9 nuggets, both directions
        ItemStack phasedIronNugget = new ItemStack(EnderIO.itemMaterial, 9, Material.PHASED_IRON_NUGGET.ordinal());
        addShapeless(phasedIronNugget, phasedIron);
        phasedIronNugget = phasedIronNugget.copy();
        phasedIronNugget.stackSize = 1;
        addShaped(PHASED_IRON.getStackIngot(), "eee", "eee", "eee", 'e', phasedIronNugget);
        ItemStack vibrantNugget = new ItemStack(EnderIO.itemMaterial, 9, Material.VIBRANT_NUGGET.ordinal());
        addShapeless(vibrantNugget, phasedGold);
        vibrantNugget = vibrantNugget.copy();
        vibrantNugget.stackSize = 1;
        addShaped(PHASED_GOLD.getStackIngot(), "eee", "eee", "eee", 'e', vibrantNugget);
        //Crystals
        ItemStack pulsCry = new ItemStack(EnderIO.itemMaterial, 1, Material.PULSATING_CYSTAL.ordinal());
        addShaped(pulsCry, "nnn", "ngn", "nnn", 'n', phasedIronNugget, 'g', Items.diamond);
        ItemStack vibCry = new ItemStack(EnderIO.itemMaterial, 1, Material.VIBRANT_CYSTAL.ordinal());
        addShaped(vibCry, "nnn", "ngn", "nnn", 'n', vibrantNugget, 'g', Items.emerald);
        //Balls
        ItemStack darkBall = new ItemStack(EnderIO.itemMaterial, 5, Material.DARK_GRINDING_BALL.ordinal());
        addShaped(darkBall, " s ", "sss", " s ", 's', darkSteel);
        //Smelting
        ItemStack dustIron = new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_IRON.ordinal());
        ItemStack dustGold = new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_GOLD.ordinal());
        ItemStack ingotIron = new ItemStack(Items.iron_ingot);
        ItemStack ingotGold = new ItemStack(Items.gold_ingot);
        GameRegistry.addSmelting(dustIron, ingotIron, 0);
        GameRegistry.addSmelting(dustGold, ingotGold, 0);
        //Ender Dusts
        ItemStack enderDust = new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_ENDER.ordinal());
        addShaped(new ItemStack(Items.ender_pearl), "eee", "eee", "eee", 'e', enderDust);
        //Dark Iron Bars
        ItemStack diBars = new ItemStack(EnderIO.blockDarkIronBars, 16, 0);
        addShaped(diBars, "ddd", "ddd", 'd', darkSteel);
        // Fused Quartz Frame (registered for both stick ore-dict names)
        addShaped(fusedQuartzFrame, "bsb", "s s", "bsb", 'b', conduitBinder, 's', "stickWood");
        addShaped(fusedQuartzFrame, "bsb", "s s", "bsb", 'b', conduitBinder, 's', "woodStick");
        // Machine Chassi
        ArrayList<ItemStack> steelIngots = OreDictionary.getOres("ingotSteel");
        ItemStack chassis = new ItemStack(EnderIO.itemMachinePart, 1, MachinePart.MACHINE_CHASSI.ordinal());
        // use steel only when configured AND some mod actually provides steel ingots
        String mat = Config.useSteelInChassi && steelIngots != null && !steelIngots.isEmpty() ? "ingotSteel" : "ingotIron";
        addShaped(chassis, "fif", "ici", "fif", 'f', Blocks.iron_bars, 'i', mat, 'c', capacitor);
        // Basic Gear (registered for both stick ore-dict names)
        ItemStack gear = new ItemStack(EnderIO.itemMachinePart, 1, MachinePart.BASIC_GEAR.ordinal());
        addShaped(gear, "scs", "c c", "scs", 's', "stickWood", 'c', "cobblestone");
        addShaped(gear, "scs", "c c", "scs", 's', "woodStick", 'c', "cobblestone");
        //Ender Capacitor
        ItemStack enderCapacitor = new ItemStack(EnderIO.itemBasicCapacitor, 1, 2);
        ItemStack activatedCapacitor = new ItemStack(EnderIO.itemBasicCapacitor, 1, 1);
        if (Config.useHardRecipes) {
            addShaped(enderCapacitor, "eee", "cgc", "eee", 'e', phasedGold, 'c', activatedCapacitor, 'g', Blocks.glowstone);
        } else {
            addShaped(enderCapacitor, " e ", "cgc", " e ", 'e', phasedGold, 'c', activatedCapacitor, 'g', Blocks.glowstone);
        }
        // Weather Crystal
        ItemStack main = Config.useHardRecipes ? new ItemStack(EnderIO.itemMaterial, 1, Material.VIBRANT_CYSTAL.ordinal()) : new ItemStack(Items.diamond);
        GameRegistry.addRecipe(new ShapelessOreRecipe(new ItemStack(EnderIO.itemMaterial, 1, Material.WEATHER_CRYSTAL.ordinal()), main, WeatherTask.CLEAR
            .requiredItem(), WeatherTask.RAIN.requiredItem(), WeatherTask.STORM.requiredItem()));
        if (Config.reinforcedObsidianEnabled) {
            ItemStack reinfObs = new ItemStack(EnderIO.blockReinforcedObsidian);
            String corners = darkSteel;
            if (Config.reinforcedObsidianUseDarkSteelBlocks) {
                corners = Alloy.DARK_STEEL.oreBlock;
            }
            addShaped(reinfObs, "dbd", "bob", "dbd", 'd', corners, 'b', EnderIO.blockDarkIronBars, 'o', Blocks.obsidian);
        }
        addShaped(EnderIO.blockDarkSteelAnvil, "bbb", " i ", "iii", 'b', DARK_STEEL.oreBlock, 'i', darkSteel);
        addShaped(new ItemStack(EnderIO.blockDarkSteelLadder, 12), "b", "b", "b", 'b', EnderIO.blockDarkIronBars);
        // alloy ingots <-> storage blocks, both directions
        for (Alloy alloy : Alloy.values()) {
            addShaped(alloy.getStackBlock(), "iii", "iii", "iii", 'i', alloy.oreIngot);
            addShapeless(alloy.getStackIngot(9), alloy.oreBlock);
        }
        //Food
        ItemStack flour = new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.FLOUR.ordinal());
        ItemStack bread = new ItemStack(Items.bread, 1, 0);
        GameRegistry.addSmelting(flour, bread, 0.35f);
        // dust -> ingot smelting for mod-provided metals
        if (hasCopper()) {
            ItemStack dustCopper = new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_COPPER.ordinal());
            ItemStack ingotCopper = OreDictionaryPreferences.instance.getPreferred(OreDictionaryHelper.INGOT_COPPER);
            GameRegistry.addSmelting(dustCopper, ingotCopper, 0);
        }
        if (hasTin()) {
            ItemStack dustTin = new ItemStack(EnderIO.itemPowderIngot, 1, PowderIngot.POWDER_TIN.ordinal());
            ItemStack ingotTin = OreDictionaryPreferences.instance.getPreferred(OreDictionaryHelper.INGOT_TIN);
            GameRegistry.addSmelting(dustTin, ingotTin, 0);
        }
        ArrayList<ItemStack> copperIngots = OreDictionary.getOres("ingotCopper");
        // hard recipes demand full gold ingots instead of nuggets
        String gold;
        if (Config.useHardRecipes) {
            gold = "ingotGold";
        } else {
            gold = "nuggetGold";
        }
        if (copperIngots != null && !copperIngots.isEmpty() && Config.useModMetals) {
            addShaped(capacitor, " gr", "gcg", "rg ", 'r', Items.redstone, 'g', gold, 'c', "ingotCopper");
        } else {
            addShaped(capacitor, " gr", "gig", "rg ", 'r', "dustRedstone", 'g', gold, 'i', "ingotIron");
        }
        if (Config.useHardRecipes) {
            addShaped(activatedCapacitor, "eee", "cCc", "eee", 'e', energeticAlloy, 'c', capacitor, 'C', "dustCoal");
        } else {
            addShaped(activatedCapacitor, " e ", "cCc", " e ", 'e', energeticAlloy, 'c', capacitor, 'C', "dustCoal");
        }
    }
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.cli;
import com.facebook.buck.core.util.graph.AbstractBreadthFirstThrowingTraversal;
import com.facebook.buck.core.util.graph.DirectedAcyclicGraph;
import com.facebook.buck.util.Escaper;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
public class Dot<T> {
// ids made of ASCII letters/digits, underscore and high-bit (octal 200-377) chars
// need no quoting in the Dot language
private static final Pattern VALID_ID_PATTERN = Pattern.compile("[a-zA-Z\200-\377_0-9]+");
private final DirectedAcyclicGraph<T> graph;
private final String graphName;
private final Function<T, String> nodeToName;
private final Function<T, String> nodeToTypeName;
private final Function<T, ImmutableSortedMap<String, String>> nodeToAttributes;
private final OutputOrder outputOrder;
private final Predicate<T> shouldContainNode;
private final boolean compactMode;
// first-seen order assigns each node a sequential id (see getNodeId)
private final Map<T, Integer> nodeToNodeId;
// graphviz color per known build-rule type; unknown types get no color entry
private static final Map<String, String> typeColors;
static {
typeColors =
new ImmutableMap.Builder<String, String>() {
{
put("android_aar", "springgreen2");
put("android_library", "springgreen3");
put("android_resource", "springgreen1");
put("android_prebuilt_aar", "olivedrab3");
put("java_library", "indianred1");
put("prebuilt_jar", "mediumpurple1");
}
}.build();
}
/**
 * Static factory for a {@link Builder} over the given graph.
 *
 * @param graph     the DAG to render
 * @param graphName name used in the {@code digraph <name>} header
 * @return a builder pre-populated with default rendering options
 */
public static <T> Builder<T> builder(DirectedAcyclicGraph<T> graph, String graphName) {
    Builder<T> dotBuilder = new Builder<>(graph, graphName);
    return dotBuilder;
}
/**
* Builder class for Dot output
*
* @param <T>
*/
public static class Builder<T> {
private final DirectedAcyclicGraph<T> graph;
private final String graphName;
private boolean compactMode;
private Function<T, String> nodeToName;
private Function<T, String> nodeToTypeName;
private Function<T, ImmutableSortedMap<String, String>> nodeToAttributes;
private OutputOrder outputOrder;
private Predicate<T> shouldContainNode;
// Defaults: toString naming, sorted output, keep every node, no extra attributes.
private Builder(DirectedAcyclicGraph<T> graph, String graphName) {
this.graph = graph;
this.graphName = graphName;
nodeToName = Object::toString;
nodeToTypeName = Object::toString;
outputOrder = OutputOrder.SORTED;
shouldContainNode = node -> true;
nodeToAttributes = node -> ImmutableSortedMap.of();
}
/** Sets the function producing each node's display name. */
public Builder<T> setNodeToName(Function<T, String> func) {
nodeToName = func;
return this;
}
/** Sets the function producing each node's type name (used for coloring). */
public Builder<T> setNodeToTypeName(Function<T, String> func) {
nodeToTypeName = func;
return this;
}
/** Chooses the traversal order (BFS or sorted) used when writing output. */
public Builder<T> setOutputOrder(OutputOrder outputOrder) {
this.outputOrder = outputOrder;
return this;
}
/** Restricts output to nodes accepted by the given predicate. */
public Builder<T> setNodesToFilter(Predicate<T> pred) {
shouldContainNode = pred;
return this;
}
/** Enables compact mode (numeric node ids instead of full names). */
public Builder<T> setCompactMode(boolean value) {
compactMode = value;
return this;
}
/**
* Configures a function to be used to extract additional attributes to include when rendering
* graph nodes.
*
* <p>In order to prevent collisions, all attribute names are prefixed with {@code buck_}. They
* are also escaped in order to be compatible with the <a
* href="https://graphviz.gitlab.io/_pages/doc/info/lang.html">Dot format</a>.
*/
public Builder<T> setNodeToAttributes(Function<T, ImmutableSortedMap<String, String>> func) {
nodeToAttributes = func;
return this;
}
/** @return an immutable {@link Dot} configured from this builder */
public Dot<T> build() {
return new Dot<>(this);
}
}
// Private: instances are only created through Builder.build().
private Dot(Builder<T> builder) {
  this.graph = builder.graph;
  this.graphName = builder.graphName;
  this.nodeToName = builder.nodeToName;
  this.nodeToTypeName = builder.nodeToTypeName;
  this.outputOrder = builder.outputOrder;
  this.shouldContainNode = builder.shouldContainNode;
  this.nodeToAttributes = builder.nodeToAttributes;
  this.compactMode = builder.compactMode;
  // Per-instance map assigning compact numeric IDs lazily, in first-seen order.
  this.nodeToNodeId = new HashMap<>();
}
/**
 * Returns a stable, 1-based numeric ID for {@code node}, assigned lazily in
 * first-seen order.
 *
 * <p>Uses a single {@code computeIfAbsent} lookup instead of the previous
 * containsKey/put/get triple. The mapping function reads {@code size()}
 * before the new entry is inserted, so the assigned IDs are identical to the
 * old behavior.
 */
private String getNodeId(T node) {
  return String.valueOf(
      this.nodeToNodeId.computeIfAbsent(node, n -> this.nodeToNodeId.size() + 1));
}
/** Writes out the graph in dot format to the given output */
public void writeOutput(Appendable output) throws IOException {
  // Sorting the edges to have deterministic output and be able to test this.
  output.append("digraph ").append(graphName).append(" {");
  output.append(System.lineSeparator());
  switch (outputOrder) {
    case BFS:
      {
        // Breadth-first walk from the roots (nodes with no incoming edges).
        // The throwing-traversal variant is used so visit() can propagate
        // IOException from the Appendable.
        new AbstractBreadthFirstThrowingTraversal<T, IOException>(
            graph.getNodesWithNoIncomingEdges()) {
          @Override
          public Iterable<T> visit(T node) throws IOException {
            ImmutableSortedSet<T> deps =
                ImmutableSortedSet.copyOf(graph.getOutgoingNodesFor(node));
            if (shouldContainNode.test(node)) {
              output.append(
                  printNode(
                      node,
                      Dot.this::getNodeId,
                      nodeToName,
                      nodeToTypeName,
                      nodeToAttributes,
                      compactMode));
              // Only print edges whose target also passes the node filter.
              for (T dep : Sets.filter(deps, shouldContainNode::test)) {
                output.append(printEdge(node, dep, nodeToName, Dot.this::getNodeId, compactMode));
              }
            }
            // Keep traversing through filtered-out nodes so their descendants
            // are still visited.
            return deps;
          }
        }.start();
        break;
      }
    case SORTED:
      {
        // Nodes (and each node's outgoing edges) in natural sorted order.
        for (T node : ImmutableSortedSet.copyOf(graph.getNodes())) {
          if (shouldContainNode.test(node)) {
            output.append(
                printNode(
                    node,
                    Dot.this::getNodeId,
                    nodeToName,
                    nodeToTypeName,
                    nodeToAttributes,
                    compactMode));
            for (T dep :
                Sets.filter(
                    ImmutableSortedSet.copyOf(graph.getOutgoingNodesFor(node)),
                    shouldContainNode::test)) {
              output.append(printEdge(node, dep, nodeToName, Dot.this::getNodeId, compactMode));
            }
          }
        }
        break;
      }
  }
  output.append("}");
  output.append(System.lineSeparator());
}
/**
 * Quotes {@code str} when the DOT language requires it: an ID may be used
 * unquoted only when it matches the valid-ID pattern, is non-empty, and does
 * not start with a digit.
 * See https://en.wikipedia.org/wiki/DOT_(graph_description_language)
 */
private static String escape(String str) {
  boolean plainId =
      VALID_ID_PATTERN.matcher(str).matches()
          && !str.isEmpty()
          && !Character.isDigit(str.charAt(0));
  if (plainId) {
    return str;
  }
  return Escaper.Quoter.DOUBLE.quote(str);
}
/**
 * Returns the Dot color for a node of the given type: a hand-picked color for
 * known types, otherwise a light pastel derived from the type's hash code.
 *
 * <p>Bug fix: the previous code used {@code hashCode() % 64}, which is
 * negative for negative hash codes, pushing channels below the intended
 * 192..255 pastel range. {@link Math#floorMod} keeps every channel offset in
 * [0, 64).
 */
private static String colorFromType(String type) {
  String known = Dot.typeColors.get(type);
  if (known != null) {
    return known;
  }
  int hash = type.hashCode();
  int r = 192 + Math.floorMod(hash, 64);
  int g = 192 + Math.floorMod(hash / 64, 64);
  int b = 192 + Math.floorMod(hash / 4096, 64);
  return String.format("\"#%02X%02X%02X\"", r, g, b);
}
/**
 * Renders one node statement: {@code name [style=filled,color=...];}.
 *
 * <p>In compact mode the node is referenced by its numeric ID (which needs no
 * escaping) and the human-readable name is moved into a {@code label}
 * attribute. Extra attributes from {@code nodeToAttributes} are appended,
 * each prefixed with {@code buck_} and escaped.
 */
private static <T> String printNode(
    T node,
    Function<T, String> nodeToId,
    Function<T, String> nodeToName,
    Function<T, String> nodeToTypeName,
    Function<T, ImmutableSortedMap<String, String>> nodeToAttributes,
    boolean compactMode) {
  String rawName = nodeToName.apply(node);
  String typeName = nodeToTypeName.apply(node);
  String labelAttribute = compactMode ? ",label=" + escape(rawName) : "";
  // Numeric IDs never need escaping; full names do.
  String nodeRef = compactMode ? nodeToId.apply(node) : escape(rawName);
  ImmutableSortedMap<String, String> attributes = nodeToAttributes.apply(node);
  String extraAttributes = "";
  if (!attributes.isEmpty()) {
    extraAttributes =
        attributes.entrySet().stream()
            .map(entry -> "," + escape("buck_" + entry.getKey()) + "=" + escape(entry.getValue()))
            .collect(Collectors.joining());
  }
  return String.format(
      " %s [style=filled,color=%s%s%s];%n",
      nodeRef, Dot.colorFromType(typeName), labelAttribute, extraAttributes);
}
/** Renders one edge statement: {@code source -> sink;}. */
private static <T> String printEdge(
    T sourceN,
    T sinkN,
    Function<T, String> nodeToName,
    Function<T, String> nodeToId,
    boolean compactMode) {
  String from;
  String to;
  if (compactMode) {
    // Compact mode identifies nodes by numeric IDs, which never need escaping.
    from = nodeToId.apply(sourceN);
    to = nodeToId.apply(sinkN);
  } else {
    from = escape(nodeToName.apply(sourceN));
    to = escape(nodeToName.apply(sinkN));
  }
  return String.format(" %s -> %s;%n", from, to);
}
/** How to print the dot graph. */
public enum OutputOrder {
  /** Generate the dot output in the order of a breadth-first traversal of the graph. */
  BFS,
  /** Generate the dot output in the sorted natural order of the nodes. */
  SORTED,
  // Note: writeOutput() switches exhaustively over these constants.
}
}
| |
/**
* Copyright (C) 2016 - 2030 youtongluan.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yx.redis.command;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Abstraction over the subset of Redis commands this project issues through a
 * Jedis-style client, keyed by {@code String}. Method names and signatures
 * mirror the corresponding Redis commands; implementations are expected to
 * follow Jedis semantics (boxed return types may be {@code null} when the
 * underlying command has no result).
 */
public interface JedisCommand {
  // ---- String / generic key commands ----
  String set(String key, String value);
  String set(String key, String value, String nxxx, String expx, long time);
  String set(String key, String value, String expx, long time);
  String set(String key, String value, String nxxx);
  String get(String key);
  Boolean exists(String key);
  Long persist(String key);
  String type(String key);
  byte[] dump(String key);
  String restore(String key, int ttl, byte[] serializedValue);
  Long expire(String key, int seconds);
  Long pexpire(String key, long milliseconds);
  Long expireAt(String key, long unixTime);
  Long pexpireAt(String key, long millisecondsTimestamp);
  Long ttl(String key);
  Long pttl(String key);
  Long touch(String key);
  Boolean setbit(String key, long offset, boolean value);
  Boolean getbit(String key, long offset);
  Long setrange(String key, long offset, String value);
  String getrange(String key, long startOffset, long endOffset);
  String getSet(String key, String value);
  Long setnx(String key, String value);
  String setex(String key, int seconds, String value);
  String psetex(String key, long milliseconds, String value);
  Long decrBy(String key, long decrement);
  Long decr(String key);
  Long incrBy(String key, long increment);
  Double incrByFloat(String key, double increment);
  Long incr(String key);
  Long append(String key, String value);
  String substr(String key, int start, int end);
  // ---- Hash commands ----
  Long hset(String key, String field, String value);
  Long hset(String key, Map<String, String> hash);
  String hget(String key, String field);
  Long hsetnx(String key, String field, String value);
  String hmset(String key, Map<String, String> hash);
  List<String> hmget(String key, String... fields);
  Long hincrBy(String key, String field, long value);
  Double hincrByFloat(String key, String field, double value);
  Boolean hexists(String key, String field);
  Long hdel(String key, String... field);
  Long hlen(String key);
  Set<String> hkeys(String key);
  List<String> hvals(String key);
  Map<String, String> hgetAll(String key);
  // ---- List commands ----
  Long rpush(String key, String... string);
  Long lpush(String key, String... string);
  Long llen(String key);
  List<String> lrange(String key, long start, long stop);
  String ltrim(String key, long start, long stop);
  String lindex(String key, long index);
  String lset(String key, long index, String value);
  Long lrem(String key, long count, String value);
  String lpop(String key);
  String rpop(String key);
  // ---- Set commands ----
  Long sadd(String key, String... member);
  Set<String> smembers(String key);
  Long srem(String key, String... member);
  String spop(String key);
  Set<String> spop(String key, long count);
  Long scard(String key);
  Boolean sismember(String key, String member);
  String srandmember(String key);
  List<String> srandmember(String key, int count);
  Long strlen(String key);
  // ---- Sorted-set commands ----
  Long zadd(String key, double score, String member);
  Long zadd(String key, Map<String, Double> scoreMembers);
  Set<String> zrange(String key, long start, long stop);
  Long zrem(String key, String... members);
  Double zincrby(String key, double increment, String member);
  Long zrank(String key, String member);
  Long zrevrank(String key, String member);
  Set<String> zrevrange(String key, long start, long stop);
  Long zcard(String key);
  Double zscore(String key, String member);
  List<String> sort(String key);
  Long zcount(String key, double min, double max);
  Long zcount(String key, String min, String max);
  Set<String> zrangeByScore(String key, double min, double max);
  Set<String> zrangeByScore(String key, String min, String max);
  Set<String> zrevrangeByScore(String key, double max, double min);
  Set<String> zrangeByScore(String key, double min, double max, int offset, int count);
  Set<String> zrevrangeByScore(String key, String max, String min);
  Set<String> zrangeByScore(String key, String min, String max, int offset, int count);
  Set<String> zrevrangeByScore(String key, double max, double min, int offset, int count);
  Set<String> zrevrangeByScore(String key, String max, String min, int offset, int count);
  Long zremrangeByRank(String key, long start, long stop);
  Long zremrangeByScore(String key, double min, double max);
  Long zremrangeByScore(String key, String min, String max);
  Long zlexcount(String key, String min, String max);
  Set<String> zrangeByLex(String key, String min, String max);
  Set<String> zrangeByLex(String key, String min, String max, int offset, int count);
  Set<String> zrevrangeByLex(String key, String max, String min);
  Set<String> zrevrangeByLex(String key, String max, String min, int offset, int count);
  Long zremrangeByLex(String key, String min, String max);
  // ---- Blocking / misc commands ----
  Long lpushx(String key, String... string);
  Long rpushx(String key, String... string);
  List<String> blpop(int timeout, String key);
  List<String> brpop(int timeout, String key);
  Long del(String key);
  Long unlink(String key);
  String echo(String string);
  Long move(String key, int dbIndex);
  Long bitcount(String key);
  Long bitcount(String key, long start, long end);
  // ---- HyperLogLog / geo / bitfield ----
  Long pfadd(String key, String... elements);
  // NOTE(review): primitive return here is inconsistent with the boxed Long
  // used elsewhere in this interface; kept as-is since callers may rely on it.
  long pfcount(String key);
  Long geoadd(String key, double longitude, double latitude, String member);
  Double geodist(String key, String member1, String member2);
  List<String> geohash(String key, String... members);
  List<Long> bitfield(String key, String... arguments);
  Long hstrlen(String key, String field);
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.uamp.ui;
import android.app.Activity;
import android.app.Fragment;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.ConnectivityManager;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.FragmentActivity;
import android.support.v4.media.MediaBrowserCompat;
import android.support.v4.media.MediaMetadataCompat;
import android.support.v4.media.session.MediaControllerCompat;
import android.support.v4.media.session.PlaybackStateCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.example.android.uamp.R;
import com.example.android.uamp.utils.LogHelper;
import com.example.android.uamp.utils.MediaIDHelper;
import com.example.android.uamp.utils.NetworkHelper;
import java.util.ArrayList;
import java.util.List;
/**
 * A Fragment that lists all the various browsable queues available
 * from a {@link android.service.media.MediaBrowserService}.
 * <p/>
 * It uses a {@link MediaBrowserCompat} to connect to the {@link com.example.android.uamp.MusicService}.
 * Once connected, the fragment subscribes to get all the children.
 * All {@link MediaBrowserCompat.MediaItem}'s that can be browsed are shown in a ListView.
 */
public class MediaBrowserFragment extends Fragment {
    private static final String TAG = LogHelper.makeLogTag(MediaBrowserFragment.class);
    // Fragment-argument key under which the browsed media ID is stored.
    private static final String ARG_MEDIA_ID = "media_id";
    private BrowseAdapter mBrowserAdapter;
    // The media ID currently subscribed to; null until onConnected() runs.
    private String mMediaId;
    // The hosting activity, which must implement MediaFragmentListener.
    private MediaFragmentListener mMediaFragmentListener;
    private View mErrorView;
    private TextView mErrorMessage;
    // Re-evaluates the error state (and refreshes the list) whenever network
    // connectivity flips between online and offline.
    private final BroadcastReceiver mConnectivityChangeReceiver = new BroadcastReceiver() {
        private boolean oldOnline = false;
        @Override
        public void onReceive(Context context, Intent intent) {
            // We don't care about network changes while this fragment is not associated
            // with a media ID (for example, while it is being initialized)
            if (mMediaId != null) {
                boolean isOnline = NetworkHelper.isOnline(context);
                if (isOnline != oldOnline) {
                    oldOnline = isOnline;
                    checkForUserVisibleErrors(false);
                    if (isOnline) {
                        mBrowserAdapter.notifyDataSetChanged();
                    }
                }
            }
        }
    };
    // Receive callbacks from the MediaController. Here we update our state such as which queue
    // is being shown, the current title and description and the PlaybackState.
    private final MediaControllerCompat.Callback mMediaControllerCallback =
        new MediaControllerCompat.Callback() {
            @Override
            public void onMetadataChanged(MediaMetadataCompat metadata) {
                super.onMetadataChanged(metadata);
                if (metadata == null) {
                    return;
                }
                LogHelper.d(TAG, "Received metadata change to media ",
                        metadata.getDescription().getMediaId());
                mBrowserAdapter.notifyDataSetChanged();
            }
            @Override
            public void onPlaybackStateChanged(@NonNull PlaybackStateCompat state) {
                super.onPlaybackStateChanged(state);
                LogHelper.d(TAG, "Received state change: ", state);
                checkForUserVisibleErrors(false);
                mBrowserAdapter.notifyDataSetChanged();
            }
        };
    // Receives the children of the subscribed media ID and repopulates the adapter.
    private final MediaBrowserCompat.SubscriptionCallback mSubscriptionCallback =
        new MediaBrowserCompat.SubscriptionCallback() {
            @Override
            public void onChildrenLoaded(@NonNull String parentId,
                                         @NonNull List<MediaBrowserCompat.MediaItem> children) {
                try {
                    LogHelper.d(TAG, "fragment onChildrenLoaded, parentId=" + parentId +
                            "  count=" + children.size());
                    checkForUserVisibleErrors(children.isEmpty());
                    mBrowserAdapter.clear();
                    for (MediaBrowserCompat.MediaItem item : children) {
                        mBrowserAdapter.add(item);
                    }
                    mBrowserAdapter.notifyDataSetChanged();
                } catch (Throwable t) {
                    // Swallow deliberately: a bad item must not crash the browse UI.
                    LogHelper.e(TAG, "Error on childrenloaded", t);
                }
            }
            @Override
            public void onError(@NonNull String id) {
                LogHelper.e(TAG, "browse fragment subscription onError, id=" + id);
                Toast.makeText(getActivity(), R.string.error_loading_media, Toast.LENGTH_LONG).show();
                checkForUserVisibleErrors(true);
            }
        };
    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        // If used on an activity that doesn't implement MediaFragmentListener, it
        // will throw an exception as expected:
        // NOTE(review): onAttach(Activity) is deprecated on newer API levels;
        // presumably fine for this app's minSdk — confirm.
        mMediaFragmentListener = (MediaFragmentListener) activity;
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        LogHelper.d(TAG, "fragment.onCreateView");
        View rootView = inflater.inflate(R.layout.fragment_list, container, false);
        mErrorView = rootView.findViewById(R.id.playback_error);
        mErrorMessage = (TextView) mErrorView.findViewById(R.id.error_message);
        mBrowserAdapter = new BrowseAdapter(getActivity());
        ListView listView = (ListView) rootView.findViewById(R.id.list_view);
        listView.setAdapter(mBrowserAdapter);
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                checkForUserVisibleErrors(false);
                MediaBrowserCompat.MediaItem item = mBrowserAdapter.getItem(position);
                mMediaFragmentListener.onMediaItemSelected(item);
            }
        });
        return rootView;
    }
    @Override
    public void onStart() {
        super.onStart();
        // fetch browsing information to fill the listview:
        MediaBrowserCompat mediaBrowser = mMediaFragmentListener.getMediaBrowser();
        LogHelper.d(TAG, "fragment.onStart, mediaId=", mMediaId,
                "  onConnected=" + mediaBrowser.isConnected());
        // If not yet connected, the hosting activity calls onConnected() later.
        if (mediaBrowser.isConnected()) {
            onConnected();
        }
        // Registers BroadcastReceiver to track network connection changes.
        this.getActivity().registerReceiver(mConnectivityChangeReceiver,
                new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION));
    }
    @Override
    public void onStop() {
        super.onStop();
        // Mirror onStart(): unsubscribe, drop the controller callback, and
        // unregister the connectivity receiver.
        MediaBrowserCompat mediaBrowser = mMediaFragmentListener.getMediaBrowser();
        if (mediaBrowser != null && mediaBrowser.isConnected() && mMediaId != null) {
            mediaBrowser.unsubscribe(mMediaId);
        }
        MediaControllerCompat controller = ((FragmentActivity) getActivity())
                .getSupportMediaController();
        if (controller != null) {
            controller.unregisterCallback(mMediaControllerCallback);
        }
        this.getActivity().unregisterReceiver(mConnectivityChangeReceiver);
    }
    @Override
    public void onDetach() {
        super.onDetach();
        mMediaFragmentListener = null;
    }
    /** Returns the media ID stored in the fragment arguments, or null if none was set. */
    public String getMediaId() {
        Bundle args = getArguments();
        if (args != null) {
            return args.getString(ARG_MEDIA_ID);
        }
        return null;
    }
    /** Stores the media ID to browse in the fragment arguments (call before attach). */
    public void setMediaId(String mediaId) {
        Bundle args = new Bundle(1);
        args.putString(MediaBrowserFragment.ARG_MEDIA_ID, mediaId);
        setArguments(args);
    }
    // Called when the MediaBrowser is connected. This method is either called by the
    // fragment.onStart() or explicitly by the activity in the case where the connection
    // completes after the onStart()
    public void onConnected() {
        if (isDetached()) {
            return;
        }
        mMediaId = getMediaId();
        if (mMediaId == null) {
            // No explicit ID: browse from the service's root.
            mMediaId = mMediaFragmentListener.getMediaBrowser().getRoot();
        }
        updateTitle();
        // Unsubscribing before subscribing is required if this mediaId already has a subscriber
        // on this MediaBrowser instance. Subscribing to an already subscribed mediaId will replace
        // the callback, but won't trigger the initial callback.onChildrenLoaded.
        //
        // This is temporary: A bug is being fixed that will make subscribe
        // consistently call onChildrenLoaded initially, no matter if it is replacing an existing
        // subscriber or not. Currently this only happens if the mediaID has no previous
        // subscriber or if the media content changes on the service side, so we need to
        // unsubscribe first.
        mMediaFragmentListener.getMediaBrowser().unsubscribe(mMediaId);
        mMediaFragmentListener.getMediaBrowser().subscribe(mMediaId, mSubscriptionCallback);
        // Add MediaController callback so we can redraw the list when metadata changes:
        MediaControllerCompat controller = ((FragmentActivity) getActivity())
                .getSupportMediaController();
        if (controller != null) {
            controller.registerCallback(mMediaControllerCallback);
        }
    }
    // Decides whether the error view should be shown, picking the most specific
    // message available: connectivity problem > playback-state error > generic.
    private void checkForUserVisibleErrors(boolean forceError) {
        boolean showError = forceError;
        // If offline, message is about the lack of connectivity:
        if (!NetworkHelper.isOnline(getActivity())) {
            mErrorMessage.setText(R.string.error_no_connection);
            showError = true;
        } else {
            // otherwise, if state is ERROR and metadata!=null, use playback state error message:
            MediaControllerCompat controller = ((FragmentActivity) getActivity())
                    .getSupportMediaController();
            if (controller != null
                    && controller.getMetadata() != null
                    && controller.getPlaybackState() != null
                    && controller.getPlaybackState().getState() == PlaybackStateCompat.STATE_ERROR
                    && controller.getPlaybackState().getErrorMessage() != null) {
                mErrorMessage.setText(controller.getPlaybackState().getErrorMessage());
                showError = true;
            } else if (forceError) {
                // Finally, if the caller requested to show error, show a generic message:
                mErrorMessage.setText(R.string.error_loading_media);
                showError = true;
            }
        }
        mErrorView.setVisibility(showError ? View.VISIBLE : View.GONE);
        LogHelper.d(TAG, "checkForUserVisibleErrors. forceError=", forceError,
                " showError=", showError,
                " isOnline=", NetworkHelper.isOnline(getActivity()));
    }
    // Sets the toolbar title to the browsed item's title (async), or clears it at root.
    private void updateTitle() {
        if (MediaIDHelper.MEDIA_ID_ROOT.equals(mMediaId)) {
            mMediaFragmentListener.setToolbarTitle(null);
            return;
        }
        MediaBrowserCompat mediaBrowser = mMediaFragmentListener.getMediaBrowser();
        mediaBrowser.getItem(mMediaId, new MediaBrowserCompat.ItemCallback() {
            @Override
            public void onItemLoaded(MediaBrowserCompat.MediaItem item) {
                mMediaFragmentListener.setToolbarTitle(
                        item.getDescription().getTitle());
            }
        });
    }
    // An adapter for showing the list of browsed MediaItem's
    private static class BrowseAdapter extends ArrayAdapter<MediaBrowserCompat.MediaItem> {
        public BrowseAdapter(Activity context) {
            super(context, R.layout.media_list_item, new ArrayList<MediaBrowserCompat.MediaItem>());
        }
        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            MediaBrowserCompat.MediaItem item = getItem(position);
            // Derive the playback-state badge to show next to the item.
            int itemState = MediaItemViewHolder.STATE_NONE;
            if (item.isPlayable()) {
                itemState = MediaItemViewHolder.STATE_PLAYABLE;
                MediaControllerCompat controller = ((FragmentActivity) getContext())
                        .getSupportMediaController();
                if (controller != null && controller.getMetadata() != null) {
                    String currentPlaying = controller.getMetadata().getDescription().getMediaId();
                    String musicId = MediaIDHelper.extractMusicIDFromMediaID(
                            item.getDescription().getMediaId());
                    if (currentPlaying != null && currentPlaying.equals(musicId)) {
                        PlaybackStateCompat pbState = controller.getPlaybackState();
                        if (pbState == null ||
                                pbState.getState() == PlaybackStateCompat.STATE_ERROR) {
                            itemState = MediaItemViewHolder.STATE_NONE;
                        } else if (pbState.getState() == PlaybackStateCompat.STATE_PLAYING) {
                            itemState = MediaItemViewHolder.STATE_PLAYING;
                        } else {
                            itemState = MediaItemViewHolder.STATE_PAUSED;
                        }
                    }
                }
            }
            return MediaItemViewHolder.setupView((Activity) getContext(), convertView, parent,
                    item.getDescription(), itemState);
        }
    }
    // Contract the hosting activity must implement to receive selections and title updates.
    public interface MediaFragmentListener extends MediaBrowserProvider {
        void onMediaItemSelected(MediaBrowserCompat.MediaItem item);
        void setToolbarTitle(CharSequence title);
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import hudson.Extension;
import hudson.ExtensionList;
import hudson.ExtensionListListener;
import hudson.ExtensionPoint;
import hudson.ProxyConfiguration;
import jenkins.util.SystemProperties;
import hudson.init.InitMilestone;
import hudson.init.Initializer;
import hudson.util.FormValidation;
import hudson.util.FormValidation.Kind;
import hudson.util.QuotedStringTokenizer;
import hudson.util.TextFile;
import static hudson.util.TimeUnit2.DAYS;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import jenkins.model.DownloadSettings;
import jenkins.model.Jenkins;
import jenkins.util.JSONSignatureValidator;
import net.sf.json.JSONException;
import net.sf.json.JSONObject;
import org.apache.commons.io.IOUtils;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.interceptor.RequirePOST;
/**
* Service for plugins to periodically retrieve update data files
* (like the one in the update center) through browsers.
*
* <p>
* Because the retrieval of the file goes through XmlHttpRequest,
* we cannot reliably pass around binary.
*
* @author Kohsuke Kawaguchi
*/
@Extension
public class DownloadService extends PageDecorator {
/**
 * Prefix used when naming the signature validator for downloadable metadata
 * ("downloadable").
 */
private static final String signatureValidatorPrefix = "downloadable";
/**
 * Builds up an HTML fragment that starts all the download jobs.
 *
 * <p>For each registered {@link Downloadable} that is due, emits a
 * {@code <script>} block that asks the browser to fetch the metadata and POST
 * it back to {@code .../byId/<id>/postBack}. Returns the empty string when
 * browser-based download is disabled or unsupported, or the user lacks READ.
 */
public String generateFragment() {
    if (!DownloadSettings.usePostBack()) {
        return "";
    }
    if (neverUpdate) return "";
    if (doesNotSupportPostMessage()) return "";
    StringBuilder buf = new StringBuilder();
    if(Jenkins.getInstance().hasPermission(Jenkins.READ)) {
        long now = System.currentTimeMillis();
        for (Downloadable d : Downloadable.all()) {
            // Only fire when due, and rate-limit retries to one attempt per
            // 10 seconds per downloadable.
            if(d.getDue()<now && d.lastAttempt+10*1000<now) {
                buf.append("<script>")
                   .append("Behaviour.addLoadEvent(function() {")
                   .append("  downloadService.download(")
                   .append(QuotedStringTokenizer.quote(d.getId()))
                   .append(',')
                   .append(QuotedStringTokenizer.quote(mapHttps(d.getUrl())))
                   .append(',')
                   .append("{version:"+QuotedStringTokenizer.quote(Jenkins.VERSION)+'}')
                   .append(',')
                   .append(QuotedStringTokenizer.quote(Stapler.getCurrentRequest().getContextPath()+'/'+getUrl()+"/byId/"+d.getId()+"/postBack"))
                   .append(',')
                   .append("null);")
                   .append("});")
                   .append("</script>");
                d.lastAttempt = now;
            }
        }
    }
    return buf.toString();
}
/**
 * Returns true when the requesting browser is known not to support
 * {@code postMessage} (IE 5-7 on Windows), in which case the browser-based
 * download mechanism must be skipped.
 */
private boolean doesNotSupportPostMessage() {
    StaplerRequest req = Stapler.getCurrentRequest();
    if (req == null) {
        return false;
    }
    String ua = req.getHeader("User-Agent");
    if (ua == null) {
        return false;
    }
    // according to http://caniuse.com/#feat=x-doc-messaging, IE <=7 doesn't support postMessage
    // see http://www.useragentstring.com/pages/Internet%20Explorer/ for user agents
    // we want to err on the cautious side here.
    // Because of JENKINS-15105, we can't serve signed metadata from JSON, which means we need to be
    // using a modern browser as a vehicle to request these data. This check is here to prevent Jenkins
    // from using older browsers that are known not to support postMessage as the vehicle.
    if (!ua.contains("Windows")) {
        return false;
    }
    return ua.contains(" MSIE 5.") || ua.contains(" MSIE 6.") || ua.contains(" MSIE 7.");
}
/**
 * Rewrites the default update-center URL from HTTP to HTTPS when Jenkins
 * itself is served over HTTPS; other URLs pass through unchanged.
 */
private String mapHttps(String url) {
    /*
        HACKISH:

        Loading scripts in HTTP from HTTPS pages cause browsers to issue a warning dialog.
        The elegant way to solve the problem is to always load update center from HTTPS,
        but our backend mirroring scheme isn't ready for that. So this hack serves regular
        traffic in HTTP server, and only use HTTPS update center for Jenkins in HTTPS.

        We'll monitor the traffic to see if we can sustain this added traffic.
     */
    boolean isDefaultUpdateSite = url.startsWith("http://updates.jenkins-ci.org/");
    if (isDefaultUpdateSite && Jenkins.getInstance().isRootUrlSecure()) {
        // Swap the leading "http" for "https", keeping the rest of the URL.
        return "https" + url.substring(4);
    }
    return url;
}
/**
 * Gets {@link Downloadable} by its ID.
 * Used to bind them to URL.
 *
 * @return the matching downloadable, or {@code null} when none is registered
 */
public Downloadable getById(String id) {
    for (Downloadable candidate : Downloadable.all()) {
        if (candidate.getId().equals(id)) {
            return candidate;
        }
    }
    return null;
}
/**
* Loads JSON from a JSONP URL.
* Metadata for downloadables and update centers is offered in two formats, both designed for download from the browser (predating {@link DownloadSettings}):
* HTML using {@code postMessage} for newer browsers, and JSONP as a fallback.
* Confusingly, the JSONP files are given the {@code *.json} file extension, when they are really JavaScript and should be {@code *.js}.
* This method extracts the JSON from a JSONP URL, since that is what we actually want when we download from the server.
* (Currently the true JSON is not published separately, and extracting from the {@code *.json.html} is more work.)
* @param src a URL to a JSONP file (typically including {@code id} and {@code version} query parameters)
* @return the embedded JSON text
* @throws IOException if either downloading or processing failed
*/
@Restricted(NoExternalUse.class)
public static String loadJSON(URL src) throws IOException {
URLConnection con = ProxyConfiguration.open(src);
if (con instanceof HttpURLConnection) {
// prevent problems from misbehaving plugins disabling redirects by default
((HttpURLConnection) con).setInstanceFollowRedirects(true);
}
try (InputStream is = con.getInputStream()) {
String jsonp = IOUtils.toString(is, "UTF-8");
int start = jsonp.indexOf('{');
int end = jsonp.lastIndexOf('}');
if (start >= 0 && end > start) {
return jsonp.substring(start, end + 1);
} else {
throw new IOException("Could not find JSON in " + src);
}
}
}
/**
 * Loads JSON from a JSON-with-{@code postMessage} URL.
 * @param src a URL to a JSON HTML file (typically including {@code id} and {@code version} query parameters)
 * @return the embedded JSON text
 * @throws IOException if either downloading or processing failed
 */
@Restricted(NoExternalUse.class)
public static String loadJSONHTML(URL src) throws IOException {
    URLConnection connection = ProxyConfiguration.open(src);
    if (connection instanceof HttpURLConnection) {
        // prevent problems from misbehaving plugins disabling redirects by default
        ((HttpURLConnection) connection).setInstanceFollowRedirects(true);
    }
    try (InputStream in = connection.getInputStream()) {
        String body = IOUtils.toString(in, "UTF-8");
        // The payload is wrapped in a postMessage call; extract the JSON
        // argument between the preamble and the closing "),'*');".
        String preamble = "window.parent.postMessage(JSON.stringify(";
        int jsonStart = body.indexOf(preamble);
        int jsonEnd = body.lastIndexOf("),'*');");
        if (jsonStart < 0 || jsonEnd <= jsonStart) {
            throw new IOException("Could not find JSON in " + src);
        }
        return body.substring(jsonStart + preamble.length(), jsonEnd).trim();
    }
}
/**
 * This installs itself as a listener to changes to the Downloadable extension list and will download the metadata
 * for any newly added Downloadables.
 */
@Restricted(NoExternalUse.class)
public static class DownloadableListener extends ExtensionListListener {
    /**
     * Install this listener to the Downloadable extension list after all extensions have been loaded; we only
     * care about those that are added after initialization
     */
    @Initializer(after = InitMilestone.EXTENSIONS_AUGMENTED)
    public static void installListener() {
        ExtensionList.lookup(Downloadable.class).addListener(new DownloadableListener());
    }
    /**
     * Look for Downloadables that have no data, and update them.
     *
     * <p>FINE/FINER messages use parameterized {@code {0}} form so the string
     * is only formatted when the level is actually loggable, instead of the
     * previous eager concatenation.
     */
    @Override
    public void onChange() {
        for (Downloadable d : Downloadable.all()) {
            TextFile f = d.getDataFile();
            if (f == null || !f.exists()) {
                LOGGER.log(Level.FINE, "Updating metadata for {0}", d.getId());
                try {
                    d.updateNow();
                } catch (IOException e) {
                    LOGGER.log(Level.WARNING, "Failed to update metadata for " + d.getId(), e);
                }
            } else {
                LOGGER.log(Level.FINER, "Skipping update of metadata for {0}", d.getId());
            }
        }
    }
    private static final Logger LOGGER = Logger.getLogger(DownloadableListener.class.getName());
}
/**
* Represents a periodically updated JSON data file obtained from a remote URL.
*
* <p>
* This mechanism is one of the basis of the update center, which involves fetching
* up-to-date data file.
*
* @since 1.305
*/
public static class Downloadable implements ExtensionPoint {
// Unique identifier; also used as the on-disk file name under updates/.
private final String id;
// URL (relative to the update-center base) from which the data is fetched.
private final String url;
// Refresh period in milliseconds.
private final long interval;
// Next scheduled retrieval time (epoch millis); 0 means "not yet computed".
private volatile long due=0;
// Wall-clock time of the last download attempt; used for retry rate-limiting.
private volatile long lastAttempt=Long.MIN_VALUE;
/**
 * @param id
 *      unique identifier; also names the on-disk data file.
 * @param url
 *      URL relative to {@link UpdateCenter#getDefaultBaseUrl()}.
 *      So if this string is "foo.json", the ultimate URL will be
 *      something like "http://updates.jenkins-ci.org/updates/foo.json"
 *
 *      For security and privacy reasons, we don't allow the retrieval
 *      from random locations.
 * @param interval
 *      refresh period in milliseconds.
 */
public Downloadable(String id, String url, long interval) {
    this.id = id;
    this.url = url;
    this.interval = interval;
}
public Downloadable() {
this.id = getClass().getName().replace('$','.');
this.url = this.id+".json";
this.interval = DEFAULT_INTERVAL;
}
/**
* Uses the class name as an ID.
*/
public Downloadable(Class id) {
this(id.getName().replace('$','.'));
}
public Downloadable(String id) {
this(id,id+".json");
}
public Downloadable(String id, String url) {
this(id,url, DEFAULT_INTERVAL);
}
public String getId() {
return id;
}
/**
* URL to download.
*/
public String getUrl() {
return Jenkins.getInstance().getUpdateCenter().getDefaultBaseUrl()+"updates/"+url;
}
/**
* URLs to download from.
*/
public List<String> getUrls() {
List<String> updateSites = new ArrayList<String>();
for (UpdateSite site : Jenkins.getActiveInstance().getUpdateCenter().getSiteList()) {
String siteUrl = site.getUrl();
int baseUrlEnd = siteUrl.indexOf("update-center.json");
if (baseUrlEnd != -1) {
String siteBaseUrl = siteUrl.substring(0, baseUrlEnd);
updateSites.add(siteBaseUrl + "updates/" + url);
} else {
LOGGER.log(Level.WARNING, "Url {0} does not look like an update center:", siteUrl);
}
}
return updateSites;
}
/**
* How often do we retrieve the new image?
*
* @return
* number of milliseconds between retrieval.
*/
public long getInterval() {
return interval;
}
/**
* This is where the retrieved file will be stored.
*/
public TextFile getDataFile() {
return new TextFile(new File(Jenkins.getInstance().getRootDir(),"updates/"+id));
}
/**
* When shall we retrieve this file next time?
*/
public long getDue() {
if(due==0)
// if the file doesn't exist, this code should result
// in a very small (but >0) due value, which should trigger
// the retrieval immediately.
due = getDataFile().file.lastModified()+interval;
return due;
}
/**
* Loads the current file into JSON and returns it, or null
* if no data exists.
*/
public JSONObject getData() throws IOException {
TextFile df = getDataFile();
if(df.exists())
try {
return JSONObject.fromObject(df.read());
} catch (JSONException e) {
df.delete(); // if we keep this file, it will cause repeated failures
throw new IOException("Failed to parse "+df+" into JSON",e);
}
return null;
}
/**
* This is where the browser sends us the data.
*/
@RequirePOST
public void doPostBack(StaplerRequest req, StaplerResponse rsp) throws IOException {
DownloadSettings.checkPostBackAccess();
long dataTimestamp = System.currentTimeMillis();
due = dataTimestamp+getInterval(); // success or fail, don't try too often
String json = IOUtils.toString(req.getInputStream(),"UTF-8");
FormValidation e = load(json, dataTimestamp);
if (e.kind != Kind.OK) {
LOGGER.severe(e.renderHtml());
throw e;
}
rsp.setContentType("text/plain"); // So browser won't try to parse response
}
private FormValidation load(String json, long dataTimestamp) throws IOException {
TextFile df = getDataFile();
df.write(json);
df.file.setLastModified(dataTimestamp);
LOGGER.info("Obtained the updated data file for "+id);
return FormValidation.ok();
}
@Restricted(NoExternalUse.class)
public FormValidation updateNow() throws IOException {
List<JSONObject> jsonList = new ArrayList<>();
boolean toolInstallerMetadataExists = false;
for (UpdateSite updatesite : Jenkins.getActiveInstance().getUpdateCenter().getSiteList()) {
String site = updatesite.getMetadataUrlForDownloadable(url);
if (site == null) {
return FormValidation.warning("The update site " + site + " does not look like an update center");
}
String jsonString;
try {
jsonString = loadJSONHTML(new URL(site + ".html?id=" + URLEncoder.encode(getId(), "UTF-8") + "&version=" + URLEncoder.encode(Jenkins.VERSION, "UTF-8")));
toolInstallerMetadataExists = true;
} catch (Exception e) {
LOGGER.log(Level.FINE, "Could not load json from " + site, e );
continue;
}
JSONObject o = JSONObject.fromObject(jsonString);
if (signatureCheck) {
FormValidation e = updatesite.getJsonSignatureValidator(signatureValidatorPrefix +" '"+id+"'").verifySignature(o);
if (e.kind!= Kind.OK) {
LOGGER.log(Level.WARNING, "signature check failed for " + site, e );
continue;
}
}
jsonList.add(o);
}
if (jsonList.size() == 0 && toolInstallerMetadataExists) {
return FormValidation.warning("None of the tool installer metadata passed the signature check");
} else if (!toolInstallerMetadataExists) {
LOGGER.log(Level.WARNING, "No tool installer metadata found for " + id);
return FormValidation.ok();
}
JSONObject reducedJson = reduce(jsonList);
return load(reducedJson.toString(), System.currentTimeMillis());
}
/**
* Function that takes multiple JSONObjects and returns a single one.
* @param jsonList to be processed
* @return a single JSONObject
*/
public JSONObject reduce(List<JSONObject> jsonList) {
return jsonList.get(0);
}
/**
* check if the list of update center entries has duplicates
* @param genericList list of entries coming from multiple update centers
* @param comparator the unique ID of an entry
* @param <T> the generic class
* @return true if the list has duplicates, false otherwise
*/
public static <T> boolean hasDuplicates (List<T> genericList, String comparator) {
if (genericList.isEmpty()) {
return false;
}
Field field;
try {
field = genericList.get(0).getClass().getDeclaredField(comparator);
} catch (NoSuchFieldException e) {
LOGGER.warning("comparator: " + comparator + "does not exist for " + genericList.get(0).getClass() + ", " + e);
return false;
}
for (int i = 0; i < genericList.size(); i ++ ) {
T data1 = genericList.get(i);
for (int j = i + 1; j < genericList.size(); j ++ ) {
T data2 = genericList.get(j);
try {
if (field.get(data1).equals(field.get(data2))) {
return true;
}
} catch (IllegalAccessException e) {
LOGGER.warning("could not access field: " + comparator + ", " + e);
}
}
}
return false;
}
/**
* Returns all the registered {@link Downloadable}s.
*/
public static ExtensionList<Downloadable> all() {
return ExtensionList.lookup(Downloadable.class);
}
/**
* Returns the {@link Downloadable} that has the given ID.
*/
public static Downloadable get(String id) {
for (Downloadable d : all()) {
if(d.id.equals(id))
return d;
}
return null;
}
private static final Logger LOGGER = Logger.getLogger(Downloadable.class.getName());
private static final long DEFAULT_INTERVAL =
SystemProperties.getLong(Downloadable.class.getName()+".defaultInterval", DAYS.toMillis(1));
}
/**
 * Kill switch for metadata downloads, set via the system property
 * {@code hudson.model.DownloadService.never}.
 * NOTE(review): presumably consulted by the periodic download machinery to suppress
 * all retrievals — confirm against the callers of this flag.
 */
public static boolean neverUpdate = SystemProperties.getBoolean(DownloadService.class.getName()+".never");
/**
 * May be used to temporarily disable signature checking on {@link DownloadService} and {@link UpdateCenter}.
 * Useful when upstream signatures are broken, such as due to expired certificates.
 * Should only be used when {@link DownloadSettings#isUseBrowser};
 * disabling signature checks for in-browser downloads is <em>very dangerous</em> as unprivileged users could submit spoofed metadata!
 */
public static boolean signatureCheck = !SystemProperties.getBoolean(DownloadService.class.getName()+".noSignatureCheck");
}
| |
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.apiv1.scms;
import com.thoughtworks.go.api.ApiController;
import com.thoughtworks.go.api.ApiVersion;
import com.thoughtworks.go.api.CrudController;
import com.thoughtworks.go.api.base.OutputWriter;
import com.thoughtworks.go.api.representers.JsonReader;
import com.thoughtworks.go.api.spring.ApiAuthenticationHelper;
import com.thoughtworks.go.api.util.GsonTransformer;
import com.thoughtworks.go.apiv1.scms.representers.SCMRepresenter;
import com.thoughtworks.go.apiv1.scms.representers.SCMsRepresenter;
import com.thoughtworks.go.config.exceptions.EntityType;
import com.thoughtworks.go.config.exceptions.HttpException;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.domain.scm.SCMs;
import com.thoughtworks.go.server.service.EntityHashingService;
import com.thoughtworks.go.server.service.materials.PluggableScmService;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.spark.Routes;
import com.thoughtworks.go.spark.spring.SparkSpringController;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import spark.Request;
import spark.Response;
import java.io.IOException;
import java.util.function.Consumer;
import static com.thoughtworks.go.api.util.HaltApiResponses.*;
import static java.lang.String.format;
import static spark.Spark.*;
@Component
public class SCMControllerV1 extends ApiController implements SparkSpringController, CrudController<SCM> {
    public static final String MATERIAL_NAME = "material_name";

    private final ApiAuthenticationHelper apiAuthenticationHelper;
    private final PluggableScmService pluggableScmService;
    private final EntityHashingService entityHashingService;

    @Autowired
    public SCMControllerV1(ApiAuthenticationHelper apiAuthenticationHelper, PluggableScmService pluggableScmService, EntityHashingService entityHashingService) {
        super(ApiVersion.v1);
        this.apiAuthenticationHelper = apiAuthenticationHelper;
        this.pluggableScmService = pluggableScmService;
        this.entityHashingService = entityHashingService;
    }

    @Override
    public String controllerBasePath() {
        return Routes.SCM.BASE;
    }

    /**
     * Wires up the SCM CRUD routes. Filter registration order matters: content type is
     * negotiated and verified before the admin/group-admin authorization check runs.
     */
    @Override
    public void setupRoutes() {
        path(controllerBasePath(), () -> {
            before("", mimeType, this::setContentType);
            before("/*", mimeType, this::setContentType);
            before("", mimeType, this::verifyContentType);
            before("/*", mimeType, this::verifyContentType);
            before("", this.mimeType, this.apiAuthenticationHelper::checkAdminUserOrGroupAdminUserAnd403);
            before("/*", this.mimeType, this.apiAuthenticationHelper::checkAdminUserOrGroupAdminUserAnd403);

            get("", mimeType, this::index);
            post("", mimeType, this::create);
            get(Routes.SCM.ID, mimeType, this::show);
            put(Routes.SCM.ID, mimeType, this::update);

            exception(HttpException.class, this::httpException);
        });
    }

    /** Lists all SCMs, honouring conditional GET via the collection's etag. */
    public String index(Request request, Response response) throws IOException {
        SCMs allScms = pluggableScmService.listAllScms();
        String digest = entityHashingService.md5ForEntity(allScms);
        if (fresh(request, digest)) {
            return notModified(response);
        }
        setEtagHeader(response, digest);
        return writerForTopLevelObject(request, response, outputWriter -> SCMsRepresenter.toJSON(outputWriter, allScms));
    }

    /** Renders a single SCM looked up by material name, honouring conditional GET. */
    public String show(Request request, Response response) throws IOException {
        String materialName = request.params(MATERIAL_NAME);
        SCM scm = fetchEntityFromConfig(materialName);
        String digest = entityHashingService.md5ForEntity(scm);
        if (fresh(request, digest)) {
            return notModified(response);
        }
        setEtagHeader(response, digest);
        return writerForTopLevelObject(request, response, outputWriter -> SCMRepresenter.toJSON(outputWriter, scm));
    }

    /** Creates a new SCM after rejecting duplicate names or ids. */
    public String create(Request request, Response response) throws IOException {
        SCM newScm = buildEntityFromRequestBody(request, false);
        newScm.ensureIdExists();
        haltIfEntityWithSameNameExists(newScm);
        haltIfEntityWithSameIDExists(newScm);
        HttpLocalizedOperationResult operationResult = new HttpLocalizedOperationResult();
        pluggableScmService.createPluggableScmMaterial(currentUsername(), newScm, operationResult);
        return handleCreateOrUpdateResponse(request, response, newScm, operationResult);
    }

    /** Updates an existing SCM; renames and stale (etag-mismatched) PUTs are rejected. */
    public String update(Request request, Response response) throws IOException {
        String materialName = request.params(MATERIAL_NAME);
        SCM existing = fetchEntityFromConfig(materialName);
        SCM updated = buildEntityFromRequestBody(request);

        boolean idChanged = isRenameAttempt(existing.getId(), updated.getId());
        boolean nameChanged = isRenameAttempt(existing.getName(), updated.getName());
        if (idChanged || nameChanged) {
            throw haltBecauseRenameOfEntityIsNotSupported(getEntityType().getEntityNameLowerCase());
        }
        if (isPutRequestStale(request, existing)) {
            throw haltBecauseEtagDoesNotMatch(getEntityType().getEntityNameLowerCase(), existing.getId());
        }

        HttpLocalizedOperationResult operationResult = new HttpLocalizedOperationResult();
        pluggableScmService.updatePluggableScmMaterial(currentUsername(), updated, operationResult, getIfMatch(request));
        return handleCreateOrUpdateResponse(request, response, updated, operationResult);
    }

    @Override
    public String etagFor(SCM entityFromServer) {
        return entityHashingService.md5ForEntity(entityFromServer);
    }

    @Override
    public EntityType getEntityType() {
        return EntityType.SCM;
    }

    @Override
    public SCM doFetchEntityFromConfig(String name) {
        return pluggableScmService.findPluggableScmMaterial(name);
    }

    @Override
    public SCM buildEntityFromRequestBody(Request req) {
        JsonReader jsonReader = GsonTransformer.getInstance().jsonReaderFrom(req.body());
        return SCMRepresenter.fromJSON(jsonReader);
    }

    /** Variant used by {@link #create} so that the "id" field may be absent in the payload. */
    public SCM buildEntityFromRequestBody(Request req, boolean mustHaveId) {
        JsonReader jsonReader = GsonTransformer.getInstance().jsonReaderFrom(req.body());
        return SCMRepresenter.fromJSON(jsonReader, mustHaveId);
    }

    @Override
    public Consumer<OutputWriter> jsonWriter(SCM scm) {
        return outputWriter -> SCMRepresenter.toJSON(outputWriter, scm);
    }

    private void haltIfEntityWithSameNameExists(SCM scm) {
        SCM existing = pluggableScmService.findPluggableScmMaterial(scm.getName());
        if (existing != null) {
            scm.addError("name", format("SCM name should be unique. SCM with name '%s' already exists.", scm.getName()));
            throw haltBecauseEntityAlreadyExists(jsonWriter(scm), EntityType.SCM.getEntityNameLowerCase(), scm.getName());
        }
    }

    private void haltIfEntityWithSameIDExists(SCM scm) {
        boolean idAlreadyTaken = pluggableScmService
                .listAllScms()
                .stream()
                .anyMatch(existing -> existing.getId().equals(scm.getId()));
        if (idAlreadyTaken) {
            scm.addError("id", format("SCM id should be unique. SCM with id '%s' already exists.", scm.getId()));
            throw haltBecauseEntityAlreadyExists(jsonWriter(scm), EntityType.SCM.getEntityNameLowerCase(), scm.getId());
        }
    }

    private boolean isRenameAttempt(String profileIdFromRequestParam, String profileIdFromRequestBody) {
        return !StringUtils.equals(profileIdFromRequestBody, profileIdFromRequestParam);
    }
}
| |
/*
* Copyright 2011 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.client.widgets.drools.decoratedgrid.data;
import static org.junit.Assert.assertEquals;
import org.drools.guvnor.client.widgets.drools.decoratedgrid.CellValue;
import org.drools.guvnor.client.widgets.drools.decoratedgrid.data.Coordinate;
import org.junit.Test;
/**
 * Tests for DynamicData: verifies that every cell's coordinates and row spans are
 * re-indexed correctly after a column is appended, with cell merging disabled.
 *
 * <p>Rewritten to iterate over the grid rather than repeating one assertion per cell,
 * and to pass arguments to {@code assertEquals} in the JUnit-mandated
 * (expected, actual) order. The assertions performed are the same as before.
 */
public class DynamicDataTestsWithoutMergingColumnAdditions extends BaseDynamicDataTests {

    // NOTE(review): the base fixture is assumed to provide a 3x3 grid — confirm in BaseDynamicDataTests
    private static final int ROWS    = 3;
    private static final int COLUMNS = 3;

    @Test
    public void testIndexing_DataCoordinates() {
        checkDataCoordinates( COLUMNS );
        data.addColumn( 3,
                        makeCellValueList(),
                        true );
        checkDimensions( ROWS, COLUMNS + 1 );
        checkDataCoordinates( COLUMNS + 1 );
    }

    @Test
    public void testIndexing_HtmlCoordinates() {
        checkHtmlCoordinates( COLUMNS );
        data.addColumn( 3,
                        makeCellValueList(),
                        true );
        checkDimensions( ROWS, COLUMNS + 1 );
        checkHtmlCoordinates( COLUMNS + 1 );
    }

    @Test
    public void testIndexing_PhysicalCoordinates() {
        checkPhysicalCoordinates( COLUMNS );
        data.addColumn( 3,
                        makeCellValueList(),
                        true );
        checkDimensions( ROWS, COLUMNS + 1 );
        checkPhysicalCoordinates( COLUMNS + 1 );
    }

    @Test
    public void testIndexing_RowSpans() {
        checkRowSpans( COLUMNS );
        data.addColumn( 3,
                        makeCellValueList(),
                        true );
        checkDimensions( ROWS, COLUMNS + 1 );
        checkRowSpans( COLUMNS + 1 );
    }

    // Asserts the grid has the expected number of rows, each with the expected column count.
    private void checkDimensions(int expectedRows, int expectedColumns) {
        assertEquals( expectedRows,
                      data.size() );
        for ( int iRow = 0; iRow < expectedRows; iRow++ ) {
            assertEquals( expectedColumns,
                          data.get( iRow ).size() );
        }
    }

    // Asserts every cell's data coordinate equals its (row, column) position.
    private void checkDataCoordinates(int columns) {
        for ( int iRow = 0; iRow < ROWS; iRow++ ) {
            for ( int iCol = 0; iCol < columns; iCol++ ) {
                Coordinate c = data.get( iRow ).get( iCol ).getCoordinate();
                assertEquals( iRow,
                              c.getRow() );
                assertEquals( iCol,
                              c.getCol() );
            }
        }
    }

    // Asserts every cell's HTML coordinate equals its (row, column) position.
    private void checkHtmlCoordinates(int columns) {
        for ( int iRow = 0; iRow < ROWS; iRow++ ) {
            for ( int iCol = 0; iCol < columns; iCol++ ) {
                Coordinate c = data.get( iRow ).get( iCol ).getHtmlCoordinate();
                assertEquals( iRow,
                              c.getRow() );
                assertEquals( iCol,
                              c.getCol() );
            }
        }
    }

    // Asserts every cell's physical coordinate equals its (row, column) position.
    private void checkPhysicalCoordinates(int columns) {
        for ( int iRow = 0; iRow < ROWS; iRow++ ) {
            for ( int iCol = 0; iCol < columns; iCol++ ) {
                Coordinate c = data.get( iRow ).get( iCol ).getPhysicalCoordinate();
                assertEquals( iRow,
                              c.getRow() );
                assertEquals( iCol,
                              c.getCol() );
            }
        }
    }

    // Asserts every cell spans exactly one row (merging is disabled in these tests).
    private void checkRowSpans(int columns) {
        for ( int iRow = 0; iRow < ROWS; iRow++ ) {
            for ( int iCol = 0; iCol < columns; iCol++ ) {
                CellValue< ? extends Comparable< ? >> cv = data.get( iRow ).get( iCol );
                assertEquals( 1,
                              cv.getRowSpan() );
            }
        }
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFBsnBwEnableGetReplyVer14 implements OFBsnBwEnableGetReply {
private static final Logger logger = LoggerFactory.getLogger(OFBsnBwEnableGetReplyVer14.class);
// version: 1.4
final static byte WIRE_VERSION = 5;
final static int LENGTH = 20;
private final static long DEFAULT_XID = 0x0L;
private final static long DEFAULT_ENABLED = 0x0L;
// OF message fields
private final long xid;
private final long enabled;
//
// Immutable default instance
final static OFBsnBwEnableGetReplyVer14 DEFAULT = new OFBsnBwEnableGetReplyVer14(
DEFAULT_XID, DEFAULT_ENABLED
);
// package private constructor - used by readers, builders, and factory
OFBsnBwEnableGetReplyVer14(long xid, long enabled) {
this.xid = xid;
this.enabled = enabled;
}
// Accessors for OF message fields
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.EXPERIMENTER;
}
@Override
public long getXid() {
return xid;
}
@Override
public long getExperimenter() {
return 0x5c16c7L;
}
@Override
public long getSubtype() {
return 0x14L;
}
@Override
public long getEnabled() {
return enabled;
}
public OFBsnBwEnableGetReply.Builder createBuilder() {
return new BuilderWithParent(this);
}
static class BuilderWithParent implements OFBsnBwEnableGetReply.Builder {
final OFBsnBwEnableGetReplyVer14 parentMessage;
// OF message fields
private boolean xidSet;
private long xid;
private boolean enabledSet;
private long enabled;
BuilderWithParent(OFBsnBwEnableGetReplyVer14 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.EXPERIMENTER;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFBsnBwEnableGetReply.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public long getExperimenter() {
return 0x5c16c7L;
}
@Override
public long getSubtype() {
return 0x14L;
}
@Override
public long getEnabled() {
return enabled;
}
@Override
public OFBsnBwEnableGetReply.Builder setEnabled(long enabled) {
this.enabled = enabled;
this.enabledSet = true;
return this;
}
@Override
public OFBsnBwEnableGetReply build() {
long xid = this.xidSet ? this.xid : parentMessage.xid;
long enabled = this.enabledSet ? this.enabled : parentMessage.enabled;
//
return new OFBsnBwEnableGetReplyVer14(
xid,
enabled
);
}
}
static class Builder implements OFBsnBwEnableGetReply.Builder {
// OF message fields
private boolean xidSet;
private long xid;
private boolean enabledSet;
private long enabled;
@Override
public OFVersion getVersion() {
return OFVersion.OF_14;
}
@Override
public OFType getType() {
return OFType.EXPERIMENTER;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFBsnBwEnableGetReply.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public long getExperimenter() {
return 0x5c16c7L;
}
@Override
public long getSubtype() {
return 0x14L;
}
@Override
public long getEnabled() {
return enabled;
}
@Override
public OFBsnBwEnableGetReply.Builder setEnabled(long enabled) {
this.enabled = enabled;
this.enabledSet = true;
return this;
}
//
@Override
public OFBsnBwEnableGetReply build() {
long xid = this.xidSet ? this.xid : DEFAULT_XID;
long enabled = this.enabledSet ? this.enabled : DEFAULT_ENABLED;
return new OFBsnBwEnableGetReplyVer14(
xid,
enabled
);
}
}
final static Reader READER = new Reader();
static class Reader implements OFMessageReader<OFBsnBwEnableGetReply> {
@Override
public OFBsnBwEnableGetReply readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property version == 5
byte version = bb.readByte();
if(version != (byte) 0x5)
throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version);
// fixed value property type == 4
byte type = bb.readByte();
if(type != (byte) 0x4)
throw new OFParseError("Wrong type: Expected=OFType.EXPERIMENTER(4), got="+type);
int length = U16.f(bb.readShort());
if(length != 20)
throw new OFParseError("Wrong length: Expected=20(20), got="+length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
long xid = U32.f(bb.readInt());
// fixed value property experimenter == 0x5c16c7L
int experimenter = bb.readInt();
if(experimenter != 0x5c16c7)
throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
// fixed value property subtype == 0x14L
int subtype = bb.readInt();
if(subtype != 0x14)
throw new OFParseError("Wrong subtype: Expected=0x14L(0x14L), got="+subtype);
long enabled = U32.f(bb.readInt());
OFBsnBwEnableGetReplyVer14 bsnBwEnableGetReplyVer14 = new OFBsnBwEnableGetReplyVer14(
xid,
enabled
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", bsnBwEnableGetReplyVer14);
return bsnBwEnableGetReplyVer14;
}
}
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFBsnBwEnableGetReplyVer14Funnel FUNNEL = new OFBsnBwEnableGetReplyVer14Funnel();
/**
 * Funnel that mirrors the on-wire layout of the message: fixed header
 * (version/type/length), xid, experimenter id, subtype, then the enabled flag.
 * Must stay in sync with {@code Writer#write} so hashes match serialized bytes.
 */
static class OFBsnBwEnableGetReplyVer14Funnel implements Funnel<OFBsnBwEnableGetReplyVer14> {
    private static final long serialVersionUID = 1L;
    @Override
    public void funnel(OFBsnBwEnableGetReplyVer14 message, PrimitiveSink sink) {
        // fixed value property version = 5
        sink.putByte((byte) 0x5);
        // fixed value property type = 4
        sink.putByte((byte) 0x4);
        // fixed value property length = 20
        sink.putShort((short) 0x14);
        sink.putLong(message.xid);
        // fixed value property experimenter = 0x5c16c7L
        sink.putInt(0x5c16c7);
        // fixed value property subtype = 0x14L
        sink.putInt(0x14);
        sink.putLong(message.enabled);
    }
}
/** Serializes this message onto {@code bb} using the shared stateless writer. */
public void writeTo(ByteBuf bb) {
    WRITER.write(bb, this);
}

// Shared, stateless writer instance for this message type.
final static Writer WRITER = new Writer();
/**
 * Writes the fixed 20-byte wire form of the message: header
 * (version=5, type=4, length=20), xid, experimenter id 0x5c16c7,
 * subtype 0x14, then the 32-bit enabled value.
 */
static class Writer implements OFMessageWriter<OFBsnBwEnableGetReplyVer14> {
    @Override
    public void write(ByteBuf bb, OFBsnBwEnableGetReplyVer14 message) {
        // fixed value property version = 5
        bb.writeByte((byte) 0x5);
        // fixed value property type = 4
        bb.writeByte((byte) 0x4);
        // fixed value property length = 20
        bb.writeShort((short) 0x14);
        // U32.t() narrows the unsigned-long xid/enabled back to 32 bits
        bb.writeInt(U32.t(message.xid));
        // fixed value property experimenter = 0x5c16c7L
        bb.writeInt(0x5c16c7);
        // fixed value property subtype = 0x14L
        bb.writeInt(0x14);
        bb.writeInt(U32.t(message.enabled));
    }
}
/** Renders the message as {@code OFBsnBwEnableGetReplyVer14(xid=..., enabled=...)}. */
@Override
public String toString() {
    // Same textual form as the previous StringBuilder-based implementation.
    return "OFBsnBwEnableGetReplyVer14("
            + "xid=" + xid
            + ", enabled=" + enabled
            + ")";
}
/**
 * Two replies are equal iff they are of the exact same class and agree on
 * both {@code xid} and {@code enabled}.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // getClass() comparison (not instanceof) keeps equality symmetric across subclasses.
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    OFBsnBwEnableGetReplyVer14 other = (OFBsnBwEnableGetReplyVer14) obj;
    return xid == other.xid && enabled == other.enabled;
}
/**
 * Hash over both fields, consistent with {@link #equals(Object)}.
 *
 * <p>Bug fix: the previous version wrote {@code result = prime * (int)(...)}
 * on each line, overwriting the accumulator — so the {@code xid} term was
 * discarded entirely and the standard {@code 31 * result + term} mixing never
 * happened. Each field is now folded into the running result.
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + (int) (xid ^ (xid >>> 32));
    result = prime * result + (int) (enabled ^ (enabled >>> 32));
    return result;
}
}
| |
package co.za.mom.tests;
import com.google.ratel.Context;
import com.google.ratel.core.JsonParam;
import com.google.ratel.core.Param;
import com.google.ratel.core.RatelService;
import com.google.ratel.deps.fileupload.FileItem;
import com.google.ratel.deps.jackson.databind.*;
import com.google.ratel.util.Constants;
import java.util.HashMap;
import java.util.Map;
@RatelService
public class PersonService {

    /** No-argument smoke-test endpoint; always returns "OK". */
    public String getNoArgs() {
        return "OK";
    }

    /** Echoes a boxed Integer argument. */
    public Integer getInteger(Integer i) {
        System.out.println("getInteger() called: " + i);
        return i;
    }

    /** Echoes a required Integer[] bound from the "id" request parameter. */
    public Integer[] getIntegerArray(@Param(name = "id", required = true) Integer[] i) {
        System.out.println("getIntegerArray() called: " + i);
        return i;
    }

    /** Echoes a primitive long argument. */
    public long getPrimitiveLong(long l) {
        System.out.println("getPrimitiveLong() called: " + l);
        return l;
    }

    /** Echoes a required long[] bound from the "id" request parameter. */
    public long[] getPrimitiveLongArray(@Param(name = "id", required = true) long[] l) {
        System.out.println("getPrimitiveLongArray() called: " + l);
        return l;
    }

    /** Echoes a primitive boolean argument. */
    public boolean getPrimitiveBoolean(boolean bool) {
        System.out.println("getPrimitiveBoolean() called: " + bool);
        return bool;
    }

    /** Echoes a raw JSON string argument. */
    public String getJson(String args) {
        System.out.println("json() called: " + args);
        return args;
    }

    /**
     * Returns a shallow copy (firstname/lastname only) of the incoming person.
     * The incoming value is dumped as pretty-printed JSON for debugging.
     */
    public Person getPojo(Person person) {
        String json = toPrettyJson(person);
        System.out.println("getPerson() called with: " + json);
        Person result = new Person();
        result.setFirstname(person.getFirstname());
        result.setLastname(person.getLastname());
        return result;
    }

    /**
     * Echoes the incoming person (including any nested organisation) back to
     * the caller.
     *
     * <p>NOTE(review): the previous implementation also built a partial
     * {@code Person} copy and a detached {@code Organisation} that were never
     * attached to anything or returned; that dead code has been removed.
     * The response is unchanged: the original {@code person} is returned as-is.
     */
    public Person getNestedPojo(Person person) {
        System.out.println("getNestedPojo() called with: person: " + person);
        return person;
    }

    /**
     * Packs the scalar arguments and the person into an Object[] in call order.
     *
     * <p>NOTE(review): an unused partial {@code Person} copy built by the
     * previous implementation has been removed; the returned array holds the
     * original {@code person}, exactly as before.
     */
    public Object[] getArray(int i, long l, boolean b, Person person) {
        String json = toPrettyJson(person);
        System.out.println("getArray() called with: " + json);
        Object[] ar = new Object[4];
        ar[0] = i;
        ar[1] = l;
        ar[2] = b;
        ar[3] = person;
        return ar;
    }

    /** Echoes a required named Integer parameter. */
    public Integer getParam(@Param(name = "id", required = true) Integer i) {
        System.out.println("getParam() called - id:" + i);
        return i;
    }

    /** Echoes a mix of required and optional named parameters as an Object[]. */
    public Object[] getParams(@Param(name = "id", required = true) Long id, @Param(name = "employed") boolean employed, @Param(name =
        "description", required = true) String description) {
        System.out.println("getParams() called - id: " + id + ", employed: " + employed + ", description: " + description);
        Object[] array = new Object[3];
        array[0] = id;
        array[1] = employed;
        array[2] = description;
        return array;
    }

    /** Echoes a required Person bound from a JSON request parameter. */
    public Person getJsonParam(@JsonParam(name = "person", required = true) Person person) {
        System.out.println("getJsonParam() called - person: " + person);
        return person;
    }

    /** Echoes a JSON-bound Person together with a plain string parameter. */
    public Object[] getMixedParams(@JsonParam(name = "person", required = true) Person person,
        @Param(name = "description") String description) {
        System.out.println("getMixedParams() called - person: " + person + ", description: " + description);
        Object[] array = new Object[2];
        array[0] = person;
        array[1] = description;
        return array;
    }

    /** Renders a form parameter back as bold HTML. */
    public String formParam(@Param(name = "name", required = true) String name) {
        System.out.println("formParam() called - name: " + name);
        Context.getContext().getResponse().setContentType(Constants.HTML);
        return "<b>" + name + "</b>";
    }

    /** Echoes a form string parameter and a JSON-bound Person as an Object[]. */
    public Object[] formParams(@Param(name = "description") String description, @JsonParam(name = "person", required = true) Person person) {
        System.out.println("formParams() called - person: " + person + ", description: " + description);
        Object[] array = new Object[2];
        array[0] = description;
        array[1] = person;
        return array;
    }

    /**
     * Lists the names of all files uploaded under the "file" field.
     * NOTE(review): unlike uploadFileItem/uploadFileItems, this variant does
     * not set the HTML content type — confirm whether that is intentional.
     */
    public String upload() {
        Context c = Context.getContext();
        FileItem[] fileItems = c.getFileItems("file");
        StringBuilder sb = new StringBuilder();
        for (FileItem fileItem : fileItems) {
            System.out.println("content:" + fileItem.getString());
            sb.append("Filename:").append(fileItem.getName());
            sb.append("<br/>");
        }
        return "Uploaded file(s): " + sb.toString();
    }

    /** Reports the name of a single required uploaded file. */
    public String uploadFileItem(@Param(name = "file", required = true) FileItem fileItem) {
        Context.getContext().getResponse().setContentType(Constants.HTML);
        StringBuilder sb = new StringBuilder();
        System.out.println("content:" + fileItem.getString());
        sb.append("Filename:").append(fileItem.getName());
        sb.append("<br/>");
        return "Uploaded file: " + sb.toString();
    }

    /** Reports the names of all required uploaded files. */
    public String uploadFileItems(@Param(name = "file", required = true) FileItem[] fileItems) {
        Context.getContext().getResponse().setContentType(Constants.HTML);
        StringBuilder sb = new StringBuilder();
        for (FileItem fileItem : fileItems) {
            System.out.println("content:" + fileItem.getString());
            sb.append("Filename:").append(fileItem.getName());
            sb.append("<br/>");
        }
        return "Uploaded file(s): " + sb.toString();
    }

    /** Echo endpoint used to exercise missing-required-parameter handling. */
    public String getExceptionWithArgs(@Param(name = "id", required = true) String args) {
        return args;
    }

    /** Deliberately fails so clients can exercise runtime-exception handling. */
    public String getExceptionWithRuntime(String args) {
        throw new RuntimeException("Runtime Exception");
    }

    /** Deliberately fails so clients can exercise JSON error responses. */
    public String getExceptionWithJson() {
        throw new RuntimeException("JSON Exception");
    }

    /**
     * Renders /template/test.htm; the "break" model entry deliberately exposes
     * this service object to the template to exercise rendering failures.
     */
    public String getTemplate() {
        Map<String, Object> model = new HashMap<String, Object>();
        model.put("name", "Steve");
        model.put("break", this);
        String result = Context.getContext().renderTemplate("/template/test.htm", model);
        System.out.println(result);
        return result;
    }

    /** Renders /template/test2.htm with a renderer whose toString() always throws. */
    public String getTemplate2() {
        Map<String, Object> model = new HashMap<String, Object>();
        model.put("name", "Steve");
        model.put("BrokenRenderer", new BrokenRenderer());
        String result = Context.getContext().renderTemplate("/template/test2.htm", model);
        System.out.println(result);
        return result;
    }

    /** Deliberately fails so clients can exercise unhandled-exception paths. */
    public void exception() {
        throw new RuntimeException("Stop!");
    }

    /** Serializes {@code person} as pretty-printed JSON for debug logging. */
    private static String toPrettyJson(Person person) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            mapper.enable(SerializationFeature.INDENT_OUTPUT);
            return mapper.writeValueAsString(person);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    public static class BrokenRenderer {
        /**
         * Guaranteed to fail, or you money back.
         *
         * @see Object#toString()
         */
        @SuppressWarnings("null")
        @Override
        public String toString() {
            // Deliberate NPE: used by getTemplate2() to test template error handling.
            Object object = null;
            return object.toString();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.ode.nonstiff;
import org.apache.commons.math3.exception.DimensionMismatchException;
import org.apache.commons.math3.exception.MaxCountExceededException;
import org.apache.commons.math3.exception.NoBracketingException;
import org.apache.commons.math3.exception.NumberIsTooSmallException;
import org.apache.commons.math3.ode.FirstOrderDifferentialEquations;
import org.apache.commons.math3.ode.FirstOrderIntegrator;
import org.apache.commons.math3.ode.TestProblem1;
import org.apache.commons.math3.ode.TestProblem3;
import org.apache.commons.math3.ode.TestProblem5;
import org.apache.commons.math3.ode.TestProblemAbstract;
import org.apache.commons.math3.ode.TestProblemFactory;
import org.apache.commons.math3.ode.TestProblemHandler;
import org.apache.commons.math3.ode.events.EventHandler;
import org.apache.commons.math3.ode.sampling.StepHandler;
import org.apache.commons.math3.ode.sampling.StepInterpolator;
import org.apache.commons.math3.util.FastMath;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@code ClassicalRungeKuttaIntegrator} (fixed-step RK4):
 * event handling near the integration end, argument sanity checks,
 * convergence as the step shrinks, accuracy bounds, backward integration,
 * and step-size reporting through the interpolator.
 */
public class ClassicalRungeKuttaIntegratorTest {
    /**
     * Regression test: an event located essentially at the integration end must
     * still be detected, and integrating past it must continue correctly.
     */
    @Test
    public void testMissedEndEvent()
        throws DimensionMismatchException, NumberIsTooSmallException,
        MaxCountExceededException, NoBracketingException {
        final double t0 = 1878250320.0000029;
        final double tEvent = 1878250379.9999986;
        final double[] k = { 1.0e-4, 1.0e-5, 1.0e-6 };
        // Simple uncoupled linear ODE y_i' = k_i * y_i with known exponential solution.
        FirstOrderDifferentialEquations ode = new FirstOrderDifferentialEquations() {
            public int getDimension() {
                return k.length;
            }
            public void computeDerivatives(double t, double[] y, double[] yDot) {
                for (int i = 0; i < y.length; ++i) {
                    yDot[i] = k[i] * y[i];
                }
            }
        };
        ClassicalRungeKuttaIntegrator integrator = new ClassicalRungeKuttaIntegrator(60.0);
        double[] y0 = new double[k.length];
        for (int i = 0; i < y0.length; ++i) {
            y0[i] = i + 1;
        }
        double[] y = new double[k.length];
        // First pass: integrate exactly up to the event time, no handler registered.
        double finalT = integrator.integrate(ode, t0, y0, tEvent, y);
        Assert.assertEquals(tEvent, finalT, 5.0e-6);
        for (int i = 0; i < y.length; ++i) {
            // Compare against the analytical solution y0 * exp(k * (t - t0)).
            Assert.assertEquals(y0[i] * FastMath.exp(k[i] * (finalT - t0)), y[i], 1.0e-9);
        }
        // Second pass: register an event at tEvent and integrate 120s past it.
        integrator.addEventHandler(new EventHandler() {
            public void init(double t0, double[] y0, double t) {
            }
            public void resetState(double t, double[] y) {
            }
            public double g(double t, double[] y) {
                return t - tEvent;
            }
            public Action eventOccurred(double t, double[] y, boolean increasing) {
                // The event must be located at tEvent (within tolerance).
                Assert.assertEquals(tEvent, t, 5.0e-6);
                return Action.CONTINUE;
            }
        }, Double.POSITIVE_INFINITY, 1.0e-20, 100);
        finalT = integrator.integrate(ode, t0, y0, tEvent + 120, y);
        Assert.assertEquals(tEvent + 120, finalT, 5.0e-6);
        for (int i = 0; i < y.length; ++i) {
            Assert.assertEquals(y0[i] * FastMath.exp(k[i] * (finalT - t0)), y[i], 1.0e-9);
        }
    }
    /** Dimension mismatches and a zero-length interval must be rejected. */
    @Test
    public void testSanityChecks()
        throws DimensionMismatchException, NumberIsTooSmallException,
        MaxCountExceededException, NoBracketingException {
        try {
            // Initial state larger than the problem dimension.
            TestProblem1 pb = new TestProblem1();
            new ClassicalRungeKuttaIntegrator(0.01).integrate(pb,
                0.0, new double[pb.getDimension()+10],
                1.0, new double[pb.getDimension()]);
            Assert.fail("an exception should have been thrown");
        } catch(DimensionMismatchException ie) {
            // expected
        }
        try {
            // Output array larger than the problem dimension.
            TestProblem1 pb = new TestProblem1();
            new ClassicalRungeKuttaIntegrator(0.01).integrate(pb,
                0.0, new double[pb.getDimension()],
                1.0, new double[pb.getDimension()+10]);
            Assert.fail("an exception should have been thrown");
        } catch(DimensionMismatchException ie) {
            // expected
        }
        try {
            // Start time equal to end time: empty integration interval.
            TestProblem1 pb = new TestProblem1();
            new ClassicalRungeKuttaIntegrator(0.01).integrate(pb,
                0.0, new double[pb.getDimension()],
                0.0, new double[pb.getDimension()]);
            Assert.fail("an exception should have been thrown");
        } catch(NumberIsTooSmallException ie) {
            // expected
        }
    }
    /**
     * Halving the step size must not make value or time errors worse
     * (convergence check across the whole standard problem suite).
     */
    @Test
    public void testDecreasingSteps()
        throws DimensionMismatchException, NumberIsTooSmallException,
        MaxCountExceededException, NoBracketingException {
        TestProblemAbstract[] problems = TestProblemFactory.getProblems();
        for (int k = 0; k < problems.length; ++k) {
            double previousValueError = Double.NaN;
            double previousTimeError = Double.NaN;
            // Steps from (range / 2^4) down to (range / 2^9).
            for (int i = 4; i < 10; ++i) {
                TestProblemAbstract pb = problems[k].copy();
                double step = (pb.getFinalTime() - pb.getInitialTime()) * FastMath.pow(2.0, -i);
                FirstOrderIntegrator integ = new ClassicalRungeKuttaIntegrator(step);
                TestProblemHandler handler = new TestProblemHandler(pb, integ);
                integ.addStepHandler(handler);
                EventHandler[] functions = pb.getEventsHandlers();
                for (int l = 0; l < functions.length; ++l) {
                    integ.addEventHandler(functions[l],
                        Double.POSITIVE_INFINITY, 1.0e-6 * step, 1000);
                }
                Assert.assertEquals(functions.length, integ.getEventHandlers().size());
                double stopTime = integ.integrate(pb, pb.getInitialTime(), pb.getInitialState(),
                    pb.getFinalTime(), new double[pb.getDimension()]);
                if (functions.length == 0) {
                    // Without events the integrator must reach the requested final time.
                    Assert.assertEquals(pb.getFinalTime(), stopTime, 1.0e-10);
                }
                double error = handler.getMaximalValueError();
                if (i > 4) {
                    // Allow a 1% slack on the monotone-improvement requirement.
                    Assert.assertTrue(error < 1.01 * FastMath.abs(previousValueError));
                }
                previousValueError = error;
                double timeError = handler.getMaximalTimeError();
                if (i > 4) {
                    Assert.assertTrue(timeError <= FastMath.abs(previousTimeError));
                }
                previousTimeError = timeError;
                integ.clearEventHandlers();
                Assert.assertEquals(0, integ.getEventHandlers().size());
            }
        }
    }
    /** With a small step, RK4 must be very accurate on problem 1. */
    @Test
    public void testSmallStep()
        throws DimensionMismatchException, NumberIsTooSmallException,
        MaxCountExceededException, NoBracketingException {
        TestProblem1 pb = new TestProblem1();
        double step = (pb.getFinalTime() - pb.getInitialTime()) * 0.001;
        FirstOrderIntegrator integ = new ClassicalRungeKuttaIntegrator(step);
        TestProblemHandler handler = new TestProblemHandler(pb, integ);
        integ.addStepHandler(handler);
        integ.integrate(pb, pb.getInitialTime(), pb.getInitialState(),
            pb.getFinalTime(), new double[pb.getDimension()]);
        Assert.assertTrue(handler.getLastError() < 2.0e-13);
        Assert.assertTrue(handler.getMaximalValueError() < 4.0e-12);
        Assert.assertEquals(0, handler.getMaximalTimeError(), 1.0e-12);
        Assert.assertEquals("classical Runge-Kutta", integ.getName());
    }
    /** With a large step, the error must be visibly large (lower bounds). */
    @Test
    public void testBigStep()
        throws DimensionMismatchException, NumberIsTooSmallException,
        MaxCountExceededException, NoBracketingException {
        TestProblem1 pb = new TestProblem1();
        double step = (pb.getFinalTime() - pb.getInitialTime()) * 0.2;
        FirstOrderIntegrator integ = new ClassicalRungeKuttaIntegrator(step);
        TestProblemHandler handler = new TestProblemHandler(pb, integ);
        integ.addStepHandler(handler);
        integ.integrate(pb, pb.getInitialTime(), pb.getInitialState(),
            pb.getFinalTime(), new double[pb.getDimension()]);
        Assert.assertTrue(handler.getLastError() > 0.0004);
        Assert.assertTrue(handler.getMaximalValueError() > 0.005);
        Assert.assertEquals(0, handler.getMaximalTimeError(), 1.0e-12);
    }
    /** Integration backward in time (problem 5) must stay accurate. */
    @Test
    public void testBackward()
        throws DimensionMismatchException, NumberIsTooSmallException,
        MaxCountExceededException, NoBracketingException {
        TestProblem5 pb = new TestProblem5();
        double step = FastMath.abs(pb.getFinalTime() - pb.getInitialTime()) * 0.001;
        FirstOrderIntegrator integ = new ClassicalRungeKuttaIntegrator(step);
        TestProblemHandler handler = new TestProblemHandler(pb, integ);
        integ.addStepHandler(handler);
        integ.integrate(pb, pb.getInitialTime(), pb.getInitialState(),
            pb.getFinalTime(), new double[pb.getDimension()]);
        Assert.assertTrue(handler.getLastError() < 5.0e-10);
        Assert.assertTrue(handler.getMaximalValueError() < 7.0e-10);
        Assert.assertEquals(0, handler.getMaximalTimeError(), 1.0e-12);
        Assert.assertEquals("classical Runge-Kutta", integ.getName());
    }
    /** Highly eccentric Kepler orbit: asserts RK4's known accuracy limit. */
    @Test
    public void testKepler()
        throws DimensionMismatchException, NumberIsTooSmallException,
        MaxCountExceededException, NoBracketingException {
        final TestProblem3 pb = new TestProblem3(0.9);
        double step = (pb.getFinalTime() - pb.getInitialTime()) * 0.0003;
        FirstOrderIntegrator integ = new ClassicalRungeKuttaIntegrator(step);
        integ.addStepHandler(new KeplerHandler(pb));
        integ.integrate(pb,
            pb.getInitialTime(), pb.getInitialState(),
            pb.getFinalTime(), new double[pb.getDimension()]);
    }
    /** Tracks the worst squared position error against the analytical orbit. */
    private static class KeplerHandler implements StepHandler {
        public KeplerHandler(TestProblem3 pb) {
            this.pb = pb;
            maxError = 0;
        }
        public void init(double t0, double[] y0, double t) {
            maxError = 0;
        }
        public void handleStep(StepInterpolator interpolator, boolean isLast)
            throws MaxCountExceededException {
            double[] interpolatedY = interpolator.getInterpolatedState ();
            double[] theoreticalY = pb.computeTheoreticalState(interpolator.getCurrentTime());
            double dx = interpolatedY[0] - theoreticalY[0];
            double dy = interpolatedY[1] - theoreticalY[1];
            double error = dx * dx + dy * dy;
            if (error > maxError) {
                maxError = error;
            }
            if (isLast) {
                // even with more than 1000 evaluations per period,
                // RK4 is not able to integrate such an eccentric
                // orbit with a good accuracy
                Assert.assertTrue(maxError > 0.005);
            }
        }
        // Worst squared position error seen so far.
        private double maxError = 0;
        // Problem providing the analytical reference solution.
        private TestProblem3 pb;
    }
    /** Every non-final step reported by the interpolator must span exactly one step. */
    @Test
    public void testStepSize()
        throws DimensionMismatchException, NumberIsTooSmallException,
        MaxCountExceededException, NoBracketingException {
        final double step = 1.23456;
        FirstOrderIntegrator integ = new ClassicalRungeKuttaIntegrator(step);
        integ.addStepHandler(new StepHandler() {
            public void handleStep(StepInterpolator interpolator, boolean isLast) {
                if (! isLast) {
                    Assert.assertEquals(step,
                        interpolator.getCurrentTime() - interpolator.getPreviousTime(),
                        1.0e-12);
                }
            }
            public void init(double t0, double[] y0, double t) {
            }
        });
        // Trivial ODE y' = 1 over [0, 5].
        integ.integrate(new FirstOrderDifferentialEquations() {
            public void computeDerivatives(double t, double[] y, double[] dot) {
                dot[0] = 1.0;
            }
            public int getDimension() {
                return 1;
            }
        }, 0.0, new double[] { 0.0 }, 5.0, new double[1]);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gobblin.converter.avro;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.util.Utf8;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import sun.util.calendar.ZoneInfo;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import gobblin.configuration.ConfigurationKeys;
import gobblin.configuration.WorkUnitState;
/**
* <p>
* Creates a JsonElement to Avro converter for all supported data types.
* </p>
*
* @author kgoodhop
*
*/
public class JsonElementConversionFactory {
  /** Source-schema data types this factory can convert to Avro. */
  public enum Type {
    DATE,
    TIMESTAMP,
    TIME,
    FIXED,
    STRING,
    BYTES,
    INT,
    LONG,
    FLOAT,
    DOUBLE,
    BOOLEAN,
    ARRAY,
    MAP,
    ENUM
  }
  /**
   * Use to create a converter for a single field from a schema.
   *
   * @param fieldName name of the field in the source schema
   * @param fieldType source type name; matched case-insensitively against {@link Type}
   * @param schemaNode JSON node describing the field (used by array/map/enum converters)
   * @param state work unit state providing converter configuration
   * @param nullable whether the field may hold null
   * @return a converter for the requested type
   * @throws UnsupportedDateTypeException if the type is unknown or FIXED (unsupported)
   */
  public static JsonElementConverter getConvertor(String fieldName, String fieldType, JsonObject schemaNode,
      WorkUnitState state, boolean nullable) throws UnsupportedDateTypeException {
    Type type;
    try {
      type = Type.valueOf(fieldType.toUpperCase());
    } catch (IllegalArgumentException e) {
      throw new UnsupportedDateTypeException(fieldType + " is unsupported");
    }
    DateTimeZone timeZone = getTimeZone(state.getProp(ConfigurationKeys.CONVERTER_AVRO_DATE_TIMEZONE, "UTC"));
    switch (type) {
      case DATE:
        return new DateConverter(fieldName, nullable, type.toString(),
            state.getProp(ConfigurationKeys.CONVERTER_AVRO_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss"), timeZone, state);
      case TIMESTAMP:
        return new DateConverter(fieldName, nullable, type.toString(),
            state.getProp(ConfigurationKeys.CONVERTER_AVRO_TIMESTAMP_FORMAT, "yyyy-MM-dd HH:mm:ss"), timeZone, state);
      case TIME:
        return new DateConverter(fieldName, nullable, type.toString(),
            state.getProp(ConfigurationKeys.CONVERTER_AVRO_TIME_FORMAT, "HH:mm:ss"), timeZone, state);
      case FIXED:
        throw new UnsupportedDateTypeException(fieldType + " is unsupported");
      case STRING:
        return new StringConverter(fieldName, nullable, type.toString());
      case BYTES:
        return new BinaryConverter(fieldName, nullable, type.toString(),
            state.getProp(ConfigurationKeys.CONVERTER_AVRO_BINARY_CHARSET, "UTF8"));
      case INT:
        return new IntConverter(fieldName, nullable, type.toString());
      case LONG:
        return new LongConverter(fieldName, nullable, type.toString());
      case FLOAT:
        return new FloatConverter(fieldName, nullable, type.toString());
      case DOUBLE:
        return new DoubleConverter(fieldName, nullable, type.toString());
      case BOOLEAN:
        return new BooleanConverter(fieldName, nullable, type.toString());
      case ARRAY:
        return new ArrayConverter(fieldName, nullable, type.toString(), schemaNode, state);
      case MAP:
        return new MapConverter(fieldName, nullable, type.toString(), schemaNode, state);
      case ENUM:
        return new EnumConverter(fieldName, nullable, type.toString(), schemaNode);
      default:
        throw new UnsupportedDateTypeException(fieldType + " is unsupported");
    }
  }
  /**
   * Resolves a time zone id via Joda-Time, falling back to JDK ids (e.g. the
   * three-letter ids Joda rejects).
   *
   * <p>Fix: previously this used the JDK-internal {@code sun.util.calendar.ZoneInfo}
   * class, which is not part of the public API and is inaccessible under the
   * module system of modern JDKs. {@link TimeZone#getTimeZone(String)} is used
   * instead; since it silently returns GMT for unrecognized ids, that fallback
   * is detected and rejected to preserve the original "unknown zone" error.
   *
   * @throws IllegalArgumentException if the id is not recognized by either library
   */
  private static DateTimeZone getTimeZone(String id) {
    DateTimeZone zone;
    try {
      zone = DateTimeZone.forID(id);
    } catch (IllegalArgumentException e) {
      TimeZone timeZone = TimeZone.getTimeZone(id);
      // TimeZone.getTimeZone() never returns null; it falls back to GMT for
      // unknown ids. A GMT result here can only mean "unrecognized", because a
      // genuine "GMT" request would already have succeeded in forID() above.
      if ("GMT".equals(timeZone.getID()) && !"GMT".equalsIgnoreCase(id)) {
        throw new IllegalArgumentException("TimeZone " + id + " not recognized");
      }
      zone = DateTimeZone.forTimeZone(timeZone);
    }
    return zone;
  }
  /**
   * Converts a JsonElement into a supported AvroType
   * @author kgoodhop
   *
   */
  public static abstract class JsonElementConverter {
    // Field name from the source schema.
    private String name;
    // Whether null values are accepted by convert().
    private boolean nullable;
    // Source type name, recorded on the Avro schema as "source.type".
    private String sourceType;
    /**
     * @param fieldName field name from the source schema
     * @param nullable whether the field may hold null
     * @param sourceType source type name for the "source.type" schema property
     */
    public JsonElementConverter(String fieldName, boolean nullable, String sourceType) {
      this.name = fieldName;
      this.nullable = nullable;
      this.sourceType = sourceType;
    }
    /**
     * Field name from schema
     * @return the field name
     */
    public String getName() {
      return this.name;
    }
    /**
     * is field nullable
     * @return whether null values are accepted
     */
    public boolean isNullable() {
      return this.nullable;
    }
    /**
     * avro schema for the converted type
     * @return the schema, wrapped in a [null, T] union when nullable
     */
    public Schema getSchema() {
      if (this.nullable) {
        List<Schema> list = new ArrayList<>();
        list.add(Schema.create(Schema.Type.NULL));
        list.add(schema());
        return Schema.createUnion(list);
      }
      return schema();
    }
    /** Builds the bare (non-union) Avro schema, tagged with the source type. */
    protected Schema schema() {
      Schema schema = Schema.create(getTargetType());
      schema.addProp("source.type", this.sourceType.toLowerCase());
      return schema;
    }
    /**
     * Convert value
     * @param value is JsonNull will return null if allowed or exception if not allowed
     * @return Avro safe type
     */
    public Object convert(JsonElement value) {
      if (value.isJsonNull()) {
        if (this.nullable) {
          return null;
        }
        throw new RuntimeException("Field: " + getName() + " is not nullable and contains a null value");
      }
      return convertField(value);
    }
    /**
     * Convert JsonElement to Avro type
     * @param value non-null JSON value to convert
     * @return converted Avro value
     */
    abstract Object convertField(JsonElement value);
    /**
     * Avro data type after conversion
     * @return the Avro target type
     */
    public abstract Schema.Type getTargetType();
  }
  /** Converts JSON strings to Avro {@link Utf8}. */
  public static class StringConverter extends JsonElementConverter {
    public StringConverter(String fieldName, boolean nullable, String sourceType) {
      super(fieldName, nullable, sourceType);
    }
    @Override
    Object convertField(JsonElement value) {
      return new Utf8(value.getAsString());
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.STRING;
    }
  }
  /** Converts JSON numbers to Avro int. */
  public static class IntConverter extends JsonElementConverter {
    public IntConverter(String fieldName, boolean nullable, String sourceType) {
      super(fieldName, nullable, sourceType);
    }
    @Override
    Object convertField(JsonElement value) {
      return value.getAsInt();
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.INT;
    }
  }
  /** Converts JSON numbers to Avro long. */
  public static class LongConverter extends JsonElementConverter {
    public LongConverter(String fieldName, boolean nullable, String sourceType) {
      super(fieldName, nullable, sourceType);
    }
    @Override
    Object convertField(JsonElement value) {
      return value.getAsLong();
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.LONG;
    }
  }
  /** Converts JSON numbers to Avro double. */
  public static class DoubleConverter extends JsonElementConverter {
    public DoubleConverter(String fieldName, boolean nullable, String sourceType) {
      super(fieldName, nullable, sourceType);
    }
    @Override
    Object convertField(JsonElement value) {
      return value.getAsDouble();
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.DOUBLE;
    }
  }
  /** Converts JSON numbers to Avro float. */
  public static class FloatConverter extends JsonElementConverter {
    public FloatConverter(String fieldName, boolean nullable, String sourceType) {
      super(fieldName, nullable, sourceType);
    }
    @Override
    Object convertField(JsonElement value) {
      return value.getAsFloat();
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.FLOAT;
    }
  }
  /** Converts JSON booleans to Avro boolean. */
  public static class BooleanConverter extends JsonElementConverter {
    public BooleanConverter(String fieldName, boolean nullable, String sourceType) {
      super(fieldName, nullable, sourceType);
    }
    @Override
    Object convertField(JsonElement value) {
      return value.getAsBoolean();
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.BOOLEAN;
    }
  }
  /**
   * Parses date/time/timestamp strings (trying each comma-separated pattern in
   * order) and emits epoch millis in UTC — or epoch seconds when
   * {@code ConfigurationKeys.CONVERTER_IS_EPOCH_TIME_IN_SECONDS} is set.
   */
  public static class DateConverter extends JsonElementConverter {
    // Comma-separated list of Joda-Time patterns to try in order.
    private String inputPatterns;
    // Zone the input strings are interpreted in; output is normalized to UTC.
    private DateTimeZone timeZone;
    private WorkUnitState state;
    public DateConverter(String fieldName, boolean nullable, String sourceType, String pattern, DateTimeZone zone,
        WorkUnitState state) {
      super(fieldName, nullable, sourceType);
      this.inputPatterns = pattern;
      this.timeZone = zone;
      this.state = state;
    }
    @Override
    Object convertField(JsonElement value) {
      List<String> patterns = Arrays.asList(this.inputPatterns.split(","));
      int patternFailCount = 0;
      Object formattedDate = null;
      for (String pattern : patterns) {
        DateTimeFormatter dtf = DateTimeFormat.forPattern(pattern).withZone(this.timeZone);
        try {
          formattedDate = dtf.parseDateTime(value.getAsString()).withZone(DateTimeZone.forID("UTC")).getMillis();
          if (Boolean.valueOf(this.state.getProp(ConfigurationKeys.CONVERTER_IS_EPOCH_TIME_IN_SECONDS))) {
            // Downscale from epoch millis to epoch seconds when configured.
            formattedDate = (Long) formattedDate / 1000;
          }
          // First pattern that parses wins.
          break;
        } catch (Exception e) {
          patternFailCount++;
        }
      }
      if (patternFailCount == patterns.size()) {
        throw new RuntimeException("Failed to parse the date");
      }
      return formattedDate;
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.LONG;
    }
  }
  /** Encodes JSON strings as bytes in the configured charset. */
  public static class BinaryConverter extends JsonElementConverter {
    private String charSet;
    public BinaryConverter(String fieldName, boolean nullable, String sourceType, String charSet) {
      super(fieldName, nullable, sourceType);
      this.charSet = charSet;
    }
    @Override
    Object convertField(JsonElement value) {
      try {
        return ByteBuffer.wrap(value.getAsString().getBytes(this.charSet));
      } catch (UnsupportedEncodingException e) {
        throw new RuntimeException(e);
      }
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.BYTES;
    }
  }
  /** Base class for converters (array/map) that delegate per-element conversion. */
  public static abstract class ComplexConverter extends JsonElementConverter {
    private JsonElementConverter elementConverter;
    public ComplexConverter(String fieldName, boolean nullable, String sourceType) {
      super(fieldName, nullable, sourceType);
    }
    protected void setElementConverter(JsonElementConverter elementConverter) {
      this.elementConverter = elementConverter;
    }
    public JsonElementConverter getElementConverter() {
      return this.elementConverter;
    }
  }
  /** Converts JSON arrays to Avro arrays using an item converter from "dataType.items". */
  public static class ArrayConverter extends ComplexConverter {
    public ArrayConverter(String fieldName, boolean nullable, String sourceType, JsonObject schemaNode,
        WorkUnitState state) throws UnsupportedDateTypeException {
      super(fieldName, nullable, sourceType);
      super.setElementConverter(
          getConvertor(fieldName, schemaNode.get("dataType").getAsJsonObject().get("items").getAsString(),
              schemaNode.get("dataType").getAsJsonObject(), state, isNullable()));
    }
    @Override
    Object convertField(JsonElement value) {
      List<Object> list = new ArrayList<>();
      for (JsonElement elem : (JsonArray) value) {
        list.add(getElementConverter().convertField(elem));
      }
      return new GenericData.Array<>(schema(), list);
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.ARRAY;
    }
    @Override
    public Schema schema() {
      Schema schema = Schema.createArray(getElementConverter().schema());
      schema.addProp("source.type", "array");
      return schema;
    }
  }
  /** Converts JSON objects to string-keyed maps using a value converter from "dataType.values". */
  public static class MapConverter extends ComplexConverter {
    public MapConverter(String fieldName, boolean nullable, String sourceType, JsonObject schemaNode,
        WorkUnitState state) throws UnsupportedDateTypeException {
      super(fieldName, nullable, sourceType);
      super.setElementConverter(
          getConvertor(fieldName, schemaNode.get("dataType").getAsJsonObject().get("values").getAsString(),
              schemaNode.get("dataType").getAsJsonObject(), state, isNullable()));
    }
    @Override
    Object convertField(JsonElement value) {
      Map<String, Object> map = new HashMap<>();
      for (Map.Entry<String, JsonElement> entry : ((JsonObject) value).entrySet()) {
        map.put(entry.getKey(), getElementConverter().convertField(entry.getValue()));
      }
      return map;
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.MAP;
    }
    @Override
    public Schema schema() {
      Schema schema = Schema.createMap(getElementConverter().schema());
      schema.addProp("source.type", "map");
      return schema;
    }
  }
  /**
   * Converts JSON strings to Avro enum symbols, with the symbol set and enum
   * name taken from the "dataType" node of the source schema.
   */
  public static class EnumConverter extends JsonElementConverter {
    String enumName;
    List<String> enumSet = new ArrayList<>();
    Schema schema;
    public EnumConverter(String fieldName, boolean nullable, String sourceType, JsonObject schemaNode) {
      super(fieldName, nullable, sourceType);
      for (JsonElement elem : schemaNode.get("dataType").getAsJsonObject().get("symbols").getAsJsonArray()) {
        this.enumSet.add(elem.getAsString());
      }
      this.enumName = schemaNode.get("dataType").getAsJsonObject().get("name").getAsString();
      // Fix: build the schema eagerly. Previously this.schema was only assigned
      // inside schema(), so convertField() threw a NullPointerException whenever
      // it ran before schema() had been called; schema() also rebuilt the schema
      // on every call. The schema is now constructed once and cached.
      this.schema = Schema.createEnum(this.enumName, "", "", this.enumSet);
      this.schema.addProp("source.type", "enum");
    }
    @Override
    Object convertField(JsonElement value) {
      return new GenericData.EnumSymbol(this.schema, value.getAsString());
    }
    @Override
    public org.apache.avro.Schema.Type getTargetType() {
      return Schema.Type.ENUM;
    }
    @Override
    public Schema schema() {
      return this.schema;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.tools;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
import net.sourceforge.argparse4j.inf.MutuallyExclusiveGroup;
import net.sourceforge.argparse4j.inf.Namespace;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.OffsetCommitCallback;
import org.apache.kafka.clients.consumer.RangeAssignor;
import org.apache.kafka.clients.consumer.RoundRobinAssignor;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.FencedInstanceIdException;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Closeable;
import java.io.IOException;
import java.io.PrintStream;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import static net.sourceforge.argparse4j.impl.Arguments.store;
import static net.sourceforge.argparse4j.impl.Arguments.storeTrue;
/**
* Command line consumer designed for system testing. It outputs consumer events to STDOUT as JSON
* formatted objects. The "name" field in each JSON event identifies the event type. The following
* events are currently supported:
*
* <ul>
* <li>partitions_revoked: outputs the partitions revoked through {@link ConsumerRebalanceListener#onPartitionsRevoked(Collection)}.
* See {@link org.apache.kafka.tools.VerifiableConsumer.PartitionsRevoked}</li>
* <li>partitions_assigned: outputs the partitions assigned through {@link ConsumerRebalanceListener#onPartitionsAssigned(Collection)}
* See {@link org.apache.kafka.tools.VerifiableConsumer.PartitionsAssigned}.</li>
* <li>records_consumed: contains a summary of records consumed in a single call to {@link KafkaConsumer#poll(long)}.
* See {@link org.apache.kafka.tools.VerifiableConsumer.RecordsConsumed}.</li>
* <li>record_data: contains the key, value, and offset of an individual consumed record (only included if verbose
* output is enabled). See {@link org.apache.kafka.tools.VerifiableConsumer.RecordData}.</li>
* <li>offsets_committed: The result of every offset commit (only included if auto-commit is not enabled).
* See {@link org.apache.kafka.tools.VerifiableConsumer.OffsetsCommitted}</li>
* <li>shutdown_complete: emitted after the consumer returns from {@link KafkaConsumer#close()}.
* See {@link org.apache.kafka.tools.VerifiableConsumer.ShutdownComplete}.</li>
* </ul>
*/
public class VerifiableConsumer implements Closeable, OffsetCommitCallback, ConsumerRebalanceListener {

    private static final Logger log = LoggerFactory.getLogger(VerifiableConsumer.class);

    // Jackson mapper used to render all events; a custom TopicPartition
    // serializer is registered in addKafkaSerializerModule().
    private final ObjectMapper mapper = new ObjectMapper();
    private final PrintStream out;
    private final KafkaConsumer<String, String> consumer;
    private final String topic;
    private final boolean useAutoCommit;
    private final boolean useAsyncCommit;
    private final boolean verbose;
    // Maximum number of messages to consume; negative means "no limit".
    private final int maxMessages;
    // Messages consumed so far; only mutated from the single poll loop thread
    // in run(), so no synchronization is needed.
    private int consumedMessages = 0;
    // Counted down once run() has fully shut down; close() blocks on it.
    private CountDownLatch shutdownLatch = new CountDownLatch(1);

    /**
     * Creates the consumer wrapper.
     *
     * @param consumer      the underlying Kafka consumer (owned and closed by this object)
     * @param out           stream that receives one JSON event per line
     * @param topic         topic to subscribe to
     * @param maxMessages   stop after this many messages; negative for unbounded
     * @param useAutoCommit when true, offsets are auto-committed and no explicit commit is issued
     * @param useAsyncCommit when true (and auto-commit is off), commit asynchronously
     * @param verbose       when true, emit a record_data event per consumed record
     */
    public VerifiableConsumer(KafkaConsumer<String, String> consumer,
                              PrintStream out,
                              String topic,
                              int maxMessages,
                              boolean useAutoCommit,
                              boolean useAsyncCommit,
                              boolean verbose) {
        this.consumer = consumer;
        this.out = out;
        this.topic = topic;
        this.maxMessages = maxMessages;
        this.useAutoCommit = useAutoCommit;
        this.useAsyncCommit = useAsyncCommit;
        this.verbose = verbose;
        addKafkaSerializerModule();
    }

    // Registers a Jackson serializer so TopicPartition renders as
    // {"topic": ..., "partition": ...} in the emitted events.
    private void addKafkaSerializerModule() {
        SimpleModule kafka = new SimpleModule();
        kafka.addSerializer(TopicPartition.class, new JsonSerializer<TopicPartition>() {
            @Override
            public void serialize(TopicPartition tp, JsonGenerator gen, SerializerProvider serializers) throws IOException {
                gen.writeStartObject();
                gen.writeObjectField("topic", tp.topic());
                gen.writeObjectField("partition", tp.partition());
                gen.writeEndObject();
            }
        });
        mapper.registerModule(kafka);
    }

    // True when a finite --max-messages limit was configured.
    private boolean hasMessageLimit() {
        return maxMessages >= 0;
    }

    // True once the configured message limit (if any) has been reached.
    private boolean isFinished() {
        return hasMessageLimit() && consumedMessages >= maxMessages;
    }

    /**
     * Processes one poll() result: emits per-record and per-batch events and
     * returns the offsets to commit (next offset after the last record seen in
     * each partition). Records beyond the --max-messages limit are truncated
     * and not counted or committed.
     */
    private Map<TopicPartition, OffsetAndMetadata> onRecordsReceived(ConsumerRecords<String, String> records) {
        Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
        List<RecordSetSummary> summaries = new ArrayList<>();
        for (TopicPartition tp : records.partitions()) {
            List<ConsumerRecord<String, String>> partitionRecords = records.records(tp);
            // Trim the batch so we never process more than maxMessages overall.
            if (hasMessageLimit() && consumedMessages + partitionRecords.size() > maxMessages)
                partitionRecords = partitionRecords.subList(0, maxMessages - consumedMessages);
            if (partitionRecords.isEmpty())
                continue;
            long minOffset = partitionRecords.get(0).offset();
            long maxOffset = partitionRecords.get(partitionRecords.size() - 1).offset();
            // Commit position is one past the last consumed offset.
            offsets.put(tp, new OffsetAndMetadata(maxOffset + 1));
            summaries.add(new RecordSetSummary(tp.topic(), tp.partition(),
                    partitionRecords.size(), minOffset, maxOffset));
            if (verbose) {
                for (ConsumerRecord<String, String> record : partitionRecords) {
                    printJson(new RecordData(record));
                }
            }
            consumedMessages += partitionRecords.size();
            if (isFinished())
                break;
        }
        printJson(new RecordsConsumed(records.count(), summaries));
        return offsets;
    }

    /**
     * Offset-commit completion callback (also invoked directly from
     * {@link #commitSync(Map)}); emits an offsets_committed event, including
     * the error message when the commit failed.
     */
    @Override
    public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets, Exception exception) {
        List<CommitData> committedOffsets = new ArrayList<>();
        for (Map.Entry<TopicPartition, OffsetAndMetadata> offsetEntry : offsets.entrySet()) {
            TopicPartition tp = offsetEntry.getKey();
            committedOffsets.add(new CommitData(tp.topic(), tp.partition(), offsetEntry.getValue().offset()));
        }
        boolean success = true;
        String error = null;
        if (exception != null) {
            success = false;
            error = exception.getMessage();
        }
        printJson(new OffsetsCommitted(committedOffsets, error, success));
    }

    /** Rebalance callback: reports newly assigned partitions as a JSON event. */
    @Override
    public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
        printJson(new PartitionsAssigned(partitions));
    }

    /** Rebalance callback: reports revoked partitions as a JSON event. */
    @Override
    public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
        printJson(new PartitionsRevoked(partitions));
    }

    // Serializes the event to a single JSON line on stdout; serialization
    // failures are reported on the same stream rather than thrown.
    private void printJson(Object data) {
        try {
            out.println(mapper.writeValueAsString(data));
        } catch (JsonProcessingException e) {
            out.println("Bad data can't be written as json: " + e.getMessage());
        }
    }

    /**
     * Commits the given offsets synchronously and reports the outcome via
     * {@link #onComplete}. A WakeupException (triggered by close()) causes one
     * retried commit so the final offsets are not lost, then propagates;
     * FencedInstanceIdException is fatal and propagates unreported.
     */
    public void commitSync(Map<TopicPartition, OffsetAndMetadata> offsets) {
        try {
            consumer.commitSync(offsets);
            onComplete(offsets, null);
        } catch (WakeupException e) {
            // we only call wakeup() once to close the consumer, so this recursion should be safe
            commitSync(offsets);
            throw e;
        } catch (FencedInstanceIdException e) {
            throw e;
        } catch (Exception e) {
            onComplete(offsets, e);
        }
    }

    /**
     * Main poll loop: subscribes, consumes until the message limit is reached
     * (or forever), committing offsets after each batch unless auto-commit is
     * enabled. Always closes the consumer, emits shutdown_complete, and
     * releases the shutdown latch on exit.
     */
    public void run() {
        try {
            printJson(new StartupComplete());
            consumer.subscribe(Collections.singletonList(topic), this);
            while (!isFinished()) {
                // Effectively "block until records arrive or wakeup() is called".
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(Long.MAX_VALUE));
                Map<TopicPartition, OffsetAndMetadata> offsets = onRecordsReceived(records);
                if (!useAutoCommit) {
                    if (useAsyncCommit)
                        consumer.commitAsync(offsets, this);
                    else
                        commitSync(offsets);
                }
            }
        } catch (WakeupException e) {
            // ignore, we are closing
            log.trace("Caught WakeupException because consumer is shutdown, ignore and terminate.", e);
        } catch (Throwable t) {
            // Log the error so it goes to the service log and not stdout
            log.error("Error during processing, terminating consumer process: ", t);
        } finally {
            consumer.close();
            printJson(new ShutdownComplete());
            shutdownLatch.countDown();
        }
    }

    /**
     * Wakes up the poll loop and waits (uninterruptibly) until run() has fully
     * shut down, restoring the interrupt flag if one was received meanwhile.
     * Safe to call from another thread (e.g. a JVM shutdown hook).
     */
    public void close() {
        boolean interrupted = false;
        try {
            consumer.wakeup();
            while (true) {
                try {
                    shutdownLatch.await();
                    return;
                } catch (InterruptedException e) {
                    interrupted = true;
                }
            }
        } finally {
            if (interrupted)
                Thread.currentThread().interrupt();
        }
    }

    /**
     * Base class for all emitted events: every event carries a creation
     * timestamp and a "name" field identifying the event type.
     */
    @JsonPropertyOrder({ "timestamp", "name" })
    private static abstract class ConsumerEvent {
        private final long timestamp = System.currentTimeMillis();

        @JsonProperty
        public abstract String name();

        @JsonProperty
        public long timestamp() {
            return timestamp;
        }
    }

    /** Emitted once at the start of run(), before subscribing. */
    private static class StartupComplete extends ConsumerEvent {
        @Override
        public String name() {
            return "startup_complete";
        }
    }

    /** Emitted after the consumer has been closed at the end of run(). */
    private static class ShutdownComplete extends ConsumerEvent {
        @Override
        public String name() {
            return "shutdown_complete";
        }
    }

    /** Event describing partitions revoked during a rebalance. */
    private static class PartitionsRevoked extends ConsumerEvent {
        private final Collection<TopicPartition> partitions;

        public PartitionsRevoked(Collection<TopicPartition> partitions) {
            this.partitions = partitions;
        }

        @JsonProperty
        public Collection<TopicPartition> partitions() {
            return partitions;
        }

        @Override
        public String name() {
            return "partitions_revoked";
        }
    }

    /** Event describing partitions assigned during a rebalance. */
    private static class PartitionsAssigned extends ConsumerEvent {
        private final Collection<TopicPartition> partitions;

        public PartitionsAssigned(Collection<TopicPartition> partitions) {
            this.partitions = partitions;
        }

        @JsonProperty
        public Collection<TopicPartition> partitions() {
            return partitions;
        }

        @Override
        public String name() {
            return "partitions_assigned";
        }
    }

    /** Per-poll summary event: total record count plus per-partition ranges. */
    public static class RecordsConsumed extends ConsumerEvent {
        private final long count;
        private final List<RecordSetSummary> partitionSummaries;

        public RecordsConsumed(long count, List<RecordSetSummary> partitionSummaries) {
            this.count = count;
            this.partitionSummaries = partitionSummaries;
        }

        @Override
        public String name() {
            return "records_consumed";
        }

        @JsonProperty
        public long count() {
            return count;
        }

        @JsonProperty
        public List<RecordSetSummary> partitions() {
            return partitionSummaries;
        }
    }

    /** Per-record event (verbose mode only): key, value, and position. */
    @JsonPropertyOrder({ "timestamp", "name", "key", "value", "topic", "partition", "offset" })
    public static class RecordData extends ConsumerEvent {
        private final ConsumerRecord<String, String> record;

        public RecordData(ConsumerRecord<String, String> record) {
            this.record = record;
        }

        @Override
        public String name() {
            return "record_data";
        }

        @JsonProperty
        public String topic() {
            return record.topic();
        }

        @JsonProperty
        public int partition() {
            return record.partition();
        }

        @JsonProperty
        public String key() {
            return record.key();
        }

        @JsonProperty
        public String value() {
            return record.value();
        }

        @JsonProperty
        public long offset() {
            return record.offset();
        }
    }

    /** Base payload identifying a topic-partition in JSON output. */
    private static class PartitionData {
        private final String topic;
        private final int partition;

        public PartitionData(String topic, int partition) {
            this.topic = topic;
            this.partition = partition;
        }

        @JsonProperty
        public String topic() {
            return topic;
        }

        @JsonProperty
        public int partition() {
            return partition;
        }
    }

    /** Event reporting the outcome of an offset commit (error omitted when null). */
    private static class OffsetsCommitted extends ConsumerEvent {
        private final List<CommitData> offsets;
        private final String error;
        private final boolean success;

        public OffsetsCommitted(List<CommitData> offsets, String error, boolean success) {
            this.offsets = offsets;
            this.error = error;
            this.success = success;
        }

        @Override
        public String name() {
            return "offsets_committed";
        }

        @JsonProperty
        public List<CommitData> offsets() {
            return offsets;
        }

        @JsonProperty
        @JsonInclude(JsonInclude.Include.NON_NULL)
        public String error() {
            return error;
        }

        @JsonProperty
        public boolean success() {
            return success;
        }
    }

    /** Committed offset for a single topic-partition. */
    private static class CommitData extends PartitionData {
        private final long offset;

        public CommitData(String topic, int partition, long offset) {
            super(topic, partition);
            this.offset = offset;
        }

        @JsonProperty
        public long offset() {
            return offset;
        }
    }

    /** Summary of the records consumed from one partition in a single poll. */
    private static class RecordSetSummary extends PartitionData {
        private final long count;
        private final long minOffset;
        private final long maxOffset;

        public RecordSetSummary(String topic, int partition, long count, long minOffset, long maxOffset) {
            super(topic, partition);
            this.count = count;
            this.minOffset = minOffset;
            this.maxOffset = maxOffset;
        }

        @JsonProperty
        public long count() {
            return count;
        }

        @JsonProperty
        public long minOffset() {
            return minOffset;
        }

        @JsonProperty
        public long maxOffset() {
            return maxOffset;
        }
    }

    /** Builds the command-line argument parser for this tool. */
    private static ArgumentParser argParser() {
        ArgumentParser parser = ArgumentParsers
            .newArgumentParser("verifiable-consumer")
            .defaultHelp(true)
            .description("This tool consumes messages from a specific topic and emits consumer events (e.g. group rebalances, received messages, and offsets committed) as JSON objects to STDOUT.");
        // Exactly one of --bootstrap-server / --broker-list must be given.
        MutuallyExclusiveGroup connectionGroup = parser.addMutuallyExclusiveGroup("Connection Group")
                .description("Group of arguments for connection to brokers")
                .required(true);
        connectionGroup.addArgument("--bootstrap-server")
                .action(store())
                .required(false)
                .type(String.class)
                .metavar("HOST1:PORT1[,HOST2:PORT2[...]]")
                .dest("bootstrapServer")
                .help("REQUIRED unless --broker-list(deprecated) is specified. The server(s) to connect to. Comma-separated list of Kafka brokers in the form HOST1:PORT1,HOST2:PORT2,...");
        connectionGroup.addArgument("--broker-list")
                .action(store())
                .required(false)
                .type(String.class)
                .metavar("HOST1:PORT1[,HOST2:PORT2[...]]")
                .dest("brokerList")
                .help("DEPRECATED, use --bootstrap-server instead; ignored if --bootstrap-server is specified. Comma-separated list of Kafka brokers in the form HOST1:PORT1,HOST2:PORT2,...");
        parser.addArgument("--topic")
                .action(store())
                .required(true)
                .type(String.class)
                .metavar("TOPIC")
                .help("Consumes messages from this topic.");
        parser.addArgument("--group-id")
                .action(store())
                .required(true)
                .type(String.class)
                .metavar("GROUP_ID")
                .dest("groupId")
                .help("The groupId shared among members of the consumer group");
        parser.addArgument("--group-instance-id")
                .action(store())
                .required(false)
                .type(String.class)
                .metavar("GROUP_INSTANCE_ID")
                .dest("groupInstanceId")
                .help("A unique identifier of the consumer instance");
        parser.addArgument("--max-messages")
                .action(store())
                .required(false)
                .type(Integer.class)
                .setDefault(-1)
                .metavar("MAX-MESSAGES")
                .dest("maxMessages")
                .help("Consume this many messages. If -1 (the default), the consumer will consume until the process is killed externally");
        parser.addArgument("--session-timeout")
                .action(store())
                .required(false)
                .setDefault(30000)
                .type(Integer.class)
                .metavar("TIMEOUT_MS")
                .dest("sessionTimeout")
                .help("Set the consumer's session timeout");
        parser.addArgument("--verbose")
                .action(storeTrue())
                .type(Boolean.class)
                .metavar("VERBOSE")
                .help("Enable to log individual consumed records");
        parser.addArgument("--enable-autocommit")
                .action(storeTrue())
                .type(Boolean.class)
                .metavar("ENABLE-AUTOCOMMIT")
                .dest("useAutoCommit")
                .help("Enable offset auto-commit on consumer");
        parser.addArgument("--reset-policy")
                .action(store())
                .required(false)
                .setDefault("earliest")
                .type(String.class)
                .dest("resetPolicy")
                .help("Set reset policy (must be either 'earliest', 'latest', or 'none'");
        parser.addArgument("--assignment-strategy")
                .action(store())
                .required(false)
                .setDefault(RangeAssignor.class.getName())
                .type(String.class)
                .dest("assignmentStrategy")
                .help("Set assignment strategy (e.g. " + RoundRobinAssignor.class.getName() + ")");
        parser.addArgument("--consumer.config")
                .action(store())
                .required(false)
                .type(String.class)
                .metavar("CONFIG_FILE")
                .help("Consumer config properties file (config options shared with command line parameters will be overridden).");
        return parser;
    }

    /**
     * Parses the command line and constructs a configured VerifiableConsumer.
     * Command-line options override any overlapping entries from the
     * --consumer.config properties file.
     *
     * @throws ArgumentParserException on invalid arguments or an unreadable config file
     */
    public static VerifiableConsumer createFromArgs(ArgumentParser parser, String[] args) throws ArgumentParserException {
        Namespace res = parser.parseArgs(args);

        boolean useAutoCommit = res.getBoolean("useAutoCommit");
        String configFile = res.getString("consumer.config");
        String brokerHostandPort = null;

        Properties consumerProps = new Properties();
        if (configFile != null) {
            try {
                consumerProps.putAll(Utils.loadProps(configFile));
            } catch (IOException e) {
                throw new ArgumentParserException(e.getMessage(), parser);
            }
        }

        consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, res.getString("groupId"));

        String groupInstanceId = res.getString("groupInstanceId");
        if (groupInstanceId != null) {
            consumerProps.put(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG, groupInstanceId);
        }

        // --bootstrap-server wins over the deprecated --broker-list.
        if (res.get("bootstrapServer") != null) {
            brokerHostandPort = res.getString("bootstrapServer");
        } else if (res.getString("brokerList") != null) {
            brokerHostandPort = res.getString("brokerList");
        } else {
            parser.printHelp();
            // Can't use `Exit.exit` here because it didn't exist until 0.11.0.0.
            System.exit(0);
        }
        consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerHostandPort);

        consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, useAutoCommit);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, res.getString("resetPolicy"));
        consumerProps.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, Integer.toString(res.getInt("sessionTimeout")));
        consumerProps.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, res.getString("assignmentStrategy"));

        StringDeserializer deserializer = new StringDeserializer();
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerProps, deserializer, deserializer);

        String topic = res.getString("topic");
        int maxMessages = res.getInt("maxMessages");
        boolean verbose = res.getBoolean("verbose");

        // NOTE: async commit is never enabled from the command line; the flag
        // is hard-wired to false here.
        return new VerifiableConsumer(
                consumer,
                System.out,
                topic,
                maxMessages,
                useAutoCommit,
                false,
                verbose);
    }

    /** Entry point: parses arguments, installs a shutdown hook, and runs the poll loop. */
    public static void main(String[] args) {
        ArgumentParser parser = argParser();
        if (args.length == 0) {
            parser.printHelp();
            // Can't use `Exit.exit` here because it didn't exist until 0.11.0.0.
            System.exit(0);
        }
        try {
            final VerifiableConsumer consumer = createFromArgs(parser, args);
            // Can't use `Exit.addShutdownHook` here because it didn't exist until 2.5.0.
            Runtime.getRuntime().addShutdownHook(new Thread(consumer::close, "verifiable-consumer-shutdown-hook"));

            consumer.run();
        } catch (ArgumentParserException e) {
            parser.handleError(e);
            // Can't use `Exit.exit` here because it didn't exist until 0.11.0.0.
            System.exit(1);
        }
    }
}
| |
package com.android2ee.tool.animatedvector.morphing;
import android.annotation.TargetApi;
import android.graphics.drawable.AnimatedVectorDrawable;
import android.graphics.drawable.LevelListDrawable;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.ImageView;
/**
 * This class shows the code needed to animate your AnimatedVectorDrawables.
 * Enjoy!
 * (By the way, the related bug report is https://code.google.com/p/android/issues/detail?id=195999)
 */
public class MainActivity extends AppCompatActivity {
    /**
     * The ImageViews hosting the drawables
     */
    ImageView imageView1, imageView2, imageView3, imageView4;
    /**
     * Two simple AnimatedVectorDrawables used for the basic example
     */
    AnimatedVectorDrawable animatedVector3, animatedVector4;

    /***********************************************************
     * Managing Level List: To chain animations
     **********************************************************/
    // The LevelListDrawable that contains all the AnimatedVectorDrawables
    LevelListDrawable animatedVectorList;
    /**
     * The current VectorDrawable displayed by the animatedVectorList
     */
    AnimatedVectorDrawable currentAnimatedVectorFromList;
    /**
     * The max level of your LevelList animatedVectorList
     */
    int animatedVectorListMaxLevel = 0;
    /**
     * The handler used to automatically launch the next animation
     */
    Handler uiHandler;
    /**
     * The Runnable that launches the next animation
     */
    Runnable uiRunnable;
    /**
     * Tracks whether the chained animation has already been launched once
     */
    boolean animatedVectorFirstLaunched = true;

    /***********************************************************
     * Managing RoundTrip animation (VectorDrawable1 to VectorDrawable2 and back again)
     **********************************************************/
    // NOTE(review): the closing "*/" above was previously missing, so the
    // Javadoc below was silently swallowed into this banner comment.
    /**
     * The LevelList that contains only two AnimatedVectorDrawables,
     * the ones used to go from one to the other
     */
    LevelListDrawable backupRoundTrip;
    /**
     * The current AnimatedVector displayed by the RoundTrip
     */
    AnimatedVectorDrawable currentBackupDrawable;
    /**
     * Tracks whether the round-trip animation has already been launched once
     */
    boolean backupRoundTripFirstLaunched = true;

    /***********************************************************
     * Managing LifeCycle
     **********************************************************/
    @TargetApi(Build.VERSION_CODES.M)
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        //managing the levelList to chain animations
        //----------------------------------------------
        animatedVectorListMaxLevel = 4;//TODO can not be compute, you have to set it yourself!!!
        //instantiate drawable and imageView
        imageView1 = (ImageView) findViewById(R.id.imageView1);
        animatedVectorList = (LevelListDrawable) imageView1.getDrawable();
        currentAnimatedVectorFromList = (AnimatedVectorDrawable) animatedVectorList.getCurrent();
        //launch animation when the click is done on the imageView
        imageView1.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                launchAnimVectorList();
            }
        });
        // Runnable reused by uiHandler to re-trigger the chained animation
        uiRunnable = new Runnable() {
            @Override
            public void run() {
                launchAnimVectorList();
            }
        };
        uiHandler = new Handler();
        //managing the round trip scenario
        //--------------------------------
        //instantiate drawable and imageView
        imageView2 = (ImageView) findViewById(R.id.imageView2);
        backupRoundTrip = (LevelListDrawable) imageView2.getDrawable();
        currentBackupDrawable = (AnimatedVectorDrawable) backupRoundTrip.getCurrent();
        //launch animation when the click is done on the imageView
        imageView2.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                launchAnimBackup();
            }
        });
        //managing simple animated vector drawable
        //------------------------------------------
        imageView3 = (ImageView) findViewById(R.id.imageView3);
        animatedVector3 = (AnimatedVectorDrawable) imageView3.getDrawable();
        imageView4 = (ImageView) findViewById(R.id.imageView4);
        animatedVector4 = (AnimatedVectorDrawable) imageView4.getDrawable();
        //set on click listener on them to launch animation
        imageView3.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                launchAnim3();
            }
        });
        imageView4.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                launchAnim4();
            }
        });
    }

    @Override
    protected void onPause() {
        super.onPause();
        //and insure you remove every runnable from the handler (memory leak else)
        uiHandler.removeCallbacks(uiRunnable);
        // llScreenRecorder.stopRecording();
    }

    /***********************************************************
     * Managing LevelListDrawable to chain animations
     **********************************************************/
    /**
     * Launch the animation on the ImageView1
     * And update the level of the drawable (cycling back to 0 past the max level)
     */
    private void launchAnimVectorList() {
        // Skip the level change on the very first launch so the initial
        // drawable's animation is played before stepping to the next level.
        if (!animatedVectorFirstLaunched) {
            if (animatedVectorList.getLevel() < animatedVectorListMaxLevel) {
                //then increment
                animatedVectorList.setLevel(animatedVectorList.getLevel() + 1);
                currentAnimatedVectorFromList = (AnimatedVectorDrawable) animatedVectorList.getCurrent();
            } else {
                //go back to the beginning
                animatedVectorList.setLevel(0);
                currentAnimatedVectorFromList = (AnimatedVectorDrawable) animatedVectorList.getCurrent();
            }
        } else {
            animatedVectorFirstLaunched = false;
        }
        //start the animation on the current element
        currentAnimatedVectorFromList.start();
        //launch it again in 300 ms + the time your animation takes
        uiHandler.postDelayed(uiRunnable, 300 + 3000);//TODO instead of 3000 set your animation duration !!!
    }

    /***********************************************************
     * Managing backup button round trip
     **********************************************************/
    /**
     * Launch the animation on the currentAnimatedVectorDrawable,
     * toggling the level between 0 and 1 to reverse direction
     */
    private void launchAnimBackup() {
        // As above: on first launch, play the current level without toggling.
        if (!backupRoundTripFirstLaunched) {
            if (backupRoundTrip.getLevel() == 1) {
                //then reverse
                backupRoundTrip.setLevel(0);
            } else {
                //then reverse
                backupRoundTrip.setLevel(1);
            }
        } else {
            backupRoundTripFirstLaunched = false;
        }
        //find the current AnimatedVectorDrawable displayed
        currentBackupDrawable = (AnimatedVectorDrawable) backupRoundTrip.getCurrent();
        //start the animation
        currentBackupDrawable.start();
    }

    /***********************************************************
     * Launching simple animation on AnimatedVectorDrawable
     **********************************************************/
    /**
     * Launch the animation on the AnimatedVectorDrawable displayed by the imageView3
     */
    private void launchAnim3() {
        // llScreenRecorder.startRecording();
        animatedVector3.start();
    }

    /**
     * Launch the animation on the AnimatedVectorDrawable displayed by the imageView4
     */
    private void launchAnim4() {
        animatedVector4.start();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.service;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.cluster.AckedClusterStateTaskListener;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterState.Builder;
import org.elasticsearch.cluster.ClusterStateApplier;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.ClusterStateTaskConfig;
import org.elasticsearch.cluster.ClusterStateTaskExecutor;
import org.elasticsearch.cluster.ClusterStateTaskExecutor.ClusterTasksResult;
import org.elasticsearch.cluster.ClusterStateTaskListener;
import org.elasticsearch.cluster.LocalNodeMasterListener;
import org.elasticsearch.cluster.NodeConnectionsService;
import org.elasticsearch.cluster.TimeoutClusterStateListener;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.OperationRouting;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor;
import org.elasticsearch.common.util.concurrent.PrioritizedRunnable;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;
public class ClusterService extends AbstractLifecycleComponent {
public static final Setting<TimeValue> CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING =
Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30),
Property.Dynamic, Property.NodeScope);
public static final String UPDATE_THREAD_NAME = "clusterService#updateTask";
private final ThreadPool threadPool;
private final ClusterName clusterName;
private final Supplier<DiscoveryNode> localNodeSupplier;
private BiConsumer<ClusterChangedEvent, Discovery.AckListener> clusterStatePublisher;
private final OperationRouting operationRouting;
private final ClusterSettings clusterSettings;
private TimeValue slowTaskLoggingThreshold;
private volatile PrioritizedEsThreadPoolExecutor threadPoolExecutor;
/**
* Those 3 state listeners are changing infrequently - CopyOnWriteArrayList is just fine
*/
private final Collection<ClusterStateApplier> highPriorityStateAppliers = new CopyOnWriteArrayList<>();
private final Collection<ClusterStateApplier> normalPriorityStateAppliers = new CopyOnWriteArrayList<>();
private final Collection<ClusterStateApplier> lowPriorityStateAppliers = new CopyOnWriteArrayList<>();
final Map<ClusterStateTaskExecutor, LinkedHashSet<UpdateTask>> updateTasksPerExecutor = new HashMap<>();
private final Iterable<ClusterStateApplier> clusterStateAppliers = Iterables.concat(highPriorityStateAppliers,
normalPriorityStateAppliers, lowPriorityStateAppliers);
private final Collection<ClusterStateListener> clusterStateListeners = new CopyOnWriteArrayList<>();
private final Collection<TimeoutClusterStateListener> timeoutClusterStateListeners =
Collections.newSetFromMap(new ConcurrentHashMap<TimeoutClusterStateListener, Boolean>());
private final LocalNodeMasterListeners localNodeMasterListeners;
private final Queue<NotifyTimeout> onGoingTimeouts = ConcurrentCollections.newQueue();
private final AtomicReference<ClusterState> state;
private final ClusterBlocks.Builder initialBlocks;
private NodeConnectionsService nodeConnectionsService;
private DiscoverySettings discoverySettings;
/**
 * Creates the cluster service. The service is not usable until the cluster state publisher,
 * node connections service and discovery settings have been injected and {@code doStart()} has run.
 *
 * @param settings          node settings
 * @param clusterSettings   dynamic cluster-level settings registry
 * @param threadPool        thread pool used for scheduling timeouts and async notifications
 * @param localNodeSupplier supplies the local node, resolved in {@code doStart()}
 */
public ClusterService(Settings settings,
                      ClusterSettings clusterSettings, ThreadPool threadPool, Supplier<DiscoveryNode> localNodeSupplier) {
    super(settings);
    this.localNodeSupplier = localNodeSupplier;
    this.operationRouting = new OperationRouting(settings, clusterSettings);
    this.threadPool = threadPool;
    this.clusterSettings = clusterSettings;
    this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings);
    // will be replaced on doStart.
    this.state = new AtomicReference<>(ClusterState.builder(clusterName).build());
    // keep slowTaskLoggingThreshold in sync with dynamic updates of the setting
    this.clusterSettings.addSettingsUpdateConsumer(CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
        this::setSlowTaskLoggingThreshold);
    this.slowTaskLoggingThreshold = CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.get(settings);
    localNodeMasterListeners = new LocalNodeMasterListeners(threadPool);
    initialBlocks = ClusterBlocks.builder();
}
// Settings-update consumer for CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.
private void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) {
    this.slowTaskLoggingThreshold = slowTaskLoggingThreshold;
}
/**
 * Sets the publisher used to broadcast committed cluster state changes. Must be called before {@code doStart()}.
 */
public synchronized void setClusterStatePublisher(BiConsumer<ClusterChangedEvent, Discovery.AckListener> publisher) {
    clusterStatePublisher = publisher;
}
// Atomically replaces the currently applied cluster state using the given transformation.
private void updateState(UnaryOperator<ClusterState> updateFunction) {
    this.state.getAndUpdate(updateFunction);
}
/**
 * Injects the service that manages connections to other nodes. May only be set once, before {@code doStart()}.
 */
public synchronized void setNodeConnectionsService(NodeConnectionsService nodeConnectionsService) {
    assert this.nodeConnectionsService == null : "nodeConnectionsService is already set";
    this.nodeConnectionsService = nodeConnectionsService;
}
/**
 * Adds an initial block to be set on the first cluster state created.
 *
 * @throws IllegalStateException if the service has already been started
 */
public synchronized void addInitialStateBlock(ClusterBlock block) throws IllegalStateException {
    if (lifecycle.started()) {
        // initial blocks only apply to the first cluster state built in doStart()
        throw new IllegalStateException("can't set initial block when started");
    }
    initialBlocks.addGlobalBlock(block);
}
/**
 * Remove an initial block to be set on the first cluster state created.
 *
 * @throws IllegalStateException if the service has already been started
 */
public synchronized void removeInitialStateBlock(ClusterBlock block) throws IllegalStateException {
    // delegates to the id-based overload
    removeInitialStateBlock(block.id());
}
/**
 * Remove an initial block to be set on the first cluster state created.
 *
 * @param blockId the id of the global block to remove
 * @throws IllegalStateException if the service has already been started
 */
public synchronized void removeInitialStateBlock(int blockId) throws IllegalStateException {
    if (lifecycle.started()) {
        // fixed copy-pasted message from addInitialStateBlock ("set" -> "remove");
        // initial blocks are immutable once the service has started
        throw new IllegalStateException("can't remove initial block when started");
    }
    initialBlocks.removeGlobalBlock(blockId);
}
@Override
protected synchronized void doStart() {
    // all collaborators must be injected before the service can start
    Objects.requireNonNull(clusterStatePublisher, "please set a cluster state publisher before starting");
    Objects.requireNonNull(nodeConnectionsService, "please set the node connection service before starting");
    Objects.requireNonNull(discoverySettings, "please set discovery settings before starting");
    addListener(localNodeMasterListeners);
    DiscoveryNode localNode = localNodeSupplier.get();
    assert localNode != null;
    // seed the first real cluster state with the local node and any initial blocks registered before start
    updateState(state -> {
        assert state.nodes().getLocalNodeId() == null : "local node is already set";
        DiscoveryNodes nodes = DiscoveryNodes.builder(state.nodes()).add(localNode).localNodeId(localNode.getId()).build();
        return ClusterState.builder(state).nodes(nodes).blocks(initialBlocks).build();
    });
    // single prioritized thread: all cluster state updates are serialized through this executor
    this.threadPoolExecutor = EsExecutors.newSinglePrioritizing(UPDATE_THREAD_NAME, daemonThreadFactory(settings, UPDATE_THREAD_NAME),
        threadPool.getThreadContext());
}
@Override
protected synchronized void doStop() {
    // cancel every scheduled timeout and tell its listener that we are shutting down
    // (previously cancel() was invoked twice per timeout - once before the try and once
    // inside it - the redundant second call has been removed)
    for (NotifyTimeout onGoingTimeout : onGoingTimeouts) {
        onGoingTimeout.cancel();
        try {
            onGoingTimeout.listener.onClose();
        } catch (Exception ex) {
            logger.debug("failed to notify listeners on shutdown", ex);
        }
    }
    // shut down the single update thread, giving in-flight work up to 10s to finish
    ThreadPool.terminate(threadPoolExecutor, 10, TimeUnit.SECONDS);
    // close timeout listeners that did not have an ongoing timeout
    timeoutClusterStateListeners.forEach(TimeoutClusterStateListener::onClose);
    removeListener(localNodeMasterListeners);
}
@Override
protected synchronized void doClose() {
    // nothing to release beyond what doStop() already tears down
}
/**
 * Returns the local node as recorded in the currently applied cluster state.
 *
 * @throws IllegalStateException if the node has not been started yet (no local node in the state)
 */
public DiscoveryNode localNode() {
    final DiscoveryNode node = state().getNodes().getLocalNode();
    if (node == null) {
        throw new IllegalStateException("No local node found. Is the node started?");
    }
    return node;
}
/** Returns the operation routing helper for shard-level request routing. */
public OperationRouting operationRouting() {
    return operationRouting;
}
/**
 * The current cluster state.
 */
public ClusterState state() {
    // appliers must not read the state being applied via this accessor - it is not yet visible here
    assert assertNotCalledFromClusterStateApplier("the applied cluster state is not yet available");
    return this.state.get();
}
/**
 * Adds a high priority applier of updated cluster states.
 */
public void addHighPriorityApplier(ClusterStateApplier applier) {
    highPriorityStateAppliers.add(applier);
}
/**
 * Adds an applier which will be called after all high priority and normal appliers have been called.
 */
public void addLowPriorityApplier(ClusterStateApplier applier) {
    lowPriorityStateAppliers.add(applier);
}
/**
 * Adds a applier of updated cluster states (normal priority).
 */
public void addStateApplier(ClusterStateApplier applier) {
    normalPriorityStateAppliers.add(applier);
}
/**
 * Removes an applier of updated cluster states.
 */
public void removeApplier(ClusterStateApplier applier) {
    // the applier may be registered in any of the three priority buckets; remove from all
    normalPriorityStateAppliers.remove(applier);
    highPriorityStateAppliers.remove(applier);
    lowPriorityStateAppliers.remove(applier);
}
/**
 * Add a listener for updated cluster states
 */
public void addListener(ClusterStateListener listener) {
    clusterStateListeners.add(listener);
}
/**
 * Removes a listener for updated cluster states.
 */
public void removeListener(ClusterStateListener listener) {
    clusterStateListeners.remove(listener);
}
/**
 * Removes a timeout listener for updated cluster states, cancelling any timeout
 * still scheduled for it.
 */
public void removeTimeoutListener(TimeoutClusterStateListener listener) {
    timeoutClusterStateListeners.remove(listener);
    // cancel and drop every scheduled timeout belonging to this listener
    onGoingTimeouts.removeIf(timeout -> {
        if (timeout.listener.equals(listener)) {
            timeout.cancel();
            return true;
        }
        return false;
    });
}
/**
 * Add a listener for on/off local node master events
 */
public void addLocalNodeMasterListener(LocalNodeMasterListener listener) {
    localNodeMasterListeners.add(listener);
}
/**
 * Remove the given listener for on/off local master events
 */
public void removeLocalNodeMasterListener(LocalNodeMasterListener listener) {
    localNodeMasterListeners.remove(listener);
}
/**
 * Adds a cluster state listener that is expected to be removed during a short period of time.
 * If provided, the listener will be notified once a specific time has elapsed.
 *
 * NOTE: the listener is not removed on timeout. This is the responsibility of the caller.
 *
 * @param timeout  optional timeout after which the listener's onTimeout is fired; {@code null} for none
 * @param listener the listener to register; onClose is called instead if the service is stopped
 */
public void addTimeoutListener(@Nullable final TimeValue timeout, final TimeoutClusterStateListener listener) {
    if (lifecycle.stoppedOrClosed()) {
        listener.onClose();
        return;
    }
    // call the post added notification on the same event thread
    try {
        threadPoolExecutor.execute(new SourcePrioritizedRunnable(Priority.HIGH, "_add_listener_") {
            @Override
            public void run() {
                if (timeout != null) {
                    // schedule the timeout on the generic pool and track it so it can be cancelled on removal/stop
                    NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout);
                    notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout);
                    onGoingTimeouts.add(notifyTimeout);
                }
                timeoutClusterStateListeners.add(listener);
                listener.postAdded();
            }
        });
    } catch (EsRejectedExecutionException e) {
        // race: executor may have been shut down between the lifecycle check above and execute()
        if (lifecycle.stoppedOrClosed()) {
            listener.onClose();
        } else {
            throw e;
        }
    }
}
/**
 * Submits a cluster state update task; unlike {@link #submitStateUpdateTask(String, Object, ClusterStateTaskConfig,
 * ClusterStateTaskExecutor, ClusterStateTaskListener)}, submitted updates will not be batched.
 *
 * @param source     the source of the cluster state update task
 * @param updateTask the full context for the cluster state update
 *                   task
 *
 */
public <T extends ClusterStateTaskConfig & ClusterStateTaskExecutor<T> & ClusterStateTaskListener> void submitStateUpdateTask(
    final String source, final T updateTask) {
    // the single object acts as task, config, executor and listener at once
    submitStateUpdateTask(source, updateTask, updateTask, updateTask, updateTask);
}
/**
 * Submits a cluster state update task; submitted updates will be
 * batched across the same instance of executor. The exact batching
 * semantics depend on the underlying implementation but a rough
 * guideline is that if the update task is submitted while there
 * are pending update tasks for the same executor, these update
 * tasks will all be executed on the executor in a single batch
 *
 * @param source   the source of the cluster state update task
 * @param task     the state needed for the cluster state update task
 * @param config   the cluster state update task configuration
 * @param executor the cluster state update task executor; tasks
 *                 that share the same executor will be executed
 *                 batches on this executor
 * @param listener callback after the cluster state update task
 *                 completes
 * @param <T>      the type of the cluster state update task state
 *
 */
public <T> void submitStateUpdateTask(final String source, final T task,
                                      final ClusterStateTaskConfig config,
                                      final ClusterStateTaskExecutor<T> executor,
                                      final ClusterStateTaskListener listener) {
    // single-task convenience wrapper around the batch submission API
    submitStateUpdateTasks(source, Collections.singletonMap(task, listener), config, executor);
}
/**
 * Submits a batch of cluster state update tasks; submitted updates are guaranteed to be processed together,
 * potentially with more tasks of the same executor.
 *
 * @param source   the source of the cluster state update task
 * @param tasks    a map of update tasks and their corresponding listeners
 * @param config   the cluster state update task configuration
 * @param executor the cluster state update task executor; tasks
 *                 that share the same executor will be executed
 *                 batches on this executor
 * @param <T>      the type of the cluster state update task state
 *
 */
public <T> void submitStateUpdateTasks(final String source,
                                       final Map<T, ClusterStateTaskListener> tasks, final ClusterStateTaskConfig config,
                                       final ClusterStateTaskExecutor<T> executor) {
    // silently drop submissions when not started (e.g. during shutdown)
    if (!lifecycle.started()) {
        return;
    }
    if (tasks.isEmpty()) {
        return;
    }
    try {
        @SuppressWarnings("unchecked")
        ClusterStateTaskExecutor<Object> taskExecutor = (ClusterStateTaskExecutor<Object>) executor;
        // convert to an identity map to check for dups based on update tasks semantics of using identity instead of equal
        final IdentityHashMap<Object, ClusterStateTaskListener> tasksIdentity = new IdentityHashMap<>(tasks);
        // wrap each listener so that listener exceptions are logged rather than propagated
        final List<UpdateTask> updateTasks = tasksIdentity.entrySet().stream().map(
            entry -> new UpdateTask(source, entry.getKey(), config.priority(), taskExecutor, safe(entry.getValue(), logger))
        ).collect(Collectors.toList());
        synchronized (updateTasksPerExecutor) {
            // reject resubmission of a task object that is already queued for this executor
            LinkedHashSet<UpdateTask> existingTasks = updateTasksPerExecutor.computeIfAbsent(executor,
                k -> new LinkedHashSet<>(updateTasks.size()));
            for (UpdateTask existing : existingTasks) {
                if (tasksIdentity.containsKey(existing.task)) {
                    throw new IllegalStateException("task [" + taskExecutor.describeTasks(Collections.singletonList(existing.task)) +
                        "] with source [" + source + "] is already queued");
                }
            }
            existingTasks.addAll(updateTasks);
        }
        // only the first task is enqueued on the update thread; when it runs it drains the
        // whole batch registered for this executor in updateTasksPerExecutor
        final UpdateTask firstTask = updateTasks.get(0);
        final TimeValue timeout = config.timeout();
        if (timeout != null) {
            threadPoolExecutor.execute(firstTask, threadPool.scheduler(), timeout, () -> onTimeout(updateTasks, source, timeout));
        } else {
            threadPoolExecutor.execute(firstTask);
        }
    } catch (EsRejectedExecutionException e) {
        // ignore cases where we are shutting down..., there is really nothing interesting
        // to be done here...
        if (!lifecycle.stoppedOrClosed()) {
            throw e;
        }
    }
}
// Invoked when a batch's configured timeout expires before the batch runs: removes the not-yet-processed
// tasks from the pending queue and fails their listeners with ProcessClusterEventTimeoutException.
private void onTimeout(List<UpdateTask> updateTasks, String source, TimeValue timeout) {
    // run off the update thread; listener notification may be slow
    threadPool.generic().execute(() -> {
        final ArrayList<UpdateTask> toRemove = new ArrayList<>();
        for (UpdateTask task : updateTasks) {
            // the processed flag wins the race against actual execution: whoever sets it first owns the task
            if (task.processed.getAndSet(true) == false) {
                logger.debug("cluster state update task [{}] timed out after [{}]", source, timeout);
                toRemove.add(task);
            }
        }
        if (toRemove.isEmpty() == false) {
            ClusterStateTaskExecutor<Object> clusterStateTaskExecutor = toRemove.get(0).executor;
            synchronized (updateTasksPerExecutor) {
                LinkedHashSet<UpdateTask> existingTasks = updateTasksPerExecutor.get(clusterStateTaskExecutor);
                if (existingTasks != null) {
                    existingTasks.removeAll(toRemove);
                    if (existingTasks.isEmpty()) {
                        // drop the executor entry entirely once no tasks remain for it
                        updateTasksPerExecutor.remove(clusterStateTaskExecutor);
                    }
                }
            }
            // notify outside the lock
            for (UpdateTask task : toRemove) {
                task.listener.onFailure(source, new ProcessClusterEventTimeoutException(timeout, source));
            }
        }
    });
}
/**
 * Returns the tasks that are pending.
 */
public List<PendingClusterTask> pendingTasks() {
    final PrioritizedEsThreadPoolExecutor.Pending[] pending = threadPoolExecutor.getPending();
    final List<PendingClusterTask> result = new ArrayList<>(pending.length);
    for (PrioritizedEsThreadPoolExecutor.Pending entry : pending) {
        // capture the task once: it is nulled after execution and must not change while we inspect it
        final Object task = entry.task;
        if (task == null) {
            continue;
        }
        final String source;
        final long timeInQueue;
        if (task instanceof SourcePrioritizedRunnable) {
            final SourcePrioritizedRunnable runnable = (SourcePrioritizedRunnable) task;
            source = runnable.source();
            timeInQueue = runnable.getAgeInMillis();
        } else {
            // should never happen; fall back to a descriptive placeholder in production
            assert false : "expected SourcePrioritizedRunnable got " + task.getClass();
            source = "unknown [" + task.getClass() + "]";
            timeInQueue = 0;
        }
        result.add(
            new PendingClusterTask(entry.insertionOrder, entry.priority, new Text(source), timeInQueue, entry.executing));
    }
    return result;
}
/**
 * Returns the number of currently pending tasks.
 */
public int numberOfPendingTasks() {
    return threadPoolExecutor.getNumberOfPendingTasks();
}
/**
 * Returns the maximum wait time for tasks in the queue
 *
 * @return A zero time value if the queue is empty, otherwise the time value oldest task waiting in the queue
 */
public TimeValue getMaxTaskWaitTime() {
    return threadPoolExecutor.getMaxTaskWaitTime();
}
/** asserts that the current thread is the cluster state update thread */
public static boolean assertClusterStateThread() {
    // identified by thread name; relies on UPDATE_THREAD_NAME being used for the update executor
    assert Thread.currentThread().getName().contains(ClusterService.UPDATE_THREAD_NAME) :
        "not called from the cluster state update thread";
    return true;
}
/** asserts that the current thread is <b>NOT</b> the cluster state update thread */
public static boolean assertNotClusterStateUpdateThread(String reason) {
    assert Thread.currentThread().getName().contains(UPDATE_THREAD_NAME) == false :
        "Expected current thread [" + Thread.currentThread() + "] to not be the cluster state update thread. Reason: [" + reason + "]";
    return true;
}
/** asserts that the current stack trace does <b>NOT</b> involve a cluster state applier */
private static boolean assertNotCalledFromClusterStateApplier(String reason) {
    // only meaningful on the update thread; walk the stack looking for the applier call site
    if (Thread.currentThread().getName().contains(UPDATE_THREAD_NAME)) {
        for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
            final String className = element.getClassName();
            final String methodName = element.getMethodName();
            if (className.equals(ClusterStateObserver.class.getName())) {
                // people may start an observer from an applier
                return true;
            } else if (className.equals(ClusterService.class.getName())
                && methodName.equals("callClusterStateAppliers")) {
                // NOTE: coupled to the method name "callClusterStateAppliers" by string; keep in sync if renamed
                throw new AssertionError("should not be called by a cluster state applier. reason [" + reason + "]");
            }
        }
    }
    return true;
}
/** Returns the name of the cluster this node belongs to. */
public ClusterName getClusterName() {
    return clusterName;
}
/** Injects the discovery settings (used for the no-master block). Must be set before {@code doStart()}. */
public void setDiscoverySettings(DiscoverySettings discoverySettings) {
    this.discoverySettings = discoverySettings;
}
// A prioritized runnable that carries the source (description) of the cluster state update
// that created it, so pending tasks can be reported with a human-readable origin.
abstract static class SourcePrioritizedRunnable extends PrioritizedRunnable {
    protected final String source;

    SourcePrioritizedRunnable(Priority priority, String source) {
        super(priority);
        this.source = source;
    }

    /** Returns the source (description) of the originating cluster state update. */
    public String source() {
        return source;
    }
}
// Executes a batch of update tasks on the update thread: runs the executor, then - if the state
// changed - publishes and applies the new state, logging slow tasks along the way.
void runTasks(TaskInputs taskInputs) {
    if (!lifecycle.started()) {
        logger.debug("processing [{}]: ignoring, cluster service not started", taskInputs.summary);
        return;
    }
    logger.debug("processing [{}]: execute", taskInputs.summary);
    ClusterState previousClusterState = state();
    // master-only batches are failed fast when this node lost its master role
    if (!previousClusterState.nodes().isLocalNodeElectedMaster() && taskInputs.runOnlyOnMaster()) {
        logger.debug("failing [{}]: local node is no longer master", taskInputs.summary);
        taskInputs.onNoLongerMaster();
        return;
    }
    long startTimeNS = currentTimeInNanos();
    TaskOutputs taskOutputs = calculateTaskOutputs(taskInputs, previousClusterState, startTimeNS);
    // failed tasks are notified regardless of whether the state changed
    taskOutputs.notifyFailedTasks();
    if (taskOutputs.clusterStateUnchanged()) {
        taskOutputs.notifySuccessfulTasksOnUnchangedClusterState();
        TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
        logger.debug("processing [{}]: took [{}] no change in cluster_state", taskInputs.summary, executionTime);
        warnAboutSlowTaskIfNeeded(executionTime, taskInputs.summary);
    } else {
        ClusterState newClusterState = taskOutputs.newClusterState;
        if (logger.isTraceEnabled()) {
            logger.trace("cluster state updated, source [{}]\n{}", taskInputs.summary, newClusterState);
        } else if (logger.isDebugEnabled()) {
            logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), taskInputs.summary);
        }
        try {
            publishAndApplyChanges(taskInputs, taskOutputs);
            TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
            logger.debug("processing [{}]: took [{}] done applying updated cluster_state (version: {}, uuid: {})", taskInputs.summary,
                executionTime, newClusterState.version(), newClusterState.stateUUID());
            warnAboutSlowTaskIfNeeded(executionTime, taskInputs.summary);
        } catch (Exception e) {
            // applying failed: log the full state for diagnosis; the state may be partially applied
            TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
            final long version = newClusterState.version();
            final String stateUUID = newClusterState.stateUUID();
            final String fullState = newClusterState.toString();
            logger.warn(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]\n{}",
                    executionTime,
                    version,
                    stateUUID,
                    taskInputs.summary,
                    fullState),
                e);
            // TODO: do we want to call updateTask.onFailure here?
        }
    }
}
// Runs the executor over the batch and packages the resulting state plus per-task results,
// separating the tasks that succeeded (and therefore wait for publication) from the failed ones.
public TaskOutputs calculateTaskOutputs(TaskInputs taskInputs, ClusterState previousClusterState, long startTimeNS) {
    ClusterTasksResult<Object> clusterTasksResult = executeTasks(taskInputs, startTimeNS, previousClusterState);
    // extract those that are waiting for results
    List<UpdateTask> nonFailedTasks = new ArrayList<>();
    for (UpdateTask updateTask : taskInputs.updateTasks) {
        assert clusterTasksResult.executionResults.containsKey(updateTask.task) : "missing " + updateTask;
        final ClusterStateTaskExecutor.TaskResult taskResult =
            clusterTasksResult.executionResults.get(updateTask.task);
        if (taskResult.isSuccess()) {
            nonFailedTasks.add(updateTask);
        }
    }
    // apply version bumps and no-master block adjustments that only the ClusterService may make
    ClusterState newClusterState = patchVersionsAndNoMasterBlocks(previousClusterState, clusterTasksResult);
    return new TaskOutputs(taskInputs, previousClusterState, newClusterState, nonFailedTasks,
        clusterTasksResult.executionResults);
}
// Invokes the batch executor; if it throws, converts the exception into a per-task failure result
// so that every task in the batch still gets exactly one result.
private ClusterTasksResult<Object> executeTasks(TaskInputs taskInputs, long startTimeNS, ClusterState previousClusterState) {
    ClusterTasksResult<Object> clusterTasksResult;
    try {
        List<Object> inputs = taskInputs.updateTasks.stream().map(tUpdateTask -> tUpdateTask.task).collect(Collectors.toList());
        clusterTasksResult = taskInputs.executor.execute(previousClusterState, inputs);
    } catch (Exception e) {
        TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
        if (logger.isTraceEnabled()) {
            logger.trace(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "failed to execute cluster state update in [{}], state:\nversion [{}], source [{}]\n{}{}{}",
                    executionTime,
                    previousClusterState.version(),
                    taskInputs.summary,
                    previousClusterState.nodes(),
                    previousClusterState.routingTable(),
                    previousClusterState.getRoutingNodes()),
                e);
        }
        warnAboutSlowTaskIfNeeded(executionTime, taskInputs.summary);
        // fail all tasks in the batch with the same exception; the state stays unchanged
        clusterTasksResult = ClusterTasksResult.builder()
            .failures(taskInputs.updateTasks.stream().map(updateTask -> updateTask.task)::iterator, e)
            .build(previousClusterState);
    }
    // executor contract: exactly one result per submitted task
    assert clusterTasksResult.executionResults != null;
    assert clusterTasksResult.executionResults.size() == taskInputs.updateTasks.size()
        : String.format(Locale.ROOT, "expected [%d] task result%s but was [%d]", taskInputs.updateTasks.size(),
        taskInputs.updateTasks.size() == 1 ? "" : "s", clusterTasksResult.executionResults.size());
    // the containsKey check below is only worth its cost when assertions are enabled
    boolean assertsEnabled = false;
    assert (assertsEnabled = true);
    if (assertsEnabled) {
        for (UpdateTask updateTask : taskInputs.updateTasks) {
            assert clusterTasksResult.executionResults.containsKey(updateTask.task) :
                "missing task result for " + updateTask;
        }
    }
    return clusterTasksResult;
}
// Post-processes the executor's resulting state: adds the no-master block when the master was lost,
// and - when this node is the elected master and the state changed - increments the relevant versions
// and removes any stale no-master block. Only ClusterService is allowed to make these adjustments.
private ClusterState patchVersionsAndNoMasterBlocks(ClusterState previousClusterState, ClusterTasksResult<Object> executionResult) {
    ClusterState newClusterState = executionResult.resultingState;
    if (executionResult.noMaster) {
        assert newClusterState == previousClusterState : "state can only be changed by ClusterService when noMaster = true";
        if (previousClusterState.nodes().getMasterNodeId() != null) {
            // remove block if it already exists before adding new one
            assert previousClusterState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock().id()) == false :
                "NO_MASTER_BLOCK should only be added by ClusterService";
            ClusterBlocks clusterBlocks = ClusterBlocks.builder().blocks(previousClusterState.blocks())
                .addGlobalBlock(discoverySettings.getNoMasterBlock())
                .build();
            // clear the master node id alongside adding the block
            DiscoveryNodes discoveryNodes = new DiscoveryNodes.Builder(previousClusterState.nodes()).masterNodeId(null).build();
            newClusterState = ClusterState.builder(previousClusterState)
                .blocks(clusterBlocks)
                .nodes(discoveryNodes)
                .build();
        }
    } else if (newClusterState.nodes().isLocalNodeElectedMaster() && previousClusterState != newClusterState) {
        // only the master controls the version numbers
        Builder builder = ClusterState.builder(newClusterState).incrementVersion();
        if (previousClusterState.routingTable() != newClusterState.routingTable()) {
            builder.routingTable(RoutingTable.builder(newClusterState.routingTable())
                .version(newClusterState.routingTable().version() + 1).build());
        }
        if (previousClusterState.metaData() != newClusterState.metaData()) {
            builder.metaData(MetaData.builder(newClusterState.metaData()).version(newClusterState.metaData().version() + 1));
        }
        // remove the no master block, if it exists
        if (newClusterState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock().id())) {
            builder.blocks(ClusterBlocks.builder().blocks(newClusterState.blocks())
                .removeGlobalBlock(discoverySettings.getNoMasterBlock().id()));
        }
        newClusterState = builder.build();
    }
    // invariant: a state with an elected master never carries the no-master block
    assert newClusterState.nodes().getMasterNodeId() == null ||
        newClusterState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock().id()) == false :
        "cluster state with master node must not have NO_MASTER_BLOCK";
    return newClusterState;
}
// Publishes the new cluster state (when master), applies it locally (appliers, then the state
// reference, then listeners), keeps node connections in sync, and drives ack/notification callbacks.
// The ordering of these steps is deliberate and must not be changed casually.
private void publishAndApplyChanges(TaskInputs taskInputs, TaskOutputs taskOutputs) {
    ClusterState previousClusterState = taskOutputs.previousClusterState;
    ClusterState newClusterState = taskOutputs.newClusterState;
    ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(taskInputs.summary, newClusterState, previousClusterState);
    // new cluster state, notify all listeners
    final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta();
    if (nodesDelta.hasChanges() && logger.isInfoEnabled()) {
        String summary = nodesDelta.shortSummary();
        if (summary.length() > 0) {
            logger.info("{}, reason: {}", summary, taskInputs.summary);
        }
    }
    // ack listener only exists when this node is the elected master; null otherwise
    final Discovery.AckListener ackListener = newClusterState.nodes().isLocalNodeElectedMaster() ?
        taskOutputs.createAckListener(threadPool, newClusterState) :
        null;
    // connect to newly-added nodes BEFORE publishing so the transport layer can reach them
    nodeConnectionsService.connectToNodes(newClusterState.nodes());
    // if we are the master, publish the new state to all nodes
    // we publish here before we send a notification to all the listeners, since if it fails
    // we don't want to notify
    if (newClusterState.nodes().isLocalNodeElectedMaster()) {
        logger.debug("publishing cluster state version [{}]", newClusterState.version());
        try {
            clusterStatePublisher.accept(clusterChangedEvent, ackListener);
        } catch (Discovery.FailedToCommitClusterStateException t) {
            final long version = newClusterState.version();
            logger.warn(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "failing [{}]: failed to commit cluster state version [{}]", taskInputs.summary, version),
                t);
            // ensure that list of connected nodes in NodeConnectionsService is in-sync with the nodes of the current cluster state
            nodeConnectionsService.connectToNodes(previousClusterState.nodes());
            nodeConnectionsService.disconnectFromNodesExcept(previousClusterState.nodes());
            taskOutputs.publishingFailed(t);
            return;
        }
    }
    logger.debug("applying cluster state version {}", newClusterState.version());
    try {
        // nothing to do until we actually recover from the gateway or any other block indicates we need to disable persistency
        if (clusterChangedEvent.state().blocks().disableStatePersistence() == false && clusterChangedEvent.metaDataChanged()) {
            final Settings incomingSettings = clusterChangedEvent.state().metaData().settings();
            clusterSettings.applySettings(incomingSettings);
        }
    } catch (Exception ex) {
        logger.warn("failed to apply cluster settings", ex);
    }
    logger.debug("set local cluster state to version {}", newClusterState.version());
    // appliers run before the state reference is swapped (they see the old state via state())
    callClusterStateAppliers(newClusterState, clusterChangedEvent);
    // disconnect from removed nodes only AFTER appliers have run against the new state
    nodeConnectionsService.disconnectFromNodesExcept(newClusterState.nodes());
    updateState(css -> newClusterState);
    // listeners observe the already-applied state; failures are logged and do not abort the loop
    Stream.concat(clusterStateListeners.stream(), timeoutClusterStateListeners.stream()).forEach(listener -> {
        try {
            logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version());
            listener.clusterChanged(clusterChangedEvent);
        } catch (Exception ex) {
            logger.warn("failed to notify ClusterStateListener", ex);
        }
    });
    //manual ack only from the master at the end of the publish
    if (newClusterState.nodes().isLocalNodeElectedMaster()) {
        // ackListener is non-null here: it was created above under the same master check
        try {
            ackListener.onNodeAck(newClusterState.nodes().getLocalNode(), null);
        } catch (Exception e) {
            final DiscoveryNode localNode = newClusterState.nodes().getLocalNode();
            logger.debug(
                (Supplier<?>) () -> new ParameterizedMessage("error while processing ack for master node [{}]", localNode),
                e);
        }
    }
    taskOutputs.processedDifferentClusterState(previousClusterState, newClusterState);
    if (newClusterState.nodes().isLocalNodeElectedMaster()) {
        try {
            taskOutputs.clusterStatePublished(clusterChangedEvent);
        } catch (Exception e) {
            logger.error(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "exception thrown while notifying executor of new cluster state publication [{}]",
                    taskInputs.summary),
                e);
        }
    }
}
// Applies the new state to all registered appliers in priority order (high, normal, low).
// NOTE: assertNotCalledFromClusterStateApplier matches this method by its name; keep in sync.
private void callClusterStateAppliers(ClusterState newClusterState, ClusterChangedEvent clusterChangedEvent) {
    for (ClusterStateApplier applier : clusterStateAppliers) {
        try {
            logger.trace("calling [{}] with change to version [{}]", applier, newClusterState.version());
            applier.applyClusterState(clusterChangedEvent);
        } catch (Exception ex) {
            // a failing applier must not prevent the remaining appliers from running
            logger.warn("failed to notify ClusterStateApplier", ex);
        }
    }
}
/**
 * Represents a set of tasks to be processed together with their executor
 */
class TaskInputs {
    // human-readable description of the batch, used for logging
    public final String summary;
    public final ArrayList<UpdateTask> updateTasks;
    public final ClusterStateTaskExecutor<Object> executor;

    TaskInputs(ClusterStateTaskExecutor<Object> executor, ArrayList<UpdateTask> updateTasks, String summary) {
        this.summary = summary;
        this.executor = executor;
        this.updateTasks = updateTasks;
    }

    /** Whether this batch may only run while the local node is the elected master. */
    public boolean runOnlyOnMaster() {
        return executor.runOnlyOnMaster();
    }

    /** Notifies every task's listener that the local node is no longer master. */
    public void onNoLongerMaster() {
        updateTasks.stream().forEach(task -> task.listener.onNoLongerMaster(task.source));
    }
}
/**
 * Output created by executing a set of tasks provided as TaskInputs
 */
class TaskOutputs {
    public final TaskInputs taskInputs;
    public final ClusterState previousClusterState;
    public final ClusterState newClusterState;
    // tasks whose execution succeeded; they are notified after publication/application
    public final List<UpdateTask> nonFailedTasks;
    // per-task success/failure results, keyed by the original task object
    public final Map<Object, ClusterStateTaskExecutor.TaskResult> executionResults;

    TaskOutputs(TaskInputs taskInputs, ClusterState previousClusterState,
                ClusterState newClusterState, List<UpdateTask> nonFailedTasks,
                Map<Object, ClusterStateTaskExecutor.TaskResult> executionResults) {
        this.taskInputs = taskInputs;
        this.previousClusterState = previousClusterState;
        this.newClusterState = newClusterState;
        this.nonFailedTasks = nonFailedTasks;
        this.executionResults = executionResults;
    }

    /** Fails all successful tasks when publishing the new state could not be committed. */
    public void publishingFailed(Discovery.FailedToCommitClusterStateException t) {
        nonFailedTasks.forEach(task -> task.listener.onFailure(task.source, t));
    }

    /** Notifies successful tasks that the changed state has been fully processed. */
    public void processedDifferentClusterState(ClusterState previousClusterState, ClusterState newClusterState) {
        nonFailedTasks.forEach(task -> task.listener.clusterStateProcessed(task.source, previousClusterState, newClusterState));
    }

    /** Tells the executor that the new state was successfully published. */
    public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) {
        taskInputs.executor.clusterStatePublished(clusterChangedEvent);
    }

    /**
     * Builds the ack listener passed to the publisher: one countdown listener per acked task,
     * except tasks with a zero/absent ack timeout, which time out immediately.
     */
    public Discovery.AckListener createAckListener(ThreadPool threadPool, ClusterState newClusterState) {
        ArrayList<Discovery.AckListener> ackListeners = new ArrayList<>();
        //timeout straightaway, otherwise we could wait forever as the timeout thread has not started
        nonFailedTasks.stream().filter(task -> task.listener instanceof AckedClusterStateTaskListener).forEach(task -> {
            final AckedClusterStateTaskListener ackedListener = (AckedClusterStateTaskListener) task.listener;
            if (ackedListener.ackTimeout() == null || ackedListener.ackTimeout().millis() == 0) {
                ackedListener.onAckTimeout();
            } else {
                try {
                    ackListeners.add(new AckCountDownListener(ackedListener, newClusterState.version(), newClusterState.nodes(),
                        threadPool));
                } catch (EsRejectedExecutionException ex) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("Couldn't schedule timeout thread - node might be shutting down", ex);
                    }
                    //timeout straightaway, otherwise we could wait forever as the timeout thread has not started
                    ackedListener.onAckTimeout();
                }
            }
        });
        return new DelegetingAckListener(ackListeners);
    }

    /** True when the executor returned the same state instance it was given. */
    public boolean clusterStateUnchanged() {
        return previousClusterState == newClusterState;
    }

    /** Notifies the listener of every task whose execution result is a failure. */
    public void notifyFailedTasks() {
        // fail all tasks that have failed
        for (UpdateTask updateTask : taskInputs.updateTasks) {
            assert executionResults.containsKey(updateTask.task) : "missing " + updateTask;
            final ClusterStateTaskExecutor.TaskResult taskResult = executionResults.get(updateTask.task);
            if (taskResult.isSuccess() == false) {
                updateTask.listener.onFailure(updateTask.source, taskResult.getFailure());
            }
        }
    }

    /** Completes successful tasks when the state did not change (acked immediately, no publication). */
    public void notifySuccessfulTasksOnUnchangedClusterState() {
        nonFailedTasks.forEach(task -> {
            if (task.listener instanceof AckedClusterStateTaskListener) {
                //no need to wait for ack if nothing changed, the update can be counted as acknowledged
                ((AckedClusterStateTaskListener) task.listener).onAllNodesAcked(null);
            }
            task.listener.clusterStateProcessed(task.source, newClusterState, newClusterState);
        });
    }
}
// this one is overridden in tests so we can control time
protected long currentTimeInNanos() {
    return System.nanoTime();
}
/**
 * Wraps a task listener so that exceptions it throws are logged instead of propagating
 * into the cluster state update machinery; acked listeners get the acked-aware wrapper.
 */
private static SafeClusterStateTaskListener safe(ClusterStateTaskListener listener, Logger logger) {
    return listener instanceof AckedClusterStateTaskListener
        ? new SafeAckedClusterStateTaskListener((AckedClusterStateTaskListener) listener, logger)
        : new SafeClusterStateTaskListener(listener, logger);
}
// Decorator that shields the cluster state update machinery from exceptions thrown by
// user-supplied listeners: every callback is wrapped in try/catch and failures are logged.
private static class SafeClusterStateTaskListener implements ClusterStateTaskListener {
    private final ClusterStateTaskListener listener;
    private final Logger logger;

    SafeClusterStateTaskListener(ClusterStateTaskListener listener, Logger logger) {
        this.listener = listener;
        this.logger = logger;
    }

    @Override
    public void onFailure(String source, Exception e) {
        try {
            listener.onFailure(source, e);
        } catch (Exception inner) {
            // keep the original failure attached to the listener's own exception
            inner.addSuppressed(e);
            logger.error(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "exception thrown by listener notifying of failure from [{}]", source), inner);
        }
    }

    @Override
    public void onNoLongerMaster(String source) {
        try {
            listener.onNoLongerMaster(source);
        } catch (Exception e) {
            logger.error(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "exception thrown by listener while notifying no longer master from [{}]", source), e);
        }
    }

    @Override
    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
        try {
            listener.clusterStateProcessed(source, oldState, newState);
        } catch (Exception e) {
            logger.error(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "exception thrown by listener while notifying of cluster state processed from [{}], old cluster state:\n" +
                        "{}\nnew cluster state:\n{}",
                    source, oldState, newState),
                e);
        }
    }
}
/**
 * Acked variant of {@link SafeClusterStateTaskListener}: additionally shields the
 * ack-related callbacks ({@code onAllNodesAcked}, {@code onAckTimeout}) from
 * listener exceptions.
 */
private static class SafeAckedClusterStateTaskListener extends SafeClusterStateTaskListener implements AckedClusterStateTaskListener {
    private final AckedClusterStateTaskListener listener;
    private final Logger logger;

    SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Logger logger) {
        super(listener, logger);
        this.listener = listener;
        this.logger = logger;
    }

    @Override
    public boolean mustAck(DiscoveryNode discoveryNode) {
        return listener.mustAck(discoveryNode);
    }

    @Override
    public void onAllNodesAcked(@Nullable Exception e) {
        try {
            listener.onAllNodesAcked(e);
        } catch (Exception inner) {
            // e is legitimately null here (e.g. when acking an unchanged cluster
            // state); Throwable#addSuppressed(null) throws NullPointerException,
            // so only attach a real cause.
            if (e != null) {
                inner.addSuppressed(e);
            }
            logger.error("exception thrown by listener while notifying on all nodes acked", inner);
        }
    }

    @Override
    public void onAckTimeout() {
        try {
            listener.onAckTimeout();
        } catch (Exception e) {
            logger.error("exception thrown by listener while notifying on ack timeout", e);
        }
    }

    @Override
    public TimeValue ackTimeout() {
        return listener.ackTimeout();
    }
}
/**
 * A prioritized runnable that, when run, batch-executes all pending tasks sharing
 * its executor. Only the first task of a batch to run actually processes the
 * batch; later ones become no-ops via the {@code processed} flag.
 */
class UpdateTask extends SourcePrioritizedRunnable {
    public final Object task;
    public final ClusterStateTaskListener listener;
    private final ClusterStateTaskExecutor<Object> executor;
    // flipped to true once this task has been claimed as part of a batch
    public final AtomicBoolean processed = new AtomicBoolean();

    UpdateTask(String source, Object task, Priority priority, ClusterStateTaskExecutor<Object> executor,
               ClusterStateTaskListener listener) {
        super(priority, source);
        this.task = task;
        this.executor = executor;
        this.listener = listener;
    }

    @Override
    public void run() {
        // if this task is already processed, the executor shouldn't execute other tasks (that arrived later),
        // to give other executors a chance to execute their tasks.
        if (processed.get() == false) {
            final ArrayList<UpdateTask> toExecute = new ArrayList<>();
            final Map<String, ArrayList<Object>> processTasksBySource = new HashMap<>();
            // drain and claim all pending tasks for this executor atomically
            synchronized (updateTasksPerExecutor) {
                LinkedHashSet<UpdateTask> pending = updateTasksPerExecutor.remove(executor);
                if (pending != null) {
                    for (UpdateTask task : pending) {
                        if (task.processed.getAndSet(true) == false) {
                            logger.trace("will process {}", task);
                            toExecute.add(task);
                            processTasksBySource.computeIfAbsent(task.source, s -> new ArrayList<>()).add(task.task);
                        } else {
                            logger.trace("skipping {}, already processed", task);
                        }
                    }
                }
            }
            if (toExecute.isEmpty() == false) {
                // build a human-readable summary like "source1[task descr], source2[...]"
                final String tasksSummary = processTasksBySource.entrySet().stream().map(entry -> {
                    String tasks = executor.describeTasks(entry.getValue());
                    return tasks.isEmpty() ? entry.getKey() : entry.getKey() + "[" + tasks + "]";
                }).reduce((s1, s2) -> s1 + ", " + s2).orElse("");
                runTasks(new TaskInputs(executor, toExecute, tasksSummary));
            }
        }
    }

    @Override
    public String toString() {
        String taskDescription = executor.describeTasks(Collections.singletonList(task));
        if (taskDescription.isEmpty()) {
            return "[" + source + "]";
        } else {
            return "[" + source + "[" + taskDescription + "]]";
        }
    }
}
/** Logs a warning when a cluster state update took longer than the configured slow-task threshold. */
private void warnAboutSlowTaskIfNeeded(TimeValue executionTime, String source) {
    if (executionTime.getMillis() <= slowTaskLoggingThreshold.getMillis()) {
        return;
    }
    logger.warn("cluster state update task [{}] took [{}] above the warn threshold of {}", source, executionTime,
        slowTaskLoggingThreshold);
}
/**
 * Scheduled notification that fires {@link TimeoutClusterStateListener#onTimeout}
 * unless the timeout was cancelled or the service has been stopped/closed.
 */
class NotifyTimeout implements Runnable {
    final TimeoutClusterStateListener listener;
    final TimeValue timeout;
    // set after scheduling; volatile so cancel() and run() observe the latest value
    volatile ScheduledFuture future;

    NotifyTimeout(TimeoutClusterStateListener listener, TimeValue timeout) {
        this.listener = listener;
        this.timeout = timeout;
    }

    public void cancel() {
        FutureUtils.cancel(future);
    }

    @Override
    public void run() {
        if (future != null && future.isCancelled()) {
            return;
        }
        if (lifecycle.stoppedOrClosed()) {
            listener.onClose();
        } else {
            listener.onTimeout(this.timeout);
        }
        // note, we rely on the listener to remove itself in case of timeout if needed
    }
}
/**
 * Tracks whether the local node is the elected master and notifies registered
 * {@link LocalNodeMasterListener}s (each on its preferred executor) whenever
 * that status flips in either direction.
 */
private static class LocalNodeMasterListeners implements ClusterStateListener {
    private final List<LocalNodeMasterListener> listeners = new CopyOnWriteArrayList<>();
    private final ThreadPool threadPool;
    // last observed "local node is master" status
    private volatile boolean master = false;

    private LocalNodeMasterListeners(ThreadPool threadPool) {
        this.threadPool = threadPool;
    }

    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        // transition: non-master -> master
        if (!master && event.localNodeMaster()) {
            master = true;
            for (LocalNodeMasterListener listener : listeners) {
                Executor executor = threadPool.executor(listener.executorName());
                executor.execute(new OnMasterRunnable(listener));
            }
            return;
        }
        // transition: master -> non-master
        if (master && !event.localNodeMaster()) {
            master = false;
            for (LocalNodeMasterListener listener : listeners) {
                Executor executor = threadPool.executor(listener.executorName());
                executor.execute(new OffMasterRunnable(listener));
            }
        }
    }

    private void add(LocalNodeMasterListener listener) {
        listeners.add(listener);
    }

    private void remove(LocalNodeMasterListener listener) {
        listeners.remove(listener);
    }

    private void clear() {
        listeners.clear();
    }
}
/** Delivers the {@link LocalNodeMasterListener#onMaster()} callback on the listener's executor. */
private static class OnMasterRunnable implements Runnable {
    private final LocalNodeMasterListener listener;

    private OnMasterRunnable(LocalNodeMasterListener listener) {
        this.listener = listener;
    }

    @Override
    public void run() {
        listener.onMaster();
    }
}
/** Delivers the {@link LocalNodeMasterListener#offMaster()} callback on the listener's executor. */
private static class OffMasterRunnable implements Runnable {
    private final LocalNodeMasterListener listener;

    private OffMasterRunnable(LocalNodeMasterListener listener) {
        this.listener = listener;
    }

    @Override
    public void run() {
        listener.offMaster();
    }
}
/**
 * Fans a node acknowledgement out to several per-task ack listeners.
 * (The class name keeps its historical misspelling of "delegating"; renaming it
 * would touch every construction site.) Timeouts are handled per underlying
 * listener, hence {@link #onTimeout()} is unsupported here.
 */
private static class DelegetingAckListener implements Discovery.AckListener {
    private final List<Discovery.AckListener> listeners;

    private DelegetingAckListener(List<Discovery.AckListener> listeners) {
        this.listeners = listeners;
    }

    @Override
    public void onNodeAck(DiscoveryNode node, @Nullable Exception e) {
        for (Discovery.AckListener listener : listeners) {
            listener.onNodeAck(node, e);
        }
    }

    @Override
    public void onTimeout() {
        throw new UnsupportedOperationException("no timeout delegation");
    }
}
/**
 * Counts down node acknowledgements for a published cluster state and notifies
 * the task listener once all expected nodes have acked, or once the ack timeout
 * fires — whichever happens first.
 */
private static class AckCountDownListener implements Discovery.AckListener {
    private static final Logger logger = Loggers.getLogger(AckCountDownListener.class);

    private final AckedClusterStateTaskListener ackedTaskListener;
    private final CountDown countDown;
    private final DiscoveryNodes nodes;
    private final long clusterStateVersion;
    // scheduled timeout task; cancelled when all acks arrive first
    private final Future<?> ackTimeoutCallback;
    // most recent ack failure, reported to the listener when the count completes
    private Exception lastFailure;

    AckCountDownListener(AckedClusterStateTaskListener ackedTaskListener, long clusterStateVersion, DiscoveryNodes nodes,
                         ThreadPool threadPool) {
        this.ackedTaskListener = ackedTaskListener;
        this.clusterStateVersion = clusterStateVersion;
        this.nodes = nodes;
        // count how many nodes the listener requires an ack from
        // (the local variable intentionally shadows the field of the same name)
        int countDown = 0;
        for (DiscoveryNode node : nodes) {
            if (ackedTaskListener.mustAck(node)) {
                countDown++;
            }
        }
        //we always wait for at least 1 node (the master)
        countDown = Math.max(1, countDown);
        logger.trace("expecting {} acknowledgements for cluster_state update (version: {})", countDown, clusterStateVersion);
        this.countDown = new CountDown(countDown);
        this.ackTimeoutCallback = threadPool.schedule(ackedTaskListener.ackTimeout(), ThreadPool.Names.GENERIC, () -> onTimeout());
    }

    @Override
    public void onNodeAck(DiscoveryNode node, @Nullable Exception e) {
        if (!ackedTaskListener.mustAck(node)) {
            //we always wait for the master ack anyway
            if (!node.equals(nodes.getMasterNode())) {
                return;
            }
        }
        if (e == null) {
            logger.trace("ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion);
        } else {
            this.lastFailure = e;
            logger.debug(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion),
                e);
        }
        if (countDown.countDown()) {
            logger.trace("all expected nodes acknowledged cluster_state update (version: {})", clusterStateVersion);
            FutureUtils.cancel(ackTimeoutCallback);
            ackedTaskListener.onAllNodesAcked(lastFailure);
        }
    }

    @Override
    public void onTimeout() {
        // fastForward returns true only for the first caller to exhaust the count,
        // so a timeout cannot race with a successful completion
        if (countDown.fastForward()) {
            logger.trace("timeout waiting for acknowledgement for cluster_state update (version: {})", clusterStateVersion);
            ackedTaskListener.onAckTimeout();
        }
    }
}
/** Returns the dynamic cluster-wide settings used by this service. */
public ClusterSettings getClusterSettings() {
    return clusterSettings;
}
/** Returns the node-level settings this service was created with. */
public Settings getSettings() {
    return settings;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.segment.scheduler;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.Thread.currentThread;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.jackrabbit.oak.api.Type.LONG;
import java.io.Closeable;
import java.text.MessageFormat;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.SlidingWindowReservoir;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.segment.Revisions;
import org.apache.jackrabbit.oak.segment.SegmentNodeBuilder;
import org.apache.jackrabbit.oak.segment.SegmentNodeState;
import org.apache.jackrabbit.oak.segment.SegmentNodeStoreStats;
import org.apache.jackrabbit.oak.segment.SegmentOverflowException;
import org.apache.jackrabbit.oak.segment.SegmentReader;
import org.apache.jackrabbit.oak.spi.commit.ChangeDispatcher;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.Observable;
import org.apache.jackrabbit.oak.spi.commit.Observer;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LockBasedScheduler implements Scheduler {
/** Fluent builder for {@link LockBasedScheduler} instances. */
public static class LockBasedSchedulerBuilder {
    @Nonnull
    private final SegmentReader reader;

    @Nonnull
    private final Revisions revisions;

    @Nonnull
    private StatisticsProvider statsProvider = StatisticsProvider.NOOP;

    // when true, the built scheduler also dispatches observation events
    private boolean dispatchChanges = true;

    private LockBasedSchedulerBuilder(@Nonnull Revisions revisions, @Nonnull SegmentReader reader) {
        this.revisions = revisions;
        this.reader = reader;
    }

    /**
     * {@link StatisticsProvider} for collecting statistics related to
     * SegmentStore
     *
     * @param statisticsProvider
     * @return this instance
     */
    @Nonnull
    public LockBasedSchedulerBuilder withStatisticsProvider(@Nonnull StatisticsProvider statisticsProvider) {
        this.statsProvider = checkNotNull(statisticsProvider);
        return this;
    }

    /**
     * Controls whether the built scheduler dispatches content changes to observers.
     *
     * @param dispatchChanges {@code true} to build an observable scheduler
     * @return this instance
     */
    @Nonnull
    public LockBasedSchedulerBuilder dispatchChanges(boolean dispatchChanges) {
        this.dispatchChanges = dispatchChanges;
        return this;
    }

    /** Builds the scheduler; the result is observable iff change dispatching is enabled. */
    @Nonnull
    public LockBasedScheduler build() {
        if (dispatchChanges) {
            return new ObservableLockBasedScheduler(this);
        } else {
            return new LockBasedScheduler(this);
        }
    }
}
/** Creates a builder for a {@link LockBasedScheduler} reading from the given revisions/reader pair. */
public static LockBasedSchedulerBuilder builder(@Nonnull Revisions revisions, @Nonnull SegmentReader reader) {
    return new LockBasedSchedulerBuilder(checkNotNull(revisions), checkNotNull(reader));
}
private static final Logger log = LoggerFactory.getLogger(LockBasedScheduler.class);

/**
 * Flag controlling the commit lock fairness
 */
private static final boolean COMMIT_FAIR_LOCK = Boolean
    .parseBoolean(System.getProperty("oak.segmentNodeStore.commitFairLock", "true"));

/**
 * Flag controlling the commit time quantile to wait for the lock in order
 * to increase chances of returning an up to date state.
 */
private static final double SCHEDULER_FETCH_COMMIT_DELAY_QUANTILE = Double
    .parseDouble(System.getProperty("oak.scheduler.fetch.commitDelayQuantile", "0.5"));

/**
 * Maximum number of milliseconds to wait before re-attempting to update the current
 * head state after a successful commit, provided a concurrent head state update happens.
 */
private static final long MAXIMUM_BACKOFF = MILLISECONDS.convert(10, SECONDS);

/**
 * Sets the number of seconds to wait for the attempt to grab the lock to
 * create a checkpoint
 */
private final int checkpointsLockWaitTime = Integer.getInteger("oak.checkpoints.lockWaitTime", 10);

// name of the root child node within the head state
static final String ROOT = "root";

/**
 * Semaphore that controls access to the {@link #head} variable. Only a
 * single local commit is allowed at a time. When such a commit is in
 * progress, no external updates will be seen.
 */
private final Semaphore commitSemaphore = new Semaphore(1, COMMIT_FAIR_LOCK);

// reads node states from the segment store
@Nonnull
private final SegmentReader reader;

// source of truth for the current head record id
@Nonnull
private final Revisions revisions;

// cached most recently observed head state
protected final AtomicReference<SegmentNodeState> head;

// sink for commit/queueing statistics
private final SegmentNodeStoreStats stats;

// sliding window of recent commit durations in nanoseconds;
// feeds the fetch-delay quantile used by getHeadNodeState
private final Histogram commitTimeHistogram = new Histogram(new SlidingWindowReservoir(1000));

// jitter source for the concurrent-commit backoff in execute()
private final Random random = new Random();
/** Creates a scheduler from the given builder; eagerly reads the initial head state. */
public LockBasedScheduler(LockBasedSchedulerBuilder builder) {
    if (COMMIT_FAIR_LOCK) {
        log.info("Initializing SegmentNodeStore with the commitFairLock option enabled.");
    }
    this.reader = builder.reader;
    this.revisions = builder.revisions;
    this.head = new AtomicReference<SegmentNodeState>(reader.readHeadState(revisions));
    this.stats = new SegmentNodeStoreStats(builder.statsProvider);
}
@Override
public NodeState getHeadNodeState() {
    // Wait up to the configured quantile of recent commit times (histogram values
    // are nanoseconds) for the commit lock, to improve the odds of observing the
    // latest head; on contention fall back to the cached (possibly stale) head.
    long delay = (long) commitTimeHistogram.getSnapshot().getValue(SCHEDULER_FETCH_COMMIT_DELAY_QUANTILE);
    try {
        if (commitSemaphore.tryAcquire(delay, NANOSECONDS)) {
            try {
                refreshHead(true);
            } finally {
                commitSemaphore.release();
            }
        }
    } catch (InterruptedException e) {
        // restore the interrupt flag and return the cached head
        currentThread().interrupt();
    }
    return head.get();
}
/**
 * Refreshes the head state. Should only be called while holding a permit
 * from the {@link #commitSemaphore}.
 *
 * @param dispatchChanges
 *            if set to true the changes would also be dispatched
 */
private void refreshHead(boolean dispatchChanges) {
    SegmentNodeState state = reader.readHeadState(revisions);
    // only act when the head record actually moved
    if (!state.getRecordId().equals(head.get().getRecordId())) {
        head.set(state);
        if (dispatchChanges) {
            contentChanged(state.getChildNode(ROOT), CommitInfo.EMPTY_EXTERNAL);
        }
    }
}
/** Hook for subclasses to dispatch content changes; no-op in the base scheduler. */
protected void contentChanged(NodeState root, CommitInfo info) {
    // do nothing without a change dispatcher
}
/**
 * Serializes the commit through the commit semaphore, recording queueing and
 * commit-duration statistics, then applies it via {@link #execute(Commit)}.
 *
 * @throws CommitFailedException if the merge is interrupted or fails
 */
@Override
public NodeState schedule(@Nonnull Commit commit, SchedulerOption... schedulingOptions)
        throws CommitFailedException {
    boolean queued = false;
    try {
        long queuedTime = -1;
        // note: availablePermits() is a racy snapshot, used for statistics only
        if (commitSemaphore.availablePermits() < 1) {
            queuedTime = System.nanoTime();
            stats.onCommitQueued();
            queued = true;
        }
        commitSemaphore.acquire();
        try {
            if (queued) {
                long dequeuedTime = System.nanoTime();
                stats.dequeuedAfter(dequeuedTime - queuedTime);
                stats.onCommitDequeued();
            }
            long beforeCommitTime = System.nanoTime();
            SegmentNodeState merged = (SegmentNodeState) execute(commit);
            commit.applied(merged);
            long afterCommitTime = System.nanoTime();
            stats.committedAfter(afterCommitTime - beforeCommitTime);
            // feeds the delay quantile used by getHeadNodeState (nanoseconds)
            commitTimeHistogram.update(afterCommitTime - beforeCommitTime);
            stats.onCommit();
            return merged;
        } finally {
            commitSemaphore.release();
        }
    } catch (InterruptedException e) {
        currentThread().interrupt();
        throw new CommitFailedException("Segment", 2, "Merge interrupted", e);
    } catch (SegmentOverflowException e) {
        throw new CommitFailedException("Segment", 3, "Merge failed", e);
    }
}
/**
 * Applies the commit on top of the current head, retrying with exponential
 * backoff (plus random nanosecond jitter) when a concurrent head update wins
 * the {@code setHead} race. Must be called while holding the commit semaphore.
 *
 * @return the root node of the (possibly updated) head state
 * @throws CommitFailedException if the head could not be advanced before the
 *         backoff budget ({@code MAXIMUM_BACKOFF}) was exhausted
 */
private NodeState execute(Commit commit) throws CommitFailedException, InterruptedException {
    // only do the merge if there are some changes to commit
    if (commit.hasChanges()) {
        long start = System.nanoTime();
        int count = 0;
        for (long backoff = 1; backoff < MAXIMUM_BACKOFF; backoff *= 2) {
            refreshHead(true);
            SegmentNodeState before = head.get();
            SegmentNodeState after = commit.apply(before);
            if (revisions.setHead(before.getRecordId(), after.getRecordId())) {
                head.set(after);
                contentChanged(after.getChildNode(ROOT), commit.info());
                return head.get().getChildNode(ROOT);
            }
            count++;
            int randNs = random.nextInt(1_000_000);
            log.info("Scheduler detected concurrent commits. Retrying after {} ms and {} ns", backoff, randNs);
            Thread.sleep(backoff, randNs);
        }
        long finish = System.nanoTime();
        // MessageFormat placeholders are indexed ({0}, {1}); the bare "{}" form is
        // SLF4J syntax and would make MessageFormat.format throw
        // IllegalArgumentException instead of producing the failure message.
        String message = MessageFormat.format(
            "The commit could not be executed after {0} attempts. Total wait time: {1} ms",
            count, NANOSECONDS.toMillis(finish - start));
        throw new CommitFailedException("Segment", 3, message);
    }
    return head.get().getChildNode(ROOT);
}
/**
 * Creates a checkpoint with the given lifetime and properties.
 * NOTE(review): the generated checkpoint name is returned even when creation
 * failed or the lock could not be acquired in time — callers only see the
 * failure in the logs; presumably intentional best-effort behavior, confirm
 * against the Scheduler contract.
 */
@Override
public String checkpoint(long lifetime, @Nonnull Map<String, String> properties) {
    checkArgument(lifetime > 0);
    checkNotNull(properties);
    String name = UUID.randomUUID().toString();
    try {
        CPCreator cpc = new CPCreator(name, lifetime, properties);
        if (commitSemaphore.tryAcquire(checkpointsLockWaitTime, TimeUnit.SECONDS)) {
            try {
                if (cpc.call()) {
                    return name;
                }
            } finally {
                // Explicitly give up reference to the previous root state
                // otherwise they would block cleanup. See OAK-3347
                refreshHead(true);
                commitSemaphore.release();
            }
        }
        log.warn("Failed to create checkpoint {} in {} seconds.", name, checkpointsLockWaitTime);
    } catch (InterruptedException e) {
        currentThread().interrupt();
        log.error("Failed to create checkpoint {}.", name, e);
    } catch (Exception e) {
        log.error("Failed to create checkpoint {}.", name, e);
    }
    return name;
}
/**
 * Removes the named checkpoint, retrying up to five times when the commit lock
 * is contended or a concurrent head update wins the {@code setHead} race.
 *
 * @return {@code true} when the checkpoint node was found and removed
 */
@Override
public boolean removeCheckpoint(String name) {
    checkNotNull(name);
    // try 5 times
    for (int i = 0; i < 5; i++) {
        if (commitSemaphore.tryAcquire()) {
            try {
                refreshHead(true);
                SegmentNodeState state = head.get();
                SegmentNodeBuilder builder = state.builder();
                NodeBuilder cp = builder.child("checkpoints").child(name);
                if (cp.exists()) {
                    cp.remove();
                    SegmentNodeState newState = builder.getNodeState();
                    if (revisions.setHead(state.getRecordId(), newState.getRecordId())) {
                        refreshHead(false);
                        return true;
                    }
                }
            } finally {
                commitSemaphore.release();
            }
        }
    }
    return false;
}
/** Scheduler variant that dispatches head changes to registered {@link Observer}s. */
private static class ObservableLockBasedScheduler extends LockBasedScheduler implements Observable {
    private final ChangeDispatcher changeDispatcher;

    public ObservableLockBasedScheduler(LockBasedSchedulerBuilder builder) {
        super(builder);
        // seed the dispatcher with the root of the initial head state
        this.changeDispatcher = new ChangeDispatcher(head.get().getChildNode(ROOT));
    }

    @Override
    protected void contentChanged(NodeState root, CommitInfo info) {
        changeDispatcher.contentChanged(root, info);
    }

    @Override
    public Closeable addObserver(Observer observer) {
        return changeDispatcher.addObserver(observer);
    }
}
/**
 * Callable that writes a checkpoint node (under "checkpoints") into the current
 * head and attempts to advance the head to include it, garbage-collecting
 * expired checkpoints along the way. Must run while holding the commit semaphore.
 */
private final class CPCreator implements Callable<Boolean> {
    private final String name;
    private final long lifetime;
    private final Map<String, String> properties;

    CPCreator(String name, long lifetime, Map<String, String> properties) {
        this.name = name;
        this.lifetime = lifetime;
        this.properties = properties;
    }

    @Override
    public Boolean call() {
        long now = System.currentTimeMillis();
        refreshHead(true);
        SegmentNodeState state = head.get();
        SegmentNodeBuilder builder = state.builder();
        NodeBuilder checkpoints = builder.child("checkpoints");
        // drop checkpoints that expired or carry a missing/malformed timestamp
        for (String n : checkpoints.getChildNodeNames()) {
            NodeBuilder cp = checkpoints.getChildNode(n);
            PropertyState ts = cp.getProperty("timestamp");
            if (ts == null || ts.getType() != LONG || now > ts.getValue(LONG)) {
                cp.remove();
            }
        }
        NodeBuilder cp = checkpoints.child(name);
        // clamp the expiry to Long.MAX_VALUE to avoid overflow for huge lifetimes
        if (Long.MAX_VALUE - now > lifetime) {
            cp.setProperty("timestamp", now + lifetime);
        } else {
            cp.setProperty("timestamp", Long.MAX_VALUE);
        }
        cp.setProperty("created", now);
        NodeBuilder props = cp.setChildNode("properties");
        for (Entry<String, String> p : properties.entrySet()) {
            props.setProperty(p.getKey(), p.getValue());
        }
        // snapshot the current root under the checkpoint
        cp.setChildNode(ROOT, state.getChildNode(ROOT));
        SegmentNodeState newState = builder.getNodeState();
        if (revisions.setHead(state.getRecordId(), newState.getRecordId())) {
            refreshHead(false);
            return true;
        } else {
            return false;
        }
    }
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.quickFix;
import com.intellij.codeInsight.daemon.QuickFixBundle;
import com.intellij.codeInsight.daemon.impl.quickfix.CreateClassKind;
import com.intellij.codeInsight.daemon.impl.quickfix.CreateFromUsageUtils;
import com.intellij.codeInspection.LocalQuickFixAndIntentionActionOnPsiElement;
import com.intellij.ide.util.DirectoryChooserUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.JavaProjectRootsUtil;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.ClassKind;
import com.intellij.psi.util.CreateClassUtil;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author peter
*/
public class CreateClassOrPackageFix extends LocalQuickFixAndIntentionActionOnPsiElement {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.daemon.quickFix.CreateClassOrPackageFix");
// Candidate target directories (writable and in scope) for the new class/package.
private final List<? extends PsiDirectory> myWritableDirectoryList;
// Name shown to the user in the intention text.
private final String myPresentation;
// Null when this fix creates a package rather than a class.
@Nullable private final ClassKind myClassKind;
@Nullable private final String mySuperClass;
// The unresolved ("red") suffix of the reference that needs to be created.
private final String myRedPart;
// Optional custom file template for the created class; null means the default.
@Nullable private final String myTemplateName;
/**
 * Creates a fix for the unresolved (red) tail of {@code qualifiedName}, or returns
 * {@code null} when no writable target directory exists.
 *
 * @param basePackage  resolved prefix of the name; when non-null only the remainder is created
 * @param kind         null to create a package instead of a class
 * @param templateName optional custom file template for the created class
 */
@Nullable
public static CreateClassOrPackageFix createFix(@NotNull final String qualifiedName,
                                                @NotNull final GlobalSearchScope scope,
                                                @NotNull final PsiElement context,
                                                @Nullable final PsiPackage basePackage,
                                                @Nullable ClassKind kind,
                                                @Nullable String superClass,
                                                @Nullable String templateName) {
    final List<PsiDirectory> directories = getWritableDirectoryListDefault(basePackage, scope, context.getManager());
    if (directories.isEmpty()) {
        return null;
    }
    // the "red part" is the unresolved suffix after the resolved base-package prefix
    final String redPart = basePackage == null ? qualifiedName : qualifiedName.substring(basePackage.getQualifiedName().length() + 1);
    final int dot = redPart.indexOf('.');
    // a dotted red part means intermediate subdirectories must be created first
    final boolean fixPath = dot >= 0;
    final String firstRedName = fixPath ? redPart.substring(0, dot) : redPart;
    // keep only directories where the first missing segment can actually be created
    directories.removeIf(directory -> !checkCreateClassOrPackage(kind != null && !fixPath, directory, firstRedName));
    return new CreateClassOrPackageFix(directories,
                                       context,
                                       fixPath ? qualifiedName : redPart,
                                       redPart,
                                       kind,
                                       superClass,
                                       templateName);
}
/** Convenience overload using the context's resolve scope and no base package or template. */
@Nullable
public static CreateClassOrPackageFix createFix(@NotNull final String qualifiedName,
                                                @NotNull final PsiElement context,
                                                @Nullable ClassKind kind,
                                                String superClass) {
    return createFix(qualifiedName, context.getResolveScope(), context, null, kind, superClass, null);
}
// Instances are built via the createFix factory methods, which pre-filter
// the writable directory list and compute the red part.
private CreateClassOrPackageFix(@NotNull List<? extends PsiDirectory> writableDirectoryList,
                                @NotNull PsiElement context,
                                @NotNull String presentation,
                                @NotNull String redPart,
                                @Nullable ClassKind kind,
                                @Nullable String superClass,
                                @Nullable final String templateName) {
    super(context);
    myRedPart = redPart;
    myTemplateName = templateName;
    myWritableDirectoryList = writableDirectoryList;
    myClassKind = kind;
    mySuperClass = superClass;
    myPresentation = presentation;
}
/** Intention text: "create interface/class/package" depending on what this fix creates. */
@Override
@NotNull
public String getText() {
    final String key;
    if (myClassKind == ClassKind.INTERFACE) {
        key = "create.interface.text";
    }
    else if (myClassKind != null) {
        key = "create.class.text";
    }
    else {
        key = "create.package.text";
    }
    return QuickFixBundle.message(key, myPresentation);
}
@Override
@NotNull
public String getFamilyName() {
    // each fix is its own family: the family name equals the concrete text
    return getText();
}
/**
 * Entry point of the quick fix: picks a target directory (possibly asking the
 * user) and performs the creation inside a write action.
 */
@Override
public void invoke(@NotNull final Project project,
                   @NotNull final PsiFile file,
                   @Nullable("is null when called from inspection") Editor editor,
                   @NotNull final PsiElement startElement,
                   @NotNull PsiElement endElement) {
    if (isAvailable(project, null, file)) {
        PsiDirectory directory = chooseDirectory(project, file);
        if (directory == null) return;
        WriteAction.run(() -> doCreate(directory, startElement));
    }
}
/**
 * Returns whether a class (or a subdirectory, for packages) with the given name
 * may be created in {@code directory}; the platform signals rejection by
 * throwing {@link IncorrectOperationException} from the check calls.
 */
private static boolean checkCreateClassOrPackage(final boolean createJavaClass, final PsiDirectory directory, final String name) {
    try {
        if (createJavaClass) {
            JavaDirectoryService.getInstance().checkCreateClass(directory, name);
        } else {
            directory.checkCreateSubdirectory(name);
        }
        return true;
    } catch (IncorrectOperationException ignored) {
        // the name/location was rejected; this directory is not a valid target
        return false;
    }
}
/**
 * Picks the directory to create into: prefers a candidate from the same module
 * as {@code file}, and shows a chooser dialog when several candidates exist
 * (except in unit-test mode, where the preferred candidate is returned silently).
 */
@Nullable
private PsiDirectory chooseDirectory(final Project project, final PsiFile file) {
    PsiDirectory preferredDirectory = myWritableDirectoryList.isEmpty() ? null : myWritableDirectoryList.get(0);
    final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(project).getFileIndex();
    final VirtualFile virtualFile = file.getVirtualFile();
    assert virtualFile != null;
    final Module moduleForFile = fileIndex.getModuleForFile(virtualFile);
    if (myWritableDirectoryList.size() > 1 && !ApplicationManager.getApplication().isUnitTestMode()) {
        if (moduleForFile != null) {
            // prefer a directory belonging to the same module as the current file
            for (PsiDirectory directory : myWritableDirectoryList) {
                if (fileIndex.getModuleForFile(directory.getVirtualFile()) == moduleForFile) {
                    preferredDirectory = directory;
                    break;
                }
            }
        }
        return DirectoryChooserUtil
            .chooseDirectory(myWritableDirectoryList.toArray(PsiDirectory.EMPTY_ARRAY),
                             preferredDirectory, project,
                             new HashMap<>());
    }
    return preferredDirectory;
}
/**
 * Creates the missing package chain and, when {@link #myClassKind} is set, the
 * final class/interface (navigating to it afterwards); otherwise the last path
 * segment is created as a subdirectory (package).
 */
private void doCreate(final PsiDirectory baseDirectory, PsiElement myContext) {
    final PsiManager manager = baseDirectory.getManager();
    PsiDirectory directory = baseDirectory;
    String lastName;
    // walk the dotted red part, creating intermediate directories;
    // the final token is handled by the class/subdirectory creation below
    for (StringTokenizer st = new StringTokenizer(myRedPart, "."); ;) {
        lastName = st.nextToken();
        if (st.hasMoreTokens()) {
            try {
                final PsiDirectory subdirectory = directory.findSubdirectory(lastName);
                directory = subdirectory != null ? subdirectory : directory.createSubdirectory(lastName);
            }
            catch (IncorrectOperationException e) {
                CreateFromUsageUtils.scheduleFileOrPackageCreationFailedMessageBox(e, lastName, directory, true);
                return;
            }
        }
        else {
            break;
        }
    }
    if (myClassKind != null) {
        PsiClass createdClass;
        if (myTemplateName != null) {
            createdClass = CreateClassUtil.createClassFromCustomTemplate(directory, null, lastName, myTemplateName);
        }
        else {
            createdClass = CreateFromUsageUtils
                .createClass(myClassKind == ClassKind.INTERFACE ? CreateClassKind.INTERFACE : CreateClassKind.CLASS, directory, lastName,
                             manager, myContext, null, mySuperClass);
        }
        if (createdClass != null) {
            // bring the freshly created class into the editor
            createdClass.navigate(true);
        }
    }
    else {
        try {
            directory.createSubdirectory(lastName);
        }
        catch (IncorrectOperationException e) {
            CreateFromUsageUtils.scheduleFileOrPackageCreationFailedMessageBox(e, lastName, directory, true);
        }
    }
}
@Override
public boolean startInWriteAction() {
    // a directory chooser dialog may be shown first; the actual creation wraps
    // itself in a write action (see invoke)
    return false;
}
/**
 * Collects writable directories in {@code scope} where the class/package could be
 * created: the directories of {@code context} when a base package is given,
 * otherwise all suitable destination source roots of the project. Generated-code
 * roots are excluded in the package case.
 */
private static List<PsiDirectory> getWritableDirectoryListDefault(@Nullable final PsiPackage context,
                                                                  final GlobalSearchScope scope,
                                                                  final PsiManager psiManager) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Getting writable directory list for package '" + (context == null ? null : context.getQualifiedName()) + "', scope=" + scope);
    }
    final List<PsiDirectory> writableDirectoryList = new ArrayList<>();
    if (context != null) {
        for (PsiDirectory directory : context.getDirectories()) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Package directory: " + directory);
            }
            VirtualFile virtualFile = directory.getVirtualFile();
            if (directory.isWritable() && scope.contains(virtualFile)
                && !JavaProjectRootsUtil.isInGeneratedCode(virtualFile, psiManager.getProject())) {
                writableDirectoryList.add(directory);
            }
        }
    }
    else {
        for (VirtualFile root : JavaProjectRootsUtil.getSuitableDestinationSourceRoots(psiManager.getProject())) {
            PsiDirectory directory = psiManager.findDirectory(root);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Root: " + root + ", directory: " + directory);
            }
            if (directory != null && directory.isWritable() && scope.contains(directory.getVirtualFile())) {
                writableDirectoryList.add(directory);
            }
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Result " + writableDirectoryList);
    }
    return writableDirectoryList;
}
}
| |
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.messaging.support;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.springframework.messaging.Message;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.util.ObjectUtils;
/**
* An extension of {@link MessageHeaderAccessor} that also stores and provides read/write
* access to message headers from an external source -- e.g. a Spring {@link Message}
* created to represent a STOMP message received from a STOMP client or message broker.
* Native message headers are kept in a {@code Map<String, List<String>>} under the key
* {@link #NATIVE_HEADERS}.
*
* <p>This class is not intended for direct use but is rather expected to be used
* indirectly through protocol-specific sub-classes such as
* {@link org.springframework.messaging.simp.stomp.StompHeaderAccessor StompHeaderAccessor}.
* Such sub-classes may provide factory methods to translate message headers from
* an external messaging source (e.g. STOMP) to Spring {@link Message} headers and
* reversely to translate Spring {@link Message} headers to a message to send to an
* external source.
*
* @author Rossen Stoyanchev
* @since 4.0
*/
public class NativeMessageHeaderAccessor extends MessageHeaderAccessor {

	/** The header name under which the native-headers sub-map is stored. */
	public static final String NATIVE_HEADERS = "nativeHeaders";

	/**
	 * A protected constructor to create new headers.
	 */
	protected NativeMessageHeaderAccessor() {
		this((Map<String, List<String>>) null);
	}

	/**
	 * A protected constructor to create new headers.
	 * @param nativeHeaders native headers to create the message with (may be {@code null})
	 */
	protected NativeMessageHeaderAccessor(Map<String, List<String>> nativeHeaders) {
		if (!CollectionUtils.isEmpty(nativeHeaders)) {
			// Copy into a fresh MultiValueMap so later mutation does not affect the caller's map.
			setHeader(NATIVE_HEADERS, new LinkedMultiValueMap<>(nativeHeaders));
		}
	}

	/**
	 * A protected constructor accepting the headers of an existing message to copy.
	 * @param message the message whose headers are copied (may be {@code null})
	 */
	protected NativeMessageHeaderAccessor(Message<?> message) {
		super(message);
		if (message != null) {
			@SuppressWarnings("unchecked")
			Map<String, List<String>> map = (Map<String, List<String>>) getHeader(NATIVE_HEADERS);
			if (map != null) {
				// Force removal since setHeader checks for equality
				removeHeader(NATIVE_HEADERS);
				// Re-set with a copy so this accessor owns an independent native-header map.
				setHeader(NATIVE_HEADERS, new LinkedMultiValueMap<>(map));
			}
		}
	}

	/**
	 * Return the underlying native-header map, or {@code null} if none is present.
	 */
	@SuppressWarnings("unchecked")
	private Map<String, List<String>> getNativeHeaders() {
		return (Map<String, List<String>>) getHeader(NATIVE_HEADERS);
	}

	/**
	 * Return a copy of the native header values or an empty map.
	 */
	public Map<String, List<String>> toNativeHeaderMap() {
		Map<String, List<String>> map = getNativeHeaders();
		return (map != null ? new LinkedMultiValueMap<>(map) : Collections.emptyMap());
	}

	@Override
	public void setImmutable() {
		if (isMutable()) {
			Map<String, List<String>> map = getNativeHeaders();
			if (map != null) {
				// Force removal since setHeader checks for equality
				removeHeader(NATIVE_HEADERS);
				// Freeze the native-header map together with the rest of the headers.
				setHeader(NATIVE_HEADERS, Collections.<String, List<String>>unmodifiableMap(map));
			}
			super.setImmutable();
		}
	}

	/**
	 * Whether the native header map contains the given header name.
	 * @param headerName the name of the native header to check
	 */
	public boolean containsNativeHeader(String headerName) {
		Map<String, List<String>> map = getNativeHeaders();
		return (map != null && map.containsKey(headerName));
	}

	/**
	 * Return all values for the specified native header, or {@code null} if none.
	 * @param headerName the name of the native header
	 */
	public List<String> getNativeHeader(String headerName) {
		Map<String, List<String>> map = getNativeHeaders();
		return (map != null ? map.get(headerName) : null);
	}

	/**
	 * Return the first value for the specified native header, or {@code null}
	 * if the header is absent or has no values.
	 * @param headerName the name of the native header
	 */
	public String getFirstNativeHeader(String headerName) {
		Map<String, List<String>> map = getNativeHeaders();
		if (map != null) {
			List<String> values = map.get(headerName);
			// Guard against an empty value list: values.get(0) would otherwise
			// throw IndexOutOfBoundsException.
			if (values != null && !values.isEmpty()) {
				return values.get(0);
			}
		}
		return null;
	}

	/**
	 * Set the specified native header value replacing existing values.
	 * A {@code null} value removes the header entirely.
	 * @throws IllegalStateException if the underlying headers are already immutable
	 */
	public void setNativeHeader(String name, String value) {
		Assert.state(isMutable(), "Already immutable");
		Map<String, List<String>> map = getNativeHeaders();
		if (value == null) {
			if (map != null && map.get(name) != null) {
				setModified(true);
				map.remove(name);
			}
			return;
		}
		if (map == null) {
			map = new LinkedMultiValueMap<>(4);
			setHeader(NATIVE_HEADERS, map);
		}
		List<String> values = new LinkedList<>();
		values.add(value);
		// NOTE(review): this compares against getHeader(name) (a top-level message
		// header), not map.get(name) (the native header) — confirm this is intended.
		if (!ObjectUtils.nullSafeEquals(values, getHeader(name))) {
			setModified(true);
			map.put(name, values);
		}
	}

	/**
	 * Add the specified native header value to existing values.
	 * A {@code null} value is ignored.
	 * @throws IllegalStateException if the underlying headers are already immutable
	 */
	public void addNativeHeader(String name, String value) {
		Assert.state(isMutable(), "Already immutable");
		if (value == null) {
			return;
		}
		Map<String, List<String>> nativeHeaders = getNativeHeaders();
		if (nativeHeaders == null) {
			nativeHeaders = new LinkedMultiValueMap<>(4);
			setHeader(NATIVE_HEADERS, nativeHeaders);
		}
		List<String> values = nativeHeaders.get(name);
		if (values == null) {
			values = new LinkedList<>();
			nativeHeaders.put(name, values);
		}
		values.add(value);
		setModified(true);
	}

	/**
	 * Add all values from the given map to the native headers (may be {@code null}).
	 */
	public void addNativeHeaders(MultiValueMap<String, String> headers) {
		if (headers == null) {
			return;
		}
		for (String header : headers.keySet()) {
			for (String value : headers.get(header)) {
				addNativeHeader(header, value);
			}
		}
	}

	/**
	 * Remove the specified native header and return its previous values,
	 * or {@code null} if the header was not present.
	 * @throws IllegalStateException if the underlying headers are already immutable
	 */
	public List<String> removeNativeHeader(String name) {
		Assert.state(isMutable(), "Already immutable");
		Map<String, List<String>> nativeHeaders = getNativeHeaders();
		if (nativeHeaders == null) {
			return null;
		}
		return nativeHeaders.remove(name);
	}

	/**
	 * Static variant for reading the first value of a native header directly
	 * from a plain header map; returns {@code null} if absent or empty.
	 * @param headerName the name of the native header
	 * @param headers the full message-header map containing {@link #NATIVE_HEADERS}
	 */
	@SuppressWarnings("unchecked")
	public static String getFirstNativeHeader(String headerName, Map<String, Object> headers) {
		Map<String, List<String>> map = (Map<String, List<String>>) headers.get(NATIVE_HEADERS);
		if (map != null) {
			List<String> values = map.get(headerName);
			// Same empty-list guard as the instance method.
			if (values != null && !values.isEmpty()) {
				return values.get(0);
			}
		}
		return null;
	}
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.dalvik;
import com.facebook.buck.android.APKModule;
import com.facebook.buck.android.APKModuleGraph;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.jvm.java.classes.AbstractFileLike;
import com.facebook.buck.jvm.java.classes.ClasspathTraversal;
import com.facebook.buck.jvm.java.classes.ClasspathTraverser;
import com.facebook.buck.jvm.java.classes.DefaultClasspathTraverser;
import com.facebook.buck.jvm.java.classes.FileLike;
import com.facebook.buck.log.Logger;
import com.facebook.buck.util.HumanReadableException;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.io.ByteStreams;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
/**
* Implementation of {@link ZipSplitter} that uses estimates from {@link DalvikStatsTool} to
* determine how many classes to pack into a dex.
*
* <p>It does three passes through the .class files:
*
* <ul>
* <li>During the first pass, it uses the {@code requiredInPrimaryZip} predicate to filter the set
* of classes that <em>must</em> be included in the primary dex. These classes are added to
* the primary zip.
* <li>During the second pass, it uses the {@code wantedInPrimaryZip} list to find classes that
* were not included in the first pass but that should still be in the primary zip for
* performance reasons, and adds them to the primary zip.
* <li>During the third pass, classes that were not matched during the earlier passes are added to
* zips as space allows. This is a simple, greedy algorithm.
* </ul>
*/
public class DalvikAwareZipSplitter implements ZipSplitter {

  private static final Logger LOG = Logger.get(DalvikAwareZipSplitter.class);

  private final ProjectFilesystem filesystem;
  private final Set<Path> inFiles;
  private final Path outPrimary;
  // Classes matching this predicate MUST go into the primary dex.
  private final Predicate<String> requiredInPrimaryZip;
  // Classes that SHOULD go into the primary dex if space allows.
  private final Set<String> wantedInPrimaryZip;
  private final Path reportDir;
  private final long linearAllocLimit;
  private final DalvikStatsCache dalvikStatsCache;
  private final DexSplitStrategy dexSplitStrategy;
  private final ImmutableSet<String> secondaryHeadSet;
  private final ImmutableSet<String> secondaryTailSet;
  // Maps a classpath entry (relative path without ".class") to the dex store(s) claiming it.
  @Nullable private final ImmutableMultimap<String, APKModule> classPathToDexStore;
  private final MySecondaryDexHelper secondaryDexWriter;
  private final Map<APKModule, MySecondaryDexHelper> additionalDexWriters;
  private final APKModuleGraph apkModuleGraph;

  @Nullable private DalvikAwareOutputStreamHelper primaryOut;

  /**
   * @see ZipSplitterFactory#newInstance(ProjectFilesystem, Set, Path, Path, String, Path,
   *     Predicate, ImmutableSet, ImmutableSet, ImmutableMultimap, APKModuleGraph,
   *     com.facebook.buck.dalvik.ZipSplitter.DexSplitStrategy,
   *     com.facebook.buck.dalvik.ZipSplitter.CanaryStrategy, Path)
   */
  private DalvikAwareZipSplitter(
      ProjectFilesystem filesystem,
      Set<Path> inFiles,
      Path outPrimary,
      Path outSecondaryDir,
      String secondaryPattern,
      Path outDexStoresDir,
      long linearAllocLimit,
      Predicate<String> requiredInPrimaryZip,
      Set<String> wantedInPrimaryZip,
      ImmutableSet<String> secondaryHeadSet,
      ImmutableSet<String> secondaryTailSet,
      ImmutableMultimap<APKModule, String> additionalDexStoreSets,
      APKModuleGraph apkModuleGraph,
      DexSplitStrategy dexSplitStrategy,
      ZipSplitter.CanaryStrategy canaryStrategy,
      Path reportDir) {
    if (linearAllocLimit <= 0) {
      throw new HumanReadableException("linear_alloc_hard_limit must be greater than zero.");
    }
    this.filesystem = filesystem;
    this.inFiles = ImmutableSet.copyOf(inFiles);
    this.outPrimary = outPrimary;
    this.secondaryDexWriter =
        new MySecondaryDexHelper("secondary", outSecondaryDir, secondaryPattern, canaryStrategy);
    this.additionalDexWriters = new HashMap<>();
    this.requiredInPrimaryZip = requiredInPrimaryZip;
    this.wantedInPrimaryZip = ImmutableSet.copyOf(wantedInPrimaryZip);
    this.secondaryHeadSet = secondaryHeadSet;
    this.secondaryTailSet = secondaryTailSet;
    // Invert the module->classes multimap so lookups by classpath are possible.
    this.classPathToDexStore = additionalDexStoreSets.inverse();
    // Each non-root dex store gets its own writer, always with canary classes included.
    for (APKModule dexStore : additionalDexStoreSets.keySet()) {
      if (!dexStore.equals(apkModuleGraph.getRootAPKModule())) {
        additionalDexWriters.put(
            dexStore,
            new MySecondaryDexHelper(
                dexStore.getCanaryClassName(),
                outDexStoresDir.resolve(dexStore.getName()),
                secondaryPattern,
                CanaryStrategy.INCLUDE_CANARIES));
      }
    }
    this.apkModuleGraph = apkModuleGraph;
    this.reportDir = reportDir;
    this.dexSplitStrategy = dexSplitStrategy;
    this.linearAllocLimit = linearAllocLimit;
    this.dalvikStatsCache = new DalvikStatsCache();
  }

  /** Static factory; see the private constructor for parameter semantics. */
  public static DalvikAwareZipSplitter splitZip(
      ProjectFilesystem filesystem,
      Set<Path> inFiles,
      Path outPrimary,
      Path outSecondaryDir,
      String secondaryPattern,
      Path outDexStoresDir,
      long linearAllocLimit,
      Predicate<String> requiredInPrimaryZip,
      Set<String> wantedInPrimaryZip,
      ImmutableSet<String> secondaryHeadSet,
      ImmutableSet<String> secondaryTailSet,
      ImmutableMultimap<APKModule, String> additionalDexStoreSets,
      APKModuleGraph apkModuleGraph,
      DexSplitStrategy dexSplitStrategy,
      ZipSplitter.CanaryStrategy canaryStrategy,
      Path reportDir) {
    return new DalvikAwareZipSplitter(
        filesystem,
        inFiles,
        outPrimary,
        outSecondaryDir,
        secondaryPattern,
        outDexStoresDir,
        linearAllocLimit,
        requiredInPrimaryZip,
        wantedInPrimaryZip,
        secondaryHeadSet,
        secondaryTailSet,
        additionalDexStoreSets,
        apkModuleGraph,
        dexSplitStrategy,
        canaryStrategy,
        reportDir);
  }

  /**
   * Runs the multi-pass split described in the class javadoc and returns, per APK module,
   * the secondary/dex-store zip files that were written.
   *
   * @throws IOException if any traversal or zip write fails
   */
  @Override
  public ImmutableMultimap<APKModule, Path> execute() throws IOException {
    ClasspathTraverser classpathTraverser = new DefaultClasspathTraverser();
    final Set<String> secondaryTail = new HashSet<String>();

    // Start out by writing the primary zip and recording which entries were added to it.
    primaryOut = newZipOutput(outPrimary);
    secondaryDexWriter.reset();

    final ImmutableMap.Builder<String, FileLike> entriesBuilder = ImmutableMap.builder();
    // Probed with contains() for every class during the second traversal, so use a
    // hash set rather than a list to avoid accidental O(n^2) scanning.
    final Set<String> additionalDexStoreEntries = new HashSet<>();

    // Iterate over all of the inFiles and add all entries that match the requiredInPrimaryZip
    // predicate.
    LOG.debug("Traversing classpath for primary zip");

    classpathTraverser.traverse(
        new ClasspathTraversal(inFiles, filesystem) {
          @Override
          public void visit(FileLike entry) throws IOException {
            LOG.debug("Visiting " + entry.getRelativePath());

            String relativePath = entry.getRelativePath();
            if (!relativePath.endsWith(".class")) {
              // We don't need resources in dex jars, so just drop them.
              return;
            }
            String classPath = relativePath.replaceAll("\\.class$", "");

            Preconditions.checkNotNull(primaryOut);
            Preconditions.checkNotNull(classPathToDexStore);

            if (requiredInPrimaryZip.apply(relativePath)) {
              primaryOut.putEntry(entry);
            } else if (wantedInPrimaryZip.contains(relativePath)
                || (secondaryHeadSet != null && secondaryHeadSet.contains(relativePath))) {
              // Buffer for a later pass; wanted/head entries are placed after required ones.
              entriesBuilder.put(relativePath, new BufferedFileLike(entry));
            } else if (secondaryTailSet != null && secondaryTailSet.contains(relativePath)) {
              entriesBuilder.put(relativePath, new BufferedFileLike(entry));
              secondaryTail.add(relativePath);
            } else {
              // Classes claimed by exactly one non-root dex store are written there directly.
              ImmutableCollection<APKModule> containingModule = classPathToDexStore.get(classPath);
              if (!containingModule.isEmpty()) {
                if (containingModule.size() > 1) {
                  throw new IllegalStateException(
                      String.format(
                          "classpath %s is contained in multiple dex stores: %s",
                          classPath, classPathToDexStore.get(classPath).asList().toString()));
                }
                APKModule dexStore = containingModule.iterator().next();
                if (!dexStore.equals(apkModuleGraph.getRootAPKModule())) {
                  MySecondaryDexHelper dexHelper = additionalDexWriters.get(dexStore);
                  Preconditions.checkNotNull(dexHelper);
                  dexHelper.getOutputToWriteTo(entry).putEntry(entry);
                  additionalDexStoreEntries.add(relativePath);
                }
              }
            }
          }
        });

    // Put as many of the items wanted in the primary dex as we can into the primary dex.
    ImmutableMap<String, FileLike> entries = entriesBuilder.build();
    for (String wanted : wantedInPrimaryZip) {
      FileLike entry = entries.get(wanted);
      if ((entry != null) && !primaryOut.containsEntry(entry) && primaryOut.canPutEntry(entry)) {
        primaryOut.putEntry(entry);
      }
    }

    // Head entries that did not fit in the primary dex lead the secondary dexes.
    if (secondaryHeadSet != null) {
      for (String head : secondaryHeadSet) {
        FileLike headEntry = entries.get(head);
        if ((headEntry != null) && !primaryOut.containsEntry(headEntry)) {
          secondaryDexWriter.getOutputToWriteTo(headEntry).putEntry(headEntry);
        }
      }
    }

    LOG.debug("Traversing classpath for secondary zip");

    // Now that all of the required entries have been added to the primary zip, fill the rest of
    // the zip up with the remaining entries.
    classpathTraverser.traverse(
        new ClasspathTraversal(inFiles, filesystem) {
          @Override
          public void visit(FileLike entry) throws IOException {
            Preconditions.checkNotNull(primaryOut);
            String relativePath = entry.getRelativePath();

            // skip if it is the primary dex, is part of a modular dex store, or is not a class file
            if (primaryOut.containsEntry(entry)
                || additionalDexStoreEntries.contains(relativePath)) {
              return;
            }

            LOG.debug("Visiting " + entry.getRelativePath());

            // Even if we have started writing a secondary dex, we still check if there is any
            // leftover room in the primary dex for the current entry in the traversal.
            if (dexSplitStrategy == DexSplitStrategy.MAXIMIZE_PRIMARY_DEX_SIZE
                && primaryOut.canPutEntry(entry)) {
              primaryOut.putEntry(entry);
            } else {
              // Head entries were already written; tail entries are deferred to the final pass.
              if (secondaryHeadSet != null && secondaryHeadSet.contains(relativePath)) {
                return;
              }
              if (secondaryTail.contains(relativePath)) {
                return;
              }
              secondaryDexWriter.getOutputToWriteTo(entry).putEntry(entry);
            }
          }
        });

    // Tail entries close out the secondary dexes.
    if (secondaryTailSet != null) {
      for (String tail : secondaryTailSet) {
        FileLike tailEntry = entries.get(tail);
        if ((tailEntry != null)
            && !primaryOut.containsEntry(tailEntry)
            && secondaryTail.contains(tail)) {
          secondaryDexWriter.getOutputToWriteTo(tailEntry).putEntry(tailEntry);
        }
      }
    }
    primaryOut.close();
    secondaryDexWriter.close();

    ImmutableMultimap.Builder<APKModule, Path> outputFilesBuilder = ImmutableMultimap.builder();
    APKModule secondaryDexStore = apkModuleGraph.getRootAPKModule();
    outputFilesBuilder.putAll(secondaryDexStore, secondaryDexWriter.getFiles());
    for (Map.Entry<APKModule, MySecondaryDexHelper> entry : additionalDexWriters.entrySet()) {
      if (!entry.getKey().equals(secondaryDexStore)) {
        entry.getValue().close();
        outputFilesBuilder.putAll(entry.getKey(), entry.getValue().getFiles());
      }
    }
    return outputFilesBuilder.build();
  }

  /** Creates a linear-alloc-aware zip writer for the given output path. */
  private DalvikAwareOutputStreamHelper newZipOutput(Path file) throws IOException {
    return new DalvikAwareOutputStreamHelper(file, linearAllocLimit, reportDir, dalvikStatsCache);
  }

  /** Secondary-dex helper whose outputs are linear-alloc-aware zip streams. */
  private class MySecondaryDexHelper extends SecondaryDexHelper<DalvikAwareOutputStreamHelper> {

    MySecondaryDexHelper(
        String storeName,
        Path outSecondaryDir,
        String secondaryPattern,
        CanaryStrategy canaryStrategy) {
      super(storeName, outSecondaryDir, secondaryPattern, canaryStrategy);
    }

    @Override
    protected DalvikAwareOutputStreamHelper newZipOutput(Path file) throws IOException {
      return DalvikAwareZipSplitter.this.newZipOutput(file);
    }
  }

  /**
   * A {@link FileLike} whose contents are eagerly read into memory, so the entry can be
   * written after the backing traversal has moved on.
   */
  private static class BufferedFileLike extends AbstractFileLike {
    private final Path container;
    private final String relativePath;
    private final byte[] contents;

    public BufferedFileLike(FileLike original) throws IOException {
      this.container = original.getContainer();
      this.relativePath = original.getRelativePath();

      try (InputStream stream = original.getInput()) {
        contents = ByteStreams.toByteArray(stream);
      }
    }

    @Override
    public Path getContainer() {
      return container;
    }

    @Override
    public String getRelativePath() {
      return relativePath;
    }

    @Override
    public long getSize() {
      return contents.length;
    }

    @Override
    public InputStream getInput() throws IOException {
      return new ByteArrayInputStream(contents);
    }
  }
}
| |
package com.formulasearchengine.mathmltools.querygenerator;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import com.formulasearchengine.mathmltools.xml.NonWhitespaceNodeList;
import com.formulasearchengine.mathmltools.helper.XMLHelper;
import com.google.common.collect.Lists;
/**
* Converts MathML queries into XQueries, given a namespace, a xquery/xpath to the root elements, and a xquery return format.
* The variable $x always represents a hit, so you can refer to $x in the return format as the result node.
* If addQvarMap is turned on, the function local:qvarMap($parentNode) always represents a map of qvars to their
* respective formula ID, so you can refer to local:qvarMap($parentNode) in the footer to return qvar services.
* If findRootApply is turned on, the xquery takes on a recursive format. The variable $rootApply represents the root
* apply node and the variable $depth represents the depth of the matched node. The root apply node has a depth of 0.
* <br/>
* Translated from http://git.wikimedia.org/blob/mediawiki%2Fextensions%2FMathSearch.git/31a80ae48d1aaa50da9103cea2e45a8dc2204b39/XQueryGenerator.php
*
* @author Moritz Schubotz on 9/3/14.
*/
@SuppressWarnings("WeakerAccess")
public class QVarXQueryGenerator extends BasicXQueryGenerator {
    private boolean findRootApply = false;
    private boolean addQvarMap = false;
    // XQuery "where" fragment constraining equal qvars to match identical nodes.
    private String qvarConstraint = "";
    // XQuery declaration of local:qvarMap($x), mapping qvar names to xml:id values.
    private String qvarMapVariable = "";
    // Maps each qvar name to the relative XPaths of every node where it occurred.
    private Map<String, ArrayList<String>> qvar = new LinkedHashMap<>();

    /**
     * Convenience constructor initializes the QueryGenerator with a document
     *
     * @param document The document whose root element is extracted
     * @deprecated Use setMainElement(XMLHelper.getMainElement(document)) explicitly
     */
    @Deprecated
    public QVarXQueryGenerator(Document document) {
        setMainElement(XMLHelper.getMainElement(document));
    }

    public QVarXQueryGenerator() {
    }

    /**
     * Create a {@see QVarXQueryGenerator} with a default configuration.
     *
     * @return returns {@see QVarXQueryGenerator}
     */
    public static QVarXQueryGenerator getDefaultGenerator() {
        return (QVarXQueryGenerator) new QVarXQueryGenerator()
                .addHeader(XQueryGenerator.DEFAULT_NAMESPACE)
                .addHeader(XQUERY_NAMESPACE_ELEMENT)
                .addHeader(FN_PATH_FROM_ROOT)
                .addHeader("<result> {")
                .setPathToRoot(".")
                .setReturnFormat(XQUERY_FOOTER)
                .addFooter("}\n</result>");
    }

    /** Adds the default namespace header plus an empty separator header. */
    public void addDefaultHeader() {
        this.addHeader(XQueryGenerator.DEFAULT_NAMESPACE).addHeader("");
    }

    /**
     * Regenerates all constraints; resets the qvar map first so repeated calls
     * do not accumulate stale XPaths.
     */
    protected void generateConstraints() {
        qvar = new LinkedHashMap<>();
        super.generateConstraints();
        generateQvarConstraints();
    }

    /**
     * Uses the qvar map to generate a XQuery string containing qvar constraints,
     * and the qvar map variable which maps qvar names to their respective formula ID's in the result.
     */
    private void generateQvarConstraints() {
        final StringBuilder qvarConstrBuilder = new StringBuilder();
        final StringBuilder qvarMapStrBuilder = new StringBuilder();
        final Iterator<Map.Entry<String, ArrayList<String>>> entryIterator = qvar.entrySet().iterator();
        if (entryIterator.hasNext()) {
            qvarMapStrBuilder.append("declare function local:qvarMap($x) {\n map {");
            while (entryIterator.hasNext()) {
                final Map.Entry<String, ArrayList<String>> currentEntry = entryIterator.next();
                final Iterator<String> valueIterator = currentEntry.getValue().iterator();
                final String firstValue = valueIterator.next();
                qvarMapStrBuilder.append('"').append(currentEntry.getKey()).append('"')
                        .append(" : (data($x").append(firstValue).append("/@xml:id)");
                //check if there are additional values that we need to constrain
                if (valueIterator.hasNext()) {
                    if (qvarConstrBuilder.length() > 0) {
                        //only add beginning and if it's an additional constraint in the aggregate qvar string
                        qvarConstrBuilder.append("\n and ");
                    }
                    while (valueIterator.hasNext()) {
                        //process second value onwards
                        final String currentValue = valueIterator.next();
                        // Fixed: was "/@xml-id", which is a different attribute than the
                        // "/@xml:id" used for the first value and would break the map.
                        qvarMapStrBuilder.append(",data($x").append(currentValue).append("/@xml:id)");
                        //These constraints specify that the same qvars must refer to the same nodes,
                        //using the XQuery "=" equality
                        //This is equality based on: same text, same node names, and same children nodes
                        qvarConstrBuilder.append("$x").append(firstValue).append(" = $x").append(currentValue);
                        if (valueIterator.hasNext()) {
                            qvarConstrBuilder.append(" and ");
                        }
                    }
                }
                qvarMapStrBuilder.append(')');
                if (entryIterator.hasNext()) {
                    qvarMapStrBuilder.append(',');
                }
            }
            qvarMapStrBuilder.append("}\n};");
        }
        qvarMapVariable = qvarMapStrBuilder.toString();
        qvarConstraint = qvarConstrBuilder.toString();
    }

    /**
     * Builds the XQuery as a string. Uses the default format of looping through all apply nodes.
     *
     * @return XQuery as string
     */
    protected String getDefaultString() {
        final StringBuilder output = new StringBuilder();
        // append headers e.g. namespaces and functions
        output.append(String.join("\n", getHeaders()));
        if (!qvarMapVariable.isEmpty() && addQvarMap) {
            output.append(qvarMapVariable).append("\n");
        }
        output.append("for $m in ").append(getPathToRoot()).append(" return\n")
                .append("for $x in $m//*:").append(NonWhitespaceNodeList.getFirstChild(getMainElement()).getLocalName())
                .append("\n").append(getExactMatchXQuery());
        if (!getLengthConstraint().isEmpty() || !qvarConstraint.isEmpty()) {
            output.append("\n").append("where").append("\n");
            if (getLengthConstraint().isEmpty()) {
                output.append(qvarConstraint);
            } else {
                output.append(getLengthConstraint())
                        .append(qvarConstraint.isEmpty() ? "" : "\n and ").append(qvarConstraint);
            }
        }
        // append return format
        output.append("\n\n").append("return").append("\n").append(getReturnFormat());
        // append footers e.g. closing fences from headers
        output.append(String.join("\n", getFooters()));
        return output.toString();
    }

    /**
     * Returns the qvar map, generating the constraints lazily on first access.
     */
    public Map<String, ArrayList<String>> getQvar() {
        if (qvar.isEmpty()) {
            generateConstraints();
        }
        return qvar;
    }

    /**
     * Builds the XQuery as a string. Uses the recursive format of recursively looping through the documents.
     * This enables the $depth and the $rootApply variables.
     *
     * @return XQuery as string
     */
    private String getRecursiveString() {
        final StringBuilder output = new StringBuilder();
        // append headers e.g. namespaces and functions
        output.append(String.join("\n", getHeaders()));
        if (!qvarMapVariable.isEmpty() && addQvarMap) {
            output.append(qvarMapVariable).append("\n");
        }
        output.append("\ndeclare function local:compareApply($rootApply, $depth, $x ) {\n")
                .append("(for $child in $x/* return local:compareApply(\n")
                .append("if (empty($rootApply) and $child/name() = \"apply\") then $child else $rootApply,\n")
                .append("if (empty($rootApply) and $child/name() = \"apply\") then 0 else $depth+1, $child),\n")
                .append("if ($x/name() = \"apply\"\n")
                .append(" and $x").append(getExactMatchXQuery()).append("\n");
        if (!getLengthConstraint().isEmpty()) {
            output.append(" and ").append(getLengthConstraint()).append("\n");
        }
        if (!qvarConstraint.isEmpty()) {
            output.append(" and ").append(qvarConstraint).append("\n");
        }
        output.append(" ) then\n")
                .append(getReturnFormat()).append("\n")
                .append("else ()\n")
                .append(")};\n\n")
                .append("for $m in ").append(getPathToRoot()).append(" return\n")
                .append("local:compareApply((), 0, $m)");
        return output.toString();
    }

    /**
     * Records mws:qvar elements into the qvar map; all other elements are left
     * to the default handling.
     *
     * @return true if the child was an mws:qvar and has been consumed
     */
    protected boolean handleSpecialElements(Node child, Integer childElementIndex) {
        if (!"mws:qvar".equals(child.getNodeName())) {
            return false;
        }
        //If qvar, add to qvar map
        String qvarName = child.getTextContent();
        if (qvarName.isEmpty()) {
            // Fall back to the "name" attribute when the element has no text content.
            qvarName = child.getAttributes().getNamedItem("name").getTextContent();
        }
        if (qvar.containsKey(qvarName)) {
            qvar.get(qvarName).add(getRelativeXPath() + "/*[" + childElementIndex + "]");
        } else {
            qvar.put(qvarName, Lists.newArrayList(getRelativeXPath() + "/*[" + childElementIndex + "]"));
        }
        return true;
    }

    public boolean isAddQvarMap() {
        return addQvarMap;
    }

    /**
     * Determines whether or not the $q variable is generated with a map of qvar names to their respective xml:id
     */
    public QVarXQueryGenerator setAddQvarMap(boolean addQvarMap) {
        this.addQvarMap = addQvarMap;
        return this;
    }

    /**
     * Resets the current xQuery expression and sets a new main element.
     *
     * @param mainElement main node of a new document
     */
    public void setMainElement(Node mainElement) {
        super.setMainElement(mainElement);
        qvar = new LinkedHashMap<>();
    }

    /**
     * Determines whether or not the $rootApply and the $depth variables are generated using recursion to find the root
     * node of the matched equation and the depth of the hit.
     */
    public QVarXQueryGenerator setFindRootApply(boolean findRootApply) {
        this.findRootApply = findRootApply;
        return this;
    }

    /**
     * Generates the constraints of the XQuery and then builds the XQuery and returns it as a string
     *
     * @return XQuery as string. Returns null if no main element set.
     */
    public String toString() {
        if (getMainElement() == null) {
            return null;
        }
        generateConstraints();
        return findRootApply ? getRecursiveString() : getDefaultString();
    }
}
| |
package de.kaymx.jhashids;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class JHashIds {
private static final String DEFAULT_ALPHABET = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890";
private static final String DEFAULT_SALT = "";
private static final String DEFAULT_SEPARATORS = "cfhistuCFHISTU";
private static final double SEP_DIV_THRESHOLD = 3.5f;
private static final int GUARD_DIV = 12;
private static final int MIN_ALPHABET_LENGTH = 16;
private final char[] usedSeparators;
private final int minHashLength;
private final char[] usedSalt;
private final char[] usedAlphabet;
private final char[] usedGuards;
private final Pattern guardPattern;
private final Pattern splitPattern;
/**
 * Builds the salted alphabet, separator set, and guard characters used by the
 * hashids-style encoder.
 *
 * @param salt          secret salt; {@code null}/empty falls back to {@link #DEFAULT_SALT}
 * @param minHashLength minimum length of generated hashes; {@code null} is treated as 0
 * @param alphabet      characters used for hashes; {@code null} falls back to
 *                      {@link #DEFAULT_ALPHABET}; must have at least
 *                      {@link #MIN_ALPHABET_LENGTH} unique characters and no spaces
 * @throws RuntimeException if the alphabet is too small or contains spaces
 */
public JHashIds(String salt, Integer minHashLength, String alphabet) {
    this.usedSalt = salt == null || salt.isEmpty()
            ? DEFAULT_SALT.toCharArray()
            : salt.toCharArray();
    // Guard against an NPE from auto-unboxing a null Integer.
    this.minHashLength = minHashLength != null ? minHashLength : 0;
    char[] prepUsedAlphabet = makeUniqueAlphabet(alphabet != null ? alphabet : DEFAULT_ALPHABET);
    char[] prepUsedSeparators = DEFAULT_SEPARATORS.toCharArray();
    if (prepUsedAlphabet.length < MIN_ALPHABET_LENGTH) {
        // Report the actual minimum instead of the placeholder "X".
        throw new RuntimeException(
                "error: alphabet must contain at least " + MIN_ALPHABET_LENGTH + " unique characters");
    }
    if (findCharInCharArray(prepUsedAlphabet, ' ') != -1) {
        throw new RuntimeException("error: alphabet cannot contain spaces");
    }
    // Separators must not also appear in the alphabet: mark overlaps with ' ' and strip them.
    for (int i = 0; i < prepUsedSeparators.length; ++i) {
        int pos = findCharInCharArray(prepUsedAlphabet, prepUsedSeparators[i]);
        if (pos == -1) {
            prepUsedSeparators[i] = ' ';
        } else {
            prepUsedAlphabet[pos] = ' ';
        }
    }
    prepUsedAlphabet = removeCharFromCharArray(prepUsedAlphabet, ' ');
    prepUsedSeparators = consistentShuffle(removeCharFromCharArray(prepUsedSeparators, ' '), usedSalt);
    // Rebalance only when no separators remain or the alphabet/separator ratio is too
    // large (matches the reference hashids algorithm). The previous "length > 0" test
    // entered this block for every non-empty separator set — wrongly shrinking it when
    // the ratio was fine — and divided by zero when the set was empty.
    if (prepUsedSeparators.length == 0
            || prepUsedAlphabet.length / prepUsedSeparators.length > SEP_DIV_THRESHOLD) {
        int separatorsLength = Math.max((int) Math.ceil(prepUsedAlphabet.length / SEP_DIV_THRESHOLD), 2);
        if (separatorsLength > prepUsedSeparators.length) {
            // Move the first alphabet characters over to pad the separator set.
            int diff = separatorsLength - prepUsedSeparators.length;
            char[] newPrepUsedSeparators = Arrays.copyOf(prepUsedSeparators, prepUsedSeparators.length + diff);
            System.arraycopy(prepUsedAlphabet, 0, newPrepUsedSeparators, prepUsedSeparators.length, diff);
            prepUsedSeparators = newPrepUsedSeparators;
            prepUsedAlphabet = Arrays.copyOfRange(prepUsedAlphabet, diff, prepUsedAlphabet.length);
        } else {
            prepUsedSeparators = Arrays.copyOfRange(prepUsedSeparators, 0, separatorsLength);
        }
    }
    prepUsedAlphabet = consistentShuffle(prepUsedAlphabet, usedSalt);
    // Guards delimit the padding added to reach minHashLength; take them from the
    // separators when the alphabet is very small, otherwise from the alphabet itself.
    int guardCount = (int) Math.ceil((double) prepUsedAlphabet.length / GUARD_DIV);
    if (prepUsedAlphabet.length < 3) {
        this.usedGuards = Arrays.copyOf(prepUsedSeparators, guardCount);
        prepUsedSeparators = Arrays.copyOfRange(prepUsedSeparators, guardCount, prepUsedSeparators.length);
    } else {
        this.usedGuards = Arrays.copyOf(prepUsedAlphabet, guardCount);
        prepUsedAlphabet = Arrays.copyOfRange(prepUsedAlphabet, guardCount, prepUsedAlphabet.length);
    }
    this.usedAlphabet = prepUsedAlphabet;
    this.usedSeparators = prepUsedSeparators;
    String g = new String(usedGuards);
    String a = new String(usedAlphabet);
    String s = new String(usedSeparators);
    // guardPattern strips optional guard-delimited padding; splitPattern splits on separators.
    this.guardPattern = Pattern.compile("^([" + a + "]*[" + g + "])?([" + a + s + "]+)([" + g + "][" + a + "]*)?$");
    this.splitPattern = Pattern.compile("[" + s + "]");
}
/**
 * Returns a copy of {@code input} with every occurrence of {@code removeChar} removed,
 * preserving the order of the remaining characters.
 */
private static char[] removeCharFromCharArray(char[] input, char removeChar) {
    // Collect survivors into a scratch buffer, then trim to the exact size.
    char[] kept = new char[input.length];
    int count = 0;
    for (char current : input) {
        if (current != removeChar) {
            kept[count++] = current;
        }
    }
    return Arrays.copyOf(kept, count);
}
/**
 * Returns the characters of {@code alphabet} with duplicates dropped, keeping the
 * first occurrence of each character in its original order.
 */
private static char[] makeUniqueAlphabet(String alphabet) {
    // Append each character only the first time it is seen, like a LinkedHashSet would.
    StringBuilder unique = new StringBuilder(alphabet.length());
    for (int i = 0; i < alphabet.length(); i++) {
        char candidate = alphabet.charAt(i);
        if (unique.indexOf(String.valueOf(candidate)) == -1) {
            unique.append(candidate);
        }
    }
    char[] result = new char[unique.length()];
    unique.getChars(0, unique.length(), result, 0);
    return result;
}
/**
 * Derives a small deterministic seed from the input numbers; used to pick the
 * lottery character and guard indices.
 */
private static int calcNumbersHashInt(long... numbers) {
    int accumulator = 0;
    int position = 0;
    for (long value : numbers) {
        // Same narrowing as the compound "int += long" assignment: mod 2^32 arithmetic.
        accumulator += (int) (value % (position + 100));
        position++;
    }
    return accumulator;
}
/**
 * Encodes a hexadecimal string by splitting it into chunks of at most 12 hex digits,
 * prefixing each chunk with "1" (so leading zeros survive the round trip), parsing the
 * result as a long, and encoding the resulting numbers.
 *
 * @return the hash, or "" if {@code hex} is not a non-empty hexadecimal string
 */
public String encodeHex(String hex) {
    if (!hex.matches("^[0-9a-fA-F]+$")) {
        return "";
    }
    // Greedy 12-character chunking, identical to matching "[\w\W]{1,12}" repeatedly.
    int chunkCount = (hex.length() + 11) / 12;
    long[] numbers = new long[chunkCount];
    for (int i = 0; i < chunkCount; i++) {
        int start = i * 12;
        int end = Math.min(start + 12, hex.length());
        // "1" + up to 12 hex digits is at most 13 hex digits, which fits in a long.
        numbers[i] = Long.parseLong("1" + hex.substring(start, end), 16);
    }
    return encode(numbers);
}
/**
 * Reverses {@link #encodeHex(String)}: decodes the hash into numbers and strips the
 * leading "1" sentinel digit from each chunk's hex representation.
 */
public String decodeHex(String hashid) {
    StringBuilder hex = new StringBuilder();
    for (long chunk : decode(hashid)) {
        String chunkHex = Long.toHexString(chunk);
        // Drop the "1" prefix added during encoding.
        hex.append(chunkHex, 1, chunkHex.length());
    }
    return hex.toString();
}
/**
 * Encodes the given non-negative numbers into a hash string.
 *
 * @return the hash, or "" when no numbers are given or any number is negative
 */
public String encode(long... numbers) {
    if (numbers.length == 0) {
        return "";
    }
    // Negative values are unsupported; reject the whole call.
    for (long candidate : numbers) {
        if (candidate < 0) {
            return "";
        }
    }
    return _encode(numbers);
}
/**
 * Core hashids encoding: emits a "lottery" character, then one hashed chunk per
 * number (separator-delimited), then pads with guard characters and shuffled
 * alphabet halves until {@code minHashLength} is reached.
 *
 * <p>NOTE(review): mutates the caller-supplied {@code numbers} array in place
 * (see the {@code numbers[i] %= ...} below); all current callers pass arrays
 * they own.
 */
private String _encode(long[] numbers) {
    // Seed derived from every input number; selects the lottery char and guard indices.
    int numbersHashInt = calcNumbersHashInt(numbers);
    char alphabet[] = Arrays.copyOf(usedAlphabet, usedAlphabet.length);
    char lottery = alphabet[numbersHashInt % alphabet.length];
    StringBuilder resultBuilder = new StringBuilder("" + lottery);
    // buffer = lottery + salt + current alphabet; its prefix keys each shuffle round.
    char[] buffer = new char[1 + usedSalt.length + usedAlphabet.length];
    buffer[0] = lottery;
    for (int i = 0; i != numbers.length; i++) {
        System.arraycopy(usedSalt, 0, buffer, 1, usedSalt.length);
        System.arraycopy(alphabet, 0, buffer, 1 + usedSalt.length, alphabet.length);
        // consistentShuffle/hash are defined elsewhere in this class; presumably the
        // standard hashids permutation and base-N digit expansion — confirm there.
        alphabet = consistentShuffle(alphabet, Arrays.copyOfRange(buffer, 0, alphabet.length));
        char[] last = hash(numbers[i], alphabet);
        resultBuilder.append(last);
        if (i + 1 < numbers.length) {
            // Separator choice depends on the (reduced) number; mutates numbers[i].
            numbers[i] %= (last[0] + 1);
            int index = (int) (numbers[i] % usedSeparators.length);
            resultBuilder.append(usedSeparators[index]);
        }
    }
    char[] resultChars = resultBuilder.toString().toCharArray();
    int currentSize = resultChars.length;
    if (resultChars.length < minHashLength) {
        // Center the hash inside a minHashLength buffer, bracket it with guards,
        // then fill the remaining slots from freshly shuffled alphabet halves.
        int hashPos = minHashLength - resultChars.length - (minHashLength - resultChars.length) / 2;
        char[] minResultSizeBuffer = new char[minHashLength];
        System.arraycopy(resultChars, 0, minResultSizeBuffer, hashPos, resultChars.length);
        int writeFrontPos = hashPos - 1;
        int writeEndPos = hashPos + resultChars.length;
        // Guard before the hash, keyed off the seed and the first hash character.
        int guardIndex = (numbersHashInt + minResultSizeBuffer[hashPos]) % usedGuards.length;
        minResultSizeBuffer[writeFrontPos--] = usedGuards[guardIndex];
        currentSize++;
        if (writeEndPos < minResultSizeBuffer.length) {
            // Guard after the hash, keyed off the second hash character.
            guardIndex = (numbersHashInt + minResultSizeBuffer[hashPos + 1]) % usedGuards.length;
            minResultSizeBuffer[writeEndPos++] = usedGuards[guardIndex];
            currentSize++;
        }
        int halfLength = usedAlphabet.length / 2;
        while (currentSize < minHashLength) {
            alphabet = consistentShuffle(alphabet, alphabet);
            // Upper alphabet half fills leftward from the front, lower half rightward.
            int readFrontPos = (2 * halfLength) - 1;
            int readEndPos = 0;
            while (writeFrontPos >= 0 && readFrontPos >= halfLength) {
                minResultSizeBuffer[writeFrontPos--] = alphabet[readFrontPos--];
                currentSize++;
            }
            while (writeEndPos < minResultSizeBuffer.length && readEndPos < halfLength) {
                minResultSizeBuffer[writeEndPos++] = alphabet[readEndPos++];
                currentSize++;
            }
        }
        resultChars = minResultSizeBuffer;
    }
    return new String(resultChars);
}
/**
 * Decodes a hashid back into the numbers it was built from.
 * Null or empty input yields an empty array rather than an error.
 */
public long[] decode(String input) {
    return (input == null || input.isEmpty()) ? new long[0] : _decode(input);
}
/**
 * Core decoding: splits the hash into per-number chunks via getHashes(),
 * then reverses hash() for each chunk while replaying the exact same
 * alphabet shuffles that _encode() performed. Returns an empty array when
 * the input does not have the expected shape.
 */
private long[] _decode(String input) {
Hashes hashes = getHashes(input);
if (hashes != null) {
char[] alphabet = Arrays.copyOf(usedAlphabet, usedAlphabet.length);
long[] result = new long[hashes.hashes.length];
// buffer layout mirrors _encode(): [lottery][salt][alphabet].
char buffer[] = new char[1 + usedSalt.length + alphabet.length];
System.arraycopy(usedSalt, 0, buffer, 1, usedSalt.length);
buffer[0] = hashes.lottery;
for (int i = 0; i < hashes.hashes.length; i++) {
System.arraycopy(alphabet, 0, buffer, 1 + usedSalt.length, alphabet.length);
alphabet = consistentShuffle(alphabet, Arrays.copyOfRange(buffer, 0, alphabet.length));
result[i] = unhash(hashes.hashes[i].toCharArray(), alphabet);
}
return result;
}
return new long[0];
}
/**
 * Strips guard padding from the input and splits the remaining hash body
 * into the lottery character plus one sub-hash per encoded number.
 * Returns null when the input does not match the expected format.
 *
 * NOTE(review): relies on guardPattern exposing the unpadded hash body as
 * group(2) and on splitPattern matching the separator characters — both
 * are defined outside this excerpt; verify against their definitions.
 */
private Hashes getHashes(String input) {
Matcher matcher = guardPattern.matcher(input);
if (matcher.matches()) {
String[] split = splitPattern.split(matcher.group(2));
if (split.length > 0) {
// The first character of the first chunk is the lottery character.
char lottery = split[0].charAt(0);
split[0] = split[0].substring(1);
return new Hashes(lottery, split);
}
}
return null;
}
/**
 * Renders {@code input} in base {@code alphabet.length}, using the alphabet
 * characters as digits. Repeated divmod emits the least-significant digit
 * first, so the collected characters are reversed before returning.
 */
private static char[] hash(long input, char[] alphabet) {
    StringBuilder digits = new StringBuilder();
    long remaining = input;
    do {
        digits.append(alphabet[(int) (remaining % alphabet.length)]);
        remaining /= alphabet.length;
    } while (remaining != 0);
    return digits.reverse().toString().toCharArray();
}
/**
 * Inverse of hash(): interprets {@code input} as a base-{@code alphabet.length}
 * number whose digits are positions within {@code alphabet}.
 *
 * Fix: the original accumulated digits with {@code Math.pow}, whose double
 * arithmetic loses precision for results above 2^53 and could corrupt large
 * decoded values. Horner's scheme below uses exact long arithmetic and is
 * algebraically identical (including the -1 contribution of characters not
 * found in the alphabet) for every value that fits in a long.
 */
private static long unhash(char[] input, char[] alphabet) {
    long number = 0;
    for (char c : input) {
        // findCharInCharArray returns -1 for unknown characters, matching
        // the original behavior on malformed input.
        number = number * alphabet.length + findCharInCharArray(alphabet, c);
    }
    return number;
}
/**
 * Linear scan for {@code c}; returns the index of its first occurrence in
 * {@code charArray}, or -1 when the character is absent.
 */
private static int findCharInCharArray(char[] charArray, char c) {
    int index = 0;
    for (char candidate : charArray) {
        if (candidate == c) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
 * Deterministically ("consistently") shuffles {@code alphabet} using
 * {@code salt}: identical inputs always produce the identical permutation.
 * The input array is never modified; with a null/empty salt it is returned
 * unchanged (same reference, matching the original behavior).
 *
 * Fix: the original performed four System.arraycopy calls per loop
 * iteration (O(n^2) character copying overall) whose combined effect is
 * exactly a swap of positions i and j; this version performs that swap
 * directly, producing byte-identical results.
 */
public static char[] consistentShuffle(char[] alphabet, char[] salt) {
    if (salt == null || salt.length == 0) {
        return alphabet;
    }
    char[] shuffled = Arrays.copyOf(alphabet, alphabet.length);
    // v cycles through the salt; p accumulates the salt character codes.
    for (int i = shuffled.length - 1, v = 0, p = 0; i > 0; i--, v++) {
        v %= salt.length;
        p += salt[v];
        int j = (salt[v] + v + p) % i;
        char tmp = shuffled[j];
        shuffled[j] = shuffled[i];
        shuffled[i] = tmp;
    }
    return shuffled;
}
/** @return the version string of this hashids implementation. */
public String getVersion() {
return "1.0.1";
}
/** Raw parts of a parsed hash: the lottery character plus per-number sub-hashes. */
private static class Hashes {
// First character of the hash body; seeds the alphabet shuffles on decode.
private final char lottery;
// One encoded chunk per original number, in encoding order.
private final String[] hashes;
private Hashes(char lottery, String[] hashes) {
this.lottery = lottery;
this.hashes = hashes;
}
}
}
| |
package com.embraceplus.fragment;
import java.util.ArrayList;
import android.graphics.Color;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.embraceplus.app.MainActivity;
import com.embraceplus.app.R;
import com.embraceplus.app.fragment.utils.MySwitch;
import com.embraceplus.app.fragment.utils.SwitchOnCheckedChangeListener;
import com.embraceplus.app.fragment.utils.TimeSwitch;
import com.embraceplus.app.fragment.utils.TimeSwitchOnCheckedChangeListener;
import com.embraceplus.ble.utils.Constants;
import com.embraceplus.ble.utils.ServiceManager;
import com.embraceplus.database.GrandpaMsgDB;
import com.embraceplus.model.EffectVO;
import com.embraceplus.model.EmbraceMsg;
import com.embraceplus.model.GrandFatherStatus;
import com.embraceplus.model.MotionPresetVO;
import com.embraceplus.model.VoTransfer;
import com.embraceplus.utils.EColor2;
import com.embraceplus.utils.GrandFatherClockStartor;
import com.embraceplus.utils.Optional;
import com.embraceplus.widget.EmulatorView;
import com.embraceplus.widget.PageSelectorPagerView;
import com.embraceplus.widget.PageSelectorView;
/**
 * Settings screen for the "grandfather clock" feature: the user picks a color
 * effect, previews it on the device, and toggles hourly / half-hour chimes
 * plus vibration. Toggle state and the effect message are persisted through
 * GrandpaMsgDB; the recurring chime tasks are driven by
 * GrandFatherClockStartor.
 */
public class GrandpaClockGFragment extends BaseFragment {
// Paged color palette and the individual color buttons it contains.
private PageSelectorView mColorSelector;
private ArrayList<View> mColorButtons = new ArrayList<View>();
// Persisted toggle state for hourly and half-hour chimes (absent on first run).
// NOTE(review): "hourlStatus" looks like a typo for "hourlyStatus"; kept as-is.
final Optional<GrandFatherStatus> hourlStatus = new Optional<GrandFatherStatus>();
final Optional<GrandFatherStatus> halfHourStatus = new Optional<GrandFatherStatus>();
// Persisted effect message that is sent to the device when the clock fires.
final Optional<EmbraceMsg> grandFatherEmbraceMsg = new Optional<EmbraceMsg>();
MySwitch hourSwitch;
TimeSwitch countSwitch;
MySwitch VibrationSwitch;
MySwitch Half_Hour_Switch;
private View view;
// Effect currently shown in the emulator preview and saved on every change.
private EffectVO mCurrentEffect = new EffectVO();
private EmulatorView mEmulator;
// NOTE(review): every entry is the same drawable; presumably placeholders.
public Integer[] arrayOfInt = new Integer[] { R.drawable.chat, R.drawable.chat, R.drawable.chat, R.drawable.chat, R.drawable.chat, R.drawable.chat, R.drawable.chat, R.drawable.chat,
R.drawable.chat, R.drawable.chat, R.drawable.chat, R.drawable.chat };
private TextView preview;
// Inflates the layout, wires up the color palette, and installs the preview
// click handler that pushes the current effect to the device over BLE.
// NOTE(review): the stray line break after "this." below is valid Java —
// the statement continues with "view = ..." on the next line.
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
this.
view = inflater.inflate(R.layout.fragment_grandpa_clock, container, false);
mEmulator = (EmulatorView) view.findViewById(R.id.emulator);
mColorSelector = (PageSelectorView) view.findViewById(R.id.colors);
setupColorButtons(mColorSelector);
attachColorData();
mColorSelector.setPager((PageSelectorPagerView) view.findViewById(R.id.colors_pager));
preview = (TextView) view.findViewById(R.id.preview);
preview.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// Send the current effect to the device once, as a one-shot preview.
mCurrentEffect.editable = true;
EmbraceMsg msg = VoTransfer.transferFromEffectVOToEmbraceMsg(mCurrentEffect);
msg.setLoop((byte) 1);
msg.setPause((byte) 0);
// msg.setFlag((byte)6);
msg.setFadeIn((byte) -56);
msg.setHold((byte) 0);
msg.setFadeOut((byte) 0);
msg.setMotoswitch(VibrationSwitch.isChecked());
msg.setPause((byte) 0);
ServiceManager.getInstant().getBluetoothService().writeEffectCommand(msg.getFXCommand());
}
});
return view;
}
// public MainActivity getMainActivity() {
// return (MainActivity) getActivity();
// }
// Loads persisted state (effect + chime statuses), seeds defaults on first
// run, configures the emulator preview, and attaches the switch listeners.
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
// MotionPresetVO m = getMainActivity().getMotionPresets().get(0);
// grandFatherEmbraceMsg.set(DbBuilder.getInstant().getGrandFatherMsg());
grandFatherEmbraceMsg.set(GrandpaMsgDB.getInstance().getGrandFatherMsg());
if (grandFatherEmbraceMsg.isEmpty()) {
// First run: default to a white effect.
grandFatherEmbraceMsg.set(new EmbraceMsg());
mCurrentEffect.colorL1 = mCurrentEffect.colorR1 = EColor2.WHITE;
} else {
mCurrentEffect = VoTransfer.transferFromEmbraceMsgToEffectVO(grandFatherEmbraceMsg.get());
}
/*mCurrentEffect.fadeInTime = m.duration;
mCurrentEffect.fadeOutTime = m.duration;
mCurrentEffect.pauseTime = m.pause;*/
/*mCurrentEffect.fadeInTime = -56;
mCurrentEffect.fadeOutTime = 0;
mCurrentEffect.pauseTime = 0;
mCurrentEffect.blackoutOnPause=false;
mCurrentEffect.holdTime=0;*/
// Fixed timing parameters for the on-screen preview.
mCurrentEffect.fadeInTime = 1;
mCurrentEffect.fadeOutTime = 6;
mCurrentEffect.pauseTime = 1000;
mCurrentEffect.blackoutOnPause = false;
mCurrentEffect.holdTime = 1000;
mCurrentEffect.editable = true;
mCurrentEffect.colorL2 = mCurrentEffect.colorR2 = Color.TRANSPARENT;
mEmulator.play(mCurrentEffect);
// halfHourStatus.set(DbBuilder.getInstant().getGrandFatherStatus(Constants.GRANDFATHER_TYPE_HALF_HOUR));
// hourlStatus.set(DbBuilder.getInstant().getGrandFatherStatus(Constants.GRANDFATHER_TYPE_HOURLY));
halfHourStatus.set(GrandpaMsgDB.getInstance().getGrandFatherStatus(Constants.GRANDFATHER_TYPE_HALF_HOUR));
hourlStatus.set(GrandpaMsgDB.getInstance().getGrandFatherStatus(Constants.GRANDFATHER_TYPE_HOURLY));
// Seed default (disabled) statuses when nothing was persisted yet.
if (halfHourStatus.isEmpty()) {
halfHourStatus.set(new GrandFatherStatus());
if (halfHourStatus.notEmpty())
halfHourStatus.get().setStatus(false);
}
if (halfHourStatus.notEmpty())
halfHourStatus.get().setType(Constants.GRANDFATHER_TYPE_HALF_HOUR);
if (hourlStatus.isEmpty()) {
hourlStatus.set(new GrandFatherStatus());
hourlStatus.get().setStatus(false);
hourlStatus.get().setCount("Hour");
hourlStatus.get().setVibration("0");
}
if (hourlStatus.notEmpty())
hourlStatus.get().setType(Constants.GRANDFATHER_TYPE_HOURLY);
// NOTE(review): switches are looked up on the Activity, not on 'view' —
// presumably they live outside this fragment's layout; verify.
hourSwitch = (MySwitch) this.getActivity().findViewById(R.id.hourSwitch);
countSwitch = (TimeSwitch) this.getActivity().findViewById(R.id.countSwitch);
VibrationSwitch = (MySwitch) this.getActivity().findViewById(R.id.VibrationSwitch);
Half_Hour_Switch = (MySwitch) this.getActivity().findViewById(R.id.Half_Hour_Switch);
// Restore the persisted switch positions.
if (hourlStatus.notEmpty() && halfHourStatus.notEmpty()) {
hourSwitch.setChecked(hourlStatus.get().isStatus());
countSwitch.setChecked(hourlStatus.get().getCountBoolean());
VibrationSwitch.setChecked(hourlStatus.get().getboolVibration());
Half_Hour_Switch.setChecked(halfHourStatus.get().isStatus());
}
/*
hourSwitch.setChecked(false);
countSwitch.setChecked(false);
VibrationSwitch.setChecked(false);
Half_Hour_Switch.setChecked(false);*/
// Hourly chime: persists status and starts/stops the hourly clock task;
// stopping also stops the half-hour task.
hourSwitch.setOnCheckedChangeListener(new SwitchOnCheckedChangeListener() {
public void onCheckedChanged(MySwitch buttonView, boolean isChecked) {
// hourSwitch.setChecked(isChecked);
saveGrandFatherEffectMsg();
if (isChecked) {
if (hourlStatus.notEmpty())
hourlStatus.get().setStatus(true);
saveHourlyGrandFatherStatus();
GrandFatherClockStartor.getInstant().startHourlyGrandFatherClock();
// when the hourswitch was checked, the halfhourswitch was
// also checked, then start the halfhour clock tasks also.
if (hourSwitch.isChecked() && Half_Hour_Switch.isChecked()) {
GrandFatherClockStartor.getInstant().startHalfHOurGrandFatherClock();
System.out.println("the hour switch was checked,,");
}
System.out.println("the hour switch was checked,,");
} else {
if (hourlStatus.notEmpty())
hourlStatus.get().setStatus(false);
saveHourlyGrandFatherStatus();
GrandFatherClockStartor.getInstant().stopHourlyGrandFatherClock();
System.out.println("the hour switch was un checked,,");
// when the hourswitch was checked, the halfhourswitch was
// also checked, then start the halfhour clock tasks also.
// if ( Half_Hour_Switch.isChecked() == false)
// {
GrandFatherClockStartor.getInstant().stopHalfHOurGrandFatherClock();
System.out.println("the hour switch was checked,,");
// }
}
}
});
// Count mode: "Once" vs repeating every "Hour"; persisted on change.
countSwitch.setOnCheckedChangeListener(new TimeSwitchOnCheckedChangeListener() {
public void onCheckedChanged(TimeSwitch buttonView, boolean isChecked) {
// hourSwitch.setChecked(isChecked);
String count = "";
if (isChecked) {
count = "Once";
} else {
count = "Hour";
}
if (hourlStatus.notEmpty())
hourlStatus.get().setCount(count);
saveHourlyGrandFatherStatus();
}
});
// Vibration toggle: stored as "1"/"0" strings on the hourly status.
VibrationSwitch.setOnCheckedChangeListener(new SwitchOnCheckedChangeListener() {
public void onCheckedChanged(MySwitch buttonView, boolean isChecked) {
// hourSwitch.setChecked(isChecked);
if (isChecked) {
if (hourlStatus.notEmpty())
hourlStatus.get().setVibration("1");
System.out.println("the hour switch was checked,,");
} else {
if (hourlStatus.notEmpty())
hourlStatus.get().setVibration("0");
System.out.println("the hour switch was un checked,,");
}
saveHourlyGrandFatherStatus();
}
});
// Half-hour chime: only runs while the hourly switch is also on.
Half_Hour_Switch.setOnCheckedChangeListener(new SwitchOnCheckedChangeListener() {
public void onCheckedChanged(MySwitch buttonView, boolean isChecked) {
// hourSwitch.setChecked(isChecked);
saveGrandFatherEffectMsg();
if (isChecked) {
if (halfHourStatus.notEmpty())
halfHourStatus.get().setStatus(true);
saveHalfHourGrandFatherStatus();
if (hourSwitch.isChecked() && Half_Hour_Switch.isChecked()) {
GrandFatherClockStartor.getInstant().startHalfHOurGrandFatherClock();
System.out.println("the hour switch was checked,,");
}
} else {
if (halfHourStatus.notEmpty())
halfHourStatus.get().setStatus(false);
saveHalfHourGrandFatherStatus();
System.out.println("the hour switch was un checked,,");
GrandFatherClockStartor.getInstant().stopHalfHOurGrandFatherClock();
}
}
});
initTitileBar();
}
// Configures the title bar (currently only the back button).
// NOTE(review): "Titile" is a typo for "Title"; name kept for compatibility.
public void initTitileBar() {
initBackButton();
}
// Recursively walks the palette container and registers every
// RelativeLayout child as a clickable color button.
private void setupColorButtons(ViewGroup container) {
View child = null;
for (int i = 0; i < container.getChildCount(); i++) {
child = container.getChildAt(i);
if (child instanceof RelativeLayout) {
setupColorButton((RelativeLayout) child);
} else if (child instanceof Button) {
} else if (child instanceof LinearLayout) {
setupColorButtons((ViewGroup) child);
}
}
}
// Wires a single palette cell to the shared color click handler.
private void setupColorButton(RelativeLayout view) {
view.setOnClickListener(mOnColorClick);
// view.setOnTouchListener(mOnColorClick);
mColorButtons.add(view);
}
// Paints each palette cell with its color and tags it with a ColorData.
// The sentinel value -99 means "random color" and gets a special drawable.
private void attachColorData() {
View v;
for (int i = 0; i < EColor2.COLORS.length; i++) {
int color = EColor2.COLORS[i];
v = mColorButtons.get(i).findViewById(R.id.color_container);
/*if(color==-99)
{
v.setVisibility(View.GONE);
continue;
}*/
if (color == -99) {
v.setBackgroundResource(R.drawable.btn_customfx_col_random);
// continue;
} else {
v.setBackgroundColor(color);
}
mColorButtons.get(i).setTag(new ColorData(color));
}
}
// Converts the current effect into an EmbraceMsg with fixed timing bytes
// and persists it as the grandfather-clock message.
private void saveGrandFatherEffectMsg() {
mCurrentEffect.editable = true;
grandFatherEmbraceMsg.set(VoTransfer.transferFromEffectVOToEmbraceMsg(mCurrentEffect));
if (grandFatherEmbraceMsg.notEmpty()) {
grandFatherEmbraceMsg.get().setFadeIn((byte) -56);
grandFatherEmbraceMsg.get().setFadeOut((byte) 0);
grandFatherEmbraceMsg.get().setPause((byte) 0);
grandFatherEmbraceMsg.get().setBlackout(false);
grandFatherEmbraceMsg.get().setHold((byte) 0);
// DbBuilder.getInstant().updateGrandFatherMsg(grandFatherEmbraceMsg.get());
GrandpaMsgDB.getInstance().updateGrandFatherMsg(grandFatherEmbraceMsg.get());
}
}
// Persists the half-hour chime status, if present.
private void saveHalfHourGrandFatherStatus() {
if (halfHourStatus.notEmpty()) {
// DbBuilder.getInstant().updateGrandFatherStatus(halfHourStatus.get());
GrandpaMsgDB.getInstance().updateGrandFatherStatus(halfHourStatus.get());
}
}
// Persists the hourly chime status, if present.
private void saveHourlyGrandFatherStatus() {
if (hourlStatus.notEmpty()) {
// DbBuilder.getInstant().updateGrandFatherStatus(hourlStatus.get());
GrandpaMsgDB.getInstance().updateGrandFatherStatus(hourlStatus.get());
}
}
// Shared click handler for palette cells: updates the effect colors
// (or enables random mode), restarts the preview, and persists the effect.
private OnClickListener mOnColorClick = new OnClickListener() {
@Override
public void onClick(View view) {
int color = ((ColorData) view.getTag()).color;
if (color == -99) {
mCurrentEffect.random = true;
} else {
mCurrentEffect.random = false;
mCurrentEffect.colorL1 = mCurrentEffect.colorR1 = color;
}
mCurrentEffect.editable = true;
mCurrentEffect.colorL2 = mCurrentEffect.colorR2 = Color.TRANSPARENT;
mEmulator.play(mCurrentEffect);
saveGrandFatherEffectMsg();
}
};
// Tag object attached to each palette cell; -99 encodes "random".
private class ColorData {
public int color;
public ColorData(int color) {
this.color = color;
}
}
// Clears all persisted chime statuses (used to reset the feature).
public static void initStatus() {
GrandpaMsgDB.getInstance().deleteGrandFatherStatus();
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.util;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.JavaSdkVersion;
import com.intellij.openapi.projectRoots.JavaVersionService;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.infos.MethodCandidateInfo;
import com.intellij.psi.infos.MethodCandidateInfo.ApplicabilityLevel;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.meta.PsiMetaData;
import com.intellij.psi.meta.PsiMetaOwner;
import com.intellij.psi.search.ProjectScope;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.TimeoutUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.EmptyIterable;
import com.intellij.util.containers.HashMap;
import gnu.trove.THashSet;
import org.intellij.lang.annotations.MagicConstant;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.psi.CommonClassNames.JAVA_LANG_STRING;
public final class PsiUtil extends PsiUtilCore {
private static final Logger LOG = Logger.getInstance("#com.intellij.psi.util.PsiUtil");
// Numeric access levels ordered from least restrictive (4) to most
// restrictive (1); see getAccessLevel()/getAccessModifier().
public static final int ACCESS_LEVEL_PUBLIC = 4;
public static final int ACCESS_LEVEL_PROTECTED = 3;
public static final int ACCESS_LEVEL_PACKAGE_LOCAL = 2;
public static final int ACCESS_LEVEL_PRIVATE = 1;
// Marker key: when set on a code fragment, 'void' is accepted as a valid type there.
public static final Key<Boolean> VALID_VOID_TYPE_IN_CODE_FRAGMENT = Key.create("VALID_VOID_TYPE_IN_CODE_FRAGMENT");
// Utility class: no instances.
private PsiUtil() {}
/**
 * Returns true when {@code expr} sits (possibly parenthesized) inside the
 * L-value of an assignment expression.
 */
public static boolean isOnAssignmentLeftHand(@NotNull PsiExpression expr) {
    PsiElement candidate = PsiTreeUtil.skipParentsOfType(expr, PsiParenthesizedExpression.class);
    if (!(candidate instanceof PsiAssignmentExpression)) {
        return false;
    }
    PsiExpression lhs = ((PsiAssignmentExpression)candidate).getLExpression();
    return PsiTreeUtil.isAncestor(lhs, expr, false);
}
/**
 * Package-level accessibility check: public members are visible everywhere,
 * private members nowhere outside their class, and everything else only
 * from the same package.
 */
public static boolean isAccessibleFromPackage(@NotNull PsiModifierListOwner element, @NotNull PsiPackage aPackage) {
    if (element.hasModifierProperty(PsiModifier.PUBLIC)) {
        return true;
    }
    if (element.hasModifierProperty(PsiModifier.PRIVATE)) {
        return false;
    }
    return JavaPsiFacade.getInstance(element.getProject()).isInPackage(element, aPackage);
}
/**
 * Returns true when {@code expr} is written to: either as an assignment
 * target or as the operand of a prefix/postfix ++ or -- operator.
 */
public static boolean isAccessedForWriting(@NotNull PsiExpression expr) {
    if (isOnAssignmentLeftHand(expr)) {
        return true;
    }
    PsiElement parent = PsiTreeUtil.skipParentsOfType(expr, PsiParenthesizedExpression.class);
    IElementType tokenType;
    if (parent instanceof PsiPrefixExpression) {
        tokenType = ((PsiPrefixExpression)parent).getOperationTokenType();
    }
    else if (parent instanceof PsiPostfixExpression) {
        tokenType = ((PsiPostfixExpression)parent).getOperationTokenType();
    }
    else {
        return false;
    }
    return tokenType == JavaTokenType.PLUSPLUS || tokenType == JavaTokenType.MINUSMINUS;
}
/**
 * Returns true when {@code expr} is read. The only purely-write access is
 * the bare L-value of a simple '=' assignment; compound assignments
 * (+=, -=, ...) read the old value as well.
 */
public static boolean isAccessedForReading(@NotNull PsiExpression expr) {
    PsiElement parent = PsiTreeUtil.skipParentsOfType(expr, PsiParenthesizedExpression.class);
    if (!(parent instanceof PsiAssignmentExpression)) {
        return true;
    }
    PsiAssignmentExpression assignment = (PsiAssignmentExpression)parent;
    boolean isLhs = PsiTreeUtil.isAncestor(assignment.getLExpression(), expr, false);
    return !isLhs || assignment.getOperationTokenType() != JavaTokenType.EQ;
}
/** Convenience overload: derives the project from {@code place}. */
public static boolean isAccessible(@NotNull PsiMember member, @NotNull PsiElement place, @Nullable PsiClass accessObjectClass) {
return isAccessible(place.getProject(), member, place, accessObjectClass);
}
/**
 * Checks whether {@code member} is accessible from {@code place}, optionally
 * through {@code accessObjectClass}; delegates to the project's resolve helper.
 */
public static boolean isAccessible(@NotNull Project project, @NotNull PsiMember member,
@NotNull PsiElement place, @Nullable PsiClass accessObjectClass) {
return JavaPsiFacade.getInstance(project).getResolveHelper().isAccessible(member, place, accessObjectClass);
}
/**
 * Determines the class through which a member access is performed (the type
 * of the qualifier expression). Returns {@link JavaResolveResult#EMPTY} for
 * 'super' qualifiers and for expressions whose type yields no class.
 */
@NotNull
public static JavaResolveResult getAccessObjectClass(@NotNull PsiExpression expression) {
if (expression instanceof PsiSuperExpression) return JavaResolveResult.EMPTY;
PsiType type = expression.getType();
if (type instanceof PsiClassType) {
return ((PsiClassType)type).resolveGenerics();
}
// Disjunction (multi-catch) types are represented by their least upper bound.
if (type instanceof PsiDisjunctionType) {
final PsiType lub = ((PsiDisjunctionType)type).getLeastUpperBound();
if (lub instanceof PsiClassType) {
return ((PsiClassType)lub).resolveGenerics();
}
}
// A typeless reference may still name a class directly (e.g. Foo.staticMember).
if (type == null && expression instanceof PsiReferenceExpression) {
JavaResolveResult resolveResult = ((PsiReferenceExpression)expression).advancedResolve(false);
if (resolveResult.getElement() instanceof PsiClass) {
return resolveResult;
}
}
return JavaResolveResult.EMPTY;
}
/**
 * Returns true when the expression is a compile-time constant expression;
 * null is trivially not a constant. Delegates to IsConstantExpressionVisitor.
 */
public static boolean isConstantExpression(@Nullable PsiExpression expression) {
if (expression == null) return false;
IsConstantExpressionVisitor visitor = new IsConstantExpressionVisitor();
expression.accept(visitor);
return visitor.myIsConstant;
}
// todo: move to PsiThrowsList?
/**
 * Adds the exception named {@code exceptionFQName} to the method's throws
 * list (resolving the class in the method's resolve scope, if possible).
 */
public static void addException(@NotNull PsiMethod method, @NotNull @NonNls String exceptionFQName) throws IncorrectOperationException {
PsiClass exceptionClass = JavaPsiFacade.getInstance(method.getProject()).findClass(exceptionFQName, method.getResolveScope());
addException(method, exceptionClass, exceptionFQName);
}
/** Adds {@code exceptionClass} to the method's throws list. */
public static void addException(@NotNull PsiMethod method, @NotNull PsiClass exceptionClass) throws IncorrectOperationException {
addException(method, exceptionClass, exceptionClass.getQualifiedName());
}
/**
 * Adds an exception to the method's throws list, keeping the list minimal:
 * returns without change when the exception (or a superclass of it) is
 * already declared; replaces the first declared subclass of it and deletes
 * any further declared subclasses; otherwise appends a new reference.
 *
 * Fix: the reference-construction code (FQ-name vs type based) was
 * duplicated verbatim in two places; extracted into
 * {@link #createExceptionReference}.
 */
private static void addException(@NotNull PsiMethod method, @Nullable PsiClass exceptionClass, @Nullable String exceptionName) throws IncorrectOperationException {
    assert exceptionClass != null || exceptionName != null : "One of exceptionName, exceptionClass must be not null";
    PsiReferenceList throwsList = method.getThrowsList();
    PsiJavaCodeReferenceElement[] refs = throwsList.getReferenceElements();
    boolean replaced = false;
    for (PsiJavaCodeReferenceElement ref : refs) {
        if (ref.isReferenceTo(exceptionClass)) return; // already declared
        PsiClass aClass = (PsiClass)ref.resolve();
        if (exceptionClass == null || aClass == null) {
            continue;
        }
        if (aClass.isInheritor(exceptionClass, true)) {
            // A declared subclass is subsumed by the new exception: replace
            // the first such reference, delete any further ones.
            if (replaced) {
                ref.delete();
            }
            else {
                ref.replace(createExceptionReference(method, exceptionClass, exceptionName));
                replaced = true;
            }
        }
        else if (exceptionClass.isInheritor(aClass, true)) {
            // Already covered by a more general declared exception.
            return;
        }
    }
    if (replaced) return;
    throwsList.add(createExceptionReference(method, exceptionClass, exceptionName));
}

/** Builds a throws-list reference, preferring the FQ name when available. */
private static PsiJavaCodeReferenceElement createExceptionReference(@NotNull PsiMethod method,
                                                                    @Nullable PsiClass exceptionClass,
                                                                    @Nullable String exceptionName) {
    PsiElementFactory factory = JavaPsiFacade.getInstance(method.getProject()).getElementFactory();
    if (exceptionName != null) {
        return factory.createReferenceElementByFQClassName(exceptionName, method.getResolveScope());
    }
    PsiClassType type = factory.createType(exceptionClass);
    return factory.createReferenceElementByType(type);
}
// todo: move to PsiThrowsList?
/**
 * Removes every throws-list reference whose canonical text equals
 * {@code exceptionClass} (the array snapshot makes deletion while
 * iterating safe).
 */
public static void removeException(@NotNull PsiMethod method, @NonNls String exceptionClass) throws IncorrectOperationException {
PsiJavaCodeReferenceElement[] refs = method.getThrowsList().getReferenceElements();
for (PsiJavaCodeReferenceElement ref : refs) {
if (ref.getCanonicalText().equals(exceptionClass)) {
ref.delete();
}
}
}
/**
 * Returns true when no accessible variable named {@code name} resolves at
 * {@code place}, i.e. the name is free for a new variable there.
 */
public static boolean isVariableNameUnique(@NotNull String name, @NotNull PsiElement place) {
    Project project = place.getProject();
    PsiResolveHelper resolveHelper = JavaPsiFacade.getInstance(project).getResolveHelper();
    return resolveHelper.resolveAccessibleReferencedVariable(name, place) == null;
}
/**
 * @return enclosing outermost (method or class initializer) body but not higher than scope
 */
@Nullable
public static PsiElement getTopLevelEnclosingCodeBlock(@Nullable PsiElement element, PsiElement scope) {
PsiElement blockSoFar = null;
// Ascend the tree, remembering the outermost block-like element seen so far,
// and stop at method/initializer/class boundaries or at 'scope'.
while (element != null) {
// variable can be defined in for loop initializer
PsiElement parent = element.getParent();
// Skip candidates nested inside expressions (except lambda bodies).
if (!(parent instanceof PsiExpression) || parent instanceof PsiLambdaExpression) {
if (element instanceof PsiCodeBlock || element instanceof PsiForStatement || element instanceof PsiForeachStatement) {
blockSoFar = element;
}
// A method of a "real" (non-local, non-anonymous) class bounds the search.
if (parent instanceof PsiMethod
&& parent.getParent() instanceof PsiClass
&& !isLocalOrAnonymousClass((PsiClass)parent.getParent()))
break;
if (parent instanceof PsiClassInitializer && !(parent.getParent() instanceof PsiAnonymousClass)) break;
// A field initializer itself counts as a top-level "block".
if (parent instanceof PsiField && ((PsiField) parent).getInitializer() == element) {
blockSoFar = element;
}
if (parent instanceof PsiClassLevelDeclarationStatement) {
parent = parent.getParent();
}
if (element instanceof PsiClass && !isLocalOrAnonymousClass((PsiClass)element)) {
break;
}
// Template-language files (e.g. JSP) are returned as the block themselves.
if (element instanceof PsiFile && PsiUtilCore.getTemplateLanguageFile(element) != null) {
return element;
}
}
if (element == scope) break;
element = parent;
}
return blockSoFar;
}
/** @return true when the class is anonymous or declared locally inside a code block. */
public static boolean isLocalOrAnonymousClass(@NotNull PsiClass psiClass) {
return psiClass instanceof PsiAnonymousClass || isLocalClass(psiClass);
}
/**
 * A local class is one declared as a declaration statement directly inside
 * a code block (i.e. within a method or initializer body).
 */
public static boolean isLocalClass(@NotNull PsiClass psiClass) {
    PsiElement parent = psiClass.getParent();
    if (!(parent instanceof PsiDeclarationStatement)) {
        return false;
    }
    return parent.getParent() instanceof PsiCodeBlock;
}
/**
 * Returns true when the class carries the 'abstract' modifier.
 * Classes without a modifier list (e.g. some synthetic ones) are not abstract.
 */
public static boolean isAbstractClass(@NotNull PsiClass clazz) {
    PsiModifierList modifiers = clazz.getModifierList();
    if (modifiers == null) {
        return false;
    }
    return modifiers.hasModifierProperty(PsiModifier.ABSTRACT);
}
/**
 * @return topmost code block where variable makes sense
 */
@Nullable
public static PsiElement getVariableCodeBlock(@NotNull PsiVariable variable, @Nullable PsiElement context) {
PsiElement codeBlock = null;
// Parameters: the scope depends on what kind of construct declared them.
if (variable instanceof PsiParameter) {
PsiElement declarationScope = ((PsiParameter)variable).getDeclarationScope();
if (declarationScope instanceof PsiCatchSection) {
codeBlock = ((PsiCatchSection)declarationScope).getCatchBlock();
}
else if (declarationScope instanceof PsiForeachStatement) {
codeBlock = ((PsiForeachStatement)declarationScope).getBody();
}
else if (declarationScope instanceof PsiMethod) {
codeBlock = ((PsiMethod)declarationScope).getBody();
} else if (declarationScope instanceof PsiLambdaExpression) {
codeBlock = ((PsiLambdaExpression)declarationScope).getBody();
}
}
else if (variable instanceof PsiResourceVariable) {
final PsiElement resourceList = variable.getParent();
return resourceList != null ? resourceList.getParent() : null; // use try statement as topmost
}
// A for-loop variable is scoped to the whole for statement.
else if (variable instanceof PsiLocalVariable && variable.getParent() instanceof PsiForStatement) {
return variable.getParent();
}
// Fields: climb from 'context' to the member directly inside the field's
// class; its body (method or initializer) is the answer.
else if (variable instanceof PsiField && context != null) {
final PsiClass aClass = ((PsiField) variable).getContainingClass();
while (context != null && context.getParent() != aClass) {
context = context.getParent();
if (context instanceof PsiClassLevelDeclarationStatement) return null;
}
return context instanceof PsiMethod ?
((PsiMethod) context).getBody() :
context instanceof PsiClassInitializer ? ((PsiClassInitializer) context).getBody() : null;
}
else {
// Plain local variables: find the enclosing top-level block; a block that
// is a switch body is widened to the switch statement's parent.
final PsiElement scope = variable.getParent() == null ? null : variable.getParent().getParent();
codeBlock = getTopLevelEnclosingCodeBlock(variable, scope);
if (codeBlock != null && codeBlock.getParent() instanceof PsiSwitchStatement) codeBlock = codeBlock.getParent().getParent();
}
return codeBlock;
}
/**
 * Returns true when the element is a prefix or postfix ++ or -- expression.
 */
public static boolean isIncrementDecrementOperation(@NotNull PsiElement element) {
    IElementType sign = null;
    if (element instanceof PsiPostfixExpression) {
        sign = ((PsiPostfixExpression)element).getOperationTokenType();
    }
    else if (element instanceof PsiPrefixExpression) {
        sign = ((PsiPrefixExpression)element).getOperationTokenType();
    }
    return sign == JavaTokenType.PLUSPLUS || sign == JavaTokenType.MINUSMINUS;
}
/** Marks int values holding one of the ACCESS_LEVEL_* constants. */
@MagicConstant(intValues = {ACCESS_LEVEL_PUBLIC, ACCESS_LEVEL_PROTECTED, ACCESS_LEVEL_PACKAGE_LOCAL, ACCESS_LEVEL_PRIVATE})
public @interface AccessLevel {}
/**
 * Maps a modifier list to its numeric access level, checking from most
 * restrictive to least; anything without an explicit restricting modifier
 * is treated as public.
 */
@AccessLevel
public static int getAccessLevel(@NotNull PsiModifierList modifierList) {
    if (modifierList.hasModifierProperty(PsiModifier.PRIVATE)) return ACCESS_LEVEL_PRIVATE;
    if (modifierList.hasModifierProperty(PsiModifier.PACKAGE_LOCAL)) return ACCESS_LEVEL_PACKAGE_LOCAL;
    if (modifierList.hasModifierProperty(PsiModifier.PROTECTED)) return ACCESS_LEVEL_PROTECTED;
    return ACCESS_LEVEL_PUBLIC;
}
/**
 * Maps a numeric access level (1..4, see ACCESS_LEVEL_* constants) to the
 * corresponding modifier string; returns null for out-of-range values.
 *
 * Fix: the original indexed accessModifiers[accessLevel - 1] after checking
 * only the upper bound, so accessLevel <= 0 threw
 * ArrayIndexOutOfBoundsException while too-large values returned null;
 * both out-of-range directions now uniformly return null.
 */
@PsiModifier.ModifierConstant
@Nullable
public static String getAccessModifier(@AccessLevel int accessLevel) {
    if (accessLevel < 1 || accessLevel > accessModifiers.length) {
        return null;
    }
    return accessModifiers[accessLevel - 1];
}
// Indexed by accessLevel - 1; order must match the ACCESS_LEVEL_* constants.
private static final String[] accessModifiers = {
    PsiModifier.PRIVATE, PsiModifier.PACKAGE_LOCAL, PsiModifier.PROTECTED, PsiModifier.PUBLIC
};
/**
 * @return true if element specified is statement or expression statement. see JLS 14.5-14.8
 */
public static boolean isStatement(@NotNull PsiElement element) {
PsiElement parent = element.getParent();
if (element instanceof PsiExpressionListStatement) {
// statement list allowed in for() init or update only
if (!(parent instanceof PsiForStatement)) return false;
final PsiForStatement forStatement = (PsiForStatement)parent;
if (!(element == forStatement.getInitialization() || element == forStatement.getUpdate())) return false;
// Every expression in the list must itself qualify as a statement.
final PsiExpressionList expressionList = ((PsiExpressionListStatement) element).getExpressionList();
final PsiExpression[] expressions = expressionList.getExpressions();
for (PsiExpression expression : expressions) {
if (!isStatement(expression)) return false;
}
return true;
}
else if (element instanceof PsiExpressionStatement) {
return isStatement(((PsiExpressionStatement) element).getExpression());
}
if (element instanceof PsiDeclarationStatement) {
// Declarations are statements inside code blocks / fragments and in a
// for() initializer, but not as a bare for-loop body.
if (parent instanceof PsiCodeBlock) return true;
if (parent instanceof PsiCodeFragment) return true;
if (!(parent instanceof PsiForStatement) || ((PsiForStatement)parent).getBody() == element) {
return false;
}
}
if (element instanceof PsiStatement) return true;
// Expression statements per JLS 14.8: assignments, ++/--, method calls,
// and class-instance creations (but not array creations).
if (element instanceof PsiAssignmentExpression) return true;
if (isIncrementDecrementOperation(element)) return true;
if (element instanceof PsiMethodCallExpression) return true;
if (element instanceof PsiNewExpression) {
return !(((PsiNewExpression) element).getType() instanceof PsiArrayType);
}
return element instanceof PsiCodeBlock;
}
/**
 * Ascends the tree until reaching an element that is a direct child of a
 * code block — i.e. the enclosing statement; null when there is none.
 */
@Nullable
public static PsiElement getEnclosingStatement(PsiElement element) {
    for (PsiElement current = element; current != null; current = current.getParent()) {
        if (current.getParent() instanceof PsiCodeBlock) {
            return current;
        }
    }
    return null;
}
/**
 * Finds the smallest element inside {@code scope} whose text range fully
 * contains {@code range}: starts at the leaf containing the range start and
 * widens upward; returns null when even {@code scope} does not contain it.
 */
@Nullable
public static PsiElement getElementInclusiveRange(@NotNull PsiElement scope, @NotNull TextRange range) {
    PsiElement candidate = scope.findElementAt(range.getStartOffset());
    while (candidate != null && !candidate.getTextRange().contains(range)) {
        if (candidate == scope) {
            return null;
        }
        candidate = candidate.getParent();
    }
    return candidate;
}
@Nullable
public static PsiClass resolveClassInType(@Nullable PsiType type) {
if (type instanceof PsiClassType) {
return ((PsiClassType) type).resolve();
}
if (type instanceof PsiArrayType) {
return resolveClassInType(((PsiArrayType) type).getComponentType());
}
if (type instanceof PsiDisjunctionType) {
final PsiType lub = ((PsiDisjunctionType)type).getLeastUpperBound();
if (lub instanceof PsiClassType) {
return ((PsiClassType)lub).resolve();
}
}
return null;
}
@Nullable
public static PsiClass resolveClassInClassTypeOnly(@Nullable PsiType type) {
return type instanceof PsiClassType ? ((PsiClassType)type).resolve() : null;
}
public static PsiClassType.ClassResolveResult resolveGenericsClassInType(@Nullable PsiType type) {
if (type instanceof PsiClassType) {
final PsiClassType classType = (PsiClassType) type;
return classType.resolveGenerics();
}
if (type instanceof PsiArrayType) {
return resolveGenericsClassInType(((PsiArrayType) type).getComponentType());
}
if (type instanceof PsiDisjunctionType) {
final PsiType lub = ((PsiDisjunctionType)type).getLeastUpperBound();
if (lub instanceof PsiClassType) {
return ((PsiClassType)lub).resolveGenerics();
}
}
return PsiClassType.ClassResolveResult.EMPTY;
}
@NotNull
public static PsiType convertAnonymousToBaseType(@NotNull PsiType type) {
PsiClass psiClass = resolveClassInType(type);
if (psiClass instanceof PsiAnonymousClass) {
int dims = type.getArrayDimensions();
type = ((PsiAnonymousClass) psiClass).getBaseClassType();
while (dims != 0) {
type = type.createArrayType();
dims--;
}
}
return type;
}
public static boolean isApplicable(@NotNull PsiMethod method, @NotNull PsiSubstitutor substitutorForMethod, @NotNull PsiExpressionList argList) {
return getApplicabilityLevel(method, substitutorForMethod, argList) != ApplicabilityLevel.NOT_APPLICABLE;
}
public static boolean isApplicable(@NotNull PsiMethod method, @NotNull PsiSubstitutor substitutorForMethod, @NotNull PsiExpression[] argList) {
final PsiType[] types = ContainerUtil.map2Array(argList, PsiType.class, PsiExpression.EXPRESSION_TO_TYPE);
return getApplicabilityLevel(method, substitutorForMethod, types, getLanguageLevel(method)) != ApplicabilityLevel.NOT_APPLICABLE;
}
  // Convenience overload: argument types and language level are derived from the expression list.
  @MethodCandidateInfo.ApplicabilityLevelConstant
  public static int getApplicabilityLevel(@NotNull PsiMethod method, @NotNull PsiSubstitutor substitutorForMethod, @NotNull PsiExpressionList argList) {
    return getApplicabilityLevel(method, substitutorForMethod, argList.getExpressionTypes(), getLanguageLevel(argList));
  }
  // Convenience overload: allows unchecked conversion and varargs invocation by default.
  @MethodCandidateInfo.ApplicabilityLevelConstant
  public static int getApplicabilityLevel(@NotNull final PsiMethod method,
                                          @NotNull final PsiSubstitutor substitutorForMethod,
                                          @NotNull final PsiType[] args,
                                          @NotNull final LanguageLevel languageLevel) {
    return getApplicabilityLevel(method, substitutorForMethod, args, languageLevel, true, true);
  }
  /** Pluggable strategy deciding whether an argument type is applicable to a parameter type. */
  public interface ApplicabilityChecker {
    // Default checker: plain assignability; the argument index is ignored.
    ApplicabilityChecker ASSIGNABILITY_CHECKER = new ApplicabilityChecker() {
      @Override
      public boolean isApplicable(PsiType left, PsiType right, boolean allowUncheckedConversion, int argId) {
        return TypeConversionUtil.isAssignable(left, right, allowUncheckedConversion);
      }
    };

    // left = (substituted) parameter type, right = argument type, argId = argument position.
    boolean isApplicable(PsiType left, PsiType right, boolean allowUncheckedConversion, int argId);
  }
  // Convenience overload: applicability is judged with the default assignability checker.
  @MethodCandidateInfo.ApplicabilityLevelConstant
  public static int getApplicabilityLevel(@NotNull final PsiMethod method,
                                          @NotNull final PsiSubstitutor substitutorForMethod,
                                          @NotNull final PsiType[] args,
                                          @NotNull final LanguageLevel languageLevel,
                                          final boolean allowUncheckedConversion,
                                          final boolean checkVarargs) {
    return getApplicabilityLevel(method, substitutorForMethod, args, languageLevel,
                                 allowUncheckedConversion, checkVarargs, ApplicabilityChecker.ASSIGNABILITY_CHECKER);
  }
  /**
   * Computes how the method applies to the given argument types (cf. JLS 15.12.2):
   * {@code NOT_APPLICABLE}, {@code FIXED_ARITY}, or {@code VARARGS}.
   *
   * @param allowUncheckedConversion whether unchecked conversions may be used when matching types
   * @param checkVarargs             whether a variable-arity invocation should be attempted
   * @param function                 strategy used to decide per-argument applicability
   */
  @MethodCandidateInfo.ApplicabilityLevelConstant
  public static int getApplicabilityLevel(@NotNull final PsiMethod method,
                                          @NotNull final PsiSubstitutor substitutorForMethod,
                                          @NotNull final PsiType[] args,
                                          @NotNull final LanguageLevel languageLevel,
                                          final boolean allowUncheckedConversion,
                                          final boolean checkVarargs,
                                          @NotNull final ApplicabilityChecker function) {
    final PsiParameter[] parms = method.getParameterList().getParameters();
    // Fewer than (parameterCount - 1) arguments can never match, not even via varargs.
    if (args.length < parms.length - 1) return ApplicabilityLevel.NOT_APPLICABLE;

    final PsiClass containingClass = method.getContainingClass();
    // Raw call site: both the method and its class are substituted rawly.
    final boolean isRaw = containingClass != null && isRawSubstitutor(method, substitutorForMethod) && isRawSubstitutor(containingClass, substitutorForMethod);
    // All parameters except the last must match positionally regardless of arity mode.
    if (!areFirstArgumentsApplicable(args, parms, languageLevel, substitutorForMethod, isRaw, allowUncheckedConversion, function)) return ApplicabilityLevel.NOT_APPLICABLE;
    // Fixed-arity attempt: argument count equals parameter count.
    if (args.length == parms.length) {
      if (parms.length == 0) return ApplicabilityLevel.FIXED_ARITY;
      PsiType parmType = getParameterType(parms[parms.length - 1], languageLevel, substitutorForMethod);
      PsiType argType = args[args.length - 1];
      if (argType == null) return ApplicabilityLevel.NOT_APPLICABLE;
      if (function.isApplicable(parmType, argType, allowUncheckedConversion, parms.length - 1)) return ApplicabilityLevel.FIXED_ARITY;

      if (isRaw) {
        // For raw call sites, retry the last parameter/argument pair with erased types.
        final PsiType erasedParamType = TypeConversionUtil.erasure(parmType);
        final PsiType erasedArgType = TypeConversionUtil.erasure(argType);
        if (erasedArgType != null && erasedParamType != null &&
            function.isApplicable(erasedParamType, erasedArgType, allowUncheckedConversion, parms.length - 1)) {
          return ApplicabilityLevel.FIXED_ARITY;
        }
      }
    }

    // Variable-arity attempt: requires a varargs method and language level >= 5.
    if (checkVarargs && method.isVarArgs() && languageLevel.compareTo(LanguageLevel.JDK_1_5) >= 0) {
      if (args.length < parms.length) return ApplicabilityLevel.VARARGS;
      PsiParameter lastParameter = parms[parms.length - 1];
      if (!lastParameter.isVarArgs()) return ApplicabilityLevel.NOT_APPLICABLE;
      PsiType lastParmType = getParameterType(lastParameter, languageLevel, substitutorForMethod);
      if (!(lastParmType instanceof PsiArrayType)) return ApplicabilityLevel.NOT_APPLICABLE;
      lastParmType = ((PsiArrayType)lastParmType).getComponentType();
      // Pre-1.8 targets: compare against the wildcard itself rather than its capture.
      if (lastParmType instanceof PsiCapturedWildcardType &&
          !JavaVersionService.getInstance().isAtLeast(((PsiCapturedWildcardType)lastParmType).getContext(), JavaSdkVersion.JDK_1_8)) {
        lastParmType = ((PsiCapturedWildcardType)lastParmType).getWildcard();
      }

      // Every trailing argument must be applicable to the vararg component type.
      for (int i = parms.length - 1; i < args.length; i++) {
        PsiType argType = args[i];
        if (argType == null || !function.isApplicable(lastParmType, argType, allowUncheckedConversion, i)) {
          return ApplicabilityLevel.NOT_APPLICABLE;
        }
      }
      return ApplicabilityLevel.VARARGS;
    }

    return ApplicabilityLevel.NOT_APPLICABLE;
  }
  /**
   * Checks all but the last parameter against the corresponding argument types;
   * the last parameter is handled separately by the caller (fixed-arity vs varargs).
   */
  private static boolean areFirstArgumentsApplicable(@NotNull PsiType[] args,
                                                     @NotNull final PsiParameter[] parms,
                                                     @NotNull LanguageLevel languageLevel,
                                                     @NotNull final PsiSubstitutor substitutorForMethod,
                                                     boolean isRaw,
                                                     boolean allowUncheckedConversion, ApplicabilityChecker function) {
    for (int i = 0; i < parms.length - 1; i++) {
      final PsiType type = args[i];
      // A null argument type means the expression's type could not be computed.
      if (type == null) return false;
      final PsiParameter parameter = parms[i];
      final PsiType substitutedParmType = getParameterType(parameter, languageLevel, substitutorForMethod);
      if (isRaw) {
        // Raw call site: compare erasures; if either erasure is null the pair is skipped.
        final PsiType substErasure = TypeConversionUtil.erasure(substitutedParmType);
        final PsiType typeErasure = TypeConversionUtil.erasure(type);
        if (substErasure != null && typeErasure != null && !function.isApplicable(substErasure, typeErasure, allowUncheckedConversion, i)) {
          return false;
        }
      }
      else if (!function.isApplicable(substitutedParmType, type, allowUncheckedConversion, i)) {
        return false;
      }
    }
    return true;
  }
private static PsiType getParameterType(@NotNull final PsiParameter parameter,
@NotNull LanguageLevel languageLevel,
@NotNull final PsiSubstitutor substitutor) {
PsiType parmType = parameter.getType();
if (parmType instanceof PsiClassType) {
parmType = ((PsiClassType)parmType).setLanguageLevel(languageLevel);
}
return substitutor.substitute(parmType);
}
/**
* Compares types with respect to type parameter bounds: e.g. for
* <code>class Foo<T extends Number>{}</code> types Foo<?> and Foo<? extends Number>
* would be equivalent
*/
public static boolean equalOnEquivalentClasses(PsiClassType thisClassType, @NotNull PsiClass aClass, PsiClassType otherClassType, @NotNull PsiClass bClass) {
final PsiClassType capture1 = (PsiClassType)captureToplevelWildcards(thisClassType, aClass);
final PsiClassType capture2 = (PsiClassType)captureToplevelWildcards(otherClassType, bClass);
final PsiClassType.ClassResolveResult result1 = capture1.resolveGenerics();
final PsiClassType.ClassResolveResult result2 = capture2.resolveGenerics();
return equalOnEquivalentClasses(result1.getSubstitutor(), aClass, result2.getSubstitutor(), bClass);
}
  /** @deprecated use {@link #equalOnEquivalentClasses(PsiClassType, PsiClass, PsiClassType, PsiClass)} instead */
  @Deprecated
  public static boolean equalOnClass(@NotNull PsiSubstitutor s1, @NotNull PsiSubstitutor s2, @NotNull PsiClass aClass) {
    // Compares both substitutors on the same class.
    return equalOnEquivalentClasses(s1, aClass, s2, aClass);
  }
  /**
   * @deprecated to remove in v.16
   * Checks if substitutors maps are identical. If substitutor map values contain wildcard type, type parameter bounds are IGNORED.
   * Please use {@link PsiUtil#equalOnEquivalentClasses(PsiClassType, PsiClass, PsiClassType, PsiClass)} instead.
   */
  public static boolean equalOnEquivalentClasses(@NotNull PsiSubstitutor s1, @NotNull PsiClass aClass, @NotNull PsiSubstitutor s2, @NotNull PsiClass bClass) {
    // assume generic class equals to non-generic
    if (aClass.hasTypeParameters() != bClass.hasTypeParameters()) return true;
    final PsiTypeParameter[] typeParameters1 = aClass.getTypeParameters();
    final PsiTypeParameter[] typeParameters2 = bClass.getTypeParameters();
    if (typeParameters1.length != typeParameters2.length) return false;
    // Compare the two classes' type parameters pairwise under their respective substitutors.
    for (int i = 0; i < typeParameters1.length; i++) {
      final PsiType substituted2 = s2.substitute(typeParameters2[i]);
      final PsiType substituted1 = s1.substitute(typeParameters1[i]);
      if (!Comparing.equal(substituted1, substituted2)) return false;
    }
    // For non-static (inner) classes the enclosing classes' substitutions must match as well.
    if (aClass.hasModifierProperty(PsiModifier.STATIC)) return true;
    final PsiClass containingClass1 = aClass.getContainingClass();
    final PsiClass containingClass2 = bClass.getContainingClass();

    if (containingClass1 != null && containingClass2 != null) {
      return equalOnEquivalentClasses(s1, containingClass1, s2, containingClass2);
    }

    // Equivalent only when neither has an enclosing class.
    return containingClass1 == null && containingClass2 == null;
  }
/**
* JLS 15.28
*/
public static boolean isCompileTimeConstant(@NotNull final PsiField field) {
return field.hasModifierProperty(PsiModifier.FINAL)
&& (TypeConversionUtil.isPrimitiveAndNotNull(field.getType()) || field.getType().equalsToText(JAVA_LANG_STRING))
&& field.hasInitializer()
&& isConstantExpression(field.getInitializer());
}
public static boolean allMethodsHaveSameSignature(@NotNull PsiMethod[] methods) {
if (methods.length == 0) return true;
final MethodSignature methodSignature = methods[0].getSignature(PsiSubstitutor.EMPTY);
for (int i = 1; i < methods.length; i++) {
PsiMethod method = methods[i];
if (!methodSignature.equals(method.getSignature(PsiSubstitutor.EMPTY))) return false;
}
return true;
}
@Nullable
public static PsiExpression deparenthesizeExpression(PsiExpression expression) {
while (true) {
if (expression instanceof PsiParenthesizedExpression) {
expression = ((PsiParenthesizedExpression)expression).getExpression();
continue;
}
if (expression instanceof PsiTypeCastExpression) {
expression = ((PsiTypeCastExpression)expression).getOperand();
continue;
}
return expression;
}
}
/**
* Checks whether given class is inner (as opposed to nested)
*
*/
public static boolean isInnerClass(@NotNull PsiClass aClass) {
return !aClass.hasModifierProperty(PsiModifier.STATIC) && aClass.getContainingClass() != null;
}
@Nullable
public static PsiElement findModifierInList(@NotNull final PsiModifierList modifierList, @NonNls String modifier) {
final PsiElement[] children = modifierList.getChildren();
for (PsiElement child : children) {
if (child.getText().equals(modifier)) return child;
}
return null;
}
@Nullable
public static PsiClass getTopLevelClass(@NotNull PsiElement element) {
final PsiFile file = element.getContainingFile();
if (file instanceof PsiClassOwner) {
final PsiClass[] classes = ((PsiClassOwner)file).getClasses();
for (PsiClass aClass : classes) {
if (PsiTreeUtil.isAncestor(aClass, element, false)) return aClass;
}
}
return null;
}
/**
* @param place place to start traversal
* @param aClass level to stop traversal
* @return element with static modifier enclosing place and enclosed by aClass (if not null)
*/
@Nullable
public static PsiModifierListOwner getEnclosingStaticElement(@NotNull PsiElement place, @Nullable PsiClass aClass) {
LOG.assertTrue(aClass == null || !place.isPhysical() || PsiTreeUtil.isContextAncestor(aClass, place, false));
PsiElement parent = place;
while (parent != aClass) {
if (parent instanceof PsiFile) break;
if (parent instanceof PsiModifierListOwner && ((PsiModifierListOwner)parent).hasModifierProperty(PsiModifier.STATIC)) {
return (PsiModifierListOwner)parent;
}
parent = parent.getParent();
}
return null;
}
@Nullable
public static PsiType getTypeByPsiElement(@NotNull final PsiElement element) {
if (element instanceof PsiVariable) {
return ((PsiVariable)element).getType();
}
else if (element instanceof PsiMethod) return ((PsiMethod)element).getReturnType();
return null;
}
  /**
   * Applies capture conversion to the top-level wildcard type arguments of a class type;
   * array types are processed component-wise; any other type is returned unchanged.
   */
  @NotNull
  public static PsiType captureToplevelWildcards(@NotNull final PsiType type, @NotNull final PsiElement context) {
    if (type instanceof PsiClassType) {
      final PsiClassType.ClassResolveResult result = ((PsiClassType)type).resolveGenerics();
      final PsiClass aClass = result.getElement();
      if (aClass != null) {
        final PsiSubstitutor substitutor = result.getSubstitutor();

        // First pass: map every wildcard-substituted type parameter to a fresh captured wildcard.
        PsiSubstitutor captureSubstitutor = substitutor;
        for (PsiTypeParameter typeParameter : typeParametersIterable(aClass)) {
          final PsiType substituted = substitutor.substitute(typeParameter);
          if (substituted instanceof PsiWildcardType) {
            captureSubstitutor = captureSubstitutor.put(typeParameter, PsiCapturedWildcardType.create((PsiWildcardType)substituted, context, typeParameter));
          }
        }

        // Second pass: refine each captured wildcard's bounds using the capture substitutor;
        // the map is allocated lazily so wildcard-free types pay nothing.
        Map<PsiTypeParameter, PsiType> substitutionMap = null;
        for (PsiTypeParameter typeParameter : typeParametersIterable(aClass)) {
          final PsiType substituted = substitutor.substitute(typeParameter);
          if (substituted instanceof PsiWildcardType) {
            if (substitutionMap == null) substitutionMap = new HashMap<PsiTypeParameter, PsiType>(substitutor.getSubstitutionMap());
            substitutionMap.put(typeParameter,
                                captureTypeParameterBounds(typeParameter, substituted, context, captureSubstitutor));
          }
        }

        if (substitutionMap != null) {
          final PsiElementFactory factory = JavaPsiFacade.getInstance(aClass.getProject()).getElementFactory();
          final PsiSubstitutor newSubstitutor = factory.createSubstitutor(substitutionMap);
          return factory.createType(aClass, newSubstitutor);
        }
      }
    }
    else if (type instanceof PsiArrayType) {
      return captureToplevelWildcards(((PsiArrayType)type).getComponentType(), context).createArrayType();
    }

    return type;
  }
  /**
   * Computes the captured type for a type parameter whose substitution is a wildcard:
   * intersects the wildcard's bound with the parameter's declared bounds (substituted via
   * {@code captureSubstitutor}). Non-wildcard substitutions are returned unchanged.
   */
  public static PsiType captureTypeParameterBounds(@NotNull PsiTypeParameter typeParameter, PsiType substituted,
                                                   PsiElement context,
                                                   PsiSubstitutor captureSubstitutor) {
    PsiType oldSubstituted = substituted;
    PsiElement captureContext = context;
    // Unwrap an already-captured wildcard to work on the underlying wildcard.
    if (substituted instanceof PsiCapturedWildcardType) {
      final PsiCapturedWildcardType captured = (PsiCapturedWildcardType)substituted;
      substituted = captured.getWildcard();
      captureContext = captured.getContext();
    }
    PsiType glb = null;
    if (substituted instanceof PsiWildcardType) {
      final PsiType[] boundTypes = typeParameter.getExtendsListTypes();
      PsiManager manager = typeParameter.getManager();
      // Super-wildcards contribute no upper bound; extends-wildcards start from their own bound.
      PsiType originalBound = !((PsiWildcardType)substituted).isSuper() ? ((PsiWildcardType)substituted).getBound() : null;
      glb = originalBound;
      // Fold each substituted declared bound into the greatest lower bound.
      for (PsiType boundType : boundTypes) {
        PsiType substitutedBoundType = captureSubstitutor.substitute(boundType);
        if (substitutedBoundType != null && !(substitutedBoundType instanceof PsiWildcardType) &&
            !substitutedBoundType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)) {
          // Mutually unassignable array bounds cannot be intersected; skip this bound.
          if (originalBound instanceof PsiArrayType &&
              substitutedBoundType instanceof PsiArrayType &&
              !originalBound.isAssignableFrom(substitutedBoundType) &&
              !substitutedBoundType.isAssignableFrom(originalBound)) {
            continue;
          }
          if (originalBound == null ||
              !TypeConversionUtil.erasure(substitutedBoundType).isAssignableFrom(TypeConversionUtil.erasure(originalBound)) &&
              !TypeConversionUtil.erasure(substitutedBoundType).isAssignableFrom(originalBound)) { //erasure is essential to avoid infinite recursion
            if (glb == null) {
              glb = substitutedBoundType;
            }
            else {
              glb = GenericsUtil.getGreatestLowerBound(glb, substitutedBoundType);
            }
          }
        }
      }

      // Rebuild an extends-wildcard from the computed greatest lower bound.
      if (glb != null && !((PsiWildcardType)substituted).isSuper()) {
        substituted = glb instanceof PsiCapturedWildcardType ? ((PsiCapturedWildcardType)glb).getWildcard()
                                                            : PsiWildcardType.createExtends(manager, glb);
      }
    }

    if (captureContext != null) {
      // Reuse the original captured type when the wildcard is unchanged; otherwise take
      // the fresh capture produced by the capture substitutor, then install the refined bound.
      substituted =
        oldSubstituted instanceof PsiCapturedWildcardType && substituted.equals(((PsiCapturedWildcardType)oldSubstituted).getWildcard())
        ? oldSubstituted
        : captureSubstitutor.substitute(typeParameter);
      LOG.assertTrue(substituted instanceof PsiCapturedWildcardType);
      if (glb != null) {
        ((PsiCapturedWildcardType)substituted).setUpperBound(glb);
      }
    }
    return substituted;
  }
public static boolean isInsideJavadocComment(PsiElement element) {
return PsiTreeUtil.getParentOfType(element, PsiDocComment.class, true) != null;
}
@NotNull
public static List<PsiTypeElement> getParameterTypeElements(@NotNull PsiParameter parameter) {
PsiTypeElement typeElement = parameter.getTypeElement();
return typeElement != null && typeElement.getType() instanceof PsiDisjunctionType
? PsiTreeUtil.getChildrenOfTypeAsList(typeElement, PsiTypeElement.class)
: Collections.singletonList(typeElement);
}
public static void checkIsIdentifier(@NotNull PsiManager manager, String text) throws IncorrectOperationException{
if (!PsiNameHelper.getInstance(manager.getProject()).isIdentifier(text)){
throw new IncorrectOperationException(PsiBundle.message("0.is.not.an.identifier", text) );
}
}
@Nullable
public static VirtualFile getJarFile(@NotNull PsiElement candidate) {
VirtualFile file = candidate.getContainingFile().getVirtualFile();
if (file != null && file.getFileSystem().getProtocol().equals("jar")) {
return VfsUtilCore.getVirtualFileForJar(file);
}
return file;
}
public static boolean isAnnotationMethod(PsiElement element) {
if (!(element instanceof PsiAnnotationMethod)) return false;
PsiClass psiClass = ((PsiAnnotationMethod)element).getContainingClass();
return psiClass != null && psiClass.isAnnotationType();
}
  // Shorthand for getMaximumModifierForMember(aClass, true).
  @PsiModifier.ModifierConstant
  public static String getMaximumModifierForMember(final PsiClass aClass) {
    return getMaximumModifierForMember(aClass, true);
  }
@PsiModifier.ModifierConstant
public static String getMaximumModifierForMember(final PsiClass aClass, boolean allowPublicAbstract) {
String modifier = PsiModifier.PUBLIC;
if (!allowPublicAbstract && aClass.hasModifierProperty(PsiModifier.ABSTRACT) && !aClass.isEnum()) {
modifier = PsiModifier.PROTECTED;
}
else if (aClass.hasModifierProperty(PsiModifier.PACKAGE_LOCAL) || aClass.isEnum()) {
modifier = PsiModifier.PACKAGE_LOCAL;
}
else if (aClass.hasModifierProperty(PsiModifier.PRIVATE)) {
modifier = PsiModifier.PRIVATE;
}
return modifier;
}
  /**
   * Returns an iterator over the type parameters visible in owner. Type parameters are iterated in
   * inner-to-outer, right-to-left order.
   */
  @NotNull
  public static Iterator<PsiTypeParameter> typeParametersIterator(@NotNull PsiTypeParameterListOwner owner) {
    return typeParametersIterable(owner).iterator();
  }
@NotNull
public static Iterable<PsiTypeParameter> typeParametersIterable(@NotNull final PsiTypeParameterListOwner owner) {
List<PsiTypeParameter> result = null;
PsiTypeParameterListOwner currentOwner = owner;
while (currentOwner != null) {
PsiTypeParameter[] typeParameters = currentOwner.getTypeParameters();
if (typeParameters.length > 0) {
if (result == null) result = new ArrayList<PsiTypeParameter>(typeParameters.length);
for (int i = typeParameters.length - 1; i >= 0; i--) {
result.add(typeParameters[i]);
}
}
if (currentOwner.hasModifierProperty(PsiModifier.STATIC)) break;
currentOwner = currentOwner.getContainingClass();
}
if (result == null) return EmptyIterable.getInstance();
return result;
}
public static boolean canBeOverriden(@NotNull PsiMethod method) {
PsiClass parentClass = method.getContainingClass();
return parentClass != null &&
!method.isConstructor() &&
!method.hasModifierProperty(PsiModifier.STATIC) &&
!method.hasModifierProperty(PsiModifier.FINAL) &&
!method.hasModifierProperty(PsiModifier.PRIVATE) &&
!(parentClass instanceof PsiAnonymousClass) &&
!parentClass.hasModifierProperty(PsiModifier.FINAL);
}
@NotNull
public static PsiElement[] mapElements(@NotNull ResolveResult[] candidates) {
PsiElement[] result = new PsiElement[candidates.length];
for (int i = 0; i < candidates.length; i++) {
result[i] = candidates[i].getElement();
}
return result;
}
  /**
   * Finds the nearest enclosing class/enum-constant initializer, constructor, or non-static field
   * around the expression; returns null when the nearest such member is an ordinary method or a
   * static field.
   */
  @Nullable
  public static PsiMember findEnclosingConstructorOrInitializer(PsiElement expression) {
    PsiMember parent = PsiTreeUtil.getParentOfType(expression, PsiClassInitializer.class, PsiEnumConstantInitializer.class, PsiMethod.class, PsiField.class);
    // Plain methods don't count -- only constructors do.
    if (parent instanceof PsiMethod && !((PsiMethod)parent).isConstructor()) return null;
    // Static field initializers don't count either.
    if (parent instanceof PsiField && parent.hasModifierProperty(PsiModifier.STATIC)) return null;
    return parent;
  }
public static boolean checkName(@NotNull PsiElement element, @NotNull String name, final PsiElement context) {
if (element instanceof PsiMetaOwner) {
final PsiMetaData data = ((PsiMetaOwner) element).getMetaData();
if (data != null) return name.equals(data.getName(context));
}
return element instanceof PsiNamedElement && name.equals(((PsiNamedElement)element).getName());
}
public static boolean isRawSubstitutor (@NotNull PsiTypeParameterListOwner owner, @NotNull PsiSubstitutor substitutor) {
for (PsiTypeParameter parameter : typeParametersIterable(owner)) {
if (substitutor.substitute(parameter) == null) return true;
}
return false;
}
  // User-data key for attaching a language level to a file; key name suggests it forces
  // that level -- see consumers of FILE_LANGUAGE_LEVEL_KEY (TODO confirm exact semantics).
  public static final Key<LanguageLevel> FILE_LANGUAGE_LEVEL_KEY = Key.create("FORCE_LANGUAGE_LEVEL");
  // Convenience checks: whether the element's effective language level (see getLanguageLevel)
  // is at least the given JDK version.
  public static boolean isLanguageLevel5OrHigher(@NotNull final PsiElement element) {
    return getLanguageLevel(element).isAtLeast(LanguageLevel.JDK_1_5);
  }

  public static boolean isLanguageLevel6OrHigher(@NotNull final PsiElement element) {
    return getLanguageLevel(element).isAtLeast(LanguageLevel.JDK_1_6);
  }

  public static boolean isLanguageLevel7OrHigher(@NotNull final PsiElement element) {
    return getLanguageLevel(element).isAtLeast(LanguageLevel.JDK_1_7);
  }

  public static boolean isLanguageLevel8OrHigher(@NotNull final PsiElement element) {
    return getLanguageLevel(element).isAtLeast(LanguageLevel.JDK_1_8);
  }

  public static boolean isLanguageLevel9OrHigher(@NotNull final PsiElement element) {
    return getLanguageLevel(element).isAtLeast(LanguageLevel.JDK_1_9);
  }
@NotNull
public static LanguageLevel getLanguageLevel(@NotNull PsiElement element) {
if (element instanceof PsiDirectory) {
return JavaDirectoryService.getInstance().getLanguageLevel((PsiDirectory)element);
}
PsiFile file = element.getContainingFile();
if (file instanceof PsiJavaFile) {
return ((PsiJavaFile)file).getLanguageLevel();
}
if (file != null) {
PsiElement context = file.getContext();
if (context != null) {
return getLanguageLevel(context);
}
}
return getLanguageLevel(element.getProject());
}
@NotNull
public static LanguageLevel getLanguageLevel(@NotNull Project project) {
LanguageLevelProjectExtension instance = LanguageLevelProjectExtension.getInstance(project);
return instance != null ? instance.getLanguageLevel() : LanguageLevel.HIGHEST;
}
public static boolean isInstantiatable(@NotNull PsiClass clazz) {
return !clazz.hasModifierProperty(PsiModifier.ABSTRACT) &&
clazz.hasModifierProperty(PsiModifier.PUBLIC) &&
hasDefaultConstructor(clazz);
}
  // Default: only public no-arg constructors qualify; modifiers are checked.
  public static boolean hasDefaultConstructor(@NotNull PsiClass clazz) {
    return hasDefaultConstructor(clazz, false);
  }

  // allowProtected additionally accepts protected no-arg constructors.
  public static boolean hasDefaultConstructor(@NotNull PsiClass clazz, boolean allowProtected) {
    return hasDefaultConstructor(clazz, allowProtected, true);
  }

  // checkModifiers=false accepts a no-arg constructor of any visibility.
  public static boolean hasDefaultConstructor(@NotNull PsiClass clazz, boolean allowProtected, boolean checkModifiers) {
    return hasDefaultCtrInHierarchy(clazz, allowProtected, checkModifiers, null);
  }
private static boolean hasDefaultCtrInHierarchy(@NotNull PsiClass clazz, boolean allowProtected, boolean checkModifiers, @Nullable Set<PsiClass> visited) {
final PsiMethod[] constructors = clazz.getConstructors();
if (constructors.length > 0) {
for (PsiMethod cls: constructors) {
if ((!checkModifiers || cls.hasModifierProperty(PsiModifier.PUBLIC) ||
allowProtected && cls.hasModifierProperty(PsiModifier.PROTECTED)) &&
cls.getParameterList().getParametersCount() == 0) {
return true;
}
}
}
else {
final PsiClass superClass = clazz.getSuperClass();
if (superClass == null) {
return true;
}
if (visited == null) visited = new THashSet<PsiClass>();
if (!visited.add(clazz)) return false;
return hasDefaultCtrInHierarchy(superClass, true, true, visited);
}
return false;
}
@Nullable
public static PsiType extractIterableTypeParameter(@Nullable PsiType psiType, final boolean eraseTypeParameter) {
final PsiType type = substituteTypeParameter(psiType, CommonClassNames.JAVA_LANG_ITERABLE, 0, eraseTypeParameter);
return type != null ? type : substituteTypeParameter(psiType, CommonClassNames.JAVA_UTIL_COLLECTION, 0, eraseTypeParameter);
}
  /**
   * Returns the type substituted for the {@code typeParamIndex}-th type parameter of
   * {@code superClass} when viewing {@code psiType} as that superclass.
   * Returns null when psiType is not a class type inheriting from superClass;
   * java.lang.Object when the base class has fewer type parameters than requested.
   *
   * @param eraseTypeParameter when true, a raw (null) substitution yields the parameter's erasure
   */
  @Nullable
  public static PsiType substituteTypeParameter(@Nullable final PsiType psiType, @NotNull final String superClass, final int typeParamIndex,
                                                final boolean eraseTypeParameter) {
    if (psiType == null) return null;

    if (!(psiType instanceof PsiClassType)) return null;

    final PsiClassType classType = (PsiClassType)psiType;
    final PsiClassType.ClassResolveResult classResolveResult = classType.resolveGenerics();
    final PsiClass psiClass = classResolveResult.getElement();
    if (psiClass == null) return null;

    final PsiClass baseClass = JavaPsiFacade.getInstance(psiClass.getProject()).findClass(superClass, psiClass.getResolveScope());
    if (baseClass == null) return null;

    if (!psiClass.isEquivalentTo(baseClass) && !psiClass.isInheritor(baseClass, true)) return null;

    final PsiTypeParameter[] parameters = baseClass.getTypeParameters();
    if (parameters.length <= typeParamIndex) return PsiType.getJavaLangObject(psiClass.getManager(), psiClass.getResolveScope());

    // Translate the substitutor up to the base class, then look up the requested parameter.
    final PsiSubstitutor substitutor = TypeConversionUtil.getSuperClassSubstitutor(baseClass, psiClass, classResolveResult.getSubstitutor());
    final PsiType type = substitutor.substitute(parameters[typeParamIndex]);
    if (type == null && eraseTypeParameter) {
      return TypeConversionUtil.typeParameterErasure(parameters[typeParamIndex]);
    }
    return type;
  }
  // Orders PSI elements by their position (delegates to compareElementsByPosition).
  public static final Comparator<PsiElement> BY_POSITION = new Comparator<PsiElement>() {
    @Override
    public int compare(PsiElement o1, PsiElement o2) {
      return compareElementsByPosition(o1, o2);
    }
  };
public static void setModifierProperty(@NotNull PsiModifierListOwner owner, @NotNull @PsiModifier.ModifierConstant String property, boolean value) {
final PsiModifierList modifierList = owner.getModifierList();
assert modifierList != null : owner;
modifierList.setModifierProperty(property, value);
}
public static boolean isTryBlock(@Nullable final PsiElement element) {
if (element == null) return false;
final PsiElement parent = element.getParent();
return parent instanceof PsiTryStatement && element == ((PsiTryStatement)parent).getTryBlock();
}
public static boolean isElseBlock(@Nullable final PsiElement element) {
if (element == null) return false;
final PsiElement parent = element.getParent();
return parent instanceof PsiIfStatement && element == ((PsiIfStatement)parent).getElseBranch();
}
  // True when the element is a Java token of exactly the given type.
  public static boolean isJavaToken(@Nullable PsiElement element, IElementType type) {
    return element instanceof PsiJavaToken && ((PsiJavaToken)element).getTokenType() == type;
  }

  // True when the element is a Java token whose type belongs to the given set.
  public static boolean isJavaToken(@Nullable PsiElement element, @NotNull TokenSet types) {
    return element instanceof PsiJavaToken && types.contains(((PsiJavaToken)element).getTokenType());
  }
public static boolean isCatchParameter(@Nullable final PsiElement element) {
return element instanceof PsiParameter && element.getParent() instanceof PsiCatchSection;
}
public static boolean isIgnoredName(@Nullable final String name) {
return "ignore".equals(name) || "ignored".equals(name);
}
@Nullable
public static PsiMethod getResourceCloserMethod(@NotNull PsiResourceListElement resource) {
PsiType resourceType = resource.getType();
return resourceType instanceof PsiClassType ? getResourceCloserMethodForType((PsiClassType)resourceType) : null;
}
  /** @deprecated use {@link #getResourceCloserMethod(PsiResourceListElement)} (to be removed in IDEA 17) */
  @SuppressWarnings("unused")
  public static PsiMethod getResourceCloserMethod(@NotNull PsiResourceVariable resource) {
    // Delegates to the replacement overload via an upcast.
    return getResourceCloserMethod((PsiResourceListElement)resource);
  }
  /**
   * Finds the close() method a try-with-resources statement would invoke for the given
   * resource type: resolves the resource class, verifies it is a java.lang.AutoCloseable,
   * and returns its close() override. Returns null when any of those steps fails.
   */
  @Nullable
  public static PsiMethod getResourceCloserMethodForType(@NotNull final PsiClassType resourceType) {
    final PsiClass resourceClass = resourceType.resolve();
    if (resourceClass == null) return null;

    final Project project = resourceClass.getProject();
    final JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
    final PsiClass autoCloseable = facade.findClass(CommonClassNames.JAVA_LANG_AUTO_CLOSEABLE, ProjectScope.getLibrariesScope(project));
    if (autoCloseable == null) return null;

    // The resource class must actually inherit from AutoCloseable.
    if (JavaClassSupers.getInstance().getSuperClassSubstitutor(autoCloseable, resourceClass, resourceType.getResolveScope(), PsiSubstitutor.EMPTY) == null) return null;

    // Locate AutoCloseable.close() and look up the resource class's implementation of it.
    final PsiMethod[] closes = autoCloseable.findMethodsByName("close", false);
    return closes.length == 1 ? resourceClass.findMethodBySignature(closes[0], true) : null;
  }
@Nullable
public static PsiExpression skipParenthesizedExprDown(PsiExpression initializer) {
while (initializer instanceof PsiParenthesizedExpression) {
initializer = ((PsiParenthesizedExpression)initializer).getExpression();
}
return initializer;
}
public static PsiElement skipParenthesizedExprUp(PsiElement parent) {
while (parent instanceof PsiParenthesizedExpression) {
parent = parent.getParent();
}
return parent;
}
  // Asserts type validity with no extra diagnostic message.
  public static void ensureValidType(@NotNull PsiType type) {
    ensureValidType(type, null);
  }
  /**
   * Asserts that the type (and its annotations) are still valid, throwing with diagnostics
   * otherwise; customMessage, when given, is attached to every failure it raises.
   */
  public static void ensureValidType(@NotNull PsiType type, @Nullable String customMessage) {
    if (!type.isValid()) {
      TimeoutUtil.sleep(1); // to see if processing in another thread suddenly makes the type valid again (which is a bug)
      if (type.isValid()) {
        LOG.error("PsiType resurrected: " + type + " of " + type.getClass() + " " + customMessage);
        return;
      }
      if (type instanceof PsiClassType) {
        // Resolving an invalid class type should surface the underlying invalid element.
        try {
          PsiClass psiClass = ((PsiClassType)type).resolve(); // should throw exception
          if (psiClass != null) {
            ensureValid(psiClass);
          }
        }
        catch (PsiInvalidElementAccessException e) {
          throw customMessage == null? e : new RuntimeException(customMessage, e);
        }
      }
      throw new AssertionError("Invalid type: " + type + " of class " + type.getClass() + " " + customMessage);
    }

    // Even a valid type may carry invalid annotations -- check each one.
    for (PsiAnnotation annotation : type.getAnnotations()) {
      try {
        PsiUtilCore.ensureValid(annotation);
      }
      catch (PsiInvalidElementAccessException e) {
        throw customMessage == null? e : new RuntimeException(customMessage, e);
      }
    }
  }
@Nullable
public static String getMemberQualifiedName(@NotNull PsiMember member) {
if (member instanceof PsiClass) {
return ((PsiClass)member).getQualifiedName();
}
PsiClass containingClass = member.getContainingClass();
if (containingClass == null) return null;
String className = containingClass.getQualifiedName();
if (className == null) return null;
return className + "." + member.getName();
}
static boolean checkSameExpression(PsiElement templateExpr, final PsiExpression expression) {
  // Compare the template against the candidate with any surrounding parentheses removed.
  final PsiExpression unwrapped = skipParenthesizedExprDown(expression);
  return templateExpr.equals(unwrapped);
}
public static boolean isCondition(PsiElement expr, PsiElement parent) {
  // Pick out the condition expression for every statement/expression kind
  // that has one; anything else can never host expr as a condition.
  final PsiExpression condition;
  if (parent instanceof PsiIfStatement) {
    condition = ((PsiIfStatement)parent).getCondition();
  }
  else if (parent instanceof PsiWhileStatement) {
    condition = ((PsiWhileStatement)parent).getCondition();
  }
  else if (parent instanceof PsiForStatement) {
    condition = ((PsiForStatement)parent).getCondition();
  }
  else if (parent instanceof PsiDoWhileStatement) {
    condition = ((PsiDoWhileStatement)parent).getCondition();
  }
  else if (parent instanceof PsiConditionalExpression) {
    condition = ((PsiConditionalExpression)parent).getCondition();
  }
  else {
    return false;
  }
  // expr is "the condition" when it equals the condition modulo parentheses.
  return checkSameExpression(expr, condition);
}
/**
 * Collects every {@code return} statement lexically inside the body of {@code method}
 * (nested classes and lambdas excluded — see {@code addReturnStatements}).
 *
 * @param method method whose body is scanned; an absent body yields an empty array
 * @return the return statements found, possibly empty
 */
public static PsiReturnStatement[] findReturnStatements(@NotNull PsiMethod method) {
  return findReturnStatements(method.getBody());
}
public static PsiReturnStatement[] findReturnStatements(@Nullable PsiCodeBlock body) {
  // Accumulate matches into a list, then hand back the expected array type.
  final ArrayList<PsiReturnStatement> statements = new ArrayList<PsiReturnStatement>();
  if (body != null) {
    addReturnStatements(statements, body);
  }
  return statements.toArray(new PsiReturnStatement[statements.size()]);
}
private static void addReturnStatements(ArrayList<PsiReturnStatement> vector, PsiElement element) {
  if (element instanceof PsiReturnStatement) {
    vector.add((PsiReturnStatement)element);
    return;
  }
  // Do not descend into nested classes or lambdas: their return statements
  // belong to a different method scope.
  if (element instanceof PsiClass || element instanceof PsiLambdaExpression) {
    return;
  }
  for (PsiElement child : element.getChildren()) {
    addReturnStatements(vector, child);
  }
}
}
| |
package com.maxleap.code.impl;
import com.maxleap.code.*;
import com.maxleap.las.sdk.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.maxleap.code.MLException;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.*;
/**
*
*/
/**
 * REST-backed implementation of {@link MLClassManager}: every operation issues an
 * HTTP request against the MaxLeap "classes" API for entity type {@code T} and maps
 * the JSON response back onto SDK result objects. An optional
 * {@link MLClassManagerHook} is consulted before and after create/update/delete.
 */
public class MLClassManagerImpl<T> implements MLClassManager<T> {
  private static final Logger LOGGER = LoggerFactory.getLogger(MLClassManagerImpl.class);
  // Lifecycle hook; may be null, in which case all before-hooks trivially succeed.
  private MLClassManagerHook<T> hook;
  // Entity type, used for JSON deserialization and to derive the REST path.
  private Class<T> entityClazz;
  // Base endpoint for this entity: <prefix>/classes/<SimpleName>.
  private String apiAddress;

  public MLClassManagerImpl(MLClassManagerHook<T> hook, Class<T> entityClazz) {
    this.hook = hook;
    this.entityClazz = entityClazz;
    this.apiAddress = CloudCodeContants.DEFAULT_API_ADDRESS_PREFIX + "/classes/" + entityClazz.getSimpleName();
  }

  /**
   * Creates {@code object}, running the before/after hooks when configured.
   *
   * @throws MLException if the before-hook rejects the object (reported via the
   *         returned SaveResult), or any HTTP/JSON failure occurs
   */
  @Override
  public SaveResult<T> create(T object, UserPrincipal userPrincipal) throws MLException {
    try {
      BeforeResult<T> beforeResult = hook == null ? new BeforeResult<T>(object, true) : hook.beforeCreate(object, userPrincipal);
      if (!beforeResult.isResult()) return new SaveResult<T>(beforeResult.getFailMessage());
      String response = WebUtils.doPost(apiAddress, CloudCodeContants.getHeaders(userPrincipal), MLJsonParser.asJson(object), CloudCodeContants.DEFAULT_TIMEOUT, CloudCodeContants.DEFAULT_READ_TIMEOUT);
      LOGGER.info("get response of create[" + apiAddress + "]:" + response);
      SaveMsg saveMsg = MLJsonParser.asObject(response, SaveMsg.class);
      // Keep the type parameter (was a raw 'SaveResult') to avoid unchecked warnings.
      SaveResult<T> saveResult = new SaveResult<T>(beforeResult, saveMsg);
      if (hook == null) return saveResult;
      AfterResult afterResult = hook.afterCreate(beforeResult, saveMsg, userPrincipal);
      if (!afterResult.isSuccess()) saveResult.setFailMessage(afterResult.getFailMessage());
      return saveResult;
    } catch (Exception e) {
      throw new MLException(e);
    }
  }

  /** Finds objects matching {@code query} without requesting a total count. */
  @Override
  public FindMsg<T> find(MLQuery query, UserPrincipal userPrincipal) {
    return find(query, false, userPrincipal);
  }

  /**
   * Finds objects matching {@code query}.
   *
   * @param count when true, the server is asked for a total count as well
   *        (assumes the response then contains a "count" field — TODO confirm)
   */
  @Override
  public FindMsg<T> find(MLQuery query, boolean count, UserPrincipal userPrincipal) throws MLException {
    try {
      String postQuery = serializeLasQueryForPostQuest(query, count);
      String response = WebUtils.doPost(apiAddress + "/query", CloudCodeContants.getHeaders(userPrincipal), postQuery, CloudCodeContants.DEFAULT_TIMEOUT, CloudCodeContants.DEFAULT_READ_TIMEOUT);
      LOGGER.info("get response of find[" + apiAddress + "/query](" + postQuery + "):" + response);
      JsonNode responseJson = MLJsonParser.asJsonNode(response);
      ArrayNode results = (ArrayNode) responseJson.get("results");
      List<T> r = new ArrayList<T>();
      if (results == null || results.size() == 0)
        return new FindMsg<T>(count ? responseJson.get("count").asInt() : 0, r);
      for (JsonNode result : results) {
        r.add(MLJsonParser.asObject(result.toString(), entityClazz));
      }
      return new FindMsg<T>(count ? responseJson.get("count").asInt() : 0, r);
    } catch (Exception e) {
      throw new MLException(e);
    }
  }

  /**
   * Fetches a single object by id.
   *
   * @return the object, or null when the server responds with an empty JSON object
   */
  @Override
  public T findById(String id, UserPrincipal userPrincipal) throws MLException {
    try {
      String response = WebUtils.doGet(apiAddress + "/" + id, CloudCodeContants.getHeaders(userPrincipal), null);
      LOGGER.info("get response of findById[" + apiAddress + "/" + id + "]:" + response);
      if ("{}".equals(response)) return null;
      return MLJsonParser.asObject(response, entityClazz);
    } catch (IOException e) {
      throw new MLException(e);
    }
  }

  /**
   * Updates the object with the given id; before/after hooks run when configured.
   * Note: unlike create/delete, a rejecting before-hook here raises MLException
   * rather than returning a failure result.
   */
  @Override
  public UpdateMsg update(String id, MLUpdate update, UserPrincipal userPrincipal) throws MLException {
    BeforeResult<MLUpdate> beforeResult = hook == null ? new BeforeResult<MLUpdate>(update, true) : hook.beforeUpdate(id, update, userPrincipal);
    if (!beforeResult.isResult()) throw new MLException(beforeResult.getFailMessage());
    try {
      String response = WebUtils.doPut(apiAddress + "/" + id, CloudCodeContants.getHeaders(userPrincipal), MLJsonParser.asJson(update.update()), CloudCodeContants.DEFAULT_TIMEOUT, CloudCodeContants.DEFAULT_READ_TIMEOUT);
      LOGGER.info("get response of update[" + apiAddress + "/" + id + "](" + update.update() + "):" + response);
      UpdateMsg updateMsg = MLJsonParser.asObject(response, UpdateMsg.class);
      if (hook != null) hook.afterUpdate(id, beforeResult, updateMsg, userPrincipal);
      return updateMsg;
    } catch (IOException e) {
      throw new MLException(e);
    }
  }

  /** Bulk update of all objects matching {@code query}. No hooks are invoked. */
  @Override
  public UpdateMsg updateByQuery(MLQuery query, MLUpdate update, UserPrincipal userPrincipal) throws MLException {
    try {
      String response = WebUtils.doPost(apiAddress + "/update?where=" + URLEncoder.encode(MLJsonParser.asJson(query.query()), "UTF-8"), CloudCodeContants.getHeaders(userPrincipal), MLJsonParser.asJson(update.update()), CloudCodeContants.DEFAULT_TIMEOUT, CloudCodeContants.DEFAULT_READ_TIMEOUT);
      // Fixed log message: it previously printed the MLUpdate object in the URL
      // position instead of the actual "/update" path.
      LOGGER.info("get response of updateByQuery[" + apiAddress + "/update](where=" + query.query() + ")(" + update.update() + "):" + response);
      return MLJsonParser.asObject(response, UpdateMsg.class);
    } catch (IOException e) {
      throw new MLException(e);
    }
  }

  /** Deletes a single object by id; before/after hooks run when configured. */
  @Override
  public DeleteResult delete(String id, UserPrincipal userPrincipal) throws MLException {
    BeforeResult<String> beforeResult = hook == null ? new BeforeResult<String>(id, true) : hook.beforeDelete(id, userPrincipal);
    if (!beforeResult.isResult()) return new DeleteResult(beforeResult.getFailMessage());
    try {
      String response = WebUtils.doDelete(apiAddress + "/" + id, CloudCodeContants.getHeaders(userPrincipal), null);
      LOGGER.info("get response of delete[" + apiAddress + "/" + id + "]:" + response);
      DeleteMsg deleteMsg = MLJsonParser.asObject(response, DeleteMsg.class);
      DeleteResult deleteResult = new DeleteResult(beforeResult, deleteMsg);
      if (hook == null) return deleteResult;
      AfterResult afterResult = hook.afterDelete(beforeResult, deleteMsg, userPrincipal);
      if (!afterResult.isSuccess()) deleteResult.setFailMessage(afterResult.getFailMessage());
      return deleteResult;
    } catch (Exception e) {
      throw new MLException(e);
    }
  }

  /**
   * Batch delete of at most 50 ids.
   * NOTE(review): a null {@code ids} passes the size guard but NPEs in the loop
   * below (surfacing as MLException) — confirm whether null should be rejected earlier.
   */
  @Override
  public DeleteResult delete(String[] ids, UserPrincipal userPrincipal) {
    if (ids != null && ids.length > 50) throw new MLException("delete bach max limit 50.");
    try {
      BeforeResult<String[]> beforeResult = hook == null ? new BeforeResult<String[]>(ids, true) : hook.beforeDelete(ids, userPrincipal);
      if (!beforeResult.isResult()) return new DeleteResult(beforeResult.getFailMessage());
      ArrayNode arrays = JsonNodeFactory.instance.arrayNode();
      for (String id : ids) arrays.add(id);
      ObjectNode params = JsonNodeFactory.instance.objectNode();
      // NOTE: ObjectNode.put(String, JsonNode) is deprecated in newer Jackson in
      // favour of set(); kept for compatibility with the bundled Jackson version.
      params.put("objectIds", arrays);
      String response = WebUtils.doPost(apiAddress + "/delete", CloudCodeContants.getHeaders(userPrincipal), params.toString(), CloudCodeContants.DEFAULT_TIMEOUT, CloudCodeContants.DEFAULT_READ_TIMEOUT);
      LOGGER.info("get response of deleteBatch[" + apiAddress + "/delete](" + ids + "):" + response);
      return new DeleteResult<String[]>(beforeResult, MLJsonParser.asObject(response, DeleteMsg.class));
    } catch (Exception e) {
      throw new MLException(e);
    }
  }

  // Convenience overloads without a principal: delegate with a null UserPrincipal.

  @Override
  public SaveResult<T> create(T object) throws MLException {
    return this.create(object, null);
  }
  @Override
  public FindMsg<T> find(MLQuery query) throws MLException {
    return this.find(query, null);
  }
  @Override
  public FindMsg<T> find(MLQuery query, boolean count) throws MLException {
    return this.find(query, count, null);
  }
  @Override
  public T findById(String id) throws MLException {
    return this.findById(id, null);
  }
  @Override
  public UpdateMsg update(String id, MLUpdate update) throws MLException {
    return this.update(id, update, null);
  }
  @Override
  public UpdateMsg updateByQuery(MLQuery query, MLUpdate update) throws MLException {
    return this.updateByQuery(query, update, null);
  }
  @Override
  public DeleteResult delete(String id) throws MLException {
    return this.delete(id, null);
  }
  @Override
  public DeleteResult delete(String[] ids) throws MLException {
    return this.delete(ids, null);
  }

  /**
   * Serializes an MLQuery into the JSON body expected by the "/query" endpoint.
   *
   * @param count when TRUE, request a total count; null is now treated as false
   *        instead of throwing NullPointerException on unboxing
   */
  String serializeLasQueryForPostQuest(MLQuery lasQuery, Boolean count) {
    Map<String, Object> map = new HashMap<String, Object>();
    if (lasQuery.query() != null) map.put("where", MLJsonParser.asJson(lasQuery.query()));
    if (lasQuery.sort() != null) map.put("order", lasQuery.sort());
    if (lasQuery.keys() != null) map.put("keys", lasQuery.keys());
    if (lasQuery.includes() != null) map.put("include", lasQuery.includes());
    map.put("limit", lasQuery.limit());
    map.put("skip", lasQuery.skip());
    // Null-safe: previously 'if (count)' unboxed and NPE'd on a null Boolean.
    if (Boolean.TRUE.equals(count)) map.put("count", 1);
    // map.put("excludeKeys", null); Unsupported.
    return MLJsonParser.asJson(map);
  }
}
| |
/*
* Copyright (c) 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.api.client.xml;
import com.google.api.client.util.DateTime;
import com.google.api.client.util.DataUtil;
import com.google.api.client.util.FieldInfo;
import org.xmlpull.v1.XmlSerializer;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* @since 2.2
* @author Yaniv Inbar
*/
public final class XmlNamespaceDictionary {
  /**
   * Map from namespace alias (for example {@code "atom"}) to namespace URI.
   * Public by design; prefer mutating it through {@link #addNamespace}.
   */
  public final HashMap<String, String> namespaceAliasToUriMap =
      new HashMap<String, String>();
  /**
   * Adds a known namespace of the given alias and URI.
   *
   * @param alias alias
   * @param uri namespace URI
   * @throws NullPointerException if {@code alias} or {@code uri} is null
   * @throws IllegalArgumentException if {@code alias} is already bound to a different URI
   */
  public void addNamespace(String alias, String uri) {
    if (alias == null || uri == null) {
      throw new NullPointerException();
    }
    HashMap<String, String> namespaceAliasToUriMap =
        this.namespaceAliasToUriMap;
    String knownUri = namespaceAliasToUriMap.get(alias);
    if (!uri.equals(knownUri)) {
      if (knownUri != null) {
        // An alias may only ever be bound to a single URI.
        throw new IllegalArgumentException("expected namespace alias <" + alias
            + "> to be <" + knownUri + "> but encountered <" + uri + ">");
      }
      namespaceAliasToUriMap.put(alias, uri);
    }
  }
  /**
   * Shows a debug string representation of an element data object of key/value
   * pairs.
   *
   * @param element element data object ({@link GenericXml}, {@link Map}, or any
   *        object with public fields)
   * @param elementName optional XML element local name prefixed by its
   *        namespace alias -- for example {@code "atom:entry"} -- or {@code
   *        null} to make up something
   */
  public String toStringOf(String elementName, Object element) {
    try {
      StringWriter writer = new StringWriter();
      XmlSerializer serializer = Xml.createSerializer();
      serializer.setOutput(writer);
      // errorOnUnknown=false: debug output is best-effort, unknown aliases
      // render as "http://unknown/<alias>" instead of throwing.
      serialize(serializer, elementName, element, false);
      return writer.toString();
    } catch (IOException e) {
      // A StringWriter never really throws, so surface as a programming error.
      throw new IllegalArgumentException(e);
    }
  }
  /**
   * Serializes the given element data object as a complete XML document,
   * throwing on any unknown namespace alias.
   *
   * @param element element data object ({@link GenericXml}, {@link Map}, or any
   *        object with public fields)
   * @param elementNamespaceUri XML namespace URI or {@code null} for no
   *        namespace
   * @param elementLocalName XML local name
   * @throws IOException I/O exception
   */
  public void serialize(XmlSerializer serializer, String elementNamespaceUri,
      String elementLocalName, Object element) throws IOException {
    serialize(serializer, elementNamespaceUri, elementLocalName, element, true);
  }
  /**
   * Serializes the given element data object as a complete XML document,
   * throwing on any unknown namespace alias.
   *
   * @param element element data object ({@link GenericXml}, {@link Map}, or any
   *        object with public fields)
   * @param elementName XML element local name prefixed by its namespace alias
   * @throws IOException I/O exception
   */
  public void serialize(XmlSerializer serializer, String elementName,
      Object element) throws IOException {
    serialize(serializer, elementName, element, true);
  }
  // Writes the document prologue and namespace prefixes, serializes the root
  // element addressed by namespace URI + local name, then ends the document.
  private void serialize(XmlSerializer serializer, String elementNamespaceUri,
      String elementLocalName, Object element, boolean errorOnUnknown)
      throws IOException {
    startDoc(serializer, element, errorOnUnknown, elementNamespaceUri)
        .serialize(serializer, elementNamespaceUri, elementLocalName);
    serializer.endDocument();
  }
  // Same as above, but the root element is addressed by an "alias:name" string.
  private void serialize(XmlSerializer serializer, String elementName,
      Object element, boolean errorOnUnknown) throws IOException {
    startDoc(serializer, element, errorOnUnknown, null).serialize(serializer,
        elementName);
    serializer.endDocument();
  }
  /**
   * Starts the XML document, declares a prefix for every namespace alias used
   * anywhere in {@code element}, and guarantees a prefix for
   * {@code extraNamespace} (the root's namespace) even if no alias in the data
   * references it.
   *
   * @return a serializer for the root element
   */
  private ElementSerializer startDoc(XmlSerializer serializer, Object element,
      boolean errorOnUnknown, String extraNamespace) throws IOException {
    serializer.startDocument(null, null);
    SortedSet<String> aliases = new TreeSet<String>();
    computeAliases(element, aliases);
    HashMap<String, String> namespaceAliasToUriMap =
        this.namespaceAliasToUriMap;
    boolean foundExtra = extraNamespace == null;
    for (String alias : aliases) {
      // NOTE(review): an alias missing from the map makes uri null and
      // uri.equals(...) below throw NPE — assumed never to happen in practice.
      String uri = namespaceAliasToUriMap.get(alias);
      serializer.setPrefix(alias, uri);
      if (!foundExtra && uri.equals(extraNamespace)) {
        foundExtra = true;
      }
    }
    if (!foundExtra) {
      // The root's namespace was not referenced by the data; find any alias
      // mapped to it and declare that prefix.
      for (Map.Entry<String, String> entry : namespaceAliasToUriMap.entrySet()) {
        if (extraNamespace.equals(entry.getValue())) {
          serializer.setPrefix(entry.getKey(), extraNamespace);
          break;
        }
      }
    }
    return new ElementSerializer(element, errorOnUnknown);
  }
  /**
   * Recursively collects into {@code aliases} every namespace alias referenced
   * by the keys of {@code element}. Keys look like "alias:name" for elements,
   * "@alias:name" for namespaced attributes, and "text()" for text content.
   */
  private void computeAliases(Object element, SortedSet<String> aliases) {
    for (Map.Entry<String, Object> entry : DataUtil.mapOf(element).entrySet()) {
      Object value = entry.getValue();
      if (value != null) {
        String name = entry.getKey();
        if (!"text()".equals(name)) {
          int colon = name.indexOf(':');
          boolean isAttribute = name.charAt(0) == '@';
          // Elements always contribute an alias ("" for the default namespace);
          // attributes only when explicitly prefixed.
          if (colon != -1 || !isAttribute) {
            String alias =
                colon == -1 ? "" : name.substring(
                    name.charAt(0) == '@' ? 1 : 0, colon);
            aliases.add(alias);
          }
          // Recurse into non-primitive element values (attributes are leaves).
          if (!isAttribute && !FieldInfo.isPrimitive(value)) {
            computeAliases(value, aliases);
          }
        }
      }
    }
  }
  /**
   * Serializes one element: its value is split at construction time into text
   * content, attributes, and sub-elements, which {@code serialize} then writes.
   */
  class ElementSerializer {
    private final boolean errorOnUnknown;
    Object textValue = null;
    final List<String> attributeNames = new ArrayList<String>();
    final List<Object> attributeValues = new ArrayList<Object>();
    final List<String> subElementNames = new ArrayList<String>();
    final List<Object> subElementValues = new ArrayList<Object>();
    ElementSerializer(Object elementValue, boolean errorOnUnknown) {
      this.errorOnUnknown = errorOnUnknown;
      Class<?> valueClass = elementValue.getClass();
      if (FieldInfo.isPrimitive(valueClass)) {
        // A primitive value is the element's entire text content.
        this.textValue = elementValue;
      } else {
        // Partition key/value pairs: "text()" -> text, "@name" -> attribute,
        // anything else -> sub-element.
        for (Map.Entry<String, Object> entry : DataUtil.mapOf(elementValue)
            .entrySet()) {
          Object fieldValue = entry.getValue();
          if (fieldValue != null) {
            String fieldName = entry.getKey();
            if ("text()".equals(fieldName)) {
              this.textValue = fieldValue;
            } else if (fieldName.charAt(0) == '@') {
              this.attributeNames.add(fieldName.substring(1));
              this.attributeValues.add(fieldValue);
            } else {
              this.subElementNames.add(fieldName);
              this.subElementValues.add(fieldValue);
            }
          }
        }
      }
    }
    /**
     * Resolves an alias to its namespace URI. Never returns null: unknown
     * aliases either throw (when {@code errorOnUnknown}) or map to a synthetic
     * "http://unknown/" URI.
     */
    String getNamespaceUriForAlias(String alias) {
      String result =
          XmlNamespaceDictionary.this.namespaceAliasToUriMap.get(alias);
      if (result == null) {
        if (this.errorOnUnknown) {
          throw new IllegalArgumentException("unrecognized alias: "
              + (alias.length() == 0 ? "(default)" : alias));
        }
        return "http://unknown/" + alias;
      }
      return result;
    }
    // Serializes this element addressed as "alias:localName" (or bare name for
    // the default namespace); elementName may be null when making up a name.
    void serialize(XmlSerializer serializer, String elementName)
        throws IOException {
      String elementLocalName = null;
      String elementNamespaceUri = null;
      if (elementName != null) {
        int colon = elementName.indexOf(':');
        elementLocalName = elementName.substring(colon + 1);
        String alias = colon == -1 ? "" : elementName.substring(0, colon);
        elementNamespaceUri = getNamespaceUriForAlias(alias);
        // NOTE(review): dead branch — getNamespaceUriForAlias never returns null.
        if (elementNamespaceUri == null) {
          elementNamespaceUri = "http://unknown/" + alias;
        }
      }
      serialize(serializer, elementNamespaceUri, elementLocalName);
    }
    // Writes start tag, attributes, text, sub-elements (lists flattened into
    // repeated elements), and end tag.
    void serialize(XmlSerializer serializer, String elementNamespaceUri,
        String elementLocalName) throws IOException {
      boolean errorOnUnknown = this.errorOnUnknown;
      if (elementLocalName == null) {
        if (errorOnUnknown) {
          throw new IllegalArgumentException("XML name not specified");
        }
        elementLocalName = "unknownName";
      }
      serializer.startTag(elementNamespaceUri, elementLocalName);
      // attributes
      List<String> attributeNames = this.attributeNames;
      List<Object> attributeValues = this.attributeValues;
      int num = attributeNames.size();
      for (int i = 0; i < num; i++) {
        String attributeName = attributeNames.get(i);
        int colon = attributeName.indexOf(':');
        String attributeLocalName = attributeName.substring(colon + 1);
        // Unprefixed attributes carry no namespace.
        String attributeNamespaceUri =
            colon == -1 ? null : getNamespaceUriForAlias(attributeName
                .substring(0, colon));
        serializer.attribute(attributeNamespaceUri, attributeLocalName,
            toSerializedValue(attributeValues.get(i)));
      }
      // text
      Object textValue = this.textValue;
      if (textValue != null) {
        serializer.text(toSerializedValue(textValue));
      }
      // elements
      List<String> subElementNames = this.subElementNames;
      List<Object> subElementValues = this.subElementValues;
      num = subElementNames.size();
      for (int i = 0; i < num; i++) {
        Object subElementValue = subElementValues.get(i);
        String subElementName = subElementNames.get(i);
        if (subElementValue instanceof List<?>) {
          // A List value means a repeated element with the same name.
          for (Object subElement : (List<?>) subElementValue) {
            new ElementSerializer(subElement, errorOnUnknown).serialize(
                serializer, subElementName);
          }
        } else {
          new ElementSerializer(subElementValue, errorOnUnknown).serialize(
              serializer, subElementName);
        }
      }
      serializer.endTag(elementNamespaceUri, elementLocalName);
    }
  }
  /**
   * Converts a value to its XML text form. Infinities use the XML Schema
   * "INF"/"-INF" spelling; dates use RFC 3339.
   *
   * @throws IllegalArgumentException for unsupported value types
   */
  static String toSerializedValue(Object value) {
    if (value instanceof Float) {
      Float f = (Float) value;
      if (f.floatValue() == Float.POSITIVE_INFINITY) {
        return "INF";
      }
      if (f.floatValue() == Float.NEGATIVE_INFINITY) {
        return "-INF";
      }
    }
    if (value instanceof Double) {
      Double d = (Double) value;
      if (d.doubleValue() == Double.POSITIVE_INFINITY) {
        return "INF";
      }
      if (d.doubleValue() == Double.NEGATIVE_INFINITY) {
        return "-INF";
      }
    }
    if (value instanceof String || value instanceof Number
        || value instanceof Boolean) {
      return value.toString();
    }
    if (value instanceof DateTime) {
      return ((DateTime) value).toStringRfc3339();
    }
    throw new IllegalArgumentException("unrecognized value type: "
        + value.getClass());
  }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.formatter.xml;
import com.intellij.formatting.*;
import com.intellij.lang.ASTNode;
import com.intellij.lang.xml.XMLLanguage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.TokenType;
import com.intellij.psi.formatter.common.AbstractBlock;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.templateLanguages.OuterLanguageElement;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.xml.*;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
/**
 * Formatting block for a generic XML AST node. Builds child blocks, decides
 * spacing between siblings, and supplies indents for the IntelliJ formatter.
 */
public class XmlBlock extends AbstractXmlBlock {
  // Indent of this block; may be null (delegated to defaults).
  private final Indent myIndent;
  // Explicit range override; null means "use the node's own range".
  private final TextRange myTextRange;
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.formatter.xml.XmlBlock");
  public XmlBlock(final ASTNode node,
                  final Wrap wrap,
                  final Alignment alignment,
                  final XmlFormattingPolicy policy,
                  final Indent indent,
                  final TextRange textRange) {
    this(node, wrap, alignment, policy, indent, textRange, false);
  }
  public XmlBlock(final ASTNode node,
                  final Wrap wrap,
                  final Alignment alignment,
                  final XmlFormattingPolicy policy,
                  final Indent indent,
                  final TextRange textRange,
                  final boolean preserveSpace) {
    super(node, wrap, alignment, policy, preserveSpace);
    myIndent = indent;
    myTextRange = textRange;
  }
  @Override
  @NotNull
  public TextRange getTextRange() {
    // Honor the explicit range except for CDATA delimiters, which must use the node range.
    if (myTextRange != null && !(isCDATAStart() || isCDATAEnd())) {
      return myTextRange;
    }
    else {
      return super.getTextRange();
    }
  }
  /**
   * Builds sub-blocks: attribute values and comments get special splitting,
   * otherwise children are processed one by one, keeping whitespace-only
   * children only when whitespace must be preserved.
   */
  @Override
  protected List<Block> buildChildren() {
    //
    // Fix for EA-19269:
    // Split XML attribute value to the value itself and delimiters (needed for the case when it contains
    // template language tags inside).
    //
    if (myNode.getElementType() == XmlElementType.XML_ATTRIBUTE_VALUE) {
      return splitAttribute(myNode, myXmlFormattingPolicy);
    }
    if (myNode.getElementType() == XmlElementType.XML_COMMENT) {
      // Prefer injected-language blocks inside the comment when available.
      List<Block> result = new SmartList<>();
      if (buildInjectedPsiBlocks(result, myNode, myWrap, null, Indent.getNoneIndent())) {
        return result;
      }
      return splitComment();
    }
    if (myNode.getFirstChildNode() != null) {
      boolean keepWhitespaces = shouldKeepWhitespaces();
      final ArrayList<Block> result = new ArrayList<>(5);
      ASTNode child = myNode.getFirstChildNode();
      while (child != null) {
        if (child.getTextLength() > 0) {
          if (containsWhiteSpacesOnly(child)) {
            // Whitespace-only children are kept read-only when whitespace matters.
            if (keepWhitespaces) {
              result.add(new ReadOnlyBlock(child));
            }
          }
          else {
            // processChild may consume several siblings and returns the last one handled.
            child = processChild(result, child, getDefaultWrap(child), null, getChildDefaultIndent());
          }
        }
        if (child != null) {
          LOG.assertTrue(child.getTreeParent() == myNode);
          child = child.getTreeNext();
        }
      }
      return result;
    }
    else {
      return EMPTY;
    }
  }
  // Whitespace is preserved inside XML_TEXT when the policy says so globally
  // or for the enclosing tag specifically (e.g. <pre>).
  private boolean shouldKeepWhitespaces() {
    if (myNode.getElementType() == XmlElementType.XML_TEXT) {
      if (myXmlFormattingPolicy.getShouldKeepWhiteSpaces()) {
        return true;
      }
      else {
        final ASTNode treeParent = myNode.getTreeParent();
        final XmlTag tag = getTag(treeParent);
        if (tag != null) {
          if (myXmlFormattingPolicy.keepWhiteSpacesInsideTag(tag)) {
            return true;
          }
        }
      }
    }
    return false;
  }
  /**
   * Splits an attribute value node into delimiter blocks, embedded-language
   * leaf blocks, and read-only blocks for everything else.
   */
  protected List<Block> splitAttribute(ASTNode node, XmlFormattingPolicy formattingPolicy) {
    final ArrayList<Block> result = new ArrayList<>(3);
    ASTNode child = node.getFirstChildNode();
    while (child != null) {
      if (child.getElementType() == XmlTokenType.XML_ATTRIBUTE_VALUE_START_DELIMITER ||
          child.getElementType() == XmlTokenType.XML_ATTRIBUTE_VALUE_END_DELIMITER) {
        result.add(new XmlBlock(child, null, null, formattingPolicy, null, null, isPreserveSpace()));
      }
      else if (!child.getPsi().getLanguage().isKindOf(XMLLanguage.INSTANCE) && containsOuterLanguageElement(child)) {
        // Fix for EA-20311:
        // In case of another embedded language create a splittable XML block which can be
        // merged with other language's code blocks.
        createLeafBlocks(child, result);
      }
      else if (child.getElementType() != TokenType.ERROR_ELEMENT || child.getFirstChildNode() != null) {
        result.add(new ReadOnlyBlock(child));
      }
      child = child.getTreeNext();
    }
    return result;
  }
  // Recursively emits leaf blocks: outer-language elements go through
  // processChild; other non-empty leaves become read-only blocks.
  private void createLeafBlocks(ASTNode node, List<Block> result) {
    if (node instanceof OuterLanguageElement) {
      processChild(result, node, null, null, null);
      return;
    }
    ASTNode child = node.getFirstChildNode();
    if (child == null && !(node instanceof PsiWhiteSpace) && node.getElementType() != TokenType.ERROR_ELEMENT && node.getTextLength() > 0) {
      result.add(new ReadOnlyBlock(node));
      return;
    }
    while (child != null) {
      createLeafBlocks(child, result);
      child = child.getTreeNext();
    }
  }
  // True if node is, or transitively contains, an outer-language element.
  private static boolean containsOuterLanguageElement(ASTNode node) {
    if (node instanceof OuterLanguageElement) {
      return true;
    }
    ASTNode child = node.getFirstChildNode();
    while (child != null) {
      if (child instanceof OuterLanguageElement) {
        return true;
      }
      if (containsOuterLanguageElement(child)) return true;
      child = child.getTreeNext();
    }
    return false;
  }
  /**
   * Splits a comment node into child blocks, but only when it contains
   * outer-language elements; a plain comment yields EMPTY (treated as a leaf).
   */
  protected List<Block> splitComment() {
    if (myNode.getElementType() != XmlElementType.XML_COMMENT) return EMPTY;
    final ArrayList<Block> result = new ArrayList<>(3);
    ASTNode child = myNode.getFirstChildNode();
    boolean hasOuterLangElements = false;
    while (child != null) {
      if (child instanceof OuterLanguageElement) {
        hasOuterLangElements = true;
      }
      result.add(new XmlBlock(child, null, null, myXmlFormattingPolicy, getChildIndent(), null, isPreserveSpace()));
      child = child.getTreeNext();
    }
    if (hasOuterLangElements) {
      return result;
    }
    else {
      return EMPTY;
    }
  }
  @Nullable
  protected Wrap getDefaultWrap(ASTNode node) {
    return null;
  }
  // Children of document-level containers get no indent; otherwise defer to defaults.
  @Nullable
  protected Indent getChildDefaultIndent() {
    if (myNode.getElementType() == XmlElementType.HTML_DOCUMENT) {
      return Indent.getNoneIndent();
    }
    if (myNode.getElementType() == TokenType.DUMMY_HOLDER) {
      return Indent.getNoneIndent();
    }
    if (myNode.getElementType() == XmlElementType.XML_PROLOG) {
      return Indent.getNoneIndent();
    }
    else {
      return null;
    }
  }
  /**
   * Spacing between two child blocks: read-only when whitespace is preserved
   * around tags/text, special rules inside text and attributes, forced line
   * break after prolog/doctype, default otherwise.
   */
  @Override
  public Spacing getSpacing(Block child1, @NotNull Block child2) {
    if (!(child1 instanceof AbstractBlock) || !(child2 instanceof AbstractBlock)) {
      return null;
    }
    final IElementType elementType = myNode.getElementType();
    final ASTNode node1 = ((AbstractBlock)child1).getNode();
    final IElementType type1 = node1.getElementType();
    final ASTNode node2 = ((AbstractBlock)child2).getNode();
    final IElementType type2 = node2.getElementType();
    if ((isXmlTag(node2) || type2 == XmlTokenType.XML_END_TAG_START || type2 == XmlElementType.XML_TEXT) && myXmlFormattingPolicy
      .getShouldKeepWhiteSpaces()) {
      return Spacing.getReadOnlySpacing();
    }
    if (elementType == XmlElementType.XML_TEXT) {
      return getSpacesInsideText(type1, type2);
    }
    else if (elementType == XmlElementType.XML_ATTRIBUTE) {
      return getSpacesInsideAttribute(type1, type2);
    }
    if (type1 == XmlElementType.XML_PROLOG) {
      return createDefaultSpace(true, false);
    }
    if (elementType == XmlElementType.XML_DOCTYPE) {
      return createDefaultSpace(true, false);
    }
    return createDefaultSpace(false, false);
  }
  // Around '=' in an attribute: 0 or 1 spaces depending on the policy.
  private Spacing getSpacesInsideAttribute(final IElementType type1, final IElementType type2) {
    if (type1 == XmlTokenType.XML_EQ || type2 == XmlTokenType.XML_EQ) {
      int spaces = myXmlFormattingPolicy.getShouldAddSpaceAroundEqualityInAttribute() ? 1 : 0;
      return Spacing
        .createSpacing(spaces, spaces, 0, myXmlFormattingPolicy.getShouldKeepLineBreaks(), myXmlFormattingPolicy.getKeepBlankLines());
    }
    else {
      return createDefaultSpace(false, false);
    }
  }
  // Exactly one space between adjacent data-character runs inside text.
  private Spacing getSpacesInsideText(final IElementType type1, final IElementType type2) {
    if (type1 == XmlTokenType.XML_DATA_CHARACTERS && type2 == XmlTokenType.XML_DATA_CHARACTERS) {
      return Spacing
        .createSpacing(1, 1, 0, myXmlFormattingPolicy.getShouldKeepLineBreaksInText(), myXmlFormattingPolicy.getKeepBlankLines());
    }
    else {
      return createDefaultSpace(false, true);
    }
  }
  @Override
  public Indent getIndent() {
    // Document-level constructs are never indented.
    if (myNode.getElementType() == XmlElementType.XML_PROLOG || myNode.getElementType() == XmlElementType.XML_DOCTYPE ||
        SourceTreeToPsiMap.treeElementToPsi(myNode) instanceof XmlDocument) {
      return Indent.getNoneIndent();
    }
    return myIndent;
  }
  @Override
  public boolean insertLineBreakBeforeTag() {
    return false;
  }
  @Override
  public boolean removeLineBreakBeforeTag() {
    return false;
  }
  @Override
  public boolean isTextElement() {
    return myNode.getElementType() == XmlElementType.XML_TEXT || myNode.getElementType() == XmlTokenType.XML_DATA_CHARACTERS ||
           myNode.getElementType() == XmlTokenType.XML_CHAR_ENTITY_REF;
  }
  // New children of files/documents/prologs start unindented.
  @Override
  @NotNull
  public ChildAttributes getChildAttributes(final int newChildIndex) {
    PsiElement element = myNode.getPsi();
    if (element instanceof PsiFile || element instanceof XmlDocument || element instanceof XmlProlog) {
      return new ChildAttributes(Indent.getNoneIndent(), null);
    }
    else {
      return super.getChildAttributes(newChildIndex);
    }
  }
  public XmlFormattingPolicy getPolicy() {
    return myXmlFormattingPolicy;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.threads;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.tomcat.util.res.StringManager;
/**
* Same as a java.util.concurrent.ThreadPoolExecutor but implements a much more efficient
* {@link #getSubmittedCount()} method, to be used to properly handle the work queue.
* If a RejectedExecutionHandler is not specified a default one will be configured
* and that one will always throw a RejectedExecutionException
*
*/
public class ThreadPoolExecutor extends java.util.concurrent.ThreadPoolExecutor {
/**
 * The string manager for this package.
 */
protected static final StringManager sm = StringManager
        .getManager("org.apache.tomcat.util.threads.res");
/**
 * The number of tasks submitted but not yet finished. This includes tasks
 * in the queue and tasks that have been handed to a worker thread but the
 * latter did not start executing the task yet.
 * This number is always greater or equal to {@link #getActiveCount()}.
 */
private final AtomicInteger submittedCount = new AtomicInteger(0);
/**
 * NOTE(review): presumably the most recent time in ms at which a context was
 * stopped, used to decide whether a worker thread predates the stop and should
 * be renewed — confirm against stopCurrentThreadIfNeeded()/its callers.
 */
private final AtomicLong lastContextStoppedTime = new AtomicLong(0L);
/**
 * Most recent time in ms when a thread decided to kill itself to avoid
 * potential memory leaks. Useful to throttle the rate of renewals of
 * threads.
 */
private final AtomicLong lastTimeThreadKilledItself = new AtomicLong(0L);
/**
 * Delay in ms between 2 threads being renewed. If negative, do not renew threads.
 */
private long threadRenewalDelay = Constants.DEFAULT_THREAD_RENEWAL_DELAY;
public ThreadPoolExecutor(int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue, RejectedExecutionHandler handler) {
super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, handler);
prestartAllCoreThreads();
}
public ThreadPoolExecutor(int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue, ThreadFactory threadFactory,
RejectedExecutionHandler handler) {
super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler);
prestartAllCoreThreads();
}
public ThreadPoolExecutor(int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue, ThreadFactory threadFactory) {
super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, new RejectHandler());
prestartAllCoreThreads();
}
public ThreadPoolExecutor(int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue) {
super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, new RejectHandler());
prestartAllCoreThreads();
}
public long getThreadRenewalDelay() {
return threadRenewalDelay;
}
public void setThreadRenewalDelay(long threadRenewalDelay) {
this.threadRenewalDelay = threadRenewalDelay;
}
@Override
protected void afterExecute(Runnable r, Throwable t) {
submittedCount.decrementAndGet();
if (t == null) {
stopCurrentThreadIfNeeded();
}
}
/**
* If the current thread was started before the last time when a context was
* stopped, an exception is thrown so that the current thread is stopped.
*/
protected void stopCurrentThreadIfNeeded() {
if (currentThreadShouldBeStopped()) {
long lastTime = lastTimeThreadKilledItself.longValue();
if (lastTime + threadRenewalDelay < System.currentTimeMillis()) {
if (lastTimeThreadKilledItself.compareAndSet(lastTime,
System.currentTimeMillis() + 1)) {
// OK, it's really time to dispose of this thread
final String msg = sm.getString(
"threadPoolExecutor.threadStoppedToAvoidPotentialLeak",
Thread.currentThread().getName());
throw new StopPooledThreadException(msg);
}
}
}
}
protected boolean currentThreadShouldBeStopped() {
if (threadRenewalDelay >= 0
&& Thread.currentThread() instanceof TaskThread) {
TaskThread currentTaskThread = (TaskThread) Thread.currentThread();
if (currentTaskThread.getCreationTime() <
this.lastContextStoppedTime.longValue()) {
return true;
}
}
return false;
}
public int getSubmittedCount() {
return submittedCount.get();
}
/**
* {@inheritDoc}
*/
@Override
public void execute(Runnable command) {
execute(command,0,TimeUnit.MILLISECONDS);
}
/**
* Executes the given command at some time in the future. The command
* may execute in a new thread, in a pooled thread, or in the calling
* thread, at the discretion of the <tt>Executor</tt> implementation.
* If no threads are available, it will be added to the work queue.
* If the work queue is full, the system will wait for the specified
* time and it throw a RejectedExecutionException if the queue is still
* full after that.
*
* @param command the runnable task
* @param timeout A timeout for the completion of the task
* @param unit The timeout time unit
* @throws RejectedExecutionException if this task cannot be
* accepted for execution - the queue is full
* @throws NullPointerException if command or unit is null
*/
public void execute(Runnable command, long timeout, TimeUnit unit) {
submittedCount.incrementAndGet();
try {
super.execute(command);
} catch (RejectedExecutionException rx) {
if (super.getQueue() instanceof TaskQueue) {
final TaskQueue queue = (TaskQueue)super.getQueue();
try {
if (!queue.force(command, timeout, unit)) {
submittedCount.decrementAndGet();
throw new RejectedExecutionException("Queue capacity is full.");
}
} catch (InterruptedException x) {
submittedCount.decrementAndGet();
throw new RejectedExecutionException(x);
}
} else {
submittedCount.decrementAndGet();
throw rx;
}
}
}
public void contextStopping() {
this.lastContextStoppedTime.set(System.currentTimeMillis());
// save the current pool parameters to restore them later
int savedCorePoolSize = this.getCorePoolSize();
TaskQueue taskQueue =
getQueue() instanceof TaskQueue ? (TaskQueue) getQueue() : null;
if (taskQueue != null) {
// note by slaurent : quite oddly threadPoolExecutor.setCorePoolSize
// checks that queue.remainingCapacity()==0. I did not understand
// why, but to get the intended effect of waking up idle threads, I
// temporarily fake this condition.
taskQueue.setForcedRemainingCapacity(Integer.valueOf(0));
}
// setCorePoolSize(0) wakes idle threads
this.setCorePoolSize(0);
// TaskQueue.take() takes care of timing out, so that we are sure that
// all threads of the pool are renewed in a limited time, something like
// (threadKeepAlive + longest request time)
if (taskQueue != null) {
// ok, restore the state of the queue and pool
taskQueue.setForcedRemainingCapacity(null);
}
this.setCorePoolSize(savedCorePoolSize);
}
private static class RejectHandler implements RejectedExecutionHandler {
@Override
public void rejectedExecution(Runnable r,
java.util.concurrent.ThreadPoolExecutor executor) {
throw new RejectedExecutionException();
}
}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticloadbalancing.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * <p>
 * Contains the parameters for SetLoadBalancerListenerSSLCertificate.
 * </p>
 *
 * @see <a
 *      href="http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancing-2012-06-01/SetLoadBalancerListenerSSLCertificate"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SetLoadBalancerListenerSSLCertificateRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** Name of the load balancer whose listener is being updated. */
    private String loadBalancerName;

    /** Listener port that uses the specified SSL certificate. */
    private Integer loadBalancerPort;

    /** Amazon Resource Name (ARN) of the SSL certificate. */
    private String sSLCertificateId;

    /**
     * Default constructor for SetLoadBalancerListenerSSLCertificateRequest object. Callers should use the setter or
     * fluent setter (with...) methods to initialize the object after creating it.
     */
    public SetLoadBalancerListenerSSLCertificateRequest() {
    }

    /**
     * Constructs a new SetLoadBalancerListenerSSLCertificateRequest object. Callers should use the setter or fluent
     * setter (with...) methods to initialize any additional object members.
     *
     * @param loadBalancerName
     *        The name of the load balancer.
     * @param loadBalancerPort
     *        The port that uses the specified SSL certificate.
     * @param sSLCertificateId
     *        The Amazon Resource Name (ARN) of the SSL certificate.
     */
    public SetLoadBalancerListenerSSLCertificateRequest(String loadBalancerName, Integer loadBalancerPort, String sSLCertificateId) {
        setLoadBalancerName(loadBalancerName);
        setLoadBalancerPort(loadBalancerPort);
        setSSLCertificateId(sSLCertificateId);
    }

    /**
     * Sets the name of the load balancer.
     *
     * @param loadBalancerName
     *        The name of the load balancer.
     */
    public void setLoadBalancerName(String loadBalancerName) {
        this.loadBalancerName = loadBalancerName;
    }

    /**
     * Returns the name of the load balancer.
     *
     * @return The name of the load balancer.
     */
    public String getLoadBalancerName() {
        return loadBalancerName;
    }

    /**
     * Fluent variant of {@link #setLoadBalancerName(String)}.
     *
     * @param loadBalancerName
     *        The name of the load balancer.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SetLoadBalancerListenerSSLCertificateRequest withLoadBalancerName(String loadBalancerName) {
        setLoadBalancerName(loadBalancerName);
        return this;
    }

    /**
     * Sets the port that uses the specified SSL certificate.
     *
     * @param loadBalancerPort
     *        The port that uses the specified SSL certificate.
     */
    public void setLoadBalancerPort(Integer loadBalancerPort) {
        this.loadBalancerPort = loadBalancerPort;
    }

    /**
     * Returns the port that uses the specified SSL certificate.
     *
     * @return The port that uses the specified SSL certificate.
     */
    public Integer getLoadBalancerPort() {
        return loadBalancerPort;
    }

    /**
     * Fluent variant of {@link #setLoadBalancerPort(Integer)}.
     *
     * @param loadBalancerPort
     *        The port that uses the specified SSL certificate.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SetLoadBalancerListenerSSLCertificateRequest withLoadBalancerPort(Integer loadBalancerPort) {
        setLoadBalancerPort(loadBalancerPort);
        return this;
    }

    /**
     * Sets the Amazon Resource Name (ARN) of the SSL certificate.
     *
     * @param sSLCertificateId
     *        The Amazon Resource Name (ARN) of the SSL certificate.
     */
    public void setSSLCertificateId(String sSLCertificateId) {
        this.sSLCertificateId = sSLCertificateId;
    }

    /**
     * Returns the Amazon Resource Name (ARN) of the SSL certificate.
     *
     * @return The Amazon Resource Name (ARN) of the SSL certificate.
     */
    public String getSSLCertificateId() {
        return sSLCertificateId;
    }

    /**
     * Fluent variant of {@link #setSSLCertificateId(String)}.
     *
     * @param sSLCertificateId
     *        The Amazon Resource Name (ARN) of the SSL certificate.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SetLoadBalancerListenerSSLCertificateRequest withSSLCertificateId(String sSLCertificateId) {
        setSSLCertificateId(sSLCertificateId);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     * Only non-null members are included.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getLoadBalancerName() != null) {
            sb.append("LoadBalancerName: ").append(getLoadBalancerName()).append(",");
        }
        if (getLoadBalancerPort() != null) {
            sb.append("LoadBalancerPort: ").append(getLoadBalancerPort()).append(",");
        }
        if (getSSLCertificateId() != null) {
            sb.append("SSLCertificateId: ").append(getSSLCertificateId());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof SetLoadBalancerListenerSSLCertificateRequest)) {
            return false;
        }
        SetLoadBalancerListenerSSLCertificateRequest that = (SetLoadBalancerListenerSSLCertificateRequest) obj;
        return sameValue(getLoadBalancerName(), that.getLoadBalancerName())
                && sameValue(getLoadBalancerPort(), that.getLoadBalancerPort())
                && sameValue(getSSLCertificateId(), that.getSSLCertificateId());
    }

    /** Null-safe equality check used by {@link #equals(Object)}. */
    private static boolean sameValue(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /** Null-safe hash contribution used by {@link #hashCode()}. */
    private static int hashOrZero(Object value) {
        return value == null ? 0 : value.hashCode();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + hashOrZero(getLoadBalancerName());
        hashCode = prime * hashCode + hashOrZero(getLoadBalancerPort());
        hashCode = prime * hashCode + hashOrZero(getSSLCertificateId());
        return hashCode;
    }

    @Override
    public SetLoadBalancerListenerSSLCertificateRequest clone() {
        return (SetLoadBalancerListenerSSLCertificateRequest) super.clone();
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.editor.BaseDocumentAdapter;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.SelectionModel;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.colors.impl.DelegateColorScheme;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.LineIterator;
import com.intellij.openapi.editor.ex.RangeMarkerEx;
import com.intellij.openapi.editor.impl.EditorImpl;
import com.intellij.openapi.editor.impl.LineSet;
import com.intellij.openapi.editor.impl.RangeMarkerTree;
import com.intellij.openapi.editor.markup.HighlighterLayer;
import com.intellij.openapi.editor.markup.HighlighterTargetArea;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.LineTokenizer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.Processor;
import com.intellij.util.text.CharSequenceSubSequence;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.table.TableCellRenderer;
import java.awt.*;
/**
 * Renders table cell values inside a real IntelliJ editor component running in
 * renderer mode, so cells pick up editor color schemes, fonts and selection
 * colors rather than plain label painting.
 *
 * @author gregsh
 */
public abstract class EditorTextFieldCellRenderer implements TableCellRenderer, Disposable {
  // JTable client-property key under which one shared renderer panel per table is cached.
  private static final Key<SimpleRendererComponent> MY_PANEL_PROPERTY = Key.create("EditorTextFieldCellRenderer.MyEditorPanel");
  private final Project myProject;
  private final FileType myFileType;
  // When true, the editor font follows the LaF font (see setFontInheritedFromLAF below).
  private final boolean myInheritFontFromLaF;
  protected EditorTextFieldCellRenderer(@Nullable Project project, @Nullable FileType fileType, @NotNull Disposable parent) {
    this(project, fileType, true, parent);
  }
  protected EditorTextFieldCellRenderer(@Nullable Project project, @Nullable FileType fileType,
                                        boolean inheritFontFromLaF, @NotNull Disposable parent) {
    myProject = project;
    myFileType = fileType;
    myInheritFontFromLaF = inheritFontFromLaF;
    Disposer.register(parent, this);
  }
  /** Returns the text to render for the given cell. */
  protected abstract String getText(JTable table, Object value, int row, int column);
  /** Attributes applied to the whole cell text; null means default attributes. */
  @Nullable
  protected TextAttributes getTextAttributes(JTable table, Object value, int row, int column) {
    return null;
  }
  @NotNull
  protected EditorColorsScheme getColorScheme(final JTable table) {
    return getEditorPanel(table).getEditor().getColorsScheme();
  }
  // Pushes the cell's text, attributes and selection state into the shared renderer panel.
  protected void customizeEditor(@NotNull EditorEx editor, JTable table, Object value, boolean selected, int row, int column) {
    String text = getText(table, value, row, column);
    getEditorPanel(table).setText(text, getTextAttributes(table, value, row, column), selected);
  }
  @Override
  public Component getTableCellRendererComponent(JTable table, Object value, boolean selected, boolean focused, int row, int column) {
    RendererComponent panel = getEditorPanel(table);
    EditorEx editor = panel.getEditor();
    // Keep the editor's visuals in sync with the table's current font and selection colors.
    editor.getColorsScheme().setEditorFontSize(table.getFont().getSize());
    editor.getColorsScheme().setColor(EditorColors.SELECTION_BACKGROUND_COLOR, table.getSelectionBackground());
    editor.getColorsScheme().setColor(EditorColors.SELECTION_FOREGROUND_COLOR, table.getSelectionForeground());
    editor.setBackgroundColor(selected ? table.getSelectionBackground() : table.getBackground());
    panel.setSelected(!Comparing.equal(editor.getBackgroundColor(), table.getBackground()));
    panel.setBorder(null); // prevents double border painting when ExtendedItemRendererComponentWrapper is used
    customizeEditor(editor, table, value, selected, row, column);
    return panel;
  }
  // Lazily creates the per-table renderer panel and caches it as a client property.
  // Disposal of this renderer clears the cached property again.
  @NotNull
  private RendererComponent getEditorPanel(final JTable table) {
    RendererComponent panel = UIUtil.getClientProperty(table, MY_PANEL_PROPERTY);
    if (panel != null) {
      // Refresh the delegate so the cached panel follows the current global scheme.
      DelegateColorScheme scheme = (DelegateColorScheme)panel.getEditor().getColorsScheme();
      scheme.setDelegate(EditorColorsManager.getInstance().getGlobalScheme());
      return panel;
    }
    panel = createRendererComponent(myProject, myFileType, myInheritFontFromLaF);
    Disposer.register(this, panel);
    Disposer.register(this, new Disposable() {
      @Override
      public void dispose() {
        UIUtil.putClientProperty(table, MY_PANEL_PROPERTY, null);
      }
    });
    table.putClientProperty(MY_PANEL_PROPERTY, panel);
    return panel;
  }
  @NotNull
  protected RendererComponent createRendererComponent(@Nullable Project project, @Nullable FileType fileType, boolean inheritFontFromLaF) {
    return new AbbreviatingRendererComponent(project, fileType, inheritFontFromLaF);
  }
  @Override
  public void dispose() {
  }
  /** Base panel hosting the renderer-mode editor used to paint cell text. */
  public abstract static class RendererComponent extends CellRendererPanel implements Disposable {
    private final EditorEx myEditor;
    private final EditorTextField myTextField;
    protected TextAttributes myTextAttributes;
    private boolean mySelected;
    public RendererComponent(Project project, @Nullable FileType fileType, boolean inheritFontFromLaF) {
      Pair<EditorTextField, EditorEx> pair = createEditor(project, fileType, inheritFontFromLaF);
      myTextField = pair.first;
      myEditor = pair.second;
      add(myEditor.getContentComponent());
    }
    public EditorEx getEditor() {
      return myEditor;
    }
    @NotNull
    private static Pair<EditorTextField, EditorEx> createEditor(Project project, @Nullable FileType fileType, boolean inheritFontFromLaF) {
      EditorTextField field = new EditorTextField(new MyDocument(), project, fileType, false, false);
      field.setSupplementary(true);
      field.setFontInheritedFromLAF(inheritFontFromLaF);
      field.addNotify(); // creates editor
      EditorEx editor = (EditorEx)ObjectUtils.assertNotNull(field.getEditor());
      editor.setRendererMode(true);
      editor.setColorsScheme(editor.createBoundColorSchemeDelegate(null));
      editor.getSettings().setCaretRowShown(false);
      editor.getScrollPane().setBorder(null);
      return Pair.create(field, editor);
    }
    public void setText(String text, @Nullable TextAttributes textAttributes, boolean selected) {
      myTextAttributes = textAttributes;
      mySelected = selected;
      setText(text);
    }
    public abstract void setText(String text);
    @Override
    public void setBackground(Color bg) {
      // allows for striped tables
      if (myEditor != null) {
        myEditor.setBackgroundColor(bg);
      }
      super.setBackground(bg);
    }
    @Override
    public void dispose() {
      remove(myEditor.getContentComponent());
      myTextField.removeNotify();
    }
    // Replaces the editor document text, then re-applies highlighting and selection state.
    protected void setTextToEditor(String text) {
      myEditor.getMarkupModel().removeAllHighlighters();
      myEditor.getDocument().setText(text);
      ((EditorImpl)myEditor).resetSizes();
      myEditor.getHighlighter().setText(text);
      if (myTextAttributes != null) {
        myEditor.getMarkupModel().addRangeHighlighter(0, myEditor.getDocument().getTextLength(),
                                                      HighlighterLayer.ADDITIONAL_SYNTAX, myTextAttributes, HighlighterTargetArea.EXACT_RANGE);
      }
      ((EditorImpl)myEditor).setPaintSelection(mySelected);
      SelectionModel selectionModel = myEditor.getSelectionModel();
      // When selected, select the whole document; otherwise clear the selection.
      selectionModel.setSelection(0, mySelected ? myEditor.getDocument().getTextLength() : 0);
    }
  }
  /** Renders the full text without any abbreviation. */
  public static class SimpleRendererComponent extends RendererComponent implements Disposable {
    public SimpleRendererComponent(Project project, @Nullable FileType fileType, boolean inheritFontFromLaF) {
      super(project, fileType, inheritFontFromLaF);
    }
    @Override
    public void setText(String text) {
      setTextToEditor(text);
    }
  }
  /**
   * Renders only the visible portion of the text: long lines are truncated
   * with an ellipsis, and in single-line mode line breaks are replaced with a
   * return symbol. The editor document is rebuilt lazily at paint time.
   */
  public static class AbbreviatingRendererComponent extends RendererComponent {
    private static final char ABBREVIATION_SUFFIX = '\u2026'; // 2026 '...'
    private static final char RETURN_SYMBOL = '\u23ce';
    private final StringBuilder myDocumentTextBuilder = new StringBuilder();
    private Dimension myPreferredSize;
    private String myRawText;
    public AbbreviatingRendererComponent(Project project, @Nullable FileType fileType, boolean inheritFontFromLaF) {
      super(project, fileType, inheritFontFromLaF);
    }
    @Override
    public void setText(String text) {
      // Only remember the raw text here; the editor document is updated in paintChildren().
      myRawText = text;
      myPreferredSize = null;
    }
    @Override
    public Dimension getPreferredSize() {
      if (myPreferredSize == null) {
        int maxLineLength = 0;
        int linesCount = 0;
        for (LineTokenizer lt = new LineTokenizer(myRawText); !lt.atEnd(); lt.advance()) {
          maxLineLength = Math.max(maxLineLength, lt.getLength());
          linesCount++;
        }
        FontMetrics fontMetrics = ((EditorImpl)getEditor()).getFontMetrics(myTextAttributes != null ? myTextAttributes.getFontType() : Font.PLAIN);
        int preferredHeight = getEditor().getLineHeight() * Math.max(1, linesCount);
        // Width is a rough estimate based on the width of 'm'; not exact for proportional fonts.
        int preferredWidth = fontMetrics.charWidth('m') * maxLineLength;
        Insets insets = getInsets();
        if (insets != null) {
          preferredHeight += insets.top + insets.bottom;
          preferredWidth += insets.left + insets.right;
        }
        myPreferredSize = new Dimension(preferredWidth, preferredHeight);
      }
      return myPreferredSize;
    }
    @Override
    protected void paintChildren(Graphics g) {
      updateText(g.getClipBounds());
      super.paintChildren(g);
    }
    // Rebuilds the editor document so that it contains only the lines that intersect
    // the clip rectangle, each abbreviated to fit the visible width.
    private void updateText(Rectangle clip) {
      FontMetrics fontMetrics = ((EditorImpl)getEditor()).getFontMetrics(myTextAttributes != null ? myTextAttributes.getFontType() : Font.PLAIN);
      Insets insets = getInsets();
      int maxLineWidth = getWidth() - (insets != null ? insets.left + insets.right : 0);
      myDocumentTextBuilder.setLength(0);
      // Treat the component as single-line when its height fits roughly one text line.
      boolean singleLineMode = getHeight() / (float)getEditor().getLineHeight() < 1.1f;
      if (singleLineMode) {
        appendAbbreviated(myDocumentTextBuilder, myRawText, 0, myRawText.length(), fontMetrics, maxLineWidth, true);
      }
      else {
        int lineHeight = getEditor().getLineHeight();
        int firstVisibleLine = clip.y / lineHeight;
        float visibleLinesCountFractional = clip.height / (float)lineHeight;
        int linesToAppend = 1 + (int)visibleLinesCountFractional;
        LineTokenizer lt = new LineTokenizer(myRawText);
        // Empty placeholder lines keep the visible lines at their correct vertical offsets.
        for (int line = 0; !lt.atEnd() && line < firstVisibleLine; lt.advance(), line++) {
          myDocumentTextBuilder.append('\n');
        }
        for (int line = 0; !lt.atEnd() && line < linesToAppend; lt.advance(), line++) {
          int start = lt.getOffset();
          int end = start + lt.getLength();
          appendAbbreviated(myDocumentTextBuilder, myRawText, start, end, fontMetrics, maxLineWidth, false);
          if (lt.getLineSeparatorLength() > 0) {
            myDocumentTextBuilder.append('\n');
          }
        }
      }
      setTextToEditor(myDocumentTextBuilder.toString());
    }
    // Appends text[start, end) to 'to', truncated with an ellipsis when it does not
    // fit; when replaceLineTerminators is true, line breaks become RETURN_SYMBOL.
    private static void appendAbbreviated(StringBuilder to, String text, int start, int end,
                                          FontMetrics metrics, int maxWidth, boolean replaceLineTerminators) {
      int abbreviationLength = abbreviationLength(text, start, end, metrics, maxWidth, replaceLineTerminators);
      if (!replaceLineTerminators) {
        to.append(text, start, start + abbreviationLength);
      }
      else {
        CharSequenceSubSequence subSeq = new CharSequenceSubSequence(text, start, start + abbreviationLength);
        for (LineTokenizer lt = new LineTokenizer(subSeq); !lt.atEnd(); lt.advance()) {
          to.append(subSeq, lt.getOffset(), lt.getOffset() + lt.getLength());
          if (lt.getLineSeparatorLength() > 0) {
            to.append(RETURN_SYMBOL);
          }
        }
      }
      if (abbreviationLength != end - start) {
        to.append(ABBREVIATION_SUFFIX);
      }
    }
    // Number of chars of text[start, end) that fit within maxWidth, reserving room
    // for the ellipsis; returns end - start when everything fits.
    private static int abbreviationLength(String text, int start, int end, FontMetrics metrics, int maxWidth, boolean replaceSeparators) {
      // Quick accept via an upper-bound estimate based on the width of 'm'.
      if (metrics.charWidth('m') * (end - start) <= maxWidth) return end - start;
      int abbrWidth = metrics.charWidth(ABBREVIATION_SUFFIX);
      int abbrLength = 0;
      CharSequenceSubSequence subSeq = new CharSequenceSubSequence(text, start, end);
      for (LineTokenizer lt = new LineTokenizer(subSeq); !lt.atEnd(); lt.advance()) {
        for (int i = 0; i < lt.getLength(); i++, abbrLength++) {
          abbrWidth += metrics.charWidth(subSeq.charAt(lt.getOffset() + i));
          if (abbrWidth >= maxWidth) return abbrLength;
        }
        if (replaceSeparators && lt.getLineSeparatorLength() != 0) {
          abbrWidth += metrics.charWidth(RETURN_SYMBOL);
          if (abbrWidth >= maxWidth) return abbrLength;
          abbrLength += lt.getLineSeparatorLength();
        }
      }
      return abbrLength;
    }
  }
  // Minimal in-memory DocumentEx implementation backing the renderer editor.
  // Mutation beyond setText() is unsupported.
  private static class MyDocument extends BaseDocumentAdapter implements DocumentEx {
    RangeMarkerTree<RangeMarkerEx> myRangeMarkers = new RangeMarkerTree<RangeMarkerEx>(this) {
    };
    char[] myChars = ArrayUtil.EMPTY_CHAR_ARRAY;
    String myString = "";
    LineSet myLineSet = LineSet.createLineSet(myString);
    @Override
    public void setModificationStamp(long modificationStamp) {
    }
    @Override
    public void replaceText(@NotNull CharSequence chars, long newModificationStamp) {
      throw new UnsupportedOperationException();
    }
    @Override
    public void moveText(int srcStart, int srcEnd, int dstOffset) {
      throw new UnsupportedOperationException();
    }
    @Override
    public void setText(@NotNull CharSequence text) {
      // Normalize separators to '\n' and rebuild the cached char array and line index.
      String s = StringUtil.convertLineSeparators(text.toString());
      myChars = new char[s.length()];
      s.getChars(0, s.length(), myChars, 0);
      myString = new String(myChars);
      myLineSet = LineSet.createLineSet(myString);
    }
    @NotNull
    @Override
    public LineIterator createLineIterator() {
      return myLineSet.createIterator();
    }
    @Override
    public boolean removeRangeMarker(@NotNull RangeMarkerEx rangeMarker) { return myRangeMarkers.removeInterval(rangeMarker); }
    @Override
    public void registerRangeMarker(@NotNull RangeMarkerEx rangeMarker,
                                    int start,
                                    int end,
                                    boolean greedyToLeft,
                                    boolean greedyToRight,
                                    int layer) {
      myRangeMarkers.addInterval(rangeMarker, start, end, greedyToLeft, greedyToRight, layer);
    }
    @Override
    public boolean processRangeMarkers(@NotNull Processor<? super RangeMarker> processor) { return myRangeMarkers.process(processor); }
    @Override
    public boolean processRangeMarkersOverlappingWith(int start,
                                                      int end,
                                                      @NotNull Processor<? super RangeMarker> processor) {
      return myRangeMarkers.processOverlappingWith(start, end, processor);
    }
    @NotNull
    @Override
    public String getText() { return myString; }
    @NotNull
    @Override
    public String getText(@NotNull TextRange range) { return range.substring(getText()); }
    @NotNull
    @Override
    public char[] getChars() { return myChars; }
    @Override
    public int getLineCount() { return myLineSet.findLineIndex(myChars.length) + 1; }
    @Override
    public int getLineNumber(int offset) { return myLineSet.findLineIndex(offset); }
    // Empty-document guards: LineSet has no lines for "" but callers still ask for line 0.
    @Override
    public int getLineStartOffset(int line) { return myChars.length == 0 ? 0 : myLineSet.getLineStart(line); }
    @Override
    public int getLineEndOffset(int line) { return myChars.length == 0 ? 0 : myLineSet.getLineEnd(line); }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.pig.FuncSpec;
import org.apache.pig.PigConfiguration;
import org.apache.pig.PigServer;
import org.apache.pig.backend.datastorage.DataStorage;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad;
import org.apache.pig.builtin.PigStorage;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DefaultBagFactory;
import org.apache.pig.data.DefaultTuple;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.io.FileSpec;
import org.apache.pig.newplan.Operator;
import org.apache.pig.newplan.logical.relational.LOLoad;
import org.apache.pig.newplan.logical.relational.LogicalPlan;
import org.apache.pig.parser.ParserException;
import org.apache.pig.parser.QueryParserDriver;
import org.apache.pig.test.utils.GenPhyOp;
import org.apache.pig.test.utils.TestHelper;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
public class TestLoad {
PigContext pc;
PigServer[] servers;
static MiniGenericCluster cluster = MiniGenericCluster.buildCluster();
private static final String WORKING_DIR = "/tmp/test" + java.util.UUID.randomUUID();
    @Before
    public void setUp() throws Exception {
        // Remove temp files left behind by earlier tests so each run starts clean.
        FileLocalizer.deleteTempFiles();
        // Most tests below run against both execution modes: the shared mini
        // cluster and local mode.
        servers = new PigServer[] {
                new PigServer(cluster.getExecType(), cluster.getProperties()),
                new PigServer(Util.getLocalTestMode(), new Properties())
        };
    }
@Test
public void testGetNextTuple() throws IOException {
pc = servers[0].getPigContext();
String curDir = System.getProperty("user.dir");
String inpDir = curDir + File.separatorChar + "test/org/apache/pig/test/data/InputFiles/";
// copy passwd file to cluster and set that as the input location for the load
Util.copyFromLocalToCluster(cluster, inpDir + "passwd", "passwd");
FileSpec inpFSpec = new FileSpec("passwd", new FuncSpec(PigStorage.class.getName(), new String[]{":"}));
POLoad ld = GenPhyOp.topLoadOp();
ld.setLFile(inpFSpec);
ld.setPc(pc);
DataBag inpDB = DefaultBagFactory.getInstance().newDefaultBag();
BufferedReader br = new BufferedReader(new FileReader("test/org/apache/pig/test/data/InputFiles/passwd"));
for(String line = br.readLine();line!=null;line=br.readLine()){
String[] flds = line.split(":",-1);
Tuple t = new DefaultTuple();
for (String fld : flds) {
t.append((fld.compareTo("")!=0 ? new DataByteArray(fld.getBytes()) : null));
}
inpDB.add(t);
}
Tuple t=null;
int size = 0;
for(Result res = ld.getNextTuple();res.returnStatus!=POStatus.STATUS_EOP;res=ld.getNextTuple()){
assertEquals(true, TestHelper.bagContains(inpDB, (Tuple)res.result));
++size;
}
assertEquals(true, size==inpDB.size());
}
    // Shut down the shared mini cluster once after the whole test class has run.
    @AfterClass
    public static void oneTimeTearDown() throws Exception {
        cluster.shutDown();
    }
@Test
public void testLoadRemoteRel() throws Exception {
for (PigServer pig : servers) {
pc = pig.getPigContext();
checkLoadPath("test", WORKING_DIR + "/test");
}
}
    @Test
    public void testLoadRemoteAbs() throws Exception {
        for (PigServer pig : servers) {
            pc = pig.getPigContext();
            // An absolute path must be passed through without conversion.
            boolean noConversionExpected = true;
            checkLoadPath(WORKING_DIR + "/test", WORKING_DIR + "/test", noConversionExpected);
        }
    }
    @Test
    public void testLoadRemoteRelScheme() throws Exception {
        // NOTE(review): body is identical to testLoadRemoteRel — presumably a
        // scheme-qualified relative path was intended here; verify against history.
        for (PigServer pig : servers) {
            pc = pig.getPigContext();
            checkLoadPath("test", WORKING_DIR + "/test");
        }
    }
@Test
public void testLoadRemoteAbsScheme() throws Exception {
    pc = servers[0].getPigContext();
    // An explicit 'hdfs:' scheme on an absolute path must not be rewritten.
    String schemed = "hdfs:" + WORKING_DIR + "/test";
    checkLoadPath(schemed, schemed, true);
    // check if a location 'hdfs:<abs path>' can actually be read using PigStorage
    String[] inputFileNames = new String[] {
            WORKING_DIR + "/TestLoad-testLoadRemoteAbsSchema-input.txt"};
    testLoadingMultipleFiles(inputFileNames, "hdfs:" + inputFileNames[0]);
}
@Test
public void testLoadRemoteAbsAuth() throws Exception {
    // A fully-qualified URI with an authority collapses to the plain absolute path.
    pc = servers[0].getPigContext();
    String withAuthority = cluster.getFileSystem().getUri() + "/test";
    checkLoadPath(withAuthority, "/test");
}
@Test
public void testLoadRemoteNormalize() throws Exception {
    // Dot segments are not normalized away: the location is expected back verbatim.
    for (int i = 0; i < servers.length; i++) {
        pc = servers[i].getPigContext();
        checkLoadPath(WORKING_DIR + "/foo/../././", WORKING_DIR + "/foo/.././.", true);
    }
}
@Test
public void testGlobChars() throws Exception {
    // Glob metacharacters ('?' and '*') survive working-directory resolution.
    for (int i = 0; i < servers.length; i++) {
        pc = servers[i].getPigContext();
        checkLoadPath("t?s*", WORKING_DIR + "/t?s*");
    }
}
@Test
public void testCommaSeparatedString() throws Exception {
    // Each element of a comma-separated location is resolved independently.
    String expected = WORKING_DIR + "/usr/pig/a," + WORKING_DIR + "/b";
    for (int i = 0; i < servers.length; i++) {
        pc = servers[i].getPigContext();
        checkLoadPath("usr/pig/a,b", expected);
    }
}
@Test
public void testCommaSeparatedString2() throws Exception {
    // Glob patterns and plain names mix freely in a comma-separated location.
    String expected = WORKING_DIR + "/t?s*," + WORKING_DIR + "/test";
    for (int i = 0; i < servers.length; i++) {
        pc = servers[i].getPigContext();
        checkLoadPath("t?s*,test", expected);
    }
}
@SuppressWarnings("unchecked")
@Test
public void testCommaSeparatedString3() throws Exception {
    pc = servers[0].getPigContext();
    // Already-schemed absolute entries in a comma-separated list stay untouched.
    String schemed = "hdfs:" + WORKING_DIR + "/test,hdfs:" + WORKING_DIR + "/test2,hdfs:" + WORKING_DIR + "/test3";
    checkLoadPath(schemed, schemed, true);
    // check if a location 'hdfs:<abs path>,hdfs:<abs path>' can actually be
    // read using PigStorage
    String[] inputFileNames = new String[] {
            WORKING_DIR + "/TestLoad-testCommaSeparatedString3-input1.txt",
            WORKING_DIR + "/TestLoad-testCommaSeparatedString3-input2.txt"};
    testLoadingMultipleFiles(inputFileNames,
            "hdfs:" + inputFileNames[0] + ",hdfs:" + inputFileNames[1]);
}
@Test
public void testCommaSeparatedString4() throws Exception {
    // Brace globs are treated as a single relative entry and resolved like any other.
    String expected = WORKING_DIR + "/usr/pig/{a,c}," + WORKING_DIR + "/usr/pig/b";
    for (int i = 0; i < servers.length; i++) {
        pc = servers[i].getPigContext();
        checkLoadPath("usr/pig/{a,c},usr/pig/b", expected);
    }
}
@Test
public void testCommaSeparatedString5() throws Exception {
    // Absolute entries stay as-is while relative entries get the working-dir prefix.
    for (int i = 0; i < servers.length; i++) {
        pc = servers[i].getPigContext();
        checkLoadPath("/usr/pig/{a,c},b", "/usr/pig/{a,c}," + WORKING_DIR + "/b");
    }
    // check if a location '<abs path>,<relative path>' can actually be
    // read using PigStorage
    String loadLocationString = WORKING_DIR + "/TestLoad-testCommaSeparatedStringMixed-input{1,2}.txt," +
            "TestLoad-testCommaSeparatedStringMixed-input3.txt"; // current working dir is set to WORKING_DIR in checkLoadPath()
    String[] inputFileNames = new String[] {
            WORKING_DIR + "/TestLoad-testCommaSeparatedStringMixed-input1.txt",
            WORKING_DIR + "/TestLoad-testCommaSeparatedStringMixed-input2.txt",
            WORKING_DIR + "/TestLoad-testCommaSeparatedStringMixed-input3.txt",};
    pc = servers[0].getPigContext(); // test in map reduce mode
    testLoadingMultipleFiles(inputFileNames, loadLocationString);
}
@Test
public void testCommaSeparatedString6() throws Exception {
    // Mirror of testCommaSeparatedString4 with the absolute entry second.
    String expected = WORKING_DIR + "/usr/pig/{a,c},/usr/pig/b";
    for (int i = 0; i < servers.length; i++) {
        pc = servers[i].getPigContext();
        checkLoadPath("usr/pig/{a,c},/usr/pig/b", expected);
    }
}
@Test
public void testNonDfsLocation() throws Exception {
    // A non-DFS scheme such as 'har:' must be kept verbatim (minus any trailing slash).
    String nonDfsUrl = "har:///user/foo/f.har";
    String query = "a = load '" + nonDfsUrl + "' using PigStorage('\t','-noschema');" +
            "store a into 'pigoutput';";
    LogicalPlan lp = Util.buildLp(servers[1], query);
    LOLoad load = (LOLoad) lp.getSources().get(0);
    String expected = nonDfsUrl.replaceFirst("/$", "");
    assertEquals(expected, load.getFileSpec().getFileName());
}
@SuppressWarnings("unchecked")
private void testLoadingMultipleFiles(String[] inputFileNames,
        String loadLocationString) throws IOException, ParserException {
    // Fixed content for up to three input files, with the matching expected tuples.
    String[][] inputStrings = new String[][] {
            new String[] { "hello\tworld"},
            new String[] { "bye\tnow"},
            new String[] { "all\tgood"}
    };
    List<Tuple> expected = Arrays.asList(new Tuple[] {
            (Tuple) Util.getPigConstant("('hello', 'world')"),
            (Tuple) Util.getPigConstant("('bye', 'now')"),
            (Tuple) Util.getPigConstant("('all', 'good')")});
    // Materialize each requested input file and remember which tuples we expect back.
    List<Tuple> expectedSubset = new ArrayList<Tuple>();
    for (int idx = 0; idx < inputFileNames.length; idx++) {
        Util.createInputFile(pc, inputFileNames[idx], inputStrings[idx]);
        expectedSubset.add(expected.get(idx));
    }
    try {
        // Load everything through the location string under test and drain the iterator.
        servers[0].registerQuery(" a = load '" + loadLocationString + "' as " +
                "(s1:chararray, s2:chararray);");
        List<Tuple> actual = new ArrayList<Tuple>();
        for (Iterator<Tuple> it = servers[0].openIterator("a"); it.hasNext(); ) {
            actual.add(it.next());
        }
        // Row order across files is unspecified, so compare sorted copies.
        Collections.sort(expectedSubset);
        Collections.sort(actual);
        assertEquals(expectedSubset, actual);
    } finally {
        // Always clean up the files we created, even if the assertions failed.
        for (int idx = 0; idx < inputFileNames.length; idx++) {
            Util.deleteFile(pc, inputFileNames[idx]);
        }
    }
}
// Convenience overload: by default a conversion of the load path is expected.
private void checkLoadPath(String orig, String expected) throws Exception {
checkLoadPath(orig, expected, false);
}
// Parses "a = load '<orig>';" under both multiquery settings and asserts that the
// parser produced the expected load location. When noConversionExpected is true
// the location must come back verbatim; otherwise it must have been qualified
// with a scheme/authority which is stripped before comparison.
private void checkLoadPath(String orig, String expected,
boolean noConversionExpected) throws Exception {
boolean[] multiquery = {true, false};
for (boolean b : multiquery) {
pc.getProperties().setProperty(PigConfiguration.PIG_OPT_MULTIQUERY, "" + b);
// Make WORKING_DIR the active container so relative paths resolve against it.
DataStorage dfs = pc.getDfs();
dfs.setActiveContainer(dfs.asContainer(WORKING_DIR));
Map<String, String> fileNameMap = new HashMap<String, String>();
QueryParserDriver builder = new QueryParserDriver(pc, "Test-Load", fileNameMap);
String query = "a = load '"+orig+"';";
LogicalPlan lp = builder.parse(query);
assertTrue(lp.size()>0);
Operator op = lp.getSources().get(0);
assertTrue(op instanceof LOLoad);
LOLoad load = (LOLoad)op;
String p = load.getFileSpec().getFileName();
System.err.println("DEBUG: p:" + p + " expected:" + expected +", exectype:" + pc.getExecType());
if(noConversionExpected) {
assertEquals(expected, p);
} else {
// The expected scheme depends on whether this context targets the mini cluster.
String protocol = pc.getExecType() == cluster.getExecType() ? "hdfs" : "file";
// regex : A word character, i.e. [a-zA-Z_0-9] or '-' followed by ':' then any characters
String regex = "[\\-\\w:\\.]";
assertTrue(p.matches(".*" + protocol + "://" + regex + "*.*"));
// Strip the scheme and authority back off before comparing with the expectation.
assertEquals(expected, p.replaceAll(protocol + "://" + regex + "*/", "/"));
}
}
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.action.admin.indices.close;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.NoShardAvailableActionException;
import org.elasticsearch.action.admin.indices.close.CloseIndexResponse.IndexResult;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.transport.ActionNotFoundTransportException;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
/**
 * Wire-serialization tests for {@link CloseIndexResponse}: random round-trips,
 * deep equality of deserialized instances, and XContent rendering.
 */
public class CloseIndexResponseTests extends AbstractWireSerializingTestCase<CloseIndexResponse> {
    @Override
    protected CloseIndexResponse createTestInstance() {
        return randomResponse();
    }

    @Override
    protected Writeable.Reader<CloseIndexResponse> instanceReader() {
        return CloseIndexResponse::new;
    }

    /**
     * Field-by-field comparison of a response and its serialized copy.
     * Exceptions are compared by class, message and stack trace because
     * serialization does not preserve object identity.
     */
    @Override
    protected void assertEqualInstances(CloseIndexResponse expected, CloseIndexResponse actual) {
        assertNotSame(expected, actual);
        assertThat(actual.isAcknowledged(), equalTo(expected.isAcknowledged()));
        assertThat(actual.isShardsAcknowledged(), equalTo(expected.isShardsAcknowledged()));
        // Fail fast on a list-size mismatch instead of silently ignoring extra
        // entries in `actual` (the loop below is bounded by `expected` only).
        assertThat(actual.getIndices(), hasSize(expected.getIndices().size()));
        for (int i = 0; i < expected.getIndices().size(); i++) {
            CloseIndexResponse.IndexResult expectedIndexResult = expected.getIndices().get(i);
            CloseIndexResponse.IndexResult actualIndexResult = actual.getIndices().get(i);
            assertNotSame(expectedIndexResult, actualIndexResult);
            assertThat(actualIndexResult.getIndex(), equalTo(expectedIndexResult.getIndex()));
            assertThat(actualIndexResult.hasFailures(), equalTo(expectedIndexResult.hasFailures()));
            if (expectedIndexResult.hasFailures() == false) {
                // A failure-free result carries no exception and only clean shard results.
                assertThat(actualIndexResult.getException(), nullValue());
                if (actualIndexResult.getShards() != null) {
                    assertThat(
                        Arrays.stream(actualIndexResult.getShards()).allMatch(shardResult -> shardResult.hasFailures() == false),
                        is(true)
                    );
                }
            }
            if (expectedIndexResult.getException() != null) {
                // An index-level exception excludes per-shard results.
                assertThat(actualIndexResult.getShards(), nullValue());
                assertThat(actualIndexResult.getException(), notNullValue());
                assertThat(actualIndexResult.getException().getMessage(), equalTo(expectedIndexResult.getException().getMessage()));
                assertThat(actualIndexResult.getException().getClass(), equalTo(expectedIndexResult.getException().getClass()));
                assertArrayEquals(actualIndexResult.getException().getStackTrace(), expectedIndexResult.getException().getStackTrace());
            } else {
                assertThat(actualIndexResult.getException(), nullValue());
            }
            if (expectedIndexResult.getShards() != null) {
                assertThat(actualIndexResult.getShards().length, equalTo(expectedIndexResult.getShards().length));
                for (int j = 0; j < expectedIndexResult.getShards().length; j++) {
                    CloseIndexResponse.ShardResult expectedShardResult = expectedIndexResult.getShards()[j];
                    CloseIndexResponse.ShardResult actualShardResult = actualIndexResult.getShards()[j];
                    assertThat(actualShardResult.getId(), equalTo(expectedShardResult.getId()));
                    assertThat(actualShardResult.hasFailures(), equalTo(expectedShardResult.hasFailures()));
                    if (expectedShardResult.hasFailures()) {
                        assertThat(actualShardResult.getFailures().length, equalTo(expectedShardResult.getFailures().length));
                        for (int k = 0; k < expectedShardResult.getFailures().length; k++) {
                            CloseIndexResponse.ShardResult.Failure expectedFailure = expectedShardResult.getFailures()[k];
                            CloseIndexResponse.ShardResult.Failure actualFailure = actualShardResult.getFailures()[k];
                            assertThat(actualFailure.getNodeId(), equalTo(expectedFailure.getNodeId()));
                            assertThat(actualFailure.index(), equalTo(expectedFailure.index()));
                            assertThat(actualFailure.shardId(), equalTo(expectedFailure.shardId()));
                            // Serialising and deserialising an exception seems to remove the "java.base/" part from the stack trace
                            // in the `reason` property, so we don't compare it directly. Instead, check that the first lines match,
                            // and that the stack trace has the same number of lines.
                            List<String> expectedReasonLines = expectedFailure.reason().lines().collect(Collectors.toList());
                            List<String> actualReasonLines = actualFailure.reason().lines().collect(Collectors.toList());
                            assertThat(actualReasonLines.get(0), equalTo(expectedReasonLines.get(0)));
                            assertThat(
                                "Exceptions have a different number of lines",
                                actualReasonLines,
                                hasSize(expectedReasonLines.size())
                            );
                            assertThat(actualFailure.getCause().getMessage(), equalTo(expectedFailure.getCause().getMessage()));
                            assertThat(actualFailure.getCause().getClass(), equalTo(expectedFailure.getCause().getClass()));
                            assertArrayEquals(actualFailure.getCause().getStackTrace(), expectedFailure.getCause().getStackTrace());
                        }
                    } else {
                        assertThat(actualShardResult.getFailures(), nullValue());
                    }
                }
            } else {
                assertThat(actualIndexResult.getShards(), nullValue());
            }
        }
    }

    /**
     * Test that random responses can be written to xcontent without errors.
     * Also check some specific simple cases for output.
     */
    public void testToXContent() throws IOException {
        // Any random response must render without throwing.
        CloseIndexResponse response = randomResponse();
        XContentType xContentType = randomFrom(XContentType.values());
        try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
            response.toXContent(builder, ToXContent.EMPTY_PARAMS);
        }
        // Simple case: one index closed cleanly.
        Index index = new Index("test", "uuid");
        IndexResult indexResult = new CloseIndexResponse.IndexResult(index);
        CloseIndexResponse closeIndexResponse = new CloseIndexResponse(true, true, Collections.singletonList(indexResult));
        assertEquals("""
            {"acknowledged":true,"shards_acknowledged":true,"indices":{"test":{"closed":true}}}""", Strings.toString(closeIndexResponse));
        // Failure case: a single shard failure must be reported under "failedShards".
        CloseIndexResponse.ShardResult[] shards = new CloseIndexResponse.ShardResult[1];
        shards[0] = new CloseIndexResponse.ShardResult(
            0,
            new CloseIndexResponse.ShardResult.Failure[] {
                new CloseIndexResponse.ShardResult.Failure("test", 0, new ActionNotFoundTransportException("test"), "nodeId") }
        );
        indexResult = new CloseIndexResponse.IndexResult(index, shards);
        closeIndexResponse = new CloseIndexResponse(true, true, Collections.singletonList(indexResult));
        assertEquals(XContentHelper.stripWhitespace("""
            {
              "acknowledged": true,
              "shards_acknowledged": true,
              "indices": {
                "test": {
                  "closed": false,
                  "failedShards": {
                    "0": {
                      "failures": [
                        {
                          "node": "nodeId",
                          "shard": 0,
                          "index": "test",
                          "status": "INTERNAL_SERVER_ERROR",
                          "reason": {
                            "type": "action_not_found_transport_exception",
                            "reason": "No handler for action [test]"
                          }
                        }
                      ]
                    }
                  }
                }
              }
            }"""), Strings.toString(closeIndexResponse));
    }

    /**
     * Builds a random response: per index either a clean result, an index-level
     * exception, or per-shard results with optional failures. `acknowledged` is
     * forced to false whenever any failure is generated.
     */
    private CloseIndexResponse randomResponse() {
        boolean acknowledged = true;
        final String[] indicesNames = generateRandomStringArray(10, 10, false, true);
        final List<CloseIndexResponse.IndexResult> indexResults = new ArrayList<>();
        for (String indexName : indicesNames) {
            final Index index = new Index(indexName, randomAlphaOfLength(5));
            if (randomBoolean()) {
                indexResults.add(new CloseIndexResponse.IndexResult(index));
            } else {
                if (randomBoolean()) {
                    acknowledged = false;
                    indexResults.add(new CloseIndexResponse.IndexResult(index, randomException(index, 0)));
                } else {
                    final int nbShards = randomIntBetween(1, 5);
                    CloseIndexResponse.ShardResult[] shards = new CloseIndexResponse.ShardResult[nbShards];
                    for (int i = 0; i < nbShards; i++) {
                        CloseIndexResponse.ShardResult.Failure[] failures = null;
                        if (randomBoolean()) {
                            acknowledged = false;
                            failures = new CloseIndexResponse.ShardResult.Failure[randomIntBetween(1, 3)];
                            for (int j = 0; j < failures.length; j++) {
                                // Node id is usually present but occasionally null.
                                String nodeId = null;
                                if (frequently()) {
                                    nodeId = randomAlphaOfLength(5);
                                }
                                failures[j] = new CloseIndexResponse.ShardResult.Failure(indexName, i, randomException(index, i), nodeId);
                            }
                        }
                        shards[i] = new CloseIndexResponse.ShardResult(i, failures);
                    }
                    indexResults.add(new CloseIndexResponse.IndexResult(index, shards));
                }
            }
        }
        // Shards can only be acknowledged when the whole request was.
        final boolean shardsAcknowledged = acknowledged && randomBoolean();
        return new CloseIndexResponse(acknowledged, shardsAcknowledged, indexResults);
    }

    /** A random exception of one of the three types the close-index API can surface. */
    private static ElasticsearchException randomException(final Index index, final int id) {
        return randomFrom(
            new IndexNotFoundException(index),
            new ActionNotFoundTransportException("test"),
            new NoShardAvailableActionException(new ShardId(index, id))
        );
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.casemgmt.impl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.jbpm.casemgmt.api.CaseNotFoundException;
import org.jbpm.casemgmt.api.model.AdHocFragment;
import org.jbpm.casemgmt.api.model.CaseStatus;
import org.jbpm.casemgmt.api.model.instance.CaseFileInstance;
import org.jbpm.casemgmt.api.model.instance.CaseInstance;
import org.jbpm.casemgmt.api.model.instance.CaseStageInstance;
import org.jbpm.casemgmt.api.model.instance.StageStatus;
import org.jbpm.casemgmt.impl.util.AbstractCaseServicesBaseTest;
import org.jbpm.services.api.model.NodeInstanceDesc;
import org.jbpm.services.api.model.ProcessDefinition;
import org.jbpm.services.api.model.ProcessInstanceDesc;
import org.jbpm.services.task.impl.model.UserImpl;
import org.junit.Test;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.runtime.query.QueryContext;
import org.kie.api.task.model.OrganizationalEntity;
import org.kie.api.task.model.Status;
import org.kie.api.task.model.TaskSummary;
import org.kie.internal.query.QueryFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
public class CaseRuntimeDataServiceImplTest extends AbstractCaseServicesBaseTest {
private static final Logger logger = LoggerFactory.getLogger(CaseRuntimeDataServiceImplTest.class);
@Override
protected List<String> getProcessDefinitionFiles() {
    // Case definitions deployed for this test class, followed by plain
    // processes that cases may invoke but that are not cases themselves.
    return new ArrayList<String>(Arrays.asList(
            "cases/EmptyCase.bpmn2",
            "cases/UserTaskCase.bpmn2",
            "cases/UserTaskCaseBoundary.bpmn2",
            "cases/UserTaskWithStageCase.bpmn2",
            "cases/CaseWithTwoStages.bpmn2",
            "cases/CaseWithTwoStagesConditions.bpmn2",
            "processes/DataVerificationProcess.bpmn2",
            "processes/UserTaskProcess.bpmn2"));
}
/*
* Case instance queries
*/
@Test
public void testStartEmptyCaseWithCaseFile() {
// Starts an empty case with initial case-file data, then exercises the case
// instance queries and the dynamic-task API against it.
Map<String, Object> data = new HashMap<>();
data.put("name", "my first case");
CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentUnit.getIdentifier(), EMPTY_CASE_P_ID, data);
String caseId = caseService.startCase(deploymentUnit.getIdentifier(), EMPTY_CASE_P_ID, caseFile);
assertNotNull(caseId);
assertEquals(FIRST_CASE_ID, caseId);
try {
// Load with case-file data so getData("name") below is populated.
CaseInstance cInstance = caseService.getCaseInstance(caseId, true, false, false, false);
assertNotNull(cInstance);
assertEquals(FIRST_CASE_ID, cInstance.getCaseId());
assertNotNull(cInstance.getCaseFile());
assertEquals("my first case", cInstance.getCaseFile().getData("name"));
// The runtime-data query view must report the same single case.
Collection<CaseInstance> instances = caseRuntimeDataService.getCaseInstances(new QueryContext());
assertNotNull(instances);
assertEquals(1, instances.size());
CaseInstance instance = instances.iterator().next();
assertNotNull(instance);
assertEquals(FIRST_CASE_ID, instance.getCaseId());
assertEquals(EMPTY_CASE_P_ID, instance.getCaseDefinitionId());
assertEquals("my first case", instance.getCaseDescription());
assertEquals(USER, instance.getOwner());
assertEquals(ProcessInstance.STATE_ACTIVE, instance.getStatus().intValue());
assertEquals(deploymentUnit.getIdentifier(), instance.getDeploymentId());
assertNotNull(instance.getStartedAt());
// add dynamic user task to empty case instance - first by case id
Map<String, Object> parameters = new HashMap<>();
caseService.addDynamicTask(FIRST_CASE_ID, caseService.newHumanTaskSpec("First task", "test", "john", null, parameters));
// The dynamic task should show up as the only active node of the case.
Collection<NodeInstanceDesc> activeNodes = caseRuntimeDataService.getActiveNodesForCase(FIRST_CASE_ID, new QueryContext());
assertNotNull(activeNodes);
assertEquals(1, activeNodes.size());
NodeInstanceDesc activeNode = activeNodes.iterator().next();
assertNotNull(activeNodes);
assertEquals("[Dynamic] First task", activeNode.getName());
// ...and be assigned to john as a potential owner.
List<TaskSummary> tasks = caseRuntimeDataService.getCaseTasksAssignedAsPotentialOwner(caseId, "john", null, new QueryContext());
assertNotNull(tasks);
assertEquals(1, tasks.size());
TaskSummary task = tasks.get(0);
assertEquals("First task", task.getName());
assertEquals("test", task.getDescription());
} catch (Exception e) {
logger.error("Unexpected error {}", e.getMessage(), e);
fail("Unexpected exception " + e.getMessage());
} finally {
// Always cancel the case so it does not leak into other tests.
if (caseId != null) {
caseService.cancelCase(caseId);
}
}
}
@Test
public void testUserTasksInCase() {
// Verifies that potential-owner task queries are scoped per case: triggering
// the same ad-hoc fragment in two cases yields one task in each.
Map<String, OrganizationalEntity> roleAssignments = new HashMap<>();
roleAssignments.put("owner", new UserImpl(USER));
Map<String, Object> data = new HashMap<>();
// NOTE(review): the case file is built for USER_TASK_CASE_P_ID but the case is
// started with USER_TASK_STAGE_CASE_P_ID — presumably intentional reuse; confirm.
CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentUnit.getIdentifier(), USER_TASK_CASE_P_ID, data, roleAssignments);
String caseId2 = null;
String caseId = caseService.startCase(deploymentUnit.getIdentifier(), USER_TASK_STAGE_CASE_P_ID, caseFile);
assertNotNull(caseId);
assertEquals(FIRST_CASE_ID, caseId);
try {
CaseInstance cInstance = caseService.getCaseInstance(caseId);
assertNotNull(cInstance);
assertEquals(FIRST_CASE_ID, cInstance.getCaseId());
assertEquals(deploymentUnit.getIdentifier(),
cInstance.getDeploymentId());
// No tasks for john until the ad-hoc fragment is triggered.
List<TaskSummary> tasks = caseRuntimeDataService.getCaseTasksAssignedAsPotentialOwner(caseId, "john", null, new QueryContext());
assertNotNull(tasks);
assertEquals(0, tasks.size());
Map<String, Object> taskInput = new HashMap<>();
taskInput.put("ActorId", "john");
taskInput.put("Comment",
"Need to provide data");
caseService.triggerAdHocFragment(caseId,
"Missing data",
taskInput);
tasks = caseRuntimeDataService.getCaseTasksAssignedAsPotentialOwner(caseId, "john", null, new QueryContext());
assertNotNull(tasks);
assertEquals(1, tasks.size());
TaskSummary task = tasks.get(0);
assertEquals("Missing data", task.getName());
assertEquals("Need to provide data", task.getSubject());
// Start a second case and trigger the same fragment there.
caseId2 = caseService.startCase(deploymentUnit.getIdentifier(), USER_TASK_STAGE_CASE_P_ID, caseFile);
assertNotNull(caseId2);
assertEquals("CASE-0000000002", caseId2);
caseService.triggerAdHocFragment(caseId2,
"Missing data",
taskInput);
tasks = caseRuntimeDataService.getCaseTasksAssignedAsPotentialOwner(caseId2, "john", null, new QueryContext());
assertNotNull(tasks);
assertEquals(1,
tasks.size());
task = tasks.get(0);
assertEquals("Missing data",
task.getName());
// The first case must still report exactly its own single task.
tasks = caseRuntimeDataService.getCaseTasksAssignedAsPotentialOwner(caseId, "john", null, new QueryContext());
assertNotNull(tasks);
assertEquals(1, tasks.size());
task = tasks.get(0);
assertEquals("Missing data", task.getName());
} catch (Exception e) {
logger.error("Unexpected error {}", e.getMessage(), e);
fail("Unexpected exception " + e.getMessage());
} finally {
// Clean up both cases regardless of the outcome.
if (caseId != null) {
caseService.cancelCase(caseId);
}
if (caseId2 != null) {
caseService.cancelCase(caseId2);
}
}
}
@Test
public void testUserTasksInCaseWithSubprocess() {
// Verifies that tasks created by a dynamic subprocess are returned alongside
// tasks created via ad-hoc fragments for the same case.
Map<String, OrganizationalEntity> roleAssignments = new HashMap<>();
roleAssignments.put("owner", new UserImpl(USER));
Map<String, Object> data = new HashMap<>();
CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentUnit.getIdentifier(), USER_TASK_CASE_P_ID, data, roleAssignments);
String caseId = caseService.startCase(deploymentUnit.getIdentifier(), USER_TASK_STAGE_CASE_P_ID, caseFile);
assertNotNull(caseId);
assertEquals(FIRST_CASE_ID, caseId);
try {
CaseInstance cInstance = caseService.getCaseInstance(caseId);
assertNotNull(cInstance);
assertEquals(FIRST_CASE_ID,
cInstance.getCaseId());
assertEquals(deploymentUnit.getIdentifier(), cInstance.getDeploymentId());
// No tasks for john before anything is triggered.
List<TaskSummary> tasks = caseRuntimeDataService.getCaseTasksAssignedAsPotentialOwner(caseId,
"john",
null,
new QueryContext());
assertNotNull(tasks);
assertEquals(0, tasks.size());
Map<String, Object> taskInput = new HashMap<>();
taskInput.put("ActorId", "john");
caseService.triggerAdHocFragment(caseId,
"Missing data",
taskInput);
tasks = caseRuntimeDataService.getCaseTasksAssignedAsPotentialOwner(caseId, "john", null, new QueryContext());
assertNotNull(tasks);
assertEquals(1, tasks.size());
TaskSummary task = tasks.get(0);
assertEquals("Missing data",
task.getName());
// Adding the UserTask subprocess dynamically should contribute a second task.
caseService.addDynamicSubprocess(caseId,
"UserTask",
null);
tasks = caseRuntimeDataService.getCaseTasksAssignedAsPotentialOwner(caseId,
"john",
null,
new QueryContext());
assertNotNull(tasks);
assertEquals(2, tasks.size());
task = tasks.get(0);
assertEquals("Hello", task.getName());
task = tasks.get(1);
assertEquals("Missing data", task.getName());
} catch (Exception e) {
logger.error("Unexpected error {}", e.getMessage(), e);
fail("Unexpected exception " + e.getMessage());
} finally {
// Always cancel the case so it does not leak into other tests.
if (caseId != null) {
caseService.cancelCase(caseId);
}
}
}
@Test
public void testUserTasksInCaseAdBusinessAdmin() {
// Same scenario as testUserTasksInCase but querying through the
// business-administrator view ("Administrator") instead of potential owner.
Map<String, OrganizationalEntity> roleAssignments = new HashMap<>();
roleAssignments.put("owner", new UserImpl(USER));
Map<String, Object> data = new HashMap<>();
CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentUnit.getIdentifier(), USER_TASK_CASE_P_ID, data, roleAssignments);
String caseId2 = null;
String caseId = caseService.startCase(deploymentUnit.getIdentifier(), USER_TASK_STAGE_CASE_P_ID, caseFile);
assertNotNull(caseId);
assertEquals(FIRST_CASE_ID, caseId);
try {
CaseInstance cInstance = caseService.getCaseInstance(caseId);
assertNotNull(cInstance);
assertEquals(FIRST_CASE_ID, cInstance.getCaseId());
assertEquals(deploymentUnit.getIdentifier(), cInstance.getDeploymentId());
// No admin-visible tasks before the fragment is triggered.
List<TaskSummary> tasks = caseRuntimeDataService.getCaseTasksAssignedAsBusinessAdmin(caseId, "Administrator", null, new QueryContext());
assertNotNull(tasks);
assertEquals(0, tasks.size());
Map<String, Object> taskInput = new HashMap<>();
taskInput.put("ActorId",
"john");
caseService.triggerAdHocFragment(caseId,
"Missing data",
taskInput);
tasks = caseRuntimeDataService.getCaseTasksAssignedAsBusinessAdmin(caseId, "Administrator", null, new QueryContext());
assertNotNull(tasks);
assertEquals(1, tasks.size());
TaskSummary task = tasks.get(0);
assertEquals("Missing data", task.getName());
// Second case: queries must stay scoped to their case id.
caseId2 = caseService.startCase(deploymentUnit.getIdentifier(), USER_TASK_STAGE_CASE_P_ID, caseFile);
assertNotNull(caseId2);
assertEquals("CASE-0000000002", caseId2);
caseService.triggerAdHocFragment(caseId2,
"Missing data",
taskInput);
tasks = caseRuntimeDataService.getCaseTasksAssignedAsBusinessAdmin(caseId2, "Administrator", null, new QueryContext());
assertNotNull(tasks);
assertEquals(1,
tasks.size());
task = tasks.get(0);
assertEquals("Missing data",
task.getName());
tasks = caseRuntimeDataService.getCaseTasksAssignedAsBusinessAdmin(caseId, "Administrator", null, new QueryContext());
assertNotNull(tasks);
assertEquals(1, tasks.size());
task = tasks.get(0);
assertEquals("Missing data", task.getName());
} catch (Exception e) {
logger.error("Unexpected error {}", e.getMessage(), e);
fail("Unexpected exception " + e.getMessage());
} finally {
// Clean up both cases regardless of the outcome.
if (caseId != null) {
caseService.cancelCase(caseId);
}
if (caseId2 != null) {
caseService.cancelCase(caseId2);
}
}
}
@Test
public void testUserTasksInCaseAdStakeholder() {
// Same scenario as testUserTasksInCase but querying through the stakeholder
// view: john is the TaskStakeholderId while mary is the actual actor.
Map<String, OrganizationalEntity> roleAssignments = new HashMap<>();
roleAssignments.put("owner", new UserImpl(USER));
Map<String, Object> data = new HashMap<>();
CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentUnit.getIdentifier(), USER_TASK_CASE_P_ID, data, roleAssignments);
String caseId2 = null;
String caseId = caseService.startCase(deploymentUnit.getIdentifier(), USER_TASK_STAGE_CASE_P_ID, caseFile);
assertNotNull(caseId);
assertEquals(FIRST_CASE_ID, caseId);
try {
CaseInstance cInstance = caseService.getCaseInstance(caseId);
assertNotNull(cInstance);
assertEquals(FIRST_CASE_ID, cInstance.getCaseId());
assertEquals(deploymentUnit.getIdentifier(), cInstance.getDeploymentId());
// No stakeholder-visible tasks before the fragment is triggered.
List<TaskSummary> tasks = caseRuntimeDataService.getCaseTasksAssignedAsStakeholder(caseId,
"john",
null,
new QueryContext());
assertNotNull(tasks);
assertEquals(0, tasks.size());
Map<String, Object> taskInput = new HashMap<>();
taskInput.put("ActorId", "mary");
taskInput.put("TaskStakeholderId", "john");
caseService.triggerAdHocFragment(caseId, "Missing data", taskInput);
tasks = caseRuntimeDataService.getCaseTasksAssignedAsStakeholder(caseId,
"john",
null,
new QueryContext());
assertNotNull(tasks);
assertEquals(1, tasks.size());
TaskSummary task = tasks.get(0);
assertEquals("Missing data", task.getName());
// Second case: queries must stay scoped to their case id.
caseId2 = caseService.startCase(deploymentUnit.getIdentifier(), USER_TASK_STAGE_CASE_P_ID, caseFile);
assertNotNull(caseId2);
assertEquals("CASE-0000000002", caseId2);
caseService.triggerAdHocFragment(caseId2, "Missing data", taskInput);
tasks = caseRuntimeDataService.getCaseTasksAssignedAsStakeholder(caseId2,
"john",
null,
new QueryContext());
assertNotNull(tasks);
assertEquals(1, tasks.size());
task = tasks.get(0);
assertEquals("Missing data", task.getName());
tasks = caseRuntimeDataService.getCaseTasksAssignedAsStakeholder(caseId,
"john",
null,
new QueryContext());
assertNotNull(tasks);
assertEquals(1, tasks.size());
task = tasks.get(0);
assertEquals("Missing data", task.getName());
} catch (Exception e) {
logger.error("Unexpected error {}", e.getMessage(), e);
fail("Unexpected exception " + e.getMessage());
} finally {
// Clean up both cases regardless of the outcome.
if (caseId != null) {
caseService.cancelCase(caseId);
}
if (caseId2 != null) {
caseService.cancelCase(caseId2);
}
}
}
@Test
public void testGetProcessDefinitions() {
    // Only the two non-case processes of the deployment should be returned.
    Collection<ProcessDefinition> processes = caseRuntimeDataService.getProcessDefinitions(new QueryContext());
    assertNotNull(processes);
    assertEquals(2, processes.size());
    Map<String, ProcessDefinition> byId = mapProcesses(processes);
    assertTrue(byId.containsKey("UserTask"));
    assertTrue(byId.containsKey("DataVerification"));
    // Filtering by the "User" prefix narrows the result to the matching process.
    processes = caseRuntimeDataService.getProcessDefinitions("User", new QueryContext());
    assertNotNull(processes);
    assertEquals(1, processes.size());
    byId = mapProcesses(processes);
    assertTrue(byId.containsKey("UserTask"));
    // Querying by deployment id yields the same two processes.
    processes = caseRuntimeDataService.getProcessDefinitionsByDeployment(deploymentUnit.getIdentifier(), new QueryContext());
    assertNotNull(processes);
    assertEquals(2, processes.size());
    byId = mapProcesses(processes);
    assertTrue(byId.containsKey("UserTask"));
    assertTrue(byId.containsKey("DataVerification"));
}
@Test
public void testTransitionBetweenStagesInCase() {
Map<String, OrganizationalEntity> roleAssignments = new HashMap<>();
roleAssignments.put("owner", new UserImpl(USER));
Map<String, Object> data = new HashMap<>();
CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentUnit.getIdentifier(), TWO_STAGES_CASE_P_ID, data, roleAssignments);
String caseId = caseService.startCase(deploymentUnit.getIdentifier(), TWO_STAGES_CASE_P_ID, caseFile);
assertNotNull(caseId);
try {
Collection<CaseStageInstance> stage = caseRuntimeDataService.getCaseInstanceStages(caseId, true, new QueryContext(0, 1));
assertNotNull(stage);
assertEquals(1, stage.size());
assertEquals("Stage One", stage.iterator().next().getName());
assertEquals(StageStatus.Active, stage.iterator().next().getStatus());
Collection<AdHocFragment> adhocTasks = caseRuntimeDataService.getAdHocFragmentsForCase(caseId);
assertNotNull(adhocTasks);
assertEquals(1, adhocTasks.size());
assertEquals("Task 1", adhocTasks.iterator().next().getName());
Collection<NodeInstanceDesc> activeNodes = caseRuntimeDataService.getActiveNodesForCase(caseId, new QueryContext(0, 10));
assertNotNull(activeNodes);
assertEquals(1, activeNodes.size());
assertEquals("Stage One", activeNodes.iterator().next().getName());
Collection<NodeInstanceDesc> completedNodes = caseRuntimeDataService.getCompletedNodesForCase(caseId, new QueryContext(0, 10));
assertNotNull(completedNodes);
assertEquals(0, completedNodes.size());
caseService.addDataToCaseFile(caseId, "customData", "nextStagePlease");
stage = caseRuntimeDataService.getCaseInstanceStages(caseId, true, new QueryContext(0, 1));
assertNotNull(stage);
assertEquals(1, stage.size());
assertEquals("Stage Two", stage.iterator().next().getName());
assertEquals(StageStatus.Active, stage.iterator().next().getStatus());
adhocTasks = caseRuntimeDataService.getAdHocFragmentsForCase(caseId);
assertNotNull(adhocTasks);
assertEquals(1, adhocTasks.size());
assertEquals("Task 2", adhocTasks.iterator().next().getName());
activeNodes = caseRuntimeDataService.getActiveNodesForCase(caseId, new QueryContext(0, 10));
assertNotNull(activeNodes);
assertEquals(1, activeNodes.size());
assertEquals("Stage Two", activeNodes.iterator().next().getName());
completedNodes = caseRuntimeDataService.getCompletedNodesForCase(caseId, new QueryContext(0, 10));
assertNotNull(completedNodes);
assertEquals(1, completedNodes.size());
assertEquals("Stage One", completedNodes.iterator().next().getName());
} catch (Exception e) {
logger.error("Unexpected error {}", e.getMessage(), e);
fail("Unexpected exception " + e.getMessage());
} finally {
if (caseId != null) {
caseService.cancelCase(caseId);
}
}
}
@Test
public void testAddSubprocessToEmptyCaseCheckCaseNodes() {
Map<String, Object> data = new HashMap<>();
data.put("name", "my first case");
CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentUnit.getIdentifier(), EMPTY_CASE_P_ID, data);
String caseId = caseService.startCase(deploymentUnit.getIdentifier(), EMPTY_CASE_P_ID, caseFile);
assertNotNull(caseId);
assertEquals(FIRST_CASE_ID,
caseId);
try {
CaseInstance cInstance = caseService.getCaseInstance(caseId);
assertNotNull(cInstance);
assertEquals(FIRST_CASE_ID, cInstance.getCaseId());
assertEquals(deploymentUnit.getIdentifier(),
cInstance.getDeploymentId());
Collection<NodeInstanceDesc> activeNodes = caseRuntimeDataService.getActiveNodesForCase(caseId, new QueryContext(0, 10));
assertNotNull(activeNodes);
assertEquals(0, activeNodes.size());
Collection<NodeInstanceDesc> completedNodes = caseRuntimeDataService.getCompletedNodesForCase(caseId,
new QueryContext(0,
10));
assertNotNull(completedNodes);
assertEquals(0, completedNodes.size());
Map<String, Object> parameters = new HashMap<>();
caseService.addDynamicSubprocess(caseId,
"UserTask",
parameters);
Collection<ProcessInstanceDesc> caseProcessInstances = caseRuntimeDataService.getProcessInstancesForCase(caseId,
new QueryContext());
assertNotNull(caseProcessInstances);
assertEquals(2, caseProcessInstances.size());
activeNodes = caseRuntimeDataService.getActiveNodesForCase(caseId, new QueryContext(0, 10));
assertNotNull(activeNodes);
assertEquals(2,
activeNodes.size());
Map<String, NodeInstanceDesc> mappedNodes = mapNodeInstances(activeNodes);
assertEquals("HumanTaskNode", mappedNodes.get("Hello").getNodeType());
assertEquals("SubProcessNode", mappedNodes.get("[Dynamic] Sub Process").getNodeType());
completedNodes = caseRuntimeDataService.getCompletedNodesForCase(caseId, new QueryContext(0, 10));
assertNotNull(completedNodes);
assertEquals(0,
completedNodes.size());
List<TaskSummary> tasks = caseRuntimeDataService.getCaseTasksAssignedAsPotentialOwner(caseId, "john", null, new QueryContext());
assertEquals(1, tasks.size());
userTaskService.completeAutoProgress(tasks.get(0).getId(),
"john",
null);
activeNodes = caseRuntimeDataService.getActiveNodesForCase(caseId, new QueryContext(0, 10));
assertNotNull(activeNodes);
assertEquals(0, activeNodes.size());
completedNodes = caseRuntimeDataService.getCompletedNodesForCase(caseId, new QueryContext(0, 10));
assertNotNull(completedNodes);
assertEquals(2,
completedNodes.size());
assertEquals("HumanTaskNode", mappedNodes.get("Hello").getNodeType());
assertEquals("SubProcessNode", mappedNodes.get("[Dynamic] Sub Process").getNodeType());
} catch (Exception e) {
logger.error("Unexpected error {}", e.getMessage(), e);
fail("Unexpected exception " + e.getMessage());
} finally {
if (caseId != null) {
caseService.cancelCase(caseId);
}
}
}
@Test
public void testTransitionBetweenStagesWithConditionsInCase() {
Map<String, OrganizationalEntity> roleAssignments = new HashMap<>();
roleAssignments.put("owner", new UserImpl(USER));
Map<String, Object> data = new HashMap<>();
data.put("customData", "none");
CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentUnit.getIdentifier(), TWO_STAGES_CONDITIONS_CASE_P_ID, data, roleAssignments);
String caseId = caseService.startCase(deploymentUnit.getIdentifier(), TWO_STAGES_CONDITIONS_CASE_P_ID, caseFile);
assertNotNull(caseId);
try {
Collection<CaseStageInstance> stage = caseRuntimeDataService.getCaseInstanceStages(caseId,
true,
new QueryContext(0,
1));
assertNotNull(stage);
assertEquals(1,
stage.size());
assertEquals("Stage One", stage.iterator().next().getName());
assertEquals(StageStatus.Active, stage.iterator().next().getStatus());
Collection<AdHocFragment> adhocTasks = caseRuntimeDataService.getAdHocFragmentsForCase(caseId);
assertNotNull(adhocTasks);
assertEquals(1, adhocTasks.size());
assertEquals("Task 1",
adhocTasks.iterator().next().getName());
caseService.triggerAdHocFragment(caseId,
"Task 1",
null);
List<TaskSummary> tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
assertNotNull(tasks);
assertEquals(1,
tasks.size());
assertTask(tasks.get(0),
"john",
"Task 1",
Status.Reserved);
Map<String, Object> params = new HashMap<>();
params.put("myData",
"nextStage");
userTaskService.completeAutoProgress(tasks.get(0).getId(), "john", params);
stage = caseRuntimeDataService.getCaseInstanceStages(caseId, true, new QueryContext(0, 1));
assertNotNull(stage);
assertEquals(1, stage.size());
assertEquals("Stage Two", stage.iterator().next().getName());
assertEquals(StageStatus.Active, stage.iterator().next().getStatus());
adhocTasks = caseRuntimeDataService.getAdHocFragmentsForCase(caseId);
assertNotNull(adhocTasks);
assertEquals(1, adhocTasks.size());
assertEquals("Task 2",
adhocTasks.iterator().next().getName());
caseService.triggerAdHocFragment(caseId, "Task 2", null);
tasks = runtimeDataService.getTasksAssignedAsPotentialOwner("john", new QueryFilter());
assertNotNull(tasks);
assertEquals(1, tasks.size());
assertTask(tasks.get(0),
"john",
"Task 2",
Status.Reserved);
params = new HashMap<>();
params.put("myData",
"none");
userTaskService.completeAutoProgress(tasks.get(0).getId(),
"john",
params);
try {
caseService.getCaseInstance(caseId);
fail("Case should already be finished");
} catch (CaseNotFoundException e) {
// expected
caseId = null;
}
} catch (Exception e) {
logger.error("Unexpected error {}", e.getMessage(), e);
fail("Unexpected exception " + e.getMessage());
} finally {
if (caseId != null) {
caseService.cancelCase(caseId);
}
}
}
@Test
public void testTransitionBetweenStagesInCaseWithActiveElements() {
Map<String, OrganizationalEntity> roleAssignments = new HashMap<>();
roleAssignments.put("owner", new UserImpl(USER));
Map<String, Object> data = new HashMap<>();
CaseFileInstance caseFile = caseService.newCaseFileInstance(deploymentUnit.getIdentifier(), TWO_STAGES_CASE_P_ID, data, roleAssignments);
String caseId = caseService.startCase(deploymentUnit.getIdentifier(), TWO_STAGES_CASE_P_ID, caseFile);
assertNotNull(caseId);
try {
Collection<CaseStageInstance> stage = caseRuntimeDataService.getCaseInstanceStages(caseId, true, new QueryContext(0, 1));
assertNotNull(stage);
assertEquals(1, stage.size());
CaseStageInstance stageInstance = stage.iterator().next();
assertEquals("Stage One", stageInstance.getName());
assertEquals(StageStatus.Active, stageInstance.getStatus());
Collection<NodeInstanceDesc> activeNodes = stageInstance.getActiveNodes();
assertNotNull(activeNodes);
assertEquals(0, activeNodes.size());
caseService.triggerAdHocFragment(caseId, "Task 1", data);
stage = caseRuntimeDataService.getCaseInstanceStages(caseId, true, new QueryContext(0, 1));
assertNotNull(stage);
assertEquals(1, stage.size());
stageInstance = stage.iterator().next();
assertEquals("Stage One", stageInstance.getName());
assertEquals(StageStatus.Active, stageInstance.getStatus());
activeNodes = stageInstance.getActiveNodes();
assertNotNull(activeNodes);
assertEquals(1, activeNodes.size());
assertEquals("Task 1", activeNodes.iterator().next().getName());
caseService.addDataToCaseFile(caseId, "customData", "nextStagePlease");
stage = caseRuntimeDataService.getCaseInstanceStages(caseId, true, new QueryContext(0, 1));
assertNotNull(stage);
assertEquals(1, stage.size());
assertEquals("Stage Two", stage.iterator().next().getName());
assertEquals(StageStatus.Active, stage.iterator().next().getStatus());
stage = caseRuntimeDataService.getCaseInstanceStages(caseId, true, new QueryContext(0, 1));
assertNotNull(stage);
assertEquals(1, stage.size());
stageInstance = stage.iterator().next();
assertEquals("Stage Two", stageInstance.getName());
assertEquals(StageStatus.Active, stageInstance.getStatus());
activeNodes = stageInstance.getActiveNodes();
assertNotNull(activeNodes);
assertEquals(0, activeNodes.size());
caseService.triggerAdHocFragment(caseId, "Task 2", data);
stage = caseRuntimeDataService.getCaseInstanceStages(caseId, true, new QueryContext(0, 1));
assertNotNull(stage);
assertEquals(1, stage.size());
stageInstance = stage.iterator().next();
assertEquals("Stage Two", stageInstance.getName());
assertEquals(StageStatus.Active, stageInstance.getStatus());
activeNodes = stageInstance.getActiveNodes();
assertNotNull(activeNodes);
assertEquals(1, activeNodes.size());
assertEquals("Task 2", activeNodes.iterator().next().getName());
} catch (Exception e) {
logger.error("Unexpected error {}", e.getMessage(), e);
fail("Unexpected exception " + e.getMessage());
} finally {
if (caseId != null) {
caseService.cancelCase(caseId);
}
}
}
@Test
public void testResolveCaseStatuses() {
List<CaseStatus> testStatuses = Arrays.asList(CaseStatus.CANCELLED,
CaseStatus.CLOSED,
CaseStatus.OPEN);
List<Integer> resolvedTestStatuses = ((CaseRuntimeDataServiceImpl) caseRuntimeDataService).resolveCaseStatuses(testStatuses);
List<Integer> resultStatuses = Arrays.asList(3,
2,
1);
List<Integer> invalidResultStatuses = Arrays.asList(1,
2,
3);
assertTrue(resolvedTestStatuses.equals(resultStatuses));
assertFalse(resolvedTestStatuses.equals(invalidResultStatuses));
}
@Test
public void testCaseStatusCreation() {
List<Integer> testStatuses = Arrays.asList(1);
List<String> testStatusesString = Arrays.asList("open");
List<CaseStatus> testCaseStatusesFromIds = CaseStatus.fromIdList(testStatuses);
List<CaseStatus> testCaseStatusesFromNames = CaseStatus.fromNameList(testStatusesString);
assertNotNull(testCaseStatusesFromIds);
assertNotNull(testCaseStatusesFromNames);
assertEquals(1,
testCaseStatusesFromIds.size());
assertEquals(1,
testCaseStatusesFromNames.size());
assertTrue(testCaseStatusesFromIds.contains(CaseStatus.OPEN));
assertTrue(testCaseStatusesFromNames.contains(CaseStatus.OPEN));
testStatuses = Arrays.asList(1, 2);
testStatusesString = Arrays.asList("open", "closed");
testCaseStatusesFromIds = CaseStatus.fromIdList(testStatuses);
testCaseStatusesFromNames = CaseStatus.fromNameList(testStatusesString);
assertNotNull(testCaseStatusesFromIds);
assertNotNull(testCaseStatusesFromNames);
assertEquals(2,
testCaseStatusesFromIds.size());
assertEquals(2,
testCaseStatusesFromNames.size());
assertTrue(testCaseStatusesFromIds.contains(CaseStatus.OPEN));
assertTrue(testCaseStatusesFromIds.contains(CaseStatus.CLOSED));
assertTrue(testCaseStatusesFromNames.contains(CaseStatus.OPEN));
assertTrue(testCaseStatusesFromNames.contains(CaseStatus.CLOSED));
testStatuses = Arrays.asList(1, 2, 3);
testStatusesString = Arrays.asList("open", "closed", "cancelled");
testCaseStatusesFromIds = CaseStatus.fromIdList(testStatuses);
testCaseStatusesFromNames = CaseStatus.fromNameList(testStatusesString);
assertNotNull(testCaseStatusesFromIds);
assertNotNull(testCaseStatusesFromNames);
assertEquals(3,
testCaseStatusesFromIds.size());
assertEquals(3,
testCaseStatusesFromNames.size());
assertTrue(testCaseStatusesFromIds.contains(CaseStatus.OPEN));
assertTrue(testCaseStatusesFromIds.contains(CaseStatus.CLOSED));
assertTrue(testCaseStatusesFromIds.contains(CaseStatus.CANCELLED));
assertTrue(testCaseStatusesFromNames.contains(CaseStatus.OPEN));
assertTrue(testCaseStatusesFromNames.contains(CaseStatus.CLOSED));
assertTrue(testCaseStatusesFromNames.contains(CaseStatus.CANCELLED));
}
}
| |
package jp.gr.java_conf.dyama.rink.parser.core;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import jp.gr.java_conf.dyama.rink.ml.svm.BinaryFeatureVector;
import jp.gr.java_conf.dyama.rink.ml.svm.BinaryFeatureVector.Buffer;
import jp.gr.java_conf.dyama.rink.parser.FeatureFunction;
import jp.gr.java_conf.dyama.rink.parser.Sample;
import jp.gr.java_conf.dyama.rink.parser.Sentence;
import jp.gr.java_conf.dyama.rink.parser.Word;
import jp.gr.java_conf.dyama.rink.parser.core.FeatureImpl.POSITION;
import jp.gr.java_conf.dyama.rink.parser.core.FeatureImpl.RELATION;
import jp.gr.java_conf.dyama.rink.parser.core.FeatureImpl.TYPE;
public class IWPT2003BestFeatureFunction implements FeatureFunction {
private static final long serialVersionUID = 5752622580255058238L;
/** the length of left context: maximum 7 */
private int length_of_left_context_;
/** the length of right context: maximum 7 */
private int length_of_right_context_;
/**
* @param left the length of left context for extracting features.
* @param right the length of right context for extracting features.
* @throws IllegalArgumentException if the length of left context is not from 1 to 7.
* @throws IllegalArgumentException if the length is not from 1 to 7.
*/
public IWPT2003BestFeatureFunction(int left, int right) {
if (left < 1 || left > 7)
throw new IllegalArgumentException("the length of left context is out of range.");
if (right < 1 || right > 7)
throw new IllegalArgumentException("the length of right context is out of range.");
length_of_left_context_ = left ;
length_of_right_context_ = right ;
}
/**
* Converts the relative position to the POSITION instance.
* @param pos the relative position (a negative number means that the position is in the left context, 0 means left/right target node).
* @param left the flag of left or right context. the flag is used to identify left/right target node when pos is 0.
* @return POSITION instance
*/
private POSITION toPosition(int pos, boolean left) {
if (left){
switch (pos) {
case -7: return POSITION.L7 ; // OK
case -6: return POSITION.L6 ; // OK
case -5: return POSITION.L5 ; // OK
case -4: return POSITION.L4 ; // OK
case -3: return POSITION.L3 ; // OK
case -2: return POSITION.L2 ;
case -1: return POSITION.L1 ;
case 0: return POSITION.L0 ;
}
}
switch (pos) {
case 0: return POSITION.R0 ;
case 1: return POSITION.R1 ;
case 2: return POSITION.R2 ;
case 3: return POSITION.R3 ;
case 4: return POSITION.R4 ;
case 5: return POSITION.R5 ; // OK
case 6: return POSITION.R6 ; // OK
case 7: return POSITION.R7 ; // OK
}
return null ; // OK
}
/**
* Adds the encoded feature to the buffer.
*
* @param position the position.
* @param relation the relation.
* @param type the type of features.
* @param value the feature's value. add no feature if value is 0 and fewer.
* @param buffer the buffer
* @return true if the encoded feature can be added to the buffer, otherwise false.
*/
private boolean addFeature(POSITION position, RELATION relation, TYPE type, int value, FeatureImpl f, Buffer buffer) {
f.set(position, relation, type, value);
int i = f.encode();
if (i > 0){
buffer.add(i);
return true;
}
return false ; // OK
}
/**
* Adds the left context features to the sample.
*
* @param sample the sample.
*/
private void addLeftContextFeature(SampleImpl sample) {
FeatureImpl f = sample.getFeature();
BinaryFeatureVector.Buffer buffer = sample.getFeatureBuffer();
State state = sample.getState();
Sentence sentence = sample.getSentence();
for (int t = -length_of_left_context_; t <= 0; t++) {
int nodeID = state.getIDofLeftNode(t);
POSITION position = toPosition(t, true);
Word word = WordImpl.BOS;
if (nodeID >= 0)
word = sentence.getWord(nodeID);
addFeature(position, RELATION.SELF, TYPE.LEXCON, word.getID(), f, buffer);
addFeature(position, RELATION.SELF, TYPE.POS, word.getPOS().getID(), f, buffer);
if (nodeID < 0)
continue;
DependencyRelations deps = state.getDependencies();
if (deps.getNumberOfChildren(nodeID) == 0)
continue;
for (int i = 0 ; i < deps.getNumberOfChildren(nodeID); i++) {
int childID = deps.getChildID(nodeID, i);
word = sentence.getWord(childID);
RELATION rel = RELATION.LEFT_CHILD;
if (nodeID < childID)
rel = RELATION.RIGHT_CHILD;
addFeature(position, rel, TYPE.LEXCON, word.getID(), f, buffer);
addFeature(position, rel, TYPE.POS, word.getPOS().getID(), f, buffer);
}
}
}
/**
* Adds the right context features to the sample.
*
* @param sample the sample.
*/
private void addRightContextFeature(SampleImpl sample) {
FeatureImpl f = sample.getFeature();
BinaryFeatureVector.Buffer buffer = sample.getFeatureBuffer();
State state = sample.getState();
Sentence sentence = sample.getSentence();
for (int t = 0; t <= length_of_right_context_; t++) {
int nodeID = state.getRightNode(t);
POSITION position = toPosition(t, false);
Word word = WordImpl.EOS;
if (nodeID >= 0)
word = sentence.getWord(nodeID);
addFeature(position, RELATION.SELF, TYPE.LEXCON, word.getID(), f, buffer);
addFeature(position, RELATION.SELF, TYPE.POS, word.getPOS().getID(), f, buffer);
if (nodeID < 0)
continue;
DependencyRelations deps = state.getDependencies();
if (deps.getNumberOfChildren(nodeID) == 0)
continue;
for (int i = 0 ; i < deps.getNumberOfChildren(nodeID); i++) {
int childID = deps.getChildID(nodeID, i);
word = sentence.getWord(childID);
RELATION rel = RELATION.LEFT_CHILD;
if (nodeID < childID)
rel = RELATION.RIGHT_CHILD;
addFeature(position, rel, TYPE.LEXCON, word.getID(), f, buffer);
addFeature(position, rel, TYPE.POS, word.getPOS().getID(), f, buffer);
}
}
}
@Override
public void apply(Sample _sample) {
if (_sample == null)
throw new IllegalArgumentException("the sample is null.");
SampleImpl sample = (SampleImpl) _sample;
sample.getFeatureBuffer().clear();
addLeftContextFeature(sample);
addRightContextFeature(sample);
}
private void readObject(ObjectInputStream in) throws IOException{
int l = in.readInt();
int r = in.readInt();
if (l < 1 || l > 7)
throw new InvalidObjectException("the length of left context is out of range.");
if (r < 1 || r > 7)
throw new InvalidObjectException("the length of right context is out of range.");
length_of_left_context_ = l ;
length_of_right_context_ = r ;
}
private void writeObject(ObjectOutputStream out) throws IOException{
out.writeInt(length_of_left_context_);
out.writeInt(length_of_right_context_);
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*/
package com.microsoft.azure.management;
import com.microsoft.azure.AzureEnvironment;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.PagedList;
import com.microsoft.azure.management.compute.Disks;
import com.microsoft.azure.management.compute.Snapshots;
import com.microsoft.azure.management.compute.VirtualMachineCustomImages;
import com.microsoft.azure.management.servicebus.ServiceBusNamespaces;
import com.microsoft.azure.management.servicebus.implementation.ServiceBusManager;
import com.microsoft.rest.RestClient;
import com.microsoft.azure.credentials.ApplicationTokenCredentials;
import com.microsoft.azure.credentials.AzureTokenCredentials;
import com.microsoft.azure.management.batch.BatchAccounts;
import com.microsoft.azure.management.batch.implementation.BatchManager;
import com.microsoft.azure.management.cdn.CdnProfiles;
import com.microsoft.azure.management.cdn.implementation.CdnManager;
import com.microsoft.azure.management.compute.AvailabilitySets;
import com.microsoft.azure.management.compute.ComputeUsages;
import com.microsoft.azure.management.compute.VirtualMachineImages;
import com.microsoft.azure.management.compute.VirtualMachineScaleSets;
import com.microsoft.azure.management.compute.VirtualMachines;
import com.microsoft.azure.management.compute.implementation.ComputeManager;
import com.microsoft.azure.management.dns.DnsZones;
import com.microsoft.azure.management.dns.implementation.DnsZoneManager;
import com.microsoft.azure.management.keyvault.Vaults;
import com.microsoft.azure.management.keyvault.implementation.KeyVaultManager;
import com.microsoft.azure.management.network.ApplicationGateways;
import com.microsoft.azure.management.network.LoadBalancers;
import com.microsoft.azure.management.network.NetworkInterfaces;
import com.microsoft.azure.management.network.NetworkSecurityGroups;
import com.microsoft.azure.management.network.NetworkUsages;
import com.microsoft.azure.management.network.Networks;
import com.microsoft.azure.management.network.PublicIPAddresses;
import com.microsoft.azure.management.network.RouteTables;
import com.microsoft.azure.management.network.implementation.NetworkManager;
import com.microsoft.azure.management.redis.RedisCaches;
import com.microsoft.azure.management.redis.implementation.RedisManager;
import com.microsoft.azure.management.resources.Deployments;
import com.microsoft.azure.management.resources.Features;
import com.microsoft.azure.management.resources.GenericResources;
import com.microsoft.azure.management.resources.PolicyAssignments;
import com.microsoft.azure.management.resources.PolicyDefinitions;
import com.microsoft.azure.management.resources.Providers;
import com.microsoft.azure.management.resources.ResourceGroups;
import com.microsoft.azure.management.resources.Subscription;
import com.microsoft.azure.management.resources.Subscriptions;
import com.microsoft.azure.management.resources.Tenants;
import com.microsoft.azure.management.resources.fluentcore.arm.AzureConfigurable;
import com.microsoft.azure.management.resources.fluentcore.arm.implementation.AzureConfigurableImpl;
import com.microsoft.azure.management.resources.implementation.ResourceManagementClientImpl;
import com.microsoft.azure.management.resources.implementation.ResourceManager;
import com.microsoft.azure.management.sql.SqlServers;
import com.microsoft.azure.management.sql.implementation.SqlServerManager;
import com.microsoft.azure.management.storage.StorageAccounts;
import com.microsoft.azure.management.storage.Usages;
import com.microsoft.azure.management.storage.implementation.StorageManager;
import com.microsoft.azure.management.trafficmanager.TrafficManagerProfiles;
import com.microsoft.azure.management.trafficmanager.implementation.TrafficManager;
import com.microsoft.azure.management.apigeneration.Beta;
import com.microsoft.azure.management.appservice.WebApps;
import com.microsoft.azure.management.appservice.implementation.AppServiceManager;
import java.io.File;
import java.io.IOException;
/**
* The entry point for accessing resource management APIs in Azure.
*/
public final class Azure {
private final ResourceManager resourceManager;
private final StorageManager storageManager;
private final ComputeManager computeManager;
private final NetworkManager networkManager;
private final KeyVaultManager keyVaultManager;
private final BatchManager batchManager;
private final TrafficManager trafficManager;
private final RedisManager redisManager;
private final CdnManager cdnManager;
private final DnsZoneManager dnsZoneManager;
private final AppServiceManager appServiceManager;
private final SqlServerManager sqlServerManager;
private final ServiceBusManager serviceBusManager;
private final String subscriptionId;
private final Authenticated authenticated;
/**
* Authenticate to Azure using an Azure credentials object.
*
* @param credentials the credentials object
* @return the authenticated Azure client
*/
public static Authenticated authenticate(AzureTokenCredentials credentials) {
return new AuthenticatedImpl(new RestClient.Builder()
.withBaseUrl(credentials.environment(), AzureEnvironment.Endpoint.RESOURCE_MANAGER)
.withCredentials(credentials)
.build(), credentials.domain());
}
/**
* Authenticates API access using a properties file containing the required credentials.
* @param credentialsFile the file containing the credentials in the standard Java properties file format,
* with the following keys:<p>
* <code>
* subscription= #subscription ID<br>
* tenant= #tenant ID<br>
* client= #client id<br>
* key= #client key<br>
* managementURI= #management URI<br>
* baseURL= #base URL<br>
* authURL= #authentication URL<br>
*</code>
* @return authenticated Azure client
* @throws IOException exception thrown from file access
*/
public static Authenticated authenticate(File credentialsFile) throws IOException {
ApplicationTokenCredentials credentials = ApplicationTokenCredentials.fromFile(credentialsFile);
return new AuthenticatedImpl(new RestClient.Builder()
.withBaseUrl(credentials.environment(), AzureEnvironment.Endpoint.RESOURCE_MANAGER)
.withCredentials(credentials)
.build(), credentials.domain()).withDefaultSubscription(credentials.defaultSubscriptionId());
}
/**
* Authenticates API access using a {@link RestClient} instance.
* @param restClient the {@link RestClient} configured with Azure authentication credentials
* @param tenantId the tenantId in Active Directory
* @return authenticated Azure client
*/
public static Authenticated authenticate(RestClient restClient, String tenantId) {
return new AuthenticatedImpl(restClient, tenantId);
}
/**
* Authenticates API access using a {@link RestClient} instance.
* @param restClient the {@link RestClient} configured with Azure authentication credentials
* @param tenantId the tenantId in Active Directory
* @param subscriptionId the ID of the subscription
* @return authenticated Azure client
*/
public static Authenticated authenticate(RestClient restClient, String tenantId, String subscriptionId) {
return new AuthenticatedImpl(restClient, tenantId).withDefaultSubscription(subscriptionId);
}
/**
* @return an interface allow configurations on the client.
*/
public static Configurable configure() {
return new ConfigurableImpl();
}
/**
* The interface allowing configurations to be made on the client.
*/
public interface Configurable extends AzureConfigurable<Configurable> {
/**
* Authenticates API access based on the provided credentials.
*
* @param credentials The credentials to authenticate API access with
* @return the authenticated Azure client
*/
Authenticated authenticate(AzureTokenCredentials credentials);
/**
* Authenticates API access using a properties file containing the required credentials.
*
* @param credentialsFile the file containing the credentials in the standard Java properties file format following
* the same schema as {@link Azure#authenticate(File)}.<p>
* @return Authenticated Azure client
* @throws IOException exceptions thrown from file access
*/
Authenticated authenticate(File credentialsFile) throws IOException;
}
/**
* The implementation for {@link Configurable}.
*/
private static final class ConfigurableImpl extends AzureConfigurableImpl<Configurable> implements Configurable {
@Override
public Authenticated authenticate(AzureTokenCredentials credentials) {
return Azure.authenticate(buildRestClient(credentials), credentials.domain());
}
@Override
public Authenticated authenticate(File credentialsFile) throws IOException {
ApplicationTokenCredentials credentials = ApplicationTokenCredentials.fromFile(credentialsFile);
return Azure.authenticate(buildRestClient(credentials), credentials.domain(), credentials.defaultSubscriptionId());
}
}
/**
* Provides authenticated access to a subset of Azure APIs that do not require a specific subscription.
* <p>
* To access the subscription-specific APIs, use {@link Authenticated#withSubscription(String)},
* or {@link Authenticated#withDefaultSubscription()} if a default subscription has already been previously specified
* (for example, in a previously specified authentication file).
* @see Azure#authenticate(File)
*/
public interface Authenticated {
/**
* Entry point to subscription management APIs.
*
* @return Subscriptions interface providing access to subscription management
*/
Subscriptions subscriptions();
/**
* Entry point to tenant management APIs.
*
* @return Tenants interface providing access to tenant management
*/
Tenants tenants();
/**
* Selects a specific subscription for the APIs to work with.
* <p>
* Most Azure APIs require a specific subscription to be selected.
* @param subscriptionId the ID of the subscription
* @return an authenticated Azure client configured to work with the specified subscription
*/
Azure withSubscription(String subscriptionId);
/**
* Selects the default subscription as the subscription for the APIs to work with.
* <p>
* The default subscription can be specified inside the authentication file using {@link Azure#authenticate(File)}.
* If no default subscription has been previously provided, the first subscription as
* returned by {@link Authenticated#subscriptions()} will be selected.
* @return an authenticated Azure client configured to work with the default subscription
* @throws CloudException exception thrown from Azure
* @throws IOException exception thrown from serialization/deserialization
*/
Azure withDefaultSubscription() throws CloudException, IOException;
}
/**
* The implementation for {@link Authenticated}.
*/
private static final class AuthenticatedImpl implements Authenticated {
private final RestClient restClient;
private final ResourceManager.Authenticated resourceManagerAuthenticated;
private String defaultSubscription;
private String tenantId;
private AuthenticatedImpl(RestClient restClient, String tenantId) {
this.resourceManagerAuthenticated = ResourceManager.authenticate(restClient);
this.restClient = restClient;
this.tenantId = tenantId;
}
private AuthenticatedImpl withDefaultSubscription(String subscriptionId) {
this.defaultSubscription = subscriptionId;
return this;
}
@Override
public Subscriptions subscriptions() {
return resourceManagerAuthenticated.subscriptions();
}
@Override
public Tenants tenants() {
return resourceManagerAuthenticated.tenants();
}
@Override
public Azure withSubscription(String subscriptionId) {
return new Azure(restClient, subscriptionId, tenantId, this);
}
@Override
public Azure withDefaultSubscription() throws CloudException, IOException {
if (this.defaultSubscription != null) {
return withSubscription(this.defaultSubscription);
} else {
PagedList<Subscription> subs = this.subscriptions().list();
if (!subs.isEmpty()) {
return withSubscription(subs.get(0).subscriptionId());
} else {
return withSubscription(null);
}
}
}
}
private Azure(RestClient restClient, String subscriptionId, String tenantId, Authenticated authenticated) {
ResourceManagementClientImpl resourceManagementClient = new ResourceManagementClientImpl(restClient);
resourceManagementClient.withSubscriptionId(subscriptionId);
this.resourceManager = ResourceManager.authenticate(restClient).withSubscription(subscriptionId);
this.storageManager = StorageManager.authenticate(restClient, subscriptionId);
this.computeManager = ComputeManager.authenticate(restClient, subscriptionId);
this.networkManager = NetworkManager.authenticate(restClient, subscriptionId);
this.keyVaultManager = KeyVaultManager.authenticate(restClient, tenantId, subscriptionId);
this.batchManager = BatchManager.authenticate(restClient, subscriptionId);
this.trafficManager = TrafficManager.authenticate(restClient, subscriptionId);
this.redisManager = RedisManager.authenticate(restClient, subscriptionId);
this.cdnManager = CdnManager.authenticate(restClient, subscriptionId);
this.dnsZoneManager = DnsZoneManager.authenticate(restClient, subscriptionId);
this.appServiceManager = AppServiceManager.authenticate(restClient, tenantId, subscriptionId);
this.sqlServerManager = SqlServerManager.authenticate(restClient, subscriptionId);
this.serviceBusManager = ServiceBusManager.authenticate(restClient, subscriptionId);
this.subscriptionId = subscriptionId;
this.authenticated = authenticated;
}
/**
* @return the currently selected subscription ID this client is authenticated to work with
*/
public String subscriptionId() {
return this.subscriptionId;
}
/**
* @return the currently selected subscription this client is authenticated to work with
*/
public Subscription getCurrentSubscription() {
return this.subscriptions().getById(this.subscriptionId());
}
/**
* @return subscriptions that this authenticated client has access to
*/
public Subscriptions subscriptions() {
return this.authenticated.subscriptions();
}
/**
* @return entry point to managing resource groups
*/
public ResourceGroups resourceGroups() {
return this.resourceManager.resourceGroups();
}
/**
* @return entry point to managing deployments
*/
public Deployments deployments() {
return this.resourceManager.deployments();
}
/**
* @return entry point to management generic resources
*/
public GenericResources genericResources() {
return resourceManager.genericResources();
}
/**
* @return entry point to managing features
*/
public Features features() {
return resourceManager.features();
}
/**
* @return entry point to managing resource providers
*/
public Providers providers() {
return resourceManager.providers();
}
/**
* @return entry point to managing policy definitions.
*/
public PolicyDefinitions policyDefinitions() {
return resourceManager.policyDefinitions();
}
/**
* @return entry point to managing policy assignments.
*/
public PolicyAssignments policyAssignments() {
return resourceManager.policyAssignments();
}
/**
* @return entry point to managing storage accounts
*/
public StorageAccounts storageAccounts() {
return storageManager.storageAccounts();
}
/**
* @return entry point to managing storage account usages
*/
public Usages storageUsages() {
return storageManager.usages();
}
/**
* @return entry point to managing availability sets
*/
public AvailabilitySets availabilitySets() {
return computeManager.availabilitySets();
}
/**
* @return entry point to managing virtual networks
*/
public Networks networks() {
return networkManager.networks();
}
/**
* @return entry point to managing route tables
*/
public RouteTables routeTables() {
return networkManager.routeTables();
}
/**
* @return entry point to managing load balancers
*/
public LoadBalancers loadBalancers() {
return networkManager.loadBalancers();
}
/**
* @return entry point to managing application gateways
*/
@Beta
public ApplicationGateways applicationGateways() {
return networkManager.applicationGateways();
}
/**
* @return entry point to managing network security groups
*/
public NetworkSecurityGroups networkSecurityGroups() {
return networkManager.networkSecurityGroups();
}
/**
* @return entry point to managing network resource usages
*/
public NetworkUsages networkUsages() {
return networkManager.usages();
}
/**
* @return entry point to managing virtual machines
*/
public VirtualMachines virtualMachines() {
return computeManager.virtualMachines();
}
/**
* @return entry point to managing virtual machine scale sets.
*/
public VirtualMachineScaleSets virtualMachineScaleSets() {
return computeManager.virtualMachineScaleSets();
}
/**
* @return entry point to managing virtual machine images
*/
public VirtualMachineImages virtualMachineImages() {
return computeManager.virtualMachineImages();
}
/**
* @return entry point to managing virtual machine custom images
*/
public VirtualMachineCustomImages virtualMachineCustomImages() {
return computeManager.virtualMachineCustomImages();
}
/**
* @return entry point to managing managed disks
*/
public Disks disks() {
return computeManager.disks();
}
/**
* @return entry point to managing managed snapshots
*/
public Snapshots snapshots() {
return computeManager.snapshots();
}
/**
* @return entry point to managing public IP addresses
*/
public PublicIPAddresses publicIPAddresses() {
return this.networkManager.publicIPAddresses();
}
/**
* @return entry point to managing network interfaces
*/
public NetworkInterfaces networkInterfaces() {
return this.networkManager.networkInterfaces();
}
/**
* @return entry point to managing compute resource usages
*/
public ComputeUsages computeUsages() {
return computeManager.usages();
}
/**
* @return entry point to managing key vaults
*/
public Vaults vaults() {
return this.keyVaultManager.vaults();
}
/**
* @return entry point to managing batch accounts.
*/
public BatchAccounts batchAccounts() {
return batchManager.batchAccounts();
}
/**
* @return entry point to managing traffic manager profiles.
*/
public TrafficManagerProfiles trafficManagerProfiles() {
return trafficManager.profiles();
}
/**
* @return entry point to managing Redis Caches.
*/
public RedisCaches redisCaches() {
return redisManager.redisCaches();
}
/**
* @return entry point to managing cdn manager profiles.
*/
public CdnProfiles cdnProfiles() {
return cdnManager.profiles();
}
/**
* @return entry point to managing DNS zones.
*/
public DnsZones dnsZones() {
return dnsZoneManager.zones();
}
/**
* @return entry point to managing web apps.
*/
@Beta
public WebApps webApps() {
return appServiceManager.webApps();
}
/**
* @return entry point to managing app services.
*/
@Beta
public AppServiceManager appServices() {
return appServiceManager;
}
/**
* @return entry point to managing Sql server.
*/
public SqlServers sqlServers() {
return sqlServerManager.sqlServers();
}
/**
* @return entry point to managing Service Bus.
*/
public ServiceBusNamespaces serviceBusNamespaces() {
return serviceBusManager.namespaces();
}
}
| |
/*
* Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.openSAML.extraction;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType;
import gov.hhs.fha.nhinc.common.nhinccommon.CeType;
import gov.hhs.fha.nhinc.common.nhinccommon.HomeCommunityType;
import gov.hhs.fha.nhinc.common.nhinccommon.PersonNameType;
import gov.hhs.fha.nhinc.common.nhinccommon.SamlAuthnStatementType;
import gov.hhs.fha.nhinc.common.nhinccommon.SamlAuthzDecisionStatementEvidenceAssertionType;
import gov.hhs.fha.nhinc.common.nhinccommon.SamlAuthzDecisionStatementEvidenceConditionsType;
import gov.hhs.fha.nhinc.common.nhinccommon.SamlAuthzDecisionStatementEvidenceType;
import gov.hhs.fha.nhinc.common.nhinccommon.SamlAuthzDecisionStatementType;
import gov.hhs.fha.nhinc.common.nhinccommon.SamlIssuerType;
import gov.hhs.fha.nhinc.common.nhinccommon.SamlSignatureKeyInfoType;
import gov.hhs.fha.nhinc.common.nhinccommon.SamlSignatureType;
import gov.hhs.fha.nhinc.common.nhinccommon.UserType;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.ws.security.saml.ext.OpenSAMLUtil;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* @author dharley
*
*/
public class OpenSAMLAssertionExtractorImplTest {
    private final OpenSAMLAssertionExtractorImpl openSAMLAssertionExtractorImpl = new OpenSAMLAssertionExtractorImpl();
    static {
        // OpenSAML must be bootstrapped once before any SAML parsing in these tests.
        OpenSAMLUtil.initSamlEngine();
    }
    /**
     * When the SAML element is null, the extracted assertion will be null.
     * @throws Exception on error.
     */
    @Test
    public void testNullAssertionElement() throws Exception {
        assertNull(openSAMLAssertionExtractorImpl.extractSAMLAssertion(null));
    }
    /**
     * Tests SAML Assertion populated with all possible Assertion elements and attributes, verify they are populated.
     * @throws Exception on error.
     */
    @Test
    public void testCompleteSamlAssertion() throws Exception {
        // the first "/" is intentionally not using File.separator due to differences in how windows and unix based
        // operating systems handle the class.getResource method. Please see GATEWAY-2873 for more details.
        AssertionType assertionType = openSAMLAssertionExtractorImpl.extractSAMLAssertion(getElementForSamlFile(
                "/" + "testing_saml" + File.separator + "complete_saml.xml"));
        assertNotNull(assertionType);
        verifyHomeCommunity(assertionType.getHomeCommunity(), "2.16.840.1.113883.3.424", null);
        verifyIssuer(assertionType.getSamlIssuer());
        verifyDecisionStatement(assertionType.getSamlAuthzDecisionStatement());
        verifyUser(assertionType.getUserInfo());
        verifyAuthnStatement(assertionType.getSamlAuthnStatement());
        verifyUniquePatientId(assertionType.getUniquePatientId());
        verifyCeType(assertionType.getPurposeOfDisclosureCoded(), "OPERATIONS", "2.16.840.1.113883.3.18.7.1",
                "nhin-purpose", "Healthcare Operations");
        verifySignature(assertionType.getSamlSignature());
    }
    /** Verifies the extracted SAML issuer value and its X509SubjectName format. */
    private void verifyIssuer(SamlIssuerType issuer) {
        assertEquals("CN=SAML User,OU=SU,O=SAML User,L=Los Angeles,ST=CA,C=US", issuer.getIssuer());
        assertEquals("urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName", issuer.getIssuerFormat());
    }
    /** Verifies a home community's id and (possibly null) name. */
    private void verifyHomeCommunity(HomeCommunityType homeCommunity, String id, String name) {
        assertEquals(id, homeCommunity.getHomeCommunityId());
        assertEquals(name, homeCommunity.getName());
    }
    /** Verifies exactly one unique patient id was extracted, in HL7 CX^^^&OID&ISO form. */
    private void verifyUniquePatientId(List<String> uniquePatientId) {
        assertEquals(1, uniquePatientId.size());
        assertEquals("RI1.101.00043^^^&2.16.840.1.113883.3.424&ISO", uniquePatientId.get(0));
    }
    /** Verifies the authn statement fields; subject locality is absent in the sample file. */
    private void verifyAuthnStatement(SamlAuthnStatementType statement) {
        assertEquals("urn:oasis:names:tc:SAML:2.0:ac:classes:X509", statement.getAuthContextClassRef());
        assertEquals("2010-05-01T02:09:01.089Z", statement.getAuthInstant());
        assertEquals("123456", statement.getSessionIndex());
        assertEquals(null, statement.getSubjectLocalityAddress());
        assertEquals(null, statement.getSubjectLocalityDNSName());
    }
    /** Verifies the authz decision statement plus its nested evidence assertion and conditions. */
    private void verifyDecisionStatement(SamlAuthzDecisionStatementType decisionStatement) {
        assertNotNull(decisionStatement);
        // verify decision statement
        assertEquals("Permit", decisionStatement.getDecision());
        assertEquals("https://nhinri1c23.aegis.net:8181/NhinConnect/EntityPatientDiscoverySecured",
                decisionStatement.getResource());
        assertEquals("Execute", decisionStatement.getAction());
        // verify decision statement evidence
        SamlAuthzDecisionStatementEvidenceType evidence = decisionStatement.getEvidence();
        SamlAuthzDecisionStatementEvidenceAssertionType evidenceAssertion = evidence.getAssertion();
        assertEquals("759724ff-e9ce-4a7f-a55b-fc41ffe21a75", evidenceAssertion.getId());
        assertEquals("2010-05-01T02:09:01.104Z", evidenceAssertion.getIssueInstant());
        assertEquals("2.0", evidenceAssertion.getVersion());
        assertEquals("CN=SAML User,OU=SU,O=SAML User,L=Los Angeles,ST=CA,C=US", evidenceAssertion.getIssuer());
        assertEquals("urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName", evidenceAssertion.getIssuerFormat());
        SamlAuthzDecisionStatementEvidenceConditionsType evidenceConditions = evidenceAssertion.getConditions();
        assertEquals("2010-05-01T02:09:01.104Z", evidenceConditions.getNotBefore());
        assertEquals("2010-05-01T02:09:01.104Z", evidenceConditions.getNotOnOrAfter());
    }
    /** Verifies the extracted user: organization, person name, role code, and user name. */
    private void verifyUser(UserType user) {
        verifyHomeCommunity(user.getOrg(), "2.16.840.1.113883.3.424", "2.16.840.1.113883.3.424");
        PersonNameType personName = user.getPersonName();
        assertEquals("Testcase", personName.getFamilyName());
        assertEquals("Interop\n IT Testcase", personName.getFullName());
        assertEquals("Interop", personName.getGivenName());
        assertEquals("IT", personName.getSecondNameOrInitials());
        assertNull(personName.getNameType());
        assertNull(personName.getPrefix());
        assertNull(personName.getSuffix());
        verifyCeType(user.getRoleCoded(), "46255001", "2.16.840.1.113883.6.96", "SNOMED_CT", "Pharmacist");
        assertEquals("UID=Scenario 45 PDR-5.7", user.getUserName());
    }
    /** Verifies all four fields of a coded element (CE) value. */
    private void verifyCeType(CeType ceType, String code, String codeSystem, String codeSystemName,
            String displayName) {
        assertEquals(code, ceType.getCode());
        assertEquals(codeSystem, ceType.getCodeSystem());
        assertEquals(codeSystemName, ceType.getCodeSystemName());
        assertEquals(displayName, ceType.getDisplayName());
    }
    /** Verifies the signature value and RSA key info components are present. */
    private void verifySignature(SamlSignatureType signature) {
        assertNotNull(signature.getSignatureValue());
        SamlSignatureKeyInfoType keyInfo = signature.getKeyInfo();
        assertNotNull(keyInfo.getRsaKeyValueExponent());
        assertNotNull(keyInfo.getRsaKeyValueModulus());
    }
    /**
     * Parses the given classpath SAML file (namespace-aware) and returns its document element.
     */
    private Element getElementForSamlFile(String samlFileName) throws Exception {
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        documentBuilderFactory.setNamespaceAware(true);
        DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
        Document document = documentBuilder.parse(getSamlFile(samlFileName));
        return document.getDocumentElement();
    }
    /**
     * Resolves a classpath resource name to a {@link File}, failing the test with a clear
     * message when the resource is missing or its URI is malformed.
     */
    private File getSamlFile(String samlFileName) {
        java.net.URL resourceUrl = this.getClass().getResource(samlFileName);
        if (resourceUrl == null) {
            // Previously this NPE'd on getResource(...).toURI(); fail with a useful message instead.
            fail("Test resource not found on classpath: " + samlFileName);
        }
        URI uri = null;
        try {
            uri = resourceUrl.toURI();
        } catch (URISyntaxException e) {
            fail("Could not build URI for filepath. " + e.getMessage());
        }
        return new File(uri);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.core;
import static org.apache.beam.runners.core.WindowMatchers.isSingleWindowedValue;
import static org.apache.beam.runners.core.WindowMatchers.isWindowedValue;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.emptyIterable;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.withSettings;
import com.google.common.collect.Iterables;
import java.util.List;
import org.apache.beam.runners.core.triggers.TriggerStateMachine;
import org.apache.beam.sdk.coders.VarIntCoder;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.CombineWithContext.CombineFnWithContext;
import org.apache.beam.sdk.transforms.CombineWithContext.Context;
import org.apache.beam.sdk.transforms.Sum;
import org.apache.beam.sdk.transforms.windowing.AfterEach;
import org.apache.beam.sdk.transforms.windowing.AfterFirst;
import org.apache.beam.sdk.transforms.windowing.AfterPane;
import org.apache.beam.sdk.transforms.windowing.AfterProcessingTime;
import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.FixedWindows;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindows;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.apache.beam.sdk.transforms.windowing.Never;
import org.apache.beam.sdk.transforms.windowing.OutputTimeFns;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.transforms.windowing.PaneInfo.Timing;
import org.apache.beam.sdk.transforms.windowing.Repeatedly;
import org.apache.beam.sdk.transforms.windowing.Sessions;
import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
import org.apache.beam.sdk.transforms.windowing.Trigger;
import org.apache.beam.sdk.transforms.windowing.Window.ClosingBehavior;
import org.apache.beam.sdk.transforms.windowing.WindowFn;
import org.apache.beam.sdk.transforms.windowing.WindowMappingFn;
import org.apache.beam.sdk.util.SideInputReader;
import org.apache.beam.sdk.util.TimeDomain;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.util.WindowingStrategy;
import org.apache.beam.sdk.util.WindowingStrategy.AccumulationMode;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.TimestampedValue;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/**
* Tests for {@link ReduceFnRunner}. These tests instantiate a full "stack" of
* {@link ReduceFnRunner} with enclosed {@link ReduceFn}, down to the installed {@link Trigger}
* (sometimes mocked). They proceed by injecting elements and advancing watermark and
* processing time, then verifying produced panes and counters.
*/
@RunWith(JUnit4.class)
public class ReduceFnRunnerTest {
  // Mockito-injected side input reader, used by the combining-with-context test.
  @Mock private SideInputReader mockSideInputReader;
  // Serializable mock trigger state machine whose firing behavior each test scripts.
  private TriggerStateMachine mockTriggerStateMachine;
  // Mock side input view handed to the CombineFnWithContext under test.
  private PCollectionView<Integer> mockView;
  // The [0, 10) fixed window that most tests inject their elements into.
  private IntervalWindow firstWindow;
  /** Type-safe Mockito "any" matcher for a {@code TriggerStateMachine.TriggerContext}. */
  private static TriggerStateMachine.TriggerContext anyTriggerContext() {
    return Mockito.<TriggerStateMachine.TriggerContext>any();
  }
  /** Type-safe Mockito "any" matcher for a {@code TriggerStateMachine.OnElementContext}. */
  private static TriggerStateMachine.OnElementContext anyElementContext() {
    return Mockito.<TriggerStateMachine.OnElementContext>any();
  }
  @Before
  public void setUp() {
    // Initialize @Mock fields (mockSideInputReader).
    MockitoAnnotations.initMocks(this);
    // Mocks must be serializable because ReduceFnTester round-trips its state.
    mockTriggerStateMachine = mock(TriggerStateMachine.class, withSettings().serializable());
    @SuppressWarnings("unchecked")
    PCollectionView<Integer> mockViewUnchecked =
        mock(PCollectionView.class, withSettings().serializable());
    mockView = mockViewUnchecked;
    firstWindow = new IntervalWindow(new Instant(0), new Instant(10));
  }
  /**
   * Injects {@code element} into {@code tester} with a timestamp equal to its own value,
   * after stubbing the mock trigger's onElement callback to be a no-op.
   */
  private void injectElement(ReduceFnTester<Integer, ?, IntervalWindow> tester, int element)
      throws Exception {
    doNothing().when(mockTriggerStateMachine).onElement(anyElementContext());
    tester.injectElements(TimestampedValue.of(element, new Instant(element)));
  }
  /**
   * Stubs {@code mockTrigger} so that its next onFire callback marks the trigger as
   * finished, causing the window to close after that firing.
   */
  private void triggerShouldFinish(TriggerStateMachine mockTrigger) throws Exception {
    doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) throws Exception {
        @SuppressWarnings("unchecked")
        TriggerStateMachine.TriggerContext context =
            (TriggerStateMachine.TriggerContext) invocation.getArguments()[0];
        // Flip the finished bit so the runner treats the window as closed afterwards.
        context.trigger().setFinished(true);
        return null;
      }
    })
    .when(mockTrigger).onFire(anyTriggerContext());
  }
  /**
   * Non-combining reduce in discarding mode: each fired pane contains only elements that
   * arrived since the previous firing, and elements after the trigger finishes are dropped.
   */
  @Test
  public void testOnElementBufferingDiscarding() throws Exception {
    // Test basic execution of a trigger using a non-combining window set and discarding mode.
    ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
        ReduceFnTester.nonCombining(FixedWindows.of(Duration.millis(10)), mockTriggerStateMachine,
            AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(100),
            ClosingBehavior.FIRE_IF_NON_EMPTY);
    // Pane of {1, 2}
    injectElement(tester, 1);
    when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
    injectElement(tester, 2);
    assertThat(tester.extractOutput(),
        contains(isSingleWindowedValue(containsInAnyOrder(1, 2), 1, 0, 10)));
    // Pane of just 3, and finish
    when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
    triggerShouldFinish(mockTriggerStateMachine);
    injectElement(tester, 3);
    assertThat(tester.extractOutput(),
        contains(isSingleWindowedValue(containsInAnyOrder(3), 3, 0, 10)));
    assertTrue(tester.isMarkedFinished(firstWindow));
    tester.assertHasOnlyGlobalAndFinishedSetsFor(firstWindow);
    // This element shouldn't be seen, because the trigger has finished
    injectElement(tester, 4);
    assertEquals(1, tester.getElementsDroppedDueToClosedWindow());
  }
  /**
   * Non-combining reduce in accumulating mode: later panes repeat earlier elements
   * ({1,2} then {1,2,3}), and elements after the trigger finishes are dropped.
   */
  @Test
  public void testOnElementBufferingAccumulating() throws Exception {
    // Test basic execution of a trigger using a non-combining window set and accumulating mode.
    ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
        ReduceFnTester.nonCombining(
            FixedWindows.of(Duration.millis(10)),
            mockTriggerStateMachine,
            AccumulationMode.ACCUMULATING_FIRED_PANES,
            Duration.millis(100),
            ClosingBehavior.FIRE_IF_NON_EMPTY);
    injectElement(tester, 1);
    // Fires {1, 2}
    when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
    injectElement(tester, 2);
    // Fires {1, 2, 3} because we are in accumulating mode
    when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
    triggerShouldFinish(mockTriggerStateMachine);
    injectElement(tester, 3);
    // This element shouldn't be seen, because the trigger has finished
    injectElement(tester, 4);
    assertThat(
        tester.extractOutput(),
        contains(
            isSingleWindowedValue(containsInAnyOrder(1, 2), 1, 0, 10),
            isSingleWindowedValue(containsInAnyOrder(1, 2, 3), 3, 0, 10)));
    assertTrue(tester.isMarkedFinished(firstWindow));
    tester.assertHasOnlyGlobalAndFinishedSetsFor(firstWindow);
  }
  /**
   * Combining (integer sum) reduce in discarding mode: each pane sums only the elements
   * since the previous firing (2+3=5, then 4), and elements after the finish are dropped.
   */
  @Test
  public void testOnElementCombiningDiscarding() throws Exception {
    // Test basic execution of a trigger using a combining window set and discarding mode.
    WindowingStrategy<?, IntervalWindow> strategy =
        WindowingStrategy.of((WindowFn<?, IntervalWindow>) FixedWindows.of(Duration.millis(10)))
            .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp())
            .withMode(AccumulationMode.DISCARDING_FIRED_PANES)
            .withAllowedLateness(Duration.millis(100));
    ReduceFnTester<Integer, Integer, IntervalWindow> tester =
        ReduceFnTester.combining(
            strategy,
            mockTriggerStateMachine,
            Sum.ofIntegers().<String>asKeyedFn(),
            VarIntCoder.of());
    injectElement(tester, 2);
    when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
    injectElement(tester, 3);
    when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
    triggerShouldFinish(mockTriggerStateMachine);
    injectElement(tester, 4);
    // This element shouldn't be seen, because the trigger has finished
    injectElement(tester, 6);
    assertThat(
        tester.extractOutput(),
        contains(
            isSingleWindowedValue(equalTo(5), 2, 0, 10),
            isSingleWindowedValue(equalTo(4), 4, 0, 10)));
    assertTrue(tester.isMarkedFinished(firstWindow));
    tester.assertHasOnlyGlobalAndFinishedSetsFor(firstWindow);
  }
  /**
   * Tests that the garbage collection time for a fixed window does not overflow the end of time.
   */
  @Test
  public void testFixedWindowEndOfTimeGarbageCollection() throws Exception {
    Duration allowedLateness = Duration.standardDays(365);
    Duration windowSize = Duration.millis(10);
    WindowFn<Object, IntervalWindow> windowFn = FixedWindows.of(windowSize);
    // This timestamp falls into a window where the end of the window is before the end of the
    // global window - the "end of time" - yet its expiration time is after.
    final Instant elementTimestamp =
        GlobalWindow.INSTANCE.maxTimestamp().minus(allowedLateness).plus(1);
    // Resolve the element's window via the WindowFn; only timestamp() is needed by FixedWindows,
    // so the other AssignContext accessors deliberately throw.
    IntervalWindow window = Iterables.getOnlyElement(
        windowFn.assignWindows(
            windowFn.new AssignContext() {
              @Override
              public Object element() {
                throw new UnsupportedOperationException();
              }
              @Override
              public Instant timestamp() {
                return elementTimestamp;
              }
              @Override
              public BoundedWindow window() {
                throw new UnsupportedOperationException();
              }
            }));
    // Sanity-check the setup: window ends before end-of-time, but its GC time would be after.
    assertTrue(
        window.maxTimestamp().isBefore(GlobalWindow.INSTANCE.maxTimestamp()));
    assertTrue(
        window.maxTimestamp().plus(allowedLateness).isAfter(GlobalWindow.INSTANCE.maxTimestamp()));
    // Test basic execution of a trigger using a combining window set and discarding mode.
    WindowingStrategy<?, IntervalWindow> strategy =
        WindowingStrategy.of((WindowFn<?, IntervalWindow>) windowFn)
            .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp())
            .withTrigger(AfterWatermark.pastEndOfWindow().withLateFirings(Never.ever()))
            .withMode(AccumulationMode.DISCARDING_FIRED_PANES)
            .withAllowedLateness(allowedLateness);
    ReduceFnTester<Integer, Integer, IntervalWindow> tester =
        ReduceFnTester
            .combining(strategy, Sum.ofIntegers().<String>asKeyedFn(), VarIntCoder.of());
    tester.injectElements(TimestampedValue.of(13, elementTimestamp));
    // Should fire ON_TIME pane and there will be a checkState that the cleanup time
    // is prior to timestamp max value
    tester.advanceInputWatermark(window.maxTimestamp());
    // Nothing in the ON_TIME pane (not governed by triggers, but by ReduceFnRunner)
    assertThat(tester.extractOutput(), emptyIterable());
    tester.injectElements(TimestampedValue.of(42, elementTimestamp));
    // Now the final pane should fire, demonstrating that the GC time was truncated
    tester.advanceInputWatermark(GlobalWindow.INSTANCE.maxTimestamp());
    assertThat(tester.extractOutput(), contains(isWindowedValue(equalTo(55))));
  }
  /**
   * Combining (integer sum) reduce in accumulating mode: each pane sums all elements seen
   * so far (1+2=3, then 1+2+3=6), and elements after the trigger finishes are dropped.
   */
  @Test
  public void testOnElementCombiningAccumulating() throws Exception {
    // Test basic execution of a trigger using a combining window set and accumulating mode.
    WindowingStrategy<?, IntervalWindow> strategy =
        WindowingStrategy.of((WindowFn<?, IntervalWindow>) FixedWindows.of(Duration.millis(10)))
            .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp())
            .withMode(AccumulationMode.ACCUMULATING_FIRED_PANES)
            .withAllowedLateness(Duration.millis(100));
    ReduceFnTester<Integer, Integer, IntervalWindow> tester =
        ReduceFnTester.combining(
            strategy,
            mockTriggerStateMachine,
            Sum.ofIntegers().<String>asKeyedFn(),
            VarIntCoder.of());
    injectElement(tester, 1);
    when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
    injectElement(tester, 2);
    when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
    triggerShouldFinish(mockTriggerStateMachine);
    injectElement(tester, 3);
    // This element shouldn't be seen, because the trigger has finished
    injectElement(tester, 4);
    assertThat(
        tester.extractOutput(),
        contains(
            isSingleWindowedValue(equalTo(3), 1, 0, 10),
            isSingleWindowedValue(equalTo(6), 3, 0, 10)));
    assertTrue(tester.isMarkedFinished(firstWindow));
    tester.assertHasOnlyGlobalAndFinishedSetsFor(firstWindow);
  }
  /**
   * Combining with context: the CombineFnWithContext reads a side input whose window
   * (fixed, 4 ms) is the mapping of the main-input window (fixed, 2 ms), so the first two
   * main windows see side input 100 and the last two see 104.
   */
  @Test
  public void testOnElementCombiningWithContext() throws Exception {
    // Create values at timestamps 0 .. 8, windowed into fixed windows of 2.
    // Side input windowed into fixed windows of 4:
    // main: [ 0 1 ] [ 2 3 ] [ 4 5 ] [ 6 7 ]
    // side: [ 100 ] [ 104 ]
    // Combine using a CombineFn "side input + sum(main inputs)".
    final int firstWindowSideInput = 100;
    final int secondWindowSideInput = 104;
    final Integer expectedValue = firstWindowSideInput;
    WindowingStrategy<?, IntervalWindow> mainInputWindowingStrategy =
        WindowingStrategy.of(FixedWindows.of(Duration.millis(2)))
            .withMode(AccumulationMode.ACCUMULATING_FIRED_PANES);
    WindowMappingFn<?> sideInputWindowMappingFn =
        FixedWindows.of(Duration.millis(4)).getDefaultWindowMappingFn();
    when(mockView.getWindowMappingFn()).thenReturn((WindowMappingFn) sideInputWindowMappingFn);
    TestOptions options = PipelineOptionsFactory.as(TestOptions.class);
    options.setValue(expectedValue);
    // Side input reader answers with 100 for the [0,4) side window and 104 for [4,8),
    // verifying along the way that the requested window came from the side windowing.
    when(mockSideInputReader.contains(Matchers.<PCollectionView<Integer>>any())).thenReturn(true);
    when(mockSideInputReader.get(
            Matchers.<PCollectionView<Integer>>any(), any(BoundedWindow.class)))
        .then(
            new Answer<Integer>() {
              @Override
              public Integer answer(InvocationOnMock invocation) throws Throwable {
                IntervalWindow sideInputWindow = (IntervalWindow) invocation.getArguments()[1];
                long startMs = sideInputWindow.start().getMillis();
                long endMs = sideInputWindow.end().getMillis();
                // Window should have been produced by sideInputWindowingStrategy.
                assertThat(startMs, anyOf(equalTo(0L), equalTo(4L)));
                assertThat(endMs - startMs, equalTo(4L));
                // If startMs == 4 (second window), equal to secondWindowSideInput.
                return firstWindowSideInput + (int) startMs;
              }
            });
    SumAndVerifyContextFn combineFn = new SumAndVerifyContextFn(mockView, expectedValue);
    ReduceFnTester<Integer, Integer, IntervalWindow> tester = ReduceFnTester.combining(
        mainInputWindowingStrategy, mockTriggerStateMachine, combineFn.<String>asKeyedFn(),
        VarIntCoder.of(), options, mockSideInputReader);
    // Fire a pane on every element, so each partial sum is emitted.
    when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
    for (int i = 0; i < 8; ++i) {
      injectElement(tester, i);
    }
    assertThat(
        tester.extractOutput(),
        contains(
            isSingleWindowedValue(equalTo(0 + firstWindowSideInput), 1, 0, 2),
            isSingleWindowedValue(equalTo(0 + 1 + firstWindowSideInput), 1, 0, 2),
            isSingleWindowedValue(equalTo(2 + firstWindowSideInput), 3, 2, 4),
            isSingleWindowedValue(equalTo(2 + 3 + firstWindowSideInput), 3, 2, 4),
            isSingleWindowedValue(equalTo(4 + secondWindowSideInput), 5, 4, 6),
            isSingleWindowedValue(equalTo(4 + 5 + secondWindowSideInput), 5, 4, 6),
            isSingleWindowedValue(equalTo(6 + secondWindowSideInput), 7, 6, 8),
            isSingleWindowedValue(equalTo(6 + 7 + secondWindowSideInput), 7, 6, 8)));
  }
/**
 * Verifies watermark-hold bookkeeping as a window receives on-time data, late data, and
 * finally expires: the hold tracks the earliest on-time element, falls back to the
 * end-of-window hold after a firing, and is fully released once the window is garbage
 * collected. Also checks the PaneInfo (timing, index, on-time index) of each firing.
 */
@Test
public void testWatermarkHoldAndLateData() throws Exception {
  // Test handling of late data. Specifically, ensure the watermark hold is correct.
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(FixedWindows.of(Duration.millis(10)), mockTriggerStateMachine,
          AccumulationMode.ACCUMULATING_FIRED_PANES, Duration.millis(10),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // Input watermark -> null
  assertEquals(null, tester.getWatermarkHold());
  assertEquals(null, tester.getOutputWatermark());
  // All on time data, verify watermark hold.
  injectElement(tester, 1);
  injectElement(tester, 3);
  // Hold is at the earliest element timestamp.
  assertEquals(new Instant(1), tester.getWatermarkHold());
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  injectElement(tester, 2);
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  assertThat(output, contains(
      isSingleWindowedValue(containsInAnyOrder(1, 2, 3),
          1, // timestamp
          0, // window start
          10))); // window end
  assertThat(output.get(0).getPane(),
      equalTo(PaneInfo.createPane(true, false, Timing.EARLY, 0, -1)));
  // Holding for the end-of-window transition.
  assertEquals(new Instant(9), tester.getWatermarkHold());
  // Nothing dropped.
  assertEquals(0, tester.getElementsDroppedDueToClosedWindow());
  // Input watermark -> 4, output watermark should advance that far as well
  tester.advanceInputWatermark(new Instant(4));
  assertEquals(new Instant(4), tester.getOutputWatermark());
  // Some late, some on time. Verify that we only hold to the minimum of on-time.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(false);
  tester.advanceInputWatermark(new Instant(4));
  injectElement(tester, 2);
  injectElement(tester, 3);
  // Elements 2 and 3 are behind the output watermark (4), so they cannot pull the
  // hold below the end-of-window hold.
  assertEquals(new Instant(9), tester.getWatermarkHold());
  injectElement(tester, 5);
  // Element 5 is on time, so the hold drops to its timestamp.
  assertEquals(new Instant(5), tester.getWatermarkHold());
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  injectElement(tester, 4);
  output = tester.extractOutput();
  assertThat(output,
      contains(
          isSingleWindowedValue(containsInAnyOrder(
              1, 2, 3, // earlier firing
              2, 3, 4, 5), // new elements
              4, // timestamp
              0, // window start
              10))); // window end
  assertThat(output.get(0).getPane(),
      equalTo(PaneInfo.createPane(false, false, Timing.EARLY, 1, -1)));
  // All late -- output at end of window timestamp.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(false);
  tester.advanceInputWatermark(new Instant(8));
  injectElement(tester, 6);
  injectElement(tester, 5);
  assertEquals(new Instant(9), tester.getWatermarkHold());
  injectElement(tester, 4);
  // Fire the ON_TIME pane
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  tester.advanceInputWatermark(new Instant(10));
  // Output time is end of the window, because all the new data was late, but the pane
  // is the ON_TIME pane.
  output = tester.extractOutput();
  assertThat(output,
      contains(isSingleWindowedValue(
          containsInAnyOrder(1, 2, 3, // earlier firing
              2, 3, 4, 5, // earlier firing
              4, 5, 6), // new elements
          9, // timestamp
          0, // window start
          10))); // window end
  assertThat(output.get(0).getPane(),
      equalTo(PaneInfo.createPane(false, false, Timing.ON_TIME, 2, 0)));
  // This is "pending" at the time the watermark makes it way-late.
  // Because we're about to expire the window, we output it.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(false);
  injectElement(tester, 8);
  assertEquals(0, tester.getElementsDroppedDueToClosedWindow());
  // Exceed the GC limit, triggering the last pane to be fired
  tester.advanceInputWatermark(new Instant(50));
  output = tester.extractOutput();
  // Output time is still end of the window, because the new data (8) was behind
  // the output watermark.
  assertThat(output,
      contains(isSingleWindowedValue(
          containsInAnyOrder(1, 2, 3, // earlier firing
              2, 3, 4, 5, // earlier firing
              4, 5, 6, // earlier firing
              8), // new element prior to window becoming expired
          9, // timestamp
          0, // window start
          10))); // window end
  assertThat(
      output.get(0).getPane(),
      equalTo(PaneInfo.createPane(false, true, Timing.LATE, 3, 1)));
  assertEquals(new Instant(50), tester.getOutputWatermark());
  // All holds released once the window has been garbage collected.
  assertEquals(null, tester.getWatermarkHold());
  // Late timers are ignored
  tester.fireTimer(new IntervalWindow(new Instant(0), new Instant(10)), new Instant(12),
      TimeDomain.EVENT_TIME);
  // And because we're past the end of window + allowed lateness, everything should be cleaned up.
  assertFalse(tester.isMarkedFinished(firstWindow));
  tester.assertHasOnlyGlobalAndFinishedSetsFor();
}
/**
 * Verifies that a data hold is only kept while an end-of-window timer is still pending;
 * once that timer fires, the hold is replaced by the garbage-collection hold, and
 * possibly-late elements arriving afterwards do not re-introduce a data hold.
 */
@Test
public void dontSetHoldIfTooLateForEndOfWindowTimer() throws Exception {
  // Make sure holds are only set if they are accompanied by an end-of-window timer.
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(FixedWindows.of(Duration.millis(10)), mockTriggerStateMachine,
          AccumulationMode.ACCUMULATING_FIRED_PANES, Duration.millis(10),
          ClosingBehavior.FIRE_ALWAYS);
  // Keep the output watermark under manual control so we can create the
  // "unobservably late" situation (behind input watermark, ahead of output watermark).
  tester.setAutoAdvanceOutputWatermark(false);
  // Case: Unobservably late
  tester.advanceInputWatermark(new Instant(15));
  tester.advanceOutputWatermark(new Instant(11));
  injectElement(tester, 14);
  // Hold was applied, waiting for end-of-window timer.
  assertEquals(new Instant(14), tester.getWatermarkHold());
  assertEquals(new Instant(19), tester.getNextTimer(TimeDomain.EVENT_TIME));
  // Trigger the end-of-window timer.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  tester.advanceInputWatermark(new Instant(20));
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(false);
  // Hold has been replaced with garbage collection hold. Waiting for garbage collection.
  assertEquals(new Instant(29), tester.getWatermarkHold());
  assertEquals(new Instant(29), tester.getNextTimer(TimeDomain.EVENT_TIME));
  // Case: Maybe late 1
  injectElement(tester, 13);
  // No change to hold or timers.
  assertEquals(new Instant(29), tester.getWatermarkHold());
  assertEquals(new Instant(29), tester.getNextTimer(TimeDomain.EVENT_TIME));
  // Trigger the garbage collection timer.
  tester.advanceInputWatermark(new Instant(30));
  // Everything should be cleaned up.
  assertFalse(tester.isMarkedFinished(new IntervalWindow(new Instant(10), new Instant(20))));
  tester.assertHasOnlyGlobalAndFinishedSetsFor();
}
/**
 * Walks one window through every pane timing (EARLY, EARLY, ON_TIME, LATE, final LATE)
 * and verifies the PaneInfo produced for each firing: isFirst/isLast flags, overall pane
 * index, and the non-speculative (on-time-or-later) index.
 */
@Test
public void testPaneInfoAllStates() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(FixedWindows.of(Duration.millis(10)), mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(100),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  tester.advanceInputWatermark(new Instant(0));
  // First pane: EARLY, isFirst = true.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  injectElement(tester, 1);
  assertThat(tester.extractOutput(), contains(
      WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(true, false, Timing.EARLY))));
  // Second pane: still EARLY, index 1, no on-time pane yet (nonSpeculativeIndex = -1).
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  injectElement(tester, 2);
  assertThat(tester.extractOutput(), contains(
      WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(false, false, Timing.EARLY, 1, -1))));
  // Advance past the end of the window without firing, then fire: ON_TIME pane.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(false);
  tester.advanceInputWatermark(new Instant(15));
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  injectElement(tester, 3);
  assertThat(tester.extractOutput(), contains(
      WindowMatchers.valueWithPaneInfo(
          PaneInfo.createPane(false, false, Timing.ON_TIME, 2, 0))));
  // Subsequent firings are LATE.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  injectElement(tester, 4);
  assertThat(tester.extractOutput(), contains(
      WindowMatchers.valueWithPaneInfo(
          PaneInfo.createPane(false, false, Timing.LATE, 3, 1))));
  // Final pane: trigger finishes, isLast = true.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  injectElement(tester, 5);
  assertThat(tester.extractOutput(), contains(
      WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(false, true, Timing.LATE, 4, 2))));
}
/**
 * With a real (non-mock) trigger -- repeatedly fire on 2 elements or at end of window --
 * and DISCARDING mode, verifies the sequence of panes: a non-empty EARLY pane, an empty
 * ON_TIME pane produced because of AfterWatermark.pastEndOfWindow(), and an empty final
 * LATE pane produced because of ClosingBehavior.FIRE_ALWAYS.
 */
@Test
public void testPaneInfoAllStatesAfterWatermark() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester = ReduceFnTester.nonCombining(
      WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
          .withTrigger(Repeatedly.forever(AfterFirst.of(
              AfterPane.elementCountAtLeast(2),
              AfterWatermark.pastEndOfWindow())))
          .withMode(AccumulationMode.DISCARDING_FIRED_PANES)
          .withAllowedLateness(Duration.millis(100))
          .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp())
          .withClosingBehavior(ClosingBehavior.FIRE_ALWAYS));
  tester.advanceInputWatermark(new Instant(0));
  // Two elements satisfy AfterPane.elementCountAtLeast(2) -> EARLY pane.
  tester.injectElements(
      TimestampedValue.of(1, new Instant(1)), TimestampedValue.of(2, new Instant(2)));
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  assertThat(
      output,
      contains(WindowMatchers.valueWithPaneInfo(
          PaneInfo.createPane(true, false, Timing.EARLY, 0, -1))));
  assertThat(
      output,
      contains(
          WindowMatchers.isSingleWindowedValue(containsInAnyOrder(1, 2), 1, 0, 10)));
  tester.advanceInputWatermark(new Instant(50));
  // We should get the ON_TIME pane even though it is empty,
  // because we have an AfterWatermark.pastEndOfWindow() trigger.
  output = tester.extractOutput();
  assertThat(
      output,
      contains(WindowMatchers.valueWithPaneInfo(
          PaneInfo.createPane(false, false, Timing.ON_TIME, 1, 0))));
  assertThat(
      output,
      contains(
          WindowMatchers.isSingleWindowedValue(emptyIterable(), 9, 0, 10)));
  // We should get the final pane even though it is empty.
  tester.advanceInputWatermark(new Instant(150));
  output = tester.extractOutput();
  assertThat(
      output,
      contains(
          WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(false, true, Timing.LATE, 2, 1))));
  assertThat(
      output,
      contains(
          WindowMatchers.isSingleWindowedValue(emptyIterable(), 9, 0, 10)));
}
/**
 * With {@code ClosingBehavior.FIRE_IF_NON_EMPTY}, the final pane at window expiry is
 * suppressed when it would carry no new data: only the element-count pane and the
 * on-time pane are emitted.
 */
@Test
public void noEmptyPanesFinalIfNonEmpty() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> reduceTester =
      ReduceFnTester.nonCombining(
          WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
              .withTrigger(Repeatedly.<IntervalWindow>forever(AfterFirst.<IntervalWindow>of(
                  AfterPane.elementCountAtLeast(2),
                  AfterWatermark.pastEndOfWindow())))
              .withMode(AccumulationMode.ACCUMULATING_FIRED_PANES)
              .withAllowedLateness(Duration.millis(100))
              .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp())
              .withClosingBehavior(ClosingBehavior.FIRE_IF_NON_EMPTY));
  reduceTester.advanceInputWatermark(new Instant(0));
  reduceTester.injectElements(
      TimestampedValue.of(1, new Instant(1)),
      TimestampedValue.of(2, new Instant(2)));
  // Cross the end of the window, then cross the GC horizon.
  reduceTester.advanceInputWatermark(new Instant(20));
  reduceTester.advanceInputWatermark(new Instant(250));
  List<WindowedValue<Iterable<Integer>>> firedPanes = reduceTester.extractOutput();
  assertThat(firedPanes, contains(
      // Pane fired by the element-count trigger.
      WindowMatchers.isSingleWindowedValue(containsInAnyOrder(1, 2), 1, 0, 10),
      // Empty-input on-time pane (accumulating, so it repeats the data);
      // no final pane follows because it would be empty.
      WindowMatchers.isSingleWindowedValue(containsInAnyOrder(1, 2), 9, 0, 10)));
}
/**
 * With {@code ClosingBehavior.FIRE_ALWAYS}, the final pane at window expiry is emitted
 * even though it contains no new data, yielding three panes in total: element-count,
 * on-time, and final.
 */
@Test
public void noEmptyPanesFinalAlways() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> reduceTester =
      ReduceFnTester.nonCombining(
          WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
              .withTrigger(Repeatedly.<IntervalWindow>forever(AfterFirst.<IntervalWindow>of(
                  AfterPane.elementCountAtLeast(2),
                  AfterWatermark.pastEndOfWindow())))
              .withMode(AccumulationMode.ACCUMULATING_FIRED_PANES)
              .withAllowedLateness(Duration.millis(100))
              .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp())
              .withClosingBehavior(ClosingBehavior.FIRE_ALWAYS));
  reduceTester.advanceInputWatermark(new Instant(0));
  reduceTester.injectElements(
      TimestampedValue.of(1, new Instant(1)),
      TimestampedValue.of(2, new Instant(2)));
  // Cross the end of the window, then cross the GC horizon.
  reduceTester.advanceInputWatermark(new Instant(20));
  reduceTester.advanceInputWatermark(new Instant(250));
  List<WindowedValue<Iterable<Integer>>> firedPanes = reduceTester.extractOutput();
  assertThat(firedPanes, contains(
      // Pane fired by the element-count trigger.
      WindowMatchers.isSingleWindowedValue(containsInAnyOrder(1, 2), 1, 0, 10),
      // Empty-input on-time pane (accumulating, so it repeats the data).
      WindowMatchers.isSingleWindowedValue(containsInAnyOrder(1, 2), 9, 0, 10),
      // Final pane fired unconditionally at GC time.
      WindowMatchers.isSingleWindowedValue(containsInAnyOrder(1, 2), 9, 0, 10)));
}
/**
 * Same pane sequence as {@code testPaneInfoAllStatesAfterWatermark} but in ACCUMULATING
 * mode: the empty-input ON_TIME and final panes repeat the accumulated elements instead
 * of being empty.
 */
@Test
public void testPaneInfoAllStatesAfterWatermarkAccumulating() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester = ReduceFnTester.nonCombining(
      WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
          .withTrigger(Repeatedly.forever(AfterFirst.of(
              AfterPane.elementCountAtLeast(2),
              AfterWatermark.pastEndOfWindow())))
          .withMode(AccumulationMode.ACCUMULATING_FIRED_PANES)
          .withAllowedLateness(Duration.millis(100))
          .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp())
          .withClosingBehavior(ClosingBehavior.FIRE_ALWAYS));
  tester.advanceInputWatermark(new Instant(0));
  // Two elements satisfy AfterPane.elementCountAtLeast(2) -> EARLY pane.
  tester.injectElements(
      TimestampedValue.of(1, new Instant(1)), TimestampedValue.of(2, new Instant(2)));
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  assertThat(
      output,
      contains(WindowMatchers.valueWithPaneInfo(
          PaneInfo.createPane(true, false, Timing.EARLY, 0, -1))));
  assertThat(
      output,
      contains(
          WindowMatchers.isSingleWindowedValue(containsInAnyOrder(1, 2), 1, 0, 10)));
  tester.advanceInputWatermark(new Instant(50));
  // We should get the ON_TIME pane even though it is empty,
  // because we have an AfterWatermark.pastEndOfWindow() trigger.
  output = tester.extractOutput();
  assertThat(
      output,
      contains(WindowMatchers.valueWithPaneInfo(
          PaneInfo.createPane(false, false, Timing.ON_TIME, 1, 0))));
  assertThat(
      output,
      contains(
          WindowMatchers.isSingleWindowedValue(containsInAnyOrder(1, 2), 9, 0, 10)));
  // We should get the final pane even though it is empty.
  tester.advanceInputWatermark(new Instant(150));
  output = tester.extractOutput();
  assertThat(
      output,
      contains(
          WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(false, true, Timing.LATE, 2, 1))));
  assertThat(
      output,
      contains(
          WindowMatchers.isSingleWindowedValue(containsInAnyOrder(1, 2), 9, 0, 10)));
}
/**
 * When the trigger is an {@code orFinally(AfterWatermark.pastEndOfWindow())}, the pane
 * fired at the end of the window is simultaneously the ON_TIME pane and the final pane
 * (isLast = true).
 */
@Test
public void testPaneInfoFinalAndOnTime() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> reduceTester =
      ReduceFnTester.nonCombining(
          WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
              .withTrigger(
                  Repeatedly.forever(AfterPane.elementCountAtLeast(2))
                      .orFinally(AfterWatermark.pastEndOfWindow()))
              .withMode(AccumulationMode.DISCARDING_FIRED_PANES)
              .withAllowedLateness(Duration.millis(100))
              .withClosingBehavior(ClosingBehavior.FIRE_ALWAYS));
  reduceTester.advanceInputWatermark(new Instant(0));
  // Two elements satisfy the element-count trigger -> speculative EARLY pane.
  reduceTester.injectElements(
      TimestampedValue.of(1, new Instant(1)),
      TimestampedValue.of(2, new Instant(2)));
  assertThat(
      reduceTester.extractOutput(),
      contains(WindowMatchers.valueWithPaneInfo(
          PaneInfo.createPane(true, false, Timing.EARLY, 0, -1))));
  // Pass the end of the window: the orFinally clause closes the trigger, so this
  // pane is both ON_TIME and last.
  reduceTester.advanceInputWatermark(new Instant(150));
  assertThat(
      reduceTester.extractOutput(),
      contains(WindowMatchers.valueWithPaneInfo(
          PaneInfo.createPane(false, true, Timing.ON_TIME, 1, 0))));
}
/**
 * If the very first firing also finishes the trigger, the single emitted pane is marked
 * both first and last, with EARLY timing (watermark still before end of window).
 */
@Test
public void testPaneInfoSkipToFinish() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> reduceTester =
      ReduceFnTester.nonCombining(
          FixedWindows.of(Duration.millis(10)), mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(100),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  reduceTester.advanceInputWatermark(new Instant(0));
  // Trigger fires and finishes on the first element.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  injectElement(reduceTester, 1);
  assertThat(
      reduceTester.extractOutput(),
      contains(WindowMatchers.valueWithPaneInfo(
          PaneInfo.createPane(true, true, Timing.EARLY))));
}
/**
 * If the watermark is already past the end of the window when the first-and-only pane
 * fires, that pane is marked first, last, and LATE.
 */
@Test
public void testPaneInfoSkipToNonSpeculativeAndFinish() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> reduceTester =
      ReduceFnTester.nonCombining(
          FixedWindows.of(Duration.millis(10)), mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(100),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // Watermark already beyond the window [0, 10).
  reduceTester.advanceInputWatermark(new Instant(15));
  // Trigger fires and finishes on the first element.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  injectElement(reduceTester, 1);
  assertThat(
      reduceTester.extractOutput(),
      contains(WindowMatchers.valueWithPaneInfo(
          PaneInfo.createPane(true, true, Timing.LATE))));
}
/**
 * Verifies that overlapping session windows are merged before output is produced, so a
 * single pane covering the merged window [1, 20) is emitted rather than separate panes
 * for the pre-merge windows.
 */
@Test
public void testMergeBeforeFinalizing() throws Exception {
  // Verify that we merge windows before producing output so users don't see undesired
  // unmerged windows.
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(Sessions.withGapDuration(Duration.millis(10)),
          mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(0),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // All on time data, verify watermark hold.
  // These two windows should pre-merge immediately to [1, 20)
  tester.injectElements(
      TimestampedValue.of(1, new Instant(1)), // in [1, 11)
      TimestampedValue.of(10, new Instant(10))); // in [10, 20)
  // And this should fire the end-of-window timer
  tester.advanceInputWatermark(new Instant(100));
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  assertThat(output.size(), equalTo(1));
  // Single pane for the merged window, not one per pre-merge window.
  assertThat(output.get(0),
      isSingleWindowedValue(containsInAnyOrder(1, 10),
          1, // timestamp
          1, // window start
          20)); // window end
  assertThat(
      output.get(0).getPane(),
      equalTo(PaneInfo.createPane(true, true, Timing.ON_TIME, 0, 0)));
}
/**
 * It is possible for a session window's trigger to be closed at the point at which
 * the (merged) session window is garbage collected. Make sure we don't accidentally
 * assume the window is still active.
 */
@Test
public void testMergingWithCloseBeforeGC() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(Sessions.withGapDuration(Duration.millis(10)),
          mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(50),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // Two elements in two overlapping session windows.
  tester.injectElements(
      TimestampedValue.of(1, new Instant(1)), // in [1, 11)
      TimestampedValue.of(10, new Instant(10))); // in [10, 20)
  // Close the trigger, but the garbage collection timer is still pending.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  tester.advanceInputWatermark(new Instant(30));
  // Now the garbage collection timer will fire, finding the trigger already closed.
  tester.advanceInputWatermark(new Instant(100));
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  assertThat(output.size(), equalTo(1));
  // Output covers the merged window [1, 20).
  assertThat(output.get(0),
      isSingleWindowedValue(containsInAnyOrder(1, 10),
          1, // timestamp
          1, // window start
          20)); // window end
  assertThat(
      output.get(0).getPane(),
      equalTo(PaneInfo.createPane(true, true, Timing.ON_TIME, 0, 0)));
}
/**
 * Ensure a closed trigger has its state recorded in the merge result window.
 */
@Test
public void testMergingWithCloseTrigger() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(Sessions.withGapDuration(Duration.millis(10)),
          mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(50),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // Create a new merged session window.
  tester.injectElements(TimestampedValue.of(1, new Instant(1)),
      TimestampedValue.of(2, new Instant(2)));
  // Force the trigger to be closed for the merged window.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  tester.advanceInputWatermark(new Instant(13));
  // Trigger is now closed. The finished state is keyed by the merged window [1, 12).
  assertTrue(tester.isMarkedFinished(new IntervalWindow(new Instant(1), new Instant(12))));
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(false);
  // Revisit the same session window.
  tester.injectElements(TimestampedValue.of(1, new Instant(1)),
      TimestampedValue.of(2, new Instant(2)));
  // Trigger is still closed.
  assertTrue(tester.isMarkedFinished(new IntervalWindow(new Instant(1), new Instant(12))));
}
/**
 * If a later event tries to reuse an earlier session window which has been closed, we
 * should reject that element and not fail due to the window no longer being active.
 */
@Test
public void testMergingWithReusedWindow() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(Sessions.withGapDuration(Duration.millis(10)),
          mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(50),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // One element in one session window.
  tester.injectElements(TimestampedValue.of(1, new Instant(1))); // in [1, 11), gc at 21.
  // Close the trigger, but the garbage collection timer is still pending.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  tester.advanceInputWatermark(new Instant(15));
  // Another element in the same session window.
  // Should be discarded with 'window closed'.
  tester.injectElements(TimestampedValue.of(1, new Instant(1))); // in [1, 11), gc at 21.
  // And nothing should be left in the active window state.
  assertTrue(tester.hasNoActiveWindows());
  // Now the garbage collection timer will fire, finding the trigger already closed.
  tester.advanceInputWatermark(new Instant(100));
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  assertThat(output.size(), equalTo(1));
  // Only the first element appears; the rejected one was dropped.
  assertThat(output.get(0),
      isSingleWindowedValue(containsInAnyOrder(1),
          1, // timestamp
          1, // window start
          11)); // window end
  assertThat(
      output.get(0).getPane(),
      equalTo(PaneInfo.createPane(true, true, Timing.ON_TIME, 0, 0)));
}
/**
 * When a merged window's trigger is closed we record that state using the merged window rather
 * than the original windows.
 */
@Test
public void testMergingWithClosedRepresentative() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(Sessions.withGapDuration(Duration.millis(10)),
          mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(50),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // 2 elements into merged session window.
  // Close the trigger, but the garbage collection timer is still pending.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  tester.injectElements(TimestampedValue.of(1, new Instant(1)), // in [1, 11), gc at 21.
      TimestampedValue.of(8, new Instant(8))); // in [8, 18), gc at 28.
  // More elements into the same merged session window.
  // It has not yet been gced.
  // Should be discarded with 'window closed'.
  tester.injectElements(TimestampedValue.of(1, new Instant(1)), // in [1, 11), gc at 21.
      TimestampedValue.of(2, new Instant(2)), // in [2, 12), gc at 22.
      TimestampedValue.of(8, new Instant(8))); // in [8, 18), gc at 28.
  // Now the garbage collection timer will fire, finding the trigger already closed.
  tester.advanceInputWatermark(new Instant(100));
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  assertThat(output.size(), equalTo(1));
  // Only the first batch made it into the merged window [1, 18).
  assertThat(output.get(0),
      isSingleWindowedValue(containsInAnyOrder(1, 8),
          1, // timestamp
          1, // window start
          18)); // window end
  // Timing is EARLY because the trigger closed before the watermark reached
  // the end of the merged window.
  assertThat(
      output.get(0).getPane(),
      equalTo(PaneInfo.createPane(true, true, Timing.EARLY, 0, 0)));
}
/**
 * If an element for a closed session window ends up being merged into other still-open
 * session windows, the resulting session window is not 'poisoned'.
 */
@Test
public void testMergingWithClosedDoesNotPoison() throws Exception {
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(Sessions.withGapDuration(Duration.millis(10)),
          mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(50),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // 1 element, force its trigger to close.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  tester.injectElements(TimestampedValue.of(2, new Instant(2)));
  // 3 elements, one already closed.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(false);
  tester.injectElements(TimestampedValue.of(1, new Instant(1)),
      TimestampedValue.of(2, new Instant(2)),
      TimestampedValue.of(3, new Instant(3)));
  tester.advanceInputWatermark(new Instant(100));
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  assertThat(output.size(), equalTo(2));
  // First pane: the closed single-element window [2, 12).
  assertThat(output.get(0),
      isSingleWindowedValue(containsInAnyOrder(2),
          2, // timestamp
          2, // window start
          12)); // window end
  assertThat(
      output.get(0).getPane(),
      equalTo(PaneInfo.createPane(true, true, Timing.EARLY, 0, 0)));
  // Second pane: the later merged window [1, 13) still fires normally --
  // merging with the closed window did not poison it.
  assertThat(output.get(1),
      isSingleWindowedValue(containsInAnyOrder(1, 2, 3),
          1, // timestamp
          1, // window start
          13)); // window end
  assertThat(
      output.get(1).getPane(),
      equalTo(PaneInfo.createPane(true, true, Timing.ON_TIME, 0, 0)));
}
/**
 * Tests that when data is assigned to multiple windows but some of those windows have
 * had their triggers finish, then the data is dropped and counted accurately.
 */
@Test
public void testDropDataMultipleWindowsFinishedTrigger() throws Exception {
  // Sliding windows of size 100 every 30, so each element lands in multiple windows.
  ReduceFnTester<Integer, Integer, IntervalWindow> tester = ReduceFnTester.combining(
      WindowingStrategy.of(
          SlidingWindows.of(Duration.millis(100)).every(Duration.millis(30)))
          .withTrigger(AfterWatermark.pastEndOfWindow())
          .withAllowedLateness(Duration.millis(1000)),
      Sum.ofIntegers().<String>asKeyedFn(), VarIntCoder.of());
  tester.injectElements(
      // assigned to [-60, 40), [-30, 70), [0, 100)
      TimestampedValue.of(10, new Instant(23)),
      // assigned to [-30, 70), [0, 100), [30, 130)
      TimestampedValue.of(12, new Instant(40)));
  assertEquals(0, tester.getElementsDroppedDueToClosedWindow());
  tester.advanceInputWatermark(new Instant(70));
  tester.injectElements(
      // assigned to [-30, 70), [0, 100), [30, 130)
      // but [-30, 70) is closed by the trigger
      TimestampedValue.of(14, new Instant(60)));
  // One of the three window assignments was dropped.
  assertEquals(1, tester.getElementsDroppedDueToClosedWindow());
  tester.advanceInputWatermark(new Instant(130));
  // assigned to [-30, 70), [0, 100), [30, 130)
  // but they are all closed
  tester.injectElements(TimestampedValue.of(16, new Instant(40)));
  // Cumulative count: 1 earlier drop plus all 3 assignments of the last element.
  assertEquals(4, tester.getElementsDroppedDueToClosedWindow());
}
/**
 * In DISCARDING mode, a trigger firing with no buffered data must produce no output and
 * must not advance the pane indices: the later LATE pane still gets index 1 /
 * non-speculative index 1, as if the empty firing never happened.
 */
@Test
public void testIdempotentEmptyPanesDiscarding() throws Exception {
  // Test uninteresting (empty) panes don't increment the index or otherwise
  // modify PaneInfo.
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(FixedWindows.of(Duration.millis(10)), mockTriggerStateMachine,
          AccumulationMode.DISCARDING_FIRED_PANES, Duration.millis(100),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // Inject a couple of on-time elements and fire at the window end.
  injectElement(tester, 1);
  injectElement(tester, 2);
  tester.advanceInputWatermark(new Instant(12));
  // Fire the on-time pane
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  tester.fireTimer(firstWindow, new Instant(9), TimeDomain.EVENT_TIME);
  // Fire another timer (with no data, so it's an uninteresting pane that should not be output).
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  tester.fireTimer(firstWindow, new Instant(9), TimeDomain.EVENT_TIME);
  // Finish it off with another datum.
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  injectElement(tester, 3);
  // The intermediate trigger firing shouldn't result in any output.
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  assertThat(output.size(), equalTo(2));
  // The on-time pane is as expected.
  assertThat(output.get(0), isSingleWindowedValue(containsInAnyOrder(1, 2), 1, 0, 10));
  // The late pane has the correct indices.
  assertThat(output.get(1).getValue(), contains(3));
  assertThat(
      output.get(1).getPane(), equalTo(PaneInfo.createPane(false, true, Timing.LATE, 1, 1)));
  assertTrue(tester.isMarkedFinished(firstWindow));
  tester.assertHasOnlyGlobalAndFinishedSetsFor(firstWindow);
  assertEquals(0, tester.getElementsDroppedDueToClosedWindow());
}
/**
 * Same as {@code testIdempotentEmptyPanesDiscarding} but in ACCUMULATING mode: an empty
 * trigger firing after the on-time pane produces no output and leaves the pane indices
 * untouched, so the final LATE pane carries indices 1/1 and the accumulated contents.
 */
@Test
public void testIdempotentEmptyPanesAccumulating() throws Exception {
  // Test uninteresting (empty) panes don't increment the index or otherwise
  // modify PaneInfo.
  ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
      ReduceFnTester.nonCombining(FixedWindows.of(Duration.millis(10)), mockTriggerStateMachine,
          AccumulationMode.ACCUMULATING_FIRED_PANES, Duration.millis(100),
          ClosingBehavior.FIRE_IF_NON_EMPTY);
  // Inject a couple of on-time elements and fire at the window end.
  injectElement(tester, 1);
  injectElement(tester, 2);
  tester.advanceInputWatermark(new Instant(12));
  // Trigger the on-time pane
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  tester.fireTimer(firstWindow, new Instant(9), TimeDomain.EVENT_TIME);
  List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
  assertThat(output.size(), equalTo(1));
  assertThat(output.get(0), isSingleWindowedValue(containsInAnyOrder(1, 2), 1, 0, 10));
  assertThat(output.get(0).getPane(),
      equalTo(PaneInfo.createPane(true, false, Timing.ON_TIME, 0, 0)));
  // Fire another timer with no data; the empty pane should not be output even though the
  // trigger is ready to fire
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  tester.fireTimer(firstWindow, new Instant(9), TimeDomain.EVENT_TIME);
  assertThat(tester.extractOutput().size(), equalTo(0));
  // Finish it off with another datum, which is late
  when(mockTriggerStateMachine.shouldFire(anyTriggerContext())).thenReturn(true);
  triggerShouldFinish(mockTriggerStateMachine);
  injectElement(tester, 3);
  output = tester.extractOutput();
  assertThat(output.size(), equalTo(1));
  // The late pane has the correct indices.
  assertThat(output.get(0).getValue(), containsInAnyOrder(1, 2, 3));
  assertThat(output.get(0).getPane(),
      equalTo(PaneInfo.createPane(false, true, Timing.LATE, 1, 1)));
  assertTrue(tester.isMarkedFinished(firstWindow));
  tester.assertHasOnlyGlobalAndFinishedSetsFor(firstWindow);
  assertEquals(0, tester.getElementsDroppedDueToClosedWindow());
}
/**
 * Test that we receive an empty on-time pane when an or-finally waiting for the watermark fires.
 * Specifically, verify the proper triggerings and pane-info of a typical speculative/on-time/late
 * when the on-time pane is empty.
 */
@Test
public void testEmptyOnTimeFromOrFinally() throws Exception {
  WindowingStrategy<?, IntervalWindow> strategy =
      WindowingStrategy.of((WindowFn<?, IntervalWindow>) FixedWindows.of(Duration.millis(10)))
          .withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp())
          .withTrigger(
              AfterEach.<IntervalWindow>inOrder(
                  // Phase 1: repeated processing-time firings until end of window.
                  Repeatedly.forever(
                      AfterProcessingTime.pastFirstElementInPane()
                          .plusDelayOf(new Duration(5)))
                      .orFinally(AfterWatermark.pastEndOfWindow()),
                  // Phase 2: slower processing-time firings for late data.
                  Repeatedly.forever(
                      AfterProcessingTime.pastFirstElementInPane()
                          .plusDelayOf(new Duration(25)))))
          .withMode(AccumulationMode.ACCUMULATING_FIRED_PANES)
          .withAllowedLateness(Duration.millis(100));
  ReduceFnTester<Integer, Integer, IntervalWindow> tester =
      ReduceFnTester
          .combining(strategy, Sum.ofIntegers().<String>asKeyedFn(), VarIntCoder.of());
  tester.advanceInputWatermark(new Instant(0));
  tester.advanceProcessingTime(new Instant(0));
  // Processing time timer for 5
  tester.injectElements(
      TimestampedValue.of(1, new Instant(1)),
      TimestampedValue.of(1, new Instant(3)),
      TimestampedValue.of(1, new Instant(7)),
      TimestampedValue.of(1, new Instant(5)));
  // Should fire early pane
  tester.advanceProcessingTime(new Instant(6));
  // Should fire empty on time pane
  tester.advanceInputWatermark(new Instant(11));
  List<WindowedValue<Integer>> output = tester.extractOutput();
  assertEquals(2, output.size());
  // Sum of the four 1s; on-time pane repeats the sum (accumulating) at end-of-window time.
  assertThat(output.get(0), WindowMatchers.isSingleWindowedValue(4, 1, 0, 10));
  assertThat(output.get(1), WindowMatchers.isSingleWindowedValue(4, 9, 0, 10));
  assertThat(
      output.get(0),
      WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(true, false, Timing.EARLY, 0, -1)));
  assertThat(
      output.get(1),
      WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(false, false, Timing.ON_TIME, 1, 0)));
}
/**
 * Tests for processing time firings after the watermark passes the end of the window.
 * Specifically, verify the proper triggerings and pane-info of a typical speculative/on-time/late
 * sequence when the on-time pane is non-empty.
 *
 * <p>Setup mirrors {@code testEmptyOnTimeFromOrFinally}: fixed 10ms windows, accumulating panes,
 * 100ms allowed lateness, speculative firings every 5ms until end-of-window, then late firings
 * every 25ms of processing time.
 */
@Test
public void testProcessingTime() throws Exception {
WindowingStrategy<?, IntervalWindow> strategy =
WindowingStrategy.of((WindowFn<?, IntervalWindow>) FixedWindows.of(Duration.millis(10)))
.withOutputTimeFn(OutputTimeFns.outputAtEarliestInputTimestamp())
.withTrigger(AfterEach.<IntervalWindow>inOrder(
Repeatedly
.forever(
AfterProcessingTime.pastFirstElementInPane().plusDelayOf(
new Duration(5)))
.orFinally(AfterWatermark.pastEndOfWindow()),
Repeatedly.forever(
AfterProcessingTime.pastFirstElementInPane().plusDelayOf(
new Duration(25)))))
.withMode(AccumulationMode.ACCUMULATING_FIRED_PANES)
.withAllowedLateness(Duration.millis(100));
ReduceFnTester<Integer, Integer, IntervalWindow> tester =
ReduceFnTester
.combining(strategy, Sum.ofIntegers().<String>asKeyedFn(), VarIntCoder.of());
tester.advanceInputWatermark(new Instant(0));
tester.advanceProcessingTime(new Instant(0));
tester.injectElements(TimestampedValue.of(1, new Instant(1)),
TimestampedValue.of(1, new Instant(3)), TimestampedValue.of(1, new Instant(7)),
TimestampedValue.of(1, new Instant(5)));
// 4 elements all at processing time 0
tester.advanceProcessingTime(new Instant(6)); // fire [1,3,7,5] since 6 > 0 + 5
tester.injectElements(
TimestampedValue.of(1, new Instant(8)),
TimestampedValue.of(1, new Instant(4)));
// 6 elements
tester.advanceInputWatermark(new Instant(11)); // fire [1,3,7,5,8,4] since 11 > 9
// Everything injected from here on is late (event times < 10, watermark past end of window),
// but is within the 100ms allowed lateness, so it feeds the late sub-trigger.
tester.injectElements(
TimestampedValue.of(1, new Instant(8)),
TimestampedValue.of(1, new Instant(4)),
TimestampedValue.of(1, new Instant(5)));
// 9 elements
tester.advanceInputWatermark(new Instant(12));
tester.injectElements(
TimestampedValue.of(1, new Instant(3)));
// 10 elements
tester.advanceProcessingTime(new Instant(15));
tester.injectElements(
TimestampedValue.of(1, new Instant(5)));
// 11 elements
tester.advanceProcessingTime(new Instant(32)); // fire since 32 > 6 + 25
tester.injectElements(
TimestampedValue.of(1, new Instant(3)));
// 12 elements
// fire [1,3,7,5,8,4,8,4,5,3,5,3] as the final (closing) pane when the watermark passes
// end-of-window + allowed lateness (125 > 10 + 100) and the window is garbage collected.
tester.advanceInputWatermark(new Instant(125));
List<WindowedValue<Integer>> output = tester.extractOutput();
assertEquals(4, output.size());
// Accumulating mode: each pane carries the running sum (4, 6, 11, 12).
assertThat(output.get(0), WindowMatchers.isSingleWindowedValue(4, 1, 0, 10));
assertThat(output.get(1), WindowMatchers.isSingleWindowedValue(6, 4, 0, 10));
assertThat(output.get(2), WindowMatchers.isSingleWindowedValue(11, 9, 0, 10));
assertThat(output.get(3), WindowMatchers.isSingleWindowedValue(12, 9, 0, 10));
// Pane sequence: EARLY (speculative), ON_TIME, LATE, and a final LATE pane marked isLast.
assertThat(
output.get(0),
WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(true, false, Timing.EARLY, 0, -1)));
assertThat(
output.get(1),
WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(false, false, Timing.ON_TIME, 1, 0)));
assertThat(
output.get(2),
WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(false, false, Timing.LATE, 2, 1)));
assertThat(
output.get(3),
WindowMatchers.valueWithPaneInfo(PaneInfo.createPane(false, true, Timing.LATE, 3, 2)));
}
/**
 * We should fire a non-empty ON_TIME pane in the GlobalWindow when the watermark moves to
 * end-of-time.
 *
 * <p>With n = 20 elements and an element-count trigger of 3, six EARLY panes of 3 elements fire,
 * and the 2 leftover elements must come out in the final ON_TIME pane at end-of-time rather
 * than being dropped.
 */
@Test
public void fireNonEmptyOnDrainInGlobalWindow() throws Exception {
ReduceFnTester<Integer, Iterable<Integer>, GlobalWindow> tester =
ReduceFnTester.nonCombining(
WindowingStrategy.of(new GlobalWindows())
.withTrigger(Repeatedly.<GlobalWindow>forever(
AfterPane.elementCountAtLeast(3)))
.withMode(AccumulationMode.DISCARDING_FIRED_PANES));
tester.advanceInputWatermark(new Instant(0));
final int n = 20;
for (int i = 0; i < n; i++) {
tester.injectElements(TimestampedValue.of(i, new Instant(i)));
}
List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
// n / 3 full panes of exactly 3 elements each, all EARLY, with consecutive pane indices.
assertEquals(n / 3, output.size());
for (int i = 0; i < output.size(); i++) {
assertEquals(Timing.EARLY, output.get(i).getPane().getTiming());
assertEquals(i, output.get(i).getPane().getIndex());
assertEquals(3, Iterables.size(output.get(i).getValue()));
}
// Draining: advancing the watermark to end-of-time closes the global window.
tester.advanceInputWatermark(BoundedWindow.TIMESTAMP_MAX_VALUE);
output = tester.extractOutput();
assertEquals(1, output.size());
assertEquals(Timing.ON_TIME, output.get(0).getPane().getTiming());
assertEquals(n / 3, output.get(0).getPane().getIndex());
// DISCARDING mode: the final pane holds only the n % 3 leftovers not yet fired.
assertEquals(n - ((n / 3) * 3), Iterables.size(output.get(0).getValue()));
}
/**
 * We should fire an empty ON_TIME pane in the GlobalWindow when the watermark moves to
 * end-of-time.
 *
 * <p>Unlike {@code fireNonEmptyOnDrainInGlobalWindow}, the processing-time trigger here has
 * already emitted every element before the drain, so the final ON_TIME pane is empty.
 */
@Test
public void fireEmptyOnDrainInGlobalWindowIfRequested() throws Exception {
ReduceFnTester<Integer, Iterable<Integer>, GlobalWindow> tester =
ReduceFnTester.nonCombining(
WindowingStrategy.of(new GlobalWindows())
.withTrigger(Repeatedly.<GlobalWindow>forever(
AfterProcessingTime.pastFirstElementInPane().plusDelayOf(
new Duration(3))))
.withMode(AccumulationMode.DISCARDING_FIRED_PANES));
final int n = 20;
// Advance processing time in lock-step with injection so the 3ms delay fires every 4 elements.
for (int i = 0; i < n; i++) {
tester.advanceProcessingTime(new Instant(i));
tester.injectElements(TimestampedValue.of(i, new Instant(i)));
}
// Push processing time far enough past the last firing deadline to flush the final batch.
tester.advanceProcessingTime(new Instant(n + 4));
List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
// ceil(n / 4) EARLY panes of 4 elements each, with consecutive pane indices.
assertEquals((n + 3) / 4, output.size());
for (int i = 0; i < output.size(); i++) {
assertEquals(Timing.EARLY, output.get(i).getPane().getTiming());
assertEquals(i, output.get(i).getPane().getIndex());
assertEquals(4, Iterables.size(output.get(i).getValue()));
}
// Draining: everything already fired, so the closing ON_TIME pane is empty but still emitted.
tester.advanceInputWatermark(BoundedWindow.TIMESTAMP_MAX_VALUE);
output = tester.extractOutput();
assertEquals(1, output.size());
assertEquals(Timing.ON_TIME, output.get(0).getPane().getTiming());
assertEquals((n + 3) / 4, output.get(0).getPane().getIndex());
assertEquals(0, Iterables.size(output.get(0).getValue()));
}
/**
 * Late elements should still have a garbage collection hold set so that they
 * can make a late pane rather than be dropped due to lateness.
 *
 * <p>Window [0, 10), 100ms allowed lateness: an element arriving at watermark 109 is late but
 * not yet expired, so a hold must keep the output watermark from passing GC time (109) until
 * the late pane has been emitted.
 */
@Test
public void setGarbageCollectionHoldOnLateElements() throws Exception {
ReduceFnTester<Integer, Iterable<Integer>, IntervalWindow> tester =
ReduceFnTester.nonCombining(
WindowingStrategy.of(FixedWindows.of(Duration.millis(10)))
.withTrigger(
AfterWatermark.pastEndOfWindow()
.withLateFirings(AfterPane.elementCountAtLeast(2)))
.withMode(AccumulationMode.DISCARDING_FIRED_PANES)
.withAllowedLateness(Duration.millis(100))
.withClosingBehavior(ClosingBehavior.FIRE_IF_NON_EMPTY));
tester.advanceInputWatermark(new Instant(0));
tester.advanceOutputWatermark(new Instant(0));
tester.injectElements(TimestampedValue.of(1, new Instant(1)));
// Fire ON_TIME pane @ 9 with 1
tester.advanceInputWatermark(new Instant(109));
tester.advanceOutputWatermark(new Instant(109));
// Inject a late element (timestamp 2, watermark already at 109).
tester.injectElements(TimestampedValue.of(2, new Instant(2)));
// We should have set a garbage collection hold for the final pane.
Instant hold = tester.getWatermarkHold();
assertEquals(new Instant(109), hold);
// Advancing past GC time (window end 10 + allowed lateness 100) expires the window.
tester.advanceInputWatermark(new Instant(110));
tester.advanceOutputWatermark(new Instant(110));
// Fire final LATE pane @ 9 with 2
List<WindowedValue<Iterable<Integer>>> output = tester.extractOutput();
// Two panes total: the ON_TIME pane and the closing LATE pane carrying the late element.
assertEquals(2, output.size());
}
/**
 * A {@link CombineFnWithContext} that sums integer inputs while asserting, at every lifecycle
 * callback, that the expected pipeline-option value and the side input are visible through the
 * supplied {@code Context}. The extracted output additionally folds in the side-input value.
 */
private static class SumAndVerifyContextFn
    extends CombineFnWithContext<Integer, Integer, Integer> {

  private final PCollectionView<Integer> view;
  private final int expectedValue;

  private SumAndVerifyContextFn(PCollectionView<Integer> view, int expectedValue) {
    this.view = view;
    this.expectedValue = expectedValue;
  }

  /** Asserts that pipeline options and the side input are both reachable from {@code c}. */
  private void checkContext(Context c) {
    assertThat(expectedValue, equalTo(c.getPipelineOptions().as(TestOptions.class).getValue()));
    assertThat(c.sideInput(view), greaterThanOrEqualTo(100));
  }

  @Override
  public Integer createAccumulator(Context c) {
    checkContext(c);
    return 0;
  }

  @Override
  public Integer addInput(Integer accumulator, Integer input, Context c) {
    checkContext(c);
    return accumulator + input;
  }

  @Override
  public Integer mergeAccumulators(Iterable<Integer> accumulators, Context c) {
    checkContext(c);
    int total = 0;
    for (Integer piece : accumulators) {
      total += piece;
    }
    return total;
  }

  @Override
  public Integer extractOutput(Integer accumulator, Context c) {
    checkContext(c);
    return accumulator + c.sideInput(view);
  }
}
/**
 * A {@link PipelineOptions} to test combining with context.
 *
 * <p>{@code value} is the integer that SumAndVerifyContextFn expects to observe via
 * {@code Context.getPipelineOptions()} in each of its callbacks.
 */
public interface TestOptions extends PipelineOptions {
// Expected option value; read by SumAndVerifyContextFn's context verification.
int getValue();
void setValue(int value);
}
}
| |
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.adwords.axis.utils.v201509.batchjob;
import com.google.api.ads.adwords.axis.utils.AxisDeserializer;
import com.google.api.ads.adwords.axis.v201509.cm.AdGroupAdServiceSoapBindingStub;
import com.google.api.ads.adwords.axis.v201509.cm.AdGroupBidModifierServiceSoapBindingStub;
import com.google.api.ads.adwords.axis.v201509.cm.AdGroupCriterionServiceSoapBindingStub;
import com.google.api.ads.adwords.axis.v201509.cm.AdGroupServiceSoapBindingStub;
import com.google.api.ads.adwords.axis.v201509.cm.ApiError;
import com.google.api.ads.adwords.axis.v201509.cm.BatchJob;
import com.google.api.ads.adwords.axis.v201509.cm.BudgetServiceSoapBindingStub;
import com.google.api.ads.adwords.axis.v201509.cm.CampaignCriterionServiceSoapBindingStub;
import com.google.api.ads.adwords.axis.v201509.cm.CampaignServiceSoapBindingStub;
import com.google.api.ads.adwords.axis.v201509.cm.FeedItemServiceSoapBindingStub;
import com.google.api.ads.adwords.axis.v201509.cm.Operand;
import com.google.api.ads.adwords.axis.v201509.cm.Operation;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.adwords.lib.utils.AdWordsInternals;
import com.google.api.ads.adwords.lib.utils.BatchJobException;
import com.google.api.ads.adwords.lib.utils.BatchJobHelperInterface;
import com.google.api.ads.adwords.lib.utils.BatchJobUploadResponse;
import com.google.api.ads.adwords.lib.utils.BatchJobUploadStatus;
import com.google.api.ads.adwords.lib.utils.BatchJobUploader;
import com.google.api.ads.adwords.lib.utils.logging.BatchJobLogger;
import com.google.common.collect.ImmutableList;
import org.apache.axis.client.Call;
import org.apache.axis.encoding.TypeMapping;
import java.net.URL;
import java.rmi.RemoteException;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import javax.xml.namespace.QName;
import javax.xml.rpc.ServiceException;
/**
 * Utility for uploading operations and downloading results for a {@link BatchJob}.
 *
 * <p>Uploads are delegated to a {@link BatchJobUploader}; downloads deserialize the job's
 * result XML with an {@link AxisDeserializer} using the Axis type mappings gathered from the
 * synchronous service stubs (see {@link #getServiceTypeMappings()}).
 */
public class BatchJobHelper implements BatchJobHelperInterface<Operation, Operand, ApiError,
MutateResult, BatchJobMutateResponse> {
// Performs the actual HTTP upload of mutate requests.
private final BatchJobUploader<Operand, ApiError, MutateResult, BatchJobMutateResponse> uploader;
// Logs upload/download attempts and failures.
private final BatchJobLogger batchJobLogger;
// Qualified XML names for the result and operand elements in the v201509 namespace.
private final QName resultQName;
private final QName operandQName;
/**
 * Atomic reference to the lazily initialized list of type mappings. Use {@link
 * #getServiceTypeMappings()}
 * to retrieve the initialized list in a thread safe manner.
 */
private static final AtomicReference<ImmutableList<TypeMapping>> SERVICE_TYPE_MAPPINGS_REF =
new AtomicReference<ImmutableList<TypeMapping>>();
/**
 * Constructs a helper bound to the given session.
 *
 * @param session the AdWords session used for uploads and logging
 */
public BatchJobHelper(AdWordsSession session) {
// Second argument is presumably a flag on BatchJobUploader (e.g. compression or
// incremental behavior) -- confirm against the BatchJobUploader constructor.
uploader = new BatchJobUploader<Operand, ApiError, MutateResult, BatchJobMutateResponse>(
session, false);
batchJobLogger = AdWordsInternals.getInstance().getAdWordsServiceLoggers().getBatchJobLogger();
resultQName = new QName("https://adwords.google.com/api/adwords/cm/v201509", "MutateResult");
operandQName = new QName("https://adwords.google.com/api/adwords/cm/v201509", "Operand");
}
/**
 * Uploads all operations in a single (non-incremental) request to the given URL.
 *
 * @throws BatchJobException if the upload fails
 */
@Override
public BatchJobUploadResponse uploadBatchJobOperations(Iterable<Operation> operations,
String uploadUrl) throws BatchJobException {
BatchJobMutateRequest request = new BatchJobMutateRequest();
request.addOperations(operations);
return uploader.uploadBatchJobOperations(request, uploadUrl);
}
/**
 * Uploads one increment of operations; {@code isLastRequest} marks the final increment and
 * {@code batchJobUploadStatus} carries resumable-upload state between calls.
 *
 * @throws BatchJobException if the upload fails
 */
@Override
public BatchJobUploadResponse uploadIncrementalBatchJobOperations(
Iterable<? extends Operation> operations, boolean isLastRequest,
BatchJobUploadStatus batchJobUploadStatus) throws BatchJobException {
BatchJobMutateRequest request = new BatchJobMutateRequest();
request.addOperations(operations);
return uploader.uploadIncrementalBatchJobOperations(
request, isLastRequest, batchJobUploadStatus);
}
/**
 * Downloads and deserializes the mutate results of a completed batch job.
 *
 * @param downloadUrl the job's result download URL
 * @return the deserialized response containing all mutate results
 * @throws BatchJobException if the download or deserialization fails; the failure is logged
 *     before the exception is thrown
 */
@Override
public BatchJobMutateResponse downloadBatchJobMutateResponse(String downloadUrl)
throws BatchJobException {
AxisDeserializer deserializer = new AxisDeserializer();
List<MutateResult> mutateResults;
try {
mutateResults = deserializer.deserializeBatchJobMutateResults(new URL(downloadUrl),
getServiceTypeMappings(), MutateResult.class, resultQName, Operand.class, operandQName);
} catch (Exception e) {
// Log with a null response to record the failed attempt, then wrap with context.
batchJobLogger.logDownload(downloadUrl, null, e);
throw new BatchJobException(
"Failed to download batch job mutate response from URL: " + downloadUrl, e);
}
BatchJobMutateResponse response = new BatchJobMutateResponse();
response.setMutateResults(mutateResults.toArray(new MutateResult[mutateResults.size()]));
batchJobLogger.logDownload(downloadUrl, response, null);
return response;
}
/**
 * Returns all of the service type mappings required to serialize/deserialize Axis
 * objects.
 *
 * <p>Each anonymous stub subclass below exists only to reach the stub's protected
 * {@code createCall()} so its {@link TypeMapping} can be read. One stub is listed per
 * Operation subclass supported by BatchJobService in v201509.
 *
 * <p>Thread-safety: if two threads race past the null check both will build the list, but
 * {@code compareAndSet} ensures only the first result is published; the duplicate work is
 * benign since the mappings are identical.
 */
static List<TypeMapping> getServiceTypeMappings() {
// Lazily initialize the list of type mappings.
if (SERVICE_TYPE_MAPPINGS_REF.get() == null) {
// Build the list of type mappings based on the synchronous service of each Operation
// subclass supported by BatchJobService for this version of the API.
ImmutableList.Builder<TypeMapping> mappings = ImmutableList.builder();
try {
mappings.add(new AdGroupAdServiceSoapBindingStub() {
@Override
public Call _createCall() throws ServiceException {
try {
// Exposes the protected createCall() purely to read its type mapping.
return super.createCall();
} catch (RemoteException e) {
throw new RuntimeException(e);
}
}
}._createCall().getTypeMapping());
mappings.add(new AdGroupBidModifierServiceSoapBindingStub() {
@Override
public Call _createCall() throws ServiceException {
try {
return super.createCall();
} catch (RemoteException e) {
throw new RuntimeException(e);
}
}
}._createCall().getTypeMapping());
mappings.add(new AdGroupCriterionServiceSoapBindingStub() {
@Override
public Call _createCall() throws ServiceException {
try {
return super.createCall();
} catch (RemoteException e) {
throw new RuntimeException(e);
}
}
}._createCall().getTypeMapping());
mappings.add(new AdGroupServiceSoapBindingStub() {
@Override
public Call _createCall() throws ServiceException {
try {
return super.createCall();
} catch (RemoteException e) {
throw new RuntimeException(e);
}
}
}._createCall().getTypeMapping());
mappings.add(new BudgetServiceSoapBindingStub() {
@Override
public Call _createCall() throws ServiceException {
try {
return super.createCall();
} catch (RemoteException e) {
throw new RuntimeException(e);
}
}
}._createCall().getTypeMapping());
mappings.add(new CampaignCriterionServiceSoapBindingStub() {
@Override
public Call _createCall() throws ServiceException {
try {
return super.createCall();
} catch (RemoteException e) {
throw new RuntimeException(e);
}
}
}._createCall().getTypeMapping());
mappings.add(new CampaignServiceSoapBindingStub() {
@Override
public Call _createCall() throws ServiceException {
try {
return super.createCall();
} catch (RemoteException e) {
throw new RuntimeException(e);
}
}
}._createCall().getTypeMapping());
mappings.add(new FeedItemServiceSoapBindingStub() {
@Override
public Call _createCall() throws ServiceException {
try {
return super.createCall();
} catch (RemoteException e) {
throw new RuntimeException(e);
}
}
}._createCall().getTypeMapping());
} catch (Exception e) {
throw new RuntimeException("Failed to initialize service type mappings", e);
}
// Set the type mappings reference if another thread has not preempted this thread.
SERVICE_TYPE_MAPPINGS_REF.compareAndSet(null, mappings.build());
}
return SERVICE_TYPE_MAPPINGS_REF.get();
}
}
| |
package jj.jjmessage;
// lololololol java
import static jj.jjmessage.JJMessage.Type.*;
import static jj.util.StringUtils.*;
import java.util.Map;
import jj.http.server.websocket.WebSocketMessage;
import jj.script.Continuation;
import jj.script.PendingKey;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * <p>
 * Encapsulates a communication between client and server, serialized to and from
 * JSON with Jackson. Each instance carries exactly one typed payload; the typed
 * accessors ({@link #get()}, {@link #set()}, ...) return {@code null} unless the
 * message is of the matching type.
 * </p>
 *
 * TODO replace the serialized forms with field tags, after setting up a way of
 * preprocessing js being served to the client so that socket-connect.js (or
 * wherever everything ends up) can use easy-to-read symbols in the source and tags
 * in the execution phase
 * @author jason
 */
public class JJMessage implements Continuation, WebSocketMessage {

    /** Shared, thread-safe Jackson mapper; null payload fields are omitted from the wire form. */
    private static final ObjectMapper mapper = new ObjectMapper();
    static {
        mapper.setSerializationInclusion(Include.NON_NULL);
    }

    public static enum Type {
        /** server --> client append request */
        Append,
        /** server --> client general invocation, expecting a result */
        Bind,
        /** server --> client general invocation, expecting a result */
        Call,
        /** server --> client element creation request, followed by result containing selector */
        Create,
        /** client --> server event fired */
        Event,
        /** client --> server element result */
        Element,
        /** server --> client getter invocation, followed by result containing value */
        Get,
        /** server --> client general invocation, expecting no result! */
        Invoke,
        /** client --> server general string result */
        Result,
        /** server --> client request information in client storage */
        Retrieve,
        /** server --> client setter invocation */
        Set,
        /** server --> client request to store information on the client */
        Store,
        /** server --> client event unbinding request */
        Unbind;
    }

    /** Makes a Get message for the given selector and property type, with no property name. */
    public static JJMessage makeGet(String selector, String type) {
        return makeGet(selector, type, null);
    }

    /** Makes a Get message for the given selector, property type, and optional property name. */
    public static JJMessage makeGet(String selector, String type, String name) {
        JJMessage result = new JJMessage(Get);
        result.get().selector = selector;
        result.get().type = type;
        result.get().name = name;
        return result;
    }

    /** Makes a Set message for the given selector and property type, with no property name. */
    public static JJMessage makeSet(String selector, String type, String value) {
        return makeSet(selector, type, null, value);
    }

    /** Makes a Set message for the given selector, property type, optional name, and value. */
    public static JJMessage makeSet(String selector, String type, String name, String value) {
        JJMessage result = new JJMessage(Set);
        result.set().selector = selector;
        result.set().type = type;
        result.set().name = name;
        result.set().value = value;
        return result;
    }

    // NOTE(review): makeInlineCreate and makeCreate are currently identical; if inline
    // creation is meant to differ, that distinction has been lost -- confirm intent.
    public static JJMessage makeInlineCreate(String html, Map<?,?> args) {
        JJMessage result = new JJMessage(Create);
        result.create().html = html;
        result.create().args = args;
        return result;
    }

    /** Makes a Create message requesting the client build an element from the given html/args. */
    public static JJMessage makeCreate(String html, Map<?,?> args) {
        JJMessage result = new JJMessage(Create);
        result.create().html = html;
        result.create().args = args;
        return result;
    }

    /** Makes an Append message attaching {@code child} to {@code parent}; both are required. */
    public static JJMessage makeAppend(String parent, String child) {
        assert !isEmpty(parent) : "append message requires parent";
        assert !isEmpty(child) : "append message requires child";
        JJMessage result = new JJMessage(Append);
        result.append().parent = parent;
        result.append().child = child;
        return result;
    }

    /** Makes a Bind message registering an event handler; {@code type} is required. */
    public static JJMessage makeBind(String context, String selector, String type) {
        assert !isEmpty(type) : "bind message requires type";
        JJMessage result = new JJMessage(Bind);
        result.bind().context = context;
        result.bind().selector = selector;
        result.bind().type = type;
        return result;
    }

    /**
     * The Invoke message invokes a function on the client.
     *
     * <p>NOTE(review): the {@link Type#Invoke} declaration says "expecting no result" but this
     * method's original doc said the opposite -- the enum comment is assumed authoritative here;
     * confirm against the client-side handler.
     *
     * @param name the remote function to invoke
     * @param args the JSON representation of the arguments array
     * @return the constructed message
     */
    public static JJMessage makeInvoke(String name, String args) {
        assert !isEmpty(name) : "invoke message requires name";
        assert !isEmpty(args) && args.startsWith("[") && args.endsWith("]") :
            "invoke message must have a JSON array argument";
        JJMessage result = new JJMessage(Invoke);
        result.invoke().name = name;
        result.invoke().args = args;
        return result;
    }

    /**
     * The Call message calls a function on the client.
     *
     * <p>NOTE(review): the {@link Type#Call} declaration says "expecting a result" but this
     * method's original doc said the opposite -- the enum comment is assumed authoritative here;
     * confirm against the client-side handler.
     *
     * @param name the remote function to call
     * @param args the JSON representation of the arguments array
     * @return the constructed message
     */
    public static JJMessage makeCall(String name, String args) {
        assert !isEmpty(name) : "call message requires name";
        assert !isEmpty(args) && args.startsWith("[") && args.endsWith("]") :
            "call message must have a JSON array argument";
        JJMessage result = new JJMessage(Call);
        result.call().name = name;
        result.call().args = args;
        return result;
    }

    /** Makes a Store message persisting {@code value} under {@code key} in client storage. */
    public static JJMessage makeStore(String key, String value) {
        JJMessage result = new JJMessage(Store);
        result.store().key = key;
        result.store().value = value;
        return result;
    }

    /** Makes a Retrieve message requesting the value stored under {@code key} on the client. */
    public static JJMessage makeRetrieve(String key) {
        JJMessage result = new JJMessage(Retrieve);
        result.retrieve().key = key;
        return result;
    }

    /** Makes an Unbind message removing an event handler; {@code type} is required. */
    public static JJMessage makeUnbind(String context, String selector, String type) {
        assert !isEmpty(type) : "unbind message requires type";
        JJMessage result = new JJMessage(Unbind);
        result.unbind().context = context;
        result.unbind().selector = selector;
        result.unbind().type = type;
        return result;
    }

    /** For Jackson deserialization only. */
    JJMessage() {}

    /**
     * Creates a message of the given server-to-client type with an empty payload.
     * Client-to-server types (Event, Element, Result) are only ever deserialized,
     * never constructed here, so they fall through to the AssertionError.
     */
    JJMessage(final Type type) {
        switch(this.type = type) {
        case Append:
            message = new Append();
            break;
        case Bind:
            message = new Bind();
            break;
        case Call:
            // Call intentionally shares the Invoke payload shape; see call() below.
            message = new Invoke();
            break;
        case Create:
            message = new Create();
            break;
        case Get:
            message = new Get();
            break;
        case Invoke:
            message = new Invoke();
            break;
        case Retrieve:
            message = new Retrieve();
            break;
        case Set:
            message = new Set();
            break;
        case Store:
            message = new Store();
            break;
        case Unbind:
            message = new Unbind();
            break;
        default:
            throw new AssertionError("can't create a JJMessage of type " + type);
        }
    }

    // -- type flag. used this way to keep memory use efficient
    // but still with a convenient API for reading/writing
    @JsonIgnore
    private Type type;

    @JsonIgnore
    public Type type() {
        return type;
    }

    // The single payload object; its concrete class is determined by type.
    @JsonIgnore
    private Object message;

    // --- script messages

    @JsonProperty
    public Bind bind() {
        return (Bind)(type == Bind ? message : null);
    }

    @JsonProperty
    public Event event() {
        return (Event)(type == Event ? message : null);
    }

    @JsonProperty // setters only needed for client -- > server
    void event(Event event) {
        type = Event;
        message = event;
    }

    @JsonProperty
    public Element element() {
        return (Element)(type == Element ? message : null);
    }

    @JsonProperty
    void element(Element element) {
        type = Element;
        message = element;
    }

    @JsonProperty
    public Get get() {
        return (Get)(type == Get ? message : null);
    }

    @JsonProperty
    public Set set() {
        return (Set)(type == Set ? message : null);
    }

    @JsonProperty
    public Store store() {
        return (Store)(type == Store ? message : null);
    }

    @JsonProperty
    public Retrieve retrieve() {
        return (Retrieve)(type == Retrieve ? message : null);
    }

    @JsonProperty
    public Result result() {
        return (Result)(type == Result ? message : null);
    }

    @JsonProperty // setters only needed for client -- > server
    void result(Result result) {
        type = Result;
        message = result;
    }

    @JsonProperty
    public Create create() {
        return (Create)(type == Create ? message : null);
    }

    @JsonProperty
    public Append append() {
        return (Append)(type == Append ? message : null);
    }

    @JsonProperty
    public Invoke invoke() {
        return (Invoke)(type == Invoke ? message : null);
    }

    @JsonProperty
    public Invoke call() {
        // Call messages reuse the Invoke payload class.
        return (Invoke)(type == Call ? message : null);
    }

    @JsonProperty
    public Unbind unbind() {
        return (Unbind)(type == Unbind ? message : null);
    }

    /** Equality is defined as identical serialized (JSON) form. */
    @Override
    public boolean equals(Object obj) {
        // instanceof already rejects null
        return obj instanceof JJMessage && toString().equals(obj.toString());
    }

    /** Consistent with {@link #equals(Object)}: both are derived from the serialized form. */
    @Override
    public int hashCode() {
        return toString().hashCode();
    }

    @Override
    public String toString() {
        try {
            return mapper.writeValueAsString(this);
        } catch (Exception e) {
            throw new JJMessageException(e);
        }
    }

    @Override
    public String stringify() {
        return toString();
    }

    /** Deserializes a message from its JSON wire form. */
    public static JJMessage fromString(String input) {
        try {
            return mapper.readValue(input, JJMessage.class);
        } catch (Exception e) {
            throw new JJMessageException(e);
        }
    }

    // WE ARE CONTINUABLE! MIGHTY BABY MIGHTY!

    /** Returns a pending key only for payloads that carry a result id. */
    @JsonIgnore
    @Override
    public PendingKey pendingKey() {
        return (message instanceof HasResultID) ? new PendingKey(((HasResultID)message).id) : null;
    }

    @JsonIgnore
    @Override
    public void pendingKey(PendingKey pendingKey) {
        if (message instanceof HasResultID) {
            ((HasResultID)message).id = pendingKey.id();
        }
    }
}
| |
/*
* Copyright 2015 Baidu, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.baidubce.services.media.model;
import com.baidubce.model.AbstractBceResponse;
/**
 * Response payload describing a single watermark configuration, including its BOS
 * location, placement (pixel offsets or alignment keywords), display timeline, and
 * scaling/repeat behavior.
 */
public class GetWaterMarkResponse extends AbstractBceResponse {

    // Auto-generated id of the watermark.
    private String watermarkId = null;
    // Creation time of the watermark.
    private String createTime = null;
    // BOS bucket holding the watermark image.
    private String bucket = null;
    // BOS key of the watermark image.
    private String key = null;
    // Vertical offset, in pixels.
    private Integer verticalOffsetInPixel = null;
    // Horizontal offset, in pixels.
    private Integer horizontalOffsetInPixel = null;
    // Vertical alignment keyword. NOTE(review): original doc listed "left, center, right",
    // which looks copy-pasted from the horizontal field -- verify the accepted values.
    private String verticalAlignment = null;
    // Horizontal alignment keyword; options include left, center, right.
    private String horizontalAlignment = null;
    // Display timeline setting.
    private Timeline timeline = null;
    // Number of times to repeat the display; 0 means infinite.
    private Integer repeated = null;
    // Whether the watermark may scale automatically.
    private Boolean allowScaling = null;
    // Horizontal offset, in pixels ("100") or as a fraction ("0.1" = 10%).
    private String dx;
    // Vertical offset, in pixels ("100") or as a fraction ("0.1" = 10%).
    private String dy;
    // Watermark width, in pixels ("100") or as a fraction ("0.1" = 10%).
    private String width;
    // Watermark height, in pixels ("100") or as a fraction ("0.1" = 10%).
    private String height;

    public String getWatermarkId() {
        return watermarkId;
    }

    public void setWatermarkId(String watermarkId) {
        this.watermarkId = watermarkId;
    }

    public String getCreateTime() {
        return createTime;
    }

    public void setCreateTime(String createTime) {
        this.createTime = createTime;
    }

    public String getBucket() {
        return bucket;
    }

    public void setBucket(String bucket) {
        this.bucket = bucket;
    }

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public Integer getVerticalOffsetInPixel() {
        return verticalOffsetInPixel;
    }

    public void setVerticalOffsetInPixel(Integer verticalOffsetInPixel) {
        this.verticalOffsetInPixel = verticalOffsetInPixel;
    }

    public Integer getHorizontalOffsetInPixel() {
        return horizontalOffsetInPixel;
    }

    public void setHorizontalOffsetInPixel(Integer horizontalOffsetInPixel) {
        this.horizontalOffsetInPixel = horizontalOffsetInPixel;
    }

    public String getVerticalAlignment() {
        return verticalAlignment;
    }

    public void setVerticalAlignment(String verticalAlignment) {
        this.verticalAlignment = verticalAlignment;
    }

    public String getHorizontalAlignment() {
        return horizontalAlignment;
    }

    public void setHorizontalAlignment(String horizontalAlignment) {
        this.horizontalAlignment = horizontalAlignment;
    }

    public Timeline getTimeline() {
        return timeline;
    }

    public void setTimeline(Timeline timeline) {
        this.timeline = timeline;
    }

    public Integer getRepeated() {
        return repeated;
    }

    public void setRepeated(Integer repeated) {
        this.repeated = repeated;
    }

    public Boolean getAllowScaling() {
        return allowScaling;
    }

    public void setAllowScaling(Boolean allowScaling) {
        this.allowScaling = allowScaling;
    }

    public String getDx() {
        return dx;
    }

    public void setDx(String dx) {
        this.dx = dx;
    }

    /** Fluent variant of {@link #setDx(String)}. */
    public GetWaterMarkResponse withDx(String dx) {
        this.dx = dx;
        return this;
    }

    public String getDy() {
        return dy;
    }

    public void setDy(String dy) {
        this.dy = dy;
    }

    /** Fluent variant of {@link #setDy(String)}. */
    public GetWaterMarkResponse withDy(String dy) {
        this.dy = dy;
        return this;
    }

    public String getHeight() {
        return height;
    }

    public void setHeight(String height) {
        this.height = height;
    }

    /** Fluent variant of {@link #setHeight(String)}. */
    public GetWaterMarkResponse withHeight(String height) {
        this.height = height;
        return this;
    }

    public String getWidth() {
        return width;
    }

    public void setWidth(String width) {
        this.width = width;
    }

    /** Fluent variant of {@link #setWidth(String)}. */
    public GetWaterMarkResponse withWidth(String width) {
        this.width = width;
        return this;
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("class GetWaterMarkResponse {\n");
        appendField(text, "watermarkId", watermarkId);
        appendField(text, "createTime", createTime);
        appendField(text, "bucket", bucket);
        appendField(text, "key", key);
        appendField(text, "horizontalAlignment", horizontalAlignment);
        appendField(text, "verticalAlignment", verticalAlignment);
        appendField(text, "verticalOffsetInPixel", verticalOffsetInPixel);
        appendField(text, "horizontalOffsetInPixel", horizontalOffsetInPixel);
        appendField(text, "dx", dx);
        appendField(text, "dy", dy);
        appendField(text, "width", width);
        appendField(text, "height", height);
        appendField(text, "timeline", timeline);
        appendField(text, "repeated", repeated);
        appendField(text, "allowScaling", allowScaling);
        return text.append("}\n").toString();
    }

    // Emits one "name: value" line; append(Object) renders null as "null",
    // matching the original per-field append calls byte for byte.
    private static void appendField(StringBuilder out, String name, Object value) {
        out.append(" ").append(name).append(": ").append(value).append("\n");
    }
}
| |
/*
* MX - Essential Cheminformatics
*
* Copyright (c) 2007-2009 Metamolecular, LLC
*
* http://metamolecular.com/mx
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.metamolecular.mx.test;
import com.metamolecular.mx.model.Atom;
import com.metamolecular.mx.model.Bond;
import com.metamolecular.mx.model.DefaultMolecule;
import com.metamolecular.mx.model.Molecule;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import junit.framework.TestCase;
/**
* @author Richard L. Apodaca <rapodaca at metamolecular.com>
*/
public class MoleculeTest extends TestCase
{
  //TODO: test getSubstructure with invalid id throws
  //TODO: test removing substructure from different molecule throws
  //TODO: test removing nonexistent substructure (from same molecule) throws - i.e.,
  //      the substructure was already deleted once before
  //TODO: test adding substructure fires event
  //TODO: test removing substructure fires event

  public void testItShouldAddAValidAtom()
  {
    Molecule molecule = new DefaultMolecule();
    Atom atom = molecule.addAtom("C");

    assertEquals(1, molecule.countAtoms());
    assertEquals("C", molecule.getAtom(0).getSymbol());
    assertEquals(atom, molecule.getAtom(0));
  }

  public void testItShouldLeavAnAtomWithANullMoleculeWhenDeletingIt()
  {
    Molecule molecule = new DefaultMolecule();
    Atom atom = molecule.addAtom("C");

    molecule.removeAtom(atom);

    assertEquals(0, molecule.countAtoms());
    assertNull(atom.getMolecule());
  }

  public void testItShouldConnectTwoValidAtomsWithAValidBondType()
  {
    Molecule molecule = new DefaultMolecule();
    Atom atom1 = molecule.addAtom("C");
    Atom atom2 = molecule.addAtom("C");
    Bond bond = molecule.connect(atom1, atom2, 1);

    assertEquals(atom1, bond.getSource());
    assertEquals(atom2, bond.getTarget());
    assertEquals(1, atom1.countNeighbors());
    assertEquals(1, atom2.countNeighbors());
    assertEquals(1, bond.getType());
    assertEquals(1, molecule.countBonds());
  }

  public void testItShouldDisconnectAtomsLeavingNoBondBetweenThem()
  {
    Molecule molecule = new DefaultMolecule();
    Atom atom1 = molecule.addAtom("C");
    Atom atom2 = molecule.addAtom("C");

    molecule.connect(atom1, atom2, 1);
    molecule.disconnect(atom1, atom2);

    assertEquals(2, molecule.countAtoms());
    assertEquals(0, atom1.countNeighbors());
    assertEquals(0, atom2.countNeighbors());
    assertEquals(0, molecule.countBonds());
  }

  public void testItShouldDeleteAllBondsToAnAtomItDeletes()
  {
    Molecule molecule = new DefaultMolecule();
    Atom atom1 = molecule.addAtom("C");
    Atom atom2 = molecule.addAtom("C");

    molecule.connect(atom1, atom2, 1);
    molecule.removeAtom(atom2);

    assertEquals(1, molecule.countAtoms());
    assertEquals(0, atom1.countNeighbors());
    assertEquals(0, atom2.countNeighbors());
    assertEquals(0, molecule.countBonds());
  }

  public void testItShouldThrowWhenConnectingAnAtomThatDoesntBelong()
  {
    Molecule molecule1 = new DefaultMolecule();
    Molecule molecule2 = new DefaultMolecule();
    Atom atom1 = molecule1.addAtom("C");
    Atom atom2 = molecule2.addAtom("C");

    try
    {
      molecule1.connect(atom1, atom2, 1);
      fail("connect accepted an atom belonging to another molecule");
    }
    catch (Exception ignore)
    {
      // expected - exact type unspecified by the Molecule contract
    }
  }

  public void testItShouldRemoveAnAtomThatBelongs()
  {
    Molecule molecule = new DefaultMolecule();
    Atom atom = molecule.addAtom("C");

    molecule.removeAtom(atom);

    assertEquals(0, molecule.countAtoms());
  }

  public void testItShouldThrowWhenRemovingAnAtomThatDoesntBelong()
  {
    Molecule molecule1 = new DefaultMolecule();
    Molecule molecule2 = new DefaultMolecule();
    Atom atom2 = molecule2.addAtom("C");

    molecule1.addAtom("C");

    try
    {
      molecule1.removeAtom(atom2);
      fail("removeAtom accepted an atom belonging to another molecule");
    }
    catch (IllegalStateException ignore)
    {
      // expected - same contract verified by testItShouldThrowWhenRemovingANonmemberAtom
    }
  }

  public void testItShouldThrowWhenAddingInvalidAtom()
  {
    Molecule molecule = new DefaultMolecule();

    try
    {
      molecule.addAtom("fail");
      fail("addAtom accepted an invalid element symbol");
    }
    catch (Exception ignore)
    {
      // expected - exact type unspecified by the Molecule contract
    }
  }

  public void testItShouldFireAtomAddEvent()
  {
    Molecule molecule = new DefaultMolecule();
    Listener listener = new Listener();

    molecule.addChangeListener(listener);
    molecule.addAtom("C");

    assertEquals(1, listener.count);
  }

  public void testItShouldFireBondAddEvent()
  {
    Molecule molecule = new DefaultMolecule();
    Listener listener = new Listener();

    molecule.addChangeListener(listener);

    Atom atom1 = molecule.addAtom("C");
    Atom atom2 = molecule.addAtom("C");

    molecule.connect(atom1, atom2, 1);

    // two addAtom notifications plus one connect notification
    assertEquals(3, listener.count);
  }

  public void testItShouldIgnoreAtomAddChangesWhenBeginModifySet()
  {
    Molecule molecule = new DefaultMolecule();
    Listener listener = new Listener();

    molecule.addChangeListener(listener);
    molecule.beginModify();
    molecule.addAtom("C");

    assertEquals(0, listener.count);
  }

  public void testItShouldFireSingleChangeEventAfterEndModify()
  {
    Molecule molecule = new DefaultMolecule();
    Listener listener = new Listener();

    molecule.addChangeListener(listener);
    molecule.beginModify();
    molecule.addAtom("C");
    molecule.addAtom("C");
    molecule.addAtom("C");
    molecule.endModify();

    // batched modifications collapse into a single notification
    assertEquals(1, listener.count);
  }

  public void testItShouldRemoveAChangeListener()
  {
    Molecule molecule = new DefaultMolecule();
    Listener listener = new Listener();

    molecule.addChangeListener(listener);
    molecule.removeChangeListener(listener);
    molecule.addAtom("C");

    assertEquals(0, listener.count);
  }

  public void testItShouldThrowWhenConnectingAnAtomToItself()
  {
    Molecule molecule = new DefaultMolecule();
    Atom atom1 = molecule.addAtom("C");

    try
    {
      molecule.connect(atom1, atom1, 1);
      fail("connect accepted a self-bond");
    }
    catch (IllegalStateException ignore)
    {
      // expected
    }
  }

  public void testItShouldThrowWhenRemovingANonmemberAtom()
  {
    Molecule m1 = new DefaultMolecule();
    Molecule m2 = new DefaultMolecule();
    Atom a1 = m1.addAtom("C");
    Atom a2 = m2.addAtom("C");

    try
    {
      m1.removeAtom(a2);
      fail("removeAtom accepted a nonmember atom");
    }
    catch (IllegalStateException ignore)
    {
      // expected
    }
  }

  public void testCopiedMethaneGivesAtomWithCorrectIndex()
  {
    Molecule molecule = new DefaultMolecule();

    molecule.addAtom("C");

    Molecule copy = molecule.copy();

    assertEquals(0, copy.getAtom(0).getIndex());
  }

  public void testCopiedAtomHasCorrectParentMolecule()
  {
    Molecule molecule = new DefaultMolecule();

    molecule.addAtom("C");

    Molecule copy = molecule.copy();

    assertEquals(copy, copy.getAtom(0).getMolecule());
  }

  /**
   * Counts change notifications delivered by a Molecule under test.
   */
  private class Listener implements ChangeListener
  {
    // number of stateChanged callbacks received
    private int count = 0;

    public void stateChanged(ChangeEvent e)
    {
      count++;
    }
  }
}
| |
package at.tugraz.kti.pdftable.data;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Properties;
import java.util.logging.FileHandler;
import java.util.logging.Handler;
import java.util.logging.Logger;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactoryConfigurationError;
import org.apache.commons.io.FileUtils;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.DeserializationConfig;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;
import org.eclipse.jetty.util.log.LoggerLog;
import org.xml.sax.SAXException;
import at.tugraz.kti.pdftable.document.DocumentTable;
import at.tugraz.kti.pdftable.document.RichDocument;
import at.tugraz.kti.pdftable.document.TableModel;
import at.tugraz.kti.pdftable.document.DocumentTables;
import at.tugraz.kti.pdftable.document.export.ExportException;
import at.tugraz.kti.pdftable.document.export.Importer;
import at.tugraz.kti.pdftable.document.export.StructureExport;
import at.tugraz.kti.pdftable.extract.BlockInfo;
import at.tugraz.kti.pdftable.extract.TableBoundaryClassifier;
import at.tugraz.kti.pdftable.extract.WordExtractor;
/**
* Manage data (source-files, annotations). Works on a directory where
* source-files and annotations are stored.
*
 * A JSON index of all documents is kept and managed.
*
* Annotations are stored with regard to a "working set", which can be simply a
* user id or user name, so different users/groups can work on separate
* annotation sets.
*
* @author "matthias frey"
*/
public class RepositoryAccess {

    /** Index of all documents (id -> file) backing this repository. */
    protected DocumentIndex index;
    /** Runtime options, e.g. autosave-export / autosave-target / external-datadir. */
    protected Properties properties;
    /** Annotation namespace; different users/groups keep separate sets. */
    protected String workingSet = "default";
    /** Directory holding the source PDF files. */
    protected String srcpath;
    /** Directory holding annotation data. */
    protected String datapath;
    /** Per-document file name that stores the table annotations. */
    protected String tablesFilename = "tables_v2.json";
    protected String repositoryName;
    public boolean externalMode = false;
    public static final String EXTERNAL_MODE = "__external__";
    protected Logger logger;

    public RepositoryAccess(String dir, String working_set) {
        init(dir, working_set);
        properties = new Properties();
    }

    public void setLogger(Logger logger) {
        this.logger = logger;
    }

    /**
     * Override the default directory layout with explicit paths.
     *
     * @param basedir directory of the source files
     * @param index   path of the JSON document index
     * @param datadir directory for annotation data
     */
    public void setOptions(String basedir, String index, String datadir) {
        srcpath = basedir;
        datapath = datadir;
        this.index = new DocumentIndex(index, basedir);
        checkRepo();
    }

    /**
     * Derive the default layout (src/ and data/ below {@code dir}) and set up
     * the document index. Does not touch the file system yet.
     */
    protected void init(String dir, String working_set) {
        File f = new File(dir);
        repositoryName = f.getName();
        workingSet = working_set;
        srcpath = dir + "/src";
        datapath = dir + "/data";
        index = new DocumentIndex(srcpath + "/documents.json", srcpath);
        // checkRepo();
        externalMode = false;
    }

    public static RepositoryAccess getInstance(String repo, String wset) {
        return new RepositoryAccess(repo, wset);
    }

    public void setProperties(Properties prop) {
        properties = prop;
    }

    public String getRepositoryName() {
        return externalMode ?
                RepositoryAccess.EXTERNAL_MODE : repositoryName;
    }

    public String getWorkingSet() {
        return workingSet;
    }

    /**
     * Test if repository directory structure is present and usable.
     * If not, attempt to create and initialise.
     *
     * @return true when the index could be initialised
     */
    public boolean checkRepo() {
        File datadir = new File(datapath);
        File basedir = new File(datadir.getParent());
        if (!basedir.isDirectory()) {
            basedir.mkdir();
        }
        if (!datadir.isDirectory() && !datadir.mkdir()) {
            throw new RuntimeException(datadir.getAbsolutePath()
                    + " could not be created");
        }
        File srcrdir = new File(srcpath);
        if (!srcrdir.isDirectory() && !srcrdir.mkdir()) {
            throw new RuntimeException(srcrdir.getAbsolutePath()
                    + " could not be created");
        }
        try {
            index.init();
        } catch (RepositoryException e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * return id/names of all documents in the repository
     *
     * @return HashMap<Integer, String> document name and id
     * @throws RepositoryException
     */
    public HashMap<Integer, String> getDocuments() throws RepositoryException {
        return index.getDocuments();
    }

    /**
     * Load and return document as a RichDocument object (by id). Document is
     * opened and parsed, the annotated tables are added.
     *
     * @param id document id
     * @return the opened document, or null when the id is unknown
     * @throws IOException
     */
    public RichDocument getDocumentById(int id) throws RepositoryException,
            IOException {
        File f = index.getDocumentById(id);
        if (f == null) {
            return null;
        }
        String filename = f.getAbsolutePath();
        RichDocument d = new RichDocument(filename);
        d.open();
        d.setTableAnnotations(getAnnotatedTables(id));
        return d;
    }

    public String getSourceDirectory() {
        return index.getBasePath().toString();
        // return srcpath;
    }

    /**
     * Add (or update) a table annotation to/in a document's page
     *
     * @param docid
     * @param pagen
     * @param tabledef
     *            as string in json format
     * @throws JsonParseException
     * @throws JsonMappingException
     * @throws IOException
     */
    public void addTableToDocument(int docid, int pagen,
            TableModel tabledef) throws JsonParseException,
            JsonMappingException, IOException, RepositoryException {
        // instantiate doc to validate docid
        RichDocument doc = getDocumentById(docid);
        // Load the set of tables from file
        File file = new File(_getDataDir(docid), tablesFilename);
        DocumentTables tdefs = loadTableDefinitions(file);
        // Create entry page if non-existent
        if (!tdefs.annotationsOnPage.containsKey(pagen)) {
            tdefs.annotationsOnPage.put(pagen, new ArrayList<DocumentTable>());
        }
        // Set up table instance.
        // Try to load by id if set, otherwise create new.
        // NOTE(review): assumes tabledef.id is always a numeric string;
        // a NumberFormatException propagates otherwise.
        int table_id = Integer.parseInt(tabledef.id);
        DocumentTable dt = null;
        if (table_id != 0) {
            dt = tdefs.getTable(table_id);
        }
        if (dt == null) {
            dt = DocumentTable.createTable();
            tdefs.annotationsOnPage.get(pagen).add(dt);
        }
        // Set incoming data (pagenumber, cells) to table.
        dt.page = pagen;
        dt.trs = tabledef.trs;
        // Save tables to file.
        saveTableDefinitions(file, tdefs);
        // Compare content, not references: "!= \"\"" only checked object
        // identity and misbehaved for property values read from disk.
        if (!properties.getProperty("autosave-export", "").isEmpty()) {
            // Update doc before saving auto export.
            doc.setTableAnnotations(tdefs);
            _handleAutoExports(doc);
        }
    }

    /**
     * Load a DocumentTables data-structure from a JSON file.
     *
     * @param file annotation file; a missing file yields an empty structure
     * @return the parsed (or empty) structure
     * @throws JsonParseException
     * @throws JsonMappingException
     * @throws IOException
     */
    public static DocumentTables loadTableDefinitions(File file)
            throws JsonParseException, JsonMappingException, IOException {
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(
                DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        DocumentTables tdefs = new DocumentTables();
        if (file.isFile()) {
            tdefs = mapper.readValue(file, DocumentTables.class);
        }
        return tdefs;
    }

    /**
     * Save DocumentTables data-structure to disk as JSON.
     *
     * @param file
     * @param tdefs
     * @throws JsonGenerationException
     * @throws JsonMappingException
     * @throws IOException
     */
    public static void saveTableDefinitions(File file, DocumentTables tdefs)
            throws JsonGenerationException, JsonMappingException, IOException {
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(
                DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        mapper.writeValue(file, tdefs);
    }

    /**
     * Load the definitions of annotated tables that are stored for a document.
     *
     * @param docid
     * @return
     * @throws JsonParseException
     * @throws JsonMappingException
     * @throws IOException
     */
    public DocumentTables getAnnotatedTables(int docid)
            throws JsonParseException, JsonMappingException, IOException {
        File file = new File(_getDataDir(docid), tablesFilename);
        DocumentTables tdefs = loadTableDefinitions(file);
        return tdefs;
    }

    /**
     * Like {@link #getAnnotatedTables(int)}, restricted to one page.
     * Returns an empty list when the page has no annotations.
     */
    public ArrayList<DocumentTable> getAnnotatedTables(int docid, int pageid)
            throws JsonParseException, JsonMappingException, IOException {
        DocumentTables tdefs = getAnnotatedTables(docid);
        if (tdefs.annotationsOnPage.containsKey(pageid)) {
            return tdefs.annotationsOnPage.get(pageid);
        } else {
            return new ArrayList<DocumentTable>();
        }
    }

    /**
     * Delete a table-annotation, by table id, page and document-id.
     *
     * @param docid
     * @param pagen
     * @param tableid
     * @throws IOException
     */
    public void clearTable(int docid, int pagen, int tableid)
            throws IOException {
        File file = new File(_getDataDir(docid), tablesFilename);
        DocumentTables tdefs = loadTableDefinitions(file);
        tdefs.removeTable(pagen, tableid);
        saveTableDefinitions(file, tdefs);
    }

    /**
     * Clear all annotations for a document.
     *
     * @param docid
     */
    public void resetAnnotations(int docid) throws IOException {
        File file = new File(_getDataDir(docid), tablesFilename);
        // overwrite with an empty structure
        DocumentTables tdefs = new DocumentTables();
        saveTableDefinitions(file, tdefs);
    }

    /**
     * Clear annotations for a whole document/page.
     *
     * @param docid
     * @param pagen
     * @throws JsonParseException
     * @throws JsonMappingException
     * @throws IOException
     */
    public void clearAnnotations(int docid, int pagen)
            throws JsonParseException, JsonMappingException, IOException {
        File file = new File(_getDataDir(docid), tablesFilename);
        DocumentTables tdefs = loadTableDefinitions(file);
        if (tdefs.annotationsOnPage.containsKey(pagen)) {
            tdefs.annotationsOnPage.remove(pagen);
        }
        saveTableDefinitions(file, tdefs);
    }

    /**
     * Retrieve data of some model, classifier or preprocessing step regarding a
     * document. Data is returned in JSON format (no strict types,
     * interoperability).
     *
     * @param docid
     * @param pagen
     * @param what one of "chars", "words", "table", "import"
     * @return JSON string; "[]" or "" when nothing applies
     * @throws RepositoryException
     * @throws IOException
     */
    public String getDataLayerAsJson(int docid, int pagen, String what)
            throws RepositoryException, IOException {
        ObjectMapper mapper = new ObjectMapper();
        // instantiate doc to validate docid
        RichDocument doc = getDocumentById(docid);
        if (what.equals("chars")) {
            return mapper.writeValueAsString(doc.charactersOnPage.get(pagen));
        } else if (what.equals("words")) {
            WordExtractor we = new WordExtractor();
            we.setCharacters(doc.charactersOnPage.get(pagen));
            we.extractWords();
            return mapper.writeValueAsString(we.getWords());
        } else if (what.equals("table")) {
            // Try to find sparse lines by using
            // the line detection of WordExtractor,
            // then TableBoundaryClassifier
            // (experimental / very basic).
            WordExtractor we = new WordExtractor();
            we.setCharacters(doc.charactersOnPage.get(pagen));
            we.extractWords();
            TableBoundaryClassifier tbc = new TableBoundaryClassifier();
            tbc.setLines(we.getLines());
            tbc.classifyLines();
            ArrayList<BlockInfo> myLines = new ArrayList<BlockInfo>();
            for (ArrayList<BlockInfo> raw_line : tbc.getTableLines()) {
                // merge each raw line's blocks into a single BlockInfo
                BlockInfo curline = new BlockInfo();
                for (BlockInfo b : raw_line) {
                    curline.addBlock(b);
                }
                myLines.add(curline);
            }
            return mapper.writeValueAsString(myLines);
        } else if (what.equals("import")) {
            // Find/guess structure file
            File file = _findExternalStructFile(doc);
            if (file != null) {
                // Found, import external structure file.
                Importer imp = new Importer();
                DocumentTables tdefs = null;
                try {
                    tdefs = imp.getTableDefinitions(file, doc);
                } catch (ParserConfigurationException e) {
                    e.printStackTrace();
                } catch (SAXException e) {
                    e.printStackTrace();
                }
                // A failed import above leaves tdefs null - treat it the same
                // as "no tables" instead of raising a NullPointerException.
                if (tdefs == null || !tdefs.annotationsOnPage.containsKey(pagen)) {
                    return "[]";
                }
                return mapper.writeValueAsString(tdefs.annotationsOnPage.get(pagen));
            } else {
                // no structure file found
                return "[]";
            }
        } else {
            return "";
        }
    }

    /**
     * Add a document to the repository (by filename). Throws IOException in
     * case of invalid file (no pdf) or duplicate file. Copy the file into
     * repository.
     *
     * @param doc
     * @throws IOException
     * @throws RepositoryException
     */
    public void addDocument(File doc) throws IOException, RepositoryException {
        File importfile = new File(srcpath + '/' + doc.getName());
        FileUtils.copyFile(doc, importfile); // or doc.renameTo(..);
        // Open to validate
        PDDocument document = PDDocument.load(doc.getAbsolutePath());
        document.close();
        // Add to index
        indexDocument(new File(doc.getName()));
    }

    /**
     * Add a document to index (but do not copy or move the file).
     *
     * @param doc
     * @return the id assigned by the index
     * @throws RepositoryException
     */
    public int indexDocument(File doc) throws RepositoryException {
        return index.add(doc);
    }

    public DocumentIndex getDocumentIndex() {
        return index;
    }

    public Logger getLogger() {
        return logger;
    }

    /**
     * Look for an external structure file ("...-str.xml") next to the source
     * document or in the configured external data directory.
     *
     * @return the structure file, or null if none exists
     */
    protected File _findExternalStructFile(RichDocument doc) {
        File source = doc.getSourcePath();
        int pos = source.getName().lastIndexOf(".");
        String trunc_name = pos > 0 ? source.getName().substring(0, pos)
                : source.getName();
        File file;
        // 1) full file name + suffix, next to the source
        file = new File(source.getAbsolutePath() + "-str.xml");
        if (file.isFile()) {
            return file;
        }
        // 2) name without extension + suffix, next to the source
        file = new File(source.getParent(), trunc_name + "-str.xml");
        if (file.isFile()) {
            return file;
        }
        // 3) full file name + suffix, in the external data directory
        file = new File(properties.getProperty("external-datadir", ""),
                doc.getFilename() + "-str.xml");
        if (file.isFile()) {
            return file;
        }
        // 4) name without extension + suffix, in the external data directory
        file = new File(properties.getProperty("external-datadir", ""),
                trunc_name + "-str.xml");
        if (file.isFile()) {
            return file;
        }
        return null;
    }

    /**
     * A helper to retrieve (and possibly create beforehand) the directory for
     * storing annotation information for a document.
     *
     * @param docid
     * @return data directory for the given document
     */
    protected File _getDataDir(int docid) {
        File datadir = new File(datapath);
        if (!datadir.isDirectory() && !datadir.mkdir()) {
            throw new RuntimeException(datadir.getAbsolutePath()
                    + " could not be created");
        }
        File working_setdir = new File(datapath, workingSet);
        if (!working_setdir.isDirectory() && !working_setdir.mkdir()) {
            // report the directory that actually failed (was datadir before)
            throw new RuntimeException(working_setdir.getAbsolutePath()
                    + " could not be created");
        }
        File dir = new File(working_setdir, String.valueOf(docid));
        if (!dir.isDirectory() && !dir.mkdir()) {
            throw new RuntimeException(dir.getAbsolutePath()
                    + " could not be created");
        }
        return dir;
    }

    /**
     * Write exports for a document into the default "exports" directory
     * below the data path.
     */
    public void writeExports(RichDocument doc) throws JsonGenerationException,
            JsonMappingException, ExportException,
            IOException {
        File parent = new File(datapath, "exports");
        if (!parent.isDirectory()) {
            parent.mkdir();
        }
        writeExports(parent, doc);
    }

    /**
     * Write exports for a document into the given directory.
     */
    public void writeExports(File parent, RichDocument doc)
            throws ExportException, JsonGenerationException, JsonMappingException, IOException {
        File source = doc.getSourcePath();
        int pos = source.getName().lastIndexOf(".");
        String trunc_name = pos > 0 ? source.getName().substring(0, pos)
                : source.getName();
        // Save structure export.
        StructureExport structure_exp = new StructureExport();
        structure_exp.export(doc, doc.getTableAnnotations());
        structure_exp.toFile(new File(parent, trunc_name + "-str.xml"));
        // // Save JSON.
        // saveTableDefinitions(new File(parent, trunc_name + ".json"),
        // doc.getTableAnnotations());
    }

    /**
     * Handle "auto Exports", i.e. saving an export whenever incoming table
     * data is stored to disk internally.
     *
     * @param doc
     */
    protected void _handleAutoExports(RichDocument doc) {
        // Determine parent directory.
        File parent = new File(doc.getSourcePath().getParent());
        // Compare content, not references (see addTableToDocument).
        if (!properties.getProperty("autosave-target", "").isEmpty()) {
            parent = new File(properties.getProperty("autosave-target"));
        }
        try {
            writeExports(parent, doc);
        } catch (Exception e) {
            System.out.println("Failed to write auto-exports...");
            e.printStackTrace();
        }
    }
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.casemodule;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.File;
import java.util.Set;
import java.util.TreeSet;
import javax.swing.JFileChooser;
import javax.swing.JPanel;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessor;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
/**
* Add input wizard subpanel for adding local files / dirs to the case
*/
class LocalFilesPanel extends JPanel {

    // Lazily created because Swing may invoke the overridden
    // addPropertyChangeListener() from the JPanel constructor, before
    // this class's field initializers have run.
    private PropertyChangeSupport pcs = null;
    private Set<File> currentFiles = new TreeSet<File>(); //keep currents in a set to disallow duplicates per add
    private boolean enableNext = false;
    private static LocalFilesPanel instance;
    public static final String FILES_SEP = ",";
    private static final Logger logger = Logger.getLogger(LocalFilesPanel.class.getName());

    /**
     * Creates new form LocalFilesPanel
     */
    private LocalFilesPanel() {
        initComponents();
        customInit();
    }

    static synchronized LocalFilesPanel getDefault() {
        if (instance == null) {
            instance = new LocalFilesPanel();
        }
        return instance;
    }

    private void customInit() {
        localFileChooser.setMultiSelectionEnabled(true);
        selectedPaths.setText("");
    }

    //@Override
    public String getContentPaths() {
        //TODO consider interface change to return list of paths instead
        if (currentFiles == null) {
            return "";
        }
        StringBuilder b = new StringBuilder();
        for (File f : currentFiles) {
            b.append(f.getAbsolutePath());
            b.append(FILES_SEP);
        }
        return b.toString();
    }

    //@Override
    public void setContentPath(String s) {
        //for the local file panel we don't need to restore the last paths used
        //when the wizard restarts
    }

    //@Override
    public String getContentType() {
        return NbBundle.getMessage(this.getClass(), "LocalFilesPanel.contentType.text");
    }

    //@Override
    public boolean validatePanel() {
        return enableNext;
    }

    //@Override
    public void select() {
        reset();
    }

    //@Override
    public void reset() {
        currentFiles.clear();
        selectedPaths.setText("");
        enableNext = false;
        //pcs.firePropertyChange(AddImageWizardChooseDataSourceVisual.EVENT.UPDATE_UI.toString(), false, true);
    }

    /**
     * Null-safe accessor for the lazily created PropertyChangeSupport.
     */
    private synchronized PropertyChangeSupport getPropertyChangeSupport() {
        if (pcs == null) {
            pcs = new PropertyChangeSupport(this);
        }
        return pcs;
    }

    @Override
    public synchronized void addPropertyChangeListener(PropertyChangeListener pcl) {
        super.addPropertyChangeListener(pcl);
        getPropertyChangeSupport().addPropertyChangeListener(pcl);
    }

    @Override
    public void removePropertyChangeListener(PropertyChangeListener pcl) {
        super.removePropertyChangeListener(pcl);
        // was pcs.removePropertyChangeListener(pcl): NPE when called before
        // any listener had been added
        getPropertyChangeSupport().removePropertyChangeListener(pcl);
    }

    @Override
    public String toString() {
        return NbBundle.getMessage(this.getClass(), "LocalFilesDSProcessor.toString.text");
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        localFileChooser = new javax.swing.JFileChooser();
        jScrollPane1 = new javax.swing.JScrollPane();
        jTextArea1 = new javax.swing.JTextArea();
        selectButton = new javax.swing.JButton();
        infoLabel = new javax.swing.JLabel();
        clearButton = new javax.swing.JButton();
        jScrollPane2 = new javax.swing.JScrollPane();
        selectedPaths = new javax.swing.JTextArea();
        localFileChooser.setApproveButtonText(org.openide.util.NbBundle.getMessage(LocalFilesPanel.class, "LocalFilesPanel.localFileChooser.approveButtonText")); // NOI18N
        localFileChooser.setApproveButtonToolTipText(org.openide.util.NbBundle.getMessage(LocalFilesPanel.class, "LocalFilesPanel.localFileChooser.approveButtonToolTipText")); // NOI18N
        localFileChooser.setDialogTitle(org.openide.util.NbBundle.getMessage(LocalFilesPanel.class, "LocalFilesPanel.localFileChooser.dialogTitle")); // NOI18N
        localFileChooser.setFileSelectionMode(javax.swing.JFileChooser.FILES_AND_DIRECTORIES);
        jTextArea1.setColumns(20);
        jTextArea1.setRows(5);
        jScrollPane1.setViewportView(jTextArea1);
        org.openide.awt.Mnemonics.setLocalizedText(selectButton, org.openide.util.NbBundle.getMessage(LocalFilesPanel.class, "LocalFilesPanel.selectButton.text")); // NOI18N
        selectButton.setToolTipText(org.openide.util.NbBundle.getMessage(LocalFilesPanel.class, "LocalFilesPanel.selectButton.toolTipText")); // NOI18N
        selectButton.setActionCommand(org.openide.util.NbBundle.getMessage(LocalFilesPanel.class, "LocalFilesPanel.selectButton.actionCommand")); // NOI18N
        selectButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                selectButtonActionPerformed(evt);
            }
        });
        org.openide.awt.Mnemonics.setLocalizedText(infoLabel, org.openide.util.NbBundle.getMessage(LocalFilesPanel.class, "LocalFilesPanel.infoLabel.text")); // NOI18N
        org.openide.awt.Mnemonics.setLocalizedText(clearButton, org.openide.util.NbBundle.getMessage(LocalFilesPanel.class, "LocalFilesPanel.clearButton.text")); // NOI18N
        clearButton.setToolTipText(org.openide.util.NbBundle.getMessage(LocalFilesPanel.class, "LocalFilesPanel.clearButton.toolTipText")); // NOI18N
        clearButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                clearButtonActionPerformed(evt);
            }
        });
        selectedPaths.setEditable(false);
        selectedPaths.setColumns(20);
        selectedPaths.setRows(5);
        selectedPaths.setToolTipText(org.openide.util.NbBundle.getMessage(LocalFilesPanel.class, "LocalFilesPanel.selectedPaths.toolTipText")); // NOI18N
        jScrollPane2.setViewportView(selectedPaths);
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addComponent(infoLabel)
                .addGap(0, 0, Short.MAX_VALUE))
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 353, Short.MAX_VALUE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                    .addComponent(selectButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(clearButton, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addGap(2, 2, 2))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addComponent(infoLabel)
                .addGap(5, 5, 5)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(selectButton)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 17, Short.MAX_VALUE)
                        .addComponent(clearButton))
                    .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE))
                .addGap(0, 0, 0))
        );
    }// </editor-fold>//GEN-END:initComponents

    private void selectButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_selectButtonActionPerformed
        int returnVal = localFileChooser.showOpenDialog(this);
        if (returnVal == JFileChooser.APPROVE_OPTION) {
            File[] files = localFileChooser.getSelectedFiles();
            for (File f : files) {
                currentFiles.add(f);
            }
            //update label
            StringBuilder allPaths = new StringBuilder();
            for (File f : currentFiles) {
                allPaths.append(f.getAbsolutePath()).append("\n");
            }
            this.selectedPaths.setText(allPaths.toString());
            this.selectedPaths.setToolTipText(allPaths.toString());
        }
        enableNext = !currentFiles.isEmpty();
        try {
            // null-safe: accessor creates the support object if needed
            getPropertyChangeSupport().firePropertyChange(DataSourceProcessor.DSP_PANEL_EVENT.UPDATE_UI.toString(), false, true);
        } catch (Exception e) {
            logger.log(Level.SEVERE, "LocalFilesPanel listener threw exception", e); //NON-NLS
            MessageNotifyUtil.Notify.show(NbBundle.getMessage(this.getClass(), "LocalFilesPanel.moduleErr"),
                    NbBundle.getMessage(this.getClass(), "LocalFilesPanel.moduleErr.msg"),
                    MessageNotifyUtil.MessageType.ERROR);
        }
    }//GEN-LAST:event_selectButtonActionPerformed

    private void clearButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_clearButtonActionPerformed
        reset();
    }//GEN-LAST:event_clearButtonActionPerformed

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton clearButton;
    private javax.swing.JLabel infoLabel;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JScrollPane jScrollPane2;
    private javax.swing.JTextArea jTextArea1;
    private javax.swing.JFileChooser localFileChooser;
    private javax.swing.JButton selectButton;
    private javax.swing.JTextArea selectedPaths;
    // End of variables declaration//GEN-END:variables
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
import org.elasticsearch.index.search.geo.IndexedGeoBoundingBoxQuery;
import java.io.IOException;
import java.util.Objects;
import java.util.Optional;
/**
* Creates a Lucene query that will filter for all documents that lie within the specified
* bounding box.
*
* This query can only operate on fields of type geo_point that have latitude and longitude
* enabled.
* */
public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBoundingBoxQueryBuilder> {
    public static final String NAME = "geo_bounding_box";
    /** Names this query is known by in the DSL; "geo_bbox" is the deprecated alias. */
    public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME, "geo_bbox");
    /** Default type for executing this query (memory as of this writing). */
    public static final GeoExecType DEFAULT_TYPE = GeoExecType.MEMORY;
    /**
     * The default value for ignore_unmapped.
     */
    public static final boolean DEFAULT_IGNORE_UNMAPPED = false;
    private static final ParseField TYPE_FIELD = new ParseField("type");
    private static final ParseField VALIDATION_METHOD_FIELD = new ParseField("validation_method");
    private static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize")
            .withAllDeprecated("use field validation_method instead");
    private static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed")
            .withAllDeprecated("use field validation_method instead");
    private static final ParseField FIELD_FIELD = new ParseField("field");
    private static final ParseField TOP_FIELD = new ParseField("top");
    private static final ParseField BOTTOM_FIELD = new ParseField("bottom");
    private static final ParseField LEFT_FIELD = new ParseField("left");
    private static final ParseField RIGHT_FIELD = new ParseField("right");
    private static final ParseField TOP_LEFT_FIELD = new ParseField("top_left");
    private static final ParseField BOTTOM_RIGHT_FIELD = new ParseField("bottom_right");
    private static final ParseField TOP_RIGHT_FIELD = new ParseField("top_right");
    private static final ParseField BOTTOM_LEFT_FIELD = new ParseField("bottom_left");
    private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped");
    /** Name of field holding geo coordinates to compute the bounding box on.*/
    private final String fieldName;
    /** Top left corner coordinates of bounding box. */
    private GeoPoint topLeft = new GeoPoint(Double.NaN, Double.NaN);
    /** Bottom right corner coordinates of bounding box.*/
    private GeoPoint bottomRight = new GeoPoint(Double.NaN, Double.NaN);
    /** How to deal with incorrect coordinates.*/
    private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT;
    /** How the query should be run. */
    private GeoExecType type = DEFAULT_TYPE;
    /** Whether an unmapped field yields a match-none query instead of an error. */
    private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
    /**
     * Create new bounding box query.
     * @param fieldName name of index field containing geo coordinates to operate on.
     * @throws IllegalArgumentException if {@code fieldName} is null or empty.
     * */
    public GeoBoundingBoxQueryBuilder(String fieldName) {
        // Fail fast on empty names too: the message has always promised "must not be
        // empty", but previously only null was rejected, deferring the failure to
        // query-shard time with a confusing "failed to find geo_point field []".
        if (fieldName == null || fieldName.isEmpty()) {
            throw new IllegalArgumentException("Field name must not be empty.");
        }
        this.fieldName = fieldName;
    }
    /**
     * Read from a stream.
     */
    public GeoBoundingBoxQueryBuilder(StreamInput in) throws IOException {
        super(in);
        // Order must mirror doWriteTo exactly.
        fieldName = in.readString();
        topLeft = in.readGeoPoint();
        bottomRight = in.readGeoPoint();
        type = GeoExecType.readFromStream(in);
        validationMethod = GeoValidationMethod.readFromStream(in);
        ignoreUnmapped = in.readBoolean();
    }
    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        // Order must mirror the StreamInput constructor exactly.
        out.writeString(fieldName);
        out.writeGeoPoint(topLeft);
        out.writeGeoPoint(bottomRight);
        type.writeTo(out);
        validationMethod.writeTo(out);
        out.writeBoolean(ignoreUnmapped);
    }
    /**
     * Adds top left point.
     * @param top The top latitude
     * @param left The left longitude
     * @param bottom The bottom latitude
     * @param right The right longitude
     * @throws IllegalArgumentException (unless ignore_malformed validation is active)
     *         when a coordinate is not a finite double, when top is below or equal
     *         to bottom, or when left equals right.
     */
    public GeoBoundingBoxQueryBuilder setCorners(double top, double left, double bottom, double right) {
        if (GeoValidationMethod.isIgnoreMalformed(validationMethod) == false) {
            if (Numbers.isValidDouble(top) == false) {
                throw new IllegalArgumentException("top latitude is invalid: " + top);
            }
            if (Numbers.isValidDouble(left) == false) {
                throw new IllegalArgumentException("left longitude is invalid: " + left);
            }
            if (Numbers.isValidDouble(bottom) == false) {
                throw new IllegalArgumentException("bottom latitude is invalid: " + bottom);
            }
            if (Numbers.isValidDouble(right) == false) {
                throw new IllegalArgumentException("right longitude is invalid: " + right);
            }
            // all corners are valid after above checks - make sure they are in the right relation
            if (top < bottom) {
                throw new IllegalArgumentException("top is below bottom corner: " +
                        top + " vs. " + bottom);
            } else if (top == bottom) {
                throw new IllegalArgumentException("top cannot be the same as bottom: " +
                        top + " == " + bottom);
            } else if (left == right) {
                throw new IllegalArgumentException("left cannot be the same as right: " +
                        left + " == " + right);
            }
            // we do not check longitudes as the query generation code can deal with flipped left/right values
        }
        topLeft.reset(top, left);
        bottomRight.reset(bottom, right);
        return this;
    }
    /**
     * Adds points.
     * @param topLeft topLeft point to add.
     * @param bottomRight bottomRight point to add.
     * */
    public GeoBoundingBoxQueryBuilder setCorners(GeoPoint topLeft, GeoPoint bottomRight) {
        return setCorners(topLeft.getLat(), topLeft.getLon(), bottomRight.getLat(), bottomRight.getLon());
    }
    /**
     * Adds points from a single geohash.
     * @param geohash The geohash for computing the bounding box.
     */
    public GeoBoundingBoxQueryBuilder setCorners(final String geohash) {
        // get the bounding box of the geohash and set topLeft and bottomRight
        Rectangle ghBBox = GeoHashUtils.bbox(geohash);
        return setCorners(new GeoPoint(ghBBox.maxLat, ghBBox.minLon), new GeoPoint(ghBBox.minLat, ghBBox.maxLon));
    }
    /**
     * Adds points.
     * @param topLeft topLeft point to add as geohash.
     * @param bottomRight bottomRight point to add as geohash.
     * */
    public GeoBoundingBoxQueryBuilder setCorners(String topLeft, String bottomRight) {
        return setCorners(GeoPoint.fromGeohash(topLeft), GeoPoint.fromGeohash(bottomRight));
    }
    /** Returns the top left corner of the bounding box. */
    public GeoPoint topLeft() {
        return topLeft;
    }
    /** Returns the bottom right corner of the bounding box. */
    public GeoPoint bottomRight() {
        return bottomRight;
    }
    /**
     * Adds corners in OGC standard bbox/ envelop format.
     *
     * @param bottomLeft bottom left corner of bounding box.
     * @param topRight top right corner of bounding box.
     */
    public GeoBoundingBoxQueryBuilder setCornersOGC(GeoPoint bottomLeft, GeoPoint topRight) {
        return setCorners(topRight.getLat(), bottomLeft.getLon(), bottomLeft.getLat(), topRight.getLon());
    }
    /**
     * Adds corners in OGC standard bbox/ envelop format.
     *
     * @param bottomLeft bottom left corner geohash.
     * @param topRight top right corner geohash.
     */
    public GeoBoundingBoxQueryBuilder setCornersOGC(String bottomLeft, String topRight) {
        return setCornersOGC(GeoPoint.fromGeohash(bottomLeft), GeoPoint.fromGeohash(topRight));
    }
    /**
     * Specify whether or not to ignore validation errors of bounding boxes.
     * Can only be set if coerce set to false, otherwise calling this
     * method has no effect.
     **/
    public GeoBoundingBoxQueryBuilder setValidationMethod(GeoValidationMethod method) {
        this.validationMethod = method;
        return this;
    }
    /**
     * Returns geo coordinate validation method to use.
     * */
    public GeoValidationMethod getValidationMethod() {
        return this.validationMethod;
    }
    /**
     * Sets the type of executing of the geo bounding box. Can be either `memory` or `indexed`. Defaults
     * to `memory`.
     * @throws IllegalArgumentException if {@code type} is null.
     */
    public GeoBoundingBoxQueryBuilder type(GeoExecType type) {
        if (type == null) {
            throw new IllegalArgumentException("Type is not allowed to be null.");
        }
        this.type = type;
        return this;
    }
    /**
     * For BWC: Parse type from type name.
     * */
    public GeoBoundingBoxQueryBuilder type(String type) {
        this.type = GeoExecType.fromString(type);
        return this;
    }
    /** Returns the execution type of the geo bounding box.*/
    public GeoExecType type() {
        return type;
    }
    /** Returns the name of the field to base the bounding box computation on. */
    public String fieldName() {
        return this.fieldName;
    }
    /**
     * Sets whether the query builder should ignore unmapped fields (and run a
     * {@link MatchNoDocsQuery} in place of this query) or throw an exception if
     * the field is unmapped.
     */
    public GeoBoundingBoxQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) {
        this.ignoreUnmapped = ignoreUnmapped;
        return this;
    }
    /**
     * Gets whether the query builder will ignore unmapped fields (and run a
     * {@link MatchNoDocsQuery} in place of this query) or throw an exception if
     * the field is unmapped.
     */
    public boolean ignoreUnmapped() {
        return ignoreUnmapped;
    }
    /**
     * Validates the configured corners, returning {@code null} when they are acceptable.
     * @param indexCreatedBeforeV2_0 skip validation entirely for pre-2.0 indexes (bwc).
     */
    QueryValidationException checkLatLon(boolean indexCreatedBeforeV2_0) {
        // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
        if (GeoValidationMethod.isIgnoreMalformed(validationMethod) == true || indexCreatedBeforeV2_0) {
            return null;
        }
        QueryValidationException validationException = null;
        // For everything post 2.0 validate latitude and longitude unless validation was explicitly turned off
        if (GeoUtils.isValidLatitude(topLeft.getLat()) == false) {
            validationException = addValidationError("top latitude is invalid: " + topLeft.getLat(),
                    validationException);
        }
        if (GeoUtils.isValidLongitude(topLeft.getLon()) == false) {
            validationException = addValidationError("left longitude is invalid: " + topLeft.getLon(),
                    validationException);
        }
        if (GeoUtils.isValidLatitude(bottomRight.getLat()) == false) {
            validationException = addValidationError("bottom latitude is invalid: " + bottomRight.getLat(),
                    validationException);
        }
        if (GeoUtils.isValidLongitude(bottomRight.getLon()) == false) {
            validationException = addValidationError("right longitude is invalid: " + bottomRight.getLon(),
                    validationException);
        }
        return validationException;
    }
    @Override
    public Query doToQuery(QueryShardContext context) {
        MappedFieldType fieldType = context.fieldMapper(fieldName);
        if (fieldType == null) {
            if (ignoreUnmapped) {
                return new MatchNoDocsQuery();
            } else {
                throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
            }
        }
        if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
            throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
        }
        QueryValidationException exception = checkLatLon(context.indexVersionCreated().before(Version.V_2_0_0));
        if (exception != null) {
            throw new QueryShardException(context, "couldn't validate latitude/ longitude values", exception);
        }
        // Work on copies so normalization does not mutate the builder's own corners.
        GeoPoint luceneTopLeft = new GeoPoint(topLeft);
        GeoPoint luceneBottomRight = new GeoPoint(bottomRight);
        final Version indexVersionCreated = context.indexVersionCreated();
        if (indexVersionCreated.onOrAfter(Version.V_2_2_0) || GeoValidationMethod.isCoerce(validationMethod)) {
            // Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for
            // the complete longitude range so need to set longitude to the complete longitude range
            double right = luceneBottomRight.getLon();
            double left = luceneTopLeft.getLon();
            boolean completeLonRange = ((right - left) % 360 == 0 && right > left);
            GeoUtils.normalizePoint(luceneTopLeft, true, !completeLonRange);
            GeoUtils.normalizePoint(luceneBottomRight, true, !completeLonRange);
            if (completeLonRange) {
                luceneTopLeft.resetLon(-180);
                luceneBottomRight.resetLon(180);
            }
        }
        if (indexVersionCreated.onOrAfter(Version.V_2_2_0)) {
            // if index created V_2_2 use (soon to be legacy) numeric encoding postings format
            // if index created V_2_3 > use prefix encoded postings format
            final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ?
                    GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX;
            return new GeoPointInBBoxQuery(fieldType.name(), encoding, luceneBottomRight.lat(), luceneTopLeft.lat(),
                    luceneTopLeft.lon(), luceneBottomRight.lon());
        }
        // Pre-2.2 indexes: honor the requested execution type.
        Query query;
        switch(type) {
            case INDEXED:
                GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
                query = IndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType);
                break;
            case MEMORY:
                IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
                query = new InMemoryGeoBoundingBoxQuery(luceneTopLeft, luceneBottomRight, indexFieldData);
                break;
            default:
                // Someone extended the type enum w/o adjusting this switch statement.
                throw new IllegalStateException("geo bounding box type [" + type + "] not supported.");
        }
        return query;
    }
    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
        builder.startObject(fieldName);
        // Corners are serialized as [lon, lat] arrays (GeoJSON-style coordinate order).
        builder.array(TOP_LEFT_FIELD.getPreferredName(), topLeft.getLon(), topLeft.getLat());
        builder.array(BOTTOM_RIGHT_FIELD.getPreferredName(), bottomRight.getLon(), bottomRight.getLat());
        builder.endObject();
        builder.field(VALIDATION_METHOD_FIELD.getPreferredName(), validationMethod);
        builder.field(TYPE_FIELD.getPreferredName(), type);
        builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped);
        printBoostAndQueryName(builder);
        builder.endObject();
    }
    /**
     * Parses a geo_bounding_box query from the query DSL. Accepts individual
     * top/bottom/left/right values, any pair of opposing corner objects, and the
     * deprecated coerce/ignore_malformed flags (superseded by validation_method).
     */
    public static Optional<GeoBoundingBoxQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
        XContentParser parser = parseContext.parser();
        String fieldName = null;
        double top = Double.NaN;
        double bottom = Double.NaN;
        double left = Double.NaN;
        double right = Double.NaN;
        float boost = AbstractQueryBuilder.DEFAULT_BOOST;
        String queryName = null;
        String currentFieldName = null;
        XContentParser.Token token;
        boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
        boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
        GeoValidationMethod validationMethod = null;
        boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
        GeoPoint sparse = new GeoPoint(); // scratch point reused while parsing corners
        String type = "memory";
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                // The nested object is keyed by the geo_point field name and holds the corners.
                fieldName = currentFieldName;
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                        token = parser.nextToken();
                        if (parseContext.isDeprecatedSetting(currentFieldName)) {
                            // skip
                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
                            fieldName = parser.text();
                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, TOP_FIELD)) {
                            top = parser.doubleValue();
                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, BOTTOM_FIELD)) {
                            bottom = parser.doubleValue();
                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, LEFT_FIELD)) {
                            left = parser.doubleValue();
                        } else if (parseContext.getParseFieldMatcher().match(currentFieldName, RIGHT_FIELD)) {
                            right = parser.doubleValue();
                        } else {
                            // Corner objects: any pair of opposing corners determines the box.
                            if (parseContext.getParseFieldMatcher().match(currentFieldName, TOP_LEFT_FIELD)) {
                                GeoUtils.parseGeoPoint(parser, sparse);
                                top = sparse.getLat();
                                left = sparse.getLon();
                            } else if (parseContext.getParseFieldMatcher().match(currentFieldName, BOTTOM_RIGHT_FIELD)) {
                                GeoUtils.parseGeoPoint(parser, sparse);
                                bottom = sparse.getLat();
                                right = sparse.getLon();
                            } else if (parseContext.getParseFieldMatcher().match(currentFieldName, TOP_RIGHT_FIELD)) {
                                GeoUtils.parseGeoPoint(parser, sparse);
                                top = sparse.getLat();
                                right = sparse.getLon();
                            } else if (parseContext.getParseFieldMatcher().match(currentFieldName, BOTTOM_LEFT_FIELD)) {
                                GeoUtils.parseGeoPoint(parser, sparse);
                                bottom = sparse.getLat();
                                left = sparse.getLon();
                            } else {
                                throw new ElasticsearchParseException("failed to parse [{}] query. unexpected field [{}]",
                                        QUERY_NAME_FIELD.getPreferredName(), currentFieldName);
                            }
                        }
                    } else {
                        throw new ElasticsearchParseException("failed to parse [{}] query. field name expected but [{}] found",
                                QUERY_NAME_FIELD.getPreferredName(), token);
                    }
                }
            } else if (token.isValue()) {
                if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
                    queryName = parser.text();
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
                    boost = parser.floatValue();
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
                    coerce = parser.booleanValue();
                    if (coerce) {
                        ignoreMalformed = true;
                    }
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALIDATION_METHOD_FIELD)) {
                    validationMethod = GeoValidationMethod.fromString(parser.text());
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) {
                    ignoreUnmapped = parser.booleanValue();
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
                    type = parser.text();
                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
                    ignoreMalformed = parser.booleanValue();
                } else {
                    throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. unexpected field [{}]",
                            QUERY_NAME_FIELD.getPreferredName(), currentFieldName);
                }
            }
        }
        final GeoPoint topLeft = sparse.reset(top, left); //just keep the object
        final GeoPoint bottomRight = new GeoPoint(bottom, right);
        GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(fieldName);
        builder.setCorners(topLeft, bottomRight);
        builder.queryName(queryName);
        builder.boost(boost);
        builder.type(GeoExecType.fromString(type));
        builder.ignoreUnmapped(ignoreUnmapped);
        if (validationMethod != null) {
            // ignore deprecated coerce/ignoreMalformed settings if validationMethod is set
            builder.setValidationMethod(validationMethod);
        } else {
            builder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
        }
        return Optional.of(builder);
    }
    @Override
    protected boolean doEquals(GeoBoundingBoxQueryBuilder other) {
        return Objects.equals(topLeft, other.topLeft) &&
                Objects.equals(bottomRight, other.bottomRight) &&
                Objects.equals(type, other.type) &&
                Objects.equals(validationMethod, other.validationMethod) &&
                Objects.equals(fieldName, other.fieldName) &&
                Objects.equals(ignoreUnmapped, other.ignoreUnmapped);
    }
    @Override
    protected int doHashCode() {
        return Objects.hash(topLeft, bottomRight, type, validationMethod, fieldName, ignoreUnmapped);
    }
    @Override
    public String getWriteableName() {
        return NAME;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.validate;
import com.google.common.base.Charsets;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.cache.filter.AutoFilterCachingPolicy;
import org.elasticsearch.index.cache.filter.FilterCacheModule;
import org.elasticsearch.index.cache.filter.none.NoneFilterCache;
import org.elasticsearch.index.cache.filter.weighted.WeightedFilterCache;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.elasticsearch.test.ElasticsearchIntegrationTest.Scope;
import org.hamcrest.Matcher;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
/**
*
*/
@ClusterScope(randomDynamicTemplates = false, scope = Scope.SUITE)
public class SimpleValidateQueryTests extends ElasticsearchIntegrationTest {
    // Suite-wide flag: whether nodes run with an aggressive filter cache.
    // Chosen randomly once in enableFilterCache() and read by nodeSettings()
    // and filter(); Boolean (not boolean) so "unset" is distinguishable.
    static Boolean hasFilterCache;
    @BeforeClass
    public static void enableFilterCache() {
        // Decide once per suite whether the filter cache is enabled; the assert
        // guards against double initialization across test class reuse.
        assert hasFilterCache == null;
        hasFilterCache = randomBoolean();
    }
@Override
protected Settings nodeSettings(int nodeOrdinal) {
ImmutableSettings.Builder builder = ImmutableSettings.settingsBuilder().put(super.nodeSettings(nodeOrdinal));
if (hasFilterCache) {
// cache everything
builder.put(FilterCacheModule.FilterCacheSettings.FILTER_CACHE_TYPE, WeightedFilterCache.class)
.put(AutoFilterCachingPolicy.AGGRESSIVE_CACHING_SETTINGS);
} else {
builder.put(FilterCacheModule.FilterCacheSettings.FILTER_CACHE_TYPE, NoneFilterCache.class);
}
return builder.build();
}
    @Test
    public void simpleValidateQuery() throws Exception {
        createIndex("test");
        ensureGreen();
        // Map "foo" as a string and "bar" as an integer so type-mismatch queries fail validation.
        client().admin().indices().preparePutMapping("test").setType("type1")
                .setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                        .startObject("foo").field("type", "string").endObject()
                        .startObject("bar").field("type", "integer").endObject()
                        .endObject().endObject().endObject())
                .execute().actionGet();
        refresh();
        // Raw non-JSON bytes are not parseable as a query -> invalid.
        assertThat(client().admin().indices().prepareValidateQuery("test").setSource("foo".getBytes(Charsets.UTF_8)).execute().actionGet().isValid(), equalTo(false));
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_id:1")).execute().actionGet().isValid(), equalTo(true));
        // Malformed query_string syntax (stray colon) -> invalid.
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("_i:d:1")).execute().actionGet().isValid(), equalTo(false));
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("foo:1")).execute().actionGet().isValid(), equalTo(true));
        // "hey" cannot be parsed as the mapped integer type of "bar" -> invalid.
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("bar:hey")).execute().actionGet().isValid(), equalTo(false));
        // Unmapped fields are allowed in query_string -> still valid.
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("nonexistent:hello")).execute().actionGet().isValid(), equalTo(true));
        // Dangling boolean operator -> syntax error -> invalid.
        assertThat(client().admin().indices().prepareValidateQuery("test").setQuery(QueryBuilders.queryStringQuery("foo:1 AND")).execute().actionGet().isValid(), equalTo(false));
    }
private static String filter(String uncachedFilter) {
String filter = uncachedFilter;
if (hasFilterCache) {
filter = "cache(" + filter + ")";
}
return filter;
}
    @Test
    public void explainValidateQuery() throws Exception {
        createIndex("test");
        ensureGreen();
        // Mapping with string, integer, analyzed-string and geo_point fields for the geo assertions below.
        client().admin().indices().preparePutMapping("test").setType("type1")
                .setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                        .startObject("foo").field("type", "string").endObject()
                        .startObject("bar").field("type", "integer").endObject()
                        .startObject("baz").field("type", "string").field("analyzer", "snowball").endObject()
                        .startObject("pin").startObject("properties").startObject("location").field("type", "geo_point").endObject().endObject().endObject()
                        .endObject().endObject().endObject())
                .execute().actionGet();
        // Child type with a _parent link to type1, used by the has_child assertion.
        client().admin().indices().preparePutMapping("test").setType("child-type")
                .setSource(XContentFactory.jsonBuilder().startObject().startObject("child-type")
                        .startObject("_parent").field("type", "type1").endObject()
                        .startObject("properties")
                        .startObject("foo").field("type", "string").endObject()
                        .endObject()
                        .endObject().endObject())
                .execute().actionGet();
        refresh();
        ValidateQueryResponse response;
        // Unparseable source: invalid, parse error reported, no explanation produced.
        response = client().admin().indices().prepareValidateQuery("test")
                .setSource("foo".getBytes(Charsets.UTF_8))
                .setExplain(true)
                .execute().actionGet();
        assertThat(response.isValid(), equalTo(false));
        assertThat(response.getQueryExplanation().size(), equalTo(1));
        assertThat(response.getQueryExplanation().get(0).getError(), containsString("Failed to parse"));
        assertThat(response.getQueryExplanation().get(0).getExplanation(), nullValue());
        // Every explanation below is wrapped in a filter on the mapping type;
        // filter(...) accounts for the cache(...) wrapper when caching is on.
        final String typeFilter = filter("_type:type1");
        assertExplanation(QueryBuilders.queryStringQuery("_id:1"), equalTo("filtered(ConstantScore(_uid:type1#1))->" + typeFilter));
        assertExplanation(QueryBuilders.idsQuery("type1").addIds("1").addIds("2"),
                equalTo("filtered(ConstantScore(_uid:type1#1 _uid:type1#2))->" + typeFilter));
        assertExplanation(QueryBuilders.queryStringQuery("foo"), equalTo("filtered(_all:foo)->" + typeFilter));
        // or-filter: term on the integer field rewrites to a numeric range.
        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.termQuery("foo", "1"),
                FilterBuilders.orFilter(
                        FilterBuilders.termFilter("bar", "2"),
                        FilterBuilders.termFilter("baz", "3")
                )
        ), equalTo("filtered(filtered(foo:1)->" + filter(filter("bar:[2 TO 2]") + " " + filter("baz:3")) + ")->" + typeFilter));
        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.termQuery("foo", "1"),
                FilterBuilders.orFilter(
                        FilterBuilders.termFilter("bar", "2")
                )
        ), equalTo("filtered(filtered(foo:1)->" + filter(filter("bar:[2 TO 2]")) + ")->" + typeFilter));
        // Geo filter explanations.
        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.matchAllQuery(),
                FilterBuilders.geoPolygonFilter("pin.location")
                        .addPoint(40, -70)
                        .addPoint(30, -80)
                        .addPoint(20, -90)
                        .addPoint(40, -70) // closing polygon
        ), equalTo("filtered(ConstantScore(" + filter("GeoPolygonFilter(pin.location, [[40.0, -70.0], [30.0, -80.0], [20.0, -90.0], [40.0, -70.0]]))") + ")->" + typeFilter));
        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.geoBoundingBoxFilter("pin.location")
                .topLeft(40, -80)
                .bottomRight(20, -70)
        ), equalTo("filtered(ConstantScore(" + filter("GeoBoundingBoxFilter(pin.location, [40.0, -80.0], [20.0, -70.0]))") + ")->" + typeFilter));
        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.geoDistanceFilter("pin.location")
                .lat(10).lon(20).distance(15, DistanceUnit.DEFAULT).geoDistance(GeoDistance.PLANE)
        ), equalTo("filtered(ConstantScore(" + filter("GeoDistanceFilter(pin.location, PLANE, 15.0, 10.0, 20.0))") + ")->" + typeFilter));
        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.geoDistanceFilter("pin.location")
                .lat(10).lon(20).distance(15, DistanceUnit.DEFAULT).geoDistance(GeoDistance.PLANE)
        ), equalTo("filtered(ConstantScore(" + filter("GeoDistanceFilter(pin.location, PLANE, 15.0, 10.0, 20.0))") + ")->" + typeFilter));
        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.geoDistanceRangeFilter("pin.location")
                .lat(10).lon(20).from("15m").to("25m").geoDistance(GeoDistance.PLANE)
        ), equalTo("filtered(ConstantScore(" + filter("GeoDistanceRangeFilter(pin.location, PLANE, [15.0 - 25.0], 10.0, 20.0))") + ")->" + typeFilter));
        // Distances given in miles are converted to the default unit in the explanation.
        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.geoDistanceRangeFilter("pin.location")
                .lat(10).lon(20).from("15miles").to("25miles").geoDistance(GeoDistance.PLANE)
        ), equalTo("filtered(ConstantScore(" + filter("GeoDistanceRangeFilter(pin.location, PLANE, [" + DistanceUnit.DEFAULT.convert(15.0, DistanceUnit.MILES) + " - " + DistanceUnit.DEFAULT.convert(25.0, DistanceUnit.MILES) + "], 10.0, 20.0))") + ")->" + typeFilter));
        // and-filter renders clauses with a leading '+'.
        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.termQuery("foo", "1"),
                FilterBuilders.andFilter(
                        FilterBuilders.termFilter("bar", "2"),
                        FilterBuilders.termFilter("baz", "3")
                )
        ), equalTo("filtered(filtered(foo:1)->" + filter("+" + filter("bar:[2 TO 2]") + " +" + filter("baz:3")) + ")->" + typeFilter));
        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.termsFilter("foo", "1", "2", "3")),
                equalTo("filtered(ConstantScore(" + filter("foo:1 foo:2 foo:3") + "))->" + typeFilter));
        assertExplanation(QueryBuilders.constantScoreQuery(FilterBuilders.notFilter(FilterBuilders.termFilter("foo", "bar"))),
                equalTo("filtered(ConstantScore(" + filter("NotFilter(" + filter("foo:bar") + ")") + "))->" + typeFilter));
        // has_child is wrapped in CustomQueryWrappingFilter and filtered on the child type.
        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.termQuery("foo", "1"),
                FilterBuilders.hasChildFilter(
                        "child-type",
                        QueryBuilders.matchQuery("foo", "1")
                )
        ), equalTo("filtered(filtered(foo:1)->CustomQueryWrappingFilter(child_filter[child-type/type1](filtered(foo:1)->" + filter("_type:child-type") + ")))->" + typeFilter));
        assertExplanation(QueryBuilders.filteredQuery(
                QueryBuilders.termQuery("foo", "1"),
                FilterBuilders.scriptFilter("true")
        ), equalTo("filtered(filtered(foo:1)->" + filter("ScriptFilter(true)") + ")->" + typeFilter));
    }
    @Test
    public void explainValidateQueryTwoNodes() throws IOException {
        createIndex("test");
        ensureGreen();
        client().admin().indices().preparePutMapping("test").setType("type1")
                .setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                        .startObject("foo").field("type", "string").endObject()
                        .startObject("bar").field("type", "integer").endObject()
                        .startObject("baz").field("type", "string").field("analyzer", "snowball").endObject()
                        .startObject("pin").startObject("properties").startObject("location").field("type", "geo_point").endObject().endObject().endObject()
                        .endObject().endObject().endObject())
                .execute().actionGet();
        refresh();
        // Run the same validation through every node's client: results must be
        // identical regardless of which node coordinates the request.
        for (Client client : internalCluster()) {
            // Unparseable source -> invalid with a parse error on every node.
            ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
                    .setSource("foo".getBytes(Charsets.UTF_8))
                    .setExplain(true)
                    .execute().actionGet();
            assertThat(response.isValid(), equalTo(false));
            assertThat(response.getQueryExplanation().size(), equalTo(1));
            assertThat(response.getQueryExplanation().get(0).getError(), containsString("Failed to parse"));
            assertThat(response.getQueryExplanation().get(0).getExplanation(), nullValue());
        }
        for (Client client : internalCluster()) {
            // Valid query -> same _all:foo explanation from every node.
            ValidateQueryResponse response = client.admin().indices().prepareValidateQuery("test")
                    .setQuery(QueryBuilders.queryStringQuery("foo"))
                    .setExplain(true)
                    .execute().actionGet();
            assertThat(response.isValid(), equalTo(true));
            assertThat(response.getQueryExplanation().size(), equalTo(1));
            assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("_all:foo"));
            assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
        }
    }
@Test //https://github.com/elasticsearch/elasticsearch/issues/3629
// Date math in a query_string range ("now-2M/d", "now/d") must be resolved
// during validation: the explanation should contain the rounded millisecond
// bounds rather than the raw date-math expressions.
public void explainDateRangeInQueryString() {
assertAcked(prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder()
.put(indexSettings())
.put("index.number_of_shards", 1)));
String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));
client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();
refresh();
ValidateQueryResponse response = client().admin().indices().prepareValidateQuery()
.setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).setExplain(true).get();
assertNoFailures(response);
assertThat(response.getQueryExplanation().size(), equalTo(1));
assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
// Expected bounds: "now-2M/d" rounds down to the start of the day two months
// ago; "now/d" is inclusive through the last millisecond of today.
DateTime twoMonthsAgo = new DateTime(DateTimeZone.UTC).minusMonths(2).withTimeAtStartOfDay();
DateTime now = new DateTime(DateTimeZone.UTC).plusDays(1).withTimeAtStartOfDay().minusMillis(1);
assertThat(response.getQueryExplanation().get(0).getExplanation(),
equalTo("past:[" + twoMonthsAgo.getMillis() + " TO " + now.getMillis() + "]"));
assertThat(response.isValid(), equalTo(true));
}
// Validating a query against a cluster that has no indices at all must fail
// with IndexMissingException (checked via the expected attribute).
@Test(expected = IndexMissingException.class)
public void validateEmptyCluster() {
client().admin().indices().prepareValidateQuery().get();
}
@Test
// With no explicit query set, validation defaults to match_all and should
// explain as a constant-score match-all query for the single test index.
public void explainNoQuery() {
createIndex("test");
ensureGreen();
ValidateQueryResponse validateQueryResponse = client().admin().indices().prepareValidateQuery().setExplain(true).get();
assertThat(validateQueryResponse.isValid(), equalTo(true));
assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
assertThat(validateQueryResponse.getQueryExplanation().get(0).getIndex(), equalTo("test"));
assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), equalTo("ConstantScore(*:*)"));
}
@Test
// Validating through a filtered alias must fold the alias filter into the
// explanation: the term filter on "field" shows up even for match_all, and
// the reported index is the concrete index behind the alias.
public void explainFilteredAlias() {
assertAcked(prepareCreate("test")
.addMapping("test", "field", "type=string")
.addAlias(new Alias("alias").filter(FilterBuilders.termFilter("field", "value1"))));
ensureGreen();
ValidateQueryResponse validateQueryResponse = client().admin().indices().prepareValidateQuery("alias")
.setQuery(QueryBuilders.matchAllQuery()).setExplain(true).get();
assertThat(validateQueryResponse.isValid(), equalTo(true));
assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
assertThat(validateQueryResponse.getQueryExplanation().get(0).getIndex(), equalTo("test"));
assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:value1"));
}
@Test
// match_phrase_prefix explanations with a synonym analyzer ("one" <-> "two"):
// the last term is expanded as a prefix, and synonym-stacked positions show
// up as parenthesized alternatives in the explained phrase query.
public void explainMatchPhrasePrefix() {
assertAcked(prepareCreate("test").setSettings(
ImmutableSettings.settingsBuilder().put(indexSettings())
.put("index.analysis.filter.syns.type", "synonym")
.putArray("index.analysis.filter.syns.synonyms", "one,two")
.put("index.analysis.analyzer.syns.tokenizer", "standard")
.putArray("index.analysis.analyzer.syns.filter", "syns")
).addMapping("test", "field","type=string,analyzer=syns"));
ensureGreen();
// Single term: explained as a bare prefix.
ValidateQueryResponse validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
.setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo")).setExplain(true).get();
assertThat(validateQueryResponse.isValid(), equalTo(true));
assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo*\""));
// Two terms: only the trailing term gets the prefix expansion.
validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
.setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo bar")).setExplain(true).get();
assertThat(validateQueryResponse.isValid(), equalTo(true));
assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo bar*\""));
// Stacked tokens
validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
.setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "one bar")).setExplain(true).get();
assertThat(validateQueryResponse.isValid(), equalTo(true));
assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"(one two) bar*\""));
// Synonyms in trailing position: every alternative is prefix-expanded.
validateQueryResponse = client().admin().indices().prepareValidateQuery("test")
.setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "foo one")).setExplain(true).get();
assertThat(validateQueryResponse.isValid(), equalTo(true));
assertThat(validateQueryResponse.getQueryExplanation().size(), equalTo(1));
assertThat(validateQueryResponse.getQueryExplanation().get(0).getExplanation(), containsString("field:\"foo (one* two*)\""));
}
@Test
// A source body with an unknown top-level property ("foo") BEFORE the query
// element must be rejected as invalid.
public void irrelevantPropertiesBeforeQuery() throws IOException {
createIndex("test");
ensureGreen();
refresh();
assertThat(client().admin().indices().prepareValidateQuery("test").setSource(new BytesArray("{\"foo\": \"bar\", \"query\": {\"term\" : { \"user\" : \"kimchy\" }}}")).get().isValid(), equalTo(false));
}
@Test
// Same as above, but with the unknown property AFTER the query element —
// trailing junk must also make the request invalid.
public void irrelevantPropertiesAfterQuery() throws IOException {
createIndex("test");
ensureGreen();
refresh();
assertThat(client().admin().indices().prepareValidateQuery("test").setSource(new BytesArray("{\"query\": {\"term\" : { \"user\" : \"kimchy\" }}, \"foo\": \"bar\"}")).get().isValid(), equalTo(false));
}
/**
 * Runs an explain-enabled validate-query request for {@code queryBuilder}
 * against the "test" index (type "type1") and asserts that exactly one
 * explanation comes back with no error, that its text satisfies
 * {@code matcher}, and that the overall response is valid.
 */
private void assertExplanation(QueryBuilder queryBuilder, Matcher<String> matcher) {
    ValidateQueryResponse validateResponse = client().admin().indices()
            .prepareValidateQuery("test")
            .setTypes("type1")
            .setQuery(queryBuilder)
            .setExplain(true)
            .execute().actionGet();
    assertThat(validateResponse.getQueryExplanation().size(), equalTo(1));
    assertThat(validateResponse.getQueryExplanation().get(0).getError(), nullValue());
    assertThat(validateResponse.getQueryExplanation().get(0).getExplanation(), matcher);
    assertThat(validateResponse.isValid(), equalTo(true));
}
}
| |
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.Iterators.advance;
import static com.google.common.collect.Iterators.get;
import static com.google.common.collect.Iterators.getLast;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.testing.IteratorFeature.MODIFIABLE;
import static com.google.common.collect.testing.IteratorFeature.UNMODIFIABLE;
import static com.google.common.truth.Truth.assertThat;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.testing.IteratorFeature;
import com.google.common.collect.testing.IteratorTester;
import com.google.common.collect.testing.ListTestSuiteBuilder;
import com.google.common.collect.testing.TestStringListGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.ListFeature;
import com.google.common.testing.NullPointerTester;
import junit.framework.AssertionFailedError;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.NoSuchElementException;
import java.util.RandomAccess;
import java.util.Set;
import java.util.Vector;
/**
* Unit test for {@code Iterators}.
*
* @author Kevin Bourrillion
*/
@GwtCompatible(emulated = true)
public class IteratorsTest extends TestCase {
@GwtIncompatible("suite")
@SuppressUnderAndroid
// Assembles the JUnit suite: the generated removeAll/retainAll list tests
// plus every test method declared directly on this class.
public static Test suite() {
TestSuite suite = new TestSuite(IteratorsTest.class.getSimpleName());
suite.addTest(testsForRemoveAllAndRetainAll());
suite.addTestSuite(IteratorsTest.class);
return suite;
}
// Iterators.emptyIterator(): reports no elements; next() and remove() fail.
public void testEmptyIterator() {
Iterator<String> iterator = Iterators.emptyIterator();
assertFalse(iterator.hasNext());
try {
iterator.next();
fail("no exception thrown");
} catch (NoSuchElementException expected) {
}
try {
iterator.remove();
fail("no exception thrown");
} catch (UnsupportedOperationException expected) {
}
}
// Iterators.emptyListIterator(): empty in both directions, index cursors at
// their initial positions (0 / -1), and every traversal or mutation method
// must throw.
public void testEmptyListIterator() {
ListIterator<String> iterator = Iterators.emptyListIterator();
assertFalse(iterator.hasNext());
assertFalse(iterator.hasPrevious());
assertEquals(0, iterator.nextIndex());
assertEquals(-1, iterator.previousIndex());
try {
iterator.next();
fail("no exception thrown");
} catch (NoSuchElementException expected) {
}
try {
iterator.previous();
fail("no exception thrown");
} catch (NoSuchElementException expected) {
}
try {
iterator.remove();
fail("no exception thrown");
} catch (UnsupportedOperationException expected) {
}
try {
iterator.set("a");
fail("no exception thrown");
} catch (UnsupportedOperationException expected) {
}
try {
iterator.add("a");
fail("no exception thrown");
} catch (UnsupportedOperationException expected) {
}
}
// Iterators.emptyModifiableIterator(): still empty, but remove() throws
// IllegalStateException (no preceding next()) instead of
// UnsupportedOperationException — it behaves like a modifiable iterator.
public void testEmptyModifiableIterator() {
Iterator<String> iterator = Iterators.emptyModifiableIterator();
assertFalse(iterator.hasNext());
try {
iterator.next();
fail("Expected NoSuchElementException");
} catch (NoSuchElementException expected) {
}
try {
iterator.remove();
fail("Expected IllegalStateException");
} catch (IllegalStateException expected) {
}
}
// Iterators.size: an empty iterator reports zero elements.
public void testSize0() {
    Iterator<String> empty = Iterators.emptyIterator();
    assertEquals(0, Iterators.size(empty));
}

// Iterators.size: a one-element iterator reports exactly one element.
public void testSize1() {
    Iterator<Integer> single = Collections.singleton(0).iterator();
    assertEquals(1, Iterators.size(single));
}

// Iterators.size counts only the remaining elements, not ones already
// consumed before the call.
public void testSize_partiallyConsumed() {
    Iterator<Integer> partlyConsumed = asList(1, 2, 3, 4, 5).iterator();
    partlyConsumed.next();
    partlyConsumed.next();
    assertEquals(3, Iterators.size(partlyConsumed));
}

// Iterators.contains: finds a non-null element that is present.
public void test_contains_nonnull_yes() {
    Iterator<String> elements = asList("a", null, "b").iterator();
    assertTrue(Iterators.contains(elements, "b"));
}

// Iterators.contains: reports absence of a non-null element.
public void test_contains_nonnull_no() {
    Iterator<String> elements = asList("a", "b").iterator();
    assertFalse(Iterators.contains(elements, "c"));
}

// Iterators.contains: finds a null element when one is present.
public void test_contains_null_yes() {
    Iterator<String> elements = asList("a", null, "b").iterator();
    assertTrue(Iterators.contains(elements, null));
}

// Iterators.contains: reports absence of null when none is present.
public void test_contains_null_no() {
    Iterator<String> elements = asList("a", "b").iterator();
    assertFalse(Iterators.contains(elements, null));
}
// getOnlyElement (no default): succeeds on a singleton iterator.
public void testGetOnlyElement_noDefault_valid() {
Iterator<String> iterator = Collections.singletonList("foo").iterator();
assertEquals("foo", Iterators.getOnlyElement(iterator));
}
// getOnlyElement (no default): empty input throws NoSuchElementException.
public void testGetOnlyElement_noDefault_empty() {
Iterator<String> iterator = Iterators.emptyIterator();
try {
Iterators.getOnlyElement(iterator);
fail();
} catch (NoSuchElementException expected) {
}
}
// getOnlyElement with 2-5 elements: the error message lists every element.
public void testGetOnlyElement_noDefault_moreThanOneLessThanFiveElements() {
Iterator<String> iterator = asList("one", "two").iterator();
try {
Iterators.getOnlyElement(iterator);
fail();
} catch (IllegalArgumentException expected) {
assertEquals("expected one element but was: <one, two>",
expected.getMessage());
}
}
// getOnlyElement with exactly five elements: all five still listed verbatim.
public void testGetOnlyElement_noDefault_fiveElements() {
Iterator<String> iterator =
asList("one", "two", "three", "four", "five").iterator();
try {
Iterators.getOnlyElement(iterator);
fail();
} catch (IllegalArgumentException expected) {
assertEquals("expected one element but was: "
+ "<one, two, three, four, five>",
expected.getMessage());
}
}
// getOnlyElement with more than five elements: the message truncates the
// listing after five entries with "...".
public void testGetOnlyElement_noDefault_moreThanFiveElements() {
Iterator<String> iterator =
asList("one", "two", "three", "four", "five", "six").iterator();
try {
Iterators.getOnlyElement(iterator);
fail();
} catch (IllegalArgumentException expected) {
assertEquals("expected one element but was: "
+ "<one, two, three, four, five, ...>",
expected.getMessage());
}
}
// getOnlyElement with default: a singleton wins over the default.
public void testGetOnlyElement_withDefault_singleton() {
Iterator<String> iterator = Collections.singletonList("foo").iterator();
assertEquals("foo", Iterators.getOnlyElement(iterator, "bar"));
}
// getOnlyElement with default: empty input yields the default.
public void testGetOnlyElement_withDefault_empty() {
Iterator<String> iterator = Iterators.emptyIterator();
assertEquals("bar", Iterators.getOnlyElement(iterator, "bar"));
}
// getOnlyElement with default: a null default is returned as-is when empty.
public void testGetOnlyElement_withDefault_empty_null() {
Iterator<String> iterator = Iterators.emptyIterator();
assertNull(Iterators.getOnlyElement(iterator, null));
}
// getOnlyElement with default: two elements still throw — the default only
// applies to the empty case, not the too-many case.
public void testGetOnlyElement_withDefault_two() {
Iterator<String> iterator = asList("foo", "bar").iterator();
try {
Iterators.getOnlyElement(iterator, "x");
fail();
} catch (IllegalArgumentException expected) {
assertEquals("expected one element but was: <foo, bar>",
expected.getMessage());
}
}
@GwtIncompatible("Iterators.toArray(Iterator, Class)")
// toArray of an empty iterator yields a zero-length typed array.
public void testToArrayEmpty() {
Iterator<String> iterator = Collections.<String>emptyList().iterator();
String[] array = Iterators.toArray(iterator, String.class);
assertTrue(Arrays.equals(new String[0], array));
}
@GwtIncompatible("Iterators.toArray(Iterator, Class)")
// toArray of a singleton iterator yields a one-element array.
public void testToArraySingleton() {
Iterator<String> iterator = Collections.singletonList("a").iterator();
String[] array = Iterators.toArray(iterator, String.class);
assertTrue(Arrays.equals(new String[] { "a" }, array));
}
@GwtIncompatible("Iterators.toArray(Iterator, Class)")
// toArray preserves the element order of the source iterator.
public void testToArray() {
String[] sourceArray = new String[] {"a", "b", "c"};
Iterator<String> iterator = asList(sourceArray).iterator();
String[] newArray = Iterators.toArray(iterator, String.class);
assertTrue(Arrays.equals(sourceArray, newArray));
}
// Iterators.filter keeps only elements matching the predicate.
public void testFilterSimple() {
Iterator<String> unfiltered = Lists.newArrayList("foo", "bar").iterator();
Iterator<String> filtered = Iterators.filter(unfiltered,
Predicates.equalTo("foo"));
List<String> expected = Collections.singletonList("foo");
List<String> actual = Lists.newArrayList(filtered);
assertEquals(expected, actual);
}
// An always-false predicate produces an empty iterator.
public void testFilterNoMatch() {
Iterator<String> unfiltered = Lists.newArrayList("foo", "bar").iterator();
Iterator<String> filtered = Iterators.filter(unfiltered,
Predicates.alwaysFalse());
List<String> expected = Collections.emptyList();
List<String> actual = Lists.newArrayList(filtered);
assertEquals(expected, actual);
}
// An always-true predicate passes every element through unchanged.
public void testFilterMatchAll() {
Iterator<String> unfiltered = Lists.newArrayList("foo", "bar").iterator();
Iterator<String> filtered = Iterators.filter(unfiltered,
Predicates.alwaysTrue());
List<String> expected = Lists.newArrayList("foo", "bar");
List<String> actual = Lists.newArrayList(filtered);
assertEquals(expected, actual);
}
// Filtering an empty iterator must never invoke the predicate (laziness).
public void testFilterNothing() {
Iterator<String> unfiltered = Collections.<String>emptyList().iterator();
Iterator<String> filtered = Iterators.filter(unfiltered,
new Predicate<String>() {
@Override
public boolean apply(String s) {
throw new AssertionFailedError("Should never be evaluated");
}
});
List<String> expected = Collections.emptyList();
List<String> actual = Lists.newArrayList(filtered);
assertEquals(expected, actual);
}
@GwtIncompatible("unreasonably slow")
// Exhaustive IteratorTester run over filter: the even elements of 1..5 must
// come out in order across every traversal pattern.
public void testFilterUsingIteratorTester() {
final List<Integer> list = asList(1, 2, 3, 4, 5);
final Predicate<Integer> isEven = new Predicate<Integer>() {
@Override
public boolean apply(Integer integer) {
return integer % 2 == 0;
}
};
new IteratorTester<Integer>(5, UNMODIFIABLE, asList(2, 4),
IteratorTester.KnownOrder.KNOWN_ORDER) {
@Override protected Iterator<Integer> newTargetIterator() {
return Iterators.filter(list.iterator(), isEven);
}
}.test();
}
// Iterators.any: true as soon as at least one element matches the predicate;
// false for an empty source or when nothing matches.
public void testAny() {
    List<String> elements = Lists.newArrayList();
    Predicate<String> isPants = Predicates.equalTo("pants");
    assertFalse(Iterators.any(elements.iterator(), isPants));
    elements.add("cool");
    assertFalse(Iterators.any(elements.iterator(), isPants));
    elements.add("pants");
    assertTrue(Iterators.any(elements.iterator(), isPants));
}

// Iterators.all: vacuously true for an empty source, true while every element
// matches, false once any element fails the predicate.
public void testAll() {
    List<String> elements = Lists.newArrayList();
    Predicate<String> isCool = Predicates.equalTo("cool");
    assertTrue(Iterators.all(elements.iterator(), isCool));
    elements.add("cool");
    assertTrue(Iterators.all(elements.iterator(), isCool));
    elements.add("pants");
    assertFalse(Iterators.all(elements.iterator(), isCool));
}
// find: returns the first match and leaves the iterator positioned after it.
public void testFind_firstElement() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("cool", Iterators.find(iterator, Predicates.equalTo("cool")));
assertEquals("pants", iterator.next());
}
// find: a match at the end consumes the whole iterator.
public void testFind_lastElement() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("pants", Iterators.find(iterator,
Predicates.equalTo("pants")));
assertFalse(iterator.hasNext());
}
// find (no default): no match throws NoSuchElementException after exhausting
// the iterator.
public void testFind_notPresent() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
try {
Iterators.find(iterator, Predicates.alwaysFalse());
fail();
} catch (NoSuchElementException e) {
}
assertFalse(iterator.hasNext());
}
// find: an always-true predicate returns the very first element.
public void testFind_matchAlways() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("cool", Iterators.find(iterator, Predicates.alwaysTrue()));
}
// find with default: first-element match ignores the default.
public void testFind_withDefault_first() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("cool",
Iterators.find(iterator, Predicates.equalTo("cool"), "woot"));
assertEquals("pants", iterator.next());
}
// find with default: last-element match still ignores the default.
public void testFind_withDefault_last() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("pants",
Iterators.find(iterator, Predicates.equalTo("pants"), "woot"));
assertFalse(iterator.hasNext());
}
// find with default: no match returns the default instead of throwing.
public void testFind_withDefault_notPresent() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("woot",
Iterators.find(iterator, Predicates.alwaysFalse(), "woot"));
assertFalse(iterator.hasNext());
}
// find with default: a null default is returned as-is on no match.
public void testFind_withDefault_notPresent_nullReturn() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertNull(
Iterators.find(iterator, Predicates.alwaysFalse(), null));
assertFalse(iterator.hasNext());
}
// find with default: an always-true predicate returns the first element.
public void testFind_withDefault_matchAlways() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("cool",
Iterators.find(iterator, Predicates.alwaysTrue(), "woot"));
assertEquals("pants", iterator.next());
}
// tryFind: a first-element match yields a present Optional.
public void testTryFind_firstElement() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("cool",
Iterators.tryFind(iterator, Predicates.equalTo("cool")).get());
}
// tryFind: a match at the end is still found.
public void testTryFind_lastElement() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("pants",
Iterators.tryFind(iterator, Predicates.equalTo("pants")).get());
}
// tryFind: an always-true predicate yields the first element.
public void testTryFind_alwaysTrue() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("cool",
Iterators.tryFind(iterator, Predicates.alwaysTrue()).get());
}
// tryFind: no match yields an absent Optional — or() supplies the fallback.
public void testTryFind_alwaysFalse_orDefault() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("woot",
Iterators.tryFind(iterator, Predicates.alwaysFalse()).or("woot"));
assertFalse(iterator.hasNext());
}
// tryFind: no match yields an absent Optional and exhausts the iterator.
public void testTryFind_alwaysFalse_isPresent() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertFalse(
Iterators.tryFind(iterator, Predicates.alwaysFalse()).isPresent());
assertFalse(iterator.hasNext());
}
// transform applies the function lazily to each element in order.
public void testTransform() {
Iterator<String> input = asList("1", "2", "3").iterator();
Iterator<Integer> result = Iterators.transform(input,
new Function<String, Integer>() {
@Override
public Integer apply(String from) {
return Integer.valueOf(from);
}
});
List<Integer> actual = Lists.newArrayList(result);
List<Integer> expected = asList(1, 2, 3);
assertEquals(expected, actual);
}
// remove() on a transformed iterator removes from the BACKING collection.
public void testTransformRemove() {
List<String> list = Lists.newArrayList("1", "2", "3");
Iterator<String> input = list.iterator();
Iterator<Integer> iterator = Iterators.transform(input,
new Function<String, Integer>() {
@Override
public Integer apply(String from) {
return Integer.valueOf(from);
}
});
assertEquals(Integer.valueOf(1), iterator.next());
assertEquals(Integer.valueOf(2), iterator.next());
iterator.remove();
assertEquals(asList("1", "3"), list);
}
// An exception thrown by the function propagates out of next() — at the
// element that triggers it, not eagerly.
public void testPoorlyBehavedTransform() {
Iterator<String> input = asList("1", null, "3").iterator();
Iterator<Integer> result = Iterators.transform(input,
new Function<String, Integer>() {
@Override
public Integer apply(String from) {
return Integer.valueOf(from);
}
});
result.next();
try {
result.next();
fail("Expected NFE");
} catch (NumberFormatException nfe) {
// Expected to fail.
}
}
// Null elements are handed to the function rather than skipped; here
// String.valueOf(null) yields the string "null".
public void testNullFriendlyTransform() {
Iterator<Integer> input = asList(1, 2, null, 3).iterator();
Iterator<String> result = Iterators.transform(input,
new Function<Integer, String>() {
@Override
public String apply(Integer from) {
return String.valueOf(from);
}
});
List<String> actual = Lists.newArrayList(result);
List<String> expected = asList("1", "2", "null", "3");
assertEquals(expected, actual);
}
// cycle over nothing: immediately exhausted.
public void testCycleOfEmpty() {
// "<String>" for javac 1.5.
Iterator<String> cycle = Iterators.<String>cycle();
assertFalse(cycle.hasNext());
}
// cycle over one element: repeats it indefinitely.
public void testCycleOfOne() {
Iterator<String> cycle = Iterators.cycle("a");
for (int i = 0; i < 3; i++) {
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
}
}
// remove() on a cycle deletes from the backing iterable; removing the last
// element terminates the cycle.
public void testCycleOfOneWithRemove() {
Iterable<String> iterable = Lists.newArrayList("a");
Iterator<String> cycle = Iterators.cycle(iterable);
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
cycle.remove();
assertEquals(Collections.emptyList(), iterable);
assertFalse(cycle.hasNext());
}
// cycle over two elements: alternates a, b, a, b, ... forever.
public void testCycleOfTwo() {
Iterator<String> cycle = Iterators.cycle("a", "b");
for (int i = 0; i < 3; i++) {
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
assertTrue(cycle.hasNext());
assertEquals("b", cycle.next());
}
}
// Removing elements mid-cycle shrinks the rotation; the cycle keeps looping
// over whatever remains until the iterable is empty.
public void testCycleOfTwoWithRemove() {
Iterable<String> iterable = Lists.newArrayList("a", "b");
Iterator<String> cycle = Iterators.cycle(iterable);
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
assertTrue(cycle.hasNext());
assertEquals("b", cycle.next());
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
cycle.remove();
assertEquals(Collections.singletonList("b"), iterable);
assertTrue(cycle.hasNext());
assertEquals("b", cycle.next());
assertTrue(cycle.hasNext());
assertEquals("b", cycle.next());
cycle.remove();
assertEquals(Collections.emptyList(), iterable);
assertFalse(cycle.hasNext());
}
// remove() before any next() must throw IllegalStateException.
public void testCycleRemoveWithoutNext() {
Iterator<String> cycle = Iterators.cycle("a", "b");
assertTrue(cycle.hasNext());
try {
cycle.remove();
fail("no exception thrown");
} catch (IllegalStateException expected) {
}
}
// A second remove() without an intervening next() must also throw.
public void testCycleRemoveSameElementTwice() {
Iterator<String> cycle = Iterators.cycle("a", "b");
cycle.next();
cycle.remove();
try {
cycle.remove();
fail("no exception thrown");
} catch (IllegalStateException expected) {
}
}
// When the backing iterable's iterator is unmodifiable (Arrays.asList),
// remove() propagates UnsupportedOperationException.
public void testCycleWhenRemoveIsNotSupported() {
Iterable<String> iterable = asList("a", "b");
Iterator<String> cycle = Iterators.cycle(iterable);
cycle.next();
try {
cycle.remove();
fail("no exception thrown");
} catch (UnsupportedOperationException expected) {
}
}
// A hasNext() call between next() and remove() must not break remove().
public void testCycleRemoveAfterHasNext() {
Iterable<String> iterable = Lists.newArrayList("a");
Iterator<String> cycle = Iterators.cycle(iterable);
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
assertTrue(cycle.hasNext());
cycle.remove();
assertEquals(Collections.emptyList(), iterable);
assertFalse(cycle.hasNext());
}
// next() on an emptied-out cycle must throw NoSuchElementException.
public void testCycleNoSuchElementException() {
Iterable<String> iterable = Lists.newArrayList("a");
Iterator<String> cycle = Iterators.cycle(iterable);
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
cycle.remove();
assertFalse(cycle.hasNext());
try {
cycle.next();
fail();
} catch (NoSuchElementException expected) {}
}
@GwtIncompatible("unreasonably slow")
// IteratorTester pass: a cycle over (1, 2) compared against the expected
// repeated prefix of twelve elements.
public void testCycleUsingIteratorTester() {
new IteratorTester<Integer>(5, UNMODIFIABLE, asList(1, 2, 1, 2, 1,
2, 1, 2, 1, 2, 1, 2), IteratorTester.KnownOrder.KNOWN_ORDER) {
@Override protected Iterator<Integer> newTargetIterator() {
return Iterators.cycle(asList(1, 2));
}
}.test();
}
@GwtIncompatible("slow (~5s)")
// concat of zero iterators is empty.
public void testConcatNoIteratorsYieldsEmpty() {
new EmptyIteratorTester() {
@SuppressWarnings("unchecked")
@Override protected Iterator<Integer> newTargetIterator() {
return Iterators.concat();
}
}.test();
}
@GwtIncompatible("slow (~5s)")
// concat of a single empty iterator is empty.
public void testConcatOneEmptyIteratorYieldsEmpty() {
new EmptyIteratorTester() {
@SuppressWarnings("unchecked")
@Override protected Iterator<Integer> newTargetIterator() {
return Iterators.concat(iterateOver());
}
}.test();
}
@GwtIncompatible("slow (~5s)")
// concat of several empty iterators is still empty.
public void testConcatMultipleEmptyIteratorsYieldsEmpty() {
new EmptyIteratorTester() {
@Override protected Iterator<Integer> newTargetIterator() {
return Iterators.concat(iterateOver(), iterateOver());
}
}.test();
}
@GwtIncompatible("slow (~3s)")
// concat of a single one-element iterator yields just that element.
public void testConcatSingletonYieldsSingleton() {
new SingletonIteratorTester() {
@SuppressWarnings("unchecked")
@Override protected Iterator<Integer> newTargetIterator() {
return Iterators.concat(iterateOver(1));
}
}.test();
}
@GwtIncompatible("slow (~5s)")
// Empty inputs surrounding a singleton are skipped transparently.
public void testConcatEmptyAndSingletonAndEmptyYieldsSingleton() {
new SingletonIteratorTester() {
@Override protected Iterator<Integer> newTargetIterator() {
return Iterators.concat(iterateOver(), iterateOver(1), iterateOver());
}
}.test();
}
@GwtIncompatible("fairly slow (~40s)")
// Two singletons concatenate into a two-element sequence, in order.
public void testConcatSingletonAndSingletonYieldsDoubleton() {
new DoubletonIteratorTester() {
@Override protected Iterator<Integer> newTargetIterator() {
return Iterators.concat(iterateOver(1), iterateOver(2));
}
}.test();
}
@GwtIncompatible("fairly slow (~40s)")
// Interleaved empty iterators do not disturb the concatenated order.
public void testConcatSingletonAndSingletonWithEmptiesYieldsDoubleton() {
new DoubletonIteratorTester() {
@Override protected Iterator<Integer> newTargetIterator() {
return Iterators.concat(
iterateOver(1), iterateOver(), iterateOver(), iterateOver(2));
}
}.test();
}
@GwtIncompatible("fairly slow (~50s)")
// IteratorTester pass over a concat of unmodifiable sub-iterators.
public void testConcatUnmodifiable() {
new IteratorTester<Integer>(5, UNMODIFIABLE, asList(1, 2),
IteratorTester.KnownOrder.KNOWN_ORDER) {
@Override protected Iterator<Integer> newTargetIterator() {
return Iterators.concat(asList(1).iterator(),
Arrays.<Integer>asList().iterator(), asList(2).iterator());
}
}.test();
}
/**
 * Illustrates the somewhat bizarre behavior when a null is passed in.
 */
public void testConcatContainingNull() {
@SuppressWarnings("unchecked")
Iterator<Iterator<Integer>> input
= asList(iterateOver(1, 2), null, iterateOver(3)).iterator();
Iterator<Integer> result = Iterators.concat(input);
assertEquals(1, (int) result.next());
assertEquals(2, (int) result.next());
// Once the null sub-iterator is reached, both hasNext() and next() NPE
// permanently — the iterator cannot advance past it.
try {
result.hasNext();
fail("no exception thrown");
} catch (NullPointerException e) {
}
try {
result.next();
fail("no exception thrown");
} catch (NullPointerException e) {
}
// There is no way to get "through" to the 3. Buh-bye
}
@SuppressWarnings("unchecked")
// The varargs overload, by contrast, rejects a null input eagerly.
public void testConcatVarArgsContainingNull() {
try {
Iterators.concat(iterateOver(1, 2), null, iterateOver(3), iterateOver(4),
iterateOver(5));
fail("no exception thrown");
} catch (NullPointerException e) {
}
}
// addAll with an empty source: the target is untouched and the method
// reports no change.
public void testAddAllWithEmptyIterator() {
List<String> alreadyThere = Lists.newArrayList("already", "there");
boolean changed = Iterators.addAll(alreadyThere,
Iterators.<String>emptyIterator());
assertThat(alreadyThere).containsExactly("already", "there").inOrder();
assertFalse(changed);
}
// addAll into a List: appends every source element and reports a change.
public void testAddAllToList() {
    List<String> alreadyThere = Lists.newArrayList("already", "there");
    List<String> freshlyAdded = Lists.newArrayList("freshly", "added");
    boolean changed = Iterators.addAll(alreadyThere, freshlyAdded.iterator());
    // inOrder() for consistency with the other addAll tests: List.add appends,
    // so the resulting encounter order is deterministic and worth asserting.
    assertThat(alreadyThere)
        .containsExactly("already", "there", "freshly", "added")
        .inOrder();
    assertTrue(changed);
}
// addAll into a Set: adding an element already present is a no-op, so the
// set is unchanged and the method reports no change.
public void testAddAllToSet() {
Set<String> alreadyThere
= Sets.newLinkedHashSet(asList("already", "there"));
List<String> oneMore = Lists.newArrayList("there");
boolean changed = Iterators.addAll(alreadyThere, oneMore.iterator());
assertThat(alreadyThere).containsExactly("already", "there").inOrder();
assertFalse(changed);
}
@GwtIncompatible("NullPointerTester")
// Every public static method on Iterators must reject null arguments.
public void testNullPointerExceptions() {
NullPointerTester tester = new NullPointerTester();
tester.testAllPublicStaticMethods(Iterators.class);
}
@GwtIncompatible("Only used by @GwtIncompatible code")
// IteratorTester base expecting no elements; MODIFIABLE so remove() paths
// of the iterator under test are also exercised.
private static abstract class EmptyIteratorTester
extends IteratorTester<Integer> {
protected EmptyIteratorTester() {
super(3, MODIFIABLE, Collections.<Integer>emptySet(),
IteratorTester.KnownOrder.KNOWN_ORDER);
}
}
@GwtIncompatible("Only used by @GwtIncompatible code")
// IteratorTester base expecting exactly the single element 1.
private static abstract class SingletonIteratorTester
extends IteratorTester<Integer> {
protected SingletonIteratorTester() {
super(3, MODIFIABLE, singleton(1), IteratorTester.KnownOrder.KNOWN_ORDER);
}
}
@GwtIncompatible("Only used by @GwtIncompatible code")
// IteratorTester base expecting exactly the elements 1, 2 in order.
private static abstract class DoubletonIteratorTester
extends IteratorTester<Integer> {
protected DoubletonIteratorTester() {
super(5, MODIFIABLE, newArrayList(1, 2),
IteratorTester.KnownOrder.KNOWN_ORDER);
}
}
// Returns an iterator over the given values backed by a fresh ArrayList —
// it must support remove(), because the tester subclasses above run with
// the MODIFIABLE feature.
private static Iterator<Integer> iterateOver(final Integer... values) {
    List<Integer> backing = newArrayList(values);
    return backing.iterator();
}
// Iterators.elementsEqual: pairwise equality of two sequences, covering
// empties, nulls, mixed Iterable types, differing elements, and differing
// lengths (checked in both argument orders).
public void testElementsEqual() {
    Iterable<?> left;
    Iterable<?> right;

    // Base case: two empty sources are equal regardless of container type.
    left = Lists.newArrayList();
    right = Collections.emptySet();
    assertTrue(Iterators.elementsEqual(left.iterator(), right.iterator()));

    // A few elements.
    left = asList(4, 8, 15, 16, 23, 42);
    right = asList(4, 8, 15, 16, 23, 42);
    assertTrue(Iterators.elementsEqual(left.iterator(), right.iterator()));

    // The same, but with nulls.
    left = asList(4, 8, null, 16, 23, 42);
    right = asList(4, 8, null, 16, 23, 42);
    assertTrue(Iterators.elementsEqual(left.iterator(), right.iterator()));

    // Different Iterable types (still equal elements, though).
    left = ImmutableList.of(4, 8, 15, 16, 23, 42);
    right = asList(4, 8, 15, 16, 23, 42);
    assertTrue(Iterators.elementsEqual(left.iterator(), right.iterator()));

    // An element differs.
    left = asList(4, 8, 15, 12, 23, 42);
    right = asList(4, 8, 15, 16, 23, 42);
    assertFalse(Iterators.elementsEqual(left.iterator(), right.iterator()));

    // null versus non-null.
    left = asList(4, 8, 15, null, 23, 42);
    right = asList(4, 8, 15, 16, 23, 42);
    assertFalse(Iterators.elementsEqual(left.iterator(), right.iterator()));
    assertFalse(Iterators.elementsEqual(right.iterator(), left.iterator()));

    // Different lengths.
    left = asList(4, 8, 15, 16, 23);
    right = asList(4, 8, 15, 16, 23, 42);
    assertFalse(Iterators.elementsEqual(left.iterator(), right.iterator()));
    assertFalse(Iterators.elementsEqual(right.iterator(), left.iterator()));

    // Different lengths, one is empty.
    left = Collections.emptySet();
    right = asList(4, 8, 15, 16, 23, 42);
    assertFalse(Iterators.elementsEqual(left.iterator(), right.iterator()));
    assertFalse(Iterators.elementsEqual(right.iterator(), left.iterator()));
}
// Iterators.partition must reject a non-positive partition size.
public void testPartition_badSize() {
  Iterator<Integer> source = Iterators.singletonIterator(1);
  try {
    Iterators.partition(source, 0);
    fail();
  } catch (IllegalArgumentException expected) {
  }
}

// Partitioning an empty source yields no partitions at all.
public void testPartition_empty() {
  Iterator<Integer> source = Iterators.emptyIterator();
  Iterator<List<Integer>> partitions = Iterators.partition(source, 1);
  assertFalse(partitions.hasNext());
}

public void testPartition_singleton1() {
  Iterator<Integer> source = Iterators.singletonIterator(1);
  Iterator<List<Integer>> partitions = Iterators.partition(source, 1);
  // hasNext() is deliberately called twice: it must be idempotent.
  assertTrue(partitions.hasNext());
  assertTrue(partitions.hasNext());
  assertEquals(ImmutableList.of(1), partitions.next());
  assertFalse(partitions.hasNext());
}

// Partition size larger than the source: one short (unpadded) partition.
public void testPartition_singleton2() {
  Iterator<Integer> source = Iterators.singletonIterator(1);
  Iterator<List<Integer>> partitions = Iterators.partition(source, 2);
  assertTrue(partitions.hasNext());
  assertTrue(partitions.hasNext());
  assertEquals(ImmutableList.of(1), partitions.next());
  assertFalse(partitions.hasNext());
}

@GwtIncompatible("fairly slow (~50s)")
public void testPartition_general() {
  // Exhaustive IteratorTester run: 7 elements in partitions of 3 come out
  // as [1,2,3], [4,5,6], [7] — the trailing partition is not padded.
  new IteratorTester<List<Integer>>(5,
      IteratorFeature.UNMODIFIABLE,
      ImmutableList.of(
          asList(1, 2, 3),
          asList(4, 5, 6),
          asList(7)),
      IteratorTester.KnownOrder.KNOWN_ORDER) {
    @Override protected Iterator<List<Integer>> newTargetIterator() {
      Iterator<Integer> source = Iterators.forArray(1, 2, 3, 4, 5, 6, 7);
      return Iterators.partition(source, 3);
    }
  }.test();
}

// A partition captures the source elements when it is retrieved via next(),
// not when the partition iterator is created and not afterwards.
public void testPartition_view() {
  List<Integer> list = asList(1, 2);
  Iterator<List<Integer>> partitions
      = Iterators.partition(list.iterator(), 1);

  // Changes before the partition is retrieved are reflected
  list.set(0, 3);
  List<Integer> first = partitions.next();

  // Changes after are not
  list.set(0, 4);
  assertEquals(ImmutableList.of(3), first);
}

@GwtIncompatible("?")
// TODO: Figure out why this is failing in GWT.
public void testPartitionRandomAccess() {
  // Each returned partition list should implement RandomAccess.
  Iterator<Integer> source = asList(1, 2, 3).iterator();
  Iterator<List<Integer>> partitions = Iterators.partition(source, 2);
  assertTrue(partitions.next() instanceof RandomAccess);
  assertTrue(partitions.next() instanceof RandomAccess);
}
// paddedPartition mirrors the partition tests above, but the final partition
// is padded with nulls up to the requested size.
public void testPaddedPartition_badSize() {
  Iterator<Integer> source = Iterators.singletonIterator(1);
  try {
    Iterators.paddedPartition(source, 0);
    fail();
  } catch (IllegalArgumentException expected) {
  }
}

public void testPaddedPartition_empty() {
  Iterator<Integer> source = Iterators.emptyIterator();
  Iterator<List<Integer>> partitions = Iterators.paddedPartition(source, 1);
  assertFalse(partitions.hasNext());
}

public void testPaddedPartition_singleton1() {
  Iterator<Integer> source = Iterators.singletonIterator(1);
  Iterator<List<Integer>> partitions = Iterators.paddedPartition(source, 1);
  // hasNext() is deliberately called twice: it must be idempotent.
  assertTrue(partitions.hasNext());
  assertTrue(partitions.hasNext());
  assertEquals(ImmutableList.of(1), partitions.next());
  assertFalse(partitions.hasNext());
}

// Unlike partition, a short final partition is null-padded to the full size.
public void testPaddedPartition_singleton2() {
  Iterator<Integer> source = Iterators.singletonIterator(1);
  Iterator<List<Integer>> partitions = Iterators.paddedPartition(source, 2);
  assertTrue(partitions.hasNext());
  assertTrue(partitions.hasNext());
  assertEquals(asList(1, null), partitions.next());
  assertFalse(partitions.hasNext());
}

@GwtIncompatible("fairly slow (~50s)")
public void testPaddedPartition_general() {
  // 7 elements in partitions of 3: the last partition is [7, null, null].
  new IteratorTester<List<Integer>>(5,
      IteratorFeature.UNMODIFIABLE,
      ImmutableList.of(
          asList(1, 2, 3),
          asList(4, 5, 6),
          asList(7, null, null)),
      IteratorTester.KnownOrder.KNOWN_ORDER) {
    @Override protected Iterator<List<Integer>> newTargetIterator() {
      Iterator<Integer> source = Iterators.forArray(1, 2, 3, 4, 5, 6, 7);
      return Iterators.paddedPartition(source, 3);
    }
  }.test();
}

// Same snapshot semantics as testPartition_view: elements are captured when
// the partition is retrieved.
public void testPaddedPartition_view() {
  List<Integer> list = asList(1, 2);
  Iterator<List<Integer>> partitions
      = Iterators.paddedPartition(list.iterator(), 1);

  // Changes before the PaddedPartition is retrieved are reflected
  list.set(0, 3);
  List<Integer> first = partitions.next();

  // Changes after are not
  list.set(0, 4);
  assertEquals(ImmutableList.of(3), first);
}

public void testPaddedPartitionRandomAccess() {
  Iterator<Integer> source = asList(1, 2, 3).iterator();
  Iterator<List<Integer>> partitions = Iterators.paddedPartition(source, 2);
  assertTrue(partitions.next() instanceof RandomAccess);
  assertTrue(partitions.next() instanceof RandomAccess);
}
// Iterators.forArray over an empty array: immediately exhausted, and next()
// throws NoSuchElementException.
public void testForArrayEmpty() {
  String[] array = new String[0];
  Iterator<String> iterator = Iterators.forArray(array);
  assertFalse(iterator.hasNext());
  try {
    iterator.next();
    fail();
  } catch (NoSuchElementException expected) {}
}

// forArray yields elements in order, rejects remove(), and throws once past
// the end.
public void testForArrayTypical() {
  String[] array = {"foo", "bar"};
  Iterator<String> iterator = Iterators.forArray(array);
  assertTrue(iterator.hasNext());
  assertEquals("foo", iterator.next());
  assertTrue(iterator.hasNext());
  try {
    iterator.remove();
    fail();
  } catch (UnsupportedOperationException expected) {}
  assertEquals("bar", iterator.next());
  assertFalse(iterator.hasNext());
  try {
    iterator.next();
    fail();
  } catch (NoSuchElementException expected) {}
}

// The (array, offset, length, index) overload iterates a sub-range, and
// rejects a range that extends past the end of the array.
public void testForArrayOffset() {
  String[] array = {"foo", "bar", "cat", "dog"};
  Iterator<String> iterator = Iterators.forArray(array, 1, 2, 0);
  assertTrue(iterator.hasNext());
  assertEquals("bar", iterator.next());
  assertTrue(iterator.hasNext());
  assertEquals("cat", iterator.next());
  assertFalse(iterator.hasNext());
  try {
    Iterators.forArray(array, 2, 3, 0);
    fail();
  } catch (IndexOutOfBoundsException expected) {}
}

// Zero-length ranges are empty at any valid offset (0..array.length), and
// out-of-range offsets are rejected eagerly even for length 0.
public void testForArrayLength0() {
  String[] array = {"foo", "bar"};
  assertFalse(Iterators.forArray(array, 0, 0, 0).hasNext());
  assertFalse(Iterators.forArray(array, 1, 0, 0).hasNext());
  assertFalse(Iterators.forArray(array, 2, 0, 0).hasNext());
  try {
    Iterators.forArray(array, -1, 0, 0);
    fail();
  } catch (IndexOutOfBoundsException expected) {}
  try {
    Iterators.forArray(array, 3, 0, 0);
    fail();
  } catch (IndexOutOfBoundsException expected) {}
}

@GwtIncompatible("unreasonably slow")
public void testForArrayUsingTester() {
  // Exhaustive IteratorTester check of the varargs overload.
  new IteratorTester<Integer>(6, UNMODIFIABLE, asList(1, 2, 3),
      IteratorTester.KnownOrder.KNOWN_ORDER) {
    @Override protected Iterator<Integer> newTargetIterator() {
      return Iterators.forArray(1, 2, 3);
    }
  }.test();
}

@GwtIncompatible("unreasonably slow")
public void testForArrayWithOffsetUsingTester() {
  // Exhaustive IteratorTester check of the sub-range overload.
  new IteratorTester<Integer>(6, UNMODIFIABLE, asList(1, 2, 3),
      IteratorTester.KnownOrder.KNOWN_ORDER) {
    @Override protected Iterator<Integer> newTargetIterator() {
      return Iterators.forArray(new Integer[] { 0, 1, 2, 3, 4 }, 1, 3, 0);
    }
  }.test();
}
// Iterators.forEnumeration adapts a legacy Enumeration to Iterator; the
// resulting iterator must be read-only (remove() unsupported).
public void testForEnumerationEmpty() {
  Enumeration<Integer> enumer = enumerate();
  Iterator<Integer> iter = Iterators.forEnumeration(enumer);

  assertFalse(iter.hasNext());
  try {
    iter.next();
    fail();
  } catch (NoSuchElementException expected) {
  }
}

public void testForEnumerationSingleton() {
  Enumeration<Integer> enumer = enumerate(1);
  Iterator<Integer> iter = Iterators.forEnumeration(enumer);

  // hasNext() called twice on purpose: it must be idempotent.
  assertTrue(iter.hasNext());
  assertTrue(iter.hasNext());
  assertEquals(1, (int) iter.next());
  try {
    iter.remove();
    fail();
  } catch (UnsupportedOperationException expected) {
  }
  assertFalse(iter.hasNext());
  try {
    iter.next();
    fail();
  } catch (NoSuchElementException expected) {
  }
}

public void testForEnumerationTypical() {
  Enumeration<Integer> enumer = enumerate(1, 2, 3);
  Iterator<Integer> iter = Iterators.forEnumeration(enumer);

  assertTrue(iter.hasNext());
  assertEquals(1, (int) iter.next());
  assertTrue(iter.hasNext());
  assertEquals(2, (int) iter.next());
  assertTrue(iter.hasNext());
  assertEquals(3, (int) iter.next());
  assertFalse(iter.hasNext());
}

// Iterators.asEnumeration is the inverse adapter: Iterator -> Enumeration.
public void testAsEnumerationEmpty() {
  Iterator<Integer> iter = Iterators.emptyIterator();
  Enumeration<Integer> enumer = Iterators.asEnumeration(iter);

  assertFalse(enumer.hasMoreElements());
  try {
    enumer.nextElement();
    fail();
  } catch (NoSuchElementException expected) {
  }
}

public void testAsEnumerationSingleton() {
  Iterator<Integer> iter = ImmutableList.of(1).iterator();
  Enumeration<Integer> enumer = Iterators.asEnumeration(iter);

  // hasMoreElements() called twice on purpose: it must be idempotent.
  assertTrue(enumer.hasMoreElements());
  assertTrue(enumer.hasMoreElements());
  assertEquals(1, (int) enumer.nextElement());
  assertFalse(enumer.hasMoreElements());
  try {
    enumer.nextElement();
    fail();
  } catch (NoSuchElementException expected) {
  }
}

public void testAsEnumerationTypical() {
  Iterator<Integer> iter = ImmutableList.of(1, 2, 3).iterator();
  Enumeration<Integer> enumer = Iterators.asEnumeration(iter);

  assertTrue(enumer.hasMoreElements());
  assertEquals(1, (int) enumer.nextElement());
  assertTrue(enumer.hasMoreElements());
  assertEquals(2, (int) enumer.nextElement());
  assertTrue(enumer.hasMoreElements());
  assertEquals(3, (int) enumer.nextElement());
  assertFalse(enumer.hasMoreElements());
}

// Builds a real Enumeration (via Vector, the classic Enumeration source)
// over the given ints.
private static Enumeration<Integer> enumerate(Integer... ints) {
  Vector<Integer> vector = new Vector<Integer>();
  vector.addAll(asList(ints));
  return vector.elements();
}
// Iterators.toString renders like a List's toString: bracketed and
// comma-separated.
public void testToString() {
  List<String> source = Lists.newArrayList("yam", "bam", "jam", "ham");
  assertEquals("[yam, bam, jam, ham]", Iterators.toString(source.iterator()));
}

// A null element renders as the literal text "null".
public void testToStringWithNull() {
  List<String> source = Lists.newArrayList("hello", null, "world");
  assertEquals("[hello, null, world]", Iterators.toString(source.iterator()));
}

// An empty iterator renders as empty brackets.
public void testToStringEmptyIterator() {
  List<String> source = Collections.emptyList();
  assertEquals("[]", Iterators.toString(source.iterator()));
}
// Iterators.limit truncates after limitSize elements; negative sizes are
// rejected, and a limit larger than the source is a no-op.
public void testLimit() {
  List<String> list = newArrayList();
  try {
    Iterators.limit(list.iterator(), -1);
    fail("expected exception");
  } catch (IllegalArgumentException expected) {
    // expected
  }

  assertFalse(Iterators.limit(list.iterator(), 0).hasNext());
  assertFalse(Iterators.limit(list.iterator(), 1).hasNext());

  list.add("cool");
  assertFalse(Iterators.limit(list.iterator(), 0).hasNext());
  assertEquals(list, newArrayList(Iterators.limit(list.iterator(), 1)));
  assertEquals(list, newArrayList(Iterators.limit(list.iterator(), 2)));

  list.add("pants");
  assertFalse(Iterators.limit(list.iterator(), 0).hasNext());
  assertEquals(ImmutableList.of("cool"),
      newArrayList(Iterators.limit(list.iterator(), 1)));
  assertEquals(list, newArrayList(Iterators.limit(list.iterator(), 2)));
  assertEquals(list, newArrayList(Iterators.limit(list.iterator(), 3)));
}

// remove() on a limited iterator passes through to the backing iterator.
public void testLimitRemove() {
  List<String> list = newArrayList();
  list.add("cool");
  list.add("pants");
  Iterator<String> iterator = Iterators.limit(list.iterator(), 1);
  iterator.next();
  iterator.remove();
  assertFalse(iterator.hasNext());
  assertEquals(1, list.size());
  assertEquals("pants", list.get(0));
}

@GwtIncompatible("fairly slow (~30s)")
public void testLimitUsingIteratorTester() {
  // Exhaustive check: limiting [1..5] to 3 behaves as an iterator over
  // [1, 2, 3]. A fresh copy of the list is used per target iterator so
  // tester-driven remove() calls don't leak between runs.
  final List<Integer> list = Lists.newArrayList(1, 2, 3, 4, 5);
  new IteratorTester<Integer>(5, MODIFIABLE, newArrayList(1, 2, 3),
      IteratorTester.KnownOrder.KNOWN_ORDER) {
    @Override protected Iterator<Integer> newTargetIterator() {
      return Iterators.limit(Lists.newArrayList(list).iterator(), 3);
    }
  }.test();
}
// Iterators.getNext(it, default): first element if present, else the default.
public void testGetNext_withDefault_singleton() {
  Iterator<String> iterator = Collections.singletonList("foo").iterator();
  assertEquals("foo", Iterators.getNext(iterator, "bar"));
}

public void testGetNext_withDefault_empty() {
  Iterator<String> iterator = Iterators.emptyIterator();
  assertEquals("bar", Iterators.getNext(iterator, "bar"));
}

// A null default is allowed and returned as-is for an empty iterator.
public void testGetNext_withDefault_empty_null() {
  Iterator<String> iterator = Iterators.emptyIterator();
  assertNull(Iterators.getNext(iterator, null));
}

public void testGetNext_withDefault_two() {
  Iterator<String> iterator = asList("foo", "bar").iterator();
  assertEquals("foo", Iterators.getNext(iterator, "x"));
}

// Iterators.getLast(it): last element, or NoSuchElementException when empty.
public void testGetLast_basic() {
  List<String> list = newArrayList();
  list.add("a");
  list.add("b");
  assertEquals("b", getLast(list.iterator()));
}

public void testGetLast_exception() {
  List<String> list = newArrayList();
  try {
    getLast(list.iterator());
    fail();
  } catch (NoSuchElementException expected) {
  }
}

// getLast(it, default): last element if present, else the default.
public void testGetLast_withDefault_singleton() {
  Iterator<String> iterator = Collections.singletonList("foo").iterator();
  assertEquals("foo", Iterators.getLast(iterator, "bar"));
}

public void testGetLast_withDefault_empty() {
  Iterator<String> iterator = Iterators.emptyIterator();
  assertEquals("bar", Iterators.getLast(iterator, "bar"));
}

// A null default is allowed and returned as-is for an empty iterator.
public void testGetLast_withDefault_empty_null() {
  Iterator<String> iterator = Iterators.emptyIterator();
  assertNull(Iterators.getLast(iterator, null));
}

public void testGetLast_withDefault_two() {
  Iterator<String> iterator = asList("foo", "bar").iterator();
  assertEquals("bar", Iterators.getLast(iterator, "x"));
}
// Iterators.get(it, position): advances to and returns the element at the
// given position, consuming the iterator up to (and including) it.
public void testGet_basic() {
  List<String> list = newArrayList();
  list.add("a");
  list.add("b");
  Iterator<String> iterator = list.iterator();
  assertEquals("b", get(iterator, 1));
  assertFalse(iterator.hasNext());
}

// Position == size: IndexOutOfBoundsException, iterator fully consumed.
public void testGet_atSize() {
  List<String> list = newArrayList();
  list.add("a");
  list.add("b");
  Iterator<String> iterator = list.iterator();
  try {
    get(iterator, 2);
    fail();
  } catch (IndexOutOfBoundsException expected) {}
  assertFalse(iterator.hasNext());
}

// Position far past the end: same exception, iterator fully consumed.
public void testGet_pastEnd() {
  List<String> list = newArrayList();
  list.add("a");
  list.add("b");
  Iterator<String> iterator = list.iterator();
  try {
    get(iterator, 5);
    fail();
  } catch (IndexOutOfBoundsException expected) {}
  assertFalse(iterator.hasNext());
}

public void testGet_empty() {
  List<String> list = newArrayList();
  Iterator<String> iterator = list.iterator();
  try {
    get(iterator, 0);
    fail();
  } catch (IndexOutOfBoundsException expected) {}
  assertFalse(iterator.hasNext());
}

// Negative positions are rejected eagerly.
public void testGet_negativeIndex() {
  List<String> list = newArrayList("a", "b", "c");
  Iterator<String> iterator = list.iterator();
  try {
    get(iterator, -1);
    fail();
  } catch (IndexOutOfBoundsException expected) {}
}

// get(it, position, default): element at position, or the default when the
// iterator is too short (in which case the iterator is fully consumed).
public void testGet_withDefault_basic() {
  List<String> list = newArrayList();
  list.add("a");
  list.add("b");
  Iterator<String> iterator = list.iterator();
  assertEquals("a", get(iterator, 0, "c"));
  assertTrue(iterator.hasNext());
}

public void testGet_withDefault_atSize() {
  List<String> list = newArrayList();
  list.add("a");
  list.add("b");
  Iterator<String> iterator = list.iterator();
  assertEquals("c", get(iterator, 2, "c"));
  assertFalse(iterator.hasNext());
}

public void testGet_withDefault_pastEnd() {
  List<String> list = newArrayList();
  list.add("a");
  list.add("b");
  Iterator<String> iterator = list.iterator();
  assertEquals("c", get(iterator, 3, "c"));
  assertFalse(iterator.hasNext());
}

// A negative position throws even when a default is supplied, and must not
// consume any elements.
public void testGet_withDefault_negativeIndex() {
  List<String> list = newArrayList();
  list.add("a");
  list.add("b");
  Iterator<String> iterator = list.iterator();
  try {
    get(iterator, -1, "c");
    fail();
  } catch (IndexOutOfBoundsException expected) {
    // pass
  }
  assertTrue(iterator.hasNext());
}
// Iterators.advance(it, n): skips up to n elements.
public void testAdvance_basic() {
  List<String> list = newArrayList();
  list.add("a");
  list.add("b");
  Iterator<String> iterator = list.iterator();
  advance(iterator, 1);
  assertEquals("b", iterator.next());
}

// Advancing past the end just exhausts the iterator — no exception.
public void testAdvance_pastEnd() {
  List<String> list = newArrayList();
  list.add("a");
  list.add("b");
  Iterator<String> iterator = list.iterator();
  advance(iterator, 5);
  assertFalse(iterator.hasNext());
}

// Negative distances are rejected.
public void testAdvance_illegalArgument() {
  List<String> list = newArrayList("a", "b", "c");
  Iterator<String> iterator = list.iterator();
  try {
    advance(iterator, -1);
    fail();
  } catch (IllegalArgumentException expected) {}
}

// Iterators.frequency counts occurrences, including of null, and of values
// (like 4.2 here) whose type doesn't match the element type.
public void testFrequency() {
  List<String> list = newArrayList("a", null, "b", null, "a", null);
  assertEquals(2, Iterators.frequency(list.iterator(), "a"));
  assertEquals(1, Iterators.frequency(list.iterator(), "b"));
  assertEquals(0, Iterators.frequency(list.iterator(), "c"));
  assertEquals(0, Iterators.frequency(list.iterator(), 4.2));
  assertEquals(3, Iterators.frequency(list.iterator(), null));
}
@GwtIncompatible("slow (~4s)")
public void testSingletonIterator() {
  // Exhaustive check of the unmodifiable single-element iterator.
  new IteratorTester<Integer>(
      3, UNMODIFIABLE, singleton(1), IteratorTester.KnownOrder.KNOWN_ORDER) {
    @Override protected Iterator<Integer> newTargetIterator() {
      return Iterators.singletonIterator(1);
    }
  }.test();
}

// Iterators.removeAll removes every element contained in the collection and
// returns whether the backing collection changed.
public void testRemoveAll() {
  List<String> list = newArrayList("a", "b", "c", "d", "e");
  assertTrue(Iterators.removeAll(
      list.iterator(), newArrayList("b", "d", "f")));
  assertEquals(newArrayList("a", "c", "e"), list);
  assertFalse(Iterators.removeAll(
      list.iterator(), newArrayList("x", "y", "z")));
  assertEquals(newArrayList("a", "c", "e"), list);
}

// Iterators.removeIf removes every element matching the predicate and
// returns whether the backing collection changed.
public void testRemoveIf() {
  List<String> list = newArrayList("a", "b", "c", "d", "e");
  assertTrue(Iterators.removeIf(
      list.iterator(),
      new Predicate<String>() {
        @Override
        public boolean apply(String s) {
          return s.equals("b") || s.equals("d") || s.equals("f");
        }
      }));
  assertEquals(newArrayList("a", "c", "e"), list);
  assertFalse(Iterators.removeIf(
      list.iterator(),
      new Predicate<String>() {
        @Override
        public boolean apply(String s) {
          return s.equals("x") || s.equals("y") || s.equals("z");
        }
      }));
  assertEquals(newArrayList("a", "c", "e"), list);
}

// Iterators.retainAll keeps only elements contained in the collection and
// returns whether the backing collection changed.
public void testRetainAll() {
  List<String> list = newArrayList("a", "b", "c", "d", "e");
  assertTrue(Iterators.retainAll(
      list.iterator(), newArrayList("b", "d", "f")));
  assertEquals(newArrayList("b", "d"), list);
  assertFalse(Iterators.retainAll(
      list.iterator(), newArrayList("b", "e", "d")));
  assertEquals(newArrayList("b", "d"), list);
}
// Builds a generated test suite that runs the full List contract against an
// ArrayList whose removeAll/retainAll are rerouted through
// Iterators.removeAll/retainAll — presumably wired into this class's suite()
// elsewhere (not visible here).
@GwtIncompatible("ListTestSuiteBuilder")
@SuppressUnderAndroid
private static Test testsForRemoveAllAndRetainAll() {
  return ListTestSuiteBuilder.using(new TestStringListGenerator() {
        @Override public List<String> create(final String[] elements) {
          final List<String> delegate = newArrayList(elements);
          return new ForwardingList<String>() {
            @Override protected List<String> delegate() {
              return delegate;
            }

            // Reroute the bulk-mutation methods through the code under test.
            @Override public boolean removeAll(Collection<?> c) {
              return Iterators.removeAll(iterator(), c);
            }

            @Override public boolean retainAll(Collection<?> c) {
              return Iterators.retainAll(iterator(), c);
            }
          };
        }
      })
      .named("ArrayList with Iterators.removeAll and retainAll")
      .withFeatures(
          ListFeature.GENERAL_PURPOSE,
          CollectionFeature.ALLOWS_NULL_VALUES,
          CollectionSize.ANY)
      .createTestSuite();
}
// Iterators.consumingIterator removes each element from the backing
// collection as soon as hasNext()/next() consumes it.
public void testConsumingIterator() {
  // Test data
  List<String> list = Lists.newArrayList("a", "b");

  // Test & Verify
  Iterator<String> consumingIterator =
      Iterators.consumingIterator(list.iterator());

  assertEquals("Iterators.consumingIterator(...)", consumingIterator.toString());

  assertThat(list).containsExactly("a", "b").inOrder();

  assertTrue(consumingIterator.hasNext());
  assertThat(list).containsExactly("a", "b").inOrder();
  assertEquals("a", consumingIterator.next());
  assertThat(list).contains("b");

  assertTrue(consumingIterator.hasNext());
  assertEquals("b", consumingIterator.next());
  assertThat(list).isEmpty();

  assertFalse(consumingIterator.hasNext());
}

@GwtIncompatible("?")
// TODO: Figure out why this is failing in GWT.
public void testConsumingIterator_duelingIterators() {
  // Test data
  List<String> list = Lists.newArrayList("a", "b");

  // Test & Verify: two consuming iterators over the same list — the second
  // must observe the first one's structural modification.
  Iterator<String> i1 = Iterators.consumingIterator(list.iterator());
  Iterator<String> i2 = Iterators.consumingIterator(list.iterator());

  i1.next();
  try {
    i2.next();
    fail("Concurrent modification should throw an exception.");
  } catch (ConcurrentModificationException cme) {
    // Pass
  }
}

// Iterators.indexOf returns the index of the first match and leaves the
// iterator positioned just past that match.
public void testIndexOf_consumedData() {
  Iterator<String> iterator =
      Lists.newArrayList("manny", "mo", "jack").iterator();
  assertEquals(1, Iterators.indexOf(iterator, Predicates.equalTo("mo")));
  assertEquals("jack", iterator.next());
  assertFalse(iterator.hasNext());
}

// Only the FIRST match is consumed; a duplicate later in the stream stays.
public void testIndexOf_consumedDataWithDuplicates() {
  Iterator<String> iterator =
      Lists.newArrayList("manny", "mo", "mo", "jack").iterator();
  assertEquals(1, Iterators.indexOf(iterator, Predicates.equalTo("mo")));
  assertEquals("mo", iterator.next());
  assertEquals("jack", iterator.next());
  assertFalse(iterator.hasNext());
}

// No match: -1, and the whole iterator has been consumed.
public void testIndexOf_consumedDataNoMatch() {
  Iterator<String> iterator =
      Lists.newArrayList("manny", "mo", "mo", "jack").iterator();
  assertEquals(-1, Iterators.indexOf(iterator, Predicates.equalTo("bob")));
  assertFalse(iterator.hasNext());
}
// Wrapping an already-unmodifiable iterator should short-circuit and return
// the same instance instead of re-wrapping.
@SuppressWarnings("deprecation") // calls the deprecated no-op overload
public void testUnmodifiableIteratorShortCircuit() {
  Iterator<String> mod = Lists.newArrayList("a", "b", "c").iterator();
  UnmodifiableIterator<String> unmod = Iterators.unmodifiableIterator(mod);
  assertNotSame(mod, unmod);
  assertSame(unmod, Iterators.unmodifiableIterator(unmod));
  // The statically-typed-as-Iterator overload must short-circuit too.
  assertSame(unmod, Iterators.unmodifiableIterator((Iterator<String>) unmod));
}

// Same short-circuit contract for peekingIterator.
@SuppressWarnings("deprecation") // calls the deprecated no-op overload
public void testPeekingIteratorShortCircuit() {
  Iterator<String> nonpeek = Lists.newArrayList("a", "b", "c").iterator();
  PeekingIterator<String> peek = Iterators.peekingIterator(nonpeek);
  assertNotSame(peek, nonpeek);
  assertSame(peek, Iterators.peekingIterator(peek));
  assertSame(peek, Iterators.peekingIterator((Iterator<String>) peek));
}
}
| |
package net.morimekta.providence.model;
/**
* <variant> {
* (<field> ([,;])?)*
* }
*/
@SuppressWarnings("unused")
@javax.annotation.Generated("providence-maven-plugin")
@javax.annotation.concurrent.Immutable
public class MessageType
implements net.morimekta.providence.PMessage<MessageType,MessageType._Field>,
Comparable<MessageType>,
java.io.Serializable,
net.morimekta.providence.serializer.binary.BinaryWriter {
private final static long serialVersionUID = -6520335138583998154L;

// Defaults substituted when the builder did not set the corresponding field.
private final static net.morimekta.providence.model.MessageVariant kDefaultVariant = net.morimekta.providence.model.MessageVariant.STRUCT;
private final static String kDefaultName = "";
private final static java.util.List<net.morimekta.providence.model.FieldType> kDefaultFields = new net.morimekta.providence.descriptor.PList.DefaultBuilder<net.morimekta.providence.model.FieldType>()
        .build();

// All fields are transient: Java serialization is routed through the
// providence binary serializer (see writeObject/readObject below) rather
// than default field-by-field serialization.
private final transient String mDocumentation;
private final transient net.morimekta.providence.model.MessageVariant mVariant;
private final transient String mName;
private final transient java.util.List<net.morimekta.providence.model.FieldType> mFields;
private final transient java.util.Map<String,String> mAnnotations;
private final transient net.morimekta.providence.model.FilePos mStartPos;
private final transient net.morimekta.providence.model.FilePos mEndPos;

// Cached by hashCode(); 0 means "not yet computed".
private volatile transient int tHashCode;

// Transient object used during java deserialization.
private transient MessageType tSerializeInstance;
/**
 * Builds an immutable MessageType from the builder's current state.
 * Unset required/default fields fall back to their kDefault* constants;
 * collections are defensively copied into immutable views.
 */
private MessageType(_Builder builder) {
  mDocumentation = builder.mDocumentation;
  mVariant = builder.mVariant;
  if (builder.isSetName()) {
    mName = builder.mName;
  } else {
    mName = kDefaultName;
  }
  if (builder.isSetFields()) {
    mFields = com.google.common.collect.ImmutableList.copyOf(builder.mFields);
  } else {
    mFields = kDefaultFields;
  }
  if (builder.isSetAnnotations()) {
    // Sorted copy keeps annotation output (asString, serialization) stable.
    mAnnotations = com.google.common.collect.ImmutableSortedMap.copyOf(builder.mAnnotations);
  } else {
    mAnnotations = null;
  }
  // Nested-message fields may still be in builder form; build them here.
  mStartPos = builder.mStartPos_builder != null ? builder.mStartPos_builder.build() : builder.mStartPos;
  mEndPos = builder.mEndPos_builder != null ? builder.mEndPos_builder.build() : builder.mEndPos;
}
// --- Generated accessors. Optional fields have has*() reflecting null-ness;
// --- required/default fields (name, fields) always report present.

public boolean hasDocumentation() {
  return mDocumentation != null;
}

/**
 * @return The <code>documentation</code> value
 */
public String getDocumentation() {
  return mDocumentation;
}

/**
 * @return Optional of the <code>documentation</code> field value.
 */
@javax.annotation.Nonnull
public java.util.Optional<String> optionalDocumentation() {
  return java.util.Optional.ofNullable(mDocumentation);
}

public boolean hasVariant() {
  return mVariant != null;
}

/**
 * Falls back to the default variant (STRUCT) when unset.
 *
 * @return The <code>variant</code> value
 */
public net.morimekta.providence.model.MessageVariant getVariant() {
  return hasVariant() ? mVariant : kDefaultVariant;
}

/**
 * @return Optional of the <code>variant</code> field value.
 */
@javax.annotation.Nonnull
public java.util.Optional<net.morimekta.providence.model.MessageVariant> optionalVariant() {
  return java.util.Optional.ofNullable(mVariant);
}

// Required field: always present (defaults to "" when not set).
public boolean hasName() {
  return true;
}

/**
 * @return The <code>name</code> value
 */
@javax.annotation.Nonnull
public String getName() {
  return mName;
}

public int numFields() {
  return mFields != null ? mFields.size() : 0;
}

// Default-requirement field: always present (defaults to empty list).
public boolean hasFields() {
  return true;
}

/**
 * @return The <code>fields</code> value
 */
@javax.annotation.Nonnull
public java.util.List<net.morimekta.providence.model.FieldType> getFields() {
  return mFields;
}

public int numAnnotations() {
  return mAnnotations != null ? mAnnotations.size() : 0;
}

public boolean hasAnnotations() {
  return mAnnotations != null;
}

/**
 * @return The <code>annotations</code> value (may be null when unset)
 */
public java.util.Map<String,String> getAnnotations() {
  return mAnnotations;
}

/**
 * @return Optional of the <code>annotations</code> field value.
 */
@javax.annotation.Nonnull
public java.util.Optional<java.util.Map<String,String>> optionalAnnotations() {
  return java.util.Optional.ofNullable(mAnnotations);
}

public boolean hasStartPos() {
  return mStartPos != null;
}

/**
 * The start of the definition (position of 'struct' / message type)
 *
 * @return The <code>start_pos</code> value (may be null when unset)
 */
public net.morimekta.providence.model.FilePos getStartPos() {
  return mStartPos;
}

/**
 * The start of the definition (position of 'struct' / message type)
 *
 * @return Optional of the <code>start_pos</code> field value.
 */
@javax.annotation.Nonnull
public java.util.Optional<net.morimekta.providence.model.FilePos> optionalStartPos() {
  return java.util.Optional.ofNullable(mStartPos);
}

public boolean hasEndPos() {
  return mEndPos != null;
}

/**
 * The end of the definition (position of '}')
 *
 * @return The <code>end_pos</code> value (may be null when unset)
 */
public net.morimekta.providence.model.FilePos getEndPos() {
  return mEndPos;
}

/**
 * The end of the definition (position of '}')
 *
 * @return Optional of the <code>end_pos</code> field value.
 */
@javax.annotation.Nonnull
public java.util.Optional<net.morimekta.providence.model.FilePos> optionalEndPos() {
  return java.util.Optional.ofNullable(mEndPos);
}
// Reflective presence check by thrift field id (1..5, 10, 11); ids 3 and 4
// are required/default fields and therefore always present.
@Override
public boolean has(int key) {
  switch(key) {
    case 1: return mDocumentation != null;
    case 2: return mVariant != null;
    case 3: return true;
    case 4: return true;
    case 5: return mAnnotations != null;
    case 10: return mStartPos != null;
    case 11: return mEndPos != null;
    default: return false;
  }
}

// Reflective field access by thrift field id; unknown ids return null.
@Override
@SuppressWarnings("unchecked")
public <T> T get(int key) {
  switch(key) {
    case 1: return (T) mDocumentation;
    case 2: return (T) mVariant;
    case 3: return (T) mName;
    case 4: return (T) mFields;
    case 5: return (T) mAnnotations;
    case 10: return (T) mStartPos;
    case 11: return (T) mEndPos;
    default: return null;
  }
}

// Field-by-field equality; requires the exact same runtime class.
@Override
public boolean equals(Object o) {
  if (o == this) return true;
  if (o == null || !o.getClass().equals(getClass())) return false;
  MessageType other = (MessageType) o;
  return java.util.Objects.equals(mDocumentation, other.mDocumentation) &&
         java.util.Objects.equals(mVariant, other.mVariant) &&
         java.util.Objects.equals(mName, other.mName) &&
         java.util.Objects.equals(mFields, other.mFields) &&
         java.util.Objects.equals(mAnnotations, other.mAnnotations) &&
         java.util.Objects.equals(mStartPos, other.mStartPos) &&
         java.util.Objects.equals(mEndPos, other.mEndPos);
}

// Lazily computed and cached in the volatile tHashCode. The unsynchronized
// check is a benign race: concurrent threads compute the same value. Note a
// hash that happens to be 0 would be recomputed on every call.
@Override
public int hashCode() {
  if (tHashCode == 0) {
    tHashCode = java.util.Objects.hash(
        MessageType.class,
        _Field.DOCUMENTATION, mDocumentation,
        _Field.VARIANT, mVariant,
        _Field.NAME, mName,
        _Field.FIELDS, mFields,
        _Field.ANNOTATIONS, mAnnotations,
        _Field.START_POS, mStartPos,
        _Field.END_POS, mEndPos);
  }
  return tHashCode;
}
@Override
public String toString() {
  return "pmodel.MessageType" + asString();
}

/**
 * Compact single-line rendering: {@code {field:value,...}}. Optional fields
 * are included only when set; the always-present name and fields are always
 * rendered.
 */
@Override
@javax.annotation.Nonnull
public String asString() {
  StringBuilder out = new StringBuilder();
  out.append("{");
  // 'first' tracks whether a comma separator is needed before the next field.
  boolean first = true;
  if (hasDocumentation()) {
    first = false;
    out.append("documentation:")
       .append('\"')
       .append(net.morimekta.util.Strings.escape(mDocumentation))
       .append('\"');
  }
  if (hasVariant()) {
    if (first) first = false;
    else out.append(',');
    out.append("variant:")
       .append(mVariant.asString());
  }
  if (!first) out.append(',');
  out.append("name:")
     .append('\"')
     .append(net.morimekta.util.Strings.escape(mName))
     .append('\"');
  out.append(',');
  out.append("fields:")
     .append(net.morimekta.util.Strings.asString(mFields));
  if (hasAnnotations()) {
    out.append(',');
    out.append("annotations:")
       .append(net.morimekta.util.Strings.asString(mAnnotations));
  }
  if (hasStartPos()) {
    out.append(',');
    out.append("start_pos:")
       .append(mStartPos.asString());
  }
  if (hasEndPos()) {
    out.append(',');
    out.append("end_pos:")
       .append(mEndPos.asString());
  }
  out.append('}');
  return out.toString();
}
/**
 * Field-by-field ordering: for each optional field, absent sorts before
 * present, then values are compared. Collections (fields, annotations) are
 * ordered by hash code only — deterministic within a JVM run but not a
 * semantically meaningful order (generated-code convention kept as-is).
 */
@Override
public int compareTo(MessageType other) {
  int c;

  c = Boolean.compare(mDocumentation != null, other.mDocumentation != null);
  if (c != 0) return c;
  if (mDocumentation != null) {
    c = mDocumentation.compareTo(other.mDocumentation);
    if (c != 0) return c;
  }

  c = Boolean.compare(mVariant != null, other.mVariant != null);
  if (c != 0) return c;
  if (mVariant != null) {
    // BUG FIX: previously compared mVariant.ordinal() against itself
    // (always 0), so the variant never influenced the ordering. Compare
    // against other.mVariant instead.
    c = Integer.compare(mVariant.ordinal(), other.mVariant.ordinal());
    if (c != 0) return c;
  }

  c = mName.compareTo(other.mName);
  if (c != 0) return c;

  c = Integer.compare(mFields.hashCode(), other.mFields.hashCode());
  if (c != 0) return c;

  c = Boolean.compare(mAnnotations != null, other.mAnnotations != null);
  if (c != 0) return c;
  if (mAnnotations != null) {
    c = Integer.compare(mAnnotations.hashCode(), other.mAnnotations.hashCode());
    if (c != 0) return c;
  }

  c = Boolean.compare(mStartPos != null, other.mStartPos != null);
  if (c != 0) return c;
  if (mStartPos != null) {
    c = mStartPos.compareTo(other.mStartPos);
    if (c != 0) return c;
  }

  c = Boolean.compare(mEndPos != null, other.mEndPos != null);
  if (c != 0) return c;
  if (mEndPos != null) {
    c = mEndPos.compareTo(other.mEndPos);
    if (c != 0) return c;
  }

  return 0;
}
// Java serialization hooks: delegate to the providence binary serializer
// (all data fields are transient, so defaultWriteObject writes no state).
private void writeObject(java.io.ObjectOutputStream oos) throws java.io.IOException {
  oos.defaultWriteObject();
  net.morimekta.providence.serializer.BinarySerializer serializer = new net.morimekta.providence.serializer.BinarySerializer(false);
  serializer.serialize(oos, this);
}

private void readObject(java.io.ObjectInputStream ois)
    throws java.io.IOException, ClassNotFoundException {
  ois.defaultReadObject();
  net.morimekta.providence.serializer.BinarySerializer serializer = new net.morimekta.providence.serializer.BinarySerializer(false);
  tSerializeInstance = serializer.deserialize(ois, kDescriptor);
}

// Replace the shell object deserialized above with the fully-built instance.
private Object readResolve() throws java.io.ObjectStreamException {
  return tSerializeInstance;
}

/**
 * Writes this message in thrift-style binary format: per set field, a type
 * byte (11=string/binary, 8=i32, 15=list, 13=map, 12=message), the field id
 * as a short, then the payload; terminated by a 0 stop byte.
 *
 * @return total number of bytes written
 */
@Override
public int writeBinary(net.morimekta.util.io.BigEndianBinaryWriter writer) throws java.io.IOException {
  int length = 0;

  if (hasDocumentation()) {
    length += writer.writeByte((byte) 11);
    length += writer.writeShort((short) 1);
    // Strings are written as UTF-8 with a length prefix.
    net.morimekta.util.Binary tmp_1 = net.morimekta.util.Binary.wrap(mDocumentation.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    length += writer.writeUInt32(tmp_1.length());
    length += writer.writeBinary(tmp_1);
  }

  if (hasVariant()) {
    length += writer.writeByte((byte) 8);
    length += writer.writeShort((short) 2);
    length += writer.writeInt(mVariant.asInteger());
  }

  // name (field 3) and fields (field 4) are always written.
  length += writer.writeByte((byte) 11);
  length += writer.writeShort((short) 3);
  net.morimekta.util.Binary tmp_2 = net.morimekta.util.Binary.wrap(mName.getBytes(java.nio.charset.StandardCharsets.UTF_8));
  length += writer.writeUInt32(tmp_2.length());
  length += writer.writeBinary(tmp_2);

  length += writer.writeByte((byte) 15);
  length += writer.writeShort((short) 4);
  length += writer.writeByte((byte) 12);
  length += writer.writeUInt32(mFields.size());
  for (net.morimekta.providence.model.FieldType entry_3 : mFields) {
    length += net.morimekta.providence.serializer.binary.BinaryFormatUtils.writeMessage(writer, entry_3);
  }

  if (hasAnnotations()) {
    length += writer.writeByte((byte) 13);
    length += writer.writeShort((short) 5);
    // Map header: key type byte, value type byte (both string), entry count.
    length += writer.writeByte((byte) 11);
    length += writer.writeByte((byte) 11);
    length += writer.writeUInt32(mAnnotations.size());
    for (java.util.Map.Entry<String,String> entry_4 : mAnnotations.entrySet()) {
      net.morimekta.util.Binary tmp_5 = net.morimekta.util.Binary.wrap(entry_4.getKey().getBytes(java.nio.charset.StandardCharsets.UTF_8));
      length += writer.writeUInt32(tmp_5.length());
      length += writer.writeBinary(tmp_5);
      net.morimekta.util.Binary tmp_6 = net.morimekta.util.Binary.wrap(entry_4.getValue().getBytes(java.nio.charset.StandardCharsets.UTF_8));
      length += writer.writeUInt32(tmp_6.length());
      length += writer.writeBinary(tmp_6);
    }
  }

  if (hasStartPos()) {
    length += writer.writeByte((byte) 12);
    length += writer.writeShort((short) 10);
    length += net.morimekta.providence.serializer.binary.BinaryFormatUtils.writeMessage(writer, mStartPos);
  }

  if (hasEndPos()) {
    length += writer.writeByte((byte) 12);
    length += writer.writeShort((short) 11);
    length += net.morimekta.providence.serializer.binary.BinaryFormatUtils.writeMessage(writer, mEndPos);
  }

  // Field-stop marker.
  length += writer.writeByte((byte) 0);
  return length;
}
/**
 * Create a mutable builder pre-populated with this message's current
 * field values, for producing a modified copy.
 *
 * @return A new builder based on this message
 */
@javax.annotation.Nonnull
@Override
public _Builder mutate() {
return new _Builder(this);
}
/**
 * Field identifiers for <code>pmodel.MessageType</code>, carrying each
 * field's id, requirement class, name, type provider and default value.
 */
public enum _Field implements net.morimekta.providence.descriptor.PField {
DOCUMENTATION(1, net.morimekta.providence.descriptor.PRequirement.OPTIONAL, "documentation", net.morimekta.providence.descriptor.PPrimitive.STRING.provider(), null),
VARIANT(2, net.morimekta.providence.descriptor.PRequirement.OPTIONAL, "variant", net.morimekta.providence.model.MessageVariant.provider(), new net.morimekta.providence.descriptor.PDefaultValueProvider<>(kDefaultVariant)),
NAME(3, net.morimekta.providence.descriptor.PRequirement.REQUIRED, "name", net.morimekta.providence.descriptor.PPrimitive.STRING.provider(), null),
FIELDS(4, net.morimekta.providence.descriptor.PRequirement.DEFAULT, "fields", net.morimekta.providence.descriptor.PList.provider(net.morimekta.providence.model.FieldType.provider()), null),
ANNOTATIONS(5, net.morimekta.providence.descriptor.PRequirement.OPTIONAL, "annotations", net.morimekta.providence.descriptor.PMap.sortedProvider(net.morimekta.providence.descriptor.PPrimitive.STRING.provider(),net.morimekta.providence.descriptor.PPrimitive.STRING.provider()), null),
START_POS(10, net.morimekta.providence.descriptor.PRequirement.OPTIONAL, "start_pos", net.morimekta.providence.model.FilePos.provider(), null),
END_POS(11, net.morimekta.providence.descriptor.PRequirement.OPTIONAL, "end_pos", net.morimekta.providence.model.FilePos.provider(), null),
;
// Per-field metadata, set once by the constructor.
private final int mId;
private final net.morimekta.providence.descriptor.PRequirement mRequired;
private final String mName;
private final net.morimekta.providence.descriptor.PDescriptorProvider mTypeProvider;
private final net.morimekta.providence.descriptor.PValueProvider<?> mDefaultValue;
_Field(int id, net.morimekta.providence.descriptor.PRequirement required, String name, net.morimekta.providence.descriptor.PDescriptorProvider typeProvider, net.morimekta.providence.descriptor.PValueProvider<?> defaultValue) {
mId = id;
mRequired = required;
mName = name;
mTypeProvider = typeProvider;
mDefaultValue = defaultValue;
}
@Override
public int getId() { return mId; }
@Override
public net.morimekta.providence.descriptor.PRequirement getRequirement() { return mRequired; }
@Override
public net.morimekta.providence.descriptor.PDescriptor getDescriptor() { return mTypeProvider.descriptor(); }
@Override
public String getName() { return mName; }
@Override
public boolean hasDefaultValue() { return mDefaultValue != null; }
@Override
public Object getDefaultValue() {
return hasDefaultValue() ? mDefaultValue.get() : null;
}
@Override
public String toString() {
return net.morimekta.providence.descriptor.PField.asString(this);
}
/**
 * @param id Field id
 * @return The identified field or null
 */
public static _Field findById(int id) {
switch (id) {
case 1: return _Field.DOCUMENTATION;
case 2: return _Field.VARIANT;
case 3: return _Field.NAME;
case 4: return _Field.FIELDS;
case 5: return _Field.ANNOTATIONS;
case 10: return _Field.START_POS;
case 11: return _Field.END_POS;
}
return null;
}
/**
 * @param name Field name
 * @return The named field or null
 */
public static _Field findByName(String name) {
switch (name) {
case "documentation": return _Field.DOCUMENTATION;
case "variant": return _Field.VARIANT;
case "name": return _Field.NAME;
case "fields": return _Field.FIELDS;
case "annotations": return _Field.ANNOTATIONS;
case "start_pos": return _Field.START_POS;
case "end_pos": return _Field.END_POS;
}
return null;
}
/**
 * @param id Field id
 * @return The identified field
 * @throws IllegalArgumentException If no such field
 */
public static _Field fieldForId(int id) {
_Field field = findById(id);
if (field == null) {
throw new IllegalArgumentException("No such field id " + id + " in pmodel.MessageType");
}
return field;
}
/**
 * @param name Field name
 * @return The named field
 * @throws IllegalArgumentException If no such field
 */
public static _Field fieldForName(String name) {
_Field field = findByName(name);
if (field == null) {
throw new IllegalArgumentException("No such field \"" + name + "\" in pmodel.MessageType");
}
return field;
}
}
/**
 * Get a struct descriptor provider for <code>pmodel.MessageType</code>.
 *
 * @return The descriptor provider
 */
@javax.annotation.Nonnull
public static net.morimekta.providence.descriptor.PStructDescriptorProvider<MessageType,_Field> provider() {
return new _Provider();
}
/**
 * Get the struct descriptor for <code>pmodel.MessageType</code>.
 *
 * @return The shared, static descriptor instance
 */
@Override
@javax.annotation.Nonnull
public net.morimekta.providence.descriptor.PStructDescriptor<MessageType,_Field> descriptor() {
return kDescriptor;
}
/** Shared descriptor for pmodel.MessageType; assigned once in the static initializer below. */
public static final net.morimekta.providence.descriptor.PStructDescriptor<MessageType,_Field> kDescriptor;
/**
 * Concrete struct descriptor: binds the program ("pmodel") and struct
 * name ("MessageType") to the builder factory and field lookups.
 */
private static class _Descriptor
extends net.morimekta.providence.descriptor.PStructDescriptor<MessageType,_Field> {
public _Descriptor() {
super("pmodel", "MessageType", _Builder::new, false);
}
@Override
@javax.annotation.Nonnull
public _Field[] getFields() {
return _Field.values();
}
@Override
@javax.annotation.Nullable
public _Field findFieldByName(String name) {
return _Field.findByName(name);
}
@Override
@javax.annotation.Nullable
public _Field findFieldById(int id) {
return _Field.findById(id);
}
}
static {
kDescriptor = new _Descriptor();
}
/** Provider wrapper handing out the shared kDescriptor instance. */
private final static class _Provider extends net.morimekta.providence.descriptor.PStructDescriptorProvider<MessageType,_Field> {
@Override
public net.morimekta.providence.descriptor.PStructDescriptor<MessageType,_Field> descriptor() {
return kDescriptor;
}
}
/**
 * Make a <code>pmodel.MessageType</code> builder.
 *
 * @return The new, empty builder instance.
 */
public static _Builder builder() {
return new _Builder();
}
/**
* <variant> {
* (<field> ([,;])?)*
* }
*/
public static class _Builder
extends net.morimekta.providence.PMessageBuilder<MessageType,_Field>
implements net.morimekta.providence.serializer.binary.BinaryReader {
private java.util.BitSet optionals;
private java.util.BitSet modified;
private String mDocumentation;
private net.morimekta.providence.model.MessageVariant mVariant;
private String mName;
private java.util.List<net.morimekta.providence.model.FieldType> mFields;
private java.util.Map<String,String> mAnnotations;
private net.morimekta.providence.model.FilePos mStartPos;
private net.morimekta.providence.model.FilePos._Builder mStartPos_builder;
private net.morimekta.providence.model.FilePos mEndPos;
private net.morimekta.providence.model.FilePos._Builder mEndPos_builder;
/**
* Make a pmodel.MessageType builder instance.
*/
public _Builder() {
optionals = new java.util.BitSet(7);
modified = new java.util.BitSet(7);
mName = kDefaultName;
mFields = kDefaultFields;
}
/**
* Make a mutating builder off a base pmodel.MessageType.
*
* @param base The base MessageType
*/
public _Builder(MessageType base) {
this();
if (base.hasDocumentation()) {
optionals.set(0);
mDocumentation = base.mDocumentation;
}
if (base.hasVariant()) {
optionals.set(1);
mVariant = base.mVariant;
}
optionals.set(2);
mName = base.mName;
optionals.set(3);
mFields = base.mFields;
if (base.hasAnnotations()) {
optionals.set(4);
mAnnotations = base.mAnnotations;
}
if (base.hasStartPos()) {
optionals.set(5);
mStartPos = base.mStartPos;
}
if (base.hasEndPos()) {
optionals.set(6);
mEndPos = base.mEndPos;
}
}
@javax.annotation.Nonnull
@Override
public _Builder merge(MessageType from) {
if (from.hasDocumentation()) {
optionals.set(0);
modified.set(0);
mDocumentation = from.getDocumentation();
}
if (from.hasVariant()) {
optionals.set(1);
modified.set(1);
mVariant = from.getVariant();
}
optionals.set(2);
modified.set(2);
mName = from.getName();
optionals.set(3);
modified.set(3);
mFields = from.getFields();
if (from.hasAnnotations()) {
optionals.set(4);
modified.set(4);
mutableAnnotations().putAll(from.getAnnotations());
}
if (from.hasStartPos()) {
optionals.set(5);
modified.set(5);
if (mStartPos_builder != null) {
mStartPos_builder.merge(from.getStartPos());
} else if (mStartPos != null) {
mStartPos_builder = mStartPos.mutate().merge(from.getStartPos());
mStartPos = null;
} else {
mStartPos = from.getStartPos();
}
}
if (from.hasEndPos()) {
optionals.set(6);
modified.set(6);
if (mEndPos_builder != null) {
mEndPos_builder.merge(from.getEndPos());
} else if (mEndPos != null) {
mEndPos_builder = mEndPos.mutate().merge(from.getEndPos());
mEndPos = null;
} else {
mEndPos = from.getEndPos();
}
}
return this;
}
/**
* Set the <code>documentation</code> field value.
*
* @param value The new value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder setDocumentation(String value) {
if (value == null) {
return clearDocumentation();
}
optionals.set(0);
modified.set(0);
mDocumentation = value;
return this;
}
/**
* Checks for presence of the <code>documentation</code> field.
*
* @return True if documentation has been set.
*/
public boolean isSetDocumentation() {
return optionals.get(0);
}
/**
* Checks if the <code>documentation</code> field has been modified since the
* builder was created.
*
* @return True if documentation has been modified.
*/
public boolean isModifiedDocumentation() {
return modified.get(0);
}
/**
* Clear the <code>documentation</code> field.
*
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder clearDocumentation() {
optionals.clear(0);
modified.set(0);
mDocumentation = null;
return this;
}
/**
* @return The <code>documentation</code> field value
*/
public String getDocumentation() {
return mDocumentation;
}
/**
* Set the <code>variant</code> field value.
*
* @param value The new value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder setVariant(net.morimekta.providence.model.MessageVariant value) {
if (value == null) {
return clearVariant();
}
optionals.set(1);
modified.set(1);
mVariant = value;
return this;
}
/**
* Checks for presence of the <code>variant</code> field.
*
* @return True if variant has been set.
*/
public boolean isSetVariant() {
return optionals.get(1);
}
/**
* Checks if the <code>variant</code> field has been modified since the
* builder was created.
*
* @return True if variant has been modified.
*/
public boolean isModifiedVariant() {
return modified.get(1);
}
/**
* Clear the <code>variant</code> field.
*
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder clearVariant() {
optionals.clear(1);
modified.set(1);
mVariant = null;
return this;
}
/**
* @return The <code>variant</code> field value
*/
public net.morimekta.providence.model.MessageVariant getVariant() {
return isSetVariant() ? mVariant : kDefaultVariant;
}
/**
* Set the <code>name</code> field value.
*
* @param value The new value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder setName(String value) {
if (value == null) {
return clearName();
}
optionals.set(2);
modified.set(2);
mName = value;
return this;
}
/**
* Checks for presence of the <code>name</code> field.
*
* @return True if name has been set.
*/
public boolean isSetName() {
return optionals.get(2);
}
/**
* Checks if the <code>name</code> field has been modified since the
* builder was created.
*
* @return True if name has been modified.
*/
public boolean isModifiedName() {
return modified.get(2);
}
/**
* Clear the <code>name</code> field.
*
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder clearName() {
optionals.clear(2);
modified.set(2);
mName = kDefaultName;
return this;
}
/**
* @return The <code>name</code> field value
*/
public String getName() {
return mName;
}
/**
* Set the <code>fields</code> field value.
*
* @param value The new value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder setFields(java.util.Collection<net.morimekta.providence.model.FieldType> value) {
if (value == null) {
return clearFields();
}
optionals.set(3);
modified.set(3);
mFields = com.google.common.collect.ImmutableList.copyOf(value);
return this;
}
/**
* Adds entries to the <code>fields</code> list.
*
* @param values The added value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder addToFields(net.morimekta.providence.model.FieldType... values) {
optionals.set(3);
modified.set(3);
java.util.List<net.morimekta.providence.model.FieldType> _container = mutableFields();
for (net.morimekta.providence.model.FieldType item : values) {
_container.add(item);
}
return this;
}
/**
* Checks for presence of the <code>fields</code> field.
*
* @return True if fields has been set.
*/
public boolean isSetFields() {
return optionals.get(3);
}
/**
* Checks if the <code>fields</code> field has been modified since the
* builder was created.
*
* @return True if fields has been modified.
*/
public boolean isModifiedFields() {
return modified.get(3);
}
/**
* Clear the <code>fields</code> field.
*
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder clearFields() {
optionals.clear(3);
modified.set(3);
mFields = kDefaultFields;
return this;
}
/**
* Get the builder for the contained <code>fields</code> message field.
*
* @return The field message builder
*/
@javax.annotation.Nonnull
/**
* @return The mutable <code>fields</code> container
*/
public java.util.List<net.morimekta.providence.model.FieldType> mutableFields() {
optionals.set(3);
modified.set(3);
if (mFields == null) {
mFields = new java.util.ArrayList<>();
} else if (!(mFields instanceof java.util.ArrayList)) {
mFields = new java.util.ArrayList<>(mFields);
}
return mFields;
}
/**
* Set the <code>annotations</code> field value.
*
* @param value The new value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder setAnnotations(java.util.Map<String,String> value) {
if (value == null) {
return clearAnnotations();
}
optionals.set(4);
modified.set(4);
mAnnotations = com.google.common.collect.ImmutableSortedMap.copyOf(value);
return this;
}
/**
* Adds a mapping to the <code>annotations</code> map.
*
* @param key The inserted key
* @param value The inserted value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder putInAnnotations(String key, String value) {
optionals.set(4);
modified.set(4);
mutableAnnotations().put(key, value);
return this;
}
/**
* Checks for presence of the <code>annotations</code> field.
*
* @return True if annotations has been set.
*/
public boolean isSetAnnotations() {
return optionals.get(4);
}
/**
* Checks if the <code>annotations</code> field has been modified since the
* builder was created.
*
* @return True if annotations has been modified.
*/
public boolean isModifiedAnnotations() {
return modified.get(4);
}
/**
* Clear the <code>annotations</code> field.
*
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder clearAnnotations() {
optionals.clear(4);
modified.set(4);
mAnnotations = null;
return this;
}
/**
* Get the builder for the contained <code>annotations</code> message field.
*
* @return The field message builder
*/
@javax.annotation.Nonnull
/**
* @return The mutable <code>annotations</code> container
*/
public java.util.Map<String,String> mutableAnnotations() {
optionals.set(4);
modified.set(4);
if (mAnnotations == null) {
mAnnotations = new java.util.TreeMap<>();
} else if (!(mAnnotations instanceof java.util.TreeMap)) {
mAnnotations = new java.util.TreeMap<>(mAnnotations);
}
return mAnnotations;
}
/**
* Set the <code>start_pos</code> field value.
* <p>
* The start of the definition (position of 'struct' / message type)
*
* @param value The new value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder setStartPos(net.morimekta.providence.model.FilePos value) {
if (value == null) {
return clearStartPos();
}
optionals.set(5);
modified.set(5);
mStartPos = value;
mStartPos_builder = null;
return this;
}
/**
* Set the <code>start_pos</code> field value.
* <p>
* The start of the definition (position of 'struct' / message type)
*
* @param builder builder for the new value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder setStartPos(net.morimekta.providence.model.FilePos._Builder builder) {
return setStartPos(builder == null ? null : builder.build());
}
/**
* Checks for presence of the <code>start_pos</code> field.
*
* @return True if start_pos has been set.
*/
public boolean isSetStartPos() {
return optionals.get(5);
}
/**
* Checks if the <code>start_pos</code> field has been modified since the
* builder was created.
*
* @return True if start_pos has been modified.
*/
public boolean isModifiedStartPos() {
return modified.get(5);
}
/**
* Clear the <code>start_pos</code> field.
*
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder clearStartPos() {
optionals.clear(5);
modified.set(5);
mStartPos = null;
mStartPos_builder = null;
return this;
}
/**
* Get the builder for the contained <code>start_pos</code> message field.
* <p>
* The start of the definition (position of 'struct' / message type)
*
* @return The field message builder
*/
@javax.annotation.Nonnull
public net.morimekta.providence.model.FilePos._Builder mutableStartPos() {
optionals.set(5);
modified.set(5);
if (mStartPos != null) {
mStartPos_builder = mStartPos.mutate();
mStartPos = null;
} else if (mStartPos_builder == null) {
mStartPos_builder = net.morimekta.providence.model.FilePos.builder();
}
return mStartPos_builder;
}
/**
* The start of the definition (position of 'struct' / message type)
*
* @return The field value
*/
public net.morimekta.providence.model.FilePos getStartPos() {
if (mStartPos_builder != null) {
return mStartPos_builder.build();
}
return mStartPos;
}
/**
* Set the <code>end_pos</code> field value.
* <p>
* The end of the definition (position of '}')
*
* @param value The new value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder setEndPos(net.morimekta.providence.model.FilePos value) {
if (value == null) {
return clearEndPos();
}
optionals.set(6);
modified.set(6);
mEndPos = value;
mEndPos_builder = null;
return this;
}
/**
* Set the <code>end_pos</code> field value.
* <p>
* The end of the definition (position of '}')
*
* @param builder builder for the new value
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder setEndPos(net.morimekta.providence.model.FilePos._Builder builder) {
return setEndPos(builder == null ? null : builder.build());
}
/**
* Checks for presence of the <code>end_pos</code> field.
*
* @return True if end_pos has been set.
*/
public boolean isSetEndPos() {
return optionals.get(6);
}
/**
* Checks if the <code>end_pos</code> field has been modified since the
* builder was created.
*
* @return True if end_pos has been modified.
*/
public boolean isModifiedEndPos() {
return modified.get(6);
}
/**
* Clear the <code>end_pos</code> field.
*
* @return The builder
*/
@javax.annotation.Nonnull
public _Builder clearEndPos() {
optionals.clear(6);
modified.set(6);
mEndPos = null;
mEndPos_builder = null;
return this;
}
/**
* Get the builder for the contained <code>end_pos</code> message field.
* <p>
* The end of the definition (position of '}')
*
* @return The field message builder
*/
@javax.annotation.Nonnull
public net.morimekta.providence.model.FilePos._Builder mutableEndPos() {
optionals.set(6);
modified.set(6);
if (mEndPos != null) {
mEndPos_builder = mEndPos.mutate();
mEndPos = null;
} else if (mEndPos_builder == null) {
mEndPos_builder = net.morimekta.providence.model.FilePos.builder();
}
return mEndPos_builder;
}
/**
* The end of the definition (position of '}')
*
* @return The field value
*/
public net.morimekta.providence.model.FilePos getEndPos() {
if (mEndPos_builder != null) {
return mEndPos_builder.build();
}
return mEndPos;
}
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (o == null || !o.getClass().equals(getClass())) return false;
MessageType._Builder other = (MessageType._Builder) o;
return java.util.Objects.equals(optionals, other.optionals) &&
java.util.Objects.equals(mDocumentation, other.mDocumentation) &&
java.util.Objects.equals(mVariant, other.mVariant) &&
java.util.Objects.equals(mName, other.mName) &&
java.util.Objects.equals(mFields, other.mFields) &&
java.util.Objects.equals(mAnnotations, other.mAnnotations) &&
java.util.Objects.equals(getStartPos(), other.getStartPos()) &&
java.util.Objects.equals(getEndPos(), other.getEndPos());
}
@Override
public int hashCode() {
return java.util.Objects.hash(
MessageType.class, optionals,
_Field.DOCUMENTATION, mDocumentation,
_Field.VARIANT, mVariant,
_Field.NAME, mName,
_Field.FIELDS, mFields,
_Field.ANNOTATIONS, mAnnotations,
_Field.START_POS, getStartPos(),
_Field.END_POS, getEndPos());
}
@Override
@SuppressWarnings("unchecked")
public net.morimekta.providence.PMessageBuilder mutator(int key) {
switch (key) {
case 10: return mutableStartPos();
case 11: return mutableEndPos();
default: throw new IllegalArgumentException("Not a message field ID: " + key);
}
}
@javax.annotation.Nonnull
@Override
@SuppressWarnings("unchecked")
public _Builder set(int key, Object value) {
if (value == null) return clear(key);
switch (key) {
case 1: setDocumentation((String) value); break;
case 2: setVariant((net.morimekta.providence.model.MessageVariant) value); break;
case 3: setName((String) value); break;
case 4: setFields((java.util.List<net.morimekta.providence.model.FieldType>) value); break;
case 5: setAnnotations((java.util.Map<String,String>) value); break;
case 10: setStartPos((net.morimekta.providence.model.FilePos) value); break;
case 11: setEndPos((net.morimekta.providence.model.FilePos) value); break;
default: break;
}
return this;
}
@Override
public boolean isSet(int key) {
switch (key) {
case 1: return optionals.get(0);
case 2: return optionals.get(1);
case 3: return optionals.get(2);
case 4: return optionals.get(3);
case 5: return optionals.get(4);
case 10: return optionals.get(5);
case 11: return optionals.get(6);
default: break;
}
return false;
}
@Override
public boolean isModified(int key) {
switch (key) {
case 1: return modified.get(0);
case 2: return modified.get(1);
case 3: return modified.get(2);
case 4: return modified.get(3);
case 5: return modified.get(4);
case 10: return modified.get(5);
case 11: return modified.get(6);
default: break;
}
return false;
}
@Override
public _Builder addTo(int key, Object value) {
switch (key) {
case 4: addToFields((net.morimekta.providence.model.FieldType) value); break;
default: break;
}
return this;
}
@javax.annotation.Nonnull
@Override
public _Builder clear(int key) {
switch (key) {
case 1: clearDocumentation(); break;
case 2: clearVariant(); break;
case 3: clearName(); break;
case 4: clearFields(); break;
case 5: clearAnnotations(); break;
case 10: clearStartPos(); break;
case 11: clearEndPos(); break;
default: break;
}
return this;
}
@Override
public boolean valid() {
return optionals.get(2);
}
@Override
public void validate() {
if (!valid()) {
java.util.ArrayList<String> missing = new java.util.ArrayList<>();
if (!optionals.get(2)) {
missing.add("name");
}
throw new java.lang.IllegalStateException(
"Missing required fields " +
String.join(",", missing) +
" in message pmodel.MessageType");
}
}
@javax.annotation.Nonnull
@Override
public net.morimekta.providence.descriptor.PStructDescriptor<MessageType,_Field> descriptor() {
return kDescriptor;
}
@Override
public void readBinary(net.morimekta.util.io.BigEndianBinaryReader reader, boolean strict) throws java.io.IOException {
byte type = reader.expectByte();
while (type != 0) {
int field = reader.expectShort();
switch (field) {
case 1: {
if (type == 11) {
int len_1 = reader.expectUInt32();
mDocumentation = new String(reader.expectBytes(len_1), java.nio.charset.StandardCharsets.UTF_8);
optionals.set(0);
} else {
throw new net.morimekta.providence.serializer.SerializerException("Wrong type " + net.morimekta.providence.serializer.binary.BinaryType.asString(type) + " for pmodel.MessageType.documentation, should be struct(12)");
}
break;
}
case 2: {
if (type == 8) {
mVariant = net.morimekta.providence.model.MessageVariant.findById(reader.expectInt());
optionals.set(1);
} else {
throw new net.morimekta.providence.serializer.SerializerException("Wrong type " + net.morimekta.providence.serializer.binary.BinaryType.asString(type) + " for pmodel.MessageType.variant, should be struct(12)");
}
break;
}
case 3: {
if (type == 11) {
int len_2 = reader.expectUInt32();
mName = new String(reader.expectBytes(len_2), java.nio.charset.StandardCharsets.UTF_8);
optionals.set(2);
} else {
throw new net.morimekta.providence.serializer.SerializerException("Wrong type " + net.morimekta.providence.serializer.binary.BinaryType.asString(type) + " for pmodel.MessageType.name, should be struct(12)");
}
break;
}
case 4: {
if (type == 15) {
net.morimekta.providence.descriptor.PList.DefaultBuilder<net.morimekta.providence.model.FieldType> b_3 = new net.morimekta.providence.descriptor.PList.DefaultBuilder<>();
byte t_5 = reader.expectByte();
if (t_5 == 12) {
final int len_4 = reader.expectUInt32();
for (int i_6 = 0; i_6 < len_4; ++i_6) {
net.morimekta.providence.model.FieldType key_7 = net.morimekta.providence.serializer.binary.BinaryFormatUtils.readMessage(reader, net.morimekta.providence.model.FieldType.kDescriptor, strict);
b_3.add(key_7);
}
mFields = b_3.build();
} else {
throw new net.morimekta.providence.serializer.SerializerException("Wrong item type " + net.morimekta.providence.serializer.binary.BinaryType.asString(t_5) + " for pmodel.MessageType.fields, should be struct(12)");
}
optionals.set(3);
} else {
throw new net.morimekta.providence.serializer.SerializerException("Wrong type " + net.morimekta.providence.serializer.binary.BinaryType.asString(type) + " for pmodel.MessageType.fields, should be struct(12)");
}
break;
}
case 5: {
if (type == 13) {
net.morimekta.providence.descriptor.PMap.SortedBuilder<String,String> b_8 = new net.morimekta.providence.descriptor.PMap.SortedBuilder<>();
byte t_10 = reader.expectByte();
byte t_11 = reader.expectByte();
if (t_10 == 11 && t_11 == 11) {
final int len_9 = reader.expectUInt32();
for (int i_12 = 0; i_12 < len_9; ++i_12) {
int len_15 = reader.expectUInt32();
String key_13 = new String(reader.expectBytes(len_15), java.nio.charset.StandardCharsets.UTF_8);
int len_16 = reader.expectUInt32();
String val_14 = new String(reader.expectBytes(len_16), java.nio.charset.StandardCharsets.UTF_8);
b_8.put(key_13, val_14);
}
mAnnotations = b_8.build();
} else {
throw new net.morimekta.providence.serializer.SerializerException(
"Wrong key type " + net.morimekta.providence.serializer.binary.BinaryType.asString(t_10) +
" or value type " + net.morimekta.providence.serializer.binary.BinaryType.asString(t_11) +
" for pmodel.MessageType.annotations, should be string(11) and string(11)");
}
optionals.set(4);
} else {
throw new net.morimekta.providence.serializer.SerializerException("Wrong type " + net.morimekta.providence.serializer.binary.BinaryType.asString(type) + " for pmodel.MessageType.annotations, should be struct(12)");
}
break;
}
case 10: {
if (type == 12) {
mStartPos = net.morimekta.providence.serializer.binary.BinaryFormatUtils.readMessage(reader, net.morimekta.providence.model.FilePos.kDescriptor, strict);
optionals.set(5);
} else {
throw new net.morimekta.providence.serializer.SerializerException("Wrong type " + net.morimekta.providence.serializer.binary.BinaryType.asString(type) + " for pmodel.MessageType.start_pos, should be struct(12)");
}
break;
}
case 11: {
if (type == 12) {
mEndPos = net.morimekta.providence.serializer.binary.BinaryFormatUtils.readMessage(reader, net.morimekta.providence.model.FilePos.kDescriptor, strict);
optionals.set(6);
} else {
throw new net.morimekta.providence.serializer.SerializerException("Wrong type " + net.morimekta.providence.serializer.binary.BinaryType.asString(type) + " for pmodel.MessageType.end_pos, should be struct(12)");
}
break;
}
default: {
net.morimekta.providence.serializer.binary.BinaryFormatUtils.readFieldValue(reader, new net.morimekta.providence.serializer.binary.BinaryFormatUtils.FieldInfo(field, type), null, false);
break;
}
}
type = reader.expectByte();
}
}
@Override
public MessageType build() {
return new MessageType(this);
}
}
}
| |
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.notedb;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_ASSIGNEE;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_BRANCH;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_CHANGE_ID;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_COMMIT;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_CURRENT;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_GROUPS;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_HASHTAGS;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_LABEL;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_PATCH_SET;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_PATCH_SET_DESCRIPTION;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_PRIVATE;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_READ_ONLY_UNTIL;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_REAL_USER;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_REVERT_OF;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_STATUS;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_SUBJECT;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_SUBMISSION_ID;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_SUBMITTED_WITH;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_TAG;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_TOPIC;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_WORK_IN_PROGRESS;
import static com.google.gerrit.server.notedb.NoteDbTable.CHANGES;
import static java.util.stream.Collectors.joining;
import com.google.auto.value.AutoValue;
import com.google.common.base.Enums;
import com.google.common.base.Splitter;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.MultimapBuilder;
import com.google.common.collect.Sets;
import com.google.common.collect.Table;
import com.google.common.collect.Tables;
import com.google.common.primitives.Ints;
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.common.data.SubmitRecord;
import com.google.gerrit.metrics.Timer1;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.ChangeMessage;
import com.google.gerrit.reviewdb.client.Comment;
import com.google.gerrit.reviewdb.client.LabelId;
import com.google.gerrit.reviewdb.client.PatchLineComment;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.reviewdb.client.RevId;
import com.google.gerrit.reviewdb.server.ReviewDbUtil;
import com.google.gerrit.server.ReviewerByEmailSet;
import com.google.gerrit.server.ReviewerSet;
import com.google.gerrit.server.ReviewerStatusUpdate;
import com.google.gerrit.server.mail.Address;
import com.google.gerrit.server.notedb.ChangeNotesCommit.ChangeNotesRevWalk;
import com.google.gerrit.server.util.LabelVote;
import java.io.IOException;
import java.nio.charset.Charset;
import java.sql.Timestamp;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.Function;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.errors.InvalidObjectIdException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.notes.NoteMap;
import org.eclipse.jgit.revwalk.FooterKey;
import org.eclipse.jgit.util.GitDateParser;
import org.eclipse.jgit.util.RawParseUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Parses change state out of a NoteDb meta branch: walks the commits of the change's meta ref
 * from the tip backwards, accumulating patch sets, approvals, reviewers, messages and other
 * fields from commit footers and note blobs, and produces a {@code ChangeNotesState}.
 *
 * <p>Commits are visited newest-first, so most single-valued fields keep the first (i.e. most
 * recent) value seen and ignore older values.
 */
class ChangeNotesParser {
  private static final Logger log = LoggerFactory.getLogger(ChangeNotesParser.class);

  // Sentinel RevId indicating a mutable field on a patch set was parsed, but
  // the parser does not yet know its commit SHA-1.
  private static final RevId PARTIAL_PATCH_SET = new RevId("INVALID PARTIAL PATCH SET");

  /** Key identifying a single vote: (patch set, account, label name). */
  @AutoValue
  abstract static class ApprovalKey {
    abstract PatchSet.Id psId();

    abstract Account.Id accountId();

    abstract String label();

    private static ApprovalKey create(PatchSet.Id psId, Account.Id accountId, String label) {
      return new AutoValue_ChangeNotesParser_ApprovalKey(psId, accountId, label);
    }
  }

  // Private final members initialized in the constructor.
  private final ChangeNoteUtil noteUtil;
  private final NoteDbMetrics metrics;
  private final Change.Id id;
  private final ObjectId tip; // Commit at which the parse starts (tip of the meta ref).
  private final ChangeNotesRevWalk walk;

  // Private final but mutable members initialized in the constructor and filled
  // in during the parsing process.
  private final Table<Account.Id, ReviewerStateInternal, Timestamp> reviewers;
  private final Table<Address, ReviewerStateInternal, Timestamp> reviewersByEmail;
  private final List<Account.Id> allPastReviewers;
  private final List<ReviewerStatusUpdate> reviewerUpdates;
  private final List<SubmitRecord> submitRecords;
  private final ListMultimap<RevId, Comment> comments;
  private final Map<PatchSet.Id, PatchSet> patchSets;
  private final Set<PatchSet.Id> deletedPatchSets;
  private final Map<PatchSet.Id, PatchSetState> patchSetStates;
  private final List<PatchSet.Id> currentPatchSets; // In parse order, i.e. newest first.
  private final Map<ApprovalKey, PatchSetApproval> approvals;
  // Approvals parsed since the most recent Status footer; cleared by parseStatus().
  private final List<PatchSetApproval> bufferedApprovals;
  private final List<ChangeMessage> allChangeMessages;
  private final ListMultimap<PatchSet.Id, ChangeMessage> changeMessagesByPatchSet;

  // Non-final private members filled in during the parsing process.
  private String branch;
  private Change.Status status;
  private String topic;
  // null = no Assignee footer ever seen; empty = assignee was explicitly removed.
  private Optional<Account.Id> assignee;
  private List<Account.Id> pastAssignees;
  private Set<String> hashtags;
  private Timestamp createdOn;
  private Timestamp lastUpdatedOn;
  private Account.Id ownerId;
  private String changeId;
  private String subject;
  private String originalSubject;
  private String submissionId;
  private String tag; // Tag footer of the commit currently being parsed, if any.
  private RevisionNoteMap<ChangeRevisionNote> revisionNoteMap;
  private Timestamp readOnlyUntil;
  private Boolean isPrivate;
  private Boolean workInProgress;
  private Boolean previousWorkInProgressFooter;
  private Boolean hasReviewStarted;
  private ReviewerSet pendingReviewers;
  private ReviewerByEmailSet pendingReviewersByEmail;
  private Change.Id revertOf;
  /**
   * Creates a parser for a single change's meta ref.
   *
   * @param changeId the change being parsed
   * @param tip tip commit of the change's meta ref; the walk starts here
   * @param walk rev walk used to traverse the meta commits; reset by {@code parseAll()}
   * @param noteUtil helper for parsing person idents and revision notes
   * @param metrics sink for recording NoteDb parse latency
   */
  ChangeNotesParser(
      Change.Id changeId,
      ObjectId tip,
      ChangeNotesRevWalk walk,
      ChangeNoteUtil noteUtil,
      NoteDbMetrics metrics) {
    this.id = changeId;
    this.tip = tip;
    this.walk = walk;
    this.noteUtil = noteUtil;
    this.metrics = metrics;
    approvals = new LinkedHashMap<>();
    bufferedApprovals = new ArrayList<>();
    reviewers = HashBasedTable.create();
    reviewersByEmail = HashBasedTable.create();
    pendingReviewers = ReviewerSet.empty();
    pendingReviewersByEmail = ReviewerByEmailSet.empty();
    allPastReviewers = new ArrayList<>();
    reviewerUpdates = new ArrayList<>();
    submitRecords = Lists.newArrayListWithExpectedSize(1);
    allChangeMessages = new ArrayList<>();
    changeMessagesByPatchSet = LinkedListMultimap.create();
    comments = MultimapBuilder.hashKeys().arrayListValues().build();
    patchSets = new HashMap<>();
    deletedPatchSets = new HashSet<>();
    patchSetStates = new HashMap<>();
    currentPatchSets = new ArrayList<>();
  }
  /**
   * Walks the meta branch from the tip backwards, parsing every commit, then reads the revision
   * notes and post-processes the accumulated state.
   *
   * @return the parsed state of the change
   * @throws ConfigInvalidException if a footer is missing, duplicated or malformed
   * @throws IOException on errors reading objects from the repository
   */
  ChangeNotesState parseAll() throws ConfigInvalidException, IOException {
    // Don't include initial parse in timer, as this might do more I/O to page
    // in the block containing most commits. Later reads are not guaranteed to
    // avoid I/O, but often should.
    walk.reset();
    walk.markStart(walk.parseCommit(tip));
    try (Timer1.Context timer = metrics.parseLatency.start(CHANGES)) {
      ChangeNotesCommit commit;
      while ((commit = walk.next()) != null) {
        parse(commit);
      }
      if (hasReviewStarted == null) {
        if (previousWorkInProgressFooter == null) {
          // No Work-in-progress footer was ever seen: review started immediately.
          hasReviewStarted = true;
        } else {
          // previousWorkInProgressFooter now holds the oldest parsed commit's footer;
          // review has started unless that first commit created the change as WIP.
          hasReviewStarted = !previousWorkInProgressFooter;
        }
      }
      parseNotes();
      allPastReviewers.addAll(reviewers.rowKeySet());
      pruneReviewers();
      pruneReviewersByEmail();
      updatePatchSetStates();
      checkMandatoryFooters();
    }
    return buildState();
  }

  /** Returns the note map read by {@code parseNotes()}; null until {@code parseAll()} has run. */
  RevisionNoteMap<ChangeRevisionNote> getRevisionNoteMap() {
    return revisionNoteMap;
  }
  /** Assembles the final state object from all fields accumulated during parsing. */
  private ChangeNotesState buildState() {
    return ChangeNotesState.create(
        tip.copy(),
        id,
        new Change.Key(changeId),
        createdOn,
        lastUpdatedOn,
        ownerId,
        branch,
        buildCurrentPatchSetId(),
        subject,
        topic,
        originalSubject,
        submissionId,
        assignee != null ? assignee.orElse(null) : null, // null if never set or removed
        status,
        Sets.newLinkedHashSet(Lists.reverse(pastAssignees)), // chronological, de-duplicated
        hashtags,
        patchSets,
        buildApprovals(),
        ReviewerSet.fromTable(Tables.transpose(reviewers)),
        ReviewerByEmailSet.fromTable(Tables.transpose(reviewersByEmail)),
        pendingReviewers,
        pendingReviewersByEmail,
        allPastReviewers,
        buildReviewerUpdates(),
        submitRecords,
        buildAllMessages(),
        buildMessagesByPatchSet(),
        comments,
        readOnlyUntil,
        isPrivate,
        workInProgress,
        hasReviewStarted,
        revertOf);
  }
private PatchSet.Id buildCurrentPatchSetId() {
// currentPatchSets are in parse order, i.e. newest first. Pick the first
// patch set that was marked as current, excluding deleted patch sets.
for (PatchSet.Id psId : currentPatchSets) {
if (patchSets.containsKey(psId)) {
return psId;
}
}
return null;
}
private ListMultimap<PatchSet.Id, PatchSetApproval> buildApprovals() {
ListMultimap<PatchSet.Id, PatchSetApproval> result =
MultimapBuilder.hashKeys().arrayListValues().build();
for (PatchSetApproval a : approvals.values()) {
if (!patchSets.containsKey(a.getPatchSetId())) {
continue; // Patch set deleted or missing.
} else if (allPastReviewers.contains(a.getAccountId())
&& !reviewers.containsRow(a.getAccountId())) {
continue; // Reviewer was explicitly removed.
}
result.put(a.getPatchSetId(), a);
}
for (Collection<PatchSetApproval> v : result.asMap().values()) {
Collections.sort((List<PatchSetApproval>) v, ChangeNotes.PSA_BY_TIME);
}
return result;
}
  /**
   * Builds the effective reviewer-update history in chronological order, skipping updates for the
   * change owner and consecutive duplicate states for the same reviewer.
   */
  private List<ReviewerStatusUpdate> buildReviewerUpdates() {
    List<ReviewerStatusUpdate> result = new ArrayList<>();
    HashMap<Account.Id, ReviewerStateInternal> lastState = new HashMap<>();
    // reviewerUpdates was filled newest-first; reverse to process chronologically.
    for (ReviewerStatusUpdate u : Lists.reverse(reviewerUpdates)) {
      if (!Objects.equals(ownerId, u.reviewer()) && lastState.get(u.reviewer()) != u.state()) {
        result.add(u);
        lastState.put(u.reviewer(), u.state());
      }
    }
    return result;
  }

  /**
   * Returns all change messages in chronological order.
   *
   * <p>NOTE: {@code Lists.reverse} returns a reversed view backed by {@code allChangeMessages},
   * not a copy.
   */
  private List<ChangeMessage> buildAllMessages() {
    return Lists.reverse(allChangeMessages);
  }

  /** Reverses each per-patch-set message list in place so messages are chronological. */
  private ListMultimap<PatchSet.Id, ChangeMessage> buildMessagesByPatchSet() {
    for (Collection<ChangeMessage> v : changeMessagesByPatchSet.asMap().values()) {
      Collections.reverse((List<ChangeMessage>) v);
    }
    return changeMessagesByPatchSet;
  }
  /**
   * Parses a single commit of the meta branch.
   *
   * <p>Commits are visited newest-first, so single-valued fields are only assigned while still
   * unset (keeping the newest value). {@code createdOn} and {@code ownerId} are overwritten on
   * every commit and therefore end up reflecting the oldest commit after the walk completes.
   */
  private void parse(ChangeNotesCommit commit) throws ConfigInvalidException {
    Timestamp ts = new Timestamp(commit.getCommitterIdent().getWhen().getTime());
    createdOn = ts;
    parseTag(commit);
    if (branch == null) {
      branch = parseBranch(commit);
    }
    PatchSet.Id psId = parsePatchSetId(commit);
    PatchSetState psState = parsePatchSetState(commit);
    if (psState != null) {
      // Newest parsed state per patch set wins.
      if (!patchSetStates.containsKey(psId)) {
        patchSetStates.put(psId, psState);
      }
      if (psState == PatchSetState.DELETED) {
        deletedPatchSets.add(psId);
      }
    }
    Account.Id accountId = parseIdent(commit);
    if (accountId != null) {
      ownerId = accountId;
    }
    Account.Id realAccountId = parseRealAccountId(commit, accountId);
    if (changeId == null) {
      changeId = parseChangeId(commit);
    }
    String currSubject = parseSubject(commit);
    if (currSubject != null) {
      if (subject == null) {
        subject = currSubject;
      }
      // Overwritten each commit, so the oldest subject footer wins as originalSubject.
      originalSubject = currSubject;
    }
    parseChangeMessage(psId, accountId, realAccountId, commit, ts);
    if (topic == null) {
      topic = parseTopic(commit);
    }
    parseHashtags(commit);
    parseAssignee(commit);
    if (submissionId == null) {
      submissionId = parseSubmissionId(commit);
    }
    ObjectId currRev = parseRevision(commit);
    if (currRev != null) {
      parsePatchSet(psId, currRev, accountId, ts);
    }
    parseGroups(psId, commit);
    parseCurrentPatchSet(psId, commit);
    if (submitRecords.isEmpty()) {
      // Only parse the most recent set of submit records; any older ones are
      // still there, but not currently used.
      parseSubmitRecords(commit.getFooterLineValues(FOOTER_SUBMITTED_WITH));
    }
    if (status == null) {
      status = parseStatus(commit);
    }
    // Parse approvals after status to treat approvals in the same commit as
    // "Status: merged" as non-post-submit.
    for (String line : commit.getFooterLineValues(FOOTER_LABEL)) {
      parseApproval(psId, accountId, realAccountId, ts, line);
    }
    for (ReviewerStateInternal state : ReviewerStateInternal.values()) {
      for (String line : commit.getFooterLineValues(state.getFooterKey())) {
        parseReviewer(ts, state, line);
      }
      for (String line : commit.getFooterLineValues(state.getByEmailFooterKey())) {
        parseReviewerByEmail(ts, state, line);
      }
      // Don't update timestamp when a reviewer was added, matching ReviewDb
      // behavior.
    }
    if (readOnlyUntil == null) {
      parseReadOnlyUntil(commit);
    }
    if (isPrivate == null) {
      parseIsPrivate(commit);
    }
    if (revertOf == null) {
      revertOf = parseRevertOf(commit);
    }
    // Reset before each commit so it only reflects the commit being parsed.
    previousWorkInProgressFooter = null;
    parseWorkInProgress(commit);
    if (lastUpdatedOn == null || ts.after(lastUpdatedOn)) {
      lastUpdatedOn = ts;
    }
    parseDescription(psId, commit);
  }
  /** Parses the Submission-id footer, or null when absent. */
  private String parseSubmissionId(ChangeNotesCommit commit) throws ConfigInvalidException {
    return parseOneFooter(commit, FOOTER_SUBMISSION_ID);
  }

  /** Parses the Branch footer, expanded to a full ref name; null when absent. */
  private String parseBranch(ChangeNotesCommit commit) throws ConfigInvalidException {
    String branch = parseOneFooter(commit, FOOTER_BRANCH);
    return branch != null ? RefNames.fullName(branch) : null;
  }

  /** Parses the Change-id footer, or null when absent. */
  private String parseChangeId(ChangeNotesCommit commit) throws ConfigInvalidException {
    return parseOneFooter(commit, FOOTER_CHANGE_ID);
  }

  /** Parses the Subject footer, or null when absent. */
  private String parseSubject(ChangeNotesCommit commit) throws ConfigInvalidException {
    return parseOneFooter(commit, FOOTER_SUBJECT);
  }

  /**
   * Parses the Real-user footer naming the user that executed the update on behalf of the
   * effective user; falls back to the effective account when the footer is absent.
   */
  private Account.Id parseRealAccountId(ChangeNotesCommit commit, Account.Id effectiveAccountId)
      throws ConfigInvalidException {
    String realUser = parseOneFooter(commit, FOOTER_REAL_USER);
    if (realUser == null) {
      return effectiveAccountId;
    }
    PersonIdent ident = RawParseUtils.parsePersonIdent(realUser);
    return noteUtil.parseIdent(ident, id);
  }

  /** Parses the Topic footer, or null when absent. */
  private String parseTopic(ChangeNotesCommit commit) throws ConfigInvalidException {
    return parseOneFooter(commit, FOOTER_TOPIC);
  }

  /**
   * Reads a footer that may occur at most once.
   *
   * @return the footer value, or null when the footer is absent
   * @throws ConfigInvalidException if the footer occurs more than once
   */
  private String parseOneFooter(ChangeNotesCommit commit, FooterKey footerKey)
      throws ConfigInvalidException {
    List<String> footerLines = commit.getFooterLineValues(footerKey);
    if (footerLines.isEmpty()) {
      return null;
    } else if (footerLines.size() > 1) {
      throw expectedOneFooter(footerKey, footerLines);
    }
    return footerLines.get(0);
  }

  /**
   * Reads a footer that must occur exactly once.
   *
   * @throws ConfigInvalidException if the footer is absent or occurs more than once
   */
  private String parseExactlyOneFooter(ChangeNotesCommit commit, FooterKey footerKey)
      throws ConfigInvalidException {
    String line = parseOneFooter(commit, footerKey);
    if (line == null) {
      throw expectedOneFooter(footerKey, Collections.<String>emptyList());
    }
    return line;
  }
private ObjectId parseRevision(ChangeNotesCommit commit) throws ConfigInvalidException {
String sha = parseOneFooter(commit, FOOTER_COMMIT);
if (sha == null) {
return null;
}
try {
return ObjectId.fromString(sha);
} catch (InvalidObjectIdException e) {
ConfigInvalidException cie = invalidFooter(FOOTER_COMMIT, sha);
cie.initCause(e);
throw cie;
}
}
  /**
   * Records patch set creation details (revision, uploader, creation time) for {@code psId}.
   *
   * @throws ConfigInvalidException if the commit has no identified author, or if more than one
   *     revision footer was recorded for the same live (non-deleted) patch set
   */
  private void parsePatchSet(PatchSet.Id psId, ObjectId rev, Account.Id accountId, Timestamp ts)
      throws ConfigInvalidException {
    if (accountId == null) {
      throw parseException("patch set %s requires an identified user as uploader", psId.get());
    }
    PatchSet ps = patchSets.get(psId);
    if (ps == null) {
      ps = new PatchSet(psId);
      patchSets.put(psId, ps);
    } else if (!ps.getRevision().equals(PARTIAL_PATCH_SET)) {
      // A full revision was already parsed from a newer commit.
      if (deletedPatchSets.contains(psId)) {
        // Do not update PS details as PS was deleted and this meta data is of
        // no relevance
        return;
      }
      throw new ConfigInvalidException(
          String.format(
              "Multiple revisions parsed for patch set %s: %s and %s",
              psId.get(), patchSets.get(psId).getRevision(), rev.name()));
    }
    ps.setRevision(new RevId(rev.name()));
    ps.setUploader(accountId);
    ps.setCreatedOn(ts);
  }
  /**
   * Parses the Groups footer into the patch set's group list, creating a placeholder patch set
   * (with the sentinel revision) when needed. A group list already parsed from a newer commit
   * wins.
   */
  private void parseGroups(PatchSet.Id psId, ChangeNotesCommit commit)
      throws ConfigInvalidException {
    String groupsStr = parseOneFooter(commit, FOOTER_GROUPS);
    if (groupsStr == null) {
      return;
    }
    PatchSet ps = patchSets.get(psId);
    if (ps == null) {
      ps = new PatchSet(psId);
      ps.setRevision(PARTIAL_PATCH_SET);
      patchSets.put(psId, ps);
    } else if (!ps.getGroups().isEmpty()) {
      return; // Groups already set from a newer commit.
    }
    ps.setGroups(PatchSet.splitGroups(groupsStr));
  }
private void parseCurrentPatchSet(PatchSet.Id psId, ChangeNotesCommit commit)
throws ConfigInvalidException {
// This commit implies a new current patch set if either it creates a new
// patch set, or sets the current field explicitly.
boolean current = false;
if (parseOneFooter(commit, FOOTER_COMMIT) != null) {
current = true;
} else {
String currentStr = parseOneFooter(commit, FOOTER_CURRENT);
if (Boolean.TRUE.toString().equalsIgnoreCase(currentStr)) {
current = true;
} else if (currentStr != null) {
// Only "true" is allowed; unsetting the current patch set makes no
// sense.
throw invalidFooter(FOOTER_CURRENT, currentStr);
}
}
if (current) {
currentPatchSets.add(psId);
}
}
  /**
   * Parses the comma-separated Hashtags footer.
   *
   * @throws ConfigInvalidException if the footer occurs more than once on one commit
   */
  private void parseHashtags(ChangeNotesCommit commit) throws ConfigInvalidException {
    // Commits are parsed in reverse order and only the last set of hashtags
    // should be used.
    if (hashtags != null) {
      return;
    }
    List<String> hashtagsLines = commit.getFooterLineValues(FOOTER_HASHTAGS);
    if (hashtagsLines.isEmpty()) {
      return;
    } else if (hashtagsLines.size() > 1) {
      throw expectedOneFooter(FOOTER_HASHTAGS, hashtagsLines);
    } else if (hashtagsLines.get(0).isEmpty()) {
      // An empty footer records that all hashtags were removed.
      hashtags = ImmutableSet.of();
    } else {
      hashtags = Sets.newHashSet(Splitter.on(',').split(hashtagsLines.get(0)));
    }
  }
  /**
   * Parses the Assignee footer: an empty value records assignee removal, otherwise the value is
   * the assignee's person ident. The newest footer wins for the current assignee; every assignee
   * seen is also collected for the past-assignees set.
   */
  private void parseAssignee(ChangeNotesCommit commit) throws ConfigInvalidException {
    if (pastAssignees == null) {
      pastAssignees = Lists.newArrayList();
    }
    String assigneeValue = parseOneFooter(commit, FOOTER_ASSIGNEE);
    if (assigneeValue != null) {
      Optional<Account.Id> parsedAssignee;
      if (assigneeValue.equals("")) {
        // Empty footer found, assignee deleted
        parsedAssignee = Optional.empty();
      } else {
        PersonIdent ident = RawParseUtils.parsePersonIdent(assigneeValue);
        parsedAssignee = Optional.ofNullable(noteUtil.parseIdent(ident, id));
      }
      if (assignee == null) {
        // First (i.e. newest) footer seen wins as the current assignee.
        assignee = parsedAssignee;
      }
      if (parsedAssignee.isPresent()) {
        pastAssignees.add(parsedAssignee.get());
      }
    }
  }
private void parseTag(ChangeNotesCommit commit) throws ConfigInvalidException {
tag = null;
List<String> tagLines = commit.getFooterLineValues(FOOTER_TAG);
if (tagLines.isEmpty()) {
return;
} else if (tagLines.size() == 1) {
tag = tagLines.get(0);
} else {
throw expectedOneFooter(FOOTER_TAG, tagLines);
}
}
private Change.Status parseStatus(ChangeNotesCommit commit) throws ConfigInvalidException {
List<String> statusLines = commit.getFooterLineValues(FOOTER_STATUS);
if (statusLines.isEmpty()) {
return null;
} else if (statusLines.size() > 1) {
throw expectedOneFooter(FOOTER_STATUS, statusLines);
}
Change.Status status =
Enums.getIfPresent(Change.Status.class, statusLines.get(0).toUpperCase()).orNull();
if (status == null) {
throw invalidFooter(FOOTER_STATUS, statusLines.get(0));
}
// All approvals after MERGED and before the next status change get the postSubmit
// bit. (Currently the state can't change from MERGED to something else, but just in case.) The
// exception is the legacy SUBM approval, which is never considered post-submit, but might end
// up sorted after the submit during rebuilding.
if (status == Change.Status.MERGED) {
for (PatchSetApproval psa : bufferedApprovals) {
if (!psa.isLegacySubmit()) {
psa.setPostSubmit(true);
}
}
}
bufferedApprovals.clear();
return status;
}
private PatchSet.Id parsePatchSetId(ChangeNotesCommit commit) throws ConfigInvalidException {
String psIdLine = parseExactlyOneFooter(commit, FOOTER_PATCH_SET);
int s = psIdLine.indexOf(' ');
String psIdStr = s < 0 ? psIdLine : psIdLine.substring(0, s);
Integer psId = Ints.tryParse(psIdStr);
if (psId == null) {
throw invalidFooter(FOOTER_PATCH_SET, psIdStr);
}
return new PatchSet.Id(id, psId);
}
private PatchSetState parsePatchSetState(ChangeNotesCommit commit) throws ConfigInvalidException {
String psIdLine = parseExactlyOneFooter(commit, FOOTER_PATCH_SET);
int s = psIdLine.indexOf(' ');
if (s < 0) {
return null;
}
String withParens = psIdLine.substring(s + 1);
if (withParens.startsWith("(") && withParens.endsWith(")")) {
PatchSetState state =
Enums.getIfPresent(
PatchSetState.class,
withParens.substring(1, withParens.length() - 1).toUpperCase())
.orNull();
if (state != null) {
return state;
}
}
throw invalidFooter(FOOTER_PATCH_SET, psIdLine);
}
  /**
   * Parses the Patch-set-description footer into the patch set's description, creating a
   * placeholder patch set (with the sentinel revision) when needed. A description already parsed
   * from a newer commit wins.
   *
   * @throws ConfigInvalidException if the footer occurs more than once on one commit
   */
  private void parseDescription(PatchSet.Id psId, ChangeNotesCommit commit)
      throws ConfigInvalidException {
    List<String> descLines = commit.getFooterLineValues(FOOTER_PATCH_SET_DESCRIPTION);
    if (descLines.isEmpty()) {
      return;
    } else if (descLines.size() == 1) {
      String desc = descLines.get(0).trim();
      PatchSet ps = patchSets.get(psId);
      if (ps == null) {
        ps = new PatchSet(psId);
        ps.setRevision(PARTIAL_PATCH_SET);
        patchSets.put(psId, ps);
      }
      if (ps.getDescription() == null) {
        ps.setDescription(desc);
      }
    } else {
      throw expectedOneFooter(FOOTER_PATCH_SET_DESCRIPTION, descLines);
    }
  }
  /**
   * Extracts the change message from the raw commit message buffer.
   *
   * <p>The change message is the text between the end of the subject paragraph and the start of
   * the last paragraph of the commit message (which holds the footers). If there is no paragraph
   * between subject and footers, no message is recorded.
   */
  private void parseChangeMessage(
      PatchSet.Id psId,
      Account.Id accountId,
      Account.Id realAccountId,
      ChangeNotesCommit commit,
      Timestamp ts) {
    byte[] raw = commit.getRawBuffer();
    int size = raw.length;
    Charset enc = RawParseUtils.parseEncoding(raw);
    int subjectStart = RawParseUtils.commitMessage(raw, 0);
    if (subjectStart < 0 || subjectStart >= size) {
      return; // No message body at all.
    }
    int subjectEnd = RawParseUtils.endOfParagraph(raw, subjectStart);
    if (subjectEnd == size) {
      return; // Message is subject-only; no change message.
    }
    int changeMessageStart;
    if (raw[subjectEnd] == '\n') {
      changeMessageStart = subjectEnd + 2; // \n\n ends paragraph
    } else if (raw[subjectEnd] == '\r') {
      changeMessageStart = subjectEnd + 4; // \r\n\r\n ends paragraph
    } else {
      return;
    }
    // Scan backwards from the end of the buffer for the blank line that starts
    // the last paragraph; the change message ends just before it.
    int ptr = size - 1;
    int changeMessageEnd = -1;
    while (ptr > changeMessageStart) {
      ptr = RawParseUtils.prevLF(raw, ptr, '\r');
      if (ptr == -1) {
        break;
      }
      if (raw[ptr] == '\n') {
        changeMessageEnd = ptr - 1;
        break;
      } else if (raw[ptr] == '\r') {
        changeMessageEnd = ptr - 3;
        break;
      }
    }
    if (ptr <= changeMessageStart) {
      return; // No paragraph break found between subject and end of buffer.
    }
    String changeMsgString =
        RawParseUtils.decode(enc, raw, changeMessageStart, changeMessageEnd + 1);
    ChangeMessage changeMessage =
        new ChangeMessage(
            new ChangeMessage.Key(psId.getParentKey(), commit.name()), accountId, ts, psId);
    changeMessage.setMessage(changeMsgString);
    changeMessage.setTag(tag);
    changeMessage.setRealAuthor(realAccountId);
    changeMessagesByPatchSet.put(psId, changeMessage);
    allChangeMessages.add(changeMessage);
  }
  /**
   * Reads the note map at the tip commit, collecting published comments per revision and
   * attaching push certificates to their corresponding patch sets.
   */
  private void parseNotes() throws IOException, ConfigInvalidException {
    ObjectReader reader = walk.getObjectReader();
    ChangeNotesCommit tipCommit = walk.parseCommit(tip);
    revisionNoteMap =
        RevisionNoteMap.parse(
            noteUtil,
            id,
            reader,
            NoteMap.read(reader, tipCommit),
            PatchLineComment.Status.PUBLISHED);
    Map<RevId, ChangeRevisionNote> rns = revisionNoteMap.revisionNotes;
    // Index all comments by the revision their note is attached to.
    for (Map.Entry<RevId, ChangeRevisionNote> e : rns.entrySet()) {
      for (Comment c : e.getValue().getComments()) {
        comments.put(e.getKey(), c);
      }
    }
    for (PatchSet ps : patchSets.values()) {
      ChangeRevisionNote rn = rns.get(ps.getRevision());
      if (rn != null && rn.getPushCert() != null) {
        ps.setPushCertificate(rn.getPushCert());
      }
    }
  }
private void parseApproval(
PatchSet.Id psId, Account.Id accountId, Account.Id realAccountId, Timestamp ts, String line)
throws ConfigInvalidException {
if (accountId == null) {
throw parseException("patch set %s requires an identified user as uploader", psId.get());
}
PatchSetApproval psa;
if (line.startsWith("-")) {
psa = parseRemoveApproval(psId, accountId, realAccountId, ts, line);
} else {
psa = parseAddApproval(psId, accountId, realAccountId, ts, line);
}
bufferedApprovals.add(psa);
}
  /**
   * Parses a vote from a Label footer line ("Label=value [ident]") into a {@code
   * PatchSetApproval}, keeping only the newest approval per (patch set, account, label).
   *
   * @throws ConfigInvalidException if the label vote or the trailing ident is malformed
   */
  private PatchSetApproval parseAddApproval(
      PatchSet.Id psId, Account.Id committerId, Account.Id realAccountId, Timestamp ts, String line)
      throws ConfigInvalidException {
    // There are potentially 3 accounts involved here:
    // 1. The account from the commit, which is the effective IdentifiedUser
    // that produced the update.
    // 2. The account in the label footer itself, which is used during submit
    // to copy other users' labels to a new patch set.
    // 3. The account in the Real-user footer, indicating that the whole
    // update operation was executed by this user on behalf of the effective
    // user.
    Account.Id effectiveAccountId;
    String labelVoteStr;
    int s = line.indexOf(' ');
    if (s > 0) {
      // Account in the label line (2) becomes the effective ID of the
      // approval. If there is a real user (3) different from the commit user
      // (2), we actually don't store that anywhere in this case; it's more
      // important to record that the real user (3) actually initiated submit.
      labelVoteStr = line.substring(0, s);
      PersonIdent ident = RawParseUtils.parsePersonIdent(line.substring(s + 1));
      checkFooter(ident != null, FOOTER_LABEL, line);
      effectiveAccountId = noteUtil.parseIdent(ident, id);
    } else {
      labelVoteStr = line;
      effectiveAccountId = committerId;
    }
    LabelVote l;
    try {
      l = LabelVote.parseWithEquals(labelVoteStr);
    } catch (IllegalArgumentException e) {
      ConfigInvalidException pe = parseException("invalid %s: %s", FOOTER_LABEL, line);
      pe.initCause(e);
      throw pe;
    }
    PatchSetApproval psa =
        new PatchSetApproval(
            new PatchSetApproval.Key(psId, effectiveAccountId, new LabelId(l.label())),
            l.value(),
            ts);
    psa.setTag(tag);
    if (!Objects.equals(realAccountId, committerId)) {
      psa.setRealAccountId(realAccountId);
    }
    // Commits are parsed newest-first, so the first approval seen per key wins.
    ApprovalKey k = ApprovalKey.create(psId, effectiveAccountId, l.label());
    if (!approvals.containsKey(k)) {
      approvals.put(k, psa);
    }
    return psa;
  }
  /**
   * Parses a vote removal from a Label footer line ("-Label [ident]") into a zero-value
   * {@code PatchSetApproval}, keeping only the newest entry per (patch set, account, label).
   *
   * @throws ConfigInvalidException if the label name or the trailing ident is malformed
   */
  private PatchSetApproval parseRemoveApproval(
      PatchSet.Id psId, Account.Id committerId, Account.Id realAccountId, Timestamp ts, String line)
      throws ConfigInvalidException {
    // See comments in parseAddApproval about the various users involved.
    Account.Id effectiveAccountId;
    String label;
    int s = line.indexOf(' ');
    if (s > 0) {
      label = line.substring(1, s);
      PersonIdent ident = RawParseUtils.parsePersonIdent(line.substring(s + 1));
      checkFooter(ident != null, FOOTER_LABEL, line);
      effectiveAccountId = noteUtil.parseIdent(ident, id);
    } else {
      label = line.substring(1);
      effectiveAccountId = committerId;
    }
    try {
      LabelType.checkNameInternal(label);
    } catch (IllegalArgumentException e) {
      ConfigInvalidException pe = parseException("invalid %s: %s", FOOTER_LABEL, line);
      pe.initCause(e);
      throw pe;
    }
    // Store an actual 0-vote approval in the map for a removed approval, for
    // several reasons:
    // - This is closer to the ReviewDb representation, which leads to less
    // confusion and special-casing of NoteDb.
    // - More importantly, ApprovalCopier needs an actual approval in order to
    // block copying an earlier approval over a later delete.
    PatchSetApproval remove =
        new PatchSetApproval(
            new PatchSetApproval.Key(psId, effectiveAccountId, new LabelId(label)), (short) 0, ts);
    if (!Objects.equals(realAccountId, committerId)) {
      remove.setRealAccountId(realAccountId);
    }
    // Commits are parsed newest-first, so the first entry seen per key wins.
    ApprovalKey k = ApprovalKey.create(psId, effectiveAccountId, label);
    if (!approvals.containsKey(k)) {
      approvals.put(k, remove);
    }
    return remove;
  }
  /**
   * Parses the Submitted-with footer lines into {@code SubmitRecord}s.
   *
   * <p>A line without ": " starts a new record and carries the record status, optionally followed
   * by an error message. A line with ": " adds a label result ("STATUS: label[: ident]") to the
   * record currently being built.
   *
   * @throws ConfigInvalidException if a status, label status or ident is malformed, or a label
   *     line appears before any record line
   */
  private void parseSubmitRecords(List<String> lines) throws ConfigInvalidException {
    SubmitRecord rec = null;
    for (String line : lines) {
      int c = line.indexOf(": ");
      if (c < 0) {
        // Record line: "STATUS[ error message]".
        rec = new SubmitRecord();
        submitRecords.add(rec);
        int s = line.indexOf(' ');
        String statusStr = s >= 0 ? line.substring(0, s) : line;
        rec.status = Enums.getIfPresent(SubmitRecord.Status.class, statusStr).orNull();
        checkFooter(rec.status != null, FOOTER_SUBMITTED_WITH, line);
        if (s >= 0) {
          // Remainder of the line (from the space) is the error message.
          rec.errorMessage = line.substring(s);
        }
      } else {
        // Label line: must follow a record line.
        checkFooter(rec != null, FOOTER_SUBMITTED_WITH, line);
        SubmitRecord.Label label = new SubmitRecord.Label();
        if (rec.labels == null) {
          rec.labels = new ArrayList<>();
        }
        rec.labels.add(label);
        label.status =
            Enums.getIfPresent(SubmitRecord.Label.Status.class, line.substring(0, c)).orNull();
        checkFooter(label.status != null, FOOTER_SUBMITTED_WITH, line);
        int c2 = line.indexOf(": ", c + 2);
        if (c2 >= 0) {
          // Optional trailing ident names the account the label was applied by.
          label.label = line.substring(c + 2, c2);
          PersonIdent ident = RawParseUtils.parsePersonIdent(line.substring(c2 + 2));
          checkFooter(ident != null, FOOTER_SUBMITTED_WITH, line);
          label.appliedBy = noteUtil.parseIdent(ident, id);
        } else {
          label.label = line.substring(c + 2);
        }
      }
    }
  }
private Account.Id parseIdent(ChangeNotesCommit commit) throws ConfigInvalidException {
// Check if the author name/email is the same as the committer name/email,
// i.e. was the server ident at the time this commit was made.
PersonIdent a = commit.getAuthorIdent();
PersonIdent c = commit.getCommitterIdent();
if (a.getName().equals(c.getName()) && a.getEmailAddress().equals(c.getEmailAddress())) {
return null;
}
return noteUtil.parseIdent(commit.getAuthorIdent(), id);
}
  /**
   * Parses one reviewer footer line (a person ident) for the given reviewer state, recording the
   * status update and the reviewer's most recent state.
   *
   * @throws ConfigInvalidException if the line is not a valid person ident
   */
  private void parseReviewer(Timestamp ts, ReviewerStateInternal state, String line)
      throws ConfigInvalidException {
    PersonIdent ident = RawParseUtils.parsePersonIdent(line);
    if (ident == null) {
      throw invalidFooter(state.getFooterKey(), line);
    }
    Account.Id accountId = noteUtil.parseIdent(ident, id);
    reviewerUpdates.add(ReviewerStatusUpdate.create(ts, ownerId, accountId, state));
    // Commits are parsed newest-first, so only the first state seen per account is kept.
    if (!reviewers.containsRow(accountId)) {
      reviewers.put(accountId, state, ts);
    }
  }

  /**
   * Parses one reviewer-by-email footer line (an email address) for the given reviewer state,
   * keeping only the most recent state per address.
   *
   * @throws ConfigInvalidException if the line is not a parseable address
   */
  private void parseReviewerByEmail(Timestamp ts, ReviewerStateInternal state, String line)
      throws ConfigInvalidException {
    Address adr;
    try {
      adr = Address.parse(line);
    } catch (IllegalArgumentException e) {
      throw invalidFooter(state.getByEmailFooterKey(), line);
    }
    if (!reviewersByEmail.containsRow(adr)) {
      reviewersByEmail.put(adr, state, ts);
    }
  }
  /**
   * Parses the Read-only-until footer into a timestamp using git's date parser.
   *
   * @throws ConfigInvalidException if the footer value is not a parseable date
   */
  private void parseReadOnlyUntil(ChangeNotesCommit commit) throws ConfigInvalidException {
    String raw = parseOneFooter(commit, FOOTER_READ_ONLY_UNTIL);
    if (raw == null) {
      return;
    }
    try {
      readOnlyUntil = new Timestamp(GitDateParser.parse(raw, null, Locale.US).getTime());
    } catch (ParseException e) {
      ConfigInvalidException cie = invalidFooter(FOOTER_READ_ONLY_UNTIL, raw);
      cie.initCause(e);
      throw cie;
    }
  }

  /**
   * Parses the Private footer ("true"/"false", case-insensitive) into {@code isPrivate}.
   *
   * @throws ConfigInvalidException if the footer holds any other value
   */
  private void parseIsPrivate(ChangeNotesCommit commit) throws ConfigInvalidException {
    String raw = parseOneFooter(commit, FOOTER_PRIVATE);
    if (raw == null) {
      return;
    } else if (Boolean.TRUE.toString().equalsIgnoreCase(raw)) {
      isPrivate = true;
      return;
    } else if (Boolean.FALSE.toString().equalsIgnoreCase(raw)) {
      isPrivate = false;
      return;
    }
    throw invalidFooter(FOOTER_PRIVATE, raw);
  }
  /**
   * Parses the Work-in-progress footer ("true"/"false", case-insensitive).
   *
   * <p>Updates {@code previousWorkInProgressFooter} for every commit (so after the walk it
   * reflects the oldest commit), and fixes {@code workInProgress}/{@code hasReviewStarted} the
   * first time (i.e. for the newest commit carrying) the footer.
   *
   * @throws ConfigInvalidException if the footer holds any other value
   */
  private void parseWorkInProgress(ChangeNotesCommit commit) throws ConfigInvalidException {
    String raw = parseOneFooter(commit, FOOTER_WORK_IN_PROGRESS);
    if (raw == null) {
      // No change to WIP state in this revision.
      previousWorkInProgressFooter = null;
      return;
    } else if (Boolean.TRUE.toString().equalsIgnoreCase(raw)) {
      // This revision moves the change into WIP.
      previousWorkInProgressFooter = true;
      if (workInProgress == null) {
        // Because this is the first time workInProgress is being set, we know
        // that this change's current state is WIP. All the reviewer updates
        // we've seen so far are pending, so take a snapshot of the reviewers
        // and reviewersByEmail tables.
        pendingReviewers =
            ReviewerSet.fromTable(Tables.transpose(ImmutableTable.copyOf(reviewers)));
        pendingReviewersByEmail =
            ReviewerByEmailSet.fromTable(Tables.transpose(ImmutableTable.copyOf(reviewersByEmail)));
        workInProgress = true;
      }
      return;
    } else if (Boolean.FALSE.toString().equalsIgnoreCase(raw)) {
      previousWorkInProgressFooter = false;
      hasReviewStarted = true;
      if (workInProgress == null) {
        workInProgress = false;
      }
      return;
    }
    throw invalidFooter(FOOTER_WORK_IN_PROGRESS, raw);
  }
private Change.Id parseRevertOf(ChangeNotesCommit commit) throws ConfigInvalidException {
String footer = parseOneFooter(commit, FOOTER_REVERT_OF);
if (footer == null) {
return null;
}
Integer revertOf = Ints.tryParse(footer);
if (revertOf == null) {
throw invalidFooter(FOOTER_REVERT_OF, footer);
}
return new Change.Id(revertOf);
}
private void pruneReviewers() {
Iterator<Table.Cell<Account.Id, ReviewerStateInternal, Timestamp>> rit =
reviewers.cellSet().iterator();
while (rit.hasNext()) {
Table.Cell<Account.Id, ReviewerStateInternal, Timestamp> e = rit.next();
if (e.getColumnKey() == ReviewerStateInternal.REMOVED) {
rit.remove();
}
}
}
private void pruneReviewersByEmail() {
Iterator<Table.Cell<Address, ReviewerStateInternal, Timestamp>> rit =
reviewersByEmail.cellSet().iterator();
while (rit.hasNext()) {
Table.Cell<Address, ReviewerStateInternal, Timestamp> e = rit.next();
if (e.getColumnKey() == ReviewerStateInternal.REMOVED) {
rit.remove();
}
}
}
  /**
   * Reconciles parsed patch sets with their parsed states: drops placeholder patch sets whose
   * commit SHA-1 was never parsed, removes deleted patch sets, then prunes messages, comments and
   * approvals that reference patch sets which no longer exist.
   */
  private void updatePatchSetStates() {
    Set<PatchSet.Id> missing = new TreeSet<>(ReviewDbUtil.intKeyOrdering());
    for (Iterator<PatchSet> it = patchSets.values().iterator(); it.hasNext(); ) {
      PatchSet ps = it.next();
      if (ps.getRevision().equals(PARTIAL_PATCH_SET)) {
        // Mutable fields were parsed but no commit SHA-1 was ever seen.
        missing.add(ps.getId());
        it.remove();
      }
    }
    for (Map.Entry<PatchSet.Id, PatchSetState> e : patchSetStates.entrySet()) {
      switch (e.getValue()) {
        case PUBLISHED:
        default:
          break;
        case DELETED:
          patchSets.remove(e.getKey());
          break;
      }
    }
    // Post-process other collections to remove items corresponding to deleted
    // (or otherwise missing) patch sets. This is safer than trying to prevent
    // insertion, as it will also filter out items racily added after the patch
    // set was deleted.
    changeMessagesByPatchSet.keys().retainAll(patchSets.keySet());
    int pruned =
        pruneEntitiesForMissingPatchSets(allChangeMessages, ChangeMessage::getPatchSetId, missing);
    pruned +=
        pruneEntitiesForMissingPatchSets(
            comments.values(), c -> new PatchSet.Id(id, c.key.patchSetId), missing);
    pruned +=
        pruneEntitiesForMissingPatchSets(
            approvals.values(), PatchSetApproval::getPatchSetId, missing);
    if (!missing.isEmpty()) {
      log.warn("ignoring {} additional entities due to missing patch sets: {}", pruned, missing);
    }
  }
  /**
   * Removes entities whose patch set is not present in {@code patchSets}.
   *
   * @param ents entities to filter in place
   * @param psIdFunc extracts the patch set ID from an entity
   * @param missing receives IDs of unknown patch sets; entities belonging to explicitly deleted
   *     patch sets are removed silently and not reported here
   * @return number of entities removed because their patch set was missing
   */
  private <T> int pruneEntitiesForMissingPatchSets(
      Iterable<T> ents, Function<T, PatchSet.Id> psIdFunc, Set<PatchSet.Id> missing) {
    int pruned = 0;
    for (Iterator<T> it = ents.iterator(); it.hasNext(); ) {
      PatchSet.Id psId = psIdFunc.apply(it.next());
      if (!patchSets.containsKey(psId)) {
        pruned++;
        missing.add(psId);
        it.remove();
      } else if (deletedPatchSets.contains(psId)) {
        it.remove(); // Not an error we need to report, don't increment pruned.
      }
    }
    return pruned;
  }
  /**
   * Verifies that the mandatory footers (Branch, Change-id, Subject) were seen somewhere in the
   * walk.
   *
   * @throws ConfigInvalidException listing all footers that were never parsed
   */
  private void checkMandatoryFooters() throws ConfigInvalidException {
    List<FooterKey> missing = new ArrayList<>();
    if (branch == null) {
      missing.add(FOOTER_BRANCH);
    }
    if (changeId == null) {
      missing.add(FOOTER_CHANGE_ID);
    }
    if (originalSubject == null || subject == null) {
      missing.add(FOOTER_SUBJECT);
    }
    if (!missing.isEmpty()) {
      throw parseException(
          "Missing footers: " + missing.stream().map(FooterKey::getName).collect(joining(", ")));
    }
  }
private ConfigInvalidException expectedOneFooter(FooterKey footer, List<String> actual) {
return parseException("missing or multiple %s: %s", footer.getName(), actual);
}
// Builds (does not throw) a parse exception for a footer whose value failed
// validation.
private ConfigInvalidException invalidFooter(FooterKey footer, String actual) {
return parseException("invalid %s: %s", footer.getName(), actual);
}
// Precondition-style guard: throws invalidFooter(footer, actual) when expr is
// false, otherwise does nothing.
private void checkFooter(boolean expr, FooterKey footer, String actual)
throws ConfigInvalidException {
if (!expr) {
throw invalidFooter(footer, actual);
}
}
// Delegates to the shared ChangeNotes factory so every parse error is tagged
// with this change's id.
private ConfigInvalidException parseException(String fmt, Object... args) {
return ChangeNotes.parseException(id, fmt, args);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.cloudstack.predicates;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Predicates.alwaysTrue;
import java.util.Set;
import org.jclouds.cloudstack.domain.IngressRule;
import org.jclouds.cloudstack.domain.SecurityGroup;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
public class SecurityGroupPredicates {
/**
*
* @return true, if the security group contains an ingress rule with the given port in the port range
*/
public static Predicate<SecurityGroup> portInRange(final int port) {
return new Predicate<SecurityGroup>() {
@Override
public boolean apply(SecurityGroup group) {
return Iterables.any(group.getIngressRules(), new Predicate<IngressRule>() {
@Override
public boolean apply(IngressRule rule) {
return rule.getStartPort() <= port && rule.getEndPort() >= port;
}
});
}
@Override
public String toString() {
return "portInRange(" + port + ")";
}
};
}
/**
*
* @return true, if the security group contains an ingress rule with the given cidr
*/
public static Predicate<SecurityGroup> hasCidr(final String cidr) {
return new Predicate<SecurityGroup>() {
@Override
public boolean apply(SecurityGroup group) {
return Iterables.any(group.getIngressRules(), new Predicate<IngressRule>() {
@Override
public boolean apply(IngressRule rule) {
return rule.getCIDR() != null
&& rule.getCIDR().equals(cidr);
}
});
}
@Override
public String toString() {
return "hasCidr(" + cidr + ")";
}
};
}
/**
*
* @return true, if the security group contains an ingress rule with the given cidr and the given port in range
*/
public static Predicate<SecurityGroup> portInRangeForCidr(final int port, final String cidr) {
return new Predicate<SecurityGroup>() {
@Override
public boolean apply(SecurityGroup group) {
return Iterables.any(group.getIngressRules(), new Predicate<IngressRule>() {
@Override
public boolean apply(IngressRule rule) {
return rule.getCIDR() != null
&& rule.getCIDR().equals(cidr)
&& rule.getStartPort() <= port
&& rule.getEndPort() >= port;
}
});
}
@Override
public String toString() {
return "portInRangeForCidr(" + port + ", " + cidr + ")";
}
};
}
/**
*
* @return always returns true.
*/
public static Predicate<SecurityGroup> any() {
return alwaysTrue();
}
/**
* matches name of the given security group
*
* @param name
* @return predicate that matches name
*/
public static Predicate<SecurityGroup> nameEquals(final String name) {
checkNotNull(name, "name must be defined");
return new Predicate<SecurityGroup>() {
@Override
public boolean apply(SecurityGroup ext) {
return name.equals(ext.getName());
}
@Override
public String toString() {
return "nameEquals(" + name + ")";
}
};
}
/**
* matches name of the given security group
*
* @param name
* @return predicate that matches name
*/
public static Predicate<SecurityGroup> nameMatches(final Predicate<String> name) {
checkNotNull(name, "name must be defined");
return new Predicate<SecurityGroup>() {
@Override
public boolean apply(SecurityGroup ext) {
return name.apply(ext.getName());
}
@Override
public String toString() {
return "nameMatches(" + name + ")";
}
};
}
/**
* matches IngressRules with the given protocol, start and end port, and
* any of the given CIDRs.
*
* @param protocol
* @param startPort
* @param endPort
* @param cidrs
*
* @return predicate that matches as described
*/
public static Predicate<IngressRule> ruleCidrMatches(final String protocol,
final int startPort,
final int endPort,
final Set<String> cidrs) {
checkNotNull(protocol, "protocol");
checkNotNull(cidrs, "cidrs");
return new Predicate<IngressRule>() {
@Override
public boolean apply(IngressRule rule) {
return protocol.equals(rule.getProtocol())
&& startPort == rule.getStartPort()
&& endPort == rule.getEndPort()
&& cidrs.contains(rule.getCIDR());
}
@Override
public String toString() {
return "ruleCidrMatches(protocol:" + protocol
+ ",startPort:" + startPort
+ ",endPort:" + endPort
+ ",cidrs:[" + cidrs
+ "])";
}
};
}
/**
* matches IngressRules with the given protocol, start and end port, and
* any of the given account/security group name pairs.
*
* @param protocol
* @param startPort
* @param endPort
* @param accountGroupNames
*
* @return predicate that matches as described
*/
public static Predicate<IngressRule> ruleGroupMatches(final String protocol,
final int startPort,
final int endPort,
final Multimap<String, String> accountGroupNames) {
checkNotNull(protocol, "protocol");
checkNotNull(accountGroupNames, "accountGroupNames");
return new Predicate<IngressRule>() {
@Override
public boolean apply(IngressRule rule) {
return protocol.equals(rule.getProtocol())
&& startPort == rule.getStartPort()
&& endPort == rule.getEndPort()
&& accountGroupNames.containsEntry(rule.getAccount(), rule.getSecurityGroupName());
}
@Override
public String toString() {
return "ruleGroupMatches(protocol:" + protocol
+ ",startPort:" + startPort
+ ",endPort:" + endPort
+ ",accountGroupNames:[" + accountGroupNames
+ "])";
}
};
}
}
| |
/*
* $Id: Submit.java 678154 2008-07-19 13:14:31Z musachy $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.dojo.components;
import java.io.Writer;
import java.util.Random;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts2.components.Form;
import org.apache.struts2.components.FormButton;
import org.apache.struts2.views.annotations.StrutsTag;
import org.apache.struts2.views.annotations.StrutsTagAttribute;
import org.apache.struts2.views.annotations.StrutsTagSkipInheritance;
import com.opensymphony.xwork2.util.ValueStack;
import com.opensymphony.xwork2.util.logging.Logger;
import com.opensymphony.xwork2.util.logging.LoggerFactory;
/**
* <!-- START SNIPPET: javadoc -->
* Renders a submit button that can submit a form asynchronously.
* The submit can have three different types of rendering:
* <ul>
* <li>input: renders as html <input type="submit"...></li>
* <li>image: renders as html <input type="image"...></li>
* <li>button: renders as html <button type="submit"...></li>
* </ul>
* Please note that the button type has advantages by adding the possibility to seperate the submitted value from the
* text shown on the button face, but has issues with Microsoft Internet Explorer at least up to 6.0
* <!-- END SNIPPET: javadoc -->
*
* <p>Examples</p>
* <!-- START SNIPPET: example1 -->
* <sx:submit value="%{'Submit'}" />
* <!-- END SNIPPET: example1 -->
*
* <!-- START SNIPPET: example2 -->
* <sx:submit type="image" value="%{'Submit'}" label="Submit the form" src="submit.gif"/>
* <!-- END SNIPPET: example2 -->
* <!-- START SNIPPET: example3 -->
* <sx:submit type="button" value="%{'Submit'}" label="Submit the form"/>
* <!-- END SNIPPET: example3 -->
*
* <!-- START SNIPPET: example4 -->
* <div id="div1">Div 1</div>
* <s:url id="ajaxTest" value="/AjaxTest.action"/>
*
* <sx:submit id="link1" href="%{ajaxTest}" target="div1" />
* <!-- END SNIPPET: example4 -->
*
* <!-- START SNIPPET: example5 -->
* <s:form id="form" action="AjaxTest">
* <input type="textbox" name="data">
* <sx:submit />
* </s:form>
* <!-- END SNIPPET: example5 -->
*
* <!-- START SNIPPET: example6 -->
* <s:form id="form" action="AjaxTest">
* <input type="textbox" name="data">
* </s:form>
*
* <sx:submit formId="form" />
* <!-- END SNIPPET: example6 -->
*
* <!-- START SNIPPET: example7 -->
* <script type="text/javascript">
* dojo.event.topic.subscribe("/before", function(event, widget){
* alert('inside a topic event. before request');
* //event: set event.cancel = true, to cancel request
* //widget: widget that published the topic
* });
* </script>
*
* <sx:submit beforeNotifyTopics="/before" />
* <!-- END SNIPPET: example7 -->
*
* <!-- START SNIPPET: example8 -->
* <script type="text/javascript">
* dojo.event.topic.subscribe("/after", function(data, request, widget){
* alert('inside a topic event. after request');
* //data : text returned from request(the html)
* //request: XMLHttpRequest object
* //widget: widget that published the topic
* });
* </script>
*
* <sx:submit afterNotifyTopics="/after" highlightColor="red" href="%{#ajaxTest}" />
* <!-- END SNIPPET: example8 -->
*
* <!-- START SNIPPET: example9 -->
* <script type="text/javascript">
* dojo.event.topic.subscribe("/error", function(error, request, widget){
* alert('inside a topic event. on error');
* //error : error object (error.message has the error message)
* //request: XMLHttpRequest object
* //widget: widget that published the topic
* });
* </script>
*
* <img id="ind1" src="${pageContext.request.contextPath}/images/indicator.gif" style="display:none"/>
* <sx:submit errorNotifyTopics="/error" indicator="ind1" href="%{#ajaxTest}" />
* <!-- END SNIPPET: example9 -->
*/
@StrutsTag(name="submit", tldTagClass="org.apache.struts2.dojo.views.jsp.ui.SubmitTag", description="Render a submit button")
public class Submit extends FormButton implements RemoteBean {
private static final Logger LOG = LoggerFactory.getLogger(Submit.class);
private final static transient Random RANDOM = new Random();
final public static String OPEN_TEMPLATE = "submit";
final public static String TEMPLATE = "submit-close";
protected String href;
protected String errorText;
protected String executeScripts;
protected String loadingText;
protected String listenTopics;
protected String handler;
protected String formId;
protected String formFilter;
protected String src;
protected String notifyTopics;
protected String showErrorTransportText;
protected String indicator;
protected String showLoadingText;
protected String targets;
protected String beforeNotifyTopics;
protected String afterNotifyTopics;
protected String errorNotifyTopics;
protected String highlightColor;
protected String highlightDuration;
protected String validate;
protected String ajaxAfterValidation;
protected String separateScripts;
protected String transport;
protected String parseContent;
public Submit(ValueStack stack, HttpServletRequest request, HttpServletResponse response) {
super(stack, request, response);
}
protected String getDefaultTemplate() {
return TEMPLATE;
}
@Override
public String getDefaultOpenTemplate() {
return OPEN_TEMPLATE;
}
public void evaluateParams() {
if ((key == null) && (value == null)) {
value = "Submit";
}
if (((key != null)) && (value == null)) {
this.value = "%{getText('"+key +"')}";
}
super.evaluateParams();
}
public void evaluateExtraParams() {
super.evaluateExtraParams();
if (href != null)
addParameter("href", findString(href));
if (errorText != null)
addParameter("errorText", findString(errorText));
if (loadingText != null)
addParameter("loadingText", findString(loadingText));
if (executeScripts != null)
addParameter("executeScripts", findValue(executeScripts, Boolean.class));
if (listenTopics != null)
addParameter("listenTopics", findString(listenTopics));
if (notifyTopics != null)
addParameter("notifyTopics", findString(notifyTopics));
if (handler != null)
addParameter("handler", findString(handler));
if (formId != null)
addParameter("formId", findString(formId));
if (formFilter != null)
addParameter("formFilter", findString(formFilter));
if (src != null)
addParameter("src", findString(src));
if (indicator != null)
addParameter("indicator", findString(indicator));
if (targets != null)
addParameter("targets", findString(targets));
if (showLoadingText != null)
addParameter("showLoadingText", findString(showLoadingText));
if (showLoadingText != null)
addParameter("showLoadingText", findString(showLoadingText));
if (beforeNotifyTopics != null)
addParameter("beforeNotifyTopics", findString(beforeNotifyTopics));
if (afterNotifyTopics != null)
addParameter("afterNotifyTopics", findString(afterNotifyTopics));
if (errorNotifyTopics != null)
addParameter("errorNotifyTopics", findString(errorNotifyTopics));
if (highlightColor != null)
addParameter("highlightColor", findString(highlightColor));
if (highlightDuration != null)
addParameter("highlightDuration", findString(highlightDuration));
if (separateScripts != null)
addParameter("separateScripts", findValue(separateScripts, Boolean.class));
if (transport != null)
addParameter("transport", findString(transport));
if (parseContent != null)
addParameter("parseContent", findValue(parseContent, Boolean.class));
Boolean validateValue = false;
if (validate != null) {
validateValue = (Boolean) findValue(validate, Boolean.class);
addParameter("validate", validateValue);
}
Form form = (Form) findAncestor(Form.class);
if (form != null)
addParameter("parentTheme", form.getTheme());
if (ajaxAfterValidation != null)
addParameter("ajaxAfterValidation", findValue(ajaxAfterValidation, Boolean.class));
// generate a random ID if not explicitly set and not parsing the content
Boolean parseContent = (Boolean)stack.getContext().get(Head.PARSE_CONTENT);
boolean generateId = (parseContent != null ? !parseContent : true);
addParameter("pushId", generateId);
if ((this.id == null || this.id.length() == 0) && generateId) {
// resolves Math.abs(Integer.MIN_VALUE) issue reported by FindBugs
// http://findbugs.sourceforge.net/bugDescriptions.html#RV_ABSOLUTE_VALUE_OF_RANDOM_INT
int nextInt = RANDOM.nextInt();
nextInt = nextInt == Integer.MIN_VALUE ? Integer.MAX_VALUE : Math.abs(nextInt);
this.id = "widget_" + String.valueOf(nextInt);
addParameter("id", this.id);
}
}
@Override
@StrutsTagSkipInheritance
public void setTheme(String theme) {
super.setTheme(theme);
}
@Override
public String getTheme() {
return "ajax";
}
/**
* Indicate whether the concrete button supports the type "image".
*
* @return <tt>true</tt> to indicate type image is supported.
*/
protected boolean supportsImageType() {
return true;
}
/**
* Overrides to be able to render body in a template rather than always before the template
*/
public boolean end(Writer writer, String body) {
evaluateParams();
try {
addParameter("body", body);
mergeTemplate(writer, buildTemplateName(template, getDefaultTemplate()));
} catch (Exception e) {
LOG.error("error when rendering", e);
}
finally {
popComponentStack();
}
return false;
}
@StrutsTagAttribute(description="Topic that will trigger the remote call")
public void setListenTopics(String listenTopics) {
this.listenTopics = listenTopics;
}
@StrutsTagAttribute(description="The URL to call to obtain the content. Note: If used with ajax context, the value must be set as an url tag value.")
public void setHref(String href) {
this.href = href;
}
@StrutsTagAttribute(description="The text to display to the user if the is an error fetching the content")
public void setErrorText(String errorText) {
this.errorText = errorText;
}
@StrutsTagAttribute(description="Javascript code in the fetched content will be executed", type="Boolean", defaultValue="false")
public void setExecuteScripts(String executeScripts) {
this.executeScripts = executeScripts;
}
@StrutsTagAttribute(description="Text to be shown while content is being fetched", defaultValue="Loading...")
public void setLoadingText(String loadingText) {
this.loadingText = loadingText;
}
@StrutsTagAttribute(description="Javascript function name that will make the request")
public void setHandler(String handler) {
this.handler = handler;
}
@StrutsTagAttribute(description="Function name used to filter the fields of the form.")
public void setFormFilter(String formFilter) {
this.formFilter = formFilter;
}
@StrutsTagAttribute(description="Form id whose fields will be serialized and passed as parameters")
public void setFormId(String formId) {
this.formId = formId;
}
@StrutsTagAttribute(description="Supply an image src for <i>image</i> type submit button. Will have no effect for types <i>input</i> and <i>button</i>.")
public void setSrc(String src) {
this.src = src;
}
@StrutsTagAttribute(description="Comma delimited list of ids of the elements whose content will be updated")
public void setTargets(String targets) {
this.targets = targets;
}
@StrutsTagAttribute(description="Comma delimmited list of topics that will published before and after the request, and on errors")
public void setNotifyTopics(String notifyTopics) {
this.notifyTopics = notifyTopics;
}
@StrutsTagAttribute(description="Set whether errors will be shown or not", type="Boolean", defaultValue="true")
public void setShowErrorTransportText(String showErrorTransportText) {
this.showErrorTransportText = showErrorTransportText;
}
@StrutsTagAttribute(description="Set indicator")
public void setIndicator(String indicator) {
this.indicator = indicator;
}
@StrutsTagAttribute(description="Show loading text on targets", type="Boolean", defaultValue="false")
public void setShowLoadingText(String showLoadingText) {
this.showLoadingText = showLoadingText;
}
@StrutsTagAttribute(description="The css class to use for element")
public void setCssClass(String cssClass) {
super.setCssClass(cssClass);
}
@StrutsTagAttribute(description="The css style to use for element")
public void setCssStyle(String cssStyle) {
super.setCssStyle(cssStyle);
}
@StrutsTagAttribute(description="The id to use for the element")
public void setId(String id) {
super.setId(id);
}
@StrutsTagAttribute(description="The name to set for element")
public void setName(String name) {
super.setName(name);
}
@StrutsTagAttribute(description="The type of submit to use. Valid values are <i>input</i>, " +
"<i>button</i> and <i>image</i>.", defaultValue="input")
public void setType(String type) {
super.setType(type);
}
@StrutsTagAttribute(description="Preset the value of input element.")
public void setValue(String value) {
super.setValue(value);
}
@StrutsTagAttribute(description="Label expression used for rendering a element specific label")
public void setLabel(String label) {
super.setLabel(label);
}
@StrutsTagAttribute(description="Comma delimmited list of topics that will published after the request(if the request succeeds)")
public void setAfterNotifyTopics(String afterNotifyTopics) {
this.afterNotifyTopics = afterNotifyTopics;
}
@StrutsTagAttribute(description="Comma delimmited list of topics that will published before the request")
public void setBeforeNotifyTopics(String beforeNotifyTopics) {
this.beforeNotifyTopics = beforeNotifyTopics;
}
@StrutsTagAttribute(description="Comma delimmited list of topics that will published after the request(if the request fails)")
public void setErrorNotifyTopics(String errorNotifyTopics) {
this.errorNotifyTopics = errorNotifyTopics;
}
@StrutsTagAttribute(description = "Color used to perform a highlight effect on the elements specified in the 'targets' attribute",
defaultValue = "none")
public void setHighlightColor(String highlightColor) {
this.highlightColor = highlightColor;
}
@StrutsTagAttribute(description = "Duration of highlight effect in milliseconds. Only valid if 'highlightColor' attribute is set",
defaultValue = "1000")
public void setHighlightDuration(String highlightDuration) {
this.highlightDuration = highlightDuration;
}
@StrutsTagAttribute(description = "Perform Ajax validation. 'ajaxValidation' interceptor must be applied to action", type="Boolean",
defaultValue = "false")
public void setValidate(String validate) {
this.validate = validate;
}
@StrutsTagAttribute(description = "Make an asynchronous request if validation succeeds. Only valid if 'validate' is 'true'", type="Boolean",
defaultValue = "false")
public void setAjaxAfterValidation(String ajaxAfterValidation) {
this.ajaxAfterValidation = ajaxAfterValidation;
}
@StrutsTagSkipInheritance
public void setAction(String action) {
super.setAction(action);
}
@StrutsTagAttribute(description="Run scripts in a separate scope, unique for each tag", defaultValue="true")
public void setSeparateScripts(String separateScripts) {
this.separateScripts = separateScripts;
}
@StrutsTagAttribute(description="Transport used by Dojo to make the request", defaultValue="XMLHTTPTransport")
public void setTransport(String transport) {
this.transport = transport;
}
@StrutsTagAttribute(description="Parse returned HTML for Dojo widgets", defaultValue="true", type="Boolean")
public void setParseContent(String parseContent) {
this.parseContent = parseContent;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jetty9;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import org.apache.camel.AsyncCallback;
import org.apache.camel.CamelExchangeException;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangeTimedOutException;
import org.apache.camel.component.jetty.JettyContentExchange;
import org.apache.camel.component.jetty.JettyHttpBinding;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.client.api.Response;
import org.eclipse.jetty.client.api.Result;
import org.eclipse.jetty.client.util.BytesContentProvider;
import org.eclipse.jetty.client.util.InputStreamContentProvider;
import org.eclipse.jetty.client.util.InputStreamResponseListener;
import org.eclipse.jetty.client.util.StringContentProvider;
import org.eclipse.jetty.http.HttpFields;
import org.eclipse.jetty.util.Callback;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Jetty specific exchange which keeps track of the the request and response.
*/
public class JettyContentExchange9 implements JettyContentExchange {

    private static final Logger LOG = LoggerFactory.getLogger(JettyContentExchange9.class);

    private volatile Exchange exchange;
    private volatile AsyncCallback callback;
    private volatile JettyHttpBinding jettyBinding;
    private volatile HttpClient client;
    // Released exactly once when the HTTP conversation finishes (success or failure).
    private final CountDownLatch done = new CountDownLatch(1);
    private Request request;
    private Response response;
    private byte[] responseContent;
    private String requestContentType;
    private boolean supportRedirect;

    public void init(Exchange exchange, JettyHttpBinding jettyBinding,
                     final HttpClient client, AsyncCallback callback) {
        this.exchange = exchange;
        this.jettyBinding = jettyBinding;
        this.client = client;
        this.callback = callback;
    }

    protected void onRequestComplete() {
        LOG.trace("onRequestComplete");
        closeRequestContentSource();
    }

    protected void onResponseComplete(Result result, byte[] content) {
        LOG.trace("onResponseComplete");
        // BUGFIX: store the response state BEFORE releasing waiters on the
        // latch. Previously countDown() came first, so a thread returning from
        // done.await() could observe a null response / responseContent.
        this.response = result.getResponse();
        this.responseContent = content;
        done.countDown();
        if (callback == null) {
            // this is only for the async callback
            return;
        }
        try {
            jettyBinding.populateResponse(exchange, this);
        } catch (Exception e) {
            exchange.setException(e);
        } finally {
            callback.done(false);
        }
    }

    protected void onExpire() {
        LOG.trace("onExpire");
        // need to close the request input stream
        closeRequestContentSource();
        doTaskCompleted(new ExchangeTimedOutException(exchange, client.getConnectTimeout()));
    }

    protected void onException(Throwable ex) {
        LOG.trace("onException {}", ex);
        // need to close the request input stream
        closeRequestContentSource();
        doTaskCompleted(ex);
    }

    protected void onConnectionFailed(Throwable ex) {
        LOG.trace("onConnectionFailed {}", ex);
        // need to close the request input stream
        closeRequestContentSource();
        doTaskCompleted(ex);
    }

    public byte[] getBody() {
        // must return the content as raw bytes
        return getResponseContentBytes();
    }

    public String getUrl() {
        try {
            return this.request.getURI().toURL().toExternalForm();
        } catch (MalformedURLException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    protected void closeRequestContentSource() {
        tryClose(this.request.getContent());
    }

    // Best-effort close; silently ignores both non-closeable objects and
    // IOExceptions raised on close.
    private void tryClose(Object obj) {
        if (obj instanceof Closeable) {
            try {
                ((Closeable) obj).close();
            } catch (IOException e) {
                // Ignore
            }
        }
    }

    /**
     * Records the failure on the exchange, releases the latch and, when an
     * async callback is registered, signals completion.
     */
    protected void doTaskCompleted(Throwable ex) {
        if (ex instanceof TimeoutException) {
            exchange.setException(new ExchangeTimedOutException(exchange, request.getTimeout()));
        } else {
            exchange.setException(new CamelExchangeException("JettyClient failed cause by: " + ex.getMessage(), exchange, ex));
        }
        done.countDown();
        if (callback != null) {
            // now invoke callback to indicate we are done async
            callback.done(false);
        }
    }

    public void setRequestContentType(String contentType) {
        this.requestContentType = contentType;
    }

    public int getResponseStatus() {
        return this.response.getStatus();
    }

    public void setMethod(String method) {
        this.request.method(method);
    }

    public void setTimeout(long timeout) {
        this.request.timeout(timeout, TimeUnit.MILLISECONDS);
    }

    // Must be called before any of the other request mutators: it creates the
    // underlying Jetty Request object.
    public void setURL(String url) {
        this.request = client.newRequest(url);
    }

    public void setRequestContent(byte[] byteArray) {
        this.request.content(new BytesContentProvider(byteArray), this.requestContentType);
    }

    public void setRequestContent(String data, String charset) throws UnsupportedEncodingException {
        StringContentProvider cp = charset != null ? new StringContentProvider(data, charset) : new StringContentProvider(data);
        this.request.content(cp, this.requestContentType);
    }

    public void setRequestContent(InputStream ins) {
        this.request.content(new InputStreamContentProvider(ins), this.requestContentType);
    }

    public void addRequestHeader(String key, String s) {
        this.request.header(key, s);
    }

    /**
     * Sends the prepared request asynchronously. Response bytes are buffered
     * in memory and handed to onResponseComplete(); request and connection
     * failures are routed to doTaskCompleted()/onConnectionFailed().
     */
    public void send(HttpClient client) throws IOException {
        org.eclipse.jetty.client.api.Request.Listener listener = new Request.Listener.Adapter() {
            @Override
            public void onSuccess(Request request) {
                onRequestComplete();
            }

            @Override
            public void onFailure(Request request, Throwable failure) {
                onConnectionFailed(failure);
            }
        };
        InputStreamResponseListener responseListener = new InputStreamResponseListener() {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();

            @Override
            public void onContent(Response response, ByteBuffer content, Callback callback) {
                byte[] buffer = new byte[content.limit()];
                content.get(buffer);
                baos.write(buffer, 0, buffer.length);
                callback.succeeded();
            }

            @Override
            public void onComplete(Result result) {
                if (result.isFailed()) {
                    doTaskCompleted(result.getFailure());
                } else {
                    onResponseComplete(result, baos.toByteArray());
                }
            }
        };
        request.followRedirects(supportRedirect).listener(listener).send(responseListener);
    }

    protected void setResponse(Response response) {
        this.response = response;
    }

    public byte[] getResponseContentBytes() {
        return responseContent;
    }

    // Copies Jetty's HttpFields into a case-insensitive, sorted map.
    private Map<String, Collection<String>> getFieldsAsMap(HttpFields fields) {
        final Map<String, Collection<String>> result = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        for (String name : getFieldNamesCollection(fields)) {
            result.put(name, fields.getValuesList(name));
        }
        return result;
    }

    @SuppressWarnings("unchecked")
    private Collection<String> getFieldNamesCollection(HttpFields fields) {
        try {
            return fields.getFieldNamesCollection();
        } catch (NoSuchMethodError e) {
            try {
                // In newer versions of Jetty the return type has been changed to Set.
                // This causes problems at byte-code level. Try recovering via
                // reflection, which is immune to the return-type change.
                Method reflGetFieldNamesCollection = HttpFields.class.getMethod("getFieldNamesCollection");
                Object result = reflGetFieldNamesCollection.invoke(fields);
                return (Collection<String>) result;
            } catch (Exception reflectionException) {
                // Suppress, throwing the original exception
                throw e;
            }
        }
    }

    public Map<String, Collection<String>> getRequestHeaders() {
        return getFieldsAsMap(request.getHeaders());
    }

    public Map<String, Collection<String>> getResponseHeaders() {
        return getFieldsAsMap(response.getHeaders());
    }

    @Override
    public void setSupportRedirect(boolean supportRedirect) {
        this.supportRedirect = supportRedirect;
    }
}
| |
// package joney_000 [let_me_start]
import java.util.*;
import java.lang.*;
import java.io.*;
import java.math.*;
/******************** Main Class ***********************/
public class A
{
// Shared contest-style static I/O state for the whole program.
public static InputStream inputStream = System.in;
public static OutputStream outputStream = System.out;
public static FastReader in = new FastReader(inputStream); // stray ';;' removed
public static PrintWriter out = new PrintWriter(outputStream); // stray ';;' removed
/*
 * Overhead [additional temporary storage]: scratch buffers reused by the
 * is()/ls()/ds()/cs() bulk readers to avoid per-call allocation. Note they
 * are shared and 1-indexed.
 */
public static int tempints[] = new int[100005];
public static long templongs[] = new long[100005];
public static double tempdoubles[] = new double[100005];
public static char tempchars[] = new char[100005];
public static long mod = 1000000000 + 7; // 1e9+7, the usual prime modulus
// Contest entry point: reads n and m from stdin, runs the (currently empty)
// solution loop, then prints the answer. This is template scaffolding — the
// actual solution logic is meant to go inside the for-loop.
public static void main(String[] args) throws java.lang.Exception{
//let_me_start
int n = i(); int m = i();
//int arr[] = is((int)n);
//String ans = "NO";
long ans=0;
for(int i=1;i<=n;i++){
}
out.write(""+ans+"\n");
out.flush();
return;
}
//****************************** Utilities ***********************//
/**
 * Deterministic primality test by trial division.
 *
 * Fixes: the original returned true for n <= 0 (the n<=3 branch caught them),
 * and recomputed Math.sqrt(n) on every loop iteration.
 *
 * @param n value to test
 * @return true iff n is prime
 */
public static boolean isPrime(long n) throws Exception {
    if (n < 2) {
        return false; // 0, 1 and negatives are not prime
    }
    if (n <= 3) {
        return true; // 2 and 3
    }
    if (n % 2 == 0 || n % 3 == 0) {
        return false;
    }
    // Trial-divide by 6k±1 candidates up to sqrt(n).
    for (long i = 5; i * i <= n; i += 6) {
        if (n % i == 0 || n % (i + 2) == 0) {
            return false;
        }
    }
    return true;
}
// sieve
/**
 * Sieve of Eratosthenes: returns all primes <= n in a 1-indexed array
 * (primes[1] is the first prime; primes.length == count + 1).
 *
 * Fixes: the original inner loop wrote {@code arr[i] = false} instead of
 * {@code arr[j] = false}, clearing the prime itself rather than its
 * multiples (so only numbers > n/2 survived), and it also reported 1 as
 * prime.
 */
public static int[] primes(int n) throws Exception {
    boolean arr[] = new boolean[n + 1];
    Arrays.fill(arr, true);
    if (n >= 0) {
        arr[0] = false; // 0 is not prime
    }
    if (n >= 1) {
        arr[1] = false; // 1 is not prime
    }
    for (int i = 2; (long) i * i <= n; i++) {
        if (!arr[i]) {
            continue;
        }
        // Start at i*i: smaller multiples were cleared by smaller primes.
        for (int j = i * i; j <= n; j += i) {
            arr[j] = false;
        }
    }
    int count = 0;
    for (int i = 2; i <= n; i++) {
        if (arr[i]) {
            count++;
        }
    }
    int primes[] = new int[count + 1]; // 1-indexed, matching the rest of the template
    int idx = 1;
    for (int i = 2; i <= n; i++) {
        if (arr[i]) {
            primes[idx++] = i;
        }
    }
    return primes;
}
public static long gcd (long a , long b)throws Exception{
if(b==0)return a;
return gcd(b , a%b);
}
public static long lcm (long a , long b)throws Exception{
if(a==0||b==0)return 0;
return (a*b)/gcd(a,b);
}
public static long mulmod(long a , long b ,long mod)throws Exception{
if(a==0||b==0)return 0;
if(b==1)return a;
long ans = mulmod(a,b/2,mod);
ans = (ans*2)% mod;
if(b%2==1)ans = (a + ans)% mod;
return ans;
}
public static long pow(long a , long b ,long mod)throws Exception{
if(b==0)return 1;
if(b==1)return a;
long ans = pow(a,b/2,mod);
ans = (ans * ans)% mod;
if(b%2==1)ans = (a * ans)% mod;
return ans;
}
// 20*20 nCr Pascal Table
public static long[][] ncrTable()throws Exception{
long ncr[][] = new long[21][21];
for(int i=0 ;i<=20 ;i++){ncr[i][0]=1;ncr[i][i]=1;}
for(int j=0;j<=20 ;j++){
for(int i=j+1;i<= 20 ;i++){
ncr[i][j] = ncr[i-1][j]+ncr[i-1][j-1];
}
}
return ncr;
}
//*******************************I/O******************************//
public static int i()throws Exception{
//return Integer.parseInt(br.readLine().trim());
return in.nextInt();
}
public static int[] is(int n)throws Exception{
//int arr[] = new int[n+1];
for(int i=1 ; i <= n ;i++)tempints[i] = in.nextInt();
return tempints;
}
public static long l()throws Exception{
return in.nextLong();
}
public static long[] ls(int n)throws Exception{
for(int i=1 ; i <= n ;i++)templongs[i] = in.nextLong();
return templongs;
}
public static double d()throws Exception{
return in.nextDouble();
}
public static double[] ds(int n)throws Exception{
for(int i=1 ; i <= n ;i++)tempdoubles[i] = in.nextDouble();
return tempdoubles;
}
public static char c()throws Exception{
return in.nextCharacter();
}
public static char[] cs(int n)throws Exception{
for(int i=1 ; i <= n ;i++)tempchars[i] = in.nextCharacter();
return tempchars;
}
public static String s()throws Exception{
return in.nextLine();
}
public static BigInteger bi()throws Exception{
return in.nextBigInteger();
}
//***********************I/O ENDS ***********************//
//*********************** 0.3%f [precision]***********************//
/* roundoff upto 2 digits
double roundOff = Math.round(a * 100.0) / 100.0;
or
System.out.printf("%.2f", val);
*/
/*
print upto 2 digits after decimal
val = ((long)(val * 100.0))/100.0;
*/
}
/**
 * Buffered, hand-rolled token reader over an {@link InputStream}.
 * Substantially faster than {@code Scanner} for competitive-programming
 * style input.  Not thread-safe.  All parse failures are reported as
 * {@link InputMismatchException} (including underlying IOExceptions).
 */
class FastReader{
    private boolean finished = false;   // NOTE(review): never updated; kept for source compatibility
    private InputStream stream;
    private byte[] buf = new byte[1024];
    private int curChar;                // next unread index within buf
    private int numChars;               // valid bytes in buf; becomes -1 at EOF
    private SpaceCharFilter filter;     // optional custom whitespace definition

    public FastReader(InputStream stream){
        this.stream = stream;
    }

    /**
     * Returns the next raw byte, refilling the buffer when exhausted.
     * Returns -1 at end of stream; throws once EOF has already been seen.
     */
    public int read(){
        if (numChars == -1){
            throw new InputMismatchException ();
        }
        if (curChar >= numChars){
            curChar = 0;
            try{
                numChars = stream.read (buf);
            } catch (IOException e){
                throw new InputMismatchException ();
            }
            if (numChars <= 0){
                return -1;
            }
        }
        return buf[curChar++];
    }

    /** Like {@link #read()} but does not consume the byte; -1 on EOF/error. */
    public int peek(){
        if (numChars == -1){
            return -1;
        }
        if (curChar >= numChars){
            curChar = 0;
            try{
                numChars = stream.read (buf);
            } catch (IOException e){
                return -1;
            }
            if (numChars <= 0){
                return -1;
            }
        }
        return buf[curChar];
    }

    /**
     * Reads a (possibly negative) int token.  Commas inside the digit run
     * are skipped, so "1,000" parses as 1000 (quirk kept from the original).
     */
    public int nextInt(){
        int c = read ();
        while (isSpaceChar (c))
            c = read ();
        int sgn = 1;
        if (c == '-'){
            sgn = -1;
            c = read ();
        }
        int res = 0;
        do{
            if(c==','){
                c = read();     // tolerate thousands separators
            }
            if (c < '0' || c > '9'){
                throw new InputMismatchException ();
            }
            res *= 10;
            res += c - '0';
            c = read ();
        } while (!isSpaceChar (c));
        return res * sgn;
    }

    /** Reads a (possibly negative) long token. */
    public long nextLong(){
        int c = read ();
        while (isSpaceChar (c))
            c = read ();
        int sgn = 1;
        if (c == '-'){
            sgn = -1;
            c = read ();
        }
        long res = 0;
        do{
            if (c < '0' || c > '9'){
                throw new InputMismatchException ();
            }
            res *= 10;
            res += c - '0';
            c = read ();
        } while (!isSpaceChar (c));
        return res * sgn;
    }

    /** Reads the next whitespace-delimited token. */
    public String nextString(){
        int c = read ();
        while (isSpaceChar (c))
            c = read ();
        StringBuilder res = new StringBuilder ();
        do{
            res.appendCodePoint (c);
            c = read ();
        } while (!isSpaceChar (c));
        return res.toString ();
    }

    /** True if c counts as whitespace (custom filter wins when set). */
    public boolean isSpaceChar(int c){
        if (filter != null){
            return filter.isSpaceChar (c);
        }
        return isWhitespace (c);
    }

    /** Default whitespace definition; EOF (-1) terminates tokens too. */
    public static boolean isWhitespace(int c){
        return c == ' ' || c == '\n' || c == '\r' || c == '\t' || c == -1;
    }

    /** Reads up to (and consuming) '\n' or EOF, dropping '\r'. */
    private String readLine0(){
        StringBuilder line = new StringBuilder ();
        int c = read ();
        while (c != '\n' && c != -1){
            if (c != '\r'){
                line.appendCodePoint (c);
            }
            c = read ();
        }
        return line.toString ();
    }

    /** Reads the next line, skipping lines that are blank after trimming. */
    public String nextLine(){
        String s = readLine0 ();
        while (s.trim ().length () == 0)
            s = readLine0 ();
        return s;
    }

    /**
     * Reads a line; when ignoreEmptyLines is true blank lines are skipped,
     * otherwise the raw (possibly empty) line is returned.
     */
    public String nextLine(boolean ignoreEmptyLines){
        if (ignoreEmptyLines){
            return nextLine ();
        }else{
            return readLine0 ();
        }
    }

    /** Reads the next token as a BigInteger. */
    public BigInteger nextBigInteger(){
        try{
            return new BigInteger (nextString ());
        } catch (NumberFormatException e){
            throw new InputMismatchException ();
        }
    }

    /** Reads the next non-whitespace character. */
    public char nextCharacter(){
        int c = read ();
        while (isSpaceChar (c))
            c = read ();
        return (char) c;
    }

    /**
     * Reads a double supporting sign, decimal point and scientific notation.
     *
     * Bug fix vs. original: the parsed sign was dropped whenever an exponent
     * marker ('e'/'E') was encountered, so "-1e2" parsed as 100.0; the sign
     * is now applied in both exponent branches.
     */
    public double nextDouble(){
        int c = read ();
        while (isSpaceChar (c))
            c = read ();
        int sgn = 1;
        if (c == '-'){
            sgn = -1;
            c = read ();
        }
        double res = 0;
        while (!isSpaceChar (c) && c != '.'){
            if (c == 'e' || c == 'E'){
                return sgn * res * Math.pow (10, nextInt ());
            }
            if (c < '0' || c > '9'){
                throw new InputMismatchException ();
            }
            res *= 10;
            res += c - '0';
            c = read ();
        }
        if (c == '.'){
            c = read ();
            double m = 1;
            while (!isSpaceChar (c)){
                if (c == 'e' || c == 'E'){
                    return sgn * res * Math.pow (10, nextInt ());
                }
                if (c < '0' || c > '9'){
                    throw new InputMismatchException ();
                }
                m /= 10;
                res += (c - '0') * m;
                c = read ();
            }
        }
        return res * sgn;
    }

    /** True when only whitespace remains before EOF (consumes that whitespace). */
    public boolean isExhausted(){
        int value;
        while (isSpaceChar (value = peek ()) && value != -1)
            read ();
        return value == -1;
    }

    /** Alias for {@link #nextString()}. */
    public String next(){
        return nextString ();
    }

    public SpaceCharFilter getFilter(){
        return filter;
    }

    public void setFilter(SpaceCharFilter filter){
        this.filter = filter;
    }

    /** Pluggable whitespace definition. */
    public interface SpaceCharFilter{
        public boolean isSpaceChar(int ch);
    }
}
/******************** Pair class ***********************/
/**
 * Simple mutable pair of ints, ordered by {@code a} first and {@code b}
 * as tie-breaker.
 */
class Pair implements Comparable<Pair>{
    public int a;
    public int b;

    /** Creates the pair (0, 0). */
    public Pair(){
        this.a = 0;
        this.b = 0;
    }

    /** Creates the pair (a, b). */
    public Pair(int a,int b){
        this.a = a;
        this.b = b;
    }

    /**
     * Orders by {@code a}, then {@code b}.
     *
     * Bug fix vs. original: comparison by int subtraction overflows when the
     * operands have opposite signs (e.g. MIN_VALUE vs. 1), inverting the
     * result; {@link Integer#compare} is overflow-safe.
     */
    public int compareTo(Pair p){
        if(this.a==p.a){
            return Integer.compare(this.b, p.b);
        }
        return Integer.compare(this.a, p.a);
    }

    @Override
    public String toString(){
        return "a="+this.a+" b="+this.b;
    }
}
| |
package org.radargun.service;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import com.hazelcast.core.IMap;
import com.hazelcast.query.PagingPredicate;
import com.hazelcast.query.Predicate;
import com.hazelcast.query.Predicates;
import org.radargun.logging.Log;
import org.radargun.logging.LogFactory;
import org.radargun.traits.Queryable;
import org.radargun.utils.OptimizedMap;
import org.radargun.utils.Projections;
/**
 * {@link Queryable} implementation backed by Hazelcast 3 predicates.
 * Query conditions are AND-combined; ordering, paging and projection are
 * partially emulated on top of {@link PagingPredicate}.
 *
 * @author Radim Vansa <rvansa@redhat.com>
 */
public class HazelcastQueryable implements Queryable {
   private static final Log log = LogFactory.getLog(HazelcastQueryable.class);
   protected final Hazelcast3Service service;

   public HazelcastQueryable(Hazelcast3Service service) {
      this.service = service;
   }

   @Override
   public QueryBuilder getBuilder(String mapName, Class<?> clazz) {
      return new HazelcastQueryBuilder(service.getMap(mapName));
   }

   @Override
   public void reindex(String containerName) {
      // noop: Hazelcast maintains its indexes automatically
   }

   /** Accumulates Hazelcast predicates; all conditions are implicitly ANDed. */
   private class HazelcastQueryBuilder implements QueryBuilder {
      private final IMap map;
      private Predicate predicate;
      private Comparator comparator;
      private int limit = -1;     // < 0 means "no limit"
      private int offset = 0;     // emulated, see offset()
      private String[] projection;

      private HazelcastQueryBuilder(IMap<Object, Object> map) {
         this.map = map;
      }

      @Override
      public QueryBuilder subquery() {
         // Subqueries only accumulate a predicate; they are never executed
         // directly, so no map is needed.
         return new HazelcastQueryBuilder(null);
      }

      /** ANDs p into the predicate built so far (or starts with it). */
      private void implicitAnd(Predicate p) {
         if (predicate == null) {
            predicate = p;
         } else {
            predicate = Predicates.and(predicate, p);
         }
      }

      @Override
      public QueryBuilder eq(String attribute, Object value) {
         implicitAnd(Predicates.equal(attribute, (Comparable) value));
         return this;
      }

      @Override
      public QueryBuilder lt(String attribute, Object value) {
         implicitAnd(Predicates.lessThan(attribute, (Comparable) value));
         return this;
      }

      @Override
      public QueryBuilder le(String attribute, Object value) {
         implicitAnd(Predicates.lessEqual(attribute, (Comparable) value));
         return this;
      }

      @Override
      public QueryBuilder gt(String attribute, Object value) {
         implicitAnd(Predicates.greaterThan(attribute, (Comparable) value));
         return this;
      }

      @Override
      public QueryBuilder ge(String attribute, Object value) {
         implicitAnd(Predicates.greaterEqual(attribute, (Comparable) value));
         return this;
      }

      @Override
      public QueryBuilder isNull(String attribute) {
         implicitAnd(Predicates.equal(attribute, null));
         return this;
      }

      @Override
      public QueryBuilder like(String attribute, String pattern) {
         implicitAnd(Predicates.like(attribute, pattern));
         return this;
      }

      @Override
      public QueryBuilder contains(String attribute, Object value) {
         // Hazelcast predicates have no containment operator
         throw new UnsupportedOperationException();
      }

      @Override
      public QueryBuilder not(QueryBuilder subquery) {
         implicitAnd(Predicates.not(((HazelcastQueryBuilder) subquery).predicate));
         return this;
      }

      @Override
      public QueryBuilder any(QueryBuilder... subqueries) {
         // OR all subquery predicates together, then AND the result in.
         Predicate p = null;
         for (QueryBuilder subquery : subqueries) {
            if (p == null) {
               p = ((HazelcastQueryBuilder) subquery).predicate;
            } else {
               p = Predicates.or(p, ((HazelcastQueryBuilder) subquery).predicate);
            }
         }
         implicitAnd(p);
         return this;
      }

      @Override
      public QueryBuilder orderBy(String attribute, SortOrder order) {
         // Only a single ordering attribute is supported; the last call wins.
         if (order == SortOrder.DESCENDING) {
            comparator = new InverseComparator(attribute);
         } else {
            comparator = new RegularComparator(attribute);
         }
         return this;
      }

      @Override
      public QueryBuilder projection(String... attributes) {
         log.warn("Projection is emulated; no native support for projection.");
         this.projection = attributes;
         return this;
      }

      @Override
      public QueryBuilder offset(long offset) {
         log.warn("Offset is emulated; first records will be loaded anyway.");
         this.offset = (int) offset;
         return this;
      }

      @Override
      public QueryBuilder limit(long limit) {
         this.limit = (int) limit;
         return this;
      }

      @Override
      public Query build() {
         // A PagingPredicate is only needed when limiting or sorting;
         // sorting without a limit pages with an effectively infinite size.
         Predicate finalPredicate;
         if (comparator == null) {
            if (limit < 0) finalPredicate = predicate;
            else finalPredicate = new PagingPredicate(predicate, limit);
         } else {
            if (limit < 0) finalPredicate = new PagingPredicate(predicate, comparator, Integer.MAX_VALUE);
            else finalPredicate = new PagingPredicate(predicate, comparator, limit);
         }
         return new HazelcastQuery(map, finalPredicate, offset, projection);
      }
   }

   /** Immutable executable query; offset/projection are applied client-side. */
   private class HazelcastQuery implements Query {
      private final IMap map;
      private final Predicate predicate;
      private final int offset;
      private final String[] projection;

      public HazelcastQuery(IMap map, Predicate predicate, int offset, String[] projection) {
         this.map = map;
         this.predicate = predicate;
         this.offset = offset;
         this.projection = projection;
      }

      @Override
      public QueryResult execute() {
         if (predicate == null) return new HazelcastQueryResult(map.values(), offset, projection);
         else return new HazelcastQueryResult(map.values(predicate), offset, projection);
      }
   }

   /** Applies emulated offset and projection to the raw Hazelcast result. */
   private class HazelcastQueryResult implements QueryResult {
      private final Collection values;

      public HazelcastQueryResult(Collection values, int offset, String[] projection) {
         if (offset > 0) {
            values = Projections.subset(values, offset, Integer.MAX_VALUE);
         }
         if (projection != null) {
            values = Projections.project(values, new ReflectionProjector(projection));
         }
         this.values = values;
      }

      @Override
      public int size() {
         return values.size();
      }

      @Override
      public Collection values() {
         return Collections.unmodifiableCollection(values);
      }
   }

   /**
    * Base comparator ordering map entries by a reflectively-read attribute of
    * the entry value.  Accessors are cached per value class; the cache is
    * transient and rebuilt after deserialization.
    */
   private abstract static class ReflexiveComparator implements Comparator<Map.Entry>, Serializable {
      protected transient Map<Class, Accessor> accessors = new OptimizedMap<Class, Accessor>();
      protected final String attribute;

      protected ReflexiveComparator(String attribute) {
         this.attribute = attribute;
      }

      // magic deserialization method: restores the transient accessor cache
      private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
         in.defaultReadObject();
         accessors = new OptimizedMap<Class, Accessor>();
      }

      @Override
      public int compare(Map.Entry e1, Map.Entry e2) {
         try {
            Comparable o1 = (Comparable) extractValue(e1.getValue());
            Comparable o2 = (Comparable) extractValue(e2.getValue());
            return compare(o1, o2);
         } catch (Exception e) {
            throw new IllegalArgumentException("Cannot extract " + attribute + " from " + e1.getValue() + " or " + e2.getValue(), e);
         }
      }

      /** Reads the compared attribute from o, caching the accessor per class. */
      private Object extractValue(Object o) {
         Class<?> clazz = o.getClass();
         Accessor accessor = accessors.get(clazz);
         if (accessor == null) {
            accessors.put(clazz, accessor = getAccessor(clazz, attribute));
         }
         return accessor.get(o);
      }

      protected abstract int compare(Comparable o1, Comparable o2);
   }

   /** Natural (ascending) attribute order. */
   private static class RegularComparator extends ReflexiveComparator {
      private RegularComparator(String attribute) {
         super(attribute);
      }

      @Override
      protected int compare(Comparable o1, Comparable o2) {
         return o1.compareTo(o2);
      }
   }

   /** Reversed (descending) attribute order. */
   private static class InverseComparator extends ReflexiveComparator {
      public InverseComparator(String attribute) {
         super(attribute);
      }

      @Override
      protected int compare(Comparable o1, Comparable o2) {
         return -o1.compareTo(o2);
      }
   }

   /**
    * Projects each value onto an Object[] of the requested attributes,
    * reflectively.  Accessors are cached per value class.
    */
   private static class ReflectionProjector implements Projections.Func {
      private final String[] projection;
      private transient Map<Class<?>, ArrayList<Accessor>> accessorMap = new OptimizedMap<Class<?>, ArrayList<Accessor>>();

      public ReflectionProjector(String[] projection) {
         this.projection = projection;
      }

      // magic deserialization method: restores the transient accessor cache.
      // Bug fix vs. original: called in.readObject() (which reads the *next*
      // object from the stream) instead of in.defaultReadObject(), corrupting
      // deserialization and leaving 'projection' unset.
      private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
         in.defaultReadObject();
         accessorMap = new OptimizedMap<Class<?>, ArrayList<Accessor>>();
      }

      @Override
      public Object project(Object o) {
         Class<?> clazz = o.getClass();
         ArrayList<Accessor> accessors = accessorMap.get(clazz);
         if (accessors == null) {
            accessors = new ArrayList<Accessor>();
            for (String attribute : projection) {
               accessors.add(getAccessor(clazz, attribute));
            }
            accessorMap.put(clazz, accessors);
         }
         Object[] projected = new Object[projection.length];
         int i = 0;
         for (Accessor accessor : accessors) {
            projected[i] = accessor.get(o);
            ++i;
         }
         return projected;
      }
   }

   /**
    * Resolves a (possibly dot-separated) attribute path into an Accessor,
    * preferring a public field and falling back to getX()/isX() getters.
    *
    * Bug fixes vs. original: the field lookup used the full dotted path
    * instead of the current path element, and Class.getField/getMethod throw
    * (rather than return null) when the member is missing, which made the
    * getter fallback unreachable dead code.
    */
   private static Accessor getAccessor(Class<?> clazz, String attribute) {
      try {
         ArrayList<Accessor> list = new ArrayList<Accessor>();
         for (String attributePart : attribute.split("\\.")) {
            Field f = null;
            try {
               f = clazz.getField(attributePart);
            } catch (NoSuchFieldException ignored) {
               // no public field of that name; try getters below
            }
            if (f != null) {
               f.setAccessible(true);
               list.add(new FieldAccessor(f));
               clazz = f.getType();
               continue;
            }
            String capitalized = Character.toUpperCase(attributePart.charAt(0)) + attributePart.substring(1);
            Method m = null;
            try {
               m = clazz.getMethod("get" + capitalized);
            } catch (NoSuchMethodException ignored) {
               try {
                  m = clazz.getMethod("is" + capitalized);
               } catch (NoSuchMethodException ignored2) {
                  // neither getter exists; fail below
               }
            }
            if (m != null) {
               m.setAccessible(true);
               list.add(new MethodAccessor(m));
               clazz = m.getReturnType();
               continue;
            }
            throw new IllegalArgumentException("Cannot find attribute part " + attributePart + " in " + clazz);
         }
         if (list.size() == 1) return list.get(0);
         else return new ChainedAccessor(list);
      } catch (Exception e) {
         log.debug("Cannot access attribute " + attribute, e);
         throw new RuntimeException(e);
      }
   }

   /** Uniform read access to one step of an attribute path. */
   private interface Accessor {
      Object get(Object o);
   }

   /** Reads a public field. */
   private static class FieldAccessor implements Accessor {
      private final Field f;

      private FieldAccessor(Field f) {
         this.f = f;
      }

      @Override
      public Object get(Object o) {
         try {
            return f.get(o);
         } catch (IllegalAccessException e) {
            log.debug("Cannot access field " + f.getDeclaringClass() + "." + f.getName(), e);
            throw new RuntimeException(e);
         }
      }
   }

   /** Invokes a no-arg getter. */
   private static class MethodAccessor implements Accessor {
      private final Method m;

      private MethodAccessor(Method m) {
         this.m = m;
      }

      @Override
      public Object get(Object o) {
         try {
            return m.invoke(o);
         } catch (Exception e) {
            log.debug("Cannot invoke method " + m.getDeclaringClass() + "." + m.getName(), e);
            throw new RuntimeException(e);
         }
      }
   }

   /** Applies a sequence of accessors, feeding each result into the next. */
   private static class ChainedAccessor implements Accessor {
      private final List<Accessor> accessors;

      public ChainedAccessor(List<Accessor> list) {
         this.accessors = list;
      }

      @Override
      public Object get(Object o) {
         for (Accessor a : accessors) {
            o = a.get(o);
         }
         return o;
      }
   }
}
| |
/* ========================================================
* EditorPane.java
*
* Author: kmchugh
* Created: Feb 17, 2011, 11:53:46 AM
*
* Description
* --------------------------------------------------------
* General Class Description.
*
* Change Log
* --------------------------------------------------------
* Init.Date Ref. Description
* --------------------------------------------------------
*
* ===================================================== */
package Goliath.UI.Controls.RichEditors;
import Goliath.Applications.Application;
import Goliath.Collections.HashTable;
import Goliath.Delegate;
import Goliath.DynamicCode.Java;
import Goliath.Event;
import Goliath.Exceptions.FileNotFoundException;
import Goliath.Graphics.Constants.Orientation;
import Goliath.Graphics.Image;
import Goliath.Interfaces.Collections.IList;
import Goliath.Interfaces.UI.Controls.IButton;
import Goliath.Interfaces.UI.Controls.IContainer;
import Goliath.Interfaces.UI.Controls.IControl;
import Goliath.UI.Controls.Button;
import Goliath.UI.Controls.Combobox;
import Goliath.UI.Controls.Group;
import Goliath.UI.Controls.Layouts.FlowLayoutManager;
import Goliath.UI.Constants.UIEventType;
import java.util.List;
/**
 * Base pane that hosts the toolbar controls for a {@link RichEditor} and
 * drives their enabled/visible flags through a small state machine
 * ({@link EditorState}).  Subclasses supply the concrete states and the
 * initial state.
 *
 * @see RichEditor
 * @version 1.0 Feb 17, 2011
 * @author kmchugh
 **/
public abstract class EditorPane<T extends RichEditor> extends Group
{
    /**
     * Base class for the states an editor pane can be in.  A concrete state
     * declares which controls are enabled/visible and which transitions are
     * legal.  State identity is its class' simple name.
     */
    public abstract class EditorState
    {
        public EditorState()
        {
        }

        /** Returns the state name (the concrete class' simple name). */
        public final String getStateName()
        {
            return getClass().getSimpleName();
        }

        /**
         * Checks whether this state can move to toState.
         * NOTE(review): despite the name this only queries canMoveToState and
         * performs no transition itself — confirm with callers before changing.
         */
        public final boolean moveToState(EditorState toState)
        {
            return canMoveToState(toState);
        }

        /**
         * Applies this state's enabled/visible lists to every control that
         * has been stored on the pane.
         */
        public final void updateState()
        {
            if (m_oEditorControls != null && m_oEditorControls.size() > 0)
            {
                IList<String> loEnabled = getEnabledControls();
                IList<String> loVisible = getVisibleControls();
                for (String lcKey : m_oEditorControls.keySet())
                {
                    suspendLayout();
                    IControl loControl = m_oEditorControls.get(lcKey);
                    loControl.setEnabled(loEnabled.contains(lcKey));
                    loControl.setVisible(loVisible.contains(lcKey));
                    // hidden controls should not take up layout space
                    loControl.setParticipatesInLayout(loControl.isVisible());
                    resumeLayout();
                }
            }
        }

        /** Helper for subclasses: true if toState is exactly one of taEditors. */
        protected boolean isOneOf(EditorState toState, Class<? extends EditorState>... taEditors)
        {
            for (int i=0, lnLength = taEditors.length; i<lnLength; i++)
            {
                if (taEditors[i].equals(toState.getClass()))
                {
                    return true;
                }
            }
            return false;
        }

        @Override
        public boolean equals(Object obj)
        {
            if (obj == null)
            {
                return false;
            }
            if (getClass() != obj.getClass())
            {
                return false;
            }
            final EditorState other = (EditorState) obj;
            if ((this.getStateName() == null) ? (other.getStateName() != null) : !this.getStateName().equals(other.getStateName()))
            {
                return false;
            }
            return true;
        }

        @Override
        public int hashCode()
        {
            int hash = 7;
            hash = 59 * hash + (this.getStateName() != null ? this.getStateName().hashCode() : 0);
            return hash;
        }

        /** Keys of the stored controls that are enabled in this state. */
        protected abstract IList<String> getEnabledControls();

        /** Keys of the stored controls that are visible in this state. */
        protected abstract IList<String> getVisibleControls();

        /** True if a transition from this state to toState is allowed. */
        protected abstract boolean canMoveToState(EditorState toState);
    }

    private T m_oEditor;
    private EditorState m_oCurrentState;
    private HashTable<String, IControl> m_oEditorControls;

    /**
     * Creates a new instance of EditorPane
     * @param toEditor the editor this pane controls
     */
    public EditorPane(T toEditor)
    {
        // registerEditor stores the editor reference itself, so the original
        // redundant duplicate assignment of m_oEditor has been removed.
        registerEditor(toEditor);
        initialiseComponent();
    }

    /**
     * Initialises the component, sets the initial layout
     */
    private void initialiseComponent()
    {
        this.setLayoutManager(FlowLayoutManager.class);
    }

    /**
     * Sets the internal state for this editor.  The state is accepted when
     * there is no current state yet, or when the new state differs from the
     * current one and the current state permits the transition.
     * @param toState the new state for the editor
     * @return true if the state changed as a result of this call
     */
    protected final boolean setState(EditorState toState)
    {
        if (m_oCurrentState == null
            || (!m_oCurrentState.equals(toState) && m_oCurrentState.canMoveToState(toState)))
        {
            m_oCurrentState = toState;
            m_oCurrentState.updateState();
            fireEvent(UIEventType.ONSTATECHANGED(), new Event<EditorPane>(this));
            return true;
        }
        return false;
    }

    /** Returns the current state, or null if none has been set yet. */
    protected EditorState getCurrentState()
    {
        return m_oCurrentState;
    }

    /** Re-applies the current state's control flags, if a state is set. */
    protected void updateCurrentState()
    {
        if (m_oCurrentState != null)
        {
            m_oCurrentState.updateState();
        }
    }

    /**
     * Gets the editor that has been registered to this control
     * @return the editor registered to this control
     */
    public T getEditor()
    {
        return m_oEditor;
    }

    /**
     * Registers the editor with this pane for event listening and moves the
     * pane to its initial state.
     */
    private void registerEditor(T toEditor)
    {
        if (m_oEditor != null)
        {
            unRegisterEditor();
        }
        m_oEditor = toEditor;

        // Hook up all the event listening
        toEditor.addEventListener(UIEventType.ONSELECTIONCHANGED(), Delegate.build(this, "selectionChanged"));
        setState(getInitialState());
    }

    /** The state the pane starts in when an editor is registered. */
    protected abstract EditorState getInitialState();

    /**
     * Creates a state instance of the given class bound to this pane.
     * Returns null when construction fails (best-effort by design).
     */
    protected <K extends EditorState> K getEditorStateByClass(Class<K> toClass)
    {
        try
        {
            return Java.createObject(toClass, new Object[]{this});
        }
        catch (Throwable ex)
        {
            // deliberate best-effort: an unconstructable state maps to null
            return null;
        }
    }

    // Invoked reflectively via Delegate.build(this, "selectionChanged") —
    // do not rename.
    private void selectionChanged(Event<T> toEvent)
    {
        onSelectionChanged(toEvent);
    }

    /** Hook for subclasses; called when the editor's selection changes. */
    protected void onSelectionChanged(Event<T> toEvent)
    {
    }

    /**
     * Unregisters the current editor from this pane, this will unhook all events and remove the reference to the editor
     */
    public void unRegisterEditor()
    {
        // Unhook all the event listeners
        m_oEditor = null;
    }

    /**
     * Helper function to create editor buttons for the editor pane
     * @param tcKey the key to store the button as
     * @param tcTitle the title of the button
     * @param tcIconURL the icon for the button
     * @param tcCallback the callback for the button
     * @return the button created
     */
    protected final IButton createEditorButton(String tcKey, String tcTitle, String tcIconURL, String tcCallback)
    {
        Button loButton = null;
        Image loImage = null;
        try
        {
            loImage = new Goliath.Graphics.Image(tcIconURL);
            loImage.setScaleable(true);
        }
        catch (FileNotFoundException ex)
        {
            // Missing icon is tolerated; a text-only button is created below
            Application.getInstance().log(ex);
        }

        if (loImage != null)
        {
            loButton = new Button(tcTitle, loImage, Delegate.build(this, tcCallback));
        }
        else
        {
            loButton = new Button(tcTitle, Delegate.build(this, tcCallback));
        }
        storeControl(tcKey, loButton);
        return loButton;
    }

    /**
     * Helper function to create a combo box for the editor pane.
     * Note: the type parameter was renamed from T to D because it shadowed
     * the class type parameter T (they are unrelated types).
     * @param <D> the item type held by the combo box
     * @param tcKey the key to store the combo box as
     * @param toData the items to display
     * @param tcCallback the callback invoked on change
     * @return the combo box created
     */
    protected final <D> Combobox<D> createEditorDropdown(String tcKey, IList<D> toData, String tcCallback)
    {
        Combobox<D> loCombobox = new Combobox<D>(toData);
        loCombobox.addEventListener(UIEventType.ONCHANGED(), Delegate.build(this, tcCallback));
        storeControl(tcKey, loCombobox);
        return loCombobox;
    }

    /**
     * Helper function to create distinct editor groups for controls in the editor pane
     * @param toControls the list of controls to put in the group
     * @return the container with the controls
     */
    protected final IContainer createEditorGroup(List<? extends IControl> toControls)
    {
        Group loGroup = new Group();
        loGroup.setLayoutManager(FlowLayoutManager.class);

        Image loLBGImage = null;
        Image loRBGImage = null;
        Image loBGImage = null;
        try
        {
            loLBGImage = new Image("./resources/images/buttons/small_button_left.png");
            loRBGImage = new Image("./resources/images/buttons/small_button_right.png");
            loBGImage = new Image("./resources/images/buttons/small_button_center.png");
        }
        catch(Throwable ex)
        {
            // Best effort: missing images are tolerated and skipped below
        }

        for (int i=0, lnLength = toControls.size(); i<lnLength; i++)
        {
            IControl loControl = toControls.get(i);
            loGroup.addControl(loControl);
            if (Java.isEqualOrAssignable(Button.class, loControl.getClass()))
            {
                // Pick the left/right/centre background by position in the group
                Image loBackground;
                if (i==0 && lnLength > 1)
                {
                    loBackground = loLBGImage;
                }
                else if (i == lnLength-1 && lnLength > 1)
                {
                    loBackground = loRBGImage;
                }
                else
                {
                    loBackground = loBGImage;
                }

                // Guard against images that failed to load; the original code
                // dereferenced them unconditionally and threw an NPE here.
                if (loBackground != null)
                {
                    loControl.setBackground(loBackground);
                    loControl.setSize(loBackground.getSize());
                }

                Image loIcon = ((IButton)loControl).getImage();
                if (loIcon != null)
                {
                    Image loBG = loControl.getBackgroundImage();
                    if (loBG != null)
                    {
                        loIcon.setSize(loBG.getSize());
                    }
                }
            }
        }
        return loGroup;
    }

    /**
     * Stores the control so it can be retrieved at a later time
     * @param tcKey the key for the control
     * @param toControl the control to store
     */
    protected void storeControl(String tcKey, IControl toControl)
    {
        if (m_oEditorControls == null)
        {
            m_oEditorControls = new HashTable<String, IControl>();
        }
        m_oEditorControls.put(tcKey, toControl);
    }

    /**
     * Gets the cached editor control, or null if a control does not exist
     * @param tcKey the key of the control to get
     * @return the control for the key, or null
     */
    protected IControl getStoredControl(String tcKey)
    {
        return (m_oEditorControls != null && m_oEditorControls.containsKey(tcKey)) ? m_oEditorControls.get(tcKey) : null;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.