gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* #%L
* SCIFIO library for reading and converting scientific file formats.
* %%
* Copyright (C) 2011 - 2021 SCIFIO developers.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package io.scif.formats;
import io.scif.AbstractFormat;
import io.scif.AbstractMetadata;
import io.scif.AbstractParser;
import io.scif.AbstractTranslator;
import io.scif.AbstractWriter;
import io.scif.BufferedImagePlane;
import io.scif.FormatException;
import io.scif.ImageMetadata;
import io.scif.Plane;
import io.scif.Translator;
import io.scif.config.SCIFIOConfig;
import io.scif.gui.AWTImageTools;
import io.scif.gui.BufferedImageReader;
import io.scif.util.FormatTools;
import io.scif.util.SCIFIOMetadataTools;
import java.awt.image.BufferedImage;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.List;
import javax.imageio.ImageIO;
import net.imagej.axis.Axes;
import net.imglib2.Interval;
import org.scijava.Priority;
import org.scijava.io.handle.DataHandle;
import org.scijava.io.handle.DataHandleInputStream;
import org.scijava.io.handle.DataHandleOutputStream;
import org.scijava.io.location.Location;
import org.scijava.plugin.Plugin;
/**
* ImageIOReader is the superclass for file format readers that use the
* javax.imageio package.
*
* @author Curtis Rueden
* @author Mark Hiner
*/
public abstract class ImageIOFormat extends AbstractFormat {
// -- Nested classes --
public static class Metadata extends AbstractMetadata {

	// -- Fields --

	/** Image decoded by javax.imageio; transient, dropped on close(). */
	private transient BufferedImage img;

	// -- ImageIOMetadata API methods --

	/** Returns the decoded image backing this metadata, or null if unset. */
	public BufferedImage getImg() {
		return img;
	}

	/** Sets the decoded image backing this metadata. */
	public void setImg(final BufferedImage img) {
		this.img = img;
	}

	// -- Metadata API Methods --

	/**
	 * Derives axis lengths, planar axis count and pixel type from the cached
	 * image. Always marks the metadata big-endian, complete, non-indexed and
	 * non-false-color.
	 */
	@Override
	public void populateImageMetadata() {
		final ImageMetadata iMeta = get(0);
		if (img != null) {
			iMeta.setAxisLength(Axes.X, img.getWidth());
			iMeta.setAxisLength(Axes.Y, img.getHeight());
			iMeta.setPlanarAxisCount(2);
			// Query the raster once and reuse the band count for both checks.
			final int channels = img.getRaster().getNumBands();
			if (channels > 1) {
				iMeta.setPlanarAxisCount(3);
				iMeta.setAxisLength(Axes.CHANNEL, channels);
			}
			iMeta.setPixelType(AWTImageTools.getPixelType(img));
		}
		iMeta.setLittleEndian(false);
		iMeta.setMetadataComplete(true);
		iMeta.setIndexed(false);
		iMeta.setFalseColor(false);
	}

	@Override
	public void close(final boolean fileOnly) throws IOException {
		super.close(fileOnly);
		// Release the image so it can be garbage collected on a full close.
		if (!fileOnly) {
			img = null;
		}
	}
}
public static class Parser<M extends Metadata> extends AbstractParser<M> {

	/**
	 * Decodes the handle's contents via javax.imageio and stores the decoded
	 * image plus a single image-metadata entry on {@code meta}.
	 */
	@Override
	protected void typedParse(final DataHandle<Location> handle, final M meta,
		final SCIFIOConfig config) throws IOException, FormatException
	{
		log().info("Populating metadata");
		final DataHandleInputStream<Location> wrapped =
			new DataHandleInputStream<>(handle);
		final BufferedImage decoded = ImageIO.read(new DataInputStream(wrapped));
		if (decoded == null) {
			throw new FormatException("Invalid image stream");
		}
		meta.setImg(decoded);
		meta.createImageMetadata(1);
	}
}
public static class Reader<M extends Metadata> extends
	BufferedImageReader<M>
{

	// -- AbstractReader API Methods --

	/** Images handled by this reader belong to the graphics domain. */
	@Override
	protected String[] createDomainArray() {
		return new String[] { FormatTools.GRAPHICS_DOMAIN };
	}

	// -- Reader API methods --

	/**
	 * Copies the requested sub-region of the cached image into {@code plane}.
	 */
	@Override
	public BufferedImagePlane openPlane(final int imageIndex,
		final long planeIndex, final BufferedImagePlane plane,
		final Interval bounds, final SCIFIOConfig config) throws FormatException,
		IOException
	{
		final Metadata meta = getMetadata();
		final boolean littleEndian = meta.get(imageIndex).isLittleEndian();
		final BufferedImage sub =
			AWTImageTools.getSubimage(meta.getImg(), littleEndian, bounds);
		plane.setData(sub);
		return plane;
	}

	/** Tiles span the full image height (the image is already in memory). */
	@Override
	public long getOptimalTileHeight(final int imageIndex) {
		return getMetadata().get(imageIndex).getAxisLength(Axes.Y);
	}
}
public static class Writer<M extends Metadata> extends AbstractWriter<M> {

	// -- Fields --

	/** Format name handed to {@code ImageIO.write} (e.g. "png"). */
	private final String kind;

	// -- Constructors --

	public Writer(final String kind) {
		this.kind = kind;
	}

	// -- AbstractWriter Methods --

	/** No compression types are offered. */
	@Override
	protected String[] makeCompressionTypes() {
		return new String[0];
	}

	/**
	 * Encodes one whole plane through javax.imageio. Sub-plane (tiled) writes
	 * are rejected.
	 */
	@Override
	public void writePlane(final int imageIndex, final long planeIndex,
		final Plane plane, final Interval bounds) throws FormatException,
		IOException
	{
		final Metadata meta = getMetadata();
		if (!SCIFIOMetadataTools.wholePlane(imageIndex, meta, bounds)) {
			throw new FormatException(
				"ImageIOWriter does not support writing tiles");
		}
		final BufferedImage img = asBufferedImage(plane, meta, imageIndex);
		ImageIO.write(img, kind, new DataHandleOutputStream<>(getHandle()));
	}

	/**
	 * Returns the plane's pixels as a BufferedImage, reusing the backing image
	 * when the plane already wraps one, otherwise building one from raw bytes.
	 */
	private BufferedImage asBufferedImage(final Plane plane,
		final Metadata meta, final int imageIndex)
	{
		if (plane instanceof BufferedImagePlane) {
			return ((BufferedImagePlane) plane).getData();
		}
		final ImageMetadata imageMetadata = meta.get(imageIndex);
		final int type = imageMetadata.getPixelType();
		return AWTImageTools.makeImage(plane.getBytes(), //
			(int) imageMetadata.getAxisLength(Axes.X), //
			(int) imageMetadata.getAxisLength(Axes.Y), //
			(int) imageMetadata.getAxisLength(Axes.CHANNEL), //
			plane.getImageMetadata().getInterleavedAxisCount() > 0, //
			FormatTools.getBytesPerPixel(type), //
			FormatTools.isFloatingPoint(type), //
			imageMetadata.isLittleEndian(), //
			FormatTools.isSigned(type));
	}

	/** Only 8- and 16-bit unsigned pixel data can be written. */
	@Override
	public int[] getPixelTypes(final String codec) {
		return new int[] { FormatTools.UINT8, FormatTools.UINT16 };
	}
}
@Plugin(type = Translator.class, priority = Priority.LOW)
public static class ImageIOTranslator extends
	AbstractTranslator<io.scif.Metadata, Metadata>
{

	// -- Translator API Methods --

	@Override
	public Class<? extends io.scif.Metadata> source() {
		return io.scif.Metadata.class;
	}

	@Override
	public Class<? extends io.scif.Metadata> dest() {
		return Metadata.class;
	}

	/**
	 * Populates {@code dest} with a blank image matching the geometry and
	 * pixel type of the first source image.
	 */
	@Override
	protected void translateImageMetadata(final List<ImageMetadata> source,
		final Metadata dest)
	{
		dest.createImageMetadata(1);
		final ImageMetadata first = source.get(0);
		final BufferedImage blank = AWTImageTools.blankImage(first, //
			first.getAxesLengthsPlanar(), first.getPixelType());
		dest.setImg(blank);
	}
}
}
| |
package com.jetbrains.edu.learning.actions;
import com.intellij.icons.AllIcons;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupAdapter;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.LightweightWindowEvent;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.tabs.TabInfo;
import com.intellij.ui.tabs.TabsListener;
import com.intellij.ui.tabs.impl.JBEditorTabs;
import com.intellij.util.PlatformIcons;
import com.jetbrains.edu.learning.StudyTaskManager;
import com.jetbrains.edu.learning.StudyUtils;
import com.jetbrains.edu.learning.core.EduNames;
import com.jetbrains.edu.learning.core.EduUtils;
import com.jetbrains.edu.learning.courseFormat.TaskFile;
import com.jetbrains.edu.learning.courseFormat.UserTest;
import com.jetbrains.edu.learning.courseFormat.tasks.Task;
import com.jetbrains.edu.learning.editor.StudyEditor;
import com.jetbrains.edu.learning.ui.StudyTestContentPanel;
import icons.EducationalCoreIcons;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Action that opens a popup for viewing, adding and removing user-defined
 * test inputs/outputs ("user tests") for the study task open in the
 * currently selected editor.
 */
public class StudyEditInputAction extends DumbAwareAction {
  private static final Logger LOG = Logger.getInstance(StudyEditInputAction.class.getName());
  // Tab pane shown inside the popup: one tab per user test plus a trailing "+" tab.
  private JBEditorTabs tabbedPane;
  // Editable test tabs mapped to their backing UserTest, so CloseTab and the
  // selection listener can resolve a tab back to its test.
  private Map<TabInfo, UserTest> myEditableTabs = new HashMap<>();

  public StudyEditInputAction() {
    super("Watch Test Input", "Watch test input", EducationalCoreIcons.WatchInput);
  }

  /**
   * Builds and shows the user-test popup for the task in the selected editor.
   * Existing tests each get a tab; selecting the trailing "+" tab registers a
   * new editable test and adds a tab for it.
   */
  public void showInput(final Project project) {
    final Editor selectedEditor = StudyUtils.getSelectedEditor(project);
    if (selectedEditor != null) {
      FileDocumentManager fileDocumentManager = FileDocumentManager.getInstance();
      final VirtualFile openedFile = fileDocumentManager.getFile(selectedEditor.getDocument());
      final StudyTaskManager studyTaskManager = StudyTaskManager.getInstance(project);
      assert openedFile != null;
      TaskFile taskFile = StudyUtils.getTaskFile(project, openedFile);
      assert taskFile != null;
      final Task currentTask = taskFile.getTask();
      tabbedPane = new JBEditorTabs(project, ActionManager.getInstance(), IdeFocusManager.findInstance(), project);
      tabbedPane.addListener(new TabsListener.Adapter() {
        // Only the trailing "+" tab carries an icon (set below), so an icon on
        // the newly selected tab means the user asked to create a new test.
        @Override
        public void selectionChanged(TabInfo oldSelection, TabInfo newSelection) {
          if (newSelection.getIcon() != null) {
            int tabCount = tabbedPane.getTabCount();
            VirtualFile taskDir = StudyUtils.getTaskDir(openedFile);
            VirtualFile testsDir = taskDir.findChild(EduNames.USER_TESTS);
            assert testsDir != null;
            UserTest userTest = createUserTest(testsDir, currentTask, studyTaskManager);
            userTest.setEditable(true);
            StudyTestContentPanel testContentPanel = new StudyTestContentPanel(userTest);
            TabInfo testTab = addTestTab(tabbedPane.getTabCount(), testContentPanel, currentTask, true);
            myEditableTabs.put(testTab, userTest);
            // Insert just before the "+" tab so it stays last.
            tabbedPane.addTabSilently(testTab, tabCount - 1);
            tabbedPane.select(testTab, true);
          }
        }
      });
      List<UserTest> userTests = studyTaskManager.getUserTests(currentTask);
      int i = 1;
      // One tab per existing user test, numbered from 1.
      for (UserTest userTest : userTests) {
        String inputFileText = StudyUtils.getFileText(null, userTest.getInput(), false, "UTF-8");
        String outputFileText = StudyUtils.getFileText(null, userTest.getOutput(), false, "UTF-8");
        StudyTestContentPanel myContentPanel = new StudyTestContentPanel(userTest);
        myContentPanel.addInputContent(inputFileText);
        myContentPanel.addOutputContent(outputFileText);
        TabInfo testTab = addTestTab(i, myContentPanel, currentTask, userTest.isEditable());
        tabbedPane.addTabSilently(testTab, i - 1);
        if (userTest.isEditable()) {
          myEditableTabs.put(testTab, userTest);
        }
        i++;
      }
      // Trailing "+" tab acting as the "add test" affordance (see listener above).
      TabInfo plusTab = new TabInfo(new JPanel());
      plusTab.setIcon(PlatformIcons.ADD_ICON);
      tabbedPane.addTabSilently(plusTab, tabbedPane.getTabCount());
      final JBPopup hint =
        JBPopupFactory.getInstance().createComponentPopupBuilder(tabbedPane.getComponent(), tabbedPane.getComponent())
          .setResizable(true)
          .setMovable(true)
          .setRequestFocus(true)
          .createPopup();
      StudyEditor selectedStudyEditor = StudyUtils.getSelectedStudyEditor(project);
      assert selectedStudyEditor != null;
      hint.showInCenterOf(selectedStudyEditor.getComponent());
      // Persist edited test buffers to disk when the popup closes.
      hint.addListener(new HintClosedListener(currentTask, studyTaskManager));
    }
  }

  /** Overwrites {@code file} with the buffer's contents, then refreshes the VFS. */
  private static void flushBuffer(@NotNull final StringBuilder buffer, @NotNull final File file) {
    PrintWriter printWriter = null;
    try {
      //noinspection IOResourceOpenedButNotSafelyClosed
      printWriter = new PrintWriter(new FileOutputStream(file));
      printWriter.print(buffer.toString());
    }
    catch (FileNotFoundException e) {
      LOG.error(e);
    }
    finally {
      StudyUtils.closeSilently(printWriter);
    }
    EduUtils.synchronize();
  }

  /**
   * Registers a new UserTest for the task, pointing its input/output paths at
   * the next numbered input/output files under the tests directory, and adds
   * it to the task manager. The files themselves are not created here.
   */
  private static UserTest createUserTest(@NotNull final VirtualFile testsDir,
                                         @NotNull final Task currentTask,
                                         StudyTaskManager studyTaskManager) {
    UserTest userTest = new UserTest();
    List<UserTest> userTests = studyTaskManager.getUserTests(currentTask);
    int testNum = userTests.size() + 1;
    String inputName = EduNames.USER_TEST_INPUT + testNum;
    File inputFile = new File(testsDir.getPath(), inputName);
    String outputName = EduNames.USER_TEST_OUTPUT + testNum;
    File outputFile = new File(testsDir.getPath(), outputName);
    userTest.setInput(inputFile.getPath());
    userTest.setOutput(outputFile.getPath());
    studyTaskManager.addUserTest(currentTask, userTest);
    return userTest;
  }

  /** Creates a numbered test tab; closable tabs get a "delete test" label action. */
  private TabInfo addTestTab(int nameIndex, final StudyTestContentPanel contentPanel, @NotNull final Task currentTask, boolean toBeClosable) {
    TabInfo testTab = toBeClosable ? createClosableTab(contentPanel, currentTask) : new TabInfo(contentPanel);
    return testTab.setText(EduNames.TEST_TAB_NAME + String.valueOf(nameIndex));
  }

  /** Wraps the panel in a TabInfo carrying a CloseTab action on its label. */
  private TabInfo createClosableTab(StudyTestContentPanel contentPanel, Task currentTask) {
    TabInfo closableTab = new TabInfo(contentPanel);
    final DefaultActionGroup tabActions = new DefaultActionGroup();
    tabActions.add(new CloseTab(closableTab, currentTask));
    closableTab.setTabLabelActions(tabActions, ActionPlaces.EDITOR_TAB);
    return closableTab;
  }

  @Override
  public void actionPerformed(AnActionEvent e) {
    // NOTE(review): e.getProject() may be null when invoked without a project
    // context; showInput would then pass null to StudyUtils.getSelectedEditor —
    // confirm that helper tolerates null.
    showInput(e.getProject());
  }

  /** Flushes editable tests' in-memory buffers to disk when the popup closes. */
  private static class HintClosedListener extends JBPopupAdapter {
    private final Task myTask;
    private final StudyTaskManager myStudyTaskManager;
    private HintClosedListener(@NotNull final Task task, StudyTaskManager studyTaskManager) {
      myTask = task;
      myStudyTaskManager = studyTaskManager;
    }
    @Override
    public void onClosed(LightweightWindowEvent event) {
      for (final UserTest userTest : myStudyTaskManager.getUserTests(myTask)) {
        // Write actions are required for the VFS refresh done by flushBuffer.
        ApplicationManager.getApplication().runWriteAction(() -> {
          if (userTest.isEditable()) {
            File inputFile = new File(userTest.getInput());
            File outputFile = new File(userTest.getOutput());
            flushBuffer(userTest.getInputBuffer(), inputFile);
            flushBuffer(userTest.getOutputBuffer(), outputFile);
          }
        });
      }
    }
  }

  /** Tab-label action that removes a user test tab and deletes its files. */
  private class CloseTab extends AnAction implements DumbAware {
    private final TabInfo myTabInfo;
    private final Task myTask;
    public CloseTab(final TabInfo info, @NotNull final Task task) {
      myTabInfo = info;
      myTask = task;
    }
    @Override
    public void update(final AnActionEvent e) {
      e.getPresentation().setIcon(AllIcons.Actions.Close);
      e.getPresentation().setHoveredIcon(AllIcons.Actions.CloseHovered);
      e.getPresentation().setVisible(UISettings.getInstance().getShowCloseButton());
      e.getPresentation().setText("Delete test");
    }
    @Override
    public void actionPerformed(final AnActionEvent e) {
      tabbedPane.removeTab(myTabInfo);
      // NOTE(review): myEditableTabs.get may return null if this tab was never
      // registered as editable — getInput() below would then NPE; confirm the
      // invariant that only editable tabs carry this action.
      UserTest userTest = myEditableTabs.get(myTabInfo);
      File testInputFile = new File(userTest.getInput());
      File testOutputFile = new File(userTest.getOutput());
      if (testInputFile.delete() && testOutputFile.delete()) {
        EduUtils.synchronize();
      } else {
        LOG.error("failed to delete user tests");
      }
      final Project project = e.getProject();
      if (project != null) {
        StudyTaskManager.getInstance(project).removeUserTest(myTask, userTest);
      }
    }
  }

  /** Enables the action only when the current task has at least one user test. */
  @Override
  public void update(final AnActionEvent e) {
    EduUtils.enableAction(e, false);
    final Project project = e.getProject();
    if (project != null) {
      StudyEditor studyEditor = StudyUtils.getSelectedStudyEditor(project);
      if (studyEditor != null) {
        final List<UserTest> userTests = StudyTaskManager.getInstance(project).getUserTests(studyEditor.getTaskFile().getTask());
        if (!userTests.isEmpty()) {
          EduUtils.enableAction(e, true);
        }
      }
    }
  }
}
| |
/*
* $Id: DispatcherTest.java 1436878 2013-01-22 12:44:45Z lukaszlenart $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.dispatcher;
import com.mockobjects.dynamic.C;
import com.mockobjects.dynamic.Mock;
import com.opensymphony.xwork2.ObjectFactory;
import com.opensymphony.xwork2.XWorkConstants;
import com.opensymphony.xwork2.config.Configuration;
import com.opensymphony.xwork2.config.ConfigurationManager;
import com.opensymphony.xwork2.config.entities.InterceptorMapping;
import com.opensymphony.xwork2.config.entities.InterceptorStackConfig;
import com.opensymphony.xwork2.config.entities.PackageConfig;
import com.opensymphony.xwork2.inject.Container;
import com.opensymphony.xwork2.interceptor.Interceptor;
import com.opensymphony.xwork2.util.LocalizedTextUtil;
import org.apache.struts2.StrutsConstants;
import org.apache.struts2.StrutsTestCase;
import org.apache.struts2.dispatcher.FilterDispatcherTest.InnerDestroyableObjectFactory;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
/**
* Test case for Dispatcher.
*
*/
public class DispatcherTest extends StrutsTestCase {

    // Checks that the default i18n bundles (xwork-messages.properties and
    // struts-messages.properties) are resolvable through LocalizedTextUtil.
    // NOTE(review): "Resurce" in the method name is a typo for "Resource";
    // left as-is since JUnit 3 discovers tests by the "test" name prefix.
    public void testDefaultResurceBundlePropertyLoaded() throws Exception {
        Locale.setDefault(Locale.US); // force to US locale as we also have _de and _da properties
        // some i18n messages from xwork-messages.properties
        assertEquals(
                LocalizedTextUtil.findDefaultText("xwork.error.action.execution", Locale.US),
                "Error during Action invocation");
        // some i18n messages from struts-messages.properties
        assertEquals(
                LocalizedTextUtil.findDefaultText("struts.messages.error.uploading", Locale.US,
                        new Object[] { "some error messages" }),
                "Error uploading: some error messages");
    }

    // prepare() must apply the configured struts.i18n.encoding to the request.
    public void testPrepareSetEncodingProperly() throws Exception {
        HttpServletRequest req = new MockHttpServletRequest();
        HttpServletResponse res = new MockHttpServletResponse();
        Dispatcher du = initDispatcher(new HashMap<String, String>() {{
            put(StrutsConstants.STRUTS_I18N_ENCODING, "utf-8");
        }});
        du.prepare(req, res);
        assertEquals(req.getCharacterEncoding(), "utf-8");
    }

    // An XMLHttpRequest that already declares an encoding keeps it, even when
    // the configured encoding differs.
    public void testEncodingForXMLHttpRequest() throws Exception {
        // given
        MockHttpServletRequest req = new MockHttpServletRequest();
        req.addHeader("X-Requested-With", "XMLHttpRequest");
        req.setCharacterEncoding("UTF-8");
        HttpServletResponse res = new MockHttpServletResponse();
        Dispatcher du = initDispatcher(new HashMap<String, String>() {{
            put(StrutsConstants.STRUTS_I18N_ENCODING, "latin-2");
        }});
        // when
        du.prepare(req, res);
        // then
        assertEquals(req.getCharacterEncoding(), "UTF-8");
    }

    // setCharacterEncoding must not be invoked when the request encoding
    // already matches the configured one — the strict mock records exactly the
    // expected call sequence and verify() fails on any extra call.
    public void testSetEncodingIfDiffer() throws Exception {
        // given
        Mock mock = new Mock(HttpServletRequest.class);
        mock.expectAndReturn("getCharacterEncoding", "utf-8");
        mock.expectAndReturn("getHeader", "X-Requested-With", "");
        mock.expectAndReturn("getLocale", Locale.getDefault());
        mock.expectAndReturn("getCharacterEncoding", "utf-8");
        HttpServletRequest req = (HttpServletRequest) mock.proxy();
        HttpServletResponse res = new MockHttpServletResponse();
        Dispatcher du = initDispatcher(new HashMap<String, String>() {{
            put(StrutsConstants.STRUTS_I18N_ENCODING, "utf-8");
        }});
        // when
        du.prepare(req, res);
        // then
        assertEquals(req.getCharacterEncoding(), "utf-8");
        mock.verify();
    }

    // Multipart requests get the configured encoding applied as well.
    public void testPrepareSetEncodingPropertyWithMultipartRequest() throws Exception {
        MockHttpServletRequest req = new MockHttpServletRequest();
        MockHttpServletResponse res = new MockHttpServletResponse();
        req.setContentType("multipart/form-data");
        Dispatcher du = initDispatcher(new HashMap<String, String>() {{
            put(StrutsConstants.STRUTS_I18N_ENCODING, "utf-8");
        }});
        du.prepare(req, res);
        assertEquals("utf-8", req.getCharacterEncoding());
    }

    // Registered DispatcherListeners must be notified on init and on cleanup.
    public void testDispatcherListener() throws Exception {
        final DispatcherListenerState state = new DispatcherListenerState();
        Dispatcher.addDispatcherListener(new DispatcherListener() {
            public void dispatcherDestroyed(Dispatcher du) {
                state.isDestroyed = true;
            }
            public void dispatcherInitialized(Dispatcher du) {
                state.isInitialized = true;
            }
        });
        assertFalse(state.isDestroyed);
        assertFalse(state.isInitialized);
        Dispatcher du = initDispatcher(new HashMap<String, String>() );
        assertTrue(state.isInitialized);
        du.cleanup();
        assertTrue(state.isDestroyed);
    }

    // cleanup() must route through ConfigurationManager.destroyConfiguration().
    public void testConfigurationManager() {
        Dispatcher du;
        InternalConfigurationManager configurationManager = new InternalConfigurationManager();
        try {
            du = new Dispatcher(new MockServletContext(), new HashMap<String, String>());
            du.setConfigurationManager(configurationManager);
            du.init();
            Dispatcher.setInstance(du);
            assertFalse(configurationManager.destroyConfiguration);
            du.cleanup();
            assertTrue(configurationManager.destroyConfiguration);
        }
        finally {
            // Always clear the thread-local instance so later tests start clean.
            Dispatcher.setInstance(null);
        }
    }

    // cleanup() must give the ObjectFactory a chance to destroy itself.
    public void testObjectFactoryDestroy() throws Exception {
        final InnerDestroyableObjectFactory destroyedObjectFactory = new InnerDestroyableObjectFactory();
        Dispatcher du = new Dispatcher(new MockServletContext(), new HashMap<String, String>());
        ConfigurationManager cm = new ConfigurationManager();
        Mock mockConfiguration = new Mock(Configuration.class);
        cm.setConfiguration((Configuration)mockConfiguration.proxy());
        Mock mockContainer = new Mock(Container.class);
        // "container" is provided by StrutsTestCase; mirror its reload setting
        // so the mocked container answers cleanup()'s lookups consistently.
        String reloadConfigs = container.getInstance(String.class, XWorkConstants.RELOAD_XML_CONFIGURATION);
        mockContainer.expectAndReturn("getInstance", C.args(C.eq(String.class), C.eq(StrutsConstants.STRUTS_CONFIGURATION_XML_RELOAD)),
                reloadConfigs);
        mockContainer.expectAndReturn("getInstance", C.args(C.eq(String.class), C.eq(XWorkConstants.RELOAD_XML_CONFIGURATION)),
                reloadConfigs);
        mockContainer.expectAndReturn("getInstance", C.args(C.eq(ObjectFactory.class)), destroyedObjectFactory);
        mockConfiguration.expectAndReturn("getContainer", mockContainer.proxy());
        mockConfiguration.expectAndReturn("getContainer", mockContainer.proxy());
        mockConfiguration.expect("destroy");
        mockConfiguration.matchAndReturn("getPackageConfigs", new HashMap<String, PackageConfig>());
        du.setConfigurationManager(cm);
        assertFalse(destroyedObjectFactory.destroyed);
        du.cleanup();
        assertTrue(destroyedObjectFactory.destroyed);
        mockConfiguration.verify();
        mockContainer.verify();
    }

    // cleanup() must call destroy() on interceptors found in package configs.
    public void testInterceptorDestroy() throws Exception {
        Mock mockInterceptor = new Mock(Interceptor.class);
        mockInterceptor.matchAndReturn("hashCode", 0);
        mockInterceptor.expect("destroy");
        InterceptorMapping interceptorMapping = new InterceptorMapping("test", (Interceptor) mockInterceptor.proxy());
        InterceptorStackConfig isc = new InterceptorStackConfig.Builder("test").addInterceptor(interceptorMapping).build();
        PackageConfig packageConfig = new PackageConfig.Builder("test").addInterceptorStackConfig(isc).build();
        Map<String, PackageConfig> packageConfigs = new HashMap<String, PackageConfig>();
        packageConfigs.put("test", packageConfig);
        Mock mockContainer = new Mock(Container.class);
        mockContainer.matchAndReturn("getInstance", C.args(C.eq(ObjectFactory.class)), new ObjectFactory());
        // See testObjectFactoryDestroy for the reload-setting mirroring.
        String reloadConfigs = container.getInstance(String.class, XWorkConstants.RELOAD_XML_CONFIGURATION);
        mockContainer.expectAndReturn("getInstance", C.args(C.eq(String.class), C.eq(StrutsConstants.STRUTS_CONFIGURATION_XML_RELOAD)),
                reloadConfigs);
        mockContainer.expectAndReturn("getInstance", C.args(C.eq(String.class), C.eq(XWorkConstants.RELOAD_XML_CONFIGURATION)),
                reloadConfigs);
        Mock mockConfiguration = new Mock(Configuration.class);
        mockConfiguration.matchAndReturn("getPackageConfigs", packageConfigs);
        mockConfiguration.matchAndReturn("getContainer", mockContainer.proxy());
        mockConfiguration.expect("destroy");
        ConfigurationManager configurationManager = new ConfigurationManager();
        configurationManager.setConfiguration((Configuration) mockConfiguration.proxy());
        Dispatcher dispatcher = new Dispatcher(new MockServletContext(), new HashMap<String, String>());
        dispatcher.setConfigurationManager(configurationManager);
        dispatcher.cleanup();
        mockInterceptor.verify();
        mockContainer.verify();
        mockConfiguration.verify();
    }

    // ConfigurationManager that records whether destroyConfiguration() ran.
    class InternalConfigurationManager extends ConfigurationManager {
        public boolean destroyConfiguration = false;

        @Override
        public synchronized void destroyConfiguration() {
            super.destroyConfiguration();
            destroyConfiguration = true;
        }
    }

    // Mutable flags toggled by the listener in testDispatcherListener.
    class DispatcherListenerState {
        public boolean isInitialized = false;
        public boolean isDestroyed = false;
    }
}
| |
/*
* The MIT License
*
* Copyright 2016 L. Arthur Lewis II.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.github.lal872k.pbib.ui;
import io.github.lal872k.pbib.Citation;
import io.github.lal872k.pbib.Footnote;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import java.awt.Insets;
import java.util.ArrayList;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.border.EmptyBorder;
import javax.swing.border.TitledBorder;
import javax.swing.text.DefaultCaret;
/**
*
* @author L. Arthur Lewis II
*/
public final class MakeFootnoteFrame extends MakeFrame<Footnote[]>{
public static final String WINDOW_TITLE = "Selecting Footnotes";
private final String document;
private final Citation[] cites;
private final ArrayList<Footnote> footnotes;
// swing stuff
private JPanel doc;
private JTextArea doc_text;
private JScrollPane doc_scroll;
private JPanel doc_bottom;
private JLabel doc_pos;
private JPanel doc_fn;
private JPanel fn; // footnote
private JList fn_list, fn_cites;
private JPanel fn_bottom, fn_lists;
private JButton fn_remove, fn_add;
private JPanel bot;
private JButton bot_finish;
private JButton bot_cancel;
public MakeFootnoteFrame(Component parentFrame, String document, Citation[] cites){
super(parentFrame, WINDOW_TITLE);
this.document = document;
this.cites = cites;
footnotes = new ArrayList();
}
@Override
public void initializeComponents() {
JPanel addpanel; // use for adding borders to panel before adding to parent
GridBagConstraints bag = new GridBagConstraints();
bag.insets = new Insets(5, 5, 5, 5);
// main doc
doc = new JPanel();
doc.setLayout(new GridBagLayout());
doc.setBorder(BorderFactory.createTitledBorder(
BorderFactory.createLineBorder(Color.BLACK), "Document"));
doc_pos = new JLabel();
doc_pos.setName("0");
doc_pos.setText("Position: 0");
doc_text = new JTextArea();
doc_text.setEditable(false);
doc_text.setWrapStyleWord(true);
doc_text.setLineWrap(true);
doc_text.addCaretListener(l -> {
doc_pos.setName(String.valueOf(l.getDot()));
doc_pos.setText("Position: " + l.getDot());
});
doc_text.setText(document);
doc_text.setCaret(new DefaultCaret(){
@Override
public void setSelectionVisible(boolean hasFocus) {
super.setSelectionVisible(true);
}
@Override
public void setVisible(boolean visible){
super.setVisible(true);
}
});
doc_text.getCaret().setVisible(true);
doc_scroll = new JScrollPane(doc_text, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
doc_bottom = new JPanel();
doc_bottom.setLayout(new BorderLayout());
doc_bottom.add(doc_pos, BorderLayout.EAST);
addpanel = addComponent(doc_scroll, 0, 0, 1, 1, 1, 1, bag);
doc.add(addpanel, bag);
addpanel = addComponent(doc_bottom, 0, 1, 1, 1, 1, 0, bag);
doc.add(addpanel, bag);
// all current footnotes
fn = new JPanel();
fn.setLayout(new GridBagLayout());
fn.setBorder(BorderFactory.createTitledBorder(
BorderFactory.createLineBorder(Color.BLACK), "Footnotes / Citations"));
fn_list = new JList();
fn_list.setBorder(BorderFactory.createTitledBorder(
BorderFactory.createLineBorder(Color.BLACK), "All Footnotes"));
fn_cites = new JList();
fn_cites.setBorder(BorderFactory.createTitledBorder(
BorderFactory.createLineBorder(Color.BLACK), "All Citations"));
String[] citeLists = new String[cites.length];
for (int q = 0; q < cites.length; q++){
Citation cite = cites[q];
citeLists[q] = cite.getName();
}
fn_cites.setListData(citeLists);
fn_lists = new JPanel();
fn_lists.setLayout(new GridLayout(1, 2));
fn_lists.add(fn_cites);
fn_lists.add(fn_list);
fn_bottom = new JPanel();
fn_remove = new JButton();
fn_remove.setText("Remove Footnote");
fn_remove.addActionListener(l -> {
if (fn_list.isSelectionEmpty()){
JOptionPane.showMessageDialog(null, "No footnote selected.", "Error",
JOptionPane.ERROR_MESSAGE);
return;
}
footnotes.remove(fn_list.getSelectedIndex());
updateFootnoteList();
});
fn_add = new JButton();
fn_add.setText("Add Footnote");
fn_add.addActionListener(l -> {
if (fn_cites.isSelectionEmpty()){
JOptionPane.showMessageDialog(null, "No citation selected.", "Error",
JOptionPane.ERROR_MESSAGE);
return;
}
Footnote foot = new Footnote(cites[fn_cites.getSelectedIndex()], Integer.parseInt(doc_pos.getName()));
footnotes.add(foot);
updateFootnoteList();
});
fn_bottom.setBorder(new EmptyBorder(5, 5, 5, 5));
fn_bottom.setLayout(new BoxLayout(fn_bottom, BoxLayout.X_AXIS));
fn_bottom.add(Box.createHorizontalGlue());
fn_bottom.add(fn_add, BorderLayout.LINE_END);
fn_bottom.add(Box.createRigidArea(new Dimension(5,0)));
fn_bottom.add(fn_remove, BorderLayout.EAST);
addpanel = addComponent(fn_lists, 0, 0, 1, 1, 1, 1, bag);
fn.add(addpanel, bag);
addpanel = addComponent(fn_bottom, 0, 1, 1, 1, 1, 0, bag);
fn.add(addpanel, bag);
doc_fn = new JPanel();
doc_fn.setLayout(new GridLayout(2, 1));
doc_fn.add(doc);
doc_fn.add(fn);
// bottom
bot = new JPanel();
bot_finish = new JButton();
bot_finish.setText("Add Footnotes");
bot_finish.addActionListener(l -> {
if (footnotes.isEmpty()){
JOptionPane.showMessageDialog(null, "You must have at least one footnote.",
"Error", JOptionPane.ERROR_MESSAGE);
return;
}
setContents(footnotes.toArray(new Footnote[0]));
});
bot_cancel = new JButton();
bot_cancel.setText("Cancel");
bot_cancel.addActionListener(l -> {
setContents(null);
});
bot.setBorder(new EmptyBorder(5, 5, 5, 5));
bot.setLayout(new BoxLayout(bot, BoxLayout.X_AXIS));
bot.add(Box.createHorizontalGlue());
bot.add(bot_finish, BorderLayout.LINE_END);
bot.add(Box.createRigidArea(new Dimension(5,0)));
bot.add(bot_cancel, BorderLayout.EAST);
container = new JPanel();
container.setLayout(new GridBagLayout());
addpanel = addComponent(doc_fn, 0, 0, 1, 1, 1, 1, bag);
container.add(addpanel, bag);
addpanel = addComponent(bot, 0, 1, 1, 1, 1, 0, bag);
container.add(addpanel, bag);
}
@Override
public void styleComponents() {
    // Every interactive control shares the application-wide font.
    JComponent[] fontTargets = {
        doc_text, doc_pos, fn_cites, fn_add, fn_list, fn_remove, bot_finish, bot_cancel
    };
    for (JComponent target : fontTargets) {
        target.setFont(SourceManagerFrame.UNIVERSAL_FONT);
    }
    // TitledBorder titles do not inherit the component font, so set them explicitly.
    JComponent[] titledTargets = { doc, fn, fn_list, fn_cites };
    for (JComponent bordered : titledTargets) {
        ((TitledBorder) bordered.getBorder()).setTitleFont(SourceManagerFrame.UNIVERSAL_FONT);
    }
}
public void updateFootnoteList(){
    // Rebuild the on-screen list so it mirrors the current footnotes collection.
    String[] entries = new String[footnotes.size()];
    int index = 0;
    for (Footnote footnote : footnotes) {
        entries[index++] = footnote.getName() + " @ position:" + footnote.getPosition();
    }
    fn_list.setListData(entries);
}
}
| |
package SprintSubIThink;
import battlecode.common.Clock;
import battlecode.common.Direction;
import battlecode.common.GameConstants;
import battlecode.common.MapLocation;
import battlecode.common.RobotController;
import battlecode.common.RobotType;
import battlecode.common.TerrainTile;
/**
 * Shared behaviour for all mobile units. Provides the common
 * sense / evade / mine / navigate turn loop, a per-direction danger map,
 * opportunistic building, and two navigation schemes: RouteNode path-finding
 * and a simple "bug" (wall-following) algorithm.
 */
public class Mobile extends Arobot {
    /** Movement objective for the current turn (swarm point, ore tile or enemy). */
    MapLocation location;
    MapLocation target;
    int assigned_job = 0;
    /** Expected incoming damage for the 8 adjacent tiles plus, at index 8, the current tile. */
    int[] danger_levels = {0,0,0,0,0,0,0,0,0};
    /** Index into danger_levels representing "stay where I am". */
    int my_danger_level = 8;
    int count_down = 35;
    /** Broadcast channels carrying this unit's swarm target coordinates. */
    int location_channel_x;
    int location_channel_y;
    /** Round number at which the cached navigator route was computed (routes go stale). */
    int turn_nav_calculated = 0;
    /** +1 probes clockwise while bugging, -1 counter-clockwise. */
    public int bugging_direction = 1;
    public int turns_bugging = 0;
    /** After this many consecutive bugging turns, flip bugging_direction and restart. */
    private int bugging_threshold = 20;
    public boolean i_am_not_bugging = true;
    public boolean i_have_stopped_bugging = false;
    public boolean i_have_started_bugging = false;
    public boolean sticky = false; //false == destination orientated. true == attack orientated
    public boolean swarm_attack = false;
    public MapLocation destination = null;
    private Direction current_heading = Direction.NONE;
    private Direction evaluate_heading = Direction.NONE;
    /** Heading offsets (in eighths of a turn) probed in order while wall-following. */
    static int bugging_directional_looks[] = new int[]{1,0,-1,-2,-3,-4,-5,-6,-7};
    /** Heading offsets probed during normal travel: straight ahead, then one step either side. */
    static int non_bugging_directional_looks[] = new int[]{0,1,-1};
    static RouteNode navigator;

    /**
     * @param rc controller for the robot this object drives; supply levels and
     *           swarm channels are initialised from the shared Arobot configuration.
     */
    public Mobile(RobotController rc){
        super(rc);
        my_max_supply_level = max_mobile_supply_level;
        my_min_supply_level = min_mobile_supply_level;
        my_optimal_supply_level = optimal_mobile_supply_level;
        location_channel_x = swarm_location_channel_x;
        location_channel_y = swarm_location_channel_y;
        destination = HQ_location;
    }

    /**
     * Main per-turn loop; never returns. Each iteration: wait for a ready core or
     * weapon, sense enemies, run the role hook, then evade / mine / navigate, and
     * finally share supply and attack before yielding the turn.
     */
    public void basic_turn_loop(){
        navigator = new RouteNode(robot_controller.getLocation(), HQ_location, robot_controller, 3);
        while(true){
            // Idle (but keep requesting help) until we can act this turn.
            while(!robot_controller.isCoreReady() && !robot_controller.isWeaponReady()){
                if(!my_type.equals(RobotType.DRONE)){
                    request_help();
                }
                robot_controller.yield();
            }
            sensed_enemy_robots = robot_controller.senseNearbyRobots((int)(BEYOND_MAX_ATTACK_RANGE), enemy_team);
            role_warfare();
            if(robot_controller.isCoreReady()){
                set_danger_levelz();
                // Only do strategic bookkeeping while standing on a safe tile.
                if(danger_levels[my_danger_level] == 0){
                    count_the_troops();
                    update_strategy();
                    check_for_builds();
                }
                if(danger_levels[my_danger_level] != 0){
                    evasive_move();
                    robot_controller.setIndicatorString(2, "Evading");
                } else if(robot_controller.canMine()){
                    robot_controller.setIndicatorString(2, "tring to mine");
                    evaluate_mining_position();
                    go_mining();
                } else{
                    robot_controller.setIndicatorString(2, "location");
                    if(attack_deadest_enemy_in_range()){
                        // If I attacked someone, stay where I am (not currently in danger here).
                    } else{
                        if(sticky && sensed_enemy_robots.length > 0){
                            // If there are enemies around and I'm sticky, head for the closest one.
                            MapLocation[] enemy_positions = new MapLocation[sensed_enemy_robots.length];
                            int num_of_loops = sensed_enemy_robots.length;
                            for(int i = 0; i < num_of_loops; i++){
                                enemy_positions[i] = sensed_enemy_robots[i].location;
                            }
                            location = find_closest(robot_controller.getLocation(), enemy_positions);
                            robot_controller.setIndicatorString(2, "aggressive");
                        }
                        // Re-path when the target changed or the cached route is over 100 rounds old.
                        if(!navigator.myDestination.equals(location) || Clock.getRoundNum() - turn_nav_calculated > 100){
                            turn_nav_calculated = Clock.getRoundNum();
                            navigator = new RouteNode(robot_controller.getLocation(), location, robot_controller, 3);
                            navigator.process(robot_controller.getLocation());
                        }
                        move_towards_direction(robot_controller.getLocation().directionTo(navigator.getNextLocation()));
                    }
                }
            }
            if(!my_type.equals(RobotType.DRONE)){
                request_help();
            }
            dish_out_supply();
            attack_deadest_enemy_in_range();
            robot_controller.setIndicatorString(0, location.toString());
            robot_controller.setIndicatorString(1, "heading for: " + navigator.getNextLocation().toString());
            robot_controller.yield();
        }
    }

    /**
     * Role hook run once per turn; individual unit subclasses override this.
     * Default behaviour: read the swarm target from the broadcast channels.
     *
     * @return true if the role consumed the turn (the default never does)
     */
    public boolean role_warfare(){
        location = new MapLocation(read_broadcast(location_channel_x), read_broadcast(location_channel_y));
        return false;
    }

    /** Panic move: bug straight back toward our HQ, bypassing the normal navigator. */
    public void evasive_move(){
        reset_simple_bug(HQ_location);
        simpleBug();
        applyMove();
    }

    /**
     * Rebuilds danger_levels for this turn: for the enemy HQ, every enemy tower and
     * every sensed enemy robot, adds its attack power to each of the nine candidate
     * tiles that would fall inside its (delay-adjusted) attack radius. All danger is
     * ignored while a swarm attack is in progress.
     */
    public void set_danger_levelz(){
        danger_levels = new int[]{0,0,0,0,0,0,0,0,0};
        if(swarm_attack)
            return;
        // Our effective delay reduction per turn: 0.5 without supply, 1 with enough supply.
        double reduction_per_turn = 0.5;
        if(robot_controller.getSupplyLevel() > (my_type.supplyUpkeep * my_type.movementDelay + my_type.loadingDelay)){
            reduction_per_turn = 1;
        }
        int attack_radius;
        // Enemy HQ threat.
        if(!(robot_controller.getLocation().distanceSquaredTo(enemy_HQ_Location) > BEYOND_MAX_ATTACK_RANGE)){
            attack_radius = get_attack_radius(RobotType.HQ);
            for(int j = 0; j < danger_levels.length; j++){
                if(enemy_HQ_Location.distanceSquaredTo(robot_controller.getLocation().add(danger_directions[j])) <= attack_radius){
                    danger_levels[j] += RobotType.HQ.attackPower;
                }
            }
        }
        // Enemy tower threats.
        attack_radius = get_attack_radius(RobotType.TOWER);
        MapLocation[] sensed_enemy_towers = robot_controller.senseEnemyTowerLocations();
        for(int i = 0; i < sensed_enemy_towers.length; i++){
            if(!(robot_controller.getLocation().distanceSquaredTo(sensed_enemy_towers[i]) > BEYOND_MAX_ATTACK_RANGE)){
                for(int j = 0; j < danger_levels.length; j++){
                    if(sensed_enemy_towers[i].distanceSquaredTo(robot_controller.getLocation().add(danger_directions[j])) <= attack_radius){
                        danger_levels[j] += RobotType.TOWER.attackPower;
                    }
                }
            }
        }
        if(sensed_enemy_robots == null)
            return;
        // Mobile enemy threats, widening/narrowing the radius by relative action delays.
        for(int i = 0; i < sensed_enemy_robots.length; i++){
            attack_radius = get_attack_radius(sensed_enemy_robots[i].type);
            double enemy_reduction = 0.5;
            if(sensed_enemy_robots[i].supplyLevel > 0)
                enemy_reduction = 1;
            // If the enemy can act before we finish loading, assume it closes in: widen by one ring.
            if((my_type.loadingDelay / reduction_per_turn) >= ((sensed_enemy_robots[i].coreDelay - 1) / enemy_reduction) && sensed_enemy_robots[i].type.canMove())
                attack_radius = increase_attack_radius(attack_radius, 1);
            // If we can load and move before it can move or fire, shrink by one ring.
            if(((my_type.loadingDelay + my_type.movementDelay) / reduction_per_turn) < (Math.min(((sensed_enemy_robots[i].coreDelay - 1) / enemy_reduction), ((sensed_enemy_robots[i].weaponDelay - 1) / enemy_reduction)))){
                attack_radius = increase_attack_radius(attack_radius, -1);
            }
            for(int j = 0; j < danger_levels.length; j++){
                if(sensed_enemy_robots[i].location.distanceSquaredTo(robot_controller.getLocation().add(danger_directions[j])) <= attack_radius){
                    danger_levels[j] += sensed_enemy_robots[i].type.attackPower;
                }
            }
        }
    }

    /** Mines the current tile if this unit can mine and its core is ready. */
    public void go_mining(){
        if(robot_controller.canMine() && robot_controller.isCoreReady()){
            try{
                robot_controller.mine();
                robot_controller.setIndicatorString(2, "mining");
            }catch(Exception e){
                print_exception(e);
            }
        }
    }

    /**
     * If the current tile's mining yield is below mining_move_threshold, searches
     * rings of increasing radius (1..9) for a richer, unoccupied tile and walks
     * toward the first one found. When nothing qualifies, halves the threshold so
     * the unit eventually settles instead of wandering forever.
     */
    public void evaluate_mining_position(){
        if(!robot_controller.isCoreReady()){
            return;
        }
        if(my_mining_rate(robot_controller.getLocation()) >= mining_move_threshold){
            return; // current tile is already good enough
        }
        for(int radius = 1; radius < 10; radius++){
            for (final Direction direction : directions){
                MapLocation test_location = robot_controller.getLocation().add(direction, radius);
                if(my_mining_rate(test_location) > mining_move_threshold){
                    try{
                        if(robot_controller.canSenseLocation(test_location) && !robot_controller.isLocationOccupied(test_location)){
                            location = test_location;
                            // Re-path if the target changed or the cached route is over 20 rounds old.
                            if(!navigator.myDestination.equals(location) || Clock.getRoundNum() - turn_nav_calculated > 20){
                                turn_nav_calculated = Clock.getRoundNum();
                                navigator = new RouteNode(robot_controller.getLocation(), location, robot_controller, 3);
                                navigator.process(robot_controller.getLocation());
                            }
                            move_towards_direction(robot_controller.getLocation().directionTo(navigator.getNextLocation()));
                            return;
                        }
                    } catch(Exception e){
                        print_exception(e);
                    }
                }
            }
        }
        mining_move_threshold /= 2;
    }

    /**
     * Expected ore gained by mining the_location this turn, per the game formula
     * min(ore, max(MINIMUM_MINE_AMOUNT, min(mining_max, ore/mining_rate))).
     *
     * @return 0 if this unit cannot mine or the tile holds no ore
     */
    public double my_mining_rate(MapLocation the_location){
        if(!robot_controller.canMine()){
            return 0;
        }
        double current_ore = robot_controller.senseOre(the_location);
        if(current_ore == 0){
            return 0;
        }
        return Math.min(current_ore, Math.max(GameConstants.MINIMUM_MINE_AMOUNT, Math.min(mining_max, (current_ore / mining_rate))));
    }

    /**
     * Tries to move roughly toward initial_direction, probing the eight compass
     * directions in the order given by directional_looks and skipping any tile
     * with non-zero projected danger.
     *
     * @return true if a move was issued
     */
    public boolean move_towards_direction(Direction initial_direction){
        if(initial_direction.equals(Direction.OMNI))
            return false;
        for(int i = 0; i < 8; i++){
            int test_direction = ((initial_direction.ordinal() + directional_looks[i] + 8) % 8);
            if(danger_levels[test_direction] == 0){
                if(move(directions[test_direction]))
                    return true;
            }
        }
        return false;
    }

    /**
     * Issues a move if the core is ready and the engine allows it.
     * NONE/OMNI are treated as successful no-ops.
     *
     * @return true if the robot moved (or was deliberately asked not to)
     */
    public boolean move(Direction direction){
        if(!robot_controller.isCoreReady())
            return false;
        try{
            if(direction.equals(Direction.NONE) || direction.equals(Direction.OMNI))
                return true;
            if (robot_controller.canMove(direction)){
                robot_controller.move(direction);
                return true;
            }
        } catch(Exception e){
            print_exception(e);
        }
        return false;
    }

    /** Builds the first under-quota structure type from spawn_build_ordinals, if this unit can build. */
    public void check_for_builds() {
        if(!my_type.canBuild()){
            return;
        }
        for(int build_ordinal : spawn_build_ordinals){
            if(need_more_build(build_ordinal) && build_structure(robot_types[build_ordinal])){
                break;
            }
        }
    }

    /** @return true while the live census for this robot type is below its configured cap. */
    public boolean need_more_build(int build_ordinal) {
        return robot_census[build_ordinal] < robot_max[build_ordinal];
    }

    /**
     * Attempts to build required_type in the first buildable adjacent direction,
     * recording the ore spent on the shared broadcast channel.
     *
     * @return true if construction started
     */
    public boolean build_structure(RobotType required_type){
        if(!robot_controller.isCoreReady() || !robot_controller.hasBuildRequirements(required_type)){
            return false;
        }
        for (final Direction direction : directions){
            if(robot_controller.canBuild(direction, required_type)){
                try{
                    robot_controller.build(direction, required_type);
                    send_broadcast(cumulative_ore_spent, read_broadcast(cumulative_ore_spent) + required_type.oreCost);
                    return true;
                } catch(Exception e){
                    print_exception(e);
                }
            }
        }
        return false;
    }

    /**
     * One step of the "simple bug" navigator: head straight for destination when
     * possible, otherwise wall-follow in bugging_direction. Sets evaluate_heading
     * to the chosen direction (NONE when fully blocked or already there) and
     * returns it; {@link #applyMove()} commits the result.
     */
    public Direction simpleBug(){
        if(i_am_not_bugging){
            current_heading = robot_controller.getLocation().directionTo(destination);
            if(current_heading.equals(Direction.OMNI)){
                // Already standing on the destination tile.
                evaluate_heading = Direction.NONE;
                return Direction.NONE;
            }
            for(int directionalOffset : non_bugging_directional_looks){
                evaluate_heading = directions[(current_heading.ordinal() + (directionalOffset * bugging_direction) + 8) % 8];
                if(can_move(evaluate_heading))
                    return evaluate_heading;
            }
            // All near-direct headings blocked: switch to wall-following.
            i_have_started_bugging = true;
        }
        i_am_not_bugging = false;
        turns_bugging++;
        for(int directionalOffset : bugging_directional_looks){
            evaluate_heading = directions[(current_heading.ordinal() + (directionalOffset * bugging_direction) + 8) % 8];
            if(can_move(evaluate_heading))
                return evaluate_heading;
        }
        evaluate_heading = Direction.NONE;
        return evaluate_heading;
    }

    /**
     * Commits the heading chosen by {@link #simpleBug()} and updates the bugging
     * state machine afterwards.
     */
    public void applyMove(){
        current_heading = evaluate_heading;
        move(current_heading);
        i_have_started_bugging = false;
        i_have_stopped_bugging = false;
        if(i_am_not_bugging){
            current_heading = robot_controller.getLocation().directionTo(destination);
        } else{
            // Stop bugging once the committed heading lines back up with the direct path.
            if (current_heading.equals(robot_controller.getLocation().directionTo(destination))){
                i_am_not_bugging = true;
                i_have_stopped_bugging = true;
                turns_bugging = 0;
            }
            // BUG FIX: these resets were previously unconditional (missing braces after
            // the threshold check), which cancelled bugging after every single move and
            // defeated wall-following entirely. They should only run when we give up on
            // the current probing direction and restart in the opposite one.
            if(turns_bugging > bugging_threshold){
                bugging_direction *= -1;
                i_am_not_bugging = true;
                i_have_stopped_bugging = true;
                turns_bugging = 0;
            }
        }
    }

    /** @return true if move_direction leads to passable, unoccupied, danger-free ground. */
    public boolean can_move(Direction move_direction){
        if(terrain_is_impassable(move_direction)){
            return false;
        }
        if(space_is_occupied(move_direction)){
            return false;
        }
        if(danger_levels[move_direction.ordinal()] > 0){
            return false;
        }
        return true;
    }

    /**
     * @return true if the adjacent tile in move_direction cannot be entered:
     *         off the map, or VOID for a non-flying unit (drones and missiles may
     *         cross VOID). Any other terrain, including unknown, counts as passable.
     */
    public boolean terrain_is_impassable(Direction move_direction){
        MapLocation next_move = robot_controller.getLocation().add(move_direction);
        TerrainTile test_terrain = robot_controller.senseTerrainTile(next_move);
        if(test_terrain.equals(TerrainTile.NORMAL))
            return false;
        if(test_terrain.equals(TerrainTile.OFF_MAP))
            return true;
        if(test_terrain.equals(TerrainTile.VOID) && (robot_controller.getType().equals(RobotType.DRONE) || robot_controller.getType().equals(RobotType.MISSILE)))
            return false;
        if(test_terrain.equals(TerrainTile.VOID))
            return true;
        return false;
    }

    //Not even sure if this should be checked here....
    /** @return true if a robot currently occupies the adjacent tile in move_direction. */
    public boolean space_is_occupied(Direction move_direction){
        try{
            return robot_controller.isLocationOccupied(robot_controller.getLocation().add(move_direction));
        }catch(Exception e){
            System.out.println("Exception in bug space_is_occupied");
        }
        return false;
    }

    /**
     * Points the simple bug at a new destination, resetting all wall-following
     * state. No-op when the destination is unchanged, so in-progress bugging survives.
     */
    public void reset_simple_bug(MapLocation the_destination){
        if(destination != null && destination.equals(the_destination))
            return;
        bugging_direction = 1;
        turns_bugging = 0;
        bugging_threshold = 20;
        i_am_not_bugging = true;
        i_have_stopped_bugging = false;
        i_have_started_bugging = false;
        current_heading = Direction.NONE;
        evaluate_heading = Direction.NONE;
        destination = the_destination;
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.compiler.options;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ui.configuration.ChooseModulesDialog;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.SpeedSearchBase;
import com.intellij.ui.SpeedSearchComparator;
import com.intellij.ui.TableUtil;
import com.intellij.ui.ToolbarDecorator;
import com.intellij.ui.table.JBTable;
import com.intellij.util.ui.EditableModel;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.table.*;
import java.awt.*;
import java.util.*;
import java.util.List;
/**
 * Two-column table (module, generated-sources directory name) used in the
 * annotation-processing settings UI. Wraps a {@link JBTable} with add/remove
 * toolbar actions and speed search over rendered module names.
 */
public class ProcessedModulesTable extends JPanel {
  private JBTable myTable = null;
  private MyTableModel myTableModel = null;

  public ProcessedModulesTable(final Project project) {
    super(new BorderLayout());
    myTableModel = new MyTableModel(project);
    myTable = new JBTable(myTableModel);
    myTable.getEmptyText().setText("No modules configured");
    //myTable.setShowGrid(false);
    myTable.setIntercellSpacing(JBUI.emptySize());
    myTable.setAutoResizeMode(JTable.AUTO_RESIZE_LAST_COLUMN);
    myTable.setColumnSelectionAllowed(false);
    final TableColumnModel columnModel = myTable.getColumnModel();
    // Size the dir-name column to its header text; the module column absorbs the rest.
    final TableColumn dirNameColumn = columnModel.getColumn(myTableModel.DIRNAME_COLUMN_INDEX);
    final String title = "Generated Sources Directory Name";
    dirNameColumn.setHeaderValue(title);
    final JTableHeader tableHeader = myTable.getTableHeader();
    final FontMetrics metrics = tableHeader.getFontMetrics(tableHeader.getFont());
    final int preferredWidth = metrics.stringWidth(title) + 12;
    dirNameColumn.setPreferredWidth(preferredWidth);
    dirNameColumn.setMaxWidth(preferredWidth + 20);
    dirNameColumn.setCellRenderer(new MyElementColumnCellRenderer());
    final TableColumn moduleColumn = columnModel.getColumn(myTableModel.ELEMENT_COLUMN_INDEX);
    moduleColumn.setHeaderValue("Module");
    moduleColumn.setCellRenderer(new MyElementColumnCellRenderer());
    final JPanel panel = ToolbarDecorator.createDecorator(myTable)
      .disableUpDownActions()
      .setPreferredSize(JBUI.size(100, 155))
      .createPanel();
    add(panel, BorderLayout.CENTER);
    // Speed search matches on "name (path)", exactly as rendered in the module column.
    final SpeedSearchBase<JBTable> speedSearch = new SpeedSearchBase<JBTable>(myTable) {
      @Override
      public int getSelectedIndex() {
        return myTable.getSelectedRow();
      }

      @Override
      protected int convertIndexToModel(int viewIndex) {
        return myTable.convertRowIndexToModel(viewIndex);
      }

      @NotNull
      @Override
      public Object[] getAllElements() {
        final int count = myTableModel.getRowCount();
        Object[] elements = new Object[count];
        for (int idx = 0; idx < count; idx++) {
          elements[idx] = myTableModel.getModuleAt(idx);
        }
        return elements;
      }

      @Override
      public String getElementText(Object element) {
        return ((Module)element).getName() + " (" + FileUtil.toSystemDependentName(((Module)element).getModuleFilePath()) + ")";
      }

      @Override
      public void selectElement(Object element, String selectedText) {
        final int count = myTableModel.getRowCount();
        for (int row = 0; row < count; row++) {
          if (element.equals(myTableModel.getModuleAt(row))) {
            final int viewRow = myTable.convertRowIndexToView(row);
            myTable.getSelectionModel().setSelectionInterval(viewRow, viewRow);
            TableUtil.scrollSelectionToVisible(myTable);
            break;
          }
        }
      }
    };
    speedSearch.setComparator(new SpeedSearchComparator(false));
  }

  /** Repaints the whole table after external model changes. */
  public void refresh() {
    myTableModel.fireTableDataChanged();
  }

  /** Repaints the single row showing {@code element}, if present. */
  public void refresh(Module element) {
    final int row = myTableModel.getElementRow(element);
    if (row >= 0) {
      myTableModel.fireTableRowsUpdated(row, row);
    }
  }

  private int[] mySavedSelection = null;

  /** Remembers the current row selection so it can be restored after a model rebuild. */
  public void saveSelection() {
    mySavedSelection = myTable.getSelectedRows();
  }

  /** Restores (and forgets) the selection captured by {@link #saveSelection()}. */
  public void restoreSelection() {
    if (mySavedSelection != null) {
      TableUtil.selectRows(myTable, mySavedSelection);
      mySavedSelection = null;
    }
  }

  /** Appends a module row (with optional generated-sources dir name), selects and focuses it. */
  public void addModule(Module element, String dirName) {
    myTableModel.addElement(element, dirName);
    selectRow(myTableModel.getRowCount() - 1);
    requestTableFocus();
  }

  /** Removes the module's row, keeping a sensible selection, and refocuses the table. */
  public void removeModule(Module element) {
    final int elementRow = myTableModel.getElementRow(element);
    if (elementRow < 0) {
      return; // no such element
    }
    final boolean wasSelected = myTable.getSelectionModel().isSelectedIndex(elementRow);
    myTableModel.removeElement(element);
    if (wasSelected) {
      final int rowCount = myTableModel.getRowCount();
      if (rowCount > 0) {
        selectRow(elementRow % rowCount);
      }
      else {
        myTable.getSelectionModel().clearSelection();
      }
    }
    requestTableFocus();
  }

  public void removeAllElements() {
    myTableModel.removeAllElements();
    myTable.getSelectionModel().clearSelection();
  }

  /** Moves keyboard focus to the table once pending focus events settle. */
  private void requestTableFocus() {
    IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(
      () -> IdeFocusManager.getGlobalInstance().requestFocus(myTable, true));
  }

  private void selectRow(final int row) {
    myTable.getSelectionModel().setSelectionInterval(row, row);
    myTable.scrollRectToVisible(myTable.getCellRect(row, 0, true));
  }

  /** @return the module of the selected row, or null when nothing is selected */
  @Nullable
  public Module getSelectedElement() {
    final int selectedRow = getSelectedElementRow();
    return selectedRow < 0 ? null : myTableModel.getModuleAt(selectedRow);
  }

  public int getSelectedElementRow() {
    return myTable.getSelectedRow();
  }

  /** @return modules of all selected rows */
  public List<Module> getSelectedElements() {
    final List<Module> elements = new ArrayList<>();
    final int[] selectedRows = myTable.getSelectedRows();
    for (int selectedRow : selectedRows) {
      if (selectedRow < 0) {
        continue;
      }
      elements.add(myTableModel.getModuleAt(selectedRow));
    }
    return elements;
  }

  /** Selects the rows for {@code elements} (clearing selection when empty) and focuses the table. */
  public void selectElements(Collection<? extends Module> elements) {
    if (elements.isEmpty()) {
      myTable.clearSelection();
      return;
    }
    final int[] rows = getElementsRows(elements);
    TableUtil.selectRows(myTable, rows);
    TableUtil.scrollSelectionToVisible(myTable);
    requestTableFocus();
  }

  private int[] getElementsRows(final Collection<? extends Module> elements) {
    final int[] rows = new int[elements.size()];
    int index = 0;
    for (final Module element : elements) {
      rows[index++] = myTableModel.getElementRow(element);
    }
    return rows;
  }

  /** @return every (module, generated-sources dir name) pair currently in the model */
  public List<Pair<Module, String>> getAllModules() {
    final int count = myTableModel.getRowCount();
    List<Pair<Module, String>> elements = new ArrayList<>();
    for (int idx = 0; idx < count; idx++) {
      final Module module = myTableModel.getModuleAt(idx);
      elements.add(Pair.create(module, myTableModel.getGenDirName(module)));
    }
    return elements;
  }

  public void sort(Comparator<Module> comparator) {
    myTableModel.sort(comparator);
  }

  @Override
  public void setEnabled(boolean enabled) {
    super.setEnabled(enabled);
    myTable.setRowSelectionAllowed(enabled);
    // Re-render all cells so they pick up the enabled/disabled look.
    myTableModel.fireTableDataChanged();
  }

  /** Commits any in-progress cell edit; call before reading the model. */
  public void stopEditing() {
    TableCellEditor editor = myTable.getCellEditor();
    if (editor != null) {
      editor.stopCellEditing();
    }
  }

  public JComponent getComponent() {
    return myTable;
  }

  public void clear() {
    myTableModel.clear();
  }

  public int getElementCount() {
    return myTableModel.getRowCount();
  }

  public Module getElementAt(int row) {
    return myTableModel.getModuleAt(row);
  }

  /** Table model: ordered module list plus an optional generated-sources dir name per module. */
  private final class MyTableModel extends AbstractTableModel implements EditableModel {
    private final List<Module> myElements = new ArrayList<>();
    private final Map<Module, String> myDirNameMap = new HashMap<>();
    public final int ELEMENT_COLUMN_INDEX = 0;
    public final int DIRNAME_COLUMN_INDEX = 1;
    private final Project myProject;

    private MyTableModel(Project project) {
      myProject = project;
    }

    public void sort(Comparator<Module> comparator) {
      Collections.sort(myElements, comparator);
      fireTableDataChanged();
    }

    public List<Module> getAllModules() {
      return Collections.unmodifiableList(myElements);
    }

    public Module getModuleAt(int index) {
      return myElements.get(index);
    }

    /** @return the configured dir name for the module, or null when unset */
    public String getGenDirName(Module module) {
      return myDirNameMap.get(module);
    }

    void addElement(Module module, final String dirName) {
      myElements.add(module);
      if (dirName != null && !dirName.isEmpty()) {
        myDirNameMap.put(module, dirName);
      }
      int row = myElements.size() - 1;
      fireTableRowsInserted(row, row);
    }

    @Override
    public void addRow() {
      // Offer only modules not yet present in the table.
      final Set<Module> projectModules = new HashSet<>(Arrays.asList(ModuleManager.getInstance(myProject).getModules()));
      projectModules.removeAll(getAllModules());
      final ChooseModulesDialog chooser =
        new ChooseModulesDialog(ProcessedModulesTable.this, new ArrayList<>(projectModules), "ChooseModule");
      if (chooser.showAndGet()) {
        final List<Module> chosen = chooser.getChosenElements();
        for (Module module : chosen) {
          addElement(module, null);
        }
      }
    }

    @Override
    public void removeRow(int idx) {
      final Module element = myElements.remove(idx);
      myDirNameMap.remove(element);
      fireTableRowsDeleted(idx, idx);
    }

    @Override
    public void exchangeRows(int oldIndex, int newIndex) {
      // Row reordering is not supported.
    }

    @Override
    public boolean canExchangeRows(int oldIndex, int newIndex) {
      return false;
    }

    public void removeElement(Module element) {
      final boolean reallyRemoved = myElements.remove(element);
      if (reallyRemoved) {
        myDirNameMap.remove(element);
        fireTableDataChanged();
      }
    }

    public int getElementRow(Module element) {
      return myElements.indexOf(element);
    }

    public void removeAllElements() {
      myElements.clear();
      fireTableDataChanged();
    }

    @Override
    public int getRowCount() {
      return myElements.size();
    }

    @Override
    public int getColumnCount() {
      return 2;
    }

    @Override
    @Nullable
    public Object getValueAt(int rowIndex, int columnIndex) {
      Module element = myElements.get(rowIndex);
      if (columnIndex == ELEMENT_COLUMN_INDEX) {
        return element;
      }
      if (columnIndex == DIRNAME_COLUMN_INDEX) {
        return myDirNameMap.get(element);
      }
      return null;
    }

    @Override
    public void setValueAt(Object value, int rowIndex, int columnIndex) {
      if (columnIndex == DIRNAME_COLUMN_INDEX) {
        final Module module = myElements.get(rowIndex);
        if (value != null) {
          // Normalize separators and strip leading slashes: the stored name is relative.
          String dir = FileUtil.toSystemIndependentName((String)value);
          while (dir.startsWith("/")) {
            dir = dir.substring(1);
          }
          if (!dir.isEmpty()) {
            myDirNameMap.put(module, dir);
          }
          else {
            myDirNameMap.remove(module);
          }
        }
        else {
          myDirNameMap.remove(module);
        }
        fireTableRowsUpdated(rowIndex, rowIndex);
      }
    }

    @Override
    public Class<?> getColumnClass(int columnIndex) {
      if (columnIndex == DIRNAME_COLUMN_INDEX) {
        return String.class;
      }
      return super.getColumnClass(columnIndex);
    }

    @Override
    public boolean isCellEditable(int rowIndex, int columnIndex) {
      // Only the dir-name column is editable, and only while the panel is enabled.
      return ProcessedModulesTable.this.isEnabled() && columnIndex == DIRNAME_COLUMN_INDEX;
    }

    public void clear() {
      myElements.clear();
      myDirNameMap.clear();
      fireTableDataChanged();
    }
  }

  /** Renders a module as "name (path)" with its type icon; also used for the dir-name column. */
  private class MyElementColumnCellRenderer extends DefaultTableCellRenderer {
    @Override
    public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
      final Color color = UIUtil.getTableFocusCellBackground();
      Component component;
      final Module module = value instanceof Module ? (Module)value : null;
      try {
        // Temporarily align the focus-cell background with the selection background.
        UIManager.put(UIUtil.TABLE_FOCUS_CELL_BACKGROUND_PROPERTY, table.getSelectionBackground());
        component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
        if (module != null) {
          setText(module.getName() + " (" + FileUtil.toSystemDependentName(module.getModuleFilePath()) + ")");
        }
        if (component instanceof JLabel) {
          ((JLabel)component).setBorder(noFocusBorder);
        }
      }
      finally {
        UIManager.put(UIUtil.TABLE_FOCUS_CELL_BACKGROUND_PROPERTY, color);
      }
      component.setEnabled(ProcessedModulesTable.this.isEnabled());
      if (component instanceof JLabel) {
        final Icon icon = module != null ? ModuleType.get(module).getIcon() : null;
        JLabel label = (JLabel)component;
        label.setIcon(icon);
        label.setDisabledIcon(icon);
      }
      component.setForeground(isSelected ? table.getSelectionForeground() : table.getForeground());
      return component;
    }
  }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.DefaultLogger;
import com.intellij.openapi.extensions.ExtensionNotApplicableException;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.util.EmptyRunnable;
import com.intellij.testFramework.LightPlatformTestCase;
import com.intellij.testFramework.LoggedErrorProcessor;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.util.Alarm;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.TestTimeOut;
import com.intellij.util.concurrency.EdtExecutorService;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
public class IdeEventQueueTest extends LightPlatformTestCase {
public void testManyEventsStress() {
  // Flood the queue with 100k trivial invocation events and require that they
  // all execute within the performance budget.
  final int eventCount = 100000;
  PlatformTestUtil.startPerformanceTest("Event queue dispatch", 10000, () -> {
    UIUtil.dispatchAllInvocationEvents();
    AtomicInteger executed = new AtomicInteger();
    for (int i = 0; i < eventCount; i++) {
      SwingUtilities.invokeLater(executed::incrementAndGet);
    }
    UIUtil.dispatchAllInvocationEvents();
    assertEquals(eventCount, executed.get());
  }).assertTiming();
}
public void testKeyboardEventsAreDetected() throws InterruptedException {
  // Verifies IdeEventQueue's keyboard counters: key events bump both the "posted"
  // and (after dispatch) the "dispatched" counter, while non-key events bump neither.
  assertTrue(EventQueue.isDispatchThread());
  IdeEventQueue ideEventQueue = IdeEventQueue.getInstance();
  assertSame(ideEventQueue, Toolkit.getDefaultToolkit().getSystemEventQueue());
  PlatformTestUtil.dispatchAllEventsInIdeEventQueue();
  // Record which events actually travel through the dispatcher chain.
  Set<AWTEvent> isDispatched = new HashSet<>();
  ideEventQueue.addDispatcher(e -> {
    isDispatched.add(e);
    LOG.debug("dispatch: "+e);
    return false;
  }, getTestRootDisposable());
  ideEventQueue.addPostprocessor(e -> {
    LOG.debug("post dispatch: "+e);
    return false;
  }, getTestRootDisposable());
  ideEventQueue.addPostEventListener(e -> {
    LOG.debug("post event hook: "+e);
    return false;
  }, getTestRootDisposable());
  // Baseline counter values before any posting.
  int posted = ideEventQueue.myKeyboardEventsPosted.get();
  int dispatched = ideEventQueue.myKeyboardEventsDispatched.get();
  KeyEvent pressX = new KeyEvent(new JLabel("mykeypress"), KeyEvent.KEY_PRESSED, 1, InputEvent.ALT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK | InputEvent.SHIFT_DOWN_MASK, 11, 'x');
  postCarefully(pressX);
  // Posting alone must bump "posted" but not "dispatched".
  assertEquals(posted+1, ideEventQueue.myKeyboardEventsPosted.get());
  assertEquals(dispatched, ideEventQueue.myKeyboardEventsDispatched.get());
  dispatchAllInvocationEventsUntilOtherEvent();
  // either it's dispatched by this method or the f*@$ing VCSRefresh activity stomped in, started modal progress and consumed all events via IdeEventQueue.pumpEventsForHierarchy
  assertTrue(isDispatched.contains(pressX) || isConsumed(pressX));
  assertEquals(posted+1, ideEventQueue.myKeyboardEventsPosted.get());
  assertEquals(dispatched+1, ideEventQueue.myKeyboardEventsDispatched.get());
  // do not react to other events
  AWTEvent ev2 = new ActionEvent(new JLabel(), ActionEvent.ACTION_PERFORMED, "myCommand");
  postCarefully(ev2);
  // A non-keyboard event must leave both counters untouched.
  assertEquals(posted+1, ideEventQueue.myKeyboardEventsPosted.get());
  assertEquals(dispatched+1, ideEventQueue.myKeyboardEventsDispatched.get());
  dispatchAllInvocationEventsUntilOtherEvent();
  // either it's dispatched by this method or the f*@$ing VCSRefresh activity stomped in, started modal progress and dispatched all events via IdeEventQueue.pumpEventsForHierarchy by itself
  assertTrue(isDispatched.contains(ev2));
  assertEquals(posted+1, ideEventQueue.myKeyboardEventsPosted.get());
  assertEquals(dispatched+1, ideEventQueue.myKeyboardEventsDispatched.get());
  KeyEvent keyRelease = new KeyEvent(new JLabel("mykeyrelease"), KeyEvent.KEY_RELEASED, 1, InputEvent.ALT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK | InputEvent.SHIFT_DOWN_MASK, 11, 'x');
  postCarefully(keyRelease);
  // A key release counts as a keyboard event too.
  assertEquals(posted+2, ideEventQueue.myKeyboardEventsPosted.get());
  assertEquals(dispatched+1, ideEventQueue.myKeyboardEventsDispatched.get());
  dispatchAllInvocationEventsUntilOtherEvent();
  // either it's dispatched by this method or the f*@$ing VCSRefresh activity stomped in, started modal progress and consumed all events via IdeEventQueue.pumpEventsForHierarchy
  assertTrue(isDispatched.contains(keyRelease) || isConsumed(keyRelease));
  assertEquals(posted+2, ideEventQueue.myKeyboardEventsPosted.get());
  assertEquals(dispatched+2, ideEventQueue.myKeyboardEventsDispatched.get());
}
/** Posts the event to the IDE queue and sanity-checks its posted/consumed flags. */
private static void postCarefully(AWTEvent event) {
  LOG.debug("posting " + event);
  boolean accepted = IdeEventQueue.getInstance().doPostEvent(event);
  assertTrue("Was not posted: " + event, accepted);
  // ACTION_PERFORMED events get consumed on post; everything else must stay unconsumed.
  boolean expectConsumed = event.getID() == ActionEvent.ACTION_PERFORMED;
  assertEquals(expectConsumed, ReflectionUtil.getField(AWTEvent.class, event, boolean.class, "consumed").booleanValue());
  assertTrue(ReflectionUtil.getField(AWTEvent.class, event, boolean.class, "isPosted"));
}
// Reflection-free consumed check; InputEvent exposes isConsumed() publicly,
// unlike plain AWTEvent (see postCarefully above for the reflective variant).
private static boolean isConsumed(InputEvent event) {
return event.isConsumed();
}
// need this because everybody can post some crazy stuff to IdeEventQueue, so we have to filter InvocationEvents out
// Dispatches pending events one at a time, stopping at the first event that is not an
// InvocationEvent. NOTE(review): presumably dispatchNextEventIfAny() returns null when the
// queue is empty, which also terminates the loop (null is not an InvocationEvent) — confirm.
private static void dispatchAllInvocationEventsUntilOtherEvent() throws InterruptedException {
while (true) {
AWTEvent event = PlatformTestUtil.dispatchNextEventIfAny();
LOG.debug("event dispatched in dispatchAll() "+event+"; -"+(event instanceof InvocationEvent ? "continuing" : "returning"));
if (!(event instanceof InvocationEvent)) break;
}
}
// Marker exception thrown from posted runnables; the tests below assert that it
// propagates out of the event-dispatching helpers immediately in test mode.
private static class MyException extends RuntimeException {
}
// Helper whose sole purpose is to make a posted runnable fail with MyException.
private static void throwMyException() {
throw new MyException();
}
/**
 * Pumps invocation events until {@link MyException} surfaces from the dispatch,
 * failing the test if it has not appeared within 10 seconds.
 */
private static void checkMyExceptionThrownImmediately() {
TestTimeOut deadline = TestTimeOut.setTimeout(10, TimeUnit.SECONDS);
for (;;) {
try {
UIUtil.dispatchAllInvocationEvents();
}
catch (MyException expected) {
// the exception arrived — that is exactly what we were waiting for
return;
}
assertFalse(deadline.timedOut());
}
}
// Alarm requests must execute and rethrow their exception promptly in unit-test mode.
public void testExceptionInAlarmMustThrowImmediatelyInTests() {
Alarm alarm = new Alarm();
alarm.addRequest(()-> throwMyException(), 1);
checkMyExceptionThrownImmediately();
}
/** SwingUtilities.invokeLater runnables must rethrow their exception promptly in tests. */
public void testExceptionInInvokeLateredRunnableMustThrowImmediatelyInTests() {
Runnable failing = () -> throwMyException();
SwingUtilities.invokeLater(failing);
checkMyExceptionThrownImmediately();
}
// Nested invokeLater (Swing -> Application) must still surface the failure immediately.
public void testAppInvokeLateredRunnableMustThrowImmediatelyInTests() {
SwingUtilities.invokeLater(()->ApplicationManager.getApplication().invokeLater(()->throwMyException()));
checkMyExceptionThrownImmediately();
}
/** EdtExecutorService tasks must rethrow their exception promptly in tests. */
public void testEdtExecutorRunnableMustThrowImmediatelyInTests() {
Runnable failing = () -> throwMyException();
EdtExecutorService.getInstance().execute(failing, ModalityState.NON_MODAL);
checkMyExceptionThrownImmediately();
}
// Scheduled EDT tasks must rethrow their exception promptly in tests.
public void testEdtScheduledExecutorRunnableMustThrowImmediatelyInTests() {
EdtExecutorService.getScheduledExecutorInstance().schedule(()->throwMyException(), 1, TimeUnit.MILLISECONDS);
checkMyExceptionThrownImmediately();
}
// Verifies which throwables escaping an event dispatch get logged:
// ControlFlowException subclasses are swallowed silently, anything else is reported once.
public void testNoExceptionEvenCreatedByThanosExtensionNotApplicableExceptionMustKillEDT() {
assert SwingUtilities.isEventDispatchThread();
DefaultLogger.disableStderrDumping(getTestRootDisposable());
throwInIdeEventQueueDispatch(ExtensionNotApplicableException.INSTANCE, null); // ControlFlowException silently ignored
throwInIdeEventQueueDispatch(new ProcessCanceledException(), null); // ControlFlowException silently ignored
Error error = new Error();
throwInIdeEventQueueDispatch(error, error);
}
/**
 * Dispatches an InvocationEvent whose runnable throws {@code toThrow}, and asserts that
 * exactly {@code expectedToBeLogged} (or nothing, when {@code null}) reaches the error log.
 * A temporary LoggedErrorProcessor captures what gets logged; dispatch runs in
 * production mode so the throwable is handled by the queue rather than the test harness.
 */
private void throwInIdeEventQueueDispatch(@NotNull Throwable toThrow, Throwable expectedToBeLogged) {
AtomicBoolean run = new AtomicBoolean();
InvocationEvent event = new InvocationEvent(this, () -> {
run.set(true);
ExceptionUtil.rethrow(toThrow);
});
AtomicReference<Throwable> error = new AtomicReference<>();
LoggedErrorProcessor old = LoggedErrorProcessor.getInstance();
LoggedErrorProcessor.setNewInstance(new LoggedErrorProcessor() {
@Override
public boolean processError(@NotNull String category, String message, Throwable t, String @NotNull [] details) {
// at most one error may be reported per dispatch
assertNull(error.get());
error.set(t);
return false;
}
});
IdeEventQueue ideEventQueue = IdeEventQueue.getInstance();
try {
ideEventQueue.executeInProductionModeEvenThoughWeAreInTests(() -> ideEventQueue.dispatchEvent(event));
}
finally {
// always restore the original processor, even if the dispatch itself throws
LoggedErrorProcessor.setNewInstance(old);
}
assertTrue(run.get());
assertSame(expectedToBeLogged, error.get());
}
// pumpEventsForHierarchy must return as soon as its exit-condition predicate matches an
// event, even though the future passed in is never completed.
public void testPumpEventsForHierarchyMustExitOnIsCancelEventCondition() {
assert SwingUtilities.isEventDispatchThread();
IdeEventQueue ideEventQueue = IdeEventQueue.getInstance();
CompletableFuture<Object> future = new CompletableFuture<>();
TestTimeOut cancelEventTime = TestTimeOut.setTimeout(2, TimeUnit.SECONDS);
JLabel component = new JLabel();
long start = System.currentTimeMillis();
ideEventQueue.pumpEventsForHierarchy(component, future, event -> {
// after ~2s start posting a marker event that the predicate below recognizes
if (cancelEventTime.isTimedOut()) {
ideEventQueue.postEvent(new TextEvent(component, -239){
@Override
public String paramString() {
return "my";
}
});
}
// post InvocationEvent to give getNextEvent work to do
SwingUtilities.invokeLater(EmptyRunnable.getInstance());
return "my".equals(event.paramString());
});
long elapsedMs = System.currentTimeMillis() - start;
// check that first, we did exit the pumpEventsForHierarchy and second, at the right moment
assertTrue(String.valueOf(elapsedMs), cancelEventTime.isTimedOut());
}
// pumpEventsForHierarchy must also return when the supplied future completes, even
// though the exit-condition predicate never matches any event.
public void testPumpEventsForHierarchyMustExitOnIsFutureDoneCondition() {
assert SwingUtilities.isEventDispatchThread();
IdeEventQueue ideEventQueue = IdeEventQueue.getInstance();
CompletableFuture<Object> future = new CompletableFuture<>();
TestTimeOut cancelEventTime = TestTimeOut.setTimeout(2, TimeUnit.SECONDS);
JLabel component = new JLabel();
long start = System.currentTimeMillis();
ideEventQueue.pumpEventsForHierarchy(component, future, __ -> {
// after ~2s complete the future; that is the only way this pump can exit
if (cancelEventTime.isTimedOut()) {
future.complete(null);
}
// post InvocationEvent to give getNextEvent work to do
SwingUtilities.invokeLater(EmptyRunnable.getInstance());
return false;
});
long elapsedMs = System.currentTimeMillis() - start;
// check that first, we did exit the pumpEventsForHierarchy and second, at the right moment
assertTrue(String.valueOf(elapsedMs), cancelEventTime.isTimedOut());
}
}
| |
/*
* Copyright 2015 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigquery;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.cloud.bigquery.BigQuery.JobOption;
import com.google.cloud.bigquery.BigQuery.TableDataListOption;
import com.google.cloud.bigquery.BigQuery.TableOption;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.math.BigInteger;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* A Google BigQuery Table.
*
* <p>Objects of this class are immutable. Operations that modify the table like {@link #update}
* return a new object. To get a {@code Table} object with the most recent information use {@link
* #reload}. {@code Table} adds a layer of service-related functionality over {@link TableInfo}.
*/
public class Table extends TableInfo {

  private static final long serialVersionUID = 5744556727066570096L;

  private final BigQueryOptions options;
  // Not serializable; re-created from `options` in readObject() after deserialization.
  private transient BigQuery bigquery;

  /** A builder for {@code Table} objects. */
  public static class Builder extends TableInfo.Builder {
    private final BigQuery bigquery;
    private final TableInfo.BuilderImpl infoBuilder;

    // FIX: parameter was misspelled "defintion"; renamed (package-private, no caller impact).
    Builder(BigQuery bigquery, TableId tableId, TableDefinition definition) {
      this.bigquery = bigquery;
      this.infoBuilder = new TableInfo.BuilderImpl();
      this.infoBuilder.setTableId(tableId).setDefinition(definition);
    }

    Builder(Table table) {
      this.bigquery = table.bigquery;
      this.infoBuilder = new TableInfo.BuilderImpl(table);
    }

    @Override
    Builder setCreationTime(Long creationTime) {
      infoBuilder.setCreationTime(creationTime);
      return this;
    }

    @Override
    public Builder setDescription(String description) {
      infoBuilder.setDescription(description);
      return this;
    }

    @Override
    Builder setEtag(String etag) {
      infoBuilder.setEtag(etag);
      return this;
    }

    @Override
    public Builder setExpirationTime(Long expirationTime) {
      infoBuilder.setExpirationTime(expirationTime);
      return this;
    }

    @Override
    public Builder setFriendlyName(String friendlyName) {
      infoBuilder.setFriendlyName(friendlyName);
      return this;
    }

    @Override
    Builder setGeneratedId(String generatedId) {
      infoBuilder.setGeneratedId(generatedId);
      return this;
    }

    @Override
    Builder setLastModifiedTime(Long lastModifiedTime) {
      infoBuilder.setLastModifiedTime(lastModifiedTime);
      return this;
    }

    @Override
    Builder setNumBytes(Long numBytes) {
      infoBuilder.setNumBytes(numBytes);
      return this;
    }

    @Override
    Builder setNumLongTermBytes(Long numLongTermBytes) {
      infoBuilder.setNumLongTermBytes(numLongTermBytes);
      return this;
    }

    @Override
    Builder setNumRows(BigInteger numRows) {
      infoBuilder.setNumRows(numRows);
      return this;
    }

    @Override
    Builder setSelfLink(String selfLink) {
      infoBuilder.setSelfLink(selfLink);
      return this;
    }

    @Override
    public Builder setTableId(TableId tableId) {
      infoBuilder.setTableId(tableId);
      return this;
    }

    @Override
    public Builder setDefinition(TableDefinition definition) {
      infoBuilder.setDefinition(definition);
      return this;
    }

    @Override
    public TableInfo.Builder setEncryptionConfiguration(EncryptionConfiguration configuration) {
      infoBuilder.setEncryptionConfiguration(configuration);
      return this;
    }

    @Override
    public Builder setLabels(Map<String, String> labels) {
      infoBuilder.setLabels(labels);
      return this;
    }

    @Override
    public Builder setRequirePartitionFilter(Boolean requirePartitionFilter) {
      infoBuilder.setRequirePartitionFilter(requirePartitionFilter);
      return this;
    }

    @Override
    public Table build() {
      return new Table(bigquery, infoBuilder);
    }
  }

  /** Binds the table metadata in {@code infoBuilder} to the given {@code bigquery} service. */
  Table(BigQuery bigquery, TableInfo.BuilderImpl infoBuilder) {
    super(infoBuilder);
    this.bigquery = checkNotNull(bigquery);
    this.options = bigquery.getOptions();
  }

  /**
   * Checks if this table exists.
   *
   * <p>Example of checking if the table exists.
   *
   * <pre>{@code
   * boolean exists = table.exists();
   * if (exists) {
   *   // the table exists
   * } else {
   *   // the table was not found
   * }
   * }</pre>
   *
   * @return {@code true} if this table exists, {@code false} otherwise
   * @throws BigQueryException upon failure
   */
  public boolean exists() {
    // Request no fields: existence is determined solely by a non-null response.
    return bigquery.getTable(getTableId(), TableOption.fields()) != null;
  }

  /**
   * Fetches current table's latest information. Returns {@code null} if the table does not exist.
   *
   * <p>Example of fetching the table's latest information, specifying particular table fields to
   * get.
   *
   * <pre>{@code
   * TableField field1 = TableField.LAST_MODIFIED_TIME;
   * TableField field2 = TableField.NUM_ROWS;
   * Table latestTable = table.reload(TableOption.fields(field1, field2));
   * if (latestTable == null) {
   *   // the table was not found
   * }
   * }</pre>
   *
   * @param options table options
   * @return a {@code Table} object with latest information or {@code null} if not found
   * @throws BigQueryException upon failure
   */
  public Table reload(TableOption... options) {
    return bigquery.getTable(getTableId(), options);
  }

  /**
   * Updates the table's information with this table's information. Dataset's and table's
   * user-defined ids cannot be changed. A new {@code Table} object is returned.
   *
   * <p>Example of updating the table's information.
   *
   * <pre>{@code
   * Table updatedTable = table.toBuilder().setDescription("new description").build().update();
   * }</pre>
   *
   * @param options dataset options
   * @return a {@code Table} object with updated information
   * @throws BigQueryException upon failure
   */
  public Table update(TableOption... options) {
    return bigquery.update(this, options);
  }

  /**
   * Deletes this table.
   *
   * <p>Example of deleting the table.
   *
   * <pre>{@code
   * boolean deleted = table.delete();
   * if (deleted) {
   *   // the table was deleted
   * } else {
   *   // the table was not found
   * }
   * }</pre>
   *
   * @return {@code true} if table was deleted, {@code false} if it was not found
   * @throws BigQueryException upon failure
   */
  public boolean delete() {
    return bigquery.delete(getTableId());
  }

  /**
   * Insert rows into the table.
   *
   * <p>Streaming inserts reside temporarily in the streaming buffer, which has different
   * availability characteristics than managed storage. Certain operations do not interact with the
   * streaming buffer, such as {@link #list(TableDataListOption...)} and {@link #copy(TableId,
   * JobOption...)}. As such, recent streaming data will not be present in the destination table or
   * output.
   *
   * <p>Example of inserting rows into the table.
   *
   * <pre>{@code
   * String rowId1 = "rowId1";
   * String rowId2 = "rowId2";
   * List<RowToInsert> rows = new ArrayList<>();
   * Map<String, Object> row1 = new HashMap<>();
   * row1.put("stringField", "value1");
   * row1.put("booleanField", true);
   * Map<String, Object> row2 = new HashMap<>();
   * row2.put("stringField", "value2");
   * row2.put("booleanField", false);
   * rows.add(RowToInsert.of(rowId1, row1));
   * rows.add(RowToInsert.of(rowId2, row2));
   * InsertAllResponse response = table.insert(rows);
   * // do something with response
   * }</pre>
   *
   * @param rows rows to be inserted
   * @throws BigQueryException upon failure
   */
  public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows)
      throws BigQueryException {
    return bigquery.insertAll(InsertAllRequest.of(getTableId(), rows));
  }

  /**
   * Insert rows into the table.
   *
   * <p>Streaming inserts reside temporarily in the streaming buffer, which has different
   * availability characteristics than managed storage. Certain operations do not interact with the
   * streaming buffer, such as {@link #list(TableDataListOption...)} and {@link #copy(TableId,
   * JobOption...)}. As such, recent streaming data will not be present in the destination table or
   * output.
   *
   * <p>Example of inserting rows into the table, ignoring invalid rows.
   *
   * <pre>{@code
   * String rowId1 = "rowId1";
   * String rowId2 = "rowId2";
   * List<RowToInsert> rows = new ArrayList<>();
   * Map<String, Object> row1 = new HashMap<>();
   * row1.put("stringField", 1);
   * row1.put("booleanField", true);
   * Map<String, Object> row2 = new HashMap<>();
   * row2.put("stringField", "value2");
   * row2.put("booleanField", false);
   * rows.add(RowToInsert.of(rowId1, row1));
   * rows.add(RowToInsert.of(rowId2, row2));
   * InsertAllResponse response = table.insert(rows, true, true);
   * // do something with response
   * }</pre>
   *
   * @param rows rows to be inserted
   * @param skipInvalidRows whether to insert all valid rows, even if invalid rows exist. If not set
   *     the entire insert operation will fail if rows to be inserted contain an invalid row
   * @param ignoreUnknownValues whether to accept rows that contain values that do not match the
   *     schema. The unknown values are ignored. If not set, rows with unknown values are considered
   *     to be invalid
   * @throws BigQueryException upon failure
   */
  public InsertAllResponse insert(
      Iterable<InsertAllRequest.RowToInsert> rows,
      boolean skipInvalidRows,
      boolean ignoreUnknownValues)
      throws BigQueryException {
    InsertAllRequest request =
        InsertAllRequest.newBuilder(getTableId(), rows)
            .setSkipInvalidRows(skipInvalidRows)
            .setIgnoreUnknownValues(ignoreUnknownValues)
            .build();
    return bigquery.insertAll(request);
  }

  /**
   * Returns the paginated list rows in this table.
   *
   * <p>Example of listing rows in the table.
   *
   * <pre>{@code
   * // This example reads the result 100 rows per RPC call. If there's no need to limit the number,
   * // simply omit the option.
   * Page<FieldValueList> page = table.list(TableDataListOption.pageSize(100));
   * for (FieldValueList row : page.iterateAll()) {
   *   // do something with the row
   * }
   * }</pre>
   *
   * @param options table data list options
   * @throws BigQueryException upon failure
   */
  public TableResult list(TableDataListOption... options) throws BigQueryException {
    return bigquery.listTableData(getTableId(), options);
  }

  /**
   * Returns the paginated list rows in this table.
   *
   * <p>Example of listing rows in the table given a schema.
   *
   * <pre>{@code
   * Schema schema = ...;
   * String field = "my_field";
   * Page<FieldValueList> page = table.list(schema);
   * for (FieldValueList row : page.iterateAll()) {
   *   row.get(field);
   * }
   * }</pre>
   *
   * @param schema the schema used to decode the returned field values
   * @param options table data list options
   * @throws BigQueryException upon failure
   */
  public TableResult list(Schema schema, TableDataListOption... options) throws BigQueryException {
    return bigquery.listTableData(getTableId(), schema, options);
  }

  /**
   * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
   * started {@link Job} object.
   *
   * <p>Example of copying the table to a destination table.
   *
   * <pre>{@code
   * String datasetName = "my_dataset";
   * String tableName = "my_destination_table";
   * Job job = table.copy(datasetName, tableName);
   * // Wait for the job to complete.
   * try {
   *   Job completedJob = job.waitFor(RetryOption.initialRetryDelay(Duration.ofSeconds(1)),
   *       RetryOption.totalTimeout(Duration.ofMinutes(3)));
   *   if (completedJob != null && completedJob.getStatus().getError() == null) {
   *     // Job completed successfully
   *   } else {
   *     // Handle error case
   *   }
   * } catch (InterruptedException e) {
   *   // Handle interrupted wait
   * }
   * }</pre>
   *
   * @param destinationDataset the user-defined id of the destination dataset
   * @param destinationTable the user-defined id of the destination table
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job copy(String destinationDataset, String destinationTable, JobOption... options)
      throws BigQueryException {
    return copy(TableId.of(destinationDataset, destinationTable), options);
  }

  /**
   * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
   * started {@link Job} object.
   *
   * <p>Example copying the table to a destination table.
   *
   * <pre>{@code
   * String dataset = "my_dataset";
   * String tableName = "my_destination_table";
   * TableId destinationId = TableId.of(dataset, tableName);
   * JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL);
   * Job job = table.copy(destinationId, options);
   * // Wait for the job to complete.
   * try {
   *   Job completedJob = job.waitFor(RetryOption.initialRetryDelay(Duration.ofSeconds(1)),
   *       RetryOption.totalTimeout(Duration.ofMinutes(3)));
   *   if (completedJob != null && completedJob.getStatus().getError() == null) {
   *     // Job completed successfully.
   *   } else {
   *     // Handle error case.
   *   }
   * } catch (InterruptedException e) {
   *   // Handle interrupted wait
   * }
   * }</pre>
   *
   * @param destinationTable the destination table of the copy job
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job copy(TableId destinationTable, JobOption... options) throws BigQueryException {
    CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, getTableId());
    return bigquery.create(JobInfo.of(configuration), options);
  }

  /**
   * Starts a BigQuery Job to extract the current table to the provided destination URI. Returns the
   * started {@link Job} object.
   *
   * <p>Example extracting data to single Google Cloud Storage file.
   *
   * <pre>{@code
   * String format = "CSV";
   * String gcsUrl = "gs://my_bucket/filename.csv";
   * Job job = table.extract(format, gcsUrl);
   * // Wait for the job to complete
   * try {
   *   Job completedJob = job.waitFor(RetryOption.initialRetryDelay(Duration.ofSeconds(1)),
   *       RetryOption.totalTimeout(Duration.ofMinutes(3)));
   *   if (completedJob != null && completedJob.getStatus().getError() == null) {
   *     // Job completed successfully
   *   } else {
   *     // Handle error case
   *   }
   * } catch (InterruptedException e) {
   *   // Handle interrupted wait
   * }
   * }</pre>
   *
   * @param format the format of the extracted data
   * @param destinationUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path)
   *     where the extracted table should be written
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job extract(String format, String destinationUri, JobOption... options)
      throws BigQueryException {
    return extract(format, ImmutableList.of(destinationUri), options);
  }

  /**
   * Starts a BigQuery Job to extract the current table to the provided destination URIs. Returns
   * the started {@link Job} object.
   *
   * <p>Example of partitioning data to a list of Google Cloud Storage files.
   *
   * <pre>{@code
   * String format = "CSV";
   * String gcsUrl1 = "gs://my_bucket/PartitionA_*.csv";
   * String gcsUrl2 = "gs://my_bucket/PartitionB_*.csv";
   * List<String> destinationUris = new ArrayList<>();
   * destinationUris.add(gcsUrl1);
   * destinationUris.add(gcsUrl2);
   * Job job = table.extract(format, destinationUris);
   * // Wait for the job to complete
   * try {
   *   Job completedJob = job.waitFor(RetryOption.initialRetryDelay(Duration.ofSeconds(1)),
   *       RetryOption.totalTimeout(Duration.ofMinutes(3)));
   *   if (completedJob != null && completedJob.getStatus().getError() == null) {
   *     // Job completed successfully
   *   } else {
   *     // Handle error case
   *   }
   * } catch (InterruptedException e) {
   *   // Handle interrupted wait
   * }
   * }</pre>
   *
   * @param format the format of the exported data
   * @param destinationUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path)
   *     where the extracted table should be written
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job extract(String format, List<String> destinationUris, JobOption... options)
      throws BigQueryException {
    ExtractJobConfiguration extractConfiguration =
        ExtractJobConfiguration.of(getTableId(), destinationUris, format);
    return bigquery.create(JobInfo.of(extractConfiguration), options);
  }

  /**
   * Starts a BigQuery Job to load data into the current table from the provided source URI. Returns
   * the started {@link Job} object.
   *
   * <p>Example loading data from a single Google Cloud Storage file.
   *
   * <pre>{@code
   * String sourceUri = "gs://my_bucket/filename.csv";
   * Job job = table.load(FormatOptions.csv(), sourceUri);
   * // Wait for the job to complete
   * try {
   *   Job completedJob = job.waitFor(RetryOption.initialRetryDelay(Duration.ofSeconds(1)),
   *       RetryOption.totalTimeout(Duration.ofMinutes(3)));
   *   if (completedJob != null && completedJob.getStatus().getError() == null) {
   *     // Job completed successfully
   *   } else {
   *     // Handle error case
   *   }
   * } catch (InterruptedException e) {
   *   // Handle interrupted wait
   * }
   * }</pre>
   *
   * @param format the format of the data to load
   * @param sourceUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path) from
   *     which to load the data
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job load(FormatOptions format, String sourceUri, JobOption... options)
      throws BigQueryException {
    return load(format, ImmutableList.of(sourceUri), options);
  }

  /**
   * Starts a BigQuery Job to load data into the current table from the provided source URIs.
   * Returns the started {@link Job} object.
   *
   * <p>Example loading data from a list of Google Cloud Storage files.
   *
   * <pre>{@code
   * String gcsUrl1 = "gs://my_bucket/filename1.csv";
   * String gcsUrl2 = "gs://my_bucket/filename2.csv";
   * List<String> sourceUris = new ArrayList<>();
   * sourceUris.add(gcsUrl1);
   * sourceUris.add(gcsUrl2);
   * Job job = table.load(FormatOptions.csv(), sourceUris);
   * // Wait for the job to complete
   * try {
   *   Job completedJob = job.waitFor(RetryOption.initialRetryDelay(Duration.ofSeconds(1)),
   *       RetryOption.totalTimeout(Duration.ofMinutes(3)));
   *   if (completedJob != null && completedJob.getStatus().getError() == null) {
   *     // Job completed successfully
   *   } else {
   *     // Handle error case
   *   }
   * } catch (InterruptedException e) {
   *   // Handle interrupted wait
   * }
   * }</pre>
   *
   * @param format the format of the exported data
   * @param sourceUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) from
   *     which to load the data
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job load(FormatOptions format, List<String> sourceUris, JobOption... options)
      throws BigQueryException {
    LoadJobConfiguration loadConfig = LoadJobConfiguration.of(getTableId(), sourceUris, format);
    return bigquery.create(JobInfo.of(loadConfig), options);
  }

  /** Returns the table's {@code BigQuery} object used to issue requests. */
  public BigQuery getBigQuery() {
    return bigquery;
  }

  @Override
  public Builder toBuilder() {
    return new Builder(this);
  }

  @Override
  public final boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    // Exact class match (not instanceof): subclasses are never equal to a Table.
    if (obj == null || !obj.getClass().equals(Table.class)) {
      return false;
    }
    Table other = (Table) obj;
    return Objects.equals(toPb(), other.toPb()) && Objects.equals(options, other.options);
  }

  @Override
  public final int hashCode() {
    return Objects.hash(super.hashCode(), options);
  }

  /** Restores the transient {@code bigquery} service from the serialized {@code options}. */
  private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    this.bigquery = options.getService();
  }

  static Table fromPb(BigQuery bigquery, com.google.api.services.bigquery.model.Table tablePb) {
    return new Table(bigquery, new TableInfo.BuilderImpl(tablePb));
  }
}
| |
package aQute.bnd.deployer.repository.providers;
import static aQute.bnd.deployer.repository.api.Decision.accept;
import static aQute.bnd.deployer.repository.api.Decision.reject;
import static aQute.bnd.deployer.repository.api.Decision.undecided;
import static javax.xml.stream.XMLStreamConstants.END_ELEMENT;
import static javax.xml.stream.XMLStreamConstants.START_ELEMENT;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Set;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.osgi.resource.Resource;
import org.osgi.service.log.LogService;
import org.osgi.service.repository.ContentNamespace;
import aQute.bnd.deployer.repository.api.CheckResult;
import aQute.bnd.deployer.repository.api.IRepositoryContentProvider;
import aQute.bnd.deployer.repository.api.IRepositoryIndexProcessor;
import aQute.bnd.deployer.repository.api.Referral;
import aQute.bnd.osgi.repository.SimpleIndexer;
import aQute.bnd.osgi.resource.CapReqBuilder;
import aQute.bnd.osgi.resource.ResourceBuilder;
import aQute.bnd.service.Registry;
public class R5RepoContentProvider implements IRepositoryContentProvider {
public static final String NAME = "R5";
private static final String NS_URI = "http://www.osgi.org/xmlns/repository/v1.0.0";
private static final String INDEX_NAME_COMPRESSED = "index.xml.gz";
private static final String INDEX_NAME_PRETTY = "index.xml";
private static final String TAG_REPOSITORY = "repository";
private static final String TAG_REFERRAL = "referral";
private static final String TAG_RESOURCE = "resource";
private static final String TAG_CAPABILITY = "capability";
private static final String TAG_REQUIREMENT = "requirement";
private static final String TAG_ATTRIBUTE = "attribute";
private static final String TAG_DIRECTIVE = "directive";
private static final String ATTR_REFERRAL_URL = "url";
private static final String ATTR_REFERRAL_DEPTH = "depth";
private static final String ATTR_NAMESPACE = "namespace";
private static final String ATTR_NAME = "name";
private static final String ATTR_VALUE = "value";
private static final String ATTR_TYPE = "type";
@Override
public String getName() {
return NAME;
}
@Override
public String getDefaultIndexName(boolean pretty) {
return pretty ? INDEX_NAME_PRETTY : INDEX_NAME_COMPRESSED;
}
private enum ParserState {
beforeRoot,
inRoot,
inResource,
inCapability
}
@Override
public CheckResult checkStream(String name, InputStream stream) throws IOException {
XMLStreamReader reader = null;
try {
XMLInputFactory inputFactory = XMLInputFactory.newInstance();
inputFactory.setProperty(XMLInputFactory.IS_NAMESPACE_AWARE, true);
inputFactory.setProperty(XMLInputFactory.IS_VALIDATING, false);
inputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
reader = inputFactory.createXMLStreamReader(stream);
ParserState state = ParserState.beforeRoot;
while (reader.hasNext()) {
int type = reader.next();
String localName;
switch (type) {
case START_ELEMENT :
localName = reader.getLocalName();
switch (state) {
case beforeRoot :
String nsUri = reader.getNamespaceURI();
if (nsUri != null)
return CheckResult.fromBool(NS_URI.equals(nsUri), "Corrent namespace",
"Incorrect namespace: " + nsUri, null);
if (!TAG_REPOSITORY.equals(localName))
return new CheckResult(reject, "Incorrect root element name", null);
state = ParserState.inRoot;
break;
case inRoot :
if (TAG_RESOURCE.equals(localName)) {
state = ParserState.inResource;
}
break;
case inResource :
if (TAG_REQUIREMENT.equals(localName))
return new CheckResult(accept, "Recognised element 'requirement' in 'resource'",
null);
if (TAG_CAPABILITY.equals(localName))
state = ParserState.inCapability;
break;
case inCapability :
if (TAG_ATTRIBUTE.equals(localName) || TAG_DIRECTIVE.equals(localName)) {
return new CheckResult(accept, "Recognised element '%s' in 'capability'", null);
}
break;
}
break;
case END_ELEMENT :
localName = reader.getLocalName();
if (state == ParserState.inResource && TAG_RESOURCE.equals(localName))
state = ParserState.inRoot;
if (state == ParserState.inCapability && TAG_CAPABILITY.equals(localName))
state = ParserState.inResource;
break;
default :
break;
}
}
return new CheckResult(undecided, "Reached end of stream", null);
} catch (XMLStreamException e) {
return new CheckResult(reject, "Invalid XML", e);
} finally {
if (reader != null)
try {
reader.close();
} catch (XMLStreamException e) {}
}
}
@Override
public void parseIndex(InputStream stream, URI baseUri, IRepositoryIndexProcessor listener, LogService log)
throws Exception {
this.parseIndex(baseUri + "", stream, baseUri, listener, log);
}
public void parseIndex(String projectName, InputStream stream, URI baseUri, IRepositoryIndexProcessor listener,
LogService log)
throws Exception {
XMLStreamReader reader = null;
try {
XMLInputFactory inputFactory = XMLInputFactory.newInstance();
inputFactory.setProperty(XMLInputFactory.IS_NAMESPACE_AWARE, true);
inputFactory.setProperty(XMLInputFactory.IS_VALIDATING, false);
inputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
reader = inputFactory.createXMLStreamReader(stream);
ResourceBuilder resourceBuilder = null;
CapReqBuilder capReqBuilder = null;
while (reader.hasNext()) {
int type = reader.next();
String localName;
switch (type) {
case START_ELEMENT :
localName = reader.getLocalName();
if (TAG_REFERRAL.equals(localName)) {
String url = reader.getAttributeValue(null, ATTR_REFERRAL_URL);
String depth = reader.getAttributeValue(null, ATTR_REFERRAL_DEPTH);
Referral referral = new Referral(url, parseInt(depth));
listener.processReferral(baseUri, referral, 0, 0);
} else if (TAG_RESOURCE.equals(localName)) {
resourceBuilder = new ResourceBuilder();
} else if (TAG_CAPABILITY.equals(localName) || TAG_REQUIREMENT.equals(localName)) {
String namespace = reader.getAttributeValue(null, ATTR_NAMESPACE);
capReqBuilder = new CapReqBuilder(namespace);
} else if (TAG_ATTRIBUTE.equals(localName)) {
String name = reader.getAttributeValue(null, ATTR_NAME);
String valueStr = reader.getAttributeValue(null, ATTR_VALUE);
String typeAttr = reader.getAttributeValue(null, ATTR_TYPE);
if (capReqBuilder != null) {
// If the attribute is 'url' on the osgi.content
// namespace then resolve it relative to the
// base URI.
if (ContentNamespace.CONTENT_NAMESPACE.equals(capReqBuilder.getNamespace())
&& ContentNamespace.CAPABILITY_URL_ATTRIBUTE.equals(name)) {
URI resolvedUri = resolveUri(valueStr, baseUri);
capReqBuilder.addAttribute(name, resolvedUri);
} else {
Object convertedAttr = convertAttribute(valueStr, typeAttr);
capReqBuilder.addAttribute(name, convertedAttr);
}
}
} else if (TAG_DIRECTIVE.equals(localName)) {
String name = reader.getAttributeValue(null, ATTR_NAME);
String valueStr = reader.getAttributeValue(null, ATTR_VALUE);
if (capReqBuilder != null)
capReqBuilder.addDirective(name, valueStr);
}
break;
case END_ELEMENT :
localName = reader.getLocalName();
if (TAG_CAPABILITY.equals(localName)) {
if (resourceBuilder != null && capReqBuilder != null)
resourceBuilder.addCapability(capReqBuilder);
capReqBuilder = null;
} else if (TAG_REQUIREMENT.equals(localName)) {
if (resourceBuilder != null && capReqBuilder != null)
resourceBuilder.addRequirement(capReqBuilder);
capReqBuilder = null;
} else if (TAG_RESOURCE.equals(localName)) {
if (resourceBuilder != null) {
Resource resource = resourceBuilder.build();
listener.processResource(resource);
resourceBuilder = null;
}
}
break;
}
}
} finally {
if (reader != null) {
try {
reader.close();
} catch (Exception e) {}
}
}
}
/**
 * Resolves {@code uriStr} against {@code baseUri}: an already-absolute URI is
 * returned unchanged, a relative one is resolved against the base.
 *
 * @param uriStr the URI string taken from the index attribute
 * @param baseUri the base URI of the index document
 * @return the absolute (or base-resolved) URI
 * @throws URISyntaxException if {@code uriStr} is not a syntactically valid URI
 */
private static URI resolveUri(String uriStr, URI baseUri) throws URISyntaxException {
    URI parsed = new URI(uriStr);
    return parsed.isAbsolute() ? parsed : baseUri.resolve(parsed);
}
/**
 * Parses a decimal integer attribute value, treating a missing ({@code null})
 * or empty value as {@code 0}.
 *
 * @param value the raw attribute value, may be {@code null}
 * @return the parsed integer, or {@code 0} when the value is absent
 */
private static int parseInt(String value) {
    if (value == null || value.isEmpty())
        return 0;
    return Integer.parseInt(value);
}
/**
 * Converts a raw attribute value string into the Java object implied by its
 * declared type name, delegating to {@link AttributeType} for the parsing
 * rules.
 *
 * @param value the raw attribute value
 * @param type the declared attribute type name (may be {@code null} for the default)
 * @return the converted attribute value
 */
private static Object convertAttribute(String value, String type) {
    return AttributeType.parseTypeName(type).parseString(value);
}
/**
 * This content provider can also generate repository indexes (see
 * {@code generateIndex}), not just read them.
 *
 * @return always {@code true}
 */
@Override
public boolean supportsGeneration() {
    return true;
}
/**
 * Generates a repository index over the given files and writes it to
 * {@code output}.
 *
 * @param files the resource files to index
 * @param output destination stream for the generated index
 * @param repoName name recorded in the index
 * @param baseUri base URI the resource URLs are made relative to
 * @param pretty when {@code true}, write uncompressed, human-readable output
 * @param registry service registry (unused by this implementation)
 * @param log log service (unused by this implementation)
 * @throws Exception if indexing or writing fails
 */
@Override
public void generateIndex(Set<File> files, OutputStream output, String repoName, URI baseUri, boolean pretty,
    Registry registry, LogService log) throws Exception {
    // The index "increment" is the newest lastModified timestamp across all
    // files, or -1 when the file set is empty.
    long newestModified = -1L;
    for (File file : files) {
        newestModified = Math.max(newestModified, file.lastModified());
    }
    SimpleIndexer indexer = new SimpleIndexer();
    indexer.files(files)
        .base(baseUri)
        .compress(!pretty)
        .name(repoName)
        .increment(newestModified)
        .index(output);
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* DescribePlacementGroups.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST)
*/
package com.amazon.ec2;
/**
 * DescribePlacementGroups bean class.
 *
 * <p>Axis2 ADB wrapper around {@link com.amazon.ec2.DescribePlacementGroupsType};
 * all serialization work is delegated to the wrapped type. Auto-generated code —
 * keep edits minimal and consistent with the generator's style.
 */
public class DescribePlacementGroups
        implements org.apache.axis2.databinding.ADBBean {

    /** Qualified name of the XML element this bean (de)serializes. */
    public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName(
            "http://ec2.amazonaws.com/doc/2012-08-15/",
            "DescribePlacementGroups",
            "ns1");

    // Returns the fixed "ns1" prefix for the EC2 namespace, otherwise a fresh unique prefix.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")) {
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for DescribePlacementGroups
     */
    protected com.amazon.ec2.DescribePlacementGroupsType localDescribePlacementGroups;

    /**
     * Auto generated getter method
     * @return com.amazon.ec2.DescribePlacementGroupsType
     */
    public com.amazon.ec2.DescribePlacementGroupsType getDescribePlacementGroups() {
        return localDescribePlacementGroups;
    }

    /**
     * Auto generated setter method
     * @param param DescribePlacementGroups
     */
    public void setDescribePlacementGroups(com.amazon.ec2.DescribePlacementGroupsType param) {
        this.localDescribePlacementGroups = param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try {
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        } catch (java.lang.IllegalArgumentException e) {
            // Reader does not know the property; treat as not MTOM-aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OM element.
     *
     * @param parentQName parent element name (unused; MY_QNAME is authoritative)
     * @param factory OM factory used to build the element
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException {
        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this, MY_QNAME) {
                    public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                        DescribePlacementGroups.this.serialize(MY_QNAME, factory, xmlWriter);
                    }
                };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                MY_QNAME, factory, dataSource);
    }

    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
        serialize(parentQName, factory, xmlWriter, false);
    }

    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
        //We can safely assume an element has only one type associated with it
        if (localDescribePlacementGroups == null) {
            throw new org.apache.axis2.databinding.ADBException("Property cannot be null!");
        }
        localDescribePlacementGroups.serialize(MY_QNAME, factory, xmlWriter);
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix, java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }
            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;
            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }
                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until the prefix is not already bound in the current context.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException {
        //We can safely assume an element has only one type associated with it
        // FIX: guard against a null property. Previously this dereferenced
        // localDescribePlacementGroups unconditionally and threw a bare NPE;
        // serialize() already rejects null with a descriptive ADBException,
        // so mirror that behavior here for consistency.
        if (localDescribePlacementGroups == null) {
            throw new org.apache.axis2.databinding.ADBException("Property cannot be null!");
        }
        return localDescribePlacementGroups.getPullParser(MY_QNAME);
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory {
        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static DescribePlacementGroups parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
            DescribePlacementGroups object =
                    new DescribePlacementGroups();
            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix = "";
            java.lang.String namespaceuri = "";
            try {
                // Skip to the first structural event.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();
                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();
                while (!reader.isEndElement()) {
                    if (reader.isStartElement()) {
                        if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/", "DescribePlacementGroups").equals(reader.getName())) {
                            object.setDescribePlacementGroups(com.amazon.ec2.DescribePlacementGroupsType.Factory.parse(reader));
                        } // End of if for expected property start element
                        else {
                            // A start element we are not expecting indicates an invalid parameter was passed
                            throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                        }
                    } else {
                        reader.next();
                    }
                } // end of while loop
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }
            return object;
        }
    }//end of factory class
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.client.config;
import org.apache.flink.table.client.SqlClientException;
import org.apache.flink.table.client.config.entries.CatalogEntry;
import org.apache.flink.table.client.config.entries.ConfigurationEntry;
import org.apache.flink.table.client.config.entries.DeploymentEntry;
import org.apache.flink.table.client.config.entries.ExecutionEntry;
import org.apache.flink.table.client.config.entries.FunctionEntry;
import org.apache.flink.table.client.config.entries.ModuleEntry;
import org.apache.flink.table.client.config.entries.TableEntry;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonMappingException;
import java.io.IOException;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Environment configuration that represents the content of an environment file. Environment files
* define catalogs, tables, execution, and deployment behavior. An environment might be defined by
* default or as part of a session. Environments can be merged or enriched with properties (e.g.
* from CLI command).
*
* <p>In future versions, we might restrict the merging or enrichment of deployment properties to
* not allow overwriting of a deployment by a session.
*
* @deprecated This will be removed in Flink 1.14 with dropping support of {@code sql-client.yaml}
* configuration file.
*/
@Deprecated
public class Environment {

    public static final String EXECUTION_ENTRY = "execution";

    public static final String CONFIGURATION_ENTRY = "table";

    public static final String DEPLOYMENT_ENTRY = "deployment";

    // Modules and tables use LinkedHashMap because their registration order matters;
    // catalogs and functions are name-keyed without a meaningful order.
    private Map<String, ModuleEntry> modules;

    private Map<String, CatalogEntry> catalogs;

    private Map<String, TableEntry> tables;

    private Map<String, FunctionEntry> functions;

    private ExecutionEntry execution;

    private ConfigurationEntry configuration;

    private DeploymentEntry deployment;

    public Environment() {
        this.modules = new LinkedHashMap<>();
        this.catalogs = Collections.emptyMap();
        this.tables = Collections.emptyMap();
        this.functions = Collections.emptyMap();
        this.execution = ExecutionEntry.DEFAULT_INSTANCE;
        this.configuration = ConfigurationEntry.DEFAULT_INSTANCE;
        this.deployment = DeploymentEntry.DEFAULT_INSTANCE;
    }

    public Map<String, ModuleEntry> getModules() {
        return modules;
    }

    /**
     * Registers the given modules in list order.
     *
     * @throws SqlClientException if two modules share the same name
     */
    public void setModules(List<Map<String, Object>> modules) {
        this.modules = new LinkedHashMap<>(modules.size());
        modules.forEach(
                config -> {
                    final ModuleEntry entry = ModuleEntry.create(config);
                    if (this.modules.containsKey(entry.getName())) {
                        throw new SqlClientException(
                                String.format(
                                        "Cannot register module '%s' because a module with this name is already registered.",
                                        entry.getName()));
                    }
                    this.modules.put(entry.getName(), entry);
                });
    }

    public Map<String, CatalogEntry> getCatalogs() {
        return catalogs;
    }

    /**
     * Registers the given catalogs.
     *
     * @throws SqlClientException if two catalogs share the same name
     */
    public void setCatalogs(List<Map<String, Object>> catalogs) {
        this.catalogs = new HashMap<>(catalogs.size());
        catalogs.forEach(
                config -> {
                    final CatalogEntry catalog = CatalogEntry.create(config);
                    if (this.catalogs.containsKey(catalog.getName())) {
                        throw new SqlClientException(
                                String.format(
                                        "Cannot create catalog '%s' because a catalog with this name is already registered.",
                                        catalog.getName()));
                    }
                    this.catalogs.put(catalog.getName(), catalog);
                });
    }

    public Map<String, TableEntry> getTables() {
        return tables;
    }

    /**
     * Registers the given tables in list order.
     *
     * @throws SqlClientException if two tables share the same name
     */
    public void setTables(List<Map<String, Object>> tables) {
        this.tables = new LinkedHashMap<>(tables.size());
        tables.forEach(
                config -> {
                    final TableEntry table = TableEntry.create(config);
                    if (this.tables.containsKey(table.getName())) {
                        throw new SqlClientException(
                                "Cannot create table '"
                                        + table.getName()
                                        + "' because a table with this name is already registered.");
                    }
                    this.tables.put(table.getName(), table);
                });
    }

    public Map<String, FunctionEntry> getFunctions() {
        return functions;
    }

    /**
     * Registers the given functions.
     *
     * @throws SqlClientException if two functions share the same name
     */
    public void setFunctions(List<Map<String, Object>> functions) {
        this.functions = new HashMap<>(functions.size());
        functions.forEach(
                config -> {
                    final FunctionEntry function = FunctionEntry.create(config);
                    if (this.functions.containsKey(function.getName())) {
                        throw new SqlClientException(
                                "Cannot create function '"
                                        + function.getName()
                                        + "' because a function with this name is already registered.");
                    }
                    this.functions.put(function.getName(), function);
                });
    }

    public void setExecution(Map<String, Object> config) {
        this.execution = ExecutionEntry.create(config);
    }

    public ExecutionEntry getExecution() {
        return execution;
    }

    public void setConfiguration(Map<String, Object> config) {
        this.configuration = ConfigurationEntry.create(config);
    }

    public ConfigurationEntry getConfiguration() {
        return configuration;
    }

    public void setDeployment(Map<String, Object> config) {
        this.deployment = DeploymentEntry.create(config);
    }

    public DeploymentEntry getDeployment() {
        return deployment;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append("===================== Modules =====================\n");
        modules.forEach((name, module) -> appendNamedEntry(sb, ModuleEntry.MODULE_NAME, name, module.asMap()));
        sb.append("===================== Catalogs =====================\n");
        catalogs.forEach((name, catalog) -> appendNamedEntry(sb, CatalogEntry.CATALOG_NAME, name, catalog.asMap()));
        sb.append("===================== Tables =====================\n");
        tables.forEach((name, table) -> appendNamedEntry(sb, TableEntry.TABLES_NAME, name, table.asMap()));
        sb.append("=================== Functions ====================\n");
        functions.forEach((name, function) -> appendNamedEntry(sb, FunctionEntry.FUNCTIONS_NAME, name, function.asMap()));
        sb.append("=================== Execution ====================\n");
        appendProperties(sb, execution.asTopLevelMap());
        sb.append("================== Configuration =================\n");
        appendProperties(sb, configuration.asMap());
        sb.append("=================== Deployment ===================\n");
        appendProperties(sb, deployment.asTopLevelMap());
        return sb.toString();
    }

    /** Appends "- nameKey: name" followed by the entry's indented properties. */
    private static void appendNamedEntry(
            StringBuilder sb, String nameKey, String name, Map<String, String> properties) {
        sb.append("- ").append(nameKey).append(": ").append(name).append("\n");
        properties.forEach(
                (k, v) -> sb.append(" ").append(k).append(": ").append(v).append('\n'));
    }

    /** Appends plain "key: value" lines, one per property. */
    private static void appendProperties(StringBuilder sb, Map<String, String> properties) {
        properties.forEach((k, v) -> sb.append(k).append(": ").append(v).append('\n'));
    }

    // --------------------------------------------------------------------------------------------

    /** Parses an environment file from a URL. */
    public static Environment parse(URL url) throws IOException {
        try {
            return new ConfigUtil.LowerCaseYamlMapper().readValue(url, Environment.class);
        } catch (JsonMappingException e) {
            // NOTE(review): the original exception is dropped here (only its message is
            // kept); consider passing `e` as the cause if SqlClientException supports it.
            throw new SqlClientException(
                    "Could not parse environment file. Cause: " + e.getMessage());
        }
    }

    /** Parses an environment file from a String. */
    public static Environment parse(String content) throws IOException {
        try {
            return new ConfigUtil.LowerCaseYamlMapper().readValue(content, Environment.class);
        } catch (JsonMappingException e) {
            // NOTE(review): cause dropped, see parse(URL) above.
            throw new SqlClientException(
                    "Could not parse environment file. Cause: " + e.getMessage());
        }
    }

    /**
     * Merges two environments. The properties of the first environment might be overwritten by the
     * second one.
     */
    public static Environment merge(Environment env1, Environment env2) {
        final Environment mergedEnv = new Environment();
        // merge modules
        final Map<String, ModuleEntry> modules = new LinkedHashMap<>(env1.getModules());
        modules.putAll(env2.getModules());
        mergedEnv.modules = modules;
        // merge catalogs
        final Map<String, CatalogEntry> catalogs = new HashMap<>(env1.getCatalogs());
        catalogs.putAll(env2.getCatalogs());
        mergedEnv.catalogs = catalogs;
        // merge tables
        final Map<String, TableEntry> tables = new LinkedHashMap<>(env1.getTables());
        tables.putAll(env2.getTables());
        mergedEnv.tables = tables;
        // merge functions
        final Map<String, FunctionEntry> functions = new HashMap<>(env1.getFunctions());
        functions.putAll(env2.getFunctions());
        mergedEnv.functions = functions;
        // merge execution properties
        mergedEnv.execution = ExecutionEntry.merge(env1.getExecution(), env2.getExecution());
        // merge configuration properties
        mergedEnv.configuration =
                ConfigurationEntry.merge(env1.getConfiguration(), env2.getConfiguration());
        // merge deployment properties
        mergedEnv.deployment = DeploymentEntry.merge(env1.getDeployment(), env2.getDeployment());
        return mergedEnv;
    }

    // Copy via enrich() with no property overrides. This class deliberately does not
    // implement Cloneable; no call to Object.clone() is made.
    @Override
    public Environment clone() {
        return enrich(this, Collections.emptyMap());
    }

    /** Enriches an environment with new/modified properties and returns the new instance. */
    public static Environment enrich(Environment env, Map<String, String> properties) {
        final Environment enrichedEnv = new Environment();
        // copy modules
        enrichedEnv.modules = new LinkedHashMap<>(env.getModules());
        // copy catalogs
        // NOTE(review): setCatalogs uses HashMap while this copy uses LinkedHashMap;
        // harmless, but the two could be unified for deterministic iteration order.
        enrichedEnv.catalogs = new LinkedHashMap<>(env.getCatalogs());
        // copy tables
        enrichedEnv.tables = new LinkedHashMap<>(env.getTables());
        // copy functions
        enrichedEnv.functions = new HashMap<>(env.getFunctions());
        // enrich execution properties
        enrichedEnv.execution = ExecutionEntry.enrich(env.execution, properties);
        // enrich configuration properties
        enrichedEnv.configuration = ConfigurationEntry.enrich(env.configuration, properties);
        // enrich deployment properties
        enrichedEnv.deployment = DeploymentEntry.enrich(env.deployment, properties);
        return enrichedEnv;
    }
}
| |
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package com.google.cloud.tools.eclipse.dataflow.core.project;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import com.google.common.collect.Iterables;
import java.util.Arrays;
import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.artifact.versioning.Restriction;
import org.apache.maven.artifact.versioning.VersionRange;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
/**
* Tests for {@link MajorVersion}.
*/
@RunWith(Parameterized.class)
public class MajorVersionTest {

    /** Every test method runs once per declared {@link MajorVersion}. */
    @Parameters
    public static Iterable<? extends Object> majorVersions() {
        return Arrays.asList(MajorVersion.values());
    }

    private final MajorVersion majorVersion;

    public MajorVersionTest(MajorVersion majorVersion) {
        this.majorVersion = majorVersion;
    }

    // Truncating at the initial version of a stable-API major version should keep
    // the full range (everything after the start is still compatible).
    @Test
    public void testTruncatedVersionAtBeginningInternallyBackwardsCompatible() {
        assumeTrue(majorVersion.hasStableApi());
        assertEquals(
                majorVersion.getVersionRange(),
                majorVersion.getTruncatedVersionRange(majorVersion.getInitialVersion()));
    }

    // Without a stable API, truncating at the initial version collapses the range
    // to exactly that single version.
    @Test
    public void testTruncatedVersionAtBeginningInternallyBackwardsIncompatible() {
        assumeFalse(majorVersion.hasStableApi());
        assertEquals(
                VersionRange.createFromVersion(majorVersion.getInitialVersion().toString()),
                majorVersion.getTruncatedVersionRange(majorVersion.getInitialVersion()));
    }

    // The max version is exclusive, so truncating at it must be rejected.
    @Test
    public void testTruncatedVersionAtEnd() {
        assumeFalse(majorVersion.getMaxVersion().toString().trim().isEmpty());
        try {
            majorVersion.getTruncatedVersionRange(majorVersion.getMaxVersion());
            fail();
        } catch (IllegalArgumentException ex) {
            assertTrue(ex.getMessage().contains("it does not contain"));
            assertTrue(ex.getMessage().contains(majorVersion.getMaxVersion().toString()));
            assertTrue(ex.getMessage().contains(majorVersion.getVersionRange().toString()));
        }
    }

    // A version below the range start must also be rejected.
    @Test
    public void testTruncatedVersionBeforeBeginning() {
        String version = "0.0.0-alpha";
        try {
            majorVersion.getTruncatedVersionRange(new DefaultArtifactVersion(version));
            fail();
        } catch (IllegalArgumentException ex) {
            assertTrue(ex.getMessage().contains("it does not contain"));
            // NOTE(review): the sibling test above checks getVersionRange() here, while this
            // one checks getMaxVersion() — possibly a copy-paste slip. It may pass only
            // incidentally (the range's string representation contains the max version);
            // confirm against the actual exception message format.
            assertTrue(ex.getMessage().contains(majorVersion.getMaxVersion().toString()));
            assertTrue(ex.getMessage().contains(version));
        }
    }

    // This runs three times even though it doesn't use the parameters
    @Test
    public void testTruncatedVersionIntermediate() throws Exception {
        assertEquals(
                VersionRange.createFromVersionSpec("[1.2.3, " + MajorVersion.ONE.getMaxVersion() + ")"),
                MajorVersion.ONE.getTruncatedVersionRange(new DefaultArtifactVersion("1.2.3")));
        assertEquals(
                VersionRange.createFromVersionSpec("[2.4.8, " + MajorVersion.TWO.getMaxVersion() + ")"),
                MajorVersion.TWO.getTruncatedVersionRange(new DefaultArtifactVersion("2.4.8")));
        assertEquals(
                VersionRange.createFromVersion("3.9.27-beta81"),
                MajorVersion.THREE_PLUS.getTruncatedVersionRange(
                        new DefaultArtifactVersion("3.9.27-beta81")));
    }

    @Test
    public void testInitialMaxVersionRange() throws Exception {
        VersionRange expectedRange =
                VersionRange.createFromVersionSpec(
                        String.format("[%s, %s)", majorVersion.getInitialVersion(), majorVersion.getMaxVersion()));
        assertEquals(
                "Major Versions should produce a version specification from "
                        + "their initial version (inclusive) to their max version (exclusive)",
                expectedRange,
                majorVersion.getVersionRange());
    }

    @Test
    public void testVersionRangeFromSpec() {
        VersionRange versionRange = majorVersion.getVersionRange();
        Restriction restriction = Iterables.getOnlyElement(versionRange.getRestrictions());
        assertEquals(majorVersion.getVersionRange(), majorVersion.getVersionRange());
        assertTrue(restriction.isLowerBoundInclusive());
        assertEquals(majorVersion.getInitialVersion(), restriction.getLowerBound());
        assertFalse(restriction.isUpperBoundInclusive());
        if (majorVersion.getMaxVersion().toString().trim().isEmpty()) {
            assertNull(
                    "No Upper Bound should be specified if the max version is empty",
                    restriction.getUpperBound());
        } else {
            assertEquals(majorVersion.getMaxVersion(), restriction.getUpperBound());
        }
    }

    @Test
    public void testTruncateVersionRange() {
        ArtifactVersion initialVersion = majorVersion.getInitialVersion();
        // NOTE(review): getMajorVersion() is used for both the first and the second
        // version components; the middle one was presumably meant to be
        // getMinorVersion() — confirm the intended newStart value.
        ArtifactVersion newStart =
                new DefaultArtifactVersion(
                        String.format(
                                "%s.%s.%s",
                                initialVersion.getMajorVersion(),
                                initialVersion.getMajorVersion(),
                                initialVersion.getIncrementalVersion() + 5));
        assumeTrue(majorVersion.getMaxVersion().compareTo(newStart) > 0);
        VersionRange updatedRange = MajorVersion.truncateAtLatest(newStart, majorVersion.getVersionRange());
        assertFalse(updatedRange.containsVersion(majorVersion.getInitialVersion()));
        // Same construction as newStart, one incremental higher (see NOTE above).
        ArtifactVersion afterStart =
                new DefaultArtifactVersion(
                        String.format(
                                "%s.%s.%s",
                                initialVersion.getMajorVersion(),
                                initialVersion.getMajorVersion(),
                                initialVersion.getIncrementalVersion() + 6));
        if (majorVersion.hasStableApi()) {
            assertTrue(updatedRange.containsVersion(afterStart));
        } else {
            assertFalse(updatedRange.containsVersion(afterStart));
        }
    }

    @Test
    public void testTruncateVersionRangeNotInRange() {
        assumeTrue(!majorVersion.getMaxVersion().toString().isEmpty());
        try {
            MajorVersion.truncateAtLatest(majorVersion.getMaxVersion(), majorVersion.getVersionRange());
            fail();
        } catch (IllegalArgumentException ex) {
            assertNotNull(ex.getMessage());
        }
    }

    @Test
    public void testStableVersionGetStableSelf() {
        assumeTrue(majorVersion.hasStableApi());
        assertEquals(majorVersion, majorVersion.getStableVersion());
    }

    @Test
    public void testUnstableVersionGetStableStartsAtMaxVersion() {
        assumeFalse(majorVersion.hasStableApi());
        assumeFalse(majorVersion.getMaxVersion().toString().isEmpty());
        assertEquals(
                "The Stable Version Range for a version without a stable API should "
                        + "start at the end of the Unstable Version Range",
                majorVersion.getStableVersion().getInitialVersion(),
                majorVersion.getMaxVersion());
    }

    @Test
    public void testStableApiOrdering() {
        assumeFalse(majorVersion.hasStableApi());
        try {
            MajorVersion stable = majorVersion.getStableVersion();
            assertTrue(
                    "A version that is not stable should always precede the stable version that replaces it",
                    majorVersion.compareTo(stable) < 0);
        } catch (IllegalArgumentException ignored) {
            // Not all Major Versions have a stable version, even if they also do not have a stable API.
            // This test only demonstrates the relation between versions without a stable API that have a
            // future stable API.
        }
    }
}
| |
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.web.reactive;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import javax.validation.ValidatorFactory;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.beans.DirectFieldAccessor;
import org.springframework.boot.autoconfigure.validation.ValidationAutoConfiguration;
import org.springframework.boot.autoconfigure.validation.ValidatorAdapter;
import org.springframework.boot.autoconfigure.web.servlet.WebMvcAutoConfigurationTests.Config;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.util.EnvironmentTestUtils;
import org.springframework.boot.web.reactive.context.GenericReactiveWebApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.core.io.ClassPathResource;
import org.springframework.http.server.reactive.HttpHandler;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.validation.Validator;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;
import org.springframework.web.reactive.HandlerMapping;
import org.springframework.web.reactive.accept.CompositeContentTypeResolver;
import org.springframework.web.reactive.config.WebFluxConfigurationSupport;
import org.springframework.web.reactive.config.WebFluxConfigurer;
import org.springframework.web.reactive.handler.SimpleUrlHandlerMapping;
import org.springframework.web.reactive.resource.CachingResourceResolver;
import org.springframework.web.reactive.resource.CachingResourceTransformer;
import org.springframework.web.reactive.resource.PathResourceResolver;
import org.springframework.web.reactive.resource.ResourceWebHandler;
import org.springframework.web.reactive.result.method.HandlerMethodArgumentResolver;
import org.springframework.web.reactive.result.method.annotation.RequestMappingHandlerAdapter;
import org.springframework.web.reactive.result.method.annotation.RequestMappingHandlerMapping;
import org.springframework.web.reactive.result.view.ViewResolutionResultHandler;
import org.springframework.web.reactive.result.view.ViewResolver;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
/**
* Tests for {@link WebFluxAutoConfiguration}.
*
* @author Brian Clozel
* @author Andy Wilkinson
*/
public class WebFluxAutoConfigurationTests {

	@Rule
	public ExpectedException thrown = ExpectedException.none();

	// Created fresh by load(...) in each test.
	private GenericReactiveWebApplicationContext context;

	// Fix: the refreshed context was never closed, so each test leaked a live
	// application context (and every singleton bean in it) for the rest of the
	// test run.
	@After
	public void close() {
		if (this.context != null) {
			this.context.close();
		}
	}

	@Test
	public void shouldNotProcessIfExistingWebReactiveConfiguration() throws Exception {
		load(WebFluxConfigurationSupport.class);
		assertThat(this.context.getBeansOfType(RequestMappingHandlerMapping.class).size())
				.isEqualTo(1);
		assertThat(this.context.getBeansOfType(RequestMappingHandlerAdapter.class).size())
				.isEqualTo(1);
	}

	@Test
	public void shouldCreateDefaultBeans() throws Exception {
		load();
		assertThat(this.context.getBeansOfType(RequestMappingHandlerMapping.class).size())
				.isEqualTo(1);
		assertThat(this.context.getBeansOfType(RequestMappingHandlerAdapter.class).size())
				.isEqualTo(1);
		assertThat(this.context.getBeansOfType(CompositeContentTypeResolver.class).size())
				.isEqualTo(1);
		assertThat(this.context.getBean("resourceHandlerMapping", HandlerMapping.class))
				.isNotNull();
	}

	@SuppressWarnings("unchecked")
	@Test
	public void shouldRegisterCustomHandlerMethodArgumentResolver() throws Exception {
		load(CustomArgumentResolvers.class);
		RequestMappingHandlerAdapter adapter = this.context
				.getBean(RequestMappingHandlerAdapter.class);
		assertThat((List<HandlerMethodArgumentResolver>) ReflectionTestUtils
				.getField(adapter.getArgumentResolverConfigurer(), "customResolvers"))
						.contains(
								this.context.getBean("firstResolver",
										HandlerMethodArgumentResolver.class),
								this.context.getBean("secondResolver",
										HandlerMethodArgumentResolver.class));
	}

	@Test
	public void shouldRegisterResourceHandlerMapping() throws Exception {
		load();
		SimpleUrlHandlerMapping hm = this.context.getBean("resourceHandlerMapping",
				SimpleUrlHandlerMapping.class);
		assertThat(hm.getUrlMap().get("/**")).isInstanceOf(ResourceWebHandler.class);
		ResourceWebHandler staticHandler = (ResourceWebHandler) hm.getUrlMap().get("/**");
		assertThat(staticHandler.getLocations()).hasSize(5);
		assertThat(hm.getUrlMap().get("/webjars/**"))
				.isInstanceOf(ResourceWebHandler.class);
		ResourceWebHandler webjarsHandler = (ResourceWebHandler) hm.getUrlMap()
				.get("/webjars/**");
		assertThat(webjarsHandler.getLocations()).hasSize(1);
		assertThat(webjarsHandler.getLocations().get(0))
				.isEqualTo(new ClassPathResource("/META-INF/resources/webjars/"));
	}

	@Test
	public void shouldMapResourcesToCustomPath() throws Exception {
		load("spring.webflux.static-path-pattern:/static/**");
		SimpleUrlHandlerMapping hm = this.context.getBean("resourceHandlerMapping",
				SimpleUrlHandlerMapping.class);
		assertThat(hm.getUrlMap().get("/static/**"))
				.isInstanceOf(ResourceWebHandler.class);
		ResourceWebHandler staticHandler = (ResourceWebHandler) hm.getUrlMap()
				.get("/static/**");
		assertThat(staticHandler.getLocations()).hasSize(5);
	}

	@Test
	public void shouldNotMapResourcesWhenDisabled() throws Exception {
		load("spring.resources.add-mappings:false");
		assertThat(this.context.getBean("resourceHandlerMapping"))
				.isNotInstanceOf(SimpleUrlHandlerMapping.class);
	}

	@Test
	public void resourceHandlerChainEnabled() throws Exception {
		load("spring.resources.chain.enabled:true");
		SimpleUrlHandlerMapping hm = this.context.getBean("resourceHandlerMapping",
				SimpleUrlHandlerMapping.class);
		assertThat(hm.getUrlMap().get("/**")).isInstanceOf(ResourceWebHandler.class);
		ResourceWebHandler staticHandler = (ResourceWebHandler) hm.getUrlMap().get("/**");
		assertThat(staticHandler.getResourceResolvers()).extractingResultOf("getClass")
				.containsOnly(CachingResourceResolver.class, PathResourceResolver.class);
		assertThat(staticHandler.getResourceTransformers()).extractingResultOf("getClass")
				.containsOnly(CachingResourceTransformer.class);
	}

	@Test
	public void shouldRegisterViewResolvers() throws Exception {
		load(ViewResolvers.class);
		ViewResolutionResultHandler resultHandler = this.context
				.getBean(ViewResolutionResultHandler.class);
		assertThat(resultHandler.getViewResolvers()).containsExactly(
				this.context.getBean("aViewResolver", ViewResolver.class),
				this.context.getBean("anotherViewResolver", ViewResolver.class));
	}

	@Test
	public void validatorWhenNoValidatorShouldUseDefault() {
		load(null, new Class<?>[] { ValidationAutoConfiguration.class });
		assertThat(this.context.getBeansOfType(ValidatorFactory.class)).isEmpty();
		assertThat(this.context.getBeansOfType(javax.validation.Validator.class))
				.isEmpty();
		String[] springValidatorBeans = this.context.getBeanNamesForType(Validator.class);
		assertThat(springValidatorBeans).containsExactly("webFluxValidator");
	}

	@Test
	public void validatorWhenNoCustomizationShouldUseAutoConfigured() {
		load();
		String[] jsrValidatorBeans = this.context
				.getBeanNamesForType(javax.validation.Validator.class);
		String[] springValidatorBeans = this.context.getBeanNamesForType(Validator.class);
		assertThat(jsrValidatorBeans).containsExactly("defaultValidator");
		assertThat(springValidatorBeans).containsExactly("defaultValidator", "webFluxValidator");
		Validator validator = this.context.getBean("webFluxValidator", Validator.class);
		assertThat(validator).isInstanceOf(ValidatorAdapter.class);
		Object defaultValidator = this.context.getBean("defaultValidator");
		assertThat(((ValidatorAdapter) validator).getTarget()).isSameAs(defaultValidator);
		// Primary Spring validator is the one used by WebFlux behind the scenes
		assertThat(this.context.getBean(Validator.class)).isEqualTo(defaultValidator);
	}

	@Test
	public void validatorWithConfigurerShouldUseSpringValidator() {
		load(ValidatorWebFluxConfigurer.class,
				new Class<?>[] { ValidationAutoConfiguration.class });
		assertThat(this.context.getBeansOfType(ValidatorFactory.class)).isEmpty();
		assertThat(this.context.getBeansOfType(javax.validation.Validator.class))
				.isEmpty();
		String[] springValidatorBeans = this.context.getBeanNamesForType(Validator.class);
		assertThat(springValidatorBeans).containsExactly("webFluxValidator");
		assertThat(this.context.getBean("webFluxValidator"))
				.isSameAs(this.context.getBean(ValidatorWebFluxConfigurer.class).validator);
	}

	@Test
	public void validatorWithConfigurerDoesNotExposeJsr303() {
		load(ValidatorJsr303WebFluxConfigurer.class, new Class<?>[] { ValidationAutoConfiguration.class });
		assertThat(this.context.getBeansOfType(ValidatorFactory.class)).isEmpty();
		assertThat(this.context.getBeansOfType(javax.validation.Validator.class))
				.isEmpty();
		String[] springValidatorBeans = this.context.getBeanNamesForType(Validator.class);
		assertThat(springValidatorBeans).containsExactly("webFluxValidator");
		Validator validator = this.context.getBean("webFluxValidator", Validator.class);
		assertThat(validator).isInstanceOf(ValidatorAdapter.class);
		assertThat(((ValidatorAdapter) validator).getTarget())
				.isSameAs(this.context.getBean(ValidatorJsr303WebFluxConfigurer.class).validator);
	}

	@Test
	public void validationCustomConfigurerTakesPrecedence() {
		load(ValidatorWebFluxConfigurer.class);
		assertThat(this.context.getBeansOfType(ValidatorFactory.class)).hasSize(1);
		assertThat(this.context.getBeansOfType(javax.validation.Validator.class))
				.hasSize(1);
		String[] springValidatorBeans = this.context.getBeanNamesForType(Validator.class);
		assertThat(springValidatorBeans)
				.containsExactly("defaultValidator", "webFluxValidator");
		assertThat(this.context.getBean("webFluxValidator"))
				.isSameAs(this.context.getBean(ValidatorWebFluxConfigurer.class).validator);
		// Primary Spring validator is the auto-configured one as the WebFlux one has been
		// customized via a WebFluxConfigurer
		assertThat(this.context.getBean(Validator.class))
				.isEqualTo(this.context.getBean("defaultValidator"));
	}

	@Test
	public void validatorWithCustomSpringValidatorIgnored() {
		load(CustomSpringValidator.class);
		String[] jsrValidatorBeans = this.context
				.getBeanNamesForType(javax.validation.Validator.class);
		String[] springValidatorBeans = this.context.getBeanNamesForType(Validator.class);
		assertThat(jsrValidatorBeans).containsExactly("defaultValidator");
		assertThat(springValidatorBeans).containsExactly(
				"customValidator", "defaultValidator", "webFluxValidator");
		Validator validator = this.context.getBean("webFluxValidator", Validator.class);
		assertThat(validator).isInstanceOf(ValidatorAdapter.class);
		Object defaultValidator = this.context.getBean("defaultValidator");
		assertThat(((ValidatorAdapter) validator).getTarget())
				.isSameAs(defaultValidator);
		// Primary Spring validator is the one used by WebFlux behind the scenes
		assertThat(this.context.getBean(Validator.class)).isEqualTo(defaultValidator);
	}

	@Test
	public void validatorWithCustomJsr303ValidatorExposedAsSpringValidator() {
		load(CustomJsr303Validator.class);
		assertThat(this.context.getBeansOfType(ValidatorFactory.class)).isEmpty();
		String[] jsrValidatorBeans = this.context
				.getBeanNamesForType(javax.validation.Validator.class);
		String[] springValidatorBeans = this.context.getBeanNamesForType(Validator.class);
		assertThat(jsrValidatorBeans).containsExactly("customValidator");
		assertThat(springValidatorBeans).containsExactly("webFluxValidator");
		Validator validator = this.context.getBean(Validator.class);
		assertThat(validator).isInstanceOf(ValidatorAdapter.class);
		Validator target = ((ValidatorAdapter) validator).getTarget();
		assertThat(new DirectFieldAccessor(target).getPropertyValue("targetValidator"))
				.isSameAs(this.context.getBean("customValidator"));
	}

	private void load(String... environment) {
		load(null, environment);
	}

	private void load(Class<?> config, String... environment) {
		load(config, null, environment);
	}

	/**
	 * Creates and refreshes a reactive web context registering
	 * {@code BaseConfiguration} (and therefore WebFlux auto-configuration),
	 * the optional extra {@code config} class, and removing any classes listed
	 * in {@code exclude}.
	 */
	private void load(Class<?> config, Class<?>[] exclude, String... environment) {
		this.context = new GenericReactiveWebApplicationContext();
		EnvironmentTestUtils.addEnvironment(this.context, environment);
		List<Class<?>> configClasses = new ArrayList<>();
		if (config != null) {
			configClasses.add(config);
		}
		configClasses.addAll(Arrays.asList(Config.class,
				ValidationAutoConfiguration.class, BaseConfiguration.class));
		if (!ObjectUtils.isEmpty(exclude)) {
			configClasses.removeAll(Arrays.asList(exclude));
		}
		this.context.register(configClasses.toArray(new Class<?>[configClasses.size()]));
		this.context.refresh();
	}

	@Configuration
	protected static class CustomArgumentResolvers {

		@Bean
		public HandlerMethodArgumentResolver firstResolver() {
			return mock(HandlerMethodArgumentResolver.class);
		}

		@Bean
		public HandlerMethodArgumentResolver secondResolver() {
			return mock(HandlerMethodArgumentResolver.class);
		}

	}

	@Configuration
	protected static class ViewResolvers {

		@Bean
		@Order(Ordered.HIGHEST_PRECEDENCE)
		public ViewResolver aViewResolver() {
			return mock(ViewResolver.class);
		}

		@Bean
		public ViewResolver anotherViewResolver() {
			return mock(ViewResolver.class);
		}

	}

	@Configuration
	@Import({ WebFluxAutoConfiguration.class })
	@EnableConfigurationProperties(WebFluxProperties.class)
	protected static class BaseConfiguration {

		@Bean
		public MockReactiveWebServerFactory mockReactiveWebServerFactory() {
			return new MockReactiveWebServerFactory();
		}

	}

	@Configuration
	protected static class CustomHttpHandler {

		@Bean
		public HttpHandler httpHandler() {
			return (serverHttpRequest, serverHttpResponse) -> null;
		}

	}

	@Configuration
	protected static class ValidatorWebFluxConfigurer implements WebFluxConfigurer {

		private final Validator validator = mock(Validator.class);

		@Override
		public Optional<Validator> getValidator() {
			return Optional.of(this.validator);
		}

	}

	@Configuration
	protected static class ValidatorJsr303WebFluxConfigurer implements WebFluxConfigurer {

		private final LocalValidatorFactoryBean validator = new LocalValidatorFactoryBean();

		@Override
		public Optional<Validator> getValidator() {
			return Optional.of(this.validator);
		}

	}

	@Configuration
	static class CustomJsr303Validator {

		@Bean
		public javax.validation.Validator customValidator() {
			return mock(javax.validation.Validator.class);
		}

	}

	@Configuration
	static class CustomSpringValidator {

		@Bean
		public Validator customValidator() {
			return mock(Validator.class);
		}

	}

}
| |
package org.ubicollab.ubibazaar.api.store;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collection;
import java.util.Set;
import lombok.extern.slf4j.Slf4j;
import org.ubicollab.ubibazaar.api.ApiProperties;
import org.ubicollab.ubibazaar.core.Device;
import org.ubicollab.ubibazaar.core.Manager;
import org.ubicollab.ubibazaar.core.ManagerType;
import org.ubicollab.ubibazaar.core.User;
import com.github.mustachejava.DefaultMustacheFactory;
import com.github.mustachejava.MustacheFactory;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
@Slf4j
public class ManagerStore {

  /**
   * Loads a single manager by id, or returns {@code null} if no row matches.
   * Managed devices are resolved via {@link #findManagedDevices(String)}.
   */
  public static Manager getById(String id) {
    String sql = "SELECT * FROM manager WHERE id = ?";
    try (Connection conn = Database.getConnection();
        PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, id);
      // executeQuery() is the idiomatic JDBC call for a SELECT (instead of
      // execute() followed by getResultSet()).
      try (ResultSet rs = ps.executeQuery()) {
        if (rs.next()) {
          ManagerType managerType = ManagerTypeStore.getById(rs.getString("manager_type_id"));
          User owner = UserStore.getUser(rs.getString("owner_id"));
          Set<Device> devices = findManagedDevices(id);
          Boolean installed = rs.getBoolean("installed");
          // NOTE(review): the fifth constructor argument (presumably the
          // manager key) is deliberately left null here — confirm it must not
          // be exposed through this read path.
          return new Manager(id, managerType, owner, devices, null, installed);
        } else {
          return null;
        }
      }
    } catch (SQLException e) {
      log.error(e.getMessage(), e);
      throw new RuntimeException("Database problem. See logs for details.", e);
    }
  }

  /**
   * Records that the given device is managed by the given manager.
   */
  public static void linkDeviceToManager(String managerId, String deviceId) {
    String sql = "INSERT INTO managed_device (manager_id, device_id) VALUES (?,?)";
    try (Connection conn = Database.getConnection();
        PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, managerId);
      ps.setString(2, deviceId);
      // executeUpdate() makes the DML intent explicit.
      ps.executeUpdate();
    } catch (SQLException e) {
      log.error(e.getMessage(), e);
      throw new RuntimeException("Database problem. See logs for details.", e);
    }
  }

  /**
   * Resolves the devices managed by the given manager: devices explicitly
   * linked via managed_device, plus — for manager types with cardinality
   * 'ALL' — every device of the owner on the matching platform.
   */
  private static Set<Device> findManagedDevices(String managerId) {
    String sql =
        ""
            + "select d.id "
            + "from device d "
            + "join manager m on m.owner_id = d.owner_id "
            + "join manager_type mt on mt.id = m.manager_type_id and mt.platform_id = d.platform_id "
            + "left join managed_device md on md.device_id = d.id and md.manager_id = m.id "
            + "where ((md.device_id is not null) or (mt.cardinality = 'ALL')) "
            + "and m.id = ?";
    try (Connection conn = Database.getConnection();
        PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, managerId);
      try (ResultSet rs = ps.executeQuery()) {
        Set<Device> results = Sets.newHashSet();
        while (rs.next()) {
          String id = rs.getString("id");
          results.add(DeviceStore.getById(id));
        }
        return results;
      }
    } catch (SQLException e) {
      log.error(e.getMessage(), e);
      throw new RuntimeException("Database problem. See logs for details.", e);
    }
  }

  /**
   * Loads all managers. Note this issues one extra query per manager for its
   * managed devices (N+1 pattern).
   */
  public static Collection<Manager> getAll() {
    String sql = "SELECT * FROM manager";
    try (Connection conn = Database.getConnection();
        Statement ps = conn.createStatement()) {
      try (ResultSet rs = ps.executeQuery(sql)) {
        Set<Manager> results = Sets.newHashSet();
        while (rs.next()) {
          String id = rs.getString("id");
          ManagerType managerType = ManagerTypeStore.getById(rs.getString("manager_type_id"));
          User owner = UserStore.getUser(rs.getString("owner_id"));
          Set<Device> devices = findManagedDevices(id);
          Boolean installed = rs.getBoolean("installed");
          results.add(new Manager(id, managerType, owner, devices, null, installed));
        }
        return results;
      }
    } catch (SQLException e) {
      log.error(e.getMessage(), e);
      throw new RuntimeException("Database problem. See logs for details.", e);
    }
  }

  /**
   * Inserts the manager, generating a fresh random id (stored back into the
   * argument) and a random access key.
   *
   * @return the same {@code manager} instance, with its id populated
   */
  public static Manager create(Manager manager) {
    // generate user id
    manager.setId(StoreUtil.generateRandomId());
    String sql = "INSERT INTO manager (id, manager_type_id, owner_id, `key`) "
        + "VALUES (?,?,?,?)";
    try (Connection conn = Database.getConnection();
        PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, manager.getId());
      ps.setString(2, manager.getType().getId());
      ps.setString(3, manager.getOwner().getId());
      ps.setString(4, StoreUtil.generateRandomId());
      ps.executeUpdate();
      return manager;
    } catch (SQLException e) {
      log.error(e.getMessage(), e);
      throw new RuntimeException("Database problem. See logs for details.", e);
    }
  }

  /**
   * Persists the manager's 'installed' flag (the only mutable column here).
   */
  public static void update(Manager manager) {
    String sql = "UPDATE manager set installed = ? WHERE id = ?";
    try (Connection conn = Database.getConnection();
        PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setBoolean(1, manager.getInstalled());
      ps.setString(2, manager.getId());
      ps.executeUpdate();
    } catch (SQLException e) {
      log.error(e.getMessage(), e);
      throw new RuntimeException("Database problem. See logs for details.", e);
    }
  }

  /**
   * Deletes the manager row with the given id. No-op if it does not exist.
   */
  public static void delete(String managerId) {
    String sql = "DELETE FROM manager WHERE id = ?";
    try (Connection conn = Database.getConnection();
        PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, managerId);
      ps.executeUpdate();
    } catch (SQLException e) {
      log.error(e.getMessage(), e);
      throw new RuntimeException("Database problem. See logs for details.", e);
    }
  }

  /**
   * For a not-yet-installed manager, renders its installation instructions
   * from the Mustache templates stored on the manager type (substituting id,
   * key, server URL and a shortened install URL) and sets them on
   * {@code found}. Does nothing if the manager is already installed.
   * Template rendering failures are logged and swallowed (best-effort).
   */
  public static void setInstallationInstructions(Manager found) {
    String sql = ""
        + "select * "
        + "from manager m "
        + "join manager_type mt on mt.id = m.manager_type_id "
        + "where m.installed = 0 and m.id = ?";
    try (Connection conn = Database.getConnection();
        PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setString(1, found.getId());
      try (ResultSet rs = ps.executeQuery()) {
        if (rs.next()) {
          String id = rs.getString("id");
          String key = rs.getString("key");
          String instructions = rs.getString("installation_instructions");
          String url = rs.getString("installation_url");
          MustacheFactory mf = new DefaultMustacheFactory();
          if (Strings.isNullOrEmpty(url)) {
            // No URL template: the stored instructions are used verbatim.
            found.setInstallationInstructions(instructions);
          } else {
            // prepare url
            StringWriter urlWriter = new StringWriter();
            mf.compile(new StringReader(url), "url")
                .execute(urlWriter, ImmutableMap.builder()
                    .put("id", "" + id)
                    .put("key", "" + key)
                    .put("server", "" + ApiProperties.API_URL)
                    .build())
                .flush();
            // shorten url
            String shortUrl = StoreUtil.shortenUrl(urlWriter.toString());
            // prepare instructions
            StringWriter instructionsWriter = new StringWriter();
            mf.compile(new StringReader(instructions), "instructions")
                .execute(instructionsWriter, ImmutableMap.builder()
                    .put("url", shortUrl)
                    .build())
                .flush();
            found.setInstallationInstructions(instructionsWriter.toString());
          }
        }
      } catch (IOException e) {
        // Template rendering failed; leave instructions unset.
        log.error(e.getMessage(), e);
      }
    } catch (SQLException e) {
      log.error(e.getMessage(), e);
      throw new RuntimeException("Database problem. See logs for details.", e);
    }
  }
}
| |
/***
* ASM XML Adapter
* Copyright (c) 2004-2011, Eugene Kuleshov
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.columbia.cs.psl.phosphor.org.objectweb.asm.xml;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import javax.xml.transform.Source;
import javax.xml.transform.Templates;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.sax.SAXResult;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.sax.SAXTransformerFactory;
import javax.xml.transform.sax.TransformerHandler;
import javax.xml.transform.stream.StreamSource;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.ext.LexicalHandler;
import org.xml.sax.helpers.AttributesImpl;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import edu.columbia.cs.psl.phosphor.org.objectweb.asm.ClassReader;
import edu.columbia.cs.psl.phosphor.org.objectweb.asm.ClassWriter;
/**
 * Processor is a command line tool that can be used for bytecode weaving
 * directed by XSL transformation.
* <p>
* In order to use a concrete XSLT engine, system property
* <tt>javax.xml.transform.TransformerFactory</tt> must be set to one of the
* following values.
*
* <blockquote>
* <table border="1" cellspacing="0" cellpadding="3">
* <tr>
* <td>jd.xslt</td>
* <td>jd.xml.xslt.trax.TransformerFactoryImpl</td>
* </tr>
*
* <tr>
* <td>Saxon</td>
* <td>net.sf.saxon.TransformerFactoryImpl</td>
* </tr>
*
* <tr>
* <td>Caucho</td>
* <td>com.caucho.xsl.Xsl</td>
* </tr>
*
* <tr>
 * <td>Xalan interpreter</td>
* <td>org.apache.xalan.processor.TransformerFactory</td>
* </tr>
*
* <tr>
* <td>Xalan xsltc</td>
* <td>org.apache.xalan.xsltc.trax.TransformerFactoryImpl</td>
* </tr>
* </table>
* </blockquote>
*
* @author Eugene Kuleshov
*/
public class Processor {
    /** Entries are raw bytecode ({@code .class}). */
    public static final int BYTECODE = 1;
    /** Each class is a separate XML document inside the archive. */
    public static final int MULTI_XML = 2;
    /** All classes are merged into one XML document, {@link #SINGLE_XML_NAME}. */
    public static final int SINGLE_XML = 3;

    /** Archive entry name used when the representation is {@link #SINGLE_XML}. */
    private static final String SINGLE_XML_NAME = "classes.xml";

    /** Representation of the entries read from {@link #input}. */
    private final int inRepresentation;
    /** Representation of the entries written to {@link #output}. */
    private final int outRepresentation;
    /** Zip/jar stream the entries are read from. */
    private final InputStream input;
    /** Zip/jar stream the processed entries are written to. */
    private final OutputStream output;
    /** Optional XSLT stylesheet applied to the XML form; may be {@code null}. */
    private final Source xslt;
    /** Running count of entries seen, used only for progress reporting. */
    private int n = 0;

    /**
     * Creates a processor converting entries of {@code input} from
     * {@code inRepresenation} to {@code outRepresentation}, optionally piping
     * them through {@code xslt}.
     * (NOTE: the first parameter name is misspelled "inRepresenation" in the
     * original source; kept unchanged here.)
     */
    public Processor(final int inRepresenation, final int outRepresentation,
            final InputStream input, final OutputStream output,
            final Source xslt) {
        this.inRepresentation = inRepresenation;
        this.outRepresentation = outRepresentation;
        this.input = input;
        this.output = output;
        this.xslt = xslt;
    }
    /**
     * Copies every entry of the input archive to the output archive,
     * converting class entries between the configured representations and,
     * when an XSLT source was given, transforming their XML form.
     *
     * @return the number of archive entries read
     */
    public int process() throws TransformerException, IOException, SAXException {
        ZipInputStream zis = new ZipInputStream(input);
        final ZipOutputStream zos = new ZipOutputStream(output);
        final OutputStreamWriter osw = new OutputStreamWriter(zos);
        // TransformerFactory discovery may go through the context class loader;
        // make sure it is ours.
        Thread.currentThread().setContextClassLoader(
                getClass().getClassLoader());
        TransformerFactory tf = TransformerFactory.newInstance();
        // SAX-based sources/results are required for the streaming pipeline
        // below; give up (returning 0 processed entries) if unsupported.
        if (!tf.getFeature(SAXSource.FEATURE)
                || !tf.getFeature(SAXResult.FEATURE)) {
            return 0;
        }
        SAXTransformerFactory saxtf = (SAXTransformerFactory) tf;
        Templates templates = null;
        if (xslt != null) {
            templates = saxtf.newTemplates(xslt);
        }
        // configuring outHandlerFactory
        // ///////////////////////////////////////////////////////
        EntryElement entryElement = getEntryElement(zos);
        ContentHandler outDocHandler = null;
        switch (outRepresentation) {
        case BYTECODE:
            // each class becomes a .class zip entry
            outDocHandler = new OutputSlicingHandler(
                    new ASMContentHandlerFactory(zos), entryElement, false);
            break;
        case MULTI_XML:
            // each class becomes its own XML document entry
            outDocHandler = new OutputSlicingHandler(new SAXWriterFactory(osw,
                    true), entryElement, true);
            break;
        case SINGLE_XML:
            // all classes are appended to one XML document entry
            ZipEntry outputEntry = new ZipEntry(SINGLE_XML_NAME);
            zos.putNextEntry(outputEntry);
            outDocHandler = new SAXWriter(osw, false);
            break;
        }
        // configuring inputDocHandlerFactory
        // /////////////////////////////////////////////////
        ContentHandler inDocHandler;
        if (templates == null) {
            // no transformation: feed input events straight to the output handler
            inDocHandler = outDocHandler;
        } else {
            // slice the input per <class> element and run each slice through
            // the XSLT templates before it reaches the output handler
            inDocHandler = new InputSlicingHandler("class", outDocHandler,
                    new TransformerHandlerFactory(saxtf, templates,
                            outDocHandler));
        }
        ContentHandlerFactory inDocHandlerFactory = new SubdocumentHandlerFactory(
                inDocHandler);
        // when reading many documents, wrap them in a synthetic <classes> root
        if (inDocHandler != null && inRepresentation != SINGLE_XML) {
            inDocHandler.startDocument();
            inDocHandler.startElement("", "classes", "classes",
                    new AttributesImpl());
        }
        int i = 0;
        ZipEntry ze;
        while ((ze = zis.getNextEntry()) != null) {
            update(ze.getName(), n++);
            if (isClassEntry(ze)) {
                processEntry(zis, ze, inDocHandlerFactory);
            } else {
                // non-class entries are copied through unchanged
                OutputStream os = entryElement.openEntry(getName(ze));
                copyEntry(zis, os);
                entryElement.closeEntry();
            }
            i++;
        }
        if (inDocHandler != null && inRepresentation != SINGLE_XML) {
            inDocHandler.endElement("", "classes", "classes");
            inDocHandler.endDocument();
        }
        if (outRepresentation == SINGLE_XML) {
            zos.closeEntry();
        }
        zos.flush();
        zos.close();
        // NOTE(review): zis is intentionally(?) left unclosed — closing zos
        // does not close the unrelated input stream; confirm callers close it.
        return i;
    }
private void copyEntry(final InputStream is, final OutputStream os)
throws IOException {
if (outRepresentation == SINGLE_XML) {
return;
}
byte[] buff = new byte[2048];
int i;
while ((i = is.read(buff)) != -1) {
os.write(buff, 0, i);
}
}
private boolean isClassEntry(final ZipEntry ze) {
String name = ze.getName();
return inRepresentation == SINGLE_XML && name.equals(SINGLE_XML_NAME)
|| name.endsWith(".class") || name.endsWith(".class.xml");
}
    /**
     * Converts one class entry: bytecode input is walked with a SAX adapter,
     * XML input is parsed, and the resulting SAX events are fed to a fresh
     * handler from {@code handlerFactory}. Failures are reported via
     * {@link #update(Object, int)} and otherwise swallowed (best-effort), so a
     * bad entry does not abort the whole archive.
     */
    private void processEntry(final ZipInputStream zis, final ZipEntry ze,
            final ContentHandlerFactory handlerFactory) {
        ContentHandler handler = handlerFactory.createContentHandler();
        try {
            // if (CODE2ASM.equals(command)) { // read bytecode and process it
            // // with TraceClassVisitor
            // ClassReader cr = new ClassReader(readEntry(zis, ze));
            // cr.accept(new TraceClassVisitor(null, new PrintWriter(os)),
            // false);
            // }
            boolean singleInputDocument = inRepresentation == SINGLE_XML;
            if (inRepresentation == BYTECODE) { // read bytecode and process it
                // with handler
                ClassReader cr = new ClassReader(readEntry(zis, ze));
                cr.accept(new SAXClassAdapter(handler, singleInputDocument), 0);
            } else { // read XML and process it with handler
                XMLReader reader = XMLReaderFactory.createXMLReader();
                reader.setContentHandler(handler);
                // For a single merged XML document the parser reads the zip
                // stream directly; wrap it so the parser cannot close it.
                reader.parse(new InputSource(
                        singleInputDocument ? (InputStream) new ProtectedInputStream(
                                zis) : new ByteArrayInputStream(readEntry(zis,
                                ze))));
            }
        } catch (Exception ex) {
            // report the failing entry name and the exception, then continue
            update(ze.getName(), 0);
            update(ex, 0);
        }
    }
private EntryElement getEntryElement(final ZipOutputStream zos) {
if (outRepresentation == SINGLE_XML) {
return new SingleDocElement(zos);
}
return new ZipEntryElement(zos);
}
// private ContentHandlerFactory getHandlerFactory(
// OutputStream os,
// SAXTransformerFactory saxtf,
// Templates templates)
// {
// ContentHandlerFactory factory = null;
// if (templates == null) {
// if (outputRepresentation == BYTECODE) { // factory used to write
// // bytecode
// factory = new ASMContentHandlerFactory(os, computeMax);
// } else { // factory used to write XML
// factory = new SAXWriterFactory(os, true);
// }
// } else {
// if (outputRepresentation == BYTECODE) { // factory used to transform
// // and then write bytecode
// factory = new ASMTransformerHandlerFactory(saxtf,
// templates,
// os,
// computeMax);
// } else { // factory used to transformand then write XML
// factory = new TransformerHandlerFactory(saxtf,
// templates,
// os,
// outputRepresentation == SINGLE_XML);
// }
// }
// return factory;
// }
private String getName(final ZipEntry ze) {
String name = ze.getName();
if (isClassEntry(ze)) {
if (inRepresentation != BYTECODE && outRepresentation == BYTECODE) {
name = name.substring(0, name.length() - 4); // .class.xml to
// .class
} else if (inRepresentation == BYTECODE
&& outRepresentation != BYTECODE) {
name += ".xml"; // .class to .class.xml
}
// } else if( CODE2ASM.equals( command)) {
// name = name.substring( 0, name.length()-6).concat( ".asm");
}
return name;
}
private static byte[] readEntry(final InputStream zis, final ZipEntry ze)
throws IOException {
long size = ze.getSize();
if (size > -1) {
byte[] buff = new byte[(int) size];
int k = 0;
int n;
while ((n = zis.read(buff, k, buff.length - k)) > 0) {
k += n;
}
return buff;
}
ByteArrayOutputStream bos = new ByteArrayOutputStream();
byte[] buff = new byte[4096];
int i;
while ((i = zis.read(buff)) != -1) {
bos.write(buff, 0, i);
}
return bos.toByteArray();
}
/*
* (non-Javadoc)
*
* @see java.util.Observer#update(java.util.Observable, java.lang.Object)
*/
protected void update(final Object arg, final int n) {
if (arg instanceof Throwable) {
((Throwable) arg).printStackTrace();
} else {
if (n % 100 == 0) {
System.err.println(n + " " + arg);
}
}
}
    /**
     * Command-line entry point. Usage:
     * {@code Main <in format> <out format> [-in <jar>] [-out <jar>] [-xslt <file>]}
     * where formats are "code", "xml" or "singlexml". Defaults to stdin/stdout
     * when -in/-out are omitted. Prints the entry count and throughput to
     * stderr when done.
     */
    public static void main(final String[] args) throws Exception {
        if (args.length < 2) {
            showUsage();
            return;
        }
        int inRepresentation = getRepresentation(args[0]);
        int outRepresentation = getRepresentation(args[1]);
        InputStream is = System.in;
        OutputStream os = new BufferedOutputStream(System.out);
        Source xslt = null;
        // boolean computeMax = true;
        for (int i = 2; i < args.length; i++) {
            // NOTE(review): each option assumes a following value; a trailing
            // bare "-in"/"-out"/"-xslt" throws ArrayIndexOutOfBoundsException.
            if ("-in".equals(args[i])) {
                is = new FileInputStream(args[++i]);
            } else if ("-out".equals(args[i])) {
                os = new BufferedOutputStream(new FileOutputStream(args[++i]));
            } else if ("-xslt".equals(args[i])) {
                xslt = new StreamSource(new FileInputStream(args[++i]));
                // } else if( "-computemax".equals( args[ i].toLowerCase())) {
                // computeMax = true;
            } else {
                showUsage();
                return;
            }
        }
        // getRepresentation returns 0 for an unrecognized format keyword
        if (inRepresentation == 0 || outRepresentation == 0) {
            showUsage();
            return;
        }
        Processor m = new Processor(inRepresentation, outRepresentation, is,
                os, xslt);
        long l1 = System.currentTimeMillis();
        int n = m.process();
        long l2 = System.currentTimeMillis();
        System.err.println(n);
        System.err.println((l2 - l1) + "ms " + 1000f * n / (l2 - l1)
                + " resources/sec");
    }
private static int getRepresentation(final String s) {
if ("code".equals(s)) {
return BYTECODE;
} else if ("xml".equals(s)) {
return MULTI_XML;
} else if ("singlexml".equals(s)) {
return SINGLE_XML;
}
return 0;
}
private static void showUsage() {
System.err
.println("Usage: Main <in format> <out format> [-in <input jar>] [-out <output jar>] [-xslt <xslt fiel>]");
System.err
.println(" when -in or -out is omitted sysin and sysout would be used");
System.err
.println(" <in format> and <out format> - code | xml | singlexml");
}
    /**
     * InputStream wrapper that ignores {@link #close()}, protecting the
     * underlying stream from XML parsers that close their input source. Used
     * when the parser reads directly from the shared {@link ZipInputStream}.
     */
    private static final class ProtectedInputStream extends InputStream {
        private final InputStream is;

        ProtectedInputStream(final InputStream is) {
            this.is = is;
        }

        // Deliberately a no-op: the wrapped stream must stay open.
        @Override
        public final void close() throws IOException {
        }

        @Override
        public final int read() throws IOException {
            return is.read();
        }

        @Override
        public final int read(final byte[] b, final int off, final int len)
                throws IOException {
            return is.read(b, off, len);
        }

        @Override
        public final int available() throws IOException {
            return is.available();
        }
    }
/**
 * A {@link ContentHandlerFactory ContentHandlerFactory} is used to create
 * {@link org.xml.sax.ContentHandler ContentHandler} instances for concrete
 * context (one handler per document or subdocument being processed).
 */
private static interface ContentHandlerFactory {

    /**
     * Creates an instance of the content handler.
     *
     * @return content handler, never null
     */
    ContentHandler createContentHandler();
}
/**
 * {@link ContentHandlerFactory} producing {@link SAXWriter} serializers
 * that all share the same output writer and formatting mode.
 */
private static final class SAXWriterFactory implements
        ContentHandlerFactory {

    /** Shared destination for every created writer. */
    private final Writer writer;

    /** Whether created writers use short syntax for empty elements. */
    private final boolean optimizeEmptyElements;

    SAXWriterFactory(final Writer writer, final boolean optimizeEmptyElements) {
        this.writer = writer;
        this.optimizeEmptyElements = optimizeEmptyElements;
    }

    public final ContentHandler createContentHandler() {
        return new SAXWriter(writer, optimizeEmptyElements);
    }
}
/**
 * {@link ContentHandlerFactory} that creates {@link ASMContentHandler}s
 * which assemble the received XML into bytecode and write the finished
 * class file to an output stream when the document ends.
 */
private static final class ASMContentHandlerFactory implements
        ContentHandlerFactory {

    // Destination for the generated class bytes; shared with the anonymous
    // handler below, hence package-private rather than local.
    final OutputStream os;

    ASMContentHandlerFactory(final OutputStream os) {
        this.os = os;
    }

    public final ContentHandler createContentHandler() {
        // COMPUTE_MAXS: recompute max stack/locals, since the XML form does
        // not necessarily carry valid limits.
        final ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
        return new ASMContentHandler(cw) {
            @Override
            public void endDocument() throws SAXException {
                // The class is only complete once the whole document has
                // been consumed, so the dump happens here.
                try {
                    os.write(cw.toByteArray());
                } catch (IOException e) {
                    throw new SAXException(e);
                }
            }
        };
    }
}
/**
 * {@link ContentHandlerFactory} that creates XSLT
 * {@link TransformerHandler}s from precompiled {@link Templates}; each
 * created handler forwards its transformed output to a shared downstream
 * content handler.
 */
private static final class TransformerHandlerFactory implements
        ContentHandlerFactory {

    // All collaborators are fixed at construction time; made final for
    // immutability (previously only 'templates' was final).
    private final SAXTransformerFactory saxtf;
    private final Templates templates;
    private final ContentHandler outputHandler;

    TransformerHandlerFactory(final SAXTransformerFactory saxtf,
            final Templates templates, final ContentHandler outputHandler) {
        this.saxtf = saxtf;
        this.templates = templates;
        this.outputHandler = outputHandler;
    }

    public final ContentHandler createContentHandler() {
        try {
            TransformerHandler handler = saxtf
                    .newTransformerHandler(templates);
            handler.setResult(new SAXResult(outputHandler));
            return handler;
        } catch (TransformerConfigurationException ex) {
            // Preserve the original exception as the cause instead of
            // discarding it (previously only ex.toString() survived).
            throw new RuntimeException(ex.toString(), ex);
        }
    }
}
/**
 * {@link ContentHandlerFactory} that always hands back one shared
 * {@link ContentHandler} instance, regardless of how many subdocuments
 * are processed.
 */
private static final class SubdocumentHandlerFactory implements
        ContentHandlerFactory {

    /** The single handler returned for every subdocument. */
    private final ContentHandler handler;

    SubdocumentHandlerFactory(final ContentHandler subdocumentHandler) {
        this.handler = subdocumentHandler;
    }

    public final ContentHandler createContentHandler() {
        return handler;
    }
}
/**
* A {@link org.xml.sax.ContentHandler ContentHandler} and
* {@link org.xml.sax.ext.LexicalHandler LexicalHandler} that serializes XML
* from SAX 2.0 events into {@link java.io.Writer Writer}.
*
* <i><blockquote> This implementation does not support namespaces, entity
* definitions (uncluding DTD), CDATA and text elements. </blockquote></i>
*/
private static final class SAXWriter extends DefaultHandler implements
LexicalHandler {
private static final char[] OFF = " "
.toCharArray();
private Writer w;
private final boolean optimizeEmptyElements;
private boolean openElement = false;
private int ident = 0;
/**
* Creates <code>SAXWriter</code>.
*
* @param w
* writer
* @param optimizeEmptyElements
* if set to <code>true</code>, short XML syntax will be used
* for empty elements
*/
SAXWriter(final Writer w, final boolean optimizeEmptyElements) {
this.w = w;
this.optimizeEmptyElements = optimizeEmptyElements;
}
@Override
public final void startElement(final String ns, final String localName,
final String qName, final Attributes atts) throws SAXException {
try {
closeElement();
writeIdent();
w.write('<' + qName);
if (atts != null && atts.getLength() > 0) {
writeAttributes(atts);
}
if (optimizeEmptyElements) {
openElement = true;
} else {
w.write(">\n");
}
ident += 2;
} catch (IOException ex) {
throw new SAXException(ex);
}
}
@Override
public final void endElement(final String ns, final String localName,
final String qName) throws SAXException {
ident -= 2;
try {
if (openElement) {
w.write("/>\n");
openElement = false;
} else {
writeIdent();
w.write("</" + qName + ">\n");
}
} catch (IOException ex) {
throw new SAXException(ex);
}
}
@Override
public final void endDocument() throws SAXException {
try {
w.flush();
} catch (IOException ex) {
throw new SAXException(ex);
}
}
public final void comment(final char[] ch, final int off, final int len)
throws SAXException {
try {
closeElement();
writeIdent();
w.write("<!-- ");
w.write(ch, off, len);
w.write(" -->\n");
} catch (IOException ex) {
throw new SAXException(ex);
}
}
public final void startDTD(final String arg0, final String arg1,
final String arg2) throws SAXException {
}
public final void endDTD() throws SAXException {
}
public final void startEntity(final String arg0) throws SAXException {
}
public final void endEntity(final String arg0) throws SAXException {
}
public final void startCDATA() throws SAXException {
}
public final void endCDATA() throws SAXException {
}
private final void writeAttributes(final Attributes atts)
throws IOException {
StringBuffer sb = new StringBuffer();
int len = atts.getLength();
for (int i = 0; i < len; i++) {
sb.append(' ').append(atts.getLocalName(i)).append("=\"")
.append(esc(atts.getValue(i))).append('\"');
}
w.write(sb.toString());
}
/**
* Encode string with escaping.
*
* @param str
* string to encode.
* @return encoded string
*/
private static final String esc(final String str) {
StringBuffer sb = new StringBuffer(str.length());
for (int i = 0; i < str.length(); i++) {
char ch = str.charAt(i);
switch (ch) {
case '&':
sb.append("&");
break;
case '<':
sb.append("<");
break;
case '>':
sb.append(">");
break;
case '\"':
sb.append(""");
break;
default:
if (ch > 0x7f) {
sb.append("&#").append(Integer.toString(ch))
.append(';');
} else {
sb.append(ch);
}
}
}
return sb.toString();
}
private final void writeIdent() throws IOException {
int n = ident;
while (n > 0) {
if (n > OFF.length) {
w.write(OFF);
n -= OFF.length;
} else {
w.write(OFF, 0, n);
n = 0;
}
}
}
private final void closeElement() throws IOException {
if (openElement) {
w.write(">\n");
}
openElement = false;
}
}
/**
 * A {@link org.xml.sax.ContentHandler ContentHandler} that splits XML
 * documents into smaller chunks. Each chunk is processed by the nested
 * {@link org.xml.sax.ContentHandler ContentHandler} obtained from
 * {@link ContentHandlerFactory ContentHandlerFactory}. This is
 * useful for running XSLT engine against large XML document that will
 * hardly fit into the memory all together.
 * <p>
 * TODO use complete path for subdocumentRoot
 */
private static final class InputSlicingHandler extends DefaultHandler {

    // Local name of the element that opens a new subdocument.
    private String subdocumentRoot;

    // Receives all events outside of subdocuments; may be null when the
    // envelope content should be discarded.
    private final ContentHandler rootHandler;

    // Produces one fresh handler per subdocument encountered.
    private ContentHandlerFactory subdocumentHandlerFactory;

    // True while events are being routed into the current subdocument.
    private boolean subdocument = false;

    // Handler for the subdocument currently being streamed, if any.
    private ContentHandler subdocumentHandler;

    /**
     * Constructs a new {@link InputSlicingHandler SubdocumentHandler}
     * object.
     *
     * @param subdocumentRoot
     *            name/path to the root element of the subdocument
     * @param rootHandler
     *            content handler for the entire document (subdocument
     *            envelope).
     * @param subdocumentHandlerFactory
     *            a {@link ContentHandlerFactory ContentHandlerFactory} used
     *            to create {@link ContentHandler ContentHandler} instances
     *            for subdocuments.
     */
    InputSlicingHandler(final String subdocumentRoot,
            final ContentHandler rootHandler,
            final ContentHandlerFactory subdocumentHandlerFactory) {
        this.subdocumentRoot = subdocumentRoot;
        this.rootHandler = rootHandler;
        this.subdocumentHandlerFactory = subdocumentHandlerFactory;
    }

    @Override
    public final void startElement(final String namespaceURI,
            final String localName, final String qName,
            final Attributes list) throws SAXException {
        if (subdocument) {
            // Inside a subdocument: forward the event verbatim.
            subdocumentHandler.startElement(namespaceURI, localName, qName,
                    list);
        } else if (localName.equals(subdocumentRoot)) {
            // Entering a subdocument: create a fresh handler and wrap the
            // element in a synthetic document of its own.
            subdocumentHandler = subdocumentHandlerFactory
                    .createContentHandler();
            subdocumentHandler.startDocument();
            subdocumentHandler.startElement(namespaceURI, localName, qName,
                    list);
            subdocument = true;
        } else if (rootHandler != null) {
            // Envelope content outside any subdocument.
            rootHandler.startElement(namespaceURI, localName, qName, list);
        }
    }

    @Override
    public final void endElement(final String namespaceURI,
            final String localName, final String qName) throws SAXException {
        if (subdocument) {
            subdocumentHandler.endElement(namespaceURI, localName, qName);
            if (localName.equals(subdocumentRoot)) {
                // Closing the subdocument root ends the synthetic document.
                subdocumentHandler.endDocument();
                subdocument = false;
            }
        } else if (rootHandler != null) {
            rootHandler.endElement(namespaceURI, localName, qName);
        }
    }

    @Override
    public final void startDocument() throws SAXException {
        if (rootHandler != null) {
            rootHandler.startDocument();
        }
    }

    @Override
    public final void endDocument() throws SAXException {
        if (rootHandler != null) {
            rootHandler.endDocument();
        }
    }

    @Override
    public final void characters(final char[] buff, final int offset,
            final int size) throws SAXException {
        // Character data follows the same routing as elements.
        if (subdocument) {
            subdocumentHandler.characters(buff, offset, size);
        } else if (rootHandler != null) {
            rootHandler.characters(buff, offset, size);
        }
    }
}
/**
 * A {@link org.xml.sax.ContentHandler ContentHandler} that splits XML
 * documents into smaller chunks. Each chunk is processed by the nested
 * {@link org.xml.sax.ContentHandler ContentHandler} obtained from
 * {@link ContentHandlerFactory ContentHandlerFactory}, and each "class"
 * subdocument is written to its own output entry. This is useful for
 * running XSLT engine against large XML document that will hardly fit into
 * the memory all together.
 *
 * <p>
 * TODO use complete path for subdocumentRoot
 */
private static final class OutputSlicingHandler extends DefaultHandler {

    // Local name of the element that opens a new subdocument; fixed to
    // "class" by the constructor.
    private final String subdocumentRoot;

    // Produces one fresh handler per subdocument.
    private ContentHandlerFactory subdocumentHandlerFactory;

    // Output target: single stream or per-class ZIP entries.
    private final EntryElement entryElement;

    // Chooses the output entry extension: ".class.xml" vs ".class".
    private boolean isXml;

    // True while events are being routed into the current subdocument.
    private boolean subdocument = false;

    // Handler for the subdocument currently being streamed, if any.
    private ContentHandler subdocumentHandler;

    /**
     * Constructs a new {@link OutputSlicingHandler SubdocumentHandler}
     * object.
     *
     * @param subdocumentHandlerFactory
     *            a {@link ContentHandlerFactory ContentHandlerFactory} used
     *            to create {@link ContentHandler ContentHandler} instances
     *            for subdocuments.
     * @param entryElement
     *            output target that receives one entry per subdocument.
     * @param isXml
     *            if true, entries are named "&lt;name&gt;.class.xml";
     *            otherwise "&lt;name&gt;.class".
     */
    OutputSlicingHandler(
            final ContentHandlerFactory subdocumentHandlerFactory,
            final EntryElement entryElement, final boolean isXml) {
        this.subdocumentRoot = "class";
        this.subdocumentHandlerFactory = subdocumentHandlerFactory;
        this.entryElement = entryElement;
        this.isXml = isXml;
    }

    @Override
    public final void startElement(final String namespaceURI,
            final String localName, final String qName,
            final Attributes list) throws SAXException {
        if (subdocument) {
            // Inside a subdocument: forward the event verbatim.
            subdocumentHandler.startElement(namespaceURI, localName, qName,
                    list);
        } else if (localName.equals(subdocumentRoot)) {
            // The "name" attribute determines the output entry name and is
            // therefore mandatory.
            String name = list.getValue("name");
            if (name == null || name.length() == 0) {
                throw new SAXException(
                        "Class element without name attribute.");
            }
            try {
                entryElement.openEntry(isXml ? name + ".class.xml" : name
                        + ".class");
            } catch (IOException ex) {
                throw new SAXException(ex.toString(), ex);
            }
            subdocumentHandler = subdocumentHandlerFactory
                    .createContentHandler();
            subdocumentHandler.startDocument();
            subdocumentHandler.startElement(namespaceURI, localName, qName,
                    list);
            subdocument = true;
        }
    }

    @Override
    public final void endElement(final String namespaceURI,
            final String localName, final String qName) throws SAXException {
        if (subdocument) {
            subdocumentHandler.endElement(namespaceURI, localName, qName);
            if (localName.equals(subdocumentRoot)) {
                // Closing the subdocument root ends the synthetic document
                // and finalizes its output entry.
                subdocumentHandler.endDocument();
                subdocument = false;
                try {
                    entryElement.closeEntry();
                } catch (IOException ex) {
                    throw new SAXException(ex.toString(), ex);
                }
            }
        }
    }

    @Override
    public final void startDocument() throws SAXException {
        // Envelope events are intentionally discarded.
    }

    @Override
    public final void endDocument() throws SAXException {
        // Envelope events are intentionally discarded.
    }

    @Override
    public final void characters(final char[] buff, final int offset,
            final int size) throws SAXException {
        // Character data outside a subdocument is dropped.
        if (subdocument) {
            subdocumentHandler.characters(buff, offset, size);
        }
    }
}
/**
 * Abstraction over the output target: either a single document stream or
 * individual entries of a ZIP archive.
 */
private static interface EntryElement {

    /**
     * Opens an output entry with the given name.
     *
     * @param name entry name
     * @return stream to write the entry content to
     * @throws IOException if the entry cannot be opened
     */
    OutputStream openEntry(String name) throws IOException;

    /**
     * Finishes the current entry, flushing any buffered content.
     *
     * @throws IOException if the entry cannot be closed
     */
    void closeEntry() throws IOException;
}
/**
 * {@link EntryElement} backed by one output stream: every "entry" writes
 * to the same stream, and closing an entry merely flushes it.
 */
private static final class SingleDocElement implements EntryElement {

    /** The single destination stream shared by all entries. */
    private final OutputStream out;

    SingleDocElement(final OutputStream os) {
        this.out = os;
    }

    public OutputStream openEntry(final String name) throws IOException {
        // The name is irrelevant for a single-document target.
        return out;
    }

    public void closeEntry() throws IOException {
        out.flush();
    }
}
/**
 * {@link EntryElement} that writes each named entry as a separate entry of
 * a {@link ZipOutputStream}.
 */
private static final class ZipEntryElement implements EntryElement {

    // Made final: the stream is never reassigned (matches the final field
    // style used by SingleDocElement in this file).
    private final ZipOutputStream zos;

    ZipEntryElement(final ZipOutputStream zos) {
        this.zos = zos;
    }

    /**
     * Starts a new ZIP entry with the given name and returns the archive
     * stream positioned at that entry.
     */
    public OutputStream openEntry(final String name) throws IOException {
        ZipEntry entry = new ZipEntry(name);
        zos.putNextEntry(entry);
        return zos;
    }

    /**
     * Flushes buffered bytes and finishes the current ZIP entry.
     */
    public void closeEntry() throws IOException {
        zos.flush();
        zos.closeEntry();
    }
}
}
| |
/**
* Copyright Pravega Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pravega.segmentstore.server.logs;
import com.google.common.base.Preconditions;
import io.pravega.common.LoggerHelpers;
import io.pravega.segmentstore.contracts.ContainerException;
import io.pravega.segmentstore.contracts.StreamSegmentException;
import io.pravega.segmentstore.contracts.StreamSegmentMergedException;
import io.pravega.segmentstore.contracts.StreamSegmentNotExistsException;
import io.pravega.segmentstore.contracts.StreamSegmentSealedException;
import io.pravega.segmentstore.server.ContainerMetadata;
import io.pravega.segmentstore.server.SegmentMetadata;
import io.pravega.segmentstore.server.UpdateableContainerMetadata;
import io.pravega.segmentstore.server.logs.operations.Operation;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Function;
import javax.annotation.concurrent.NotThreadSafe;
import lombok.extern.slf4j.Slf4j;
/**
 * Transaction-based Metadata Updater for Log Operations. Buffers metadata
 * changes in a queue of update transactions that can be committed into, or
 * rolled back from, the base container metadata.
 */
@Slf4j
@NotThreadSafe
class OperationMetadataUpdater implements ContainerMetadata {
    //region Members

    // Sentinel passed to commit() meaning "commit every pending transaction".
    private static final long MAX_TRANSACTION = Long.MAX_VALUE;
    private final String traceObjectId;
    // Base metadata that sealed transactions are eventually committed into.
    private final UpdateableContainerMetadata metadata;
    // Pending update transactions, oldest first; only the last may be unsealed.
    private final ArrayDeque<ContainerMetadataUpdateTransaction> transactions;
    // Id to assign to the next created transaction; monotonically increasing.
    private long nextTransactionId;

    //endregion

    //region Constructor

    /**
     * Creates a new instance of the OperationMetadataUpdater class.
     *
     * @param metadata The Container Metadata to update.
     * @throws NullPointerException If any of the arguments are null.
     */
    OperationMetadataUpdater(UpdateableContainerMetadata metadata) {
        this.metadata = Preconditions.checkNotNull(metadata, "metadata");
        this.traceObjectId = String.format("OperationMetadataUpdater[%d]", metadata.getContainerId());
        this.nextTransactionId = 0;
        this.transactions = new ArrayDeque<>();
    }

    //endregion

    //region ContainerMetadata Implementation

    // Reads below go through fromMetadata() so that uncommitted changes in the
    // active transaction (if any) are visible to callers.

    @Override
    public SegmentMetadata getStreamSegmentMetadata(long segmentId) {
        return fromMetadata(m -> m.getStreamSegmentMetadata(segmentId));
    }

    @Override
    public Collection<Long> getAllStreamSegmentIds() {
        return fromMetadata(ContainerMetadata::getAllStreamSegmentIds);
    }

    @Override
    public int getMaximumActiveSegmentCount() {
        return this.metadata.getMaximumActiveSegmentCount(); // This never changes.
    }

    @Override
    public int getActiveSegmentCount() {
        return fromMetadata(ContainerMetadata::getActiveSegmentCount);
    }

    @Override
    public long getStreamSegmentId(String segmentName, boolean updateLastUsed) {
        return fromMetadata(m -> m.getStreamSegmentId(segmentName, updateLastUsed));
    }

    @Override
    public int getContainerId() {
        return this.metadata.getContainerId(); // This never changes.
    }

    @Override
    public long getContainerEpoch() {
        return this.metadata.getContainerEpoch(); // This never changes.
    }

    @Override
    public boolean isRecoveryMode() {
        return this.metadata.isRecoveryMode(); // This never changes.
    }

    @Override
    public long getOperationSequenceNumber() {
        return fromMetadata(ContainerMetadata::getOperationSequenceNumber);
    }

    //endregion

    //region Processing

    /**
     * Seals the current UpdateTransaction (if any) and returns its id.
     *
     * @return The sealed UpdateTransaction Id.
     */
    long sealTransaction() {
        // Even if we have had no changes, still create a new (empty) transaction and seal it, since callers will expect
        // a different Id every time which can be committed/rolled back.
        getOrCreateTransaction().seal();

        // Always return nextTransactionId - 1, since otherwise we are at risk of returning a value we previously returned
        // (for example, if we rolled back a transaction).
        return this.nextTransactionId - 1;
    }

    /**
     * Commits all outstanding changes to the base Container Metadata.
     */
    void commitAll() {
        commit(MAX_TRANSACTION);
    }

    /**
     * Commits all outstanding changes to the base Container Metadata, up to and including the one for the given
     * UpdateTransaction.
     * @param upToTransactionId The Id of the UpdateTransaction up to which to commit.
     * @return The number of MetadataUpdateTransactions committed.
     */
    int commit(long upToTransactionId) {
        long traceId = LoggerHelpers.traceEnterWithContext(log, this.traceObjectId, "commit", upToTransactionId);

        // Commit every UpdateTransaction, in order, until we reach our transaction id.
        List<Long> commits = new ArrayList<>();
        while (!this.transactions.isEmpty() && this.transactions.peekFirst().getTransactionId() <= upToTransactionId) {
            ContainerMetadataUpdateTransaction txn = this.transactions.removeFirst();
            // Seal before committing so the transaction can accept no further changes.
            txn.seal();
            txn.commit(this.metadata);
            commits.add(txn.getTransactionId());
        }

        // Rebase the first remaining UpdateTransaction over to the current metadata (it was previously pointing to the
        // last committed UpdateTransaction).
        if (commits.size() > 0 && !this.transactions.isEmpty()) {
            this.transactions.peekFirst().rebase(this.metadata);
        }

        LoggerHelpers.traceLeave(log, this.traceObjectId, "commit", traceId, commits);
        return commits.size();
    }

    /**
     * Discards any outstanding changes, starting at the given UpdateTransaction forward.
     *
     * @param fromTransactionId The Id of the UpdateTransaction from which to rollback.
     */
    void rollback(long fromTransactionId) {
        long traceId = LoggerHelpers.traceEnterWithContext(log, this.traceObjectId, "rollback", fromTransactionId);
        List<Long> rolledBack = new ArrayList<>();
        // Discard from the newest transaction backwards until we pass the target id.
        while (!this.transactions.isEmpty() && this.transactions.peekLast().getTransactionId() >= fromTransactionId) {
            ContainerMetadataUpdateTransaction txn = this.transactions.removeLast();
            txn.seal();
            rolledBack.add(txn.getTransactionId());
        }

        // At this point, the transaction list is either empty or its last one is sealed; any further changes would
        // require creating a new transaction.
        LoggerHelpers.traceLeave(log, this.traceObjectId, "rollback", traceId, rolledBack);
    }

    /**
     * Gets the next available Operation Sequence Number. Atomically increments the value by 1 with every call.
     */
    long nextOperationSequenceNumber() {
        Preconditions.checkState(!isRecoveryMode(), "Cannot request new Operation Sequence Number in Recovery Mode.");
        return this.metadata.nextOperationSequenceNumber();
    }

    /**
     * Sets the operation sequence number in the current UpdateTransaction.
     */
    void setOperationSequenceNumber(long value) {
        Preconditions.checkState(isRecoveryMode(), "Can only set new Operation Sequence Number in Recovery Mode.");
        getOrCreateTransaction().setOperationSequenceNumber(value);
    }

    /**
     * Phase 1/2 of processing a Operation.
     *
     * If the given operation is a StorageOperation, the Operation is validated against the base Container Metadata and
     * the pending UpdateTransaction and it is updated accordingly (if needed).
     *
     * If the given operation is a MetadataCheckpointOperation, the current state of the metadata (including pending
     * UpdateTransactions) is serialized to it.
     *
     * For all other kinds of MetadataOperations (i.e., StreamSegmentMapOperation) this method only
     * does anything if the base Container Metadata is in Recovery Mode (in which case the given MetadataOperation) is
     * recorded in the pending UpdateTransaction.
     *
     * @param operation The operation to pre-process.
     * @throws ContainerException If the given operation was rejected given the current state of the container
     *                            metadata or most recent UpdateTransaction.
     * @throws StreamSegmentNotExistsException If the given operation was for a Segment that was is marked as deleted.
     * @throws StreamSegmentSealedException If the given operation was for a Segment that was previously sealed and
     *                                      that is incompatible with a sealed Segment.
     * @throws StreamSegmentMergedException If the given operation was for a Segment that was previously merged.
     * @throws NullPointerException If the operation is null.
     */
    void preProcessOperation(Operation operation) throws ContainerException, StreamSegmentException {
        log.trace("{}: PreProcess {}.", this.traceObjectId, operation);
        getOrCreateTransaction().preProcessOperation(operation);
    }

    /**
     * Phase 2/2 of processing an Operation. The Operation's effects are reflected in the pending UpdateTransaction.
     *
     * This method only has an effect on StorageOperations. It does nothing for MetadataOperations, regardless of whether
     * the base Container Metadata is in Recovery Mode or not.
     *
     * @param operation The operation to accept.
     * @throws MetadataUpdateException If the given operation was rejected given the current state of the metadata or
     *                                 most recent UpdateTransaction.
     * @throws NullPointerException If the operation is null.
     */
    void acceptOperation(Operation operation) throws MetadataUpdateException {
        log.trace("{}: Accept {}.", this.traceObjectId, operation);
        getOrCreateTransaction().acceptOperation(operation);
    }

    /**
     * Returns the result of the given function applied either to the current UpdateTransaction (if any), or the base metadata,
     * if no UpdateTransaction exists.
     *
     * @param getter The Function to apply.
     * @param <T>    Result type.
     * @return The result of the given Function.
     */
    private <T> T fromMetadata(Function<ContainerMetadata, T> getter) {
        ContainerMetadataUpdateTransaction txn = getActiveTransaction();
        return getter.apply(txn == null ? this.metadata : txn);
    }

    /**
     * Returns the last (newest) transaction if it is still open for changes,
     * or null when there is no usable (unsealed) transaction.
     */
    private ContainerMetadataUpdateTransaction getActiveTransaction() {
        if (this.transactions.isEmpty()) {
            return null;
        }

        ContainerMetadataUpdateTransaction last = this.transactions.peekLast();
        if (last.isSealed()) {
            return null;
        } else {
            return last;
        }
    }

    /**
     * Returns the last transaction if it is still unsealed; otherwise creates a
     * new transaction (based on the newest pending transaction, or on the base
     * metadata when none is pending) and appends it to the queue.
     */
    private ContainerMetadataUpdateTransaction getOrCreateTransaction() {
        if (this.transactions.isEmpty() || this.transactions.peekLast().isSealed()) {
            // No transactions or last transaction is sealed. Create a new one.
            ContainerMetadata previous = this.metadata;
            if (!this.transactions.isEmpty()) {
                previous = this.transactions.peekLast();
            }

            ContainerMetadataUpdateTransaction txn = new ContainerMetadataUpdateTransaction(previous, this.metadata, this.nextTransactionId);
            this.nextTransactionId++;
            this.transactions.addLast(txn);
        }

        return this.transactions.peekLast();
    }

    //endregion
}
| |
/*
* Copyright 2013 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.wallet;
import org.bitcoinj.core.Coin;
import org.bitcoinj.core.ECKey;
import org.bitcoinj.core.ECKey.ECDSASignature;
import org.bitcoinj.core.NetworkParameters;
import org.bitcoinj.core.Transaction;
import org.bitcoinj.core.TransactionConfidence;
import org.bitcoinj.core.TransactionInput;
import org.bitcoinj.core.TransactionOutput;
import org.bitcoinj.core.Wallet;
import org.bitcoinj.crypto.TransactionSignature;
import org.bitcoinj.script.ScriptChunk;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.util.List;
import static com.google.common.base.Preconditions.checkState;
/**
 * <p>The default risk analysis. Currently, it only is concerned with whether a tx/dependency is non-final or not, and
 * whether a tx/dependency violates the dust rules. Outside of specialised protocols you should not encounter non-final
 * transactions.</p>
 */
public class DefaultRiskAnalysis implements RiskAnalysis {
    private static final Logger log = LoggerFactory.getLogger(DefaultRiskAnalysis.class);

    /**
     * Any standard output smaller than this value (in satoshis) will be considered risky, as it's most likely be
     * rejected by the network. This is usually the same as {@link Transaction#MIN_NONDUST_OUTPUT} but can be
     * different when the fee is about to change in Bitcoin Core.
     */
    public static final Coin MIN_ANALYSIS_NONDUST_OUTPUT = Transaction.MIN_NONDUST_OUTPUT;

    protected final Transaction tx;
    protected final List<Transaction> dependencies;
    @Nullable protected final Wallet wallet;

    // Set by the analysis phases to record the offending transaction, if any.
    private Transaction nonStandard;
    protected Transaction nonFinal;
    // Guards against analyze() being invoked more than once.
    protected boolean analyzed;

    private DefaultRiskAnalysis(Wallet wallet, Transaction tx, List<Transaction> dependencies) {
        this.tx = tx;
        this.dependencies = dependencies;
        this.wallet = wallet;
    }

    /**
     * Runs the analysis. May only be called once per instance; non-finality
     * takes precedence over non-standardness.
     */
    @Override
    public Result analyze() {
        checkState(!analyzed);
        analyzed = true;

        Result result = analyzeIsFinal();
        if (result != null && result != Result.OK)
            return result;

        return analyzeIsStandard();
    }

    /**
     * Checks the tx and all its dependencies for finality against the wallet's
     * last seen block; returns null when no wallet context is available.
     */
    @Nullable
    private Result analyzeIsFinal() {
        // Transactions we create ourselves are, by definition, not at risk of double spending against us.
        if (tx.getConfidence().getSource() == TransactionConfidence.Source.SELF)
            return Result.OK;

        if (wallet == null)
            return null;

        final int height = wallet.getLastBlockSeenHeight();
        final long time = wallet.getLastBlockSeenTimeSecs();
        // If the transaction has a lock time specified in blocks, we consider that if the tx would become final in the
        // next block it is not risky (as it would confirm normally).
        final int adjustedHeight = height + 1;

        if (!tx.isFinal(adjustedHeight, time)) {
            nonFinal = tx;
            return Result.NON_FINAL;
        }
        for (Transaction dep : dependencies) {
            if (!dep.isFinal(adjustedHeight, time)) {
                nonFinal = dep;
                return Result.NON_FINAL;
            }
        }
        return Result.OK;
    }

    /**
     * The reason a transaction is considered non-standard, returned by
     * {@link #isStandard(org.bitcoinj.core.Transaction)}.
     */
    public enum RuleViolation {
        NONE,
        VERSION,
        DUST,
        SHORTEST_POSSIBLE_PUSHDATA,
        NONEMPTY_STACK, // Not yet implemented (for post 0.12)
        SIGNATURE_CANONICAL_ENCODING
    }

    /**
     * <p>Checks if a transaction is considered "standard" by Bitcoin Core's IsStandardTx and AreInputsStandard
     * functions.</p>
     *
     * <p>Note that this method currently only implements a minimum of checks. More to be added later.</p>
     */
    public static RuleViolation isStandard(Transaction tx) {
        // TODO: Finish this function off.
        // Simplified from (v > 1 || v < 1): only version 1 is standard here.
        if (tx.getVersion() != 1) {
            log.warn("TX considered non-standard due to unknown version number {}", tx.getVersion());
            return RuleViolation.VERSION;
        }

        final List<TransactionOutput> outputs = tx.getOutputs();
        for (int i = 0; i < outputs.size(); i++) {
            TransactionOutput output = outputs.get(i);
            RuleViolation violation = isOutputStandard(output);
            if (violation != RuleViolation.NONE) {
                log.warn("TX considered non-standard due to output {} violating rule {}", i, violation);
                return violation;
            }
        }

        final List<TransactionInput> inputs = tx.getInputs();
        for (int i = 0; i < inputs.size(); i++) {
            TransactionInput input = inputs.get(i);
            RuleViolation violation = isInputStandard(input);
            if (violation != RuleViolation.NONE) {
                log.warn("TX considered non-standard due to input {} violating rule {}", i, violation);
                return violation;
            }
        }

        return RuleViolation.NONE;
    }

    /**
     * Checks the output to see if the script violates a standardness rule. Not complete.
     */
    public static RuleViolation isOutputStandard(TransactionOutput output) {
        if (output.getValue().compareTo(MIN_ANALYSIS_NONDUST_OUTPUT) < 0)
            return RuleViolation.DUST;
        for (ScriptChunk chunk : output.getScriptPubKey().getChunks()) {
            if (chunk.isPushData() && !chunk.isShortestPossiblePushData())
                return RuleViolation.SHORTEST_POSSIBLE_PUSHDATA;
        }
        return RuleViolation.NONE;
    }

    /** Checks if the given input passes some of the AreInputsStandard checks. Not complete. */
    public static RuleViolation isInputStandard(TransactionInput input) {
        for (ScriptChunk chunk : input.getScriptSig().getChunks()) {
            if (chunk.data != null && !chunk.isShortestPossiblePushData())
                return RuleViolation.SHORTEST_POSSIBLE_PUSHDATA;
            if (chunk.isPushData()) {
                ECDSASignature signature;
                try {
                    signature = ECKey.ECDSASignature.decodeFromDER(chunk.data);
                } catch (RuntimeException x) {
                    // Doesn't look like a signature.
                    signature = null;
                }
                if (signature != null) {
                    // Both the DER encoding and the S-value must be canonical.
                    if (!TransactionSignature.isEncodingCanonical(chunk.data))
                        return RuleViolation.SIGNATURE_CANONICAL_ENCODING;
                    if (!signature.isCanonical())
                        return RuleViolation.SIGNATURE_CANONICAL_ENCODING;
                }
            }
        }
        return RuleViolation.NONE;
    }

    /** Applies the standardness rules to the tx and all its dependencies (mainnet only). */
    private Result analyzeIsStandard() {
        // The IsStandard rules don't apply on testnet, because they're just a safety mechanism and we don't want to
        // crush innovation with valueless test coins.
        if (wallet != null && !wallet.getNetworkParameters().getId().equals(NetworkParameters.ID_MAINNET))
            return Result.OK;

        RuleViolation ruleViolation = isStandard(tx);
        if (ruleViolation != RuleViolation.NONE) {
            nonStandard = tx;
            return Result.NON_STANDARD;
        }

        for (Transaction dep : dependencies) {
            ruleViolation = isStandard(dep);
            if (ruleViolation != RuleViolation.NONE) {
                nonStandard = dep;
                return Result.NON_STANDARD;
            }
        }

        return Result.OK;
    }

    /** Returns the transaction that was found to be non-standard, or null. */
    @Nullable
    public Transaction getNonStandard() {
        return nonStandard;
    }

    /** Returns the transaction that was found to be non-final, or null. */
    @Nullable
    public Transaction getNonFinal() {
        return nonFinal;
    }

    @Override
    public String toString() {
        if (!analyzed)
            return "Pending risk analysis for " + tx.getHashAsString();
        else if (nonFinal != null)
            return "Risky due to non-finality of " + nonFinal.getHashAsString();
        else if (nonStandard != null)
            return "Risky due to non-standard tx " + nonStandard.getHashAsString();
        else
            return "Non-risky";
    }

    /** Creates per-transaction analysis instances; stateless. */
    public static class Analyzer implements RiskAnalysis.Analyzer {
        @Override
        public DefaultRiskAnalysis create(Wallet wallet, Transaction tx, List<Transaction> dependencies) {
            return new DefaultRiskAnalysis(wallet, tx, dependencies);
        }
    }

    // Made final: a mutable public static field could be reassigned by any
    // caller; the shared factory instance is a constant.
    public static final Analyzer FACTORY = new Analyzer();
}
| |
package com.univ.helsinki.app.db;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.SharedPreferences;
import com.univ.helsinki.app.adapter.FeedEndPointAdapter;
import com.univ.helsinki.app.adapter.RecentActivityAdapter;
import com.univ.helsinki.app.core.DeviceFeed;
import com.univ.helsinki.app.core.SensorFeed;
import com.univ.helsinki.app.util.Constant;
/**
 * Singleton registry for the app's feed data: recent feeds, feed end points, device and
 * sensor feed lists, plus the SQLite data sources and list adapters that render them.
 *
 * <p>Call {@link #inti(Context)} once (from the launcher activity) before using any of the
 * data-source backed methods; call {@link #openDataSource()} / {@link #closeDataSource()}
 * from the activity's onResume/onPause, and {@link #destory()} when tearing down.
 */
public class FeedResource {

	/** App-wide preferences handle; assigned in {@link #inti(Context)}. */
	private SharedPreferences mSharedPrefs;

	/** Maps Android sensor type ids (mirroring android.hardware.Sensor.TYPE_* values) to symbolic names. */
	@SuppressLint("UseSparseArrays")
	public Map<Integer, String> mAllSensorMap = new HashMap<Integer, String>();

	/** Maps sensor preference keys to human-readable display names. */
	public static Map<String, String> sAllSensorKeyNameMap = new HashMap<String, String>();

	static {
		// Populated in a static initializer (previously inside the singleton constructor) so the
		// static map can never be observed empty by code running before getInstance() is called.
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_ACCELEROMETER_KEY, "ACCELEROMETER");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_MAGNETIC_FIELD_KEY, "MAGNETIC FIELD");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_ORIENTATION_KEY, "ORIENTATION");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_GYROSCOPE_KEY, "GYROSCOPE");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_LIGHT_KEY, "LIGHT");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_PRESSURE_KEY, "PRESSURE");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_TEMPERATURE_KEY, "TEMPERATURE");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_PROXIMITY_KEY, "PROXIMITY");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_GRAVITY_KEY, "GRAVITY");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_LINEAR_ACCELERATION_KEY, "LINEAR ACCELERATION");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_ROTATION_VECTOR_KEY, "ROTATION VECTOR");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_RELATIVE_HUMIDITY_KEY, "RELATIVE HUMIDITY");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_AMBIENT_TEMPERATURE_KEY, "AMBIENT TEMPERATURE");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_MAGNETIC_FIELD_UNCALIBRATED_KEY, "MAGNETIC FIELD UNCALIBRATED");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_GAME_ROTATION_VECTOR_KEY, "GAME ROTATION VECTOR");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_GYROSCOPE_UNCALIBRATED_KEY, "GYROSCOPE UNCALIBRATED");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_SIGNIFICANT_MOTION_KEY, "SIGNIFICANT MOTION");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_STEP_DETECTOR_KEY, "STEP DETECTOR");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_STEP_COUNTER_KEY, "STEP COUNTER");
		sAllSensorKeyNameMap.put(Constant.PREF_TYPE_GEOMAGNETIC_ROTATION_VECTOR_KEY, "GEOMAGNETIC ROTATION VECTOR");
	}

	private List<SensorFeed> mSensorFeedList;
	private DeviceFeed mDeviceFeed;
	private List<Feed> mRecentFeedList;
	private List<FeedEndPoint> mFeedEndPointList;
	private DataSourceRecentFeed mDatasourceRecentFeed;
	private DataSourceFeedEndPoint mDatasourceFeedEndPoint;
	// Adapters set by the hosting activities; notified whenever a backing list changes.
	private RecentActivityAdapter mRecentAdapter;
	private FeedEndPointAdapter mFeedEndPointAdapter;
	private DatabaseHelper mDbHelper;

	/** Lazily created singleton instance; creation is guarded by the synchronized accessor. */
	private static FeedResource INSTANCE;

	/** Tracks which sensors the user has enabled, keyed by preference key. */
	private Map<String, Boolean> mSelectedSensorMap = new HashMap<String, Boolean>();

	private FeedResource() {
		this.mRecentFeedList = new ArrayList<Feed>();
		this.setFeedEndPointList(new ArrayList<FeedEndPoint>());
		setDeviceFeed(new DeviceFeed());
		setSensorFeedList(new ArrayList<SensorFeed>());
		// Android sensor type names by numeric id (matches android.hardware.Sensor constants).
		mAllSensorMap.put(1, "TYPE_ACCELEROMETER");
		mAllSensorMap.put(2, "TYPE_MAGNETIC_FIELD");
		mAllSensorMap.put(3, "TYPE_ORIENTATION");
		mAllSensorMap.put(4, "TYPE_GYROSCOPE");
		mAllSensorMap.put(5, "TYPE_LIGHT");
		mAllSensorMap.put(6, "TYPE_PRESSURE");
		mAllSensorMap.put(7, "TYPE_TEMPERATURE");
		mAllSensorMap.put(8, "TYPE_PROXIMITY");
		mAllSensorMap.put(9, "TYPE_GRAVITY");
		mAllSensorMap.put(10, "TYPE_LINEAR_ACCELERATION");
		mAllSensorMap.put(11, "TYPE_ROTATION_VECTOR");
		mAllSensorMap.put(12, "TYPE_RELATIVE_HUMIDITY");
		mAllSensorMap.put(13, "TYPE_AMBIENT_TEMPERATURE");
		mAllSensorMap.put(14, "TYPE_MAGNETIC_FIELD_UNCALIBRATED");
		mAllSensorMap.put(15, "TYPE_GAME_ROTATION_VECTOR");
		mAllSensorMap.put(16, "TYPE_GYROSCOPE_UNCALIBRATED");
		mAllSensorMap.put(17, "TYPE_SIGNIFICANT_MOTION");
		mAllSensorMap.put(18, "TYPE_STEP_DETECTOR");
		mAllSensorMap.put(19, "TYPE_STEP_COUNTER");
		mAllSensorMap.put(20, "TYPE_GEOMAGNETIC_ROTATION_VECTOR");
	}

	/** Returns the process-wide singleton, creating it on first use. */
	public static synchronized FeedResource getInstance() {
		if (INSTANCE == null) {
			INSTANCE = new FeedResource();
		}
		return INSTANCE;
	}

	/** Returns the SQLite helper; null until {@link #inti(Context)} has been called. */
	public DatabaseHelper getDbHelper() {
		return this.mDbHelper;
	}

	public List<SensorFeed> getSensorFeedList() {
		return mSensorFeedList;
	}

	private void setSensorFeedList(List<SensorFeed> sensorFeedList) {
		this.mSensorFeedList = sensorFeedList;
	}

	public DeviceFeed getDeviceFeed() {
		return mDeviceFeed;
	}

	private void setDeviceFeed(DeviceFeed mDeviceFeed) {
		this.mDeviceFeed = mDeviceFeed;
	}

	/*
	 * Manage Call for Recent Feeds
	 */

	/**
	 * Reloads all recent feeds from the database and returns the refreshed list.
	 * NOTE(review): throws NullPointerException if called before {@link #inti(Context)} — confirm
	 * callers always initialize first.
	 */
	public List<Feed> getAllFeed() {
		this.mRecentFeedList = this.mDatasourceRecentFeed.getAllRecentFeed();
		return this.mRecentFeedList;
	}

	/** Inserts a feed at the given list position and refreshes the recent-feed adapter. */
	public void addRecentFeed(int location, Feed feed) {
		this.mRecentFeedList.add(location, feed);
		this.mRecentAdapter.notifyDataChanged();
	}

	/** Persists a new recent feed and returns the stored row as a {@link Feed}. */
	public Feed createRecentFeed(String title, String content) {
		return this.mDatasourceRecentFeed.createRecentFeed(title, content);
	}

	/** Deletes the feed at the given list position from the database, the list, and the UI. */
	public void removeRecentFeed(int location) {
		long id = this.mRecentFeedList.get(location).getId();
		if (mDatasourceRecentFeed != null) {
			mDatasourceRecentFeed.delete(id);
		}
		this.mRecentFeedList.remove(location);
		this.mRecentAdapter.notifyDataChanged();
	}

	/*
	 * Manage Call for Feed End Points
	 */

	/** Persists a feed end point to the database. */
	public void createFeedEndPoint(FeedEndPoint feedEndPoint) {
		this.mDatasourceFeedEndPoint.createFeedEndPoint(feedEndPoint);
	}

	/** Deletes the end point at the given list position from the database, the list, and the UI. */
	public void removeFeedEndPoint(int location) {
		long id = this.mFeedEndPointList.get(location).getId();
		if (mDatasourceFeedEndPoint != null) {
			mDatasourceFeedEndPoint.delete(id);
		}
		mFeedEndPointList.remove(location);
		this.mFeedEndPointAdapter.notifyDataChanged();
	}

	/** Adds an end point to the in-memory list, persists it, and refreshes the adapter. */
	public void addFeedEndPoint(FeedEndPoint endPoint) {
		// save to the list
		this.mFeedEndPointList.add(endPoint);
		// save to the db
		this.createFeedEndPoint(endPoint);
		// notify adapter
		this.mFeedEndPointAdapter.notifyDataChanged();
	}

	/** Reloads all feed end points from the database and returns the refreshed list. */
	public List<FeedEndPoint> getAllFeedEndPoint() {
		this.mFeedEndPointList = this.mDatasourceFeedEndPoint.getAllFeedEndPoint();
		return this.mFeedEndPointList;
	}

	public List<FeedEndPoint> getFeedEndPointList() {
		return mFeedEndPointList;
	}

	private void setFeedEndPointList(List<FeedEndPoint> mFeedEndPointList) {
		this.mFeedEndPointList = mFeedEndPointList;
	}

	/**
	 * Closes both data sources and clears the cached lists.
	 * NOTE: method name is a long-standing typo for "destroy", kept for source compatibility.
	 */
	public void destory() {
		if (mDatasourceRecentFeed != null) {
			mDatasourceRecentFeed.close();
		}
		if (mDatasourceFeedEndPoint != null) {
			mDatasourceFeedEndPoint.close();
		}
		this.mRecentFeedList.clear();
		this.mFeedEndPointList.clear();
	}

	/**
	 * Initializes shared preferences, the database helper, and both data sources.
	 * Must be called once before any database-backed method.
	 * NOTE: method name is a long-standing typo for "init", kept for source compatibility.
	 */
	public void inti(Context context) {
		this.mSharedPrefs = context.getSharedPreferences(
				Constant.SHARED_PREFS_FILENAME, Context.MODE_PRIVATE);
		// Mark this as a first launch; commit() is intentionally synchronous here so the flag
		// is guaranteed on disk before initialization continues.
		getSharedPrefs().edit().putBoolean(
				Constant.SHARED_PREFS_KEY_ISFIRST_LAUNCH, true).commit();
		this.mDbHelper = new DatabaseHelper(context);
		this.mDatasourceRecentFeed = new DataSourceRecentFeed(this.mDbHelper);
		this.mDatasourceRecentFeed.open();
		this.mDatasourceFeedEndPoint = new DataSourceFeedEndPoint(this.mDbHelper);
		this.mDatasourceFeedEndPoint.open();
	}

	/**
	 * Should be called in Activity#onResume().
	 */
	public void openDataSource() {
		if (mDatasourceRecentFeed != null) {
			mDatasourceRecentFeed.open();
		}
		if (mDatasourceFeedEndPoint != null) {
			mDatasourceFeedEndPoint.open();
		}
	}

	/**
	 * Should be called in Activity#onPause().
	 */
	public void closeDataSource() {
		if (mDatasourceRecentFeed != null) {
			mDatasourceRecentFeed.close();
		}
		if (mDatasourceFeedEndPoint != null) {
			mDatasourceFeedEndPoint.close();
		}
	}

	/**
	 * Set Adapters for Recent and FeedEnd Point
	 */
	public void setRecentFeedAdapter(RecentActivityAdapter adapter) {
		this.mRecentAdapter = adapter;
	}

	public void setFeedEndPointAdapter(FeedEndPointAdapter adapter) {
		this.mFeedEndPointAdapter = adapter;
	}

	public Map<String, Boolean> getSelectedSensorMap() {
		return mSelectedSensorMap;
	}

	public void setSelectedSensorMap(Map<String, Boolean> mSelectedSensorMap) {
		this.mSelectedSensorMap = mSelectedSensorMap;
	}

	/** Returns the preferences handle; null until {@link #inti(Context)} has been called. */
	public SharedPreferences getSharedPrefs() {
		return mSharedPrefs;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.adapter.geode.rel;
import org.apache.calcite.jdbc.CalciteConnection;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.test.CalciteAssert;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.Region;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/** Test with different types of data, like BOOLEAN, TIME, TIMESTAMP. */
/** Test with different types of data, like BOOLEAN, TIME, TIMESTAMP. */
class GeodeAllDataTypesTest extends AbstractGeodeTest {

  /** Creates the Geode region under test and loads it with the three fixture rows. */
  @BeforeAll
  public static void setUp() {
    final Cache cache = POLICY.cache();
    final Region<?, ?> region =
        cache.<String, Object>createRegionFactory()
            .create("allDataTypesRegion");
    final List<Map<String, Object>> mapList = createMapList();
    new JsonLoader(region).loadMapList(mapList);
  }

  /** Fixture rows covering BOOLEAN, DATE, TIME, TIMESTAMP, string and float columns. */
  private static List<Map<String, Object>> createMapList() {
    return ImmutableList.of(
        ImmutableMap.<String, Object>builder()
            .put("booleanValue", true)
            .put("dateValue", Date.valueOf("2018-02-03"))
            .put("timeValue", Time.valueOf("02:22:23"))
            .put("timestampValue", Timestamp.valueOf("2018-02-03 02:22:33"))
            .put("stringValue", "abc")
            .put("floatValue", 1.5678)
            .build(),
        ImmutableMap.<String, Object>builder()
            .put("booleanValue", false)
            .put("dateValue", Date.valueOf("2018-02-04"))
            .put("timeValue", Time.valueOf("03:22:23"))
            .put("timestampValue", Timestamp.valueOf("2018-02-04 04:22:33"))
            .put("stringValue", "def")
            .put("floatValue", 3.5678)
            .build(),
        ImmutableMap.<String, Object>builder()
            .put("booleanValue", true)
            .put("dateValue", Date.valueOf("2018-02-05"))
            .put("timeValue", Time.valueOf("04:22:23"))
            .put("timestampValue", Timestamp.valueOf("2018-02-05 04:22:33"))
            .put("stringValue", "ghi")
            .put("floatValue", 8.9267)
            .build());
  }

  /** Connection factory that exposes the Geode region under the {@code geode} schema. */
  private CalciteAssert.ConnectionFactory newConnectionFactory() {
    return new CalciteAssert.ConnectionFactory() {
      @Override public Connection createConnection() throws SQLException {
        final Connection connection = DriverManager.getConnection("jdbc:calcite:lex=JAVA");
        final SchemaPlus root = connection.unwrap(CalciteConnection.class).getRootSchema();
        root.add("geode",
            new GeodeSchema(
                POLICY.cache(),
                Collections.singleton("allDataTypesRegion")));
        return connection;
      }
    };
  }

  private CalciteAssert.AssertThat calciteAssert() {
    return CalciteAssert.that()
        .with(newConnectionFactory());
  }

  /**
   * Runs {@code sql}, asserts it returns {@code expectedRowCount} rows, and asserts the
   * generated Geode OQL equals {@code expectedOql}. Shared by every test below to avoid
   * repeating the same assertion chain.
   */
  private void assertSqlProducesOql(String sql, int expectedRowCount, String expectedOql) {
    calciteAssert()
        .query(sql)
        .returnsCount(expectedRowCount)
        .queryContains(GeodeAssertions.query(expectedOql));
  }

  @Test void testSqlSingleBooleanWhereFilter() {
    assertSqlProducesOql(
        "SELECT booleanValue as booleanValue "
            + "FROM geode.allDataTypesRegion WHERE booleanValue = true",
        2,
        "SELECT booleanValue AS booleanValue FROM /allDataTypesRegion "
            + "WHERE booleanValue = true");
  }

  @Test void testSqlBooleanColumnFilter() {
    // A bare boolean column in WHERE must be rewritten as "= true" in OQL.
    assertSqlProducesOql(
        "SELECT booleanValue as booleanValue "
            + "FROM geode.allDataTypesRegion WHERE booleanValue",
        2,
        "SELECT booleanValue AS booleanValue FROM /allDataTypesRegion "
            + "WHERE booleanValue = true");
  }

  @Test void testSqlBooleanColumnNotFilter() {
    // "NOT booleanValue" must be rewritten as "= false" in OQL.
    assertSqlProducesOql(
        "SELECT booleanValue as booleanValue "
            + "FROM geode.allDataTypesRegion WHERE not booleanValue",
        1,
        "SELECT booleanValue AS booleanValue FROM /allDataTypesRegion "
            + "WHERE booleanValue = false");
  }

  @Test void testSqlMultipleBooleanWhereFilter() {
    assertSqlProducesOql(
        "SELECT booleanValue as booleanValue "
            + "FROM geode.allDataTypesRegion WHERE booleanValue = true OR booleanValue = false",
        3,
        "SELECT booleanValue AS booleanValue FROM /allDataTypesRegion "
            + "WHERE booleanValue = true OR booleanValue = false");
  }

  @Test void testSqlWhereWithMultipleOrForLiteralFields() {
    // OR chains over the same literal column collapse to IN SET; "= null" disjuncts drop out.
    assertSqlProducesOql(
        "SELECT stringValue "
            + "FROM geode.allDataTypesRegion WHERE (stringValue = 'abc' OR stringValue = 'def') OR "
            + "(floatValue = 1.5678 OR floatValue = null) OR "
            + "(booleanValue = true OR booleanValue = false OR booleanValue = null)",
        3,
        "SELECT stringValue AS stringValue "
            + "FROM /allDataTypesRegion WHERE "
            + "stringValue IN SET('abc', 'def') OR floatValue = 1.5678 "
            + "OR booleanValue = true OR booleanValue = false");
  }

  @Test void testSqlSingleDateWhereFilter() {
    // Exercise =, > and < against a DATE literal.
    assertSqlProducesOql(
        "SELECT dateValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE dateValue = DATE '2018-02-03'",
        1,
        "SELECT dateValue AS dateValue "
            + "FROM /allDataTypesRegion "
            + "WHERE dateValue = DATE '2018-02-03'");
    assertSqlProducesOql(
        "SELECT dateValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE dateValue > DATE '2018-02-03'",
        2,
        "SELECT dateValue AS dateValue "
            + "FROM /allDataTypesRegion "
            + "WHERE dateValue > DATE '2018-02-03'");
    assertSqlProducesOql(
        "SELECT dateValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE dateValue < DATE '2018-02-03'",
        0,
        "SELECT dateValue AS dateValue "
            + "FROM /allDataTypesRegion "
            + "WHERE dateValue < DATE '2018-02-03'");
  }

  @Test void testSqlMultipleDateWhereFilter() {
    assertSqlProducesOql(
        "SELECT dateValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE dateValue = DATE '2018-02-03'\n"
            + " OR dateValue = DATE '2018-02-04'",
        2,
        "SELECT dateValue AS dateValue "
            + "FROM /allDataTypesRegion "
            + "WHERE dateValue IN SET(DATE '2018-02-03',"
            + " DATE '2018-02-04')");
  }

  @Test void testSqlSingleTimeWhereFilter() {
    // Exercise =, > and < against a TIME literal.
    assertSqlProducesOql(
        "SELECT timeValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE timeValue = TIME '02:22:23'",
        1,
        "SELECT timeValue AS timeValue "
            + "FROM /allDataTypesRegion "
            + "WHERE timeValue = TIME '02:22:23'");
    assertSqlProducesOql(
        "SELECT timeValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE timeValue > TIME '02:22:23'",
        2,
        "SELECT timeValue AS timeValue "
            + "FROM /allDataTypesRegion "
            + "WHERE timeValue > TIME '02:22:23'");
    assertSqlProducesOql(
        "SELECT timeValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE timeValue < TIME '02:22:23'",
        0,
        "SELECT timeValue AS timeValue "
            + "FROM /allDataTypesRegion "
            + "WHERE timeValue < TIME '02:22:23'");
  }

  @Test void testSqlMultipleTimeWhereFilter() {
    assertSqlProducesOql(
        "SELECT timeValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE timeValue = TIME '02:22:23'\n"
            + " OR timeValue = TIME '03:22:23'",
        2,
        "SELECT timeValue AS timeValue "
            + "FROM /allDataTypesRegion "
            + "WHERE timeValue IN SET(TIME '02:22:23', TIME '03:22:23')");
  }

  @Test void testSqlSingleTimestampWhereFilter() {
    // Exercise =, > and < against a TIMESTAMP literal.
    assertSqlProducesOql(
        "SELECT timestampValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE timestampValue = TIMESTAMP '2018-02-03 02:22:33'",
        1,
        "SELECT timestampValue AS timestampValue "
            + "FROM /allDataTypesRegion "
            + "WHERE timestampValue = TIMESTAMP '2018-02-03 02:22:33'");
    assertSqlProducesOql(
        "SELECT timestampValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE timestampValue > TIMESTAMP '2018-02-03 02:22:33'",
        2,
        "SELECT timestampValue AS timestampValue "
            + "FROM /allDataTypesRegion "
            + "WHERE timestampValue > TIMESTAMP '2018-02-03 02:22:33'");
    assertSqlProducesOql(
        "SELECT timestampValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE timestampValue < TIMESTAMP '2018-02-03 02:22:33'",
        0,
        "SELECT timestampValue AS timestampValue "
            + "FROM /allDataTypesRegion "
            + "WHERE timestampValue < TIMESTAMP '2018-02-03 02:22:33'");
  }

  @Test void testSqlMultipleTimestampWhereFilter() {
    assertSqlProducesOql(
        "SELECT timestampValue\n"
            + "FROM geode.allDataTypesRegion\n"
            + "WHERE timestampValue = TIMESTAMP '2018-02-03 02:22:33'\n"
            + " OR timestampValue = TIMESTAMP '2018-02-05 04:22:33'",
        2,
        "SELECT timestampValue AS timestampValue "
            + "FROM /allDataTypesRegion "
            + "WHERE timestampValue IN SET("
            + "TIMESTAMP '2018-02-03 02:22:33', "
            + "TIMESTAMP '2018-02-05 04:22:33')");
  }

  @Test void testSqlWhereWithMultipleOrForAllFields() {
    assertSqlProducesOql(
        "SELECT stringValue "
            + "FROM geode.allDataTypesRegion WHERE (stringValue = 'abc' OR stringValue = 'def') OR "
            + "(floatValue = 1.5678 OR floatValue = null) OR "
            + "(dateValue = DATE '2018-02-05' OR dateValue = DATE '2018-02-06' ) OR "
            + "(timeValue = TIME '03:22:23' OR timeValue = TIME '07:22:23') OR "
            + "(timestampValue = TIMESTAMP '2018-02-05 04:22:33' OR "
            + "timestampValue = TIMESTAMP '2017-02-05 04:22:33') OR "
            + "(booleanValue = true OR booleanValue = false OR booleanValue = null)",
        3,
        "SELECT stringValue AS stringValue "
            + "FROM /allDataTypesRegion WHERE "
            + "stringValue IN SET('abc', 'def') OR floatValue = 1.5678 OR dateValue "
            + "IN SET(DATE '2018-02-05', DATE '2018-02-06') OR timeValue "
            + "IN SET(TIME '03:22:23', TIME '07:22:23') OR timestampValue "
            + "IN SET(TIMESTAMP '2018-02-05 04:22:33', TIMESTAMP '2017-02-05 04:22:33') "
            + "OR booleanValue = true OR booleanValue = false");
  }
}
| |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2021 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.big.data.impl.shim;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.pentaho.big.data.bundles.impl.shim.hbase.ByteConversionUtilImpl;
import com.pentaho.big.data.bundles.impl.shim.hbase.HBaseConnectionImpl;
import com.pentaho.big.data.bundles.impl.shim.hbase.ResultFactoryImpl;
import com.pentaho.big.data.bundles.impl.shim.hbase.mapping.ColumnFilterFactoryImpl;
import com.pentaho.big.data.bundles.impl.shim.hbase.mapping.MappingFactoryImpl;
import com.pentaho.big.data.bundles.impl.shim.hbase.meta.HBaseValueMetaInterfaceFactoryImpl;
import com.pentaho.big.data.bundles.impl.shim.hdfs.HadoopFileSystemImpl;
import org.apache.avro.Conversion;
import org.apache.avro.mapred.AvroWrapper;
import org.apache.commons.io.FileUtils;
import org.apache.commons.vfs2.FileObject;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.OozieClientException;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.util.PropertiesUtil;
import org.apache.pig.tools.grunt.GruntParser;
import org.apache.pig.tools.parameters.ParameterSubstitutionPreprocessor;
import org.apache.sqoop.Sqoop;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Filter;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.wiring.BundleWire;
import org.osgi.framework.wiring.BundleWiring;
import org.pentaho.big.data.impl.shim.oozie.OozieJobInfoDelegate;
import org.pentaho.big.data.impl.shim.oozie.OozieJobInfoImpl;
import org.pentaho.big.data.impl.shim.pig.PigResultImpl;
import org.pentaho.big.data.impl.shim.pig.WriterAppenderManager;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.hadoop.shim.api.HadoopClientServices;
import org.pentaho.hadoop.shim.api.HadoopClientServicesException;
import org.pentaho.hadoop.shim.api.cluster.NamedCluster;
import org.pentaho.hadoop.shim.api.hbase.ByteConversionUtil;
import org.pentaho.hadoop.shim.api.hbase.HBaseConnection;
import org.pentaho.hadoop.shim.api.hbase.ResultFactory;
import org.pentaho.hadoop.shim.api.hdfs.HadoopFileSystem;
import org.pentaho.hadoop.shim.api.internal.Configuration;
import org.pentaho.hadoop.shim.api.internal.hbase.HBaseBytesUtilShim;
import org.pentaho.hadoop.shim.api.oozie.OozieJobInfo;
import org.pentaho.hadoop.shim.api.pig.PigResult;
import org.pentaho.hadoop.shim.common.ShimUtils;
import org.pentaho.hadoop.shim.spi.HadoopShim;
import org.pentaho.hbase.shim.common.CommonHBaseBytesUtil;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Reader;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import static org.apache.oozie.client.OozieClient.APP_PATH;
import static org.apache.oozie.client.OozieClient.BUNDLE_APP_PATH;
import static org.apache.oozie.client.OozieClient.COORDINATOR_APP_PATH;
public class HadoopClientServicesImpl implements HadoopClientServices {
private static final String[] EMPTY_STRING_ARRAY = new String[ 0 ];
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger( HadoopClientServicesImpl.class );
// System property that makes Sqoop throw exceptions instead of returning status 1.
public static final String SQOOP_THROW_ON_ERROR = "sqoop.throwOnError";
// System property naming an alternative classpath consumed by the Hadoop/Sqoop launcher.
private static final String ALT_CLASSPATH = "hadoop.alt.classpath";
// Hadoop configuration key listing extra jars shipped with a submitted job.
private static final String TMPJARS = "tmpjars";
protected NamedCluster namedCluster;
// Null when the named cluster has no Oozie URL configured (see constructor).
protected final OozieClient oozieClient;
protected final HadoopShim hadoopShim;
private BundleContext bundleContext;
// Bundle data directories scanned for jars; rebuilt by loadBundleFilesLocations().
private List<String> sqoopBundleFileLocations = new ArrayList<>();
private final WriterAppenderManager.Factory writerAppenderManagerFactory;
protected final HBaseBytesUtilShim bytesUtil;
/**
 * Base names of the external jars Pig execution depends on; used to locate the
 * matching jar files by filename prefix.
 */
private enum ExternalPigJars {
PIG( "pig" ),
AUTOMATON( "automaton" ),
ANTLR( "antlr-runtime" ),
GUAVA( "guava" ),
JACKSON_CORE( "jackson-core-asl" ),
JACKSON_MAPPER( "jackson-mapper-asl" ),
JODATIME( "joda-time" );
// Jar file name prefix, without version or extension.
private final String jarName;
ExternalPigJars( String jarName ) {
this.jarName = jarName;
}
public String getJarName() {
return jarName;
}
}
/**
 * Builds a client-services facade for the given cluster.
 *
 * @param namedCluster  cluster this instance talks to
 * @param hadoopShim    shim used to create and configure Hadoop configurations
 * @param bundleContext OSGi context used to locate shim services and bundle jars
 */
public HadoopClientServicesImpl( NamedCluster namedCluster, HadoopShim hadoopShim, BundleContext bundleContext ) {
    this.namedCluster = namedCluster;
    this.hadoopShim = hadoopShim;
    this.bundleContext = bundleContext;
    // Oozie is optional: only build a client when the cluster actually configures an Oozie URL.
    this.oozieClient = namedCluster.getOozieUrl() == null ? null : new OozieClient( namedCluster.getOozieUrl() );
    this.writerAppenderManagerFactory = new WriterAppenderManager.Factory();
    this.bytesUtil = new CommonHBaseBytesUtil();
  }
/**
 * Asks the Oozie server for its protocol URL.
 *
 * @throws HadoopClientServicesException wrapping any {@code OozieClientException}, preserving
 *         its error code
 */
public String getOozieProtocolUrl() throws HadoopClientServicesException {
    try {
      return oozieClient.getProtocolUrl();
    } catch ( OozieClientException cause ) {
      throw new HadoopClientServicesException( cause, cause.getErrorCode() );
    }
  }
/**
 * Submits an Oozie job described by {@code props} and returns a handle to it.
 *
 * @param props Oozie job configuration (app path, parameters, ...)
 * @return a delegate wrapping the submitted job's id and client
 * @throws HadoopClientServicesException wrapping any {@code OozieClientException}
 */
public OozieJobInfo runOozie( Properties props ) throws HadoopClientServicesException {
    try {
      final String submittedJobId = oozieClient.run( props );
      return new OozieJobInfoDelegate( new OozieJobInfoImpl( submittedJobId, oozieClient ) );
    } catch ( OozieClientException cause ) {
      throw new HadoopClientServicesException( cause, cause.getErrorCode() );
    }
  }
/**
 * Verifies that the Oozie server speaks a web-service version this client supports.
 *
 * @throws HadoopClientServicesException wrapping any {@code OozieClientException}
 */
public void validateOozieWSVersion() throws HadoopClientServicesException {
    try {
      oozieClient.validateWSVersion();
    } catch ( OozieClientException cause ) {
      throw new HadoopClientServicesException( cause, cause.getErrorCode() );
    }
  }
/**
 * Reports whether {@code props} names any kind of Oozie application path
 * (workflow, coordinator, or bundle).
 */
public boolean hasOozieAppPath( Properties props ) {
    for ( String appPathKey : new String[] { APP_PATH, COORDINATOR_APP_PATH, BUNDLE_APP_PATH } ) {
      if ( props.containsKey( appPathKey ) ) {
        return true;
      }
    }
    return false;
  }
/**
 * Runs the Sqoop command-line tool against this service's named cluster.
 *
 * <p>Stages the required dependency jars (Avro for avro/export jobs, HBase shim dependencies
 * for {@code --hbase-table} jobs) into the job's {@code tmpjars}, swaps the context class
 * loader and the {@code hadoop.alt.classpath} system property for the duration of the run,
 * and restores both afterwards.
 *
 * @param argsList   Sqoop command-line arguments
 * @param properties extra key/value pairs copied into the Hadoop configuration
 * @return the Sqoop tool's exit code; -1 if staging dependency jars failed with an
 *         IOException; 1 on any other error
 */
public int runSqoop( List<String> argsList, Properties properties ) {
    Configuration configuration = hadoopShim.createConfiguration( namedCluster );
    for ( Map.Entry<String, String> entry : Maps.fromProperties( properties ).entrySet() ) {
      configuration.set( entry.getKey(), entry.getValue() );
    }
    try {
      // Make sure Sqoop throws exceptions instead of returning a status of 1
      System.setProperty( SQOOP_THROW_ON_ERROR, Boolean.toString( true ) );
      configureShim( configuration );
      String[] args = argsList.toArray( new String[ argsList.size() ] );
      List<String> argView = Arrays.asList( args );
      ClassLoader previousClassLoader = Thread.currentThread().getContextClassLoader();
      Thread.currentThread().setContextClassLoader( getClass().getClassLoader() );
      String previousAltClasspath = System.getProperty( ALT_CLASSPATH );
      try {
        loadBundleFilesLocations();
        System.setProperty( ALT_CLASSPATH, createHadoopAltClasspath() );
        configuration.set( TMPJARS, getSqoopJarLocation( configuration ) );
        if ( argView.contains( "--as-avrodatafile" )
            || argView.contains( "--export-dir" ) ) { // BACKLOG-32217: Avro libs needed for export
          addDependencyJars( configuration, Conversion.class, AvroWrapper.class );
        }
        if ( argView.contains( "--hbase-table" ) ) {
          // Locate the shim service registered for this cluster and reflectively ask it for the
          // HBase classes whose jars must be shipped with the job.
          Filter serviceFilter = bundleContext.createFilter( "(shim=" + namedCluster.getShimIdentifier() + ")" );
          ServiceReference serviceReference =
            (ServiceReference) bundleContext.getServiceReferences( HadoopShim.class, serviceFilter.toString() )
              .toArray()[ 0 ];
          Object service = bundleContext.getService( serviceReference );
          Class[] depClasses = (Class[]) service.getClass().getMethod( "getHbaseDependencyClasses" ).invoke( service );
          addDependencyJars( configuration, depClasses );
        }
        return Sqoop.runTool( args, ShimUtils.asConfiguration( configuration ) );
      } catch ( IOException e ) {
        // Previously e.printStackTrace(); route through the class logger instead.
        LOGGER.error( "Error staging sqoop dependency jars", e );
        return -1;
      } finally {
        Thread.currentThread().setContextClassLoader( previousClassLoader );
        if ( previousAltClasspath == null ) {
          System.clearProperty( ALT_CLASSPATH );
        } else {
          System.setProperty( ALT_CLASSPATH, previousAltClasspath );
        }
      }
    } catch ( Exception e ) {
      LOGGER.error( "Error executing sqoop", e );
      return 1;
    }
  }
/**
 * Pushes the named cluster's HDFS and JobTracker endpoints (after variable substitution)
 * into {@code conf}, logging any informational messages the shim produces.
 */
private void configureShim( Configuration conf ) throws Exception {
    final List<String> shimMessages = Lists.newArrayList();
    hadoopShim.configureConnectionInformation(
        namedCluster.environmentSubstitute( namedCluster.getHdfsHost() ),
        namedCluster.environmentSubstitute( namedCluster.getHdfsPort() ),
        namedCluster.environmentSubstitute( namedCluster.getJobTrackerHost() ),
        namedCluster.environmentSubstitute( namedCluster.getJobTrackerPort() ), conf, shimMessages );
    for ( String message : shimMessages ) {
      LOGGER.info( message );
    }
  }
/**
 * Finds the sqoop jar inside the collected bundle locations and returns its path,
 * qualified against the local file system.
 *
 * <p>NOTE(review): if more than one jar name starts with "sqoop", the absolute paths are
 * concatenated with no separator — looks like a latent bug; confirm each bundle ships at
 * most one sqoop jar.
 *
 * @param c configuration used to resolve the local file system
 * @return the qualified sqoop jar path, or the raw concatenation if qualification fails
 */
private String getSqoopJarLocation( Configuration c ) {
    StringBuilder sb = new StringBuilder();
    for ( String bundleFileLocation : sqoopBundleFileLocations ) {
      File filesInsideBundle = new File( bundleFileLocation );
      Iterator<File> filesIterator = FileUtils.iterateFiles( filesInsideBundle, new String[] { "jar" }, true );
      while ( filesIterator.hasNext() ) {
        File file = filesIterator.next();
        if ( file.getName().startsWith( "sqoop" ) ) {
          sb.append( file.getAbsolutePath() );
        }
      }
    }
    try {
      FileSystem fs = FileSystem.getLocal( ShimUtils.asConfiguration( c ) );
      return new Path( sb.toString() ).makeQualified( fs ).toString();
    } catch ( IOException e ) {
      // Previously e.printStackTrace(); log and fall back to the unqualified path below.
      LOGGER.error( "Unable to qualify sqoop jar location against the local file system", e );
    }
    return sb.toString();
  }
/**
 * Adds to the configuration's {@code tmpjars} every bundle jar that contains at least one
 * of the given classes. Each class is matched at most once (the first jar found wins), and
 * scanning stops as soon as all classes have been located.
 *
 * @param conf    configuration whose {@code tmpjars} entry is extended (existing entries kept)
 * @param classes classes whose containing jars must be shipped with the job
 * @throws IOException if a jar file cannot be opened or its URL cannot be formed
 */
private void addDependencyJars( Configuration conf, Class... classes )
    throws IOException {
    // Class-file entry suffixes still waiting to be located in some jar.
    List<String> remainingClassEntries = new ArrayList<>();
    for ( Class clazz : classes ) {
      remainingClassEntries.add( clazz.getCanonicalName().replace( ".", "/" ) + ".class" );
    }
    Set<String> tmpjars = new HashSet<>();
    if ( conf.get( TMPJARS ) != null ) {
      tmpjars.addAll( Arrays.asList( conf.get( TMPJARS ).split( "," ) ) );
    }
    for ( String bundleFileLocation : sqoopBundleFileLocations ) {
      if ( remainingClassEntries.isEmpty() ) {
        break;
      }
      Iterator<File> jarFiles =
        FileUtils.iterateFiles( new File( bundleFileLocation ), new String[] { "jar" }, true );
      while ( jarFiles.hasNext() && !remainingClassEntries.isEmpty() ) {
        File jarFile = jarFiles.next();
        // Scan the jar's entries for any of the wanted class files.
        try ( ZipFile zip = new ZipFile( jarFile ) ) {
          for ( Enumeration<? extends ZipEntry> entries = zip.entries(); entries.hasMoreElements(); ) {
            ZipEntry entry = entries.nextElement();
            if ( entry.isDirectory() || !entry.getName().endsWith( ".class" ) ) {
              continue;
            }
            ListIterator<String> wanted = remainingClassEntries.listIterator();
            while ( wanted.hasNext() ) {
              if ( entry.getName().endsWith( wanted.next() ) ) {
                // Found a wanted class in this jar: ship the jar, stop looking for the class.
                tmpjars.add( jarFile.toURI().toURL().toString() );
                wanted.remove();
              }
            }
          }
        }
      }
    }
    if ( tmpjars.size() > 0 ) {
      StringBuilder joined = new StringBuilder();
      for ( String jarPath : tmpjars ) {
        joined.append( "," ).append( jarPath );
      }
      conf.set( TMPJARS, joined.toString().substring( 1 ) );
    }
  }
/**
 * Rebuilds {@code sqoopBundleFileLocations} with the data-directory parent of the host
 * bundle and of every fragment attached to it (via the OSGi "osgi.wiring.host" wires).
 */
private void loadBundleFilesLocations() {
    sqoopBundleFileLocations.clear();
    sqoopBundleFileLocations.add( bundleContext.getBundle().getDataFile( "" ).getParent() );
    final BundleWiring hostWiring = bundleContext.getBundle().adapt( BundleWiring.class );
    for ( BundleWire fragmentWire : hostWiring.getProvidedWires( "osgi.wiring.host" ) ) {
      final Bundle fragmentBundle = fragmentWire.getRequirerWiring().getBundle();
      sqoopBundleFileLocations.add( fragmentBundle.getDataFile( "" ).getParent() );
    }
  }
/**
 * Builds the alternative Hadoop classpath: the absolute paths of every hadoop-common,
 * hadoop-mapreduce-client-core, hadoop-core, and sqoop jar found (recursively) under the
 * collected bundle locations, joined by the platform path separator.
 */
private String createHadoopAltClasspath() {
    final StringBuilder classpath = new StringBuilder();
    for ( String location : sqoopBundleFileLocations ) {
      final Iterator<File> jarFiles =
        FileUtils.iterateFiles( new File( location ), new String[] { "jar" }, true );
      while ( jarFiles.hasNext() ) {
        final File jarFile = jarFiles.next();
        final String jarName = jarFile.getName();
        final boolean wanted = jarName.startsWith( "hadoop-common" )
            || jarName.startsWith( "hadoop-mapreduce-client-core" )
            || jarName.startsWith( "hadoop-core" )
            || jarName.startsWith( "sqoop" );
        if ( wanted ) {
          classpath.append( jarFile.getAbsolutePath() ).append( File.pathSeparator );
        }
      }
    }
    return classpath.toString();
  }
/**
 * Executes a Pig script against the configured named cluster (or locally) and
 * returns the outcome together with the captured log file.
 *
 * @param scriptPath          path or URL of the Pig script; variables are substituted
 * @param executionMode       LOCAL or cluster execution
 * @param parameters          Pig parameter substitutions of the form "name=value"
 * @param name                job name used for the log appender
 * @param logChannelInterface Kettle log channel (may be null)
 * @param variableSpace       used to resolve variables in paths and cluster settings
 * @param logLevel            log level for the captured output
 * @return a PigResult carrying the log file, the parser's job counts, and any error
 */
public PigResult runPig( String scriptPath, PigExecutionMode executionMode, List<String> parameters, String name,
                         LogChannelInterface logChannelInterface, VariableSpace variableSpace,
                         LogLevel logLevel ) {
  FileObject appenderFile = null;
  // The appender manager captures log output for this run; try-with-resources
  // closes it when the job finishes or fails.
  try ( WriterAppenderManager appenderManager = writerAppenderManagerFactory.create( logChannelInterface, logLevel,
    name ) ) {
    appenderFile = appenderManager.getFile();
    Configuration configuration = hadoopShim.createConfiguration( namedCluster );
    if ( executionMode != PigExecutionMode.LOCAL ) {
      // Non-local runs need HDFS and job tracker connection details pushed
      // into the Hadoop configuration; informational messages are logged.
      List<String> configMessages = new ArrayList<String>();
      hadoopShim.configureConnectionInformation( variableSpace.environmentSubstitute( namedCluster.getHdfsHost() ),
        variableSpace.environmentSubstitute( namedCluster.getHdfsPort() ),
        variableSpace.environmentSubstitute( namedCluster.getJobTrackerHost() ),
        variableSpace.environmentSubstitute( namedCluster.getJobTrackerPort() ), configuration,
        configMessages );
      if ( logChannelInterface != null ) {
        for ( String configMessage : configMessages ) {
          logChannelInterface.logBasic( configMessage );
        }
      }
    }
    // Resolve the script location: a plain path becomes a file: URL; anything
    // containing "://" is treated as an already-qualified URL.
    URL scriptU;
    String scriptFileS = scriptPath;
    scriptFileS = variableSpace.environmentSubstitute( scriptFileS );
    if ( scriptFileS.indexOf( "://" ) == -1 ) {
      File scriptFile = new File( scriptFileS );
      scriptU = scriptFile.toURI().toURL();
    } else {
      scriptU = new URL( scriptFileS );
    }
    String pigScript = substitutePigScriptParameters( scriptU, parameters );
    Properties properties = new Properties();
    // In local mode Pig must not see the cluster configuration.
    updatePigConfiguration( properties, executionMode == PigExecutionMode.LOCAL ? null : configuration );
    // Any mode other than LOCAL is executed as MAPREDUCE.
    return new PigResultImpl( appenderFile, executePigScript( pigScript,
      executionMode == PigExecutionMode.LOCAL ? PigExecutionMode.LOCAL : PigExecutionMode.MAPREDUCE, properties ),
      null );
  } catch ( Exception e ) {
    // Failures are reported through the PigResult rather than thrown.
    return new PigResultImpl( appenderFile, null, e );
  }
}
/**
 * Loads Pig's default properties into {@code properties} and, when a Hadoop
 * configuration is supplied (non-local execution), overlays its entries and
 * flags Pig to use them.
 *
 * @param properties    target property set, mutated in place
 * @param configuration Hadoop configuration to merge in, or null for local mode
 */
private void updatePigConfiguration( Properties properties, Configuration configuration ) {
  PropertiesUtil.loadDefaultProperties( properties );
  if ( configuration == null ) {
    return;
  }
  properties.putAll( ConfigurationUtil.toProperties( ShimUtils.asConfiguration( configuration ) ) );
  // The property name's spelling ("overriden") is what Pig expects; keep it.
  properties.setProperty( "pig.use.overriden.hadoop.configs", "true" );
}
/**
 * Recursively scans the host bundle's data directory for jar files and
 * registers every matching external jar with the given Pig context.
 *
 * @throws MalformedURLException if a jar path cannot be registered
 */
private void addExternalJarsToPigContext( PigContext pigContext ) throws MalformedURLException {
  File bundleDataDir = new File( bundleContext.getBundle().getDataFile( "" ).getParent() );
  Iterator<File> jarFiles = FileUtils.iterateFiles( bundleDataDir, new String[] { "jar" }, true );
  while ( jarFiles.hasNext() ) {
    addMatchedJarToPigContext( pigContext, jarFiles.next() );
  }
}
/**
 * Adds the given jar to the Pig context if its file name starts with one of
 * the known external Pig jar prefixes. At most one registration occurs.
 */
private void addMatchedJarToPigContext( PigContext pigContext, File jarFile ) throws MalformedURLException {
  String jarName = jarFile.getName();
  for ( ExternalPigJars candidate : ExternalPigJars.values() ) {
    if ( jarName.startsWith( candidate.getJarName() ) ) {
      pigContext.addJar( jarFile.getAbsolutePath() );
      return;
    }
  }
}
/**
 * Runs the given Pig script text through a Grunt parser.
 * <p>
 * Pig changed the {@code GruntParser} constructor across versions, so this
 * first tries the two-argument (Reader, PigServer) constructor and falls back
 * to the older single-argument constructor plus a {@code setParams} call.
 *
 * @param pigScript  full script text to execute
 * @param mode       execution mode (local or mapreduce)
 * @param properties Pig configuration properties
 * @return the job counts reported by {@code parseStopOnError}
 * @throws IOException if script execution fails
 * @throws org.apache.pig.tools.pigscript.parser.ParseException if no usable
 *         Grunt parser could be constructed or the script fails to parse
 */
private int[] executePigScript( String pigScript, PigExecutionMode mode, Properties properties )
  throws IOException, org.apache.pig.tools.pigscript.parser.ParseException {
  GruntParser grunt = null;
  PigContext pigContext = new PigContext( getExecType( mode ), properties );
  addExternalJarsToPigContext( pigContext );
  PigServer pigServer = new PigServer( pigContext );
  try {
    // Newer Pig: GruntParser(Reader, PigServer). Typed constructor avoids the
    // raw type and the unchecked cast of the previous version.
    Constructor<GruntParser> constructor = GruntParser.class.getConstructor( Reader.class, PigServer.class );
    grunt = constructor.newInstance( new StringReader( pigScript ), pigServer );
  } catch ( Exception e ) {
    try {
      // Older Pig: GruntParser(Reader) followed by setParams(PigServer).
      Constructor<GruntParser> constructor = GruntParser.class.getConstructor( Reader.class );
      grunt = constructor.newInstance( new StringReader( pigScript ) );
      Method method = grunt.getClass().getMethod( "setParams", new Class[] { PigServer.class } );
      method.invoke( grunt, pigServer );
    } catch ( Exception e1 ) {
      org.apache.pig.tools.pigscript.parser.ParseException parseException =
        new org.apache.pig.tools.pigscript.parser.ParseException(
          "Error constructing Grunt Parser in " + getClass().getName() );
      // Fix: keep the underlying reflection failure for diagnostics instead
      // of discarding it.
      parseException.initCause( e1 );
      throw parseException;
    }
  }
  if ( grunt == null ) {
    throw new org.apache.pig.tools.pigscript.parser.ParseException(
      "Grunt Parser is null in " + getClass().getName() );
  }
  grunt.setInteractive( false );
  return grunt.parseStopOnError( false );
}
/**
 * Maps the shim-level execution mode onto Pig's {@link ExecType}.
 *
 * @param mode the requested execution mode
 * @return the corresponding Pig execution type
 * @throws IllegalStateException for any unrecognized mode
 */
protected ExecType getExecType( PigExecutionMode mode ) {
  switch ( mode ) {
    case MAPREDUCE:
      return ExecType.MAPREDUCE;
    case LOCAL:
      return ExecType.LOCAL;
    default:
      throw new IllegalStateException( "unknown execution mode: " + mode );
  }
}
/**
 * Reads the Pig script from the given URL and performs Pig parameter
 * substitution on it.
 *
 * @param pigScript URL of the script to read
 * @param paramList parameters of the form "name=value"; may be empty
 * @return the script text with all parameters substituted
 * @throws Exception if the script cannot be read or substitution fails
 */
private String substitutePigScriptParameters( URL pigScript, List<String> paramList ) throws Exception {
  StringWriter writer = new StringWriter();
  // Fix: close the script stream when done; it was previously leaked.
  try ( InputStream inStream = pigScript.openStream() ) {
    // do parameter substitution
    ParameterSubstitutionPreprocessor psp = new ParameterSubstitutionPreprocessor( 50 );
    psp.genSubstitutedFile( new BufferedReader( new InputStreamReader( inStream ) ),
      writer,
      paramList.size() > 0 ? paramList.toArray( EMPTY_STRING_ARRAY ) : null, null );
  }
  return writer.toString();
}
/**
 * @return the named cluster this service implementation is bound to
 */
public NamedCluster getNamedCluster() {
  return namedCluster;
}
/**
 * Creates a {@link HadoopFileSystem} wrapper for the given named cluster and
 * URI, rejecting configurations that resolve to a local filesystem.
 *
 * @param namedCluster cluster whose configuration drives the connection
 * @param uri          fallback URI used when the filesystem reports none
 * @return a HadoopFileSystem whose delegate is lazily re-resolved on demand
 * @throws IOException if only a local filesystem could be obtained
 */
public HadoopFileSystem getFileSystem( NamedCluster namedCluster, URI uri ) throws IOException {
  final Configuration configuration = hadoopShim.createConfiguration( namedCluster );
  FileSystem fileSystem = (FileSystem) hadoopShim.getFileSystem( configuration ).getDelegate();
  if ( fileSystem instanceof LocalFileSystem ) {
    LOGGER.error( "Got a local filesystem, was expecting an hdfs connection" );
    throw new IOException( "Got a local filesystem, was expecting an hdfs connection" );
  }
  // Prefer the URI reported by the filesystem itself; fall back to the caller's.
  final URI finalUri = fileSystem.getUri() != null ? fileSystem.getUri() : uri;
  HadoopFileSystem hadoopFileSystem = new HadoopFileSystemImpl( () -> {
    try {
      return finalUri != null
        ? (FileSystem) hadoopShim.getFileSystem( finalUri, configuration, (NamedCluster) namedCluster ).getDelegate()
        : (FileSystem) hadoopShim.getFileSystem( configuration ).getDelegate();
    } catch ( IOException | InterruptedException e ) {
      // A null delegate signals that the lookup failed.
      LOGGER.debug( "Error looking up/creating the file system ", e );
      return null;
    }
  } );
  ( (HadoopFileSystemImpl) hadoopFileSystem ).setNamedCluster( namedCluster );
  return hadoopFileSystem;
}
/**
 * Factory hook for the HBase connection; protected so subclasses can
 * substitute their own connection implementation.
 *
 * @throws IOException if the connection cannot be created
 */
protected HBaseConnectionImpl getConnectionImpl( Properties connProps, LogChannelInterface logChannelInterface )
  throws IOException {
  return new HBaseConnectionImpl( null, bytesUtil, connProps, logChannelInterface, namedCluster );
}
/**
 * Builds an HBase connection configured from the named cluster's ZooKeeper
 * host/port plus optional site/default configuration documents.
 *
 * @param variableSpace       used to resolve variables in cluster settings
 * @param siteConfig          hbase-site configuration, may be empty
 * @param defaultConfig       hbase-default configuration, may be empty
 * @param logChannelInterface channel used for connection logging
 * @return the configured HBase connection
 * @throws IOException if the connection cannot be created
 */
public HBaseConnection getHBaseConnection( VariableSpace variableSpace, String siteConfig, String defaultConfig,
                                           LogChannelInterface logChannelInterface ) throws IOException {
  Properties connProps = new Properties();
  String zooKeeperHost = null;
  String zooKeeperPort = null;
  if ( namedCluster != null ) {
    zooKeeperHost = variableSpace.environmentSubstitute( namedCluster.getZooKeeperHost() );
    zooKeeperPort = variableSpace.environmentSubstitute( namedCluster.getZooKeeperPort() );
  }
  if ( !Const.isEmpty( zooKeeperHost ) ) {
    connProps.setProperty( org.pentaho.hadoop.shim.spi.HBaseConnection.ZOOKEEPER_QUORUM_KEY, zooKeeperHost );
  }
  if ( !Const.isEmpty( zooKeeperPort ) ) {
    connProps.setProperty( org.pentaho.hadoop.shim.spi.HBaseConnection.ZOOKEEPER_PORT_KEY, zooKeeperPort );
  }
  if ( !Const.isEmpty( siteConfig ) ) {
    connProps.setProperty( org.pentaho.hadoop.shim.spi.HBaseConnection.SITE_KEY, siteConfig );
  }
  if ( !Const.isEmpty( defaultConfig ) ) {
    connProps.setProperty( org.pentaho.hadoop.shim.spi.HBaseConnection.DEFAULTS_KEY, defaultConfig );
  }
  // Fix: the lookups above explicitly tolerate a null namedCluster, but this
  // line previously dereferenced it unconditionally and could NPE.
  if ( namedCluster != null ) {
    connProps.setProperty( "named.cluster", namedCluster.getName() );
  }
  return getConnectionImpl( connProps, logChannelInterface );
}
/** @return a new HBase column-filter factory */
public ColumnFilterFactoryImpl getHBaseColumnFilterFactory() {
  return new ColumnFilterFactoryImpl();
}

/** @return a new HBase mapping factory backed by this shim's byte utilities */
public MappingFactoryImpl getHBaseMappingFactory() {
  return new MappingFactoryImpl( bytesUtil, getHBaseValueMetaInterfaceFactory() );
}

/** @return a new HBase value-meta factory backed by this shim's byte utilities */
public HBaseValueMetaInterfaceFactoryImpl getHBaseValueMetaInterfaceFactory() {
  return new HBaseValueMetaInterfaceFactoryImpl( bytesUtil );
}

/** @return a new byte-conversion utility backed by this shim's byte utilities */
public ByteConversionUtil getHBaseByteConversionUtil() {
  return (ByteConversionUtil) new ByteConversionUtilImpl( bytesUtil );
}

/** @return a new HBase result factory backed by this shim's byte utilities */
public ResultFactory getHBaseResultFactory() {
  return new ResultFactoryImpl( bytesUtil );
}
}
| |
package com.erc.dal;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import com.erc.dal.upgrade.DBConfig;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Date;
/**
 * Singleton that executes the SQLite operations behind the ORM: insert/update
 * ({@link #save}), lookup ({@link #getById}, {@link #getAll}), aggregation
 * ({@link #calculate}) and deletion ({@link #remove}). Cursor columns are
 * mapped back onto entity fields via reflection.
 * <p>
 * Created by einar on 10/17/2016.
 */
class DBOperations {

    // Handle to the most recently opened database; opened by rawQuery/execSQL
    // and closed by the public operations once they are done with it.
    SQLiteDatabase db;
    private static DBOperations dbOperations;

    private DBOperations() {
    }

    /**
     * Returns the shared instance, creating it on first use.
     * Fix: synchronized for consistency with the other (synchronized) entry
     * points, avoiding a racy double-creation of the singleton.
     */
    public static synchronized DBOperations getInstance() {
        if (dbOperations == null) {
            dbOperations = new DBOperations();
        }
        return dbOperations;
    }

    /**
     * Inserts or updates the given entity. When no row with the entity's
     * primary key exists it is inserted (generating an id when the key is
     * empty or "0"), otherwise the existing row is updated.
     *
     * @return the saved entity, or null if it exposes no primary key
     */
    public synchronized Entity save(Entity entity, DBConfig dbConfig) {
        String sql = "";
        Pair pair = QueryBuilder.getPrimaryKey(entity);
        if (pair != null) {
            Entity entityToUpdate = getById(entity.getClass(), Long.parseLong(pair.getValue()), dbConfig);
            if (entityToUpdate == null) {
                if (pair.getValue().toString().isEmpty() || pair.getValue().toString().equals("0")) {
                    QueryBuilder.setID(entity, this, dbConfig);
                }
                sql = QueryBuilder.getQueryInsert(entity);
            } else {
                sql = QueryBuilder.getQueryUpdate(entity);
            }
            execSQL(sql, dbConfig);
            if (db != null && db.isOpen()) {
                db.close();
            }
            return entity;
        } else {
            return null;
        }
    }

    /**
     * Loads the entity of the given type whose primary key equals {@code id}.
     *
     * @return the populated entity, or null when the query could not be built
     *         or no row matched
     */
    public synchronized <T> T getById(Class classType, Object id, DBConfig dbConfig) {
        T entity = null;
        String sql = QueryBuilder.getQuery(classType, id);
        if (!Util.isNullOrEmpty(sql)) {
            Cursor cursor = rawQuery(sql, dbConfig);
            if (cursor != null && cursor.moveToNext()) {
                try {
                    entity = (T) ReflectionHelper.getInstance(classType, new Object[]{}, new Class[]{});
                    ArrayList<java.lang.reflect.Field> fields = ReflectionHelper.getFields(entity);
                    fillFields(fields, cursor, entity);
                } catch (Exception e) {
                    Log.e("Fail to fill getById", e);
                }
            }
            // Fix: release the cursor before closing the database (it was
            // previously leaked).
            if (cursor != null) {
                cursor.close();
            }
            if (db != null && db.isOpen()) {
                db.close();
            }
            return entity;
        } else {
            return null;
        }
    }

    /**
     * Loads all rows of the given type, optionally filtered/ordered by the
     * first supplied {@link Options}.
     *
     * @return the (possibly empty) list of populated entities
     */
    public synchronized <T> ArrayList<T> getAll(Class classType, DBConfig dbConfig, Options... options) {
        ArrayList<T> entities = new ArrayList<>();
        Options options_ = options.length == 0 ? new Options() : options[0];
        String selectAll = QueryBuilder.getAllQuery(classType);
        selectAll = options_.getSql(classType, selectAll) + Constant.SEMICOLON;
        Cursor cursor = rawQuery(selectAll, dbConfig);
        while (cursor != null && cursor.moveToNext()) {
            try {
                Object entity = ReflectionHelper.getInstance(classType, new Object[]{}, new Class[]{});
                ArrayList<java.lang.reflect.Field> fields = ReflectionHelper.getFields(entity);
                fillFields(fields, cursor, entity);
                entities.add((T) entity);
            } catch (Exception e) {
                Log.e("Fail to fill getAll", e);
            }
        }
        // Fix: release the cursor before closing the database.
        if (cursor != null) {
            cursor.close();
        }
        if (db != null && db.isOpen()) {
            db.close();
        }
        return entities;
    }

    /**
     * Runs an aggregate query and returns the single scalar result, boxed as
     * Long for integer columns or Float for float columns.
     *
     * @return the aggregate value, or null on error / missing operator / no row
     */
    public synchronized <T> T calculate(Class classType, Aggregation aggregationOperator, DBConfig dbConfig, Options... options) {
        T res = null;
        try {
            if (aggregationOperator != null) {
                Options options_ = options.length == 0 ? new Options() : options[0];
                String selectAll = QueryBuilder.getAllQuery(classType);
                selectAll = options_.getSql(classType, selectAll, aggregationOperator) + Constant.SEMICOLON;
                Cursor cursor = rawQuery(selectAll, dbConfig);
                if (cursor != null && cursor.moveToNext()) {
                    if (cursor.getType(0) == Cursor.FIELD_TYPE_FLOAT) {
                        res = (T) Float.valueOf(cursor.getFloat(0));
                    }
                    if (cursor.getType(0) == Cursor.FIELD_TYPE_INTEGER) {
                        res = (T) Long.valueOf(cursor.getLong(0));
                    }
                }
                // Fix: release the cursor before closing the database.
                if (cursor != null) {
                    cursor.close();
                }
                if (db != null && db.isOpen()) {
                    db.close();
                }
            } else {
                Log.w("null aggregation Operator on Entity.Calculate");
            }
        } catch (Exception e) {
            Log.e("fail to calculate:" + aggregationOperator.getOperator(), e);
        }
        return res;
    }

    /**
     * Deletes the row of the given type with the given primary key.
     *
     * @return true when the delete statement executed successfully
     */
    public synchronized boolean remove(Class classType, Object id, DBConfig dbConfig) {
        String sql = QueryBuilder.getQueryRemove(classType, id);
        if (!Util.isNullOrEmpty(sql)) {
            boolean success = execSQL(sql, dbConfig);
            if (db != null && db.isOpen()) {
                db.close();
            }
            return success;
        }
        return false;
    }

    /**
     * Opens the database read-only and runs a raw SELECT.
     *
     * @return the result cursor (callers must close it), or null on failure
     */
    public Cursor rawQuery(String sql, DBConfig dbConfig) {
        db = SQLiteDatabaseManager.openReadOnly(dbConfig);
        Cursor cursor = null;
        try {
            cursor = db.rawQuery(sql, null);
        } catch (Exception e) {
            Log.e("Failed to execute raw SQL", e);
        }
        return cursor;
    }

    /**
     * Opens the database read-write and executes a single SQL statement.
     *
     * @return true on success, false when execution threw
     */
    public boolean execSQL(String sql, DBConfig dbConfig) {
        db = SQLiteDatabaseManager.open(dbConfig);
        boolean res = false;
        try {
            db.execSQL(sql);
            res = true;
        } catch (Exception e) {
            Log.e("Failed to execute SQL", e);
        }
        return res;
    }

    /**
     * Copies the current cursor row into the given entity, converting each
     * column to the declared field type. The synthetic "rowid" field is set
     * to -1; unsupported types are set to null (which, for primitive fields,
     * triggers the catch below — matching the previous behavior).
     */
    private static void fillFields(ArrayList<java.lang.reflect.Field> fields, Cursor cursor, Object entity) throws IllegalAccessException {
        String currentField = "null";
        try {
            for (java.lang.reflect.Field field : fields) {
                currentField = field.getName();
                Type type = field.getType();
                String columnName = ReflectionHelper.getDataBaseNameOfField(field);
                Object value;
                if (columnName.equals("rowid")) {
                    value = -1;
                } else {
                    // Look up the column index once per field instead of once
                    // per type comparison.
                    int columnIndex = cursor.getColumnIndex(columnName);
                    if (type.equals(String.class)) {
                        value = cursor.getString(columnIndex);
                    } else if (type.equals(char.class) || type.equals(Character.class)) {
                        String charText = cursor.getString(columnIndex);
                        value = Util.isNullOrEmpty(charText) ? '\0' : charText.toCharArray()[0];
                    } else if (type.equals(Date.class)) {
                        String dateIso = cursor.getString(columnIndex);
                        value = HelperDate.getDateFromFormat(dateIso, HelperDate.ISO_FORMAT);
                    } else if (type.equals(short.class) || type.equals(Short.class)) {
                        value = cursor.getShort(columnIndex);
                    } else if (type.equals(int.class) || type.equals(Integer.class)) {
                        value = cursor.getInt(columnIndex);
                    } else if (type.equals(long.class) || type.equals(Long.class)) {
                        value = cursor.getLong(columnIndex);
                    } else if (type.equals(boolean.class) || type.equals(Boolean.class)) {
                        value = cursor.getInt(columnIndex) == 1;
                    } else if (type.equals(double.class) || type.equals(Double.class)) {
                        value = cursor.getDouble(columnIndex);
                    } else if (type.equals(float.class) || type.equals(Float.class)) {
                        value = cursor.getFloat(columnIndex);
                    } else {
                        value = null;
                    }
                }
                field.set(entity, value);
            }
        } catch (Exception e) {
            Log.e("Failed to fill Field: \"" + currentField + "\" in " + entity.getClass().getName(), e);
        }
    }
}
| |
/*
* //******************************************************************
* //
* // Copyright 2016 Samsung Electronics All Rights Reserved.
* //
* //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
* //
* // Licensed under the Apache License, Version 2.0 (the "License");
* // you may not use this file except in compliance with the License.
* // You may obtain a copy of the License at
* //
* // http://www.apache.org/licenses/LICENSE-2.0
* //
* // Unless required by applicable law or agreed to in writing, software
* // distributed under the License is distributed on an "AS IS" BASIS,
* // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* // See the License for the specific language governing permissions and
* // limitations under the License.
* //
* //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
*/
package org.iotivity.cloud.base.protocols.coap;
import org.iotivity.cloud.util.Log;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
/**
 *
 * This class provides a set of APIs to print out logs for CoAP request and
 * response.
 *
 */
@Sharable
public class CoapLogHandler extends ChannelDuplexHandler {

    /** Maximum number of payload bytes included in a single log line. */
    static final int MAX_LOGLEN = 100;

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        Log.v(ctx.channel().id().asLongText().substring(26)
                + " Connected, Address: "
                + ctx.channel().remoteAddress().toString());
        ctx.fireChannelActive();
    }

    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
        ctx.fireChannelInactive();
        Log.v(ctx.channel().id().asLongText().substring(26)
                + " Disconnected, Address: "
                + ctx.channel().remoteAddress().toString());
    }

    @Override
    public void write(ChannelHandlerContext ctx, Object msg,
            ChannelPromise promise) {
        // Log outbound CoAP messages; anything else is forwarded untouched
        // (previously a non-CoAP message caused a ClassCastException).
        if (msg instanceof CoapRequest) {
            Log.v(composeCoapRequest(
                    ctx.channel().id().asLongText().substring(26),
                    (CoapRequest) msg));
        } else if (msg instanceof CoapResponse) {
            Log.v(composeCoapResponse(
                    ctx.channel().id().asLongText().substring(26),
                    (CoapResponse) msg));
        }
        // Fix: propagate the caller's promise so its write listeners are
        // notified; it was previously dropped.
        ctx.writeAndFlush(msg, promise);
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg)
            throws Exception {
        // Log inbound CoAP messages; anything else is forwarded untouched.
        if (msg instanceof CoapRequest) {
            Log.v(composeCoapRequest(
                    ctx.channel().id().asLongText().substring(26),
                    (CoapRequest) msg));
        } else if (msg instanceof CoapResponse) {
            Log.v(composeCoapResponse(
                    ctx.channel().id().asLongText().substring(26),
                    (CoapResponse) msg));
        }
        ctx.fireChannelRead(msg);
    }

    /**
     * Formats a CoAP request as one log line: channel id, token, method
     * (with observe option for GET), URI path/query, and a truncated payload.
     */
    private String composeCoapRequest(String channelId, CoapRequest request) {
        StringBuilder strBuilder = new StringBuilder();
        strBuilder.append(channelId);
        strBuilder.append(" " + request.getTokenString());
        switch (request.getMethod()) {
            case DELETE:
                strBuilder.append(" DELETE ");
                break;
            case GET:
                switch (request.getObserve()) {
                    case SUBSCRIBE:
                        strBuilder.append(" GET OBSERVE ");
                        break;
                    case UNSUBSCRIBE:
                        strBuilder.append(" GET OBSERVE CANCEL ");
                        break;
                    default:
                        strBuilder.append(" GET ");
                        break;
                }
                break;
            case POST:
                strBuilder.append(" POST ");
                break;
            case PUT:
                strBuilder.append(" PUT ");
                break;
        }
        strBuilder.append(request.getUriPath());
        String query = request.getUriQuery();
        if (query != null) {
            strBuilder.append("/?" + query);
        }
        if (request.getPayloadSize() > 0) {
            // Payload is truncated to MAX_LOGLEN bytes.
            strBuilder.append(" SZ:" + request.getPayloadSize() + " P:"
                    + new String(request.getPayload(), 0,
                            request.getPayloadSize() > MAX_LOGLEN ? MAX_LOGLEN
                                    : request.getPayloadSize()));
        }
        return strBuilder.toString();
    }

    /**
     * Formats a CoAP response as one log line: channel id, token, status code
     * with reason phrase, observe option, and a truncated payload.
     */
    private String composeCoapResponse(String channelId,
            CoapResponse response) {
        StringBuilder strBuilder = new StringBuilder();
        strBuilder.append(channelId);
        strBuilder.append(" " + response.getTokenString());
        switch (response.getStatus()) {
            case BAD_GATEWAY:
                strBuilder.append(" 5.02 Bad Gateway");
                break;
            case BAD_OPTION:
                strBuilder.append(" 4.02 Bad Option");
                break;
            case BAD_REQUEST:
                strBuilder.append(" 4.00 Bad Request");
                break;
            case CHANGED:
                strBuilder.append(" 2.04 Changed");
                break;
            case CONTENT:
                strBuilder.append(" 2.05 Content");
                break;
            case CREATED:
                strBuilder.append(" 2.01 Created");
                break;
            case DELETED:
                strBuilder.append(" 2.02 Deleted");
                break;
            case FORBIDDEN:
                strBuilder.append(" 4.03 Forbidden");
                break;
            case GATEWAY_TIMEOUT:
                strBuilder.append(" 5.04 Gateway Timeout");
                break;
            case INTERNAL_SERVER_ERROR:
                strBuilder.append(" 5.00 Internal Server Error");
                break;
            case METHOD_NOT_ALLOWED:
                strBuilder.append(" 4.05 Method Not Allowed");
                break;
            case NOT_ACCEPTABLE:
                strBuilder.append(" 4.06 Not Acceptable");
                break;
            case NOT_FOUND:
                strBuilder.append(" 4.04 Not Found");
                break;
            case NOT_IMPLEMENTED:
                strBuilder.append(" 5.01 Not Implemented");
                break;
            case PRECONDITION_FAILED:
                strBuilder.append(" 4.12 Precondition Failed");
                break;
            case PROXY_NOT_SUPPORTED:
                strBuilder.append(" 5.05 Proxying Not Supported");
                break;
            case REQUEST_ENTITY_TOO_LARGE:
                strBuilder.append(" 4.13 Request Entity Too Large");
                break;
            case SERVICE_UNAVAILABLE:
                strBuilder.append(" 5.03 Service Unavailable");
                break;
            case UNAUTHORIZED:
                strBuilder.append(" 4.01 Unauthorized");
                break;
            case UNSUPPORTED_CONTENT_FORMAT:
                strBuilder.append(" 4.15 Unsupported Content-Format");
                break;
            case VALID:
                strBuilder.append(" 2.03 Valid");
                break;
            default:
                break;
        }
        switch (response.getObserve()) {
            case SUBSCRIBE:
                strBuilder.append(" OBSERVE");
                break;
            case UNSUBSCRIBE:
                strBuilder.append(" OBSERVE CANCEL");
                break;
            case SEQUENCE_NUMBER:
                strBuilder.append(" OBSERVE SEQ:");
                strBuilder.append(response.getSequenceNumber());
                break;
            default:
                break;
        }
        if (response.getPayloadSize() > 0) {
            // Payload is truncated to MAX_LOGLEN bytes.
            strBuilder.append(" SZ:" + response.getPayloadSize() + " P:"
                    + new String(response.getPayload(), 0,
                            response.getPayloadSize() > MAX_LOGLEN ? MAX_LOGLEN
                                    : response.getPayloadSize()));
        }
        return strBuilder.toString();
    }
}
| |
/**
* Copyright (c) 2016 Couchbase, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.couchbase.lite;
import com.couchbase.lite.internal.InterfaceAudience;
import com.couchbase.lite.util.Log;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Future;
/**
* Represents a query of a CouchbaseLite 'view', or of a view-like resource like _all_documents.
*/
public class Query {
/**
 * Determines whether or when the view index is updated. By default, the index will be updated
 * if necessary before the query runs -- this guarantees up-to-date results but can cause a delay.
 */
public enum IndexUpdateMode {
    BEFORE, // Always update the index if needed before querying (default)
    NEVER,  // Don't update the index; results may be out of date
    AFTER   // Update the index _after_ querying (this query's results may still be out of date)
}
/**
 * Changes the behavior of a query created by queryAllDocuments.
 */
public enum AllDocsMode {
    ALL_DOCS,        // (the default) the query simply returns all non-deleted documents.
    INCLUDE_DELETED, // in this mode it also returns deleted documents.
    SHOW_CONFLICTS,  // the .conflictingRevisions property of each row will return the conflicting revisions, if any, of that document.
    ONLY_CONFLICTS,  // _only_ documents in conflict will be returned. (Especially useful with a LiveQuery, so you can be notified of conflicts as they happen, i.e. when they're pulled in by a replication.)
    BY_SEQUENCE      // Order by sequence number (i.e. chronologically)
}
/**
* The database that contains this view.
*/
private Database database;
/**
* The view object associated with this query
*/
private View view; // null for _all_docs query
/**
* Is this query based on a temporary view?
*/
private boolean temporaryView;
/**
* The number of initial rows to skip. Default value is 0.
* Should only be used with small values. For efficient paging, use startKey and limit.
*/
private int skip;
/**
* The maximum number of rows to return. Default value is 0, meaning 'unlimited'.
*/
private int limit = Integer.MAX_VALUE;
/**
* If non-nil, the key value to start at.
*/
private Object startKey;
/**
* If non-nil, the key value to end after.
*/
private Object endKey;
/**
* If non-nil, the document ID to start at.
* (Useful if the view contains multiple identical keys, making .startKey ambiguous.)
*/
private String startKeyDocId;
/**
* If non-nil, the document ID to end at.
* (Useful if the view contains multiple identical keys, making .endKey ambiguous.)
*/
private String endKeyDocId;
/**
* If YES (the default) the startKey (or startKeyDocID) comparison uses ">=". Else it uses ">".
*/
private boolean inclusiveStart;
/**
* If YES (the default) the endKey (or endKeyDocID) comparison uses "<=". Else it uses "<".
*/
private boolean inclusiveEnd;
/**
* If set, the view will not be updated for this query, even if the database has changed.
* This allows faster results at the expense of returning possibly out-of-date data.
*/
private IndexUpdateMode indexUpdateMode;
/**
* Changes the behavior of a query created by -queryAllDocuments.
* <p/>
* - In mode kCBLAllDocs (the default), the query simply returns all non-deleted documents.
* - In mode kCBLIncludeDeleted, it also returns deleted documents.
* - In mode kCBLShowConflicts, the .conflictingRevisions property of each row will return the
* conflicting revisions, if any, of that document.
* - In mode kCBLOnlyConflicts, _only_ documents in conflict will be returned.
* (This mode is especially useful for use with a CBLLiveQuery, so you can be notified of
* conflicts as they happen, i.e. when they're pulled in by a replication.)
*/
private AllDocsMode allDocsMode;
/**
* Should the rows be returned in descending key order? Default value is NO.
*/
private boolean descending;
/**
* If set to YES, the results will include the entire document contents of the associated rows.
* These can be accessed via QueryRow's -documentProperties property.
* This slows down the query, but can be a good optimization if you know you'll need the entire
* contents of each document. (This property is equivalent to "include_docs" in the CouchDB API.)
*/
private boolean prefetch;
/**
* If set to YES, disables use of the reduce function.
* (Equivalent to setting "?reduce=false" in the REST API.)
*/
private boolean mapOnly;
/**
* If set to YES, queries created by -createAllDocumentsQuery will include deleted documents.
* This property has no effect in other types of queries.
*/
private boolean includeDeleted;
/**
* If non-nil, the query will fetch only the rows with the given keys.
*/
private List<Object> keys;
/**
* If non-zero, enables grouping of results, in views that have reduce functions.
*/
private int groupLevel;
/**
* If non-zero, enables prefix matching of string or array keys.
* <p/>
* A value of 1 treats the endKey itself as a prefix: if it's a string, keys in the index that
* come after the endKey, but begin with the same prefix, will be matched. (For example, if the
* endKey is "foo" then the key "foolish" in the index will be matched, but not "fong".)
* Or if the endKey is an array, any array beginning with those elements will be matched.
* (For example, if the endKey is [1], then [1, "x"] will match, but not [2].)
* If the key is any other type, there is no effect.
* <p/>
* A value of 2 assumes the endKey is an array and treats its final item as a prefix, using the
* rules above. (For example, an endKey of [1, "x"] will match [1, "xtc"] but not [1, "y"].)
* <p/>
* A value of 3 assumes the key is an array of arrays, etc.
*/
private int prefixMatchLevel;
/**
* An optional predicate that filters the resulting query rows.
* If present, it's called on every row returned from the index, and if it returns false the
* row is skipped.
*/
private Predicate<QueryRow> postFilter;
private long lastSequence;
/**
 * Creates a query over the given view; a null view denotes an _all_docs query.
 * Defaults: unlimited rows, inclusive start/end, index updated before the
 * query runs, all non-deleted documents, and map-only when the view has no
 * reduce function.
 */
@InterfaceAudience.Private
/* package */ Query(Database database, View view) {
    this.database = database;
    this.view = view;
    limit = Integer.MAX_VALUE;
    inclusiveStart = true;
    inclusiveEnd = true;
    mapOnly = (view != null && view.getReduce() == null);
    indexUpdateMode = IndexUpdateMode.BEFORE;
    allDocsMode = AllDocsMode.ALL_DOCS;
}
/**
 * Creates a query backed by a temporary anonymous view built from the given
 * map function. The query is flagged as using a temporary view.
 */
@InterfaceAudience.Private
/* package */ Query(Database database, Mapper mapFunction) {
    this(database, database.makeAnonymousView());
    temporaryView = true;
    inclusiveStart = true;
    inclusiveEnd = true;
    view.setMap(mapFunction, "");
}
/**
 * Copy constructor: creates a new query over the same view with every paging,
 * key-range, result-shaping and mode setting duplicated from {@code query}.
 */
@InterfaceAudience.Private
/* package */ Query(Database database, Query query) {
    this(database, query.getView());
    // paging
    skip = query.skip;
    limit = query.limit;
    // key range
    startKey = query.startKey;
    endKey = query.endKey;
    startKeyDocId = query.startKeyDocId;
    endKeyDocId = query.endKeyDocId;
    inclusiveStart = query.inclusiveStart;
    inclusiveEnd = query.inclusiveEnd;
    // result shaping
    descending = query.descending;
    prefetch = query.prefetch;
    keys = query.keys;
    groupLevel = query.groupLevel;
    prefixMatchLevel = query.prefixMatchLevel;
    mapOnly = query.mapOnly;
    postFilter = query.postFilter;
    // modes
    indexUpdateMode = query.indexUpdateMode;
    allDocsMode = query.allDocsMode;
}
/**
 * The database this query is associated with
 */
@InterfaceAudience.Public
public Database getDatabase() {
    return database;
}

/** @return the maximum number of rows to return (default: unlimited) */
@InterfaceAudience.Public
public int getLimit() {
    return limit;
}

/** Sets the maximum number of rows to return. */
@InterfaceAudience.Public
public void setLimit(int limit) {
    this.limit = limit;
}

/** @return the number of initial rows to skip (default: 0) */
@InterfaceAudience.Public
public int getSkip() {
    return skip;
}

/** Sets the number of initial rows to skip; for efficient paging prefer startKey + limit. */
@InterfaceAudience.Public
public void setSkip(int skip) {
    this.skip = skip;
}

/** @return true if rows are returned in descending key order */
@InterfaceAudience.Public
public boolean isDescending() {
    return descending;
}

/** Sets whether rows are returned in descending key order. */
@InterfaceAudience.Public
public void setDescending(boolean descending) {
    this.descending = descending;
}

/** @return the key value to start at, or null */
@InterfaceAudience.Public
public Object getStartKey() {
    return startKey;
}

/** Sets the key value to start at. */
@InterfaceAudience.Public
public void setStartKey(Object startKey) {
    this.startKey = startKey;
}

/** @return the key value to end after, or null */
@InterfaceAudience.Public
public Object getEndKey() {
    return endKey;
}

/** Sets the key value to end after. */
@InterfaceAudience.Public
public void setEndKey(Object endKey) {
    this.endKey = endKey;
}

/** @return the document ID to start at (disambiguates identical startKeys), or null */
@InterfaceAudience.Public
public String getStartKeyDocId() {
    return startKeyDocId;
}

/** Sets the document ID to start at. */
@InterfaceAudience.Public
public void setStartKeyDocId(String startKeyDocId) {
    this.startKeyDocId = startKeyDocId;
}

/** @return the document ID to end at (disambiguates identical endKeys), or null */
@InterfaceAudience.Public
public String getEndKeyDocId() {
    return endKeyDocId;
}

/** Sets the document ID to end at. */
@InterfaceAudience.Public
public void setEndKeyDocId(String endKeyDocId) {
    this.endKeyDocId = endKeyDocId;
}

/** @return when/whether the view index is updated for this query */
@InterfaceAudience.Public
public IndexUpdateMode getIndexUpdateMode() {
    return indexUpdateMode;
}

/** Sets when/whether the view index is updated for this query. */
@InterfaceAudience.Public
public void setIndexUpdateMode(IndexUpdateMode indexUpdateMode) {
    this.indexUpdateMode = indexUpdateMode;
}

/** @return the all-documents query mode */
@InterfaceAudience.Public
public AllDocsMode getAllDocsMode() {
    return allDocsMode;
}

/** Sets the all-documents query mode. */
@InterfaceAudience.Public
public void setAllDocsMode(AllDocsMode allDocsMode) {
    this.allDocsMode = allDocsMode;
}

/** @return the explicit set of keys to fetch, or null to fetch by range */
@InterfaceAudience.Public
public List<Object> getKeys() {
    return keys;
}

/** Restricts the query to rows with the given keys. */
@InterfaceAudience.Public
public void setKeys(List<Object> keys) {
    this.keys = keys;
}

/** @return true if the view's reduce function is disabled for this query */
@InterfaceAudience.Public
public boolean isMapOnly() {
    return mapOnly;
}

/** Enables/disables use of the view's reduce function. */
@InterfaceAudience.Public
public void setMapOnly(boolean mapOnly) {
    this.mapOnly = mapOnly;
}

/** @return the grouping level for reduced results (0 = no grouping) */
@InterfaceAudience.Public
public int getGroupLevel() {
    return groupLevel;
}

/** Sets the grouping level for reduced results. */
@InterfaceAudience.Public
public void setGroupLevel(int groupLevel) {
    this.groupLevel = groupLevel;
}

/** @return the prefix-match level for string/array keys (0 = exact match) */
@InterfaceAudience.Public
public int getPrefixMatchLevel() {
    return prefixMatchLevel;
}

/** Sets the prefix-match level for string/array keys. */
@InterfaceAudience.Public
public void setPrefixMatchLevel(int prefixMatchLevel) {
    this.prefixMatchLevel = prefixMatchLevel;
}

/** @return true if the startKey comparison uses ">=" rather than ">" */
@InterfaceAudience.Public
public boolean isInclusiveStart() {
    return inclusiveStart;
}

/** Sets whether the startKey comparison is inclusive. */
@InterfaceAudience.Public
public void setInclusiveStart(boolean inclusiveStart) {
    this.inclusiveStart = inclusiveStart;
}

/** @return true if the endKey comparison uses "<=" rather than "<" */
@InterfaceAudience.Public
public boolean isInclusiveEnd() {
    return inclusiveEnd;
}

/** Sets whether the endKey comparison is inclusive. */
@InterfaceAudience.Public
public void setInclusiveEnd(boolean inclusiveEnd) {
    this.inclusiveEnd = inclusiveEnd;
}
/** @return the (wrapped) post-filter predicate, or null if none was set */
@InterfaceAudience.Public
public Predicate<QueryRow> getPostFilter() {
    return postFilter;
}

/**
 * Sets a predicate that filters the resulting query rows. The supplied
 * predicate is wrapped so each row has its database reference populated
 * before the predicate is applied.
 */
@InterfaceAudience.Public
public void setPostFilter(final Predicate<QueryRow> pf) {
    this.postFilter = new Predicate<QueryRow>() {
        @Override
        public boolean apply(QueryRow type) {
            type.setDatabase(database);
            return pf.apply(type);
        }
    };
}
/** @return true if rows should include the full document contents */
@InterfaceAudience.Public
public boolean shouldPrefetch() {
    return prefetch;
}

/** Sets whether rows should include the full document contents. */
@InterfaceAudience.Public
public void setPrefetch(boolean prefetch) {
    this.prefetch = prefetch;
}

/** @return true when the all-docs mode is set to also return deleted documents */
@InterfaceAudience.Public
public boolean shouldIncludeDeleted() {
    return allDocsMode == AllDocsMode.INCLUDE_DELETED;
}

/** Toggles between INCLUDE_DELETED and the default ALL_DOCS mode. */
@InterfaceAudience.Public
public void setIncludeDeleted(boolean includeDeletedParam) {
    allDocsMode = includeDeletedParam ? AllDocsMode.INCLUDE_DELETED : AllDocsMode.ALL_DOCS;
}
/**
 * Sends the query to the database and returns an enumerator over the
 * result rows (synchronous).
 *
 * @return an enumerator over the matching rows
 * @throws CouchbaseLiteException if the query fails
 */
@InterfaceAudience.Public
public QueryEnumerator run() throws CouchbaseLiteException {
    // Out-parameter: the database reports the sequence number the index
    // was current as of (single element).
    List<Long> outSequence = new ArrayList<Long>();
    // A null view name presumably selects an all-docs query — TODO confirm
    // against Database.queryViewNamed.
    String viewName = (view != null) ? view.getName() : null;
    List<QueryRow> rows = database.queryViewNamed(viewName, getQueryOptions(), outSequence);
    lastSequence = outSequence.get(0);
    return new QueryEnumerator(database, rows, lastSequence);
}
/**
 * Returns a live query with the same parameters as this query.
 *
 * @return a new {@link LiveQuery} wrapping this query
 */
@InterfaceAudience.Public
public LiveQuery toLiveQuery() {
    return new LiveQuery(this);
}
/**
 * Starts an asynchronous query. Returns immediately, then invokes the
 * listener when the query completes. On failure the listener receives a
 * null enumerator and the thrown error; this query's own state is not
 * changed.
 *
 * @param onComplete listener invoked with the results or the error
 * @return a future for the background task
 */
@InterfaceAudience.Public
public Future runAsync(final QueryCompleteListener onComplete) {
    return runAsyncInternal(onComplete);
}
/**
 * A delegate that can be called to signal the completion of a Query.
 * Exactly one of the two arguments is non-null: {@code rows} on success,
 * {@code error} on failure (see runAsyncInternal).
 */
@InterfaceAudience.Public
public interface QueryCompleteListener {
    void completed(QueryEnumerator rows, Throwable error);
}
/**
 * Runs the query on the manager's background executor and reports the
 * outcome through the listener. Any throwable — including the
 * IllegalStateException for a closed database — is caught, logged and
 * delivered to the listener rather than propagated.
 *
 * @exclude
 */
@InterfaceAudience.Private
Future runAsyncInternal(final QueryCompleteListener onComplete) {
    return database.getManager().runAsync(new Runnable() {
        @Override
        public void run() {
            try {
                if (!getDatabase().isOpen()) {
                    throw new IllegalStateException("The database has been closed.");
                }
                // Same query path as the synchronous run(), but without
                // updating this Query's lastSequence field.
                String viewName = (view != null) ? view.getName() : null;
                QueryOptions options = getQueryOptions();
                List<Long> outSequence = new ArrayList<Long>();
                List<QueryRow> rows = database.queryViewNamed(viewName, options, outSequence);
                long sequenceNumber = outSequence.get(0);
                QueryEnumerator enumerator = new QueryEnumerator(database, rows, sequenceNumber);
                onComplete.completed(enumerator, null);
            } catch (Throwable t) {
                Log.e(Log.TAG_QUERY, "Exception caught in runAsyncInternal", t);
                onComplete.completed(null, t);
            }
        }
    });
}
/**
 * Returns the view backing this query, or null for an all-docs query.
 *
 * @exclude
 */
@InterfaceAudience.Private
public View getView() {
    return view;
}
/**
 * Builds the low-level {@link QueryOptions} mirroring this query's current
 * settings; shared by run() and runAsyncInternal().
 */
@InterfaceAudience.Private
private QueryOptions getQueryOptions() {
    QueryOptions queryOptions = new QueryOptions();
    // The original code set the start key twice; the redundant call has
    // been removed.
    queryOptions.setStartKey(getStartKey());
    queryOptions.setEndKey(getEndKey());
    queryOptions.setKeys(getKeys());
    queryOptions.setSkip(getSkip());
    queryOptions.setLimit(getLimit());
    // A map-only query suppresses the reduce stage.
    queryOptions.setReduce(!isMapOnly());
    queryOptions.setReduceSpecified(true);
    queryOptions.setGroupLevel(getGroupLevel());
    queryOptions.setPrefixMatchLevel(getPrefixMatchLevel());
    queryOptions.setDescending(isDescending());
    queryOptions.setIncludeDocs(shouldPrefetch());
    queryOptions.setUpdateSeq(true);
    // Use the accessors for consistency with every other option above.
    queryOptions.setInclusiveStart(isInclusiveStart());
    queryOptions.setInclusiveEnd(isInclusiveEnd());
    queryOptions.setStale(getIndexUpdateMode());
    queryOptions.setAllDocsMode(getAllDocsMode());
    queryOptions.setStartKeyDocId(getStartKeyDocId());
    queryOptions.setEndKeyDocId(getEndKeyDocId());
    queryOptions.setPostFilter(getPostFilter());
    return queryOptions;
}
// NOTE(review): cleanup of a temporary view via finalize() only runs when
// (and if) the GC collects this Query; finalize() is deprecated in modern
// Java — consider an explicit close/free method instead.
@Override
@InterfaceAudience.Private
protected void finalize() throws Throwable {
    super.finalize();
    if (temporaryView) {
        // Temporary views are created on behalf of this query only, so
        // delete them when the query is collected.
        view.delete();
    }
}
}
| |
package za.org.grassroot.webapp.model.rest;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.google.common.collect.Sets;
import lombok.extern.slf4j.Slf4j;
import za.org.grassroot.core.domain.Notification;
import za.org.grassroot.core.domain.notification.EventNotification;
import za.org.grassroot.core.domain.notification.TodoNotification;
import za.org.grassroot.core.domain.task.Event;
import za.org.grassroot.core.domain.task.Task;
import za.org.grassroot.core.domain.task.Todo;
import za.org.grassroot.core.enums.EventLogType;
import za.org.grassroot.core.enums.NotificationDetailedType;
import za.org.grassroot.core.enums.TaskType;
import za.org.grassroot.core.util.DateTimeUtil;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * REST DTO exposing a {@link Notification} (event- or todo-based) to the
 * Android client: flattens the notification, its task and its ancestor
 * group into a single JSON-friendly object. Only event and todo
 * notifications of the whitelisted detailed types are supported.
 * <p>
 * Created by paballo on 2016/04/13.
 */
@Slf4j
@JsonInclude(JsonInclude.Include.NON_NULL)
public class NotificationDTO {
    private final String uid; // i.e., uid of the notification itself
    private final String notificationType;
    private final boolean delivered;
    private final boolean read;
    private final boolean viewedAndroid;
    private final String groupUid;
    private final String title;
    private final String imageUrl;
    private final String defaultImage;
    private String entityUid; // i.e., uid of the event/etc that the logbook was attached to
    private String message;
    private String createdDatetime;
    private String deadlineDateTime;
    private String entityType;
    private String changeType;

    // should start including join request notifications & rsvp totals at some point
    private static final Set<NotificationDetailedType> notificationsForAndroidList =
            Collections.unmodifiableSet(Sets.newHashSet(
                    NotificationDetailedType.TODO_INFO,
                    NotificationDetailedType.TODO_REMINDER,
                    NotificationDetailedType.EVENT_INFO,
                    NotificationDetailedType.EVENT_CHANGED,
                    NotificationDetailedType.EVENT_CANCELLED,
                    NotificationDetailedType.EVENT_REMINDER,
                    NotificationDetailedType.VOTE_RESULTS,
                    NotificationDetailedType.MEETING_RSVP_TOTALS));

    // Matches a trailing ". Dial *134*1994# ..." style suffix so it can be
    // stripped from the client-facing message.
    private final static Pattern dialMatcher = Pattern.compile("([\\.,]\\s[Dd].+\\*134\\*1994#.+)");

    /** Whether this notification's detailed type is whitelisted for the Android client. */
    public static boolean isNotificationOfTypeForDTO(Notification notification) {
        return notificationsForAndroidList.contains(notification.getNotificationDetailedType());
    }

    /**
     * Converts a notification into a DTO.
     *
     * @throws IllegalArgumentException for notification types other than
     *                                  event or todo notifications
     */
    public static NotificationDTO convertToDto(Notification notification) {
        if (notification instanceof EventNotification) {
            Event event = ((EventNotification) notification).getEvent();
            return new NotificationDTO(notification, event);
        } else if (notification instanceof TodoNotification) {
            Todo todo = ((TodoNotification) notification).getTodo();
            return new NotificationDTO(notification, todo);
        } else {
            throw new IllegalArgumentException("Error! Notification DTO called on unsupported notification type");
        }
    }

    /** Common fields shared by the event and todo variants. */
    private NotificationDTO(Notification notification, Task task) {
        this.uid = notification.getUid();
        this.createdDatetime = convertInstantToStringISO(notification.getCreatedDateTime());
        this.delivered = notification.isDelivered();
        this.read = notification.isRead();
        this.viewedAndroid = notification.isViewedOnAndroid();
        this.notificationType = notification.getNotificationDetailedType().toString();
        this.title = task.getAncestorGroup().getGroupName();
        this.groupUid = task.getAncestorGroup().getUid();
        this.imageUrl = task.getAncestorGroup().getImageUrl();
        this.defaultImage = task.getAncestorGroup().getDefaultImage().toString();
    }

    private NotificationDTO(Notification notification, Event event) {
        this(notification, (Task) event);
        this.entityUid = event.getUid();
        this.deadlineDateTime = convertInstantToStringISO(event.getDeadlineTime());
        this.message = stripDialSuffix(stripTitleFromMessage(title, notification.getMessage()));
        this.entityType = event.getEventType().toString();
        // Notifications created without an event log are treated as "created".
        this.changeType = notification.getEventLog() == null ? EventLogType.CREATED.toString() :
                notification.getEventLog().getEventLogType().toString();
    }

    private NotificationDTO(Notification notification, Todo todo) {
        this(notification, (Task) todo);
        this.entityUid = todo.getUid();
        this.deadlineDateTime = convertInstantToStringISO(todo.getDeadlineTime());
        // Fall back to the todo log's message when the notification has none.
        final String originalMessage = (notification.getMessage() != null) ? notification.getMessage() : notification.getTodoLog().getMessage();
        this.message = stripDialSuffix(stripTitleFromMessage(title, originalMessage));
        this.entityType = TaskType.TODO.toString();
        this.changeType = notification.getTodoLog().getType().toString();
    }

    /** Formats an instant as ISO date-time in the SAST user time zone. */
    private String convertInstantToStringISO(Instant instant) {
        return DateTimeUtil.convertToUserTimeZone(instant, DateTimeUtil.getSAST()).format(DateTimeFormatter.ISO_DATE_TIME);
    }

    /**
     * Removes a leading "GroupName: " prefix from a message, if present.
     * The title is regex-quoted so group names containing metacharacters
     * (e.g. "(", "+", "*") cannot break the pattern — this was a latent
     * bug in the original, which interpolated the raw title.
     */
    private String stripTitleFromMessage(final String title, final String message) {
        if (message == null || title == null || !message.contains(title)) {
            return message;
        }
        final Pattern groupNamePattern = Pattern.compile("^" + Pattern.quote(title) + "\\s?:\\s+?");
        final Matcher m = groupNamePattern.matcher(message);
        if (m.find()) {
            return message.substring(m.end());
        } else {
            return message;
        }
    }

    /** Removes a trailing "dial *134*1994#" call-to-action suffix, if present. */
    private String stripDialSuffix(final String message) {
        if (message == null) {
            return null;
        }
        final Matcher m = dialMatcher.matcher(message);
        if (m.find()) {
            return message.substring(0, m.start());
        } else {
            return message;
        }
    }

    public String getEntityType() {
        return entityType;
    }

    public String getUid() {
        return uid;
    }

    public String getEntityUid() {
        return entityUid;
    }

    public String getTitle() {
        return title;
    }

    public String getMessage() {
        return message;
    }

    public String getCreatedDatetime() {
        return createdDatetime;
    }

    public void setCreatedDatetime(String createdDatetime) {
        this.createdDatetime = createdDatetime;
    }

    public void setDeadlineDateTime(String deadlineDateTime) {
        this.deadlineDateTime = deadlineDateTime;
    }

    public String getNotificationType() {
        return notificationType;
    }

    public boolean isDelivered() {
        return delivered;
    }

    public boolean isRead() {
        return read;
    }

    public boolean isViewedAndroid() { return viewedAndroid; }

    public String getGroupUid() {
        return groupUid;
    }

    public String getDeadlineDateTime() {
        return deadlineDateTime;
    }

    public String getChangeType() { return changeType; }

    public String getImageUrl() {
        return imageUrl;
    }

    public String getDefaultImage() {
        return defaultImage;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.rest.controller;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
import org.apache.metron.common.configuration.SensorParserConfig;
import org.apache.metron.integration.utils.TestUtils;
import org.apache.metron.rest.MetronRestConstants;
import org.apache.metron.rest.model.TopologyStatusCode;
import org.apache.metron.rest.service.GlobalConfigService;
import org.apache.metron.rest.service.SensorParserConfigService;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.env.Environment;
import org.springframework.http.MediaType;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.ResultActions;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import java.util.HashMap;
import java.util.Map;
import static org.apache.metron.rest.MetronRestConstants.TEST_PROFILE;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic;
import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
 * Integration tests for the Storm REST endpoints: first verifies that every
 * endpoint rejects unauthenticated requests, then exercises the full
 * parser / enrichment / indexing topology lifecycle against a running
 * Storm cluster (start, activate, deactivate, stop, status queries).
 */
@ExtendWith(SpringExtension.class)
@SpringBootTest(webEnvironment= SpringBootTest.WebEnvironment.RANDOM_PORT)
@ActiveProfiles(TEST_PROFILE)
public class StormControllerIntegrationTest {

    @Autowired
    private Environment environment;

    @Autowired
    private WebApplicationContext wac;

    @Autowired
    private GlobalConfigService globalConfigService;

    @Autowired
    private SensorParserConfigService sensorParserConfigService;

    private MockMvc mockMvc;

    private String stormUrl = "/api/v1/storm";
    private String user = "user";
    private String password = "password";
    private String metronVersion;

    @BeforeEach
    public void setup() {
        this.metronVersion = this.environment.getProperty("metron.version");
        this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).apply(springSecurity()).build();
    }

    /** Every storm endpoint must respond 401 without credentials. */
    @Test
    public void testSecurity() throws Exception {
        this.mockMvc.perform(get(stormUrl))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/supervisors"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/broTest"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/parser/start/broTest"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/parser/stop/broTest"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/parser/activate/broTest"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/parser/deactivate/broTest"))
                .andExpect(status().isUnauthorized());

        // BUGFIX: was get("/enrichment") — missing the stormUrl prefix, so
        // the intended endpoint was never exercised (the assertion passed
        // only because every unknown URL is also unauthorized).
        this.mockMvc.perform(get(stormUrl + "/enrichment"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/enrichment/start"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/enrichment/stop"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/enrichment/activate"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/enrichment/deactivate"))
                .andExpect(status().isUnauthorized());

        // BUGFIX: was get("/indexing") — same missing prefix as above.
        this.mockMvc.perform(get(stormUrl + "/indexing"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/indexing/start"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/indexing/stop"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/indexing/activate"))
                .andExpect(status().isUnauthorized());

        this.mockMvc.perform(get(stormUrl + "/indexing/deactivate"))
                .andExpect(status().isUnauthorized());
    }

    /** End-to-end lifecycle test against a live Storm instance. */
    @Test
    public void test() throws Exception {
        this.mockMvc.perform(get(stormUrl).with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", hasSize(0)));

        this.mockMvc.perform(get(stormUrl + "/broTest").with(httpBasic(user,password)))
                .andExpect(status().isNotFound());

        // Snapshot the global config so it can be restored mid-test, then
        // clear all config to exercise the error paths.
        Map<String, Object> globalConfig = globalConfigService.get();
        if (globalConfig == null) {
            globalConfig = new HashMap<>();
        }
        globalConfigService.delete();
        sensorParserConfigService.delete("broTest");

        // With no topology running, lifecycle operations must report errors.
        this.mockMvc.perform(get(stormUrl + "/parser/stop/broTest?stopNow=true").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("ERROR"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.STOP_ERROR.toString()));

        this.mockMvc.perform(get(stormUrl + "/parser/activate/broTest").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("ERROR"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.TOPOLOGY_NOT_FOUND.name()));

        this.mockMvc.perform(get(stormUrl + "/parser/deactivate/broTest").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("ERROR"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.TOPOLOGY_NOT_FOUND.name()));

        this.mockMvc.perform(get(stormUrl + "/parser/start/broTest").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("ERROR"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.GLOBAL_CONFIG_MISSING.name()));

        globalConfigService.save(globalConfig);
        {
            final Map<String, Object> expectedGlobalConfig = globalConfig;

            //we must wait for the config to find its way into the config.
            TestUtils.assertEventually(() -> assertEquals(expectedGlobalConfig, globalConfigService.get()));
        }

        this.mockMvc.perform(get(stormUrl + "/parser/start/broTest").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("ERROR"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.SENSOR_PARSER_CONFIG_MISSING.name()));

        SensorParserConfig sensorParserConfig = new SensorParserConfig();
        sensorParserConfig.setParserClassName("org.apache.metron.parsers.bro.BasicBroParser");
        sensorParserConfig.setSensorTopic("broTest");
        sensorParserConfigService.save("broTest", sensorParserConfig);
        {
            final SensorParserConfig expectedSensorParserConfig = sensorParserConfig;

            //we must wait for the config to find its way into the config.
            TestUtils.assertEventually(() -> assertEquals(expectedSensorParserConfig, sensorParserConfigService.findOne("broTest")));
        }

        // Now the parser topology should start successfully.
        this.mockMvc.perform(get(stormUrl + "/parser/start/broTest").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("SUCCESS"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.STARTED.name()));

        this.mockMvc.perform(get(stormUrl + "/supervisors").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
                .andExpect(jsonPath("$.supervisors[0]").exists())
                .andExpect(jsonPath("$.supervisors[0].id").exists())
                .andExpect(jsonPath("$.supervisors[0].host").exists())
                .andExpect(jsonPath("$.supervisors[0].uptime").exists())
                .andExpect(jsonPath("$.supervisors[0].slotsTotal").exists())
                .andExpect(jsonPath("$.supervisors[0].slotsUsed").exists());

        this.mockMvc.perform(get(stormUrl + "/broTest").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
                .andExpect(jsonPath("$.name").value("broTest"))
                .andExpect(jsonPath("$.id", containsString("broTest")))
                .andExpect(jsonPath("$.status").value("ACTIVE"))
                .andExpect(jsonPath("$.latency").exists())
                .andExpect(jsonPath("$.throughput").exists())
                .andExpect(jsonPath("$.emitted").exists())
                .andExpect(jsonPath("$.acked").exists());

        this.mockMvc.perform(get(stormUrl).with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
                .andExpect(jsonPath("$[?(@.name == 'broTest' && @.status == 'ACTIVE')]").exists());

        this.mockMvc.perform(get(stormUrl + "/parser/stop/broTest?stopNow=true").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("SUCCESS"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.STOPPED.name()));

        // Enrichment topology lifecycle.
        this.mockMvc.perform(get(stormUrl + "/enrichment").with(httpBasic(user,password)))
                .andExpect(status().isNotFound());

        this.mockMvc.perform(get(stormUrl + "/enrichment/activate").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("ERROR"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.TOPOLOGY_NOT_FOUND.name()));

        this.mockMvc.perform(get(stormUrl + "/enrichment/deactivate").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("ERROR"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.TOPOLOGY_NOT_FOUND.name()));

        this.mockMvc.perform(get(stormUrl + "/enrichment/stop?stopNow=true").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("ERROR"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.STOP_ERROR.toString()));

        this.mockMvc.perform(get(stormUrl + "/enrichment/start").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("SUCCESS"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.STARTED.toString()));

        this.mockMvc.perform(get(stormUrl + "/enrichment/deactivate").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("SUCCESS"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.INACTIVE.name()));

        // Deactivating an already-inactive topology is idempotent.
        this.mockMvc.perform(get(stormUrl + "/enrichment/deactivate").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("SUCCESS"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.INACTIVE.name()));

        this.mockMvc.perform(get(stormUrl + "/enrichment/activate").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("SUCCESS"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.ACTIVE.name()));

        this.mockMvc.perform(get(stormUrl + "/enrichment").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
                .andExpect(jsonPath("$.name").value("enrichment"))
                .andExpect(jsonPath("$.id", containsString("enrichment")))
                .andExpect(jsonPath("$.status").value("ACTIVE"))
                .andExpect(jsonPath("$.latency").exists())
                .andExpect(jsonPath("$.throughput").exists())
                .andExpect(jsonPath("$.emitted").exists())
                .andExpect(jsonPath("$.acked").exists());

        this.mockMvc.perform(get(stormUrl).with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
                .andExpect(jsonPath("$[?(@.name == 'enrichment' && @.status == 'ACTIVE')]").exists());

        this.mockMvc.perform(get(stormUrl + "/enrichment/stop").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.status").value("SUCCESS"))
                .andExpect(jsonPath("$.message").value(TopologyStatusCode.STOPPED.name()));

        // Same lifecycle for both indexing topologies.
        for(String type : ImmutableList.of("randomaccess", "batch")) {
            this.mockMvc.perform(get(stormUrl + "/indexing/" + type).with(httpBasic(user,password)))
                    .andExpect(status().isNotFound());

            this.mockMvc.perform(get(stormUrl + "/indexing/" + type + "/activate").with(httpBasic(user, password)))
                    .andExpect(status().isOk())
                    .andExpect(jsonPath("$.status").value("ERROR"))
                    .andExpect(jsonPath("$.message").value(TopologyStatusCode.TOPOLOGY_NOT_FOUND.name()));

            this.mockMvc.perform(get(stormUrl + "/indexing/" + type + "/deactivate").with(httpBasic(user, password)))
                    .andExpect(status().isOk())
                    .andExpect(jsonPath("$.status").value("ERROR"))
                    .andExpect(jsonPath("$.message").value(TopologyStatusCode.TOPOLOGY_NOT_FOUND.name()));

            this.mockMvc.perform(get(stormUrl + "/indexing/" + type + "/stop?stopNow=true").with(httpBasic(user, password)))
                    .andExpect(status().isOk())
                    .andExpect(jsonPath("$.status").value("ERROR"))
                    .andExpect(jsonPath("$.message").value(TopologyStatusCode.STOP_ERROR.toString()));

            this.mockMvc.perform(get(stormUrl + "/indexing/" + type + "/start").with(httpBasic(user, password)))
                    .andExpect(status().isOk())
                    .andExpect(jsonPath("$.status").value("SUCCESS"))
                    .andExpect(jsonPath("$.message").value(TopologyStatusCode.STARTED.toString()));

            ResultActions actions = this.mockMvc.perform(get(stormUrl + "/indexing/" + type + "/deactivate").with(httpBasic(user, password)));
            actions.andExpect(status().isOk())
                    .andExpect(jsonPath("$.status").value("SUCCESS"))
                    .andExpect(jsonPath("$.message").value(TopologyStatusCode.INACTIVE.name()));

            this.mockMvc.perform(get(stormUrl + "/indexing/" + type + "/activate").with(httpBasic(user, password)))
                    .andExpect(status().isOk())
                    .andExpect(jsonPath("$.status").value("SUCCESS"))
                    .andExpect(jsonPath("$.message").value(TopologyStatusCode.ACTIVE.name()));

            String topologyName = type.equals("randomaccess")? MetronRestConstants.RANDOM_ACCESS_INDEXING_TOPOLOGY_NAME:MetronRestConstants.BATCH_INDEXING_TOPOLOGY_NAME;
            this.mockMvc.perform(get(stormUrl + "/indexing/" + type).with(httpBasic(user, password)))
                    .andExpect(status().isOk())
                    .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
                    .andExpect(jsonPath("$.name").value(topologyName))
                    .andExpect(jsonPath("$.id", containsString("indexing")))
                    .andExpect(jsonPath("$.status").value("ACTIVE"))
                    .andExpect(jsonPath("$.latency").exists())
                    .andExpect(jsonPath("$.throughput").exists())
                    .andExpect(jsonPath("$.emitted").exists())
                    .andExpect(jsonPath("$.acked").exists());

            this.mockMvc.perform(get(stormUrl).with(httpBasic(user,password)))
                    .andExpect(status().isOk())
                    .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
                    .andExpect(jsonPath("$[?(@.name == '" + topologyName + "' && @.status == 'ACTIVE')]").exists());

            this.mockMvc.perform(get(stormUrl + "/indexing/" + type + "/stop").with(httpBasic(user,password)))
                    .andExpect(status().isOk())
                    .andExpect(jsonPath("$.status").value("SUCCESS"))
                    .andExpect(jsonPath("$.message").value(TopologyStatusCode.STOPPED.name()));
        }

        this.mockMvc.perform(get(stormUrl + "/client/status").with(httpBasic(user,password)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.stormClientVersionInstalled").value("1.0.1"))
                .andExpect(jsonPath("$.parserScriptPath").value("/usr/metron/" + metronVersion + "/bin/start_parser_topology.sh"))
                .andExpect(jsonPath("$.enrichmentScriptPath").value("/usr/metron/" + metronVersion + "/bin/start_enrichment_topology.sh"))
                .andExpect(jsonPath("$.randomAccessIndexingScriptPath").value("/usr/metron/" + metronVersion + "/bin/start_elasticsearch_topology.sh"))
                .andExpect(jsonPath("$.batchIndexingScriptPath").value("/usr/metron/" + metronVersion + "/bin/start_hdfs_topology.sh"));

        // Clean up the configs this test created.
        globalConfigService.delete();
        sensorParserConfigService.delete("broTest");
    }
}
| |
package org.usfirst.frc.team910.robot;
import com.kauailabs.navx.frc.AHRS;
import edu.wpi.first.wpilibj.CANTalon;
import edu.wpi.first.wpilibj.Encoder;
import edu.wpi.first.wpilibj.Talon;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
public class DriveTrain {
AHRS navX; // navX IMU; supplies yaw for compass drive
Talon lmTalon; // left drive motor (non-test robot)
Talon rmTalon; // right drive motor (non-test robot)
CANTalon LFmCANTalon; // left-front (test chassis)
CANTalon LBmCANTalon; // left-back (test chassis)
CANTalon RFmCANTalon; // right-front (test chassis)
CANTalon RBmCANTalon; // right-back (test chassis)
Encoder lEncoder; // left drive encoder
Encoder rEncoder; // right drive encoder
double MAX_RAMP_RATE = 0.03; // max power change per call; "one second 0 to 1 ramp" per original author
/**
 * Wires up the drive motors and encoders.
 *
 * Robot.TEST selects the test chassis (four CAN-bus Talons, hard-coded
 * IDs) versus the competition robot (two PWM Talons via the IO map).
 * The distance-per-pulse constants are per-chassis calibrations —
 * presumably inches per encoder pulse; TODO confirm units.
 *
 * @param x the shared navX IMU instance
 */
public DriveTrain(AHRS x) {
    if (Robot.TEST) {
        LFmCANTalon = new CANTalon(2);
        LBmCANTalon = new CANTalon(3);
        RFmCANTalon = new CANTalon(0);
        RBmCANTalon = new CANTalon(1);
        lEncoder = new Encoder(IO.LEFT_DRIVE_A_ENCODER, IO.LEFT_DRIVE_B_ENCODER, false);
        rEncoder = new Encoder(IO.RIGHT_DRIVE_A_ENCODER, IO.RIGHT_DRIVE_B_ENCODER, false);
        // Note the right-side scale is negative on the test chassis.
        lEncoder.setDistancePerPulse(120.0 / 2244.0);
        rEncoder.setDistancePerPulse(-120.0 / 1571.0);
    } else {
        lmTalon = new Talon(IO.LEFT_DRIVE_MOTOR);
        rmTalon = new Talon(IO.RIGHT_DRIVE_MOTOR);
        lEncoder = new Encoder(IO.LEFT_DRIVE_A_ENCODER, IO.LEFT_DRIVE_B_ENCODER, false);
        rEncoder = new Encoder(IO.RIGHT_DRIVE_A_ENCODER, IO.RIGHT_DRIVE_B_ENCODER, false);
        lEncoder.setDistancePerPulse(120.0 / 3600.0);
        rEncoder.setDistancePerPulse(120.0 / 3600.0);
    }
    navX = x;
}
// Last commanded powers; read by run()'s ramp-rate limiter.
double prevL = 0;
double prevR = 0;
double prevT = 0;

/**
 * Drives the left and right sides at the given powers.
 * Inputs are clamped to [-1, 1]; the clamped values are remembered in
 * prevL/prevR for the ramp limiter in run().
 *
 * @param YAxisLeft  left-side power in [-1, 1]
 * @param YAxisRight right-side power in [-1, 1]
 */
public void tankDrive(double YAxisLeft, double YAxisRight) {
    // Clamp to the valid motor range. Equivalent to the original
    // v / |v| normalization (including NaN pass-through) but idiomatic.
    YAxisLeft = Math.max(-1.0, Math.min(1.0, YAxisLeft));
    YAxisRight = Math.max(-1.0, Math.min(1.0, YAxisRight));
    prevL = YAxisLeft;
    prevR = YAxisRight;
    if (Robot.TEST) {
        // Left side is sign-inverted on the test chassis.
        LFmCANTalon.set(-YAxisLeft);
        LBmCANTalon.set(-YAxisLeft);
        RFmCANTalon.set(YAxisRight);
        RBmCANTalon.set(YAxisRight);
    } else {
        // Right side is sign-inverted on the competition robot.
        lmTalon.set(YAxisLeft);
        rmTalon.set(-YAxisRight);
    }
}
// Dynamic braking: when the trigger is first pressed, capture the current
// encoder readings; while held, command power proportional to how far the
// wheels have drifted from those captured positions (a simple P hold with
// unity gain in encoder-distance units).
double startEncL, startEncR;

/**
 * Holds the robot at the wheel positions captured on the first call.
 *
 * @param firstTime true on the first call of a braking episode — captures
 *                  the reference encoder distances instead of driving
 */
public void dynamicBraking(boolean firstTime) {
    if (firstTime) {
        startEncL = lEncoder.getDistance();
        startEncR = rEncoder.getDistance();
    } else {
        // Power = position error; tankDrive clamps it to [-1, 1].
        double lPwr = startEncL - lEncoder.getDistance();
        double rPwr = startEncR - rEncoder.getDistance();
        tankDrive(lPwr, rPwr);
    }
}
// Baseline encoder readings captured when straight-drive is engaged.
double intlevalue;
double intrevalue;

/**
 * Drives straight using the encoders: keeps the left/right distance
 * difference equal to the difference captured when the mode was engaged.
 *
 * @param lpower    forward power applied to both sides
 * @param firstTime true on the first call — captures the baseline readings
 *                  instead of driving
 */
public void driveStraight(double lpower, boolean firstTime) {
    if (firstTime) {
        intlevalue = lEncoder.getDistance();
        intrevalue = rEncoder.getDistance();
        return;
    }
    double baselineDiff = intlevalue - intrevalue;
    double currentDiff = lEncoder.getDistance() - rEncoder.getDistance();
    double drift = currentDiff - baselineDiff;
    // P correction; reaches full authority after ~4 units of drift —
    // fairly aggressive per the original author.
    double correction = drift * .25;
    tankDrive(lpower - correction, lpower + correction);
}
// Which assisted mode was active on the previous call; used to detect the
// first frame of each mode (so it can capture its reference state).
boolean previousDbrake = false;
boolean previousSdrive = false;
boolean previousCdrive = false;

/**
 * Ramp-rate limits a commanded power against the previous command:
 * increases in magnitude are capped at MAX_RAMP_RATE per call, decreases
 * take effect immediately. Extracted from the duplicated left/right logic
 * in run(); behavior is unchanged.
 */
private double rampLimit(double target, double prev) {
    if (target > 0) {
        return (target > prev + MAX_RAMP_RATE) ? (prev + MAX_RAMP_RATE) : target;
    }
    return (target < prev - MAX_RAMP_RATE) ? (prev - MAX_RAMP_RATE) : target;
}

/**
 * Teleop drive dispatcher: picks dynamic braking, straight drive, compass
 * drive, or plain ramp-limited tank drive based on the driver inputs.
 *
 * @param yAxisLeft    left stick power
 * @param yAxisRight   right stick power
 * @param pov          requested compass heading; -5000 apparently means
 *                     "no compass request" — TODO confirm against caller
 * @param sDrive       straight-drive button held
 * @param dBrake       dynamic-brake button held
 * @param compassDrive compass-drive button held (currently unused here)
 * @param rThrottle    raw throttle axis in [-1, 1], mapped to [0, 1]
 */
public void run(double yAxisLeft, double yAxisRight, double pov, boolean sDrive, boolean dBrake,
        boolean compassDrive, double rThrottle) {
    SmartDashboard.putNumber("Compass Power", rThrottle);
    // Ramp-rate limit both sticks against the last commanded powers.
    double driveL = rampLimit(yAxisLeft, prevL);
    double driveR = rampLimit(yAxisRight, prevR);
    if (dBrake) {
        // Dynamic Braking: hold the current encoder position.
        dynamicBraking(!previousDbrake);
        previousDbrake = true;
        previousSdrive = false;
        previousCdrive = false;
        prevT = 0;
    } else if (sDrive) {
        // Straight Drive: right stick sets power, encoders hold heading.
        driveStraight(yAxisRight, !previousSdrive);
        previousDbrake = false;
        previousSdrive = true;
        previousCdrive = false;
        prevT = 0;
    } else if (pov != -5000 && navX.isConnected()) {
        // Compass Drive: map throttle [-1,1] -> [0,1], then ramp upward
        // only (a throttle drop to 0 takes effect immediately).
        rThrottle = (-rThrottle + 1) / 2;
        double power;
        if (rThrottle > prevT + MAX_RAMP_RATE) {
            power = prevT + MAX_RAMP_RATE;
        } else {
            power = rThrottle;
        }
        compassDrive(power, navX.getYaw(), !previousCdrive, pov);
        prevT = power;
        previousCdrive = true;
        previousDbrake = false;
        previousSdrive = false;
    } else {
        // Plain tank drive with the ramp-limited stick values.
        tankDrive(driveL, driveR);
        previousDbrake = false;
        previousSdrive = false;
        previousCdrive = false;
        prevT = 0;
    }
    SmartDashboard.putNumber("L Encoder", lEncoder.getDistance());
    SmartDashboard.putNumber("R Encoder", rEncoder.getDistance());
}
// Last ramped compass-drive power.
double cmpsPrevPower = 0;

/**
 * Field-oriented "compass" drive: drives toward a fixed field heading
 * selected by the WASD/POV buttons, using the navX yaw to hold it. Drives
 * backwards (inverted power) when the opposite heading is closer.
 *
 * @param power       requested drive power
 * @param currentYAW  current yaw from the navX, degrees
 * @param firstYAW    true on the first call of a compass-drive press
 * @param targetAngle requested field heading in degrees (|900| = none)
 */
public void compassDrive(double power, double currentYAW, boolean firstYAW, double targetAngle) {
    double actualPower = 0;
    // Ramp-rate limit against the previous compass power.
    // BUGFIX: the positive branch previously ramped from prevL (the last
    // tank-drive left power) instead of cmpsPrevPower — copy-paste error.
    if (power > 0) {
        if (power > cmpsPrevPower + MAX_RAMP_RATE) {
            actualPower = cmpsPrevPower + MAX_RAMP_RATE;
        } else {
            actualPower = power;
        }
    } else {
        if (power < cmpsPrevPower - MAX_RAMP_RATE) {
            actualPower = cmpsPrevPower - MAX_RAMP_RATE;
        } else {
            actualPower = power;
        }
    }
    // BUGFIX: remember the ramped power. It was never updated before, so
    // cmpsPrevPower stayed 0 and the ramp restarted every call.
    cmpsPrevPower = actualPower;
    double diff;
    double adj;
    double inverse = 1;
    // Check whether the heading 180 degrees away is closer than the target.
    boolean closeInvert = false;
    if (Math.abs(targetAngle) != 900) {
        double targetDiff = Math.abs(currentYAW - targetAngle);
        if (targetDiff > 180) {
            targetDiff = -(targetDiff - 360);
        }
        double oppositeDiff = Math.abs(targetDiff - 180);
        closeInvert = oppositeDiff < targetDiff;
    }
    // Rear-facing targets (or a closer opposite heading) flip the target by
    // 180 degrees and invert the drive direction.
    if (targetAngle > 134 || targetAngle < -134 || closeInvert) {
        targetAngle = targetAngle + 180;
        inverse = -1;
    } else {
        inverse = 1;
    }
    if (Math.abs(actualPower) > 1)
        actualPower = actualPower / Math.abs(actualPower);
    diff = currentYAW - targetAngle;
    // Normalize the heading error into (-180, 180].
    if (Math.abs(diff) > 360) {
        if (diff > 0)
            diff = diff - 360;
        else
            diff = diff + 360;
    }
    if (diff > 180) {
        diff = -360 + diff;
    } else if (diff < -180) {
        diff = 360 + diff;
    }
    // Error larger than the power-dependent threshold: spin in place.
    double turnAngle = IO.lookup(IO.COMPASS_ANGLE, IO.POWER_AXIS, Math.abs(actualPower));
    if (diff > turnAngle) {
        tankDrive(-actualPower, actualPower);
    } else if (diff < -turnAngle) {
        tankDrive(actualPower, -actualPower);
    } else {
        // Small error: drive with a proportional heading trim.
        adj = diff * .05; // was .02; P gain for holding heading while driving
        double lnew = actualPower * inverse - adj;
        double rnew = actualPower * inverse + adj;
        // Rescale so neither side exceeds the requested power.
        double max = Math.max(Math.abs(lnew), Math.abs(rnew));
        if (max > actualPower) {
            lnew /= max;
            rnew /= max;
            lnew *= actualPower;
            rnew *= actualPower;
        }
        tankDrive(lnew, rnew);
    }
}
// Converts the (x, y) vector into its polar angle in degrees, in (-180, 180].
public double getAngle(double y, double x) {
    return Math.toDegrees(Math.atan2(y, x));
}
// Returns the SQUARED magnitude of the (x, y) vector (note: x*x + y*y, no sqrt —
// this is the hypotenuse squared, not the hypotenuse itself).
public double getR(double y, double x) {
    double sumOfSquares = (x * x) + (y * y);
    return sumOfSquares;
}
/**
 * Rotates the robot in place so the shooter lines up with the camera target.
 * Proportional controller on the heading error (gain 0.15), output clamped
 * to +/-0.35.
 *
 * @param cameraAngle target heading reported by the camera, degrees
 * @param botAngle current robot heading, degrees
 */
public void shooterAlign(double cameraAngle, double botAngle) {
    double error = cameraAngle - botAngle;
    // Fold the error into (-360, 360), then into (-180, 180] so we always
    // turn the short way around.
    if (Math.abs(error) > 360) {
        error += (error > 0) ? -360 : 360;
    }
    if (error > 180) {
        error -= 360;
    } else if (error < -180) {
        error += 360;
    }
    // P term; consider raising the gain by ~.05 if response is sluggish.
    double turnPower = error * 0.15;
    // Cap output; consider increasing if there is no movement at large angles.
    turnPower = Math.max(-0.35, Math.min(0.35, turnPower));
    tankDrive(turnPower, -turnPower);
}
/** Zeroes both drive encoders so subsequent distance reads start from 0. */
public void resetEncoders() {
    // Resets Encoders
    lEncoder.reset();
    rEncoder.reset();
}
/** Returns the robot's travel distance: the average of the two drive encoders. */
public double getDistance() {
    double left = lEncoder.getDistance();
    double right = rEncoder.getDistance();
    return (left + right) / 2;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.utils;
import java.lang.management.CompilationMXBean;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.text.DecimalFormat;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.LongAdder;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.hops.OptimizerUtils;
import org.apache.sysml.runtime.controlprogram.caching.CacheStatistics;
import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
import org.apache.sysml.runtime.instructions.Instruction;
import org.apache.sysml.runtime.instructions.InstructionUtils;
import org.apache.sysml.runtime.instructions.MRJobInstruction;
import org.apache.sysml.runtime.instructions.cp.FunctionCallCPInstruction;
import org.apache.sysml.runtime.instructions.spark.SPInstruction;
import org.apache.sysml.runtime.matrix.data.LibMatrixDNN;
/**
* This class captures all statistics.
*/
public class Statistics
{
// --- compile/execute timers: System.nanoTime() stamps set by the
// start*/stop* methods below ---
private static long compileStartTime = 0;
private static long compileEndTime = 0;
private static long execStartTime = 0;
private static long execEndTime = 0;
// number of compiled/executed MR jobs
private static final LongAdder numExecutedMRJobs = new LongAdder();
private static final LongAdder numCompiledMRJobs = new LongAdder();
// number of compiled/executed SP instructions
private static final LongAdder numExecutedSPInst = new LongAdder();
private static final LongAdder numCompiledSPInst = new LongAdder();
//JVM stats (low frequency updates)
// NOTE: these plain longs are written only via synchronized increment methods.
private static long jitCompileTime = 0; //in milli sec
private static long jvmGCTime = 0; //in milli sec
private static long jvmGCCount = 0; //count
//HOP DAG recompile stats (potentially high update frequency)
private static final LongAdder hopRecompileTime = new LongAdder(); //in nano sec
private static final LongAdder hopRecompilePred = new LongAdder(); //count
private static final LongAdder hopRecompileSB = new LongAdder(); //count
//CODEGEN
private static final LongAdder codegenCompileTime = new LongAdder(); //in nano
private static final LongAdder codegenClassCompileTime = new LongAdder(); //in nano
private static final LongAdder codegenHopCompile = new LongAdder(); //count
private static final LongAdder codegenCPlanCompile = new LongAdder(); //count
private static final LongAdder codegenClassCompile = new LongAdder(); //count
private static final LongAdder codegenEnumAll = new LongAdder(); //count
private static final LongAdder codegenEnumAllP = new LongAdder(); //count
private static final LongAdder codegenEnumEval = new LongAdder(); //count
private static final LongAdder codegenEnumEvalP = new LongAdder(); //count
private static final LongAdder codegenPlanCacheHits = new LongAdder(); //count
private static final LongAdder codegenPlanCacheTotal = new LongAdder(); //count
//Function recompile stats
private static final LongAdder funRecompileTime = new LongAdder(); //in nano sec
private static final LongAdder funRecompiles = new LongAdder(); //count
//Spark-specific stats
private static long sparkCtxCreateTime = 0;
private static final LongAdder sparkParallelize = new LongAdder();
private static final LongAdder sparkParallelizeCount = new LongAdder();
private static final LongAdder sparkCollect = new LongAdder();
private static final LongAdder sparkCollectCount = new LongAdder();
//PARFOR optimization stats (low frequency updates)
private static long parforOptTime = 0; //in milli sec
private static long parforOptCount = 0; //count
private static long parforInitTime = 0; //in milli sec
private static long parforMergeTime = 0; //in milli sec
//heavy hitter counts and times
// (guarded by the synchronized maintainCPHeavyHitters method)
private static HashMap<String,Long> _cpInstTime = new HashMap<>();
private static HashMap<String,Long> _cpInstCounts = new HashMap<>();
private static final LongAdder lTotalUIPVar = new LongAdder();
private static final LongAdder lTotalLix = new LongAdder();
private static final LongAdder lTotalLixUIP = new LongAdder();
/**
 * Returns the number of executed MR jobs. LongAdder reads are thread-safe,
 * so no synchronization is needed (consistent with getNoOfExecutedSPInst).
 */
public static long getNoOfExecutedMRJobs() {
	return numExecutedMRJobs.longValue();
}
private static LongAdder numNativeFailures = new LongAdder();
// NOTE(review): the counters and times below are public mutable statics,
// presumably updated directly by the native-BLAS call sites — confirm that
// the plain-long time fields are only written single-threaded.
public static LongAdder numNativeLibMatrixMultCalls = new LongAdder();
public static LongAdder numNativeConv2dCalls = new LongAdder();
public static LongAdder numNativeConv2dBwdDataCalls = new LongAdder();
public static LongAdder numNativeConv2dBwdFilterCalls = new LongAdder();
public static LongAdder numNativeSparseConv2dCalls = new LongAdder();
public static LongAdder numNativeSparseConv2dBwdFilterCalls = new LongAdder();
public static LongAdder numNativeSparseConv2dBwdDataCalls = new LongAdder();
public static long nativeLibMatrixMultTime = 0;
public static long nativeConv2dTime = 0;
public static long nativeConv2dBwdDataTime = 0;
public static long nativeConv2dBwdFilterTime = 0;
public static long recomputeNNZTime = 0;
public static long examSparsityTime = 0;
public static long allocateDoubleArrTime = 0;
/**
 * Records a native-BLAS failure and aborts with a RuntimeException
 * (this method never returns normally).
 */
public static void incrementNativeFailuresCounter() {
	numNativeFailures.increment();
	// This is very rare and am not sure it is possible at all. Our initial experiments never encountered this case.
	// Note: all the native calls have a fallback to Java; so if the user wants she can recompile SystemML by
	// commenting this exception and everything should work fine.
	// FIX: corrected "enviroment" -> "environment" in the user-facing message.
	throw new RuntimeException("Unexpected ERROR: OOM caused during JNI transfer. Please disable native BLAS by setting environment variable: SYSTEMML_BLAS=none");
}
// --- MR job counters ---
public static void incrementNoOfExecutedMRJobs() {
	numExecutedMRJobs.increment();
}
public static void decrementNoOfExecutedMRJobs() {
	numExecutedMRJobs.decrement();
}
public static long getNoOfCompiledMRJobs() {
	return numCompiledMRJobs.longValue();
}
public static void incrementNoOfCompiledMRJobs() {
	numCompiledMRJobs.increment();
}
// --- Spark instruction counters ---
public static long getNoOfExecutedSPInst() {
	return numExecutedSPInst.longValue();
}
public static void incrementNoOfExecutedSPInst() {
	numExecutedSPInst.increment();
}
public static void decrementNoOfExecutedSPInst() {
	numExecutedSPInst.decrement();
}
public static long getNoOfCompiledSPInst() {
	return numCompiledSPInst.longValue();
}
public static void incrementNoOfCompiledSPInst() {
	numCompiledSPInst.increment();
}
// --- ParFor update-in-place (UIP) / left-indexing (Lix) counters ---
public static long getTotalUIPVar() {
	return lTotalUIPVar.longValue();
}
public static void incrementTotalUIPVar() {
	lTotalUIPVar.increment();
}
public static long getTotalLixUIP() {
	return lTotalLixUIP.longValue();
}
public static void incrementTotalLixUIP() {
	lTotalLixUIP.increment();
}
public static long getTotalLix() {
	return lTotalLix.longValue();
}
public static void incrementTotalLix() {
	lTotalLix.increment();
}
public static void resetNoOfCompiledJobs( int count ) {
	//reset both mr/sp for multiple tests within one jvm
	numCompiledSPInst.reset();
	numCompiledMRJobs.reset();
	// seed the counter that matches the active execution backend
	if( OptimizerUtils.isSparkExecutionMode() )
		numCompiledSPInst.add(count);
	else
		numCompiledMRJobs.add(count);
}
public static void resetNoOfExecutedJobs() {
	//reset both mr/sp for multiple tests within one jvm
	numExecutedSPInst.reset();
	numExecutedMRJobs.reset();
	if( DMLScript.USE_ACCELERATOR )
		GPUStatistics.setNoOfExecutedGPUInst(0);
}
// --- JVM stats: synchronized because the backing fields are plain longs ---
public static synchronized void incrementJITCompileTime( long time ) {
	jitCompileTime += time;
}
public static synchronized void incrementJVMgcTime( long time ) {
	jvmGCTime += time;
}
public static synchronized void incrementJVMgcCount( long delta ) {
	jvmGCCount += delta;
}
// --- HOP recompile stats (LongAdder-backed, lock-free) ---
public static void incrementHOPRecompileTime( long delta ) {
	hopRecompileTime.add(delta);
}
public static void incrementHOPRecompilePred() {
	hopRecompilePred.increment();
}
public static void incrementHOPRecompilePred(long delta) {
	hopRecompilePred.add(delta);
}
public static void incrementHOPRecompileSB() {
	hopRecompileSB.increment();
}
public static void incrementHOPRecompileSB(long delta) {
	hopRecompileSB.add(delta);
}
// --- codegen stats ---
public static void incrementCodegenDAGCompile() {
	codegenHopCompile.increment();
}
public static void incrementCodegenCPlanCompile(long delta) {
	codegenCPlanCompile.add(delta);
}
public static void incrementCodegenEnumAll(long delta) {
	codegenEnumAll.add(delta);
}
public static void incrementCodegenEnumAllP(long delta) {
	codegenEnumAllP.add(delta);
}
public static void incrementCodegenEnumEval(long delta) {
	codegenEnumEval.add(delta);
}
public static void incrementCodegenEnumEvalP(long delta) {
	codegenEnumEvalP.add(delta);
}
public static void incrementCodegenClassCompile() {
	codegenClassCompile.increment();
}
public static void incrementCodegenCompileTime(long delta) {
	codegenCompileTime.add(delta);
}
public static void incrementCodegenClassCompileTime(long delta) {
	codegenClassCompileTime.add(delta);
}
public static void incrementCodegenPlanCacheHits() {
	codegenPlanCacheHits.increment();
}
public static void incrementCodegenPlanCacheTotal() {
	codegenPlanCacheTotal.increment();
}
public static long getCodegenDAGCompile() {
	return codegenHopCompile.longValue();
}
public static long getCodegenCPlanCompile() {
	return codegenCPlanCompile.longValue();
}
public static long getCodegenEnumAll() {
	return codegenEnumAll.longValue();
}
public static long getCodegenEnumAllP() {
	return codegenEnumAllP.longValue();
}
public static long getCodegenEnumEval() {
	return codegenEnumEval.longValue();
}
public static long getCodegenEnumEvalP() {
	return codegenEnumEvalP.longValue();
}
public static long getCodegenClassCompile() {
	return codegenClassCompile.longValue();
}
public static long getCodegenCompileTime() {
	return codegenCompileTime.longValue();
}
public static long getCodegenClassCompileTime() {
	return codegenClassCompileTime.longValue();
}
public static long getCodegenPlanCacheHits() {
	return codegenPlanCacheHits.longValue();
}
public static long getCodegenPlanCacheTotal() {
	return codegenPlanCacheTotal.longValue();
}
// --- function recompile stats ---
public static void incrementFunRecompileTime( long delta ) {
	funRecompileTime.add(delta);
}
public static void incrementFunRecompiles() {
	funRecompiles.increment();
}
// --- ParFor stats: synchronized because the backing fields are plain longs ---
public static synchronized void incrementParForOptimCount(){
	parforOptCount ++;
}
public static synchronized void incrementParForOptimTime( long time ) {
	parforOptTime += time;
}
public static synchronized void incrementParForInitTime( long time ) {
	parforInitTime += time;
}
public static synchronized void incrementParForMergeTime( long time ) {
	parforMergeTime += time;
}
// Compile timers only record when statistics collection is enabled.
public static void startCompileTimer() {
	if( DMLScript.STATISTICS )
		compileStartTime = System.nanoTime();
}
public static void stopCompileTimer() {
	if( DMLScript.STATISTICS )
		compileEndTime = System.nanoTime();
}
public static long getCompileTime() {
	return compileEndTime - compileStartTime;
}
/**
 * Starts the timer, should be invoked immediately before invoking
 * Program.execute()
 */
public static void startRunTimer() {
	// unconditionally recorded (unlike the compile timer above)
	execStartTime = System.nanoTime();
}
/**
 * Stops the timer, should be invoked immediately after invoking
 * Program.execute()
 */
public static void stopRunTimer() {
	execEndTime = System.nanoTime();
}
/**
 * Returns the total time of run in nanoseconds.
 *
 * @return run time in nanoseconds
 */
public static long getRunTime() {
	return execEndTime - execStartTime;
}
/**
 * Resets all statistics for reuse within one JVM (e.g. multiple test runs):
 * LongAdder counters are zeroed, JVM timers re-baselined, heavy hitters and
 * cache/GPU/DNN statistics cleared.
 */
public static void reset()
{
	hopRecompileTime.reset();
	hopRecompilePred.reset();
	hopRecompileSB.reset();
	funRecompiles.reset();
	funRecompileTime.reset();
	codegenHopCompile.reset();
	codegenCPlanCompile.reset();
	codegenClassCompile.reset();
	codegenEnumAll.reset();
	codegenEnumAllP.reset();
	codegenEnumEval.reset();
	codegenEnumEvalP.reset();
	codegenCompileTime.reset();
	codegenClassCompileTime.reset();
	codegenPlanCacheHits.reset();
	codegenPlanCacheTotal.reset();
	parforOptCount = 0;
	parforOptTime = 0;
	parforInitTime = 0;
	parforMergeTime = 0;
	lTotalLix.reset();
	lTotalLixUIP.reset();
	lTotalUIPVar.reset();
	CacheStatistics.reset();
	resetJITCompileTime();
	resetJVMgcTime();
	resetJVMgcCount();
	resetCPHeavyHitters();
	GPUStatistics.reset();
	numNativeLibMatrixMultCalls.reset();
	numNativeSparseConv2dCalls.reset();
	numNativeSparseConv2dBwdDataCalls.reset();
	numNativeSparseConv2dBwdFilterCalls.reset();
	numNativeConv2dCalls.reset();
	numNativeConv2dBwdDataCalls.reset();
	numNativeConv2dBwdFilterCalls.reset();
	numNativeFailures.reset();
	nativeLibMatrixMultTime = 0;
	nativeConv2dTime = 0;
	nativeConv2dBwdFilterTime = 0;
	nativeConv2dBwdDataTime = 0;
	// FIX: these MatrixBlock timers are reported by display() but were
	// previously never reset, unlike every other counter in this method.
	recomputeNNZTime = 0;
	examSparsityTime = 0;
	allocateDoubleArrTime = 0;
	LibMatrixDNN.resetStatistics();
}
// Stores the NEGATIVE of the current cumulative JIT time as a baseline, so
// that subsequent getJITCompileTime() calls (which add jitCompileTime to the
// MXBean total) report the delta since this reset.
public static void resetJITCompileTime(){
	jitCompileTime = -1 * getJITCompileTime();
}
// Same negative-baseline trick for cumulative GC time.
public static void resetJVMgcTime(){
	jvmGCTime = -1 * getJVMgcTime();
}
/**
 * Resets the JVM GC count baseline: stores the negative of the current
 * cumulative GC count so subsequent getJVMgcCount() calls report the delta
 * since this reset (same trick as resetJITCompileTime / resetJVMgcTime).
 */
public static void resetJVMgcCount(){
	// FIX: previously wrote the baseline into jvmGCTime, which both corrupted
	// the GC-time statistic and left the GC count un-reset.
	jvmGCCount = -1 * getJVMgcCount();
}
/** Clears the per-instruction heavy-hitter time and count maps. */
public static void resetCPHeavyHitters(){
	_cpInstTime.clear();
	_cpInstCounts.clear();
}
// --- Spark-specific stats (times in nanoseconds) ---
public static void setSparkCtxCreateTime(long ns) {
	sparkCtxCreateTime = ns;
}
public static void accSparkParallelizeTime(long t) {
	sparkParallelize.add(t);
}
public static void incSparkParallelizeCount(long c) {
	sparkParallelizeCount.add(c);
}
public static void accSparkCollectTime(long t) {
	sparkCollect.add(t);
}
public static void incSparkCollectCount(long c) {
	sparkCollectCount.add(c);
}
public static void accSparkBroadCastTime(long t) {
	sparkBroadcast.add(t);
}
public static void incSparkBroadcastCount(long c) {
	sparkBroadcastCount.add(c);
}
/**
 * Derives the heavy-hitter key for an instruction: a prefixed job type for
 * MR jobs, a prefixed opcode for Spark instructions, and the plain opcode
 * (or function name for function calls) for CP instructions.
 */
public static String getCPHeavyHitterCode( Instruction inst )
{
	String opcode = null;
	if( inst instanceof MRJobInstruction )
	{
		MRJobInstruction mrinst = (MRJobInstruction) inst;
		opcode = "MR-Job_"+mrinst.getJobType();
	}
	else if( inst instanceof SPInstruction )
	{
		opcode = "SP_"+InstructionUtils.getOpCode(inst.toString());
		// NOTE(review): FunctionCallCPInstruction is a CP instruction, so this
		// nested check inside the SPInstruction branch looks unreachable —
		// confirm the class hierarchy before removing.
		if( inst instanceof FunctionCallCPInstruction ) {
			FunctionCallCPInstruction extfunct = (FunctionCallCPInstruction)inst;
			opcode = extfunct.getFunctionName();
		}
	}
	else //CPInstructions
	{
		opcode = InstructionUtils.getOpCode(inst.toString());
		if( inst instanceof FunctionCallCPInstruction ) {
			FunctionCallCPInstruction extfunct = (FunctionCallCPInstruction)inst;
			opcode = extfunct.getFunctionName();
		}
	}
	return opcode;
}
/**
 * Adds {@code timeNanos} to the running total for the given instruction/op
 * and bumps its invocation count by one.
 *
 * @param instructionName name of the instruction/op
 * @param timeNanos elapsed time in nanoseconds
 */
public synchronized static void maintainCPHeavyHitters( String instructionName, long timeNanos )
{
	_cpInstTime.merge(instructionName, timeNanos, Long::sum);
	_cpInstCounts.merge(instructionName, 1L, Long::sum);
}
/** Returns the set of instruction/op names with recorded heavy-hitter times. */
public static Set<String> getCPHeavyHitterOpCodes() {
	return _cpInstTime.keySet();
}
/** Returns the recorded invocation count for an opcode, or 0 if unknown. */
public static long getCPHeavyHitterCount(String opcode) {
	return _cpInstCounts.getOrDefault(opcode, 0L);
}
/**
 * Obtain a string tabular representation of the heavy hitter instructions
 * that displays the time, instruction count, and optionally GPU stats about
 * each instruction.
 *
 * @param num
 *            the maximum number of heavy hitters to display
 * @return string representing the heavy hitter instructions in tabular
 *         format
 */
public static String getHeavyHitters(int num) {
	int len = _cpInstTime.size();
	if (num <= 0 || len <= 0)
		return "-";
	// get top k via sort (ascending by time; entries are read from the end)
	Entry<String, Long>[] tmp = _cpInstTime.entrySet().toArray(new Entry[len]);
	Arrays.sort(tmp, new Comparator<Entry<String, Long>>() {
		public int compare(Entry<String, Long> e1, Entry<String, Long> e2) {
			return e1.getValue().compareTo(e2.getValue());
		}
	});
	final String numCol = "#";
	final String instCol = "Instruction";
	final String timeSCol = "Time(s)";
	final String countCol = "Count";
	final String gpuCol = "Misc Timers";
	StringBuilder sb = new StringBuilder();
	int numHittersToDisplay = Math.min(num, len);
	int maxNumLen = String.valueOf(numHittersToDisplay).length();
	int maxInstLen = instCol.length();
	int maxTimeSLen = timeSCol.length();
	int maxCountLen = countCol.length();
	DecimalFormat sFormat = new DecimalFormat("#,##0.000");
	// first pass: compute column widths over the rows that will be printed
	for (int i = 0; i < numHittersToDisplay; i++) {
		Entry<String, Long> hh = tmp[len - 1 - i];
		String instruction = hh.getKey();
		Long timeNs = hh.getValue();
		double timeS = (double) timeNs / 1000000000.0;
		maxInstLen = Math.max(maxInstLen, instruction.length());
		String timeSString = sFormat.format(timeS);
		maxTimeSLen = Math.max(maxTimeSLen, timeSString.length());
		maxCountLen = Math.max(maxCountLen, String.valueOf(_cpInstCounts.get(instruction)).length());
	}
	// cap the instruction column; longer names are wrapped across rows below
	maxInstLen = Math.min(maxInstLen, DMLScript.STATISTICS_MAX_WRAP_LEN);
	sb.append(String.format(
			" %" + maxNumLen + "s %-" + maxInstLen + "s %" + maxTimeSLen + "s %" + maxCountLen + "s", numCol,
			instCol, timeSCol, countCol));
	if (DMLScript.FINEGRAINED_STATISTICS) {
		sb.append("  ");
		sb.append(gpuCol);
	}
	sb.append("\n");
	// second pass: emit rows, wrapping long instruction names and misc timers
	for (int i = 0; i < numHittersToDisplay; i++) {
		String instruction = tmp[len - 1 - i].getKey();
		String [] wrappedInstruction = wrap(instruction, maxInstLen);
		Long timeNs = tmp[len - 1 - i].getValue();
		double timeS = (double) timeNs / 1000000000.0;
		String timeSString = sFormat.format(timeS);
		Long count = _cpInstCounts.get(instruction);
		int numLines = wrappedInstruction.length;
		String [] miscTimers = null;
		if (DMLScript.FINEGRAINED_STATISTICS) {
			miscTimers = wrap(GPUStatistics.getStringForCPMiscTimesPerInstruction(instruction), DMLScript.STATISTICS_MAX_WRAP_LEN);
			numLines = Math.max(numLines, miscTimers.length);
		}
		String miscFormatString = (DMLScript.FINEGRAINED_STATISTICS) ? " %" + DMLScript.STATISTICS_MAX_WRAP_LEN + "s" : "%s";
		for(int wrapIter = 0; wrapIter < numLines; wrapIter++) {
			String instStr = (wrapIter < wrappedInstruction.length) ? wrappedInstruction[wrapIter] : "";
			String miscTimerStr = ( (DMLScript.FINEGRAINED_STATISTICS) && wrapIter < miscTimers.length) ? miscTimers[wrapIter] : "";
			if(wrapIter == 0) {
				// Display instruction count
				sb.append(String.format(
						" %" + maxNumLen + "d %-" + maxInstLen + "s %" + maxTimeSLen + "s %" + maxCountLen + "d" + miscFormatString,
						(i + 1), instStr, timeSString, count, miscTimerStr));
			}
			else {
				// continuation rows: only the wrapped fragments are printed
				sb.append(String.format(
						" %" + maxNumLen + "s %-" + maxInstLen + "s %" + maxTimeSLen + "s %" + maxCountLen + "s" + miscFormatString,
						"", instStr, "", "", miscTimerStr));
			}
			sb.append("\n");
		}
	}
	return sb.toString();
}
/**
 * Returns the total time of asynchronous JIT compilation in milliseconds,
 * or -1 if the JVM does not support compilation-time monitoring.
 *
 * @return JIT compile time
 */
public static long getJITCompileTime(){
	long ret = -1; //unsupported
	CompilationMXBean cmx = ManagementFactory.getCompilationMXBean();
	// FIX: getCompilationMXBean() returns null on JVMs without a compilation
	// system; guard against the resulting NPE.
	if( cmx != null && cmx.isCompilationTimeMonitoringSupported() )
	{
		ret = cmx.getTotalCompilationTime();
		ret += jitCompileTime; //add from remote processes
	}
	return ret;
}
/** Cumulative GC time (ms) across all collectors, plus any reset baseline. */
public static long getJVMgcTime(){
	long total = ManagementFactory.getGarbageCollectorMXBeans().stream()
		.mapToLong(GarbageCollectorMXBean::getCollectionTime).sum();
	// only apply the (possibly negative) baseline when a valid total exists
	return (total > 0) ? total + jvmGCTime : total;
}
/** Cumulative GC count across all collectors, plus any reset baseline. */
public static long getJVMgcCount(){
	long total = ManagementFactory.getGarbageCollectorMXBeans().stream()
		.mapToLong(GarbageCollectorMXBean::getCollectionCount).sum();
	return (total > 0) ? total + jvmGCCount : total;
}
// --- recompile getters: HOP/function times are in nanoseconds ---
public static long getHopRecompileTime(){
	return hopRecompileTime.longValue();
}
public static long getHopRecompiledPredDAGs(){
	return hopRecompilePred.longValue();
}
public static long getHopRecompiledSBDAGs(){
	return hopRecompileSB.longValue();
}
public static long getFunRecompileTime(){
	return funRecompileTime.longValue();
}
public static long getFunRecompiles(){
	return funRecompiles.longValue();
}
// --- ParFor getters: times are in milliseconds (see field comments) ---
public static long getParforOptCount(){
	return parforOptCount;
}
public static long getParforOptTime(){
	return parforOptTime;
}
public static long getParforInitTime(){
	return parforInitTime;
}
public static long getParforMergeTime(){
	return parforMergeTime;
}
/**
 * Returns statistics of the DML program that was recently completed as a string
 * @return statistics as a string
 */
public static String display() {
	// delegate with the configured default number of heavy hitters
	return display(DMLScript.STATISTICS_COUNT);
}
/**
 * Splits {@code str} into consecutive chunks of at most {@code wrapLength}
 * characters. An empty input yields a zero-length array.
 */
private static String [] wrap(String str, int wrapLength) {
	int len = str.length();
	String [] chunks = new String[(int) Math.ceil(((double) len) / wrapLength)];
	for (int start = 0, i = 0; start < len; start += wrapLength, i++) {
		chunks[i] = str.substring(start, Math.min(start + wrapLength, len));
	}
	return chunks;
}
/**
 * Returns statistics as a string
 * @param maxHeavyHitters The maximum number of heavy hitters that are printed
 * @return statistics as string
 */
public static String display(int maxHeavyHitters)
{
	StringBuilder sb = new StringBuilder();
	sb.append("SystemML Statistics:\n");
	if( DMLScript.STATISTICS ) {
		sb.append("Total elapsed time:\t\t" + String.format("%.3f", (getCompileTime()+getRunTime())*1e-9) + " sec.\n"); // nanoSec --> sec
		sb.append("Total compilation time:\t\t" + String.format("%.3f", getCompileTime()*1e-9) + " sec.\n"); // nanoSec --> sec
	}
	sb.append("Total execution time:\t\t" + String.format("%.3f", getRunTime()*1e-9) + " sec.\n"); // nanoSec --> sec
	if( OptimizerUtils.isSparkExecutionMode() ) {
		if( DMLScript.STATISTICS ) //moved into stats on Shiv's request
			sb.append("Number of compiled Spark inst:\t" + getNoOfCompiledSPInst() + ".\n");
		sb.append("Number of executed Spark inst:\t" + getNoOfExecutedSPInst() + ".\n");
	}
	else {
		if( DMLScript.STATISTICS ) //moved into stats on Shiv's request
			sb.append("Number of compiled MR Jobs:\t" + getNoOfCompiledMRJobs() + ".\n");
		sb.append("Number of executed MR Jobs:\t" + getNoOfExecutedMRJobs() + ".\n");
	}
	if( DMLScript.USE_ACCELERATOR && DMLScript.STATISTICS)
		sb.append(GPUStatistics.getStringForCudaTimers());
	//show extended caching/compilation statistics
	if( DMLScript.STATISTICS )
	{
		// native BLAS section only when a native library is actually in use
		if(NativeHelper.CURRENT_NATIVE_BLAS_STATE == NativeHelper.NativeBlasState.SUCCESSFULLY_LOADED_NATIVE_BLAS_AND_IN_USE) {
			String blas = NativeHelper.getCurrentBLAS();
			sb.append("Native " + blas + " calls (dense mult/conv/bwdF/bwdD):\t" + numNativeLibMatrixMultCalls.longValue() + "/" +
					numNativeConv2dCalls.longValue() + "/" + numNativeConv2dBwdFilterCalls.longValue()
					+ "/" + numNativeConv2dBwdDataCalls.longValue() + ".\n");
			sb.append("Native " + blas + " calls (sparse conv/bwdF/bwdD):\t" +
					numNativeSparseConv2dCalls.longValue() + "/" + numNativeSparseConv2dBwdFilterCalls.longValue()
					+ "/" + numNativeSparseConv2dBwdDataCalls.longValue() + ".\n");
			sb.append("Native " + blas + " times (dense mult/conv/bwdF/bwdD):\t" + String.format("%.3f", nativeLibMatrixMultTime*1e-9) + "/" +
					String.format("%.3f", nativeConv2dTime*1e-9) + "/" + String.format("%.3f", nativeConv2dBwdFilterTime*1e-9) + "/" +
					String.format("%.3f", nativeConv2dBwdDataTime*1e-9) + ".\n");
		}
		if(recomputeNNZTime != 0 || examSparsityTime != 0 || allocateDoubleArrTime != 0) {
			sb.append("MatrixBlock times (recomputeNNZ/examSparsity/allocateDoubleArr):\t" + String.format("%.3f", recomputeNNZTime*1e-9) + "/" +
					String.format("%.3f", examSparsityTime*1e-9) + "/" + String.format("%.3f", allocateDoubleArrTime*1e-9) + ".\n");
		}
		sb.append("Cache hits (Mem, WB, FS, HDFS):\t" + CacheStatistics.displayHits() + ".\n");
		sb.append("Cache writes (WB, FS, HDFS):\t" + CacheStatistics.displayWrites() + ".\n");
		sb.append("Cache times (ACQr/m, RLS, EXP):\t" + CacheStatistics.displayTime() + " sec.\n");
		sb.append("HOP DAGs recompiled (PRED, SB):\t" + getHopRecompiledPredDAGs() + "/" + getHopRecompiledSBDAGs() + ".\n");
		sb.append("HOP DAGs recompile time:\t" + String.format("%.3f", ((double)getHopRecompileTime())/1000000000) + " sec.\n");
		if( getFunRecompiles()>0 ) {
			sb.append("Functions recompiled:\t\t" + getFunRecompiles() + ".\n");
			sb.append("Functions recompile time:\t" + String.format("%.3f", ((double)getFunRecompileTime())/1000000000) + " sec.\n");
		}
		if( ConfigurationManager.isCodegenEnabled() ) {
			sb.append("Codegen compile (DAG,CP,JC):\t" + getCodegenDAGCompile() + "/"
					+ getCodegenCPlanCompile() + "/" + getCodegenClassCompile() + ".\n");
			sb.append("Codegen enum (ALLt/p,EVALt/p):\t" + getCodegenEnumAll() + "/" +
					getCodegenEnumAllP() + "/" + getCodegenEnumEval() + "/" + getCodegenEnumEvalP() + ".\n");
			sb.append("Codegen compile times (DAG,JC):\t" + String.format("%.3f", (double)getCodegenCompileTime()/1000000000) + "/" +
					String.format("%.3f", (double)getCodegenClassCompileTime()/1000000000) + " sec.\n");
			sb.append("Codegen plan cache hits:\t" + getCodegenPlanCacheHits() + "/" + getCodegenPlanCacheTotal() + ".\n");
		}
		if( OptimizerUtils.isSparkExecutionMode() ){
			String lazy = SparkExecutionContext.isLazySparkContextCreation() ? "(lazy)" : "(eager)";
			sb.append("Spark ctx create time "+lazy+":\t"+
					String.format("%.3f", ((double)sparkCtxCreateTime)*1e-9)  + " sec.\n" ); // nanoSec --> sec
			sb.append("Spark trans counts (par,bc,col):" +
					String.format("%d/%d/%d.\n", sparkParallelizeCount.longValue(),
							sparkBroadcastCount.longValue(), sparkCollectCount.longValue()));
			sb.append("Spark trans times (par,bc,col):\t" +
					String.format("%.3f/%.3f/%.3f secs.\n",
							((double)sparkParallelize.longValue())*1e-9,
							((double)sparkBroadcast.longValue())*1e-9,
							((double)sparkCollect.longValue())*1e-9));
		}
		if( parforOptCount>0 ){
			sb.append("ParFor loops optimized:\t\t" + getParforOptCount() + ".\n");
			sb.append("ParFor optimize time:\t\t" + String.format("%.3f", ((double)getParforOptTime())/1000) + " sec.\n");
			sb.append("ParFor initialize time:\t\t" + String.format("%.3f", ((double)getParforInitTime())/1000) + " sec.\n");
			sb.append("ParFor result merge time:\t" + String.format("%.3f", ((double)getParforMergeTime())/1000) + " sec.\n");
			sb.append("ParFor total update in-place:\t" + lTotalUIPVar + "/" + lTotalLixUIP + "/" + lTotalLix + "\n");
		}
		sb.append("Total JIT compile time:\t\t" + ((double)getJITCompileTime())/1000 + " sec.\n");
		sb.append("Total JVM GC count:\t\t" + getJVMgcCount() + ".\n");
		sb.append("Total JVM GC time:\t\t" + ((double)getJVMgcTime())/1000 + " sec.\n");
		LibMatrixDNN.appendStatistics(sb);
		sb.append("Heavy hitter instructions:\n" + getHeavyHitters(maxHeavyHitters));
	}
	return sb.toString();
}
}
| |
/**
* Copyright 2016 Yahoo Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yahoo.athenz.zpe_policy_updater;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.security.PublicKey;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.yahoo.rdl.JSON;
import com.yahoo.rdl.Timestamp;
import com.yahoo.athenz.auth.util.Crypto;
import com.yahoo.athenz.common.utils.SignUtils;
import com.yahoo.athenz.zts.DomainMetrics;
import com.yahoo.athenz.zts.DomainSignedPolicyData;
import com.yahoo.athenz.zts.PolicyData;
import com.yahoo.athenz.zts.SignedPolicyData;
import com.yahoo.athenz.zts.ZTSClient;
import com.yahoo.athenz.zts.ZTSClientException;
public class PolicyUpdater {
private static final Logger LOG = LoggerFactory.getLogger(PolicyUpdater.class);
// file suffixes for policy files (presumably written as .tmp and renamed to
// .pol — confirm against the writer code, which is outside this view)
private static final String POLICY_FILE_EXTENSION  = ".pol";
private static final String TEMP_FILE_EXTENSION = ".tmp";
// default metric output directory and the system property that overrides it
public static final String ZPE_METRIC_FILE_PATH = "/var/zpe_stat/";
public static final String ZPE_PROP_METRIC_FILE_PATH = "athenz.zpe.metric_file_path";
// metric names reported by the updater
static final String METRIC_GENERAL_FAILURE = "zpu_general_fail_sum";
static final String METRIC_PROCESS_SUM = "zpu_process_sum";
static final String METRIC_DOMAIN_FAILURE = "domain_fail_sum";
static final String METRIC_DOMAIN_SUCCESS = "domain_good_sum";
static final String METRIC_DOMAIN_FAIL = "domain_fail";
static final String METRIC_DOMAIN_GOOD = "domain_good";
private enum ZPUExitCode {
SUCCESS(0),
CONFIG_CREATE_FAILURE(1),
CONFIG_INIT_FAILURE(2),
MAX_INSTANCE_FAILURE(3),
POLICY_UPDATE_FAILURE(4);
private int code;
ZPUExitCode(int code) {
this.code = code;
}
int getCode() {
return code;
}
};
public static void main(String[] args) throws IOException, InterruptedException {
PolicyUpdaterConfiguration configuration = null;
try {
configuration = new PolicyUpdaterConfiguration();
} catch (Exception ex) {
LOG.error("Unable to create configuration object: " + ex.getMessage());
System.exit(ZPUExitCode.CONFIG_CREATE_FAILURE.getCode());
}
Random randomGenerator = new Random();
int randmonSleepInterval = 0;
if (configuration.getStartupDelayIntervalInSecs() > 0) {
randmonSleepInterval = randomGenerator.nextInt(configuration.getStartupDelayIntervalInSecs());
LOG.info("Launching zpe_policy_updater in " + randmonSleepInterval + " seconds...");
for (int i = 0; i < randmonSleepInterval; i++) {
Thread.sleep(1000);
}
} else {
LOG.info("Launching zpe_policy_updater with no delay...");
}
ZPUExitCode exitCode = ZPUExitCode.SUCCESS;
try {
try {
configuration.init(null, null);
} catch (Exception ex) {
LOG.error("Unable to initialize configuration object: " + ex.getMessage());
exitCode = ZPUExitCode.CONFIG_INIT_FAILURE;
throw ex;
}
try {
PolicyUpdater.policyUpdater(configuration, new ZTSClientFactoryImpl());
} catch (Exception ex) {
LOG.error("PolicyUpdater: Unable to update policy data: " + ex.getMessage());
exitCode = ZPUExitCode.POLICY_UPDATE_FAILURE;
throw ex;
}
} catch (Exception exc) {
LOG.error("PolicyUpdater: Exiting upon error: " + exc.getMessage());
} finally {
System.exit(exitCode.getCode());
}
}
static void policyUpdater(PolicyUpdaterConfiguration configuration, ZTSClientFactory ztsFactory)
throws Exception {
try (ZTSClient zts = ztsFactory.create()) {
List<String> domainList = configuration.getDomainList();
LOG.info("policyUpdater: Number of domains to process:"
+ (domainList == null ? 0 : domainList.size()));
if (domainList == null) {
LOG.error("policyUpdater: no domain list to process from configuration");
throw new Exception("no configured domains to process");
}
for (String domain : domainList) {
LOG.info("Fetching signed policies for domain:" + domain);
String matchingTag = getEtagForExistingPolicy(zts, configuration, domain);
Map<String, List<String>> responseHeaders = null;
DomainSignedPolicyData domainSignedPolicyData = null;
try {
domainSignedPolicyData = zts.getDomainSignedPolicyData(domain, matchingTag,
responseHeaders);
} catch (Exception exc) {
domainSignedPolicyData = null;
LOG.error("PolicyUpdater: Unable to retrieve policies from zts for domain="
+ domain, exc);
}
if (domainSignedPolicyData == null) {
if (matchingTag != null && !matchingTag.isEmpty()) {
LOG.info("PolicyUpdater: Policies not updated since last fetch time");
}
} else if (validateSignedPolicies(zts, configuration, domainSignedPolicyData, domain)) {
writePolicies(configuration, domain, domainSignedPolicyData);
}
}
// now push the domain metrics files
postDomainMetrics(zts);
}
}
static boolean validateSignedPolicies(ZTSClient zts, PolicyUpdaterConfiguration configuration,
DomainSignedPolicyData domainSignedPolicyData, String domain) {
if (domainSignedPolicyData == null || domain == null) {
throw new IllegalArgumentException("null parameters are not valid arguments");
}
LOG.info("Checking expiration time for:" + domain);
Timestamp expires = domainSignedPolicyData.getSignedPolicyData().getExpires();
if (System.currentTimeMillis() > expires.millis()) {
LOG.error("Signed policy for domain:" + domain + " was expired.");
return false;
}
// first we're going to verify the ZTS signature for the data
LOG.info("Verifying ZTS signature for: " + domain);
SignedPolicyData signedPolicyData = domainSignedPolicyData.getSignedPolicyData();
LOG.debug("Policies retrieved from the ZTS server: " + signedPolicyData);
String signature = domainSignedPolicyData.getSignature();
String keyId = domainSignedPolicyData.getKeyId();
LOG.debug("validateSignedPolicies: domain=" + domain + " zts key id=" + keyId + " Digital ZTS signature=" + signature);
PublicKey ztsPublicKey = configuration.getZtsPublicKey(zts, keyId);
if (ztsPublicKey == null) {
LOG.error("validateSignedPolicies: Missing ZTS Public key for id: " + keyId);
return false;
}
boolean verified = Crypto.verify(SignUtils.asCanonicalString(signedPolicyData), ztsPublicKey, signature);
if (verified == false) {
LOG.error("Signed policy for domain:" + domain + " failed ZTS signature verification.");
LOG.error("ZTS Signature: " + signature + ". Policies data returned from ZTS: " + signedPolicyData);
return false;
}
// then we're going to verify the ZMS signature for the policy data
LOG.info("Verifying ZMS signature for: " + domain);
PolicyData policyData = signedPolicyData.getPolicyData();
signature = signedPolicyData.getZmsSignature();
LOG.debug("Digital ZMS signature: " + signature);
keyId = signedPolicyData.getZmsKeyId();
LOG.debug("Digital ZMS signature key Id: " + keyId);
PublicKey zmsPublicKey = configuration.getZmsPublicKey(zts, keyId);
if (zmsPublicKey == null) {
LOG.error("Missing ZMS Public key with id: " + keyId);
return false;
}
verified = Crypto.verify(SignUtils.asCanonicalString(policyData), zmsPublicKey, signature);
if (verified == false) {
LOG.error("Signed policy for domain:" + domain + " failed ZMS signature verification.");
LOG.error("ZMS Signature: " + signature + ". Policies data returned from ZTS: " + policyData);
}
return verified;
}
static void verifyTmpDirSetup(PolicyUpdaterConfiguration configuration) throws IOException {
// ensure tmp dir exists
String policyTmpDir = configuration.getPolicyFileTmpDir();
Path tmpDir = Paths.get(policyTmpDir);
if (java.nio.file.Files.exists(tmpDir)) {
return;
}
LOG.warn("The temp dir doesnt exist so will create it: " + tmpDir);
java.nio.file.Files.createDirectory(tmpDir);
// get the user from config file to perform chown aginst the tmp dir
// chown -R $zpu_user $ROOT/tmp/zpe
String user = configuration.getZpuDirOwner();
if (user == null) {
LOG.warn("Cannot chown of the temp dir: " + tmpDir + " : no configured user");
return;
}
try {
java.nio.file.attribute.UserPrincipalLookupService lookupSvc =
java.nio.file.FileSystems.getDefault().getUserPrincipalLookupService();
java.nio.file.attribute.UserPrincipal uprinc = lookupSvc.lookupPrincipalByName(user);
Files.setOwner(tmpDir, uprinc);
} catch (Exception exc) {
LOG.warn("Failed to chown of the temp dir: " + tmpDir
+ ", user: " + user + ", exc: " + exc.getMessage());
}
}
static void writePolicies(PolicyUpdaterConfiguration configuration, String domain,
DomainSignedPolicyData domainSignedPolicyData) throws IOException {
if (configuration == null) {
throw new IllegalArgumentException("null configuration");
}
String policyTmpDir = configuration.getPolicyFileTmpDir();
String policyDir = configuration.getPolicyFileDir();
if (policyTmpDir == null || policyDir == null || domain == null || domainSignedPolicyData == null) {
throw new IllegalArgumentException("null parameters are not valid arguments");
}
String pathToTempFile = policyTmpDir + File.separator + domain + TEMP_FILE_EXTENSION;
String pathToPolicyFile = policyDir + File.separator + domain + POLICY_FILE_EXTENSION;
// ensure tmp dir exists
verifyTmpDirSetup(configuration);
LOG.info("Writing temp policy file: " + pathToTempFile);
// Make a file object from the path name
File file = new File(pathToTempFile);
file.createNewFile();
Files.write(file.toPath(), JSON.bytes(domainSignedPolicyData));
Path sourceFile = Paths.get(pathToTempFile);
Path destinationFile = Paths.get(pathToPolicyFile);
try {
LOG.info("Moving temp file : " + sourceFile + " to destination: " + destinationFile);
Files.copy(sourceFile, destinationFile, StandardCopyOption.REPLACE_EXISTING);
Files.deleteIfExists(sourceFile);
} catch (IOException exc) {
LOG.error("PolicyUpdater: Moving temp file failure. source: " + sourceFile
+ " : destination: " + destinationFile + " : exc: " + exc);
}
}
static String getEtagForExistingPolicy(ZTSClient zts, PolicyUpdaterConfiguration configuration,
String domain) {
if (domain == null) {
throw new IllegalArgumentException("getEtagForExistingPolicy: null parameters are not valid arguments");
}
String policyDir = configuration.getPolicyFileDir();
if (policyDir == null) {
throw new IllegalArgumentException("getEtagForExistingPolicy: Invalid configuration: no policy directory path");
}
String policyDirPath;
if (policyDir.length() - 1 != policyDir.lastIndexOf(File.separator)) {
policyDirPath = policyDir + File.separator;
} else {
policyDirPath = policyDir;
}
String etag = null;
String policyFile = policyDirPath + domain + POLICY_FILE_EXTENSION;
LOG.info("Decoding " + policyFile + " to retrieve eTag from policy file.");
File file = new File(policyFile);
if (file.exists() == false) {
LOG.info("Policy file not found.");
return etag;
}
DomainSignedPolicyData domainSignedPolicyData = null;
try {
domainSignedPolicyData = JSON.fromBytes(Files.readAllBytes(file.toPath()),
DomainSignedPolicyData.class);
} catch (Exception ex) {
LOG.info("Unable to parse domain signed policy file: " + policyFile);
return etag;
}
// validate the signature before checking for expiration
if (validateSignedPolicies(zts, configuration, domainSignedPolicyData, domain) == false) {
LOG.info("Unable to validate domain signed policy file: " + policyFile);
return etag;
}
// Check expiration of policies and if its less than the configured interval defined by user
// to get updated policy then return null so that the policies are updated
LOG.info("Checking expiration time for: " + domain);
long now = System.currentTimeMillis() / 1000;
Timestamp expires = domainSignedPolicyData.getSignedPolicyData().getExpires();
long startupDelayInterval = configuration.getStartupDelayIntervalInSecs();
LOG.info("Expiration time for " + domain + " is: " + (expires.millis() / 1000));
LOG.info("Startup delay: " + startupDelayInterval);
LOG.info("Current time: " + now);
if (((expires.millis() / 1000) - now) < (startupDelayInterval)) {
LOG.info("Signed policies for domain:" + domain + " are expired, returning null.");
return null;
}
if (domainSignedPolicyData.getSignedPolicyData().getModified() != null) {
// ETags are quoted-strings based on the HTTP RFC
// http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.11
// so we're going to quote our modified timestamp
etag = "\"" + domainSignedPolicyData.getSignedPolicyData().getModified().toString() + "\"";
LOG.info("ETag: " + etag);
} else {
LOG.info("No ETag found.");
}
return etag;
}
static String getFilePath() {
String rootDir = System.getenv("ROOT");
if (rootDir == null) {
rootDir = "/home/athenz";
}
final String defaultPath = rootDir + ZPE_METRIC_FILE_PATH;
String filePath = System.getProperty(ZPE_PROP_METRIC_FILE_PATH, defaultPath);
// verify it ends with the separator and handle accordingly
if (!filePath.endsWith(File.separator)) {
filePath = filePath.concat(File.separator);
}
return filePath;
}
public static void postDomainMetrics(ZTSClient zts) {
final String filepath = getFilePath();
File dir = new File(filepath);
File[] filenames = dir.listFiles();
// make sure we have valid list of metric files
if (filenames == null) {
return;
}
for (int i = 0; i < filenames.length; i++) {
String domainName = filenames[i].getName().split("_")[0];
DomainMetrics domainMetrics = null;
final String metricFile = filepath + filenames[i].getName();
try {
Path path = Paths.get(metricFile);
domainMetrics = JSON.fromBytes(Files.readAllBytes(path), DomainMetrics.class);
zts.postDomainMetrics(domainName, domainMetrics);
Files.deleteIfExists(path);
} catch (ZTSClientException | IOException ex) {
LOG.error("Unable to push domain metrics from {} - error: {}",
metricFile, ex.getMessage());
}
}
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.status;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest;
import org.elasticsearch.action.support.broadcast.TransportBroadcastOperationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.gateway.IndexShardGatewayService;
import org.elasticsearch.index.gateway.SnapshotStatus;
import org.elasticsearch.index.service.InternalIndexService;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.service.InternalIndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.recovery.RecoveryStatus;
import org.elasticsearch.indices.recovery.RecoveryTarget;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.atomic.AtomicReferenceArray;
import static com.google.common.collect.Lists.newArrayList;
/**
*
*/
/**
 * Transport action backing the indices status API. Broadcasts an
 * {@link IndexShardStatusRequest} to every assigned shard copy and merges the
 * per-shard {@link ShardStatus} answers (and failures) into a single
 * {@link IndicesStatusResponse}.
 */
public class TransportIndicesStatusAction extends TransportBroadcastOperationAction<IndicesStatusRequest, IndicesStatusResponse, TransportIndicesStatusAction.IndexShardStatusRequest, ShardStatus> {

    private final IndicesService indicesService;

    // Used to look up in-flight peer recovery state when the shard itself has
    // no recovery status yet (see shardOperation).
    private final RecoveryTarget peerRecoveryTarget;

    @Inject
    public TransportIndicesStatusAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
                                        IndicesService indicesService, RecoveryTarget peerRecoveryTarget) {
        super(settings, threadPool, clusterService, transportService);
        this.peerRecoveryTarget = peerRecoveryTarget;
        this.indicesService = indicesService;
    }

    // Run shard-level work on the management pool, not a search/index pool.
    @Override
    protected String executor() {
        return ThreadPool.Names.MANAGEMENT;
    }

    @Override
    protected String transportAction() {
        return IndicesStatusAction.NAME;
    }

    @Override
    protected IndicesStatusRequest newRequest() {
        return new IndicesStatusRequest();
    }

    /**
     * Status goes across *all* shards.
     */
    @Override
    protected GroupShardsIterator shards(ClusterState state, IndicesStatusRequest request, String[] concreteIndices) {
        return state.routingTable().allAssignedShardsGrouped(concreteIndices, true);
    }

    // Status is metadata-level: blocked only by METADATA cluster/index blocks.
    @Override
    protected ClusterBlockException checkGlobalBlock(ClusterState state, IndicesStatusRequest request) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA);
    }

    @Override
    protected ClusterBlockException checkRequestBlock(ClusterState state, IndicesStatusRequest countRequest, String[] concreteIndices) {
        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, concreteIndices);
    }

    /**
     * Merges the raw per-shard responses: nulls are skipped (inactive shards),
     * broadcast failures are collected into shardFailures, everything else is
     * a successful ShardStatus.
     */
    @Override
    protected IndicesStatusResponse newResponse(IndicesStatusRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
        int successfulShards = 0;
        int failedShards = 0;
        List<ShardOperationFailedException> shardFailures = null;
        final List<ShardStatus> shards = newArrayList();
        for (int i = 0; i < shardsResponses.length(); i++) {
            Object shardResponse = shardsResponses.get(i);
            if (shardResponse == null) {
                // simply ignore non active shards
            } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
                failedShards++;
                if (shardFailures == null) {
                    // lazily allocated: the common case has no failures
                    shardFailures = newArrayList();
                }
                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
            } else {
                shards.add((ShardStatus) shardResponse);
                successfulShards++;
            }
        }
        return new IndicesStatusResponse(shards.toArray(new ShardStatus[shards.size()]), clusterState, shardsResponses.length(), successfulShards, failedShards, shardFailures);
    }

    @Override
    protected IndexShardStatusRequest newShardRequest() {
        return new IndexShardStatusRequest();
    }

    @Override
    protected IndexShardStatusRequest newShardRequest(ShardRouting shard, IndicesStatusRequest request) {
        return new IndexShardStatusRequest(shard.index(), shard.id(), request);
    }

    @Override
    protected ShardStatus newShardResponse() {
        return new ShardStatus();
    }

    /**
     * Collects the status of one local shard: store size, translog/doc stats
     * when STARTED, plus (on request) recovery and gateway snapshot progress.
     */
    @Override
    protected ShardStatus shardOperation(IndexShardStatusRequest request) throws ElasticSearchException {
        InternalIndexService indexService = (InternalIndexService) indicesService.indexServiceSafe(request.index());
        InternalIndexShard indexShard = (InternalIndexShard) indexService.shardSafe(request.shardId());
        ShardStatus shardStatus = new ShardStatus(indexShard.routingEntry());
        shardStatus.state = indexShard.state();
        try {
            shardStatus.storeSize = indexShard.store().estimateSize();
        } catch (IOException e) {
            // failure to get the store size...
        }
        if (indexShard.state() == IndexShardState.STARTED) {
//            shardStatus.estimatedFlushableMemorySize = indexShard.estimateFlushableMemorySize();
            shardStatus.translogId = indexShard.translog().currentId();
            shardStatus.translogOperations = indexShard.translog().estimatedNumberOfOperations();
            Engine.Searcher searcher = indexShard.acquireSearcher("indices_status");
            try {
                shardStatus.docs = new DocsStatus();
                shardStatus.docs.numDocs = searcher.reader().numDocs();
                shardStatus.docs.maxDoc = searcher.reader().maxDoc();
                shardStatus.docs.deletedDocs = searcher.reader().numDeletedDocs();
            } finally {
                // always release the acquired searcher, even if reading fails
                searcher.release();
            }
            shardStatus.mergeStats = indexShard.mergeScheduler().stats();
            shardStatus.refreshStats = indexShard.refreshStats();
            shardStatus.flushStats = indexShard.flushStats();
        }
        if (request.recovery) {
            // check on going recovery (from peer or gateway)
            RecoveryStatus peerRecoveryStatus = indexShard.peerRecoveryStatus();
            if (peerRecoveryStatus == null) {
                peerRecoveryStatus = peerRecoveryTarget.peerRecoveryStatus(indexShard.shardId());
            }
            if (peerRecoveryStatus != null) {
                // translate the internal recovery stage into the API enum
                PeerRecoveryStatus.Stage stage;
                switch (peerRecoveryStatus.stage()) {
                    case INIT:
                        stage = PeerRecoveryStatus.Stage.INIT;
                        break;
                    case INDEX:
                        stage = PeerRecoveryStatus.Stage.INDEX;
                        break;
                    case TRANSLOG:
                        stage = PeerRecoveryStatus.Stage.TRANSLOG;
                        break;
                    case FINALIZE:
                        stage = PeerRecoveryStatus.Stage.FINALIZE;
                        break;
                    case DONE:
                        stage = PeerRecoveryStatus.Stage.DONE;
                        break;
                    default:
                        stage = PeerRecoveryStatus.Stage.INIT;
                }
                shardStatus.peerRecoveryStatus = new PeerRecoveryStatus(stage, peerRecoveryStatus.startTime(), peerRecoveryStatus.time(),
                        peerRecoveryStatus.phase1TotalSize(), peerRecoveryStatus.phase1ExistingTotalSize(),
                        peerRecoveryStatus.currentFilesSize(), peerRecoveryStatus.currentTranslogOperations());
            }

            IndexShardGatewayService gatewayService = indexService.shardInjector(request.shardId()).getInstance(IndexShardGatewayService.class);
            org.elasticsearch.index.gateway.RecoveryStatus gatewayRecoveryStatus = gatewayService.recoveryStatus();
            if (gatewayRecoveryStatus != null) {
                // same stage translation, for recovery from the gateway
                GatewayRecoveryStatus.Stage stage;
                switch (gatewayRecoveryStatus.stage()) {
                    case INIT:
                        stage = GatewayRecoveryStatus.Stage.INIT;
                        break;
                    case INDEX:
                        stage = GatewayRecoveryStatus.Stage.INDEX;
                        break;
                    case TRANSLOG:
                        stage = GatewayRecoveryStatus.Stage.TRANSLOG;
                        break;
                    case DONE:
                        stage = GatewayRecoveryStatus.Stage.DONE;
                        break;
                    default:
                        stage = GatewayRecoveryStatus.Stage.INIT;
                }
                shardStatus.gatewayRecoveryStatus = new GatewayRecoveryStatus(stage, gatewayRecoveryStatus.startTime(), gatewayRecoveryStatus.time(),
                        gatewayRecoveryStatus.index().totalSize(), gatewayRecoveryStatus.index().reusedTotalSize(), gatewayRecoveryStatus.index().currentFilesSize(), gatewayRecoveryStatus.translog().currentTranslogOperations());
            }
        }
        if (request.snapshot) {
            IndexShardGatewayService gatewayService = indexService.shardInjector(request.shardId()).getInstance(IndexShardGatewayService.class);
            SnapshotStatus snapshotStatus = gatewayService.snapshotStatus();
            if (snapshotStatus != null) {
                GatewaySnapshotStatus.Stage stage;
                switch (snapshotStatus.stage()) {
                    case DONE:
                        stage = GatewaySnapshotStatus.Stage.DONE;
                        break;
                    case FAILURE:
                        stage = GatewaySnapshotStatus.Stage.FAILURE;
                        break;
                    case TRANSLOG:
                        stage = GatewaySnapshotStatus.Stage.TRANSLOG;
                        break;
                    case FINALIZE:
                        stage = GatewaySnapshotStatus.Stage.FINALIZE;
                        break;
                    case INDEX:
                        stage = GatewaySnapshotStatus.Stage.INDEX;
                        break;
                    default:
                        stage = GatewaySnapshotStatus.Stage.NONE;
                        break;
                }
                shardStatus.gatewaySnapshotStatus = new GatewaySnapshotStatus(stage, snapshotStatus.startTime(), snapshotStatus.time(),
                        snapshotStatus.index().totalSize(), snapshotStatus.translog().expectedNumberOfOperations());
            }
        }
        return shardStatus;
    }

    /**
     * Per-shard request carrying the recovery/snapshot flags from the
     * top-level {@link IndicesStatusRequest} over the wire.
     */
    public static class IndexShardStatusRequest extends BroadcastShardOperationRequest {

        boolean recovery;

        boolean snapshot;

        IndexShardStatusRequest() {
        }

        IndexShardStatusRequest(String index, int shardId, IndicesStatusRequest request) {
            super(index, shardId, request);
            recovery = request.recovery();
            snapshot = request.snapshot();
        }

        // NOTE: read/write order below must stay in sync (wire protocol).
        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            recovery = in.readBoolean();
            snapshot = in.readBoolean();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(recovery);
            out.writeBoolean(snapshot);
        }
    }
}
| |
/**
*
*/
package uk.co.jemos.podam.test.dto;
import java.io.Serializable;
import java.util.Objects;
/**
* @author mtedone
*
*/
public class AddressTestPojo implements Serializable {
private static final long serialVersionUID = 1L;
/** Address line 1 */
private String address1;
/** Address line 2 */
private String address2;
/** Address line 3 */
private String address3;
/** The city */
private String city;
/** The province */
private String province;
/** The zip code */
private String zipCode;
/** The country */
private String country;
/**
* @return the address1
*/
public String getAddress1() {
return address1;
}
/**
* @return the address2
*/
public String getAddress2() {
return address2;
}
/**
* @return the address3
*/
public String getAddress3() {
return address3;
}
/**
* @return the city
*/
public String getCity() {
return city;
}
/**
* @return the province
*/
public String getProvince() {
return province;
}
/**
* @return the zipCode
*/
public String getZipCode() {
return zipCode;
}
/**
* @return the country
*/
public String getCountry() {
return country;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = (prime * result)
+ ((address1 == null) ? 0 : address1.hashCode());
result = (prime * result)
+ ((address2 == null) ? 0 : address2.hashCode());
result = (prime * result)
+ ((address3 == null) ? 0 : address3.hashCode());
result = (prime * result) + ((city == null) ? 0 : city.hashCode());
result = (prime * result)
+ ((country == null) ? 0 : country.hashCode());
result = (prime * result)
+ ((province == null) ? 0 : province.hashCode());
result = (prime * result)
+ ((zipCode == null) ? 0 : zipCode.hashCode());
return result;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (this.getClass() != obj.getClass()) {
return false;
}
AddressTestPojo other = (AddressTestPojo) obj;
if (address1 == null) {
if (other.address1 != null) {
return false;
}
} else if (!address1.equals(other.address1)) {
return false;
}
if (address2 == null) {
if (other.address2 != null) {
return false;
}
} else if (!address2.equals(other.address2)) {
return false;
}
if (address3 == null) {
if (other.address3 != null) {
return false;
}
} else if (!address3.equals(other.address3)) {
return false;
}
if (city == null) {
if (other.city != null) {
return false;
}
} else if (!city.equals(other.city)) {
return false;
}
if (country == null) {
if (other.country != null) {
return false;
}
} else if (!country.equals(other.country)) {
return false;
}
if (province == null) {
if (other.province != null) {
return false;
}
} else if (!province.equals(other.province)) {
return false;
}
if (zipCode == null) {
if (other.zipCode != null) {
return false;
}
} else if (!zipCode.equals(other.zipCode)) {
return false;
}
return true;
}
/**
* Constructs a <code>String</code> with all attributes in name = value
* format.
*
* @return a <code>String</code> representation of this object.
*/
@Override
public String toString() {
final String TAB = " ";
StringBuilder retValue = new StringBuilder();
retValue.append("AddressTestPojo ( ").append(TAB).append("address1 = ")
.append(address1).append(TAB).append("address2 = ")
.append(address2).append(TAB).append("address3 = ")
.append(address3).append(TAB).append("city = ").append(city)
.append(TAB).append("province = ").append(province).append(TAB)
.append("zipCode = ").append(zipCode).append(TAB)
.append("country = ").append(country).append(TAB).append(" )");
return retValue.toString();
}
}
| |
package permission;
import com.google.gson.JsonArray;
import events.Command;
import org.json.JSONArray;
import sx.blah.discord.handle.obj.IGuild;
import sx.blah.discord.handle.obj.IRole;
import sx.blah.discord.handle.obj.IUser;
import sx.blah.discord.handle.obj.Permissions;
import util.Console;
import util.Fast;
import util.Globals;
import java.util.*;
/**
* Created by ModdyLP on 01.07.2017. Website: https://moddylp.de/
*/
/**
 * Role-based permission registry for bot commands: tracks which permission
 * string each command requires and which permission strings each Discord
 * role has been granted, persisting the role grants to a JSON file.
 * <p>
 * NOTE(review): not thread-safe; assumed to be used from the bot's single
 * event/initialization thread.
 */
public class PermissionController implements Fast {

    private static PermissionController instance;
    /** JSON file the role -> permissions mapping is persisted to. */
    private static final String PERMFILE = "permissions.json";

    /** Lazily created singleton accessor (not thread-safe, see class note). */
    public static PermissionController getInstance() {
        if (instance == null) {
            instance = new PermissionController();
        }
        return instance;
    }

    /** Each registered command mapped to its required permission string. */
    private final HashMap<Command, String> permissions = new HashMap<>();
    /** Permission strings granted to each role. */
    private final HashMap<IRole, ArrayList<String>> grouppermissions = new HashMap<>();

    /** Registers a command under its self-declared permission string. */
    public void addPermission(Command command) {
        permissions.put(command, command.permission());
    }

    /**
     * Checks whether the user holds the given permission on the server:
     * BOT_OWNER is granted only to the application owner; anything else is
     * granted through the user's roles. The "ownerbypass" config flag lets
     * the application owner pass every check.
     */
    public boolean hasPermission(IUser user, IGuild server, String permission) {
        boolean check = false;
        if (permission.equalsIgnoreCase(Globals.BOT_OWNER)) {
            if (user.equals(INIT.BOT.getApplicationOwner())) {
                check = true;
            }
        } else {
            for (IRole role : user.getRolesForGuild(server)) {
                // single lookup instead of the original double get()
                ArrayList<String> rolePermissions = grouppermissions.get(role);
                if (rolePermissions != null && rolePermissions.contains(permission)) {
                    check = true;
                }
            }
        }
        // Boolean.TRUE.equals(...) is null-safe; the original called
        // .equals(true) on the property value and would NPE when the
        // "ownerbypass" setting was missing.
        if (Boolean.TRUE.equals(DRIVER.getPropertyOnly(DRIVER.CONFIG, "ownerbypass"))
                && user.equals(INIT.BOT.getApplicationOwner())) {
            check = true;
        }
        return check;
    }

    /**
     * Returns the first non-deleted role that was granted the permission,
     * or null when no role has it.
     */
    public IRole groupPermission(String permission) {
        for (IRole role : grouppermissions.keySet()) {
            if (!role.isDeleted()) {
                if (grouppermissions.get(role).contains(permission)) {
                    return role;
                }
            }
        }
        return null;
    }

    /** Counts how many registered commands the user may execute on the server. */
    public int getAccessAmount(IUser user, IGuild server) {
        int count = 0;
        for (Command command : COMMAND.getAllCommands()) {
            if (hasPermission(user, server, command.permission())) {
                count++;
            }
        }
        return count;
    }

    /**
     * Grants a known permission string to a role and persists the mapping.
     * Unknown permission strings are rejected with a debug message.
     */
    public void addPermissionToGroup(IRole role, String permission) {
        if (permissions.containsValue(permission)) {
            grouppermissions.computeIfAbsent(role, r -> new ArrayList<>()).add(permission);
        } else {
            Console.debug("This permission doesnt exist: " + permission);
        }
        savePermissions();
    }

    /** Revokes a known permission string from a role and persists the mapping. */
    public void removePermissionToGroup(IRole role, String permission) {
        if (permissions.containsValue(permission)) {
            ArrayList<String> grouppermission = grouppermissions.get(role);
            if (grouppermission == null) {
                return;
            }
            grouppermission.remove(permission);
        } else {
            Console.debug("This permission doesnt exist: " + permission);
        }
        savePermissions();
    }

    /** Revokes a command's permission from a role and persists the mapping. */
    public void removePermissionToGroup(IRole role, Command command) {
        ArrayList<String> grouppermission = grouppermissions.get(role);
        if (grouppermission == null) {
            return;
        }
        grouppermission.remove(command.permission());
        savePermissions();
    }

    /**
     * Grants a command's permission to a role and persists the mapping.
     * NOTE(review): unlike the String overload this does not validate the
     * permission against the registry — preserved as-is, confirm if intended.
     */
    public void addPermissionToGroup(IRole role, Command command) {
        grouppermissions.computeIfAbsent(role, r -> new ArrayList<>()).add(command.permission());
        savePermissions();
    }

    /** Returns a fresh list of every registered permission string. */
    public ArrayList<String> getStringPermissions() {
        return new ArrayList<>(permissions.values());
    }

    public HashMap<Command, String> getPermissions() {
        return permissions;
    }

    public HashMap<IRole, ArrayList<String>> getGrouppermissions() {
        return grouppermissions;
    }

    /** Writes every role's permission list to the JSON file (best effort). */
    private void savePermissions() {
        try {
            DRIVER.createNewFile(PERMFILE);
            for (IRole role : grouppermissions.keySet()) {
                ArrayList<String> rolePermissions = grouppermissions.get(role);
                DRIVER.setProperty(PERMFILE, String.valueOf(role.getLongID()), rolePermissions);
            }
            DRIVER.saveJson();
        } catch (Exception ex) {
            Console.error("Saving of Permissions failed");
            Console.error(ex);
        }
    }

    /**
     * Loads the persisted role -> permissions mapping, resolving role IDs
     * against the given servers, and re-registers the matching commands.
     */
    public void loadPermissions(List<IGuild> server) {
        try {
            DRIVER.createNewFile(PERMFILE);
            HashMap<String, Object> values = DRIVER.getAllKeysWithValues(PERMFILE);
            for (String roleid : values.keySet()) {
                for (IGuild serverinstance : server) {
                    IRole role = serverinstance.getRoleByID(Long.valueOf(roleid));
                    if (role != null) {
                        ArrayList<String> permission = new ArrayList<>();
                        JSONArray jArray = (JSONArray) values.get(roleid);
                        if (jArray != null) {
                            for (int i = 0; i < jArray.length(); i++) {
                                // skip "null" placeholder entries in the stored array
                                if (!jArray.get(i).toString().equalsIgnoreCase("null")) {
                                    permission.add(jArray.get(i).toString());
                                }
                            }
                        }
                        grouppermissions.put(role, permission);
                        for (String perm : permission) {
                            Console.debug("Permission: " + perm);
                            for (Command command : COMMAND.getCommandByPermission(perm)) {
                                permissions.put(command, perm);
                            }
                        }
                    }
                }
            }
        } catch (Exception ex) {
            Console.error("Failed to load Permissions");
            Console.error(ex);
        }
    }

    /**
     * Seeds default permissions (admin roles get BOT_MANAGE, @everyone gets
     * BOT_INFO) when the permission file is missing/empty or override is set.
     */
    public void setDefaultPermissions(List<IGuild> server, boolean override) {
        if (override || !DRIVER.checkIfFileExists(PERMFILE) || DRIVER.checkIfFileisEmpty(PERMFILE)) {
            Console.debug("Load default permissions...");
            List<IRole> adminroles = new ArrayList<>();
            List<IRole> everyoneroles = new ArrayList<>();
            // Hoisted out of the per-guild loop: the original reloaded the
            // full mapping once per guild with identical arguments.
            loadPermissions(server);
            for (IGuild serverinst : server) {
                for (IRole role : serverinst.getRoles()) {
                    if (role.getPermissions().contains(Permissions.ADMINISTRATOR) && !grouppermissions.containsKey(role)) {
                        adminroles.add(role);
                    }
                }
                if (!grouppermissions.containsKey(serverinst.getEveryoneRole())) {
                    Console.debug("Add: " + serverinst.getEveryoneRole().getStringID() + " " + serverinst.getEveryoneRole().getGuild().getName());
                    PERM.addPermissionToGroup(serverinst.getEveryoneRole(), Globals.BOT_INFO);
                }
            }
            for (IRole role : adminroles) {
                Console.debug("Add: " + role.getStringID() + " " + role.getGuild().getName());
                PERM.addPermissionToGroup(role, Globals.BOT_MANAGE);
            }
            // NOTE(review): everyoneroles is never populated, so this count is
            // always 0 — looks like a pre-existing logging bug; confirm intent.
            Console.debug("Permission loaded: Admin:" + adminroles.size() + " Info: " + everyoneroles.size());
        }
    }
}
| |
/**
* Api Documentation
* Api Documentation
*
* OpenAPI spec version: 1.0
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hevelian.identity.client.api;
import com.hevelian.identity.client.ApiCallback;
import com.hevelian.identity.client.ApiClient;
import com.hevelian.identity.client.ApiException;
import com.hevelian.identity.client.ApiResponse;
import com.hevelian.identity.client.Configuration;
import com.hevelian.identity.client.Pair;
import com.hevelian.identity.client.ProgressRequestBody;
import com.hevelian.identity.client.ProgressResponseBody;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import com.hevelian.identity.client.model.PAPPolicy;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Generated REST client for the PAP (Policy Administration Point) controller.
 * Exposes three endpoints under {@code /PAPService}: testAddPolicy (POST),
 * testGetAll (GET, returns {@code List<PAPPolicy>}) and testUpdatePolicy (POST),
 * each with a synchronous, a with-http-info, and an asynchronous variant.
 * Auto-generated by swagger-codegen — do not edit manually.
 */
public class PapcontrollerApi {
    // HTTP client used to build and execute every call; replaceable via setApiClient().
    private ApiClient apiClient;

    /** Creates the API bound to the globally configured default {@link ApiClient}. */
    public PapcontrollerApi() {
        this(Configuration.getDefaultApiClient());
    }

    /** Creates the API bound to the supplied client. */
    public PapcontrollerApi(ApiClient apiClient) {
        this.apiClient = apiClient;
    }

    /** Returns the client used for requests. */
    public ApiClient getApiClient() {
        return apiClient;
    }

    /** Replaces the client used for subsequent requests. */
    public void setApiClient(ApiClient apiClient) {
        this.apiClient = apiClient;
    }

    /* Build call for testAddPolicyUsingPOST */
    private com.squareup.okhttp.Call testAddPolicyUsingPOSTCall(final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = null;
        // create path and map variables
        String localVarPath = "/PAPService/testAddPolicy".replaceAll("\\{format\\}","json");
        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        Map<String, Object> localVarFormParams = new HashMap<String, Object>();
        final String[] localVarAccepts = {
            "application/xml", "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);
        final String[] localVarContentTypes = {
            "application/xml", "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        // NOTE(review): assumes selectHeaderContentType never returns null — confirm in ApiClient.
        localVarHeaderParams.put("Content-Type", localVarContentType);
        if(progressListener != null) {
            // Wrap the response body so download progress is reported to the listener.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                        .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                        .build();
                }
            });
        }
        String[] localVarAuthNames = new String[] { "default" };
        return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    /**
     * testAddPolicy
     *
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public void testAddPolicyUsingPOST() throws ApiException {
        testAddPolicyUsingPOSTWithHttpInfo();
    }

    /**
     * testAddPolicy
     *
     * @return ApiResponse&lt;Void&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<Void> testAddPolicyUsingPOSTWithHttpInfo() throws ApiException {
        com.squareup.okhttp.Call call = testAddPolicyUsingPOSTCall(null, null);
        return apiClient.execute(call);
    }

    /**
     * testAddPolicy (asynchronously)
     *
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call testAddPolicyUsingPOSTAsync(final ApiCallback<Void> callback) throws ApiException {
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
        if (callback != null) {
            // Bridge okhttp progress events to the user-supplied callback.
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };
            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }
        com.squareup.okhttp.Call call = testAddPolicyUsingPOSTCall(progressListener, progressRequestListener);
        apiClient.executeAsync(call, callback);
        return call;
    }

    /* Build call for testGetAllUsingGET */
    private com.squareup.okhttp.Call testGetAllUsingGETCall(final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = null;
        // create path and map variables
        String localVarPath = "/PAPService/testGetAll".replaceAll("\\{format\\}","json");
        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        Map<String, Object> localVarFormParams = new HashMap<String, Object>();
        final String[] localVarAccepts = {
            "application/xml", "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);
        final String[] localVarContentTypes = {
            "application/xml", "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        // NOTE(review): assumes selectHeaderContentType never returns null — confirm in ApiClient.
        localVarHeaderParams.put("Content-Type", localVarContentType);
        if(progressListener != null) {
            // Wrap the response body so download progress is reported to the listener.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                        .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                        .build();
                }
            });
        }
        String[] localVarAuthNames = new String[] { "default" };
        return apiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    /**
     * testGetAll
     *
     * @return List&lt;PAPPolicy&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public List<PAPPolicy> testGetAllUsingGET() throws ApiException {
        ApiResponse<List<PAPPolicy>> resp = testGetAllUsingGETWithHttpInfo();
        return resp.getData();
    }

    /**
     * testGetAll
     *
     * @return ApiResponse&lt;List&lt;PAPPolicy&gt;&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<List<PAPPolicy>> testGetAllUsingGETWithHttpInfo() throws ApiException {
        com.squareup.okhttp.Call call = testGetAllUsingGETCall(null, null);
        // TypeToken preserves the generic element type for gson deserialization.
        Type localVarReturnType = new TypeToken<List<PAPPolicy>>(){}.getType();
        return apiClient.execute(call, localVarReturnType);
    }

    /**
     * testGetAll (asynchronously)
     *
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call testGetAllUsingGETAsync(final ApiCallback<List<PAPPolicy>> callback) throws ApiException {
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
        if (callback != null) {
            // Bridge okhttp progress events to the user-supplied callback.
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };
            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }
        com.squareup.okhttp.Call call = testGetAllUsingGETCall(progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<List<PAPPolicy>>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }

    /* Build call for testUpdatePolicyUsingPOST */
    private com.squareup.okhttp.Call testUpdatePolicyUsingPOSTCall(final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = null;
        // create path and map variables
        String localVarPath = "/PAPService/testUpdatePolicy".replaceAll("\\{format\\}","json");
        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        Map<String, Object> localVarFormParams = new HashMap<String, Object>();
        final String[] localVarAccepts = {
            "application/xml", "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);
        final String[] localVarContentTypes = {
            "application/xml", "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        // NOTE(review): assumes selectHeaderContentType never returns null — confirm in ApiClient.
        localVarHeaderParams.put("Content-Type", localVarContentType);
        if(progressListener != null) {
            // Wrap the response body so download progress is reported to the listener.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                        .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                        .build();
                }
            });
        }
        String[] localVarAuthNames = new String[] { "default" };
        return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    /**
     * testUpdatePolicy
     *
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public void testUpdatePolicyUsingPOST() throws ApiException {
        testUpdatePolicyUsingPOSTWithHttpInfo();
    }

    /**
     * testUpdatePolicy
     *
     * @return ApiResponse&lt;Void&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<Void> testUpdatePolicyUsingPOSTWithHttpInfo() throws ApiException {
        com.squareup.okhttp.Call call = testUpdatePolicyUsingPOSTCall(null, null);
        return apiClient.execute(call);
    }

    /**
     * testUpdatePolicy (asynchronously)
     *
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call testUpdatePolicyUsingPOSTAsync(final ApiCallback<Void> callback) throws ApiException {
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
        if (callback != null) {
            // Bridge okhttp progress events to the user-supplied callback.
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };
            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }
        com.squareup.okhttp.Call call = testUpdatePolicyUsingPOSTCall(progressListener, progressRequestListener);
        apiClient.executeAsync(call, callback);
        return call;
    }
}
| |
/**
* generated by Xtext 2.17.1
*/
package com.specmate.cause_effect_patterns.internal.specDSL.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import com.specmate.cause_effect_patterns.internal.specDSL.POSTag;
import com.specmate.cause_effect_patterns.internal.specDSL.SpecDSLPackage;
import com.specmate.cause_effect_patterns.internal.specDSL.Subtree;
import com.specmate.cause_effect_patterns.internal.specDSL.TreeNode;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Tree Node</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link com.specmate.cause_effect_patterns.specDSL.impl.TreeNodeImpl#getPTag <em>PTag</em>}</li>
* <li>{@link com.specmate.cause_effect_patterns.specDSL.impl.TreeNodeImpl#getExpr <em>Expr</em>}</li>
* <li>{@link com.specmate.cause_effect_patterns.specDSL.impl.TreeNodeImpl#isAnyMatch <em>Any Match</em>}</li>
* <li>{@link com.specmate.cause_effect_patterns.specDSL.impl.TreeNodeImpl#getTree <em>Tree</em>}</li>
* </ul>
*
* @generated
*/
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Tree Node</b></em>'.
 * Holds a POS-tag cross-reference, a match expression, an any-match flag,
 * and a contained subtree; all accessors fire EMF change notifications.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link com.specmate.cause_effect_patterns.specDSL.impl.TreeNodeImpl#getPTag <em>PTag</em>}</li>
 *   <li>{@link com.specmate.cause_effect_patterns.specDSL.impl.TreeNodeImpl#getExpr <em>Expr</em>}</li>
 *   <li>{@link com.specmate.cause_effect_patterns.specDSL.impl.TreeNodeImpl#isAnyMatch <em>Any Match</em>}</li>
 *   <li>{@link com.specmate.cause_effect_patterns.specDSL.impl.TreeNodeImpl#getTree <em>Tree</em>}</li>
 * </ul>
 *
 * @generated
 */
public class TreeNodeImpl extends NodeImpl implements TreeNode
{
  /**
   * The cached value of the '{@link #getPTag() <em>PTag</em>}' reference.
   * <!-- begin-user-doc -->
   * Non-containment cross-reference; may be a proxy until resolved.
   * <!-- end-user-doc -->
   * @see #getPTag()
   * @generated
   * @ordered
   */
  protected POSTag pTag;

  /**
   * The default value of the '{@link #getExpr() <em>Expr</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getExpr()
   * @generated
   * @ordered
   */
  protected static final String EXPR_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getExpr() <em>Expr</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getExpr()
   * @generated
   * @ordered
   */
  protected String expr = EXPR_EDEFAULT;

  /**
   * The default value of the '{@link #isAnyMatch() <em>Any Match</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isAnyMatch()
   * @generated
   * @ordered
   */
  protected static final boolean ANY_MATCH_EDEFAULT = false;

  /**
   * The cached value of the '{@link #isAnyMatch() <em>Any Match</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isAnyMatch()
   * @generated
   * @ordered
   */
  protected boolean anyMatch = ANY_MATCH_EDEFAULT;

  /**
   * The cached value of the '{@link #getTree() <em>Tree</em>}' containment reference.
   * <!-- begin-user-doc -->
   * Containment reference: this node owns the subtree's lifecycle.
   * <!-- end-user-doc -->
   * @see #getTree()
   * @generated
   * @ordered
   */
  protected Subtree tree;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected TreeNodeImpl()
  {
    super();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return SpecDSLPackage.Literals.TREE_NODE;
  }

  /**
   * <!-- begin-user-doc -->
   * Resolves the pTag proxy on first access and fires a RESOLVE notification
   * if the resolved object differs from the cached proxy.
   * <!-- end-user-doc -->
   * @generated
   */
  public POSTag getPTag()
  {
    if (pTag != null && pTag.eIsProxy())
    {
      InternalEObject oldPTag = (InternalEObject)pTag;
      pTag = (POSTag)eResolveProxy(oldPTag);
      if (pTag != oldPTag)
      {
        if (eNotificationRequired())
          eNotify(new ENotificationImpl(this, Notification.RESOLVE, SpecDSLPackage.TREE_NODE__PTAG, oldPTag, pTag));
      }
    }
    return pTag;
  }

  /**
   * <!-- begin-user-doc -->
   * Returns the cached pTag without proxy resolution.
   * <!-- end-user-doc -->
   * @generated
   */
  public POSTag basicGetPTag()
  {
    return pTag;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setPTag(POSTag newPTag)
  {
    POSTag oldPTag = pTag;
    pTag = newPTag;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, SpecDSLPackage.TREE_NODE__PTAG, oldPTag, pTag));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getExpr()
  {
    return expr;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setExpr(String newExpr)
  {
    String oldExpr = expr;
    expr = newExpr;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, SpecDSLPackage.TREE_NODE__EXPR, oldExpr, expr));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public boolean isAnyMatch()
  {
    return anyMatch;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setAnyMatch(boolean newAnyMatch)
  {
    boolean oldAnyMatch = anyMatch;
    anyMatch = newAnyMatch;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, SpecDSLPackage.TREE_NODE__ANY_MATCH, oldAnyMatch, anyMatch));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public Subtree getTree()
  {
    return tree;
  }

  /**
   * <!-- begin-user-doc -->
   * Swaps the containment reference without inverse-handling; accumulates the
   * SET notification onto the given chain instead of dispatching immediately.
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetTree(Subtree newTree, NotificationChain msgs)
  {
    Subtree oldTree = tree;
    tree = newTree;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, SpecDSLPackage.TREE_NODE__TREE, oldTree, newTree);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * <!-- begin-user-doc -->
   * Full containment setter: detaches the old subtree, attaches the new one,
   * then dispatches the accumulated notification chain.
   * <!-- end-user-doc -->
   * @generated
   */
  public void setTree(Subtree newTree)
  {
    if (newTree != tree)
    {
      NotificationChain msgs = null;
      if (tree != null)
        msgs = ((InternalEObject)tree).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - SpecDSLPackage.TREE_NODE__TREE, null, msgs);
      if (newTree != null)
        msgs = ((InternalEObject)newTree).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - SpecDSLPackage.TREE_NODE__TREE, null, msgs);
      msgs = basicSetTree(newTree, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, SpecDSLPackage.TREE_NODE__TREE, newTree, newTree));
  }

  /**
   * <!-- begin-user-doc -->
   * Clears the contained subtree when its inverse is removed.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case SpecDSLPackage.TREE_NODE__TREE:
        return basicSetTree(null, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective feature getter used by the EMF runtime.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case SpecDSLPackage.TREE_NODE__PTAG:
        if (resolve) return getPTag();
        return basicGetPTag();
      case SpecDSLPackage.TREE_NODE__EXPR:
        return getExpr();
      case SpecDSLPackage.TREE_NODE__ANY_MATCH:
        return isAnyMatch();
      case SpecDSLPackage.TREE_NODE__TREE:
        return getTree();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective feature setter used by the EMF runtime.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case SpecDSLPackage.TREE_NODE__PTAG:
        setPTag((POSTag)newValue);
        return;
      case SpecDSLPackage.TREE_NODE__EXPR:
        setExpr((String)newValue);
        return;
      case SpecDSLPackage.TREE_NODE__ANY_MATCH:
        setAnyMatch((Boolean)newValue);
        return;
      case SpecDSLPackage.TREE_NODE__TREE:
        setTree((Subtree)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * <!-- begin-user-doc -->
   * Resets a feature to its generated default value.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case SpecDSLPackage.TREE_NODE__PTAG:
        setPTag((POSTag)null);
        return;
      case SpecDSLPackage.TREE_NODE__EXPR:
        setExpr(EXPR_EDEFAULT);
        return;
      case SpecDSLPackage.TREE_NODE__ANY_MATCH:
        setAnyMatch(ANY_MATCH_EDEFAULT);
        return;
      case SpecDSLPackage.TREE_NODE__TREE:
        setTree((Subtree)null);
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * Reports whether a feature differs from its default value.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case SpecDSLPackage.TREE_NODE__PTAG:
        return pTag != null;
      case SpecDSLPackage.TREE_NODE__EXPR:
        return EXPR_EDEFAULT == null ? expr != null : !EXPR_EDEFAULT.equals(expr);
      case SpecDSLPackage.TREE_NODE__ANY_MATCH:
        return anyMatch != ANY_MATCH_EDEFAULT;
      case SpecDSLPackage.TREE_NODE__TREE:
        return tree != null;
    }
    return super.eIsSet(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * Renders the attribute values (not references) for debugging.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString()
  {
    if (eIsProxy()) return super.toString();
    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (expr: ");
    result.append(expr);
    result.append(", anyMatch: ");
    result.append(anyMatch);
    result.append(')');
    return result.toString();
  }
} //TreeNodeImpl
| |
/*
***************************************************************************
* Mica - the Java(tm) Graphics Framework *
***************************************************************************
* NOTICE: Permission to use, copy, and modify this software and its *
* documentation is hereby granted provided that this notice appears in *
* all copies. *
* *
* Permission to distribute un-modified copies of this software and its *
* documentation is hereby granted provided that no fee is charged and *
* that this notice appears in all copies. *
* *
* SOFTWARE FARM MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE *
* SUITABILITY OF THE SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT *
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR *
* A PARTICULAR PURPOSE, OR NON-INFRINGEMENT. SOFTWARE FARM SHALL NOT BE *
* LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR *
* CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE, MODIFICATION OR *
* DISTRIBUTION OF THIS SOFTWARE OR ITS DERIVATIVES. *
* *
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND *
* DISTRIBUTORS HAVE NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, *
* UPDATES, ENHANCEMENTS, OR MODIFICATIONS. *
* *
***************************************************************************
* Copyright (c) 1997-2004 Software Farm, Inc. All Rights Reserved. *
***************************************************************************
*/
package com.swfm.mica;
/**
* @version %I% %G%
* @author Michael L. Davis
* @release 1.4.1
* @module %M%
* @language Java (JDK 1.4)
*/
public class MiClosestValidManagedPointFinder
{
public static final int EXAMINE_CONN_POINT_MANAGERS = 1;
public static final int EXAMINE_COMMON_POINTS = 2;
public static final int EXAMINE_PARTS_OF_CANDIDATES = 4;
public static final int MIXED_METHODOLOGIES_IN_SAME_PART = 8;
public static final int EXAMINE_COMMON_POINTS_IF_NO_POINT_MANAGERS = 16;
private static final int DEFAULT_PICK_AREA_SIZE = 20;
public MiBounds tmpBounds = new MiBounds();
public MiPoint closestConnPt = new MiPoint();
public int closestConnPtID = MiiTypes.Mi_CENTER_LOCATION;
public MiPart closestObject;
private MiDistance pickAreaSize;
private int validConnPtLocations[];
private int methodology = EXAMINE_CONN_POINT_MANAGERS
| EXAMINE_COMMON_POINTS_IF_NO_POINT_MANAGERS;
private MiiManagedPointValidator validator;
private MiManagedPointManager pointManagerKind = new MiConnectionPointManager();
public MiClosestValidManagedPointFinder(MiiManagedPointValidator validator)
{
pickAreaSize = DEFAULT_PICK_AREA_SIZE;
this.validator = validator;
}
public void setMethodology(int m)
{
methodology = m;
}
public int getMethodology()
{
return(methodology);
}
public void setPickAreaSize(MiDistance size)
{
pickAreaSize = size;
}
public MiDistance getPickAreaSize()
{
return(pickAreaSize);
}
public void setValidConnPtLocations(int[] connPts)
{
validConnPtLocations = connPts;
}
public int[] getValidConnPtLocations()
{
return(validConnPtLocations);
}
// Sets values for closest object, the objects connPt and the location of the connPt
/****
public boolean findClosestManagedPoint(
MiEditor editor,
MiPart srcObj,
int srcConnPt,
MiPart ignoreObj,
MiPoint location)
{
return(findClosestManagedPoint(editor, srcObj, srcConnPt, ignoreObj, location, true));
}
****/
public boolean findClosestManagedPoint(
MiEditor editor,
MiPart srcObj,
int srcConnPt,
MiPart destObj,
int destConnPt,
MiPart ignoreObj,
MiPoint location,
boolean allowSameSrcAndDest,
boolean findSrcObj)
{
boolean found = false;
double closestDist = MiiTypes.Mi_MAX_DISTANCE_VALUE;
double dist;
MiPart obj;
MiBounds pickArea = MiBounds.newBounds();
MiManagedPointSearchResults connPtManResults = new MiManagedPointSearchResults();
MiManagedPointSearchResults commonPtResults = new MiManagedPointSearchResults();
MiManagedPointSearchResults winner = new MiManagedPointSearchResults();
boolean examinePartsOfObjects = ((methodology & EXAMINE_PARTS_OF_CANDIDATES) != 0);
boolean winnerIsAConnPt = false;
closestObject = null;
if (srcConnPt == MiiTypes.Mi_DEFAULT_LOCATION)
{
int[] connPt = new int[1];
MiPart connNode = MiManagedPointManager.getDefaultManagedPoint(
srcObj, connPt, pointManagerKind);
if (connNode != null)
{
srcObj = connNode;
srcConnPt = connPt[1];
}
}
pickArea.setBounds(0, 0, pickAreaSize, pickAreaSize);
MiEditorIterator iterator = new MiEditorIterator(editor);
pickArea.setCenter(location);
while ((obj = iterator.getNext()) != null)
{
if (obj.getDrawBounds(tmpBounds).intersects(pickArea))
{
if (((allowSameSrcAndDest)
|| ((findSrcObj) && (obj != destObj))
|| ((!findSrcObj) && (obj != srcObj)))
&& (obj != ignoreObj)
&& ((validator == null) || ((findSrcObj) ?
validator.isValidConnectionSource(obj, destObj) :
validator.isValidConnectionDestination(srcObj, obj))))
{
//MiDebug.println("methodology = " + methodology);
if ((methodology & EXAMINE_CONN_POINT_MANAGERS) != 0)
{
connPtManResults.init();
findClosestPointUsingConnectionManagers(
obj, location, examinePartsOfObjects,
validator, srcObj, srcConnPt,
destObj, destConnPt,
ignoreObj,
allowSameSrcAndDest, findSrcObj,
connPtManResults);
//MiDebug.println("EXAMINE_CONN_POINT_MANAGERS connPtManResults = " + connPtManResults);
winner = connPtManResults;
}
if (((methodology & EXAMINE_COMMON_POINTS) != 0)
|| ((MiManagedPointManager.getManager(obj, pointManagerKind) == null)
&& ((methodology & EXAMINE_COMMON_POINTS_IF_NO_POINT_MANAGERS) != 0)))
{
commonPtResults.init();
if (validConnPtLocations != null)
{
findClosestPointUsingValidCommonPoints(
obj, location, examinePartsOfObjects,
validConnPtLocations, validator,
srcObj, srcConnPt,
destObj, destConnPt,
ignoreObj,
allowSameSrcAndDest, findSrcObj,
commonPtResults);
//MiDebug.println("EXAMINE_COMMON_POINTS #1 connPtManResults = " + connPtManResults);
}
else
{
findClosestPointUsingCommonPoints(
obj, location, examinePartsOfObjects,
validator,
srcObj, srcConnPt,
destObj, destConnPt,
ignoreObj,
allowSameSrcAndDest, findSrcObj,
commonPtResults);
}
winner = commonPtResults;
//MiDebug.println("EXAMINE_COMMON_POINTS #2 commonPtResults = " + commonPtResults);
}
if ((methodology &
(EXAMINE_CONN_POINT_MANAGERS + EXAMINE_COMMON_POINTS))
== EXAMINE_CONN_POINT_MANAGERS + EXAMINE_COMMON_POINTS)
{
//MiDebug.println("EXAMINE_COMMON_POINTS #3 commonPtResults = " + commonPtResults);
if (connPtManResults.closestObject
== commonPtResults.closestObject)
{
if ((methodology
& MIXED_METHODOLOGIES_IN_SAME_PART) == 0)
{
winner = connPtManResults;
}
}
else
{
if (connPtManResults.closestDistSquared
< commonPtResults.closestDistSquared)
{
winner = connPtManResults;
}
else
{
winner = commonPtResults;
}
}
}
if (winner.closestDistSquared < closestDist)
{
//MiDebug.println("WINNER POINT Results = " + winner);
closestDist = winner.closestDistSquared;
closestConnPt.x = winner.closestConnPtLocation.x;
closestConnPt.y = winner.closestConnPtLocation.y;
closestObject = winner.closestObject;
closestConnPtID = winner.closestConnPtNumber;
found = true;
winnerIsAConnPt = (winner == connPtManResults);
}
}
}
}
//MiDebug.println("found = " + found);
//MiDebug.println("closestConnPtID = " + closestConnPtID);
//MiDebug.println("pickAreaSize = " + pickAreaSize);
if (found)
{
if (((Math.abs(closestConnPt.x - location.x) > pickAreaSize)
|| (Math.abs(closestConnPt.y - location.y) > pickAreaSize))
// Don't reject if location is within object... (esp. if
// we are connecting to the center common pt)...unless
// we are connecting to a connPt
&& ((winnerIsAConnPt) || (!closestObject.pick(pickArea))))
{
// Closest pt is still too far away.
//MiDebug.println("Closest pt is still too far away. ");
MiBounds.freeBounds(pickArea);
return(false);
}
MiBounds.freeBounds(pickArea);
return(true);
}
MiBounds.freeBounds(pickArea);
return(false);
}
protected void findClosestPointUsingConnectionManagers(
MiPart obj, MiPoint location,
boolean examinePartsOfObjects,
MiiManagedPointValidator validator,
MiPart srcObj, int srcConnPtNumber,
MiPart destObj, int destConnPtNumber,
MiPart ignoreObj,
boolean allowSameSrcAndDest, boolean findSrcObj,
MiManagedPointSearchResults results)
{
results.init();
MiManagedPointManager man = pointManagerKind.getManager(obj);
if ((man == null) && (!examinePartsOfObjects))
return;
if (examinePartsOfObjects)
{
MiManagedPointManager.getClosestManagedPointIncludingAllParts(
obj, location, validator,
srcObj, srcConnPtNumber,
destObj, destConnPtNumber,
ignoreObj,
allowSameSrcAndDest,
findSrcObj,
results, pointManagerKind);
}
else
{
man.getClosestManagedPoint(
obj, location, validator,
srcObj, srcConnPtNumber,
destObj, destConnPtNumber,
ignoreObj,
allowSameSrcAndDest,
findSrcObj,
results);
}
}
protected void findClosestPointUsingValidCommonPoints(
MiPart obj,
MiPoint location,
boolean examinePartsOfObjects,
int[] validConnPtLocations,
MiiManagedPointValidator validator,
MiPart srcObj, int srcConnPtNumber,
MiPart destObj, int destConnPtNumber,
MiPart ignoreObj,
boolean allowSameSrcAndDest, boolean findSrcObj,
MiManagedPointSearchResults results)
{
MiPoint connPtLocation = new MiPoint();
for (int i = 0; i < validConnPtLocations.length; ++i)
{
if ((obj != ignoreObj)
&& (((findSrcObj)
&& ((allowSameSrcAndDest) || (obj != destObj))
&& ((obj != destObj) || (validConnPtLocations[i] != destConnPtNumber))
&& ((validator == null)
||
(validator.isValidConnectionSource(
obj, validConnPtLocations[i], destObj, destConnPtNumber))))
|| ((!findSrcObj)
&& ((allowSameSrcAndDest) || (obj != srcObj))
&& ((obj != srcObj) || (validConnPtLocations[i] != srcConnPtNumber))
&& ((validator == null)
||
(validator.isValidConnectionDestination(
srcObj, srcConnPtNumber, obj, validConnPtLocations[i]))))))
{
MiManagedPointManager.getLocationOfCommonPoint(
obj,
validConnPtLocations[i],
connPtLocation);
double dist = location.getDistanceSquared(connPtLocation);
if (dist < results.closestDistSquared)
{
results.closestDistSquared = dist;
results.closestConnPtLocation.x = connPtLocation.x;
results.closestConnPtLocation.y = connPtLocation.y;
results.closestObject = obj;
results.closestConnPtNumber = validConnPtLocations[i];
}
}
}
if (examinePartsOfObjects)
{
for (int i = 0; i < obj.getNumberOfParts(); ++i)
{
findClosestPointUsingValidCommonPoints(
obj.getPart(i), location, examinePartsOfObjects,
validConnPtLocations, validator,
srcObj, srcConnPtNumber,
destObj, destConnPtNumber,
ignoreObj,
allowSameSrcAndDest, findSrcObj,
results);
}
}
}
protected void findClosestPointUsingCommonPoints(
MiPart obj,
MiPoint location,
boolean examinePartsOfObjects,
MiiManagedPointValidator validator,
MiPart srcObj, int srcConnPtNumber,
MiPart destObj, int destConnPtNumber,
MiPart ignoreObj,
boolean allowSameSrcAndDest, boolean findSrcObj,
MiManagedPointSearchResults results)
{
MiPoint connPtLocation = new MiPoint();
int aConnPtID = MiManagedPointManager.getClosestCommonPoint(
obj, location, connPtLocation);
double dist = location.getDistanceSquared(connPtLocation);
if ((obj != ignoreObj)
&& (((findSrcObj)
&& ((allowSameSrcAndDest) || (obj != destObj))
&& ((obj != destObj) || (aConnPtID != destConnPtNumber))
&& (validator.isValidConnectionSource(
obj, aConnPtID, destObj, destConnPtNumber)))
|| ((!findSrcObj)
&& ((allowSameSrcAndDest) || (obj != srcObj))
&& ((obj != srcObj) || (aConnPtID != srcConnPtNumber))
&& (validator.isValidConnectionDestination(
srcObj, srcConnPtNumber, obj, aConnPtID))))
&& (dist < results.closestDistSquared))
{
results.closestDistSquared = dist;
results.closestConnPtLocation.x = connPtLocation.x;
results.closestConnPtLocation.y = connPtLocation.y;
results.closestObject = obj;
results.closestConnPtNumber = aConnPtID;
}
if (examinePartsOfObjects)
{
for (int i = 0; i < obj.getNumberOfParts(); ++i)
{
findClosestPointUsingCommonPoints(
obj.getPart(i), location, examinePartsOfObjects,
validator,
srcObj, srcConnPtNumber,
destObj, destConnPtNumber,
ignoreObj,
allowSameSrcAndDest, findSrcObj,
results);
}
}
}
/**
 * Returns a diagnostic description of this object: the superclass's
 * representation followed by the closest-object, closest-connection-point-id
 * and closest-connection-point fields.
 * NOTE(review): this prints closestConnPtID/closestConnPt while the search
 * methods populate closestConnPtNumber/closestConnPtLocation — confirm these
 * fields are kept in sync elsewhere in the class.
 */
public String toString()
{
StringBuilder buf = new StringBuilder(super.toString());
buf.append("[closestObject=").append(closestObject);
buf.append("][closestConnPtID=").append(closestConnPtID);
buf.append("][closestConnPt=").append(closestConnPt);
buf.append(']');
return buf.toString();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector.types.pojo;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import org.apache.arrow.vector.types.DateUnit;
import org.apache.arrow.vector.types.FloatingPointPrecision;
import org.apache.arrow.vector.types.IntervalUnit;
import org.apache.arrow.vector.types.TimeUnit;
import org.apache.arrow.vector.types.UnionMode;
import org.apache.arrow.vector.types.pojo.ArrowType.Binary;
import org.apache.arrow.vector.types.pojo.ArrowType.Bool;
import org.apache.arrow.vector.types.pojo.ArrowType.Date;
import org.apache.arrow.vector.types.pojo.ArrowType.Decimal;
import org.apache.arrow.vector.types.pojo.ArrowType.FloatingPoint;
import org.apache.arrow.vector.types.pojo.ArrowType.Int;
import org.apache.arrow.vector.types.pojo.ArrowType.Interval;
import org.apache.arrow.vector.types.pojo.ArrowType.List;
import org.apache.arrow.vector.types.pojo.ArrowType.Null;
import org.apache.arrow.vector.types.pojo.ArrowType.Struct;
import org.apache.arrow.vector.types.pojo.ArrowType.Time;
import org.apache.arrow.vector.types.pojo.ArrowType.Timestamp;
import org.apache.arrow.vector.types.pojo.ArrowType.Union;
import org.apache.arrow.vector.types.pojo.ArrowType.Utf8;
import org.junit.Test;
/**
 * Round-trip (POJO -&gt; JSON -&gt; POJO) tests for {@link Schema}, covering the
 * Arrow type zoo and verifying equals/hashCode consistency and the
 * human-readable {@link Schema#toString()} form.
 */
public class TestSchema {
// Convenience factory: field with explicit nullability and child fields.
private static Field field(String name, boolean nullable, ArrowType type, Field... children) {
return new Field(name, nullable, type, asList(children));
}
// Convenience factory: nullable field.
private static Field field(String name, ArrowType type, Field... children) {
return field(name, true, type, children);
}
// Nested struct/list schema; also pins the exact toString() rendering.
@Test
public void testComplex() throws IOException {
Schema schema = new Schema(asList(
field("a", false, new Int(8, true)),
field("b", new Struct(),
field("c", new Int(16, true)),
field("d", new Utf8())),
field("e", new List(), field(null, new Date(DateUnit.MILLISECOND))),
field("f", new FloatingPoint(FloatingPointPrecision.SINGLE)),
field("g", new Timestamp(TimeUnit.MILLISECOND, "UTC")),
field("h", new Timestamp(TimeUnit.MICROSECOND, null)),
field("i", new Interval(IntervalUnit.DAY_TIME))
));
roundTrip(schema);
assertEquals(
"Schema<a: Int(8, true) not null, b: Struct<c: Int(16, true), d: Utf8>, e: List<Date(MILLISECOND)>, f: FloatingPoint(SINGLE), g: Timestamp(MILLISECOND, UTC), h: Timestamp(MICROSECOND, null), i: Interval(DAY_TIME)>",
schema.toString());
}
// One field per ArrowType variant; round-trip only (no toString check).
@Test
public void testAll() throws IOException {
Schema schema = new Schema(asList(
field("a", false, new Null()),
field("b", new Struct(), field("ba", new Null())),
field("c", new List(), field("ca", new Null())),
field("d", new Union(UnionMode.Sparse, new int[] {1, 2, 3}), field("da", new Null())),
field("e", new Int(8, true)),
field("f", new FloatingPoint(FloatingPointPrecision.SINGLE)),
field("g", new Utf8()),
field("h", new Binary()),
field("i", new Bool()),
field("j", new Decimal(5, 5)),
field("k", new Date(DateUnit.DAY)),
field("l", new Date(DateUnit.MILLISECOND)),
field("m", new Time(TimeUnit.SECOND, 32)),
field("n", new Time(TimeUnit.MILLISECOND, 32)),
field("o", new Time(TimeUnit.MICROSECOND, 64)),
field("p", new Time(TimeUnit.NANOSECOND, 64)),
field("q", new Timestamp(TimeUnit.MILLISECOND, "UTC")),
field("r", new Timestamp(TimeUnit.MICROSECOND, null)),
field("s", new Interval(IntervalUnit.DAY_TIME))
));
roundTrip(schema);
}
// The union mode must survive serialization (appears in the JSON).
@Test
public void testUnion() throws IOException {
Schema schema = new Schema(asList(
field("d", new Union(UnionMode.Sparse, new int[] {1, 2, 3}), field("da", new Null()))
));
roundTrip(schema);
contains(schema, "Sparse");
}
@Test
public void testDate() throws IOException {
Schema schema = new Schema(asList(
field("a", new Date(DateUnit.DAY)),
field("b", new Date(DateUnit.MILLISECOND))
));
roundTrip(schema);
assertEquals(
"Schema<a: Date(DAY), b: Date(MILLISECOND)>",
schema.toString());
}
@Test
public void testTime() throws IOException {
Schema schema = new Schema(asList(
field("a", new Time(TimeUnit.SECOND, 32)),
field("b", new Time(TimeUnit.MILLISECOND, 32)),
field("c", new Time(TimeUnit.MICROSECOND, 64)),
field("d", new Time(TimeUnit.NANOSECOND, 64))
));
roundTrip(schema);
assertEquals(
"Schema<a: Time(SECOND, 32), b: Time(MILLISECOND, 32), c: Time(MICROSECOND, 64), d: Time(NANOSECOND, 64)>",
schema.toString());
}
// Timestamps with and without an explicit timezone.
@Test
public void testTS() throws IOException {
Schema schema = new Schema(asList(
field("a", new Timestamp(TimeUnit.SECOND, "UTC")),
field("b", new Timestamp(TimeUnit.MILLISECOND, "UTC")),
field("c", new Timestamp(TimeUnit.MICROSECOND, "UTC")),
field("d", new Timestamp(TimeUnit.NANOSECOND, "UTC")),
field("e", new Timestamp(TimeUnit.SECOND, null)),
field("f", new Timestamp(TimeUnit.MILLISECOND, null)),
field("g", new Timestamp(TimeUnit.MICROSECOND, null)),
field("h", new Timestamp(TimeUnit.NANOSECOND, null))
));
roundTrip(schema);
assertEquals(
"Schema<a: Timestamp(SECOND, UTC), b: Timestamp(MILLISECOND, UTC), c: Timestamp(MICROSECOND, UTC), d: Timestamp(NANOSECOND, UTC), e: Timestamp(SECOND, null), f: Timestamp(MILLISECOND, null), g: Timestamp(MICROSECOND, null), h: Timestamp(NANOSECOND, null)>",
schema.toString());
}
@Test
public void testInterval() throws IOException {
Schema schema = new Schema(asList(
field("a", new Interval(IntervalUnit.YEAR_MONTH)),
field("b", new Interval(IntervalUnit.DAY_TIME))
));
roundTrip(schema);
contains(schema, "YEAR_MONTH", "DAY_TIME");
}
@Test
public void testFP() throws IOException {
Schema schema = new Schema(asList(
field("a", new FloatingPoint(FloatingPointPrecision.HALF)),
field("b", new FloatingPoint(FloatingPointPrecision.SINGLE)),
field("c", new FloatingPoint(FloatingPointPrecision.DOUBLE))
));
roundTrip(schema);
contains(schema, "HALF", "SINGLE", "DOUBLE");
}
// Serializes to JSON, parses it back, and checks JSON/equals/hashCode agree.
private void roundTrip(Schema schema) throws IOException {
String json = schema.toJson();
Schema actual = Schema.fromJSON(json);
assertEquals(schema.toJson(), actual.toJson());
assertEquals(schema, actual);
validateFieldsHashcode(schema.getFields(), actual.getFields());
assertEquals(schema.hashCode(), actual.hashCode());
}
// Recursively checks that corresponding fields (and their types) are equal
// and hash identically.  Fully-qualified java.util.List is required because
// ArrowType.List is imported above.
private void validateFieldsHashcode(java.util.List<Field> schemaFields, java.util.List<Field> actualFields) {
assertEquals(schemaFields.size(), actualFields.size());
if (schemaFields.size() == 0) {
return;
}
for (int i = 0; i < schemaFields.size(); i++) {
Field schemaField = schemaFields.get(i);
Field actualField = actualFields.get(i);
validateFieldsHashcode(schemaField.getChildren(), actualField.getChildren());
validateHashCode(schemaField.getType(), actualField.getType());
validateHashCode(schemaField, actualField);
}
}
// Asserts the equals/hashCode contract for a pair of equal objects.
private void validateHashCode(Object o1, Object o2) {
assertEquals(o1, o2);
assertEquals(o1 + " == " + o2, o1.hashCode(), o2.hashCode());
}
// Asserts that the schema's JSON form contains each given substring.
private void contains(Schema schema, String... s) throws IOException {
String json = schema.toJson();
for (String string : s) {
assertTrue(json + " contains " + string, json.contains(string));
}
}
}
| |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package com.android.webview.chromium;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Picture;
import android.net.Uri;
import android.net.http.SslError;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.ActionMode;
import android.view.KeyEvent;
import android.view.View;
import android.webkit.ClientCertRequest;
import android.webkit.ConsoleMessage;
import android.webkit.DownloadListener;
import android.webkit.GeolocationPermissions;
import android.webkit.JsDialogHelper;
import android.webkit.JsPromptResult;
import android.webkit.JsResult;
import android.webkit.PermissionRequest;
import android.webkit.SslErrorHandler;
import android.webkit.ValueCallback;
import android.webkit.WebChromeClient;
import android.webkit.WebChromeClient.CustomViewCallback;
import android.webkit.WebResourceRequest;
import android.webkit.WebResourceResponse;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import com.android.webview.chromium.WebViewDelegateFactory.WebViewDelegate;
import org.chromium.android_webview.AwContentsClient;
import org.chromium.android_webview.AwContentsClientBridge;
import org.chromium.android_webview.AwHttpAuthHandler;
import org.chromium.android_webview.AwWebResourceResponse;
import org.chromium.android_webview.JsPromptResultReceiver;
import org.chromium.android_webview.JsResultReceiver;
import org.chromium.android_webview.permission.AwPermissionRequest;
import org.chromium.base.ThreadUtils;
import org.chromium.base.TraceEvent;
import org.chromium.base.annotations.SuppressFBWarnings;
import org.chromium.content.browser.ContentView;
import org.chromium.content.browser.ContentViewClient;
import org.chromium.content.browser.ContentViewCore;
import org.chromium.content.browser.SelectActionMode;
import org.chromium.content.browser.SelectActionModeCallback;
import org.chromium.content.browser.SelectActionModeCallback.ActionHandler;
import java.lang.ref.WeakReference;
import java.security.Principal;
import java.security.PrivateKey;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.WeakHashMap;
/**
* An adapter class that forwards the callbacks from {@link ContentViewClient}
* to the appropriate {@link WebViewClient} or {@link WebChromeClient}.
*
* An instance of this class is associated with one {@link WebViewChromium}
* instance. A WebViewChromium is a WebView implementation provider (that is
* android.webkit.WebView delegates all functionality to it) and has exactly
* one corresponding {@link ContentView} instance.
*
* A {@link ContentViewClient} may be shared between multiple {@link ContentView}s,
* and hence multiple WebViews. Many WebViewClient methods pass the source
* WebView as an argument. This means that we either need to pass the
* corresponding ContentView to the corresponding ContentViewClient methods,
* or use an instance of ContentViewClientAdapter per WebViewChromium, to
* allow the source WebView to be injected by ContentViewClientAdapter. We
* choose the latter, because it makes for a cleaner design.
*/
@SuppressWarnings("deprecation")
public class WebViewContentsClientAdapter extends AwContentsClient {
// TAG is chosen for consistency with classic webview tracing.
private static final String TAG = "WebViewCallback";
// Enables API callback tracing (compile-time flag).
private static final boolean TRACE = false;
// Default WebViewClient used to avoid null checks.
private static WebViewClient sNullWebViewClient = new WebViewClient();
// The WebView instance that this adapter is serving.
private final WebView mWebView;
// The Context to use. This is different from mWebView.getContext(), which should not be used.
private final Context mContext;
// The WebViewClient instance that was passed to WebView.setWebViewClient().
private WebViewClient mWebViewClient = sNullWebViewClient;
// The WebChromeClient instance that was passed to WebView.setWebChromeClient(). May be null.
private WebChromeClient mWebChromeClient;
// The listener receiving find-in-page API results.
private WebView.FindListener mFindListener;
// The listener receiving notifications of screen updates.
private WebView.PictureListener mPictureListener;
// Delegate used to reach framework internals (e.g. localized error strings).
private WebViewDelegate mWebViewDelegate;
// The listener notified when a download should be started.
private DownloadListener mDownloadListener;
// Handler bound to the UI thread; processes popup-window creation messages.
private Handler mUiThreadHandler;
// Message id posted to mUiThreadHandler when a popup WebView is created.
private static final int NEW_WEBVIEW_CREATED = 100;
// Maps in-flight AwPermissionRequests to their framework-facing adapters so a
// later cancellation reaches the same adapter instance handed to the app.
private WeakHashMap<AwPermissionRequest, WeakReference<PermissionRequestAdapter>>
mOngoingPermissionRequests;
/**
* Adapter constructor.
*
* @param webView the {@link WebView} instance that this adapter is serving.
*/
WebViewContentsClientAdapter(WebView webView, Context context,
WebViewDelegate webViewDelegate) {
if (webView == null || webViewDelegate == null) {
throw new IllegalArgumentException("webView or delegate can't be null.");
}
if (context == null) {
throw new IllegalArgumentException("context can't be null.");
}
mContext = context;
mWebView = webView;
mWebViewDelegate = webViewDelegate;
// Installs sNullWebViewClient so later delegations never need a null check.
setWebViewClient(null);
// Handler is created on the constructing thread (assumed UI thread) and
// completes popup-window creation once the app replies with the new WebView
// via WebViewTransport (see onCreateWindow).
mUiThreadHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case NEW_WEBVIEW_CREATED:
WebView.WebViewTransport t = (WebView.WebViewTransport) msg.obj;
WebView newWebView = t.getWebView();
// The app must not hand back the parent WebView itself...
if (newWebView == mWebView) {
throw new IllegalArgumentException(
"Parent WebView cannot host it's own popup window. Please "
+ "use WebSettings.setSupportMultipleWindows(false)");
}
// ...nor a WebView that has already been navigated.
if (newWebView != null && newWebView.copyBackForwardList().getSize() != 0) {
throw new IllegalArgumentException(
"New WebView for popup window must not have been previously "
+ "navigated.");
}
WebViewChromium.completeWindowCreation(mWebView, newWebView);
break;
default:
throw new IllegalStateException();
}
}
};
}
/** Sets the WebViewClient; passing null restores the no-op default client. */
void setWebViewClient(WebViewClient client) {
if (client != null) {
mWebViewClient = client;
} else {
mWebViewClient = sNullWebViewClient;
}
}
/** Sets the WebChromeClient; may be null (chrome callbacks become no-ops). */
void setWebChromeClient(WebChromeClient client) {
mWebChromeClient = client;
}
/** Sets the listener notified when a download should start; may be null. */
void setDownloadListener(DownloadListener listener) {
mDownloadListener = listener;
}
/** Sets the listener receiving find-in-page results; may be null. */
void setFindListener(WebView.FindListener listener) {
mFindListener = listener;
}
/** Sets the listener receiving new Picture snapshots; may be null. */
void setPictureListener(WebView.PictureListener listener) {
mPictureListener = listener;
}
//--------------------------------------------------------------------------------------------
// Adapter for all the methods.
//--------------------------------------------------------------------------------------------
/**
* @see AwContentsClient#hasWebViewClient.
*/
@Override
public boolean hasWebViewClient() {
// True only when the app installed a real client (not the null default).
return mWebViewClient != sNullWebViewClient;
}
/**
* @see AwContentsClient#getVisitedHistory.
*/
@Override
public void getVisitedHistory(ValueCallback<String[]> callback) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.getVisitedHistory");
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "getVisitedHistory");
mWebChromeClient.getVisitedHistory(callback);
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.getVisitedHistory");
}
}
/**
* @see AwContentsClient#doUpdateVisitedHistory(String, boolean)
*/
@Override
public void doUpdateVisitedHistory(String url, boolean isReload) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.doUpdateVisitedHistory");
if (TRACE) Log.d(TAG, "doUpdateVisitedHistory=" + url + " reload=" + isReload);
mWebViewClient.doUpdateVisitedHistory(mWebView, url, isReload);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.doUpdateVisitedHistory");
}
}
/**
* @see AwContentsClient#onProgressChanged(int)
*/
@Override
public void onProgressChanged(int progress) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onProgressChanged");
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "onProgressChanged=" + progress);
mWebChromeClient.onProgressChanged(mWebView, progress);
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onProgressChanged");
}
}
// Adapts the chromium-side AwWebResourceRequest to the framework's
// WebResourceRequest interface handed to WebViewClient callbacks.
private static class WebResourceRequestImpl implements WebResourceRequest {
private final AwWebResourceRequest mRequest;
public WebResourceRequestImpl(AwWebResourceRequest request) {
mRequest = request;
}
@Override
public Uri getUrl() {
// Parsed lazily on each call from the raw URL string.
return Uri.parse(mRequest.url);
}
@Override
public boolean isForMainFrame() {
return mRequest.isMainFrame;
}
@Override
public boolean hasGesture() {
return mRequest.hasUserGesture;
}
@Override
public String getMethod() {
return mRequest.method;
}
@Override
public Map<String, String> getRequestHeaders() {
return mRequest.requestHeaders;
}
}
/**
* @see AwContentsClient#shouldInterceptRequest(java.lang.String)
*/
@Override
public AwWebResourceResponse shouldInterceptRequest(AwWebResourceRequest request) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.shouldInterceptRequest");
if (TRACE) Log.d(TAG, "shouldInterceptRequest=" + request.url);
WebResourceResponse response = mWebViewClient.shouldInterceptRequest(mWebView,
new WebResourceRequestImpl(request));
// null means "do not intercept; load normally".
if (response == null) return null;
// AwWebResourceResponse should support null headers. b/16332774.
Map<String, String> responseHeaders = response.getResponseHeaders();
if (responseHeaders == null) responseHeaders = new HashMap<String, String>();
// Convert the framework response into the chromium-side representation.
return new AwWebResourceResponse(
response.getMimeType(),
response.getEncoding(),
response.getData(),
response.getStatusCode(),
response.getReasonPhrase(),
responseHeaders);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.shouldInterceptRequest");
}
}
/**
* @see AwContentsClient#shouldOverrideUrlLoading(java.lang.String)
*/
@Override
public boolean shouldOverrideUrlLoading(String url) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.shouldOverrideUrlLoading");
if (TRACE) Log.d(TAG, "shouldOverrideUrlLoading=" + url);
// true tells chromium the app handled the navigation itself.
boolean result = mWebViewClient.shouldOverrideUrlLoading(mWebView, url);
return result;
} finally {
TraceEvent.end("WebViewContentsClientAdapter.shouldOverrideUrlLoading");
}
}
/**
* @see AwContentsClient#onUnhandledKeyEvent(android.view.KeyEvent)
*/
@Override
public void onUnhandledKeyEvent(KeyEvent event) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onUnhandledKeyEvent");
if (TRACE) Log.d(TAG, "onUnhandledKeyEvent");
mWebViewClient.onUnhandledKeyEvent(mWebView, event);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onUnhandledKeyEvent");
}
}
/**
* @see AwContentsClient#onConsoleMessage(android.webkit.ConsoleMessage)
*/
@Override
public boolean onConsoleMessage(ConsoleMessage consoleMessage) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onConsoleMessage");
boolean result;
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "onConsoleMessage: " + consoleMessage.message());
result = mWebChromeClient.onConsoleMessage(consoleMessage);
} else {
// No chrome client: report the message as unhandled.
result = false;
}
return result;
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onConsoleMessage");
}
}
/**
* @see AwContentsClient#onFindResultReceived(int,int,boolean)
*/
@Override
public void onFindResultReceived(int activeMatchOrdinal, int numberOfMatches,
boolean isDoneCounting) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onFindResultReceived");
if (mFindListener == null) return;
if (TRACE) Log.d(TAG, "onFindResultReceived");
mFindListener.onFindResultReceived(activeMatchOrdinal, numberOfMatches, isDoneCounting);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onFindResultReceived");
}
}
/**
* @See AwContentsClient#onNewPicture(Picture)
*/
@Override
public void onNewPicture(Picture picture) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onNewPicture");
if (mPictureListener == null) return;
if (TRACE) Log.d(TAG, "onNewPicture");
mPictureListener.onNewPicture(mWebView, picture);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onNewPicture");
}
}
/**
* @See AwContentsClient#startActionMode(View,ActionHandler,boolean)
*/
@Override
public SelectActionMode startActionMode(
View view, ActionHandler actionHandler, boolean floating) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.startActionMode");
if (TRACE) Log.d(TAG, "startActionMode");
// Floating (contextual) action modes are not supported by this adapter.
if (floating) return null;
ActionMode.Callback callback = new SelectActionModeCallback(mContext, actionHandler);
ActionMode actionMode = view.startActionMode(callback);
return actionMode != null ? new SelectActionMode(actionMode) : null;
} finally {
TraceEvent.end("WebViewContentsClientAdapter.startActionMode");
}
}
/**
* @See AwContentsClient#supportsFloatingActionMode()
*/
@Override
public boolean supportsFloatingActionMode() {
return false;
}
/**
* @see AwContentsClient#onLoadResource(String)
*/
@Override
public void onLoadResource(String url) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onLoadResource");
if (TRACE) Log.d(TAG, "onLoadResource=" + url);
mWebViewClient.onLoadResource(mWebView, url);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onLoadResource");
}
}
/**
* @see AwContentsClient#onCreateWindow(boolean, boolean)
*/
@Override
public boolean onCreateWindow(boolean isDialog, boolean isUserGesture) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onCreateWindow");
// The transport message is completed asynchronously by the app; the
// handler in the constructor finishes window creation when it arrives.
Message m = mUiThreadHandler.obtainMessage(
NEW_WEBVIEW_CREATED, mWebView.new WebViewTransport());
boolean result;
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "onCreateWindow");
result = mWebChromeClient.onCreateWindow(mWebView, isDialog, isUserGesture, m);
} else {
result = false;
}
return result;
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onCreateWindow");
}
}
/**
* @see AwContentsClient#onCloseWindow()
*/
@Override
public void onCloseWindow() {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onCloseWindow");
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "onCloseWindow");
mWebChromeClient.onCloseWindow(mWebView);
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onCloseWindow");
}
}
/**
* @see AwContentsClient#onRequestFocus()
*/
@Override
public void onRequestFocus() {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onRequestFocus");
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "onRequestFocus");
mWebChromeClient.onRequestFocus(mWebView);
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onRequestFocus");
}
}
/**
* @see AwContentsClient#onReceivedTouchIconUrl(String url, boolean precomposed)
*/
@Override
public void onReceivedTouchIconUrl(String url, boolean precomposed) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onReceivedTouchIconUrl");
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "onReceivedTouchIconUrl=" + url);
mWebChromeClient.onReceivedTouchIconUrl(mWebView, url, precomposed);
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onReceivedTouchIconUrl");
}
}
/**
* @see AwContentsClient#onReceivedIcon(Bitmap bitmap)
*/
@Override
public void onReceivedIcon(Bitmap bitmap) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onReceivedIcon");
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "onReceivedIcon");
mWebChromeClient.onReceivedIcon(mWebView, bitmap);
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onReceivedIcon");
}
}
/**
* @see ContentViewClient#onPageStarted(String)
*/
@Override
public void onPageStarted(String url) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onPageStarted");
if (TRACE) Log.d(TAG, "onPageStarted=" + url);
mWebViewClient.onPageStarted(mWebView, url, mWebView.getFavicon());
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onPageStarted");
}
}
/**
* @see ContentViewClient#onPageFinished(String)
*/
@Override
public void onPageFinished(String url) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onPageFinished");
if (TRACE) Log.d(TAG, "onPageFinished=" + url);
mWebViewClient.onPageFinished(mWebView, url);
// See b/8208948
// This fakes an onNewPicture callback after onPageFinished to allow
// CTS tests to run in an un-flaky manner. This is required as the
// path for sending Picture updates in Chromium are decoupled from the
// page loading callbacks, i.e. the Chrome compositor may draw our
// content and send the Picture before onPageStarted or onPageFinished
// are invoked. The CTS harness discards any pictures it receives before
// onPageStarted is invoked, so in the case we get the Picture before that and
// no further updates after onPageStarted, we'll fail the test by timing
// out waiting for a Picture.
if (mPictureListener != null) {
ThreadUtils.postOnUiThreadDelayed(new Runnable() {
@Override
public void run() {
// Re-check: the listener may have been cleared in the meantime.
if (mPictureListener != null) {
if (TRACE) Log.d(TAG, "onPageFinished-fake");
mPictureListener.onNewPicture(mWebView, new Picture());
}
}
}, 100);
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onPageFinished");
}
}
/**
* @see ContentViewClient#onPageCommitVisible(String)
*/
@Override
public void onPageCommitVisible(String url) {
// TODO: implement once required framework changes land
// Please note that this needs an SDK build check. See crbug/461303 for details.
}
/**
* @see ContentViewClient#onReceivedError(int,String,String)
*/
@Override
public void onReceivedError(int errorCode, String description, String failingUrl) {
// TODO(mnaganov): In the next version of glue, this will look as follows:
// if (<next-level-api>) return;
// Currently, we should just run this code always.
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.CUR_DEVELOPMENT + 1) return;
try {
TraceEvent.begin("WebViewContentsClientAdapter.onReceivedError");
if (description == null || description.isEmpty()) {
// ErrorStrings is @hidden, so we can't do this in AwContents. Normally the net/
// layer will set a valid description, but for synthesized callbacks (like in the
// case for intercepted requests) AwContents will pass in null.
description = mWebViewDelegate.getErrorString(mContext, errorCode);
}
if (TRACE) Log.d(TAG, "onReceivedError=" + failingUrl);
mWebViewClient.onReceivedError(
mWebView, errorCode, description, failingUrl);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onReceivedError");
}
}
/**
* @see ContentViewClient#onReceivedError(
* AwContentsClient.AwWebResourceRequest,AwContentsClient.AwWebResourceError)
*/
@Override
public void onReceivedError2(AwContentsClient.AwWebResourceRequest request,
AwContentsClient.AwWebResourceError error) {
// TODO(mnaganov): In the next version of glue, this will look as follows:
// if (!<next-level-api>) return;
// Currently, we should never run this code.
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.CUR_DEVELOPMENT + 1) return;
try {
TraceEvent.begin("WebViewContentsClientAdapter.onReceivedError");
if (error.description == null || error.description.isEmpty()) {
// ErrorStrings is @hidden, so we can't do this in AwContents. Normally the net/
// layer will set a valid description, but for synthesized callbacks (like in the
// case for intercepted requests) AwContents will pass in null.
error.description = mWebViewDelegate.getErrorString(mContext, error.errorCode);
}
if (TRACE) Log.d(TAG, "onReceivedError=" + request.url);
// TODO(mnaganov): When the new API becomes available, uncomment the following:
// mWebViewClient.onReceivedError(request, error);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onReceivedError");
}
}
/**
* @see AwContentsClient#onReceivedHttpError(AwWebResourceRequest, AwWebResourceResponse)
*/
@Override
public void onReceivedHttpError(AwWebResourceRequest request, AwWebResourceResponse response) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onReceivedHttpError");
if (TRACE) Log.d(TAG, "onReceivedHttpError=" + request.url);
// TODO(mnaganov): Call mWebViewClient.onReceivedHttpError(mWebView, request, response);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onReceivedHttpError");
}
}
/**
* @see ContentViewClient#onReceivedTitle(String)
*/
@Override
public void onReceivedTitle(String title) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onReceivedTitle");
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "onReceivedTitle=\"" + title + "\"");
mWebChromeClient.onReceivedTitle(mWebView, title);
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onReceivedTitle");
}
}
/**
* @see ContentViewClient#shouldOverrideKeyEvent(KeyEvent)
*/
@Override
public boolean shouldOverrideKeyEvent(KeyEvent event) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.shouldOverrideKeyEvent");
if (TRACE) Log.d(TAG, "shouldOverrideKeyEvent");
return mWebViewClient.shouldOverrideKeyEvent(mWebView, event);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.shouldOverrideKeyEvent");
}
}
/**
 * Returns true if a method with the given name and parameter types is
 * re-declared (overridden) somewhere below {@code baseClass} in the
 * hierarchy of {@code subClass}, rather than inherited unchanged.
 *
 * @param baseClass the class declaring the original method.
 * @param subClass the (possibly derived) class to inspect.
 * @param name the public method's name.
 * @param parameterTypes the method's parameter types.
 * @return true if the method's declaring class differs from
 *         {@code baseClass}; false if it is inherited from
 *         {@code baseClass}, cannot be found, or is not accessible.
 */
private static <T> boolean isMethodDeclaredInSubClass(Class<T> baseClass,
Class<? extends T> subClass, String name, Class<?>... parameterTypes) {
try {
return !subClass.getMethod(name, parameterTypes).getDeclaringClass().equals(baseClass);
} catch (SecurityException | NoSuchMethodException e) {
// Treat "cannot reflect on the method" the same as "not overridden".
return false;
}
}
/**
* @see AwContentsClient#onGeolocationPermissionsShowPrompt(String,
*      GeolocationPermissions.Callback)
*/
@Override
public void onGeolocationPermissionsShowPrompt(String origin,
GeolocationPermissions.Callback callback) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onGeolocationPermissionsShowPrompt");
// No chrome client installed: deny without persisting the decision.
if (mWebChromeClient == null) {
callback.invoke(origin, false, false);
return;
}
if (!isMethodDeclaredInSubClass(WebChromeClient.class,
mWebChromeClient.getClass(),
"onGeolocationPermissionsShowPrompt",
String.class,
GeolocationPermissions.Callback.class)) {
// This is only required for pre-M versions of android.
callback.invoke(origin, false, false);
return;
}
if (TRACE) Log.d(TAG, "onGeolocationPermissionsShowPrompt");
mWebChromeClient.onGeolocationPermissionsShowPrompt(origin, callback);
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onGeolocationPermissionsShowPrompt");
}
}
/**
* @see AwContentsClient#onGeolocationPermissionsHidePrompt()
*/
@Override
public void onGeolocationPermissionsHidePrompt() {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onGeolocationPermissionsHidePrompt");
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "onGeolocationPermissionsHidePrompt");
mWebChromeClient.onGeolocationPermissionsHidePrompt();
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onGeolocationPermissionsHidePrompt");
}
}
/**
* @see AwContentsClient#onPermissionRequest(AwPermissionRequest)
*/
@Override
public void onPermissionRequest(AwPermissionRequest permissionRequest) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onPermissionRequest");
if (mWebChromeClient != null) {
if (TRACE) Log.d(TAG, "onPermissionRequest");
// Lazily created; keys and values are weakly referenced so entries
// disappear once the request/adapter are no longer reachable.
if (mOngoingPermissionRequests == null) {
mOngoingPermissionRequests = new WeakHashMap<AwPermissionRequest,
WeakReference<PermissionRequestAdapter>>();
}
PermissionRequestAdapter adapter = new PermissionRequestAdapter(permissionRequest);
mOngoingPermissionRequests.put(
permissionRequest, new WeakReference<PermissionRequestAdapter>(adapter));
mWebChromeClient.onPermissionRequest(adapter);
} else {
// By default, we deny the permission.
permissionRequest.deny();
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onPermissionRequest");
}
}
/**
* @see AwContentsClient#onPermissionRequestCanceled(AwPermissionRequest)
*/
@Override
public void onPermissionRequestCanceled(AwPermissionRequest permissionRequest) {
try {
TraceEvent.begin("WebViewContentsClientAdapter.onPermissionRequestCanceled");
if (mWebChromeClient != null && mOngoingPermissionRequests != null) {
if (TRACE) Log.d(TAG, "onPermissionRequestCanceled");
WeakReference<PermissionRequestAdapter> weakRef =
mOngoingPermissionRequests.get(permissionRequest);
// We don't hold strong reference to PermissionRequestAdpater and don't expect the
// user only holds weak reference to it either, if so, user has no way to call
// grant()/deny(), and no need to be notified the cancellation of request.
if (weakRef != null) {
PermissionRequestAdapter adapter = weakRef.get();
if (adapter != null) mWebChromeClient.onPermissionRequestCanceled(adapter);
}
}
} finally {
TraceEvent.end("WebViewContentsClientAdapter.onPermissionRequestCanceled");
}
}
// Bridges the framework's JsResult/JsPromptResult completion back to exactly
// one of the two chromium-side receiver types (whichever constructor was used).
private static class JsPromptResultReceiverAdapter implements JsResult.ResultReceiver {
private JsPromptResultReceiver mChromePromptResultReceiver;
private JsResultReceiver mChromeResultReceiver;
// We hold onto the JsPromptResult here, just to avoid the need to downcast
// in onJsResultComplete.
// NOTE(review): `this` escapes to JsPromptResult before construction
// completes; safe only because the callback cannot fire until the result
// is handed out — confirm if JsPromptResult's behavior ever changes.
private final JsPromptResult mPromptResult = new JsPromptResult(this);
public JsPromptResultReceiverAdapter(JsPromptResultReceiver receiver) {
mChromePromptResultReceiver = receiver;
}
public JsPromptResultReceiverAdapter(JsResultReceiver receiver) {
mChromeResultReceiver = receiver;
}
public JsPromptResult getPromptResult() {
return mPromptResult;
}
@Override
public void onJsResultComplete(JsResult result) {
// Route confirm/cancel to whichever receiver this adapter wraps.
if (mChromePromptResultReceiver != null) {
if (mPromptResult.getResult()) {
mChromePromptResultReceiver.confirm(mPromptResult.getStringResult());
} else {
mChromePromptResultReceiver.cancel();
}
} else {
if (mPromptResult.getResult()) {
mChromeResultReceiver.confirm();
} else {
mChromeResultReceiver.cancel();
}
}
}
}
@Override
public void handleJsAlert(String url, String message, JsResultReceiver receiver) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.handleJsAlert");
        if (mWebChromeClient == null) {
            receiver.cancel();
            return;
        }
        final JsPromptResult res =
                new JsPromptResultReceiverAdapter(receiver).getPromptResult();
        if (TRACE) Log.d(TAG, "onJsAlert");
        // Give the app first shot at the dialog; fall back to the default
        // implementation, and cancel if that could not be shown either.
        final boolean handledByApp = mWebChromeClient.onJsAlert(mWebView, url, message, res);
        if (!handledByApp
                && !showDefaultJsDialog(res, JsDialogHelper.ALERT, null, message, url)) {
            receiver.cancel();
        }
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.handleJsAlert");
    }
}
@Override
public void handleJsBeforeUnload(String url, String message, JsResultReceiver receiver) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.handleJsBeforeUnload");
        if (mWebChromeClient == null) {
            receiver.cancel();
            return;
        }
        final JsPromptResult res =
                new JsPromptResultReceiverAdapter(receiver).getPromptResult();
        if (TRACE) Log.d(TAG, "onJsBeforeUnload");
        // Give the app first shot at the dialog; fall back to the default
        // implementation, and cancel if that could not be shown either.
        final boolean handledByApp =
                mWebChromeClient.onJsBeforeUnload(mWebView, url, message, res);
        if (!handledByApp
                && !showDefaultJsDialog(res, JsDialogHelper.UNLOAD, null, message, url)) {
            receiver.cancel();
        }
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.handleJsBeforeUnload");
    }
}
@Override
public void handleJsConfirm(String url, String message, JsResultReceiver receiver) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.handleJsConfirm");
        if (mWebChromeClient == null) {
            receiver.cancel();
            return;
        }
        final JsPromptResult res =
                new JsPromptResultReceiverAdapter(receiver).getPromptResult();
        if (TRACE) Log.d(TAG, "onJsConfirm");
        // Give the app first shot at the dialog; fall back to the default
        // implementation, and cancel if that could not be shown either.
        final boolean handledByApp = mWebChromeClient.onJsConfirm(mWebView, url, message, res);
        if (!handledByApp
                && !showDefaultJsDialog(res, JsDialogHelper.CONFIRM, null, message, url)) {
            receiver.cancel();
        }
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.handleJsConfirm");
    }
}
@Override
public void handleJsPrompt(String url, String message, String defaultValue,
        JsPromptResultReceiver receiver) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.handleJsPrompt");
        if (mWebChromeClient == null) {
            receiver.cancel();
            return;
        }
        final JsPromptResult res =
                new JsPromptResultReceiverAdapter(receiver).getPromptResult();
        if (TRACE) Log.d(TAG, "onJsPrompt");
        // Give the app first shot at the dialog; fall back to the default
        // implementation, and cancel if that could not be shown either.
        final boolean handledByApp =
                mWebChromeClient.onJsPrompt(mWebView, url, message, defaultValue, res);
        if (!handledByApp
                && !showDefaultJsDialog(
                        res, JsDialogHelper.PROMPT, defaultValue, message, url)) {
            receiver.cancel();
        }
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.handleJsPrompt");
    }
}
/**
 * Try to show the default JS dialog and return whether the dialog was shown.
 */
private boolean showDefaultJsDialog(JsPromptResult res, int jsDialogType, String defaultValue,
        String message, String url) {
    // Showing a dialog requires an Activity context; bail out when only an
    // application context is available.
    final Context activityContext = ContentViewCore.activityFromContext(mContext);
    if (activityContext != null) {
        // TODO(igsolla): the activity context should be retrieved inside
        // JsDialogHelper.showDialog but for that we need it to return a boolean.
        // Also, doing it here means that we can fix problem 2 in crbug/447607
        // before M is released.
        new JsDialogHelper(res, jsDialogType, defaultValue, message, url)
                .showDialog(activityContext);
        return true;
    }
    return false;
}
@Override
public void onReceivedHttpAuthRequest(AwHttpAuthHandler handler, String host, String realm) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.onReceivedHttpAuthRequest");
        if (TRACE) Log.d(TAG, "onReceivedHttpAuthRequest=" + host);
        // Wrap the native-side handler in the framework HttpAuthHandler type
        // expected by WebViewClient before forwarding.
        mWebViewClient.onReceivedHttpAuthRequest(
                mWebView, new AwHttpAuthHandlerAdapter(handler), host, realm);
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.onReceivedHttpAuthRequest");
    }
}
@Override
public void onReceivedSslError(final ValueCallback<Boolean> callback, SslError error) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.onReceivedSslError");
        // Bridge the boolean ValueCallback to the SslErrorHandler API the app
        // sees: proceed() reports true, cancel() reports false.
        SslErrorHandler handler = new SslErrorHandler() {
            @Override
            public void proceed() {
                callback.onReceiveValue(true);
            }
            @Override
            public void cancel() {
                callback.onReceiveValue(false);
            }
        };
        if (TRACE) Log.d(TAG, "onReceivedSslError");
        mWebViewClient.onReceivedSslError(mWebView, handler, error);
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.onReceivedSslError");
    }
}
/**
 * Immutable adapter exposing a native client-certificate request as the
 * framework's ClientCertRequest. Decisions (proceed/ignore/cancel) are
 * forwarded straight to the native-side callback.
 */
private static class ClientCertRequestImpl extends ClientCertRequest {
    private final AwContentsClientBridge.ClientCertificateRequestCallback mCallback;
    private final String[] mKeyTypes;
    private final Principal[] mPrincipals;
    private final String mHost;
    private final int mPort;
    public ClientCertRequestImpl(
            AwContentsClientBridge.ClientCertificateRequestCallback callback, String[] keyTypes,
            Principal[] principals, String host, int port) {
        mCallback = callback;
        mKeyTypes = keyTypes;
        mPrincipals = principals;
        mHost = host;
        mPort = port;
    }
    @Override
    public String[] getKeyTypes() {
        // This is already a copy of native argument, so return directly.
        return mKeyTypes;
    }
    @Override
    public Principal[] getPrincipals() {
        // This is already a copy of native argument, so return directly.
        return mPrincipals;
    }
    @Override
    public String getHost() {
        return mHost;
    }
    @Override
    public int getPort() {
        return mPort;
    }
    @Override
    public void proceed(final PrivateKey privateKey, final X509Certificate[] chain) {
        mCallback.proceed(privateKey, chain);
    }
    @Override
    public void ignore() {
        mCallback.ignore();
    }
    @Override
    public void cancel() {
        mCallback.cancel();
    }
}
@Override
public void onReceivedClientCertRequest(
        AwContentsClientBridge.ClientCertificateRequestCallback callback, String[] keyTypes,
        Principal[] principals, String host, int port) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.onReceivedClientCertRequest");
        // Log inside the begin/end pair, consistent with every other callback
        // in this adapter (previously it was emitted before TraceEvent.begin).
        if (TRACE) Log.d(TAG, "onReceivedClientCertRequest");
        // Wrap the native callback so the app sees a framework ClientCertRequest.
        final ClientCertRequestImpl request =
                new ClientCertRequestImpl(callback, keyTypes, principals, host, port);
        mWebViewClient.onReceivedClientCertRequest(mWebView, request);
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.onReceivedClientCertRequest");
    }
}
@Override
public void onReceivedLoginRequest(String realm, String account, String args) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.onReceivedLoginRequest");
        if (TRACE) Log.d(TAG, "onReceivedLoginRequest=" + realm);
        // Thin forward to the app's WebViewClient; no local handling.
        mWebViewClient.onReceivedLoginRequest(mWebView, realm, account, args);
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.onReceivedLoginRequest");
    }
}
@Override
public void onFormResubmission(Message dontResend, Message resend) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.onFormResubmission");
        if (TRACE) Log.d(TAG, "onFormResubmission");
        // Thin forward to the app's WebViewClient; the app replies by sending
        // one of the two messages.
        mWebViewClient.onFormResubmission(mWebView, dontResend, resend);
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.onFormResubmission");
    }
}
@Override
public void onDownloadStart(
        String url,
        String userAgent,
        String contentDisposition,
        String mimeType,
        long contentLength) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.onDownloadStart");
        // Without a registered listener the download notification is dropped.
        if (mDownloadListener == null) return;
        if (TRACE) Log.d(TAG, "onDownloadStart");
        mDownloadListener.onDownloadStart(
                url, userAgent, contentDisposition, mimeType, contentLength);
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.onDownloadStart");
    }
}
@Override
public void showFileChooser(final ValueCallback<String[]> uploadFileCallback,
        final AwContentsClient.FileChooserParamsImpl fileChooserParams) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.showFileChooser");
        if (mWebChromeClient == null) {
            uploadFileCallback.onReceiveValue(null);
            return;
        }
        if (TRACE) Log.d(TAG, "showFileChooser");
        // Adapts the Uri[] result from the app back to the String[] the
        // native side expects; guards against the app replying twice.
        ValueCallback<Uri[]> callbackAdapter = new ValueCallback<Uri[]>() {
            private boolean mCompleted;
            @Override
            public void onReceiveValue(Uri[] uriList) {
                if (mCompleted) {
                    throw new IllegalStateException(
                            "showFileChooser result was already called");
                }
                mCompleted = true;
                String s[] = null;
                if (uriList != null) {
                    s = new String[uriList.length];
                    for (int i = 0; i < uriList.length; i++) {
                        s[i] = uriList[i].toString();
                    }
                }
                uploadFileCallback.onReceiveValue(s);
            }
        };
        // Invoke the new callback introduced in Lollipop. If the app handles
        // it, we're done here.
        if (mWebChromeClient.onShowFileChooser(mWebView, callbackAdapter, fileChooserParams)) {
            return;
        }
        // If the app did not handle it and we are running on Lollipop or newer, then
        // abort.
        if (mContext.getApplicationInfo().targetSdkVersion >= Build.VERSION_CODES.LOLLIPOP) {
            uploadFileCallback.onReceiveValue(null);
            return;
        }
        // Otherwise, for older apps, attempt to invoke the legacy (hidden) API for
        // backwards compatibility.
        ValueCallback<Uri> innerCallback = new ValueCallback<Uri>() {
            private boolean mCompleted;
            @Override
            public void onReceiveValue(Uri uri) {
                if (mCompleted) {
                    throw new IllegalStateException(
                            "showFileChooser result was already called");
                }
                mCompleted = true;
                // Legacy API returns at most one Uri; wrap it as a single-element array.
                uploadFileCallback.onReceiveValue(
                        uri == null ? null : new String[] {uri.toString()});
            }
        };
        if (TRACE) Log.d(TAG, "openFileChooser");
        mWebChromeClient.openFileChooser(
                innerCallback,
                fileChooserParams.getAcceptTypesString(),
                fileChooserParams.isCaptureEnabled() ? "*" : "");
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.showFileChooser");
    }
}
@Override
public void onScaleChangedScaled(float oldScale, float newScale) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.onScaleChangedScaled");
        // Drop the stray leading space in the log message, for consistency with
        // every other TRACE log in this adapter.
        if (TRACE) Log.d(TAG, "onScaleChangedScaled");
        mWebViewClient.onScaleChanged(mWebView, oldScale, newScale);
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.onScaleChangedScaled");
    }
}
@Override
public void onShowCustomView(View view, CustomViewCallback cb) {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.onShowCustomView");
        // Only forwarded when a chrome client is installed; otherwise ignored.
        if (mWebChromeClient != null) {
            if (TRACE) Log.d(TAG, "onShowCustomView");
            mWebChromeClient.onShowCustomView(view, cb);
        }
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.onShowCustomView");
    }
}
@Override
public void onHideCustomView() {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.onHideCustomView");
        // Only forwarded when a chrome client is installed; otherwise ignored.
        if (mWebChromeClient != null) {
            if (TRACE) Log.d(TAG, "onHideCustomView");
            mWebChromeClient.onHideCustomView();
        }
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.onHideCustomView");
    }
}
@Override
protected View getVideoLoadingProgressView() {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.getVideoLoadingProgressView");
        // Null when no chrome client is installed; matches getDefaultVideoPoster's
        // initialize-then-assign style.
        View result = null;
        if (mWebChromeClient != null) {
            if (TRACE) Log.d(TAG, "getVideoLoadingProgressView");
            result = mWebChromeClient.getVideoLoadingProgressView();
        }
        return result;
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.getVideoLoadingProgressView");
    }
}
@Override
public Bitmap getDefaultVideoPoster() {
    try {
        TraceEvent.begin("WebViewContentsClientAdapter.getDefaultVideoPoster");
        Bitmap result = null;
        if (mWebChromeClient != null) {
            if (TRACE) Log.d(TAG, "getDefaultVideoPoster");
            result = mWebChromeClient.getDefaultVideoPoster();
        }
        if (result == null) {
            // The ic_play_circle_outline_black_48dp icon is transparent so we need to draw it
            // on a gray background.
            Bitmap poster = BitmapFactory.decodeResource(
                    mContext.getResources(), R.drawable.ic_play_circle_outline_black_48dp);
            if (poster == null) {
                // decodeResource is documented to return null if the resource
                // cannot be decoded; avoid an NPE below and return no poster.
                return null;
            }
            result = Bitmap.createBitmap(
                    poster.getWidth(), poster.getHeight(), poster.getConfig());
            result.eraseColor(Color.GRAY);
            Canvas canvas = new Canvas(result);
            canvas.drawBitmap(poster, 0f, 0f, null);
        }
        return result;
    } finally {
        TraceEvent.end("WebViewContentsClientAdapter.getDefaultVideoPoster");
    }
}
// TODO: Move to upstream.
/**
 * Adapts the native AwHttpAuthHandler to the framework HttpAuthHandler API.
 * Null credentials are normalized to empty strings before being forwarded.
 */
private static class AwHttpAuthHandlerAdapter extends android.webkit.HttpAuthHandler {
    // Set once in the constructor and never reassigned; made final.
    private final AwHttpAuthHandler mAwHandler;

    public AwHttpAuthHandlerAdapter(AwHttpAuthHandler awHandler) {
        mAwHandler = awHandler;
    }

    @Override
    public void proceed(String username, String password) {
        // The native side expects non-null strings.
        if (username == null) {
            username = "";
        }
        if (password == null) {
            password = "";
        }
        mAwHandler.proceed(username, password);
    }

    @Override
    public void cancel() {
        mAwHandler.cancel();
    }

    @Override
    public boolean useHttpAuthUsernamePassword() {
        return mAwHandler.isFirstAttempt();
    }
}
/**
 * Type adaptation class for PermissionRequest.
 * TODO: Move to the upstream once the PermissionRequest is part of SDK.
 */
public static class PermissionRequestAdapter extends PermissionRequest {
    // TODO: Move the below definitions to AwPermissionRequest.
    private static final long BITMASK_RESOURCE_VIDEO_CAPTURE = 1 << 1;
    private static final long BITMASK_RESOURCE_AUDIO_CAPTURE = 1 << 2;
    private static final long BITMASK_RESOURCE_PROTECTED_MEDIA_ID = 1 << 3;
    private static final long BITMASK_RESOURCE_MIDI_SYSEX = 1 << 4;

    /**
     * Converts framework resource-name strings to the AwPermissionRequest
     * bitmask representation. Unrecognized resource names are ignored.
     */
    public static long toAwPermissionResources(String[] resources) {
        long result = 0;
        for (String resource : resources) {
            if (resource.equals(PermissionRequest.RESOURCE_VIDEO_CAPTURE)) {
                result |= BITMASK_RESOURCE_VIDEO_CAPTURE;
            } else if (resource.equals(PermissionRequest.RESOURCE_AUDIO_CAPTURE)) {
                result |= BITMASK_RESOURCE_AUDIO_CAPTURE;
            } else if (resource.equals(PermissionRequest.RESOURCE_PROTECTED_MEDIA_ID)) {
                result |= BITMASK_RESOURCE_PROTECTED_MEDIA_ID;
            } else if (resource.equals(AwPermissionRequest.RESOURCE_MIDI_SYSEX)) {
                result |= BITMASK_RESOURCE_MIDI_SYSEX;
            }
        }
        return result;
    }

    /** Converts the AwPermissionRequest bitmask back to resource-name strings. */
    private static String[] toPermissionResources(long resources) {
        ArrayList<String> result = new ArrayList<String>();
        if ((resources & BITMASK_RESOURCE_VIDEO_CAPTURE) != 0) {
            result.add(PermissionRequest.RESOURCE_VIDEO_CAPTURE);
        }
        if ((resources & BITMASK_RESOURCE_AUDIO_CAPTURE) != 0) {
            result.add(PermissionRequest.RESOURCE_AUDIO_CAPTURE);
        }
        if ((resources & BITMASK_RESOURCE_PROTECTED_MEDIA_ID) != 0) {
            result.add(PermissionRequest.RESOURCE_PROTECTED_MEDIA_ID);
        }
        if ((resources & BITMASK_RESOURCE_MIDI_SYSEX) != 0) {
            result.add(AwPermissionRequest.RESOURCE_MIDI_SYSEX);
        }
        String[] resource_array = new String[result.size()];
        return result.toArray(resource_array);
    }

    // Asserted non-null and set exactly once in the constructor; made final.
    private final AwPermissionRequest mAwPermissionRequest;
    // Lazily computed in getResources(), guarded by "this".
    private String[] mResources;

    public PermissionRequestAdapter(AwPermissionRequest awPermissionRequest) {
        assert awPermissionRequest != null;
        mAwPermissionRequest = awPermissionRequest;
    }

    @Override
    public Uri getOrigin() {
        return mAwPermissionRequest.getOrigin();
    }

    @SuppressFBWarnings("CHROMIUM_SYNCHRONIZED_THIS")
    @Override
    public String[] getResources() {
        synchronized (this) {
            if (mResources == null) {
                mResources = toPermissionResources(mAwPermissionRequest.getResources());
            }
            return mResources;
        }
    }

    @Override
    public void grant(String[] resources) {
        // Grant only if the app granted everything that was requested;
        // a partial grant is treated as a denial.
        long requestedResource = mAwPermissionRequest.getResources();
        if ((requestedResource & toAwPermissionResources(resources)) == requestedResource) {
            mAwPermissionRequest.grant();
        } else {
            mAwPermissionRequest.deny();
        }
    }

    @Override
    public void deny() {
        mAwPermissionRequest.deny();
    }
}
}
| |
/*
Copyright 2015 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.Gui.Loaders;
import java.awt.Window;
import javax.swing.JTree;
import javax.swing.SwingUtilities;
import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.CUtilityFunctions;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException;
import com.google.security.zynamics.binnavi.Database.Exceptions.LoadCancelledException;
import com.google.security.zynamics.binnavi.Gui.ErrorDialog.CNaviErrorDialog;
import com.google.security.zynamics.binnavi.Gui.MainWindow.Implementations.CNodeExpander;
import com.google.security.zynamics.binnavi.Gui.Progress.CGlobalProgressManager;
import com.google.security.zynamics.binnavi.Gui.Progress.IProgressOperation;
import com.google.security.zynamics.binnavi.disassembly.CProjectListenerAdapter;
import com.google.security.zynamics.binnavi.disassembly.INaviProject;
import com.google.security.zynamics.binnavi.disassembly.IProjectListener;
import com.google.security.zynamics.binnavi.disassembly.ProjectLoadEvents;
import com.google.security.zynamics.zylib.gui.SwingInvoker;
import com.google.security.zynamics.zylib.gui.ProgressDialogs.CProgressPanel;
/**
 * This class can be used to load a project while showing the corresponding progress dialog.
 */
public final class CProjectLoader {
  /**
   * Static helper class.
   */
  private CProjectLoader() {
    // You are not supposed to instantiate this class
  }
  /**
   * Loads a project and reports progress/errors. Runs on the calling (background)
   * thread; only the tree expansion is dispatched to the Swing EDT.
   *
   * @param parent Parent window used for dialogs.
   * @param project Project to load.
   * @param projectTree Project tree to expand on project loading. This argument can be null.
   */
  private static void loadProjectInternal(final Window parent, final INaviProject project,
      final JTree projectTree) {
    // Registers the progress operation and the project listener; the finally
    // block below always unregisters them again via stop().
    final CProjectLoaderOperation operation = new CProjectLoaderOperation(project);
    try {
      project.load();
      if (projectTree != null) {
        // Expand the project's tree node on the Swing EDT after a successful load.
        new SwingInvoker() {
          @Override
          protected void operation() {
            CNodeExpander.expandNode(projectTree, project);
          }
        }.invokeLater();
      }
    } catch (final CouldntLoadDataException exception) {
      CUtilityFunctions.logException(exception);
      final String message = "E00178: " + "Project data could not be loaded";
      final String description =
          CUtilityFunctions.createDescription(String.format(
              "BinNavi could not load the project '%s'.", project.getConfiguration().getName()),
              new String[] {"The connection dropped while the data was loaded."},
              new String[] {"BinNavi can not open the project. To fix this situation try to "
                  + "load the project again. Restart BinNavi if necessary and contact the "
                  + "BinNavi support if the problem persists."});
      CNaviErrorDialog.show(parent, message, description, exception);
    } catch (final LoadCancelledException e) {
      // Don't show the user that he cancelled the operation.
    } finally {
      operation.stop();
    }
  }
  /**
   * Validates the arguments and spawns a background thread that loads the
   * project. Does nothing if the project is already being loaded.
   *
   * @param parent Parent window used for dialogs.
   * @param project Project to load.
   * @param projectTree Project tree to expand on project loading. This argument can be null.
   */
  private static void loadProjectThreaded(final Window parent, final INaviProject project,
      final JTree projectTree) {
    Preconditions.checkNotNull(parent, "IE00005: Parent argument can not be null");
    Preconditions.checkNotNull(project, "IE01284: Project argument can not be null");
    if (project.isLoading()) {
      return;
    }
    new Thread() {
      @Override
      public void run() {
        loadProjectInternal(parent, project, projectTree);
      }
    }.start();
  }
  /**
   * Loads a project while showing a progress dialog.
   *
   * @param tree Project tree to expand on loading.
   * @param project The project to load.
   */
  public static void loadProject(final JTree tree, final INaviProject project) {
    Preconditions.checkNotNull(tree, "IE01435: Tree argument can not be null");
    Preconditions.checkNotNull(project, "IE01436: Project argument can not be null");
    loadProjectThreaded(SwingUtilities.getWindowAncestor(tree), project, tree);
  }
  /**
   * Operation class for project loading. Registers itself with the global
   * progress manager and translates project load events into progress-panel
   * updates; also lets the user cancel the load from the panel.
   */
  private static class CProjectLoaderOperation implements IProgressOperation {
    /**
     * Project to be loaded.
     */
    private final INaviProject m_project;
    /**
     * Displays progress information about the project load operation.
     */
    private final CProgressPanel m_progressPanel = new CProgressPanel("", false, true) {
      /**
       * Used for serialization.
       */
      private static final long serialVersionUID = -1163585238482641129L;
      @Override
      protected void closeRequested() {
        // Closing the panel requests cancellation; the listener below observes
        // the cleared flag on the next loading() callback.
        setText("Cancelling project loading");
        m_continue = false;
      }
    };
    /**
     * Used to cancel project initializations.
     */
    private boolean m_continue = true;
    /**
     * Updates the GUI on relevant changes in the project.
     */
    private final IProjectListener m_listener = new CProjectListenerAdapter() {
      /**
       * Flag that indicates whether the next event to arrive is the first one for a database load
       * operation.
       */
      private boolean m_first = true;
      @Override
      public boolean loading(final ProjectLoadEvents event, final int counter) {
        // Returning false aborts the load; the flag is re-armed for the next
        // load attempt before returning.
        if (!m_continue) {
          m_continue = true;
          return false;
        }
        m_progressPanel.next();
        if (event == ProjectLoadEvents.Finished) {
          // Load completed: hide the panel and reset state for the next load.
          m_progressPanel.setVisible(false);
          m_first = true;
          m_continue = true;
        } else if (m_first) {
          // First event of a load: initialize the panel's text and range.
          m_progressPanel.setText("Loading project");
          m_progressPanel.setMaximum(ProjectLoadEvents.values().length);
          m_progressPanel.setValue(counter);
          m_first = false;
        }
        return true;
      }
    };
    /**
     * Creates a new loader operation.
     *
     * @param project Project to be loaded.
     */
    public CProjectLoaderOperation(final INaviProject project) {
      m_project = project;
      CGlobalProgressManager.instance().add(this);
      project.addListener(m_listener);
    }
    @Override
    public String getDescription() {
      return "Loading project";
    }
    @Override
    public CProgressPanel getProgressPanel() {
      return m_progressPanel;
    }
    /**
     * Stops the load operation.
     */
    public void stop() {
      m_project.removeListener(m_listener);
      CGlobalProgressManager.instance().remove(this);
    }
  }
}
| |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.distributed;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import com.facebook.buck.android.FakeAndroidDirectoryResolver;
import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.config.Config;
import com.facebook.buck.config.ConfigBuilder;
import com.facebook.buck.distributed.thrift.BuildJobState;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.BuckEventBusFactory;
import com.facebook.buck.event.listener.BroadcastEventListener;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.json.BuildFileParseException;
import com.facebook.buck.jvm.java.JavaLibraryBuilder;
import com.facebook.buck.jvm.java.JavaLibraryDescription;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.parser.DefaultParserTargetNodeFactory;
import com.facebook.buck.parser.Parser;
import com.facebook.buck.parser.ParserConfig;
import com.facebook.buck.parser.ParserTargetNodeFactory;
import com.facebook.buck.rules.ActionGraph;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultBuildTargetSourcePath;
import com.facebook.buck.rules.Cell;
import com.facebook.buck.rules.ConstructorArgMarshaller;
import com.facebook.buck.rules.DefaultCellPathResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.KnownBuildRuleTypesFactory;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.rules.TargetNodeFactory;
import com.facebook.buck.rules.TestCellBuilder;
import com.facebook.buck.rules.coercer.DefaultTypeCoercerFactory;
import com.facebook.buck.rules.coercer.TypeCoercerFactory;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.facebook.buck.testutil.TestConsole;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.DefaultProcessExecutor;
import com.facebook.buck.util.ObjectMappers;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.cache.DefaultFileHashCache;
import com.facebook.buck.util.environment.Architecture;
import com.facebook.buck.util.environment.Platform;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.MoreExecutors;
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
public class DistBuildStateTest {
@Rule
public ExpectedException expectedException = ExpectedException.none();
@Rule
public TemporaryPaths temporaryFolder = new TemporaryPaths();
private ProcessExecutor processExecutor = new DefaultProcessExecutor(new TestConsole());
private KnownBuildRuleTypesFactory knownBuildRuleTypesFactory = new KnownBuildRuleTypesFactory(
processExecutor,
new FakeAndroidDirectoryResolver());
@Test
public void canReconstructConfig() throws IOException, InterruptedException {
  // Build a cell with a distinctive environment entry so equality below is
  // meaningful.
  ProjectFilesystem savingFilesystem = createJavaOnlyFilesystem("/saving");
  Config rawConfig = new Config(ConfigBuilder.rawFromLines());
  BuckConfig savedBuckConfig = new BuckConfig(
      rawConfig,
      savingFilesystem,
      Architecture.detect(),
      Platform.detect(),
      ImmutableMap.<String, String>builder()
          .putAll(System.getenv())
          .put("envKey", "envValue")
          .build(),
      new DefaultCellPathResolver(savingFilesystem.getRootPath(), rawConfig));
  Cell savingCell = new TestCellBuilder()
      .setFilesystem(savingFilesystem)
      .setBuckConfig(savedBuckConfig)
      .build();
  // Serialize the state...
  BuildJobState jobState = DistBuildState.dump(
      new DistBuildCellIndexer(savingCell),
      emptyActionGraph(),
      createDefaultCodec(savingCell, Optional.empty()),
      createTargetGraph(savingFilesystem),
      ImmutableSet.of(
          BuildTargetFactory.newInstance(savingFilesystem.getRootPath(), "//:dummy")));
  // ...and load it back into a completely fresh cell.
  Cell loadingCell = new TestCellBuilder()
      .setFilesystem(createJavaOnlyFilesystem("/loading"))
      .build();
  DistBuildState loadedState =
      DistBuildState.load(
          Optional.empty(), jobState, loadingCell, knownBuildRuleTypesFactory);
  // The reconstructed cell's config must round-trip exactly.
  ImmutableMap<Integer, Cell> reconstructedCells = loadedState.getCells();
  assertThat(reconstructedCells, Matchers.aMapWithSize(1));
  assertThat(
      reconstructedCells.get(0).getBuckConfig(),
      Matchers.equalTo(savedBuckConfig));
}
@Test
public void canReconstructGraphAndTopLevelBuildTargets() throws Exception {
  // Parse a real workspace with three java_library targets.
  ProjectWorkspace projectWorkspace = TestDataHelper.createProjectWorkspaceForScenario(
      this,
      "simple_java_target",
      temporaryFolder);
  projectWorkspace.setUp();
  Cell cell = projectWorkspace.asCell();
  ProjectFilesystem projectFilesystem = cell.getFilesystem();
  projectFilesystem.mkdirs(projectFilesystem.getBuckPaths().getBuckOut());
  BuckConfig buckConfig = cell.getBuckConfig();
  TypeCoercerFactory typeCoercerFactory =
      new DefaultTypeCoercerFactory(ObjectMappers.newDefaultInstance());
  ConstructorArgMarshaller constructorArgMarshaller =
      new ConstructorArgMarshaller(typeCoercerFactory);
  Parser parser = new Parser(
      new BroadcastEventListener(),
      buckConfig.getView(ParserConfig.class),
      typeCoercerFactory,
      constructorArgMarshaller);
  // Graph contains lib1..lib3, but only lib1 and lib2 are dumped as top-level
  // targets below.
  TargetGraph targetGraph = parser.buildTargetGraph(
      BuckEventBusFactory.newInstance(),
      cell,
      /* enableProfiling */ false,
      MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor()),
      ImmutableSet.of(
          BuildTargetFactory.newInstance(projectFilesystem.getRootPath(), "//:lib1"),
          BuildTargetFactory.newInstance(projectFilesystem.getRootPath(), "//:lib2"),
          BuildTargetFactory.newInstance(projectFilesystem.getRootPath(), "//:lib3")));
  DistBuildTargetGraphCodec targetGraphCodec =
      createDefaultCodec(cell, Optional.of(parser));
  BuildJobState dump = DistBuildState.dump(
      new DistBuildCellIndexer(cell),
      emptyActionGraph(),
      targetGraphCodec,
      targetGraph,
      ImmutableSet.of(
          BuildTargetFactory.newInstance(projectFilesystem.getRootPath(), "//:lib1"),
          BuildTargetFactory.newInstance(projectFilesystem.getRootPath(), "//:lib2")));
  // Load the dump into a fresh cell and rebuild the target graph from it.
  Cell rootCellWhenLoading = new TestCellBuilder()
      .setFilesystem(createJavaOnlyFilesystem("/loading"))
      .build();
  DistBuildState distributedBuildState =
      DistBuildState.load(
          Optional.empty(), dump, rootCellWhenLoading, knownBuildRuleTypesFactory);
  ProjectFilesystem reconstructedCellFilesystem =
      distributedBuildState.getCells().get(0).getFilesystem();
  TargetGraph reconstructedGraph =
      distributedBuildState.createTargetGraph(targetGraphCodec).getTargetGraph();
  // The reconstructed nodes' srcs must match A.java/B.java/C.java resolved
  // against the reconstructed cell's filesystem.
  assertEquals(
      reconstructedGraph.getNodes().stream()
          .map(targetNode -> targetNode.castArg(JavaLibraryDescription.Arg.class).get())
          .sorted()
          .map(targetNode -> targetNode.getConstructorArg().srcs)
          .collect(Collectors.toList()),
      Lists.newArrayList("A.java", "B.java", "C.java").stream()
          .map(f -> reconstructedCellFilesystem.getPath(f))
          .map(p -> new PathSourcePath(reconstructedCellFilesystem, p))
          .map(ImmutableSortedSet::of)
          .collect(Collectors.toList()));
}
@Test
public void throwsOnPlatformMismatch() throws IOException, InterruptedException {
  ProjectFilesystem fs = createJavaOnlyFilesystem("/opt/buck");
  Config rawConfig = new Config(ConfigBuilder.rawFromLines());
  // Deliberately dump with an architecture/platform combination that cannot
  // match the machine running the test.
  BuckConfig mismatchedConfig = new BuckConfig(
      rawConfig,
      fs,
      Architecture.MIPSEL,
      Platform.UNKNOWN,
      ImmutableMap.<String, String>builder()
          .putAll(System.getenv())
          .put("envKey", "envValue")
          .build(),
      new DefaultCellPathResolver(fs.getRootPath(), rawConfig));
  Cell mismatchedCell = new TestCellBuilder()
      .setFilesystem(fs)
      .setBuckConfig(mismatchedConfig)
      .build();
  BuildJobState jobState = DistBuildState.dump(
      new DistBuildCellIndexer(mismatchedCell),
      emptyActionGraph(),
      createDefaultCodec(mismatchedCell, Optional.empty()),
      createTargetGraph(fs),
      ImmutableSet.of(BuildTargetFactory.newInstance(fs.getRootPath(), "//:dummy")));
  // Loading must reject the mismatched platform.
  expectedException.expect(IllegalStateException.class);
  DistBuildState.load(Optional.empty(), jobState, mismatchedCell, knownBuildRuleTypesFactory);
}
  @Test
  public void worksCrossCell() throws IOException, InterruptedException {
    // Two sibling cells under a common parent directory; cell1 acts as the root cell.
    ProjectFilesystem parentFs = createJavaOnlyFilesystem("/saving");
    Path cell1Root = parentFs.resolve("cell1");
    Path cell2Root = parentFs.resolve("cell2");
    parentFs.mkdirs(cell1Root);
    parentFs.mkdirs(cell2Root);
    ProjectFilesystem cell1Filesystem = new ProjectFilesystem(cell1Root);
    ProjectFilesystem cell2Filesystem = new ProjectFilesystem(cell2Root);
    // [cache] repository must survive the dump/load round trip;
    // [repositories] registers cell2 so the cross-cell dependency resolves.
    Config config = new Config(ConfigBuilder.rawFromLines(
        "[cache]",
        "repository=somerepo",
        "[repositories]",
        "cell2 = " + cell2Root.toString()));
    BuckConfig buckConfig = new BuckConfig(
        config,
        cell1Filesystem,
        Architecture.detect(),
        Platform.detect(),
        ImmutableMap.<String, String>builder()
            .putAll(System.getenv())
            .put("envKey", "envValue")
            .build(),
        new DefaultCellPathResolver(cell1Root, config));
    Cell rootCellWhenSaving = new TestCellBuilder()
        .setFilesystem(cell1Filesystem)
        .setBuckConfig(buckConfig)
        .build();
    // Serialize a target graph that spans both cells.
    BuildJobState dump = DistBuildState.dump(
        new DistBuildCellIndexer(rootCellWhenSaving),
        emptyActionGraph(),
        createDefaultCodec(rootCellWhenSaving, Optional.empty()),
        createCrossCellTargetGraph(cell1Filesystem, cell2Filesystem),
        ImmutableSet.of(BuildTargetFactory.newInstance(
            cell1Filesystem.getRootPath(),
            "//:dummy")));
    // Load on a "different machine": fresh filesystem plus a local-only config
    // whose slb_server_pool entry should be merged into the restored config.
    Cell rootCellWhenLoading = new TestCellBuilder()
        .setFilesystem(createJavaOnlyFilesystem("/loading"))
        .build();
    Config localConfig = new Config(ConfigBuilder.rawFromLines(
        "[cache]",
        "slb_server_pool=http://someserver:8080"
    ));
    BuckConfig localBuckConfig = new BuckConfig(
        localConfig,
        cell1Filesystem,
        Architecture.detect(),
        Platform.detect(),
        ImmutableMap.<String, String>builder()
            .putAll(System.getenv())
            .put("envKey", "envValue")
            .build(),
        new DefaultCellPathResolver(cell1Root, localConfig));
    DistBuildState distributedBuildState =
        DistBuildState.load(
            Optional.of(localBuckConfig),
            dump,
            rootCellWhenLoading,
            knownBuildRuleTypesFactory);
    // Both cells must come back, and the root cell's config must contain the
    // serialized value AND the locally-overlaid value.
    ImmutableMap<Integer, Cell> cells = distributedBuildState.getCells();
    assertThat(cells, Matchers.aMapWithSize(2));
    BuckConfig rootCellBuckConfig = cells.get(0).getBuckConfig();
    Optional<ImmutableMap<String, String>> cacheSection =
        rootCellBuckConfig.getSection("cache");
    assertTrue(cacheSection.isPresent());
    assertTrue(cacheSection.get().containsKey("repository"));
    assertThat(cacheSection.get().get("repository"), Matchers.equalTo("somerepo"));
    assertThat(
        cacheSection.get().get("slb_server_pool"),
        Matchers.equalTo("http://someserver:8080"));
  }
private DistBuildFileHashes emptyActionGraph() throws IOException {
ActionGraph actionGraph = new ActionGraph(ImmutableList.of());
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(ruleResolver);
SourcePathResolver sourcePathResolver = new SourcePathResolver(ruleFinder);
ProjectFilesystem projectFilesystem = createJavaOnlyFilesystem("/opt/buck");
return new DistBuildFileHashes(
actionGraph,
sourcePathResolver,
ruleFinder,
ImmutableList.of(DefaultFileHashCache.createDefaultFileHashCache(projectFilesystem)),
Functions.constant(0),
MoreExecutors.newDirectExecutorService(),
/* keySeed */ 0,
FakeBuckConfig.builder().build());
}
  /**
   * Builds a target-graph codec for tests. When a Parser is supplied, each node
   * is re-parsed to its raw attribute map via Parser.getRawTargetNode; otherwise
   * an empty raw-node map is used for every node.
   */
  private static DistBuildTargetGraphCodec createDefaultCodec(
      final Cell cell,
      final Optional<Parser> parser) {
    ObjectMapper objectMapper = ObjectMappers.newDefaultInstance(); // NOPMD confused by lambda
    BuckEventBus eventBus = BuckEventBusFactory.newInstance();
    Function<? super TargetNode<?, ?>, ? extends Map<String, Object>> nodeToRawNode;
    if (parser.isPresent()) {
      // The cast pins the lambda's target type so it fits the wildcard declaration above.
      nodeToRawNode = (Function<TargetNode<?, ?>, Map<String, Object>>) input -> {
        try {
          return parser.get().getRawTargetNode(
              eventBus,
              cell.getCell(input.getBuildTarget()),
              /* enableProfiling */ false,
              MoreExecutors.listeningDecorator(MoreExecutors.newDirectExecutorService()),
              input);
        } catch (BuildFileParseException e) {
          // Wrap the checked parse failure; test callers treat it as fatal.
          throw new RuntimeException(e);
        }
      };
    } else {
      nodeToRawNode = Functions.constant(ImmutableMap.<String, Object>of());
    }
    DistBuildTypeCoercerFactory typeCoercerFactory =
        new DistBuildTypeCoercerFactory(objectMapper);
    ParserTargetNodeFactory<TargetNode<?, ?>> parserTargetNodeFactory =
        DefaultParserTargetNodeFactory.createForDistributedBuild(
            new ConstructorArgMarshaller(typeCoercerFactory),
            new TargetNodeFactory(typeCoercerFactory));
    return new DistBuildTargetGraphCodec(
        objectMapper,
        parserTargetNodeFactory,
        nodeToRawNode,
        ImmutableSet.of());
  }
private static TargetGraph createTargetGraph(ProjectFilesystem filesystem) {
return TargetGraphFactory.newInstance(
JavaLibraryBuilder.createBuilder(BuildTargetFactory.newInstance("//:foo"), filesystem)
.build());
}
private static TargetGraph createCrossCellTargetGraph(
ProjectFilesystem cellOneFilesystem,
ProjectFilesystem cellTwoFilesystem) {
Preconditions.checkArgument(!cellOneFilesystem.equals(cellTwoFilesystem));
BuildTarget target = BuildTargetFactory.newInstance(cellTwoFilesystem, "//:foo");
return TargetGraphFactory.newInstance(
JavaLibraryBuilder.createBuilder(
BuildTargetFactory.newInstance(cellOneFilesystem, "//:foo"),
cellOneFilesystem)
.addSrc(new DefaultBuildTargetSourcePath(target))
.build(),
JavaLibraryBuilder.createBuilder(
target,
cellTwoFilesystem)
.build()
);
}
private static ProjectFilesystem createJavaOnlyFilesystem(String rootPath) throws IOException {
ProjectFilesystem filesystem = FakeProjectFilesystem.createJavaOnlyFilesystem(rootPath);
filesystem.mkdirs(filesystem.getBuckPaths().getBuckOut());
return filesystem;
}
}
| |
/**
* This class is generated by jOOQ
*/
package nfl.playdb.model.gen.jooq.tables.records;
import java.math.BigDecimal;
import javax.annotation.Generated;
import nfl.playdb.model.gen.jooq.tables.Offense;
import org.jooq.impl.TableRecordImpl;
/**
* This class is generated by jOOQ.
*/
@Generated(
value = {
"http://www.jooq.org",
"jOOQ version:3.6.2"
},
comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class OffenseRecord extends TableRecordImpl<OffenseRecord> {

    // NOTE: generated by jOOQ — do not hand-edit; regenerate from the schema instead.
    // Values are stored positionally: index n in setValue/getValue below maps to
    // the n-th column of nfl_plays.offense in generation order.

    private static final long serialVersionUID = -204361604;

    /**
     * Setter for <code>nfl_plays.offense.uid</code>.
     */
    public void setUid(Integer value) {
        setValue(0, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.uid</code>.
     */
    public Integer getUid() {
        return (Integer) getValue(0);
    }

    /**
     * Setter for <code>nfl_plays.offense.gid</code>.
     */
    public void setGid(Integer value) {
        setValue(1, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.gid</code>.
     */
    public Integer getGid() {
        return (Integer) getValue(1);
    }

    /**
     * Setter for <code>nfl_plays.offense.player</code>.
     */
    public void setPlayer(String value) {
        setValue(2, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.player</code>.
     */
    public String getPlayer() {
        return (String) getValue(2);
    }

    /**
     * Setter for <code>nfl_plays.offense.pa</code>.
     */
    public void setPa(Byte value) {
        setValue(3, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.pa</code>.
     */
    public Byte getPa() {
        return (Byte) getValue(3);
    }

    /**
     * Setter for <code>nfl_plays.offense.pc</code>.
     */
    public void setPc(Byte value) {
        setValue(4, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.pc</code>.
     */
    public Byte getPc() {
        return (Byte) getValue(4);
    }

    /**
     * Setter for <code>nfl_plays.offense.py</code>.
     */
    public void setPy(Integer value) {
        setValue(5, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.py</code>.
     */
    public Integer getPy() {
        return (Integer) getValue(5);
    }

    /**
     * Setter for <code>nfl_plays.offense.int</code>.
     */
    public void setInt(Byte value) {
        setValue(6, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.int</code>.
     */
    public Byte getInt() {
        return (Byte) getValue(6);
    }

    /**
     * Setter for <code>nfl_plays.offense.tdp</code>.
     */
    public void setTdp(Byte value) {
        setValue(7, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.tdp</code>.
     */
    public Byte getTdp() {
        return (Byte) getValue(7);
    }

    /**
     * Setter for <code>nfl_plays.offense.ra</code>.
     */
    public void setRa(Byte value) {
        setValue(8, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.ra</code>.
     */
    public Byte getRa() {
        return (Byte) getValue(8);
    }

    /**
     * Setter for <code>nfl_plays.offense.sra</code>.
     */
    public void setSra(Byte value) {
        setValue(9, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.sra</code>.
     */
    public Byte getSra() {
        return (Byte) getValue(9);
    }

    /**
     * Setter for <code>nfl_plays.offense.ry</code>.
     */
    public void setRy(Integer value) {
        setValue(10, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.ry</code>.
     */
    public Integer getRy() {
        return (Integer) getValue(10);
    }

    /**
     * Setter for <code>nfl_plays.offense.tdr</code>.
     */
    public void setTdr(Byte value) {
        setValue(11, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.tdr</code>.
     */
    public Byte getTdr() {
        return (Byte) getValue(11);
    }

    /**
     * Setter for <code>nfl_plays.offense.trg</code>.
     */
    public void setTrg(Byte value) {
        setValue(12, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.trg</code>.
     */
    public Byte getTrg() {
        return (Byte) getValue(12);
    }

    /**
     * Setter for <code>nfl_plays.offense.rec</code>.
     */
    public void setRec(Byte value) {
        setValue(13, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.rec</code>.
     */
    public Byte getRec() {
        return (Byte) getValue(13);
    }

    /**
     * Setter for <code>nfl_plays.offense.recy</code>.
     */
    public void setRecy(Integer value) {
        setValue(14, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.recy</code>.
     */
    public Integer getRecy() {
        return (Integer) getValue(14);
    }

    /**
     * Setter for <code>nfl_plays.offense.tdre</code>.
     */
    public void setTdre(Byte value) {
        setValue(15, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.tdre</code>.
     */
    public Byte getTdre() {
        return (Byte) getValue(15);
    }

    /**
     * Setter for <code>nfl_plays.offense.fuml</code>.
     */
    public void setFuml(Byte value) {
        setValue(16, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.fuml</code>.
     */
    public Byte getFuml() {
        return (Byte) getValue(16);
    }

    /**
     * Setter for <code>nfl_plays.offense.peny</code>.
     */
    public void setPeny(Byte value) {
        setValue(17, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.peny</code>.
     */
    public Byte getPeny() {
        return (Byte) getValue(17);
    }

    /**
     * Setter for <code>nfl_plays.offense.plays</code>.
     */
    public void setPlays(Integer value) {
        setValue(18, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.plays</code>.
     */
    public Integer getPlays() {
        return (Integer) getValue(18);
    }

    /**
     * Setter for <code>nfl_plays.offense.fpts</code>.
     */
    public void setFpts(BigDecimal value) {
        setValue(19, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.fpts</code>.
     */
    public BigDecimal getFpts() {
        return (BigDecimal) getValue(19);
    }

    /**
     * Setter for <code>nfl_plays.offense.game</code>.
     */
    public void setGame(Byte value) {
        setValue(20, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.game</code>.
     */
    public Byte getGame() {
        return (Byte) getValue(20);
    }

    /**
     * Setter for <code>nfl_plays.offense.seas</code>.
     */
    public void setSeas(Byte value) {
        setValue(21, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.seas</code>.
     */
    public Byte getSeas() {
        return (Byte) getValue(21);
    }

    /**
     * Setter for <code>nfl_plays.offense.year</code>.
     */
    public void setYear(Integer value) {
        setValue(22, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.year</code>.
     */
    public Integer getYear() {
        return (Integer) getValue(22);
    }

    /**
     * Setter for <code>nfl_plays.offense.team</code>.
     */
    public void setTeam(String value) {
        setValue(23, value);
    }

    /**
     * Getter for <code>nfl_plays.offense.team</code>.
     */
    public String getTeam() {
        return (String) getValue(23);
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached OffenseRecord
     */
    public OffenseRecord() {
        super(Offense.OFFENSE);
    }

    /**
     * Create a detached, initialised OffenseRecord
     */
    // Parameter order mirrors the positional indices above ("int_" avoids the keyword).
    public OffenseRecord(Integer uid, Integer gid, String player, Byte pa, Byte pc, Integer py, Byte int_, Byte tdp, Byte ra, Byte sra, Integer ry, Byte tdr, Byte trg, Byte rec, Integer recy, Byte tdre, Byte fuml, Byte peny, Integer plays, BigDecimal fpts, Byte game, Byte seas, Integer year, String team) {
        super(Offense.OFFENSE);
        setValue(0, uid);
        setValue(1, gid);
        setValue(2, player);
        setValue(3, pa);
        setValue(4, pc);
        setValue(5, py);
        setValue(6, int_);
        setValue(7, tdp);
        setValue(8, ra);
        setValue(9, sra);
        setValue(10, ry);
        setValue(11, tdr);
        setValue(12, trg);
        setValue(13, rec);
        setValue(14, recy);
        setValue(15, tdre);
        setValue(16, fuml);
        setValue(17, peny);
        setValue(18, plays);
        setValue(19, fpts);
        setValue(20, game);
        setValue(21, seas);
        setValue(22, year);
        setValue(23, team);
    }
}
| |
// Copyright (c) 2003 Compaq Corporation. All rights reserved.
// Portions Copyright (c) 2003 Microsoft Corporation. All rights reserved.
// Last modified on Mon 30 Apr 2007 at 13:30:03 PST by lamport
// modified on Wed Nov 14 23:26:07 PST 2001 by yuanyu
// modified on Wed Jun 28 12:00:16 PDT 2000 by rjoshi
package tlc2.tool;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import tlc2.output.EC;
import tlc2.output.MP;
import tlc2.output.StatePrinter;
import tlc2.util.BufferedRandomAccessFile;
import tlc2.util.LongVec;
import util.FileUtil;
public class TLCTrace {
private static String filename;
private BufferedRandomAccessFile raf;
private long lastPtr;
private TraceApp tool;
public TLCTrace(String metadir, String specFile, TraceApp tool)
throws IOException {
filename = metadir + FileUtil.separator + specFile + ".st";
this.raf = new BufferedRandomAccessFile(filename, "rw");
this.lastPtr = 1L;
this.tool = tool;
}
/**
* @param fp A finger print of a state without a predecessor (init state)
* @return The new location (pointer) for the given finger print (state)
* @throws IOException
*/
public final synchronized long writeState(final long aFingerprint)
throws IOException {
return writeState(1, aFingerprint);
}
/**
* @param predecessor The predecessor state
* @param fp A finger print
* @return The new location (pointer) for the given finger print (state)
* @throws IOException
*/
public final synchronized long writeState(final TLCState predecessor, final long aFingerprint)
throws IOException {
return writeState(predecessor.uid, aFingerprint);
}
/**
* @param predecessorLoc The location of the state predecessor
* @param fp A finger print
* @return The new location (pointer) for the given finger print (state)
* @throws IOException
*/
private final synchronized long writeState(long predecessorLoc, long fp)
throws IOException {
this.lastPtr = this.raf.getFilePointer();
this.raf.writeLongNat(predecessorLoc);
this.raf.writeLong(fp);
return this.lastPtr;
}
public final void close() throws IOException {
this.raf.close();
}
private synchronized long getPrev(long loc) throws IOException {
this.raf.seek(loc);
return this.raf.readLongNat();
}
private synchronized long getFP(long loc) throws IOException {
this.raf.seek(loc);
this.raf.readLongNat(); /*drop*/
return this.raf.readLong();
}
/**
* Returns the level (monotonically increasing)!
*
* LL: The user has no real need of an accurate tree height. Breadth-first
* search is good because it provides the shortest possible error trace.
* Usually approximately breadth-first search is just as good because it
* makes little difference if the error trace isn't quite as short as
* possible. I believe that in most applications, after a short initial
* period, the height of the tree grows slowly. All workers are usually
* working on states of the same height except for brief periods when the
* height changes, and then the heights will differ by at most one.
* Reporting the height to the user gives him some information about how
* fast model checking is going. He will have no problem getting used to the
* idea that it's only an approximation. (I expect that few users even know
* what it means.) I'd like to make the reported value be monotonic because,
* if it's not, users may worry and people already have enough things in
* life to worry about.
*
* @see TLCTrace#getLevel()
*/
public final int getLevelForReporting() throws IOException {
final int calculatedLevel = getLevel(this.lastPtr);
if(calculatedLevel > previousLevel) {
previousLevel = calculatedLevel;
}
return previousLevel;
}
/**
* Stores the previous level reported to guarantee that it is monotonic
*/
private int previousLevel;
/**
* @see TLCTrace#getLevel(long)
*/
public final int getLevel() throws IOException {
// This assumption (lastPtr) only holds for the TLC in non-parallel mode.
// Generally the last line (logically a state) is not necessarily
// on the highest level of the state tree. This is only the case if
// states are explored strictly by breadth-first search.
return getLevel(this.lastPtr);
}
/**
* @param startLoc The start location (pointer) from where the level (height) of the state tree should be calculated
* @return The level (height) of the state tree.
* @throws IOException
*/
public synchronized final int getLevel(long startLoc) throws IOException {
// keep current location
long currentFilePointer = this.raf.getFilePointer();
// calculate level/depth based on start location
int level = 0;
for (long predecessorLoc = startLoc; predecessorLoc != 1; predecessorLoc = this
.getPrev(predecessorLoc)) {
level++;
}
// rewind to current location
this.raf.seek(currentFilePointer);
return level;
}
/**
* @return All states in the trace file
* @throws IOException
*/
public final TLCStateInfo[] getTrace() throws IOException {
final Map<Long, TLCStateInfo> locToState = new HashMap<Long, TLCStateInfo>();
synchronized (this) {
final long curLoc = this.raf.getFilePointer();
try {
long length = this.raf.length();
// go to first byte
this.raf.seek(0);
// read init state
this.raf.readLongNat(); /* drop predecessor of init state*/
TLCStateInfo state = this.tool.getState(this.raf.readLong());
locToState.put(0L, state);
for (long location = 12; location < length; location+=12) {
final long predecessorLocation = this.raf.readLongNat();
final long fp = this.raf.readLong();
// read predecessor from map
final TLCStateInfo predecessor = locToState.get(predecessorLocation);
// reconstruct current state
state = this.tool.getState(fp, predecessor.state);
// chain to predecessor
state.predecessorState = predecessor;
state.stateNumber = location / 12;
// store in map
locToState.put(location, state);
}
} finally {
// rewind
this.raf.seek(curLoc);
}
}
return locToState.values().toArray(new TLCStateInfo[locToState.size()]);
}
/**
* @param loc The start location (pointer) from where the trace should be computed
* @param included true if the start location state should be included
* @return An array of predecessor states
* @throws IOException
*/
public final TLCStateInfo[] getTrace(long loc, boolean included)
throws IOException {
LongVec fps = new LongVec();
synchronized(this) {
long curLoc = this.raf.getFilePointer();
long loc1 = (included) ? loc : this.getPrev(loc);
for (long ploc = loc1; ploc != 1; ploc = this.getPrev(ploc)) {
fps.addElement(this.getFP(ploc));
}
this.raf.seek(curLoc);
}
int stateNum = 0;
int len = fps.size();
TLCStateInfo[] res = new TLCStateInfo[len];
if (len > 0) {
long fp = fps.elementAt(len-1);
TLCStateInfo sinfo = this.tool.getState(fp);
if (sinfo == null)
{
MP.printError(EC.TLC_FAILED_TO_RECOVER_INIT);
MP.printError(EC.TLC_BUG, "1");
System.exit(1);
}
res[stateNum++] = sinfo;
for (int i = len - 2; i >= 0; i--) {
fp = fps.elementAt(i);
sinfo = this.tool.getState(fp, sinfo.state);
if (sinfo == null) {
/*
* The following error message is misleading, because it's triggered
* when TLC can't find a non-initial state from its fingerprint
* when it's generating an error trace. LL 7 Mar 2012
*/
MP.printError(EC.TLC_FAILED_TO_RECOVER_INIT);
MP.printError(EC.TLC_BUG, "2");
System.exit(1);
}
res[stateNum++] = sinfo;
}
}
return res;
}
/**
* Write out a sequence of states that reaches s2 from an initial
* state, according to the spec. s2 is a next state of s1.
*
* @param s1 may not be null.
* @param s2 may be null.
* @throws IOException
* @throws WorkerException
*/
public synchronized final void printTrace(final TLCState s1, final TLCState s2)
throws IOException, WorkerException
{
MP.printError(EC.TLC_BEHAVIOR_UP_TO_THIS_POINT);
// Print the prefix leading to s1:
long loc1 = s1.uid;
TLCState lastState = null;
TLCStateInfo[] prefix = this.getTrace(loc1, false);
int idx = 0;
while (idx < prefix.length)
{
StatePrinter.printState(prefix[idx], lastState, idx+1);
lastState = prefix[idx].state;
idx++;
}
// Print s1:
TLCStateInfo sinfo;
if (prefix.length == 0) {
sinfo = this.tool.getState(s1.fingerPrint());
if (sinfo == null)
{
MP.printError(EC.TLC_FAILED_TO_RECOVER_INIT);
MP.printError(EC.TLC_BUG, "3");
System.exit(1);
}
}
else
{
TLCState s0 = prefix[prefix.length-1].state;
sinfo = this.tool.getState(s1.fingerPrint(), s0);
if (sinfo == null)
{
MP.printError(EC.TLC_FAILED_TO_RECOVER_INIT);
MP.printError(EC.TLC_BUG, "4");
StatePrinter.printState(s1);
System.exit(1);
}
}
if (s2 == null)
{
lastState = null;
}
StatePrinter.printState(sinfo, lastState, ++idx);
lastState = sinfo.state;
// Print s2:
if (s2 != null) {
sinfo = this.tool.getState(s2, s1);
if (sinfo == null)
{
MP.printError(EC.TLC_FAILED_TO_RECOVER_INIT);
MP.printError(EC.TLC_BUG, "5");
StatePrinter.printState(s2);
System.exit(1);
}
StatePrinter.printState(sinfo, null, ++idx);
}
}
/**
* Returns a sequence of states that reaches, but excludes the
* state with fingerprint fp.
*/
@SuppressWarnings("unused")
private final TLCStateInfo[] printPrefix(long fp) throws IOException {
// First, find the location for fp:
this.raf.seek(0);
this.raf.readLongNat(); /*drop*/
while (this.raf.readLong() != fp) {
this.raf.readLongNat(); /*drop*/
}
// Print the states corresponding to the fps:
TLCState lastState = null;
TLCStateInfo[] prefix = this.getTrace(this.lastPtr, false);
int idx = 0;
while (idx < prefix.length) {
StatePrinter.printState(prefix[idx], lastState, idx+1);
lastState = prefix[idx].state;
idx++;
}
return prefix;
}
/* Checkpoint. */
public synchronized final void beginChkpt() throws IOException {
this.raf.flush();
// SZ Feb 24, 2009: FileUtil introduced
DataOutputStream dos = FileUtil.newDFOS(filename + ".tmp");
dos.writeLong(this.raf.getFilePointer());
dos.writeLong(this.lastPtr);
dos.close();
}
public final void commitChkpt() throws IOException {
File oldChkpt = new File(filename + ".chkpt");
File newChkpt = new File(filename + ".tmp");
if ((oldChkpt.exists() && !oldChkpt.delete()) ||
!newChkpt.renameTo(oldChkpt)) {
throw new IOException("Trace.commitChkpt: cannot delete " + oldChkpt);
}
}
public final void recover() throws IOException {
// SZ Feb 24, 2009: FileUtil introduced
DataInputStream dis = FileUtil.newDFIS(filename + ".chkpt");
long filePos = dis.readLong();
this.lastPtr = dis.readLong();
dis.close();
this.raf.seek(filePos);
}
public static String getFilename() { return filename; }
public static long getRecoverPtr() throws IOException {
// SZ Feb 24, 2009: FileUtil introduced
DataInputStream dis = FileUtil.newDFIS(filename + ".chkpt");
long res = dis.readLong();
dis.close();
return res;
}
@SuppressWarnings("unused")
private long[] addBlock(long fp[], long prev[]) throws IOException {
// Reuse prev.
for (int i = 0; i < fp.length; i++) {
prev[i] = this.writeState(prev[i], fp[i]);
}
return prev;
}
public synchronized final Enumerator elements() throws IOException {
return new Enumerator();
}
final class Enumerator {
long len;
BufferedRandomAccessFile enumRaf;
Enumerator() throws IOException {
this.len = raf.length();
this.enumRaf = new BufferedRandomAccessFile(filename, "r");
}
final void reset(long pos) throws IOException {
this.len = raf.length();
if (pos == -1) {
pos = this.enumRaf.getFilePointer();
}
this.enumRaf = new BufferedRandomAccessFile(filename, "r");
this.enumRaf.seek(pos);
}
final long nextPos() {
long fpos = this.enumRaf.getFilePointer();
if (fpos < this.len) { return fpos; }
return -1;
}
final long nextFP() throws IOException {
this.enumRaf.readLongNat(); /*drop*/
return this.enumRaf.readLong();
}
}
}
| |
package com.netflix.karyon.swagger;
import com.sun.jersey.api.core.HttpContext;
import com.sun.jersey.core.spi.component.ComponentContext;
import com.sun.jersey.core.spi.component.ComponentScope;
import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable;
import com.sun.jersey.spi.inject.Injectable;
import com.sun.jersey.spi.inject.InjectableProvider;
import javax.servlet.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.ext.Provider;
import java.io.InputStream;
import java.lang.reflect.Type;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Enumeration;
import java.util.Set;
/**
* Created by rbolles on 2/17/16.
*
* A hack to allow Swagger, Jersey, and Guice/Karyon to play nice together. (https://github.com/swagger-api/swagger-core/issues/1619)
*
* It is common practice at Netflix to expose your JAX-RS Resources via filter and GuiceContainer.
*
* Without any intervention, Jersey will blow up with a cryptic message saying something to the effect:
* SEVERE: Missing dependency for method public javax.ws.rs.core.Response io.swagger.jaxrs.listing.ApiListingResource.getListing(javax.ws.rs.core.Application,javax.servlet.ServletConfig,javax.ws.rs.core.HttpHeaders,javax.ws.rs.core.UriInfo,java.lang.String) at parameter at index 1
*
 * This error is due to the fact that when you expose your JAX-RS resources via a filter, there is no ServletConfig object
 * to inject into Swagger's ApiListingResource. Jersey blows up and prevents your app from starting up.
*
 * A careful inspection of the Swagger code path, starting at ApiListingResource.java, shows that it does not actually require a ServletConfig
 * object in order to expose the /swagger.json endpoint. However, Swagger does allow you to plug in your own implementation of certain
 * Swagger classes (e.g. JaxrsScanner). As such, the Swagger maintainers were hesitant to remove ServletConfig references from their codebase.
*
* This class, when registered with Jersey (either directly or indirectly via Guice) provides a "dummy" ServletConfig
* that Jersey can inject and go along on its merry way.
*
*/
@Provider
public class ServletConfigProvider extends AbstractHttpContextInjectable<ServletConfig> implements InjectableProvider<Context, java.lang.reflect.Type> {

    // Supplies a fresh dummy ServletConfig per request context. Every method of
    // the dummy (and of its nested dummy ServletContext) returns null/0; no
    // real servlet state is available when resources are exposed via a filter.
    @Override
    public ServletConfig getValue(HttpContext c) {
        return new ServletConfig() {

            // NOTE(review): appears unused — @Context field injection is
            // unlikely to apply inside this anonymous class; confirm before
            // removing.
            @Context
            ServletContext servletContext;

            @Override
            public String getServletName() {
                return null;
            }

            // A brand-new dummy ServletContext is built on every call.
            @Override
            public ServletContext getServletContext() {
                return new ServletContext() {
                    @Override
                    public String getContextPath() {
                        return null;
                    }

                    @Override
                    public ServletContext getContext(String uripath) {
                        return null;
                    }

                    @Override
                    public int getMajorVersion() {
                        return 0;
                    }

                    @Override
                    public int getMinorVersion() {
                        return 0;
                    }

                    @Override
                    public String getMimeType(String file) {
                        return null;
                    }

                    @Override
                    public Set getResourcePaths(String path) {
                        return null;
                    }

                    @Override
                    public URL getResource(String path) throws MalformedURLException {
                        return null;
                    }

                    @Override
                    public InputStream getResourceAsStream(String path) {
                        return null;
                    }

                    @Override
                    public RequestDispatcher getRequestDispatcher(String path) {
                        return null;
                    }

                    @Override
                    public RequestDispatcher getNamedDispatcher(String name) {
                        return null;
                    }

                    @Override
                    public Servlet getServlet(String name) throws ServletException {
                        return null;
                    }

                    @Override
                    public Enumeration getServlets() {
                        return null;
                    }

                    @Override
                    public Enumeration getServletNames() {
                        return null;
                    }

                    // Logging is intentionally a no-op for the dummy context.
                    @Override
                    public void log(String msg) {
                    }

                    @Override
                    public void log(Exception exception, String msg) {
                    }

                    @Override
                    public void log(String message, Throwable throwable) {
                    }

                    @Override
                    public String getRealPath(String path) {
                        return null;
                    }

                    @Override
                    public String getServerInfo() {
                        return null;
                    }

                    @Override
                    public String getInitParameter(String name) {
                        return null;
                    }

                    @Override
                    public Enumeration getInitParameterNames() {
                        return null;
                    }

                    @Override
                    public Object getAttribute(String name) {
                        return null;
                    }

                    @Override
                    public Enumeration getAttributeNames() {
                        return null;
                    }

                    @Override
                    public void setAttribute(String name, Object object) {
                    }

                    @Override
                    public void removeAttribute(String name) {
                    }

                    @Override
                    public String getServletContextName() {
                        return null;
                    }

                    @Override
                    public String toString() {
                        return "DUMMY SERVLET CONTEXT TO GET SWAGGER WORKING WITH KARYON";
                    }
                };
            }

            @Override
            public String getInitParameter(String name) {
                return null;
            }

            @Override
            public Enumeration<String> getInitParameterNames() {
                return null;
            }

            @Override
            public String toString() {
                return "DUMMY SERVLET CONFIG TO GET SWAGGER WORKING WITH KARYON";
            }
        };
    }

    // One provider instance serves all injections.
    @Override
    public ComponentScope getScope() {
        return ComponentScope.Singleton;
    }

    // Only volunteer this injectable for ServletConfig injection points.
    @Override
    public Injectable getInjectable(ComponentContext ic, Context context, Type type) {
        if (type.equals(ServletConfig.class)) {
            return this;
        }
        return null;
    }
}
| |
package openmods.core;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import java.util.List;
import java.util.Map;
import java.util.Set;
import net.minecraft.launchwrapper.IClassTransformer;
import net.minecraftforge.fml.common.discovery.ASMDataTable;
import net.minecraftforge.fml.common.discovery.ASMDataTable.ASMData;
import openmods.Log;
import openmods.api.IResultListener;
import openmods.asm.TransformerState;
import openmods.asm.VisitorHelper;
import openmods.asm.VisitorHelper.TransformProvider;
import openmods.config.simple.ConfigProcessor;
import openmods.config.simple.ConfigProcessor.UpdateListener;
import openmods.core.fixes.HorseNullFix;
import openmods.include.IncludingClassVisitor;
import openmods.renderer.PlayerRendererHookVisitor;
import openmods.renderer.PreWorldRenderHookVisitor;
import openmods.utils.StateTracker;
import openmods.utils.StateTracker.StateUpdater;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
public class OpenModsClassTransformer implements IClassTransformer {
    // Handle published by the constructor; see instance().
    private static OpenModsClassTransformer INSTANCE;

    // Class-name prefixes that are never instrumented (framework/library code).
    private static final List<String> IGNORED_PREFIXES = ImmutableList.of(
            "cpw.mods.fml.",
            "net.minecraftforge.",
            "io.netty.",
            "gnu.trove.",
            "com.google.",
            "com.mojang.",
            "joptsimple.",
            "tv.twitch.");

    // Vanilla class name -> bytecode patch to apply when that class is loaded.
    private final Map<String, TransformProvider> vanillaPatches = Maps.newHashMap();

    // Per-patch lifecycle state: DISABLED by default, ENABLED via config,
    // ACTIVATED when the visitor runs, then FINISHED or FAILED (see createResultListener).
    private final StateTracker<TransformerState> states = StateTracker.create(TransformerState.DISABLED);

    private Set<String> includedClasses;
private abstract class ConfigOption implements UpdateListener {
private final StateUpdater<TransformerState> state;
public ConfigOption(String name) {
state = states.register(name);
}
@Override
public void valueSet(String value) {
if ("true".equalsIgnoreCase(value)) {
state.update(TransformerState.ENABLED);
onActivate(state);
}
}
protected abstract void onActivate(StateUpdater<TransformerState> state);
}
    // Bridges IResultListener callbacks to the patch's state tracker:
    // a successful transform marks it FINISHED, a failed one FAILED.
    private static IResultListener createResultListener(final StateUpdater<TransformerState> updater) {
        return new IResultListener() {
            @Override
            public void onSuccess() {
                updater.update(TransformerState.FINISHED);
            }

            @Override
            public void onFailure() {
                updater.update(TransformerState.FAILED);
            }
        };
    }
    // Publishes this instance through the static INSTANCE field.
    // NOTE(review): assumes a single transformer is ever constructed — a second
    // instance would silently replace the first; confirm with the loader setup.
    public OpenModsClassTransformer() {
        INSTANCE = this;
    }
    /** @return the most recently constructed transformer, or null if none exists yet. */
    public static OpenModsClassTransformer instance() {
        return INSTANCE;
    }
public void addConfigValues(ConfigProcessor config) {
config.addEntry("activate_player_render_hook", 0, "true", new ConfigOption("player_render_hook") {
@Override
protected void onActivate(final StateUpdater<TransformerState> state) {
vanillaPatches.put("net.minecraft.client.renderer.entity.RenderPlayer", new TransformProvider(ClassWriter.COMPUTE_FRAMES) {
@Override
public ClassVisitor createVisitor(String name, ClassVisitor cv) {
Log.debug("Trying to apply player render hook (class: %s)", name);
state.update(TransformerState.ACTIVATED);
return new PlayerRendererHookVisitor(name, cv, createResultListener(state));
}
});
}
},
"Purpose: add hook to player rendering code",
"Modified class: net.minecraft.client.renderer.entity.RenderPlayer",
"Known users: OpenBlocks hangglider",
"When disabled: code may fallback to less compatible mechanism (like replacing renderer)");
config.addEntry("hook_pre_world_rendering", 0, "true", new ConfigOption("pre_world_render_hook") {
@Override
protected void onActivate(final StateUpdater<TransformerState> state) {
vanillaPatches.put("net.minecraft.client.renderer.EntityRenderer", new TransformProvider(0) {
@Override
public ClassVisitor createVisitor(String name, ClassVisitor cv) {
Log.debug("Trying to patch EntityRenderer (class: %s)", name);
state.update(TransformerState.ACTIVATED);
return new PreWorldRenderHookVisitor(name, cv, createResultListener(state));
}
});
}
},
"Purpose: hook in world rendering, triggered between sky and terrain",
"Modified class: net.minecraft.client.renderer.EntityRenderer",
"Known users: Sky block",
"When disabled: Sky block will not render properly");
config.addEntry("horse_base_null_fix", 0, "true", new ConfigOption("horse_base_null_fix") {
@Override
protected void onActivate(final StateUpdater<TransformerState> state) {
vanillaPatches.put("net.minecraft.entity.passive.AbstractHorse", new TransformProvider(ClassWriter.COMPUTE_FRAMES) {
@Override
public ClassVisitor createVisitor(String name, ClassVisitor cv) {
Log.debug("Trying to patch AbstractHorse (class: %s)", name);
state.update(TransformerState.ACTIVATED);
return new HorseNullFix.Base(name, cv, createResultListener(state));
}
});
}
},
"Purpose: prevent NPE when creating horse without world",
"Modified class: net.minecraft.entity.passive.AbstractHorse",
"Known users: Trophy",
"When disabled: Trophy for any horse variant cannot be rendered");
config.addEntry("horse_null_fix", 0, "true", new ConfigOption("horse_null_fix") {
@Override
protected void onActivate(final StateUpdater<TransformerState> state) {
vanillaPatches.put("net.minecraft.entity.passive.EntityHorse", new TransformProvider(ClassWriter.COMPUTE_FRAMES) {
@Override
public ClassVisitor createVisitor(String name, ClassVisitor cv) {
Log.debug("Trying to patch EntityHorse (class: %s)", name);
state.update(TransformerState.ACTIVATED);
return new HorseNullFix.Horse(name, cv, createResultListener(state));
}
});
}
},
"Purpose: prevent NPE when creating horse without world",
"Modified class: net.minecraft.entity.passive.EntityHorse",
"Known users: Trophy",
"When disabled: Horse trophy cannot be rendered");
config.addEntry("llama_null_fix", 0, "true", new ConfigOption("llama_null_fix") {
@Override
protected void onActivate(final StateUpdater<TransformerState> state) {
vanillaPatches.put("net.minecraft.entity.passive.EntityLlama", new TransformProvider(ClassWriter.COMPUTE_FRAMES) {
@Override
public ClassVisitor createVisitor(String name, ClassVisitor cv) {
Log.debug("Trying to patch EntityLlama (class: %s)", name);
state.update(TransformerState.ACTIVATED);
return new HorseNullFix.Llama(name, cv, createResultListener(state));
}
});
}
},
"Purpose: prevent NPE when creating llama without world",
"Modified class: net.minecraft.entity.passive.EntityLlama",
"Known users: Trophy",
"When disabled: Llama trophy cannot be rendered");
}
private final static TransformProvider INCLUDING_CV = new TransformProvider(0) {
@Override
public ClassVisitor createVisitor(String name, ClassVisitor cv) {
return new IncludingClassVisitor(cv);
}
};
public void injectAsmData(ASMDataTable table) {
ImmutableSet.Builder<String> includedClasses = ImmutableSet.builder();
for (ASMData data : table.getAll("openmods.include.IncludeInterface"))
includedClasses.add(data.getClassName());
for (ASMData data : table.getAll("openmods.include.IncludeOverride"))
includedClasses.add(data.getClassName());
this.includedClasses = includedClasses.build();
}
private boolean shouldTryIncluding(String clsName) {
if (includedClasses != null) return includedClasses.contains(clsName);
for (String prefix : IGNORED_PREFIXES)
if (clsName.startsWith(prefix)) return false;
return true;
}
@Override
public byte[] transform(String name, String transformedName, byte[] bytes) {
if (bytes == null) return null;
if (transformedName.startsWith("net.minecraft.")) {
TransformProvider provider = vanillaPatches.get(transformedName);
return (provider != null)? VisitorHelper.apply(bytes, name, provider) : bytes;
}
if (shouldTryIncluding(transformedName)) return applyIncludes(name, transformedName, bytes);
return bytes;
}
protected byte[] applyIncludes(final String name, String transformedName, byte[] bytes) {
try {
return VisitorHelper.apply(bytes, name, INCLUDING_CV);
} catch (Throwable t) {
Log.severe(t, "Failed to apply including transformer on %s(%s)", name, transformedName);
throw t;
}
}
public String listStates() {
return Joiner.on(',').join(Iterables.transform(states.states(), Functions.toStringFunction()));
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.indexer;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.metamx.common.Granularity;
import io.druid.data.input.Firehose;
import io.druid.data.input.InputRow;
import io.druid.data.input.impl.CSVParseSpec;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.granularity.QueryGranularity;
import io.druid.indexer.hadoop.WindowedDataSegment;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.segment.IndexIO;
import io.druid.segment.QueryableIndex;
import io.druid.segment.QueryableIndexStorageAdapter;
import io.druid.segment.StorageAdapter;
import io.druid.segment.indexing.DataSchema;
import io.druid.segment.indexing.granularity.UniformGranularitySpec;
import io.druid.segment.loading.LocalDataSegmentPuller;
import io.druid.segment.realtime.firehose.IngestSegmentFirehose;
import io.druid.segment.realtime.firehose.WindowedStorageAdapter;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.HashBasedNumberedShardSpec;
import org.apache.commons.io.FileUtils;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
 * End-to-end tests for Hadoop batch "delta" ingestion: re-indexing an existing
 * segment (over its full interval or a partial window) and merging an existing
 * segment with additional raw input files, then verifying the rows of the
 * produced segment by reading it back through an IngestSegmentFirehose.
 */
public class BatchDeltaIngestionTest
{
@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder();
private static final ObjectMapper MAPPER;
private static final IndexIO INDEX_IO;
// Full day covered by the test segment, and a 2-hour prefix used for the
// partial-window reindex test.
private static final Interval INTERVAL_FULL = new Interval("2014-10-22T00:00:00Z/P1D");
private static final Interval INTERVAL_PARTIAL = new Interval("2014-10-22T00:00:00Z/PT2H");
private static final DataSegment SEGMENT;
static {
MAPPER = new DefaultObjectMapper();
MAPPER.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed"));
// Shard specs deserialize with an injected ObjectMapper, so register MAPPER as injectable.
InjectableValues inject = new InjectableValues.Std().addValue(ObjectMapper.class, MAPPER);
MAPPER.setInjectableValues(inject);
INDEX_IO = HadoopDruidIndexerConfig.INDEX_IO;
try {
// Fixture segment from test resources; its load spec is rewritten to point
// at the local index.zip so LocalDataSegmentPuller can fetch it.
SEGMENT = new DefaultObjectMapper()
.readValue(
BatchDeltaIngestionTest.class.getClassLoader().getResource("test-segment/descriptor.json"),
DataSegment.class
)
.withLoadSpec(
ImmutableMap.<String, Object>of(
"type",
"local",
"path",
BatchDeltaIngestionTest.class.getClassLoader().getResource("test-segment/index.zip").getPath()
)
);
}
catch (IOException e) {
throw Throwables.propagate(e);
}
}
// Reindex the whole segment over its full interval; output rows must match the input segment.
@Test
public void testReindexing() throws Exception
{
List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL));
HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig(
ImmutableMap.<String, Object>of(
"type",
"dataSource",
"ingestionSpec",
ImmutableMap.of(
"dataSource",
"xyz",
"interval",
INTERVAL_FULL
),
"segments",
segments
),
temporaryFolder.newFolder()
);
List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of(
ImmutableMap.<String, Object>of(
"time", DateTime.parse("2014-10-22T00:00:00.000Z"),
"host", ImmutableList.of("a.example.com"),
"visited_sum", 100L,
"unique_hosts", 1.0d
),
ImmutableMap.<String, Object>of(
"time", DateTime.parse("2014-10-22T01:00:00.000Z"),
"host", ImmutableList.of("b.example.com"),
"visited_sum", 150L,
"unique_hosts", 1.0d
),
ImmutableMap.<String, Object>of(
"time", DateTime.parse("2014-10-22T02:00:00.000Z"),
"host", ImmutableList.of("c.example.com"),
"visited_sum", 200L,
"unique_hosts", 1.0d
)
);
testIngestion(config, expectedRows, Iterables.getOnlyElement(segments));
}
// Reindex with a 2-hour window: only rows inside INTERVAL_PARTIAL survive
// (the 02:00 row of the fixture segment is dropped).
@Test
public void testReindexingWithPartialWindow() throws Exception
{
List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_PARTIAL));
HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig(
ImmutableMap.<String, Object>of(
"type",
"dataSource",
"ingestionSpec",
ImmutableMap.of(
"dataSource",
"xyz",
"interval",
INTERVAL_FULL
),
"segments",
segments
),
temporaryFolder.newFolder()
);
List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of(
ImmutableMap.<String, Object>of(
"time", DateTime.parse("2014-10-22T00:00:00.000Z"),
"host", ImmutableList.of("a.example.com"),
"visited_sum", 100L,
"unique_hosts", 1.0d
),
ImmutableMap.<String, Object>of(
"time", DateTime.parse("2014-10-22T01:00:00.000Z"),
"host", ImmutableList.of("b.example.com"),
"visited_sum", 150L,
"unique_hosts", 1.0d
)
);
testIngestion(config, expectedRows, Iterables.getOnlyElement(segments));
}
// Delta ingestion: existing segment plus two new static files; metric values
// are the sums of the segment rows and the new raw rows.
@Test
public void testDeltaIngestion() throws Exception
{
File tmpDir = temporaryFolder.newFolder();
File dataFile1 = new File(tmpDir, "data1");
FileUtils.writeLines(
dataFile1,
ImmutableList.of(
"2014102200,a.example.com,a.example.com,90",
"2014102201,b.example.com,b.example.com,25"
)
);
File dataFile2 = new File(tmpDir, "data2");
FileUtils.writeLines(
dataFile2,
ImmutableList.of(
"2014102202,c.example.com,c.example.com,70"
)
);
//using a hadoop glob path to test that it continues to work with hadoop MultipleInputs usage and not
//affected by
//https://issues.apache.org/jira/browse/MAPREDUCE-5061
String inputPath = tmpDir.getPath() + "/{data1,data2}";
List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL));
HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig(
ImmutableMap.<String, Object>of(
"type",
"multi",
"children",
ImmutableList.of(
ImmutableMap.<String, Object>of(
"type",
"dataSource",
"ingestionSpec",
ImmutableMap.of(
"dataSource",
"xyz",
"interval",
INTERVAL_FULL
),
"segments",
segments
),
ImmutableMap.<String, Object>of(
"type",
"static",
"paths",
inputPath
)
)
),
temporaryFolder.newFolder()
);
// e.g. a.example.com: 100 (segment) + 90 (data1) = 190
List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of(
ImmutableMap.<String, Object>of(
"time", DateTime.parse("2014-10-22T00:00:00.000Z"),
"host", ImmutableList.of("a.example.com"),
"visited_sum", 190L,
"unique_hosts", 1.0d
),
ImmutableMap.<String, Object>of(
"time", DateTime.parse("2014-10-22T01:00:00.000Z"),
"host", ImmutableList.of("b.example.com"),
"visited_sum", 175L,
"unique_hosts", 1.0d
),
ImmutableMap.<String, Object>of(
"time", DateTime.parse("2014-10-22T02:00:00.000Z"),
"host", ImmutableList.of("c.example.com"),
"visited_sum", 270L,
"unique_hosts", 1.0d
)
);
testIngestion(config, expectedRows, Iterables.getOnlyElement(segments));
}
/**
 * Runs the index generator job for the given config, checks the produced
 * descriptor.json / index.zip on disk, then unzips the segment and verifies
 * its rows through an IngestSegmentFirehose.
 */
private void testIngestion(
HadoopDruidIndexerConfig config,
List<ImmutableMap<String, Object>> expectedRowsGenerated,
WindowedDataSegment windowedDataSegment
) throws Exception
{
IndexGeneratorJob job = new IndexGeneratorJob(config);
JobHelper.runJobs(ImmutableList.<Jobby>of(job), config);
// Output layout: <outputPath>/<dataSource>/<start>_<end>/<version>/<partitionNum>
File segmentFolder = new File(
String.format(
"%s/%s/%s_%s/%s/0",
config.getSchema().getIOConfig().getSegmentOutputPath(),
config.getSchema().getDataSchema().getDataSource(),
INTERVAL_FULL.getStart().toString(),
INTERVAL_FULL.getEnd().toString(),
config.getSchema().getTuningConfig().getVersion()
)
);
Assert.assertTrue(segmentFolder.exists());
File descriptor = new File(segmentFolder, "descriptor.json");
File indexZip = new File(segmentFolder, "index.zip");
Assert.assertTrue(descriptor.exists());
Assert.assertTrue(indexZip.exists());
DataSegment dataSegment = MAPPER.readValue(descriptor, DataSegment.class);
Assert.assertEquals("website", dataSegment.getDataSource());
Assert.assertEquals(config.getSchema().getTuningConfig().getVersion(), dataSegment.getVersion());
Assert.assertEquals(INTERVAL_FULL, dataSegment.getInterval());
Assert.assertEquals("local", dataSegment.getLoadSpec().get("type"));
Assert.assertEquals(indexZip.getCanonicalPath(), dataSegment.getLoadSpec().get("path"));
Assert.assertEquals("host", dataSegment.getDimensions().get(0));
Assert.assertEquals("visited_sum", dataSegment.getMetrics().get(0));
Assert.assertEquals("unique_hosts", dataSegment.getMetrics().get(1));
Assert.assertEquals(Integer.valueOf(9), dataSegment.getBinaryVersion());
HashBasedNumberedShardSpec spec = (HashBasedNumberedShardSpec) dataSegment.getShardSpec();
Assert.assertEquals(0, spec.getPartitionNum());
Assert.assertEquals(1, spec.getPartitions());
// Read the freshly written segment back and compare its rows.
File tmpUnzippedSegmentDir = temporaryFolder.newFolder();
new LocalDataSegmentPuller().getSegmentFiles(dataSegment, tmpUnzippedSegmentDir);
QueryableIndex index = INDEX_IO.loadIndex(tmpUnzippedSegmentDir);
StorageAdapter adapter = new QueryableIndexStorageAdapter(index);
Firehose firehose = new IngestSegmentFirehose(
ImmutableList.of(new WindowedStorageAdapter(adapter, windowedDataSegment.getInterval())),
ImmutableList.of("host"),
ImmutableList.of("visited_sum", "unique_hosts"),
null,
QueryGranularity.NONE
);
List<InputRow> rows = Lists.newArrayList();
while (firehose.hasMore()) {
rows.add(firehose.nextRow());
}
verifyRows(expectedRowsGenerated, rows);
}
/**
 * Builds an indexer config for the "website" datasource with the given input
 * spec: CSV rows (timestamp,host,host2,visited_num), one "host" dimension,
 * a long-sum and a hyperUnique metric, daily segment granularity and a single
 * hashed shard for the test day.
 */
private HadoopDruidIndexerConfig makeHadoopDruidIndexerConfig(Map<String, Object> inputSpec, File tmpDir)
throws Exception
{
HadoopDruidIndexerConfig config = new HadoopDruidIndexerConfig(
new HadoopIngestionSpec(
new DataSchema(
"website",
MAPPER.convertValue(
new StringInputRowParser(
new CSVParseSpec(
new TimestampSpec("timestamp", "yyyyMMddHH", null),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null),
null,
ImmutableList.of("timestamp", "host", "host2", "visited_num")
)
),
Map.class
),
new AggregatorFactory[]{
new LongSumAggregatorFactory("visited_sum", "visited_num"),
new HyperUniquesAggregatorFactory("unique_hosts", "host2")
},
new UniformGranularitySpec(
Granularity.DAY, QueryGranularity.NONE, ImmutableList.of(INTERVAL_FULL)
),
MAPPER
),
new HadoopIOConfig(
inputSpec,
null,
tmpDir.getCanonicalPath()
),
new HadoopTuningConfig(
tmpDir.getCanonicalPath(),
null,
null,
null,
null,
null,
false,
false,
false,
false,
null,
false,
false,
null,
null,
null
)
)
);
config.setShardSpecs(
ImmutableMap.<DateTime, List<HadoopyShardSpec>>of(
INTERVAL_FULL.getStart(),
ImmutableList.of(
new HadoopyShardSpec(
new HashBasedNumberedShardSpec(0, 1, null, HadoopDruidIndexerConfig.JSON_MAPPER),
0
)
)
)
);
// Round-trip through fromSpec, as the production code path does.
config = HadoopDruidIndexerConfig.fromSpec(config.getSchema());
return config;
}
// Order-sensitive row comparison: timestamps, dimension values, long metric
// and the estimated hyperUnique cardinality (within 0.001).
private void verifyRows(List<ImmutableMap<String, Object>> expectedRows, List<InputRow> actualRows)
{
// NOTE(review): leftover debug output — consider removing or routing through a logger.
System.out.println("actualRows = " + actualRows);
Assert.assertEquals(expectedRows.size(), actualRows.size());
for (int i = 0; i < expectedRows.size(); i++) {
Map<String, Object> expected = expectedRows.get(i);
InputRow actual = actualRows.get(i);
Assert.assertEquals(ImmutableList.of("host"), actual.getDimensions());
Assert.assertEquals(expected.get("time"), actual.getTimestamp());
Assert.assertEquals(expected.get("host"), actual.getDimension("host"));
Assert.assertEquals(expected.get("visited_sum"), actual.getLongMetric("visited_sum"));
Assert.assertEquals(
(Double) expected.get("unique_hosts"),
(Double) HyperUniquesAggregatorFactory.estimateCardinality(actual.getRaw("unique_hosts")),
0.001
);
}
}
}
| |
package org.mutabilitydetector;
/*
* #%L
* MutabilityDetector
* %%
* Copyright (C) 2008 - 2014 Graham Allan
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.junit.Test;
import org.mutabilitydetector.checkers.CheckerRunner.ExceptionPolicy;
import org.mutabilitydetector.checkers.MutabilityAnalysisException;
import org.mutabilitydetector.checkers.MutabilityCheckerFactory.ReassignedFieldAnalysisChoice;
import org.mutabilitydetector.checkers.info.CopyMethod;
import org.mutabilitydetector.config.HardcodedResultsUsage;
import org.mutabilitydetector.locations.Dotted;
import org.mutabilitydetector.unittesting.internal.CloneList;
import java.util.List;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.mutabilitydetector.AnalysisResult.analysisResult;
import static org.mutabilitydetector.AnalysisResult.definitelyImmutable;
import static org.mutabilitydetector.checkers.CheckerRunner.ExceptionPolicy.CARRY_ON;
import static org.mutabilitydetector.checkers.CheckerRunner.ExceptionPolicy.FAIL_FAST;
import static org.mutabilitydetector.checkers.MutabilityCheckerFactory.ReassignedFieldAnalysisChoice.NAIVE_PUT_FIELD_ANALYSIS;
import static org.mutabilitydetector.config.HardcodedResultsUsage.DIRECTLY_IN_ASSERTION;
import static org.mutabilitydetector.config.HardcodedResultsUsage.LOOKUP_WHEN_REFERENCED;
import static org.mutabilitydetector.locations.Dotted.dotted;
import static org.mutabilitydetector.unittesting.AllowedReason.provided;
import static org.mutabilitydetector.unittesting.MutabilityAssert.assertInstancesOf;
import static org.mutabilitydetector.unittesting.MutabilityMatchers.areImmutable;
/**
 * Tests for {@code ConfigurationBuilder}: merging hardcoded results, copy
 * methods and entire configurations, plus validation of copy-method lookups.
 */
public class ConfigurationBuilderTest {

    @Test
    public void canMergeResultsFromExistingConfiguration() throws Exception {
        // A configuration carrying one hardcoded result...
        final Configuration donor = new ConfigurationBuilder() {
            @Override public void configure() {
                hardcodeResult(definitelyImmutable("hardcoded.in.other.Configuration"));
            }
        }.build();

        // ...is merged into a freshly built configuration.
        final Configuration receiver = new ConfigurationBuilder() {
            @Override public void configure() {
                mergeHardcodedResultsFrom(donor);
            }
        }.build();

        assertThat(receiver.hardcodedResults(), hasKey(dotted("hardcoded.in.other.Configuration")));
    }

    @Test
    public void mergeReplacesExistingHardcodedResultForClassWithCurrentHardcodedResult() throws Exception {
        final AnalysisResult winningResult = analysisResult("hardcoded.in.both.Configurations",
                IsImmutable.NOT_IMMUTABLE,
                TestUtil.unusedMutableReasonDetail());

        final Configuration incoming = new ConfigurationBuilder() {
            @Override public void configure() {
                hardcodeResult(winningResult);
                hardcodeResult(definitelyImmutable("only.in.existing.Configuration"));
            }
        }.build();

        final Configuration combined = new ConfigurationBuilder() {
            @Override public void configure() {
                hardcodeResult(definitelyImmutable("hardcoded.in.both.Configurations"));
                hardcodeResult(definitelyImmutable("only.in.current.Configuration"));
                mergeHardcodedResultsFrom(incoming);
            }
        }.build();

        // The merged-in result for the shared class wins; results unique to
        // either side are kept.
        Map<Dotted, AnalysisResult> results = combined.hardcodedResults();
        assertThat(results.size(), is(3));
        assertThat(results, hasEntry(dotted("hardcoded.in.both.Configurations"), winningResult));
        assertThat(results, hasEntry(dotted("only.in.existing.Configuration"), definitelyImmutable("only.in.existing.Configuration")));
        assertThat(results, hasEntry(dotted("only.in.current.Configuration"), definitelyImmutable("only.in.current.Configuration")));
    }

    @Test
    public void builtConfigurationsAreImmutable() throws Exception {
        ConfigurationBuilder emptyBuilder = new ConfigurationBuilder() {
            @Override public void configure() { }
        };
        assertInstancesOf(emptyBuilder.build().getClass(),
                areImmutable(),
                provided(AnalysisResult.class, Dotted.class).areAlsoImmutable());
    }

    @Test
    public void canMergeEntireConfigurations() throws Exception {
        final Configuration configA = new ConfigurationBuilder() {
            @Override public void configure() {
                hardcodeResult(definitelyImmutable("hardcoded.in.both.Configurations"));
                hardcodeResult(definitelyImmutable("only.in.existing.Configuration"));
                useAdvancedReassignedFieldAlgorithm();
                setHowToUseHardcodedResults(DIRECTLY_IN_ASSERTION);
            }
        }.build();

        final Configuration configB = new ConfigurationBuilder() {
            @Override public void configure() {
                hardcodeAsDefinitelyImmutable("only.in.second.Configuration");
                hardcodeAsImmutableContainerType("container.only.in.second.Configuration");
                hardcodeValidCopyMethod(List.class, "com.google.common.collect.Lists.newArrayList", Iterable.class);
                setExceptionPolicy(FAIL_FAST);
                setHowToUseHardcodedResults(LOOKUP_WHEN_REFERENCED);
            }
        }.build();

        final AnalysisResult localResult = analysisResult("hardcoded.in.both.Configurations",
                IsImmutable.NOT_IMMUTABLE,
                TestUtil.unusedMutableReasonDetail());

        final Configuration combined = new ConfigurationBuilder() {
            @Override public void configure() {
                setExceptionPolicy(CARRY_ON);
                setHowToUseHardcodedResults(DIRECTLY_IN_ASSERTION);
                hardcodeResult(localResult);
                merge(configA);
                merge(configB);
            }
        }.build();

        assertThat(combined.hardcodedResults().size(), is(3));
        assertThat(combined.immutableContainerClasses(), contains(Dotted.dotted("container.only.in.second.Configuration")));
        assertThat(combined.hardcodedCopyMethods().get("java.util.List"),
                contains(new CopyMethod(dotted("com.google.common.collect.Lists"), "newArrayList", "(Ljava/lang/Iterable;)Ljava/util/ArrayList;")));
        // Local settings take precedence over the merged-in ones; merging does
        // not adopt the other configuration's field-analysis algorithm either.
        assertThat(combined.exceptionPolicy(), is(CARRY_ON));
        assertThat(combined.howToUseHardcodedResults(), is(DIRECTLY_IN_ASSERTION));
        assertThat(combined.reassignedFieldAlgorithm(), is(NAIVE_PUT_FIELD_ANALYSIS));
    }

    @Test
    public void mergeCopyMethodsDoesNotCauseDuplicates() {
        final CopyMethod methodA = new CopyMethod(dotted("any"), "method", "A");
        final CopyMethod methodB = new CopyMethod(dotted("any"), "method", "B");
        final CopyMethod methodC = new CopyMethod(dotted("any"), "method", "C");
        // Equal to methodA; must not produce a duplicate entry after merging.
        final CopyMethod methodADuplicate = new CopyMethod(dotted("any"), "method", "A");

        final Configuration sourceConfig = new ConfigurationBuilder() {
            @Override public void configure() {
                hardcodeValidCopyMethod(List.class, methodA);
                hardcodeValidCopyMethod(List.class, methodB);
            }
        }.build();

        final Configuration targetConfig = new ConfigurationBuilder() {
            @Override public void configure() {
                hardcodeValidCopyMethod(List.class, methodC);
                hardcodeValidCopyMethod(List.class, methodADuplicate);
                mergeValidCopyMethodsFrom(sourceConfig);
            }
        }.build();

        assertThat(targetConfig.hardcodedCopyMethods().values(), containsInAnyOrder(methodA, methodB, methodC));
    }

    @Test (expected=MutabilityAnalysisException.class)
    public void shouldThrowIfClassOfCopyMethodIsNotKnown() {
        // The owning class of the copy method cannot be loaded.
        new ConfigurationBuilder() {
            @Override public void configure() {
                hardcodeValidCopyMethod(List.class, "non.existent.Collection.<init>", List.class);
            }
        }.build();
    }

    @Test (expected=MutabilityAnalysisException.class)
    public void shouldThrowIfCopyMethodDoesNotExist() {
        // The class exists but has no such method.
        new ConfigurationBuilder() {
            @Override public void configure() {
                hardcodeValidCopyMethod(List.class, "com.google.common.collect.Lists.doesNotExist", List.class);
            }
        }.build();
    }

    @Test
    public void constructorsCanBeValidCopyMethods() {
        final Configuration configuration = new ConfigurationBuilder() {
            @Override public void configure() {
                hardcodeValidCopyMethod(CloneList.class,
                        "org.mutabilitydetector.unittesting.internal.CloneList.<init>", List.class);
            }
        }.build();
        assertThat(configuration.hardcodedCopyMethods().size(), is(1));
    }
}
| |
/*
* Copyright (c) 2017, Allogica
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Allogen nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.allogica.allogen.backend.java;
import com.allogica.allogen.Compiler;
import com.allogica.allogen.CompilerContext;
import com.allogica.allogen.backend.AbstractCompilerBackend;
import com.allogica.allogen.idl.model.IDLAnnotation;
import com.allogica.allogen.model.Class;
import com.allogica.allogen.model.*;
import com.allogica.allogen.types.*;
import com.allogica.allogen.util.StringHelper;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
public class JavaBackend extends AbstractCompilerBackend {
/** Returns the StringTemplate group used to render the Java wrapper sources. */
@Override
public URL getTargetTemplateURL() {
    final URL templateResource = JavaBackend.class.getResource("TargetTemplate.stg");
    return templateResource;
}
/**
 * Builds the relative output path of the generated Java source file:
 * the class's namespaces (lowercased) as directories, followed by the
 * "javaName" attribute set in {@code preHandle} plus the {@code .java} suffix.
 */
@Override
public String getTargetOutputFile(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz) {
    // Join the lowercased namespace parts directly instead of materialising an
    // intermediate list and re-joining it.
    final String directory = Arrays.stream(clazz.getNamespaces())
            .map(String::toLowerCase)
            .collect(Collectors.joining("/"));
    return directory + "/" + clazz.getAttribute("javaName") + ".java";
}
/** Returns the StringTemplate group used to render the C++/JNI bridge sources. */
@Override
public URL getBridgeTemplateURL() {
    final URL templateResource = JavaBackend.class.getResource("BridgeTemplate.stg");
    return templateResource;
}
/** Returns the bridge header path: the context's bridge path plus {@code .hpp}. */
@Override
public String getBridgeOutputHeaderFile(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz) {
    final String bridgeBasePath = compilerContext.getBridgePath(clazz.getNamespaces(), clazz.getName());
    return bridgeBasePath + ".hpp";
}
/** Returns the bridge implementation path: the context's bridge path plus {@code .cpp}. */
@Override
public String getBridgeOutputImplementationFile(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz) {
    final String bridgeBasePath = compilerContext.getBridgePath(clazz.getNamespaces(), clazz.getName());
    return bridgeBasePath + ".cpp";
}
/**
 * Seeds the Java-specific naming attributes on the class:
 * {@code javaPackage} (dot-joined lowercased namespaces), {@code javaName}
 * (the IDL class name, unchanged), {@code jniSignature},
 * {@code javaFullyQualifiedName}, {@code javaClassPath} (slash-joined) and
 * {@code javaSignature} (JVM object descriptor {@code Lpath/Name;}).
 */
@Override
public void preHandle(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz) {
    // Lowercase the namespace parts once; the original recomputed this stream
    // pipeline three times (package, class path and signature).
    final String[] namespaceParts = Arrays.stream(clazz.getNamespaces())
            .map(String::toLowerCase)
            .toArray(String[]::new);
    final String javaPackage = String.join(".", namespaceParts);
    final String javaName = clazz.getName();
    final String javaClassPath = String.join("/", namespaceParts) + "/" + javaName;

    // Attribute values and their set order are unchanged from the original.
    clazz.setAttribute("javaPackage", javaPackage);
    clazz.setAttribute("javaName", javaName);
    clazz.setAttribute("jniSignature", javaName);
    clazz.setAttribute("javaFullyQualifiedName", javaPackage + "." + javaName);
    clazz.setAttribute("javaClassPath", javaClassPath);
    clazz.setAttribute("javaSignature", "L" + javaClassPath + ";");
}
/** Records the method's Java name, which is the IDL method name unchanged. */
@Override
public void preHandle(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz, Method method) {
    final String javaMethodName = method.getName();
    method.setAttribute("javaName", javaMethodName);
}
/** Stores the mangled JNI symbol for the method's native counterpart. */
@Override
public void postHandle(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz, Method method) {
    final String mangledName = createJNIMethodMangling(clazz, method);
    method.setAttribute("jniSignature", mangledName);
}
@Override
public void preHandle(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz, InheritedMethod method) {
// Intentionally a no-op: inherited methods need no pre-processing in this backend.
}
/**
 * Stores the mangled JNI symbol for an inherited method, derived from the
 * wrapped original {@code Method} declaration.
 */
@Override
public void postHandle(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz, InheritedMethod method) {
    final Method declaration = method.getMethod();
    method.setAttribute("jniSignature", createJNIMethodMangling(clazz, declaration));
}
/**
 * Surfaces lambda-typed arguments to Java as callback interfaces; all other
 * argument types are left untouched.
 */
@Override
public void preHandle(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz, Method method, MethodArgument argument) {
    final TypeName argumentType = argument.getType();
    if (!(argumentType.getResolvedType() instanceof LambdaType)) {
        return;
    }
    createCallbackInterface(clazz, method, argument, argumentType, (LambdaType) argumentType.getResolvedType());
}
@Override
public void preHandle(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz, Constructor constructor) {
// Intentionally a no-op: constructor attributes are only set in postHandle.
}
/**
 * Bridges the constructor through a synthetic native {@code _init} method that
 * carries the same argument list, and records the resulting mangled JNI symbol
 * on the constructor.
 */
@Override
public void postHandle(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz, Constructor constructor) {
    final Method initStub = new Method("_init", null, constructor.getArguments());
    initStub.setAttribute("javaName", "_init");
    // The mangler is told whether the constructor is overloaded (more than one
    // constructor declared on the class).
    final boolean overloaded = clazz.getConstructors().size() > 1;
    constructor.setAttribute("jniSignature", createJNIMethodMangling(clazz, initStub, overloaded));
}
@Override
public void preHandle(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz, Destructor destructor) {
// Intentionally a no-op: destructor attributes are only set in postHandle.
}
/**
 * Bridges the destructor through a synthetic, argument-less {@code finalize}
 * stub method (never overloaded) and records the mangled JNI symbol on the
 * destructor.
 */
@Override
public void postHandle(Compiler<?, ?> compiler, CompilerContext compilerContext, Class clazz, Destructor destructor) {
// create a "finalize" stub method (the previous comment said "init" —
// copy-pasted from the constructor handler)
final Method stubInit = new Method("finalize", null, new ArrayList<>());
stubInit.setAttribute("javaName", "finalize");
destructor.setAttribute("jniSignature", createJNIMethodMangling(clazz, stubInit, false));
}
// -----------------------------------------------------------------------------------------------------------------
/**
 * Register a Java callback interface for a lambda-typed method argument.
 * Interface/method names come from an optional 'Callback' IDL annotation,
 * otherwise they are derived from the method and argument names.
 * The generated interface is a nested type of the owning class.
 */
private void createCallbackInterface(Class clazz, Method method, MethodArgument argument, TypeName type, LambdaType lambda) {
    final IDLAnnotation annotation = argument.getIdlMethodArgument().getAnnotation("Callback");
    final String interfaceName;
    final String methodName;
    final String reuse;
    if (annotation != null) {
        // explicit names provided via @Callback(interface=..., method=..., reuse=...)
        interfaceName = annotation.getProperty("interface");
        methodName = annotation.getProperty("method");
        reuse = annotation.getProperty("reuse");
    } else {
        // derived names: e.g. method 'load' + argument 'handler' -> interface 'LoadHandler', method 'onHandler'
        interfaceName = StringHelper.firstToUpper(method.getName()) + StringHelper.firstToUpper(argument.getName());
        methodName = "on" + StringHelper.firstToUpper(argument.getName());
        reuse = null;
    }
    argument.setAttribute("javaHasLambdaInterface", true);
    argument.setAttribute("javaLambdaInterfaceName", interfaceName);
    argument.setAttribute("javaLambdaMethodName", methodName);
    argument.setAttribute("javaLambdaReuse", reuse != null);
    // nested-type name: OwningClass$InterfaceName
    type.setAttribute("javaFullyQualifiedName", clazz.getAttribute("javaFullyQualifiedName") + "$" +
        interfaceName);
    argument.getType().setAttribute("javaLambdaInterfaceName", interfaceName);
}
// -----------------------------------------------------------------------------------------------------------------
// Mapping of IDL primitive type names to JNI type descriptor characters,
// used when mangling overloaded native method names.
private static final Map<String, String> javaSignatures = new HashMap<>();
static {
    javaSignatures.put("void", "V");
    // JNI descriptor for boolean is 'Z' ('B' denotes byte) — was wrongly 'B',
    // which collided with int8/uint8 in overloaded name mangling
    javaSignatures.put("bool", "Z");
    javaSignatures.put("int8", "B");
    javaSignatures.put("uint8", "B");
    javaSignatures.put("int16", "S");
    javaSignatures.put("uint16", "S");
    javaSignatures.put("int32", "I");
    javaSignatures.put("uint32", "I");
    javaSignatures.put("int64", "J");
    javaSignatures.put("uint64", "J");
    javaSignatures.put("float", "F");
    javaSignatures.put("double", "D");
}
/**
 * Shorthand for {@link #createJNIMethodMangling(Class, Method, boolean)}
 * without forcing the overloaded name form.
 */
private String createJNIMethodMangling(Class clazz, Method method) {
    return createJNIMethodMangling(clazz, method, false);
}
/**
 * Build the JNI-mangled C function name for the given method, e.g.
 * {@code Java-style com_example_Foo_bar} or, for overloaded methods,
 * {@code com_example_Foo_bar__I} with the argument signature appended.
 *
 * @param forceOverload append the argument signature even without overloads
 *                      (used for constructor '_init' stubs)
 * @throws RuntimeException if a primitive argument type has no known JNI descriptor
 */
private String createJNIMethodMangling(Class clazz, Method method, boolean forceOverload) {
    String packageName = ((String) clazz.getAttribute("javaPackage")).replaceAll("\\.", "_");
    String className = clazz.getAttribute("javaName");
    String methodName = method.getAttribute("javaName");
    // JNI escapes '_' inside names as '_1'
    methodName = methodName.replaceAll("_", "_1");
    /* if no overloads, java uses a simplified signature */
    if (!clazz.hasOverloadForMethod(method.getName()) && !forceOverload) {
        return packageName + "_" + className + "_" + methodName;
    }
    StringBuilder overloadBuilder = new StringBuilder();
    overloadBuilder.append(packageName).append("_");
    overloadBuilder.append(className).append("_");
    overloadBuilder.append(methodName)
        .append("__");
    for (final MethodArgument argument : method.getArguments()) {
        Type type = argument.getType().getResolvedType();
        if (type instanceof SharedPtrType) {
            // shared_ptr<T> is exposed as plain T on the Java side
            type = ((SharedPtrType) type).getContainedType().getResolvedType();
        }
        if (type instanceof UserDefinedType) {
            final String javaName = ((UserDefinedType) type).getUserDefinedClass()
                .getAttribute("javaFullyQualifiedName");
            // NOTE(review): underscores inside class/package names would need '_1'
            // escaping here per the JNI spec — confirm generated names never contain '_'
            final String normalizedJavaName = javaName.replaceAll("\\.", "_");
            overloadBuilder.append("L").append(normalizedJavaName).append("_2");
        } else if (type instanceof LambdaType) {
            final String javaName = argument.getType().getAttribute("javaFullyQualifiedName");
            final String normalizedJavaName = javaName
                .replaceAll("\\.", "_")
                .replaceAll("\\$", "_00024"); // '$' of the nested callback interface -> JNI unicode escape
            overloadBuilder.append("L").append(normalizedJavaName).append("_2");
        } else if (type instanceof PrimitiveType) {
            final String typeName = argument.getType().getName();
            if (!javaSignatures.containsKey(typeName)) {
                throw new RuntimeException(String.format("Java backend does not support '%s'", typeName));
            }
            overloadBuilder.append(javaSignatures.get(typeName));
        } else if (type instanceof StringType) {
            overloadBuilder.append("Ljava_lang_String_2");
        } else if (type instanceof BufferType) {
            overloadBuilder.append("Ljava_nio_ByteBuffer_2");
        } else if (type instanceof VectorType) {
            overloadBuilder.append("Ljava_util_List_2");
        } else if (type instanceof MapType) {
            overloadBuilder.append("Ljava_util_Map_2");
        }
        // any other type kind is silently skipped (pre-existing behavior)
    }
    return overloadBuilder.toString();
}
}
| |
/**
* JBoss, Home of Professional Open Source
* Copyright Red Hat, Inc., and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.aerogear.unifiedpush.jpa;
import org.jboss.aerogear.unifiedpush.api.AndroidVariant;
import org.jboss.aerogear.unifiedpush.api.Installation;
import org.jboss.aerogear.unifiedpush.api.Variant;
import org.jboss.aerogear.unifiedpush.api.iOSVariant;
import org.jboss.aerogear.unifiedpush.jpa.dao.impl.JPAInstallationDao;
import org.jboss.aerogear.unifiedpush.jpa.dao.impl.JPAVariantDao;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Integration tests for {@link JPAVariantDao}, executed against the
 * 'UnifiedPush' persistence unit. {@link JPAInstallationDao} is used to verify
 * that installations are removed together with their variant.
 *
 * <p>Each test runs inside a single transaction: opened in {@link #setUp()}
 * and committed in {@link #tearDown()}.
 */
public class VariantDaoTest {

    private EntityManager entityManager;
    private JPAVariantDao variantDao;
    private JPAInstallationDao installationDao;

    @Before
    public void setUp() {
        EntityManagerFactory emf = Persistence.createEntityManagerFactory("UnifiedPush");
        entityManager = emf.createEntityManager();
        // begin the transaction shared by all DAO calls of one test
        entityManager.getTransaction().begin();

        variantDao = new JPAVariantDao();
        variantDao.setEntityManager(entityManager);
        installationDao = new JPAInstallationDao();
        installationDao.setEntityManager(entityManager);
    }

    @After
    public void tearDown() {
        entityManager.getTransaction().commit();
        entityManager.close();
    }

    @Test
    public void findVariantByIdForDeveloper() {
        final AndroidVariant av = new AndroidVariant();
        av.setGoogleKey("KEY");
        av.setDeveloper("admin");
        final String uuid = av.getVariantID();
        variantDao.create(av);

        // found only for the matching (variantID, developer) combination
        assertThat(variantDao.findByVariantIDForDeveloper(uuid, "admin")).isNotNull();
        assertThat(variantDao.findByVariantIDForDeveloper(null, "admin")).isNull();
        assertThat(variantDao.findByVariantIDForDeveloper(uuid, "mr x")).isNull();
    }

    @Test
    public void findVariantIDsForDeveloper() {
        final AndroidVariant av = new AndroidVariant();
        av.setGoogleKey("KEY");
        av.setDeveloper("admin");
        final String uuid = av.getVariantID();
        variantDao.create(av);

        assertThat(variantDao.findVariantIDsForDeveloper("admin")).isNotNull();
        assertThat(variantDao.findVariantIDsForDeveloper("admin")).containsOnly(uuid);
    }

    @Test
    public void findVariantsByIDs() {
        final List<String> variantIDs = new ArrayList<String>(4);

        final AndroidVariant av1 = new AndroidVariant();
        av1.setName("Something Android");
        av1.setGoogleKey("KEY");
        av1.setDeveloper("admin");
        variantIDs.add(av1.getVariantID());
        variantDao.create(av1);

        final AndroidVariant av2 = new AndroidVariant();
        av2.setName("Something more Android");
        av2.setGoogleKey("KEY");
        av2.setDeveloper("admin");
        variantIDs.add(av2.getVariantID());
        variantDao.create(av2);

        // add some invalid IDs: the query must silently ignore them
        variantIDs.add("foo");
        variantIDs.add("bar");

        final List<Variant> variants = variantDao.findAllVariantsByIDs(variantIDs);
        assertThat(variants).hasSize(2);
        assertThat(variants).extracting("name").contains("Something Android", "Something more Android");
    }

    @Test
    public void findVariantById() {
        final AndroidVariant av = new AndroidVariant();
        av.setGoogleKey("KEY");
        av.setDeveloper("admin");
        final String uuid = av.getVariantID();
        variantDao.create(av);

        assertThat(variantDao.findByVariantID(uuid)).isNotNull();
        assertThat(variantDao.findByVariantID(null)).isNull();
    }

    @Test
    public void updateVariant() {
        final AndroidVariant av = new AndroidVariant();
        av.setGoogleKey("KEY");
        av.setDeveloper("admin");
        final String uuid = av.getVariantID();
        variantDao.create(av);

        AndroidVariant queriedVariant = (AndroidVariant) variantDao.findByVariantID(uuid);
        assertThat(queriedVariant).isNotNull(); // verify before dereferencing
        final String primaryKey = queriedVariant.getId();
        assertThat(queriedVariant.getGoogleKey()).isEqualTo("KEY");

        queriedVariant.setGoogleKey("NEW_KEY");
        variantDao.update(queriedVariant);

        queriedVariant = (AndroidVariant) variantDao.findByVariantID(uuid);
        assertThat(queriedVariant).isNotNull();
        assertThat(queriedVariant.getGoogleKey()).isEqualTo("NEW_KEY");
        // the JPA primary key must survive the update
        assertThat(queriedVariant.getId()).isEqualTo(primaryKey);
    }

    @Test
    public void updateAndDeleteVariant() {
        final AndroidVariant av = new AndroidVariant();
        av.setGoogleKey("KEY");
        av.setDeveloper("admin");
        final String uuid = av.getVariantID();
        variantDao.create(av);

        AndroidVariant queriedVariant = (AndroidVariant) variantDao.findByVariantID(uuid);
        assertThat(queriedVariant).isNotNull(); // verify before dereferencing
        final String primaryKey = queriedVariant.getId();
        assertThat(queriedVariant.getGoogleKey()).isEqualTo("KEY");

        queriedVariant.setGoogleKey("NEW_KEY");
        variantDao.update(queriedVariant);

        queriedVariant = (AndroidVariant) variantDao.findByVariantID(uuid);
        assertThat(queriedVariant).isNotNull();
        assertThat(queriedVariant.getGoogleKey()).isEqualTo("NEW_KEY");
        assertThat(queriedVariant.getId()).isEqualTo(primaryKey);

        variantDao.delete(queriedVariant);
        assertThat(variantDao.findByVariantID(uuid)).isNull();
    }

    @Test
    public void lookupNonExistingVariant() {
        AndroidVariant variant = (AndroidVariant) variantDao.findByVariantIDForDeveloper("NOT-IN-DATABASE", "admin");
        assertThat(variant).isNull();
    }

    @Test
    public void variantIDUnmodifiedAfterUpdate() {
        final AndroidVariant av = new AndroidVariant();
        av.setGoogleKey("KEY");
        av.setDeveloper("admin");
        final String uuid = av.getVariantID();
        variantDao.create(av);

        AndroidVariant queriedVariant = (AndroidVariant) variantDao.findByVariantID(uuid);
        assertThat(queriedVariant).isNotNull(); // verify before dereferencing
        final String primaryKey = queriedVariant.getId();
        assertThat(queriedVariant.getVariantID()).isEqualTo(uuid);

        queriedVariant.setGoogleKey("NEW_KEY");
        variantDao.update(queriedVariant);

        queriedVariant = (AndroidVariant) variantDao.findByVariantID(uuid);
        assertThat(queriedVariant).isNotNull();
        // neither the public variantID nor the primary key may change on update
        assertThat(queriedVariant.getVariantID()).isEqualTo(uuid);
        assertThat(queriedVariant.getId()).isEqualTo(primaryKey);
    }

    @Test
    public void primaryKeyUnmodifiedAfterUpdate() {
        AndroidVariant av = new AndroidVariant();
        av.setGoogleKey("KEY");
        av.setDeveloper("admin");
        final String id = av.getId();
        variantDao.create(av);

        // flush to be sure that it's in the database
        entityManager.flush();
        // clear the cache otherwise finding the entity will not perform a select but get the entity from cache
        entityManager.clear();

        AndroidVariant variant = (AndroidVariant) variantDao.find(id);
        assertThat(variant.getId()).isEqualTo(id);

        av.setGoogleKey("NEW_KEY");
        variantDao.update(av);
        entityManager.flush();
        entityManager.clear();

        variant = (AndroidVariant) variantDao.find(id);
        assertThat(variant.getGoogleKey()).isEqualTo("NEW_KEY");
        assertThat(av.getId()).isEqualTo(id);
    }

    @Test
    public void deleteVariantIncludingInstallations() {
        final AndroidVariant av = new AndroidVariant();
        av.setGoogleKey("KEY");
        av.setDeveloper("admin");
        final String uuid = av.getVariantID();
        variantDao.create(av);

        AndroidVariant queriedVariant = (AndroidVariant) variantDao.findByVariantID(uuid);
        assertThat(queriedVariant).isNotNull();
        assertThat(queriedVariant.getGoogleKey()).isEqualTo("KEY");

        Installation androidInstallation1 = new Installation();
        androidInstallation1.setDeviceToken("1234543212232301234567890012345678900123456789001234567890012345678900123456789001234567890012345678");
        androidInstallation1.setCategories(new HashSet<String>(Arrays.asList("X", "Y")));
        installationDao.create(androidInstallation1);
        androidInstallation1.setVariant(queriedVariant);
        variantDao.update(queriedVariant);

        Installation storedInstallation = installationDao.find(androidInstallation1.getId());
        assertThat(storedInstallation.getId()).isEqualTo(androidInstallation1.getId());

        variantDao.delete(queriedVariant);
        entityManager.flush();
        entityManager.clear();

        assertThat(variantDao.findByVariantID(uuid)).isNull();
        // Installation should be gone...
        assertThat(installationDao.find(androidInstallation1.getId())).isNull();
    }

    @Test
    public void createDifferentVariantTypes() {
        AndroidVariant av = new AndroidVariant();
        av.setGoogleKey("KEY");
        av.setDeveloper("admin");
        final String androidId = av.getVariantID();
        variantDao.create(av);

        // flush to be sure that it's in the database
        entityManager.flush();
        // clear the cache otherwise finding the entity will not perform a select but get the entity from cache
        entityManager.clear();

        iOSVariant iOS = new iOSVariant();
        iOS.setCertificate("test".getBytes());
        iOS.setPassphrase("secret");
        final String iOSid = iOS.getVariantID();
        variantDao.create(iOS);

        entityManager.flush();
        entityManager.clear();

        // both variants must be retrievable and keep their concrete type
        // (the original test created them without asserting anything)
        assertThat(variantDao.findByVariantID(androidId)).isInstanceOf(AndroidVariant.class);
        assertThat(variantDao.findByVariantID(iOSid)).isInstanceOf(iOSVariant.class);
    }

    @Test
    public void shouldDetectThatVariantIdNotExists() {
        //given
        String nonExistentVariantId = "321-variantId";

        //when
        final boolean exists = variantDao.existsVariantIDForDeveloper(nonExistentVariantId, "admin");

        assertThat(exists).isEqualTo(false);
    }

    @Test
    public void shouldDetectThatVariantIdExists() {
        //given
        AndroidVariant av = new AndroidVariant();
        av.setGoogleKey("KEY");
        av.setDeveloper("admin");
        final String variantID = av.getVariantID();
        variantDao.create(av);

        //when
        final boolean exists = variantDao.existsVariantIDForDeveloper(variantID, "admin");

        assertThat(exists).isEqualTo(true);
    }
}
| |
package jadx.core.dex.visitors.regions;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import jadx.core.dex.attributes.AFlag;
import jadx.core.dex.attributes.AType;
import jadx.core.dex.attributes.nodes.EdgeInsnAttr;
import jadx.core.dex.attributes.nodes.LoopInfo;
import jadx.core.dex.attributes.nodes.LoopLabelAttr;
import jadx.core.dex.instructions.IfNode;
import jadx.core.dex.instructions.InsnType;
import jadx.core.dex.instructions.SwitchInsn;
import jadx.core.dex.instructions.args.InsnArg;
import jadx.core.dex.nodes.BlockNode;
import jadx.core.dex.nodes.Edge;
import jadx.core.dex.nodes.IBlock;
import jadx.core.dex.nodes.IContainer;
import jadx.core.dex.nodes.IRegion;
import jadx.core.dex.nodes.InsnContainer;
import jadx.core.dex.nodes.InsnNode;
import jadx.core.dex.nodes.MethodNode;
import jadx.core.dex.regions.Region;
import jadx.core.dex.regions.SwitchRegion;
import jadx.core.dex.regions.SynchronizedRegion;
import jadx.core.dex.regions.conditions.IfInfo;
import jadx.core.dex.regions.conditions.IfRegion;
import jadx.core.dex.regions.loops.LoopRegion;
import jadx.core.dex.trycatch.ExcHandlerAttr;
import jadx.core.dex.trycatch.ExceptionHandler;
import jadx.core.dex.trycatch.TryCatchBlockAttr;
import jadx.core.utils.BlockUtils;
import jadx.core.utils.RegionUtils;
import jadx.core.utils.Utils;
import jadx.core.utils.exceptions.JadxOverflowException;
import jadx.core.utils.exceptions.JadxRuntimeException;
import static jadx.core.dex.visitors.regions.IfMakerHelper.confirmMerge;
import static jadx.core.dex.visitors.regions.IfMakerHelper.makeIfInfo;
import static jadx.core.dex.visitors.regions.IfMakerHelper.mergeNestedIfNodes;
import static jadx.core.dex.visitors.regions.IfMakerHelper.searchNestedIf;
import static jadx.core.utils.BlockUtils.followEmptyPath;
import static jadx.core.utils.BlockUtils.getNextBlock;
import static jadx.core.utils.BlockUtils.isPathExists;
public class RegionMaker {
private static final Logger LOG = LoggerFactory.getLogger(RegionMaker.class);

// method whose basic blocks are being structured into regions
private final MethodNode mth;
// hard cap on created regions (blocks * 100); guards against runaway traversal
private final int regionsLimit;
// ids of blocks already used as a region start (duplicate-emission protection)
private final BitSet processedBlocks;
// running count of created regions, compared against regionsLimit
private int regionsCount;
public RegionMaker(MethodNode mth) {
    this.mth = mth;
    final int blocksCount = mth.getBasicBlocks().size();
    this.processedBlocks = new BitSet(blocksCount);
    // heuristic cap: allow at most 100 regions per basic block
    this.regionsLimit = blocksCount * 100;
}
/**
 * Build a region starting at {@code startBlock} by repeatedly traversing
 * blocks until an exit registered on {@code stack} (or the method end) is hit.
 *
 * @param startBlock first block of the region; may be null (empty region returned)
 * @param stack      region stack holding the current exits and parent regions
 * @return the built region, possibly empty, never null
 */
public Region makeRegion(BlockNode startBlock, RegionStack stack) {
    Region r = new Region(stack.peekRegion());
    if (startBlock == null) {
        return r;
    }
    if (stack.containsExit(startBlock)) {
        // start block is an exit of an enclosing region: only attach pending
        // edge instructions ('break'/'continue') and stop
        insertEdgeInsns(r, startBlock);
        return r;
    }
    int startBlockId = startBlock.getId();
    if (processedBlocks.get(startBlockId)) {
        // guard against emitting the same code twice
        mth.addWarn("Removed duplicated region for block: " + startBlock + ' ' + startBlock.getAttributesString());
        return r;
    }
    processedBlocks.set(startBlockId);
    BlockNode next = startBlock;
    while (next != null) {
        next = traverse(r, next, stack);
        regionsCount++;
        if (regionsCount > regionsLimit) {
            // safety limit aborts pathological/irreducible control flow
            throw new JadxOverflowException("Regions count limit reached");
        }
    }
    return r;
}
/**
 * Attach pending edge instructions of an exit block to the region:
 * at most one 'break' and one 'continue', in that order.
 */
private void insertEdgeInsns(Region region, BlockNode exitBlock) {
    List<EdgeInsnAttr> edgeInsns = exitBlock.getAll(AType.EDGE_INSN);
    if (edgeInsns.isEmpty()) {
        return;
    }
    List<InsnNode> container = new ArrayList<>(edgeInsns.size());
    addOneInsnOfType(container, edgeInsns, InsnType.BREAK);
    addOneInsnOfType(container, edgeInsns, InsnType.CONTINUE);
    region.add(new InsnContainer(container));
}
/**
 * Append only the first edge instruction of the requested type (if any).
 */
private void addOneInsnOfType(List<InsnNode> insns, List<EdgeInsnAttr> edgeInsns, InsnType insnType) {
    edgeInsns.stream()
        .map(EdgeInsnAttr::getInsn)
        .filter(insn -> insn.getType() == insnType)
        .findFirst()
        .ifPresent(insns::add);
}
/**
 * Recursively traverse all blocks from 'block' until block from 'exits'.
 *
 * @return the next block to continue traversal from, or null when this path ends
 */
private BlockNode traverse(IRegion r, BlockNode block, RegionStack stack) {
    if (block.contains(AFlag.MTH_EXIT_BLOCK)) {
        return null;
    }
    BlockNode next = null;
    boolean processed = false;
    // loops take priority over the last-instruction based handling below
    List<LoopInfo> loops = block.getAll(AType.LOOP);
    int loopCount = loops.size();
    if (loopCount != 0 && block.contains(AFlag.LOOP_START)) {
        if (loopCount == 1) {
            next = processLoop(r, loops.get(0), stack);
            processed = true;
        } else {
            // several loops share this block: process the one that starts here
            for (LoopInfo loop : loops) {
                if (loop.getStart() == block) {
                    next = processLoop(r, loop, stack);
                    processed = true;
                    break;
                }
            }
        }
    }
    InsnNode insn = BlockUtils.getLastInsn(block);
    if (!processed && insn != null) {
        // structured control flow is derived from the block's last instruction
        switch (insn.getType()) {
            case IF:
                next = processIf(r, block, (IfNode) insn, stack);
                processed = true;
                break;
            case SWITCH:
                next = processSwitch(r, block, (SwitchInsn) insn, stack);
                processed = true;
                break;
            case MONITOR_ENTER:
                next = processMonitorEnter(r, block, insn, stack);
                processed = true;
                break;
            default:
                break;
        }
    }
    if (!processed) {
        // plain block: append to the region and continue linearly
        r.getSubBlocks().add(block);
        next = getNextBlock(block);
    }
    if (next != null && !stack.containsExit(block) && !stack.containsExit(next)) {
        return next;
    }
    return null;
}
/**
 * Convert one loop into a {@link LoopRegion} (while / do-while), or into an
 * endless loop via {@link #makeEndlessLoop} when no usable condition exists.
 *
 * @return the block after the loop from which traversal should continue, or null
 */
private BlockNode processLoop(IRegion curRegion, LoopInfo loop, RegionStack stack) {
    BlockNode loopStart = loop.getStart();
    Set<BlockNode> exitBlocksSet = loop.getExitNodes();
    // set exit blocks scan order priority
    // this can help if loop have several exits (after using 'break' or 'return' in loop)
    List<BlockNode> exitBlocks = new ArrayList<>(exitBlocksSet.size());
    BlockNode nextStart = getNextBlock(loopStart);
    if (nextStart != null && exitBlocksSet.remove(nextStart)) {
        exitBlocks.add(nextStart);
    }
    if (exitBlocksSet.remove(loopStart)) {
        exitBlocks.add(loopStart);
    }
    if (exitBlocksSet.remove(loop.getEnd())) {
        exitBlocks.add(loop.getEnd());
    }
    exitBlocks.addAll(exitBlocksSet);
    LoopRegion loopRegion = makeLoopRegion(curRegion, loop, exitBlocks);
    if (loopRegion == null) {
        // no suitable condition block found => endless loop
        BlockNode exit = makeEndlessLoop(curRegion, stack, loop, loopStart);
        insertContinue(loop);
        return exit;
    }
    curRegion.getSubBlocks().add(loopRegion);
    IRegion outerRegion = stack.peekRegion();
    stack.push(loopRegion);
    IfInfo condInfo = makeIfInfo(mth, loopRegion.getHeader());
    condInfo = searchNestedIf(condInfo);
    confirmMerge(condInfo);
    if (!loop.getLoopBlocks().contains(condInfo.getThenBlock())) {
        // invert loop condition if 'then' points to exit
        condInfo = IfInfo.invert(condInfo);
    }
    loopRegion.updateCondition(condInfo);
    exitBlocks.removeAll(condInfo.getMergedBlocks());
    if (!exitBlocks.isEmpty()) {
        BlockNode loopExit = condInfo.getElseBlock();
        if (loopExit != null) {
            // add 'break' instruction before path cross between main loop exit and sub-exit
            for (Edge exitEdge : loop.getExitEdges()) {
                if (exitBlocks.contains(exitEdge.getSource())) {
                    insertLoopBreak(stack, loop, loopExit, exitEdge);
                }
            }
        }
    }
    BlockNode out;
    if (loopRegion.isConditionAtEnd()) {
        // do-while: the body is everything from the loop start to the end condition
        BlockNode thenBlock = condInfo.getThenBlock();
        out = thenBlock == loopStart ? condInfo.getElseBlock() : thenBlock;
        // temporarily detach loop attributes so the body can be built as a plain region
        loopStart.remove(AType.LOOP);
        loop.getEnd().add(AFlag.ADDED_TO_REGION);
        stack.addExit(loop.getEnd());
        processedBlocks.clear(loopStart.getId());
        Region body = makeRegion(loopStart, stack);
        loopRegion.setBody(body);
        // restore the attributes removed above
        loopStart.addAttr(AType.LOOP, loop);
        loop.getEnd().remove(AFlag.ADDED_TO_REGION);
    } else {
        // while: body follows the 'then' branch of the condition
        out = condInfo.getElseBlock();
        if (outerRegion != null
                && out.contains(AFlag.LOOP_START)
                && !out.getAll(AType.LOOP).contains(loop)
                && RegionUtils.isRegionContainsBlock(outerRegion, out)) {
            // exit to already processed outer loop
            out = null;
        }
        stack.addExit(out);
        BlockNode loopBody = condInfo.getThenBlock();
        Region body;
        if (Objects.equals(loopBody, loopStart)) {
            // empty loop body
            body = new Region(loopRegion);
        } else {
            body = makeRegion(loopBody, stack);
        }
        // add blocks from loop start to first condition block
        BlockNode conditionBlock = condInfo.getFirstIfBlock();
        if (loopStart != conditionBlock) {
            Set<BlockNode> blocks = BlockUtils.getAllPathsBlocks(loopStart, conditionBlock);
            blocks.remove(conditionBlock);
            for (BlockNode block : blocks) {
                if (block.getInstructions().isEmpty()
                        && !block.contains(AFlag.ADDED_TO_REGION)
                        && !RegionUtils.isRegionContainsBlock(body, block)) {
                    body.add(block);
                }
            }
        }
        loopRegion.setBody(body);
    }
    stack.pop();
    insertContinue(loop);
    return out;
}
/**
 * Select loop exit and construct LoopRegion.
 *
 * <p>Scans the (priority-ordered) exit blocks for one ending in an IF that can
 * serve as the loop condition; returns null when none qualifies (endless loop).
 */
private LoopRegion makeLoopRegion(IRegion curRegion, LoopInfo loop, List<BlockNode> exitBlocks) {
    for (BlockNode block : exitBlocks) {
        if (block.contains(AType.EXC_HANDLER)) {
            continue;
        }
        // candidate condition block must end with an IF instruction
        InsnNode lastInsn = BlockUtils.getLastInsn(block);
        if (lastInsn == null || lastInsn.getType() != InsnType.IF) {
            continue;
        }
        List<LoopInfo> loops = block.getAll(AType.LOOP);
        if (!loops.isEmpty() && loops.get(0) != loop) {
            // skip nested loop condition
            continue;
        }
        LoopRegion loopRegion = new LoopRegion(curRegion, loop, block, block == loop.getEnd());
        boolean found;
        if (block == loop.getStart() || block == loop.getEnd()
                || BlockUtils.isEmptySimplePath(loop.getStart(), block)) {
            found = true;
        } else if (block.getPredecessors().contains(loop.getStart())) {
            loopRegion.setPreCondition(loop.getStart());
            // if we can't merge pre-condition this is not correct header
            found = loopRegion.checkPreCondition();
        } else {
            found = false;
        }
        if (found) {
            List<LoopInfo> list = mth.getAllLoopsForBlock(block);
            if (list.size() >= 2) {
                // bad condition if successors going out of all loops
                boolean allOuter = true;
                for (BlockNode outerBlock : block.getCleanSuccessors()) {
                    List<LoopInfo> outLoopList = mth.getAllLoopsForBlock(outerBlock);
                    outLoopList.remove(loop);
                    if (!outLoopList.isEmpty()) {
                        // goes to outer loop
                        allOuter = false;
                        break;
                    }
                }
                if (allOuter) {
                    found = false;
                }
            }
        }
        if (found && !checkLoopExits(loop, block)) {
            found = false;
        }
        if (found) {
            return loopRegion;
        }
    }
    // no exit found => endless loop
    return null;
}
/**
 * Verify that secondary loop exits don't conflict with the chosen main exit:
 * every other exit path must either equal the main exit path or not cross it
 * (so it stays representable inside the loop).
 */
private boolean checkLoopExits(LoopInfo loop, BlockNode mainExitBlock) {
    List<Edge> exitEdges = loop.getExitEdges();
    if (exitEdges.size() < 2) {
        return true;
    }
    Optional<Edge> mainEdgeOpt = exitEdges.stream().filter(edge -> edge.getSource() == mainExitBlock).findFirst();
    if (!mainEdgeOpt.isPresent()) {
        throw new JadxRuntimeException("Not found exit edge by exit block: " + mainExitBlock);
    }
    Edge mainExitEdge = mainEdgeOpt.get();
    BlockNode mainOutBlock = mainExitEdge.getTarget();
    for (Edge exitEdge : exitEdges) {
        if (exitEdge != mainExitEdge) {
            // all exit paths must be same or don't cross (will be inside loop)
            BlockNode exitBlock = exitEdge.getTarget();
            if (!isEqualPaths(mainOutBlock, exitBlock)) {
                BlockNode crossBlock = BlockUtils.getPathCross(mth, mainOutBlock, exitBlock);
                if (crossBlock != null) {
                    return false;
                }
            }
        }
    }
    return true;
}
/**
 * Build a region for a loop without a usable exit condition ('while (true)').
 * Inserts 'break' edge instructions for the exit edges.
 *
 * @return the block after the loop, or null if none could be determined
 */
private BlockNode makeEndlessLoop(IRegion curRegion, RegionStack stack, LoopInfo loop, BlockNode loopStart) {
    LoopRegion loopRegion = new LoopRegion(curRegion, loop, null, false);
    curRegion.getSubBlocks().add(loopRegion);
    // temporarily detach loop attribute so the body can be traversed as a plain region
    loopStart.remove(AType.LOOP);
    processedBlocks.clear(loopStart.getId());
    stack.push(loopRegion);
    BlockNode out = null;
    // insert 'break' for exits
    List<Edge> exitEdges = loop.getExitEdges();
    if (exitEdges.size() == 1) {
        Edge exitEdge = exitEdges.get(0);
        BlockNode exit = exitEdge.getTarget();
        if (insertLoopBreak(stack, loop, exit, exitEdge)) {
            BlockNode nextBlock = getNextBlock(exit);
            if (nextBlock != null) {
                stack.addExit(nextBlock);
                out = nextBlock;
            }
        }
    } else {
        for (Edge exitEdge : exitEdges) {
            BlockNode exit = exitEdge.getTarget();
            // use the dominance frontier to find where the exit paths converge
            List<BlockNode> blocks = BlockUtils.bitSetToBlocks(mth, exit.getDomFrontier());
            for (BlockNode block : blocks) {
                if (BlockUtils.isPathExists(exit, block)) {
                    stack.addExit(block);
                    insertLoopBreak(stack, loop, block, exitEdge);
                    out = block;
                } else {
                    insertLoopBreak(stack, loop, exit, exitEdge);
                }
            }
        }
    }
    Region body = makeRegion(loopStart, stack);
    BlockNode loopEnd = loop.getEnd();
    // pull the loop end into the body unless it belongs to exception handling
    if (!RegionUtils.isRegionContainsBlock(body, loopEnd)
            && !loopEnd.contains(AType.EXC_HANDLER)
            && !inExceptionHandlerBlocks(loopEnd)) {
        body.getSubBlocks().add(loopEnd);
    }
    loopRegion.setBody(body);
    if (out == null) {
        BlockNode next = getNextBlock(loopEnd);
        out = RegionUtils.isRegionContainsBlock(body, next) ? null : next;
    }
    stack.pop();
    // restore the loop attribute removed above
    loopStart.addAttr(AType.LOOP, loop);
    return out;
}
/**
 * Check whether the given block belongs to any exception handler of the method.
 */
private boolean inExceptionHandlerBlocks(BlockNode loopEnd) {
    if (mth.getExceptionHandlersCount() != 0) {
        for (ExceptionHandler handler : mth.getExceptionHandlers()) {
            if (handler.getBlocks().contains(loopEnd)) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Decide whether a 'break' instruction may be placed after the given block.
 */
private boolean canInsertBreak(BlockNode exit) {
    if (BlockUtils.containsExitInsn(exit)) {
        return false;
    }
    // a simple path ending in 'return' (or a dead end) doesn't need a 'break'
    List<BlockNode> path = BlockUtils.buildSimplePath(exit);
    if (!path.isEmpty()) {
        BlockNode tail = path.get(path.size() - 1);
        if (tail.contains(AFlag.RETURN) || tail.getSuccessors().isEmpty()) {
            return false;
        }
    }
    // check if there no outer switch (TODO: very expensive check)
    for (BlockNode block : BlockUtils.getAllPathsBlocks(mth.getEnterBlock(), exit)) {
        if (BlockUtils.checkLastInsnType(block, InsnType.SWITCH)) {
            return false;
        }
    }
    return true;
}
/**
 * Attach a 'break' edge instruction for the given loop exit edge.
 *
 * @return true if the 'break' was inserted
 */
private boolean insertLoopBreak(RegionStack stack, LoopInfo loop, BlockNode loopExit, Edge exitEdge) {
    BlockNode exit = exitEdge.getTarget();
    Edge insertEdge = null;
    boolean confirm = false;
    // process special cases:
    // 1. jump to outer loop
    BlockNode exitEnd = BlockUtils.followEmptyPath(exit);
    List<LoopInfo> loops = exitEnd.getAll(AType.LOOP);
    for (LoopInfo loopAtEnd : loops) {
        if (loopAtEnd != loop) {
            insertEdge = exitEdge;
            confirm = true;
            break;
        }
    }
    if (!confirm) {
        // 2. walk the single-successor chain from the exit until it crosses the
        // main loop exit path; insert the 'break' just before that cross point
        BlockNode insertBlock = null;
        while (exit != null) {
            if (insertBlock != null && isPathExists(loopExit, exit)) {
                // found cross
                if (canInsertBreak(insertBlock)) {
                    insertEdge = new Edge(insertBlock, insertBlock.getSuccessors().get(0));
                    confirm = true;
                    break;
                }
                return false;
            }
            insertBlock = exit;
            List<BlockNode> cs = exit.getCleanSuccessors();
            exit = cs.size() == 1 ? cs.get(0) : null;
        }
    }
    if (!confirm) {
        return false;
    }
    InsnNode breakInsn = new InsnNode(InsnType.BREAK, 0);
    breakInsn.addAttr(AType.LOOP, loop);
    EdgeInsnAttr.addEdgeInsn(insertEdge, breakInsn);
    stack.addExit(exit);
    // add label to 'break' if needed
    addBreakLabel(exitEdge, exit, breakInsn);
    return true;
}
/**
 * Attach a loop label to a 'break' instruction when it leaves more than the
 * innermost loop of a nested-loop structure.
 */
private void addBreakLabel(Edge exitEdge, BlockNode exit, InsnNode breakInsn) {
    BlockNode outBlock = BlockUtils.getNextBlock(exitEdge.getTarget());
    if (outBlock == null) {
        return;
    }
    // the target must be outside of every loop
    if (!mth.getAllLoopsForBlock(outBlock).isEmpty()) {
        return;
    }
    List<LoopInfo> inLoops = mth.getAllLoopsForBlock(exitEdge.getSource());
    if (inLoops.size() < 2) {
        // a plain 'break' is unambiguous outside nested loops
        return;
    }
    // search for parent loop (the one without a parent of its own)
    LoopInfo parentLoop = null;
    for (LoopInfo candidate : inLoops) {
        if (candidate.getParentLoop() == null) {
            parentLoop = candidate;
            break;
        }
    }
    if (parentLoop == null) {
        return;
    }
    if (parentLoop.getEnd() != exit && !parentLoop.getExitNodes().contains(exit)) {
        // 'break' jumps past the inner loop: label it against the outermost loop
        LoopLabelAttr labelAttr = new LoopLabelAttr(parentLoop);
        breakInsn.addAttr(labelAttr);
        parentLoop.getStart().addAttr(labelAttr);
    }
}
/**
 * Insert explicit 'continue' instructions into suitable predecessors of the
 * loop end block (only needed when the end has several predecessors).
 */
private static void insertContinue(LoopInfo loop) {
    BlockNode endBlock = loop.getEnd();
    List<BlockNode> preds = endBlock.getPredecessors();
    if (preds.size() <= 1) {
        return;
    }
    Set<BlockNode> exitNodes = loop.getExitNodes();
    for (BlockNode pred : preds) {
        if (canInsertContinue(pred, preds, endBlock, exitNodes)) {
            pred.getInstructions().add(new InsnNode(InsnType.CONTINUE, 0));
        }
    }
}
/**
 * Decide whether an explicit 'continue' should be placed into {@code pred},
 * a predecessor of the loop end block.
 */
private static boolean canInsertContinue(BlockNode pred, List<BlockNode> predecessors, BlockNode loopEnd,
        Set<BlockNode> loopExitNodes) {
    // only synthetic blocks without an existing 'continue' are candidates
    if (!pred.contains(AFlag.SYNTHETIC)
            || BlockUtils.checkLastInsnType(pred, InsnType.CONTINUE)) {
        return false;
    }
    List<BlockNode> predList = pred.getPredecessors();
    if (predList.isEmpty()) {
        return false;
    }
    BlockNode firstPred = predList.get(0);
    if (firstPred.contains(AFlag.ADDED_TO_REGION)
            || loopEnd.isDominator(firstPred)
            || loopExitNodes.contains(firstPred)
            || isDominatedOnBlocks(firstPred, predecessors)) {
        return false;
    }
    // insert 'continue' only when some loop exit is still reachable from here
    for (BlockNode exit : loopExitNodes) {
        if (BlockUtils.isPathExists(firstPred, exit)) {
            return true;
        }
    }
    return false;
}
/**
 * True when {@code dom} dominates every block in {@code blocks}.
 */
private static boolean isDominatedOnBlocks(BlockNode dom, List<BlockNode> blocks) {
    return blocks.stream().allMatch(node -> node.isDominator(dom));
}
/**
 * Build a {@link SynchronizedRegion} starting at a MONITOR_ENTER instruction.
 * Collects the matching MONITOR_EXIT blocks, hides them from code generation
 * and wraps the body blocks into the synchronized region.
 *
 * @return the block after the synchronized region, or null
 */
private BlockNode processMonitorEnter(IRegion curRegion, BlockNode block, InsnNode insn, RegionStack stack) {
    SynchronizedRegion synchRegion = new SynchronizedRegion(curRegion, insn);
    synchRegion.getSubBlocks().add(block);
    curRegion.getSubBlocks().add(synchRegion);
    Set<BlockNode> exits = new LinkedHashSet<>();
    Set<BlockNode> cacheSet = new HashSet<>();
    traverseMonitorExits(synchRegion, insn.getArg(0), block, exits, cacheSet);
    for (InsnNode exitInsn : synchRegion.getExitInsns()) {
        BlockNode insnBlock = BlockUtils.getBlockByInsn(mth, exitInsn);
        if (insnBlock != null) {
            insnBlock.add(AFlag.DONT_GENERATE);
        }
        // remove arg from MONITOR_EXIT to allow inline in MONITOR_ENTER
        exitInsn.removeArg(0);
        exitInsn.add(AFlag.DONT_GENERATE);
    }
    BlockNode body = getNextBlock(block);
    if (body == null) {
        mth.addWarn("Unexpected end of synchronized block");
        return null;
    }
    BlockNode exit = null;
    if (exits.size() == 1) {
        // single exit: the region ends right after it
        exit = getNextBlock(exits.iterator().next());
    } else if (exits.size() > 1) {
        // multiple exits: find the block where all exit paths cross
        cacheSet.clear();
        exit = traverseMonitorExitsCross(body, exits, cacheSet);
    }
    stack.push(synchRegion);
    if (exit != null) {
        stack.addExit(exit);
    } else {
        for (BlockNode exitBlock : exits) {
            // don't add exit blocks which leads to method end blocks ('return', 'throw', etc)
            List<BlockNode> list = BlockUtils.buildSimplePath(exitBlock);
            if (list.isEmpty() || !BlockUtils.isExitBlock(mth, Utils.last(list))) {
                stack.addExit(exitBlock);
                // we can still try using this as an exit block to make sure it's visited.
                exit = exitBlock;
            }
        }
    }
    synchRegion.getSubBlocks().add(makeRegion(body, stack));
    stack.pop();
    return exit;
}
/**
 * Recursively walks successors starting from a monitor-enter block and records
 * every block containing a MONITOR_EXIT instruction for the same argument.
 * The search stops along a path as soon as a matching exit is found.
 */
private static void traverseMonitorExits(SynchronizedRegion region, InsnArg arg, BlockNode block, Set<BlockNode> exits,
        Set<BlockNode> visited) {
    visited.add(block);
    // a matching monitor-exit in this block ends the search on this path
    for (InsnNode candidate : block.getInstructions()) {
        boolean matchingExit = candidate.getType() == InsnType.MONITOR_EXIT
                && candidate.getArgsCount() > 0
                && candidate.getArg(0).equals(arg);
        if (matchingExit) {
            exits.add(block);
            region.getExitInsns().add(candidate);
            return;
        }
    }
    // otherwise descend into successors not seen yet
    for (BlockNode successor : block.getSuccessors()) {
        if (!visited.contains(successor)) {
            traverseMonitorExits(region, arg, successor, exits, visited);
        }
    }
}
/**
 * Walks successors from the synchronized body and returns the first block that is
 * reachable from every monitor-exit block, i.e. the common point where all exit
 * paths cross.
 *
 * @return the common successor block or null if none was found
 */
private static BlockNode traverseMonitorExitsCross(BlockNode block, Set<BlockNode> exits, Set<BlockNode> visited) {
    visited.add(block);
    for (BlockNode candidate : block.getCleanSuccessors()) {
        // candidate qualifies if every exit block has a path to it
        boolean reachableFromAllExits = exits.stream()
                .allMatch(exitBlock -> isPathExists(exitBlock, candidate));
        if (reachableFromAllExits) {
            return candidate;
        }
        if (!visited.contains(candidate)) {
            BlockNode found = traverseMonitorExitsCross(candidate, exits, visited);
            if (found != null) {
                return found;
            }
        }
    }
    return null;
}
/**
 * Builds an {@link IfRegion} for a conditional block: merges nested 'if' nodes into
 * one compound condition where possible, restructures then/else branches and attaches
 * edge instructions into a synthesized 'else' branch.
 *
 * @return 'out' block following the 'if' region, or null if the condition could not be processed
 */
private BlockNode processIf(IRegion currentRegion, BlockNode block, IfNode ifnode, RegionStack stack) {
    if (block.contains(AFlag.ADDED_TO_REGION)) {
        // block already included in other 'if' region
        return ifnode.getThenBlock();
    }
    IfInfo currentIf = makeIfInfo(mth, block);
    if (currentIf == null) {
        return null;
    }
    // try to merge nested 'if' nodes into one compound condition
    IfInfo mergedIf = mergeNestedIfNodes(currentIf);
    if (mergedIf != null) {
        currentIf = mergedIf;
    } else {
        // invert simple condition (compiler often do it)
        currentIf = IfInfo.invert(currentIf);
    }
    IfInfo modifiedIf = IfMakerHelper.restructureIf(mth, block, currentIf);
    if (modifiedIf != null) {
        currentIf = modifiedIf;
    } else {
        if (currentIf.getMergedBlocks().size() <= 1) {
            return null;
        }
        // retry restructuring with a fresh, unmerged condition
        currentIf = makeIfInfo(mth, block);
        currentIf = IfMakerHelper.restructureIf(mth, block, currentIf);
        if (currentIf == null) {
            // all attempts failed
            return null;
        }
    }
    confirmMerge(currentIf);
    IfRegion ifRegion = new IfRegion(currentRegion);
    ifRegion.updateCondition(currentIf);
    currentRegion.getSubBlocks().add(ifRegion);
    BlockNode outBlock = currentIf.getOutBlock();
    stack.push(ifRegion);
    stack.addExit(outBlock);
    ifRegion.setThenRegion(makeRegion(currentIf.getThenBlock(), stack));
    BlockNode elseBlock = currentIf.getElseBlock();
    if (elseBlock == null || stack.containsExit(elseBlock)) {
        // 'else' branch is absent or jumps straight to an exit
        ifRegion.setElseRegion(null);
    } else {
        ifRegion.setElseRegion(makeRegion(elseBlock, stack));
    }
    // insert edge insns in new 'else' branch
    // TODO: make more common algorithm
    if (ifRegion.getElseRegion() == null && outBlock != null) {
        List<EdgeInsnAttr> edgeInsnAttrs = outBlock.getAll(AType.EDGE_INSN);
        if (!edgeInsnAttrs.isEmpty()) {
            Region elseRegion = new Region(ifRegion);
            for (EdgeInsnAttr edgeInsnAttr : edgeInsnAttrs) {
                if (edgeInsnAttr.getEnd().equals(outBlock)) {
                    addEdgeInsn(currentIf, elseRegion, edgeInsnAttr);
                }
            }
            ifRegion.setElseRegion(elseRegion);
        }
    }
    stack.pop();
    return outBlock;
}
/**
 * Adds the start block of an edge instruction to {@code region}, but only when that
 * block is a direct successor of one of the merged 'if' blocks (i.e. the edge
 * originates from this 'if').
 */
private void addEdgeInsn(IfInfo ifInfo, Region region, EdgeInsnAttr edgeInsnAttr) {
    BlockNode start = edgeInsnAttr.getStart();
    boolean fromThisIf = ifInfo.getMergedBlocks().stream()
            .anyMatch(mergedBlock -> mergedBlock.getSuccessors().contains(start));
    if (fromThisIf) {
        region.add(start);
    }
}
/**
 * Builds a {@link SwitchRegion} for a switch instruction: maps case blocks to keys,
 * finds the common 'out' block after the switch, detects fall-through cases (and
 * reorders them if needed) and builds a region for every case.
 *
 * @return block following the switch region, or null when there is no 'out' block
 * @throws JadxRuntimeException if the found 'out' block was already processed
 */
private BlockNode processSwitch(IRegion currentRegion, BlockNode block, SwitchInsn insn, RegionStack stack) {
    // map case blocks to keys
    int len = insn.getTargets().length;
    Map<BlockNode, List<Object>> blocksMap = new LinkedHashMap<>(len);
    BlockNode[] targetBlocksArr = insn.getTargetBlocks();
    for (int i = 0; i < len; i++) {
        List<Object> keys = blocksMap.computeIfAbsent(targetBlocksArr[i], k -> new ArrayList<>(2));
        keys.add(insn.getKey(i));
    }
    BlockNode defCase = insn.getDefTargetBlock();
    if (defCase != null) {
        List<Object> keys = blocksMap.computeIfAbsent(defCase, k -> new ArrayList<>(1));
        keys.add(SwitchRegion.DEFAULT_CASE_KEY);
    }
    // search 'out' block - 'next' block after whole switch statement
    BlockNode out;
    LoopInfo loop = mth.getLoopForBlock(block);
    if (loop == null) {
        out = calcPostDomOut(mth, block, mth.getPreExitBlocks());
    } else {
        // switch inside a loop: loop boundaries restrict the possible 'out' block
        BlockNode loopEnd = loop.getEnd();
        stack.addExit(loop.getStart());
        if (stack.containsExit(block)
                || block == loopEnd
                || loopEnd.getPredecessors().contains(block)) {
            // in exits or last insn in loop => no 'out' block
            out = null;
        } else {
            // treat 'continue' as exit
            out = calcPostDomOut(mth, block, loopEnd.getPredecessors());
            if (out != null) {
                insertContinueInSwitch(block, out, loopEnd);
            } else {
                // no 'continue'
                out = calcPostDomOut(mth, block, Collections.singletonList(loopEnd));
            }
        }
        if (out == loop.getStart()) {
            // no other outs instead back edge to loop start
            out = null;
        }
    }
    if (out != null && processedBlocks.get(out.getId())) {
        // out block already processed, prevent endless loop
        throw new JadxRuntimeException("Failed to find switch 'out' block");
    }
    SwitchRegion sw = new SwitchRegion(currentRegion, block);
    currentRegion.getSubBlocks().add(sw);
    stack.push(sw);
    stack.addExit(out);
    // detect fallthrough cases
    Map<BlockNode, BlockNode> fallThroughCases = new LinkedHashMap<>();
    if (out != null) {
        BitSet caseBlocks = BlockUtils.blocksToBitSet(mth, blocksMap.keySet());
        caseBlocks.clear(out.getId());
        for (BlockNode successor : block.getCleanSuccessors()) {
            BlockNode fallThroughBlock = searchFallThroughCase(successor, out, caseBlocks);
            if (fallThroughBlock != null) {
                fallThroughCases.put(successor, fallThroughBlock);
            }
        }
        // check fallthrough cases order
        if (!fallThroughCases.isEmpty() && isBadCasesOrder(blocksMap, fallThroughCases)) {
            Map<BlockNode, List<Object>> newBlocksMap = reOrderSwitchCases(blocksMap, fallThroughCases);
            if (isBadCasesOrder(newBlocksMap, fallThroughCases)) {
                mth.addWarnComment("Can't fix incorrect switch cases order, some code will duplicate");
                fallThroughCases.clear();
            } else {
                blocksMap = newBlocksMap;
            }
        }
    }
    // build a region for every case
    for (Entry<BlockNode, List<Object>> entry : blocksMap.entrySet()) {
        List<Object> keysList = entry.getValue();
        BlockNode caseBlock = entry.getKey();
        if (stack.containsExit(caseBlock)) {
            // case block is already a region exit: add an empty case region
            sw.addCase(keysList, new Region(stack.peekRegion()));
        } else {
            BlockNode next = fallThroughCases.get(caseBlock);
            stack.addExit(next);
            Region caseRegion = makeRegion(caseBlock, stack);
            stack.removeExit(next);
            if (next != null) {
                next.add(AFlag.FALL_THROUGH);
                caseRegion.add(AFlag.FALL_THROUGH);
            }
            sw.addCase(keysList, caseRegion);
            // 'break' instruction will be inserted in RegionMakerVisitor.PostRegionVisitor
        }
    }
    removeEmptyCases(insn, sw, defCase);
    stack.pop();
    return out;
}
/**
 * Searches the next case block reached by fall-through from {@code successor}:
 * first via the dominance frontier, then via partial post-dominance over the
 * paths to the switch 'out' block.
 *
 * @return the fall-through target case block or null if there is none
 */
@Nullable
private BlockNode searchFallThroughCase(BlockNode successor, BlockNode out, BitSet caseBlocks) {
    // fast check: dominance frontier of the case entry hits another case block
    BitSet domFrontier = successor.getDomFrontier();
    if (domFrontier.intersects(caseBlocks)) {
        return getOneIntersectionBlock(out, caseBlocks, domFrontier);
    }
    // slow check: partial post-dominance restricted to blocks between case entry and 'out'
    Set<BlockNode> pathBlocks = BlockUtils.getAllPathsBlocks(successor, out);
    Map<BlockNode, BitSet> postDomMap = BlockUtils.calcPartialPostDominance(mth, pathBlocks, out);
    BitSet postDoms = postDomMap.get(successor);
    if (postDoms == null || !postDoms.intersects(caseBlocks)) {
        return null;
    }
    return getOneIntersectionBlock(out, caseBlocks, postDoms);
}
/**
 * Returns the single case block present in {@code fallThroughSet} (ignoring the
 * switch 'out' block), or null if there is not exactly one such block.
 */
@Nullable
private BlockNode getOneIntersectionBlock(BlockNode out, BitSet caseBlocks, BitSet fallThroughSet) {
    BitSet intersection = BlockUtils.copyBlocksBitSet(mth, fallThroughSet);
    intersection.and(caseBlocks);
    // the 'out' block never counts as a fall-through target
    intersection.clear(out.getId());
    return BlockUtils.bitSetToOneBlock(mth, intersection);
}
/**
 * Calculates the single 'out' block following a switch, given the possible exit
 * blocks: tries the immediate post-dominator shortcut, then a dominance-frontier
 * union, and finally partial post-dominance per exit node.
 *
 * @return the unique 'out' block or null if there is not exactly one candidate
 */
@Nullable
private static BlockNode calcPostDomOut(MethodNode mth, BlockNode block, List<BlockNode> exits) {
    if (exits.size() == 1 && mth.getExitBlock().equals(exits.get(0))) {
        // simple case: for only one exit which is equal to method exit block
        return BlockUtils.calcImmediatePostDominator(mth, block);
    }
    // fast search: union of blocks dominance frontier
    // work if no fallthrough cases and no returns inside switch
    BitSet outs = BlockUtils.copyBlocksBitSet(mth, block.getDomFrontier());
    for (BlockNode s : block.getCleanSuccessors()) {
        outs.or(s.getDomFrontier());
    }
    outs.clear(block.getId());
    if (outs.cardinality() != 1) {
        // slow search: calculate partial post-dominance for every exit node
        BitSet ipdoms = BlockUtils.newBlocksBitSet(mth);
        for (BlockNode exitBlock : exits) {
            if (BlockUtils.isAnyPathExists(block, exitBlock)) {
                Set<BlockNode> pathBlocks = BlockUtils.getAllPathsBlocks(block, exitBlock);
                BlockNode ipdom = BlockUtils.calcPartialImmediatePostDominator(mth, block, pathBlocks, exitBlock);
                if (ipdom != null) {
                    ipdoms.set(ipdom.getId());
                }
            }
        }
        // keep only candidates confirmed by post-dominance
        outs.and(ipdoms);
    }
    // exactly one remaining bit -> the 'out' block, otherwise null
    return BlockUtils.bitSetToOneBlock(mth, outs);
}
/**
 * Remove empty case blocks:
 * 1. single 'default' case
 * 2. filler cases if switch is 'packed' and 'default' case is empty
 */
private void removeEmptyCases(SwitchInsn insn, SwitchRegion sw, BlockNode defCase) {
    // 'default' counts as empty if it is absent or its region contains no code
    boolean defaultCaseIsEmpty = defCase == null
            || sw.getCases().stream()
                    .anyMatch(caseInfo -> caseInfo.getKeys().contains(SwitchRegion.DEFAULT_CASE_KEY)
                            && RegionUtils.isEmpty(caseInfo.getContainer()));
    if (!defaultCaseIsEmpty) {
        return;
    }
    // drop the empty 'default' case and, for packed switches, empty filler cases too
    sw.getCases().removeIf(caseInfo -> RegionUtils.isEmpty(caseInfo.getContainer())
            && (caseInfo.getKeys().contains(SwitchRegion.DEFAULT_CASE_KEY) || insn.isPacked()));
}
/**
 * Checks whether any fall-through target does not directly follow its source case
 * in the current map order (a dangling fall-through on the last case also counts).
 */
private boolean isBadCasesOrder(Map<BlockNode, List<Object>> blocksMap, Map<BlockNode, BlockNode> fallThroughCases) {
    BlockNode expectedNext = null;
    for (BlockNode caseBlock : blocksMap.keySet()) {
        if (expectedNext != null && !expectedNext.equals(caseBlock)) {
            return true;
        }
        expectedNext = fallThroughCases.get(caseBlock);
    }
    return expectedNext != null;
}
/**
 * Sorts switch cases so every fall-through case is immediately followed by its
 * target case, then rebuilds the keys map in the new order.
 */
private Map<BlockNode, List<Object>> reOrderSwitchCases(Map<BlockNode, List<Object>> blocksMap,
        Map<BlockNode, BlockNode> fallThroughCases) {
    List<BlockNode> ordered = new ArrayList<>(blocksMap.keySet());
    ordered.sort((first, second) -> {
        BlockNode firstTarget = fallThroughCases.get(first);
        if (firstTarget != null) {
            if (second.equals(firstTarget)) {
                // fall-through source must come right before its target
                return -1;
            }
        } else if (first.equals(fallThroughCases.get(second))) {
            return 1;
        }
        return 0;
    });
    Map<BlockNode, List<Object>> reordered = new LinkedHashMap<>(blocksMap.size());
    for (BlockNode caseBlock : ordered) {
        reordered.put(caseBlock, blocksMap.get(caseBlock));
    }
    return reordered;
}
/**
 * Inserts an explicit CONTINUE instruction into synthetic predecessors of the loop
 * end that are reachable from switch case entries.
 */
private void insertContinueInSwitch(BlockNode block, BlockNode out, BlockNode end) {
    int loopEndId = end.getId();
    for (BlockNode caseStart : block.getCleanSuccessors()) {
        if (caseStart == out || !caseStart.getDomFrontier().get(loopEndId)) {
            continue;
        }
        // search predecessor of loop end on path from this successor
        List<BlockNode> dominated = BlockUtils.collectBlocksDominatedBy(mth, caseStart, caseStart);
        for (BlockNode endPred : end.getPredecessors()) {
            if (dominated.contains(endPred)) {
                if (endPred.isSynthetic()) {
                    endPred.getInstructions().add(new InsnNode(InsnType.CONTINUE, 0));
                }
                break;
            }
        }
    }
}
/**
 * Builds regions for all try/catch blocks of the method: for every try block,
 * collects the cross points between normal flow and handler paths as exits, then
 * processes each exception handler.
 *
 * @return extra region for handler successor blocks not placed anywhere, or null
 */
public IRegion processTryCatchBlocks(MethodNode mth) {
    List<TryCatchBlockAttr> tcs = mth.getAll(AType.TRY_BLOCKS_LIST);
    for (TryCatchBlockAttr tc : tcs) {
        List<BlockNode> blocks = new ArrayList<>(tc.getHandlersCount());
        Set<BlockNode> splitters = new HashSet<>();
        for (ExceptionHandler handler : tc.getHandlers()) {
            BlockNode handlerBlock = handler.getHandlerBlock();
            if (handlerBlock != null) {
                blocks.add(handlerBlock);
                splitters.add(BlockUtils.getTopSplitterForHandler(handlerBlock));
            } else {
                mth.addDebugComment("No exception handler block: " + handler);
            }
        }
        // use path cross points between splitter successors and handler blocks as exits
        Set<BlockNode> exits = new HashSet<>();
        for (BlockNode splitter : splitters) {
            for (BlockNode handler : blocks) {
                if (handler.contains(AFlag.REMOVE)) {
                    continue;
                }
                List<BlockNode> s = splitter.getSuccessors();
                if (s.isEmpty()) {
                    mth.addDebugComment("No successors for splitter: " + splitter);
                    continue;
                }
                BlockNode ss = s.get(0);
                BlockNode cross = BlockUtils.getPathCross(mth, ss, handler);
                if (cross != null && cross != ss && cross != handler) {
                    exits.add(cross);
                }
            }
        }
        for (ExceptionHandler handler : tc.getHandlers()) {
            processExcHandler(mth, handler, exits);
        }
    }
    return processHandlersOutBlocks(mth, tcs);
}
/**
 * Search handlers successor blocks not included in any region and wrap them
 * into an extra region attached to the method region.
 *
 * @return the extra region or null if every successor is already placed
 */
protected IRegion processHandlersOutBlocks(MethodNode mth, List<TryCatchBlockAttr> tcs) {
    // collect all blocks already placed in the method region or any handler region
    Set<IBlock> regionBlocks = new HashSet<>();
    RegionUtils.getAllRegionBlocks(mth.getRegion(), regionBlocks);
    Set<IBlock> handlerSuccessors = new HashSet<>();
    for (TryCatchBlockAttr tc : tcs) {
        for (ExceptionHandler handler : tc.getHandlers()) {
            IContainer handlerRegion = handler.getHandlerRegion();
            if (handlerRegion == null) {
                continue;
            }
            IBlock lastBlock = RegionUtils.getLastBlock(handlerRegion);
            if (lastBlock instanceof BlockNode) {
                handlerSuccessors.addAll(((BlockNode) lastBlock).getSuccessors());
            }
            RegionUtils.getAllRegionBlocks(handlerRegion, regionBlocks);
        }
    }
    // keep only successors not yet placed anywhere
    handlerSuccessors.removeAll(regionBlocks);
    if (handlerSuccessors.isEmpty()) {
        return null;
    }
    Region excOutRegion = new Region(mth.getRegion());
    for (IBlock succ : handlerSuccessors) {
        if (succ instanceof BlockNode) {
            excOutRegion.add(makeRegion((BlockNode) succ, new RegionStack(mth)));
        }
    }
    return excOutRegion;
}
/**
 * Builds the region for a single exception handler and attaches its
 * {@link ExcHandlerAttr} to the resulting region.
 */
private void processExcHandler(MethodNode mth, ExceptionHandler handler, Set<BlockNode> exits) {
    BlockNode start = handler.getHandlerBlock();
    if (start == null) {
        return;
    }
    RegionStack stack = new RegionStack(this.mth);
    BlockNode dom;
    if (handler.isFinally()) {
        // 'finally' handler region starts at the top splitter block
        dom = BlockUtils.getTopSplitterForHandler(start);
    } else {
        dom = start;
        stack.addExits(exits);
    }
    if (dom.contains(AFlag.REMOVE)) {
        return;
    }
    // add dominance frontier blocks reachable from the handler as region exits
    BitSet domFrontier = dom.getDomFrontier();
    List<BlockNode> handlerExits = BlockUtils.bitSetToBlocks(this.mth, domFrontier);
    boolean inLoop = this.mth.getLoopForBlock(start) != null;
    for (BlockNode exit : handlerExits) {
        if ((!inLoop || BlockUtils.isPathExists(start, exit))
                && RegionUtils.isRegionContainsBlock(this.mth.getRegion(), exit)) {
            stack.addExit(exit);
        }
    }
    handler.setHandlerRegion(makeRegion(start, stack));
    ExcHandlerAttr excHandlerAttr = start.get(AType.EXC_HANDLER);
    if (excHandlerAttr == null) {
        mth.addWarn("Missing exception handler attribute for start block: " + start);
    } else {
        handler.getHandlerRegion().addAttr(excHandlerAttr);
    }
}
/**
 * Checks whether two blocks can be treated as the same continuation point:
 * identical blocks, equivalent single-return blocks, or blocks whose empty
 * synthetic paths converge.
 */
static boolean isEqualPaths(BlockNode b1, BlockNode b2) {
    if (b1 == b2) {
        return true;
    }
    if (b1 == null || b2 == null) {
        return false;
    }
    if (isEqualReturnBlocks(b1, b2)) {
        return true;
    }
    return isEmptySyntheticPath(b1, b2);
}
/**
 * Follows the empty path from each block and checks whether both end at the same
 * block or at equivalent return blocks.
 */
private static boolean isEmptySyntheticPath(BlockNode b1, BlockNode b2) {
    BlockNode end1 = followEmptyPath(b1);
    BlockNode end2 = followEmptyPath(b2);
    if (end1 == end2) {
        return true;
    }
    return isEqualReturnBlocks(end1, end2);
}
/**
 * Checks whether two blocks are both single-instruction return blocks returning
 * an equivalent value: same constant, or equal args on the same source line.
 */
public static boolean isEqualReturnBlocks(BlockNode b1, BlockNode b2) {
    if (!b1.isReturnBlock() || !b2.isReturnBlock()) {
        return false;
    }
    List<InsnNode> insns1 = b1.getInstructions();
    List<InsnNode> insns2 = b2.getInstructions();
    // only single-instruction blocks qualify
    if (insns1.size() != 1 || insns2.size() != 1) {
        return false;
    }
    InsnNode ret1 = insns1.get(0);
    InsnNode ret2 = insns2.get(0);
    int argsCount = ret1.getArgsCount();
    if (argsCount != ret2.getArgsCount()) {
        return false;
    }
    if (argsCount == 0) {
        // both are bare 'return' statements
        return true;
    }
    InsnArg arg1 = ret1.getArg(0);
    InsnArg arg2 = ret2.getArg(0);
    if (arg1.isSameConst(arg2)) {
        return true;
    }
    // non-constant args must match and come from the same source line
    return ret1.getSourceLine() == ret2.getSourceLine() && arg1.equals(arg2);
}
}
| |
package com.elastisys.scale.cloudpool.api.types;
import static com.elastisys.scale.cloudpool.api.types.Machine.isActiveMember;
import static com.elastisys.scale.cloudpool.api.types.Machine.isAllocated;
import static com.elastisys.scale.cloudpool.api.types.Machine.isStarted;
import static com.elastisys.scale.cloudpool.api.types.TestUtils.ips;
import static com.elastisys.scale.cloudpool.api.types.TestUtils.machine;
import static com.elastisys.scale.cloudpool.api.types.TestUtils.machineNoIp;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.function.Predicate;
import org.joda.time.DateTime;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.elastisys.scale.cloudpool.api.types.Machine.ActiveMemberPredicate;
import com.elastisys.scale.cloudpool.api.types.Machine.AllocatedMachinePredicate;
import com.elastisys.scale.cloudpool.api.types.Machine.MachineWithState;
import com.elastisys.scale.commons.util.time.UtcTime;
/**
* Verifies the behavior of {@link Predicate}s declared for the {@link Machine}
* class.
*/
/**
 * Verifies the behavior of {@link Predicate}s declared for the {@link Machine}
 * class.
 */
public class TestMachinePredicates {
    static final Logger LOG = LoggerFactory.getLogger(TestMachinePredicates.class);

    /** Machine states considered allocated: REQUESTED/PENDING/RUNNING. */
    private static final Set<MachineState> ALLOCATED_STATES = new HashSet<>(
            Arrays.asList(MachineState.REQUESTED, MachineState.PENDING, MachineState.RUNNING));

    /** Machine states considered started: PENDING/RUNNING. */
    private static final Set<MachineState> STARTED_STATES = new HashSet<>(
            Arrays.asList(MachineState.PENDING, MachineState.RUNNING));

    /** All membership status combinations used by the exhaustive tests. */
    private static final MembershipStatus[] MEMBERSHIP_STATUSES = { new MembershipStatus(true, true),
            new MembershipStatus(true, false), new MembershipStatus(false, true),
            new MembershipStatus(false, false) };

    /**
     * Verifies the {@link MachineWithState} {@link Predicate}.
     */
    @Test
    public void testMachineInStatePredicate() {
        DateTime now = UtcTime.now();
        Machine m1 = machineNoIp("id", MachineState.REQUESTED, now);
        Machine m2 = machine("id", MachineState.RUNNING, now, ips("1.2.3.4"), ips("1.2.3.5"));
        Machine m3 = machineNoIp("id", MachineState.PENDING, now);

        assertFalse(Machine.inState(MachineState.RUNNING).test(m1));
        assertTrue(Machine.inState(MachineState.RUNNING).test(m2));
        assertFalse(Machine.inState(MachineState.RUNNING).test(m3));
    }

    /**
     * Verifies the {@link AllocatedMachinePredicate} {@link Predicate}. Only
     * machines that are REQUESTED/PENDING/RUNNING are to be considered
     * allocated.
     */
    @Test
    public void testAllocatedMachinePredicate() {
        // check all combinations of machineState, membershipStatus and
        // serviceState
        boolean allocatedFound = false;
        for (MachineState machineState : MachineState.values()) {
            for (ServiceState serviceState : ServiceState.values()) {
                for (MembershipStatus membershipStatus : MEMBERSHIP_STATUSES) {
                    String combo = String.format("tested combination: %s-%s-%s", machineState, membershipStatus,
                            serviceState);
                    LOG.info(combo);
                    Machine machine = buildMachine(machineState, serviceState, membershipStatus);
                    if (ALLOCATED_STATES.contains(machine.getMachineState())) {
                        allocatedFound = true;
                        assertTrue(combo, isAllocated().test(machine));
                    } else {
                        assertFalse(combo, isAllocated().test(machine));
                    }
                }
            }
        }
        // verify that at least one allocated machine was found
        assertTrue(allocatedFound);
    }

    /**
     * Verifies the started-machine {@link Predicate}. Only machines that are
     * PENDING/RUNNING are to be considered started.
     */
    @Test
    public void testStartedMachinePredicate() {
        // check all combinations of machineState, membershipStatus and
        // serviceState
        boolean startedFound = false;
        for (MachineState machineState : MachineState.values()) {
            for (ServiceState serviceState : ServiceState.values()) {
                for (MembershipStatus membershipStatus : MEMBERSHIP_STATUSES) {
                    String combo = String.format("tested combination: %s-%s-%s", machineState, membershipStatus,
                            serviceState);
                    LOG.info(combo);
                    Machine machine = buildMachine(machineState, serviceState, membershipStatus);
                    if (STARTED_STATES.contains(machine.getMachineState())) {
                        startedFound = true;
                        assertTrue(combo, isStarted().test(machine));
                    } else {
                        assertFalse(combo, isStarted().test(machine));
                    }
                }
            }
        }
        // verify that at least one started machine was found
        assertTrue(startedFound);
    }

    /**
     * Verifies the {@link ActiveMemberPredicate} {@link Predicate}. Only
     * machines that are REQUESTED/PENDING/RUNNING and with an active membership
     * status are to be considered active members of the pool.
     */
    @Test
    public void testActiveMemberPredicate() {
        // check all combinations of machineState, membershipStatus and
        // serviceState
        boolean activeFound = false;
        for (MachineState machineState : MachineState.values()) {
            for (ServiceState serviceState : ServiceState.values()) {
                for (MembershipStatus membershipStatus : MEMBERSHIP_STATUSES) {
                    String combo = String.format("tested combination: %s-%s-%s", machineState, membershipStatus,
                            serviceState);
                    LOG.info(combo);
                    Machine machine = buildMachine(machineState, serviceState, membershipStatus);
                    if (ALLOCATED_STATES.contains(machine.getMachineState())
                            && machine.getMembershipStatus().isActive()) {
                        activeFound = true;
                        assertTrue(combo, isActiveMember().test(machine));
                    } else {
                        assertFalse(combo, isActiveMember().test(machine));
                    }
                }
            }
        }
        // verify that at least one active member was found
        assertTrue(activeFound);
    }

    /**
     * Verifies that {@link Machine#isEvictable()} only is <code>true</code> for
     * machines whose {@link MembershipStatus} has evictable set to
     * <code>true</code>.
     */
    @Test
    public void testEvictablePredicate() {
        // evictable (default membership status)
        Machine m1 = machineNoIp("id", MachineState.REQUESTED, UtcTime.now());
        // not evictable
        Machine m2 = Machine.builder().id("id").machineState(MachineState.RUNNING).cloudProvider("AWS-EC2")
                .region("us-east-1").machineSize("m1.small").membershipStatus(MembershipStatus.blessed())
                .requestTime(UtcTime.now()).launchTime(UtcTime.now()).publicIps(ips("1.2.3.4"))
                .privateIps(ips("1.2.3.5")).build();
        // evictable
        Machine m3 = Machine.builder().id("id").machineState(MachineState.RUNNING).cloudProvider("AWS-EC2")
                .region("us-east-1").machineSize("m1.small").membershipStatus(MembershipStatus.disposable())
                .requestTime(UtcTime.now()).launchTime(UtcTime.now()).publicIps(ips("1.2.3.4"))
                .privateIps(ips("1.2.3.5")).build();
        // not evictable
        Machine m4 = Machine.builder().id("id").machineState(MachineState.RUNNING).cloudProvider("AWS-EC2")
                .region("us-east-1").machineSize("m1.small").membershipStatus(MembershipStatus.awaitingService())
                .requestTime(UtcTime.now()).launchTime(UtcTime.now()).publicIps(ips("1.2.3.4"))
                .privateIps(ips("1.2.3.5")).build();

        assertTrue(Machine.isEvictable().test(m1));
        assertFalse(Machine.isEvictable().test(m2));
        assertTrue(Machine.isEvictable().test(m3));
        assertFalse(Machine.isEvictable().test(m4));
    }

    /**
     * Builds a {@link Machine} with fixed id/cloud metadata and the given
     * state/serviceState/membershipStatus combination; used by the exhaustive
     * predicate tests above.
     */
    private static Machine buildMachine(MachineState machineState, ServiceState serviceState,
            MembershipStatus membershipStatus) {
        final DateTime timestamp = UtcTime.now();
        return Machine.builder().id("id").machineState(machineState).cloudProvider("AWS-EC2")
                .region("us-east-1").machineSize("m1.small").membershipStatus(membershipStatus)
                .serviceState(serviceState).requestTime(timestamp).launchTime(timestamp)
                .publicIps(ips("1.2.3.4")).privateIps(ips("1.2.3.5")).build();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht.preloader;
import org.apache.ignite.*;
import org.apache.ignite.cluster.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.cluster.*;
import org.apache.ignite.internal.managers.deployment.*;
import org.apache.ignite.internal.processors.cache.*;
import org.apache.ignite.internal.processors.cache.distributed.dht.*;
import org.apache.ignite.internal.util.lang.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.internal.util.worker.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.thread.*;
import org.jetbrains.annotations.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.locks.*;
import static java.util.concurrent.TimeUnit.*;
import static org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtPartitionState.*;
/**
* Thread pool for supplying partitions to demanding nodes.
*/
class GridDhtPartitionSupplyPool<K, V> {
/** Cache context. */
private final GridCacheContext<K, V> cctx;

/** Logger (obtained from the cache context in the constructor). */
private final IgniteLogger log;

/** Busy lock; read side is taken while processing a demand message (presumably the
 * write side is held during shutdown by the owner passing this lock in — confirm). */
private final ReadWriteLock busyLock;

/** DHT partition topology; set in the constructor and cleared in {@code stop()}. */
private GridDhtPartitionTopology top;

/** Supply workers (empty when rebalancing is disabled). */
private final Collection<SupplyWorker> workers = new LinkedList<>();

/** Queue of demand messages consumed by the supply workers. */
private final BlockingQueue<DemandMessage> queue = new LinkedBlockingDeque<>();

/** Whether peer class deployment is enabled (cached from grid deployment manager). */
private final boolean depEnabled;

/** Preload predicate. */
private IgnitePredicate<GridCacheEntryInfo> preloadPred;
/**
 * @param cctx Cache context.
 * @param busyLock Shutdown lock.
 */
GridDhtPartitionSupplyPool(GridCacheContext<K, V> cctx, ReadWriteLock busyLock) {
    assert cctx != null;
    assert busyLock != null;

    this.cctx = cctx;
    this.busyLock = busyLock;

    log = cctx.logger(getClass());

    top = cctx.dht().topology();

    // no workers are created when rebalancing is disabled
    int poolSize = cctx.rebalanceEnabled() ? cctx.config().getRebalanceThreadPoolSize() : 0;

    for (int i = 0; i < poolSize; i++)
        workers.add(new SupplyWorker());

    // enqueue incoming demand messages for the workers to process
    cctx.io().addHandler(cctx.cacheId(), GridDhtPartitionDemandMessage.class, new CI2<UUID, GridDhtPartitionDemandMessage>() {
        @Override public void apply(UUID id, GridDhtPartitionDemandMessage m) {
            processDemandMessage(id, m);
        }
    });

    depEnabled = cctx.gridDeploy().enabled();
}
/**
 * Starts all supply workers, each on its own {@link IgniteThread}.
 */
void start() {
    for (SupplyWorker w : workers)
        new IgniteThread(cctx.gridName(), "preloader-supply-worker", w).start();
}
/**
 * Cancels and joins all supply workers, then drops the topology reference.
 */
void stop() {
    U.cancel(workers);
    U.join(workers, log);

    top = null;
}
/**
 * Sets preload predicate for supply pool. Entries failing the predicate are not
 * sent to demanding nodes (see usage in message processing).
 *
 * @param preloadPred Preload predicate.
 */
void preloadPredicate(IgnitePredicate<GridCacheEntryInfo> preloadPred) {
    this.preloadPred = preloadPred;
}
/**
 * @return Size of this thread pool.
 *     NOTE(review): returns the configured rebalance pool size even when
 *     rebalancing is disabled and no workers were actually created — confirm intended.
 */
int poolSize() {
    return cctx.config().getRebalanceThreadPoolSize();
}
/**
 * Tries to enter the busy state by taking the shared busy read lock.
 *
 * @return {@code true} if entered to busy state.
 */
private boolean enterBusy() {
    boolean entered = busyLock.readLock().tryLock();

    if (!entered && log.isDebugEnabled())
        log.debug("Failed to enter to busy state (supplier is stopping): " + cctx.nodeId());

    return entered;
}
/**
 * Enqueues a partition demand message for the supply workers, guarded by the busy
 * lock; ignores the message if rebalancing is disabled.
 *
 * @param nodeId Sender node ID.
 * @param d Message.
 */
private void processDemandMessage(UUID nodeId, GridDhtPartitionDemandMessage d) {
    if (!enterBusy())
        return;

    try {
        if (cctx.rebalanceEnabled()) {
            if (log.isDebugEnabled())
                log.debug("Received partition demand [node=" + nodeId + ", demand=" + d + ']');

            queue.offer(new DemandMessage(nodeId, d));
        }
        else
            U.warn(log, "Received partition demand message when rebalancing is disabled (will ignore): " + d);
    }
    finally {
        leaveBusy();
    }
}
/**
 * Leaves the busy state entered via {@code enterBusy()} (releases the read lock).
 */
private void leaveBusy() {
    busyLock.readLock().unlock();
}
/**
 * Polls the queue with a timeout, re-interrupting the thread first if the worker
 * was cancelled so blocking waits cannot hang shutdown.
 *
 * @param deque Deque to poll from.
 * @param w Worker.
 * @return Polled item, or {@code null} on timeout.
 * @throws InterruptedException If interrupted.
 */
@Nullable private <T> T poll(BlockingQueue<T> deque, GridWorker w) throws InterruptedException {
    assert w != null;

    // There is currently a case where {@code interrupted}
    // flag on a thread gets flipped during stop which causes the pool to hang. This check
    // will always make sure that interrupted flag gets reset before going into wait conditions.
    // The true fix should actually make sure that interrupted flag does not get reset or that
    // interrupted exception gets propagated. Until we find a real fix, this method should
    // always work to make sure that there is no hanging during stop.
    if (w.isCancelled())
        Thread.currentThread().interrupt();

    return deque.poll(2000, MILLISECONDS);
}
/**
* Supply work.
*/
private class SupplyWorker extends GridWorker {
/** Hide worker logger and use cache logger. */
private IgniteLogger log = GridDhtPartitionSupplyPool.this.log;
/**
 * Default constructor. Binds the worker to the grid name and the pool's
 * cache logger.
 */
private SupplyWorker() {
    super(cctx.gridName(), "preloader-supply-worker", GridDhtPartitionSupplyPool.this.log);
}
/** {@inheritDoc} */
@Override protected void body() throws InterruptedException, IgniteInterruptedCheckedException {
    // main loop: consume demand messages from the queue until cancelled
    while (!isCancelled()) {
        DemandMessage msg = poll(queue, this);

        if (msg == null)
            continue; // poll timed out; re-check cancellation

        ClusterNode node = cctx.discovery().node(msg.senderId());

        if (node == null) {
            if (log.isDebugEnabled())
                log.debug("Received message from non-existing node (will ignore): " + msg);

            continue;
        }

        processMessage(msg, node);
    }
}
/**
 * Handles a single partition demand message from a rebalancing node: for every
 * requested partition it streams cache entries (on-heap, then swap/offheap, then
 * entries promoted from swap during iteration) to the demander in batched supply
 * messages, replying with a "missed" marker for partitions this node no longer owns.
 *
 * @param msg Message.
 * @param node Demander.
 */
private void processMessage(DemandMessage msg, ClusterNode node) {
assert msg != null;
assert node != null;
GridDhtPartitionDemandMessage d = msg.message();
// Supply message accumulating entries until the configured rebalance batch size is reached.
GridDhtPartitionSupplyMessage s = new GridDhtPartitionSupplyMessage(d.workerId(),
d.updateSequence(), cctx.cacheId());
// Pause between sent batches; 0 disables throttling.
long preloadThrottle = cctx.config().getRebalanceThrottle();
// Set once an intermediate batch has been sent; the final message is then ack-marked.
boolean ack = false;
try {
// Partition map exchange is finished which means that all near transactions with given
// topology version are committed. We can wait for local locks here as it will not take
// much time.
cctx.mvcc().finishLocks(d.topologyVersion()).get();
for (Integer part : d.partitions()) {
GridDhtLocalPartition loc = top.localPartition(part, d.topologyVersion(), false);
// Partition must exist locally, be in OWNING state and reservable to be supplied.
if (loc == null || loc.state() != OWNING || !loc.reserve()) {
// Reply with partition of "-1" to let sender know that
// this node is no longer an owner.
s.missed(part);
if (log.isDebugEnabled())
log.debug("Requested partition is not owned by local node [part=" + part +
", demander=" + msg.senderId() + ']');
continue;
}
// Listener collecting entries promoted from swap/offheap while we iterate,
// so promoted entries are not lost from the supply stream (sent in phase 3 below).
GridCacheEntryInfoCollectSwapListener swapLsnr = null;
try {
if (cctx.isSwapOrOffheapEnabled()) {
swapLsnr = new GridCacheEntryInfoCollectSwapListener(log);
cctx.swap().addOffHeapListener(part, swapLsnr);
cctx.swap().addSwapListener(part, swapLsnr);
}
boolean partMissing = false;
// Phase 1: supply the partition's on-heap entries.
for (GridCacheEntryEx e : loc.entries()) {
if (!cctx.affinity().belongs(node, part, d.topologyVersion())) {
// Demander no longer needs this partition, so we send '-1' partition and move on.
s.missed(part);
if (log.isDebugEnabled())
log.debug("Demanding node does not need requested partition [part=" + part +
", nodeId=" + msg.senderId() + ']');
partMissing = true;
break;
}
// Flush the current batch once it grows past the configured size.
if (s.messageSize() >= cctx.config().getRebalanceBatchSize()) {
ack = true;
if (!reply(node, d, s))
return;
// Throttle preloading.
if (preloadThrottle > 0)
U.sleep(preloadThrottle);
s = new GridDhtPartitionSupplyMessage(d.workerId(), d.updateSequence(),
cctx.cacheId());
}
GridCacheEntryInfo info = e.info();
if (info != null && !info.isNew()) {
if (preloadPred == null || preloadPred.apply(info))
s.addEntry(part, info, cctx);
else if (log.isDebugEnabled())
// NOTE(review): message typo — should read "will not send cache entry".
log.debug("Rebalance predicate evaluated to false (will not sender cache entry): " +
info);
}
}
if (partMissing)
continue;
// Phase 2: supply entries currently residing in swap/offheap storage.
if (cctx.isSwapOrOffheapEnabled()) {
GridCloseableIterator<Map.Entry<byte[], GridCacheSwapEntry>> iter =
cctx.swap().iterator(part);
// Iterator may be null if space does not exist.
if (iter != null) {
try {
boolean prepared = false;
for (Map.Entry<byte[], GridCacheSwapEntry> e : iter) {
if (!cctx.affinity().belongs(node, part, d.topologyVersion())) {
// Demander no longer needs this partition,
// so we send '-1' partition and move on.
s.missed(part);
if (log.isDebugEnabled())
log.debug("Demanding node does not need requested partition " +
"[part=" + part + ", nodeId=" + msg.senderId() + ']');
partMissing = true;
break; // For.
}
if (s.messageSize() >= cctx.config().getRebalanceBatchSize()) {
ack = true;
if (!reply(node, d, s))
return;
// Throttle preloading.
if (preloadThrottle > 0)
U.sleep(preloadThrottle);
s = new GridDhtPartitionSupplyMessage(d.workerId(),
d.updateSequence(), cctx.cacheId());
}
// Rebuild an entry info object from the raw swap entry.
GridCacheSwapEntry swapEntry = e.getValue();
GridCacheEntryInfo info = new GridCacheEntryInfo();
info.keyBytes(e.getKey());
info.ttl(swapEntry.ttl());
info.expireTime(swapEntry.expireTime());
info.version(swapEntry.version());
info.value(swapEntry.value());
if (preloadPred == null || preloadPred.apply(info))
s.addEntry0(part, info, cctx);
else {
if (log.isDebugEnabled())
log.debug("Rebalance predicate evaluated to false (will not send " +
"cache entry): " + info);
continue;
}
// Need to manually prepare cache message.
// Resolve a deployment class loader from either the key or the value class loader ID.
if (depEnabled && !prepared) {
ClassLoader ldr = swapEntry.keyClassLoaderId() != null ?
cctx.deploy().getClassLoader(swapEntry.keyClassLoaderId()) :
swapEntry.valueClassLoaderId() != null ?
cctx.deploy().getClassLoader(swapEntry.valueClassLoaderId()) :
null;
if (ldr == null)
continue;
if (ldr instanceof GridDeploymentInfo) {
s.prepare((GridDeploymentInfo)ldr);
prepared = true;
}
}
}
if (partMissing)
continue;
}
finally {
iter.close();
}
}
}
// Stop receiving promote notifications.
if (swapLsnr != null) {
cctx.swap().removeOffHeapListener(part, swapLsnr);
cctx.swap().removeSwapListener(part, swapLsnr);
}
// Phase 3: supply entries promoted from swap during iteration, collected by the listener.
if (swapLsnr != null) {
Collection<GridCacheEntryInfo> entries = swapLsnr.entries();
// Null out so the finally block below does not remove the listeners a second time.
swapLsnr = null;
for (GridCacheEntryInfo info : entries) {
if (!cctx.affinity().belongs(node, part, d.topologyVersion())) {
// Demander no longer needs this partition,
// so we send '-1' partition and move on.
s.missed(part);
if (log.isDebugEnabled())
log.debug("Demanding node does not need requested partition " +
"[part=" + part + ", nodeId=" + msg.senderId() + ']');
// No need to continue iteration over swap entries.
break;
}
if (s.messageSize() >= cctx.config().getRebalanceBatchSize()) {
ack = true;
if (!reply(node, d, s))
return;
// NOTE(review): unlike the other batch flushes, no throttle sleep here — confirm intentional.
s = new GridDhtPartitionSupplyMessage(d.workerId(),
d.updateSequence(),
cctx.cacheId());
}
if (preloadPred == null || preloadPred.apply(info))
s.addEntry(part, info, cctx);
else if (log.isDebugEnabled())
// NOTE(review): message typo — should read "will not send cache entry".
log.debug("Rebalance predicate evaluated to false (will not sender cache entry): " +
info);
}
}
// Mark as last supply message.
s.last(part);
if (ack) {
s.markAck();
break; // Partition for loop.
}
}
finally {
loc.release();
if (swapLsnr != null) {
cctx.swap().removeOffHeapListener(part, swapLsnr);
cctx.swap().removeSwapListener(part, swapLsnr);
}
}
}
// Send the final (possibly partially filled) supply message.
reply(node, d, s);
}
catch (IgniteCheckedException e) {
U.error(log, "Failed to send partition supply message to node: " + node.id(), e);
}
}
/**
 * Sends the given supply message to the demander node in response to its demand.
 *
 * @param n Node.
 * @param d Demand message.
 * @param s Supply message.
 * @return {@code True} if message was sent, {@code false} if recipient left grid.
 * @throws IgniteCheckedException If failed.
 */
private boolean reply(ClusterNode n, GridDhtPartitionDemandMessage d, GridDhtPartitionSupplyMessage s)
    throws IgniteCheckedException {
    if (log.isDebugEnabled())
        log.debug("Replying to partition demand [node=" + n.id() + ", demand=" + d + ", supply=" + s + ']');

    try {
        cctx.io().sendOrderedMessage(n, d.topic(), s, cctx.ioPolicy(), d.timeout());
    }
    catch (ClusterTopologyCheckedException ignore) {
        // Recipient left the grid; not an error — the caller simply stops supplying.
        if (log.isDebugEnabled())
            log.debug("Failed to send partition supply message because node left grid: " + n.id());

        return false;
    }

    return true;
}
}
/**
 * Demand message wrapper pairing the ID of the sending node with the demand message itself.
 */
private static class DemandMessage extends IgniteBiTuple<UUID, GridDhtPartitionDemandMessage> {
    /** Serialization version. */
    private static final long serialVersionUID = 0L;

    /**
     * @param sndId Sender ID.
     * @param msg Message.
     */
    DemandMessage(UUID sndId, GridDhtPartitionDemandMessage msg) {
        super(sndId, msg);
    }

    /**
     * Empty constructor required for {@link Externalizable}.
     */
    public DemandMessage() {
        // No-op.
    }

    /**
     * @return ID of the node that sent the demand.
     */
    UUID senderId() {
        return get1();
    }

    /**
     * @return Wrapped partition demand message.
     */
    public GridDhtPartitionDemandMessage message() {
        return get2();
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return "DemandMessage [senderId=" + get1() + ", msg=" + get2() + ']';
    }
}
}
| |
/*
* JasperReports - Free Java Reporting Library.
* Copyright (C) 2001 - 2014 TIBCO Software Inc. All rights reserved.
* http://www.jaspersoft.com
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is part of JasperReports.
*
* JasperReports is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JasperReports is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
*/
package net.sf.jasperreports.charts.fill;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.sf.jasperreports.charts.JRTimeSeries;
import net.sf.jasperreports.charts.JRTimeSeriesDataset;
import net.sf.jasperreports.charts.util.TimeSeriesLabelGenerator;
import net.sf.jasperreports.engine.JRChartDataset;
import net.sf.jasperreports.engine.JRExpressionCollector;
import net.sf.jasperreports.engine.JRPrintHyperlink;
import net.sf.jasperreports.engine.JRRuntimeException;
import net.sf.jasperreports.engine.design.JRVerifier;
import net.sf.jasperreports.engine.fill.JRCalculator;
import net.sf.jasperreports.engine.fill.JRExpressionEvalException;
import net.sf.jasperreports.engine.fill.JRFillChartDataset;
import net.sf.jasperreports.engine.fill.JRFillObjectFactory;
import org.jfree.data.general.Dataset;
import org.jfree.data.time.RegularTimePeriod;
import org.jfree.data.time.TimeSeries;
import org.jfree.data.time.TimeSeriesCollection;
/**
 * Fill-time implementation of a time series chart dataset: accumulates evaluated
 * series values, item labels and item hyperlinks per series, and exposes them as a
 * JFreeChart {@link TimeSeriesCollection}.
 *
 * @author Flavius Sana (flavius_sana@users.sourceforge.net)
 * @version $Id: JRFillTimeSeriesDataset.java 7199 2014-08-27 13:58:10Z teodord $
 */
public class JRFillTimeSeriesDataset extends JRFillChartDataset implements JRTimeSeriesDataset
{
	/** Fill-time series mirroring the design-time series definitions. */
	protected JRFillTimeSeries[] timeSeries;

	/** Series names in the order they were first encountered. */
	private List<Comparable<?>> seriesNames;
	/** JFreeChart series keyed by series name. */
	private Map<Comparable<?>, TimeSeries> seriesMap;
	/** Per-series item labels keyed by time period. */
	private Map<Comparable<?>, Map<RegularTimePeriod, String>> labelsMap;
	/** Per-series item hyperlinks keyed by time period. */
	private Map<Comparable<?>, Map<RegularTimePeriod, JRPrintHyperlink>> itemHyperlinks;

	public JRFillTimeSeriesDataset(
		JRTimeSeriesDataset timeSeriesDataset,
		JRFillObjectFactory factory
		)
	{
		super(timeSeriesDataset, factory);

		JRTimeSeries[] srcTimeSeries = timeSeriesDataset.getSeries();
		if (srcTimeSeries != null && srcTimeSeries.length > 0)
		{
			timeSeries = new JRFillTimeSeries[srcTimeSeries.length];
			for (int i = 0; i < srcTimeSeries.length; i++)
			{
				timeSeries[i] = (JRFillTimeSeries)factory.getTimeSeries(srcTimeSeries[i]);
			}
		}
	}

	public JRTimeSeries[] getSeries()
	{
		return timeSeries;
	}

	protected void customInitialize()
	{
		// Drop all accumulated state; it is lazily recreated on the next increment.
		seriesNames = null;
		seriesMap = null;
		labelsMap = null;
		itemHyperlinks = null;
	}

	protected void customEvaluate(JRCalculator calculator) throws JRExpressionEvalException
	{
		if (timeSeries != null)
		{
			for (JRFillTimeSeries series : timeSeries)
			{
				series.evaluate(calculator);
			}
		}
	}

	protected void customIncrement()
	{
		if (timeSeries == null || timeSeries.length == 0)
		{
			return;
		}

		if (seriesNames == null)
		{
			// Lazily create the accumulators on the first increment after (re)initialization.
			seriesNames = new ArrayList<Comparable<?>>();
			seriesMap = new HashMap<Comparable<?>, TimeSeries>();
			labelsMap = new HashMap<Comparable<?>, Map<RegularTimePeriod, String>>();
			itemHyperlinks = new HashMap<Comparable<?>, Map<RegularTimePeriod, JRPrintHyperlink>>();
		}

		for (JRFillTimeSeries fillSeries : timeSeries)
		{
			Comparable<?> seriesName = fillSeries.getSeries();
			if (seriesName == null)
			{
				throw new JRRuntimeException("Time series name is null.");
			}

			// Register the series on first sight, preserving encounter order.
			TimeSeries chartSeries = seriesMap.get(seriesName);
			if (chartSeries == null)
			{
				chartSeries = new TimeSeries(seriesName.toString(), getTimePeriod());
				seriesNames.add(seriesName);
				seriesMap.put(seriesName, chartSeries);
			}

			RegularTimePeriod period =
				RegularTimePeriod.createInstance(
					getTimePeriod(),
					fillSeries.getTimePeriod(),
					getTimeZone()
					);
			chartSeries.addOrUpdate(period, fillSeries.getValue());

			if (fillSeries.getLabelExpression() != null)
			{
				Map<RegularTimePeriod, String> labels = labelsMap.get(seriesName);
				if (labels == null)
				{
					labels = new HashMap<RegularTimePeriod, String>();
					labelsMap.put(seriesName, labels);
				}
				labels.put(period, fillSeries.getLabel());
			}

			if (fillSeries.hasItemHyperlink())
			{
				Map<RegularTimePeriod, JRPrintHyperlink> links = itemHyperlinks.get(seriesName);
				if (links == null)
				{
					links = new HashMap<RegularTimePeriod, JRPrintHyperlink>();
					itemHyperlinks.put(seriesName, links);
				}
				links.put(period, fillSeries.getPrintItemHyperlink());
			}
		}
	}

	public Dataset getCustomDataset()
	{
		TimeSeriesCollection dataset = new TimeSeriesCollection(getTimeZone());
		if (seriesNames != null)
		{
			// Add series in first-encounter order.
			for (Comparable<?> seriesName : seriesNames)
			{
				dataset.addSeries(seriesMap.get(seriesName));
			}
		}
		return dataset;
	}

	public Class<?> getTimePeriod() {
		return ((JRTimeSeriesDataset)parent).getTimePeriod();
	}

	public void setTimePeriod(Class<?> timePeriod) {
		// No-op at fill time; the time period is taken from the parent design object.
	}

	/**
	 * @return The time series dataset type constant.
	 */
	public byte getDatasetType() {
		return JRChartDataset.TIMESERIES_DATASET;
	}

	public Object getLabelGenerator(){
		//FIXMETHEME this and other similar implementations should be able to return null and chart themes should be protected agains null;
		return new TimeSeriesLabelGenerator(labelsMap);
	}

	/**
	 * Registers this dataset's expressions with the collector.
	 */
	public void collectExpressions(JRExpressionCollector collector)
	{
		collector.collect(this);
	}

	public Map<Comparable<?>, Map<RegularTimePeriod, JRPrintHyperlink>> getItemHyperlinks()
	{
		return itemHyperlinks;
	}

	public boolean hasItemHyperlinks()
	{
		if (timeSeries != null)
		{
			for (JRFillTimeSeries series : timeSeries)
			{
				if (series.hasItemHyperlink())
				{
					return true;
				}
			}
		}
		return false;
	}

	public void validate(JRVerifier verifier)
	{
		verifier.verify(this);
	}
}
| |
package gov.samhsa.consent2share.service.audit;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyByte;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import gov.samhsa.consent2share.domain.audit.ModifiedEntityTypeEntity;
import gov.samhsa.consent2share.domain.audit.ModifiedEntityTypeEntityRepository;
import gov.samhsa.consent2share.domain.audit.RevisionInfoEntity;
import gov.samhsa.consent2share.domain.audit.RevisionInfoEntityRepository;
import gov.samhsa.consent2share.domain.patient.Patient;
import gov.samhsa.consent2share.domain.patient.PatientRepository;
import gov.samhsa.consent2share.domain.staff.StaffRepository;
import gov.samhsa.consent2share.service.dto.HistoryDto;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.EntityManagerFactory;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class AuditServiceImplTest {
@Mock
EntityManagerFactory entityManagerFactory;
@Mock
PatientRepository patientRepository;
@Mock
RevisionInfoEntityRepository patientRevisionEntityRepository;
@Mock
ModifiedEntityTypeEntityRepository modifiedEntityTypeEntityRepository;
@Mock
StaffRepository staffRepository;
@InjectMocks
AuditServiceImpl pasut;
@Before
public void setUp() {
RevisionInfoEntity patientRevisionEntity = mock(RevisionInfoEntity.class);
when(patientRevisionEntityRepository.findOneById(anyLong()))
.thenReturn(patientRevisionEntity);
Patient patient = mock(Patient.class);
when(patientRepository.findByUsername(anyString())).thenReturn(patient);
when(patientRepository.findOne(anyLong())).thenReturn(patient);
}
@Test
public void testGetReversed() {
List<HistoryDto> historyList = new ArrayList<HistoryDto>();
List<HistoryDto> historyReversedExpected = new ArrayList<HistoryDto>();
HistoryDto hd1 = mock(HistoryDto.class);
historyList.add(hd1);
HistoryDto hd2 = mock(HistoryDto.class);
historyList.add(hd2);
HistoryDto hd3 = mock(HistoryDto.class);
historyList.add(hd3);
historyReversedExpected.add(hd3);
historyReversedExpected.add(hd2);
historyReversedExpected.add(hd1);
List<HistoryDto> historyReversed = pasut.getReversed(historyList);
Assert.assertEquals(historyReversedExpected, historyReversed);
}
@Test
public void testFindHistoryDetail() {
AuditService pasutSpy = spy(pasut);
Number n = mock(Number.class);
HistoryDto hd = mock(HistoryDto.class);
RevisionInfoEntity patientRevisionEntity = mock(RevisionInfoEntity.class);
@SuppressWarnings("unchecked")
List<ModifiedEntityTypeEntity> modifiedEntityTypeEntitys = mock(List.class);
Patient patient = mock(Patient.class);
when(pasutSpy.makeHistoryDto()).thenReturn(hd);
when(patientRevisionEntityRepository.findOneById(n)).thenReturn(
patientRevisionEntity);
when(
modifiedEntityTypeEntityRepository
.findAllByRevision(any(RevisionInfoEntity.class)))
.thenReturn(modifiedEntityTypeEntitys);
when(patientRevisionEntity.getUsername()).thenReturn("username");
when(patientRepository.findByUsername(anyString())).thenReturn(patient);
when(patient.getLastName()).thenReturn("LastName");
when(patient.getFirstName()).thenReturn("FirstName");
when(patientRevisionEntity.getTimestamp()).thenReturn((long) 1);
pasutSpy.findHistoryDetail(n);
verify(pasutSpy, times(1)).findRevType(modifiedEntityTypeEntitys);
verify(pasutSpy, times(1)).findRevClassName(modifiedEntityTypeEntitys);
verify(hd, times(1)).setRevisionid(anyLong());
verify(hd, times(1)).setChangedBy(anyString());
verify(hd, times(1)).setTimestamp(anyString());
verify(hd, times(1)).setRecType(anyString());
verify(hd, times(1)).setType(anyString());
}
@Test
public void testFindHistoryDetails() {
List<Number> revisions = new ArrayList<Number>();
for (int i = 0; i < 3; i++) {
revisions.add(i);
}
AuditService pasutSpy = spy(pasut);
List<HistoryDto> historyDtosList = new ArrayList<HistoryDto>();
List<HistoryDto> historyDtosListSpy = spy(historyDtosList);
when(pasutSpy.makeHistoryDtos()).thenReturn(historyDtosListSpy);
pasutSpy.findHistoryDetails(revisions);
verify(historyDtosListSpy, times(3)).add(any(HistoryDto.class));
}
@Test
public void testmakeHistoryDtos_return_correct_class() {
Object object = pasut.makeHistoryDtos();
String className = object.getClass().getName();
assertEquals("java.util.ArrayList", className);
}
@Test
public void testmakeHistoryDto_return_correct_class() {
Object object = pasut.makeHistoryDto();
String className = object.getClass().getName();
assertEquals("gov.samhsa.consent2share.service.dto.HistoryDto",
className);
}
@Test
public void testFindRevType_when_type_is_create() {
Byte btype = 0;
String revType = pasut.findRevType(btype);
assertEquals("Create new entry", revType);
}
@Test
public void testFindRevType_when_type_is_modified() {
Byte btype = 1;
String revType = pasut.findRevType(btype);
assertEquals("Changed entry", revType);
}
@Test
public void testFindRevType_when_type_is_deleted() {
Byte btype = 2;
String revType = pasut.findRevType(btype);
assertEquals("Delete entry", revType);
}
@Test
public void testFindRevType_when_type_is_notVaild() {
Byte btype = 3;
String revType = pasut.findRevType(btype);
assertEquals(null, revType);
}
@Test
public void testFindRevClassName_when_modifiedEntity_size_is_one() {
List<ModifiedEntityTypeEntity> modifiedEntityTypeEntitys = new ArrayList<ModifiedEntityTypeEntity>();
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
when(modifiedEntityTypeEntitys.get(0).getEntityClassName()).thenReturn(
"gov.samhsa.consent2share.domain.patient.Patient");
String revClassName = pasut.findRevClassName(modifiedEntityTypeEntitys);
assertEquals("Patient", revClassName);
}
@Test
public void testFindRevClassName_when_modifiedEntity_size_is_two() {
List<ModifiedEntityTypeEntity> modifiedEntityTypeEntitys = new ArrayList<ModifiedEntityTypeEntity>();
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
when(modifiedEntityTypeEntitys.get(0).getRevisionType()).thenReturn(
(byte) 1);
when(modifiedEntityTypeEntitys.get(1).getEntityClassName()).thenReturn(
"gov.samhsa.consent2share.domain.patient.Patient");
String revClassName = pasut.findRevClassName(modifiedEntityTypeEntitys);
assertEquals("Patient", revClassName);
}
@Test
public void testFindRevClassName_when_modifiedEntity_size_is_two_Second() {
List<ModifiedEntityTypeEntity> modifiedEntityTypeEntitys = new ArrayList<ModifiedEntityTypeEntity>();
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
when(modifiedEntityTypeEntitys.get(0).getRevisionType()).thenReturn(
(byte) 0);
when(modifiedEntityTypeEntitys.get(0).getEntityClassName()).thenReturn(
"gov.samhsa.consent2share.domain.provider.IndividualProvider");
String revClassName = pasut.findRevClassName(modifiedEntityTypeEntitys);
assertEquals("Individual Provider", revClassName);
}
@Test
public void testFindRevClassName_when_modifiedEntity_size_is_three() {
List<ModifiedEntityTypeEntity> modifiedEntityTypeEntitys = new ArrayList<ModifiedEntityTypeEntity>();
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
String revClassName = pasut.findRevClassName(modifiedEntityTypeEntitys);
assertEquals("Add provider", revClassName);
}
@Test
public void testFindRevType_when_modifiedEntity_size_is_one() {
List<ModifiedEntityTypeEntity> modifiedEntityTypeEntitys = new ArrayList<ModifiedEntityTypeEntity>();
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
when(modifiedEntityTypeEntitys.get(0).getRevisionType()).thenReturn(
(byte) 2);
AuditService pasutSpy = spy(pasut);
pasutSpy.findRevType(modifiedEntityTypeEntitys);
verify(pasutSpy, times(1)).findRevType((byte) 2);
}
@Test
public void testFindRevType_when_modifiedEntity_size_is_two() {
List<ModifiedEntityTypeEntity> modifiedEntityTypeEntitys = new ArrayList<ModifiedEntityTypeEntity>();
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
when(modifiedEntityTypeEntitys.get(0).getRevisionType()).thenReturn(
(byte) 1);
when(modifiedEntityTypeEntitys.get(1).getRevisionType()).thenReturn(
(byte) 2);
AuditService pasutSpy = spy(pasut);
pasutSpy.findRevType(modifiedEntityTypeEntitys);
verify(pasutSpy, times(1)).findRevType((byte) 2);
}
@Test
public void testFindRevType_when_modifiedEntity_size_is_two_Second() {
List<ModifiedEntityTypeEntity> modifiedEntityTypeEntitys = new ArrayList<ModifiedEntityTypeEntity>();
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
when(modifiedEntityTypeEntitys.get(0).getRevisionType()).thenReturn(
(byte) 2);
AuditService pasutSpy = spy(pasut);
pasutSpy.findRevType(modifiedEntityTypeEntitys);
verify(pasutSpy, times(1)).findRevType((byte) 2);
}
@Test
public void testFindRevType_when_modifiedEntity_size_is_three() {
List<ModifiedEntityTypeEntity> modifiedEntityTypeEntitys = new ArrayList<ModifiedEntityTypeEntity>();
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
modifiedEntityTypeEntitys.add(mock(ModifiedEntityTypeEntity.class));
when(modifiedEntityTypeEntitys.get(0).getRevisionType()).thenReturn(
(byte) 0);
AuditService pasutSpy = spy(pasut);
pasutSpy.findRevType(modifiedEntityTypeEntitys);
verify(pasutSpy, times(1)).findRevType((byte) 0);
}
@Test
public void testFindLegalHistoryDetail() {
AuditService pasutSpy = spy(pasut);
Number n = mock(Number.class);
HistoryDto hd = mock(HistoryDto.class);
RevisionInfoEntity patientRevisionEntity = mock(RevisionInfoEntity.class);
List<ModifiedEntityTypeEntity> modifiedEntityTypeEntitys = new ArrayList<ModifiedEntityTypeEntity>();
ModifiedEntityTypeEntity mete = mock(ModifiedEntityTypeEntity.class);
modifiedEntityTypeEntitys.add(mete);
Patient patient = mock(Patient.class);
when(pasutSpy.makeHistoryDto()).thenReturn(hd);
when(patientRevisionEntityRepository.findOneById(n)).thenReturn(
patientRevisionEntity);
when(
modifiedEntityTypeEntityRepository
.findAllByRevision(any(RevisionInfoEntity.class)))
.thenReturn(modifiedEntityTypeEntitys);
when(patientRevisionEntity.getUsername()).thenReturn("username");
when(patientRepository.findByUsername(anyString())).thenReturn(patient);
when(patient.getLastName()).thenReturn("LastName");
when(patient.getFirstName()).thenReturn("FirstName");
when(patientRevisionEntity.getTimestamp()).thenReturn((long) 1);
when(modifiedEntityTypeEntitys.get(0).getRevisionType()).thenReturn(
(byte) 1);
pasutSpy.findLegalHistoryDetail(n);
verify(pasutSpy, times(1)).findRevType(anyByte());
verify(hd, times(1)).setRevisionid(anyLong());
verify(hd, times(1)).setChangedBy(anyString());
verify(hd, times(1)).setTimestamp(anyString());
verify(hd, times(1)).setRecType(anyString());
verify(hd, times(1)).setType(anyString());
}
@Test
public void testFindLegalHistoryDetails() {
List<Number> revisions = new ArrayList<Number>();
for (int i = 0; i < 3; i++) {
revisions.add(i);
}
AuditService pasutSpy = spy(pasut);
List<HistoryDto> historyDtosList = new ArrayList<HistoryDto>();
List<HistoryDto> historyDtosListSpy = spy(historyDtosList);
when(pasutSpy.makeHistoryDtos()).thenReturn(historyDtosListSpy);
pasutSpy.findHistoryDetails(revisions);
verify(historyDtosListSpy, times(3)).add(any(HistoryDto.class));
}
}
| |
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.beans.propertyeditors;
import java.beans.PropertyEditorSupport;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.ReflectionUtils;
/**
 * Property editor for Collections, converting any source Collection to a given
 * target Collection type.
 *
 * <p>
 * By default registered for Set, SortedSet and List, to automatically convert
 * any given Collection to one of those target types if the type does not match
 * the target property.
 *
 * @author Juergen Hoeller
 * @since 1.1.3
 * @see java.util.Collection
 * @see java.util.Set
 * @see java.util.SortedSet
 * @see java.util.List
 */
public class CustomCollectionEditor extends PropertyEditorSupport {

	@SuppressWarnings("rawtypes")
	private final Class<? extends Collection> collectionType;

	private final boolean nullAsEmptyCollection;

	/**
	 * Create a new CustomCollectionEditor for the given target type, keeping an
	 * incoming {@code null} as-is.
	 *
	 * @param collectionType the target type, which needs to be a sub-interface of
	 *                       Collection or a concrete Collection class
	 * @see java.util.Collection
	 * @see java.util.ArrayList
	 * @see java.util.TreeSet
	 * @see java.util.LinkedHashSet
	 */
	@SuppressWarnings("rawtypes")
	public CustomCollectionEditor(Class<? extends Collection> collectionType) {
		this(collectionType, false);
	}

	/**
	 * Create a new CustomCollectionEditor for the given target type.
	 * <p>
	 * If the incoming value is of the given type, it will be used as-is. If it is a
	 * different Collection type or an array, it will be converted to a default
	 * implementation of the given Collection type. If the value is anything else, a
	 * target Collection with that single value will be created.
	 * <p>
	 * The default Collection implementations are: ArrayList for List, TreeSet for
	 * SortedSet, and LinkedHashSet for Set.
	 *
	 * @param collectionType        the target type, which needs to be a
	 *                              sub-interface of Collection or a concrete
	 *                              Collection class
	 * @param nullAsEmptyCollection whether to convert an incoming {@code null}
	 *                              value to an empty Collection (of the appropriate
	 *                              type)
	 * @see java.util.Collection
	 * @see java.util.ArrayList
	 * @see java.util.TreeSet
	 * @see java.util.LinkedHashSet
	 */
	@SuppressWarnings("rawtypes")
	public CustomCollectionEditor(Class<? extends Collection> collectionType, boolean nullAsEmptyCollection) {
		Assert.notNull(collectionType, "Collection type is required");
		if (!Collection.class.isAssignableFrom(collectionType)) {
			throw new IllegalArgumentException(
					"Collection type [" + collectionType.getName() + "] does not implement [java.util.Collection]");
		}
		this.collectionType = collectionType;
		this.nullAsEmptyCollection = nullAsEmptyCollection;
	}

	/**
	 * Convert the given text value to a Collection with a single element.
	 */
	@Override
	public void setAsText(String text) throws IllegalArgumentException {
		// Debug residue (Thread.dumpStack()) removed: it printed a stack trace to
		// stderr on every conversion call.
		setValue(text);
	}

	/**
	 * Convert the given value to a Collection of the target type.
	 */
	@Override
	public void setValue(@Nullable Object value) {
		if (value == null && this.nullAsEmptyCollection) {
			super.setValue(createCollection(this.collectionType, 0));
		} else if (value == null || (this.collectionType.isInstance(value) && !alwaysCreateNewCollection())) {
			// Use the source value as-is, as it matches the target type.
			super.setValue(value);
		} else if (value instanceof Collection) {
			// Convert Collection elements.
			Collection<?> source = (Collection<?>) value;
			Collection<Object> target = createCollection(this.collectionType, source.size());
			for (Object elem : source) {
				target.add(convertElement(elem));
			}
			super.setValue(target);
		} else if (value.getClass().isArray()) {
			// Convert array elements to Collection elements.
			int length = Array.getLength(value);
			Collection<Object> target = createCollection(this.collectionType, length);
			for (int i = 0; i < length; i++) {
				target.add(convertElement(Array.get(value, i)));
			}
			super.setValue(target);
		} else {
			// A plain value: convert it to a Collection with a single element.
			Collection<Object> target = createCollection(this.collectionType, 1);
			target.add(convertElement(value));
			super.setValue(target);
		}
	}

	/**
	 * Create a Collection of the given type, with the given initial capacity (if
	 * supported by the Collection type).
	 *
	 * @param collectionType  a sub-interface of Collection
	 * @param initialCapacity the initial capacity
	 * @return the new Collection instance
	 */
	@SuppressWarnings({ "rawtypes", "unchecked" })
	protected Collection<Object> createCollection(Class<? extends Collection> collectionType, int initialCapacity) {
		if (!collectionType.isInterface()) {
			try {
				return ReflectionUtils.accessibleConstructor(collectionType).newInstance();
			} catch (Throwable ex) {
				throw new IllegalArgumentException(
						"Could not instantiate collection class: " + collectionType.getName(), ex);
			}
		} else if (List.class == collectionType) {
			return new ArrayList<>(initialCapacity);
		} else if (SortedSet.class == collectionType) {
			return new TreeSet<>();
		} else {
			return new LinkedHashSet<>(initialCapacity);
		}
	}

	/**
	 * Return whether to always create a new Collection, even if the type of the
	 * passed-in Collection already matches.
	 * <p>
	 * Default is "false"; can be overridden to enforce creation of a new
	 * Collection, for example to convert elements in any case.
	 *
	 * @see #convertElement
	 */
	protected boolean alwaysCreateNewCollection() {
		return false;
	}

	/**
	 * Hook to convert each encountered Collection/array element. The default
	 * implementation simply returns the passed-in element as-is.
	 * <p>
	 * Can be overridden to perform conversion of certain elements, for example
	 * String to Integer if a String array comes in and should be converted to a Set
	 * of Integer objects.
	 * <p>
	 * Only called if actually creating a new Collection! This is by default not the
	 * case if the type of the passed-in Collection already matches. Override
	 * {@link #alwaysCreateNewCollection()} to enforce creating a new Collection in
	 * every case.
	 *
	 * @param element the source element
	 * @return the element to be used in the target Collection
	 * @see #alwaysCreateNewCollection()
	 */
	protected Object convertElement(Object element) {
		return element;
	}

	/**
	 * This implementation returns {@code null} to indicate that there is no
	 * appropriate text representation.
	 */
	@Override
	@Nullable
	public String getAsText() {
		return null;
	}
}
| |
/*
* Copyright 2015 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.domain;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.domain.config.Admin;
import com.thoughtworks.go.helper.GoConfigMother;
import com.thoughtworks.go.helper.StageConfigMother;
import org.apache.commons.collections.map.SingletonMap;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
public class ApprovalTest {
public static final String DEFAULT_GROUP = "defaultGroup";
@Before
public void setUp() throws Exception {
}
@Test
public void shouldNotAssignType() throws Exception {
Approval approval = new Approval();
approval.setConfigAttributes(new SingletonMap(Approval.TYPE, Approval.SUCCESS));
assertThat(approval.getType(), is(Approval.SUCCESS));
approval.setConfigAttributes(new HashMap());
assertThat(approval.getType(), is(Approval.SUCCESS));
approval.setConfigAttributes(new SingletonMap(Approval.TYPE, Approval.MANUAL));
assertThat(approval.getType(), is(Approval.MANUAL));
approval.setConfigAttributes(new HashMap());
assertThat(approval.getType(), is(Approval.MANUAL));
}
@Test
public void shouldValidateApprovalType() throws Exception {
Approval approval = new Approval();
approval.setConfigAttributes(new SingletonMap(Approval.TYPE, "not-manual-or-success"));
assertThat(approval.getType(), is("not-manual-or-success"));
approval.validate(ConfigSaveValidationContext.forChain(new BasicCruiseConfig(), new BasicPipelineConfigs()));
assertThat(approval.errors().firstError(), is("You have defined approval type as 'not-manual-or-success'. Approval can only be of the type 'manual' or 'success'."));
}
@Test
public void shouldFailValidateWhenUsersWithoutOperatePermissionOnGroupAreAuthorizedToApproveStage_WithPipelineConfigSaveValidationContext() {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"), new RoleUser(new CaseInsensitiveString("first")), new RoleUser(new CaseInsensitiveString("second"))), new AdminUser(
new CaseInsensitiveString("admin")));
addUserAndRoleToDefaultGroup(cruiseConfig, "user", "role");
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithUsers(stage, "not-present");
Approval approval = stage.getApproval();
PipelineConfigurationCache.getInstance().onConfigChange(cruiseConfig);
approval.validate(PipelineConfigSaveValidationContext.forChain(true, DEFAULT_GROUP, cruiseConfig, pipeline, stage));
AdminUser user = approval.getAuthConfig().getUsers().get(0);
assertThat(user.errors().isEmpty(), is(false));
assertThat(user.errors().on("name"), is("User \"not-present\" who is not authorized to operate pipeline group can not be authorized to approve stage"));
}
@Test
public void shouldPassValidateWhenNoPermissionAreSetupOnGroupAndUserIsAuthorizedToApproveStage_WithPipelineConfigSaveValidationContext() {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"),
new RoleUser(new CaseInsensitiveString("first")),
new RoleUser(new CaseInsensitiveString("second"))),
new AdminUser(
new CaseInsensitiveString("admin")));
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithUsers(stage, "not-present");
Approval approval = stage.getApproval();
PipelineConfigurationCache.getInstance().onConfigChange(cruiseConfig);
approval.validate(PipelineConfigSaveValidationContext.forChain(true, DEFAULT_GROUP, cruiseConfig, pipeline, stage));
assertNoErrors(approval.getAuthConfig().getUsers().get(0));
}
@Test
public void shouldPassValidateWhenARoleIsAdminOnGroupAndThatRoleIsAuthorizedToApproveStage_WithPipelineConfigSaveValidationContext() {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"),
new RoleUser(new CaseInsensitiveString("first")),
new RoleUser(new CaseInsensitiveString("second"))),
new AdminUser(new CaseInsensitiveString("admin")));
addUserAsOperatorToDefaultGroup(cruiseConfig, "user");
addRoleAsAdminToDefaultGroup(cruiseConfig, "role");
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithRoles(stage, "role");
Approval approval = stage.getApproval();
PipelineConfigurationCache.getInstance().onConfigChange(cruiseConfig);
approval.validate(PipelineConfigSaveValidationContext.forChain(true, DEFAULT_GROUP, cruiseConfig, pipeline, stage));
assertNoErrors(approval.getAuthConfig().getRoles().get(0));
}
@Test
public void shouldReturnDisplayNameForApprovalType() {
Approval approval = Approval.automaticApproval();
assertThat(approval.getDisplayName(), is("On Success"));
approval = Approval.manualApproval();
assertThat(approval.getDisplayName(), is("Manual"));
}
@Test
public void shouldOverwriteExistingUsersWhileSettingNewUsers() {
Approval approval = Approval.automaticApproval();
approval.getAuthConfig().add(new AdminUser(new CaseInsensitiveString("sachin")));
approval.getAuthConfig().add(new AdminRole(new CaseInsensitiveString("admin")));
List names = new ArrayList();
names.add(nameMap("awesome_shilpa"));
names.add(nameMap("youth"));
names.add(nameMap(""));
List roles = new ArrayList();
roles.add(nameMap("role1"));
roles.add(nameMap("role2"));
roles.add(nameMap(""));
approval.setOperatePermissions(names, roles);
assertThat(approval.getAuthConfig().size(), is(4));
assertThat(approval.getAuthConfig(), hasItem((Admin) new AdminUser(new CaseInsensitiveString("awesome_shilpa"))));
assertThat(approval.getAuthConfig(), hasItem((Admin) new AdminUser(new CaseInsensitiveString("youth"))));
assertThat(approval.getAuthConfig(), hasItem((Admin) new AdminRole(new CaseInsensitiveString("role1"))));
assertThat(approval.getAuthConfig(), hasItem((Admin) new AdminRole(new CaseInsensitiveString("role2"))));
}
@Test
public void shouldClearAllPermissions() {
Approval approval = Approval.automaticApproval();
approval.getAuthConfig().add(new AdminUser(new CaseInsensitiveString("sachin")));
approval.getAuthConfig().add(new AdminRole(new CaseInsensitiveString("admin")));
approval.removeOperatePermissions();
assertThat(approval.getAuthConfig().isEmpty(), is(true));
}
@Test
public void shouldClearAllPermissionsWhenTheAttributesAreNull() {
Approval approval = Approval.automaticApproval();
approval.getAuthConfig().add(new AdminUser(new CaseInsensitiveString("sachin")));
approval.getAuthConfig().add(new AdminRole(new CaseInsensitiveString("admin")));
approval.setOperatePermissions(null, null);
assertThat(approval.getAuthConfig().isEmpty(), is(true));
}
@Test
public void validate_shouldNotAllow_UserInApprovalListButNotInOperationList() {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"), new RoleUser(new CaseInsensitiveString("first")), new RoleUser(new CaseInsensitiveString("second"))), new AdminUser(
new CaseInsensitiveString("admin")));
PipelineConfigs group = addUserAndRoleToDefaultGroup(cruiseConfig, "user", "role");
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithUsers(stage, "not-present");
Approval approval = stage.getApproval();
approval.validate(ConfigSaveValidationContext.forChain(cruiseConfig, group, pipeline, stage));
AdminUser user = approval.getAuthConfig().getUsers().get(0);
assertThat(user.errors().isEmpty(), is(false));
assertThat(user.errors().on("name"), is("User \"not-present\" who is not authorized to operate pipeline group can not be authorized to approve stage"));
}
@Test
public void validate_shouldNotAllowRoleInApprovalListButNotInOperationList() throws Exception {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"), new RoleUser(new CaseInsensitiveString("first")), new RoleUser(new CaseInsensitiveString("second"))), new AdminUser(
new CaseInsensitiveString("admin")));
PipelineConfigs group = addUserAndRoleToDefaultGroup(cruiseConfig, "user", "role");
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithRoles(stage, "not-present");
Approval approval = stage.getApproval();
approval.validate(ConfigSaveValidationContext.forChain(cruiseConfig, group, pipeline, stage));
AdminRole user = approval.getAuthConfig().getRoles().get(0);
assertThat(user.errors().isEmpty(), is(false));
assertThat(user.errors().on("name"), is("Role \"not-present\" who is not authorized to operate pipeline group can not be authorized to approve stage"));
}
@Test
public void validate_shouldAllowUserWhoseRoleHasOperatePermission() throws Exception {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"), new RoleUser(new CaseInsensitiveString("first")), new RoleUser(new CaseInsensitiveString("second"))), new AdminUser(
new CaseInsensitiveString("admin")));
PipelineConfigs group = addUserAndRoleToDefaultGroup(cruiseConfig, "user", "role");
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithUsers(stage, "first");
Approval approval = stage.getApproval();
approval.validate(ConfigSaveValidationContext.forChain(cruiseConfig, group, pipeline, stage));
assertNoErrors(approval.getAuthConfig().getUsers().get(0));
}
@Test
public void validate_shouldAllowUserWhoIsDefinedInGroup() throws Exception {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"), new RoleUser(new CaseInsensitiveString("first")), new RoleUser(new CaseInsensitiveString("second"))), new AdminUser(
new CaseInsensitiveString("admin")));
PipelineConfigs group = addUserAndRoleToDefaultGroup(cruiseConfig, "user", "role");
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithUsers(stage, "user");
Approval approval = stage.getApproval();
approval.validate(ConfigSaveValidationContext.forChain(cruiseConfig, group, pipeline, stage));
assertNoErrors(approval.getAuthConfig().getUsers().get(0));
}
@Test
public void validate_shouldAllowUserWhenSecurityIsNotDefinedInGroup() throws Exception {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"), new RoleUser(new CaseInsensitiveString("first")), new RoleUser(new CaseInsensitiveString("second"))), new AdminUser(
new CaseInsensitiveString("admin")));
PipelineConfigs group = cruiseConfig.findGroup(DEFAULT_GROUP);
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithUsers(stage, "user");
Approval approval = stage.getApproval();
approval.validate(ConfigSaveValidationContext.forChain(cruiseConfig, group, pipeline, stage));
assertNoErrors(approval.getAuthConfig().getUsers().get(0));
}
@Test
public void validate_shouldAllowAdminToOperateOnAStage() throws Exception {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"), new RoleUser(new CaseInsensitiveString("first")), new RoleUser(new CaseInsensitiveString("second"))), new AdminUser(
new CaseInsensitiveString("admin")));
PipelineConfigs group = addUserAndRoleToDefaultGroup(cruiseConfig, "user", "role");
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithUsers(stage, "admin");
Approval approval = stage.getApproval();
approval.validate(ConfigSaveValidationContext.forChain(cruiseConfig, group, pipeline, stage));
assertNoErrors(approval.getAuthConfig().getUsers().get(0));
}
@Test
public void shouldShowBugWhichAllowsAUserWithoutOperatePermissionToOperateAStage() throws Exception {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"),
new RoleUser(new CaseInsensitiveString("first")),
new RoleUser(new CaseInsensitiveString("second"))),
new AdminUser(new CaseInsensitiveString("admin")));
addRoleAsAdminToDefaultGroup(cruiseConfig, "role");
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithUsers(stage, "first", "some-other-user-who-is-not-operate-authorized");
Approval approval = stage.getApproval();
PipelineConfigurationCache.getInstance().onConfigChange(cruiseConfig);
approval.validate(PipelineConfigSaveValidationContext.forChain(true, DEFAULT_GROUP, cruiseConfig, pipeline, stage));
assertNoErrors(approval.getAuthConfig().getUsers().get(0));
/* https://github.com/gocd/gocd/pull/1779#issuecomment-170161521 */
assertNoErrors(approval.getAuthConfig().getUsers().get(1));
}
@Test
public void validate_shouldNotTryAndValidateWhenWithinTemplate() throws Exception {
CruiseConfig cruiseConfig = cruiseConfigWithSecurity(
new Role(new CaseInsensitiveString("role"), new RoleUser(new CaseInsensitiveString("first")), new RoleUser(new CaseInsensitiveString("second"))), new AdminUser(
new CaseInsensitiveString("admin")));
PipelineConfigs group = addUserAndRoleToDefaultGroup(cruiseConfig, "user", "role");
PipelineConfig pipeline = cruiseConfig.find(DEFAULT_GROUP, 0);
StageConfig stage = pipeline.get(0);
StageConfigMother.addApprovalWithUsers(stage, "not-present");
Approval approval = stage.getApproval();
approval.validate(ConfigSaveValidationContext.forChain(cruiseConfig, new TemplatesConfig(), stage));
assertNoErrors(approval.getAuthConfig().getUsers().get(0));
}
@Test
public void shouldValidateTree(){
Approval approval = new Approval(new AuthConfig(new AdminRole(new CaseInsensitiveString("role"))));
BasicCruiseConfig cruiseConfig = GoConfigMother.defaultCruiseConfig();
cruiseConfig.server().security().adminsConfig().addRole(new AdminRole(new CaseInsensitiveString("super-admin")));
PipelineConfig pipelineConfig = new PipelineConfig(new CaseInsensitiveString("p1"), new MaterialConfigs());
cruiseConfig.addPipeline("g1", pipelineConfig);
PipelineConfigurationCache.getInstance().onConfigChange(cruiseConfig);
assertThat(approval.validateTree(PipelineConfigSaveValidationContext.forChain(true, "g1", cruiseConfig, pipelineConfig)), is(false));
assertThat(approval.getAuthConfig().errors().isEmpty(), is(false));
assertThat(approval.getAuthConfig().errors().firstError(), is("Role \"role\" does not exist."));
}
private CruiseConfig cruiseConfigWithSecurity(Role roleDefinition, Admin admins) {
CruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("pipeline");
SecurityConfig securityConfig = cruiseConfig.server().security();
securityConfig.modifyPasswordFile(new PasswordFileConfig("foo.bar"));
securityConfig.addRole(roleDefinition);
securityConfig.adminsConfig().add(admins);
return cruiseConfig;
}
private PipelineConfigs addUserAndRoleToDefaultGroup(CruiseConfig cruiseConfig, final String user, final String role) {
PipelineConfigs group = cruiseConfig.findGroup(DEFAULT_GROUP);
addUserAsOperatorToDefaultGroup(cruiseConfig, user);
addRoleAsOperatorToDefaultGroup(cruiseConfig, role);
return group;
}
private void addRoleAsOperatorToDefaultGroup(CruiseConfig goConfig, String role) {
PipelineConfigs group = goConfig.findGroup(DEFAULT_GROUP);
group.getAuthorization().getOperationConfig().add(new AdminRole(new CaseInsensitiveString(role)));
}
private PipelineConfigs addRoleAsAdminToDefaultGroup(CruiseConfig cruiseConfig, String role) {
PipelineConfigs group = cruiseConfig.findGroup(DEFAULT_GROUP);
group.getAuthorization().getAdminsConfig().add(new AdminRole(new CaseInsensitiveString(role)));
return group;
}
private PipelineConfigs addUserAsOperatorToDefaultGroup(CruiseConfig cruiseConfig, String user) {
PipelineConfigs group = cruiseConfig.findGroup(DEFAULT_GROUP);
group.getAuthorization().getOperationConfig().add(new AdminUser(new CaseInsensitiveString(user)));
return group;
}
private HashMap nameMap(final String name) {
HashMap nameMap = new HashMap();
nameMap.put("name", name);
return nameMap;
}
private void assertNoErrors(Admin userOrRole) {
assertThat(userOrRole.errors().getAll().toString(), userOrRole.errors().isEmpty(), is(true));
}
}
| |
package pl.softmate.xsd.dbunit.ant;
import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
import static java.nio.file.StandardOpenOption.WRITE;
import static java.sql.Types.BIGINT;
import static java.sql.Types.BINARY;
import static java.sql.Types.BIT;
import static java.sql.Types.BLOB;
import static java.sql.Types.BOOLEAN;
import static java.sql.Types.CHAR;
import static java.sql.Types.CLOB;
import static java.sql.Types.DATE;
import static java.sql.Types.DECIMAL;
import static java.sql.Types.DOUBLE;
import static java.sql.Types.FLOAT;
import static java.sql.Types.INTEGER;
import static java.sql.Types.LONGNVARCHAR;
import static java.sql.Types.LONGVARBINARY;
import static java.sql.Types.LONGVARCHAR;
import static java.sql.Types.NCHAR;
import static java.sql.Types.NCLOB;
import static java.sql.Types.NUMERIC;
import static java.sql.Types.NVARCHAR;
import static java.sql.Types.REAL;
import static java.sql.Types.REF;
import static java.sql.Types.SMALLINT;
import static java.sql.Types.TIME;
import static java.sql.Types.TIMESTAMP;
import static java.sql.Types.TIMESTAMP_WITH_TIMEZONE;
import static java.sql.Types.TIME_WITH_TIMEZONE;
import static java.sql.Types.TINYINT;
import static java.sql.Types.VARBINARY;
import static java.sql.Types.VARCHAR;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;
/**
 * Ant task that introspects a database schema over JDBC and generates two
 * DbUnit flat-XML dataset schemas into {@link #outputFolder}:
 * {@code schema.xsd} (attribute "use" driven by column nullability) and
 * {@code updateSchema.xsd} (only primary-key attributes required).
 */
public class DbUnitSchemaGenerator extends Task {

    private String schemaName;
    private String outputFolder;
    private String driverName;
    private String url;
    private String user;
    private String password;

    public String getSchemaName() {
        return schemaName;
    }

    public void setSchemaName(String schemaName) {
        this.schemaName = schemaName;
    }

    public String getOutputFolder() {
        return outputFolder;
    }

    public void setOutputFolder(String outputFolder) {
        this.outputFolder = outputFolder;
    }

    public String getDriverName() {
        return driverName;
    }

    public void setDriverName(String driverName) {
        this.driverName = driverName;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getUser() {
        return user;
    }

    public void setUser(String user) {
        this.user = user;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    /**
     * Connects to the database, reads table/column/primary-key metadata and
     * writes both XSD files.
     *
     * @throws BuildException wrapping any driver-loading, JDBC or I/O failure
     */
    @Override
    public void execute() throws BuildException {
        // Load and instantiate the JDBC driver. getDeclaredConstructor()
        // replaces the deprecated Class.newInstance().
        try {
            Class.forName(driverName).getDeclaredConstructor().newInstance();
        }
        catch (Exception e) {
            throw new BuildException(e);
        }
        try (Connection c = DriverManager.getConnection(url, user, password)) {
            // MariaDB reports VARCHAR length in COLUMN_SIZE; other drivers in
            // CHAR_OCTET_LENGTH.
            final String VARCHAR_SIZE_COLUMN;
            final String DRIVER_NAME = c.getMetaData().getDriverName().toLowerCase();
            if (DRIVER_NAME.contains("mariadb")) {
                VARCHAR_SIZE_COLUMN = "COLUMN_SIZE";
            }
            else {
                VARCHAR_SIZE_COLUMN = "CHAR_OCTET_LENGTH";
            }

            // Table names, sorted so the generated files are stable.
            Set<String> tables = new TreeSet<>();
            try (ResultSet rs = c.getMetaData().getTables(null, schemaName, null, new String[] { "TABLE", "BASE TABLE" })) {
                while (rs.next()) {
                    tables.add(rs.getString("TABLE_NAME"));
                }
            }

            // Primary-key column names per table.
            Map<String, Set<String>> table2pk = new HashMap<>();
            for (String table : tables) {
                Set<String> pks = new HashSet<>();
                table2pk.put(table, pks);
                try (ResultSet rs = c.getMetaData().getPrimaryKeys(null, schemaName, table)) {
                    while (rs.next()) {
                        String tableName = rs.getString("TABLE_NAME");
                        // Guard against drivers returning keys of other tables.
                        if (table.equalsIgnoreCase(tableName)) {
                            pks.add(rs.getString("COLUMN_NAME"));
                        }
                    }
                }
            }

            // Columns per table (insertion order preserved) and the set of
            // distinct XSD simple types that must be declared.
            Map<String, List<XsdAttribute>> table2column = new LinkedHashMap<>();
            Set<XsdType> types = new TreeSet<>();
            for (String table : tables) {
                List<XsdAttribute> columns = new ArrayList<>();
                table2column.put(table, columns);
                try (ResultSet rs = c.getMetaData().getColumns(null, schemaName, table, null)) {
                    while (rs.next()) {
                        String tableName = rs.getString("TABLE_NAME");
                        if (table.equalsIgnoreCase(tableName)) {
                            final int dataType = rs.getInt("DATA_TYPE");
                            String columnName = rs.getString("COLUMN_NAME");
                            int varcharSize = rs.getInt(VARCHAR_SIZE_COLUMN);
                            String isNullable = rs.getString("IS_NULLABLE");
                            int columnSize = rs.getInt("COLUMN_SIZE");
                            int decimalDigits = rs.getInt("DECIMAL_DIGITS");
                            XsdAttribute attr = new XsdAttribute();
                            attr.name = columnName;
                            // "no".equalsIgnoreCase(null) is false, so no
                            // separate null check is needed.
                            attr.required = "no".equalsIgnoreCase(isNullable);
                            attr.type = new XsdType();
                            attr.pk = table2pk.get(tableName).contains(columnName);
                            mapToXsdType(dataType, varcharSize, columnSize, decimalDigits, attr);
                            columns.add(attr);
                            types.add(attr.type);
                        }
                    }
                }
            }
            generateFile(table2column, types, false);
            generateFile(table2column, types, true);
        }
        catch (Exception e) {
            throw new BuildException(e);
        }
    }

    /**
     * Writes one XSD file describing the dataset.
     *
     * @param table2column columns per table, in discovery order
     * @param types        distinct derived simple types to declare up front
     * @param update       true for updateSchema.xsd (only PKs required),
     *                     false for schema.xsd (nullability-driven)
     */
    private void generateFile(Map<String, List<XsdAttribute>> table2column, Set<XsdType> types, boolean update) throws IOException {
        try (BufferedWriter bw = Files.newBufferedWriter(Paths.get(outputFolder, update ? "updateSchema.xsd" : "schema.xsd"), Charset.forName("utf8"), CREATE, TRUNCATE_EXISTING, WRITE)) {
            // File prolog.
            bw.append("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
            bw.newLine();
            bw.append("<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">");
            bw.newLine();
            // Declare derived simple types for decimal, char, varchar, clob
            // and timestamp, grouped by base kind. The iteration relies on
            // `types` being a set, so each declaration is emitted once.
            for (XsdType xsdType : types) {
                if (xsdType.base == XsdTypeNameBase.DECIMAL) {
                    appendDecimal(bw, xsdType);
                }
            }
            for (XsdType xsdType : types) {
                if (xsdType.base == XsdTypeNameBase.CHAR) {
                    appendChar(bw, xsdType);
                }
            }
            for (XsdType xsdType : types) {
                if (xsdType.base == XsdTypeNameBase.VARCHAR) {
                    appendVarchar(bw, xsdType);
                }
            }
            for (XsdType xsdType : types) {
                if (xsdType.base == XsdTypeNameBase.CLOB) {
                    appendClob(bw);
                }
            }
            for (XsdType xsdType : types) {
                if (xsdType.base == XsdTypeNameBase.TIMESTAMP) {
                    appendTimestamp(bw);
                }
            }
            // The <dataset> root may contain any sequence of table rows.
            bw.append("\t<xs:element name=\"dataset\">");
            bw.newLine();
            bw.append("\t\t<xs:complexType>");
            bw.newLine();
            bw.append("\t\t\t<xs:choice minOccurs=\"0\" maxOccurs=\"unbounded\">");
            bw.newLine();
            for (Entry<String, List<XsdAttribute>> entry : table2column.entrySet()) {
                if (!entry.getValue().isEmpty()) {
                    bw.append("\t\t\t\t<xs:element ref=\"" + entry.getKey() + "\" minOccurs=\"0\" maxOccurs=\"unbounded\" />");
                    bw.newLine();
                }
            }
            bw.append("\t\t\t</xs:choice>");
            bw.newLine();
            bw.append("\t\t</xs:complexType>");
            bw.newLine();
            bw.append("\t</xs:element>");
            bw.newLine();
            // One element declaration per table, one attribute per column.
            for (Entry<String, List<XsdAttribute>> entry : table2column.entrySet()) {
                if (!entry.getValue().isEmpty()) {
                    bw.append("\t<xs:element name=\"" + entry.getKey() + "\">");
                    bw.newLine();
                    bw.append("\t\t<xs:complexType>");
                    bw.newLine();
                    for (XsdAttribute attr : entry.getValue()) {
                        String required = (((update && attr.pk) || (!update && attr.required)) ? "required" : "optional");
                        bw.append("\t\t\t\t<xs:attribute name=\"" + attr.name + "\" type=\"" + deriveType(attr.type) + "\" use=\"" + required + "\" />");
                        bw.newLine();
                    }
                    bw.append("\t\t</xs:complexType>");
                    bw.newLine();
                    bw.append("\t</xs:element>");
                    bw.newLine();
                }
            }
            bw.append("</xs:schema>");
            bw.newLine();
        }
    }

    /** Declares the unbounded-length string type used for CLOB columns. */
    private void appendClob(BufferedWriter bw) throws IOException {
        bw.append("\t<xs:simpleType name='varchar_max'>");
        bw.newLine();
        bw.append("\t\t<xs:restriction base='xs:string'>");
        bw.newLine();
        bw.append("\t\t\t<xs:maxLength value='2147483647'/>");
        bw.newLine();
        bw.append("\t\t</xs:restriction>");
        bw.newLine();
        bw.append("\t</xs:simpleType>");
        bw.newLine();
    }

    /** Declares the timestamp type (a date-prefixed string pattern). */
    private void appendTimestamp(BufferedWriter bw) throws IOException {
        bw.append("\t<xs:simpleType name='dbUnitTimestamp'>");
        bw.newLine();
        bw.append("\t\t<xs:restriction base='xs:string'>");
        bw.newLine();
        bw.append("\t\t\t<xs:pattern value='(19|20)\\d\\d-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])' />");
        bw.newLine();
        bw.append("\t\t</xs:restriction>");
        bw.newLine();
        bw.append("\t</xs:simpleType>");
        bw.newLine();
    }

    /** Declares a length-bounded string type for a VARCHAR(size) column. */
    private void appendVarchar(BufferedWriter bw, XsdType xsdType) throws IOException {
        bw.append("\t<xs:simpleType name='varchar_" + xsdType.size + "'>");
        bw.newLine();
        bw.append("\t\t<xs:restriction base='xs:string'>");
        bw.newLine();
        bw.append("\t\t\t<xs:maxLength value='" + xsdType.size + "'/>");
        bw.newLine();
        bw.append("\t\t</xs:restriction>");
        bw.newLine();
        bw.append("\t</xs:simpleType>");
        bw.newLine();
    }

    /** Declares a fixed-length string type for a CHAR(size) column. */
    private void appendChar(BufferedWriter bw, XsdType xsdType) throws IOException {
        bw.append("\t<xs:simpleType name='char_" + xsdType.size + "'>");
        bw.newLine();
        bw.append("\t\t<xs:restriction base='xs:string'>");
        bw.newLine();
        bw.append("\t\t\t<xs:length value='" + xsdType.size + "' fixed='true'/>");
        bw.newLine();
        bw.append("\t\t</xs:restriction>");
        bw.newLine();
        bw.append("\t</xs:simpleType>");
        bw.newLine();
    }

    /** Declares a decimal type with totalDigits=size and fractionDigits=ext. */
    private void appendDecimal(BufferedWriter bw, XsdType xsdType) throws IOException {
        bw.append("\t<xs:simpleType name='decimal_" + xsdType.size + "_" + xsdType.ext + "'>");
        bw.newLine();
        bw.append("\t\t<xs:restriction base='xs:decimal'>");
        bw.newLine();
        bw.append("\t\t\t<xs:totalDigits value='" + xsdType.size + "'/>");
        bw.newLine();
        bw.append("\t\t\t<xs:fractionDigits value='" + xsdType.ext + "'/>");
        bw.newLine();
        bw.append("\t\t</xs:restriction>");
        bw.newLine();
        bw.append("\t</xs:simpleType>");
        bw.newLine();
    }

    /**
     * Maps a java.sql.Types code to the XSD type model stored on {@code a}.
     * A JDBC type not listed here leaves {@code a.type.base} null, which
     * would NPE later in XsdType.compareTo — add new cases as needed.
     */
    private void mapToXsdType(final int dataType, int varcharSize, int columnSize, int decimalDigits, XsdAttribute a) {
        switch (dataType) {
            case BOOLEAN:
            case BIT:
                a.type.base = XsdTypeNameBase.BOOLEAN;
                break;
            case TINYINT:
                a.type.base = XsdTypeNameBase.BYTE;
                break;
            case SMALLINT:
                a.type.base = XsdTypeNameBase.SHORT;
                break;
            case INTEGER:
                a.type.base = XsdTypeNameBase.INT;
                break;
            case BIGINT:
                a.type.base = XsdTypeNameBase.LONG;
                break;
            case FLOAT:
                a.type.base = XsdTypeNameBase.FLOAT;
                break;
            case REAL:
            case DOUBLE:
                a.type.base = XsdTypeNameBase.DOUBLE;
                break;
            case NUMERIC:
            case DECIMAL:
                a.type.base = XsdTypeNameBase.DECIMAL;
                a.type.size = columnSize;
                a.type.ext = decimalDigits;
                break;
            case CHAR:
            case NCHAR:
                a.type.base = XsdTypeNameBase.CHAR;
                a.type.size = varcharSize;
                break;
            case VARCHAR:
            case NVARCHAR:
                a.type.base = XsdTypeNameBase.VARCHAR;
                a.type.size = varcharSize;
                // VARCHAR(MAX) is effectively unbounded: treat as CLOB.
                if (a.type.size == Integer.MAX_VALUE) {
                    a.type.base = XsdTypeNameBase.CLOB;
                    a.type.size = 0;
                }
                break;
            case LONGVARCHAR:
            case LONGNVARCHAR:
            case CLOB:
            case NCLOB:
                a.type.base = XsdTypeNameBase.CLOB;
                break;
            case DATE:
                a.type.base = XsdTypeNameBase.DATE;
                break;
            case TIME:
            case TIME_WITH_TIMEZONE:
                a.type.base = XsdTypeNameBase.TIME;
                break;
            case TIMESTAMP:
            case TIMESTAMP_WITH_TIMEZONE:
                a.type.base = XsdTypeNameBase.TIMESTAMP;
                break;
            case BINARY:
            case VARBINARY:
            case LONGVARBINARY:
            case BLOB:
            case REF:
                a.type.base = XsdTypeNameBase.BLOB;
                break;
        }
    }

    /**
     * Returns the XSD type name used in attribute declarations. Returns null
     * only for an unmapped base, which would surface as the literal string
     * "null" in the generated file.
     */
    private String deriveType(XsdType type) {
        switch (type.base) {
            case BLOB:
                return "xs:base64Binary";
            case BOOLEAN:
                return "xs:boolean";
            case BYTE:
                return "xs:byte";
            case CHAR:
                return "char_" + type.size;
            case CLOB:
                return "varchar_max";
            case DATE:
                return "xs:date";
            case DECIMAL:
                return "decimal_" + type.size + "_" + type.ext;
            case DOUBLE:
                return "xs:double";
            case FLOAT:
                return "xs:float";
            case INT:
                return "xs:int";
            case LONG:
                return "xs:long";
            case SHORT:
                return "xs:short";
            case TIME:
                return "xs:time";
            case TIMESTAMP:
                return "dbUnitTimestamp";
            case VARCHAR:
                return "varchar_" + type.size;
        }
        return null;
    }

    /** One column of a table, as it appears in the generated XSD. */
    static class XsdAttribute {
        String name;      // column name
        boolean required; // column is NOT NULL
        boolean pk;       // column is part of the primary key
        XsdType type;
    }

    /** Base kinds of XSD types the generator knows how to declare. */
    enum XsdTypeNameBase {
        DECIMAL, CHAR, VARCHAR, BOOLEAN, BYTE, SHORT, INT, LONG, CLOB, BLOB, DATE, TIME, TIMESTAMP, FLOAT, DOUBLE;
    }

    /**
     * A (base, size, ext) triple identifying one derived simple type.
     * Comparable so declarations are emitted in a deterministic order;
     * equals/hashCode deduplicate identical types across columns.
     */
    static class XsdType implements Comparable<XsdType> {
        XsdTypeNameBase base;
        int size; // length for CHAR/VARCHAR, totalDigits for DECIMAL
        int ext;  // fractionDigits for DECIMAL, unused otherwise

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((base == null) ? 0 : base.hashCode());
            result = prime * result + ext;
            result = prime * result + size;
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            XsdType other = (XsdType) obj;
            if (base != other.base)
                return false;
            if (ext != other.ext)
                return false;
            if (size != other.size)
                return false;
            return true;
        }

        @Override
        public int compareTo(XsdType o) {
            int r = base.compareTo(o.base);
            if (r == 0) {
                r = Integer.compare(size, o.size);
            }
            if (r == 0) {
                r = Integer.compare(ext, o.ext);
            }
            return r;
        }
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.synapse.v2019_06_01_preview;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Specification of a metric exposed by an operation: its MDM source
 * (namespace/account), display metadata, unit, aggregation type, dimensions
 * and filter pattern.
 */
public class OperationMetaMetricSpecification {
    /**
     * The source MDM (Multi-Dimensional Metrics) namespace.
     */
    @JsonProperty(value = "sourceMdmNamespace")
    private String sourceMdmNamespace;

    /**
     * Metric display name.
     */
    @JsonProperty(value = "displayName")
    private String displayName;

    /**
     * Metric unique name.
     */
    @JsonProperty(value = "name")
    private String name;

    /**
     * Metric aggregation type.
     */
    @JsonProperty(value = "aggregationType")
    private String aggregationType;

    /**
     * Metric description shown alongside the display name.
     */
    @JsonProperty(value = "displayDescription")
    private String displayDescription;

    /**
     * The source MDM account.
     */
    @JsonProperty(value = "sourceMdmAccount")
    private String sourceMdmAccount;

    /**
     * Whether the regional MDM account is enabled.
     */
    @JsonProperty(value = "enableRegionalMdmAccount")
    private Boolean enableRegionalMdmAccount;

    /**
     * Metric units.
     */
    @JsonProperty(value = "unit")
    private String unit;

    /**
     * Metric dimensions.
     */
    @JsonProperty(value = "dimensions")
    private List<OperationMetaMetricDimensionSpecification> dimensions;

    /**
     * Whether the metric supports instance-level aggregation.
     */
    @JsonProperty(value = "supportsInstanceLevelAggregation")
    private Boolean supportsInstanceLevelAggregation;

    /**
     * Metric filter pattern.
     */
    @JsonProperty(value = "metricFilterPattern")
    private String metricFilterPattern;
/**
* Get the source MDM namespace.
*
* @return the sourceMdmNamespace value
*/
public String sourceMdmNamespace() {
return this.sourceMdmNamespace;
}
/**
* Set the source MDM namespace.
*
* @param sourceMdmNamespace the sourceMdmNamespace value to set
* @return the OperationMetaMetricSpecification object itself.
*/
public OperationMetaMetricSpecification withSourceMdmNamespace(String sourceMdmNamespace) {
this.sourceMdmNamespace = sourceMdmNamespace;
return this;
}
/**
* Get metric display name.
*
* @return the displayName value
*/
public String displayName() {
return this.displayName;
}
/**
* Set metric display name.
*
* @param displayName the displayName value to set
* @return the OperationMetaMetricSpecification object itself.
*/
public OperationMetaMetricSpecification withDisplayName(String displayName) {
this.displayName = displayName;
return this;
}
/**
* Get metric unique name.
*
* @return the name value
*/
public String name() {
return this.name;
}
/**
* Set metric unique name.
*
* @param name the name value to set
* @return the OperationMetaMetricSpecification object itself.
*/
public OperationMetaMetricSpecification withName(String name) {
this.name = name;
return this;
}
/**
* Get metric aggregation type.
*
* @return the aggregationType value
*/
public String aggregationType() {
return this.aggregationType;
}
/**
 * Assigns the metric's aggregation type.
 *
 * @param aggregationType the value to assign
 * @return this {@code OperationMetaMetricSpecification}, for fluent chaining
 */
public OperationMetaMetricSpecification withAggregationType(final String aggregationType) {
    this.aggregationType = aggregationType;
    return this;
}
/**
 * Returns the metric's human-readable description.
 *
 * @return the current displayDescription value
 */
public String displayDescription() {
    return displayDescription;
}
/**
 * Assigns the metric's human-readable description.
 *
 * @param displayDescription the value to assign
 * @return this {@code OperationMetaMetricSpecification}, for fluent chaining
 */
public OperationMetaMetricSpecification withDisplayDescription(final String displayDescription) {
    this.displayDescription = displayDescription;
    return this;
}
/**
 * Returns the source MDM account.
 *
 * @return the current sourceMdmAccount value
 */
public String sourceMdmAccount() {
    return sourceMdmAccount;
}
/**
 * Assigns the source MDM account.
 *
 * @param sourceMdmAccount the value to assign
 * @return this {@code OperationMetaMetricSpecification}, for fluent chaining
 */
public OperationMetaMetricSpecification withSourceMdmAccount(final String sourceMdmAccount) {
    this.sourceMdmAccount = sourceMdmAccount;
    return this;
}
/**
 * Returns whether the regional MDM account is enabled.
 *
 * @return the current enableRegionalMdmAccount value
 */
public Boolean enableRegionalMdmAccount() {
    return enableRegionalMdmAccount;
}
/**
 * Assigns whether the regional MDM account is enabled.
 *
 * @param enableRegionalMdmAccount the value to assign
 * @return this {@code OperationMetaMetricSpecification}, for fluent chaining
 */
public OperationMetaMetricSpecification withEnableRegionalMdmAccount(final Boolean enableRegionalMdmAccount) {
    this.enableRegionalMdmAccount = enableRegionalMdmAccount;
    return this;
}
/**
 * Returns the metric's unit of measure.
 *
 * @return the current unit value
 */
public String unit() {
    return unit;
}
/**
 * Assigns the metric's unit of measure.
 *
 * @param unit the value to assign
 * @return this {@code OperationMetaMetricSpecification}, for fluent chaining
 */
public OperationMetaMetricSpecification withUnit(final String unit) {
    this.unit = unit;
    return this;
}
/**
 * Returns the metric's dimension specifications.
 *
 * @return the current dimensions value
 */
public List<OperationMetaMetricDimensionSpecification> dimensions() {
    return dimensions;
}
/**
 * Assigns the metric's dimension specifications.
 *
 * @param dimensions the value to assign
 * @return this {@code OperationMetaMetricSpecification}, for fluent chaining
 */
public OperationMetaMetricSpecification withDimensions(final List<OperationMetaMetricDimensionSpecification> dimensions) {
    this.dimensions = dimensions;
    return this;
}
/**
 * Returns whether the metric supports instance-level aggregation.
 *
 * @return the current supportsInstanceLevelAggregation value
 */
public Boolean supportsInstanceLevelAggregation() {
    return supportsInstanceLevelAggregation;
}
/**
 * Assigns whether the metric supports instance-level aggregation.
 *
 * @param supportsInstanceLevelAggregation the value to assign
 * @return this {@code OperationMetaMetricSpecification}, for fluent chaining
 */
public OperationMetaMetricSpecification withSupportsInstanceLevelAggregation(final Boolean supportsInstanceLevelAggregation) {
    this.supportsInstanceLevelAggregation = supportsInstanceLevelAggregation;
    return this;
}
/**
 * Returns the metric filter pattern.
 *
 * @return the current metricFilterPattern value
 */
public String metricFilterPattern() {
    return metricFilterPattern;
}
/**
 * Assigns the metric filter pattern.
 *
 * @param metricFilterPattern the value to assign
 * @return this {@code OperationMetaMetricSpecification}, for fluent chaining
 */
public OperationMetaMetricSpecification withMetricFilterPattern(final String metricFilterPattern) {
    this.metricFilterPattern = metricFilterPattern;
    return this;
}
}
| |
package com.planet_ink.coffee_mud.Common;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.StdLibrary;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.XMLLibrary.XMLTag;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2005-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Default implementation of {@link CoffeeTableRow}: one statistics row covering
 * the time window [startTime, endTime].  It keeps per-tag event counters (each a
 * long[STAT_TOTAL]) plus aggregate online-player numbers, and serializes to and
 * from an XML fragment via {@link #data()} / {@link #populate(long, long, String)}.
 *
 * Fixes in this revision: StringBuffer -> StringBuilder, entrySet() iteration
 * instead of keySet()+get(), stat increment moved inside the stats lock, and the
 * deprecated Class.newInstance() replaced with getDeclaredConstructor().newInstance().
 */
public class DefaultCoffeeTableRow implements CoffeeTableRow
{
	@Override
	public String ID()
	{
		return "DefaultCoffeeTableRow";
	}

	@Override
	public String name()
	{
		return ID();
	}

	@Override
	public int compareTo(final CMObject o)
	{
		return CMClass.classID(this).compareToIgnoreCase(CMClass.classID(o));
	}

	/** Per-tag counters; each value is a long[STAT_TOTAL] of event counts, guarded by synchronized(stats). */
	public Map<String, long[]> stats = new SHashtable<String, long[]>();
	/** Highest number of simultaneous local players observed during this window. */
	public long highestOnline = 0;
	/** Sum of player counts over all samples (numerator of the online average). */
	public long numberOnlineTotal = 0;
	/** Number of samples taken (denominator of the online average). */
	public long numberOnlineCounter = 0;
	public long startTime = 0;
	public long endTime = 0;

	@Override
	public void setStartTime(final long time)
	{
		startTime = time;
	}

	@Override
	public void setEndTime(final long time)
	{
		endTime = time;
	}

	@Override
	public long startTime()
	{
		return startTime;
	}

	@Override
	public long endTime()
	{
		return endTime;
	}

	@Override
	public long highestOnline()
	{
		return highestOnline;
	}

	@Override
	public long numberOnlineTotal()
	{
		return numberOnlineTotal;
	}

	@Override
	public long numberOnlineCounter()
	{
		return numberOnlineCounter;
	}

	/**
	 * Serializes this row to an XML fragment (HIGH, NUMONLINE, NUMCOUNT tags plus
	 * a STATS block with one tag per counter key).
	 *
	 * @return the XML fragment, or "" if the XML library or stats map is unavailable
	 */
	@Override
	public String data()
	{
		// StringBuilder: this method builds the buffer single-threaded, so the
		// synchronized legacy StringBuffer buys nothing.
		final StringBuilder data=new StringBuilder("");
		final XMLLibrary xml=CMLib.xml();
		if(xml != null)
		{
			data.append(xml.convertXMLtoTag("HIGH",highestOnline));
			data.append(xml.convertXMLtoTag("NUMONLINE",numberOnlineTotal));
			data.append(xml.convertXMLtoTag("NUMCOUNT",numberOnlineCounter));
			data.append("<STATS>");
			final Map<String,long[]> stats=this.stats;
			if(stats == null)
				return "";
			// entrySet() avoids a second hash lookup per key (was keySet()+get()).
			for(final Map.Entry<String,long[]> entry : stats.entrySet())
				data.append(xml.convertXMLtoTag(entry.getKey(),CMParms.toListString(entry.getValue())));
			data.append("</STATS>");
		}
		return data.toString();
	}

	/**
	 * Increments the given stat column for the given pre-fixed tag, creating the
	 * counter row on first use.
	 *
	 * @param s the tag key (already run through tagFix/prefixing)
	 * @param type the STAT_* column to bump
	 */
	@Override
	public void bumpVal(final String s, final int type)
	{
		synchronized(stats)
		{
			long[] stat=stats.get(s);
			if(stat==null)
			{
				stat=new long[STAT_TOTAL];
				stats.put(s,stat);
			}
			// Incrementing while still holding the lock prevents lost updates;
			// previously the ++ happened outside the synchronized block.
			stat[type]++;
		}
	}

	/**
	 * Adds into tot[] the column totals of every stat whose key starts with the
	 * given (tag-fixed) code; "*" acts as a wildcard matching all "C" (class) keys.
	 *
	 * @param code the key prefix to match (will be tag-fixed)
	 * @param tot accumulator array, one slot per STAT_* column
	 */
	@Override
	public void totalUp(String code, final long[] tot)
	{
		code=tagFix(code);
		for(final Map.Entry<String,long[]> entry : stats.entrySet())
		{
			final String s=entry.getKey();
			if(s.startsWith(code)
			||(s.startsWith("C")&&code.startsWith("*")))
			{
				final long[] theseStats=entry.getValue();
				for(int t=0;t<theseStats.length;t++)
					tot[t]+=theseStats[t];
			}
		}
	}

	/**
	 * Normalizes a tag for use as an XML tag / map key: trimmed, spaces to
	 * underscores, upper-cased.
	 */
	@Override
	public String tagFix(final String s)
	{
		return s.trim().replaceAll(" ","_").toUpperCase();
	}

	/**
	 * Bumps the stats appropriate to the given object: for player MOBs their area,
	 * class, race, level, gender, faith, clans and group sizes; for Abilities,
	 * Quests, Socials and Commands a single prefixed tag.  The special
	 * STAT_SPECIAL_NUMONLINE type instead samples the current online count.
	 */
	@Override
	public void bumpVal(final CMObject E, final int type)
	{
		// NPC activity is not tracked here, only players.
		if((E instanceof MOB)&&(((MOB)E).isMonster()))
			return;
		if(type==STAT_SPECIAL_NUMONLINE)
		{
			int ct=0;
			for(final Session S : CMLib.sessions().localOnlineIterable())
			{
				if(S!=null)
					ct++;
			}
			numberOnlineCounter++;
			numberOnlineTotal+=ct;
			if(ct>highestOnline)
				highestOnline=ct;
			return;
		}
		// classes, races, levels, genders, faiths, clanned, grouped
		if(E instanceof MOB)
		{
			final MOB mob=(MOB)E;
			final Room R=mob.location();
			Area A=(R==null) ? null : R.getArea();
			// instance-child areas count toward their model (parent) area
			if((A!=null) && (CMath.bset(A.flags(),Area.FLAG_INSTANCE_CHILD)))
				A=CMLib.map().getModelArea(A);
			if(A!=null)
				bumpVal("X"+tagFix(A.Name()),type);
			bumpVal("B"+tagFix(mob.baseCharStats().getCurrentClass().baseClass()),type);
			bumpVal("C"+tagFix(mob.baseCharStats().getCurrentClass().ID()),type);
			bumpVal("R"+tagFix(mob.baseCharStats().getMyRace().ID()),type);
			bumpVal("L"+mob.basePhyStats().level(),type);
			bumpVal("G"+((char)mob.baseCharStats().getStat(CharStats.STAT_GENDER)),type);
			bumpVal("F"+tagFix(mob.charStats().getWorshipCharID()),type);
			for(final Pair<Clan,Integer> p : mob.clans())
				bumpVal("Q"+tagFix(p.first.clanID()),type);
			final Set<MOB> H=mob.getGroupMembers(new HashSet<MOB>());
			bumpVal("J"+H.size(),type);
			// P = number of player (non-monster) members in the group, minimum 1
			int pct=0;
			for (final MOB mob2 : H)
			{
				if(!mob2.isMonster())
					pct++;
			}
			if(pct==0)
				pct=1;
			bumpVal("P"+pct,type);
		}
		else
		if(E instanceof Ability)
			bumpVal("A"+tagFix(E.ID()),type);
		else
		if(E instanceof Quest)
			bumpVal("U"+tagFix(((Quest)E).name()),type);
		else
		if(E instanceof Social)
			bumpVal("S"+tagFix(((Social)E).baseName()),type);
		else
		if(E instanceof Command)
			bumpVal("M"+tagFix(((Command)E).ID()),type);
	}

	/**
	 * Rebuilds this row from a previously serialized XML fragment (see data()),
	 * replacing any existing stats.
	 *
	 * @param start the window start time (millis)
	 * @param end the window end time (millis)
	 * @param data the XML fragment to parse
	 */
	@Override
	public void populate(final long start, final long end, final String data)
	{
		synchronized(stats)
		{
			startTime=start;
			endTime=end;
			final List<XMLLibrary.XMLTag> all=CMLib.xml().parseAllXML(data);
			if((all==null)||(all.size()==0))
				return;
			highestOnline=CMLib.xml().getIntFromPieces(all,"HIGH");
			numberOnlineTotal=CMLib.xml().getIntFromPieces(all,"NUMONLINE");
			numberOnlineCounter=CMLib.xml().getIntFromPieces(all,"NUMCOUNT");
			final XMLTag X=CMLib.xml().getPieceFromPieces(all,"STATS");
			if((X==null)||(X.contents()==null)||(X.contents().size()==0)||(!X.tag().equals("STATS")))
				return;
			stats.clear();
			for(int s=0;s<X.contents().size();s++)
			{
				final XMLTag S=X.contents().get(s);
				long[] l=CMParms.toLongArray(CMParms.parseCommas(S.value(),true));
				// widen arrays saved by an older version with fewer STAT_* columns
				if(l.length<STAT_TOTAL)
				{
					final long[] l2=new long[STAT_TOTAL];
					for(int i=0;i<l.length;i++)
						l2[i]=l[i];
					l=l2;
				}
				// a duplicate tag within the same XML block: merge the counts
				final long[] l2=stats.get(S.tag());
				if(l2!=null)
				{
					for(int i=0;i<l2.length;i++)
						l[i]+=l2[i];
				}
				// put() replaces any existing entry, so no explicit remove() is needed
				stats.put(S.tag(),l);
			}
		}
	}

	@Override
	public CMObject newInstance()
	{
		try
		{
			// Class.newInstance() is deprecated (it rethrows checked exceptions
			// unwrapped); the constructor-based form is the supported replacement.
			return getClass().getDeclaredConstructor().newInstance();
		}
		catch (final Exception e)
		{
			return new DefaultCoffeeTableRow();
		}
	}

	@Override
	public void initializeClass()
	{
	}

	/**
	 * Returns a copy of this row with its own stats map (the long[] values are
	 * shared, as in the original implementation).
	 */
	@Override
	public CMObject copyOf()
	{
		try
		{
			final DefaultCoffeeTableRow CR=(DefaultCoffeeTableRow)this.clone();
			CR.stats=new SHashtable<String,long[]>();
			CR.stats.putAll(stats);
			return CR;
		}
		catch (final Exception e)
		{
			return newInstance();
		}
	}
}
| |
package com.boxfuse.cloudwatchlogs;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
/**
* General configuration for the CloudWatch appender.
*/
/**
 * General configuration for the CloudWatch appender.
 *
 * Defaults are taken from the BOXFUSE_* / AWS_REGION environment variables at
 * construction time; every value can be overridden through its setter.
 * Fixes in this revision: raw {@code Enumeration} types parameterized and the
 * decompiler-style {@code while(true)/do-while} loop in {@link #getHostIp()}
 * rewritten as straightforward nested for-loops (same result).
 */
public class CloudwatchLogsConfig {
    /**
     * The default size of the async log event queue.
     */
    public static final int DEFAULT_MAX_EVENT_QUEUE_SIZE = 1000000;
    /**
     * The default maximum delay in milliseconds before forcing a flush of the buffered log events to CloudWatch Logs.
     */
    public static final long DEFAULT_MAX_FLUSH_DELAY = 500;

    private int maxEventQueueSize = DEFAULT_MAX_EVENT_QUEUE_SIZE;
    private long maxFlushDelay = DEFAULT_MAX_FLUSH_DELAY;
    private boolean debug;
    private String endpoint = System.getenv("BOXFUSE_CLOUDWATCHLOGS_ENDPOINT");
    private String env = System.getenv("BOXFUSE_ENV");
    private String image = System.getenv("BOXFUSE_IMAGE_COORDINATES");
    private String instance = System.getenv("BOXFUSE_INSTANCE_ID");
    private String region = System.getenv("AWS_REGION");
    private String logGroup;
    private boolean stdoutFallback;
    private List<String> customMdcKeys = new ArrayList<>();

    /**
     * Creates a configuration from the environment, substituting "##unknown##"
     * placeholders (and the local host name for the instance id) when the
     * Boxfuse environment variables are absent.
     */
    public CloudwatchLogsConfig() {
        if (env == null) {
            env = "##unknown##";
        }
        if (image == null) {
            image = "##unknown##:##unknown##";
        }
        if (instance == null) {
            instance = getHostName();
        }
        logGroup = "boxfuse/" + env;
    }

    /**
     * @return Whether to fall back to stdout instead of disabling the appender when running outside of a Boxfuse instance. Default: false.
     */
    public boolean isStdoutFallback() {
        return stdoutFallback;
    }

    /**
     * @param stdoutFallback Whether to fall back to stdout instead of disabling the appender when running outside of a Boxfuse instance. Default: false.
     */
    public void setStdoutFallback(boolean stdoutFallback) {
        this.stdoutFallback = stdoutFallback;
    }

    /**
     * @return The maximum size of the async log event queue. Default: 1000000.
     * Increase to avoid dropping log events at very high throughput.
     * Decrease to reduce maximum memory usage at the risk of the occasional log event drop when it gets full.
     */
    public int getMaxEventQueueSize() {
        return maxEventQueueSize;
    }

    /**
     * @param maxEventQueueSize The maximum size of the async log event queue. Default: 1000000.
     * Increase to avoid dropping log events at very high throughput.
     * Decrease to reduce maximum memory usage at the risk of the occasional log event drop when it gets full.
     * @throws IllegalArgumentException if the size is smaller than 1
     */
    public void setMaxEventQueueSize(int maxEventQueueSize) {
        if (maxEventQueueSize < 1) {
            throw new IllegalArgumentException("maxEventQueueSize may not be smaller than 1 but was " + maxEventQueueSize);
        }
        this.maxEventQueueSize = maxEventQueueSize;
    }

    /**
     * @return The maximum delay in milliseconds before forcing a flush of the buffered log events to CloudWatch Logs.
     */
    public long getMaxFlushDelay() {
        return maxFlushDelay;
    }

    /**
     * @param maxFlushDelay The maximum delay in milliseconds before forcing a flush of the buffered log events to CloudWatch Logs.
     * @throws IllegalArgumentException if the delay is smaller than 1
     */
    public void setMaxFlushDelay(long maxFlushDelay) {
        if (maxFlushDelay < 1) {
            throw new IllegalArgumentException("maxFlushDelay may not be smaller than 1 but was " + maxFlushDelay);
        }
        this.maxFlushDelay = maxFlushDelay;
    }

    /**
     * @return The AWS CloudWatch Logs endpoint to connect to.
     */
    public String getEndpoint() {
        return endpoint;
    }

    /**
     * @param endpoint The AWS CloudWatch Logs endpoint to connect to.
     */
    public void setEndpoint(String endpoint) {
        this.endpoint = endpoint;
    }

    /**
     * @return The current Boxfuse environment.
     */
    public String getEnv() {
        return env;
    }

    /**
     * @param env The current Boxfuse environment.
     * @throws IllegalArgumentException if env is null
     */
    public void setEnv(String env) {
        if (env == null) {
            throw new IllegalArgumentException("env may not be null");
        }
        this.env = env;
    }

    /**
     * @return The current Boxfuse image.
     */
    public String getImage() {
        return image;
    }

    /**
     * @param image The current Boxfuse image.
     * @throws IllegalArgumentException if image is null
     */
    public void setImage(String image) {
        if (image == null) {
            throw new IllegalArgumentException("image may not be null");
        }
        this.image = image;
    }

    /**
     * @return The id of the current instance.
     */
    public String getInstance() {
        return instance;
    }

    /**
     * @param instance The id of the current instance.
     * @throws IllegalArgumentException if instance is null
     */
    public void setInstance(String instance) {
        if (instance == null) {
            throw new IllegalArgumentException("instance may not be null");
        }
        this.instance = instance;
    }

    /**
     * @return The AWS region to use.
     */
    public String getRegion() {
        return region;
    }

    /**
     * @param region The AWS region to use.
     * @throws IllegalArgumentException if region is null
     */
    public void setRegion(String region) {
        if (region == null) {
            throw new IllegalArgumentException("region may not be null");
        }
        this.region = region;
    }

    /**
     * @return Whether internal debug output should be produced. Only useful for diagnosing issues within the appender itself.
     */
    public boolean isDebug() {
        return debug;
    }

    /**
     * @param debug Whether internal debug output should be produced. Only useful for diagnosing issues within the appender itself.
     */
    public void setDebug(boolean debug) {
        this.debug = debug;
    }

    /**
     * @return The AWS CloudWatch Logs LogGroup to use. This is determined automatically within Boxfuse environments.
     */
    public String getLogGroup() {
        return logGroup;
    }

    /**
     * @param logGroup The AWS CloudWatch Logs LogGroup to use. This is determined automatically within Boxfuse environments.
     */
    public void setLogGroup(String logGroup) {
        this.logGroup = logGroup;
    }

    /**
     * @return Custom MDC keys to include in the log events along with their values.
     */
    public List<String> getCustomMdcKeys() {
        return customMdcKeys;
    }

    /**
     * @param customMdcKey Custom MDC key to include in the log events along with its value.
     */
    public void addCustomMdcKey(String customMdcKey) {
        this.customMdcKeys.add(customMdcKey);
    }

    /**
     * Best-effort machine identifier: the local host name, falling back to a
     * host IP when name resolution fails.
     */
    private static String getHostName() {
        try {
            return InetAddress.getLocalHost().getHostName();
        } catch (UnknownHostException ignored) {
            // no resolvable host name; fall back to scanning the interfaces
            return getHostIp();
        }
    }

    /**
     * Scans the network interfaces for an IPv4, non-loopback address.
     * Addresses on interfaces that look like VirtualBox/VMware adapters are
     * only used as a last resort; returns "&lt;&lt;unknown&gt;&gt;" when
     * nothing usable is found.
     */
    private static String getHostIp() {
        try {
            Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
            if (interfaces == null) {
                return "<<unknown>>";
            }
            String backupCandidate = null;
            while (interfaces.hasMoreElements()) {
                NetworkInterface networkInterface = interfaces.nextElement();
                if (!networkInterface.isUp()) {
                    continue;
                }
                // Virtualization host-only adapters are a poor identity source.
                String ifName = networkInterface.getName();
                String displayName = networkInterface.getDisplayName();
                boolean virtualAdapter = ifName.contains("vboxnet") || ifName.contains("vmnet")
                        || (displayName != null && (displayName.contains("VirtualBox") || displayName.contains("VMware")));
                Enumeration<InetAddress> inetAddresses = networkInterface.getInetAddresses();
                while (inetAddresses.hasMoreElements()) {
                    InetAddress inetAddress = inetAddresses.nextElement();
                    if (inetAddress instanceof Inet4Address && !inetAddress.isLoopbackAddress()) {
                        if (!virtualAdapter) {
                            // first acceptable address on a real adapter wins
                            return inetAddress.getHostAddress();
                        }
                        // remember virtual-adapter addresses in case nothing better shows up
                        backupCandidate = inetAddress.getHostAddress();
                    }
                }
            }
            return backupCandidate != null ? backupCandidate : "<<unknown>>";
        } catch (SocketException e) {
            return "<<unknown>>";
        }
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.maven.indices;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.progress.BackgroundTaskQueue;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.JdomKt;
import com.intellij.util.io.PathKt;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.concurrency.Promise;
import org.jetbrains.concurrency.Promises;
import org.jetbrains.idea.maven.model.MavenArchetype;
import org.jetbrains.idea.maven.project.MavenGeneralSettings;
import org.jetbrains.idea.maven.project.MavenProjectsManager;
import org.jetbrains.idea.maven.server.MavenIndexerWrapper;
import org.jetbrains.idea.maven.server.MavenServerDownloadListener;
import org.jetbrains.idea.maven.server.MavenServerManager;
import org.jetbrains.idea.maven.utils.*;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
 * Project-level service owning the Maven repository indices (one local, any
 * number of remote): lazy initialization, background index updating via a
 * {@link BackgroundTaskQueue}, on-the-fly addition of freshly downloaded
 * artifacts, and the user-defined archetype catalog persisted to
 * {@code UserArchetypes.xml} under the indices directory.
 *
 * Thread-safety: initialization/shutdown are {@code synchronized} methods;
 * the update scheduling state (myWaitingIndices, myUpdatingIndex) is guarded
 * by myUpdatingIndicesLock; hot fields are {@code volatile} for safe
 * publication.  NOTE(review): comments only — code unchanged.
 */
public final class MavenIndicesManager implements Disposable {
  // XML element/attribute names used by the UserArchetypes.xml persistence format.
  private static final String ELEMENT_ARCHETYPES = "archetypes";
  private static final String ELEMENT_ARCHETYPE = "archetype";
  private static final String ELEMENT_GROUP_ID = "groupId";
  private static final String ELEMENT_ARTIFACT_ID = "artifactId";
  private static final String ELEMENT_VERSION = "version";
  private static final String ELEMENT_REPOSITORY = "repository";
  private static final String ELEMENT_DESCRIPTION = "description";
  // Id under which the single local-repository index is registered.
  private static final String LOCAL_REPOSITORY_ID = "local";

  private final @NotNull Project myProject;
  // Registered with MavenServerManager on init, removed on shutdown.
  private MavenServerDownloadListener myDownloadListener;

  public enum IndexUpdatingState {
    IDLE, WAITING, UPDATING
  }

  // Test override for the indices directory; see setTestIndexDir/getIndicesDir.
  private volatile Path myTestIndicesDir;
  private volatile MavenIndexerWrapper myIndexer;
  private volatile MavenIndices myIndices;

  // Guards myWaitingIndices and myUpdatingIndex.
  private final Object myUpdatingIndicesLock = new Object();
  private final List<MavenSearchIndex> myWaitingIndices = new ArrayList<>();
  private volatile MavenSearchIndex myUpdatingIndex;

  private final IndexFixer myIndexFixer = new IndexFixer();
  private final BackgroundTaskQueue myUpdatingQueue = new BackgroundTaskQueue(null, IndicesBundle.message("maven.indices.updating"));
  private volatile List<MavenArchetype> myUserArchetypes = new ArrayList<>();

  /**
   * @deprecated use {@link MavenIndicesManager#getInstance(Project)}
   */
  @Deprecated
  public static MavenIndicesManager getInstance() {
    // should not be used as it lead to plugin classloader leak on the plugin unload
    return ProjectManager.getInstance().getDefaultProject().getService(MavenIndicesManager.class);
  }

  public static MavenIndicesManager getInstance(@NotNull Project project) {
    return project.getService(MavenIndicesManager.class);
  }

  public MavenIndicesManager(@NotNull Project project) {
    myProject = project;
  }

  @TestOnly
  public void setTestIndexDir(Path indicesDir) {
    myTestIndicesDir = indicesDir;
  }

  /** Drops any queued (not yet started) background update tasks. */
  public void clear() {
    myUpdatingQueue.clear();
  }

  private MavenIndices getIndicesObject() {
    ensureInitialized();
    return myIndices;
  }

  // Lazy init: sets up indexer/indices on first use and loads the persisted
  // user archetypes (only on the first, real initialization).
  private void ensureInitialized() {
    if (doInitIndicesFields()) return;
    ArrayList<MavenArchetype> archetypes = loadUserArchetypes(getUserArchetypesFile());
    if (archetypes != null) {
      myUserArchetypes = archetypes;
    }
  }

  // Returns true when already initialized, false after performing initialization.
  private synchronized boolean doInitIndicesFields() {
    if (myIndices != null) return true;
    myIndexer = MavenServerManager.getInstance().createIndexer(myProject);
    // Keep the local index current with artifacts the Maven server downloads.
    myDownloadListener = new MavenServerDownloadListener() {
      @Override
      public void artifactDownloaded(File file, String relativePath) {
        addArtifact(file, relativePath);
      }
    };
    MavenServerManager.getInstance().addDownloadListener(myDownloadListener);
    // A broken index triggers a non-full (repair) update of itself.
    myIndices = new MavenIndices(myIndexer, getIndicesDir().toFile(), new MavenSearchIndex.IndexListener() {
      @Override
      public void indexIsBroken(@NotNull MavenSearchIndex index) {
        if (index instanceof MavenIndex) {
          scheduleUpdate(null, Collections.singletonList((MavenIndex)index), false);
        }
      }
    });
    return false;
  }

  @NotNull
  private Path getIndicesDir() {
    return myTestIndicesDir == null
         ? MavenUtil.getPluginSystemDir("Indices")
         : myTestIndicesDir;
  }

  @Override
  public void dispose() {
    doShutdown();
    // Tests use a throwaway indices dir; remove it so runs stay isolated.
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      PathKt.delete(getIndicesDir());
    }
  }

  // Reverse of doInitIndicesFields: detach the listener, close the indices,
  // drop queued updates.  Safe to call repeatedly.
  private synchronized void doShutdown() {
    if (myDownloadListener != null) {
      MavenServerManager mavenServerManager = MavenServerManager.getInstanceIfCreated();
      if (mavenServerManager != null) {
        mavenServerManager.removeDownloadListener(myDownloadListener);
      }
      myDownloadListener = null;
    }
    if (myIndices != null) {
      try {
        myIndices.close();
      }
      catch (Exception e) {
        MavenLog.LOG.error("", e);
      }
      myIndices = null;
    }
    clear();
    myIndexer = null;
  }

  @TestOnly
  public void doShutdownInTests() {
    doShutdown();
  }

  public List<MavenIndex> getIndices() {
    return getIndicesObject().getIndices();
  }

  /**
   * Registers (or retrieves) the remote index for the given (id, url) pair.
   * Returns null when the project is already disposed or the index cannot
   * be created.
   */
  public MavenIndex ensureRemoteIndexExist(@NotNull Pair<String, String> remoteIndexIdAndUrl) {
    try {
      // Read action guards against racing with project disposal.
      MavenIndices indicesObjectCache = ReadAction.compute(() -> {
        if (myProject.isDisposed()) {
          return null;
        }
        else {
          return getIndicesObject();
        }
      });
      if (indicesObjectCache == null) return null;
      return indicesObjectCache.add(remoteIndexIdAndUrl.first, remoteIndexIdAndUrl.second, MavenSearchIndex.Kind.REMOTE);
    }
    catch (MavenIndexException e) {
      MavenLog.LOG.warn(e);
      return null;
    }
  }

  /**
   * Registers the index for the local repository, scheduling a first full
   * update if it has never been updated (timestamp -1).  Returns null on
   * a null repository or creation failure.
   */
  @Nullable
  public synchronized MavenIndex createIndexForLocalRepo(Project project, @Nullable File localRepository) {
    if (localRepository == null) {
      return null;
    }
    MavenIndices indicesObjectCache = getIndicesObject();
    try {
      MavenIndex localIndex = indicesObjectCache.add(LOCAL_REPOSITORY_ID, localRepository.getPath(), MavenIndex.Kind.LOCAL);
      if (localIndex.getUpdateTimestamp() == -1) {
        scheduleUpdate(project, Collections.singletonList(localIndex));
      }
      return localIndex;
    }
    catch (MavenIndexException e) {
      MavenLog.LOG.warn("Cannot create index:" + e.getMessage());
      return null;
    }
  }

  public List<MavenIndex> ensureIndicesExist(Collection<Pair<String, String>> remoteRepositoriesIdsAndUrls) {
    // MavenIndices.add method returns an existing index if it has already been added, thus we have to use set here.
    LinkedHashSet<MavenIndex> result = new LinkedHashSet<>();
    for (Pair<String, String> eachIdAndUrl : remoteRepositoriesIdsAndUrls) {
      MavenIndex index = ensureRemoteIndexExist(eachIdAndUrl);
      if (index != null) {
        result.add(index);
      }
    }
    return new ArrayList<>(result);
  }

  // Called from the download listener: route a downloaded artifact into the
  // local index owning its repository root.
  private void addArtifact(File artifactFile, String relativePath) {
    String repositoryPath = getRepositoryUrl(artifactFile, relativePath);
    MavenIndex index = getIndicesObject().find(repositoryPath, MavenSearchIndex.Kind.LOCAL);
    if (index != null) {
      index.addArtifact(artifactFile);
    }
  }

  public void fixArtifactIndex(File artifactFile, File localRepository) {
    MavenIndex index = getIndicesObject().find(localRepository.getPath(), MavenSearchIndex.Kind.LOCAL);
    if (index != null) {
      myIndexFixer.fixIndex(artifactFile, index);
    }
  }

  // Derives the repository root by stripping as many path segments from the
  // artifact file as the relative path contains.
  private static String getRepositoryUrl(File artifactFile, String name) {
    List<String> parts = getArtifactParts(name);
    File result = artifactFile;
    for (int i = 0; i < parts.size(); i++) {
      result = result.getParentFile();
    }
    return result.getPath();
  }

  private static List<String> getArtifactParts(String name) {
    return StringUtil.split(name, "/");
  }

  public Promise<Void> scheduleUpdate(@Nullable Project project, List<MavenIndex> indices) {
    return scheduleUpdate(project, indices, true);
  }

  /**
   * Queues a background update of the given indices, skipping any already
   * waiting.  fullUpdate=false means repair-only (used for broken indices,
   * possibly without a project).  The promise resolves when the batch ends.
   */
  private Promise<Void> scheduleUpdate(@Nullable Project project, List<MavenIndex> indices, final boolean fullUpdate) {
    final List<MavenSearchIndex> toSchedule = new ArrayList<>();
    synchronized (myUpdatingIndicesLock) {
      for (MavenSearchIndex each : indices) {
        if (myWaitingIndices.contains(each)) continue;
        toSchedule.add(each);
      }
      myWaitingIndices.addAll(toSchedule);
    }
    if (toSchedule.isEmpty()) {
      return Promises.resolvedPromise();
    }
    final AsyncPromise<Void> promise = new AsyncPromise<>();
    myUpdatingQueue.run(new Task.Backgroundable(project, IndicesBundle.message("maven.indices.updating"), true) {
      @Override
      public void run(@NotNull ProgressIndicator indicator) {
        try {
          indicator.setIndeterminate(false);
          doUpdateIndices(project, toSchedule, fullUpdate, new MavenProgressIndicator(project, indicator, null));
        }
        catch (MavenProcessCanceledException ignore) {
        }
        finally {
          // resolve even on cancellation so callers are never left hanging
          promise.setResult(null);
        }
      }
    });
    return promise;
  }

  // Runs on the background queue; updates indices one by one while tracking
  // the WAITING -> UPDATING -> done state transitions under the lock.
  private void doUpdateIndices(final Project projectOrNull,
                               List<MavenSearchIndex> indices,
                               boolean fullUpdate,
                               MavenProgressIndicator indicator)
    throws MavenProcessCanceledException {
    // full updates need settings, hence a project
    MavenLog.LOG.assertTrue(!fullUpdate || projectOrNull != null);
    List<MavenSearchIndex> remainingWaiting = new ArrayList<>(indices);
    try {
      for (MavenSearchIndex each : indices) {
        if (indicator.isCanceled()) return;
        indicator.setText(IndicesBundle.message("maven.indices.updating.index",
                                                each.getRepositoryId(),
                                                each.getRepositoryPathOrUrl()));
        synchronized (myUpdatingIndicesLock) {
          remainingWaiting.remove(each);
          myWaitingIndices.remove(each);
          myUpdatingIndex = each;
        }
        try {
          MavenIndices.updateOrRepair(each, fullUpdate, fullUpdate ? getMavenSettings(projectOrNull, indicator) : null, indicator);
          if (projectOrNull != null) {
            MavenRehighlighter.rehighlight(projectOrNull);
          }
        }
        finally {
          synchronized (myUpdatingIndicesLock) {
            myUpdatingIndex = null;
          }
        }
      }
    }
    finally {
      // on cancel/failure, anything not yet processed must leave WAITING state
      synchronized (myUpdatingIndicesLock) {
        myWaitingIndices.removeAll(remainingWaiting);
      }
    }
  }

  private static MavenGeneralSettings getMavenSettings(@NotNull final Project project, @NotNull MavenProgressIndicator indicator)
    throws MavenProcessCanceledException {
    MavenGeneralSettings settings;
    settings = ReadAction
      .compute(() -> project.isDisposed() ? null : MavenProjectsManager.getInstance(project).getGeneralSettings().clone());
    if (settings == null) {
      // project was closed
      indicator.cancel();
      indicator.checkCanceled();
    }
    return settings;
  }

  public IndexUpdatingState getUpdatingState(MavenSearchIndex index) {
    synchronized (myUpdatingIndicesLock) {
      if (myUpdatingIndex == index) return IndexUpdatingState.UPDATING;
      if (myWaitingIndices.contains(index)) return IndexUpdatingState.WAITING;
      return IndexUpdatingState.IDLE;
    }
  }

  /**
   * Union of archetypes from the indexer, the persisted user archetypes,
   * every known index, and the MavenArchetypesProvider extensions.
   */
  public Set<MavenArchetype> getArchetypes() {
    ensureInitialized();
    Set<MavenArchetype> result = new THashSet<>(myIndexer.getArchetypes());
    result.addAll(myUserArchetypes);
    for (MavenIndex index : myIndices.getIndices()) {
      result.addAll(index.getArchetypes());
    }
    for (MavenArchetypesProvider each : MavenArchetypesProvider.EP_NAME.getExtensionList()) {
      result.addAll(each.getArchetypes());
    }
    return result;
  }

  /** Adds (or replaces, if equal) a user archetype and persists the catalog. */
  public void addArchetype(MavenArchetype archetype) {
    ensureInitialized();
    List<MavenArchetype> archetypes = myUserArchetypes;
    int idx = archetypes.indexOf(archetype);
    if (idx >= 0) {
      archetypes.set(idx, archetype);
    }
    else {
      archetypes.add(archetype);
    }
    saveUserArchetypes();
  }

  // Parses UserArchetypes.xml; entries missing groupId/artifactId/version are
  // skipped.  Returns null when the file is absent or unparseable.
  private static ArrayList<MavenArchetype> loadUserArchetypes(Path file) {
    try {
      if (!PathKt.exists(file)) {
        return null;
      }
      // Store artifact to set to remove duplicate created by old IDEA (https://youtrack.jetbrains.com/issue/IDEA-72105)
      Collection<MavenArchetype> result = new LinkedHashSet<>();
      List<Element> children = JDOMUtil.load(file).getChildren(ELEMENT_ARCHETYPE);
      // iterate backwards + reverse at the end: for duplicates, the LAST
      // occurrence in the file wins while original order is preserved
      for (int i = children.size() - 1; i >= 0; i--) {
        Element each = children.get(i);
        String groupId = each.getAttributeValue(ELEMENT_GROUP_ID);
        String artifactId = each.getAttributeValue(ELEMENT_ARTIFACT_ID);
        String version = each.getAttributeValue(ELEMENT_VERSION);
        String repository = each.getAttributeValue(ELEMENT_REPOSITORY);
        String description = each.getAttributeValue(ELEMENT_DESCRIPTION);
        if (StringUtil.isEmptyOrSpaces(groupId)
            || StringUtil.isEmptyOrSpaces(artifactId)
            || StringUtil.isEmptyOrSpaces(version)) {
          continue;
        }
        result.add(new MavenArchetype(groupId, artifactId, version, repository, description));
      }
      ArrayList<MavenArchetype> listResult = new ArrayList<>(result);
      Collections.reverse(listResult);
      return listResult;
    }
    catch (IOException | JDOMException e) {
      MavenLog.LOG.warn(e);
      return null;
    }
  }

  // Writes myUserArchetypes back to UserArchetypes.xml (repository and
  // description attributes are optional).
  private void saveUserArchetypes() {
    Element root = new Element(ELEMENT_ARCHETYPES);
    for (MavenArchetype each : myUserArchetypes) {
      Element childElement = new Element(ELEMENT_ARCHETYPE);
      childElement.setAttribute(ELEMENT_GROUP_ID, each.groupId);
      childElement.setAttribute(ELEMENT_ARTIFACT_ID, each.artifactId);
      childElement.setAttribute(ELEMENT_VERSION, each.version);
      if (each.repository != null) {
        childElement.setAttribute(ELEMENT_REPOSITORY, each.repository);
      }
      if (each.description != null) {
        childElement.setAttribute(ELEMENT_DESCRIPTION, each.description);
      }
      root.addContent(childElement);
    }
    try {
      JdomKt.write(root, getUserArchetypesFile());
    }
    catch (IOException e) {
      MavenLog.LOG.warn(e);
    }
  }

  @NotNull
  private Path getUserArchetypesFile() {
    return getIndicesDir().resolve("UserArchetypes.xml");
  }

  /**
   * Batches addArtifact calls: files are queued and drained by a merging
   * update queue (1s quiet period), with a weak-keyed cache of file names
   * already indexed to avoid repeat work.
   */
  private final class IndexFixer {
    // file names already added; weak map so entries can be collected
    private final Set<String> indexedCache = Collections.synchronizedSet(Collections.newSetFromMap(new WeakHashMap<>()));
    private final ConcurrentLinkedQueue<Pair<File, MavenIndex>> queueToAdd = new ConcurrentLinkedQueue<>();
    private final MergingUpdateQueue myMergingUpdateQueue;

    private IndexFixer() {
      myMergingUpdateQueue =
        new MergingUpdateQueue(this.getClass().getName(), 1000, true, MergingUpdateQueue.ANY_COMPONENT, MavenIndicesManager.this, null,
                               false).usePassThroughInUnitTestMode();
    }

    public void fixIndex(File file, MavenIndex index) {
      if (indexedCache.contains(file.getName())) {
        return;
      }
      queueToAdd.add(new Pair.NonNull<>(file, index));
      // same Update identity (this) so bursts coalesce into one drain
      myMergingUpdateQueue.queue(Update.create(this, new AddToIndexRunnable()));
    }

    private class AddToIndexRunnable implements Runnable {
      @Override
      public void run() {
        Pair<File, MavenIndex> elementToAdd;
        while ((elementToAdd = queueToAdd.poll()) != null) {
          elementToAdd.second.addArtifact(elementToAdd.first);
          indexedCache.add(elementToAdd.first.getName());
        }
      }
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.IndexingOperationListener;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.Scheduler.Cancellable;
import org.elasticsearch.threadpool.ThreadPool.Names;
import java.io.Closeable;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.PriorityQueue;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Tracks how much heap the node's actively indexing shards are using for their
 * indexing buffers. When the shared budget (<code>indices.memory.index_buffer_size</code>)
 * is exceeded, the largest shards are asked to write their indexing buffers to disk;
 * under sustained pressure (buffers plus in-flight writes above 1.5x the budget) the
 * largest shards are additionally throttled until writing catches up.
 */
public class IndexingMemoryController extends AbstractComponent implements IndexingOperationListener, Closeable {
/** How much heap (% or bytes) we will share across all actively indexing shards on this node (default: 10%). */
public static final Setting<ByteSizeValue> INDEX_BUFFER_SIZE_SETTING =
Setting.memorySizeSetting("indices.memory.index_buffer_size", "10%", Property.NodeScope);
/** Only applies when <code>indices.memory.index_buffer_size</code> is a %, to set a floor on the actual size in bytes (default: 48 MB). */
public static final Setting<ByteSizeValue> MIN_INDEX_BUFFER_SIZE_SETTING = Setting.byteSizeSetting("indices.memory.min_index_buffer_size",
new ByteSizeValue(48, ByteSizeUnit.MB),
new ByteSizeValue(0, ByteSizeUnit.BYTES),
new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES),
Property.NodeScope);
/** Only applies when <code>indices.memory.index_buffer_size</code> is a %, to set a ceiling on the actual size in bytes (default: not set). */
public static final Setting<ByteSizeValue> MAX_INDEX_BUFFER_SIZE_SETTING = Setting.byteSizeSetting("indices.memory.max_index_buffer_size",
new ByteSizeValue(-1),
new ByteSizeValue(-1),
new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES),
Property.NodeScope);
/** If we see no indexing operations after this much time for a given shard, we consider that shard inactive (default: 5 minutes). */
public static final Setting<TimeValue> SHARD_INACTIVE_TIME_SETTING = Setting.positiveTimeSetting("indices.memory.shard_inactive_time", TimeValue.timeValueMinutes(5), Property.NodeScope);
/** How frequently we check indexing memory usage (default: 5 seconds). */
public static final Setting<TimeValue> SHARD_MEMORY_INTERVAL_TIME_SETTING = Setting.positiveTimeSetting("indices.memory.interval", TimeValue.timeValueSeconds(5), Property.NodeScope);
// Used to schedule the periodic checker and to run async indexing-buffer writes.
private final ThreadPool threadPool;
// All shards on this node; filtered by write-capable state in availableShards().
private final Iterable<IndexShard> indexShards;
// The resolved node-wide indexing buffer budget (after min/max clamping).
private final ByteSizeValue indexingBuffer;
private final TimeValue inactiveTime;
private final TimeValue interval;
/** Contains shards currently being throttled because we can't write segments quickly enough */
private final Set<IndexShard> throttled = new HashSet<>();
// Handle for the periodically scheduled statusChecker; cancelled in close().
private final Cancellable scheduler;
private static final EnumSet<IndexShardState> CAN_WRITE_INDEX_BUFFER_STATES = EnumSet.of(
IndexShardState.RECOVERING, IndexShardState.POST_RECOVERY, IndexShardState.STARTED, IndexShardState.RELOCATED);
// The single checker instance; also invoked early from bytesWritten() under load.
private final ShardsIndicesStatusChecker statusChecker;
/**
 * Resolves the indexing-buffer budget from settings (applying the min/max clamps
 * only when the budget was given as a percentage) and schedules the periodic check.
 */
IndexingMemoryController(Settings settings, ThreadPool threadPool, Iterable<IndexShard> indexServices) {
super(settings);
this.indexShards = indexServices;
ByteSizeValue indexingBuffer = INDEX_BUFFER_SIZE_SETTING.get(settings);
String indexingBufferSetting = settings.get(INDEX_BUFFER_SIZE_SETTING.getKey());
// null means we used the default (10%)
if (indexingBufferSetting == null || indexingBufferSetting.endsWith("%")) {
// We only apply the min/max when % value was used for the index buffer:
ByteSizeValue minIndexingBuffer = MIN_INDEX_BUFFER_SIZE_SETTING.get(this.settings);
ByteSizeValue maxIndexingBuffer = MAX_INDEX_BUFFER_SIZE_SETTING.get(this.settings);
if (indexingBuffer.getBytes() < minIndexingBuffer.getBytes()) {
indexingBuffer = minIndexingBuffer;
}
if (maxIndexingBuffer.getBytes() != -1 && indexingBuffer.getBytes() > maxIndexingBuffer.getBytes()) {
indexingBuffer = maxIndexingBuffer;
}
}
this.indexingBuffer = indexingBuffer;
this.inactiveTime = SHARD_INACTIVE_TIME_SETTING.get(this.settings);
// we need to have this relatively small to free up heap quickly enough
this.interval = SHARD_MEMORY_INTERVAL_TIME_SETTING.get(this.settings);
this.statusChecker = new ShardsIndicesStatusChecker();
logger.debug("using indexing buffer size [{}] with {} [{}], {} [{}]",
this.indexingBuffer,
SHARD_INACTIVE_TIME_SETTING.getKey(), this.inactiveTime,
SHARD_MEMORY_INTERVAL_TIME_SETTING.getKey(), this.interval);
this.scheduler = scheduleTask(threadPool);
// Need to save this so we can later launch async "write indexing buffer to disk" on shards:
this.threadPool = threadPool;
}
// NOTE(review): protected, presumably so tests can stub out scheduling — confirm.
protected Cancellable scheduleTask(ThreadPool threadPool) {
// it's fine to run it on the scheduler thread, no busy work
return threadPool.scheduleWithFixedDelay(statusChecker, interval, Names.SAME);
}
/** Cancels the periodic status check. */
@Override
public void close() {
scheduler.cancel();
}
/**
 * returns the current budget for the total amount of indexing buffers of
 * active shards on this node
 */
ByteSizeValue indexingBufferSize() {
return indexingBuffer;
}
/** returns the shards whose state currently allows writing to the indexing buffer */
protected List<IndexShard> availableShards() {
List<IndexShard> availableShards = new ArrayList<>();
for (IndexShard shard : indexShards) {
if (CAN_WRITE_INDEX_BUFFER_STATES.contains(shard.state())) {
availableShards.add(shard);
}
}
return availableShards;
}
/** returns how much heap this shard is using for its indexing buffer */
protected long getIndexBufferRAMBytesUsed(IndexShard shard) {
return shard.getIndexBufferRAMBytesUsed();
}
/** returns how many bytes this shard is currently writing to disk */
protected long getShardWritingBytes(IndexShard shard) {
return shard.getWritingBytes();
}
/** ask this shard to refresh, in the background, to free up heap */
protected void writeIndexingBufferAsync(IndexShard shard) {
threadPool.executor(ThreadPool.Names.REFRESH).execute(new AbstractRunnable() {
@Override
public void doRun() {
shard.writeIndexingBuffer();
}
@Override
public void onFailure(Exception e) {
// Best-effort: failing to free one shard's buffer must not break the controller.
logger.warn(() -> new ParameterizedMessage("failed to write indexing buffer for shard [{}]; ignoring", shard.shardId()), e);
}
});
}
/** force checker to run now */
void forceCheck() {
statusChecker.run();
}
/** Asks this shard to throttle indexing to one thread */
protected void activateThrottling(IndexShard shard) {
shard.activateThrottling();
}
/** Asks this shard to stop throttling indexing to one thread */
protected void deactivateThrottling(IndexShard shard) {
shard.deactivateThrottling();
}
// Records bytes for every successfully indexed document:
@Override
public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult result) {
recordOperationBytes(index, result);
}
// Records bytes for every successful delete:
@Override
public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResult result) {
recordOperationBytes(delete, result);
}
/** called by IndexShard to record estimated bytes written to translog for the operation */
private void recordOperationBytes(Engine.Operation operation, Engine.Result result) {
if (result.hasFailure() == false) {
statusChecker.bytesWritten(operation.estimatedSizeInBytes());
}
}
/** Pairs a shard with its current buffer usage; ordered largest-first for the priority queue. */
private static final class ShardAndBytesUsed implements Comparable<ShardAndBytesUsed> {
final long bytesUsed;
final IndexShard shard;
ShardAndBytesUsed(long bytesUsed, IndexShard shard) {
this.bytesUsed = bytesUsed;
this.shard = shard;
}
@Override
public int compareTo(ShardAndBytesUsed other) {
// Sort larger shards first:
return Long.compare(other.bytesUsed, bytesUsed);
}
}
/** not static because we need access to many fields/methods from our containing class (IMC): */
final class ShardsIndicesStatusChecker implements Runnable {
// Bytes recorded via bytesWritten() since the last check ran.
final AtomicLong bytesWrittenSinceCheck = new AtomicLong();
// Ensures only one thread at a time runs the (expensive) full check.
final ReentrantLock runLock = new ReentrantLock();
/** Shard calls this on each indexing/delete op */
public void bytesWritten(int bytes) {
long totalBytes = bytesWrittenSinceCheck.addAndGet(bytes);
assert totalBytes >= 0;
// Trigger an early check once roughly 1/30th of the budget has been written
// since the last check:
while (totalBytes > indexingBuffer.getBytes()/30) {
if (runLock.tryLock()) {
try {
// Must pull this again because it may have changed since we first checked:
totalBytes = bytesWrittenSinceCheck.get();
if (totalBytes > indexingBuffer.getBytes()/30) {
bytesWrittenSinceCheck.addAndGet(-totalBytes);
// NOTE: this is only an approximate check, because bytes written is to the translog, vs indexing memory buffer which is
// typically smaller but can be larger in extreme cases (many unique terms). This logic is here only as a safety against
// thread starvation or too infrequent checking, to ensure we are still checking periodically, in proportion to bytes
// processed by indexing:
runUnlocked();
}
} finally {
runLock.unlock();
}
// Must get it again since other threads could have increased it while we were in runUnlocked
totalBytes = bytesWrittenSinceCheck.get();
} else {
// Another thread beat us to it: let them do all the work, yay!
break;
}
}
}
// Entry point for the scheduled (and forced) check; serialized by runLock.
@Override
public void run() {
runLock.lock();
try {
runUnlocked();
} finally {
runLock.unlock();
}
}
// Core check; caller must hold runLock.
private void runUnlocked() {
// NOTE: even if we hit an errant exc here, our ThreadPool.scheduledWithFixedDelay will log the exception and re-invoke us
// again, on schedule
// First pass to sum up how much heap all shards' indexing buffers are using now, and how many bytes they are currently moving
// to disk:
long totalBytesUsed = 0;
long totalBytesWriting = 0;
for (IndexShard shard : availableShards()) {
// Give shard a chance to transition to inactive so sync'd flush can happen:
checkIdle(shard, inactiveTime.nanos());
// How many bytes this shard is currently (async'd) moving from heap to disk:
long shardWritingBytes = getShardWritingBytes(shard);
// How many heap bytes this shard is currently using
long shardBytesUsed = getIndexBufferRAMBytesUsed(shard);
shardBytesUsed -= shardWritingBytes;
totalBytesWriting += shardWritingBytes;
// If the refresh completed just after we pulled shardWritingBytes and before we pulled shardBytesUsed, then we could
// have a negative value here. So we just skip this shard since that means it's now using very little heap:
if (shardBytesUsed < 0) {
continue;
}
totalBytesUsed += shardBytesUsed;
}
if (logger.isTraceEnabled()) {
logger.trace("total indexing heap bytes used [{}] vs {} [{}], currently writing bytes [{}]",
new ByteSizeValue(totalBytesUsed), INDEX_BUFFER_SIZE_SETTING.getKey(), indexingBuffer, new ByteSizeValue(totalBytesWriting));
}
// If we are using more than 50% of our budget across both indexing buffer and bytes we are still moving to disk, then we now
// throttle the top shards to send back-pressure to ongoing indexing:
boolean doThrottle = (totalBytesWriting + totalBytesUsed) > 1.5 * indexingBuffer.getBytes();
if (totalBytesUsed > indexingBuffer.getBytes()) {
// OK we are now over-budget; fill the priority queue and ask largest shard(s) to refresh:
PriorityQueue<ShardAndBytesUsed> queue = new PriorityQueue<>();
for (IndexShard shard : availableShards()) {
// How many bytes this shard is currently (async'd) moving from heap to disk:
long shardWritingBytes = getShardWritingBytes(shard);
// How many heap bytes this shard is currently using
long shardBytesUsed = getIndexBufferRAMBytesUsed(shard);
// Only count up bytes not already being refreshed:
shardBytesUsed -= shardWritingBytes;
// If the refresh completed just after we pulled shardWritingBytes and before we pulled shardBytesUsed, then we could
// have a negative value here. So we just skip this shard since that means it's now using very little heap:
if (shardBytesUsed < 0) {
continue;
}
if (shardBytesUsed > 0) {
if (logger.isTraceEnabled()) {
if (shardWritingBytes != 0) {
logger.trace("shard [{}] is using [{}] heap, writing [{}] heap", shard.shardId(), shardBytesUsed, shardWritingBytes);
} else {
logger.trace("shard [{}] is using [{}] heap, not writing any bytes", shard.shardId(), shardBytesUsed);
}
}
queue.add(new ShardAndBytesUsed(shardBytesUsed, shard));
}
}
logger.debug("now write some indexing buffers: total indexing heap bytes used [{}] vs {} [{}], currently writing bytes [{}], [{}] shards with non-zero indexing buffer",
new ByteSizeValue(totalBytesUsed), INDEX_BUFFER_SIZE_SETTING.getKey(), indexingBuffer, new ByteSizeValue(totalBytesWriting), queue.size());
// Keep asking the currently-largest shard to flush its buffer until we are back
// under budget (or run out of shards); optionally throttle each one we flush:
while (totalBytesUsed > indexingBuffer.getBytes() && queue.isEmpty() == false) {
ShardAndBytesUsed largest = queue.poll();
logger.debug("write indexing buffer to disk for shard [{}] to free up its [{}] indexing buffer", largest.shard.shardId(), new ByteSizeValue(largest.bytesUsed));
writeIndexingBufferAsync(largest.shard);
totalBytesUsed -= largest.bytesUsed;
if (doThrottle && throttled.contains(largest.shard) == false) {
logger.info("now throttling indexing for shard [{}]: segment writing can't keep up", largest.shard.shardId());
throttled.add(largest.shard);
activateThrottling(largest.shard);
}
}
}
// Pressure has subsided: release every shard we previously throttled.
if (doThrottle == false) {
for(IndexShard shard : throttled) {
logger.info("stop throttling indexing for shard [{}]", shard.shardId());
deactivateThrottling(shard);
}
throttled.clear();
}
}
}
/**
 * ask this shard to check now whether it is inactive, and reduces its indexing buffer if so.
 */
protected void checkIdle(IndexShard shard, long inactiveTimeNS) {
try {
shard.checkIdle(inactiveTimeNS);
} catch (AlreadyClosedException e) {
// The shard closed concurrently; nothing to do.
logger.trace(() -> new ParameterizedMessage("ignore exception while checking if shard {} is inactive", shard.shardId()), e);
}
}
}
| |
package org.motechproject.mds.service.impl;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.motechproject.mds.builder.MDSConstructor;
import org.motechproject.mds.domain.ComboboxHolder;
import org.motechproject.mds.domain.Entity;
import org.motechproject.mds.domain.EntityDraft;
import org.motechproject.mds.domain.Field;
import org.motechproject.mds.domain.FieldMetadata;
import org.motechproject.mds.domain.FieldSetting;
import org.motechproject.mds.domain.FieldValidation;
import org.motechproject.mds.domain.Lookup;
import org.motechproject.mds.domain.MdsEntity;
import org.motechproject.mds.domain.Type;
import org.motechproject.mds.domain.TypeSetting;
import org.motechproject.mds.domain.TypeValidation;
import org.motechproject.mds.dto.AdvancedSettingsDto;
import org.motechproject.mds.dto.DraftData;
import org.motechproject.mds.dto.DraftResult;
import org.motechproject.mds.dto.EntityDto;
import org.motechproject.mds.dto.FieldBasicDto;
import org.motechproject.mds.dto.FieldDto;
import org.motechproject.mds.dto.FieldValidationDto;
import org.motechproject.mds.dto.LookupDto;
import org.motechproject.mds.dto.LookupFieldDto;
import org.motechproject.mds.dto.MetadataDto;
import org.motechproject.mds.dto.RestOptionsDto;
import org.motechproject.mds.dto.SettingDto;
import org.motechproject.mds.dto.TrackingDto;
import org.motechproject.mds.dto.TypeDto;
import org.motechproject.mds.dto.ValidationCriterionDto;
import org.motechproject.mds.ex.entity.EntityAlreadyExistException;
import org.motechproject.mds.ex.entity.EntityChangedException;
import org.motechproject.mds.ex.entity.EntityNotFoundException;
import org.motechproject.mds.ex.entity.EntityReadOnlyException;
import org.motechproject.mds.ex.field.FieldNotFoundException;
import org.motechproject.mds.ex.lookup.LookupNotFoundException;
import org.motechproject.mds.ex.type.NoSuchTypeException;
import org.motechproject.mds.filter.Filter;
import org.motechproject.mds.filter.FilterValue;
import org.motechproject.mds.filter.Filters;
import org.motechproject.mds.helper.ComboboxDataMigrationHelper;
import org.motechproject.mds.helper.EntityHelper;
import org.motechproject.mds.helper.FieldHelper;
import org.motechproject.mds.javassist.MotechClassPool;
import org.motechproject.mds.repository.AllEntities;
import org.motechproject.mds.repository.AllEntityAudits;
import org.motechproject.mds.repository.AllEntityDrafts;
import org.motechproject.mds.repository.AllTypes;
import org.motechproject.mds.service.EntityService;
import org.motechproject.mds.service.MotechDataService;
import org.motechproject.mds.util.ClassName;
import org.motechproject.mds.util.Constants;
import org.motechproject.mds.util.LookupName;
import org.motechproject.mds.util.SecurityMode;
import org.motechproject.mds.util.TypeHelper;
import org.motechproject.mds.validation.EntityValidator;
import org.motechproject.osgi.web.util.OSGiServiceUtils;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.motechproject.mds.repository.query.DataSourceReferenceQueryExecutionHelper.DATA_SOURCE_CLASS_NAME;
import static org.motechproject.mds.repository.query.DataSourceReferenceQueryExecutionHelper.createLookupReferenceQuery;
import static org.motechproject.mds.util.Constants.MetadataKeys.RELATED_CLASS;
import static org.motechproject.mds.util.Constants.MetadataKeys.RELATED_FIELD;
import static org.motechproject.mds.util.Constants.MetadataKeys.RELATIONSHIP_COLLECTION_TYPE;
import static org.motechproject.mds.util.Constants.Util.AUTO_GENERATED;
import static org.motechproject.mds.util.Constants.Util.AUTO_GENERATED_EDITABLE;
import static org.motechproject.mds.util.Constants.Util.TRUE;
import static org.motechproject.mds.util.SecurityUtil.getUserPermissions;
import static org.motechproject.mds.util.SecurityUtil.getUsername;
/**
* Default implementation of {@link org.motechproject.mds.service.EntityService} interface.
*/
@Service
public class EntityServiceImpl implements EntityService {
private static final Logger LOGGER = LoggerFactory.getLogger(EntityServiceImpl.class);
// DraftData path that addresses a field's name in the "basic" section of the UI model.
private static final String NAME_PATH = "basic.name";
// Repositories and collaborators; injected elsewhere in this class (setters not in view).
private AllEntities allEntities;
private AllTypes allTypes;
private AllEntityDrafts allEntityDrafts;
private AllEntityAudits allEntityAudits;
private MDSConstructor mdsConstructor;
private BundleContext bundleContext;
private EntityValidator entityValidator;
private ComboboxDataMigrationHelper comboboxDataMigrationHelper;
@Override
@Transactional
public Long getCurrentSchemaVersion(String className) {
    // Resolve the entity by fully qualified class name, failing fast when absent.
    Entity existing = allEntities.retrieveByClassName(className);
    assertEntityExists(existing, className);
    return existing.getEntityVersion();
}
@Override
@Transactional
public void updateComboboxValues(Long entityId, Map<String, Collection> fieldValuesToUpdate) {
    // Merge any persisted combobox values that are missing from each field's
    // configured value list; bump the entity version only if something changed.
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    boolean entityChanged = false;
    for (Map.Entry<String, Collection> update : fieldValuesToUpdate.entrySet()) {
        Field field = entity.getField(update.getKey());
        if (field == null) {
            throw new FieldNotFoundException(entity.getClassName(), update.getKey());
        }
        ComboboxHolder cbHolder = new ComboboxHolder(field);
        List<String> cbValues = new ArrayList<>(Arrays.asList(cbHolder.getValues()));
        boolean fieldChanged = false;
        for (Object persistedValue : update.getValue()) {
            // Combobox values are stored with spaces encoded as %20.
            String normalized = persistedValue.toString().trim().replaceAll(" ", "%20");
            if (!cbValues.contains(normalized)) {
                cbValues.add(normalized);
                fieldChanged = true;
            }
        }
        if (fieldChanged) {
            FieldSetting cbValuesSetting = field.getSettingByName(Constants.Settings.COMBOBOX_VALUES);
            if (cbValuesSetting == null) {
                throw new IllegalArgumentException("Field " + field.getName() + " is not a comboBox");
            }
            cbValuesSetting.setValue(TypeHelper.buildStringFromList(cbValues));
            entityChanged = true;
        }
    }
    if (entityChanged) {
        allEntities.updateAndIncrementVersion(entity);
    }
}
@Override
@Transactional
public EntityDto createEntity(EntityDto entityDto) {
    // An empty package means the DTO came from the UI carrying only a display
    // name; generate a fully qualified class name for it first.
    String packageName = ClassName.getPackage(entityDto.getClassName());
    boolean fromUI = StringUtils.isEmpty(packageName);
    String username = getUsername();
    if (fromUI) {
        entityDto.setClassName(generateClassNameFromUiName(entityDto));
    }
    if (allEntities.contains(entityDto.getClassName())) {
        throw new EntityAlreadyExistException(entityDto.getName());
    }
    Entity entity = allEntities.create(entityDto);
    LOGGER.debug("Adding default fields to the entity which do not extend MdsEntity");
    if (!MdsEntity.class.getName().equalsIgnoreCase(entityDto.getSuperClass())) {
        EntityHelper.addDefaultFields(entity, allTypes);
    }
    // Record an audit entry when a user (rather than the system) created the entity.
    if (username != null) {
        allEntityAudits.createAudit(entity, username);
    }
    return entity.toDto();
}

/**
 * Builds the fully qualified generated class name for a UI-created entity.
 * Multi-word display names are collapsed to CamelCase (trimming the DTO's name
 * as a side effect, matching previous behavior); single-word names are used verbatim.
 *
 * @param entityDto the entity definition coming from the UI
 * @return fully qualified class name under {@link Constants.PackagesGenerated#ENTITY}
 */
private String generateClassNameFromUiName(EntityDto entityDto) {
    if (entityDto.getName().contains(" ")) {
        entityDto.setName(entityDto.getName().trim());
        StringBuilder simpleName = new StringBuilder();
        for (String part : entityDto.getName().split(" ")) {
            if (part.length() > 0) {
                simpleName.append(Character.toUpperCase(part.charAt(0)));
                if (part.length() > 1) {
                    simpleName.append(part.substring(1));
                }
            }
        }
        return String.format("%s.%s", Constants.PackagesGenerated.ENTITY, simpleName.toString());
    }
    // in this situation entity.getName() returns a simple name of class
    return String.format("%s.%s", Constants.PackagesGenerated.ENTITY, entityDto.getName());
}
@Override
@Transactional
public DraftResult saveDraftEntityChanges(Long entityId, DraftData draftData, String username) {
    // Apply the draft operation (create/edit/remove a field) to the user's draft
    // and report whether the draft now differs from, or lags behind, its parent.
    EntityDraft draft = getEntityDraft(entityId, username);
    if (draftData.isCreate()) {
        createFieldForDraft(draft, draftData);
    }
    else if (draftData.isEdit()) {
        draftEdit(draft, draftData);
    }
    else if (draftData.isRemove()) {
        draftRemove(draft, draftData);
    }
    return new DraftResult(draft.isChangesMade(), draft.isOutdated());
}
@Override
@Transactional
public DraftResult saveDraftEntityChanges(Long entityId, DraftData draftData) {
    // Convenience overload: resolve the current user and delegate.
    String currentUser = getUsername();
    return saveDraftEntityChanges(entityId, draftData, currentUser);
}
// Routes an edit operation to the handler matching its target
// (advanced settings, a single field, or security settings).
private void draftEdit(EntityDraft draft, DraftData draftData) {
    if (draftData.isForAdvanced()) {
        editAdvancedForDraft(draft, draftData);
    }
    else if (draftData.isForField()) {
        editFieldForDraft(draft, draftData);
    }
    else if (draftData.isForSecurity()) {
        editSecurityForDraft(draft, draftData);
    }
}
// Applies a security change from the UI. The payload is a list whose first
// element is the security mode name and whose optional second element is the
// list of users/roles the mode applies to.
private void editSecurityForDraft(EntityDraft draft, DraftData draftData) {
    List value = (List) draftData.getValue(DraftData.VALUE);
    if (value == null) {
        return;
    }
    SecurityMode securityMode = SecurityMode.getEnumByName((String) value.get(0));
    if (value.size() > 1) {
        draft.setSecurity(securityMode, (List<String>) value.get(1));
    } else {
        draft.setSecurityMode(securityMode);
    }
    allEntityDrafts.update(draft);
}
/**
 * Applies a single-field edit from the UI to the draft. When the edited
 * property is the field's name, the old-name -&gt; new-name mapping is recorded
 * (or updated, when the field was already renamed once) and the draft's lookup
 * indexes are refreshed so fieldsOrder entries keep pointing at the field.
 */
private void editFieldForDraft(EntityDraft draft, DraftData draftData) {
    String fieldIdStr = draftData.getValue(DraftData.FIELD_ID).toString();
    if (StringUtils.isNotBlank(fieldIdStr)) {
        Long fieldId = Long.valueOf(fieldIdStr);
        Field field = draft.getField(fieldId);
        if (field != null) {
            String path = draftData.getPath();
            List value = (List) draftData.getValue(DraftData.VALUE);
            // Convert to dto for UI updates
            FieldDto dto = field.toDto();
            dto.setUiChanged(true);
            FieldHelper.setField(dto, path, value);
            // If field name was changed add this change to map
            if (NAME_PATH.equals(path)) {
                Map<String, String> map = draft.getFieldNameChanges();
                String newName = value.get(0).toString();
                if (map.containsValue(field.getName())) {
                    // The field was renamed before: update the existing entry's value.
                    // Fixed: iterate entrySet and use Entry.setValue instead of the
                    // previous keySet iteration with map.put inside the loop (double
                    // lookup, and mutation while iterating the key view).
                    for (Map.Entry<String, String> entry : map.entrySet()) {
                        if (field.getName().equals(entry.getValue())) {
                            entry.setValue(newName);
                        }
                    }
                } else {
                    map.put(field.getName(), newName);
                }
                draft.setFieldNameChanges(map);
                List<LookupDto> lookups = draft.advancedSettingsDto().getIndexes();
                // Perform update
                field.update(dto);
                // we need update fields name in lookup fieldsOrder
                draft.updateIndexes(lookups);
                FieldHelper.addOrUpdateMetadataForCombobox(field);
            } else {
                // Perform update
                field.update(dto);
            }
            allEntityDrafts.update(draft);
        }
    }
}
// Applies an advanced-settings edit: validate the targeted path, write the new
// value into the DTO, regenerate lookup method names, and persist the draft.
private void editAdvancedForDraft(EntityDraft draft, DraftData draftData) {
    AdvancedSettingsDto advancedDto = draft.advancedSettingsDto();
    String settingPath = draftData.getPath();
    List newValue = (List) draftData.getValue(DraftData.VALUE);
    entityValidator.validateAdvancedSettingsEdit(draft, settingPath);
    FieldHelper.setField(advancedDto, settingPath, newValue);
    setLookupMethodNames(advancedDto);
    draft.updateAdvancedSetting(advancedDto);
    allEntityDrafts.update(draft);
}
// Keeps each lookup's generated method name in sync with its (possibly edited) lookup name.
private void setLookupMethodNames(AdvancedSettingsDto advancedDto) {
    for (LookupDto lookup : advancedDto.getIndexes()) {
        String methodName = LookupName.lookupMethod(lookup.getLookupName());
        lookup.setMethodName(methodName);
    }
}
/**
 * Creates a new field on the draft from UI-supplied draft data: resolves the
 * field type, copies the type's default settings and validations onto the
 * field, configures UI visibility, and persists the draft.
 */
private void createFieldForDraft(EntityDraft draft, DraftData draftData) {
    String typeClass = draftData.getValue(DraftData.TYPE_CLASS).toString();
    String displayName = draftData.getValue(DraftData.DISPLAY_NAME).toString();
    String name = draftData.getValue(DraftData.NAME).toString();
    boolean isTextArea = "textArea".equalsIgnoreCase(typeClass);
    // "textArea" is a pseudo-type persisted as a plain String plus a marker setting.
    Type type = isTextArea
            ? allTypes.retrieveByClassName("java.lang.String")
            : allTypes.retrieveByClassName(typeClass);
    if (type == null) {
        throw new NoSuchTypeException(typeClass);
    }
    Set<Lookup> fieldLookups = new HashSet<>();
    Field field = new Field(draft, name, displayName, fieldLookups);
    field.setType(type);
    // Seed the field with its type's default settings and validations.
    if (type.hasSettings()) {
        for (TypeSetting setting : type.getSettings()) {
            field.addSetting(new FieldSetting(field, setting));
        }
    }
    if (type.hasValidation()) {
        for (TypeValidation validation : type.getValidations()) {
            field.addValidation(new FieldValidation(field, validation));
        }
    }
    // BLOB fields are never shown in the UI grid; everything else is appended at the end.
    if (TypeDto.BLOB.getTypeClass().equals(typeClass)) {
        field.setUIDisplayable(false);
    } else {
        field.setUIDisplayable(true);
        field.setUIDisplayPosition((long) draft.getFields().size());
    }
    if (isTextArea) {
        setMetadataForTextArea(field);
    }
    FieldHelper.addMetadataForRelationship(typeClass, field);
    FieldHelper.addOrUpdateMetadataForCombobox(field);
    draft.addField(field);
    allEntityDrafts.update(draft);
}
// Flags the backing String field so the UI renders it as a multi-line text area.
private void setMetadataForTextArea(Field field) {
    if (field == null) {
        return;
    }
    for (FieldSetting setting : field.getSettings()) {
        if ("mds.form.label.textarea".equalsIgnoreCase(setting.getDetails().getName())) {
            setting.setValue("true");
        }
    }
}
// Removes a field from the draft. Validation throws first when the field is
// still referenced by a lookup, aborting the removal.
private void draftRemove(EntityDraft draft, DraftData draftData) {
    Long removedFieldId = Long.valueOf(draftData.getValue(DraftData.FIELD_ID).toString());
    entityValidator.validateFieldNotUsedByLookups(draft, removedFieldId);
    draft.removeField(removedFieldId);
    allEntityDrafts.update(draft);
}
@Override
@Transactional
public void abandonChanges(Long entityId) {
    // Discard the pending draft for the current user; no-op when none exists.
    EntityDraft pendingDraft = getEntityDraft(entityId);
    if (pendingDraft != null) {
        allEntityDrafts.delete(pendingDraft);
    }
}
@Override
@Transactional
public List<String> commitChanges(Long entityId, String changesOwner) {
// Commits the owner's draft into its parent entity and returns the modules
// whose bundles need a refresh as a consequence. The steps below are
// order-sensitive: renames are pushed to the schema and combobox data migrated
// before related fields are reconciled and the parent is updated from the draft.
List<String> modulesToRefresh = new ArrayList<>();
EntityDraft draft = getEntityDraft(entityId, changesOwner);
// Refuse to commit a draft whose parent changed after the draft was taken.
if (draft.isOutdated()) {
throw new EntityChangedException();
}
entityValidator.validateEntity(draft);
Entity parent = draft.getParentEntity();
String username = draft.getDraftOwnerUsername();
mdsConstructor.updateFields(parent.getId(), draft.getFieldNameChanges());
comboboxDataMigrationHelper.migrateComboboxDataIfNecessary(parent, draft);
// Reconcile many-to-many back-references on related entities (add/update/remove).
configureRelatedFields(parent, draft, modulesToRefresh);
parent.updateFromDraft(draft);
if (username != null) {
allEntityAudits.createAudit(parent, username);
}
// The draft is consumed by the commit.
allEntityDrafts.delete(draft);
addModuleToRefresh(parent, modulesToRefresh);
return modulesToRefresh;
}
// Queues the entity's module for a bundle refresh. Only DDE entities belong to
// a module; the same module is never queued twice.
private void addModuleToRefresh(Entity entity, List<String> modulesToRefresh) {
    boolean alreadyQueued = modulesToRefresh.contains(entity.getModule());
    if (entity.isDDE() && !alreadyQueued) {
        modulesToRefresh.add(entity.getModule());
    }
}
/**
 * Reconciles many-to-many relationship fields between the committed draft and
 * the live entity: fields present on both sides are updated, renamed fields are
 * matched up via the draft's rename map, new fields get a back-reference created
 * on the related entity, and fields dropped from the draft are removed.
 *
 * @param entity           the live (parent) entity being committed to
 * @param draft            the draft being committed
 * @param modulesToRefresh accumulator for modules whose bundles need a refresh
 */
private void configureRelatedFields(Entity entity, EntityDraft draft, List<String> modulesToRefresh) {
    Map<String, String> fieldNameChanges = draft.getFieldNameChanges();
    Map<String, Field> draftManyToMany = new HashMap<>();
    Map<String, Field> entityManyToMany = new HashMap<>();
    List<Field> fieldsToRemove = new ArrayList<>();
    retrieveRelatedFields(entity, draft, draftManyToMany, entityManyToMany, fieldsToRemove);
    for (String name : draftManyToMany.keySet()) {
        if (entityManyToMany.containsKey(name)) {
            // Same name on both sides: plain update.
            updateRelatedField(entityManyToMany.get(name), draftManyToMany.get(name), modulesToRefresh);
        } else if (fieldNameChanges.containsValue(name)) {
            // The field was renamed in the draft: locate it on the entity under its
            // old name. Fixed: the previous code called getOldName and then
            // immediately recomputed the same lookup with a duplicated inline loop.
            String key = getOldName(fieldNameChanges, name);
            updateRelatedField(entityManyToMany.get(key), draftManyToMany.get(name), modulesToRefresh);
        } else {
            // Brand-new relationship field: create the back-reference on the related entity.
            addRelatedField(draftManyToMany.get(name), modulesToRefresh);
        }
    }
    removeRelatedFields(fieldsToRemove, modulesToRefresh);
}
/**
 * Collects many-to-many relationship fields into the supplied output maps:
 * draft-side fields keyed by name, editable entity-side fields keyed by name,
 * and entity-side fields absent from the draft (under both their current and
 * their renamed name) into the removal list.
 */
private void retrieveRelatedFields(Entity entity, EntityDraft draft, Map<String, Field> draftManyToMany, Map<String, Field> entityManyToMany,
List<Field> fieldsToRemove) {
    for (Field draftField : draft.getFields()) {
        boolean isManyToMany = draftField.getType().getTypeClassName().equals(TypeDto.MANY_TO_MANY_RELATIONSHIP.getTypeClass());
        if (isManyToMany) {
            draftManyToMany.put(draftField.getName(), draftField);
        }
    }
    for (Field entityField : entity.getFields()) {
        boolean isEditableManyToMany =
                entityField.getType().getTypeClassName().equals(TypeDto.MANY_TO_MANY_RELATIONSHIP.getTypeClass())
                        && !entityField.isReadOnly();
        if (isEditableManyToMany) {
            entityManyToMany.put(entityField.getName(), entityField);
            String renamedTo = draft.getFieldNameChanges().get(entityField.getName());
            if (!draftManyToMany.containsKey(entityField.getName()) && !draftManyToMany.containsKey(renamedTo)) {
                fieldsToRemove.add(entityField);
            }
        }
    }
}
/**
 * Finds the original field name that was renamed to {@code newName}.
 * Iterates the entry set directly instead of the previous keySet + get
 * pattern, which performed a redundant lookup per key.
 *
 * @param fieldNameChanges map of old field name to new field name
 * @param newName          the post-rename field name to search for
 * @return the matching old name, or {@code null} when no rename produced {@code newName}
 */
private String getOldName(Map<String, String> fieldNameChanges, String newName) {
    for (Map.Entry<String, String> change : fieldNameChanges.entrySet()) {
        if (change.getValue().equals(newName)) {
            return change.getKey();
        }
    }
    return null;
}
// For each removed many-to-many field, drops the back-reference field from the
// other side of the relationship and bumps that entity's version.
private void removeRelatedFields(List<Field> fields, List<String> modulesToRefresh) {
    for (Field field : fields) {
        Entity relatedEntity = allEntities.retrieveByClassName(field.getMetadataValue(RELATED_CLASS));
        Field backReference = relatedEntity.getField(field.getMetadataValue(RELATED_FIELD));
        relatedEntity.removeField(backReference.getId());
        relatedEntity.incrementVersion();
        addModuleToRefresh(relatedEntity, modulesToRefresh);
    }
}
/**
 * Synchronizes the back-reference field on the related entity after a many-to-many
 * field was modified in a draft.
 *
 * <p>If the relationship now points at a different class, the old back-reference is
 * removed and a fresh one is created on the new target; otherwise the existing
 * back-reference's collection type, name and related-field metadata are updated in
 * place. The related entity's version is bumped only when something actually changed.
 */
private void updateRelatedField(Field oldField, Field draftField, List<String> modulesToRefresh) {
    Entity relatedEntity = allEntities.retrieveByClassName(oldField.getMetadataValue(RELATED_CLASS));
    Field relatedField = relatedEntity.getField(oldField.getMetadataValue(RELATED_FIELD));
    boolean fieldChanged = false;
    boolean relatedEntityChanged = false;
    // Target class changed: recreate the back-reference on the new target and drop the old one.
    if (!StringUtils.equals(draftField.getMetadataValue(RELATED_CLASS), oldField.getMetadataValue(RELATED_CLASS))) {
        addRelatedField(draftField, modulesToRefresh);
        relatedEntity.removeField(relatedField.getId());
        relatedEntityChanged = true;
    }
    // The in-place updates below are skipped when the back-reference was just removed.
    if (!relatedEntityChanged && !StringUtils.equals(draftField.getMetadataValue(RELATIONSHIP_COLLECTION_TYPE),
            oldField.getMetadataValue(RELATIONSHIP_COLLECTION_TYPE))) {
        relatedField.setMetadataValue(RELATIONSHIP_COLLECTION_TYPE, draftField.getMetadataValue(RELATIONSHIP_COLLECTION_TYPE));
        fieldChanged = true;
    }
    if (!relatedEntityChanged && !StringUtils.equals(draftField.getMetadataValue(RELATED_FIELD),
            oldField.getMetadataValue(RELATED_FIELD))) {
        relatedField.setName(draftField.getMetadataValue(RELATED_FIELD));
        fieldChanged = true;
    }
    // The draft field itself was renamed: mirror the new name in the back-reference metadata.
    if (!relatedEntityChanged && !oldField.getName().equals(draftField.getName())) {
        relatedField.setMetadataValue(RELATED_FIELD, draftField.getName());
        fieldChanged = true;
    }
    if (fieldChanged || relatedEntityChanged) {
        relatedEntity.incrementVersion();
    }
    addModuleToRefresh(relatedEntity, modulesToRefresh);
}
/**
 * Creates the back-reference many-to-many field on the related entity for a new
 * relationship field defined in a draft, then bumps the related entity's version
 * and schedules its module for refresh.
 */
private void addRelatedField(Field draftField, List<String> modulesToRefresh) {
    Entity entity = allEntities.retrieveByClassName(draftField.getMetadataValue(RELATED_CLASS));
    String fieldName = draftField.getMetadataValue(RELATED_FIELD);
    String collectionType = draftField.getMetadataValue(RELATIONSHIP_COLLECTION_TYPE);
    String relatedClass = draftField.getEntity().getClassName();
    Set<Lookup> fieldLookups = new HashSet<>();
    Field relatedField = new Field(entity, fieldName, fieldName, false, false, false, false, null, null, null, fieldLookups);
    Type type = allTypes.retrieveByClassName(TypeDto.MANY_TO_MANY_RELATIONSHIP.getTypeClass());
    relatedField.setType(type);
    // Copy the type's default settings onto the new field.
    if (type.hasSettings()) {
        for (TypeSetting setting : type.getSettings()) {
            relatedField.addSetting(new FieldSetting(relatedField, setting));
        }
    }
    relatedField.setUIDisplayable(true);
    // Append at the end of the current display order.
    relatedField.setUIDisplayPosition((long) entity.getFields().size());
    FieldHelper.createMetadataForManyToManyRelationship(relatedField, relatedClass, collectionType, draftField.getName(), false);
    entity.addField(relatedField);
    entity.incrementVersion();
    addModuleToRefresh(entity, modulesToRefresh);
}
/** Commits the current user's draft changes for the given entity. */
@Override
@Transactional
public List<String> commitChanges(Long entityId) {
    return commitChanges(entityId, getUsername());
}
/**
 * Lists the current user's entity drafts that actually contain pending changes.
 *
 * @return DTOs of every draft with unsaved modifications; empty list when none
 */
@Override
@Transactional
public List<EntityDto> listWorkInProgress() {
    String username = getUsername();
    List<EntityDto> result = new ArrayList<>();
    for (EntityDraft draft : allEntityDrafts.retrieveAll(username)) {
        // Pristine drafts (no edits yet) are not considered work in progress.
        if (draft.isChangesMade()) {
            result.add(draft.toDto());
        }
    }
    return result;
}
/** Returns the advanced settings of the current user's draft for the entity. */
@Override
@Transactional
public AdvancedSettingsDto getAdvancedSettings(Long entityId) {
    return getAdvancedSettings(entityId, false);
}
/**
 * Returns advanced settings either from the committed entity or from the
 * current user's draft.
 *
 * @param entityId id of the entity
 * @param committed {@code true} to read the persisted entity, {@code false} for the draft
 * @return the advanced settings enriched with non-persistent data
 */
@Override
@Transactional
public AdvancedSettingsDto getAdvancedSettings(Long entityId, boolean committed) {
    // Both branches end with the same enrichment call; only the entity source differs.
    Entity entity;
    if (committed) {
        entity = allEntities.retrieveById(entityId);
        assertEntityExists(entity, entityId);
    } else {
        // getEntityDraft() performs its own existence check.
        entity = getEntityDraft(entityId);
    }
    return addNonPersistentAdvancedSettingsData(entity.advancedSettingsDto(), entity, committed);
}
/**
 * Returns the committed advanced settings for an entity class, or {@code null}
 * when no entity with that class name exists ("safe": never throws for unknown classes).
 */
@Override
@Transactional
public AdvancedSettingsDto safeGetAdvancedSettingsCommitted(String entityClassName) {
    Entity entity = allEntities.retrieveByClassName(entityClassName);
    if (entity == null) {
        return null;
    }
    return addNonPersistentAdvancedSettingsData(entity.advancedSettingsDto(), entity, true);
}
/** Updates the REST exposure options of an existing entity. */
@Override
@Transactional
public void updateRestOptions(Long entityId, RestOptionsDto restOptionsDto) {
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    entity.updateRestOptions(restOptionsDto);
}
/** Updates the audit/tracking options of an existing entity. */
@Override
@Transactional
public void updateTracking(Long entityId, TrackingDto trackingDto) {
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    entity.updateTracking(trackingDto);
}
/** Convenience overload delegating to {@link #addLookups(Long, Collection)}. */
@Override
@Transactional
public void addLookups(EntityDto entityDto, LookupDto... lookups) {
    addLookups(entityDto.getId(), Arrays.asList(lookups));
}
/** Convenience overload delegating to {@link #addLookups(Long, Collection)}. */
@Override
@Transactional
public void addLookups(EntityDto entityDto, Collection<LookupDto> lookups) {
    addLookups(entityDto.getId(), lookups);
}
/** Convenience overload delegating to {@link #addLookups(Long, Collection)}. */
@Override
@Transactional
public void addLookups(Long entityId, LookupDto... lookups) {
    addLookups(entityId, Arrays.asList(lookups));
}
/**
 * Replaces the entity's generated lookups with the given set: generated lookups
 * not present in {@code lookups} are removed, the rest are added or updated.
 */
@Override
@Transactional
public void addLookups(Long entityId, Collection<LookupDto> lookups) {
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    removeLookup(entity, lookups);
    addOrUpdateLookups(entity, lookups);
}
/**
 * Removes generated (read-only) lookups that are no longer present in the given
 * DTO collection. User-defined lookups are never touched.
 */
private void removeLookup(Entity entity, Collection<LookupDto> lookups) {
    // Iterator is required here so removal during iteration is safe.
    Iterator<Lookup> iterator = entity.getLookups().iterator();
    while (iterator.hasNext()) {
        Lookup lookup = iterator.next();
        // don't remove user defined lookups
        if (!lookup.isReadOnly()) {
            continue;
        }
        // Keep the lookup only if a DTO with the same (case-insensitive) name remains.
        boolean found = false;
        for (LookupDto lookupDto : lookups) {
            if (lookup.getLookupName().equalsIgnoreCase(lookupDto.getLookupName())) {
                found = true;
                break;
            }
        }
        if (!found) {
            iterator.remove();
        }
    }
}
/**
 * Adds new lookups to the entity or updates existing ones (matched by name),
 * resolving each lookup field DTO to the entity's actual {@link Field} objects
 * and recording the field order on the DTO.
 */
private void addOrUpdateLookups(Entity entity, Collection<LookupDto> lookups) {
    for (LookupDto lookupDto : lookups) {
        Lookup lookup = entity.getLookupByName(lookupDto.getLookupName());
        List<String> fieldsOrder = new ArrayList<>();
        List<Field> lookupFields = new ArrayList<>();
        for (LookupFieldDto lookupField : lookupDto.getLookupFields()) {
            String fieldName = lookupField.getName();
            Field field;
            field = entity.getField(fieldName);
            // Order entries may include a related-field suffix ("field.related").
            fieldsOrder.add(LookupName.buildLookupFieldName(lookupField.getName(), lookupField.getRelatedName()));
            if (field == null) {
                // Unknown field names are logged and skipped rather than failing the whole update.
                LOGGER.error("No field {} in entity {}", fieldName, entity.getClassName());
            } else {
                // Avoid duplicates when the same field backs multiple lookup entries.
                if (!lookupFields.contains(field)) {
                    lookupFields.add(field);
                }
            }
        }
        lookupDto.setFieldsOrder(fieldsOrder);
        if (lookup == null) {
            Lookup newLookup = new Lookup(lookupDto, lookupFields);
            entity.addLookup(newLookup);
        } else {
            lookup.update(lookupDto, lookupFields);
        }
    }
}
/**
 * Maps each lookup field name (possibly dotted, "field.related") of the named
 * lookup to its field DTO, resolving dotted names through the related entity.
 *
 * @throws LookupNotFoundException when the entity has no lookup with that name
 */
@Override
@Transactional
public Map<String, FieldDto> getLookupFieldsMapping(Long entityId, String lookupName) {
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    Lookup lookup = entity.getLookupByName(lookupName);
    if (lookup == null) {
        throw new LookupNotFoundException(entity.getName(), lookupName);
    }
    Map<String, FieldDto> fieldMap = new HashMap<>();
    for (String lookupFieldName : lookup.getFieldsOrder()) {
        Field field = lookup.getLookupFieldByName(LookupName.getFieldName(lookupFieldName));
        // A dot means the lookup reaches through a relationship; resolve the
        // target field on the related entity instead.
        if (lookupFieldName.contains(".")) {
            Entity relatedEntity = allEntities.retrieveByClassName(field.getMetadata(Constants.MetadataKeys.RELATED_CLASS).getValue());
            field = relatedEntity.getField(LookupName.getRelatedFieldName(lookupFieldName));
        }
        fieldMap.put(lookupFieldName, field.toDto());
    }
    return fieldMap;
}
/**
 * Deletes a user-defined entity together with all of its drafts.
 * When a draft id is passed, the parent entity is deleted instead.
 *
 * @throws EntityReadOnlyException for developer-defined (DDE) entities
 */
@Override
@Transactional
public void deleteEntity(Long entityId) {
    Entity entity = allEntities.retrieveById(entityId);
    assertWritableEntity(entity, entityId);
    if (entity.isDraft()) {
        entity = ((EntityDraft) entity).getParentEntity();
    }
    allEntityDrafts.deleteAll(entity);
    allEntities.delete(entity);
}
/** Lists all actual entities without applying user security filtering. */
@Override
@Transactional
public List<EntityDto> listEntities() {
    return listEntities(false);
}
/**
 * Lists all actual (non-draft, non-history) entities, optionally filtered to
 * those the current user may access.
 */
@Override
@Transactional
public List<EntityDto> listEntities(boolean withSecurityCheck) {
    List<EntityDto> entityDtos = new ArrayList<>();
    for (Entity entity : allEntities.retrieveAll()) {
        if (entity.isActualEntity()) {
            if (!withSecurityCheck || hasAccessToEntity(entity)) {
                entityDtos.add(entity.toDto());
            }
        }
    }
    return entityDtos;
}
/**
 * Checks whether the current user may view the entity according to its security
 * mode: USERS restricts by username, PERMISSIONS by any matching permission,
 * and every other mode is unrestricted.
 */
private boolean hasAccessToEntity(Entity entity) {
    SecurityMode mode = entity.getSecurityMode();
    Set<String> members = entity.getSecurityMembers();
    if (SecurityMode.USERS.equals(mode)) {
        // Username-based restriction: the user must be listed explicitly.
        return members.contains(getUsername());
    } else if (SecurityMode.PERMISSIONS.equals(mode)) {
        for (String permission : getUserPermissions()) {
            if (members.contains(permission)) {
                return true;
            }
        }
        // Only allowed permissions can view, but current user
        // doesn't have any of the required permissions
        return false;
    }
    // There's no user and permission restriction, which means
    // the user can see this entity
    return true;
}
/**
 * Retrieves an entity by id.
 *
 * @return the entity DTO, or {@code null} when the id is unknown (null-tolerant read)
 */
@Override
@Transactional
public EntityDto getEntity(Long entityId) {
    Entity entity = allEntities.retrieveById(entityId);
    if (entity == null) {
        return null;
    }
    return entity.toDto();
}
/**
 * Retrieves an entity by its fully qualified class name.
 *
 * @return the entity DTO, or {@code null} when no such class is registered
 */
@Override
@Transactional
public EntityDto getEntityByClassName(String className) {
    Entity entity = allEntities.retrieveByClassName(className);
    if (entity == null) {
        return null;
    }
    return entity.toDto();
}
/**
 * Finds all actual entities whose class name starts with the given package name.
 * The prefix match is implemented as a datastore filter using a custom
 * {@code startsWith} operator.
 */
@Override
@Transactional
public List<EntityDto> findEntitiesByPackage(String packageName) {
    List<EntityDto> entities = new ArrayList<>();
    // Anonymous FilterValue producing "className.startsWith(<packageName>)" in the query.
    FilterValue filterValue = new FilterValue() {
        @Override
        public Object valueForQuery() {
            return super.getValue();
        }
        @Override
        public String paramTypeForQuery() {
            return String.class.getName();
        }
        @Override
        public List<String> operatorForQueryFilter() {
            // Rendered around the parameter: ".startsWith(" + value + ")".
            return Arrays.asList(".startsWith(", ")");
        }
    };
    filterValue.setValue(packageName);
    Filter filter = new Filter("className", new FilterValue[]{filterValue});
    for (Entity entity : allEntities.filter(new Filters(filter), null, null)) {
        if (entity.isActualEntity()) {
            entities.add(entity.toDto());
        }
    }
    return entities;
}
/** Returns the lookups of the committed entity (not of any draft). */
@Override
@Transactional
public List<LookupDto> getEntityLookups(Long entityId) {
    return getLookups(entityId, false);
}
/**
 * Collects lookup DTOs either from the current user's draft or from the
 * committed entity.
 */
private List<LookupDto> getLookups(Long entityId, boolean forDraft) {
    Entity entity = (forDraft) ? getEntityDraft(entityId) : allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    List<LookupDto> lookupDtos = new ArrayList<>();
    for (Lookup lookup : entity.getLookups()) {
        lookupDtos.add(lookup.toDto());
    }
    return lookupDtos;
}
/** Returns the fields of the current user's DRAFT of the entity. */
@Override
@Transactional
public List<FieldDto> getFields(Long entityId) {
    return getFields(entityId, true);
}
/** Returns the fields of the COMMITTED entity, sorted for display. */
@Override
@Transactional
public List<FieldDto> getEntityFields(Long entityId) {
    return getFields(entityId, false);
}
/**
 * Returns the field DTOs of the entity identified by class name, in the order
 * the entity stores them.
 */
@Override
@Transactional
public List<FieldDto> getEntityFieldsByClassName(String className) {
    Entity entity = allEntities.retrieveByClassName(className);
    assertEntityExists(entity, className);
    List<FieldDto> fieldDtos = new ArrayList<>();
    for (Field field : entity.getFields()) {
        fieldDtos.add(field.toDto());
    }
    return fieldDtos;
}
/**
 * Returns field DTOs for either the user's draft or the committed entity.
 * For the committed entity (data browser use) the fields are sorted:
 * displayable fields first, then by UI display position (nulls first).
 */
private List<FieldDto> getFields(Long entityId, boolean forDraft) {
    Entity entity = (forDraft) ? getEntityDraft(entityId) : allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    // the returned collection is unmodifiable
    List<Field> fields = new ArrayList<>(entity.getFields());
    // for data browser purposes, we sort the fields by their ui display order
    if (!forDraft) {
        Collections.sort(fields, new Comparator<Field>() {
            @Override
            public int compare(Field o1, Field o2) {
                // displayable fields sort before non-displayable ones
                if (o1.isUIDisplayable() && !o2.isUIDisplayable()) {
                    return -1;
                } else if (!o1.isUIDisplayable() && o2.isUIDisplayable()) {
                    return 1;
                }
                // Compare positions with nulls first. The original returned -1 for
                // equal or both-null positions, violating the Comparator contract
                // (sort could throw "Comparison method violates its general contract!").
                Long position1 = o1.getUIDisplayPosition();
                Long position2 = o2.getUIDisplayPosition();
                if (position1 == null && position2 == null) {
                    return 0;
                } else if (position1 == null) {
                    return -1;
                } else if (position2 == null) {
                    return 1;
                } else {
                    return position1.compareTo(position2);
                }
            }
        });
    }
    List<FieldDto> fieldDtos = new ArrayList<>();
    for (Field field : fields) {
        fieldDtos.add(field.toDto());
    }
    return addNonPersistentFieldsData(fieldDtos, entity);
}
/**
 * Finds a field by name in the current user's draft of the entity.
 *
 * @throws FieldNotFoundException when the draft has no field with that name
 */
@Override
@Transactional
public FieldDto findFieldByName(Long entityId, String name) {
    Entity entity = getEntityDraft(entityId);
    Field field = entity.getField(name);
    if (field == null) {
        throw new FieldNotFoundException(entity.getClassName(), name);
    }
    return field.toDto();
}
/**
 * Finds a field by name in the committed entity.
 *
 * @throws EntityNotFoundException when no entity exists for {@code entityId}
 * @throws FieldNotFoundException when the entity has no field with that name
 */
@Override
@Transactional
public FieldDto findEntityFieldByName(Long entityId, String name) {
    Entity entity = allEntities.retrieveById(entityId);
    // Guard against unknown ids before dereferencing; without this the method
    // threw a bare NPE, unlike the sibling retrieval methods in this service.
    assertEntityExists(entity, entityId);
    Field field = entity.getField(name);
    if (field == null) {
        throw new FieldNotFoundException(entity.getClassName(), name);
    }
    return field.toDto();
}
/**
 * Finds a field by id in the committed entity.
 *
 * @throws EntityNotFoundException when no entity exists for {@code entityId}
 * @throws FieldNotFoundException when the entity has no field with that id
 */
@Override
@Transactional
public FieldDto getEntityFieldById(Long entityId, Long fieldId) {
    Entity entity = allEntities.retrieveById(entityId);
    // Guard against unknown ids before dereferencing; without this the method
    // threw a bare NPE, unlike the sibling retrieval methods in this service.
    assertEntityExists(entity, entityId);
    Field field = entity.getField(fieldId);
    if (field == null) {
        throw new FieldNotFoundException(entity.getClassName(), fieldId);
    }
    return field.toDto();
}
/** Returns the user's editable draft of the entity as a DTO, creating it if needed. */
@Override
@Transactional
public EntityDto getEntityForEdit(Long entityId) {
    Entity draft = getEntityDraft(entityId);
    return draft.toDto();
}
/** Returns (or lazily creates) the current user's draft of the entity. */
@Override
@Transactional
public EntityDraft getEntityDraft(Long entityId) {
    return getEntityDraft(entityId, getUsername());
}
/**
 * Returns the named user's draft of the entity, creating one on first access.
 * If {@code entityId} already identifies a draft, that draft is returned directly.
 *
 * @throws AccessDeniedException when no username is available
 */
@Override
@Transactional
public EntityDraft getEntityDraft(Long entityId, String username) {
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    // The id may already point at a draft; no per-user lookup needed then.
    if (entity instanceof EntityDraft) {
        return (EntityDraft) entity;
    }
    if (username == null) {
        throw new AccessDeniedException("Cannot save draft - no user");
    }
    // get the draft
    EntityDraft draft = allEntityDrafts.retrieve(entity, username);
    if (draft == null) {
        // Lazily create the user's draft on first edit.
        draft = allEntityDrafts.create(entity, username);
    }
    return draft;
}
/** Convenience overload delegating to {@link #addFields(Long, Collection)}. */
@Override
@Transactional
public void addFields(EntityDto entity, Collection<FieldDto> fields) {
    addFields(entity.getId(), fields);
}
/** Convenience overload delegating to {@link #addFields(Long, Collection)}. */
@Override
@Transactional
public void addFields(Long entityId, FieldDto... fields) {
    addFields(entityId, Arrays.asList(fields));
}
/** Convenience overload delegating to {@link #addFields(Long, Collection)}. */
@Override
@Transactional
public void addFields(EntityDto entity, FieldDto... fields) {
    addFields(entity.getId(), Arrays.asList(fields));
}
/**
 * Synchronizes the entity's fields with the given DTOs: obsolete generated fields
 * are removed, existing fields (matched by name) are updated, new ones are added.
 */
@Override
@Transactional
public void addFields(Long entityId, Collection<FieldDto> fields) {
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    removeFields(entity, fields);
    for (FieldDto fieldDto : fields) {
        Field existing = entity.getField(fieldDto.getBasic().getName());
        if (null != existing) {
            existing.update(fieldDto);
        } else {
            addField(entity, fieldDto);
        }
    }
}
/**
 * Removes generated read-only fields that are no longer present in the given
 * DTO collection. User-defined and auto-generated fields are left untouched.
 */
private void removeFields(Entity entity, Collection<FieldDto> fields) {
    // Iterator is required here so removal during iteration is safe.
    Iterator<Field> iterator = entity.getFields().iterator();
    while (iterator.hasNext()) {
        Field field = iterator.next();
        // don't remove user defined fields
        if (!field.isReadOnly() || field.getMetadata(AUTO_GENERATED) != null ||
                field.getMetadata(AUTO_GENERATED_EDITABLE) != null) {
            continue;
        }
        // Keep the field only if a DTO with the same (case-insensitive) name remains.
        boolean found = false;
        for (FieldDto fieldDto : fields) {
            if (field.getName().equalsIgnoreCase(fieldDto.getBasic().getName())) {
                found = true;
                break;
            }
        }
        if (!found) {
            iterator.remove();
        }
    }
}
/**
 * Builds a new {@link Field} from the DTO — including its type, settings,
 * validations and metadata — and attaches it to the entity.
 */
private void addField(Entity entity, FieldDto fieldDto) {
    FieldBasicDto basic = fieldDto.getBasic();
    String typeClass = fieldDto.getType().getTypeClass();
    Type type = allTypes.retrieveByClassName(typeClass);
    Field field = new Field(
            entity, basic.getName(), basic.getDisplayName(), basic.isRequired(), fieldDto.isReadOnly(), fieldDto.isNonEditable(),
            fieldDto.isNonDisplayable(), (String) basic.getDefaultValue(), basic.getTooltip(), basic.getPlaceholder(), null
    );
    field.setType(type);
    // Create one field setting per type setting, taking the value from the DTO when present.
    if (type.hasSettings()) {
        for (TypeSetting setting : type.getSettings()) {
            SettingDto settingDto = fieldDto.getSetting(setting.getName());
            FieldSetting fieldSetting = new FieldSetting(field, setting);
            if (null != settingDto) {
                fieldSetting.setValue(settingDto.getValueAsString());
            }
            field.addSetting(fieldSetting);
        }
    }
    // Create one field validation per type validation; criteria are matched by display name.
    if (type.hasValidation()) {
        for (TypeValidation validation : type.getValidations()) {
            FieldValidation fieldValidation = new FieldValidation(field, validation);
            FieldValidationDto validationDto = fieldDto.getValidation();
            if (null != validationDto) {
                ValidationCriterionDto criterion = validationDto
                        .getCriterion(validation.getDisplayName());
                if (null != criterion) {
                    fieldValidation.setValue(criterion.valueAsString());
                    fieldValidation.setEnabled(criterion.isEnabled());
                }
            }
            field.addValidation(fieldValidation);
        }
    }
    // Copy any field-level metadata.
    for (MetadataDto metadata : fieldDto.getMetadata()) {
        field.addMetadata(new FieldMetadata(metadata));
    }
    entity.addField(field);
}
/**
 * Marks exactly the named fields as UI-filterable; every other field of the
 * entity has the flag cleared.
 */
@Override
@Transactional
public void addFilterableFields(EntityDto entityDto, Collection<String> fieldNames) {
    Entity entity = allEntities.retrieveById(entityDto.getId());
    assertEntityExists(entity, entityDto.getId());
    for (Field field : entity.getFields()) {
        boolean isUIFilterable = fieldNames.contains(field.getName());
        field.setUIFilterable(isUIFilterable);
    }
}
/**
 * Resets the user's draft back to the committed entity's state and clears
 * its changes-made flag.
 */
@Override
@Transactional
public EntityDto updateDraft(Long entityId) {
    Entity entity = allEntities.retrieveById(entityId);
    EntityDraft draft = getEntityDraft(entityId);
    // Copy the committed entity's properties over the draft.
    allEntityDrafts.setProperties(draft, entity);
    draft.setChangesMade(false);
    return draft.toDto();
}
/** Returns the named lookup of the entity as a DTO, or {@code null} when absent. */
@Override
@Transactional
public LookupDto getLookupByName(Long entityId, String lookupName) {
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    Lookup lookup = entity.getLookupByName(lookupName);
    return (lookup == null) ? null : lookup.toDto();
}
/** Returns DTOs of all fields flagged as UI-displayable on the entity. */
@Override
@Transactional
public List<FieldDto> getDisplayFields(Long entityId) {
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    List<FieldDto> displayFields = new ArrayList<>();
    for (Field field : entity.getFields()) {
        if (field.isUIDisplayable()) {
            displayFields.add(field.toDto());
        }
    }
    return displayFields;
}
/**
 * Marks exactly the fields named in the map as non-editable. The map value
 * controls visibility: {@code true} keeps the field displayable, {@code false}
 * hides it; fields absent from the map become editable and displayable.
 */
@Override
@Transactional
public void addNonEditableFields(EntityDto entityDto, Map<String, Boolean> nonEditableFields) {
    Entity entity = allEntities.retrieveById(entityDto.getId());
    assertEntityExists(entity, entityDto.getId());
    List<Field> fields = entity.getFields();
    for (Field field : fields) {
        boolean isNonEditable = nonEditableFields.containsKey(field.getName());
        Boolean display = nonEditableFields.get(field.getName());
        field.setNonEditable(isNonEditable);
        if (display != null) {
            // display==true means visible, so non-displayable is its negation.
            field.setNonDisplayable(!display);
        } else {
            field.setNonDisplayable(false);
        }
    }
}
/**
 * Configures which fields of the entity are shown in the UI and at what position.
 * With an empty/null position map all DDE fields are displayed in their current
 * order; otherwise exactly the mapped fields are displayed at the mapped positions.
 */
@Override
@Transactional
public void addDisplayedFields(EntityDto entityDto, Map<String, Long> positions) {
    Entity entity = allEntities.retrieveById(entityDto.getId());
    assertEntityExists(entity, entityDto.getId());
    List<Field> fields = entity.getFields();
    if (MapUtils.isEmpty(positions)) {
        // all fields will be added
        // (int index loop instead of the original long counter with an int cast)
        for (int i = 0; i < fields.size(); i++) {
            Field field = fields.get(i);
            // user fields and auto generated fields are ignored
            if (isFieldFromDde(field)) {
                field.setUIDisplayable(true);
                field.setUIDisplayPosition((long) i);
            }
        }
    } else {
        // only fields in map should be added
        for (Field field : fields) {
            String fieldName = field.getName();
            boolean isUIDisplayable = positions.containsKey(fieldName);
            Long uiDisplayPosition = positions.get(fieldName);
            field.setUIDisplayable(isUIDisplayable);
            field.setUIDisplayPosition(uiDisplayPosition);
        }
    }
}
/** Replaces the entity's security mode and member set, then persists the change. */
@Override
@Transactional
public void updateSecurityOptions(Long entityId, SecurityMode securityMode, Set<String> securityMembers) {
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    entity.setSecurityMode(securityMode);
    entity.setSecurityMembers(securityMembers);
    allEntities.update(entity);
}
/** Sets the entity's maximum relationship fetch depth and persists the change. */
@Override
@Transactional
public void updateMaxFetchDepth(Long entityId, Integer maxFetchDepth) {
    Entity entity = allEntities.retrieveById(entityId);
    assertEntityExists(entity, entityId);
    entity.setMaxFetchDepth(maxFetchDepth);
    allEntities.update(entity);
}
/** Throws {@link EntityNotFoundException} (with the id) when the entity is null. */
private void assertEntityExists(Entity entity, Long entityId) {
    if (entity == null) {
        throw new EntityNotFoundException(entityId);
    }
}
/** Throws {@link EntityNotFoundException} (with the class name) when the entity is null. */
private void assertEntityExists(Entity entity, String entityClassName) {
    if (entity == null) {
        throw new EntityNotFoundException(entityClassName);
    }
}
/**
 * Ensures the entity exists and is user-modifiable; developer-defined (DDE)
 * entities are read-only and trigger {@link EntityReadOnlyException}.
 */
private void assertWritableEntity(Entity entity, Long entityId) {
    assertEntityExists(entity, entityId);
    if (entity.isDDE()) {
        throw new EntityReadOnlyException(entity.getName());
    }
}
/**
 * Determines whether a field originates from developer-defined entity code:
 * read-only fields without auto-generated metadata. User-created and
 * auto-generated fields return {@code false}.
 */
private boolean isFieldFromDde(Field field) {
    // only readonly fields are considered
    if (field.isReadOnly()) {
        // check metadata for auto generated
        for (String mdKey : Arrays.asList(AUTO_GENERATED, AUTO_GENERATED_EDITABLE)) {
            FieldMetadata metaData = field.getMetadata(mdKey);
            if (metaData != null && TRUE.equals(metaData.getValue())) {
                return false;
            }
        }
        // readonly and no auto generated metadata
        return true;
    }
    // not readonly, defined by user
    return false;
}
/**
 * Enriches field DTOs with data not stored in the database: gathers every
 * lookup attached to the fields and marks which of them are referenced by
 * external data sources.
 *
 * @return the same {@code fieldDtos} list, enriched in place
 */
private List<FieldDto> addNonPersistentFieldsData(List<FieldDto> fieldDtos, Entity entity) {
    List<LookupDto> lookupDtos = new ArrayList<>();
    for (FieldDto fieldDto : fieldDtos) {
        List<LookupDto> fieldLookups = fieldDto.getLookups();
        if (fieldLookups != null) {
            lookupDtos.addAll(fieldLookups);
        }
    }
    addLookupsReferences(lookupDtos, entity.getClassName());
    return fieldDtos;
}
/**
 * Enriches advanced-settings DTOs with non-persistent information about their
 * lookup indexes.
 *
 * @return the same {@code advancedSettingsDto}, enriched in place
 */
private AdvancedSettingsDto addNonPersistentAdvancedSettingsData(AdvancedSettingsDto advancedSettingsDto, Entity entity, boolean committed) {
    //For dataBrowser we need to add information about the lookup fields(type, settings, displayName)
    if (committed) {
        addNonPersistentDataForLookupFields(advancedSettingsDto.getIndexes(), entity);
    }
    addLookupsReferences(advancedSettingsDto.getIndexes(), entity.getClassName());
    return advancedSettingsDto;
}
/**
 * Fills in display data (settings, display name, type class) for every lookup
 * field DTO. Dotted lookup fields ("field.related") are resolved through the
 * related entity.
 */
private void addNonPersistentDataForLookupFields(Collection<LookupDto> lookupDtos, Entity entity) {
    for (LookupDto lookup : lookupDtos) {
        for (LookupFieldDto lookupField : lookup.getLookupFields()) {
            if (StringUtils.isNotBlank(lookupField.getRelatedName())) {
                // Relationship lookup: resolve the target field on the related entity.
                Field field = entity.getField(lookupField.getName());
                Entity relatedEntity = allEntities.retrieveByClassName(field.getMetadata(Constants.MetadataKeys.RELATED_CLASS).getValue());
                addNonPersistentDataForLookupField(relatedEntity.getField(lookupField.getRelatedName()), lookupField, field.getDisplayName());
            } else {
                addNonPersistentDataForLookupField(entity.getField(lookupField.getName()), lookupField, null);
            }
        }
    }
}
/**
 * Copies a field's settings, display name and type class onto the lookup-field
 * DTO. {@code nameParam}, when non-blank, overrides the field's own display name.
 */
private void addNonPersistentDataForLookupField(Field field, LookupFieldDto lookupField, String nameParam) {
    lookupField.setSettings(field.settingsToDto());
    lookupField.setDisplayName(StringUtils.isNotBlank(nameParam) ? nameParam : field.getDisplayName());
    lookupField.setClassName(field.getType().getTypeClass().getName());
}
/**
 * Marks each lookup DTO as referenced when at least one external data source
 * uses it. When the data-source service is not available in the OSGi registry,
 * the DTOs are left untouched.
 */
private void addLookupsReferences(Collection<LookupDto> lookupDtos, String entityClassName) {
    MotechDataService dataSourceDataService = OSGiServiceUtils.findService(
            bundleContext, MotechClassPool.getInterfaceName(DATA_SOURCE_CLASS_NAME));
    if (dataSourceDataService != null) {
        for (LookupDto lookupDto : lookupDtos) {
            Long count = (Long) dataSourceDataService.executeQuery(createLookupReferenceQuery(lookupDto.getLookupName(), entityClassName));
            // Referenced iff at least one data source points at this lookup
            // (boolean expression instead of the original if/else assignment).
            lookupDto.setReferenced(count > 0);
        }
    }
}
// --- Spring dependency-injection wiring ---------------------------------
@Autowired
public void setAllEntities(AllEntities allEntities) {
    this.allEntities = allEntities;
}
@Autowired
public void setAllTypes(AllTypes allTypes) {
    this.allTypes = allTypes;
}
@Autowired
public void setAllEntityDrafts(AllEntityDrafts allEntityDrafts) {
    this.allEntityDrafts = allEntityDrafts;
}
@Autowired
public void setAllEntityAudits(AllEntityAudits allEntityAudits) {
    this.allEntityAudits = allEntityAudits;
}
@Autowired
public void setMDSConstructor(MDSConstructor mdsConstructor) {
    this.mdsConstructor = mdsConstructor;
}
@Autowired
public void setBundleContext(BundleContext bundleContext) {
    this.bundleContext = bundleContext;
}
@Autowired
public void setEntityValidator(EntityValidator entityValidator) {
    this.entityValidator = entityValidator;
}
@Autowired
public void setComboboxDataMigrationHelper(ComboboxDataMigrationHelper comboboxDataMigrationHelper) {
    this.comboboxDataMigrationHelper = comboboxDataMigrationHelper;
}
}
| |
package com.blstream.studybox.activities;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.TextInputEditText;
import android.support.v7.widget.AppCompatButton;
import android.view.View;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.blstream.studybox.R;
import com.blstream.studybox.auth.BaseAuthViewState;
import com.blstream.studybox.auth.login_view.LoginPresenter;
import com.blstream.studybox.auth.login_view.LoginView;
import com.blstream.studybox.base.BaseAuthActivity;
import com.blstream.studybox.model.AuthCredentials;
import com.hannesdorfmann.mosby.mvp.viewstate.ViewState;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
 * Login screen backed by the Mosby MVP view-state pattern. Delegates credential
 * validation to {@link LoginPresenter} and renders form / loading / error states.
 */
public class LoginActivity extends BaseAuthActivity<LoginView, LoginPresenter>
        implements LoginView {
    @Bind(R.id.input_email)
    TextInputEditText emailInput;
    @Bind(R.id.input_password)
    TextInputEditText passwordInput;
    @Bind(R.id.btn_login)
    AppCompatButton loginButton;
    @Bind(R.id.view_auth_error)
    TextView authErrorView;
    @Bind(R.id.progress_bar_login)
    ProgressBar loginProgressBar;
    @Bind(R.id.link_unlicensed_user)
    TextView unlicensedUserLink;
    @Bind(R.id.link_sign_up)
    TextView signUpLink;

    /**
     * Single, documented home for the unchecked view-state cast. The original
     * repeated this cast (with a raw local variable) in six methods; keeping it
     * in one place removes the raw type and the duplication.
     */
    @SuppressWarnings("unchecked")
    private BaseAuthViewState<LoginView> authViewState() {
        return (BaseAuthViewState<LoginView>) viewState;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);
        ButterKnife.bind(this);
        // Retain the Mosby presenter/view-state across configuration changes.
        setRetainInstance(true);
    }

    /** Validates credentials only when a network connection is available. */
    @OnClick(R.id.btn_login)
    public void onLoginClick() {
        if (connectionStatusReceiver.isConnected()) {
            String email = emailInput.getText().toString().trim();
            String password = passwordInput.getText().toString();
            presenter.validateCredential(new AuthCredentials(email, password));
        }
    }

    @OnClick(R.id.link_unlicensed_user)
    public void onUnlicensedClick() {
        Intent intent = new Intent(LoginActivity.this, DecksActivity.class);
        startActivity(intent);
    }

    @OnClick(R.id.link_sign_up)
    public void onSignUpLinkClick() {
        Intent intent = new Intent(LoginActivity.this, RegistrationActivity.class);
        startActivity(intent);
    }

    @Override
    @NonNull
    public LoginPresenter createPresenter() {
        return new LoginPresenter();
    }

    @Override
    @NonNull
    public ViewState<LoginView> createViewState() {
        return new BaseAuthViewState<>();
    }

    @Override
    public void onNewViewStateInstance() {
        showForm();
    }

    /** Shows the idle form: inputs enabled, no error, no progress indicator. */
    @Override
    public void showForm() {
        authViewState().setShowForm();
        setFormEnabled(true);
        authErrorView.setVisibility(View.GONE);
        loginProgressBar.setVisibility(View.GONE);
    }

    @Override
    public void showAuthError() {
        setAuthError(getString(R.string.auth_error));
    }

    @Override
    public void showNetworkError() {
        setNetworkError(getString(R.string.network_error));
    }

    @Override
    public void showUnexpectedError() {
        setUnexpectedError(getString(R.string.unexpected_error));
    }

    @Override
    public void showEmptyEmailError() {
        setFieldError(emailInput, getString(R.string.empty_field));
    }

    @Override
    public void showEmptyPasswordError() {
        setFieldError(passwordInput, getString(R.string.empty_field));
    }

    @Override
    public void showInvalidEmailError() {
        setFieldError(emailInput, getString(R.string.invalid_email));
    }

    @Override
    public void showInvalidPasswordError() {
        setFieldError(passwordInput, getString(R.string.invalid_password));
    }

    @Override
    public void showTooShortPasswordError() {
        setFieldError(passwordInput, getString(R.string.too_short_password));
    }

    /** Shows the in-progress state: inputs disabled, spinner visible. */
    @Override
    public void showLoading() {
        authViewState().setShowLoading();
        setFormEnabled(false);
        authErrorView.setVisibility(View.GONE);
        loginProgressBar.setVisibility(View.VISIBLE);
    }

    protected void setAuthError(String message) {
        authViewState().setShowAuthError();
        setError(message);
    }

    protected void setNetworkError(String message) {
        authViewState().setShowNetworkError();
        setError(message);
    }

    protected void setUnexpectedError(String message) {
        authViewState().setShowUnexpectedError();
        setError(message);
    }

    /** Displays a screen-level error banner and re-enables the form. */
    @Override
    protected void setError(String message) {
        setFormEnabled(true);
        authErrorView.setText(message);
        authErrorView.setVisibility(View.VISIBLE);
        loginProgressBar.setVisibility(View.GONE);
    }

    /** Displays an inline error on a single input field and focuses it. */
    @Override
    protected void setFieldError(TextInputEditText field, String message) {
        authViewState().setShowForm();
        setFormEnabled(true);
        field.setError(message);
        field.requestFocus();
        authErrorView.setVisibility(View.GONE);
        loginProgressBar.setVisibility(View.GONE);
    }

    /** Enables/disables all interactive controls and dims the login button when disabled. */
    @Override
    protected void setFormEnabled(boolean enabled) {
        emailInput.setEnabled(enabled);
        passwordInput.setEnabled(enabled);
        loginButton.setEnabled(enabled);
        unlicensedUserLink.setEnabled(enabled);
        signUpLink.setEnabled(enabled);
        if (enabled) {
            loginButton.setAlpha(ENABLED_BUTTON_ALPHA);
        } else {
            loginButton.setAlpha(DISABLED_BUTTON_ALPHA);
        }
    }

    @Override
    public void loginSuccessful() {
        Intent intent = new Intent(LoginActivity.this, DecksActivity.class);
        startActivity(intent);
        finish();
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.bookie;
/**
* Signals that a Bookie exception of some sort has occurred. This class
* is the general class of exceptions produced by failed or interrupted bookie operations.
*/
@SuppressWarnings("serial")
public abstract class BookieException extends Exception {
private final int code;
/** Creates an exception carrying only an error code. */
public BookieException(int code) {
    super();
    this.code = code;
}
/** Creates an exception with an error code and an underlying cause. */
public BookieException(int code, Throwable t) {
    super(t);
    this.code = code;
}
/** Creates an exception with an error code and a human-readable reason. */
public BookieException(int code, String reason) {
    super(reason);
    this.code = code;
}
/** Creates an exception with an error code, a reason and an underlying cause. */
public BookieException(int code, String reason, Throwable t) {
    super(reason, t);
    this.code = code;
}
/**
 * Factory mapping an error code to its concrete {@link BookieException} subclass.
 * Unrecognized codes yield {@link BookieIllegalOpException}.
 * NOTE(review): {@code Code.OperationRejectedException} has no dedicated case and
 * falls through to the default — confirm this is intentional.
 */
public static BookieException create(int code) {
    switch(code) {
    case Code.UnauthorizedAccessException:
        return new BookieUnauthorizedAccessException();
    case Code.LedgerFencedException:
        return new LedgerFencedException();
    case Code.InvalidCookieException:
        return new InvalidCookieException();
    case Code.UpgradeException:
        return new UpgradeException();
    case Code.DiskPartitionDuplicationException:
        return new DiskPartitionDuplicationException();
    case Code.CookieNotFoundException:
        return new CookieNotFoundException();
    case Code.CookieExistsException:
        return new CookieExistException();
    case Code.MetadataStoreException:
        return new MetadataStoreException();
    case Code.UnknownBookieIdException:
        return new UnknownBookieIdException();
    default:
        return new BookieIllegalOpException();
    }
}
/**
* An exception code indicates the failure reason.
*/
public interface Code {
    /** Success — no error. */
    int OK = 0;
    /** Unauthorized access to bookie data. */
    int UnauthorizedAccessException = -1;
    /** Illegal or unrecognized operation. */
    int IllegalOpException = -100;
    /** Ledger fenced — no more entries may be added. */
    int LedgerFencedException = -101;
    /** Invalid environment cookie found. */
    int InvalidCookieException = -102;
    /** Failure during an upgrade operation. */
    int UpgradeException = -103;
    /** Disk partition duplication is not allowed. */
    int DiskPartitionDuplicationException = -104;
    /** Expected cookie was not found. */
    int CookieNotFoundException = -105;
    /** Failure performing metadata operations. */
    int MetadataStoreException = -106;
    /** Bookie id is unknown. */
    int UnknownBookieIdException = -107;
    /** Operation rejected (e.g. by throttling). */
    int OperationRejectedException = -108;
    /** Cookie already exists. */
    int CookieExistsException = -109;
}
/** Returns the numeric error code of this exception (see {@link Code}). */
public int getCode() {
    return this.code;
}
public String getMessage(int code) {
String err;
switch(code) {
case Code.OK:
err = "No problem";
break;
case Code.UnauthorizedAccessException:
err = "Error while reading ledger";
break;
case Code.LedgerFencedException:
err = "Ledger has been fenced; No more entries can be added";
break;
case Code.InvalidCookieException:
err = "Invalid environment cookie found";
break;
case Code.UpgradeException:
err = "Error performing an upgrade operation ";
break;
case Code.DiskPartitionDuplicationException:
err = "Disk Partition Duplication is not allowed";
break;
case Code.CookieNotFoundException:
err = "Cookie not found";
break;
case Code.CookieExistsException:
err = "Cookie already exists";
break;
case Code.MetadataStoreException:
err = "Error performing metadata operations";
break;
case Code.UnknownBookieIdException:
err = "Unknown bookie id";
break;
case Code.OperationRejectedException:
err = "Operation rejected";
break;
default:
err = "Invalid operation";
break;
}
String reason = super.getMessage();
if (reason == null) {
if (super.getCause() != null) {
reason = super.getCause().getMessage();
}
}
if (reason == null) {
return err;
} else {
return String.format("%s [%s]", err, reason);
}
}
/**
* Signals that an unauthorized operation attempts to access the data in a bookie.
*/
public static class BookieUnauthorizedAccessException extends BookieException {
public BookieUnauthorizedAccessException() {
super(Code.UnauthorizedAccessException);
}
}
/**
* Signals that an illegal operation attempts to access the data in a bookie.
*/
public static class BookieIllegalOpException extends BookieException {
public BookieIllegalOpException() {
super(Code.IllegalOpException);
}
public BookieIllegalOpException(String reason) {
super(Code.IllegalOpException, reason);
}
public BookieIllegalOpException(Throwable cause) {
super(Code.IllegalOpException, cause);
}
}
/**
* Signals that a ledger has been fenced in a bookie. No more entries can be appended to that ledger.
*/
public static class LedgerFencedException extends BookieException {
public LedgerFencedException() {
super(Code.LedgerFencedException);
}
}
/**
* Signals that a ledger has been fenced in a bookie. No more entries can be appended to that ledger.
*/
public static class OperationRejectedException extends BookieException {
public OperationRejectedException() {
super(Code.OperationRejectedException);
}
@Override
public Throwable fillInStackTrace() {
// Since this exception is a way to signal a specific condition and it's triggered and very specific points,
// we can disable stack traces.
return null;
}
}
/**
* Signal that an invalid cookie is found when starting a bookie.
*
* <p>This exception is mainly used for detecting if there is any malformed configuration in a bookie.
*/
public static class InvalidCookieException extends BookieException {
public InvalidCookieException() {
this("");
}
public InvalidCookieException(String reason) {
super(Code.InvalidCookieException, reason);
}
public InvalidCookieException(Throwable cause) {
super(Code.InvalidCookieException, cause);
}
}
/**
* Signal that no cookie is found when starting a bookie.
*/
public static class CookieNotFoundException extends BookieException {
public CookieNotFoundException() {
this("");
}
public CookieNotFoundException(String reason) {
super(Code.CookieNotFoundException, reason);
}
public CookieNotFoundException(Throwable cause) {
super(Code.CookieNotFoundException, cause);
}
}
/**
* Signal that cookie already exists when creating a new cookie.
*/
public static class CookieExistException extends BookieException {
public CookieExistException() {
this("");
}
public CookieExistException(String reason) {
super(Code.CookieExistsException, reason);
}
public CookieExistException(Throwable cause) {
super(Code.CookieExistsException, cause);
}
}
/**
* Signals that an exception occurs on upgrading a bookie.
*/
public static class UpgradeException extends BookieException {
public UpgradeException() {
super(Code.UpgradeException);
}
public UpgradeException(Throwable cause) {
super(Code.UpgradeException, cause);
}
public UpgradeException(String reason) {
super(Code.UpgradeException, reason);
}
}
/**
* Signals when multiple ledger/journal directories are mounted in same disk partition.
*/
public static class DiskPartitionDuplicationException extends BookieException {
public DiskPartitionDuplicationException() {
super(Code.DiskPartitionDuplicationException);
}
public DiskPartitionDuplicationException(Throwable cause) {
super(Code.DiskPartitionDuplicationException, cause);
}
public DiskPartitionDuplicationException(String reason) {
super(Code.DiskPartitionDuplicationException, reason);
}
}
/**
* Signal when bookie has problems on accessing metadata store.
*/
public static class MetadataStoreException extends BookieException {
public MetadataStoreException() {
this("");
}
public MetadataStoreException(String reason) {
super(Code.MetadataStoreException, reason);
}
public MetadataStoreException(Throwable cause) {
super(Code.MetadataStoreException, cause);
}
public MetadataStoreException(String reason, Throwable cause) {
super(Code.MetadataStoreException, reason, cause);
}
}
/**
* Signal when bookie has problems on accessing metadata store.
*/
public static class UnknownBookieIdException extends BookieException {
public UnknownBookieIdException() {
super(Code.UnknownBookieIdException);
}
public UnknownBookieIdException(Throwable cause) {
super(Code.UnknownBookieIdException, cause);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.mesos.runtime.clusterframework.store;
import org.apache.mesos.Protos;
import java.io.Serializable;
import java.text.DecimalFormat;
import java.util.List;
import java.util.Objects;
import scala.Option;
import static java.util.Objects.requireNonNull;
/**
* A store of Mesos workers and associated framework information.
*
* @deprecated Apache Mesos support was deprecated in Flink 1.13 and is subject to removal in the
* future (see FLINK-22352 for further details).
*/
@Deprecated
public interface MesosWorkerStore {

    /** The template for naming the worker. */
    // NOTE(review): DecimalFormat is not thread-safe; as a shared interface
    // constant this is only safe if callers format under external
    // synchronization — confirm at the call sites.
    DecimalFormat TASKID_FORMAT = new DecimalFormat("taskmanager-00000");

    /** Start the worker store. */
    void start() throws Exception;

    /**
     * Stop the worker store.
     *
     * @param cleanup if true, cleanup any stored information.
     */
    void stop(boolean cleanup) throws Exception;

    /** Get the stored Mesos framework ID. */
    Option<Protos.FrameworkID> getFrameworkID() throws Exception;

    /** Set the stored Mesos framework ID. */
    void setFrameworkID(Option<Protos.FrameworkID> frameworkID) throws Exception;

    /** Recover the stored workers. */
    List<Worker> recoverWorkers() throws Exception;

    /** Generate a new task ID for a worker. */
    Protos.TaskID newTaskID() throws Exception;

    /** Put a worker into storage. */
    void putWorker(Worker worker) throws Exception;

    /**
     * Remove a worker from storage.
     *
     * @return true if the worker existed.
     */
    boolean removeWorker(Protos.TaskID taskID) throws Exception;

    /**
     * A stored worker.
     *
     * <p>Immutable value object: state transitions ({@link #launchWorker},
     * {@link #releaseWorker}) return new instances.
     *
     * <p>The assigned slaveid/hostname is valid in Launched and Released states. The hostname is
     * needed by Fenzo for optimization purposes.
     */
    class Worker implements Serializable {

        private static final long serialVersionUID = 1L;

        // All fields are non-null (enforced in the constructor); absence is
        // modeled with scala Option, not null.
        private final Protos.TaskID taskID;

        private final Option<Protos.SlaveID> slaveID;

        private final Option<String> hostname;

        private final WorkerState state;

        // Private: instances are created via newWorker() and the transition
        // methods so that only valid (taskID, slaveID, hostname, state)
        // combinations exist.
        private Worker(
                Protos.TaskID taskID,
                Option<Protos.SlaveID> slaveID,
                Option<String> hostname,
                WorkerState state) {
            this.taskID = requireNonNull(taskID, "taskID");
            this.slaveID = requireNonNull(slaveID, "slaveID");
            this.hostname = requireNonNull(hostname, "hostname");
            this.state = requireNonNull(state, "state");
        }

        /** Get the worker's task ID. */
        public Protos.TaskID taskID() {
            return taskID;
        }

        /** Get the worker's assigned slave ID. */
        public Option<Protos.SlaveID> slaveID() {
            return slaveID;
        }

        /** Get the worker's assigned hostname. */
        public Option<String> hostname() {
            return hostname;
        }

        /** Get the worker's state. */
        public WorkerState state() {
            return state;
        }

        // valid transition methods

        /**
         * Create a new worker with the given taskID.
         *
         * @return a new worker instance.
         */
        public static Worker newWorker(Protos.TaskID taskID) {
            return new Worker(
                    taskID,
                    Option.<Protos.SlaveID>empty(),
                    Option.<String>empty(),
                    WorkerState.New);
        }

        /**
         * Transition the worker to a launched state.
         *
         * @return a new worker instance (does not mutate the current instance).
         */
        public Worker launchWorker(Protos.SlaveID slaveID, String hostname) {
            return new Worker(
                    taskID, Option.apply(slaveID), Option.apply(hostname), WorkerState.Launched);
        }

        /**
         * Transition the worker to a released state.
         *
         * @return a new worker instance (does not mutate the current instance).
         */
        public Worker releaseWorker() {
            return new Worker(taskID, slaveID, hostname, WorkerState.Released);
        }

        /** Value equality over all four fields. */
        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Worker worker = (Worker) o;
            return Objects.equals(taskID, worker.taskID)
                    && Objects.equals(slaveID, worker.slaveID)
                    && Objects.equals(hostname, worker.hostname)
                    && state == worker.state;
        }

        /** Consistent with {@link #equals(Object)}. */
        @Override
        public int hashCode() {
            return Objects.hash(taskID, slaveID, hostname, state);
        }

        @Override
        public String toString() {
            return "Worker{"
                    + "taskID="
                    + taskID
                    + ", slaveID="
                    + slaveID
                    + ", hostname="
                    + hostname
                    + ", state="
                    + state
                    + '}';
        }
    }

    /** The (planned) state of the worker. */
    enum WorkerState {
        /** Indicates that the worker is new (not yet launched). */
        New,

        /** Indicates that the worker is launched. */
        Launched,

        /** Indicates that the worker is released. */
        Released
    }
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.nd4j.linalg.api.ops.impl.layers.convolution;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.base.Preconditions;
import org.nd4j.imports.converters.DifferentialFunctionClassHolder;
import org.nd4j.imports.descriptors.properties.AttributeAdapter;
import org.nd4j.imports.descriptors.properties.PropertyMapping;
import org.nd4j.imports.descriptors.properties.adapters.*;
import org.nd4j.imports.graphmapper.tf.TFGraphMapper;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.DynamicCustomOp;
import org.nd4j.linalg.api.ops.impl.layers.convolution.config.Conv2DConfig;
import org.nd4j.linalg.api.ops.impl.layers.convolution.config.DeConv2DConfig;
import org.nd4j.linalg.util.ArrayUtil;
import org.tensorflow.framework.AttrValue;
import org.tensorflow.framework.GraphDef;
import org.tensorflow.framework.NodeDef;
import java.lang.reflect.Field;
import java.util.*;
/**
* DeConv2D operation, TF-wrapper
*/
@Slf4j
@Getter
@NoArgsConstructor
public class DeConv2DTF extends DynamicCustomOp {

    /** Deconvolution configuration: kernel, stride, padding, dilation, layout. */
    protected DeConv2DConfig config;

    @Builder(builderMethodName = "sameDiffBuilder")
    public DeConv2DTF(SameDiff sameDiff,
                      SDVariable[] inputs,
                      DeConv2DConfig config) {
        super(sameDiff, inputs);
        this.config = config;
        addArgs();
    }

    public DeConv2DTF(INDArray[] inputs, INDArray[] outputs, DeConv2DConfig config){
        super(inputs, outputs);
        this.config = config;
        addArgs();
    }

    @Override
    public long[] iArgs() {
        // Lazily populate the integer args from the config if they were not
        // added yet (e.g. when the op was constructed without calling addArgs).
        if (iArguments.size() == 0)
            addArgs();
        return super.iArgs();
    }

    /**
     * Returns the op's properties, rebuilding the config from the raw integer
     * args when the config object itself was not set (e.g. after import).
     */
    @Override
    public Map<String, Object> propertiesForFunction() {
        if (config == null && !iArguments.isEmpty()) {
            // The iArgs layout mirrors addArgs(): kH, kW, sH, sW, pH, pW, dH, dW,
            // isSameMode, dataFormat (1 == NHWC).
            config = DeConv2DConfig.builder()
                    .kH(iArguments.get(0))
                    .kW(iArguments.get(1))
                    .sH(iArguments.get(2))
                    .sW(iArguments.get(3))
                    .pH(iArguments.get(4))
                    .pW(iArguments.get(5))
                    .dH(iArguments.get(6))
                    .dW(iArguments.get(7))
                    .isSameMode(iArguments.get(8) == 1)
                    // Consistency fix: use DeConv2DConfig.NCHW here, matching
                    // addArgs(); the original mixed in Conv2DConfig.NCHW.
                    .dataFormat(iArguments.get(9) == 1 ? DeConv2DConfig.NHWC : DeConv2DConfig.NCHW)
                    .build();
        }
        if (config == null) {
            // No config and no iArgs to rebuild it from: report no properties.
            // The original dereferenced config unconditionally and threw an NPE.
            return Collections.emptyMap();
        }
        return config.toProperties();
    }

    /** Pushes the config fields into the op's integer argument list. */
    private void addArgs() {
        addIArgument(config.getKH());
        addIArgument(config.getKW());
        addIArgument(config.getSH());
        addIArgument(config.getSW());
        addIArgument(config.getPH());
        addIArgument(config.getPW());
        addIArgument(config.getDH());
        addIArgument(config.getDW());
        addIArgument(ArrayUtil.fromBoolean(config.isSameMode()));
        addIArgument(config.getDataFormat().equalsIgnoreCase(DeConv2DConfig.NCHW) ? 0 : 1);
    }

    @Override
    public boolean isConfigProperties() {
        return true;
    }

    @Override
    public String configFieldName() {
        return "config";
    }

    @Override
    public Object getValue(Field property) {
        if (config == null) {
            // Fall back to a default-initialized config so reflection-based
            // property lookup never NPEs.
            config = DeConv2DConfig.builder().build();
        }
        return config.getValue(property);
    }

    /**
     * Maps TF/ONNX attribute names onto this op's config properties
     * (strides, kernel shape, dilations, padding mode, data format).
     */
    @Override
    public Map<String, Map<String, PropertyMapping>> mappingsForFunction() {
        Map<String, Map<String, PropertyMapping>> ret = new HashMap<>();
        Map<String, PropertyMapping> map = new HashMap<>();
        val strideMapping = PropertyMapping.builder()
                .tfAttrName("strides")
                .onnxAttrName("strides")
                .propertyNames(new String[]{"sH", "sW"})
                .build();
        val kernelMapping = PropertyMapping.builder()
                .propertyNames(new String[]{"kH", "kW"})
                .tfInputPosition(1)
                .onnxAttrName("kernel_shape")
                .build();
        val dilationMapping = PropertyMapping.builder()
                .onnxAttrName("dilations")
                .propertyNames(new String[]{"dW", "dH"})
                .tfAttrName("rates")
                .build();
        val sameMode = PropertyMapping.builder()
                .onnxAttrName("auto_pad")
                .propertyNames(new String[]{"isSameMode"})
                .tfAttrName("padding")
                .build();
        val dataFormat = PropertyMapping.builder()
                .onnxAttrName("data_format")
                .tfAttrName("data_format")
                .propertyNames(new String[]{"dataFormat"})
                .build();
        map.put("sW", strideMapping);
        map.put("sH", strideMapping);
        map.put("kH", kernelMapping);
        map.put("kW", kernelMapping);
        map.put("dW", dilationMapping);
        map.put("dH", dilationMapping);
        map.put("isSameMode", sameMode);
        map.put("dataFormat", dataFormat);
        ret.put(tensorflowName(), map);
        return ret;
    }

    @Override
    public Map<String, Map<String, AttributeAdapter>> attributeAdaptersForFunction() {
        Map<String, Map<String, AttributeAdapter>> ret = new HashMap<>();
        Map<String, AttributeAdapter> tfMappings = new LinkedHashMap<>();
        val fields = DifferentialFunctionClassHolder.getInstance().getFieldsForFunction(this);
        //TF uses [kH, kW, outC, inC] always for weights
        tfMappings.put("kH", new NDArrayShapeAdapter(0));
        tfMappings.put("kW", new NDArrayShapeAdapter(1));
        // Stride index depends on layout: NCHW uses indices 2/3, NHWC 1/2.
        tfMappings.put("sH", new ConditionalFieldValueIntIndexArrayAdapter("NCHW", 2, 1, fields.get("dataFormat")));
        tfMappings.put("sW", new ConditionalFieldValueIntIndexArrayAdapter("NCHW", 3, 2, fields.get("dataFormat")));
        tfMappings.put("isSameMode", new StringEqualsAdapter("SAME"));
        tfMappings.put("isNHWC", new StringEqualsAdapter("NHWC"));
        Map<String, AttributeAdapter> onnxMappings = new HashMap<>();
        onnxMappings.put("kH", new SizeThresholdIntArrayIntIndexAdpater(0, 2, 0));
        onnxMappings.put("kW", new SizeThresholdIntArrayIntIndexAdpater(1, 2, 0));
        onnxMappings.put("dH", new SizeThresholdIntArrayIntIndexAdpater(0, 2, 0));
        onnxMappings.put("dW", new SizeThresholdIntArrayIntIndexAdpater(1, 2, 0));
        onnxMappings.put("sH", new SizeThresholdIntArrayIntIndexAdpater(0, 2, 0));
        onnxMappings.put("sW", new SizeThresholdIntArrayIntIndexAdpater(1, 2, 0));
        onnxMappings.put("isSameMode", new StringEqualsAdapter("SAME"));
        onnxMappings.put("isNHWC", new StringEqualsAdapter("NHWC"));
        // NOTE(review): onnxMappings is built but never registered in ret —
        // presumably intentional since onnxName() reports this op as absent
        // in ONNX; confirm before removing.
        ret.put(tensorflowName(), tfMappings);
        return ret;
    }

    @Override
    public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith, Map<String, AttrValue> attributesForNode, GraphDef graph) {
        TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);
        addArgs();
    }

    @Override
    public String opName() {
        return "deconv2d_tf";
    }

    @Override
    public String onnxName() {
        return "ConvTranspose-Absent";
    }

    @Override
    public String tensorflowName() {
        return "Conv2DBackpropInput";
    }

    @Override
    public List<SDVariable> doDiff(List<SDVariable> f1) {
        throw new UnsupportedOperationException("To be implemented yet");
    }

    @Override
    public List<DataType> calculateOutputDataTypes(List<DataType> inputDataTypes){ //inShape, weights, input
        int n = args().length;
        Preconditions.checkState(inputDataTypes != null && inputDataTypes.size() == n, "Expected %s input data types for %s, got %s", n, getClass(), inputDataTypes);
        // Output type follows the input tensor (index 2: inShape, weights, input).
        return Collections.singletonList(inputDataTypes.get(2));
    }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.andes.kernel.slot;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.Iterator;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
/**
* This class is responsible of slot allocating, slot creating, slot re-assigning and slot
* managing tasks in standalone mode
*/
public class SlotManagerStandalone {

    /**
     * To keep message IDs against queues.
     */
    private ConcurrentHashMap<String, TreeSet<Long>> slotIDMap;

    /**
     * To keep track of last assigned message ID against queue.
     */
    private ConcurrentHashMap<String, Long> queueToLastAssignedIDMap;

    /**
     * Slots which are previously owned and released by last subscriber of this node. Key is the
     * queueName. Value is a tree set of slots
     */
    private ConcurrentHashMap<String, TreeSet<Slot>> unAssignedSlotMap;

    /**
     * To keep track of assigned slots for each queue. Key is the queue name and value is a tree
     * set of slots
     */
    private ConcurrentHashMap<String, TreeSet<Slot>> slotAssignmentMap;

    // Eagerly-initialized singleton instance.
    private static SlotManagerStandalone slotManagerStandalone = new SlotManagerStandalone();

    private static Log log = LogFactory.getLog(SlotManagerStandalone.class);

    private SlotManagerStandalone() {
        // Initialize the maps used in this class.
        slotIDMap = new ConcurrentHashMap<>();
        queueToLastAssignedIDMap = new ConcurrentHashMap<>();
        slotAssignmentMap = new ConcurrentHashMap<>();
        unAssignedSlotMap = new ConcurrentHashMap<>();
    }

    /**
     * Get a slot by giving the queue name.
     *
     * @param queueName Name of the queue
     * @return Slot object, or null if no slot is available
     */
    public Slot getSlot(String queueName) {
        Slot slotToBeAssigned;
        // Per-queue lock via the interned lock-key string; all mutators of the
        // per-queue slot state synchronize on the same interned instance.
        String lockKey = queueName + SlotManagerStandalone.class;
        synchronized (lockKey.intern()) {
            //First look at slots which are returned when last subscriber leaves
            slotToBeAssigned = getUnassignedSlot(queueName);
            if (null == slotToBeAssigned) {
                slotToBeAssigned = getFreshSlot(queueName);
                if (log.isDebugEnabled()) {
                    log.debug("Slot Manager - giving a slot from fresh pool. Slot= " + slotToBeAssigned);
                }
            }
            if (null == slotToBeAssigned) {
                if (log.isDebugEnabled()) {
                    log.debug("Slot Manager - returns empty slot for the queue: " + queueName);
                }
            } else {
                updateSlotAssignmentMap(queueName, slotToBeAssigned);
            }
            return slotToBeAssigned;
        }
    }

    /**
     * Get an unassigned slot (slots dropped by sudden subscription closes)
     *
     * @param queueName name of the queue slot is required
     * @return slot or null if cannot find
     */
    private Slot getUnassignedSlot(String queueName) {
        Slot slotToBeAssigned = null;
        TreeSet<Slot> unAssignedSlotSet = unAssignedSlotMap.get(queueName);
        if (null != unAssignedSlotSet) {
            // pollFirst returns null on an empty set, which matches the
            // "no slot available" contract of this method.
            slotToBeAssigned = unAssignedSlotSet.pollFirst();
        }
        return slotToBeAssigned;
    }

    /**
     * Get a new slot from slotIDMap
     *
     * @param queueName Name of the queue
     * @return Slot object, or null if no fresh message-ID range is available
     */
    private Slot getFreshSlot(String queueName) {
        Slot slotToBeAssigned = null;
        TreeSet<Long> messageIDSet = slotIDMap.get(queueName);
        if (null != messageIDSet && !messageIDSet.isEmpty()) {
            slotToBeAssigned = new Slot();
            Long lastAssignedId = queueToLastAssignedIDMap.get(queueName);
            // The slot covers (lastAssignedId, smallest recorded ID]; a queue
            // with no previous assignment starts at 0.
            if (lastAssignedId != null) {
                slotToBeAssigned.setStartMessageId(lastAssignedId + 1);
            } else {
                slotToBeAssigned.setStartMessageId(0L);
            }
            slotToBeAssigned.setEndMessageId(messageIDSet.pollFirst());
            slotToBeAssigned.setStorageQueueName(queueName);
            // Re-put is technically redundant (the set is mutated in place)
            // but kept for symmetry with updateMessageID.
            slotIDMap.put(queueName, messageIDSet);
            if (log.isDebugEnabled()) {
                log.debug(slotToBeAssigned.getEndMessageId() + " removed to slotIdMap. Current " +
                        "values in " +
                        "map " + messageIDSet);
            }
            queueToLastAssignedIDMap.put(queueName, slotToBeAssigned.getEndMessageId());
        }
        return slotToBeAssigned;
    }

    /**
     * Put an entry to slotAssignmentMap when a slot is assigned to slot delivery worker
     *
     * @param queueName Name of the queue
     * @param assignedSlot Slot which is assigned to slot delivery worker
     */
    private void updateSlotAssignmentMap(String queueName, Slot assignedSlot) {
        TreeSet<Slot> assignedSlotSet = slotAssignmentMap.get(queueName);
        if (null == assignedSlotSet) {
            assignedSlotSet = new TreeSet<>();
        }
        assignedSlotSet.add(assignedSlot);
        slotAssignmentMap.put(queueName, assignedSlotSet);
    }

    /**
     * Record Slot's last message ID related to a particular queue
     *
     * @param queueName Name of the queue which this message ID belongs to
     * @param lastMessageIdInTheSlot Last message ID of the slot
     */
    public void updateMessageID(String queueName, Long lastMessageIdInTheSlot) {
        TreeSet<Long> messageIdSet = slotIDMap.get(queueName);
        if (messageIdSet == null) {
            messageIdSet = new TreeSet<>();
        }
        String lockKey = queueName + SlotManagerStandalone.class;
        synchronized (lockKey.intern()) {
            // Update the slotIDMap under the per-queue lock.
            messageIdSet.add(lastMessageIdInTheSlot);
            slotIDMap.put(queueName, messageIdSet);
            if (log.isDebugEnabled()) {
                log.debug(lastMessageIdInTheSlot + " added to slotIdMap. Current values in " +
                        "map " + messageIdSet);
            }
        }
    }

    /**
     * Delete slot details when slot is empty. (All the messages are delivered and acknowledgments are
     * returned )
     *
     * @param queueName Name of the queue
     * @param slotToBeDeleted Slot to be deleted
     * @return Whether deleted or not (always true in standalone mode)
     */
    public boolean deleteSlot(String queueName, Slot slotToBeDeleted) {
        String lockKey = queueName + SlotManagerStandalone.class;
        synchronized (lockKey.intern()) {
            TreeSet<Slot> assignedSlotSet = slotAssignmentMap.get(queueName);
            if (null != assignedSlotSet) {
                // Match by end message ID only; slot ranges are assumed unique
                // per queue by their end ID.
                Iterator assignedSlotIterator = assignedSlotSet.iterator();
                while (assignedSlotIterator.hasNext()) {
                    Slot assignedSlot = (Slot) assignedSlotIterator.next();
                    if (assignedSlot.getEndMessageId() == slotToBeDeleted.getEndMessageId()) {
                        assignedSlotIterator.remove();
                        break;
                    }
                }
            }
        }
        return true;
    }

    /**
     * Re-assign the slot when there are no local subscribers in the node
     *
     * @param queueName Name of the queue
     */
    public void reAssignSlotWhenNoSubscribers(String queueName) {
        TreeSet<Slot> slotsToBeReAssigned = slotAssignmentMap.remove(queueName);
        String lockKey = queueName + SlotManagerStandalone.class;
        if (null != slotsToBeReAssigned) {
            synchronized (lockKey.intern()) {
                TreeSet<Slot> unassignedSlots = unAssignedSlotMap.get(queueName);
                if (null == unassignedSlots) {
                    unassignedSlots = new TreeSet<>();
                }
                for (Slot slotToBeReAssigned : slotsToBeReAssigned) {
                    unassignedSlots.add(slotToBeReAssigned);
                }
                unAssignedSlotMap.put(queueName, unassignedSlots);
            }
        }
    }

    /**
     * Delete all slot associations with a given queue. This is required to handle a queue purge event.
     *
     * @param queueName Name of destination queue
     */
    public void clearAllActiveSlotRelationsToQueue(String queueName) {
        if (null != slotIDMap) {
            slotIDMap.remove(queueName);
        }
        if (null != slotAssignmentMap) {
            slotAssignmentMap.remove(queueName);
        }
        if (null != unAssignedSlotMap) {
            unAssignedSlotMap.remove(queueName);
        }
    }

    /**
     * @return SlotManagerStandalone instance
     */
    public static SlotManagerStandalone getInstance() {
        return slotManagerStandalone;
    }

    /**
     * Return last assign message id of slot for given queue when MB standalone mode
     *
     * @param queueName name of destination queue
     * @return last assign message id, or null if no message IDs have been
     *         recorded for the queue
     */
    public Long getLastAssignedSlotMessageIdInStandaloneMode(String queueName) {
        TreeSet<Long> messageIdSet = slotIDMap.get(queueName);
        if (null == messageIdSet || messageIdSet.isEmpty()) {
            // Fix: the original dereferenced the map value unconditionally,
            // throwing an NPE for an unknown queue (or NoSuchElementException
            // for an empty id set).
            return null;
        }
        // NOTE(review): read is not under the per-queue lock used elsewhere;
        // looks like a benign stale read is acceptable here — confirm.
        return messageIdSet.last();
    }
}
| |
package ru.tasp.tools;
import android.os.Handler;
import android.os.Looper;
import android.os.Process;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.LinkedHashMap;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import ru.tasp.tools.logger.L;
/**
* Created by the28awg on 26.10.15.
*/
public class Threads {
private static final int CPU_COUNT = Runtime.getRuntime().availableProcessors();
private static final int CORE_POOL_SIZE = CPU_COUNT + 1;
private static final int MAXIMUM_POOL_SIZE = CPU_COUNT * 2 + 1;
private static final int KEEP_ALIVE = 1;
private Handler uiHandler;
private Thread uiThread;
private UncaughtExceptionHandler exceptionHandler;
private ThreadFactory factory;
private RejectedExecutionHandler handler;
private ThreadPoolExecutor executor = null;
private BlockingQueue<Runnable> queue = new LinkedBlockingQueue<Runnable>(128);
private Threads() {
uiHandler = new Handler(Looper.getMainLooper());
uiThread = Looper.getMainLooper().getThread();
exceptionHandler = new UncaughtExceptionHandler();
factory = new ThreadFactory().wrapRunnable(true).pattern("Background", true).
daemon(false).exceptionHandler(exceptionHandler).finishConfig();
handler = new RejectedExecutionHandler();
executor = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE,
TimeUnit.SECONDS, queue, factory, handler);
}
public static Threads get() {
return Holder.THREADS;
}
public UncaughtExceptionHandler getExceptionHandler() {
return exceptionHandler;
}
public ThreadFactory getFactory() {
return factory;
}
public RejectedExecutionHandler getHandler() {
return handler;
}
public ThreadPoolExecutor getExecutor() {
return executor;
}
public BlockingQueue<Runnable> getQueue() {
return queue;
}
public void runOnUiThread(Runnable action) {
if (Thread.currentThread() != uiThread) {
uiHandler.post(action);
} else {
action.run();
}
}
public static class ThreadFactory implements java.util.concurrent.ThreadFactory {
protected final AccessControlContext acc;
protected final AtomicLong counter = new AtomicLong();
private long stackSize;
private String pattern;
private ClassLoader ccl;
private ThreadGroup group;
private Thread.UncaughtExceptionHandler exceptionHandler;
private boolean daemon;
private boolean configured;
private boolean wrapRunnable;
public ThreadFactory() {
final Thread t = Thread.currentThread();
ClassLoader loader;
AccessControlContext acc = null;
try {
loader = t.getContextClassLoader();
if (System.getSecurityManager() != null) {
acc = AccessController.getContext();
acc.checkPermission(new RuntimePermission("setContextClassLoader"));
}
} catch (SecurityException _skip) {
//no permission
loader = null;
acc = null;
}
this.ccl = loader;
this.acc = acc;
this.daemon = true;//Executors have it false by default
this.wrapRunnable = true;//by default wrap if acc is present (+SecurityManager)
//default pattern - caller className
StackTraceElement[] stack = new Exception().getStackTrace();
pattern(stack.length > 1 ? getOuterClassName(stack[1].getClassName()) : "ThreadFactory", true);
}
private static String getOuterClassName(String className) {
int idx = className.lastIndexOf('.') + 1;
className = className.substring(idx);//remove package
idx = className.indexOf('$');
if (idx <= 0) {
return className;//handle classes starting w/ $
}
return className.substring(0, idx);//assume inner class
}
public ThreadFactory finishConfig() {
configured = true;
counter.addAndGet(0);//write fence "w/o" volatile
return this;
}
public long getCreatedThreadsCount() {
return counter.get();
}
protected void assertConfigurable() {
if (configured)
throw new IllegalStateException("already configured");
}
@Override
public Thread newThread(Runnable r) {
configured = true;
final Thread t = new Thread(group, wrapRunnable(r), composeName(r), stackSize);
t.setDaemon(daemon);
t.setUncaughtExceptionHandler(exceptionHandler);//securityException only if in the main group, shall be safe here
//funny moment Thread.getUncaughtExceptionHandler() has a race.. badz (can throw NPE)
applyCCL(t);
return t;
}
private void applyCCL(final Thread t) {
if (ccl != null) {//use factory creator ACC for setContextClassLoader
AccessController.doPrivileged(new PrivilegedAction<Object>() {
@Override
public Object run() {
t.setContextClassLoader(ccl);
return null;
}
}, acc);
}
}
private Runnable wrapRunnable(final Runnable r) {
if (acc == null || !wrapRunnable) {
return r;
}
Runnable result = new Runnable() {
public void run() {
AccessController.doPrivileged(new PrivilegedAction<Object>() {
@Override
public Object run() {
android.os.Process.setThreadPriority(Process.myTid(), Process.THREAD_PRIORITY_BACKGROUND);
r.run();
return null;
}
}, acc);
}
};
return result;
}
protected String composeName(Runnable r) {
return String.format(pattern, counter.incrementAndGet(), System.currentTimeMillis());
}
//standard setters allowing chaining, feel free to add normal setXXX
public ThreadFactory pattern(String patten, boolean appendFormat) {
assertConfigurable();
if (appendFormat) {
patten += ": %d @ %tF %<tT";//counter + creation time
}
this.pattern = patten;
return this;
}
public ThreadFactory daemon(boolean daemon) {
assertConfigurable();
this.daemon = daemon;
return this;
}
public ThreadFactory stackSize(long stackSize) {
assertConfigurable();
this.stackSize = stackSize;
return this;
}
public ThreadFactory threadGroup(ThreadGroup group) {
assertConfigurable();
this.group = group;
return this;
}
public ThreadFactory exceptionHandler(Thread.UncaughtExceptionHandler exceptionHandler) {
assertConfigurable();
this.exceptionHandler = exceptionHandler;
return this;
}
public ThreadFactory wrapRunnable(boolean wrapRunnable) {
assertConfigurable();
this.wrapRunnable = wrapRunnable;
return this;
}
public ThreadFactory ccl(ClassLoader ccl) {
assertConfigurable();
this.ccl = ccl;
return this;
}
}
/**
 * Collecting {@link Thread.UncaughtExceptionHandler}: logs every uncaught
 * throwable and records it per thread (keyed by {@link Thread#toString()})
 * for later inspection via {@link #getThrowable()}.
 */
public static class UncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {
    // Insertion order preserved so throwables can be reported in arrival order.
    private LinkedHashMap<String, LinkedBlockingQueue<Throwable>> throwable = new LinkedHashMap<>();
    @Override
    public void uncaughtException(Thread thread, Throwable ex) {
        L.error(ex, ex.getMessage());
        // A shared handler can be invoked from several dying threads at once;
        // LinkedHashMap is not thread-safe, so guard the check-then-put.
        synchronized (throwable) {
            if (!throwable.containsKey(thread.toString())) {
                throwable.put(thread.toString(), new LinkedBlockingQueue<Throwable>());
            }
            throwable.get(thread.toString()).add(ex);
        }
    }
    /** Returns the live map of recorded throwables per thread name. */
    public LinkedHashMap<String, LinkedBlockingQueue<Throwable>> getThrowable() {
        return throwable;
    }
}
/**
 * Counting {@link java.util.concurrent.RejectedExecutionHandler}: silently
 * discards rejected tasks while keeping a tally of how many were refused,
 * readable through {@link #getCounter()}.
 */
public static class RejectedExecutionHandler implements java.util.concurrent.RejectedExecutionHandler {
    // Number of tasks rejected so far; atomic because the executor may
    // invoke this handler from multiple submitting threads.
    private AtomicLong rejections = new AtomicLong(0);

    @Override
    public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
        // Record the rejection; the task itself is intentionally dropped.
        rejections.incrementAndGet();
    }

    /** Returns how many tasks have been rejected so far. */
    public long getCounter() {
        return rejections.get();
    }
}
/**
 * Initialization-on-demand holder: {@code THREADS} is constructed only when
 * {@code Holder} is first referenced, giving thread-safe lazy singleton
 * creation without explicit locking.
 */
private static class Holder {
    private static final Threads THREADS = new Threads();
}
/**
 * {@link FutureTask} that delivers its completion callback on an Android
 * {@link Handler}: the looper of the constructing thread when it has one,
 * otherwise the main looper.
 *
 * @param <V> result type produced by the callable
 */
public static class Task<V> extends FutureTask<V> {
    private Handler handler;
    public Task(Callable<V> callable) {
        super(callable);
        // Explicit null check instead of the previous catch of the
        // NullPointerException thrown by new Handler(null) — catching NPE
        // as control flow hides real bugs.
        Looper looper = Looper.myLooper();
        handler = new Handler(looper != null ? looper : Looper.getMainLooper());
    }
    public Task(Callable<V> callable, Looper looper) {
        super(callable);
        handler = new Handler(looper);
    }
    @Override
    protected void done() {
        // Called on the executor thread; bounce the result to the handler thread.
        handler.post(new Runnable() {
            @Override
            public void run() {
                try {
                    // NOTE(review): if the task was cancelled, get() throws an
                    // unchecked CancellationException that propagates to the
                    // handler thread — confirm this is intended.
                    done(get());
                } catch (InterruptedException e) {
                    L.error(e, "Ooops!");
                } catch (ExecutionException e) {
                    L.error(e, "Ooops!");
                }
            }
        });
    }
    /** Completion hook invoked on the handler thread; override to consume the result. */
    public void done(V result) {}
}
}
| |
package co.juliansuarez.libwizardpager.wizard.ui;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import co.juliansuarez.libwizardpager.R;
/**
 * Horizontal strip of tab indicators for a wizard-style pager. Previous
 * steps, the current step, the current-and-last step, and upcoming steps are
 * each drawn with a distinct paint; taps and drags on the strip report the
 * hit page index to an optional {@link OnPageSelectedListener}.
 */
public class StepPagerStrip extends View {
    // The only styleable attribute consumed: android:gravity.
    private static final int[] ATTRS = new int[]{
            android.R.attr.gravity
    };
    private int mPageCount;
    private int mCurrentPage;
    private int mGravity = Gravity.LEFT | Gravity.TOP;
    private float mTabWidth;   // per-tab width (unless FILL_HORIZONTAL)
    private float mTabHeight;
    private float mTabSpacing; // gap between adjacent tabs
    private Paint mPrevTabPaint;         // pages before the current one
    private Paint mSelectedTabPaint;     // the current page
    private Paint mSelectedLastTabPaint; // current page when it is the final step
    private Paint mNextTabPaint;         // pages after the current one
    private RectF mTempRectF = new RectF(); // scratch rect reused every draw pass
    //private Scroller mScroller;
    private OnPageSelectedListener mOnPageSelectedListener;

    public StepPagerStrip(Context context) {
        this(context, null, 0);
    }

    public StepPagerStrip(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public StepPagerStrip(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        // Pull gravity from XML (defaults to LEFT|TOP when absent).
        final TypedArray a = context.obtainStyledAttributes(attrs, ATTRS);
        mGravity = a.getInteger(0, mGravity);
        a.recycle();
        // Dimensions and colors come from library resources.
        final Resources res = getResources();
        mTabWidth = res.getDimensionPixelSize(R.dimen.step_pager_tab_width);
        mTabHeight = res.getDimensionPixelSize(R.dimen.step_pager_tab_height);
        mTabSpacing = res.getDimensionPixelSize(R.dimen.step_pager_tab_spacing);
        mPrevTabPaint = new Paint();
        mPrevTabPaint.setColor(res.getColor(R.color.step_pager_previous_tab_color));
        mSelectedTabPaint = new Paint();
        mSelectedTabPaint.setColor(res.getColor(R.color.step_pager_selected_tab_color));
        mSelectedLastTabPaint = new Paint();
        mSelectedLastTabPaint.setColor(res.getColor(R.color.step_pager_selected_last_tab_color));
        mNextTabPaint = new Paint();
        mNextTabPaint.setColor(res.getColor(R.color.step_pager_next_tab_color));
    }

    /** Registers the listener notified when a tab is tapped or dragged over. */
    public void setOnPageSelectedListener(OnPageSelectedListener onPageSelectedListener) {
        mOnPageSelectedListener = onPageSelectedListener;
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (mPageCount == 0) {
            return; // nothing to draw
        }
        // Total strip width: N tabs plus N-1 gaps.
        float totalWidth = mPageCount * (mTabWidth + mTabSpacing) - mTabSpacing;
        float totalLeft;
        boolean fillHorizontal = false;
        // Horizontal placement of the strip within the view.
        switch (mGravity & Gravity.HORIZONTAL_GRAVITY_MASK) {
            case Gravity.CENTER_HORIZONTAL:
                totalLeft = (getWidth() - totalWidth) / 2;
                break;
            case Gravity.RIGHT:
                totalLeft = getWidth() - getPaddingRight() - totalWidth;
                break;
            case Gravity.FILL_HORIZONTAL:
                totalLeft = getPaddingLeft();
                fillHorizontal = true; // tabs stretch to fill the width below
                break;
            default:
                totalLeft = getPaddingLeft();
        }
        // Vertical placement of the tab row.
        switch (mGravity & Gravity.VERTICAL_GRAVITY_MASK) {
            case Gravity.CENTER_VERTICAL:
                mTempRectF.top = (int) (getHeight() - mTabHeight) / 2;
                break;
            case Gravity.BOTTOM:
                mTempRectF.top = getHeight() - getPaddingBottom() - mTabHeight;
                break;
            default:
                mTempRectF.top = getPaddingTop();
        }
        mTempRectF.bottom = mTempRectF.top + mTabHeight;
        float tabWidth = mTabWidth;
        if (fillHorizontal) {
            // Divide remaining width (minus gaps) equally among tabs.
            tabWidth = (getWidth() - getPaddingRight() - getPaddingLeft()
                    - (mPageCount - 1) * mTabSpacing) / mPageCount;
        }
        for (int i = 0; i < mPageCount; i++) {
            mTempRectF.left = totalLeft + (i * (tabWidth + mTabSpacing));
            mTempRectF.right = mTempRectF.left + tabWidth;
            // Paint choice: previous / next / selected-last / selected.
            canvas.drawRect(mTempRectF, i < mCurrentPage
                    ? mPrevTabPaint
                    : (i > mCurrentPage
                            ? mNextTabPaint
                            : (i == mPageCount - 1
                                    ? mSelectedLastTabPaint
                                    : mSelectedTabPaint)));
        }
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Natural size: all tabs plus gaps plus padding, resolved against the specs.
        setMeasuredDimension(
                View.resolveSize(
                        (int) (mPageCount * (mTabWidth + mTabSpacing) - mTabSpacing)
                                + getPaddingLeft() + getPaddingRight(),
                        widthMeasureSpec),
                View.resolveSize(
                        (int) mTabHeight
                                + getPaddingTop() + getPaddingBottom(),
                        heightMeasureSpec));
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        scrollCurrentPageIntoView();
        super.onSizeChanged(w, h, oldw, oldh);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (mOnPageSelectedListener != null) {
            switch (event.getActionMasked()) {
                case MotionEvent.ACTION_DOWN:
                case MotionEvent.ACTION_MOVE:
                    int position = hitTest(event.getX());
                    if (position >= 0) {
                        mOnPageSelectedListener.onPageStripSelected(position);
                    }
                    return true; // consume so we keep receiving MOVE events
            }
        }
        return super.onTouchEvent(event);
    }

    /**
     * Maps an x coordinate to a page index, or -1 when outside the strip.
     * Mirrors the horizontal layout computed in {@link #onDraw(Canvas)}.
     */
    private int hitTest(float x) {
        if (mPageCount == 0) {
            return -1;
        }
        float totalWidth = mPageCount * (mTabWidth + mTabSpacing) - mTabSpacing;
        float totalLeft;
        boolean fillHorizontal = false;
        switch (mGravity & Gravity.HORIZONTAL_GRAVITY_MASK) {
            case Gravity.CENTER_HORIZONTAL:
                totalLeft = (getWidth() - totalWidth) / 2;
                break;
            case Gravity.RIGHT:
                totalLeft = getWidth() - getPaddingRight() - totalWidth;
                break;
            case Gravity.FILL_HORIZONTAL:
                totalLeft = getPaddingLeft();
                fillHorizontal = true;
                break;
            default:
                totalLeft = getPaddingLeft();
        }
        float tabWidth = mTabWidth;
        if (fillHorizontal) {
            tabWidth = (getWidth() - getPaddingRight() - getPaddingLeft()
                    - (mPageCount - 1) * mTabSpacing) / mPageCount;
        }
        // NOTE(review): totalRight includes a trailing mTabSpacing (onDraw's
        // totalWidth subtracts it), so a touch at the extreme right edge can
        // yield index == mPageCount (out of range) — confirm and clamp if needed.
        float totalRight = totalLeft + (mPageCount * (tabWidth + mTabSpacing));
        if (x >= totalLeft && x <= totalRight && totalRight > totalLeft) {
            return (int) (((x - totalLeft) / (totalRight - totalLeft)) * mPageCount);
        } else {
            return -1;
        }
    }

    /** Sets the highlighted page and redraws. */
    public void setCurrentPage(int currentPage) {
        mCurrentPage = currentPage;
        invalidate();
        scrollCurrentPageIntoView();
        // TODO: Set content description appropriately
    }

    /** Placeholder: scrolling the active tab into view is currently disabled. */
    private void scrollCurrentPageIntoView() {
        // TODO: only works with left gravity for now
        //
        // float widthToActive = getPaddingLeft() + (mCurrentPage + 1) * (mTabWidth + mTabSpacing)
        // - mTabSpacing;
        // int viewWidth = getWidth();
        //
        // int startScrollX = getScrollX();
        // int destScrollX = (widthToActive > viewWidth) ? (int) (widthToActive - viewWidth) : 0;
        //
        // if (mScroller == null) {
        // mScroller = new Scroller(getContext());
        // }
        //
        // mScroller.abortAnimation();
        // mScroller.startScroll(startScrollX, 0, destScrollX - startScrollX, 0);
        // postInvalidate();
    }

    /** Sets the total number of pages and redraws. */
    public void setPageCount(int count) {
        mPageCount = count;
        invalidate();
        // TODO: Set content description appropriately
    }

    /** Callback for taps/drags selecting a page on the strip. */
    public static interface OnPageSelectedListener {
        void onPageStripSelected(int position);
    }
    //
    // @Override
    // public void computeScroll() {
    // super.computeScroll();
    // if (mScroller.computeScrollOffset()) {
    // setScrollX(mScroller.getCurrX());
    // }
    // }
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
/*
* JBoss, Home of Professional Open Source
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.infinispan.replication;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNull;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.infinispan.Cache;
import org.infinispan.CacheException;
import org.infinispan.commands.remote.CacheRpcCommand;
import org.infinispan.config.Configuration;
import org.infinispan.remoting.responses.Response;
import org.infinispan.remoting.rpc.ResponseFilter;
import org.infinispan.remoting.rpc.ResponseMode;
import org.infinispan.remoting.rpc.RpcManager;
import org.infinispan.remoting.rpc.RpcManagerImpl;
import org.infinispan.remoting.transport.Address;
import org.infinispan.remoting.transport.Transport;
import org.infinispan.test.MultipleCacheManagersTest;
import org.infinispan.test.TestingUtil;
import org.testng.annotations.Test;
/**
* @author <a href="mailto:manik@jboss.org">Manik Surtani (manik@jboss.org)</a>
*/
@Test(groups = "functional", testName = "replication.SyncReplTest")
public class SyncReplTest extends MultipleCacheManagersTest {
    // Shared key/value used across the test methods.
    String k = "key", v = "value";

    @Override
    protected void createCacheManagers() throws Throwable {
        // Two clustered managers, each hosting a synchronously replicated cache.
        Configuration replSync = getDefaultClusteredConfig(Configuration.CacheMode.REPL_SYNC);
        createClusteredCaches(2, "replSync", replSync);
    }

    /** Put on one node must be readable on the other; remove must clear both. */
    public void testBasicOperation() {
        Cache cache1 = cache(0, "replSync");
        Cache cache2 = cache(1, "replSync");
        assertClusterSize("Should only be 2 caches in the cluster!!!", 2);
        assertNull("Should be null", cache1.get(k));
        assertNull("Should be null", cache2.get(k));
        cache1.put(k, v);
        assertEquals(v, cache1.get(k));
        assertEquals("Should have replicated", v, cache2.get(k));
        cache2.remove(k);
        assert cache1.isEmpty();
        assert cache2.isEmpty();
    }

    /** Two caches on the same shared transport must replicate independently. */
    public void testMultpleCachesOnSharedTransport() {
        Cache cache1 = cache(0, "replSync");
        Cache cache2 = cache(1, "replSync");
        assertClusterSize("Should only be 2 caches in the cluster!!!", 2);
        assert cache1.isEmpty();
        assert cache2.isEmpty();
        Configuration newConf = getDefaultClusteredConfig(Configuration.CacheMode.REPL_SYNC);
        defineConfigurationOnAllManagers("newCache", newConf);
        Cache altCache1 = manager(0).getCache("newCache");
        Cache altCache2 = manager(1).getCache("newCache");
        try {
            assert altCache1.isEmpty();
            assert altCache2.isEmpty();
            // Writes to one cache must not leak into the other.
            cache1.put(k, v);
            assert cache1.get(k).equals(v);
            assert cache2.get(k).equals(v);
            assert altCache1.isEmpty();
            assert altCache2.isEmpty();
            altCache1.put(k, "value2");
            assert altCache1.get(k).equals("value2");
            assert altCache2.get(k).equals("value2");
            assert cache1.get(k).equals(v);
            assert cache2.get(k).equals(v);
        } finally {
            removeCacheFromCluster("newCache");
        }
    }

    /** Writing to a cache defined on only one node (asymmetric cluster). */
    public void testReplicateToNonExistentCache() {
        // strictPeerToPeer is now disabled by default
        boolean strictPeerToPeer = false;
        Cache cache1 = cache(0, "replSync");
        Cache cache2 = cache(1, "replSync");
        assertClusterSize("Should only be 2 caches in the cluster!!!", 2);
        assert cache1.isEmpty();
        assert cache2.isEmpty();
        Configuration newConf = getDefaultClusteredConfig(Configuration.CacheMode.REPL_SYNC);
        defineConfigurationOnAllManagers("newCache2", newConf);
        // Note: the cache is only started on manager 0, not on manager 1.
        Cache altCache1 = manager(0).getCache("newCache2");
        try {
            assert altCache1.isEmpty();
            cache1.put(k, v);
            assert cache1.get(k).equals(v);
            assert cache2.get(k).equals(v);
            assert altCache1.isEmpty();
            altCache1.put(k, "value2");
            assert !strictPeerToPeer : "With strict peer-to-peer enabled the asymmetric put should have failed";
            assert altCache1.get(k).equals("value2");
            assert cache1.get(k).equals(v);
            assert cache2.get(k).equals(v);
            assert manager(0).getCache("newCache2").get(k).equals("value2");
        } catch (CacheException e) {
            assert strictPeerToPeer : "With strict peer-to-peer disabled the asymmetric put should have succeeded";
        } finally {
            removeCacheFromCluster("newCache2");
        }
    }

    /**
     * Verifies that a sync cache and an async cache sharing one transport each
     * use the correct response mode, by swapping in a mock Transport and
     * asserting on the invokeRemotely call made for each put.
     */
    public void testMixingSyncAndAsyncOnSameTransport() throws Exception {
        Cache cache1 = cache(0, "replSync");
        Cache cache2 = cache(1, "replSync");
        waitForClusterToForm("replSync");
        Transport originalTransport = null;
        RpcManagerImpl rpcManager = null;
        RpcManagerImpl asyncRpcManager = null;
        Map<Address, Response> emptyResponses = Collections.emptyMap();
        try {
            Configuration asyncCache = getDefaultClusteredConfig(Configuration.CacheMode.REPL_ASYNC);
            asyncCache.setUseAsyncMarshalling(true);
            defineConfigurationOnAllManagers("asyncCache", asyncCache);
            Cache asyncCache1 = manager(0).getCache("asyncCache");
            Cache asyncCache2 = manager(1).getCache("asyncCache");
            waitForClusterToForm("asyncCache");
            // replace the transport with a mock object
            Transport mockTransport = mock(Transport.class);
            Address mockAddressOne = mock(Address.class);
            Address mockAddressTwo = mock(Address.class);
            List<Address> addresses = new LinkedList<Address>();
            addresses.add(mockAddressOne);
            addresses.add(mockAddressTwo);
            when(mockTransport.getAddress()).thenReturn(mockAddressOne);
            when(mockTransport.getMembers()).thenReturn(addresses);
            // this is shared by all caches managed by the cache manager
            originalTransport = TestingUtil.extractGlobalComponent(cache1.getCacheManager(), Transport.class);
            rpcManager = (RpcManagerImpl) TestingUtil.extractComponent(cache1, RpcManager.class);
            rpcManager.setTransport(mockTransport);
            when(
                    mockTransport.invokeRemotely((List<Address>) anyObject(),
                            (CacheRpcCommand) anyObject(), eq(ResponseMode.SYNCHRONOUS), anyLong(),
                            anyBoolean(), (ResponseFilter) anyObject(), anyBoolean(), anyBoolean())).thenReturn(emptyResponses);
            // check that the replication call was sync
            cache1.put("k", "v");
            verify(mockTransport).invokeRemotely((List<Address>) anyObject(),
                    (CacheRpcCommand) anyObject(), eq(ResponseMode.SYNCHRONOUS), anyLong(),
                    anyBoolean(), (ResponseFilter) anyObject(), anyBoolean(), anyBoolean());
            // resume to test for async
            asyncRpcManager = (RpcManagerImpl) TestingUtil.extractComponent(asyncCache1, RpcManager.class);
            asyncRpcManager.setTransport(mockTransport);
            // reset() clears the earlier stubbing/verification; re-stub the basics.
            reset(mockTransport);
            when(mockTransport.getAddress()).thenReturn(mockAddressOne);
            when(mockTransport.getMembers()).thenReturn(addresses);
            when(
                    mockTransport.invokeRemotely((List<Address>) anyObject(),
                            (CacheRpcCommand) anyObject(), eq(ResponseMode.ASYNCHRONOUS), anyLong(),
                            anyBoolean(), (ResponseFilter) anyObject(), anyBoolean(), anyBoolean())).thenReturn(emptyResponses);
            asyncCache1.put("k", "v");
            verify(mockTransport).invokeRemotely((List<Address>) anyObject(),
                    (CacheRpcCommand) anyObject(), eq(ResponseMode.ASYNCHRONOUS), anyLong(),
                    anyBoolean(), (ResponseFilter) anyObject(), anyBoolean(), anyBoolean());
        } finally {
            // replace original transport
            if (rpcManager != null)
                rpcManager.setTransport(originalTransport);
            if (asyncRpcManager != null)
                asyncRpcManager.setTransport(originalTransport);
        }
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Seiji Sogabe, Tom Huybrechts
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.security;
import hudson.DescriptorExtensionList;
import hudson.Extension;
import hudson.ExtensionPoint;
import hudson.model.AbstractItem;
import hudson.model.AbstractProject;
import hudson.model.Computer;
import hudson.model.Describable;
import hudson.model.Descriptor;
import hudson.model.Hudson;
import hudson.model.Job;
import hudson.model.Node;
import hudson.model.User;
import hudson.model.View;
import hudson.slaves.Cloud;
import hudson.util.DescriptorList;
import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import net.sf.json.JSONObject;
import org.acegisecurity.Authentication;
import org.kohsuke.stapler.StaplerRequest;
/**
* Controls authorization throughout Hudson.
*
* <h2>Persistence</h2>
* <p>
* This object will be persisted along with {@link Hudson} object.
* Hudson by itself won't put the ACL returned from {@link #getRootACL()} into the serialized object graph,
* so if that object contains state and needs to be persisted, it's the responsibility of
* {@link AuthorizationStrategy} to do so (by keeping them in an instance field.)
*
* <h2>Re-configuration</h2>
* <p>
* The corresponding {@link Describable} instance will be asked to create a new {@link AuthorizationStrategy}
* every time the system configuration is updated. Implementations that keep more state in ACL beyond
* the system configuration should use {@link Hudson#getAuthorizationStrategy()} to talk to the current
* instance to carry over the state.
*
* @author Kohsuke Kawaguchi
* @see SecurityRealm
*/
public abstract class AuthorizationStrategy implements Describable<AuthorizationStrategy>, ExtensionPoint {
    /**
     * Returns the instance of {@link ACL} where all the other {@link ACL} instances
     * for all the other model objects eventually delegate.
     * <p>
     * IOW, this ACL will have the ultimate say on the access control.
     */
    public abstract ACL getRootACL();
    /**
     * @deprecated
     *      Override {@link #getACL(Job)} instead.
     */
    @Deprecated // annotation added to match the @deprecated javadoc so the compiler warns callers
    public ACL getACL(AbstractProject<?,?> project) {
        return getACL((Job)project);
    }
    public ACL getACL(Job<?,?> project) {
        return getRootACL();
    }
    /**
     * Implementation can choose to provide different ACL for different views.
     * This can be used as a basis for more fine-grained access control.
     *
     * <p>
     * The default implementation returns the ACL of the ViewGroup.
     *
     * @since 1.220
     */
    public ACL getACL(View item) {
        return item.getOwner().getACL();
    }
    /**
     * Implementation can choose to provide different ACL for different items.
     * This can be used as a basis for more fine-grained access control.
     *
     * <p>
     * The default implementation returns {@link #getRootACL()}.
     *
     * @since 1.220
     */
    public ACL getACL(AbstractItem item) {
        return getRootACL();
    }
    /**
     * Implementation can choose to provide different ACL per user.
     * This can be used as a basis for more fine-grained access control.
     *
     * <p>
     * The default implementation returns {@link #getRootACL()}.
     *
     * @since 1.221
     */
    public ACL getACL(User user) {
        return getRootACL();
    }
    /**
     * Implementation can choose to provide different ACL for different computers.
     * This can be used as a basis for more fine-grained access control.
     *
     * <p>
     * The default implementation delegates to {@link #getACL(Node)}
     *
     * @since 1.220
     */
    public ACL getACL(Computer computer) {
        return getACL(computer.getNode());
    }
    /**
     * Implementation can choose to provide different ACL for different {@link Cloud}s.
     * This can be used as a basis for more fine-grained access control.
     *
     * <p>
     * The default implementation returns {@link #getRootACL()}.
     *
     * @since 1.252
     */
    public ACL getACL(Cloud cloud) {
        return getRootACL();
    }
    /** Default per-node ACL; delegates to {@link #getRootACL()}. */
    public ACL getACL(Node node) {
        return getRootACL();
    }
    /**
     * Returns the list of all group/role names used in this authorization strategy,
     * and the ACL returned from the {@link #getRootACL()} method.
     * <p>
     * This method is used by {@link ContainerAuthentication} to work around the servlet API issue
     * that prevents us from enumerating roles that the user has.
     * <p>
     * If such enumeration is impossible, do the best to list as many as possible, then
     * return it. In the worst case, just return an empty list. Doing so would prevent
     * users from using role names as group names (see HUDSON-2716 for such one such report.)
     *
     * @return
     *      never null.
     */
    public abstract Collection<String> getGroups();
    public Descriptor<AuthorizationStrategy> getDescriptor() {
        return Hudson.getInstance().getDescriptor(getClass());
    }
    /**
     * Returns all the registered {@link AuthorizationStrategy} descriptors.
     */
    public static DescriptorExtensionList<AuthorizationStrategy,Descriptor<AuthorizationStrategy>> all() {
        return Hudson.getInstance().getDescriptorList(AuthorizationStrategy.class);
    }
    /**
     * All registered {@link SecurityRealm} implementations.
     *
     * @deprecated
     *      Use {@link #all()} for read access, and {@link Extension} for registration.
     */
    @Deprecated // annotation added to match the @deprecated javadoc so the compiler warns callers
    public static final DescriptorList<AuthorizationStrategy> LIST = new DescriptorList<AuthorizationStrategy>(AuthorizationStrategy.class);
    /**
     * {@link AuthorizationStrategy} that implements the semantics
     * of unsecured Hudson where everyone has full control.
     *
     * <p>
     * This singleton is safe because {@link Unsecured} is stateless.
     */
    public static final AuthorizationStrategy UNSECURED = new Unsecured();
    public static final class Unsecured extends AuthorizationStrategy implements Serializable {
        /**
         * Maintains the singleton semantics.
         */
        private Object readResolve() {
            return UNSECURED;
        }
        @Override
        public ACL getRootACL() {
            return UNSECURED_ACL;
        }
        public Collection<String> getGroups() {
            return Collections.emptySet();
        }
        // Grants every permission to every authentication.
        private static final ACL UNSECURED_ACL = new ACL() {
            public boolean hasPermission(Authentication a, Permission permission) {
                return true;
            }
        };
        @Extension
        public static final class DescriptorImpl extends Descriptor<AuthorizationStrategy> {
            public String getDisplayName() {
                return Messages.AuthorizationStrategy_DisplayName();
            }
            public AuthorizationStrategy newInstance(StaplerRequest req, JSONObject formData) throws FormException {
                return UNSECURED;
            }
            public String getHelpFile() {
                return "/help/security/no-authorization.html";
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
*/
/**
 * Tests {@link Level} lookup and the specificity comparison methods.
 * A duplicated WARN block (previously mislabeled "// Level.ERROR") has been
 * removed from testIsLessSpecificThan — it repeated the WARN assertions verbatim.
 */
public class LevelTest {
    @Test
    public void testDefault() {
        // Unknown names fall back to the supplied default level.
        final Level level = Level.toLevel("Information", Level.ERROR);
        assertNotNull(level);
        assertEquals(Level.ERROR, level);
    }
    @Test
    public void testForNameEquals() {
        // forName registers a custom level that subsequent lookups resolve to.
        final String name = "Foo";
        final int intValue = 1;
        final Level level = Level.forName(name, intValue);
        assertNotNull(level);
        assertEquals(level, Level.forName(name, intValue));
        assertEquals(level, Level.getLevel(name));
        assertEquals(intValue, Level.getLevel(name).intLevel());
    }
    @Test
    public void testGoodLevels() {
        final Level level = Level.toLevel("INFO");
        assertNotNull(level);
        assertEquals(Level.INFO, level);
    }
    @Test
    public void testIsLessSpecificThan() {
        // Level.OFF
        assertTrue(Level.OFF.isLessSpecificThan(Level.OFF));
        assertFalse(Level.OFF.isLessSpecificThan(Level.FATAL));
        assertFalse(Level.OFF.isLessSpecificThan(Level.ERROR));
        assertFalse(Level.OFF.isLessSpecificThan(Level.WARN));
        assertFalse(Level.OFF.isLessSpecificThan(Level.INFO));
        assertFalse(Level.OFF.isLessSpecificThan(Level.DEBUG));
        assertFalse(Level.OFF.isLessSpecificThan(Level.TRACE));
        assertFalse(Level.OFF.isLessSpecificThan(Level.ALL));
        // Level.FATAL
        assertTrue(Level.FATAL.isLessSpecificThan(Level.OFF));
        assertTrue(Level.FATAL.isLessSpecificThan(Level.FATAL));
        assertFalse(Level.FATAL.isLessSpecificThan(Level.ERROR));
        assertFalse(Level.FATAL.isLessSpecificThan(Level.WARN));
        assertFalse(Level.FATAL.isLessSpecificThan(Level.INFO));
        assertFalse(Level.FATAL.isLessSpecificThan(Level.DEBUG));
        assertFalse(Level.FATAL.isLessSpecificThan(Level.TRACE));
        assertFalse(Level.FATAL.isLessSpecificThan(Level.ALL));
        // Level.ERROR
        assertTrue(Level.ERROR.isLessSpecificThan(Level.OFF));
        assertTrue(Level.ERROR.isLessSpecificThan(Level.FATAL));
        assertTrue(Level.ERROR.isLessSpecificThan(Level.ERROR));
        assertFalse(Level.ERROR.isLessSpecificThan(Level.WARN));
        assertFalse(Level.ERROR.isLessSpecificThan(Level.INFO));
        assertFalse(Level.ERROR.isLessSpecificThan(Level.DEBUG));
        assertFalse(Level.ERROR.isLessSpecificThan(Level.TRACE));
        assertFalse(Level.ERROR.isLessSpecificThan(Level.ALL));
        // Level.WARN
        assertTrue(Level.WARN.isLessSpecificThan(Level.OFF));
        assertTrue(Level.WARN.isLessSpecificThan(Level.FATAL));
        assertTrue(Level.WARN.isLessSpecificThan(Level.ERROR));
        assertTrue(Level.WARN.isLessSpecificThan(Level.WARN));
        assertFalse(Level.WARN.isLessSpecificThan(Level.INFO));
        assertFalse(Level.WARN.isLessSpecificThan(Level.DEBUG));
        assertFalse(Level.WARN.isLessSpecificThan(Level.TRACE));
        assertFalse(Level.WARN.isLessSpecificThan(Level.ALL));
        // Level.INFO
        assertTrue(Level.INFO.isLessSpecificThan(Level.OFF));
        assertTrue(Level.INFO.isLessSpecificThan(Level.FATAL));
        assertTrue(Level.INFO.isLessSpecificThan(Level.ERROR));
        assertTrue(Level.INFO.isLessSpecificThan(Level.WARN));
        assertTrue(Level.INFO.isLessSpecificThan(Level.INFO));
        assertFalse(Level.INFO.isLessSpecificThan(Level.DEBUG));
        assertFalse(Level.INFO.isLessSpecificThan(Level.TRACE));
        assertFalse(Level.INFO.isLessSpecificThan(Level.ALL));
        // Level.DEBUG
        assertTrue(Level.DEBUG.isLessSpecificThan(Level.OFF));
        assertTrue(Level.DEBUG.isLessSpecificThan(Level.FATAL));
        assertTrue(Level.DEBUG.isLessSpecificThan(Level.ERROR));
        assertTrue(Level.DEBUG.isLessSpecificThan(Level.WARN));
        assertTrue(Level.DEBUG.isLessSpecificThan(Level.INFO));
        assertTrue(Level.DEBUG.isLessSpecificThan(Level.DEBUG));
        assertFalse(Level.DEBUG.isLessSpecificThan(Level.TRACE));
        assertFalse(Level.DEBUG.isLessSpecificThan(Level.ALL));
        // Level.TRACE
        assertTrue(Level.TRACE.isLessSpecificThan(Level.OFF));
        assertTrue(Level.TRACE.isLessSpecificThan(Level.FATAL));
        assertTrue(Level.TRACE.isLessSpecificThan(Level.ERROR));
        assertTrue(Level.TRACE.isLessSpecificThan(Level.WARN));
        assertTrue(Level.TRACE.isLessSpecificThan(Level.INFO));
        assertTrue(Level.TRACE.isLessSpecificThan(Level.DEBUG));
        assertTrue(Level.TRACE.isLessSpecificThan(Level.TRACE));
        assertFalse(Level.TRACE.isLessSpecificThan(Level.ALL));
        // Level.ALL
        assertTrue(Level.ALL.isLessSpecificThan(Level.OFF));
        assertTrue(Level.ALL.isLessSpecificThan(Level.FATAL));
        assertTrue(Level.ALL.isLessSpecificThan(Level.ERROR));
        assertTrue(Level.ALL.isLessSpecificThan(Level.WARN));
        assertTrue(Level.ALL.isLessSpecificThan(Level.INFO));
        assertTrue(Level.ALL.isLessSpecificThan(Level.DEBUG));
        assertTrue(Level.ALL.isLessSpecificThan(Level.TRACE));
        assertTrue(Level.ALL.isLessSpecificThan(Level.ALL));
    }
    @Test
    public void testIsMoreSpecificThan() {
        // Level.OFF
        assertTrue(Level.OFF.isMoreSpecificThan(Level.OFF));
        assertTrue(Level.OFF.isMoreSpecificThan(Level.FATAL));
        assertTrue(Level.OFF.isMoreSpecificThan(Level.ERROR));
        assertTrue(Level.OFF.isMoreSpecificThan(Level.WARN));
        assertTrue(Level.OFF.isMoreSpecificThan(Level.INFO));
        assertTrue(Level.OFF.isMoreSpecificThan(Level.DEBUG));
        assertTrue(Level.OFF.isMoreSpecificThan(Level.TRACE));
        assertTrue(Level.OFF.isMoreSpecificThan(Level.ALL));
        // Level.FATAL
        assertFalse(Level.FATAL.isMoreSpecificThan(Level.OFF));
        assertTrue(Level.FATAL.isMoreSpecificThan(Level.FATAL));
        assertTrue(Level.FATAL.isMoreSpecificThan(Level.ERROR));
        assertTrue(Level.FATAL.isMoreSpecificThan(Level.WARN));
        assertTrue(Level.FATAL.isMoreSpecificThan(Level.INFO));
        assertTrue(Level.FATAL.isMoreSpecificThan(Level.DEBUG));
        assertTrue(Level.FATAL.isMoreSpecificThan(Level.TRACE));
        assertTrue(Level.FATAL.isMoreSpecificThan(Level.ALL));
        // Level.ERROR
        assertFalse(Level.ERROR.isMoreSpecificThan(Level.OFF));
        assertFalse(Level.ERROR.isMoreSpecificThan(Level.FATAL));
        assertTrue(Level.ERROR.isMoreSpecificThan(Level.ERROR));
        assertTrue(Level.ERROR.isMoreSpecificThan(Level.WARN));
        assertTrue(Level.ERROR.isMoreSpecificThan(Level.INFO));
        assertTrue(Level.ERROR.isMoreSpecificThan(Level.DEBUG));
        assertTrue(Level.ERROR.isMoreSpecificThan(Level.TRACE));
        assertTrue(Level.ERROR.isMoreSpecificThan(Level.ALL));
        // Level.WARN
        assertFalse(Level.WARN.isMoreSpecificThan(Level.OFF));
        assertFalse(Level.WARN.isMoreSpecificThan(Level.FATAL));
        assertFalse(Level.WARN.isMoreSpecificThan(Level.ERROR));
        assertTrue(Level.WARN.isMoreSpecificThan(Level.WARN));
        assertTrue(Level.WARN.isMoreSpecificThan(Level.INFO));
        assertTrue(Level.WARN.isMoreSpecificThan(Level.DEBUG));
        assertTrue(Level.WARN.isMoreSpecificThan(Level.TRACE));
        assertTrue(Level.WARN.isMoreSpecificThan(Level.ALL));
        // Level.INFO
        assertFalse(Level.INFO.isMoreSpecificThan(Level.OFF));
        assertFalse(Level.INFO.isMoreSpecificThan(Level.FATAL));
        assertFalse(Level.INFO.isMoreSpecificThan(Level.ERROR));
        assertFalse(Level.INFO.isMoreSpecificThan(Level.WARN));
        assertTrue(Level.INFO.isMoreSpecificThan(Level.INFO));
        assertTrue(Level.INFO.isMoreSpecificThan(Level.DEBUG));
        assertTrue(Level.INFO.isMoreSpecificThan(Level.TRACE));
        assertTrue(Level.INFO.isMoreSpecificThan(Level.ALL));
        // Level.DEBUG
        assertFalse(Level.DEBUG.isMoreSpecificThan(Level.OFF));
        assertFalse(Level.DEBUG.isMoreSpecificThan(Level.FATAL));
        assertFalse(Level.DEBUG.isMoreSpecificThan(Level.ERROR));
        assertFalse(Level.DEBUG.isMoreSpecificThan(Level.WARN));
        assertFalse(Level.DEBUG.isMoreSpecificThan(Level.INFO));
        assertTrue(Level.DEBUG.isMoreSpecificThan(Level.DEBUG));
        assertTrue(Level.DEBUG.isMoreSpecificThan(Level.TRACE));
        assertTrue(Level.DEBUG.isMoreSpecificThan(Level.ALL));
        // Level.TRACE
        assertFalse(Level.TRACE.isMoreSpecificThan(Level.OFF));
        assertFalse(Level.TRACE.isMoreSpecificThan(Level.FATAL));
        assertFalse(Level.TRACE.isMoreSpecificThan(Level.ERROR));
        assertFalse(Level.TRACE.isMoreSpecificThan(Level.WARN));
        assertFalse(Level.TRACE.isMoreSpecificThan(Level.INFO));
        assertFalse(Level.TRACE.isMoreSpecificThan(Level.DEBUG));
        assertTrue(Level.TRACE.isMoreSpecificThan(Level.TRACE));
        assertTrue(Level.TRACE.isMoreSpecificThan(Level.ALL));
        // Level.ALL
        assertFalse(Level.ALL.isMoreSpecificThan(Level.OFF));
        assertFalse(Level.ALL.isMoreSpecificThan(Level.FATAL));
        assertFalse(Level.ALL.isMoreSpecificThan(Level.ERROR));
        assertFalse(Level.ALL.isMoreSpecificThan(Level.WARN));
        assertFalse(Level.ALL.isMoreSpecificThan(Level.INFO));
        assertFalse(Level.ALL.isMoreSpecificThan(Level.DEBUG));
        assertFalse(Level.ALL.isMoreSpecificThan(Level.TRACE));
        assertTrue(Level.ALL.isMoreSpecificThan(Level.ALL));
    }
}
| |
/**
* Copyright 2011 Noa Resare
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.core;
import org.bitcoinj.params.MainNetParams;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.net.InetAddress;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import static org.bitcoinj.core.Utils.HEX;
import static org.junit.Assert.*;
/**
 * Tests for {@code BitcoinSerializer}: round-trip (de)serialization of
 * {@code addr}, {@code tx} and {@code headers} messages against fixed
 * wire-format fixtures, plus error handling for malformed packet headers.
 */
public class BitcoinSerializerTest {
    /** Raw main-net "addr" message (magic f9beb4d9), as captured on the wire. */
    private static final byte[] ADDRESS_MESSAGE_BYTES = HEX.decode("f9beb4d96164647200000000000000001f000000" +
            "ed52399b01e215104d010000000000000000000000000000000000ffff0a000001208d");

    /** Raw main-net "tx" message; hex byte pairs separated by single spaces. */
    private static final byte[] TRANSACTION_MESSAGE_BYTES = HEX.withSeparator(" ", 2).decode(
            "f9 be b4 d9 74 78 00 00 00 00 00 00 00 00 00 00" +
            "02 01 00 00 e2 93 cd be 01 00 00 00 01 6d bd db" +
            "08 5b 1d 8a f7 51 84 f0 bc 01 fa d5 8d 12 66 e9" +
            "b6 3b 50 88 19 90 e4 b4 0d 6a ee 36 29 00 00 00" +
            "00 8b 48 30 45 02 21 00 f3 58 1e 19 72 ae 8a c7" +
            "c7 36 7a 7a 25 3b c1 13 52 23 ad b9 a4 68 bb 3a" +
            "59 23 3f 45 bc 57 83 80 02 20 59 af 01 ca 17 d0" +
            "0e 41 83 7a 1d 58 e9 7a a3 1b ae 58 4e de c2 8d" +
            "35 bd 96 92 36 90 91 3b ae 9a 01 41 04 9c 02 bf" +
            "c9 7e f2 36 ce 6d 8f e5 d9 40 13 c7 21 e9 15 98" +
            "2a cd 2b 12 b6 5d 9b 7d 59 e2 0a 84 20 05 f8 fc" +
            "4e 02 53 2e 87 3d 37 b9 6f 09 d6 d4 51 1a da 8f" +
            "14 04 2f 46 61 4a 4c 70 c0 f1 4b ef f5 ff ff ff" +
            "ff 02 40 4b 4c 00 00 00 00 00 19 76 a9 14 1a a0" +
            "cd 1c be a6 e7 45 8a 7a ba d5 12 a9 d9 ea 1a fb" +
            "22 5e 88 ac 80 fa e9 c7 00 00 00 00 19 76 a9 14" +
            "0e ab 5b ea 43 6a 04 84 cf ab 12 48 5e fd a0 b7" +
            "8b 4e cc 52 88 ac 00 00 00 00");

    /**
     * Deserializes a known "addr" message, round-trips it, and checks that
     * the reported message size tracks address additions and removals.
     */
    @Test
    public void testAddr() throws Exception {
        final NetworkParameters params = MainNetParams.get();
        MessageSerializer serializer = params.getDefaultSerializer();
        // the actual data from https://en.bitcoin.it/wiki/Protocol_specification#addr
        AddressMessage addressMessage = (AddressMessage) serializer.deserialize(ByteBuffer.wrap(ADDRESS_MESSAGE_BYTES));
        assertEquals(1, addressMessage.getAddresses().size());
        PeerAddress peerAddress = addressMessage.getAddresses().get(0);
        assertEquals(8333, peerAddress.getPort());
        assertEquals("10.0.0.1", peerAddress.getAddr().getHostAddress());
        ByteArrayOutputStream bos = new ByteArrayOutputStream(ADDRESS_MESSAGE_BYTES.length);
        serializer.serialize(addressMessage, bos);
        assertEquals(31, addressMessage.getMessageSize());
        // Each additional peer address grows the payload by 30 bytes.
        addressMessage.addAddress(new PeerAddress(params, InetAddress.getLocalHost()));
        assertEquals(61, addressMessage.getMessageSize());
        addressMessage.removeAddress(0);
        assertEquals(31, addressMessage.getMessageSize());
        // This won't be true due to dynamic timestamps.
        //assertTrue(LazyParseByteCacheTest.arrayContains(bos.toByteArray(), addrMessage));
    }

    /**
     * Verifies the byte-caching serializer: mutating a parent or child message
     * uncaches what it must (and only that), and untouched messages reserialize
     * to exactly the original bytes.
     */
    @Test
    public void testCachedParsing() throws Exception {
        MessageSerializer serializer = MainNetParams.get().getSerializer(true);
        // First try writing to a field to ensure uncaching of children is not affected.
        Transaction transaction = (Transaction) serializer.deserialize(ByteBuffer.wrap(TRANSACTION_MESSAGE_BYTES));
        assertNotNull(transaction);
        assertTrue(transaction.isCached());
        transaction.setLockTime(1);
        // Parent should have been uncached.
        assertFalse(transaction.isCached());
        // Child should remain cached.
        assertTrue(transaction.getInputs().get(0).isCached());
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        serializer.serialize(transaction, bos);
        assertFalse(Arrays.equals(TRANSACTION_MESSAGE_BYTES, bos.toByteArray()));
        // Now try writing to a child to ensure uncaching is propagated up to parent but not to siblings.
        transaction = (Transaction) serializer.deserialize(ByteBuffer.wrap(TRANSACTION_MESSAGE_BYTES));
        assertNotNull(transaction);
        assertTrue(transaction.isCached());
        transaction.getInputs().get(0).setSequenceNumber(1);
        // Parent should have been uncached.
        assertFalse(transaction.isCached());
        // So should child.
        assertFalse(transaction.getInputs().get(0).isCached());
        bos = new ByteArrayOutputStream();
        serializer.serialize(transaction, bos);
        assertFalse(Arrays.equals(TRANSACTION_MESSAGE_BYTES, bos.toByteArray()));
        // Deserialize/reserialize to check for byte-for-byte equality.
        transaction = (Transaction) serializer.deserialize(ByteBuffer.wrap(TRANSACTION_MESSAGE_BYTES));
        assertNotNull(transaction);
        assertTrue(transaction.isCached());
        bos = new ByteArrayOutputStream();
        serializer.serialize(transaction, bos);
        assertArrayEquals(TRANSACTION_MESSAGE_BYTES, bos.toByteArray());
        // Deserialize/reserialize again. Setting a field to its existing value still triggers uncache.
        transaction = (Transaction) serializer.deserialize(ByteBuffer.wrap(TRANSACTION_MESSAGE_BYTES));
        assertNotNull(transaction);
        assertTrue(transaction.isCached());
        transaction.getInputs().get(0).setSequenceNumber(transaction.getInputs().get(0).getSequenceNumber());
        bos = new ByteArrayOutputStream();
        serializer.serialize(transaction, bos);
        assertArrayEquals(TRANSACTION_MESSAGE_BYTES, bos.toByteArray());
    }

    /**
     * Get 1 header of the block number 1 (the first one is 0) in the chain
     */
    @Test
    public void testHeaders1() throws Exception {
        MessageSerializer serializer = MainNetParams.get().getDefaultSerializer();
        byte[] headersMessageBytes = HEX.decode("f9beb4d9686561" +
                "646572730000000000520000005d4fab8101010000006fe28c0ab6f1b372c1a6a246ae6" +
                "3f74f931e8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677b" +
                "a1a3c3540bf7b1cdb606e857233e0e61bc6649ffff001d01e3629900");
        HeadersMessage headersMessage = (HeadersMessage) serializer.deserialize(ByteBuffer.wrap(headersMessageBytes));
        // The first block after the genesis
        // http://blockexplorer.com/b/1
        Block block = headersMessage.getBlockHeaders().get(0);
        assertEquals("00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048", block.getHashAsString());
        // A "headers" message carries header-only blocks (the trailing tx count is
        // zero), so no transaction list is parsed. Was assertNotNull, which is wrong.
        assertNull(block.transactions);
        assertEquals("0e3e2357e806b6cdb1f70b54c3a3a17b6714ee1f0e68bebb44a74b1efd512098", Utils.HEX.encode(block.getMerkleRoot().getBytes()));
        // Round-trip: reserializing must reproduce the exact input bytes.
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        serializer.serialize(headersMessage, byteArrayOutputStream);
        byte[] serializedBytes = byteArrayOutputStream.toByteArray();
        assertArrayEquals(headersMessageBytes, serializedBytes);
    }

    /**
     * Get 6 headers of blocks 1-6 in the chain
     */
    @Test
    public void testHeaders2() throws Exception {
        MessageSerializer serializer = MainNetParams.get().getDefaultSerializer();
        byte[] headersMessageBytes = HEX.decode("f9beb4d96865616465" +
                "72730000000000e701000085acd4ea06010000006fe28c0ab6f1b372c1a6a246ae63f74f931e" +
                "8365e15a089c68d6190000000000982051fd1e4ba744bbbe680e1fee14677ba1a3c3540bf7b1c" +
                "db606e857233e0e61bc6649ffff001d01e3629900010000004860eb18bf1b1620e37e9490fc8a" +
                "427514416fd75159ab86688e9a8300000000d5fdcc541e25de1c7a5addedf24858b8bb665c9f36" +
                "ef744ee42c316022c90f9bb0bc6649ffff001d08d2bd610001000000bddd99ccfda39da1b108ce1" +
                "a5d70038d0a967bacb68b6b63065f626a0000000044f672226090d85db9a9f2fbfe5f0f9609b387" +
                "af7be5b7fbb7a1767c831c9e995dbe6649ffff001d05e0ed6d00010000004944469562ae1c2c74" +
                "d9a535e00b6f3e40ffbad4f2fda3895501b582000000007a06ea98cd40ba2e3288262b28638cec" +
                "5337c1456aaf5eedc8e9e5a20f062bdf8cc16649ffff001d2bfee0a9000100000085144a84488e" +
                "a88d221c8bd6c059da090e88f8a2c99690ee55dbba4e00000000e11c48fecdd9e72510ca84f023" +
                "370c9a38bf91ac5cae88019bee94d24528526344c36649ffff001d1d03e4770001000000fc33f5" +
                "96f822a0a1951ffdbf2a897b095636ad871707bf5d3162729b00000000379dfb96a5ea8c81700ea4" +
                "ac6b97ae9a9312b2d4301a29580e924ee6761a2520adc46649ffff001d189c4c9700");
        HeadersMessage headersMessage = (HeadersMessage) serializer.deserialize(ByteBuffer.wrap(headersMessageBytes));
        assertEquals(6, headersMessage.getBlockHeaders().size());
        // index 0 block is the number 1 block in the block chain
        // http://blockexplorer.com/b/1
        Block zeroBlock = headersMessage.getBlockHeaders().get(0);
        assertEquals("00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048",
                zeroBlock.getHashAsString());
        assertEquals(2573394689L, zeroBlock.getNonce());
        // index 3 block is the number 4 block in the block chain
        // http://blockexplorer.com/b/4
        Block thirdBlock = headersMessage.getBlockHeaders().get(3);
        assertEquals("000000004ebadb55ee9096c9a2f8880e09da59c0d68b1c228da88e48844a1485",
                thirdBlock.getHashAsString());
        assertEquals(2850094635L, thirdBlock.getNonce());
        // Round-trip: reserializing must reproduce the exact input bytes.
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        serializer.serialize(headersMessage, byteArrayOutputStream);
        byte[] serializedBytes = byteArrayOutputStream.toByteArray();
        assertArrayEquals(headersMessageBytes, serializedBytes);
    }

    /** A one-byte buffer is too short to hold a packet header. */
    @Test(expected = BufferUnderflowException.class)
    public void testBitcoinPacketHeaderTooShort() {
        new BitcoinSerializer.BitcoinPacketHeader(ByteBuffer.wrap(new byte[] { 0 }));
    }

    /** A declared payload size exceeding the maximum must be rejected. */
    @Test(expected = ProtocolException.class)
    public void testBitcoinPacketHeaderTooLong() {
        // Message with a Message size which is 1 too big, in little endian format.
        byte[] wrongMessageLength = HEX.decode("000000000000000000000000010000020000000000");
        new BitcoinSerializer.BitcoinPacketHeader(ByteBuffer.wrap(wrongMessageLength));
    }

    /** Scanning a buffer that never contains the magic bytes must underflow. */
    @Test(expected = BufferUnderflowException.class)
    public void testSeekPastMagicBytes() {
        // Fail in another way, there is data in the stream but no magic bytes.
        byte[] brokenMessage = HEX.decode("000000");
        MainNetParams.get().getDefaultSerializer().seekPastMagicBytes(ByteBuffer.wrap(brokenMessage));
    }

    /**
     * Tests serialization of an unknown message.
     */
    @Test(expected = Error.class)
    public void testSerializeUnknownMessage() throws Exception {
        MessageSerializer serializer = MainNetParams.get().getDefaultSerializer();
        // Anonymous Message subclass with no registered name; serializing it is a hard error.
        Message unknownMessage = new Message() {
            @Override
            protected void parse() throws ProtocolException {
            }
        };
        ByteArrayOutputStream bos = new ByteArrayOutputStream(ADDRESS_MESSAGE_BYTES.length);
        serializer.serialize(unknownMessage, bos);
    }
}
| |
/*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.remote.work;
import com.googlecode.junit.ext.JunitExtRunner;
import com.googlecode.junit.ext.RunIf;
import com.thoughtworks.go.config.ConfigCache;
import com.thoughtworks.go.config.CruiseConfig;
import com.thoughtworks.go.config.JobConfig;
import com.thoughtworks.go.config.MagicalGoConfigXmlLoader;
import com.thoughtworks.go.domain.*;
import com.thoughtworks.go.domain.buildcause.BuildCause;
import com.thoughtworks.go.domain.builder.Builder;
import com.thoughtworks.go.helper.ConfigFileFixture;
import com.thoughtworks.go.helper.JobInstanceMother;
import com.thoughtworks.go.helper.StageMother;
import com.thoughtworks.go.junitext.EnhancedOSChecker;
import com.thoughtworks.go.plugin.access.packagematerial.PackageAsRepositoryExtension;
import com.thoughtworks.go.plugin.access.pluggabletask.TaskExtension;
import com.thoughtworks.go.plugin.access.scm.SCMExtension;
import com.thoughtworks.go.remote.AgentIdentifier;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.server.service.UpstreamPipelineResolver;
import com.thoughtworks.go.server.service.builders.*;
import com.thoughtworks.go.util.ConfigElementImplementationRegistryMother;
import com.thoughtworks.go.util.FileUtil;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.SystemUtil;
import com.thoughtworks.go.util.command.EnvironmentVariableContext;
import com.thoughtworks.go.websocket.MessageEncoding;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import static com.thoughtworks.go.domain.JobResult.Failed;
import static com.thoughtworks.go.domain.JobResult.Passed;
import static com.thoughtworks.go.domain.JobState.*;
import static com.thoughtworks.go.junitext.EnhancedOSChecker.DO_NOT_RUN_ON;
import static com.thoughtworks.go.junitext.EnhancedOSChecker.WINDOWS;
import static com.thoughtworks.go.matchers.ConsoleOutMatcher.*;
import static com.thoughtworks.go.matchers.RegexMatcher.matches;
import static com.thoughtworks.go.util.SystemUtil.currentWorkingDirectory;
import static com.thoughtworks.go.util.SystemUtil.isWindows;
import static javax.servlet.http.HttpServletResponse.SC_NOT_ACCEPTABLE;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;
@RunWith(JunitExtRunner.class)
public class BuildWorkTest {
// Identifiers of the fixture job used by every inline job-config snippet below.
public static final String PIPELINE_NAME = "pipeline1";
public static final String PIPELINE_LABEL = "100";
public static final String STAGE_NAME = "mingle";
public static final String JOB_PLAN_NAME = "run-ant";
private BuildWork buildWork;
private AgentIdentifier agentIdentifier;
// Inline job-config XML snippets; each is handed to getWork(...) to build a BuildWork.
// Runs NAnt with a help target (Windows-only tests).
private static final String NANT = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <nant target=\"-help\"/>\n"
+ " </tasks>\n"
+ "</job>";
// NAnt task pointing at a working directory that does not exist.
private static final String NANT_WITH_WORKINGDIR = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <nant target=\"-help\" workingdir=\"not-exists\" />\n"
+ " </tasks>\n"
+ "</job>";
// Runs rake with --help.
private static final String RAKE = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <rake target=\"--help\"/>\n"
+ " </tasks>\n"
+ "</job>";
// Ant target that does not exist, so the job fails.
private static final String WILL_FAIL = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <ant target=\"something-not-really-exist\" />\n"
+ " </tasks>\n"
+ "</job>";
// Ant -help always succeeds, so the job passes.
private static final String WILL_PASS = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <ant target=\"-help\" />\n"
+ " </tasks>\n"
+ "</job>";
// Declares job-level environment variables, including overriding PATH (case differs on Windows).
private static final String WITH_ENV_VAR = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <environmentvariables>\n"
+ " <variable name=\"JOB_ENV\">\n"
+ " <value>foobar</value>\n"
+ " </variable>\n"
+ " <variable name=\"" + (isWindows() ? "Path": "PATH") +"\">\n"
+ " <value>/tmp</value>\n"
+ " </variable>\n"
+ " </environmentvariables>\n"
+ " <tasks>\n"
+ " <ant target=\"-help\" />\n"
+ " </tasks>\n"
+ "</job>";
// One plain and one secure variable, for masking-in-console tests.
private static final String WITH_SECRET_ENV_VAR = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <environmentvariables>\n"
+ " <variable name=\"foo\">\n"
+ " <value>foo(i am a secret)</value>\n"
+ " </variable>\n"
+ " <variable name=\"bar\" secure=\"true\">\n"
+ " <value>i am a secret</value>\n"
+ " </variable>\n"
+ " </environmentvariables>\n"
+ " <tasks>\n"
+ " <ant target=\"-help\" />\n"
+ " </tasks>\n"
+ "</job>";
private static final String SOMETHING_NOT_EXIST = "something-not-exist";
// Exec task whose command is not on the PATH.
private static final String CMD_NOT_EXIST = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <exec command=\"" + SOMETHING_NOT_EXIST + "\" />\n"
+ " </tasks>\n"
+ "</job>";
// Task guarded by runif status="failed"; should not run when the job passes.
private static final String WILL_NOT_RUN = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <exec command=\"echo\" args=\"run when status is failed\">\n"
+ " <runif status=\"failed\" />\n"
+ " </exec>\n"
+ " </tasks>\n"
+ "</job>";
// First task fails; the remaining tasks exercise runif failed/passed/any combinations.
private static final String MULTIPLE_TASKS = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <exec command=\"command-not-found\" >\n"
+ " </exec>\n"
+ " <exec command=\"echo\" args=\"run when status is failed\">\n"
+ " <runif status=\"failed\" />\n"
+ " </exec>\n"
+ " <exec command=\"echo\" args=\"run when status is passed\">\n"
+ " <runif status=\"passed\" />\n"
+ " </exec>\n"
+ " <exec command=\"echo\" args=\"run when status is any\">\n"
+ " <runif status=\"any\" />\n"
+ " </exec>\n"
+ " </tasks>\n"
+ "</job>";
// A single task with multiple runif conditions attached.
private static final String MULTIPLE_RUN_IFS = "<job name=\"" + JOB_PLAN_NAME + "\">\n"
+ " <tasks>\n"
+ " <exec command=\"echo\" args=\"run when status is failed, passed or cancelled\">\n"
+ " <runif status=\"failed\" />\n"
+ " <runif status=\"passed\" />\n"
+ " </exec>\n"
+ " </tasks>\n"
+ "</job>";
private EnvironmentVariableContext environmentVariableContext;
// Stubs that record the states/results and console output reported during doWork.
private com.thoughtworks.go.remote.work.BuildRepositoryRemoteStub buildRepository;
private GoArtifactsManipulatorStub artifactManipulator;
private static BuilderFactory builderFactory = new BuilderFactory(new AntTaskBuilder(), new ExecTaskBuilder(), new NantTaskBuilder(), new RakeTaskBuilder(),
new PluggableTaskBuilderCreator(mock(TaskExtension.class)), new KillAllChildProcessTaskBuilder(), new FetchTaskBuilder(), new NullTaskBuilder());
@Mock
private static UpstreamPipelineResolver resolver;
@Mock
private PackageAsRepositoryExtension packageAsRepositoryExtension;
@Mock
private SCMExtension scmExtension;
@Mock
private TaskExtension taskExtension;
/**
 * Builds a job config with two artifacts: one that never exists
 * ("something-not-there.txt") and one named by the caller, uploaded to
 * dist\test. Used by upload-failure tests.
 */
private static String willUpload(String file) {
    StringBuilder xml = new StringBuilder();
    xml.append("<job name=\"").append(JOB_PLAN_NAME).append("\">\n");
    xml.append(" <artifacts>\n");
    xml.append(" <artifact src=\"something-not-there.txt\" dest=\"dist\" />\n");
    xml.append(" <artifact src=\"").append(file).append("\" dest=\"dist\\test\" />\n");
    // Deliberately no newline after </artifacts>, matching the original layout.
    xml.append(" </artifacts>");
    xml.append(" <tasks>\n");
    xml.append(" <ant target=\"-help\" />\n");
    xml.append(" </tasks>\n");
    xml.append("</job>");
    return xml.toString();
}
/**
 * Builds a job config with a single artifact {@code file} uploaded to the
 * given {@code dest} folder.
 */
private static String willUploadToDest(String file, String dest) {
    StringBuilder xml = new StringBuilder();
    xml.append("<job name=\"").append(JOB_PLAN_NAME).append("\">\n");
    xml.append(" <artifacts>\n");
    xml.append(" <artifact src=\"").append(file).append("\" dest=\"").append(dest).append("\" />\n");
    // Deliberately no newline after </artifacts>, matching the original layout.
    xml.append(" </artifacts>");
    xml.append(" <tasks>\n");
    xml.append(" <ant target=\"-help\" />\n");
    xml.append(" </tasks>\n");
    xml.append("</job>");
    return xml.toString();
}
private static final int STAGE_COUNTER = 100;
// Dummy server URL; only needs to be a non-empty string for these tests.
private static final String SERVER_URL = "somewhere-does-not-matter";
// Identifier returned by the mocked JobPlan in the error-propagation tests.
private static final JobIdentifier JOB_IDENTIFIER = new JobIdentifier(PIPELINE_NAME, -3, PIPELINE_LABEL, STAGE_NAME, String.valueOf(STAGE_COUNTER), JOB_PLAN_NAME, 1L);
@Before
public void setUp() throws Exception {
    // Wire up the @Mock fields and fresh recording stubs for each test,
    // and point the agent at the dummy server URL.
    initMocks(this);
    agentIdentifier = new AgentIdentifier("localhost", "127.0.0.1", "uuid");
    environmentVariableContext = new EnvironmentVariableContext();
    artifactManipulator = new GoArtifactsManipulatorStub();
    new SystemEnvironment().setProperty("serviceUrl", SERVER_URL);
    buildRepository = new com.thoughtworks.go.remote.work.BuildRepositoryRemoteStub();
}
@After
public void tearDown() {
    // Undo the system property set in setUp and make sure no test
    // touched the shared resolver mock without stubbing it.
    new SystemEnvironment().clearProperty("serviceUrl");
    verifyNoMoreInteractions(resolver);
}
@Test
public void shouldReportStatus() throws Exception {
    // Even a failing job must report the normal state transitions to the server.
    buildWork = (BuildWork) getWork(WILL_FAIL, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(buildRepository.states, containsState(Preparing));
    assertThat(buildRepository.states, containsState(Building));
    assertThat(buildRepository.states, containsState(Completing));
}
@Test
public void shouldNotRunTaskWhichConditionDoesNotMatch() throws Exception {
    // The runif status="failed" task must be skipped because nothing failed.
    buildWork = (BuildWork) getWork(WILL_NOT_RUN, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
    new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    String actual = artifactManipulator.consoleOut();
    assertThat(actual, not(containsString("run when status is failed")));
}
@Test
public void shouldRunTaskWhenConditionMatches() throws Exception {
    // A task with both failed and passed runifs runs when the job passes,
    // and the console logs the task being started.
    buildWork = (BuildWork) getWork(MULTIPLE_RUN_IFS, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator,
    environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    String actual = artifactManipulator.consoleOut();
    assertThat(actual, containsString("[go] Current job status: passed."));
    assertThat(actual, containsString("[go] Start to execute task: <exec command=\"echo\" args=\"run when status is failed, passed or cancelled\" />."));
    assertThat(actual, containsString("run when status is failed, passed or cancelled"));
}
@Test
public void shouldRunTasksBasedOnConditions() throws Exception {
    // The first task fails; afterwards only the runif="failed" and
    // runif="any" tasks should execute, and the runif="passed" one should not.
    buildWork = (BuildWork) getWork(MULTIPLE_TASKS, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
    new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    String actual = artifactManipulator.consoleOut();
    assertThat(actual, containsString("run when status is failed"));
    assertThat(actual, printedExcRunIfInfo("command-not-found", "passed"));
    assertThat(actual, containsString("run when status is any"));
    assertThat(actual, printedExcRunIfInfo("echo", "run when status is any", "failed"));
    assertThat(actual, not(containsString("run when status is passed")));
    assertThat(actual, not(printedExcRunIfInfo("echo", "run when status is passed", "failed")));
    assertThat(actual, not(containsString("run when status is cancelled")));
    assertThat(actual, not(printedExcRunIfInfo("echo", "run when status is cancelled", "failed")));
}
@Test
public void shouldReportBuildIsFailedWhenAntBuildFailed() throws Exception {
    // A nonexistent ant target must end the job with a Failed result.
    buildWork = (BuildWork) getWork(WILL_FAIL, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
    new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(buildRepository.results, containsResult(Failed));
}
@Test
public void shouldReportDirectoryNotExists() throws Exception {
    // A task workingdir that does not exist should be reported on the console.
    buildWork = (BuildWork) getWork(NANT_WITH_WORKINGDIR, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
    new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(artifactManipulator.consoleOut(), containsString("not-exists\" is not a directory!"));
}
@Test
public void shouldReportUploadMessageWhenUpload() throws Exception {
    // Uploads cruise-output/log.xml to a nested dest folder and checks both
    // the console message and the URL the zipped artifact was posted to.
    String destFolder = "dest\\test\\sub-folder";
    final String url = String.format("%s/remoting/files/%s/%s/%s/%s/%s/%s?attempt=1&buildId=0", SERVER_URL, PIPELINE_NAME, PIPELINE_LABEL, STAGE_NAME, STAGE_COUNTER, JOB_PLAN_NAME,
    destFolder.replaceAll("\\\\", "/"));
    buildWork = (BuildWork) getWork(willUploadToDest("cruise-output/log.xml", destFolder), PIPELINE_NAME);
    com.thoughtworks.go.remote.work.HttpServiceStub httpService = new com.thoughtworks.go.remote.work.HttpServiceStub(HttpServletResponse.SC_OK);
    artifactManipulator = new GoArtifactsManipulatorStub(httpService);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    String actual = artifactManipulator.consoleOut();
    artifactManipulator.printConsoleOut();
    File basedir = new File("pipelines/pipeline1");
    // Case-insensitive compare since path casing differs across platforms.
    assertThat(actual.toLowerCase(), containsString(("Uploading artifacts from " + new File(basedir, "cruise-output/log.xml").getCanonicalPath()).toLowerCase()));
    Map<String, File> uploadedFiles = httpService.getUploadedFiles();
    assertThat(uploadedFiles.size(), is(1));
    assertThat(uploadedFiles.get(url).getAbsolutePath(), containsString("log.xml.zip"));
}
@Test
public void shouldFailTheJobWhenFailedToUploadArtifact() throws Exception {
    // The HTTP stub answers 406 for uploads, so the upload fails and the
    // job result must be Failed.
    buildWork = (BuildWork) getWork(willUpload("cruise-output/log.xml"), PIPELINE_NAME);
    artifactManipulator = new GoArtifactsManipulatorStub(new HttpServiceStub(SC_NOT_ACCEPTABLE));
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
    new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    String actual = artifactManipulator.consoleOut();
    File basedir = new File("pipelines/pipeline1");
    assertThat(actual, printedUploadingFailure(new File(basedir, "cruise-output/log.xml")));
    assertThat(buildRepository.results, containsResult(Failed));
}
@Test
// NOTE(review): the method name says "Failed" but the assertion expects Passed —
// the name looks like a typo; renaming would change reported test names, so only flagging.
public void shouldReportBuildIsFailedWhenAntBuildPassed() throws Exception {
    buildWork = (BuildWork) getWork(WILL_PASS, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
    new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(buildRepository.results, containsResult(Passed));
}
@Test
public void shouldSendAResultStatusToServerWhenAThrowableErrorIsThrown() throws Exception {
    // The plan throws an Error during preparation; the Error presumably propagates
    // out of doWork, so no result is reported and only Preparing was recorded.
    JobPlan jobPlan = mock(JobPlan.class);
    when(jobPlan.shouldFetchMaterials()).thenThrow(new AssertionError());
    when(jobPlan.getIdentifier()).thenReturn(JOB_IDENTIFIER);
    createBuildWorkWithJobPlan(jobPlan);
    try {
        buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
        fail("Should have thrown an assertion error");
    } catch (AssertionError e) {
        assertThat(buildRepository.results.isEmpty(), is(true));
        assertThat(buildRepository.states.size(), is(1));
        assertThat(buildRepository.states.get(0), is(JobState.Preparing));
    }
}
@Test
public void shouldSendAResultStatusToServerWhenAnExceptionIsThrown() throws Exception {
    // The plan throws a RuntimeException; doWork presumably handles it and reports
    // Failed (so doWork returns normally). The fail() call then raises an
    // AssertionError that the catch block below deliberately reuses to run the asserts.
    JobPlan jobPlan = mock(JobPlan.class);
    when(jobPlan.shouldFetchMaterials()).thenThrow(new RuntimeException());
    when(jobPlan.getIdentifier()).thenReturn(JOB_IDENTIFIER);
    createBuildWorkWithJobPlan(jobPlan);
    try {
        buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
        fail("Should have thrown an assertion error");
    } catch (AssertionError e) {
        assertThat(buildRepository.results.isEmpty(), is(false));
        assertThat(buildRepository.results.get(0), is(JobResult.Failed));
    }
}
@Test
public void shouldUpdateOnlyStatusWhenBuildIsIgnored() throws Exception {
    // With the repository stub configured to ignore the build, states are
    // still reported but no result is.
    buildWork = (BuildWork) getWork(WILL_PASS, "pipeline1");
    buildRepository = new com.thoughtworks.go.remote.work.BuildRepositoryRemoteStub(true);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(buildRepository.results.isEmpty(), is(true));
    assertThat(buildRepository.states, containsResult(JobState.Completed));
}
@Test
public void shouldUpdateBothStatusAndResultWhenBuildHasPassed() throws Exception {
    // A passing job reports both a Passed result and a Completed state.
    buildWork = (BuildWork) getWork(WILL_PASS, "pipeline1");
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(buildRepository.results, containsResult(JobResult.Passed));
    assertThat(buildRepository.states, containsResult(JobState.Completed));
}
@Test
@RunIf(value = EnhancedOSChecker.class, arguments = {DO_NOT_RUN_ON, WINDOWS})
// NOTE(review): "Comand" in the method name is a typo; left as-is to keep test names stable.
public void shouldReportErrorWhenComandIsNotExistOnLinux() throws Exception {
    // On non-Windows platforms, a missing exec command prints the Unix-style
    // "missing application" message and fails the job.
    buildWork = (BuildWork) getWork(CMD_NOT_EXIST, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
    new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(artifactManipulator.consoleOut(), printedAppsMissingInfoOnUnix(SOMETHING_NOT_EXIST));
    assertThat(buildRepository.results, containsResult(Failed));
}
@Test
@RunIf(value = EnhancedOSChecker.class, arguments = {EnhancedOSChecker.WINDOWS})
// NOTE(review): "Comand" in the method name is a typo; left as-is to keep test names stable.
public void shouldReportErrorWhenComandIsNotExistOnWindows() throws Exception {
    // Windows counterpart of the test above: missing command message differs per OS.
    buildWork = (BuildWork) getWork(CMD_NOT_EXIST, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator,
    environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(artifactManipulator.consoleOut(), printedAppsMissingInfoOnWindows(SOMETHING_NOT_EXIST));
    assertThat(buildRepository.results, containsResult(Failed));
}
@Test
public void shouldReportConsoleout() throws Exception {
    // The console output of a failing job should carry the preparing/building/
    // uploading banners for the job locator plus the failure line.
    buildWork = (BuildWork) getWork(WILL_FAIL, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    String consoleOutAsString = artifactManipulator.consoleOut();
    String locator = JOB_IDENTIFIER.buildLocator();
    assertThat(consoleOutAsString, printedPreparingInfo(locator));
    assertThat(consoleOutAsString, printedBuildingInfo(locator));
    assertThat(consoleOutAsString, printedUploadingInfo(locator));
    assertThat(consoleOutAsString, printedBuildFailed());
}
@Test
@RunIf(value = EnhancedOSChecker.class, arguments = {EnhancedOSChecker.WINDOWS})
public void nantTest() throws Exception {
    // Smoke test: requires a real NAnt on the Windows agent's PATH.
    buildWork = (BuildWork) getWork(NANT, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator,
    environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(artifactManipulator.consoleOut(), containsString("Usage : NAnt [options] <target> <target> ..."));
}
@Test
public void rakeTest() throws Exception {
    // Smoke test: requires a real rake on the agent's PATH.
    buildWork = (BuildWork) getWork(RAKE, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator,
    environmentVariableContext, new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false), packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(artifactManipulator.consoleOut(), containsString("rake [-f rakefile] {options} targets..."));
}
@Test
public void doWork_shouldSkipMaterialUpdateWhenFetchMaterialsIsSetToFalse() throws Exception {
    // fetchMaterials=false: the job must prepare but skip the material update.
    buildWork = (BuildWork) getWork(WILL_PASS, PIPELINE_NAME, false, false);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
            new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false),
            packageAsRepositoryExtension, scmExtension, taskExtension);
    final String console = artifactManipulator.consoleOut();
    assertThat(console, containsString("Start to prepare"));
    assertThat(console, not(containsString("Start updating")));
    assertThat(console, containsString("Skipping material update since stage is configured not to fetch materials"));
    assertThat(buildRepository.states.contains(JobState.Preparing), is(true));
}
@Test
public void doWork_shouldUpdateMaterialsWhenFetchMaterialsIsTrue() throws Exception {
    // fetchMaterials=true: the job must prepare and update materials.
    buildWork = (BuildWork) getWork(WILL_PASS, PIPELINE_NAME, true, false);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
            new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false),
            packageAsRepositoryExtension, scmExtension, taskExtension);
    final String console = artifactManipulator.consoleOut();
    assertThat(console, containsString("Start to prepare"));
    assertThat(buildRepository.states.contains(JobState.Preparing), is(true));
    assertThat(console, containsString("Start to update materials"));
}
@Test
public void shouldCreateAgentWorkingDirectoryIfNotExist() throws Exception {
    // A random pipeline name guarantees the working directory does not pre-exist.
    final String pipelineName = "pipeline" + UUID.randomUUID();
    final File workDir = new File("pipelines/" + pipelineName);
    if (workDir.exists()) {
        FileUtils.deleteDirectory(workDir);
    }
    assertThat(workDir.exists(), is(false));
    buildWork = (BuildWork) getWork(WILL_PASS, pipelineName);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
            new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false),
            packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(artifactManipulator.consoleOut(),
            not(containsString("Working directory \"" + workDir.getAbsolutePath() + "\" is not a directory")));
    assertThat(buildRepository.results.contains(Passed), is(true));
    assertThat(workDir.exists(), is(true));
    FileUtils.deleteDirectory(workDir);
}
@Test
public void shouldNotBombWhenCreatingWorkingDirectoryIfCleanWorkingDirectoryFlagIsTrue() throws Exception {
    // A random pipeline name guarantees the working directory does not pre-exist,
    // so cleanWorkingDir=true must not fail on a missing directory.
    String pipelineName = "pipeline" + UUID.randomUUID();
    File workingdir = new File("pipelines/" + pipelineName);
    if (workingdir.exists()) {
        FileUtils.deleteDirectory(workingdir);
    }
    assertThat(workingdir.exists(), is(false));
    buildWork = (BuildWork) getWork(WILL_PASS, pipelineName, true, true);
    buildWork.doWork(agentIdentifier,
            buildRepository, artifactManipulator, environmentVariableContext,
            new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false),
            packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(artifactManipulator.consoleOut(),
            not(containsString("Working directory \"" + workingdir.getAbsolutePath() + "\" is not a directory")));
    assertThat(buildRepository.results.contains(Passed), is(true));
    assertThat(workingdir.exists(), is(true));
    // Clean up the directory this test created, matching the sibling tests
    // (the original leaked it onto disk between runs).
    FileUtils.deleteDirectory(workingdir);
}
@Test
public void shouldCreateAgentWorkingDirectoryIfNotExistWhenFetchMaterialsIsFalse() throws Exception {
    // Even with fetchMaterials=false the agent must create a missing working directory.
    String pipelineName = "pipeline" + UUID.randomUUID();
    File workingdir = new File("pipelines/" + pipelineName);
    if (workingdir.exists()) {
        FileUtils.deleteDirectory(workingdir);
    }
    assertThat(workingdir.exists(), is(false));
    buildWork = (BuildWork) getWork(WILL_PASS, pipelineName, false, false);
    buildWork.doWork(agentIdentifier,
            buildRepository, artifactManipulator, environmentVariableContext,
            new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false),
            packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(artifactManipulator.consoleOut(),
            not(containsString("Working directory \"" + workingdir.getAbsolutePath() + "\" is not a directory")));
    assertThat(buildRepository.results.contains(Passed), is(true));
    assertThat(workingdir.exists(), is(true));
    // Clean up the directory this test created, matching the sibling tests
    // (the original leaked it onto disk between runs).
    FileUtils.deleteDirectory(workingdir);
}
@Test
public void shouldCleanAgentWorkingDirectoryIfExistsWhenCleanWorkingDirIsTrue() throws Exception {
    // Pre-populate the working directory, then verify cleanWorkingDir=true wipes it.
    String pipelineName = "pipeline" + UUID.randomUUID();
    File workingdir = new File("pipelines/" + pipelineName);
    if (workingdir.exists()) {
        FileUtils.deleteDirectory(workingdir);
    }
    workingdir.mkdirs();
    createDummyFilesAndDirectories(workingdir);
    assertThat(workingdir.listFiles().length, is(2));
    buildWork = (BuildWork) getWork(WILL_PASS, pipelineName, false, true);
    buildWork.doWork(agentIdentifier,
            buildRepository, artifactManipulator, environmentVariableContext,
            new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false),
            packageAsRepositoryExtension, scmExtension, taskExtension);
    assertThat(artifactManipulator.consoleOut(), containsString("Cleaning working directory \"" + workingdir.getAbsolutePath()));
    assertThat(buildRepository.results.contains(Passed), is(true));
    assertThat(workingdir.exists(), is(true));
    assertThat(workingdir.listFiles().length, is(1));
    // Clean up the directory this test created, matching the sibling tests
    // (the original leaked it onto disk between runs).
    FileUtils.deleteDirectory(workingdir);
}
@Test
public void shouldReportCurrentWorkingDirectory() throws Exception {
    // The agent's current working directory must appear (bracketed) in the console log.
    buildWork = (BuildWork) getWork(WILL_PASS, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
            new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false),
            packageAsRepositoryExtension, scmExtension, taskExtension);
    final String expected = "[" + SystemUtil.currentWorkingDirectory() + "]";
    assertThat(artifactManipulator.consoleOut(), containsString(expected));
}
/**
 * Populates the working directory with 2 sub-directories of 10 files each, so
 * clean-working-directory behaviour has real content to delete.
 *
 * @param workingdir directory to populate (must already exist)
 * @throws java.io.IOException if a dummy file cannot be created
 */
private void createDummyFilesAndDirectories(File workingdir) throws java.io.IOException {
    for (int i = 0; i < 2; i++) {
        File directory = new File(workingdir.getPath() + "/dir" + i);
        directory.mkdir();
        for (int j = 0; j < 10; j++) {
            // Bug fix: the original constructed new File(... "/file" + i) — using the
            // outer index and never calling createNewFile() — so no files were ever
            // written to disk.
            new File(directory.getPath() + "/file" + j).createNewFile();
        }
    }
}
/**
 * Convenience overload of {@link #getWork(String, String, boolean, boolean)}
 * that fetches materials and does not clean the working directory.
 */
public static Work getWork(String jobXml, String pipelineName) throws Exception {
    return getWork(jobXml, pipelineName, true, false);
}
/**
 * Builds a {@code BuildWork} for a job parsed out of a config fixture.
 */
private static Work getWork(String jobXml, String pipelineName, boolean fetchMaterials, boolean cleanWorkingDir) throws Exception {
    // Load a config holder from the XML fixture containing the requested job.
    final CruiseConfig config = new MagicalGoConfigXmlLoader(new ConfigCache(), ConfigElementImplementationRegistryMother.withNoPlugins())
            .loadConfigHolder(FileUtil.readToEnd(IOUtils.toInputStream(ConfigFileFixture.withJob(jobXml, pipelineName)))).config;
    final JobConfig jobConfig = config.jobConfigByName(pipelineName, STAGE_NAME, JOB_PLAN_NAME, true);
    // Build a job plan with the requested fetch/clean flags.
    final JobIdentifier jobIdentifier = new JobIdentifier(pipelineName, -2, PIPELINE_LABEL, STAGE_NAME, String.valueOf(STAGE_COUNTER), JOB_PLAN_NAME, 0L);
    final JobPlan plan = JobInstanceMother.createJobPlan(jobConfig, jobIdentifier, new DefaultSchedulingContext());
    plan.setFetchMaterials(fetchMaterials);
    plan.setCleanWorkingDir(cleanWorkingDir);
    // Assemble a pipeline around a single custom stage/job instance.
    final Stage stage = StageMother.custom(STAGE_NAME, new JobInstance(JOB_PLAN_NAME));
    final Pipeline pipeline = new Pipeline(pipelineName, BuildCause.createWithEmptyModifications(), stage);
    pipeline.setLabel(PIPELINE_LABEL);
    final List<Builder> builders = builderFactory.buildersForTasks(pipeline, jobConfig.getTasks(), resolver);
    final BuildAssignment assignment = BuildAssignment.create(plan,
            BuildCause.createWithEmptyModifications(),
            builders, pipeline.defaultWorkingFolder());
    return new BuildWork(assignment);
}
/**
 * Initialises {@code buildWork} from the CMD_NOT_EXIST fixture using the
 * caller-supplied job plan.
 */
private void createBuildWorkWithJobPlan(JobPlan jobPlan) throws Exception {
    final CruiseConfig config = new MagicalGoConfigXmlLoader(new ConfigCache(), ConfigElementImplementationRegistryMother.withNoPlugins())
            .loadConfigHolder(FileUtil.readToEnd(IOUtils.toInputStream(ConfigFileFixture.withJob(CMD_NOT_EXIST)))).config;
    final JobConfig jobConfig = config.jobConfigByName(PIPELINE_NAME, STAGE_NAME, JOB_PLAN_NAME, true);
    final Stage stage = StageMother.custom(STAGE_NAME, new JobInstance(JOB_PLAN_NAME));
    final Pipeline pipeline = new Pipeline(PIPELINE_NAME, BuildCause.createWithEmptyModifications(), stage);
    final List<Builder> builders = builderFactory.buildersForTasks(pipeline, jobConfig.getTasks(), resolver);
    final BuildAssignment assignment = BuildAssignment.create(jobPlan,
            BuildCause.createWithEmptyModifications(),
            builders, pipeline.defaultWorkingFolder());
    buildWork = new BuildWork(assignment);
}
@Test
public void shouldReportEnvironmentVariables() throws Exception {
    buildWork = (BuildWork) getWork(WITH_ENV_VAR, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
            new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false),
            packageAsRepositoryExtension, scmExtension, taskExtension);
    final String console = artifactManipulator.consoleOut();
    // All standard GO_* variables must be reported in the console log.
    assertThat(console, matches("'GO_SERVER_URL' (to|with) value '" + SERVER_URL));
    assertThat(console, matches("'GO_PIPELINE_LABEL' (to|with) value '" + PIPELINE_LABEL));
    assertThat(console, matches("'GO_PIPELINE_NAME' (to|with) value '" + PIPELINE_NAME));
    assertThat(console, matches("'GO_STAGE_NAME' (to|with) value '" + STAGE_NAME));
    assertThat(console, matches("'GO_STAGE_COUNTER' (to|with) value '" + STAGE_COUNTER));
    assertThat(console, matches("'GO_JOB_NAME' (to|with) value '" + JOB_PLAN_NAME));
    assertThat(console, containsString("[go] setting environment variable 'JOB_ENV' to value 'foobar'"));
    // The PATH variable is spelled 'Path' on Windows.
    final String pathVariableName = isWindows() ? "Path" : "PATH";
    assertThat(console, containsString("[go] overriding environment variable '" + pathVariableName + "' with value '/tmp'"));
}
@Test
public void shouldMaskSecretInEnvironmentVarialbeReport() throws Exception {
    // (Method-name typo "Varialbe" kept: renaming would churn recorded test results.)
    buildWork = (BuildWork) getWork(WITH_SECRET_ENV_VAR, PIPELINE_NAME);
    buildWork.doWork(agentIdentifier, buildRepository, artifactManipulator, environmentVariableContext,
            new AgentRuntimeInfo(agentIdentifier, AgentRuntimeStatus.Idle, currentWorkingDirectory(), "cookie", false),
            packageAsRepositoryExtension, scmExtension, taskExtension);
    final String console = artifactManipulator.consoleOut();
    // Secret values must be masked in the log and never appear in clear text.
    assertThat(console, containsString("[go] setting environment variable 'foo' to value 'foo(******)'"));
    assertThat(console, containsString("[go] setting environment variable 'bar' to value '********'"));
    assertThat(console, not(containsString("i am a secret")));
}
@Test
public void encodeAndDecodeBuildWorkAsMessageData() throws Exception {
    // Round-tripping through the message encoding must preserve equality.
    final Work original = getWork(WILL_FAIL, PIPELINE_NAME);
    final Work roundTripped = MessageEncoding.decodeWork(MessageEncoding.encodeWork(original));
    assertThat(roundTripped, is(original));
}
}
| |
/*
* Copyright 2016 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stroom.receive;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.security.Principal;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Vector;
import javax.servlet.AsyncContext;
import javax.servlet.DispatcherType;
import javax.servlet.ReadListener;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpUpgradeHandler;
import javax.servlet.http.Part;
public class MockHttpServletRequest implements HttpServletRequest {
private Map<String, String> headers = new HashMap<>();
private byte[] inputStreamData;
private InputStream inputStream;
private String queryString;
public MockHttpServletRequest() {
}
public void resetMock() {
headers = new HashMap<>();
inputStream = null;
inputStreamData = null;
queryString = null;
}
public void addHeader(final String key, final String value) {
headers.put(key, value);
}
public void setInputStream(final byte[] buffer) {
inputStreamData = buffer;
}
@Override
public String getAuthType() {
return null;
}
@Override
public String getContextPath() {
return null;
}
@Override
public Cookie[] getCookies() {
return null;
}
@Override
public long getDateHeader(final String arg0) {
return 0;
}
@Override
public String getHeader(final String arg0) {
return headers.get(arg0);
}
@SuppressWarnings("rawtypes")
@Override
public Enumeration getHeaderNames() {
final Vector<String> keys = new Vector<>();
keys.addAll(headers.keySet());
return keys.elements();
}
@SuppressWarnings("rawtypes")
@Override
public Enumeration getHeaders(final String arg0) {
return null;
}
@Override
public int getIntHeader(final String arg0) {
return 0;
}
@Override
public String getMethod() {
return null;
}
@Override
public String getPathInfo() {
return null;
}
@Override
public String getPathTranslated() {
return null;
}
@Override
public String getQueryString() {
return queryString;
}
public void setQueryString(final String str) {
queryString = str;
}
@Override
public String getRemoteUser() {
return null;
}
@Override
public String getRequestURI() {
return null;
}
@Override
public StringBuffer getRequestURL() {
return null;
}
@Override
public String getRequestedSessionId() {
return null;
}
@Override
public String getServletPath() {
return null;
}
@Override
public HttpSession getSession() {
return null;
}
@Override
public String changeSessionId() {
return null;
}
@Override
public HttpSession getSession(final boolean arg0) {
return null;
}
@Override
public Principal getUserPrincipal() {
return null;
}
@Override
public boolean isRequestedSessionIdFromCookie() {
return false;
}
@Override
public boolean isRequestedSessionIdFromURL() {
return false;
}
@Override
public boolean isRequestedSessionIdFromUrl() {
return false;
}
@Override
public boolean authenticate(final HttpServletResponse response) throws IOException, ServletException {
return false;
}
@Override
public void login(final String username, final String password) throws ServletException {
}
@Override
public void logout() throws ServletException {
}
@Override
public Collection<Part> getParts() throws IOException, ServletException {
return null;
}
@Override
public Part getPart(final String name) throws IOException, ServletException {
return null;
}
@Override
public <T extends HttpUpgradeHandler> T upgrade(final Class<T> handlerClass) throws IOException, ServletException {
return null;
}
@Override
public boolean isRequestedSessionIdValid() {
return false;
}
@Override
public boolean isUserInRole(final String arg0) {
return false;
}
@Override
public Object getAttribute(final String arg0) {
return null;
}
@SuppressWarnings("rawtypes")
@Override
public Enumeration getAttributeNames() {
return new Vector<String>().elements();
}
@Override
public String getCharacterEncoding() {
return null;
}
@Override
public void setCharacterEncoding(final String arg0) throws UnsupportedEncodingException {
}
@Override
public int getContentLength() {
return 0;
}
@Override
public long getContentLengthLong() {
return 0;
}
@Override
public String getContentType() {
return null;
}
@Override
public ServletInputStream getInputStream() throws IOException {
if (inputStreamData != null) {
return new ServletInputStreamImpl(new ByteArrayInputStream(inputStreamData));
}
if (inputStream != null) {
return new ServletInputStreamImpl(inputStream);
}
return null;
}
public void setInputStream(final InputStream inputStream) {
this.inputStream = inputStream;
}
@Override
public String getLocalAddr() {
return null;
}
@Override
public String getLocalName() {
return null;
}
@Override
public int getLocalPort() {
return 0;
}
@Override
public ServletContext getServletContext() {
return null;
}
@Override
public AsyncContext startAsync() throws IllegalStateException {
return null;
}
@Override
public AsyncContext startAsync(final ServletRequest servletRequest, final ServletResponse servletResponse)
throws IllegalStateException {
return null;
}
@Override
public boolean isAsyncStarted() {
return false;
}
@Override
public boolean isAsyncSupported() {
return false;
}
@Override
public AsyncContext getAsyncContext() {
return null;
}
@Override
public DispatcherType getDispatcherType() {
return null;
}
@Override
public Locale getLocale() {
return null;
}
@SuppressWarnings("rawtypes")
@Override
public Enumeration getLocales() {
return null;
}
@Override
public String getParameter(final String arg0) {
return null;
}
@SuppressWarnings("rawtypes")
@Override
public Map getParameterMap() {
return null;
}
@SuppressWarnings("rawtypes")
@Override
public Enumeration getParameterNames() {
return new Vector<String>().elements();
}
@Override
public String[] getParameterValues(final String arg0) {
return null;
}
@Override
public String getProtocol() {
return null;
}
@Override
public BufferedReader getReader() throws IOException {
return null;
}
@Override
public String getRealPath(final String arg0) {
return null;
}
@Override
public String getRemoteAddr() {
return null;
}
@Override
public String getRemoteHost() {
return null;
}
@Override
public int getRemotePort() {
return 0;
}
@Override
public RequestDispatcher getRequestDispatcher(final String arg0) {
return null;
}
@Override
public String getScheme() {
return null;
}
@Override
public String getServerName() {
return null;
}
@Override
public int getServerPort() {
return 0;
}
@Override
public boolean isSecure() {
return false;
}
@Override
public void removeAttribute(final String arg0) {
}
@Override
public void setAttribute(final String arg0, final Object arg1) {
}
public static class ServletInputStreamImpl extends ServletInputStream {
private final InputStream inputStream;
public ServletInputStreamImpl(final InputStream inputStream) {
this.inputStream = inputStream;
}
@Override
public int available() throws IOException {
return inputStream.available();
}
@Override
public void close() throws IOException {
inputStream.close();
}
@Override
public synchronized void mark(final int readlimit) {
inputStream.mark(readlimit);
}
@Override
public boolean markSupported() {
return inputStream.markSupported();
}
@Override
public int read() throws IOException {
return inputStream.read();
}
@Override
public int read(final byte[] b, final int off, final int len) throws IOException {
return inputStream.read(b, off, len);
}
@Override
public int read(final byte[] b) throws IOException {
return inputStream.read(b);
}
@Override
public synchronized void reset() throws IOException {
inputStream.reset();
}
@Override
public long skip(final long n) throws IOException {
return inputStream.skip(n);
}
@Override
public boolean equals(final Object obj) {
return inputStream.equals(obj);
}
@Override
public int hashCode() {
return inputStream.hashCode();
}
@Override
public String toString() {
return inputStream.toString();
}
@Override
public boolean isFinished() {
return false;
}
@Override
public boolean isReady() {
return false;
}
@Override
public void setReadListener(final ReadListener readListener) {
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.olio.webapp.util;
import org.apache.olio.webapp.*;
import org.apache.olio.webapp.util.geocoder.ResultSet;
import org.apache.olio.webapp.util.geocoder.ResultType;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import org.w3c.dom.Document;
/**
* <p>Service object that interacts with the Yahoo GeoCoding service. For
* information on the relevant APIs, see <a href="http://developer.yahoo.net/maps/rest/V1/geocode.html">
* http://developer.yahoo.net/maps/rest/V1/geocode.html</a>.</p>
*/
public class GeoCoder {

    private String applicationId = APPLICATION_ID;

    private static Logger logger = Logger.getLogger(GeoCoder.class.getName());

    private String proxyHost = null;
    private int proxyPort = 0;

    /** True once the http.proxy* system properties reflect proxyHost/proxyPort. */
    private boolean proxySet = false;

    private static JAXBContext jaxbContext;

    /**
     * <p>The URL of the geocoding service we will be using.</p>
     */
    private static String SERVICE_URL;

    static {
        // Allow an override via system property for portability while testing
        // on multiple machines.
        SERVICE_URL = System.getProperty("webapp.geocoderURL");
        if (SERVICE_URL == null)
            SERVICE_URL = ServiceLocator.getInstance().getString("geocoderURL");
        logger.finer("geocoder ServiceURL = " + SERVICE_URL);
        // Fall back to default
        if (SERVICE_URL == null)
            SERVICE_URL = "http://localhost:8000/Web20Emulator/geocode";
    }

    // ------------------------------------------------------ Manifest Constants

    /**
     * <p>The default application identifier required by the geocoding
     * service. This may be overridden by setting the <code>applicationId</code>
     * property.</p>
     */
    static final String APPLICATION_ID =
            "org.apache.olio.components.ui.geocoder";

    // JAXB Unmarshallers are not thread-safe, so cache one per thread.
    private static ThreadLocal<Unmarshaller> unmarshallerTL = new ThreadLocal<Unmarshaller>();

    public GeoCoder() {
    }

    /** Lazily creates the shared JAXBContext; synchronized for thread safety. */
    private static synchronized JAXBContext getJAXBContext() {
        if (jaxbContext != null)
            return jaxbContext;
        try {
            jaxbContext = JAXBContext.newInstance("org.apache.olio.webapp.util.geocoder");
        } catch (JAXBException ex) {
            Logger.getLogger(GeoCoder.class.getName()).log(Level.SEVERE, null, ex);
        }
        return jaxbContext;
    }

    /**
     * Returns this thread's cached {@link Unmarshaller}, creating and caching
     * one on first use; returns {@code null} only if creation fails.
     */
    private Unmarshaller getUnmarshaller() {
        Unmarshaller u = unmarshallerTL.get();
        if (u != null) {
            // Bug fix: the original fell through to "return null" whenever a
            // cached unmarshaller existed, so every call after the first failed.
            return u;
        }
        try {
            JAXBContext ctx = getJAXBContext();
            if (ctx == null) {
                throw new RuntimeException("JAXBContext is null -- cannot process gecode information");
            }
            u = ctx.createUnmarshaller();
            unmarshallerTL.set(u);
            return u;
        } catch (JAXBException ex) {
            Logger.getLogger(GeoCoder.class.getName()).log(Level.SEVERE, null, ex);
        }
        return null;
    }

    // -------------------------------------------------------------- Properties

    /**
     * <p>Return the application identifier to be passed to the geocoding
     * service.</p>
     */
    public String getApplicationId() {
        return this.applicationId;
    }

    /**
     * <p>Set the application identifier to be passed to the geocoding
     * service.</p>
     *
     * @param applicationId The new application identifier
     */
    public void setApplicationId(String applicationId) {
        if (applicationId == null) {
            throw new NullPointerException();
        }
        this.applicationId = applicationId;
    }

    /**
     * <p>Return the proxy host to use for network connections, or <code>null</code>
     * if the default proxy host for the application server's JVM should be
     * used instead.</p>
     */
    public String getProxyHost() {
        return this.proxyHost;
    }

    /**
     * Set the proxy host to use for network connections, or <code>null</code>
     * to use the default proxy host for the application server's JVM.
     *
     * @param proxyHost The new proxy host
     */
    public void setProxyHost(String proxyHost) {
        this.proxyHost = proxyHost;
        this.proxySet = false; // force re-application on next geoCode()
    }

    /**
     * <p>Return the proxy port to use for network connections, or <code>0</code>
     * if the default proxy port for the application server's JVM should be
     * used instead.</p>
     */
    public int getProxyPort() {
        return this.proxyPort;
    }

    /**
     * Set the proxy port to use for network connections, or <code>0</code>
     * to use the default proxy port for the application server's JVM.
     *
     * @param proxyPort The new proxy port
     */
    public void setProxyPort(int proxyPort) {
        this.proxyPort = proxyPort;
        this.proxySet = false; // force re-application on next geoCode()
    }

    // ---------------------------------------------------------- Public Methods

    /**
     * <p>Return an array of zero or more {@link GeoPoint} instances for results
     * that match a search for the specified location string. This string can
     * be formatted in any of the following ways:</p>
     * <ul>
     * <li>city, state</li>
     * <li>city, state, zip</li>
     * <li>zip</li>
     * <li>street, city, state</li>
     * <li>street, city, state, zip</li>
     * <li>street, zip</li>
     * </ul>
     *
     * @param location Location string to search for
     *
     * @exception IllegalArgumentException if <code>location</code> does not
     *  conform to one of the specified patterns
     * @exception NullPointerException if <code>location</code> is <code>null</code>
     */
    public GeoPoint[] geoCode(String location) {
        // Bail out immediately if no location was specified
        if (location == null) {
            return null;
        }

        // Apply the proxy configuration once, then remember it is in place.
        if (!proxySet) {
            setProxyConfiguration();
            // Bug fix: the original reset this to false, re-applying the
            // system properties on every call.
            proxySet = true;
        }

        // URL encode the application id
        String applicationId = getApplicationId();
        try {
            applicationId = URLEncoder.encode(applicationId, "ISO-8859-1");
        } catch (UnsupportedEncodingException e) {
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "geoCoder.encodeApplicationId", e);
            }
            throw new IllegalArgumentException(e.getMessage());
        }

        // NOTE: location is deliberately NOT URL-encoded here — the query string
        // is already constructed upstream (WebappUtil.handleAddress).
        StringBuilder sb = new StringBuilder(SERVICE_URL);
        sb.append("?appid=");
        sb.append(applicationId);
        sb.append(location);

        // Use the thread-local JAXB unmarshaller for performance
        Unmarshaller u = getUnmarshaller();
        if (u == null)
            return null;
        try {
            URL url = new URL(sb.toString());
            InputStream in = url.openStream();
            try {
                ResultSet rs = (ResultSet) u.unmarshal(in);
                List<ResultType> list = rs.getResult();
                // Map each service result onto a GeoPoint
                GeoPoint[] gps = new GeoPoint[list.size()];
                int i = 0;
                for (ResultType r : list) {
                    GeoPoint gp = new GeoPoint();
                    gp.setAddress(r.getAddress());
                    gp.setLatitude(r.getLatitude().doubleValue());
                    gp.setLongitude(r.getLongitude().doubleValue());
                    gp.setCity(r.getCity());
                    gp.setState(r.getState());
                    gp.setZip(r.getZip());
                    gp.setCountry(r.getCountry());
                    gps[i++] = gp;
                }
                return gps;
            } finally {
                in.close(); // bug fix: the response stream was never closed
            }
        } catch (JAXBException ex) {
            Logger.getLogger(GeoCoder.class.getName()).log(Level.SEVERE, null, ex);
        } catch (MalformedURLException ex) {
            Logger.getLogger(GeoCoder.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(GeoCoder.class.getName()).log(Level.SEVERE, null, ex);
        }
        return null;
    }

    /**
     * <p>Attempt to set the system properties related to the HTTP proxy host
     * and port to be used, but swallow security exceptions if the security
     * policy that our container is running under forbids this. In a JDK 1.5
     * environment, we'll be able to use the <code>java.net.Proxy</code> class
     * and deal with this on a per-connection basis. Until then, oh well.</p>
     */
    private synchronized void setProxyConfiguration() {
        // NOTE - the system properties API gives no way to unset properties
        // after they have been set. Therefore, only attempt to set things
        // if we have values for both proxyHost and proxyPort
        if ((proxyHost == null) || (proxyPort == 0)) {
            return;
        }

        // Log and swallow any security exception that occurs when attempting
        // to set these system properties. The subsequent connection failure
        // will be ugly enough
        try {
            System.setProperty("http.proxyHost", proxyHost);
            System.setProperty("http.proxyPort", "" + proxyPort);
        } catch (SecurityException e) {
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "geoCoder.setProxy", e);
            }
        }
    }
}
| |
/*
* Copyright (C) 2012-2020 Gregory Hedlund <https://www.phon.ca>
* Copyright (C) 2012 Jason Gedge <http://www.gedge.ca>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ca.phon.opgraph.app.components;
import java.awt.*;
import java.awt.event.*;
import java.awt.geom.*;
import java.awt.image.*;
import javax.swing.*;
import javax.swing.border.*;
/**
* A search field with optional context button. The field displays a prompt
* when the text field text is empty.
*/
public class SearchField extends JTextField {
/** Search icon size */
private static final int ICON_SIZE = 16;
/** Popup menu used for context item */
private JPopupMenu contextPopup;
/**
 * Text field state: determines the foreground colour of the field's text.
 */
public static enum FieldState {
    /** Undefined state */
    UNDEFINED(Color.RED),
    /** Prompt (no input) */
    PROMPT(Color.GRAY),
    /** Regular input state */
    INPUT(SystemColor.textText);

    // Foreground colour painted while the field is in this state.
    private Color color;

    FieldState(Color color) {
        this.color = color;
    }

    /**
     * Gets the color used for this state.
     *
     * @return the color
     */
    public Color getColor() {
        return color;
    }
};

// Client-property key under which the current FieldState is stored.
private final static String STATE_PROPERTY = "_search_field_state_";

/**
 * Current state
 */
private FieldState fieldState = FieldState.UNDEFINED;

/**
 * Search field prompt
 */
private String prompt;

/**
 * Search context button
 */
private SearchFieldButton ctxButton;
// NOTE(review): presumably the trailing "clear" button — confirm in init().
private SearchFieldButton endButton;
/**
 * Constructs a search field with a default prompt.
 */
public SearchField() {
    // Delegate to the main constructor with the default "Search" prompt.
    this("Search");
}
/**
 * Constructs a search field with a specified prompt.
 *
 * @param prompt the prompt text shown while the field has no input
 */
public SearchField(String prompt) {
    // init() is called before the prompt is assigned and the state is set;
    // NOTE(review): ordering looks deliberate — confirm against init()/setState().
    init();
    this.prompt = prompt;
    setState(FieldState.PROMPT);
    // focusStateListener is declared elsewhere in this class; presumably it
    // switches between PROMPT and INPUT state on focus changes — confirm.
    addFocusListener(focusStateListener);
}
@Override
public Dimension getPreferredSize() {
    // Enforce a minimum height of 25px so the field never collapses below the icons.
    final Dimension size = super.getPreferredSize();
    if (size.height < 25) {
        size.height = 25;
    }
    return size;
}
// Cached "clear" icon; rendered lazily on first request.
private BufferedImage clearIcn = null;

/**
 * Lazily renders the 16x16 "clear" icon — a gray circle with a white 'X' —
 * caching the image after the first call.
 *
 * @return the cached clear icon
 */
private BufferedImage createClearIcon() {
    if(clearIcn == null) {
        clearIcn = new BufferedImage(ICON_SIZE, ICON_SIZE, BufferedImage.TYPE_INT_ARGB);
        final Graphics2D g2d = (Graphics2D)clearIcn.getGraphics();
        try {
            g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
            // Filled background circle in the prompt (gray) colour
            final Ellipse2D circle = new Ellipse2D.Float(2, 2, ICON_SIZE - 2, ICON_SIZE - 2);
            g2d.setColor(FieldState.PROMPT.getColor());
            g2d.fill(circle);
            // White 'X' on top
            g2d.setStroke(new BasicStroke(2.0f, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND));
            g2d.setColor(Color.WHITE);
            g2d.drawLine(6, 6, ICON_SIZE - 5, ICON_SIZE - 5);
            g2d.drawLine(ICON_SIZE - 5, 6, 6, ICON_SIZE - 5);
        } finally {
            // Bug fix: the Graphics2D obtained from getGraphics() was never
            // disposed, leaking a native graphics context per icon.
            g2d.dispose();
        }
    }
    return clearIcn;
}
private BufferedImage searchIcn = null;
private BufferedImage createSearchIcon() {
if(searchIcn == null) {
BufferedImage retVal = new BufferedImage(ICON_SIZE + 8, ICON_SIZE,
BufferedImage.TYPE_INT_ARGB);
Graphics2D g2d = (Graphics2D)retVal.getGraphics();
g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
Ellipse2D circle = new Ellipse2D.Float(2, 2,
10, 10);
Line2D stem = new Line2D.Float(11, 11, ICON_SIZE - 2, ICON_SIZE - 2);
Polygon tri = new Polygon();
tri.addPoint(16, 8);
tri.addPoint(24, 8);
tri.addPoint(20, 12);
// Line2D triA = new Line2D.Float(14.0f, 9.0f, 17.0f, 9.0f);
// Line2D triB = new Line2D.Float(17.0f, 9.0f, 15.5f, 11.0f);
// Line2D triC = new Line2D.Float(15.5f, 11.0f, 14.0f, 9.0f);
Stroke s = new BasicStroke(2.3f, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND);
g2d.setStroke(s);
g2d.setColor(FieldState.PROMPT.getColor());
g2d.draw(circle);
g2d.draw(stem);
g2d.fillPolygon(tri);
// s = new BasicStroke(0.5f);
// g2d.setStroke(s);
//
// g2d.draw(triA);
// g2d.draw(triB);
// g2d.draw(triC);
searchIcn = retVal;
}
return searchIcn;
}
private void init() {
// load search icon
searchIcn = createSearchIcon();
final int borderInset = 10;
ctxButton = new SearchFieldButton(SwingConstants.LEFT, createSearchIcon());
ctxButton.setAction(new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
if(contextPopup != null) {
final int x = ctxButton.getX();
final int y = ctxButton.getY() + ctxButton.getHeight();
contextPopup.show(SearchField.this, x, y);
}
}
});
ctxButton.setCursor(Cursor.getDefaultCursor());
super.addComponentListener(new ComponentAdapter() {
@Override
public void componentResized(ComponentEvent e) {
ctxButton.setBounds(0, 0, searchIcn.getWidth() + borderInset, getHeight());
endButton.setBounds(getWidth() - (ICON_SIZE + borderInset), 0, ICON_SIZE + borderInset, getHeight());
}
});
add(ctxButton);
endButton = new SearchFieldButton(SwingConstants.RIGHT, null);
endButton.setAction(new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
setText("");
}
});
endButton.setCursor(Cursor.getDefaultCursor());
add(endButton);
// setup an empty border allowing for the
// extra space needed for drawing
final int leftSpace = searchIcn.getWidth() + borderInset;
final int rightSpace = ICON_SIZE + borderInset;
final int topSpace = 0;
final int btmSpace = 0;
final Border emptyBorder = BorderFactory.createEmptyBorder(topSpace, leftSpace, btmSpace, rightSpace);
final Border matteBorder = BorderFactory.createMatteBorder(1, 0, 1, 0, FieldState.PROMPT.getColor());
setBorder(BorderFactory.createCompoundBorder(emptyBorder, matteBorder));
setBackground(Color.WHITE);
setOpaque(false);
}
/**
* Sets the popup menu used for the context button.
*
* @param contextPopup the popup menu, or <code>null</code> if no popup
* should be used
*/
public void setContextPopup(JPopupMenu contextPopup) {
this.contextPopup = contextPopup;
}
/**
* Gets the popup menu used for the context button.
*
* @return the popup menu, or <code>null</code> if one is not set
*/
public JPopupMenu getContextPopup() {
return contextPopup;
}
@Override
public String getText() {
String retVal = super.getText();
if(this.fieldState == FieldState.PROMPT) {
retVal = "";
}
return retVal;
}
@Override
public void setText(String s) {
if(s == null) s = "";
super.setText(s);
if(hasFocus() && s.length() > 0) {
setState(FieldState.INPUT);
} else if(s.length() == 0) {
setState(FieldState.PROMPT);
}
}
/**
* Gets the prompt text for the search field.
*
* @return the prompt text
*/
public String getPrompt() {
return prompt;
}
/**
* Sets the prompt text for the search field.
*
* @param prompt the prompt text
*/
public void setPrompt(String prompt) {
this.prompt = prompt;
if(getState() == FieldState.PROMPT)
super.setText(prompt);
}
/**
* Set state of field
*
* @param state the new state for this field
*/
public void setState(FieldState state) {
if(this.fieldState == state) return;
FieldState oldState = this.fieldState;
this.fieldState = state;
if(this.fieldState == FieldState.PROMPT) {
if(oldState == FieldState.INPUT && super.getText().length() > 0)
throw new IllegalStateException("Cannot set state to PROMPT when field has input.");
super.setForeground(this.fieldState.getColor());
super.setText(prompt);
endButton.setIcn(null);
endButton.setEnabled(false);
} else if(this.fieldState == FieldState.INPUT) {
super.setForeground(this.fieldState.getColor());
super.setText("");
endButton.setIcn(createClearIcon());
endButton.setEnabled(true);
}
super.firePropertyChange(STATE_PROPERTY, oldState, this.fieldState);
}
/**
* Gets the current state.
*
* @return the state
*/
public FieldState getState() {
return this.fieldState;
}
/**
* State change on focus
*
*/
private static FocusListener focusStateListener = new FocusListener() {
@Override
public void focusGained(FocusEvent arg0) {
SearchField sf = (SearchField)arg0.getSource();
if(sf.fieldState == FieldState.PROMPT) {
sf.setState(FieldState.INPUT);
}
}
@Override
public void focusLost(FocusEvent arg0) {
SearchField sf = (SearchField)arg0.getSource();
if(sf.getText().length()==0) {
sf.setState(FieldState.PROMPT);
}
}
};
/**
* Custom shaped button for the search field
*/
private class SearchFieldButton extends JButton {
private int side = SwingConstants.LEFT;
private Image icn = null;
public SearchFieldButton(int side, Image icn) {
this.side = side;
this.icn = icn;
super.setOpaque(false);
}
public void setIcn(Image icn) {
this.icn = icn;
}
@Override
protected void paintComponent(Graphics g) {
// setup graphics context
Graphics2D g2d = (Graphics2D)g;
g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
// create button shape
int w = super.getWidth();
int h = super.getHeight();
Area btnShape = new Area();
if(side == SwingConstants.LEFT) {
// Shape circle = new Ellipse2D.Float(1, 0, h-1.0f, h-1.0f);
Shape roundRect = new RoundRectangle2D.Float(1.0f, 0.0f, w*2, h-1, h, h);
// Shape square = new Rectangle2D.Float(h/2.0f+1.0f, 0.0f, w-(h/2.0f)+1, h-1.0f);
btnShape.add(new Area(roundRect));
// btnShape.add(new Area(square));
} else if(side == SwingConstants.RIGHT) {
Shape roundRect = new RoundRectangle2D.Float(-w, 0.0f, w*2-1, h-1, h, h);
// Shape square = new Rectangle2D.Float(0.0f, 0.0f, w/2, h-1.0f);
btnShape.add(new Area(roundRect));
// btnShape.add(new Area(square));
}
// GradientPaint gp = new GradientPaint(new Point(0,0), new Color(215, 215, 215),
// new Point(0, h), new Color(200, 200, 200));
// g2d.setColor(gp);
// g2d.setPaint(gp);
g2d.setColor(SearchField.this.getBackground());
g2d.fill(btnShape);
// there is sometimes a single pixel artifact left
// over from the shape intersection. fix this
if(side == SwingConstants.LEFT) {
g2d.fillRect(h/2, 1, 2, h-1);
} else if(side == SwingConstants.RIGHT) {
g2d.fillRect(getWidth()-(h/2)-1, 1, 2, h-1);
}
g2d.setColor(FieldState.PROMPT.getColor());
g2d.draw(btnShape);
if(icn != null ) {
int btnY = h/2 - icn.getHeight(this)/2;
int btnX = w/2 - icn.getWidth(this)/2;
g2d.drawImage(icn, btnX, btnY, null);
}
// Rectangle2D rectToRemove = new Rectangle2D.Float(0, h/2, w, h/2);
// Area areaToRemove = new Area(rectToRemove);
// Area topArea = (Area)btnShape.clone();
// topArea.subtract(areaToRemove);
// gp = new GradientPaint(new Point(0,0), new Color(255, 255, 255, 75),
// new Point(0, h/2), new Color(255, 255, 255, 25));
// g2d.setPaint(gp);
// g2d.fill(topArea);
}
}
}
| |
/**
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.screens.datamodeller.client;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.enterprise.context.Dependent;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.inject.Inject;
import com.github.gwtbootstrap.client.ui.constants.ButtonType;
import com.google.gwt.user.client.ui.IsWidget;
import org.guvnor.common.services.shared.security.KieWorkbenchACL;
import org.guvnor.common.services.shared.validation.model.ValidationMessage;
import org.guvnor.messageconsole.events.PublishBaseEvent;
import org.guvnor.messageconsole.events.PublishBatchMessagesEvent;
import org.guvnor.messageconsole.events.SystemMessage;
import org.guvnor.messageconsole.events.UnpublishMessagesEvent;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.common.client.api.RemoteCallback;
import org.jboss.errai.security.shared.api.Role;
import org.kie.workbench.common.screens.datamodeller.client.resources.i18n.Constants;
import org.kie.workbench.common.screens.datamodeller.client.util.DataModelerUtils;
import org.kie.workbench.common.screens.datamodeller.client.validation.JavaFileNameValidator;
import org.kie.workbench.common.screens.datamodeller.client.validation.ValidatorService;
import org.kie.workbench.common.screens.datamodeller.client.widgets.refactoring.ShowUsagesPopup;
import org.kie.workbench.common.screens.datamodeller.events.DataModelSaved;
import org.kie.workbench.common.screens.datamodeller.events.DataModelStatusChangeEvent;
import org.kie.workbench.common.screens.datamodeller.events.DataModelerEvent;
import org.kie.workbench.common.screens.datamodeller.events.DataObjectCreatedEvent;
import org.kie.workbench.common.screens.datamodeller.events.DataObjectDeletedEvent;
import org.kie.workbench.common.screens.datamodeller.events.DataObjectSelectedEvent;
import org.kie.workbench.common.screens.datamodeller.model.DataModelerError;
import org.kie.workbench.common.screens.datamodeller.model.EditorModelContent;
import org.kie.workbench.common.screens.datamodeller.model.GenerationResult;
import org.kie.workbench.common.screens.datamodeller.model.TypeInfoResult;
import org.kie.workbench.common.screens.datamodeller.security.DataModelerFeatures;
import org.kie.workbench.common.screens.datamodeller.service.DataModelerService;
import org.kie.workbench.common.screens.javaeditor.client.type.JavaResourceType;
import org.kie.workbench.common.screens.javaeditor.client.widget.EditJavaSourceWidget;
import org.kie.workbench.common.services.datamodeller.core.AnnotationDefinition;
import org.kie.workbench.common.services.datamodeller.core.DataModel;
import org.kie.workbench.common.services.datamodeller.core.DataObject;
import org.kie.workbench.common.services.datamodeller.core.JavaTypeInfo;
import org.kie.workbench.common.services.datamodeller.core.PropertyType;
import org.kie.workbench.common.services.datamodeller.core.impl.JavaTypeInfoImpl;
import org.kie.workbench.common.widgets.client.popups.validation.ValidationPopup;
import org.kie.workbench.common.widgets.metadata.client.KieEditor;
import org.kie.workbench.common.widgets.metadata.client.KieEditorView;
import org.uberfire.backend.vfs.ObservablePath;
import org.uberfire.backend.vfs.Path;
import org.uberfire.client.annotations.WorkbenchEditor;
import org.uberfire.client.annotations.WorkbenchMenu;
import org.uberfire.client.annotations.WorkbenchPartTitle;
import org.uberfire.client.annotations.WorkbenchPartTitleDecoration;
import org.uberfire.client.annotations.WorkbenchPartView;
import org.uberfire.ext.editor.commons.client.file.CommandWithFileNameAndCommitMessage;
import org.uberfire.ext.editor.commons.client.file.CopyPopup;
import org.uberfire.ext.editor.commons.client.file.DeletePopup;
import org.uberfire.ext.editor.commons.client.file.FileNameAndCommitMessage;
import org.uberfire.ext.editor.commons.client.file.RenamePopup;
import org.uberfire.ext.editor.commons.client.file.SaveOperationService;
import org.uberfire.ext.widgets.common.client.callbacks.DefaultErrorCallback;
import org.uberfire.ext.widgets.common.client.common.Page;
import org.uberfire.ext.widgets.common.client.common.popups.YesNoCancelPopup;
import org.uberfire.ext.widgets.common.client.resources.i18n.CommonConstants;
import org.uberfire.lifecycle.OnClose;
import org.uberfire.lifecycle.OnMayClose;
import org.uberfire.lifecycle.OnStartup;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.ParameterizedCommand;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.rpc.SessionInfo;
import org.uberfire.workbench.events.NotificationEvent;
import org.uberfire.workbench.model.menu.MenuFactory;
import org.uberfire.workbench.model.menu.Menus;
@Dependent
@WorkbenchEditor(identifier = "DataModelerEditor",
supportedTypes = { JavaResourceType.class },
priority = Integer.MAX_VALUE)
public class DataModelerScreenPresenter
extends KieEditor {
/**
 * View contract for the data modeler editor screen.
 */
public interface DataModelerScreenView
        extends
        KieEditorView {

    void setContext( DataModelerContext context );

    void setEditorId( String editorId );

    //TODO temporal method until we have facets.
    void showDomain( String domainId );

    void refreshTypeLists( boolean keepCurrentSelection );

    List<String> getAvailableDomains();
}

/** The concrete view, kept in addition to the superclass reference for typed access. */
private DataModelerScreenView view;

/** Source tab widget showing the raw .java content. */
@Inject
private EditJavaSourceWidget javaSourceEditor;

/** Event bus used to broadcast data modeler events to other widgets. */
@Inject
private Event<DataModelerEvent> dataModelerEvent;

@Inject
private Event<UnpublishMessagesEvent> unpublishMessagesEvent;

@Inject
private Event<PublishBatchMessagesEvent> publishBatchMessagesEvent;

/** ACL used to check the EDIT_SOURCES permission. */
@Inject
private KieWorkbenchACL kieACL;

/** Async RPC caller for the server-side modeler service. */
@Inject
private Caller<DataModelerService> modelerService;

@Inject
private ValidatorService validatorService;

/** Validates user-entered file names for copy/rename popups. */
@Inject
private JavaFileNameValidator javaFileNameValidator;

@Inject
private JavaResourceType resourceType;

/** Per-editor state shared with the view and child widgets. */
private DataModelerContext context;

/** True between @OnStartup and @OnClose. */
private boolean open = false;

/** Set once the initial tab selection has happened; gates tab-focus handling. */
private boolean uiStarted = false;

private SessionInfo sessionInfo;

/** Message-console key for this editor instance (derived from the path URI). */
private String currentMessageType;

/** Hash of the source as last loaded/saved; null until content is loaded. */
private Integer originalSourceHash = null;

/** Whether the current user may edit the source tab (see setSourceEditionGrant). */
private boolean sourceEditionEnabled = false;

/** Index of the editable source tab within the editor pages. */
private static final int EDITABLE_SOURCE_TAB = 2;

// NOTE(review): static counter incremented per instance; safe in
// single-threaded GWT client code — confirm if ever used elsewhere.
private static int editorIds = 0;

/** Unique id for this editor instance (session id + counter). */
private String editorId;
/**
 * Workbench part title text, delegated to the base editor.
 *
 * @return the title text
 */
@WorkbenchPartTitle
public String getTitleText() {
    final String titleText = super.getTitleText();
    return titleText;
}
/**
 * Workbench part title decoration widget, delegated to the base editor.
 *
 * @return the title widget
 */
@WorkbenchPartTitleDecoration
public IsWidget getTitle() {
    final IsWidget titleWidget = super.getTitle();
    return titleWidget;
}
/**
 * Root widget of this editor, delegated to the base editor.
 *
 * @return the editor widget
 */
@WorkbenchPartView
public IsWidget getView() {
    final IsWidget widget = super.getWidget();
    return widget;
}
/**
 * Menus built by the base editor for this workbench part.
 *
 * @return the editor menus
 */
@WorkbenchMenu
public Menus getMenus() {
    final Menus editorMenus = menus;
    return editorMenus;
}
/**
 * Creates the presenter and assigns it a unique editor id.
 *
 * @param baseView    the editor view implementation
 * @param sessionInfo current user session; its id seeds the editor id
 */
@Inject
public DataModelerScreenPresenter( DataModelerScreenView baseView, SessionInfo sessionInfo ) {
    super( baseView );
    view = baseView;
    this.sessionInfo = sessionInfo;
    // unique per instance: session id plus a static counter
    editorId = sessionInfo.getId() + "-" + editorIds++;
    view.setEditorId( editorId );
}
/**
 * Workbench lifecycle: initializes the editor for the given path.
 *
 * @param path  the .java file being edited
 * @param place the place request that opened this editor
 */
@OnStartup
public void onStartup( final ObservablePath path,
                       final PlaceRequest place ) {
    setSourceEditionGrant();
    init( path, place, resourceType );
    initContext( path );
    open = true;
    // message-console entries for this editor are keyed by the path URI
    currentMessageType = "DataModeler" + path.toURI();
    cleanSystemMessages( getCurrentMessageType() );
    // keep the context's edition status in sync with manual source edits
    javaSourceEditor.addChangeHandler( new EditJavaSourceWidget.TextChangeHandler() {
        @Override
        public void onTextChange() {
            if ( context != null ) {
                context.setEditionStatus( DataModelerContext.EditionStatus.SOURCE_CHANGED );
            }
        }
    } );
}
/**
 * Workbench lifecycle: asks the user to confirm closing when there are
 * unsaved changes.
 *
 * @return true when the editor may be closed
 */
@OnMayClose
public boolean onMayClose() {
    final boolean hasUnsavedChanges = isDirty();
    return !hasUnsavedChanges || view.confirmClose();
}
/**
 * Workbench lifecycle: tears this editor instance down.
 * (Capitalized method name matches the inherited lifecycle method it
 * overrides — presumably required by the superclass; do not rename.)
 */
@OnClose
public void OnClose() {
    open = false;
    versionRecordManager.clear();
    cleanSystemMessages( getCurrentMessageType() );
    clearContext();
    super.OnClose();
}
/**
 * Deletes the current file, but when the class could be parsed it first
 * checks the project for usages of the class and asks the user to confirm
 * before deleting a class that is in use.
 */
private void onSafeDelete() {
    if ( context.getEditorModelContent().getOriginalClassName() != null ) {
        //if we are about to delete a .java file that could be parsed without errors, and we can calculate the
        //className we can check for class usages prior to deletion.
        final String className = context.getEditorModelContent().getOriginalClassName();
        modelerService.call( new RemoteCallback<List<Path>>() {
            @Override
            public void callback( List<Path> paths ) {
                if ( paths != null && paths.size() > 0 ) {
                    //If usages for this class were detected in project assets
                    //show the confirmation message to the user.
                    ShowUsagesPopup showUsagesPopup = ShowUsagesPopup.newUsagesPopupForDeletion(
                            Constants.INSTANCE.modelEditor_confirm_deletion_of_used_class( className ),
                            paths,
                            new Command() {
                                @Override
                                public void execute() {
                                    // user confirmed: proceed with the deletion
                                    onDelete( versionRecordManager.getPathToLatest() );
                                }
                            },
                            new Command() {
                                @Override
                                public void execute() {
                                    //do nothing.
                                }
                            }
                    );
                    showUsagesPopup.setCloseVisible( false );
                    showUsagesPopup.show();
                } else {
                    //no usages, just proceed with the deletion.
                    onDelete( versionRecordManager.getPathToLatest() );
                }
            }
        } ).findClassUsages( versionRecordManager.getPathToLatest(), className );
    } else {
        //we couldn't parse the class, so no check can be done. Just proceed with the standard
        //file deletion procedure.
        onDelete( versionRecordManager.getPathToLatest() );
    }
}
/**
 * Prompts for a commit message and deletes the given file.
 *
 * @param path path of the file to delete
 */
private void onDelete( final Path path ) {
    final DeletePopup popup = new DeletePopup( new ParameterizedCommand<String>() {
        @Override
        public void execute( final String comment ) {
            view.showBusyIndicator( org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants.INSTANCE.Deleting() );
            modelerService.call( getDeleteSuccessCallback(), new DataModelerErrorCallback( Constants.INSTANCE.modelEditor_deleting_error() ) ).delete( path,
                                                                                                                                                       comment );
        }
    } );
    popup.show();
}
/**
 * Shows the copy popup and performs the copy on confirmation.
 */
private void onCopy() {
    final CopyPopup popup = new CopyPopup( versionRecordManager.getCurrentPath(),
                                           javaFileNameValidator,
                                           new CommandWithFileNameAndCommitMessage() {
                                               @Override
                                               public void execute( final FileNameAndCommitMessage details ) {
                                                   view.showBusyIndicator( org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants.INSTANCE.Copying() );
                                                   modelerService.call( getCopySuccessCallback(),
                                                                        new DataModelerErrorCallback( Constants.INSTANCE.modelEditor_copying_error() ) ).copy( versionRecordManager.getCurrentPath(),
                                                                                                                                                               details.getNewFileName(),
                                                                                                                                                               details.getCommitMessage(),
                                                                                                                                                               true );
                                               }
                                           } );
    popup.show();
}
/**
 * Renames the current file, but when the class could be parsed it first
 * checks the project for usages of the class and asks the user to confirm
 * before renaming a class that is in use.
 */
private void onSafeRename() {
    if ( context.getEditorModelContent().getOriginalClassName() != null ) {
        //if we are about to rename a .java file that could be parsed without errors, and we can calculate the
        //className we can check for class usages prior to renaming and we can also suggest to perform an automatic
        // class renaming.
        final String className = context.getEditorModelContent().getOriginalClassName();
        modelerService.call( new RemoteCallback<List<Path>>() {
            @Override
            public void callback( List<Path> paths ) {
                if ( paths != null && paths.size() > 0 ) {
                    //If usages for this class were detected in project assets
                    //show the confirmation message to the user.
                    ShowUsagesPopup showUsagesPopup = ShowUsagesPopup.newUsagesPopupForRenaming(
                            Constants.INSTANCE.modelEditor_confirm_renaming_of_used_class( className ),
                            paths,
                            new Command() {
                                @Override
                                public void execute() {
                                    // user confirmed: proceed with the rename
                                    rename();
                                }
                            },
                            new Command() {
                                @Override
                                public void execute() {
                                    //do nothing.
                                }
                            }
                    );
                    showUsagesPopup.setCloseVisible( false );
                    showUsagesPopup.show();
                } else {
                    //no usages, just proceed with the renaming.
                    rename();
                }
            }
        } ).findClassUsages( versionRecordManager.getPathToLatest(), className );
    } else {
        //we couldn't parse the class, so no check can be done. Just proceed with the standard
        //file renaming procedure.
        rename();
    }
}
/**
 * Starts the rename workflow. When there are unsaved changes the user is
 * asked whether they should be saved as part of the rename.
 */
protected void rename() {
    if ( isDirty() ) {
        // yes -> save pending changes while renaming; no -> rename only
        YesNoCancelPopup yesNoCancelPopup = YesNoCancelPopup.newYesNoCancelPopup( CommonConstants.INSTANCE.Information(),
                                                                                  Constants.INSTANCE.modelEditor_confirm_save_before_rename(),
                                                                                  new Command() {
                                                                                      @Override
                                                                                      public void execute() {
                                                                                          rename( true );
                                                                                      }
                                                                                  },
                                                                                  new Command() {
                                                                                      @Override
                                                                                      public void execute() {
                                                                                          rename( false );
                                                                                      }
                                                                                  },
                                                                                  new Command() {
                                                                                      @Override
                                                                                      public void execute() {
                                                                                          //do nothing.
                                                                                      }
                                                                                  }
        );
        yesNoCancelPopup.setCloseVisible( false );
        yesNoCancelPopup.show();
    } else {
        //just rename.
        rename( false );
    }
}
/**
 * Builds the validation command wired to the editor's Validate action.
 * The same editor-vs-source priority calculation as for saving applies.
 *
 * @return the validation command
 */
protected Command onValidate() {
    return new Command() {
        @Override
        public void execute() {
            //at validation time we must do the same calculation as if we were about to save.
            final DataObject[] modifiedDataObject = new DataObject[ 1 ];
            if ( isDirty() ) {
                if ( context.isEditorChanged() ) {
                    //at save time the source has always priority over the model.
                    //If the source was properly parsed and the editor has changes, we need to send the DataObject
                    //to the server in order to let the source to be updated prior to save.
                    modifiedDataObject[ 0 ] = context.getDataObject();
                } else {
                    //if the source has changes, no update from the UI to the source will be performed.
                    //instead the parsed DataObject must be returned from the server.
                    modifiedDataObject[ 0 ] = null;
                }
            }
            modelerService.call( new RemoteCallback<List<ValidationMessage>>() {
                @Override
                public void callback( final List<ValidationMessage> results ) {
                    if ( results == null || results.isEmpty() ) {
                        notification.fire( new NotificationEvent( org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants.INSTANCE.ItemValidatedSuccessfully(),
                                                                  NotificationEvent.NotificationType.SUCCESS ) );
                    } else {
                        ValidationPopup.showMessages( results );
                    }
                }
            }, new DefaultErrorCallback() ).validate( getSource(), versionRecordManager.getCurrentPath(), modifiedDataObject[ 0 ] );
        }
    };
}
/**
 * @return true when either the model editor or the source editor has
 *         unsaved changes
 */
private boolean isDirty() {
    if ( isDataObjectDirty() ) {
        return true;
    }
    return isSourceDirty();
}
/**
 * @return true when the data object in the model editor differs from the
 *         originally loaded one (hash comparison done by the superclass)
 */
private boolean isDataObjectDirty() {
    final DataObject dataObject = context.getDataObject();
    final Integer currentHash;
    if ( dataObject != null ) {
        currentHash = dataObject.hashCode();
    } else {
        currentHash = null;
    }
    return isDirty( currentHash );
}
/**
 * @return true when the java source differs from the last loaded/saved
 *         version, tracked via the source string's hash code
 */
private boolean isSourceDirty() {
    if ( originalSourceHash == null ) {
        return false;
    }
    return originalSourceHash.intValue() != getSource().hashCode();
}
/**
 * @return callback that hides the busy indicator and notifies a
 *         successful copy
 */
private RemoteCallback<Path> getCopySuccessCallback() {
    return new RemoteCallback<Path>() {
        @Override
        public void callback( final Path response ) {
            view.hideBusyIndicator();
            notification.fire( new NotificationEvent( org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants.INSTANCE.ItemCopiedSuccessfully() ) );
        }
    };
}
/**
 * @return callback that hides the busy indicator and notifies a
 *         successful deletion
 */
private RemoteCallback<Path> getDeleteSuccessCallback() {
    return new RemoteCallback<Path>() {
        @Override
        public void callback( final Path response ) {
            view.hideBusyIndicator();
            notification.fire( new NotificationEvent( org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants.INSTANCE.ItemDeletedSuccessfully() ) );
        }
    };
}
/**
 * @return callback that hides the busy indicator after a rename; the
 *         subsequent rename event reloads the editor, so no more is needed
 */
private RemoteCallback<Path> getRenameSuccessCallback() {
    return new RemoteCallback<Path>() {
        @Override
        public void callback( final Path targetPath ) {
            view.hideBusyIndicator();
        }
    };
}
/**
 * Saves the editor content. When dirty, a JavaTypeInfo for the current
 * content is computed first (from the data object, or parsed from the
 * source on the server) so saveFile can detect implied package/file
 * renames.
 */
protected void save() {
    final JavaTypeInfoImpl newTypeInfo = new JavaTypeInfoImpl();
    if ( isDirty() ) {
        if ( context.isEditorChanged() ) {
            // model editor has the latest changes: type info comes from the data object
            newTypeInfo.setPackageName( context.getDataObject().getPackageName() );
            newTypeInfo.setName( context.getDataObject().getName() );
            saveFile( newTypeInfo );
        } else {
            // source editor has the latest changes: parse type info on the server
            view.showLoading();
            modelerService.call( new RemoteCallback<TypeInfoResult>() {
                @Override
                public void callback( TypeInfoResult typeInfoResult ) {
                    view.hideBusyIndicator();
                    if ( !typeInfoResult.hasErrors() && typeInfoResult.getJavaTypeInfo() != null ) {
                        newTypeInfo.setPackageName( typeInfoResult.getJavaTypeInfo().getPackageName() );
                        newTypeInfo.setName( typeInfoResult.getJavaTypeInfo().getName() );
                    }
                    saveFile( newTypeInfo );
                }
            } ).loadJavaTypeInfo( getSource() );
        }
    } else {
        saveFile( null );
    }
}
/**
 * Saves the file, first checking whether the pending changes imply a
 * package or file rename; if so, the user is asked whether the
 * corresponding refactoring should also be applied.
 *
 * @param newTypeInfo type info computed from the current content, or null
 *                    when no rename checks are needed
 */
private void saveFile( final JavaTypeInfo newTypeInfo ) {
    String currentFileName = DataModelerUtils.extractSimpleFileName( versionRecordManager.getPathToLatest() );
    if ( hasPackageNameChanged( newTypeInfo ) ) {
        // package changed -> offer to move the file to the matching directory
        YesNoCancelPopup yesNoCancelPopup = YesNoCancelPopup.newYesNoCancelPopup( CommonConstants.INSTANCE.Information(),
                                                                                  Constants.INSTANCE.modelEditor_confirm_file_package_refactoring( newTypeInfo.getPackageName() ),
                                                                                  new Command() {
                                                                                      @Override
                                                                                      public void execute() {
                                                                                          saveOperationService.save( versionRecordManager.getPathToLatest(), getSaveCommand( newTypeInfo, versionRecordManager.getPathToLatest() ) );
                                                                                      }
                                                                                  },
                                                                                  Constants.INSTANCE.modelEditor_action_yes_refactor_directory(),
                                                                                  ButtonType.PRIMARY,
                                                                                  new Command() {
                                                                                      @Override
                                                                                      public void execute() {
                                                                                          saveOperationService.save( versionRecordManager.getPathToLatest(), getSaveCommand( null, versionRecordManager.getPathToLatest() ) );
                                                                                      }
                                                                                  },
                                                                                  Constants.INSTANCE.modelEditor_action_no_dont_refactor_directory(),
                                                                                  ButtonType.DANGER,
                                                                                  new Command() {
                                                                                      @Override
                                                                                      public void execute() {
                                                                                          //do nothing
                                                                                      }
                                                                                  },
                                                                                  null,
                                                                                  null
        );
        yesNoCancelPopup.setCloseVisible( false );
        yesNoCancelPopup.show();
    } else if ( hasFileNameChanged( newTypeInfo, currentFileName ) ) {
        // class name changed -> offer to rename the file accordingly
        YesNoCancelPopup yesNoCancelPopup = YesNoCancelPopup.newYesNoCancelPopup( CommonConstants.INSTANCE.Information(),
                                                                                  Constants.INSTANCE.modelEditor_confirm_file_name_refactoring( newTypeInfo.getName() ),
                                                                                  new Command() {
                                                                                      @Override
                                                                                      public void execute() {
                                                                                          saveOperationService.save( versionRecordManager.getPathToLatest(), getSaveCommand( newTypeInfo, versionRecordManager.getPathToLatest() ) );
                                                                                      }
                                                                                  },
                                                                                  Constants.INSTANCE.modelEditor_action_yes_refactor_file_name(),
                                                                                  ButtonType.PRIMARY,
                                                                                  new Command() {
                                                                                      @Override
                                                                                      public void execute() {
                                                                                          saveOperationService.save( versionRecordManager.getPathToLatest(), getSaveCommand( null, versionRecordManager.getPathToLatest() ) );
                                                                                      }
                                                                                  },
                                                                                  Constants.INSTANCE.modelEditor_action_no_dont_refactor_file_name(),
                                                                                  ButtonType.DANGER,
                                                                                  new Command() {
                                                                                      @Override
                                                                                      public void execute() {
                                                                                          //do nothing
                                                                                      }
                                                                                  },
                                                                                  null,
                                                                                  null
        );
        yesNoCancelPopup.setCloseVisible( false );
        yesNoCancelPopup.show();
    } else {
        // no rename implied: plain save. Use the shared saveOperationService for
        // consistency with the other branches (was: new SaveOperationService()).
        saveOperationService.save( versionRecordManager.getPathToLatest(), getSaveCommand( null, versionRecordManager.getPathToLatest() ) );
    }
}
/**
 * @param newTypeInfo     type info for the current content, may be null
 * @param currentFileName simple file name of the file being edited
 * @return true when the class name in newTypeInfo differs from the current
 *         file name, i.e. saving implies a file rename
 */
private boolean hasFileNameChanged( JavaTypeInfo newTypeInfo,
                                    String currentFileName ) {
    if ( currentFileName == null || newTypeInfo == null ) {
        return false;
    }
    final String newName = newTypeInfo.getName();
    return newName != null && !currentFileName.equals( newName );
}
/**
 * @param newTypeInfo type info for the current content, may be null
 * @return true when the package name in newTypeInfo differs from the
 *         originally loaded package, i.e. saving implies a directory move
 */
private boolean hasPackageNameChanged( JavaTypeInfo newTypeInfo ) {
    if ( newTypeInfo == null ) {
        return false;
    }
    final String newPackage = newTypeInfo.getPackageName();
    if ( newPackage == null ) {
        return false;
    }
    return !newPackage.equals( context.getEditorModelContent().getOriginalPackageName() );
}
/**
 * Builds the command executed once the user has confirmed the save and
 * entered a commit message.
 *
 * @param newTypeInfo when non-null, the save also applies the package/file
 *                    rename refactoring
 * @param path        the path to save to
 * @return the save command
 */
private ParameterizedCommand<String> getSaveCommand( final JavaTypeInfo newTypeInfo,
                                                     final Path path ) {
    return new ParameterizedCommand<String>() {
        @Override
        public void execute( final String commitMessage ) {
            final DataObject[] modifiedDataObject = new DataObject[ 1 ];
            if ( isDirty() ) {
                if ( context.isEditorChanged() ) {
                    //at save time the source has always priority over the model.
                    //If the source was properly parsed and the editor has changes, we need to send the DataObject
                    //to the server in order to let the source to be updated prior to save.
                    modifiedDataObject[ 0 ] = context.getDataObject();
                } else {
                    //if the source has changes, no update from the UI to the source will be performed.
                    //instead the parsed DataObject must be returned from the server.
                    modifiedDataObject[ 0 ] = null;
                }
            }
            view.showSaving();
            if ( newTypeInfo != null ) {
                // save plus package/class rename refactoring
                modelerService.call( getSaveSuccessCallback( newTypeInfo, path ),
                                     new DataModelerErrorCallback( Constants.INSTANCE.modelEditor_saving_error() ) ).saveSource(
                        getSource(),
                        path,
                        modifiedDataObject[ 0 ],
                        metadata, commitMessage,
                        newTypeInfo.getPackageName(), newTypeInfo.getName() );
            } else {
                // plain save, no rename
                modelerService.call( getSaveSuccessCallback( newTypeInfo, path ),
                                     new DataModelerErrorCallback( Constants.INSTANCE.modelEditor_saving_error() ) ).saveSource(
                        getSource(),
                        path,
                        modifiedDataObject[ 0 ],
                        metadata, commitMessage );
            }
        }
    };
}
/**
 * Builds the callback applied after a successful save: updates the parse
 * status, refreshes source/model hashes and fires model-saved events.
 *
 * @param newTypeInfo non-null when the save included a rename; in that case
 *                    a rename event will reload the editor, so nothing is
 *                    done here
 * @param currentPath the path that was saved
 * @return the save success callback
 */
private RemoteCallback<GenerationResult> getSaveSuccessCallback( final JavaTypeInfo newTypeInfo,
                                                                 final Path currentPath ) {
    return new RemoteCallback<GenerationResult>() {
        @Override
        public void callback( GenerationResult result ) {
            view.hideBusyIndicator();
            if ( newTypeInfo == null ) {
                // capture dirty status before the hashes are reset below
                Boolean oldDirtyStatus = isDirty();
                if ( result.hasErrors() ) {
                    context.setParseStatus( DataModelerContext.ParseStatus.PARSE_ERRORS );
                    updateEditorView( null );
                    context.setDataObject( null );
                    if ( isEditorTabSelected() ) {
                        //uncommon case
                        showParseErrorsDialog( Constants.INSTANCE.modelEditor_message_file_parsing_errors(),
                                               true,
                                               result.getErrors(),
                                               new Command() {
                                                   @Override
                                                   public void execute() {
                                                       //return to the source tab
                                                       setSelectedTab( EDITABLE_SOURCE_TAB );
                                                   }
                                               } );
                    }
                } else {
                    context.setParseStatus( DataModelerContext.ParseStatus.PARSED );
                    if ( context.isSourceChanged() ) {
                        updateEditorView( result.getDataObject() );
                        context.setDataObject( result.getDataObject() );
                    }
                    cleanSystemMessages( getCurrentMessageType() );
                }
                setSource( result.getSource() );
                context.setEditionStatus( DataModelerContext.EditionStatus.NO_CHANGES );
                // reset hashes so isDirty() reflects the just-saved state
                createOriginalHash( context.getDataObject() );
                originalSourceHash = getSource().hashCode();
                notification.fire( new NotificationEvent( org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants.INSTANCE.ItemSavedSuccessfully() ) );
                dataModelerEvent.fire( new DataModelStatusChangeEvent( context.getContextId(),
                                                                       DataModelerEvent.DATA_MODEL_BROWSER,
                                                                       oldDirtyStatus,
                                                                       false ) );
                dataModelerEvent.fire( new DataModelSaved( context.getContextId(), null ) );
                versionRecordManager.reloadVersions( currentPath );
            } else {
                //If the file was renamed as part of the file saving, don't do anything.
                //A rename event will arrive, the same as for the "Rename" case,
                //and the file will be automatically reloaded.
            }
        }
    };
}
/**
 * Loads editor content: first the annotation definitions (cached in the
 * context), then the file content itself.
 */
@Override
protected void loadContent() {
    modelerService.call( new RemoteCallback<Map<String, AnnotationDefinition>>() {
        @Override
        public void callback( final Map<String, AnnotationDefinition> defs ) {
            context.setAnnotationDefinitions( defs );
            // definitions available: now load the actual file content
            modelerService.call( getLoadModelSuccessCallback(),
                                 getNoSuchFileExceptionErrorCallback() ).loadContent( versionRecordManager.getCurrentPath() );
        }
    }, new DataModelerErrorCallback( Constants.INSTANCE.modelEditor_annotationDef_loading_error() )
    ).getAnnotationDefinitions();
}
/**
 * Builds the callback applied once the editor content has been loaded:
 * wires the context, sets up the editor pages and selects the initial tab.
 *
 * @return the load success callback
 */
private RemoteCallback<EditorModelContent> getLoadModelSuccessCallback() {
    return new RemoteCallback<EditorModelContent>() {
        @Override
        public void callback( EditorModelContent content ) {
            //Path is set to null when the Editor is closed (which can happen before async calls complete).
            if ( versionRecordManager.getCurrentPath() == null ) {
                return;
            }
            javaSourceEditor.setReadonly( isReadOnly || !sourceEditionEnabled );
            context.setReadonly( isReadOnly );
            context.setEditionStatus( DataModelerContext.EditionStatus.NO_CHANGES );
            context.setEditorModelContent( content );
            setModel( content );
            resetEditorPages( content.getOverview() );
            addSourceEditorPage();
            view.hideBusyIndicator();
            if ( content.hasErrors() ) {
                publishSystemMessages( getCurrentMessageType(), true, content.getErrors() );
            }
            if ( content.getDataObject() != null ) {
                // model could be parsed: start on the model editor tab
                selectEditorTab();
                uiStarted = true;
            } else {
                // parse errors: inform the user and jump to the source tab
                showParseErrorsDialog( Constants.INSTANCE.modelEditor_message_file_parsing_errors(),
                                       false,
                                       context.getEditorModelContent().getErrors(),
                                       new Command() {
                                           @Override
                                           public void execute() {
                                               //we need to go directly to the sources tab
                                               uiStarted = true;
                                               //onSourceTabSelected();
                                               setSelectedTab( EDITABLE_SOURCE_TAB );
                                           }
                                       } );
            }
            // remember hashes so isDirty() can detect subsequent changes
            createOriginalHash( context.getDataObject() );
            originalSourceHash = getSource().hashCode();
        }
    };
}
/**
 * Adds the java source page to the editor's page list.
 */
private void addSourceEditorPage() {
    addPage( new Page( javaSourceEditor,
                       org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants.INSTANCE.SourceTabTitle() ) {
        @Override
        public void onFocus() {
            // ignore focus events fired while the UI is still loading
            if ( uiStarted ) {
                onSourceTabSelected();
            }
        }

        @Override
        public void onLostFocus() {
        }
    } );
}
/**
 * Shows the rename popup and performs the rename on confirmation.
 *
 * @param saveCurrentChanges when true, pending changes are saved as part
 *                           of the rename operation
 */
private void rename( final boolean saveCurrentChanges ) {
    final DataObject[] modifiedDataObject = new DataObject[ 1 ];
    if ( saveCurrentChanges ) {
        if ( isDirty() ) {
            if ( context.isEditorChanged() ) {
                //at save time the source has always priority over the model.
                //If the source was properly parsed and the editor has changes, we need to send the DataObject
                //to the server in order to let the source to be updated prior to save.
                modifiedDataObject[ 0 ] = context.getDataObject();
            } else {
                //if the source has changes, no update from the UI to the source will be performed.
                //instead the parsed DataObject must be returned from the server.
                modifiedDataObject[ 0 ] = null;
            }
        }
    }
    final RenamePopup popup = new RenamePopup( versionRecordManager.getPathToLatest(),
                                               javaFileNameValidator,
                                               new CommandWithFileNameAndCommitMessage() {
                                                   @Override
                                                   public void execute( final FileNameAndCommitMessage details ) {
                                                       view.showBusyIndicator( org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants.INSTANCE.Renaming() );
                                                       modelerService.call( getRenameSuccessCallback(),
                                                                            new DataModelerErrorCallback( Constants.INSTANCE.modelEditor_renaming_error() ) ).rename( versionRecordManager.getPathToLatest(),
                                                                                                                                                                      details.getNewFileName(),
                                                                                                                                                                      details.getCommitMessage(),
                                                                                                                                                                      true,
                                                                                                                                                                      saveCurrentChanges,
                                                                                                                                                                      getSource(), modifiedDataObject[ 0 ], metadata );
                                                   }
                                               } );
    popup.show();
}
    /** @return the data model held by the current editor context. */
    public DataModel getDataModel() {
        return context.getDataModel();
    }
    /** @return the Java source text currently shown in the source editor. */
    public String getSource() {
        return javaSourceEditor.getContent();
    }
    /** Replaces the text shown in the source editor. */
    public void setSource( String source ) {
        javaSourceEditor.setContent( source );
    }
    /** @return true when the editable source tab is the currently selected tab. */
    private boolean isSourceTabSelected() {
        return getSelectedTabIndex() == EDITABLE_SOURCE_TAB;
    }
private void setSourceEditionGrant() {
Set<String> grantedRoles = kieACL.getGrantedRoles( DataModelerFeatures.EDIT_SOURCES );
sourceEditionEnabled = false;
if ( sessionInfo != null && sessionInfo.getIdentity() != null && sessionInfo.getIdentity().getRoles() != null ) {
for (Role role : sessionInfo.getIdentity().getRoles()) {
if ( grantedRoles.contains( role.getName() ) ) {
sourceEditionEnabled = true;
break;
}
}
}
}
    /**
     * Installs the freshly loaded model into the view and broadcasts the initial data object
     * selection. The parse status reflects whether the server could build a DataObject from the
     * source.
     */
    private void setModel( EditorModelContent model ) {
        view.setContext( context );
        setSource( model.getSource() );
        if ( model.getDataObject() != null ) {
            context.setParseStatus( DataModelerContext.ParseStatus.PARSED );
            dataModelerEvent.fire( new DataObjectSelectedEvent( context.getContextId(), DataModelerEvent.DATA_MODEL_BROWSER, model.getDataObject() ) );
        } else {
            //Parsing failed: fire a null selection so dependent widgets clear themselves.
            context.setParseStatus( DataModelerContext.ParseStatus.PARSE_ERRORS );
            dataModelerEvent.fire( new DataObjectSelectedEvent( context.getContextId(), DataModelerEvent.DATA_MODEL_BROWSER, null ) );
        }
    }
    /**
     * Invoked when the source tab becomes active. If the UI editor holds unsynchronized changes,
     * the source is regenerated on the server from the current DataObject; otherwise the existing
     * editor content is simply propagated.
     */
    public void onSourceTabSelected() {
        if ( context.isParsed() && context.isEditorChanged() ) {
            //If there are changes in the ui the source must be regenerated on server side.
            view.showLoading();
            modelerService.call( new RemoteCallback<GenerationResult>() {
                @Override
                public void callback( GenerationResult result ) {
                    view.hideBusyIndicator();
                    setSource( result.getSource() );
                    updateSource( result.getSource() );
                    context.setEditionStatus( DataModelerContext.EditionStatus.NO_CHANGES );
                }
            }, new DataModelerErrorCallback( Constants.INSTANCE.modelEditor_loading_error() ) ).updateSource( getSource(), versionRecordManager.getCurrentPath(), context.getDataObject() );
        } else {
            //No UI changes to push; reset the edition status unless we came from the overview tab.
            if ( !isOverviewTabSelected() ) {
                context.setEditionStatus( DataModelerContext.EditionStatus.NO_CHANGES );
            }
            updateSource( javaSourceEditor.getContent() );
        }
    }
    /** Pushes the given source text into the source editor widget. */
    @Override
    protected void updateSource( String source ) {
        setSource( source );
    }
    /**
     * Replaces the current data object in the data model with the given one (or removes it when
     * {@code dataObject} is null) and broadcasts the new selection to dependent widgets.
     *
     * @param dataObject the newly parsed data object, or null when parsing failed.
     */
    private void updateEditorView( DataObject dataObject ) {
        //here we need to check if data object name, or package, changed, etc.
        //if this is the likely we can show an alert to the user, etc.
        //also the file should be renamed.
        if ( context.getDataObject() != null ) {
            context.getDataModel().removeDataObject( context.getDataObject().getClassName() );
        }
        if ( dataObject != null ) {
            //Remove any stale entry with the same class name before re-adding.
            context.getDataModel().removeDataObject( dataObject.getClassName() );
            context.getDataModel().getDataObjects().add( dataObject );
        }
        dataModelerEvent.fire( new DataObjectSelectedEvent( context.getContextId(), DataModelerEvent.DATA_MODEL_BROWSER, dataObject ) );
    }
    /**
     * Invoked when the model editor tab becomes active. If the source changed (or was never
     * parsed) the file is re-parsed on the server; on parse errors the user is sent back to the
     * source tab, otherwise the editor view is rebuilt from the parsed DataObject.
     */
    @Override
    public void onEditTabSelected() {
        boolean doParsing = false;
        if ( context.isSourceChanged() ) {
            //if there has been changes in the source we should try to parse the file and build the data object again.
            doParsing = true;
        } else if ( context.isNotParsed() ) {
            //uncommon case, the file wasn't parsed yet.
            doParsing = true;
        }
        if ( doParsing ) {
            view.showLoading();
            //If there are changes in the source, we must try to parse the file.
            modelerService.call( new RemoteCallback<GenerationResult>() {
                @Override
                public void callback( GenerationResult result ) {
                    view.hideBusyIndicator();
                    if ( result.hasErrors() ) {
                        //Parsing failed: publish the errors and send the user back to the source tab.
                        showParseErrorsDialog( Constants.INSTANCE.modelEditor_message_file_parsing_errors(),
                                true,
                                result.getErrors(),
                                new Command() {
                                    @Override
                                    public void execute() {
                                        //return to the source tab
                                        setSelectedTab( EDITABLE_SOURCE_TAB );
                                        context.setParseStatus( DataModelerContext.ParseStatus.PARSE_ERRORS );
                                        updateEditorView( null );
                                        context.setDataObject( null );
                                    }
                                } );
                    } else {
                        //ok, we can reload the editor tab.
                        context.setParseStatus( DataModelerContext.ParseStatus.PARSED );
                        updateEditorView( result.getDataObject() );
                        context.setEditionStatus( DataModelerContext.EditionStatus.NO_CHANGES );
                        context.setDataObject( result.getDataObject() );
                        cleanSystemMessages( getCurrentMessageType() );
                    }
                }
            }, new DataModelerErrorCallback( Constants.INSTANCE.modelEditor_loading_error() ) ).updateDataObject( context.getDataObject(), getSource(), versionRecordManager.getCurrentPath() );
        } else {
            //no changes in the source tab
            if ( !isOverviewTabSelected() ) {
                context.setEditionStatus( DataModelerContext.EditionStatus.NO_CHANGES );
            }
            if ( context.isParseErrors() ) {
                //there are parse errors, the editor tab couldn't be loaded. (errors are already published)
                showParseErrorsDialog( Constants.INSTANCE.modelEditor_message_file_parsing_errors(),
                        false,
                        null,
                        new Command() {
                            @Override
                            public void execute() {
                                setSelectedTab( EDITABLE_SOURCE_TAB );
                            }
                        } );
            }
        }
    }
private void showParseErrorsDialog( final String message,
final boolean publishErrors,
final List<DataModelerError> errors,
final Command command ) {
if ( publishErrors && errors != null && !errors.isEmpty() ) {
publishSystemMessages( getCurrentMessageType(), true, errors );
}
YesNoCancelPopup yesNoCancelPopup = YesNoCancelPopup.newYesNoCancelPopup( CommonConstants.INSTANCE.Information(),
message,
new Command() {
@Override
public void execute() {
command.execute();
}
},
CommonConstants.INSTANCE.OK(),
null,
null,
null,
null
);
yesNoCancelPopup.setCloseVisible( false );
yesNoCancelPopup.show();
}
private boolean isOpen() {
return open;
}
    /**
     * CDI observer fired when a data object is deleted elsewhere in the same project. If the
     * deleted object is referenced by the object being edited the user is notified; otherwise
     * the deleted object is dropped from the local data model and the type lists are refreshed.
     */
    private void onDataObjectDeleted( @Observes DataObjectDeletedEvent event ) {
        //Only react when this editor is active, parsed, and the deleted object is not the one
        //currently being edited.
        if ( context != null &&
                event.isFrom( context.getCurrentProject() ) &&
                event.getCurrentDataObject() != null &&
                context.isParsed() &&
                isEditorTabSelected() &&
                context.getDataObject() != null &&
                !context.getDataObject().getClassName().equals( event.getCurrentDataObject().getClassName() ) ) {
            //check deleted object is referenced by current data object.
            if ( validatorService.isReferencedByCurrentObject( event.getCurrentDataObject(), context.getDataObject() ) ) {
                notification.fire( new NotificationEvent( Constants.INSTANCE.modelEditor_notification_dataObject_referenced_has_been_deleted( event.getCurrentDataObject().getClassName(), context.getDataObject().getClassName() ) ) );
            } else if ( !getDataModel().isExternal( event.getCurrentDataObject().getClassName() ) ) {
                getDataModel().removeDataObject( event.getCurrentDataObject().getClassName() );
                view.refreshTypeLists( true );
            }
        }
    }
    /**
     * CDI observer fired when a data object is created elsewhere in the same project. The new
     * object is added to the local data model (unless already present) so it becomes available
     * in the type lists.
     */
    private void onDataObjectCreated( @Observes DataObjectCreatedEvent event ) {
        if ( context != null &&
                event.isFrom( context.getCurrentProject() ) &&
                event.getCurrentDataObject() != null &&
                getDataModel() != null &&
                getDataModel().getDataObject( event.getCurrentDataObject().getClassName() ) == null ) {
            getDataModel().getDataObjects().add( event.getCurrentDataObject() );
            view.refreshTypeLists( true );
        }
    }
private void cleanSystemMessages( String currentMessageType ) {
UnpublishMessagesEvent unpublishMessage = new UnpublishMessagesEvent();
unpublishMessage.setShowSystemConsole( false );
unpublishMessage.setMessageType( currentMessageType );
unpublishMessage.setUserId( ( sessionInfo != null && sessionInfo.getIdentity() != null ) ? sessionInfo.getIdentity().getIdentifier() : null );
unpublishMessagesEvent.fire( unpublishMessage );
}
private void publishSystemMessages( String messageType,
boolean cleanExisting,
List<DataModelerError> errors ) {
PublishBatchMessagesEvent publishMessage = new PublishBatchMessagesEvent();
publishMessage.setCleanExisting( cleanExisting );
publishMessage.setMessageType( messageType );
publishMessage.setUserId( ( sessionInfo != null && sessionInfo.getIdentity() != null ) ? sessionInfo.getIdentity().getIdentifier() : null );
publishMessage.setPlace( PublishBaseEvent.Place.TOP );
SystemMessage systemMessage;
for ( DataModelerError error : errors ) {
systemMessage = new SystemMessage();
systemMessage.setMessageType( messageType );
systemMessage.setId( error.getId() );
systemMessage.setText( error.getMessage() );
systemMessage.setPath( error.getFile() );
systemMessage.setLevel( error.getLevel() );
systemMessage.setLine( error.getLine() );
systemMessage.setColumn( error.getColumn() );
publishMessage.getMessagesToPublish().add( systemMessage );
}
publishBatchMessagesEvent.fire( publishMessage );
}
    /**
     * Builds the editor menu bar: Save, Copy, Rename, Delete, Validate, the version history menu,
     * and one top-level entry per domain reported by the view.
     */
    protected void makeMenuBar() {
        //menus =
        menuBuilder
                .addSave( versionRecordManager.newSaveMenuItem( new Command() {
                    @Override
                    public void execute() {
                        onSave();
                    }
                } ) )
                .addCopy( new Command() {
                    @Override
                    public void execute() {
                        onCopy();
                    }
                } )
                .addRename( new Command() {
                    @Override
                    public void execute() {
                        onSafeRename();
                    }
                } )
                .addDelete( new Command() {
                    @Override
                    public void execute() {
                        onSafeDelete();
                    }
                } )
                .addValidate(
                        onValidate()
                )
                .addNewTopLevelMenu( versionRecordManager.buildMenu() );
        //One menu entry per available domain; selecting it switches the editor's domain view.
        for ( final String availableDomain : view.getAvailableDomains() ) {
            menuBuilder.addNewTopLevelMenu( MenuFactory.newTopLevelMenu( availableDomain )
                    .respondsWith( new Command() {
                        @Override public void execute() {
                            onShowDomain( availableDomain );
                        }
                    } )
                    .endMenu()
                    .build().getItems().get( 0 )
            );
        }
        menus = menuBuilder.build();
    }
private void onShowDomain( String domain ) {
view.showDomain( domain);
}
    /**
     * Creates a fresh editing context for this editor instance and asynchronously loads the base
     * property types from the server to initialize it.
     *
     * @param path the file being opened (currently unused here; kept for the caller's contract).
     */
    private void initContext( final ObservablePath path ) {
        context = new DataModelerContext( editorId );
        modelerService.call(
                new RemoteCallback<List<PropertyType>>() {
                    @Override
                    public void callback( List<PropertyType> baseTypes ) {
                        context.init( baseTypes );
                    }
                },
                new DataModelerErrorCallback( Constants.INSTANCE.modelEditor_propertyType_loading_error() )
        ).getBasePropertyTypes();
    }
    /** Clears the editing context when the editor is closed. */
    private void clearContext() {
        context.clear();
    }
    /** @return the message type used to group this editor's system messages. */
    private String getCurrentMessageType() {
        return currentMessageType;
    }
}
| |
package org.hisp.dhis.completeness;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.hisp.dhis.DhisSpringTest;
import org.hisp.dhis.dataelement.*;
import org.hisp.dhis.dataset.CompleteDataSetRegistration;
import org.hisp.dhis.dataset.CompleteDataSetRegistrationService;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetService;
import org.hisp.dhis.datavalue.DataValueService;
import org.hisp.dhis.external.location.LocationManager;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitGroup;
import org.hisp.dhis.organisationunit.OrganisationUnitGroupService;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.*;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import javax.annotation.Resource;
import static org.junit.Assert.*;
/**
* @author Lars Helge Overland
* @version $Id$
*/
@Ignore //TODO rewrite this test, takes too long
public class DataSetCompletenessServiceTest
extends DhisSpringTest
{
    // -------------------------------------------------------------------------
    // Injected services
    // -------------------------------------------------------------------------
    @Autowired
    private DataSetCompletenessEngine completenessEngine;
    @Autowired
    private DataSetCompletenessStore completenessStore;
    @Autowired
    private PeriodService periodService;
    @Autowired
    private OrganisationUnitService organisationUnitService;
    @Autowired
    private DataSetService dataSetService;
    @Autowired
    private CompleteDataSetRegistrationService registrationService;
    @Resource(name="locationManager")
    LocationManager locationManager;
    @Autowired
    private OrganisationUnitGroupService organisationUnitGroupService;
    @Autowired
    private DataElementService dataElementService;
    @Autowired
    private DataElementCategoryService categoryService;
    @Autowired
    private DataValueService dataValueService;
    @Autowired
    private DataSetCompletenessService registrationCompletenessService;
    // -------------------------------------------------------------------------
    // Fixture state, populated in setUpTest()
    // -------------------------------------------------------------------------
    // periodA/periodB are monthly (Jan/Feb 2000); periodC is the quarterly period covering Q1 2000.
    private PeriodType periodType;
    private Period periodA;
    private Period periodB;
    private Period periodC;
    private int periodIdA;
    private int periodIdC;
    // Organisation unit hierarchy: A is the root, B and C are its children,
    // E/F hang under B and G/H under C (see the diagram above the tests).
    private OrganisationUnit unitA;
    private OrganisationUnit unitB;
    private OrganisationUnit unitC;
    private OrganisationUnit unitD;
    private OrganisationUnit unitE;
    private OrganisationUnit unitF;
    private OrganisationUnit unitG;
    private OrganisationUnit unitH;
    // groupA = {A}, groupB = {A,B}, groupC = {E,F} (see setUpTest).
    private OrganisationUnitGroup groupA;
    private OrganisationUnitGroup groupB;
    private OrganisationUnitGroup groupC;
    private int unitIdA;
    private int unitIdB;
    private int unitIdC;
    // Ids of units A, B and C, used for the by-organisation-unit queries.
    private Collection<Integer> unitIdsA;
    private DataSet dataSetA;
    private DataSet dataSetB;
    private DataSet dataSetC;
    private int dataSetIdA;
    private DataElement dataElementA;
    private DataElement dataElementB;
    private DataElementCategoryOptionCombo categoryOptionCombo;
    // Registration dates before/after the data set deadline for each test month.
    private Date onTimeA;
    private Date tooLateA;
    private Date onTimeB;
    private Date tooLateB;
    private Set<Integer> groupIds = new HashSet<>();
// -------------------------------------------------------------------------
// Fixture
// -------------------------------------------------------------------------
    /**
     * Builds the shared fixture: two monthly periods plus the enclosing quarter, an eight-unit
     * organisation unit hierarchy, three unit groups, three data sets with compulsory data
     * element operands, and the on-time/too-late registration dates used by the tests.
     */
    @Override
    public void setUpTest()
    {
        setExternalTestDir( locationManager );
        categoryOptionCombo = categoryService.getDefaultDataElementCategoryOptionCombo();
        // Periods: Jan and Feb 2000 (monthly) and the quarter containing them.
        periodType = new MonthlyPeriodType();
        periodA = createPeriod( periodType, getDate( 2000, 1, 1 ), getDate( 2000, 1, 31 ) );
        periodB = createPeriod( periodType, getDate( 2000, 2, 1 ), getDate( 2000, 2, 28 ) );
        periodC = createPeriod( new QuarterlyPeriodType(), getDate( 2000, 1, 1 ), getDate( 2000, 3, 31 ) );
        periodIdA = periodService.addPeriod( periodA );
        periodService.addPeriod( periodB );
        periodIdC = periodService.addPeriod( periodC );
        // Organisation units; D stays outside the hierarchy rooted at A.
        unitA = createOrganisationUnit( 'A' );
        unitB = createOrganisationUnit( 'B' );
        unitC = createOrganisationUnit( 'C' );
        unitD = createOrganisationUnit( 'D' );
        unitE = createOrganisationUnit( 'E' );
        unitF = createOrganisationUnit( 'F' );
        unitG = createOrganisationUnit( 'G' );
        unitH = createOrganisationUnit( 'H' );
        // Wire both sides of the parent/child relationship.
        unitB.setParent( unitA );
        unitC.setParent( unitA );
        unitE.setParent( unitB );
        unitF.setParent( unitB );
        unitG.setParent( unitC );
        unitH.setParent( unitC );
        unitA.getChildren().add( unitB );
        unitA.getChildren().add( unitC );
        unitB.getChildren().add( unitE );
        unitB.getChildren().add( unitF );
        unitC.getChildren().add( unitG );
        unitC.getChildren().add( unitH );
        unitIdA = organisationUnitService.addOrganisationUnit( unitA );
        unitIdB = organisationUnitService.addOrganisationUnit( unitB );
        unitIdC = organisationUnitService.addOrganisationUnit( unitC );
        organisationUnitService.addOrganisationUnit( unitD );
        organisationUnitService.addOrganisationUnit( unitE );
        organisationUnitService.addOrganisationUnit( unitF );
        organisationUnitService.addOrganisationUnit( unitG );
        organisationUnitService.addOrganisationUnit( unitH );
        unitIdsA = new HashSet<>();
        unitIdsA.add( unitIdA );
        unitIdsA.add( unitIdB );
        unitIdsA.add( unitIdC );
        // Unit groups: groupA = {A}, groupB = {A,B}, groupC = {E,F}.
        groupA = createOrganisationUnitGroup( 'A' );
        groupB = createOrganisationUnitGroup( 'B' );
        groupC = createOrganisationUnitGroup( 'C' );
        groupA.addOrganisationUnit( unitA );
        groupB.addOrganisationUnit( unitA );
        groupB.addOrganisationUnit( unitB );
        groupC.addOrganisationUnit( unitE );
        groupC.addOrganisationUnit( unitF );
        organisationUnitGroupService.addOrganisationUnitGroup( groupA );
        organisationUnitGroupService.addOrganisationUnitGroup( groupB );
        organisationUnitGroupService.addOrganisationUnitGroup( groupC );
        // Data sets on the monthly period type; A and B carry compulsory operands.
        dataSetA = createDataSet( 'A', periodType );
        dataSetB = createDataSet( 'B', periodType );
        dataSetC = createDataSet( 'C', periodType );
        dataElementA = createDataElement( 'A' );
        dataElementB = createDataElement( 'B' );
        dataElementService.addDataElement( dataElementA );
        dataElementService.addDataElement( dataElementB );
        dataSetA.getCompulsoryDataElementOperands().add( new DataElementOperand( dataElementA, categoryOptionCombo ) );
        dataSetA.getCompulsoryDataElementOperands().add( new DataElementOperand( dataElementB, categoryOptionCombo ) );
        dataSetB.getCompulsoryDataElementOperands().add( new DataElementOperand( dataElementA, categoryOptionCombo ) );
        // Registration dates before/after the reporting deadline of each month.
        onTimeA = getDate( 2000, 2, 10 );
        tooLateA = getDate( 2000, 2, 25 );
        onTimeB = getDate( 2000, 3, 10 );
        tooLateB = getDate( 2000, 3, 25 );
    }
// -------------------------------------------------------------------------
// A
// B C
// E F G H
// -------------------------------------------------------------------------
// -------------------------------------------------------------------------
// Tests
// -------------------------------------------------------------------------
@Test
public void testGetPercentage()
{
DataSetCompletenessResult resultA = new DataSetCompletenessResult( dataSetA.getName(), 20, 15, 10 );
DataSetCompletenessResult resultB = new DataSetCompletenessResult( dataSetA.getName(), 0, 15, 10 );
assertEquals( 75.0, resultA.getPercentage(), DELTA );
assertEquals( 0.0, resultB.getPercentage(), DELTA );
assertEquals( 50.0, resultA.getPercentageOnTime(), DELTA );
assertEquals( 0.0, resultB.getPercentageOnTime(), DELTA );
}
// -------------------------------------------------------------------------
// Complete registration based completeness
// -------------------------------------------------------------------------
    /**
     * Completeness per data set under root unit A, where every data set has sources A and B.
     * Verifies both the monthly (periodA) and the quarterly (periodC) aggregation; the
     * registration for unit D must be ignored since D is outside A's hierarchy.
     */
    @Test
    public void testGetDataSetCompletenessByDataSetA()
    {
        dataSetA.getSources().add( unitA );
        dataSetA.getSources().add( unitB );
        dataSetB.getSources().add( unitA );
        dataSetB.getSources().add( unitB );
        dataSetC.getSources().add( unitA );
        dataSetC.getSources().add( unitB );
        dataSetService.addDataSet( dataSetA );
        dataSetService.addDataSet( dataSetB );
        dataSetService.addDataSet( dataSetC );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitA, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitB, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitA, null, tooLateB, "") );
        //Unit D is not under unit A, so this registration must not be counted below.
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitD, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetB, periodA, unitA, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetB, periodA, unitC, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetB, periodB, unitB, null, onTimeB, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetC, periodA, unitC, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetC, periodB, unitA, null, tooLateB, "") );
        //Monthly query: results are (name, expected sources, registrations, on-time registrations).
        Collection<DataSetCompletenessResult> results = registrationCompletenessService.getDataSetCompleteness( periodIdA, unitIdA, null );
        assertNotNull( results );
        assertEquals( 3, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetA.getName(), 2, 2, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetB.getName(), 2, 1, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetC.getName(), 2, 0, 0 ) ) );
        //Quarterly query aggregates all three months of Q1.
        results = registrationCompletenessService.getDataSetCompleteness( periodIdC, unitIdA, null );
        assertNotNull( results );
        assertEquals( 3, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetA.getName(), 6, 3, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetB.getName(), 6, 2, 1 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetC.getName(), 6, 1, 0 ) ) );
    }
    /**
     * Completeness per data set where the data sets have differing source lists (A has three
     * sources, B and C have two) and a mix of on-time and too-late registrations.
     */
    @Test
    public void testGetDataSetCompletenessByDataSetB()
    {
        dataSetA.getSources().add( unitA );
        dataSetA.getSources().add( unitB );
        dataSetA.getSources().add( unitC );
        dataSetB.getSources().add( unitB );
        dataSetB.getSources().add( unitC );
        dataSetC.getSources().add( unitB );
        dataSetC.getSources().add( unitC );
        dataSetService.addDataSet( dataSetA );
        dataSetService.addDataSet( dataSetB );
        dataSetService.addDataSet( dataSetC );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitA, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitA, null, onTimeB, "") );
        //Unit D is outside unit A's hierarchy, so this registration is ignored by the queries.
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitD, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetB, periodA, unitA, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetB, periodA, unitB, null, onTimeA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetB, periodA, unitC, null, onTimeA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetB, periodB, unitC, null, tooLateB, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetC, periodA, unitA, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetC, periodA, unitB, null, onTimeA, "") );
        //Monthly query for periodA under unit A.
        Collection<DataSetCompletenessResult> results = registrationCompletenessService.getDataSetCompleteness( periodIdA, unitIdA, null );
        assertNotNull( results );
        assertEquals( 3, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetA.getName(), 3, 1, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetB.getName(), 2, 2, 2 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetC.getName(), 2, 1, 1 ) ) );
        //Quarterly query aggregates over the whole of Q1.
        results = registrationCompletenessService.getDataSetCompleteness( periodIdC, unitIdA, null );
        assertNotNull( results );
        assertEquals( 3, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetA.getName(), 9, 2, 1 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetB.getName(), 6, 3, 2 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetC.getName(), 6, 1, 1 ) ) );
    }
    /**
     * Completeness for a single data set whose sources span the whole hierarchy
     * (A, B, C, E, F, G, H); verifies the counts roll up across two levels of children.
     */
    @Test
    public void testGetDataSetCompletenessByDataSetC()
    {
        dataSetA.getSources().add( unitA );
        dataSetA.getSources().add( unitB );
        dataSetA.getSources().add( unitC );
        dataSetA.getSources().add( unitE );
        dataSetA.getSources().add( unitF );
        dataSetA.getSources().add( unitG );
        dataSetA.getSources().add( unitH );
        dataSetService.addDataSet( dataSetA );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitB, null, onTimeA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitC, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitE, null, onTimeA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitF, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitG, null, onTimeA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitE, null, onTimeB, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitF, null, onTimeB, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitG, null, tooLateB, "") );
        //Monthly: 7 sources, 5 registered, 3 on time.
        Collection<DataSetCompletenessResult> results = registrationCompletenessService.getDataSetCompleteness( periodIdA, unitIdA, null );
        assertNotNull( results );
        assertEquals( 1, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetA.getName(), 7, 5, 3 ) ) );
        //Quarterly: 7 sources x 3 months = 21 expected registrations.
        results = registrationCompletenessService.getDataSetCompleteness( periodIdC, unitIdA, null );
        assertNotNull( results );
        assertEquals( 1, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetA.getName(), 21, 8, 5 ) ) );
    }
    /**
     * Completeness per data set filtered by organisation unit groups. With groupA = {A} and
     * groupB = {A,B}, the intersection with A's hierarchy is the same unit set in both queries,
     * so both group filters must yield identical results.
     */
    @Test
    public void testGetDataSetCompletenessByDataSetD()
    {
        dataSetA.getSources().add( unitA );
        dataSetA.getSources().add( unitB );
        dataSetB.getSources().add( unitA );
        dataSetB.getSources().add( unitB );
        dataSetC.getSources().add( unitA );
        dataSetC.getSources().add( unitB );
        dataSetService.addDataSet( dataSetA );
        dataSetService.addDataSet( dataSetB );
        dataSetService.addDataSet( dataSetC );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitA, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitB, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitA, null, tooLateB, "") );
        //Unit D is outside unit A's hierarchy and must not influence the results.
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitD, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetB, periodA, unitA, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetB, periodA, unitC, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetB, periodB, unitB, null, onTimeB, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetC, periodA, unitC, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetC, periodB, unitA, null, tooLateB, "") );
        //Filter by groupA only.
        groupIds.clear();
        groupIds.add( groupA.getId() );
        Collection<DataSetCompletenessResult> results = registrationCompletenessService.getDataSetCompleteness( periodIdC, unitIdA, groupIds );
        assertNotNull( results );
        assertEquals( 3, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetA.getName(), 3, 2, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetB.getName(), 3, 1, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetC.getName(), 3, 1, 0 ) ) );
        //Filter by groupA and groupB; the effective unit set is unchanged, so results match above.
        groupIds.clear();
        groupIds.add( groupA.getId() );
        groupIds.add( groupB.getId() );
        results = registrationCompletenessService.getDataSetCompleteness( periodIdC, unitIdA, groupIds );
        assertNotNull( results );
        assertEquals( 3, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetA.getName(), 3, 2, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetB.getName(), 3, 1, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( dataSetC.getName(), 3, 1, 0 ) ) );
    }
    /**
     * Completeness per organisation unit for a single data set sourced at the leaf units
     * (E, F, G, H); results for B and C roll up from their children, and A from both.
     */
    @Test
    public void testGetDataSetCompletenessByOrganisationUnitA()
    {
        dataSetA.getSources().add( unitE );
        dataSetA.getSources().add( unitF );
        dataSetA.getSources().add( unitG );
        dataSetA.getSources().add( unitH );
        dataSetIdA = dataSetService.addDataSet( dataSetA );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitE, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitF, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitG, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitE, null, onTimeA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitF, null, onTimeA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitG, null, onTimeA, "") );
        //Monthly query for periodA over units A, B and C.
        Collection<DataSetCompletenessResult> results = registrationCompletenessService.getDataSetCompleteness( periodIdA, unitIdsA, dataSetIdA, null );
        assertNotNull( results );
        assertEquals( 3, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( unitB.getName(), 2, 2, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( unitC.getName(), 2, 1, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( unitA.getName(), 4, 3, 0 ) ) );
        //Quarterly query; expected counts triple per unit (three months in Q1).
        results = registrationCompletenessService.getDataSetCompleteness( periodIdC, unitIdsA, dataSetIdA, null );
        assertNotNull( results );
        assertEquals( 3, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( unitB.getName(), 6, 4, 2 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( unitC.getName(), 6, 2, 1 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( unitA.getName(), 12, 6, 3 ) ) );
    }
    /**
     * Completeness per organisation unit where the data set is sourced at E, F and G only, and a
     * registration also exists for non-source unit H; verifies per-unit expected counts follow
     * the source list while registrations roll up through the hierarchy.
     */
    @Test
    public void testGetDataSetCompletenessByOrganisationUnitB()
    {
        dataSetA.getSources().add( unitE );
        dataSetA.getSources().add( unitF );
        dataSetA.getSources().add( unitG );
        dataSetIdA = dataSetService.addDataSet( dataSetA );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitE, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitG, null, tooLateA, "") );
        //Unit H is registered but is not a source of the data set.
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitH, null, tooLateA, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitE, null, onTimeB, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitG, null, onTimeB, "") );
        registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitH, null, onTimeB, "") );
        //Monthly query for periodA over units A, B and C.
        Collection<DataSetCompletenessResult> results = registrationCompletenessService.getDataSetCompleteness( periodIdA, unitIdsA, dataSetIdA, null );
        assertNotNull( results );
        assertEquals( 3, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( unitB.getName(), 2, 1, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( unitC.getName(), 1, 1, 0 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( unitA.getName(), 3, 2, 0 ) ) );
        //Quarterly query aggregating all of Q1.
        results = registrationCompletenessService.getDataSetCompleteness( periodIdC, unitIdsA, dataSetIdA, null );
        assertNotNull( results );
        assertEquals( 3, results.size() );
        assertTrue( results.contains( new DataSetCompletenessResult( unitB.getName(), 6, 2, 1 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( unitC.getName(), 3, 2, 1 ) ) );
        assertTrue( results.contains( new DataSetCompletenessResult( unitA.getName(), 9, 4, 2 ) ) );
    }
@Test
// Mixed timeliness in BOTH periods: E and G register on time, F and H
// register late, so the on-time column is non-zero even for period A.
public void testGetDataSetCompletenessByOrganisationUnitC()
{
dataSetA.getSources().add( unitE );
dataSetA.getSources().add( unitF );
dataSetA.getSources().add( unitG );
dataSetA.getSources().add( unitH );
dataSetIdA = dataSetService.addDataSet( dataSetA );
// Period A: E/G on time, F/H late.
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitE, null, onTimeA, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitF, null, tooLateA, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitG, null, onTimeA, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitH, null, tooLateA, "") );
// Period B: same pattern with the period-B dates.
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitE, null, onTimeB, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitF, null, tooLateB, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitG, null, onTimeB, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitH, null, tooLateB, "") );
Collection<DataSetCompletenessResult> results = registrationCompletenessService.getDataSetCompleteness( periodIdA, unitIdsA, dataSetIdA, null );
assertNotNull( results );
assertEquals( 3, results.size() );
assertTrue( results.contains( new DataSetCompletenessResult( unitB.getName(), 2, 2, 1 ) ) );
assertTrue( results.contains( new DataSetCompletenessResult( unitC.getName(), 2, 2, 1 ) ) );
assertTrue( results.contains( new DataSetCompletenessResult( unitA.getName(), 4, 4, 2 ) ) );
// Period span aggregation over periodIdC.
results = registrationCompletenessService.getDataSetCompleteness( periodIdC, unitIdsA, dataSetIdA, null );
assertNotNull( results );
assertEquals( 3, results.size() );
assertTrue( results.contains( new DataSetCompletenessResult( unitB.getName(), 6, 4, 2 ) ) );
assertTrue( results.contains( new DataSetCompletenessResult( unitC.getName(), 6, 4, 2 ) ) );
assertTrue( results.contains( new DataSetCompletenessResult( unitA.getName(), 12, 8, 4 ) ) );
}
@Test
// Same registrations as test A, but the query is restricted to an
// organisation unit group (groupC), so only two result rows are expected.
public void testGetDataSetCompletenessByOrganisationUnitD()
{
dataSetA.getSources().add( unitE );
dataSetA.getSources().add( unitF );
dataSetA.getSources().add( unitG );
dataSetA.getSources().add( unitH );
dataSetIdA = dataSetService.addDataSet( dataSetA );
// Period A late, period B on time -- identical setup to test A.
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitE, null, tooLateA, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitF, null, tooLateA, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodA, unitG, null, tooLateA, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitE, null, onTimeA, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitF, null, onTimeA, "") );
registrationService.saveCompleteDataSetRegistration( new CompleteDataSetRegistration( dataSetA, periodB, unitG, null, onTimeA, "") );
// Filter the completeness query by group C only.
groupIds.clear();
groupIds.add( groupC.getId() );
Collection<DataSetCompletenessResult> results = registrationCompletenessService.getDataSetCompleteness( periodIdA, unitIdsA, dataSetIdA, groupIds );
assertNotNull( results );
assertEquals( 2, results.size() );
assertTrue( results.contains( new DataSetCompletenessResult( unitB.getName(), 2, 2, 0 ) ) );
assertTrue( results.contains( new DataSetCompletenessResult( unitA.getName(), 2, 2, 0 ) ) );
// Period span aggregation with the same group filter.
results = registrationCompletenessService.getDataSetCompleteness( periodIdC, unitIdsA, dataSetIdA, groupIds );
assertNotNull( results );
assertEquals( 2, results.size() );
assertTrue( results.contains( new DataSetCompletenessResult( unitB.getName(), 6, 4, 2 ) ) );
assertTrue( results.contains( new DataSetCompletenessResult( unitA.getName(), 6, 4, 2 ) ) );
}
}
| |
package org.xtuml.bp.welcome.test;
//=====================================================================
//
//File: $RCSfile: WelcomePageTestMetamodel.java,v $
//Version: $Revision: 1.6 $
//Modified: $Date: 2013/01/10 23:05:14 $
//
//(c) Copyright 2004-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
import java.util.Properties;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.xtuml.bp.core.XtUMLNature;
import org.xtuml.bp.core.common.PersistableModelComponent;
import org.xtuml.bp.core.common.PersistenceManager;
import org.xtuml.bp.test.TestUtil;
import org.xtuml.bp.test.common.TestingUtilities;
import org.xtuml.bp.ui.explorer.ExplorerView;
import org.xtuml.bp.utilities.ui.TreeUtilities;
import org.xtuml.bp.welcome.gettingstarted.SampleProjectGettingStartedAction;
import junit.framework.TestCase;
/**
 * Exercises the welcome-page "Getting Started" action for the single-file
 * xtUML metamodel sample project: project creation, the overwrite prompt
 * (declining and accepting), and a full load/persist round trip that must
 * not produce problems or orphaned tree elements.
 */
public class WelcomePageTestMetamodel extends TestCase {
    private static IProject project;
    private static IViewPart g_view = null;
    // This test is setup so we can swap-in a different test model
    // if we choose to do so. All we should need to do is change the
    // name of the model here.
    private final String ProjectName = "xtUML_Metamodel";
    // Files that are spot-checked for existence after a successful import.
    private String[] expectedXtUMLFiles = {
        "models/" + ProjectName + "/xtUML_Metamodel.xtuml",
        "models/" + ProjectName + "/ooaofooa/Activity/Activity.xtuml",
        "models/" + ProjectName + "/ooaofooa/Domain/Domain.xtuml",
        "models/" + ProjectName + "/ooaofooa/Component/Component Nesting/Component Nesting.xtuml",
        "models/" + ProjectName + "/ooaofooa/Value/Value.xtuml"};
    private String[] expectedFiles = expectedXtUMLFiles;
    public WelcomePageTestMetamodel() {
        super();
    }
    /**
     * Single entry point that enforces the ordering of the sub-tests:
     * creation must run before the overwrite tests, and "no overwrite" must
     * run before "overwrite" because it leaves the dummy marker file behind.
     */
    public void testWelcomePageMetamodelProject() throws CoreException, Exception {
        dotestProjectCreation();
        dotestNoProjectOverwrite();
        dotestProjectOverwrite();
        dotestImportLoadPersist();
    }
    /** Runs the getting-started action that imports the sample model as a single file. */
    public void runSingleFileGettingStartedAction() {
        SampleProjectGettingStartedAction action = new SampleProjectGettingStartedAction();
        Properties props = new Properties();
        props.put("model", ProjectName);
        props.put("SingleFileModel", "true");
        action.run(null, props);
    }
    /**
     * Asserts that the named project exists in the workspace and is open,
     * caching it in the static {@link #project} field for later checks.
     *
     * @param projectName workspace name of the project to look up
     * @return true when the project exists and is open (assertions fail otherwise)
     */
    public boolean projectExists(String projectName) {
        boolean projectExists = false;
        project = ResourcesPlugin.getWorkspace().getRoot().getProject(
                projectName);
        projectExists = project.exists();
        assertTrue("Project: " + projectName + " does not exist.",
                projectExists);
        projectExists = project.isOpen();
        assertTrue("Project: " + projectName + " is not open.", projectExists);
        return projectExists;
    }
    /** Asserts that the given project carries the xtUML nature. */
    public void isxtUMLProject(IProject project) {
        assertTrue("Project: " + project.getName()
                + " is not an xtUML project.", XtUMLNature.hasNature(project));
    }
    /** Spot checks that the expected model files exist in the imported project. */
    public void containsProjectMembers() {
        for (int i = 0; i < expectedFiles.length; i++) {
            IFile file = project.getFile(expectedFiles[i]);
            assertTrue("Expected file: " + file.getName() + " does not exist.",
                    file.exists());
        }
    }
    /** Verifies both the project's existence and its expected members. */
    public void verifyProjectCreated() {
        boolean projectExists = projectExists(ProjectName);
        if (projectExists)
            containsProjectMembers();
    }
    /** Forces the workbench window to the foreground so UI events are delivered. */
    public void raiseWorkbench() {
        Shell s = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell();
        s.forceActive();
        s.forceFocus();
    }
    public void dotestProjectCreation() {
        runSingleFileGettingStartedAction();
        // Give the import time to work
        TestUtil.sleepWithDispatchOfEvents(7000);
        verifyProjectCreated();
        raiseWorkbench();
    }
    /**
     * Creates a marker file in the existing project, reruns the action and
     * answers "No" to the overwrite prompt; the marker must survive.
     */
    public void dotestNoProjectOverwrite() {
        IFile dummyFile = project.getFile("dummyFile");
        IFile existingFile = project.getFile(expectedXtUMLFiles[0]);
        try {
            dummyFile.create(existingFile.getContents(), IResource.REPLACE, null);
        } catch (CoreException ce) {
            // Include the cause so a failure here is diagnosable.
            fail("Failed to create dummy file: " + ce.getMessage());
        }
        if (!dummyFile.exists()) {
            fail("Failed to create the dummy file.");
        }
        TestUtil.selectButtonInDialog(2000, "No");
        runSingleFileGettingStartedAction();
        // We said not to overwrite, so the dummy file should still be there
        assertTrue("The project was overwritten when it shouldn't have been.",
                dummyFile.exists());
    }
    /**
     * Reruns the action and answers "Yes" to the overwrite prompt; the
     * marker file left by {@link #dotestNoProjectOverwrite()} must be gone.
     */
    public void dotestProjectOverwrite() throws Exception {
        IFile dummyFile = project.getFile("dummyFile");
        // Make sure the marker file is there.
        assertTrue("The dummy file for testing doesn't exist.", dummyFile.exists());
        TestUtil.selectButtonInDialog(1000, "Yes");
        runSingleFileGettingStartedAction();
        // Give the import time to work
        TestUtil.sleepWithDispatchOfEvents(5000);
        // We said to overwrite, so the dummy file should not be there
        assertFalse("The project was not overwritten when it should have been.",
                dummyFile.exists());
        TestingUtilities.deleteProject(ProjectName);
    }
    /**
     * Imports the project, loads the entire model into memory, persists it
     * back out, and verifies no errors at each step before deleting the
     * project again.
     */
    public void dotestImportLoadPersist() throws Exception {
        int numImports = 1;
        for (int i = 0; i < numImports; i++) {
            System.out.println("Import number: " + String.valueOf(i + 1));
            runSingleFileGettingStartedAction();
            TestingUtilities.allowJobCompletion();
            raiseWorkbench();
            verifyProjectCreated();
            final IProject project = getProject(ProjectName);
            checkForErrors();
            // load and persist
            PersistableModelComponent pmc = PersistenceManager.getRootComponent(project);
            pmc.loadComponentAndChildren(new NullProgressMonitor());
            pmc.persistSelfAndChildren();
            checkForErrors();
            TestingUtilities.deleteProject(ProjectName);
        }
    }
    /**
     * Opens the Problems view and the Model Explorer, refreshes and fully
     * expands the explorer tree, and fails if orphaned elements are present.
     */
    private void checkForErrors() {
        // Check the problems view
        g_view = selectView(project, "org.eclipse.ui.views.ProblemView");
        // Check the explorer view for orphaned elements
        ExplorerView view = null;
        try {
            view = (ExplorerView) PlatformUI.getWorkbench()
                    .getActiveWorkbenchWindow().getActivePage().showView(
                            "org.xtuml.bp.ui.explorer.ExplorerView");
        } catch (PartInitException e) {
            // Previously swallowed, which guaranteed an NPE below whenever
            // the view could not open; fail with the real cause instead.
            fail("Failed to open the Model Explorer view: " + e.getMessage());
        }
        assertNotNull("Model Explorer view was not opened.", view);
        view.getTreeViewer().refresh();
        // Drain pending UI events so the refresh takes effect before expanding.
        while (PlatformUI.getWorkbench().getDisplay().readAndDispatch());
        view.getTreeViewer().expandAll();
        while (PlatformUI.getWorkbench().getDisplay().readAndDispatch());
        TreeItem topItem = view.getTreeViewer().getTree().getTopItem();
        TreeItem[] orphaned = TreeUtilities.getOrphanedElementsFromTree(topItem);
        if (orphaned.length > 0) {
            String elements = TreeUtilities.getTextResultForOrphanedElementList(orphaned);
            fail("Orphaned elements are present: " + elements);
        }
    }
    /** Looks up a project by name, asserting that it exists. */
    private IProject getProject(String name) {
        IProject project = ResourcesPlugin.getWorkspace().getRoot().getProject(
                name);
        assertTrue(project.exists());
        return project;
    }
    /**
     * Shows the given view in the active workbench page and returns it,
     * failing the test if the view cannot be opened.
     */
    private IViewPart selectView(final IProject project, final String viewName) {
        g_view = null;
        Runnable r = new Runnable() {
            public void run() {
                IWorkbenchPage page = PlatformUI.getWorkbench()
                        .getActiveWorkbenchWindow().getActivePage();
                try {
                    g_view = page.showView(viewName);
                } catch (PartInitException e) {
                    fail("Failed to open the " + viewName + " view");
                }
            }
        };
        // NOTE(review): the runnable is executed synchronously on the calling
        // thread; presumably these tests already run on the UI thread.
        r.run();
        assertTrue("Unable to select view: " + viewName, g_view != null);
        return g_view;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse.type;
import java.math.BigDecimal;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.calcite.rel.RelNode;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.common.type.TimestampTZUtil;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.optimizer.ConstantPropagateProcFactory;
import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSubquerySemanticException;
import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.QBSubQueryParseInfo;
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnListDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeSubQueryDesc;
import org.apache.hadoop.hive.ql.plan.SubqueryType;
import org.apache.hadoop.hive.ql.udf.SettableUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualNS;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqualNS;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFStruct;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.io.NullWritable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Expression factory for Hive {@link ExprNodeDesc}.
*/
public class ExprNodeDescExprFactory extends ExprFactory<ExprNodeDesc> {
private static final Logger LOG = LoggerFactory.getLogger(ExprNodeDescExprFactory.class);
/**
 * {@inheritDoc}
 */
@Override
protected boolean isExprInstance(Object o) {
  // Equivalent to "o instanceof ExprNodeDesc" (null yields false).
  return ExprNodeDesc.class.isInstance(o);
}
/**
 * {@inheritDoc}
 *
 * <p>Constant columns whose values are expressible with Java primitives
 * (scalar, list, map or all-primitive struct) are folded into constant
 * descriptors; anything else becomes a plain column reference.
 */
@Override
protected ExprNodeDesc toExpr(ColumnInfo colInfo, RowResolver rowResolver, int offset)
    throws SemanticException {
  ObjectInspector oi = colInfo.getObjectInspector();
  if (oi instanceof ConstantObjectInspector) {
    if (oi instanceof PrimitiveObjectInspector) {
      return toPrimitiveConstDesc(colInfo, oi);
    }
    if (oi instanceof ListObjectInspector) {
      ObjectInspector elementOI = ((ListObjectInspector) oi).getListElementObjectInspector();
      if (elementOI instanceof PrimitiveObjectInspector) {
        return toListConstDesc(colInfo, oi, elementOI);
      }
    }
    if (oi instanceof MapObjectInspector) {
      ObjectInspector keyOI = ((MapObjectInspector) oi).getMapKeyObjectInspector();
      ObjectInspector valueOI = ((MapObjectInspector) oi).getMapValueObjectInspector();
      if (keyOI instanceof PrimitiveObjectInspector && valueOI instanceof PrimitiveObjectInspector) {
        return toMapConstDesc(colInfo, oi, keyOI, valueOI);
      }
    }
    if (oi instanceof StructObjectInspector) {
      List<? extends StructField> fields = ((StructObjectInspector) oi).getAllStructFieldRefs();
      boolean allPrimitive = true;
      for (StructField field : fields) {
        allPrimitive &= field.getFieldObjectInspector() instanceof PrimitiveObjectInspector;
      }
      if (allPrimitive) {
        return toStructConstDesc(colInfo, oi, fields);
      }
    }
  }
  // Non-constant columns and constants with non-primitive components.
  ExprNodeColumnDesc columnDesc = new ExprNodeColumnDesc(colInfo);
  columnDesc.setSkewedCol(colInfo.isSkewedCol());
  return columnDesc;
}
/** Folds a constant primitive column into a constant descriptor. */
private static ExprNodeConstantDesc toPrimitiveConstDesc(ColumnInfo colInfo, ObjectInspector inspector) {
  Object writable = ((ConstantObjectInspector) inspector).getWritableConstantValue();
  Object javaValue = ((PrimitiveObjectInspector) inspector).getPrimitiveJavaObject(writable);
  ExprNodeConstantDesc desc = new ExprNodeConstantDesc(colInfo.getType(), javaValue);
  // Remember the column/table this constant was folded from.
  desc.setFoldedFromCol(colInfo.getInternalName());
  desc.setFoldedFromTab(colInfo.getTabAlias());
  return desc;
}
/** Folds a constant list-of-primitives column into a constant descriptor. */
private static ExprNodeConstantDesc toListConstDesc(ColumnInfo colInfo, ObjectInspector inspector,
    ObjectInspector listElementOI) {
  PrimitiveObjectInspector elementPoi = (PrimitiveObjectInspector) listElementOI;
  List<?> writables = (List<?>) ((ConstantObjectInspector) inspector).getWritableConstantValue();
  // Convert each writable element to its Java representation.
  List<Object> javaValues = new ArrayList<Object>(writables.size());
  for (Object writable : writables) {
    javaValues.add(elementPoi.getPrimitiveJavaObject(writable));
  }
  ExprNodeConstantDesc desc = new ExprNodeConstantDesc(colInfo.getType(), javaValues);
  desc.setFoldedFromCol(colInfo.getInternalName());
  desc.setFoldedFromTab(colInfo.getTabAlias());
  return desc;
}
/** Folds a constant map with primitive keys and values into a constant descriptor. */
private static ExprNodeConstantDesc toMapConstDesc(ColumnInfo colInfo, ObjectInspector inspector,
    ObjectInspector keyOI, ObjectInspector valueOI) {
  PrimitiveObjectInspector keyPoi = (PrimitiveObjectInspector) keyOI;
  PrimitiveObjectInspector valuePoi = (PrimitiveObjectInspector) valueOI;
  Map<?, ?> writables = (Map<?, ?>) ((ConstantObjectInspector) inspector).getWritableConstantValue();
  // LinkedHashMap keeps the original entry order of the constant.
  Map<Object, Object> javaValues = new LinkedHashMap<Object, Object>();
  for (Map.Entry<?, ?> entry : writables.entrySet()) {
    Object key = keyPoi.getPrimitiveJavaObject(entry.getKey());
    Object value = valuePoi.getPrimitiveJavaObject(entry.getValue());
    javaValues.put(key, value);
  }
  ExprNodeConstantDesc desc = new ExprNodeConstantDesc(colInfo.getType(), javaValues);
  desc.setFoldedFromCol(colInfo.getInternalName());
  desc.setFoldedFromTab(colInfo.getTabAlias());
  return desc;
}
/** Folds a constant struct whose fields are all primitive into a constant descriptor. */
private static ExprNodeConstantDesc toStructConstDesc(ColumnInfo colInfo, ObjectInspector inspector,
    List<? extends StructField> fields) {
  List<?> writables = (List<?>) ((ConstantObjectInspector) inspector).getWritableConstantValue();
  List<Object> javaValues = new ArrayList<Object>(writables.size());
  // Field i's inspector converts value i; the two lists are parallel.
  int fieldIndex = 0;
  for (Object writable : writables) {
    PrimitiveObjectInspector fieldPoi =
        (PrimitiveObjectInspector) fields.get(fieldIndex++).getFieldObjectInspector();
    javaValues.add(fieldPoi.getPrimitiveJavaObject(writable));
  }
  ExprNodeConstantDesc desc = new ExprNodeConstantDesc(colInfo.getType(), javaValues);
  desc.setFoldedFromCol(colInfo.getInternalName());
  desc.setFoldedFromTab(colInfo.getTabAlias());
  return desc;
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeColumnDesc createColumnRefExpr(ColumnInfo colInfo, RowResolver rowResolver, int offset) {
  // The resolver and offset are not needed to build the reference here.
  ExprNodeColumnDesc columnRef = new ExprNodeColumnDesc(colInfo);
  return columnRef;
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeColumnDesc createColumnRefExpr(ColumnInfo colInfo, List<RowResolver> rowResolverList) {
  // The resolver list is not needed to build the reference here.
  ExprNodeColumnDesc columnRef = new ExprNodeColumnDesc(colInfo);
  return columnRef;
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createNullConstantExpr() {
  // NULL is modeled with the type mapped from NullWritable and a null value.
  return new ExprNodeConstantDesc(
      TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class), null);
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprDynamicParamDesc createDynamicParamExpr(int index) {
  // A dynamic parameter has no known value yet; type comes from NullWritable.
  return new ExprDynamicParamDesc(
      TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class), index, null);
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createBooleanConstantExpr(String value) {
  // A null literal stays null; anything else parses via Boolean.valueOf.
  Boolean parsed;
  if (value == null) {
    parsed = null;
  } else {
    parsed = Boolean.valueOf(value);
  }
  return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, parsed);
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createBigintConstantExpr(String value) {
  // Boxed long, same as the original two-step form.
  return new ExprNodeConstantDesc(Long.valueOf(value));
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createIntConstantExpr(String value) {
  // Boxed int, same as the original two-step form.
  return new ExprNodeConstantDesc(Integer.valueOf(value));
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createSmallintConstantExpr(String value) {
  // Boxed short, same as the original two-step form.
  return new ExprNodeConstantDesc(Short.valueOf(value));
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createTinyintConstantExpr(String value) {
  // Boxed byte, same as the original two-step form.
  return new ExprNodeConstantDesc(Byte.valueOf(value));
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createFloatConstantExpr(String value) {
  // Boxed float, same as the original two-step form.
  return new ExprNodeConstantDesc(Float.valueOf(value));
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createDoubleConstantExpr(String value) {
  // Boxed double, same as the original two-step form.
  return new ExprNodeConstantDesc(Double.valueOf(value));
}
/**
 * {@inheritDoc}
 *
 * @return the constant descriptor, or null when the literal cannot be
 *         parsed as a decimal and null results are not allowed
 */
@Override
protected ExprNodeConstantDesc createDecimalConstantExpr(String value, boolean allowNullValueConstantExpr) {
  HiveDecimal decimal = HiveDecimal.create(value);
  if (decimal == null && !allowNullValueConstantExpr) {
    return null;
  }
  // Precision/scale of the type are derived from the parsed value.
  return new ExprNodeConstantDesc(adjustType(decimal), decimal);
}
@Override
protected TypeInfo adjustConstantType(PrimitiveTypeInfo targetType, Object constantValue) {
  // Only decimals need their type tightened to the value's precision/scale.
  return constantValue instanceof HiveDecimal
      ? adjustType((HiveDecimal) constantValue)
      : targetType;
}
/** Derives a decimal type whose precision/scale match the given value. */
private DecimalTypeInfo adjustType(HiveDecimal hd) {
  // Note: the normalize() call with rounding in HiveDecimal will currently reduce the
  // precision and scale of the value by throwing away trailing zeroes. This may or may
  // not be desirable for the literals; however, this used to be the default behavior
  // for explicit decimal literals (e.g. 1.0BD), so we keep this behavior for now.
  if (hd == null) {
    // Fallback for an unparseable literal: the smallest decimal type.
    return TypeInfoFactory.getDecimalTypeInfo(1, 0);
  }
  return TypeInfoFactory.getDecimalTypeInfo(hd.precision(), hd.scale());
}
/**
 * {@inheritDoc}
 *
 * Attempts to reinterpret {@code constantValue} in the target primitive type
 * so that a comparison can be done in that type. Returns the converted value,
 * the original value when no (safe) conversion applies, or null when the
 * constant cannot be narrowed for an equality comparison.
 */
@Override
protected Object interpretConstantAsPrimitive(PrimitiveTypeInfo targetType, Object constantValue,
PrimitiveTypeInfo sourceType, boolean isEqual) {
// Numeric/string constants: narrow exactly into the target numeric type.
// The *ValueExact calls throw ArithmeticException on overflow/fraction loss.
if (constantValue instanceof Number || constantValue instanceof String) {
try {
PrimitiveTypeEntry primitiveTypeEntry = targetType.getPrimitiveTypeEntry();
if (PrimitiveObjectInspectorUtils.intTypeEntry.equals(primitiveTypeEntry)) {
return toBigDecimal(constantValue.toString()).intValueExact();
} else if (PrimitiveObjectInspectorUtils.longTypeEntry.equals(primitiveTypeEntry)) {
return toBigDecimal(constantValue.toString()).longValueExact();
} else if (PrimitiveObjectInspectorUtils.doubleTypeEntry.equals(primitiveTypeEntry)) {
return Double.valueOf(constantValue.toString());
} else if (PrimitiveObjectInspectorUtils.floatTypeEntry.equals(primitiveTypeEntry)) {
return Float.valueOf(constantValue.toString());
} else if (PrimitiveObjectInspectorUtils.byteTypeEntry.equals(primitiveTypeEntry)) {
return toBigDecimal(constantValue.toString()).byteValueExact();
} else if (PrimitiveObjectInspectorUtils.shortTypeEntry.equals(primitiveTypeEntry)) {
return toBigDecimal(constantValue.toString()).shortValueExact();
} else if (PrimitiveObjectInspectorUtils.decimalTypeEntry.equals(primitiveTypeEntry)) {
return HiveDecimal.create(constantValue.toString());
}
} catch (NumberFormatException | ArithmeticException nfe) {
// Narrowing failed (not a number, or does not fit the target type).
if (!isEqual && (constantValue instanceof Number ||
NumberUtils.isNumber(constantValue.toString()))) {
// The target is a number, if constantToInterpret can be interpreted as a number,
// return the constantToInterpret directly, GenericUDFBaseCompare will do
// type conversion for us.
return constantValue;
}
// For equality, a constant that cannot be represented in the target
// type can never match; signal this by returning null.
LOG.trace("Failed to narrow type of constant", nfe);
return null;
}
}
// Comparison of decimal and float/double happens in float/double.
if (constantValue instanceof HiveDecimal) {
HiveDecimal hiveDecimal = (HiveDecimal) constantValue;
PrimitiveTypeEntry primitiveTypeEntry = targetType.getPrimitiveTypeEntry();
if (PrimitiveObjectInspectorUtils.doubleTypeEntry.equals(primitiveTypeEntry)) {
return hiveDecimal.doubleValue();
} else if (PrimitiveObjectInspectorUtils.floatTypeEntry.equals(primitiveTypeEntry)) {
return hiveDecimal.floatValue();
}
// Any other target keeps the decimal value unchanged.
return hiveDecimal;
}
String constTypeInfoName = sourceType.getTypeName();
if (constTypeInfoName.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)) {
// because a comparison against a "string" will happen in "string" type.
// to avoid unintentional comparisons in "string"
// constants which are representing char/varchar values must be converted to the
// appropriate type.
if (targetType instanceof CharTypeInfo) {
final String constValue = constantValue.toString();
final int length = TypeInfoUtils.getCharacterLengthForType(targetType);
HiveChar newValue = new HiveChar(constValue, length);
// If truncating to the target length changed the value, the constant
// cannot be represented as char(length); return null.
HiveChar maxCharConst = new HiveChar(constValue, HiveChar.MAX_CHAR_LENGTH);
if (maxCharConst.equals(newValue)) {
return newValue;
} else {
return null;
}
}
if (targetType instanceof VarcharTypeInfo) {
final String constValue = constantValue.toString();
final int length = TypeInfoUtils.getCharacterLengthForType(targetType);
HiveVarchar newValue = new HiveVarchar(constValue, length);
// Same truncation check as for char above.
HiveVarchar maxCharConst = new HiveVarchar(constValue, HiveVarchar.MAX_VARCHAR_LENGTH);
if (maxCharConst.equals(newValue)) {
return newValue;
} else {
return null;
}
}
}
// No reinterpretation applies; hand back the constant unchanged.
return constantValue;
}
/**
 * Parses a numeric literal into a BigDecimal, stripping a trailing
 * d/D/f/F/l/L type-suffix character first.
 *
 * @throws NumberFormatException when the string is not a valid number
 */
private BigDecimal toBigDecimal(String val) {
  if (NumberUtils.isNumber(val)) {
    return new BigDecimal(val.replaceAll("[dDfFlL]$", ""));
  }
  throw new NumberFormatException("The given string is not a valid number: " + val);
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createStringConstantExpr(String value) {
  ExprNodeConstantDesc stringConst =
      new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, value);
  return stringConst;
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createDateConstantExpr(String value) {
  // Hive's own Date type (not java.sql.Date) parses the literal.
  return new ExprNodeConstantDesc(TypeInfoFactory.dateTypeInfo, Date.valueOf(value));
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createTimestampConstantExpr(String value) {
  // Hive's own Timestamp type (not java.sql.Timestamp) parses the literal.
  return new ExprNodeConstantDesc(TypeInfoFactory.timestampTypeInfo, Timestamp.valueOf(value));
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createTimestampLocalTimeZoneConstantExpr(String value, ZoneId zoneId) {
  // The zone affects only the type; the literal itself is parsed as-is.
  TimestampTZ parsed = TimestampTZUtil.parse(value);
  return new ExprNodeConstantDesc(TypeInfoFactory.getTimestampTZTypeInfo(zoneId), parsed);
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createIntervalYearMonthConstantExpr(String value) {
  HiveIntervalYearMonth interval = HiveIntervalYearMonth.valueOf(value);
  return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo, interval);
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createIntervalDayTimeConstantExpr(String value) {
  HiveIntervalDayTime interval = HiveIntervalDayTime.valueOf(value);
  return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo, interval);
}
/**
* {@inheritDoc}
*/
@Override
protected ExprNodeConstantDesc createIntervalYearConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
new HiveIntervalYearMonth(Integer.parseInt(value), 0));
}
/**
* {@inheritDoc}
*/
@Override
protected ExprNodeConstantDesc createIntervalMonthConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo,
new HiveIntervalYearMonth(0, Integer.parseInt(value)));
}
/**
* {@inheritDoc}
*/
@Override
protected ExprNodeConstantDesc createIntervalDayConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
new HiveIntervalDayTime(Integer.parseInt(value), 0, 0, 0, 0));
}
/**
* {@inheritDoc}
*/
@Override
protected ExprNodeConstantDesc createIntervalHourConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
new HiveIntervalDayTime(0, Integer.parseInt(value), 0, 0, 0));
}
/**
* {@inheritDoc}
*/
@Override
protected ExprNodeConstantDesc createIntervalMinuteConstantExpr(String value) {
return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
new HiveIntervalDayTime(0, 0, Integer.parseInt(value), 0, 0));
}
/**
 * {@inheritDoc}
 *
 * Splits the (possibly fractional) seconds literal into whole seconds and a
 * nanosecond remainder so both fit {@code HiveIntervalDayTime}.
 */
@Override
protected ExprNodeConstantDesc createIntervalSecondConstantExpr(String value) {
  final BigDecimal total = new BigDecimal(value);
  // Truncate toward zero to get the whole-second part...
  final BigDecimal wholeSeconds = new BigDecimal(total.toBigInteger());
  // ...and scale the leftover fraction up to nanoseconds.
  final BigDecimal fraction = total.subtract(wholeSeconds);
  final int nanos = fraction.multiply(NANOS_PER_SEC_BD).intValue();
  return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo,
      new HiveIntervalDayTime(0, 0, 0, wholeSeconds.intValueExact(), nanos));
}
/**
 * {@inheritDoc}
 *
 * A struct built from all-constant operands is folded into a single constant
 * expression; otherwise a STRUCT generic-UDF call is created over the operands.
 */
@Override
protected ExprNodeDesc createStructExpr(TypeInfo typeInfo, List<ExprNodeDesc> operands)
    throws SemanticException {
  assert typeInfo instanceof StructTypeInfo;
  if (isAllConstants(operands)) {
    final List<Object> constantValues = operands.stream()
        .map(this::getConstantValue)
        .collect(Collectors.toList());
    return createConstantExpr(typeInfo, constantValues);
  }
  final String structFuncName = GenericUDFStruct.class.getAnnotation(Description.class).name();
  return ExprNodeGenericFuncDesc.newInstance(new GenericUDFStruct(), structFuncName, operands);
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeConstantDesc createConstantExpr(TypeInfo typeInfo, Object constantValue) {
  return new ExprNodeConstantDesc(typeInfo, constantValue);
}

/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeFieldDesc createNestedColumnRefExpr(
    TypeInfo typeInfo, ExprNodeDesc expr, String fieldName, Boolean isList) {
  // NOTE(review): isList presumably flags field access over a list of structs
  // rather than a single struct -- confirm against ExprNodeFieldDesc.
  return new ExprNodeFieldDesc(typeInfo, expr, fieldName, isList);
}
/**
 * {@inheritDoc}
 *
 * If the UDF is settable, the desired result type is pushed into it before the
 * function-call expression is instantiated.
 */
@Override
protected ExprNodeGenericFuncDesc createFuncCallExpr(TypeInfo typeInfo, FunctionInfo fi,
    String funcText, List<ExprNodeDesc> inputs) throws UDFArgumentException {
  final GenericUDF udf = fi.getGenericUDF();
  if (udf instanceof SettableUDF) {
    ((SettableUDF) udf).setTypeInfo(typeInfo);
  }
  return ExprNodeGenericFuncDesc.newInstance(udf, funcText, inputs);
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeColumnListDesc createExprsListExpr() {
  return new ExprNodeColumnListDesc();
}

/**
 * {@inheritDoc}
 */
@Override
protected void addExprToExprsList(ExprNodeDesc columnList, ExprNodeDesc expr) {
  // columnList is expected to come from createExprsListExpr(); the cast fails
  // (ClassCastException) otherwise.
  ExprNodeColumnListDesc l = (ExprNodeColumnListDesc) columnList;
  l.addColumn(expr);
}
// -------------------------------------------------------------------------
// Thin predicate/accessor adapters over ExprNodeDesc subtypes and
// FunctionRegistry. Each delegates with no additional logic.
// -------------------------------------------------------------------------

/**
 * {@inheritDoc}
 */
@Override
protected boolean isConstantExpr(Object o) {
  return o instanceof ExprNodeConstantDesc;
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isFuncCallExpr(Object o) {
  return o instanceof ExprNodeGenericFuncDesc;
}

/**
 * {@inheritDoc}
 */
@Override
protected Object getConstantValue(ExprNodeDesc expr) {
  // Unchecked cast: callers must have verified isConstantExpr(expr) first.
  return ((ExprNodeConstantDesc) expr).getValue();
}

/**
 * {@inheritDoc}
 */
@Override
protected String getConstantValueAsString(ExprNodeDesc expr) {
  // Same precondition as getConstantValue; also assumes the value is non-null.
  return ((ExprNodeConstantDesc) expr).getValue().toString();
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isColumnRefExpr(Object o) {
  return o instanceof ExprNodeColumnDesc;
}

/**
 * {@inheritDoc}
 */
@Override
protected String getColumnName(ExprNodeDesc expr, RowResolver rowResolver) {
  // rowResolver is unused in this implementation; the column name is taken
  // directly from the column descriptor.
  return ((ExprNodeColumnDesc) expr).getColumn();
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isExprsListExpr(Object o) {
  return o instanceof ExprNodeColumnListDesc;
}

/**
 * {@inheritDoc}
 */
@Override
protected List<ExprNodeDesc> getExprChildren(ExprNodeDesc expr) {
  return expr.getChildren();
}

/**
 * {@inheritDoc}
 */
@Override
protected TypeInfo getTypeInfo(ExprNodeDesc expr) {
  return expr.getTypeInfo();
}

/**
 * {@inheritDoc}
 */
@Override
protected List<TypeInfo> getStructTypeInfoList(ExprNodeDesc expr) {
  // Unchecked cast: callers must pass an expression of struct type.
  StructTypeInfo structTypeInfo = (StructTypeInfo) expr.getTypeInfo();
  return structTypeInfo.getAllStructFieldTypeInfos();
}

/**
 * {@inheritDoc}
 */
@Override
protected List<String> getStructNameList(ExprNodeDesc expr) {
  StructTypeInfo structTypeInfo = (StructTypeInfo) expr.getTypeInfo();
  return structTypeInfo.getAllStructFieldNames();
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isORFuncCallExpr(ExprNodeDesc expr) {
  return FunctionRegistry.isOpOr(expr);
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isANDFuncCallExpr(ExprNodeDesc expr) {
  return FunctionRegistry.isOpAnd(expr);
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isPOSITIVEFuncCallExpr(ExprNodeDesc expr) {
  return FunctionRegistry.isOpPositive(expr);
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isNEGATIVEFuncCallExpr(ExprNodeDesc expr) {
  return FunctionRegistry.isOpNegative(expr);
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isAndFunction(FunctionInfo fi) {
  return fi.getGenericUDF() instanceof GenericUDFOPAnd;
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isOrFunction(FunctionInfo fi) {
  return fi.getGenericUDF() instanceof GenericUDFOPOr;
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isInFunction(FunctionInfo fi) {
  return fi.getGenericUDF() instanceof GenericUDFIn;
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isCompareFunction(FunctionInfo fi) {
  return fi.getGenericUDF() instanceof GenericUDFBaseCompare;
}
/**
 * {@inheritDoc}
 *
 * True only for strict equality; the null-safe variant
 * (GenericUDFOPEqualNS) is deliberately excluded.
 */
@Override
protected boolean isEqualFunction(FunctionInfo fi) {
  final GenericUDF udf = fi.getGenericUDF();
  return udf instanceof GenericUDFOPEqual && !(udf instanceof GenericUDFOPEqualNS);
}
/**
 * Returns whether the function is one of the null-safe comparison operators
 * (the NS equal / not-equal UDF variants).
 */
@Override
protected boolean isNSCompareFunction(FunctionInfo fi) {
  final GenericUDF udf = fi.getGenericUDF();
  return udf instanceof GenericUDFOPEqualNS || udf instanceof GenericUDFOPNotEqualNS;
}
/**
 * {@inheritDoc}
 */
@Override
protected boolean isConsistentWithinQuery(FunctionInfo fi) {
  return FunctionRegistry.isConsistentWithinQuery(fi.getGenericUDF());
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isStateful(FunctionInfo fi) {
  return FunctionRegistry.isStateful(fi.getGenericUDF());
}

/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeDesc setTypeInfo(ExprNodeDesc expr, TypeInfo type) {
  // Mutates expr in place and returns the same instance for chaining.
  expr.setTypeInfo(type);
  return expr;
}
/**
 * {@inheritDoc}
 *
 * A CASE WHEN of the shape {@code WHEN cond THEN <bool const> ELSE <bool const>}
 * is eligible only when both constant branches are booleans with different
 * values.
 */
@Override
protected boolean convertCASEIntoCOALESCEFuncCallExpr(FunctionInfo fi, List<ExprNodeDesc> inputs) {
  if (!(fi.getGenericUDF() instanceof GenericUDFWhen) || inputs.size() != 3) {
    return false;
  }
  if (!(inputs.get(1) instanceof ExprNodeConstantDesc)
      || !(inputs.get(2) instanceof ExprNodeConstantDesc)) {
    return false;
  }
  final Object thenVal = ((ExprNodeConstantDesc) inputs.get(1)).getValue();
  final Object elseVal = ((ExprNodeConstantDesc) inputs.get(2)).getValue();
  // only convert to COALESCE when both branches are valid (boolean and distinct)
  return thenVal instanceof Boolean && elseVal instanceof Boolean && !thenVal.equals(elseVal);
}
/**
 * Returns whether a 3-operand CASE WHEN whose first operand is already boolean
 * can be rewritten as IF(cond, thenBranch, elseBranch).
 */
@Override
protected boolean convertCASEIntoIFFuncCallExpr(FunctionInfo fi, List<ExprNodeDesc> inputs) {
  if (!(fi.getGenericUDF() instanceof GenericUDFWhen)) {
    return false;
  }
  return inputs.size() == 3
      && TypeInfoFactory.booleanTypeInfo.equals(inputs.get(0).getTypeInfo());
}
/**
 * {@inheritDoc}
 *
 * Delegates constant folding to {@code ConstantPropagateProcFactory}; any
 * expression that is not a generic function call is returned untouched.
 */
@Override
protected ExprNodeDesc foldExpr(ExprNodeDesc expr) {
  if (!(expr instanceof ExprNodeGenericFuncDesc)) {
    return expr;
  }
  return ConstantPropagateProcFactory.foldExpr((ExprNodeGenericFuncDesc) expr);
}
/**
 * {@inheritDoc}
 */
@Override
protected boolean isSTRUCTFuncCallExpr(ExprNodeDesc expr) {
  // Delegates the STRUCT-UDF check entirely to ExprNodeDescUtils.
  return ExprNodeDescUtils.isStructUDF(expr);
}

/**
 * {@inheritDoc}
 */
@Override
protected boolean isConstantStruct(ExprNodeDesc expr) {
  return ExprNodeDescUtils.isConstantStruct(expr);
}
/**
 * {@inheritDoc}
 */
@Override
protected ExprNodeDesc createSubqueryExpr(TypeCheckCtx ctx, ASTNode expr, SubqueryType subqueryType,
    Object[] inputs) throws CalciteSubquerySemanticException {
  // subqueryToRelNode might be null if subquery expression anywhere other than
  // as expected in filter (where/having). We should throw an appropriate error
  // message
  Map<ASTNode, QBSubQueryParseInfo> subqueryToRelNode = ctx.getSubqueryToRelNode();
  if (subqueryToRelNode == null) {
    throw new CalciteSubquerySemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
        " Currently SubQuery expressions are only allowed as " +
        "Where and Having Clause predicates"));
  }
  ASTNode subqueryOp = (ASTNode) expr.getChild(0);
  RelNode subqueryRel = subqueryToRelNode.get(expr).getSubQueryRelNode();
  // For now because subquery is only supported in filter
  // we will create subquery expression of boolean type
  switch (subqueryType) {
  case EXISTS: {
    // NOTE(review): a subquery with a full aggregate is folded to constant
    // TRUE here, presumably because it always produces a row -- confirm.
    if (subqueryToRelNode.get(expr).hasFullAggregate()) {
      return createConstantExpr(TypeInfoFactory.booleanTypeInfo, true);
    }
    return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
        SubqueryType.EXISTS);
  }
  case IN: {
    // inputs[2] carries the left-hand-side expression of "<lhs> IN (subquery)".
    assert (inputs[2] != null);
    ExprNodeDesc lhs = (ExprNodeDesc) inputs[2];
    return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
        SubqueryType.IN, lhs);
  }
  case SCALAR: {
    // only single subquery expr is supported
    if (subqueryRel.getRowType().getFieldCount() != 1) {
      throw new CalciteSubquerySemanticException(ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(
          "More than one column expression in subquery"));
    }
    // figure out subquery expression column's type
    TypeInfo subExprType = TypeConverter.convert(subqueryRel.getRowType().getFieldList().get(0).getType());
    return new ExprNodeSubQueryDesc(subExprType, subqueryRel,
        SubqueryType.SCALAR);
  }
  case SOME: {
    // SOME/ALL additionally carry the comparison operator node (child 1 of the op).
    assert (inputs[2] != null);
    ExprNodeDesc lhs = (ExprNodeDesc) inputs[2];
    return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
        SubqueryType.SOME, lhs, (ASTNode) subqueryOp.getChild(1));
  }
  case ALL: {
    assert (inputs[2] != null);
    ExprNodeDesc lhs = (ExprNodeDesc) inputs[2];
    return new ExprNodeSubQueryDesc(TypeInfoFactory.booleanTypeInfo, subqueryRel,
        SubqueryType.ALL, lhs, (ASTNode) subqueryOp.getChild(1));
  }
  default:
    // Unknown subquery kinds are silently ignored by returning null.
    return null;
  }
}
/**
 * {@inheritDoc}
 */
@Override
protected FunctionInfo getFunctionInfo(String funcName) throws SemanticException {
  // Straight lookup in the global function registry; unresolved names are
  // handled by FunctionRegistry itself.
  return FunctionRegistry.getFunctionInfo(funcName);
}
/**
 * Rebuilds a STRUCT function-call expression with the given field names while
 * keeping the child expressions (and therefore the field types) unchanged.
 * An empty name list leaves the expression untouched.
 */
@Override
protected ExprNodeDesc replaceFieldNamesInStruct(ExprNodeDesc expr, List<String> newFieldNames) {
  if (newFieldNames.isEmpty()) {
    return expr;
  }
  final ExprNodeGenericFuncDesc structCall = (ExprNodeGenericFuncDesc) expr;
  final List<ExprNodeDesc> children = structCall.getChildren();
  final List<TypeInfo> fieldTypes = children.stream()
      .map(ExprNodeDesc::getTypeInfo)
      .collect(Collectors.toList());
  final TypeInfo renamedType = TypeInfoFactory.getStructTypeInfo(newFieldNames, fieldTypes);
  return new ExprNodeGenericFuncDesc(renamedType, structCall.getGenericUDF(), children);
}
}
| |
package org.broadinstitute.hellbender.tools.copynumber.formats.collections;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.SAMSequenceRecord;
import htsjdk.samtools.util.Locatable;
import org.apache.commons.io.FileUtils;
import org.broadinstitute.hellbender.GATKBaseTest;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.tools.copynumber.formats.CopyNumberFormatsUtils;
import org.broadinstitute.hellbender.tools.copynumber.formats.metadata.SampleLocatableMetadata;
import org.broadinstitute.hellbender.tools.copynumber.formats.metadata.SimpleSampleLocatableMetadata;
import org.broadinstitute.hellbender.utils.SimpleInterval;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.tsv.DataLine;
import org.broadinstitute.hellbender.utils.tsv.TableColumnCollection;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Function;
/**
 * Unit tests for {@link AbstractSampleLocatableCollection}.
 *
 * @author Samuel Lee <slee@broadinstitute.org>
 */
public final class AbstractSampleLocatableCollectionUnitTest extends GATKBaseTest {
    private static final File TEST_SUB_DIR = new File(toolsTestDir, "copynumber/formats/collections");

    private static final File SIMPLE_LOCATABLE_COLLECTION_FILE =
            new File(TEST_SUB_DIR, "locatable-collection-tsv-simple-locatable-collection.tsv");
    private static final File SIMPLE_LOCATABLE_COLLECTION_NON_DICTIONARY_ORDER_FILE =
            new File(TEST_SUB_DIR, "locatable-collection-tsv-simple-locatable-collection-non-dictionary-order.tsv");
    private static final File SIMPLE_LOCATABLE_COLLECTION_MISSING_COLUMN_FILE =
            new File(TEST_SUB_DIR, "locatable-collection-tsv-simple-locatable-collection-missing-column.tsv");

    // Contig "10" is declared after "2", so dictionary order deliberately
    // differs from lexicographic order -- several tests below rely on this.
    private static final SampleLocatableMetadata METADATA_EXPECTED = new SimpleSampleLocatableMetadata(
            "test-sample",
            new SAMSequenceDictionary(Arrays.asList(
                    new SAMSequenceRecord("1", 20000),
                    new SAMSequenceRecord("2", 20000),
                    new SAMSequenceRecord("10", 20000))));

    // Records listed in dictionary order; constructors are expected to produce
    // exactly this ordering.
    private static final SimpleSampleLocatableCollection SIMPLE_LOCATABLE_COLLECTION_EXPECTED = new SimpleSampleLocatableCollection(
            METADATA_EXPECTED,
            Arrays.asList(
                    new SimpleLocatable(new SimpleInterval("1", 1, 1), 1.),
                    new SimpleLocatable(new SimpleInterval("1", 2, 2), 2.),
                    new SimpleLocatable(new SimpleInterval("2", 1, 1), 3.),
                    new SimpleLocatable(new SimpleInterval("10", 1, 1), Double.NaN)));

    //simple example of a record class
    private static final class SimpleLocatable implements Locatable {
        private final SimpleInterval interval;  // never null (checked in constructor)
        private final double value;

        private SimpleLocatable(final SimpleInterval interval, final double value) {
            this.interval = Utils.nonNull(interval);
            this.value = value;
        }

        @Override
        public String getContig() {
            return interval.getContig();
        }

        @Override
        public int getStart() {
            return interval.getStart();
        }

        @Override
        public int getEnd() {
            return interval.getEnd();
        }

        public SimpleInterval getInterval() {
            return interval;
        }

        public double getValue() {
            return value;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            final SimpleLocatable that = (SimpleLocatable) o;
            // Double.compare treats NaN == NaN as equal, which the NaN record above requires.
            return Double.compare(that.value, value) == 0 && interval.equals(that.interval);
        }

        @Override
        public int hashCode() {
            int result;
            long temp;
            result = interval.hashCode();
            temp = Double.doubleToLongBits(value);
            result = 31 * result + (int) (temp ^ (temp >>> 32));
            return result;
        }

        @Override
        public String toString() {
            return "SimpleLocatable{" +
                    "interval=" + interval +
                    ", value=" + value +
                    '}';
        }
    }

    //simple example of a collection class
    private static final class SimpleSampleLocatableCollection extends AbstractSampleLocatableCollection<SimpleLocatable> {
        enum SimpleLocatableTableColumn {
            CONTIG,
            START,
            END,
            VALUE;

            static final TableColumnCollection COLUMNS = new TableColumnCollection((Object[]) values());
        }

        // Decoder: one TSV data line -> one SimpleLocatable record.
        private static final Function<DataLine, SimpleLocatable> SIMPLE_LOCATABLE_RECORD_FROM_DATA_LINE_DECODER = dataLine -> {
            final String contig = dataLine.get(SimpleLocatableTableColumn.CONTIG);
            final int start = dataLine.getInt(SimpleLocatableTableColumn.START);
            final int end = dataLine.getInt(SimpleLocatableTableColumn.END);
            final double value = dataLine.getDouble(SimpleLocatableTableColumn.VALUE);
            final SimpleInterval interval = new SimpleInterval(contig, start, end);
            return new SimpleLocatable(interval, value);
        };

        // Encoder: one SimpleLocatable record -> one TSV data line
        // (value formatted via formatDouble, see testWrite's format caveat).
        private static final BiConsumer<SimpleLocatable, DataLine> SIMPLE_LOCATABLE_RECORD_TO_DATA_LINE_ENCODER = (simpleLocatable, dataLine) ->
                dataLine.append(simpleLocatable.getInterval().getContig())
                        .append(simpleLocatable.getInterval().getStart())
                        .append(simpleLocatable.getInterval().getEnd())
                        .append(formatDouble(simpleLocatable.getValue()));

        private SimpleSampleLocatableCollection(final File inputFile) {
            super(inputFile, SimpleLocatableTableColumn.COLUMNS, SIMPLE_LOCATABLE_RECORD_FROM_DATA_LINE_DECODER, SIMPLE_LOCATABLE_RECORD_TO_DATA_LINE_ENCODER);
        }

        private SimpleSampleLocatableCollection(final SampleLocatableMetadata metadata,
                                                final List<SimpleLocatable> simpleLocatables) {
            super(metadata, simpleLocatables, SimpleLocatableTableColumn.COLUMNS, SIMPLE_LOCATABLE_RECORD_FROM_DATA_LINE_DECODER, SIMPLE_LOCATABLE_RECORD_TO_DATA_LINE_ENCODER);
        }
    }

    @Test
    public void testRead() {
        final SimpleSampleLocatableCollection simpleLocatableCollection = new SimpleSampleLocatableCollection(SIMPLE_LOCATABLE_COLLECTION_FILE);
        assertSimpleLocatableCollectionEqualsExpected(simpleLocatableCollection);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testReadIntervalsNotInDictionaryOrder() {
        new SimpleSampleLocatableCollection(SIMPLE_LOCATABLE_COLLECTION_NON_DICTIONARY_ORDER_FILE);
    }

    @Test(expectedExceptions = UserException.BadInput.class)
    public void testReadMissingColumn() {
        new SimpleSampleLocatableCollection(SIMPLE_LOCATABLE_COLLECTION_MISSING_COLUMN_FILE);
    }

    /**
     * Note that this will fail if {@link CopyNumberFormatsUtils#DOUBLE_FORMAT} is changed.
     */
    @Test
    public void testWrite() throws IOException {
        final File tempFile = createTempFile("test", ".tsv");
        SIMPLE_LOCATABLE_COLLECTION_EXPECTED.write(tempFile);
        SIMPLE_LOCATABLE_COLLECTION_EXPECTED.write(tempFile); //test that file is overwritten
        // Byte-level comparison against the checked-in expected file.
        Assert.assertTrue(FileUtils.contentEquals(tempFile, SIMPLE_LOCATABLE_COLLECTION_FILE));
    }

    @Test
    public void testConstructorFromListDictionarySortingOfIntervals() {
        // Same records as the expected collection but deliberately out of
        // dictionary order ("10" before "2"); the constructor must sort them.
        final SimpleSampleLocatableCollection simpleLocatableCollectionExpectedUnsortedListArgument = new SimpleSampleLocatableCollection(
                METADATA_EXPECTED,
                Arrays.asList(
                        new SimpleLocatable(new SimpleInterval("1", 1, 1), 1.),
                        new SimpleLocatable(new SimpleInterval("1", 2, 2), 2.),
                        new SimpleLocatable(new SimpleInterval("10", 1, 1), Double.NaN),
                        new SimpleLocatable(new SimpleInterval("2", 1, 1), 3.)));
        assertSimpleLocatableCollectionEqualsExpected(simpleLocatableCollectionExpectedUnsortedListArgument);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testIntervalsWithDuplicates() {
        final List<SimpleLocatable> intervalsWithDuplicates = Arrays.asList(
                new SimpleLocatable(new SimpleInterval("1", 1, 1), 1.),
                new SimpleLocatable(new SimpleInterval("1", 1, 1), 1.),
                new SimpleLocatable(new SimpleInterval("2", 1, 1), 1.));
        new SimpleSampleLocatableCollection(METADATA_EXPECTED, intervalsWithDuplicates);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testIntervalsWithOverlaps() {
        // Intervals [1,100] and [100,200] share position 100.
        final List<SimpleLocatable> intervalsWithOverlaps = Arrays.asList(
                new SimpleLocatable(new SimpleInterval("1", 1, 100), 1.),
                new SimpleLocatable(new SimpleInterval("1", 100, 200), 1.),
                new SimpleLocatable(new SimpleInterval("2", 1, 1), 1.));
        new SimpleSampleLocatableCollection(METADATA_EXPECTED, intervalsWithOverlaps);
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void testIntervalOutsideSequenceDictionary() {
        // Contig "X" is not in METADATA_EXPECTED's sequence dictionary.
        final List<SimpleLocatable> intervalOutsideSequenceDictionary = Collections.singletonList(
                new SimpleLocatable(new SimpleInterval("X", 1, 100), 1.));
        new SimpleSampleLocatableCollection(METADATA_EXPECTED, intervalOutsideSequenceDictionary);
    }

    // Checks equality of the collection, its metadata, and its record list.
    private static void assertSimpleLocatableCollectionEqualsExpected(final SimpleSampleLocatableCollection simpleLocatableCollection) {
        Assert.assertEquals(simpleLocatableCollection, SIMPLE_LOCATABLE_COLLECTION_EXPECTED);
        Assert.assertEquals(simpleLocatableCollection.getMetadata(), SIMPLE_LOCATABLE_COLLECTION_EXPECTED.getMetadata());
        Assert.assertEquals(simpleLocatableCollection.getRecords(), SIMPLE_LOCATABLE_COLLECTION_EXPECTED.getRecords());
    }
}
| |
package com.special.ResideMenuDemo;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ExpandableListView;
import android.widget.ListView;
import android.widget.Toast;
import org.apache.http.NameValuePair;
import org.json.JSONArray;
import org.json.JSONObject;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import uk.co.senab.actionbarpulltorefresh.library.ActionBarPullToRefresh;
import uk.co.senab.actionbarpulltorefresh.library.PullToRefreshLayout;
import uk.co.senab.actionbarpulltorefresh.library.listeners.OnRefreshListener;
/**
* Mail: hsdars@gmail.com
*/
/**
 * Fragment that fetches a medical profile from the HumanAPI demo endpoint and
 * shows it in an expandable list, with pull-to-refresh support.
 */
public class QandAFragment extends Fragment implements OnRefreshListener {

    private static final String TAG = "QandAFragment";

    private PullToRefreshLayout mPullToRefreshLayout;
    List<String> groupList;                      // group headers (person's given name)
    List<String> childList;                      // children of the group currently being built
    Map<String, List<String>> laptopCollection;  // group header -> child rows
    ExpandableListView expListView;
    HospitalListAdapter expListAdapter;
    ArrayList<String[]> arrayList;               // raw child rows, parallel to groupList
    JSONArray jArray = new JSONArray();
    ArrayAdapter<String> arrayAdapter;
    String LOGIN_URL = "https://api.humanapi.co/v1/human/medical/profile?access_token=demo";
    // NOTE(review): lv, offset, jArray, arrayAdapter and lvArray are unused here;
    // kept because they are package-visible and may be referenced elsewhere.
    ListView lv;
    int offset = 30;
    View view;
    List<String> lvArray = new ArrayList<String>();

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout
        view = inflater.inflate(R.layout.qanda, container, false);
        Toast.makeText(getActivity(), "Pull down to refresh the list", Toast.LENGTH_LONG).show();
        // Find the PullToRefreshLayout and set it up.
        mPullToRefreshLayout = (PullToRefreshLayout) view.findViewById(R.id.ptr_layout);
        ActionBarPullToRefresh.from(getActivity())
                .allChildrenArePullable()
                .listener(this)
                .setup(mPullToRefreshLayout);
        new GetList().execute();
        return view;
    }

    // Intentionally empty: groups are built directly from the fetched profile in GetList.
    private void createGroupList() {
    }

    private void createCollection() {
        // preparing laptops collection(child)
        laptopCollection = new LinkedHashMap<String, List<String>>();
        for (int i = 0; i < groupList.size(); i++) {
            loadChild(arrayList.get(i));
            //load child corresponding to its index
            laptopCollection.put(groupList.get(i), childList);
        }
    }

    private void loadChild(String[] laptopModels) {
        childList = new ArrayList<String>();
        for (String model : laptopModels) {
            childList.add(model);
        }
    }

    private void setGroupIndicatorToRight() {
        /* Get the screen width */
        DisplayMetrics dm = new DisplayMetrics();
        getActivity().getWindowManager().getDefaultDisplay().getMetrics(dm);
        int width = dm.widthPixels;
        expListView.setIndicatorBounds(width - getDipsFromPixel(35), width
                - getDipsFromPixel(5));
    }

    // Convert pixel to dip
    public int getDipsFromPixel(float pixels) {
        // Get the screen's density scale
        final float scale = getResources().getDisplayMetrics().density;
        // Convert the dps to pixels, based on density scale
        return (int) (pixels * scale + 0.5f);
    }

    @Override
    public void onRefreshStarted(View view) {
        Toast.makeText(getActivity(), "Refreshing", Toast.LENGTH_SHORT).show();
        // FIX: setRefreshComplete() was previously called here, right after
        // starting the task, which hid the refresh indicator before the fetch
        // finished. GetList now hides it in onPostExecute().
        new GetList().execute();
    }

    /**
     * Fetches the profile JSON in the background, then builds the expandable
     * list on the UI thread via onPostExecute().
     */
    class GetList extends AsyncTask<String, String, String> {

        @Override
        protected String doInBackground(String... args) {
            try {
                JSONObject j = (JSONObject) new JSONArrayParser().getJsonObject(LOGIN_URL);
                groupList = new ArrayList<String>();
                arrayList = new ArrayList<String[]>();
                JSONObject demographics = (JSONObject) j.get("demographics");
                String name = ((JSONArray) ((JSONObject) demographics.get("name")).get("given")).get(0).toString();
                groupList.add(name);
                String str[] = new String[10];
                String address = (((JSONObject) demographics.get("address")).get("city")).toString();
                str[0] = "Family Name : " + (((JSONObject) demographics.get("name")).get("family")).toString();
                str[1] = "Address : " + address;
                str[2] = "Ethnicity : " + demographics.get("ethnicity").toString();
                str[3] = "Gender: " + demographics.get("gender").toString();
                str[4] = "Language : " + demographics.get("language").toString();
                str[5] = "Race : " + demographics.get("race").toString();
                str[6] = "Date of Birth : " + demographics.get("dob").toString();
                str[7] = "Alcohol usage : " + ((JSONObject) j.get("alcohol")).get("use").toString();
                str[8] = "Smoking : " + ((JSONObject) j.get("smoking")).get("status").toString();
                str[9] = "Profile Created : " + j.get("createdAt").toString();
                arrayList.add(0, str);
            } catch (Exception e) {
                // FIX: was a bare printStackTrace(); log with context instead.
                Log.e(TAG, "Failed to fetch medical profile", e);
            }
            return null;
        }

        @Override
        protected void onPostExecute(String result) {
            // FIX: UI work moved out of doInBackground()'s runOnUiThread() call
            // (which could NPE if the fragment detached mid-fetch) into the
            // standard UI-thread callback, with an explicit detach guard.
            if (mPullToRefreshLayout != null) {
                mPullToRefreshLayout.setRefreshComplete();
            }
            if (getActivity() == null || view == null || groupList == null || arrayList == null) {
                return; // fragment detached or the fetch failed
            }
            createCollection();
            expListView = (ExpandableListView) view.findViewById(R.id.laptop_list);
            expListAdapter = new HospitalListAdapter(getActivity(), groupList, laptopCollection);
            expListView.setAdapter(expListAdapter);
            //setGroupIndicatorToRight();
            expListView.setOnChildClickListener(new ExpandableListView.OnChildClickListener() {
                public boolean onChildClick(ExpandableListView parent, View v,
                                            int groupPosition, int childPosition, long id) {
                    String selected = (String) expListAdapter.getChild(
                            groupPosition, childPosition);
                    // Toast.makeText(getActivity().getBaseContext(), selected, Toast.LENGTH_LONG).show();
                    return true;
                }
            });
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.ipojo.test.scenarios.service.dependency;
import java.util.Properties;
import org.apache.felix.ipojo.ComponentInstance;
import org.apache.felix.ipojo.architecture.Architecture;
import org.apache.felix.ipojo.architecture.InstanceDescription;
import org.apache.felix.ipojo.junit4osgi.OSGiTestCase;
import org.apache.felix.ipojo.test.scenarios.service.dependency.service.CheckService;
import org.apache.felix.ipojo.test.scenarios.util.Utils;
import org.osgi.framework.ServiceReference;
public class ProxiedCollectionMultipleDependencies extends OSGiTestCase {
ComponentInstance instance1, instance2;
ComponentInstance fooProvider1, fooProvider2;
/**
 * Creates two FooService providers (both stopped initially) and the two
 * proxied-collection check-service instances under test ("Simple" and
 * "Optional").
 */
public void setUp() {
    try {
        Properties prov = new Properties();
        prov.put("instance.name", "FooProvider1");
        fooProvider1 = Utils.getFactoryByName(getContext(), "FooProviderType-1").createComponentInstance(prov);
        fooProvider1.stop();

        Properties prov2 = new Properties();
        prov2.put("instance.name", "FooProvider2");
        fooProvider2 = Utils.getFactoryByName(getContext(), "FooProviderType-1").createComponentInstance(prov2);
        fooProvider2.stop();

        Properties i1 = new Properties();
        i1.put("instance.name", "Simple");
        instance1 = Utils.getFactoryByName(getContext(), "ProxiedSimpleCollectionCheckServiceProvider").createComponentInstance(i1);

        Properties i2 = new Properties();
        i2.put("instance.name", "Optional");
        instance2 = Utils.getFactoryByName(getContext(), "ProxiedOptionalCollectionCheckServiceProvider").createComponentInstance(i2);
    } catch (Exception e) {
        // FIX: fail(e.getMessage()) could pass null and dropped the exception
        // type; include the exception itself in the failure message.
        fail("setUp failed: " + e);
    }
}
/**
 * Disposes every component instance created in setUp (in creation order of the
 * original test) and drops the references.
 */
public void tearDown() {
    for (ComponentInstance instance : new ComponentInstance[] { instance1, instance2, fooProvider1, fooProvider2 }) {
        instance.dispose();
    }
    instance1 = null;
    instance2 = null;
    fooProvider1 = null;
    fooProvider2 = null;
}
/**
 * Walks the "Simple" instance through provider arrival/departure:
 * no provider (invalid) -> one -> two -> one -> none (invalid), checking the
 * aggregated FooService results and the callback counters at each step.
 */
public void testSimple() {
    ServiceReference arch_ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), instance1.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance invalidity - 1", id.getState() == ComponentInstance.INVALID);

    // One provider available -> instance becomes valid.
    fooProvider1.start();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    ServiceReference cs_ref = Utils.getServiceReferenceByName(getContext(), CheckService.class.getName(), instance1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) getContext().getService(cs_ref);
    assertCheckServiceState(cs.getProps(), 1, 1);

    // A second provider -> still valid; both providers are now invoked.
    fooProvider2.start();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    cs = (CheckService) getContext().getService(cs_ref);
    assertCheckServiceState(cs.getProps(), 2, 2);

    // First provider leaves -> still valid with the remaining provider.
    fooProvider1.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    cs = (CheckService) getContext().getService(cs_ref);
    assertCheckServiceState(cs.getProps(), 3, 1);

    // No provider left -> the mandatory dependency makes the instance invalid.
    fooProvider2.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.INVALID);
    id = null;
    cs = null;
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
}

/**
 * Asserts one round of CheckService state: the invocation succeeded, every
 * bind/unbind callback counter is still 0 (mirrors the original assertions for
 * the proxied collection), and the aggregated int/long/double results equal
 * the number of live providers. Assertion messages match the original ones.
 */
private void assertCheckServiceState(Properties props, int round, int providerCount) {
    assertTrue("check CheckService invocation - " + round, ((Boolean) props.get("result")).booleanValue());
    assertEquals("check void bind invocation - " + round, ((Integer) props.get("voidB")).intValue(), 0);
    assertEquals("check void unbind callback invocation - " + round, ((Integer) props.get("voidU")).intValue(), 0);
    assertEquals("check object bind callback invocation - " + round, ((Integer) props.get("objectB")).intValue(), 0);
    assertEquals("check object unbind callback invocation - " + round, ((Integer) props.get("objectU")).intValue(), 0);
    assertEquals("check ref bind callback invocation - " + round, ((Integer) props.get("refB")).intValue(), 0);
    assertEquals("check ref unbind callback invocation - " + round, ((Integer) props.get("refU")).intValue(), 0);
    assertEquals("Check FS invocation (int) - " + round, ((Integer) props.get("int")).intValue(), providerCount);
    assertEquals("Check FS invocation (long) - " + round, ((Long) props.get("long")).longValue(), (long) providerCount);
    assertEquals("Check FS invocation (double) - " + round, ((Double) props.get("double")).doubleValue(), providerCount * 1.0);
}
/**
 * Checks an optional aggregate dependency through every provider lifecycle
 * transition (0 -> 1 -> 2 -> 1 -> 0 providers). The instance must remain VALID
 * in every phase (the dependency is optional, per the test name), the delegated
 * invocation must succeed exactly when at least one provider is available, and
 * all bind/unbind callback counters must stay at 0 throughout.
 */
public void testOptional() {
// Phase 0: no provider has been started yet.
ServiceReference arch_ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), instance2.getInstanceName());
assertNotNull("Check architecture availability", arch_ref);
InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID);
ServiceReference cs_ref = Utils.getServiceReferenceByName(getContext(), CheckService.class.getName(), instance2.getInstanceName());
assertNotNull("Check CheckService availability", cs_ref);
CheckService cs = (CheckService) getContext().getService(cs_ref);
Properties props = cs.getProps();
// Check properties: with no provider the delegated call fails (result == false)
// and every invocation counter is still 0.
assertFalse("check CheckService invocation - 0", ((Boolean)props.get("result")).booleanValue()); // False : no provider
assertEquals("check void bind invocation - 0", ((Integer)props.get("voidB")).intValue(), 0);
assertEquals("check void unbind callback invocation - 0", ((Integer)props.get("voidU")).intValue(), 0);
assertEquals("check object bind callback invocation - 0", ((Integer)props.get("objectB")).intValue(), 0);
assertEquals("check object unbind callback invocation - 0", ((Integer)props.get("objectU")).intValue(), 0);
assertEquals("check ref bind callback invocation - 0", ((Integer)props.get("refB")).intValue(), 0);
assertEquals("check ref unbind callback invocation - 0", ((Integer)props.get("refU")).intValue(), 0);
assertEquals("Check FS invocation (int) - 0", ((Integer)props.get("int")).intValue(), 0);
assertEquals("Check FS invocation (long) - 0", ((Long)props.get("long")).longValue(), 0);
assertEquals("Check FS invocation (double) - 0", ((Double)props.get("double")).doubleValue(), 0.0);
// Phase 1: start the first provider; one invocation per FS counter is expected.
fooProvider1.start();
id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
cs = (CheckService) getContext().getService(cs_ref);
props = cs.getProps();
// Check properties: one provider -> the delegated call succeeds, counters move to 1.
assertTrue("check CheckService invocation - 1", ((Boolean)props.get("result")).booleanValue()); // True, a provider is here
assertEquals("check void bind invocation - 1", ((Integer)props.get("voidB")).intValue(), 0);
assertEquals("check void unbind callback invocation - 1", ((Integer)props.get("voidU")).intValue(), 0);
assertEquals("check object bind callback invocation - 1", ((Integer)props.get("objectB")).intValue(), 0);
assertEquals("check object unbind callback invocation - 1", ((Integer)props.get("objectU")).intValue(), 0);
assertEquals("check ref bind callback invocation - 1", ((Integer)props.get("refB")).intValue(), 0);
assertEquals("check ref unbind callback invocation - 1", ((Integer)props.get("refU")).intValue(), 0);
assertEquals("Check FS invocation (int) - 1", ((Integer)props.get("int")).intValue(), 1);
assertEquals("Check FS invocation (long) - 1", ((Long)props.get("long")).longValue(), 1);
assertEquals("Check FS invocation (double) - 1", ((Double)props.get("double")).doubleValue(), 1.0);
// Phase 2: start the second provider; aggregate invocation hits both providers.
fooProvider2.start();
id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
cs = (CheckService) getContext().getService(cs_ref);
props = cs.getProps();
// Check properties: two providers -> each FS counter is incremented twice.
assertTrue("check CheckService invocation - 2", ((Boolean)props.get("result")).booleanValue()); // True, two providers are here
assertEquals("check void bind invocation - 2", ((Integer)props.get("voidB")).intValue(), 0);
assertEquals("check void unbind callback invocation - 2", ((Integer)props.get("voidU")).intValue(), 0);
assertEquals("check object bind callback invocation - 2", ((Integer)props.get("objectB")).intValue(), 0);
assertEquals("check object unbind callback invocation - 2", ((Integer)props.get("objectU")).intValue(), 0);
assertEquals("check ref bind callback invocation - 2", ((Integer)props.get("refB")).intValue(), 0);
assertEquals("check ref unbind callback invocation - 2", ((Integer)props.get("refU")).intValue(), 0);
assertEquals("Check FS invocation (int) - 2", ((Integer)props.get("int")).intValue(), 2);
assertEquals("Check FS invocation (long) - 2", ((Long)props.get("long")).longValue(), 2);
assertEquals("Check FS invocation (double) - 2", ((Double)props.get("double")).doubleValue(), 2.0);
// Phase 3: stop the first provider; one provider remains, instance stays VALID.
fooProvider1.stop();
id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
cs = (CheckService) getContext().getService(cs_ref);
props = cs.getProps();
// Check properties: back to a single provider, counters read 1 again.
assertTrue("check CheckService invocation - 3", ((Boolean)props.get("result")).booleanValue()); // True, there is still one provider.
assertEquals("check void bind invocation - 3", ((Integer)props.get("voidB")).intValue(), 0);
assertEquals("check void unbind callback invocation - 3", ((Integer)props.get("voidU")).intValue(), 0);
assertEquals("check object bind callback invocation - 3", ((Integer)props.get("objectB")).intValue(), 0);
assertEquals("check object unbind callback invocation - 3", ((Integer)props.get("objectU")).intValue(), 0);
assertEquals("check ref bind callback invocation - 3", ((Integer)props.get("refB")).intValue(), 0);
assertEquals("check ref unbind callback invocation - 3", ((Integer)props.get("refU")).intValue(), 0);
assertEquals("Check FS invocation (int) - 3", ((Integer)props.get("int")).intValue(), 1);
assertEquals("Check FS invocation (long) - 3", ((Long)props.get("long")).longValue(), 1);
assertEquals("Check FS invocation (double) - 3", ((Double)props.get("double")).doubleValue(), 1.0);
// Phase 4: stop the last provider. Unlike the mandatory-dependency test above,
// the instance must remain VALID with zero providers.
fooProvider2.stop();
id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.VALID);
cs = (CheckService) getContext().getService(cs_ref);
props = cs.getProps();
// Check properties: no provider left -> invocation fails again, counters back to 0.
assertFalse("check CheckService invocation - 4", ((Boolean)props.get("result")).booleanValue()); // False, no more provider.
assertEquals("check void bind invocation - 4", ((Integer)props.get("voidB")).intValue(), 0);
assertEquals("check void unbind callback invocation - 4", ((Integer)props.get("voidU")).intValue(), 0);
assertEquals("check object bind callback invocation - 4", ((Integer)props.get("objectB")).intValue(), 0);
assertEquals("check object unbind callback invocation - 4", ((Integer)props.get("objectU")).intValue(), 0);
assertEquals("check ref bind callback invocation - 4", ((Integer)props.get("refB")).intValue(), 0);
assertEquals("check ref unbind callback invocation - 4", ((Integer)props.get("refU")).intValue(), 0);
assertEquals("Check FS invocation (int) - 4", ((Integer)props.get("int")).intValue(), 0);
assertEquals("Check FS invocation (long) - 4", ((Long)props.get("long")).longValue(), 0);
assertEquals("Check FS invocation (double) - 4", ((Double)props.get("double")).doubleValue(), 0.0);
// Release the references obtained during the test.
id = null;
cs = null;
getContext().ungetService(arch_ref);
getContext().ungetService(cs_ref);
}
}
| |
/**
* Copyright 2009-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.javacrumbs.jsonunit.core.internal;
import net.javacrumbs.jsonunit.core.Configuration;
import net.javacrumbs.jsonunit.core.Option;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.String;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import static java.util.Collections.emptySet;
import static net.javacrumbs.jsonunit.core.Option.COMPARING_ONLY_STRUCTURE;
import static net.javacrumbs.jsonunit.core.Option.IGNORING_EXTRA_FIELDS;
import static net.javacrumbs.jsonunit.core.Option.IGNORING_VALUES;
import static net.javacrumbs.jsonunit.core.internal.JsonUtils.convertToJson;
import static net.javacrumbs.jsonunit.core.internal.JsonUtils.getNode;
import static net.javacrumbs.jsonunit.core.internal.JsonUtils.quoteIfNeeded;
import static net.javacrumbs.jsonunit.core.internal.Node.KeyValue;
import static net.javacrumbs.jsonunit.core.internal.Node.NodeType;
/**
 * Compares JSON structures. Mainly for internal use, the API might be more volatile than the rest.
 *
 * @author Lukas Krecan
 */
public class Diff {

    /** Prefix that marks an expected string value as a regular expression to match against. */
    private static final String REGEX_PLACEHOLDER = "${json-unit.regex}";

    private final Node expectedRoot;
    private final Node actualRoot;
    private final Differences differences = new Differences();
    private final String startPath;
    // Guards compare() so the comparison is executed at most once.
    private boolean compared = false;
    private final Configuration configuration;

    private static final Logger diffLogger = LoggerFactory.getLogger("net.javacrumbs.jsonunit.difference.diff");
    private static final Logger valuesLogger = LoggerFactory.getLogger("net.javacrumbs.jsonunit.difference.values");

    private Diff(Node expected, Node actual, String startPath, Configuration configuration) {
        this.expectedRoot = expected;
        this.actualRoot = actual;
        this.startPath = startPath;
        this.configuration = configuration;
    }

    /**
     * Creates a new Diff, converting both values to JSON nodes first.
     *
     * @param expected expected value; quoted if needed so plain strings can be passed
     * @param actual actual value
     * @param actualName name of the actual value, used in conversion error messages
     * @param startPath path inside the actual document where the comparison starts ("" for the root)
     * @param configuration comparison configuration (options, numeric tolerance, ignore placeholder)
     * @return the new Diff
     */
    public static Diff create(Object expected, Object actual, String actualName, String startPath, Configuration configuration) {
        return new Diff(convertToJson(quoteIfNeeded(expected), "expected"), convertToJson(actual, actualName), startPath, configuration);
    }

    /** Lazily performs the comparison exactly once, recording differences as a side effect. */
    private void compare() {
        if (!compared) {
            Node part = getNode(actualRoot, startPath);
            if (part.isMissingNode()) {
                structureDifferenceFound("Missing node in path \"%s\".", startPath);
            } else {
                compareNodes(expectedRoot, part, startPath);
            }
            compared = true;
        }
    }

    /**
     * Compares object nodes: reports missing/extra keys, then recurses into the common fields.
     *
     * @param expected expected object node
     * @param actual actual object node
     * @param path path of the compared node, used in difference messages
     */
    private void compareObjectNodes(Node expected, Node actual, String path) {
        Map<String, Node> expectedFields = getFields(expected);
        Map<String, Node> actualFields = getFields(actual);
        Set<String> expectedKeys = expectedFields.keySet();
        Set<String> actualKeys = actualFields.keySet();
        if (!expectedKeys.equals(actualKeys)) {
            Set<String> missingKeys = getMissingKeys(expectedKeys, actualKeys);
            Set<String> extraKeys = getExtraKeys(expectedKeys, actualKeys);
            if (hasOption(Option.TREATING_NULL_AS_ABSENT)) {
                // Extra keys whose actual value is null count as absent.
                extraKeys = getNotNullExtraKeys(actual, extraKeys);
            }
            if (!missingKeys.isEmpty() || !extraKeys.isEmpty()) {
                String missingKeysMessage = getMissingKeysMessage(missingKeys, path);
                String extraKeysMessage = getExtraKeysMessage(extraKeys, path);
                structureDifferenceFound("Different keys found in node \"%s\". Expected %s, got %s. %s %s", path, sort(expectedFields.keySet()), sort(actualFields.keySet()), missingKeysMessage, extraKeysMessage);
            }
        }
        for (String fieldName : commonFields(expectedFields, actualFields)) {
            Node expectedNode = expectedFields.get(fieldName);
            Node actualNode = actualFields.get(fieldName);
            String fieldPath = getPath(path, fieldName);
            compareNodes(expectedNode, actualNode, fieldPath);
        }
    }

    /**
     * Returns those of the given extra keys whose value in the actual node is not null.
     *
     * @param actual actual object node
     * @param extraKeys candidate extra keys
     * @return extra keys with non-null values
     */
    private static Set<String> getNotNullExtraKeys(Node actual, Set<String> extraKeys) {
        Set<String> notNullExtraKeys = new TreeSet<String>();
        for (String extraKey : extraKeys) {
            if (!actual.get(extraKey).isNull()) {
                notNullExtraKeys.add(extraKey);
            }
        }
        return notNullExtraKeys;
    }

    /** Formats the "Missing: ..." fragment of a structure-difference message; "" when nothing is missing. */
    private static String getMissingKeysMessage(Set<String> missingKeys, String path) {
        if (!missingKeys.isEmpty()) {
            return "Missing: " + appendKeysToPrefix(missingKeys, path);
        } else {
            return "";
        }
    }

    /** Returns the expected keys that are absent from the actual keys, sorted. */
    private static Set<String> getMissingKeys(Set<String> expectedKeys, Collection<String> actualKeys) {
        Set<String> missingKeys = new TreeSet<String>(expectedKeys);
        missingKeys.removeAll(actualKeys);
        return missingKeys;
    }

    /** Formats the "Extra: ..." fragment of a structure-difference message; "" when nothing is extra. */
    private static String getExtraKeysMessage(Set<String> extraKeys, String path) {
        if (!extraKeys.isEmpty()) {
            return "Extra: " + appendKeysToPrefix(extraKeys, path);
        } else {
            return "";
        }
    }

    /** Returns the actual keys not present among the expected keys, sorted; empty when extra fields are ignored. */
    private Set<String> getExtraKeys(Set<String> expectedKeys, Collection<String> actualKeys) {
        if (!hasOption(IGNORING_EXTRA_FIELDS)) {
            Set<String> extraKeys = new TreeSet<String>(actualKeys);
            extraKeys.removeAll(expectedKeys);
            return extraKeys;
        } else {
            return emptySet();
        }
    }

    /** True when the given comparison option is active in the configuration. */
    private boolean hasOption(Option option) {
        return configuration.getOptions().contains(option);
    }

    /** Joins the keys, each prefixed with the path and quoted, into a comma-separated string. */
    private static String appendKeysToPrefix(Iterable<String> keys, String prefix) {
        Iterator<String> iterator = keys.iterator();
        StringBuilder buffer = new StringBuilder();
        while (iterator.hasNext()) {
            String key = iterator.next();
            buffer.append("\"").append(getPath(prefix, key)).append("\"");
            if (iterator.hasNext()) {
                buffer.append(",");
            }
        }
        return buffer.toString();
    }

    /**
     * Compares two nodes, dispatching on the expected node type.
     *
     * @param expectedNode expected node
     * @param actualNode actual node
     * @param fieldPath path of the compared node, used in difference messages
     */
    private void compareNodes(Node expectedNode, Node actualNode, String fieldPath) {
        NodeType expectedNodeType = expectedNode.getNodeType();
        NodeType actualNodeType = actualNode.getNodeType();
        // The ignore placeholder matches any actual value.
        if (expectedNodeType == NodeType.STRING && configuration.getIgnorePlaceholder().equals(expectedNode.asText())) {
            return;
        }
        if (!expectedNodeType.equals(actualNodeType)) {
            valueDifferenceFound("Different value found in node \"%s\". Expected '%s', got '%s'.", fieldPath, expectedNode, actualNode);
        } else {
            switch (expectedNodeType) {
                case OBJECT:
                    compareObjectNodes(expectedNode, actualNode, fieldPath);
                    break;
                case ARRAY:
                    compareArrayNodes(expectedNode, actualNode, fieldPath);
                    break;
                case STRING:
                    compareStringValues(expectedNode.asText(), actualNode.asText(), fieldPath);
                    break;
                case NUMBER:
                    BigDecimal actualValue = actualNode.decimalValue();
                    BigDecimal expectedValue = expectedNode.decimalValue();
                    if (configuration.getTolerance() != null && !hasOption(IGNORING_VALUES)) {
                        // Numeric tolerance: values match when |expected - actual| <= tolerance.
                        BigDecimal diff = expectedValue.subtract(actualValue).abs();
                        if (diff.compareTo(configuration.getTolerance()) > 0) {
                            valueDifferenceFound("Different value found in node \"%s\". Expected %s, got %s, difference is %s, tolerance is %s",
                                fieldPath, quoteTextValue(expectedValue), quoteTextValue(actualValue), diff.toString(), configuration.getTolerance());
                        }
                    } else {
                        compareValues(expectedValue, actualValue, fieldPath);
                    }
                    break;
                case BOOLEAN:
                    compareValues(expectedNode.asBoolean(), actualNode.asBoolean(), fieldPath);
                    break;
                case NULL:
                    // Both nodes are null: nothing to compare.
                    break;
                default:
                    throw new IllegalStateException("Unexpected node type " + expectedNodeType);
            }
        }
    }

    /** Compares string values; the expected value is treated as a regex when it starts with the regex placeholder. */
    private void compareStringValues(String expectedValue, String actualValue, String path) {
        if (hasOption(IGNORING_VALUES)) {
            return;
        }
        if (isRegexExpected(expectedValue)) {
            String pattern = getRegexPattern(expectedValue);
            if (!actualValue.matches(pattern)) {
                valueDifferenceFound("Different value found in node \"%s\". Pattern %s did not match %s.", path, quoteTextValue(pattern), quoteTextValue(actualValue));
            }
        } else {
            compareValues(expectedValue, actualValue, path);
        }
    }

    /** Strips the regex placeholder prefix, returning the pattern itself. */
    private static String getRegexPattern(String expectedValue) {
        return expectedValue.substring(REGEX_PLACEHOLDER.length());
    }

    /** True when the expected string starts with the regex placeholder. */
    private static boolean isRegexExpected(String expectedValue) {
        return expectedValue.startsWith(REGEX_PLACEHOLDER);
    }

    /** Compares scalar values with equals(), unless values are being ignored. */
    private void compareValues(Object expectedValue, Object actualValue, String path) {
        if (!hasOption(IGNORING_VALUES)) {
            if (!expectedValue.equals(actualValue)) {
                valueDifferenceFound("Different value found in node \"%s\". Expected %s, got %s.", path, quoteTextValue(expectedValue), quoteTextValue(actualValue));
            }
        }
    }

    /**
     * If the value is a String it is quoted in ", otherwise it is returned unchanged.
     *
     * @param value value to render
     * @return quoted string, or the original value for non-strings
     */
    private static Object quoteTextValue(Object value) {
        if (value instanceof String) {
            return "\"" + value + "\"";
        } else {
            return value;
        }
    }

    /** Compares array nodes element by element, or as multisets when array order is ignored. */
    private void compareArrayNodes(Node expectedNode, Node actualNode, String path) {
        List<Node> expectedElements = asList(expectedNode.arrayElements());
        List<Node> actualElements = asList(actualNode.arrayElements());
        if (expectedElements.size() != actualElements.size()) {
            structureDifferenceFound("Array \"%s\" has different length. Expected %d, got %d.", path, expectedElements.size(), actualElements.size());
        }
        List<Node> extraValues = new ArrayList<Node>();
        List<Node> missingValues = new ArrayList<Node>(expectedElements);
        if (hasOption(Option.IGNORING_ARRAY_ORDER)) {
            // Match each actual element against the still-unmatched expected elements.
            for (Node actual : actualElements) {
                int index = indexOf(missingValues, actual);
                if (index != -1) {
                    missingValues.remove(index);
                } else {
                    extraValues.add(actual);
                }
            }
            if (!missingValues.isEmpty() || !extraValues.isEmpty()) {
                valueDifferenceFound("Array \"%s\" has different content. Missing values %s, extra values %s", path, missingValues, extraValues);
            }
        } else {
            for (int i = 0; i < Math.min(expectedElements.size(), actualElements.size()); i++) {
                compareNodes(expectedElements.get(i), actualElements.get(i), getArrayPath(path, i));
            }
        }
    }

    /**
     * Finds element in the expected elements. Can not use Jackson comparison since we need to take Options into account.
     *
     * @param expectedElements remaining expected elements
     * @param actual actual element to look for
     * @return index of the first similar element, or -1 when none matches
     */
    private int indexOf(List<Node> expectedElements, Node actual) {
        int i = 0;
        for (Node expected : expectedElements) {
            Diff diff = new Diff(expected, actual, "", configuration);
            if (diff.similar()) {
                return i;
            }
            i++;
        }
        return -1;
    }

    /** Drains the iterator into an unmodifiable list. */
    private static List<Node> asList(Iterator<Node> elements) {
        List<Node> result = new ArrayList<Node>();
        while (elements.hasNext()) {
            // FIX: local was previously named "Node", shadowing its own type name.
            Node element = elements.next();
            result.add(element);
        }
        return Collections.unmodifiableList(result);
    }

    /**
     * Construct path to an element.
     *
     * @param parent parent path ("" for the root)
     * @param name field name
     * @return dot-separated path
     */
    private static String getPath(String parent, String name) {
        if (parent.length() == 0) {
            return name;
        } else {
            return parent + "." + name;
        }
    }

    /**
     * Constructs path to an array element.
     *
     * @param parent parent path ("" for the root)
     * @param i element index
     * @return parent path with an [i] suffix
     */
    private static String getArrayPath(String parent, int i) {
        if (parent.length() == 0) {
            return "[" + i + "]";
        } else {
            return parent + "[" + i + "]";
        }
    }

    /** Records a structural difference; always reported. */
    private void structureDifferenceFound(String message, Object... arguments) {
        differences.add(message, arguments);
    }

    /** Records a value difference, unless only the structure is being compared. */
    private void valueDifferenceFound(String message, Object... arguments) {
        if (!hasOption(COMPARING_ONLY_STRUCTURE)) {
            differences.add(message, arguments);
        }
    }

    /** Returns the field names present in both maps, sorted and unmodifiable. */
    private static Set<String> commonFields(Map<String, Node> expectedFields, Map<String, Node> actualFields) {
        Set<String> result = new TreeSet<String>(expectedFields.keySet());
        result.retainAll(actualFields.keySet());
        return Collections.unmodifiableSet(result);
    }

    /** Returns a sorted copy of the given set. */
    private static SortedSet<String> sort(Set<String> set) {
        return new TreeSet<String>(set);
    }

    /**
     * Runs the comparison (at most once) and returns true when no difference was found.
     *
     * @return true when the documents are similar under the configured options
     */
    public boolean similar() {
        compare();
        boolean result = differences.isEmpty();
        logDifferences(result);
        return result;
    }

    /** Logs the differences and the compared documents on the debug loggers when the comparison failed. */
    private void logDifferences(boolean result) {
        if (!result) {
            if (diffLogger.isDebugEnabled()) {
                diffLogger.debug(getDifferences().trim());
            }
            if (valuesLogger.isDebugEnabled()) {
                valuesLogger.debug("Comparing expected:\n{}\n------------\nwith actual:\n{}\n", expectedRoot, getNode(actualRoot, startPath));
            }
        }
    }

    /**
     * Returns children of an ObjectNode.
     *
     * @param node object node
     * @return unmodifiable map of field name to value
     */
    private static Map<String, Node> getFields(Node node) {
        Map<String, Node> result = new HashMap<String, Node>();
        Iterator<KeyValue> fields = node.fields();
        while (fields.hasNext()) {
            KeyValue field = fields.next();
            result.put(field.getKey(), field.getValue());
        }
        return Collections.unmodifiableMap(result);
    }

    @Override
    public String toString() {
        return differences();
    }

    /**
     * Returns a human readable description of the differences, or a fixed message when the documents match.
     *
     * @return difference description
     */
    public String differences() {
        if (similar()) {
            return "JSON documents have the same value.";
        }
        return getDifferences();
    }

    /** Renders all recorded differences into a single string. */
    private String getDifferences() {
        StringBuilder message = new StringBuilder();
        differences.appendDifferences(message);
        return message.toString();
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.medialive.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* Placeholder documentation for DeleteChannelResponse
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/medialive-2017-10-14/DeleteChannel" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteChannelResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/** The unique arn of the channel. */
private String arn;
/** Specification of CDI inputs for this channel */
private CdiInputSpecification cdiInputSpecification;
/**
 * The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one
 * pipeline.
 */
private String channelClass;
/**
 * A list of destinations of the channel. For UDP outputs, there is one destination per output. For other types
 * (HLS, for example), there is one destination per packager.
 */
private java.util.List<OutputDestination> destinations;
/** The endpoints where outgoing connections initiate from */
private java.util.List<ChannelEgressEndpoint> egressEndpoints;
// NOTE(review): undocumented in the service model; presumably the channel's encoder settings — confirm.
private EncoderSettings encoderSettings;
/** The unique id of the channel. */
private String id;
/** List of input attachments for channel. */
private java.util.List<InputAttachment> inputAttachments;
/** Specification of network and file inputs for this channel */
private InputSpecification inputSpecification;
/** The log level being written to CloudWatch Logs. */
private String logLevel;
/** The name of the channel. (user-mutable) */
private String name;
/** Runtime details for the pipelines of a running channel. */
private java.util.List<PipelineDetail> pipelineDetails;
/** The number of currently healthy pipelines. */
private Integer pipelinesRunningCount;
/** The Amazon Resource Name (ARN) of the role assumed when running the Channel. */
private String roleArn;
// NOTE(review): undocumented in the service model; presumably the channel state — confirm allowed values.
private String state;
/** A collection of key-value pairs. */
private java.util.Map<String, String> tags;
/** Settings for VPC output */
private VpcOutputSettingsDescription vpc;
/**
 * The unique arn of the channel.
 *
 * @param arn
 * The unique arn of the channel.
 */
public void setArn(String arn) {
// Plain assignment; the ARN format is not validated here.
this.arn = arn;
}
/**
 * The unique arn of the channel.
 *
 * @return The unique arn of the channel; null when never set.
 */
public String getArn() {
return this.arn;
}
/**
 * The unique arn of the channel.
 *
 * @param arn
 * The unique arn of the channel.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withArn(String arn) {
// Fluent variant: delegates to setArn and returns this for chaining.
setArn(arn);
return this;
}
/**
 * Specification of CDI inputs for this channel
 *
 * @param cdiInputSpecification
 * Specification of CDI inputs for this channel
 */
public void setCdiInputSpecification(CdiInputSpecification cdiInputSpecification) {
// Stores the reference as-is (no defensive copy).
this.cdiInputSpecification = cdiInputSpecification;
}
/**
 * Specification of CDI inputs for this channel
 *
 * @return Specification of CDI inputs for this channel; null when never set.
 */
public CdiInputSpecification getCdiInputSpecification() {
return this.cdiInputSpecification;
}
/**
 * Specification of CDI inputs for this channel
 *
 * @param cdiInputSpecification
 * Specification of CDI inputs for this channel
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withCdiInputSpecification(CdiInputSpecification cdiInputSpecification) {
// Fluent variant: delegates to setCdiInputSpecification and returns this for chaining.
setCdiInputSpecification(cdiInputSpecification);
return this;
}
/**
 * The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one
 * pipeline.
 *
 * @param channelClass
 *        The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel
 *        with one pipeline.
 * @see ChannelClass
 */
public void setChannelClass(String channelClass) {
// The value is stored as a raw String; it is not validated against the ChannelClass enum.
this.channelClass = channelClass;
}
/**
 * The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one
 * pipeline.
 *
 * @return The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel
 *         with one pipeline. Null when never set.
 * @see ChannelClass
 */
public String getChannelClass() {
return this.channelClass;
}
/**
 * The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one
 * pipeline.
 *
 * @param channelClass
 *        The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel
 *        with one pipeline.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see ChannelClass
 */
public DeleteChannelResult withChannelClass(String channelClass) {
// Fluent variant: delegates to setChannelClass and returns this for chaining.
setChannelClass(channelClass);
return this;
}
/**
 * The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel with one
 * pipeline.
 *
 * @param channelClass
 *        The class for this channel. STANDARD for a channel with two pipelines or SINGLE_PIPELINE for a channel
 *        with one pipeline. Must not be null (toString() is invoked on it).
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see ChannelClass
 */
public DeleteChannelResult withChannelClass(ChannelClass channelClass) {
// NOTE(review): assigns the field directly instead of delegating to setChannelClass,
// unlike the String overload; throws NullPointerException when channelClass is null.
this.channelClass = channelClass.toString();
return this;
}
/**
 * A list of destinations of the channel. For UDP outputs, there is one destination per output. For other types
 * (HLS, for example), there is one destination per packager.
 *
 * @return A list of destinations of the channel. For UDP outputs, there is one destination per output. For other
 *         types (HLS, for example), there is one destination per packager.
 */
public java.util.List<OutputDestination> getDestinations() {
// Returns the live internal list (no defensive copy); may be null when never set.
return destinations;
}
/**
 * A list of destinations of the channel. For UDP outputs, there is one destination per output. For other types
 * (HLS, for example), there is one destination per packager.
 *
 * @param destinations
 *        A list of destinations of the channel. For UDP outputs, there is one destination per output. For other
 *        types (HLS, for example), there is one destination per packager. A null value clears the field.
 */
public void setDestinations(java.util.Collection<OutputDestination> destinations) {
    // null clears the field; any other collection is defensively copied.
    this.destinations = (destinations == null) ? null : new java.util.ArrayList<OutputDestination>(destinations);
}
/**
 * A list of destinations of the channel. For UDP outputs, there is one destination per output. For other types
 * (HLS, for example), there is one destination per packager.
 *
 * <p>
 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
 * {@link #setDestinations(java.util.Collection)} or {@link #withDestinations(java.util.Collection)} if you want to
 * override the existing values.
 * </p>
 *
 * @param destinations
 *        A list of destinations of the channel. For UDP outputs, there is one destination per output. For other
 *        types (HLS, for example), there is one destination per packager.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withDestinations(OutputDestination... destinations) {
    // Lazily initialise the backing list through the setter, then append all values in order.
    if (this.destinations == null) {
        setDestinations(new java.util.ArrayList<OutputDestination>(destinations.length));
    }
    java.util.Collections.addAll(this.destinations, destinations);
    return this;
}
/**
 * A list of destinations of the channel. For UDP outputs, there is one destination per output. For other types
 * (HLS, for example), there is one destination per packager.
 *
 * @param destinations
 *        A list of destinations of the channel. For UDP outputs, there is one destination per output. For other
 *        types (HLS, for example), there is one destination per packager.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withDestinations(java.util.Collection<OutputDestination> destinations) {
// Replaces (does not append to) the existing list, unlike the varargs overload.
setDestinations(destinations);
return this;
}
/**
 * The endpoints where outgoing connections initiate from
 *
 * @return The endpoints where outgoing connections initiate from
 */
public java.util.List<ChannelEgressEndpoint> getEgressEndpoints() {
// Returns the live internal list (no defensive copy); may be null when never set.
return egressEndpoints;
}
/**
 * The endpoints where outgoing connections initiate from
 *
 * @param egressEndpoints
 *        The endpoints where outgoing connections initiate from. A null value clears the field.
 */
public void setEgressEndpoints(java.util.Collection<ChannelEgressEndpoint> egressEndpoints) {
    // null clears the field; any other collection is defensively copied.
    this.egressEndpoints = (egressEndpoints == null) ? null : new java.util.ArrayList<ChannelEgressEndpoint>(egressEndpoints);
}
/**
 * The endpoints where outgoing connections initiate from
 * <p>
 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
 * {@link #setEgressEndpoints(java.util.Collection)} or {@link #withEgressEndpoints(java.util.Collection)} if you
 * want to override the existing values.
 * </p>
 *
 * @param egressEndpoints
 *        The endpoints where outgoing connections initiate from
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withEgressEndpoints(ChannelEgressEndpoint... egressEndpoints) {
    // Lazily initialise the backing list through the setter, then append all values in order.
    if (this.egressEndpoints == null) {
        setEgressEndpoints(new java.util.ArrayList<ChannelEgressEndpoint>(egressEndpoints.length));
    }
    java.util.Collections.addAll(this.egressEndpoints, egressEndpoints);
    return this;
}
/**
 * The endpoints where outgoing connections initiate from
 *
 * @param egressEndpoints
 *        The endpoints where outgoing connections initiate from
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withEgressEndpoints(java.util.Collection<ChannelEgressEndpoint> egressEndpoints) {
// Replaces (does not append to) the existing list, unlike the varargs overload.
setEgressEndpoints(egressEndpoints);
return this;
}
/**
 * Sets the encoder settings for this channel.
 *
 * @param encoderSettings
 *        the encoder settings (undocumented in the service model)
 */
public void setEncoderSettings(EncoderSettings encoderSettings) {
this.encoderSettings = encoderSettings;
}
/**
 * Returns the encoder settings for this channel.
 *
 * @return the encoder settings; null when never set (undocumented in the service model)
 */
public EncoderSettings getEncoderSettings() {
return this.encoderSettings;
}
/**
* @param encoderSettings
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DeleteChannelResult withEncoderSettings(EncoderSettings encoderSettings) {
setEncoderSettings(encoderSettings);
return this;
}
/**
 * The unique id of the channel.
 *
 * @param id
 *        The unique id of the channel.
 */
public void setId(String id) {
    this.id = id;
}

/**
 * The unique id of the channel.
 *
 * @return The unique id of the channel.
 */
public String getId() {
    return id;
}

/**
 * Fluent setter for the channel id.
 *
 * @param id
 *        The unique id of the channel.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withId(String id) {
    this.id = id;
    return this;
}
/**
 * List of input attachments for channel.
 *
 * @return the list of input attachments, or null if unset
 */
public java.util.List<InputAttachment> getInputAttachments() {
    return inputAttachments;
}

/**
 * Replaces the list of input attachments for channel. A defensive copy of the supplied
 * collection is stored; passing null clears the property.
 *
 * @param inputAttachments
 *        List of input attachments for channel.
 */
public void setInputAttachments(java.util.Collection<InputAttachment> inputAttachments) {
    // Copy into a fresh ArrayList so later mutation of the caller's collection has no effect here.
    this.inputAttachments = (inputAttachments == null) ? null : new java.util.ArrayList<InputAttachment>(inputAttachments);
}

/**
 * Appends the given input attachments to any already present.
 * <p>
 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
 * {@link #setInputAttachments(java.util.Collection)} or {@link #withInputAttachments(java.util.Collection)} if you
 * want to override the existing values.
 * </p>
 *
 * @param inputAttachments
 *        List of input attachments for channel.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withInputAttachments(InputAttachment... inputAttachments) {
    if (this.inputAttachments == null) {
        this.inputAttachments = new java.util.ArrayList<InputAttachment>(inputAttachments.length);
    }
    java.util.Collections.addAll(this.inputAttachments, inputAttachments);
    return this;
}

/**
 * Replaces the list of input attachments for channel.
 *
 * @param inputAttachments
 *        List of input attachments for channel.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withInputAttachments(java.util.Collection<InputAttachment> inputAttachments) {
    setInputAttachments(inputAttachments);
    return this;
}
/**
 * Specification of network and file inputs for this channel
 *
 * @param inputSpecification
 *        Specification of network and file inputs for this channel
 */
public void setInputSpecification(InputSpecification inputSpecification) {
    this.inputSpecification = inputSpecification;
}

/**
 * Specification of network and file inputs for this channel
 *
 * @return Specification of network and file inputs for this channel
 */
public InputSpecification getInputSpecification() {
    return inputSpecification;
}

/**
 * Fluent setter for the input specification.
 *
 * @param inputSpecification
 *        Specification of network and file inputs for this channel
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withInputSpecification(InputSpecification inputSpecification) {
    this.inputSpecification = inputSpecification;
    return this;
}
/**
 * The log level being written to CloudWatch Logs.
 *
 * @param logLevel
 *        The log level being written to CloudWatch Logs.
 * @see LogLevel
 */
public void setLogLevel(String logLevel) {
    this.logLevel = logLevel;
}

/**
 * The log level being written to CloudWatch Logs.
 *
 * @return The log level being written to CloudWatch Logs.
 * @see LogLevel
 */
public String getLogLevel() {
    return logLevel;
}

/**
 * Fluent setter for the log level.
 *
 * @param logLevel
 *        The log level being written to CloudWatch Logs.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see LogLevel
 */
public DeleteChannelResult withLogLevel(String logLevel) {
    this.logLevel = logLevel;
    return this;
}

/**
 * Fluent setter accepting the {@link LogLevel} enum; stored as its string form.
 *
 * @param logLevel
 *        the log level; must not be null
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see LogLevel
 */
public DeleteChannelResult withLogLevel(LogLevel logLevel) {
    this.logLevel = logLevel.toString();
    return this;
}
/**
 * The name of the channel. (user-mutable)
 *
 * @param name
 *        The name of the channel. (user-mutable)
 */
public void setName(String name) {
    this.name = name;
}

/**
 * The name of the channel. (user-mutable)
 *
 * @return The name of the channel. (user-mutable)
 */
public String getName() {
    return name;
}

/**
 * Fluent setter for the channel name.
 *
 * @param name
 *        The name of the channel. (user-mutable)
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withName(String name) {
    this.name = name;
    return this;
}
/**
 * Runtime details for the pipelines of a running channel.
 *
 * @return the list of pipeline details, or null if unset
 */
public java.util.List<PipelineDetail> getPipelineDetails() {
    return pipelineDetails;
}

/**
 * Replaces the runtime details for the pipelines of a running channel. A defensive copy of the
 * supplied collection is stored; passing null clears the property.
 *
 * @param pipelineDetails
 *        Runtime details for the pipelines of a running channel.
 */
public void setPipelineDetails(java.util.Collection<PipelineDetail> pipelineDetails) {
    // Copy into a fresh ArrayList so later mutation of the caller's collection has no effect here.
    this.pipelineDetails = (pipelineDetails == null) ? null : new java.util.ArrayList<PipelineDetail>(pipelineDetails);
}

/**
 * Appends the given pipeline details to any already present.
 * <p>
 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
 * {@link #setPipelineDetails(java.util.Collection)} or {@link #withPipelineDetails(java.util.Collection)} if you
 * want to override the existing values.
 * </p>
 *
 * @param pipelineDetails
 *        Runtime details for the pipelines of a running channel.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withPipelineDetails(PipelineDetail... pipelineDetails) {
    if (this.pipelineDetails == null) {
        this.pipelineDetails = new java.util.ArrayList<PipelineDetail>(pipelineDetails.length);
    }
    java.util.Collections.addAll(this.pipelineDetails, pipelineDetails);
    return this;
}

/**
 * Replaces the runtime details for the pipelines of a running channel.
 *
 * @param pipelineDetails
 *        Runtime details for the pipelines of a running channel.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withPipelineDetails(java.util.Collection<PipelineDetail> pipelineDetails) {
    setPipelineDetails(pipelineDetails);
    return this;
}
/**
 * The number of currently healthy pipelines.
 *
 * @param pipelinesRunningCount
 *        The number of currently healthy pipelines.
 */
public void setPipelinesRunningCount(Integer pipelinesRunningCount) {
    this.pipelinesRunningCount = pipelinesRunningCount;
}

/**
 * The number of currently healthy pipelines.
 *
 * @return The number of currently healthy pipelines.
 */
public Integer getPipelinesRunningCount() {
    return pipelinesRunningCount;
}

/**
 * Fluent setter for the healthy-pipeline count.
 *
 * @param pipelinesRunningCount
 *        The number of currently healthy pipelines.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withPipelinesRunningCount(Integer pipelinesRunningCount) {
    this.pipelinesRunningCount = pipelinesRunningCount;
    return this;
}
/**
 * The Amazon Resource Name (ARN) of the role assumed when running the Channel.
 *
 * @param roleArn
 *        The Amazon Resource Name (ARN) of the role assumed when running the Channel.
 */
public void setRoleArn(String roleArn) {
    this.roleArn = roleArn;
}

/**
 * The Amazon Resource Name (ARN) of the role assumed when running the Channel.
 *
 * @return The Amazon Resource Name (ARN) of the role assumed when running the Channel.
 */
public String getRoleArn() {
    return roleArn;
}

/**
 * Fluent setter for the role ARN.
 *
 * @param roleArn
 *        The Amazon Resource Name (ARN) of the role assumed when running the Channel.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withRoleArn(String roleArn) {
    this.roleArn = roleArn;
    return this;
}
/**
 * Sets the channel state as a string.
 *
 * @param state
 *        the state value
 * @see ChannelState
 */
public void setState(String state) {
    this.state = state;
}

/**
 * Returns the channel state as a string.
 *
 * @return the state value, or null if unset
 * @see ChannelState
 */
public String getState() {
    return state;
}

/**
 * Fluent setter for the channel state.
 *
 * @param state
 *        the state value
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see ChannelState
 */
public DeleteChannelResult withState(String state) {
    this.state = state;
    return this;
}

/**
 * Fluent setter accepting the {@link ChannelState} enum; stored as its string form.
 *
 * @param state
 *        the channel state; must not be null
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see ChannelState
 */
public DeleteChannelResult withState(ChannelState state) {
    this.state = state.toString();
    return this;
}
/**
 * A collection of key-value pairs.
 *
 * @return the tag map, or null if unset
 */
public java.util.Map<String, String> getTags() {
    return tags;
}

/**
 * A collection of key-value pairs. The map reference is stored directly (no defensive copy).
 *
 * @param tags
 *        A collection of key-value pairs.
 */
public void setTags(java.util.Map<String, String> tags) {
    this.tags = tags;
}

/**
 * Fluent setter for the tag map.
 *
 * @param tags
 *        A collection of key-value pairs.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withTags(java.util.Map<String, String> tags) {
    this.tags = tags;
    return this;
}
/**
 * Add a single Tags entry, creating the map lazily on first use.
 *
 * @param key the tag key; must not already be present
 * @param value the tag value
 * @return Returns a reference to this object so that method calls can be chained together.
 * @throws IllegalArgumentException if an entry with the same key was already added
 * @see DeleteChannelResult#withTags
 */
public DeleteChannelResult addTagsEntry(String key, String value) {
    if (null == this.tags) {
        this.tags = new java.util.HashMap<String, String>();
    }
    if (this.tags.containsKey(key)) {
        // String concatenation renders a null key as "null" instead of throwing an
        // unrelated NullPointerException the way key.toString() did.
        throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
    }
    this.tags.put(key, value);
    return this;
}
/**
 * Removes all the entries added into Tags.
 *
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult clearTagsEntries() {
    // Drop the whole map (rather than clear()) to match the lazy-create behavior of addTagsEntry.
    this.tags = null;
    return this;
}
/**
 * Settings for VPC output
 *
 * @param vpc
 *        Settings for VPC output
 */
public void setVpc(VpcOutputSettingsDescription vpc) {
    this.vpc = vpc;
}

/**
 * Settings for VPC output
 *
 * @return Settings for VPC output
 */
public VpcOutputSettingsDescription getVpc() {
    return vpc;
}

/**
 * Fluent setter for the VPC output settings.
 *
 * @param vpc
 *        Settings for VPC output
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public DeleteChannelResult withVpc(VpcOutputSettingsDescription vpc) {
    this.vpc = vpc;
    return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
// NOTE(review): every field except Vpc appends a trailing "," even when it is the last one
// present, so e.g. "{Tags: x,}" is the expected output when only Tags is set — keep the
// exact format; generated-code consumers may compare these strings verbatim.
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getArn() != null)
sb.append("Arn: ").append(getArn()).append(",");
if (getCdiInputSpecification() != null)
sb.append("CdiInputSpecification: ").append(getCdiInputSpecification()).append(",");
if (getChannelClass() != null)
sb.append("ChannelClass: ").append(getChannelClass()).append(",");
if (getDestinations() != null)
sb.append("Destinations: ").append(getDestinations()).append(",");
if (getEgressEndpoints() != null)
sb.append("EgressEndpoints: ").append(getEgressEndpoints()).append(",");
if (getEncoderSettings() != null)
sb.append("EncoderSettings: ").append(getEncoderSettings()).append(",");
if (getId() != null)
sb.append("Id: ").append(getId()).append(",");
if (getInputAttachments() != null)
sb.append("InputAttachments: ").append(getInputAttachments()).append(",");
if (getInputSpecification() != null)
sb.append("InputSpecification: ").append(getInputSpecification()).append(",");
if (getLogLevel() != null)
sb.append("LogLevel: ").append(getLogLevel()).append(",");
if (getName() != null)
sb.append("Name: ").append(getName()).append(",");
if (getPipelineDetails() != null)
sb.append("PipelineDetails: ").append(getPipelineDetails()).append(",");
if (getPipelinesRunningCount() != null)
sb.append("PipelinesRunningCount: ").append(getPipelinesRunningCount()).append(",");
if (getRoleArn() != null)
sb.append("RoleArn: ").append(getRoleArn()).append(",");
if (getState() != null)
sb.append("State: ").append(getState()).append(",");
if (getTags() != null)
sb.append("Tags: ").append(getTags()).append(",");
if (getVpc() != null)
sb.append("Vpc: ").append(getVpc());
sb.append("}");
return sb.toString();
}
/**
 * Field-by-field equality: two instances are equal when every property is equal, with two
 * null properties treated as equal.
 *
 * @see java.lang.Object#equals(Object)
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so this also covers the explicit null check.
    if (!(obj instanceof DeleteChannelResult)) {
        return false;
    }
    DeleteChannelResult other = (DeleteChannelResult) obj;
    // java.util.Objects.equals(a, b) is true when both are null or a.equals(b) —
    // exactly the null-xor / equals pattern this replaces.
    return java.util.Objects.equals(getArn(), other.getArn())
            && java.util.Objects.equals(getCdiInputSpecification(), other.getCdiInputSpecification())
            && java.util.Objects.equals(getChannelClass(), other.getChannelClass())
            && java.util.Objects.equals(getDestinations(), other.getDestinations())
            && java.util.Objects.equals(getEgressEndpoints(), other.getEgressEndpoints())
            && java.util.Objects.equals(getEncoderSettings(), other.getEncoderSettings())
            && java.util.Objects.equals(getId(), other.getId())
            && java.util.Objects.equals(getInputAttachments(), other.getInputAttachments())
            && java.util.Objects.equals(getInputSpecification(), other.getInputSpecification())
            && java.util.Objects.equals(getLogLevel(), other.getLogLevel())
            && java.util.Objects.equals(getName(), other.getName())
            && java.util.Objects.equals(getPipelineDetails(), other.getPipelineDetails())
            && java.util.Objects.equals(getPipelinesRunningCount(), other.getPipelinesRunningCount())
            && java.util.Objects.equals(getRoleArn(), other.getRoleArn())
            && java.util.Objects.equals(getState(), other.getState())
            && java.util.Objects.equals(getTags(), other.getTags())
            && java.util.Objects.equals(getVpc(), other.getVpc());
}
/**
 * Hash code consistent with {@link #equals(Object)}: the standard 31-based combination of all
 * properties, with a null property contributing 0.
 *
 * @see java.lang.Object#hashCode()
 */
@Override
public int hashCode() {
    // java.util.Arrays.hashCode(Object[]) computes 31 * acc + (e == null ? 0 : e.hashCode())
    // starting from 1 — identical to the hand-rolled accumulation it replaces.
    return java.util.Arrays.hashCode(new Object[] {
            getArn(), getCdiInputSpecification(), getChannelClass(), getDestinations(),
            getEgressEndpoints(), getEncoderSettings(), getId(), getInputAttachments(),
            getInputSpecification(), getLogLevel(), getName(), getPipelineDetails(),
            getPipelinesRunningCount(), getRoleArn(), getState(), getTags(), getVpc() });
}
/**
 * Creates a shallow copy of this result via {@link Object#clone()}.
 *
 * @return a shallow field-for-field copy of this object
 */
@Override
public DeleteChannelResult clone() {
    try {
        return (DeleteChannelResult) super.clone();
    } catch (CloneNotSupportedException e) {
        // Unreachable in practice: this type is Cloneable. Surfaced unchecked just in case.
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
    }
}
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v4.view;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.TypedArray;
import android.database.DataSetObserver;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.HorizontalScrollView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.github.paradam.infinitepager.InfinitePagerAdapter;
import com.github.paradam.infinitepager.InfiniteViewPager;
import com.github.paradam.infinitepager.R;
/**
 * To be used with ViewPager to provide a tab indicator component which gives constant feedback as to
 * the user's scroll progress.
* <p>
* To use the component, simply add it to your view hierarchy. Then in your
* {@link android.app.Activity} or {@link android.support.v4.app.Fragment} call
* {@link #setViewPager(android.support.v4.view.ViewPager)} providing it the ViewPager this layout is being used for.
* <p>
* The colors can be customized in two ways. The first and simplest is to provide an array of colors
* via {@link #setSelectedIndicatorColors(int...)} and {@link #setDividerColors(int...)}. The
* alternative is via the {@link SlidingTabLayout.TabColorizer} interface which provides you complete control over
* which color is used for any individual position.
* <p>
* The views used as tabs can be customized by calling {@link #setCustomTabView(int, int)},
* providing the layout ID of your custom layout.
*
* Based off the developer.android.com sample code "SlidingTabsColors" (http://developer.android.com/samples/SlidingTabsColors/index.html)
* with a few additions.
*/
public class SlidingTabLayout extends HorizontalScrollView implements SlidablePagerTitle, View.OnTouchListener, ViewPager.Decor {
/**
 * Level of hidability of the TabLayout.
 *
 * @see #HIDE_NONE
 * @see #HIDE_AUTO
 * @see #HIDE_PROGRAM
 */
private int hidable = HIDE_NONE;
/**
 * The PagerTitleStrip is currently collapsed and hidden at the top of the View.
 */
private static final int CLOSED = 0;
/**
 * The PagerTitleStrip is currently sliding down to become visible to the user.
 */
private static final int SLIDING_DOWN = 1;
/**
 * The PagerTitleStrip is currently open and visible to the user.
 */
private static final int OPEN = 2;
/**
 * The PagerTitleStrip is in the process of collapsing to be hidden from the user.
 */
private static final int SLIDING_UP = 3;
/**
 * The state of the PagerTitleStrip.
 *
 * @see #CLOSED
 * @see #SLIDING_DOWN
 * @see #OPEN
 * @see #SLIDING_UP
 */
private int slideState = OPEN;
/**
 * A Runnable to be run after a delay that will trigger the PagerTitleStrip to collapse upwards
 * and hide from the user.
 */
private final Runnable slideUp = new Runnable() {
@Override
public void run() {
slideOut();
}
};
/**
 * The id of the ViewPager this PagerTitleStrip is connected to.
 */
private int mPagerId;
/**
 * The duration the tabs will remain visible until they are hidden.
 *
 * Only honored if {@link #hidable} is set to {@link #HIDE_AUTO}. Ignored otherwise.
 * The default comes from the DISPLAY_TIME constant (declared elsewhere in this class).
 */
private long defaultDisplayTime = DISPLAY_TIME;
// Styleable attributes consumed by init(Context, AttributeSet). The numeric indices used
// there (0..8) are positions in this array, so the order here must not change.
private static final int[] ATTRS = new int[] {
android.R.attr.textAppearance, // index 0
android.R.attr.textSize, // index 1
android.R.attr.textColor, // index 2
android.R.attr.gravity, // index 3
R.attr.viewPager, // index 4
R.attr.autoHide, // index 5
R.attr.dividerColors, // index 6
R.attr.selectorColors, // index 7
R.attr.displayDuration // index 8
};
/**
 * Allows complete control over the colors drawn in the tab layout. Set with
 * {@link #setCustomTabColorizer(SlidingTabLayout.TabColorizer)}.
 */
public interface TabColorizer {
/**
 * @return return the color of the indicator used when {@code position} is selected.
 */
int getIndicatorColor(int position);
/**
 * @return return the color of the divider drawn to the right of {@code position}.
 */
int getDividerColor(int position);
}
/**
 * An observer that is informed when the data for the adapter changes. Both change and
 * invalidation trigger the same refresh of the tab strip.
 */
private DataSetObserver dataChangeObserver = new DataSetObserver() {
    @Override
    public void onChanged() {
        refreshTabs();
    }

    @Override
    public void onInvalidated() {
        refreshTabs();
    }

    /** Re-creates the tab views, then (after a layout pass) scrolls to the current page's tab. */
    private void refreshTabs() {
        populateTabStrip();
        SlidingTabLayout.this.post(new Runnable() {
            @Override
            public void run() {
                scrollToTab(mViewPager instanceof InfiniteViewPager ? ((InfiniteViewPager) mViewPager).getRelativeCurrentItem() : mViewPager.getCurrentItem(), 0);
            }
        });
    }
};
// Layout constants, all converted from dips/sp at runtime using display metrics.
private static final int TITLE_OFFSET_DIPS = 24;
private static final int TAB_VIEW_PADDING_DIPS = 16;
private static final int TAB_VIEW_TEXT_SIZE_SP = 12;
// Pixel offset used when scrolling a tab into view (derived from TITLE_OFFSET_DIPS in init()).
private int mTitleOffset;
// Optional custom tab layout resource and the id of its TextView; 0 means "use the default tab".
private int mTabViewLayoutId;
private int mTabViewTextViewId;
// The pager currently attached via setViewPager(), or null.
private ViewPager mViewPager;
private final PageListener mPageListener = new PageListener();
// The child strip that actually hosts the tab views and draws indicators/dividers.
private final SlidingTabStrip mTabStrip;
// Text styling read from XML attributes in init().
private int mTextAppearance;
private int mTextSize;
private int mTextColor;
private int mGravity;
/** Programmatic constructor; no XML attributes. */
public SlidingTabLayout(Context context) {
this(context, null);
}
/** Constructor used when inflating from XML. */
public SlidingTabLayout(Context context, AttributeSet attrs) {
super(context, attrs);
mTabStrip = new SlidingTabStrip(context, attrs);
init(context, attrs);
}
/** Constructor used when inflating from XML with a style. */
public SlidingTabLayout(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
mTabStrip = new SlidingTabStrip(context, attrs);
init(context, attrs);
}
/**
 * Initialise the settings provided from the inflated XML.
 * @param context The context the layout was created in.
 * @param attrs The attribute set to get the settings from.
 */
private void init(Context context, AttributeSet attrs) {
// Disable the Scroll Bar
setHorizontalScrollBarEnabled(false);
// Make sure that the Tab Strips fills this View
setFillViewport(true);
mTitleOffset = (int) (TITLE_OFFSET_DIPS * getResources().getDisplayMetrics().density);
addView(mTabStrip, LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT);
// The numeric indices below are positions in the ATTRS array — keep them in sync with it.
final TypedArray a = context.obtainStyledAttributes(attrs, ATTRS);
mTextAppearance = a.getResourceId(0, 0); // android.R.attr.textAppearance
mTextSize = a.getDimensionPixelSize(1, 0); // android.R.attr.textSize
if (a.hasValue(2)) {
mTextColor = a.getColor(2, 0); // android.R.attr.textColor
}
mGravity = a.getInteger(3, Gravity.BOTTOM); // android.R.attr.gravity
if (a.hasValue(4)) {
mPagerId = a.getResourceId(4, 0); // R.attr.viewPager — resolved in onAttachedToWindow
}
if (a.hasValue(5)) {
hidable = a.getInt(5, HIDE_NONE); // R.attr.autoHide
}
if (a.hasValue(6)) {
setDividerColors(context.getResources().getIntArray(a.getResourceId(6, 0))); // R.attr.dividerColors
}
if (a.hasValue(7)) {
setSelectedIndicatorColors(context.getResources().getIntArray(a.getResourceId(7, 0))); // R.attr.selectorColors
}
if (a.hasValue(8)) {
defaultDisplayTime = a.getInteger(8, (int)DISPLAY_TIME); // R.attr.displayDuration
}
a.recycle();
setOnTouchListener(actionEventTouchListener);
// Keep this HorizontalScrollView itself transparent: backgrounds are forwarded to
// mTabStrip by the overridden setBackground* methods.
super.setBackgroundResource(0);
}
// Touch listener attached to this layout (in init) and to every tab view (in populateTabStrip):
// any touch activity resets the auto-hide timer so the tabs stay visible while interacting.
private OnTouchListener actionEventTouchListener = new OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
// Deliberate fall-through: all four actions are handled identically.
case MotionEvent.ACTION_DOWN:
case MotionEvent.ACTION_MOVE:
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_CANCEL:
SlidingTabLayout.this.removeCallbacks(slideUp);
if (hidable == HIDE_AUTO) {
// Restart the countdown to sliding the tabs out of view.
SlidingTabLayout.this.postDelayed(slideUp, defaultDisplayTime);
}
}
// Never consume the event so normal click/scroll handling still occurs.
return false;
}
};
/**
 * @return The state of the Tab Layouts hidable status.
 *
 * @see #HIDE_NONE
 * @see #HIDE_AUTO
 * @see #HIDE_PROGRAM
 */
public int isAutoHidable() {
return hidable;
}
/**
 * Set the length in time the view will remain visible for.
 * @param displayTime The number of milliseconds to have the SlidingTabLayout visible for.
 */
public void setDisplayTime(long displayTime) {
defaultDisplayTime = displayTime;
}
/**
 * Get the number of milliseconds the SlidingTabLayout will remain visible for if {@link #isAutoHidable()}
 * equals {@link #HIDE_AUTO}.
 *
 * @return the display duration in milliseconds
 */
public long getDisplayTime() {
return defaultDisplayTime;
}
/**
 * Set if this TabLayout can auto hide to conserve less space.
 * <br />
 * Note: Setting this to {@link #HIDE_NONE} can lock the Tab Layout into an open or closed state.
 * @param autoHide The state of the Tab Layouts hidability. {@link #HIDE_NONE} for not hidable at all,
 * {@link #HIDE_PROGRAM} for hidable only when {@link #slideIn(long)} or {@link #slideOut(long)}
 * are called, or {@link #HIDE_AUTO} for automatic control.
 *
 * @see #HIDE_NONE
 * @see #HIDE_AUTO
 * @see #HIDE_PROGRAM
 */
public void setAutoHidable(int autoHide) {
hidable = autoHide;
if (autoHide == HIDE_NONE) {
// Cancel any pending auto-hide so the current visibility is retained.
removeCallbacks(slideUp);
} else if (autoHide == HIDE_AUTO && (slideState == OPEN || slideState == SLIDING_DOWN)) {
// Tabs are visible (or becoming visible): restart the auto-hide countdown.
removeCallbacks(slideUp);
postDelayed(slideUp, defaultDisplayTime);
}
}
// Background setters are forwarded to the tab strip so the strip, not this scroll
// container, carries the background (init() keeps this view itself transparent).
@TargetApi (Build.VERSION_CODES.JELLY_BEAN)
@Override
public void setBackground(Drawable background) {
mTabStrip.setBackground(background);
}
@Override
public void setBackgroundColor(int color) {
mTabStrip.setBackgroundColor(color);
}
@Override
public void setBackgroundResource(int resid) {
mTabStrip.setBackgroundResource(resid);
}
/**
 * Set the custom {@link SlidingTabLayout.TabColorizer} to be used.
 *
 * If you only require simple customisation then you can use
 * {@link #setSelectedIndicatorColors(int...)} and {@link #setDividerColors(int...)} to achieve
 * similar effects.
 */
public void setCustomTabColorizer(TabColorizer tabColorizer) {
mTabStrip.setCustomTabColorizer(tabColorizer);
}
/**
 * Sets the colors to be used for indicating the selected tab. These colors are treated as a
 * circular array. Providing one color will mean that all tabs are indicated with the same color.
 */
public void setSelectedIndicatorColors(int... colors) {
mTabStrip.setSelectedIndicatorColors(colors);
}
/**
 * Sets the colors to be used for tab dividers. These colors are treated as a circular array.
 * Providing one color will mean that all tabs are indicated with the same color.
 */
public void setDividerColors(int... colors) {
mTabStrip.setDividerColors(colors);
}
/**
 * Set the custom layout to be inflated for the tab views. Takes effect the next time the tab
 * strip is populated.
 *
 * @param layoutResId Layout id to be inflated
 * @param textViewId id of the {@link android.widget.TextView} in the inflated view
 */
public void setCustomTabView(int layoutResId, int textViewId) {
mTabViewLayoutId = layoutResId;
mTabViewTextViewId = textViewId;
}
/**
 * Sets the associated view pager. Note that the assumption here is that the pager content
 * (number of tabs and tab titles) does not change after this call has been made.
 * Passing the currently-attached pager is a no-op; passing null simply detaches.
 */
public void setViewPager(ViewPager viewPager) {
    if (viewPager == mViewPager) {
        return;
    }
    // Detach listeners and the data observer from any previously attached pager.
    if (mViewPager != null) {
        mViewPager.setInternalPageChangeListener(null);
        mViewPager.setOnAdapterChangeListener(null);
        mViewPager.setOnTouchListener(null);
        PagerAdapter oldAdapter = mViewPager.getAdapter();
        if (oldAdapter != null) {
            oldAdapter.unregisterDataSetObserver(dataChangeObserver);
        }
    }
    mTabStrip.removeAllViews();
    mViewPager = viewPager;
    if (viewPager == null) {
        return;
    }
    // Wire up the new pager.
    viewPager.setInternalPageChangeListener(mPageListener);
    viewPager.setOnAdapterChangeListener(mPageListener);
    viewPager.setOnTouchListener(this);
    PagerAdapter adapter = viewPager.getAdapter();
    if (adapter != null) {
        adapter.registerDataSetObserver(dataChangeObserver);
        populateTabStrip();
        // Scroll after a layout pass so the tab views have measured widths.
        post(new Runnable() {
            @Override
            public void run() {
                scrollToTab(mViewPager instanceof InfiniteViewPager ? ((InfiniteViewPager) mViewPager).getRelativeCurrentItem() : mViewPager.getCurrentItem(), 0);
            }
        });
    }
    if (hidable == HIDE_AUTO) {
        // Show the tabs now and schedule the auto-hide countdown.
        slideState = OPEN;
        setVisibility(View.VISIBLE);
        postDelayed(slideUp, defaultDisplayTime);
    }
}
/**
 * Create a default view to be used for tabs. This is called if a custom tab view is not set via
 * {@link #setCustomTabView(int, int)}.
 */
protected TextView createDefaultTabView(Context context) {
    final TextView tab = new TextView(context);
    tab.setTextAppearance(context, mTextAppearance);
    // Fall back to centered text when no gravity attribute was supplied.
    tab.setGravity(mGravity > 0 ? mGravity : Gravity.CENTER);
    if (mTextSize > 0) {
        tab.setTextSize(TypedValue.COMPLEX_UNIT_PX, mTextSize);
    } else {
        tab.setTextSize(TypedValue.COMPLEX_UNIT_SP, TAB_VIEW_TEXT_SIZE_SP);
    }
    if (mTextColor != 0) {
        tab.setTextColor(mTextColor);
    }
    tab.setTypeface(Typeface.DEFAULT_BOLD);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        // On Honeycomb+, use the theme's selectableItemBackground for a pressed state.
        final TypedValue outValue = new TypedValue();
        getContext().getTheme().resolveAttribute(android.R.attr.selectableItemBackground, outValue, true);
        tab.setBackgroundResource(outValue.resourceId);
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
        // On ICS+, all-caps matches the Action Bar tab style.
        tab.setAllCaps(true);
    }
    final int padding = (int) (TAB_VIEW_PADDING_DIPS * getResources().getDisplayMetrics().density);
    tab.setPadding(padding, padding, padding, padding);
    final LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.MATCH_PARENT);
    params.weight = 1f;
    tab.setLayoutParams(params);
    return tab;
}
/**
 * Rebuilds the tab strip from scratch: one clickable title view per adapter
 * page. Does nothing (beyond clearing) when no pager or adapter is attached.
 */
protected void populateTabStrip() {
    mTabStrip.removeAllViews();
    if (mViewPager == null) {
        return;
    }
    final PagerAdapter adapter = mViewPager.getAdapter();
    if (adapter == null) {
        return;
    }
    final OnClickListener clickListener = new TabClickListener();
    final boolean infinite = adapter instanceof InfinitePagerAdapter;
    // An infinite adapter reports a "relative" page count; use it when present.
    final int tabCount = infinite ? ((InfinitePagerAdapter) adapter).getRelativeCount() : adapter.getCount();
    for (int position = 0; position < tabCount; position++) {
        View tab = null;
        TextView title = null;
        if (mTabViewLayoutId != 0) {
            // A custom tab layout was configured: inflate it and locate its title view.
            tab = LayoutInflater.from(getContext()).inflate(mTabViewLayoutId, mTabStrip, false);
            title = (TextView) tab.findViewById(mTabViewTextViewId);
        }
        if (tab == null) {
            tab = createDefaultTabView(getContext());
        }
        if (title == null && TextView.class.isInstance(tab)) {
            // The whole tab is itself a TextView; use it as the title.
            title = (TextView) tab;
        }
        if (title != null) {
            title.setGravity(Gravity.CENTER);
            title.setText(infinite ? ((InfinitePagerAdapter) adapter).getRelativePageTitle(position) : adapter.getPageTitle(position));
        }
        tab.setOnClickListener(clickListener);
        tab.setOnTouchListener(actionEventTouchListener);
        mTabStrip.addView(tab);
    }
}
@Override
protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    // Try to bind automatically: either to the sibling view named by mPagerId,
    // or to our direct parent when it is itself a ViewPager.
    ViewParent parent = getParent();
    if (mPagerId != 0) {
        // FIX: guard the cast — the original cast parent to ViewGroup
        // unconditionally, which NPEs/CCEs when the parent is absent or not a
        // ViewGroup. In that case, wait for setViewPager(ViewPager) instead.
        if (!(parent instanceof ViewGroup)) {
            return;
        }
        View related = ((ViewGroup) parent).findViewById(mPagerId);
        if (!(related instanceof ViewPager)) {
            // FIX: repaired the garbled error message ("ID is does not refer…").
            throw new IllegalStateException(
                "ViewPager ID does not refer to a ViewPager.");
        }
        parent = (ViewParent) related;
    } else if (!(parent instanceof ViewPager)) {
        // Parent has not been set, will wait for #setViewPager(ViewPager) to be called instead.
        return;
    }
    setViewPager((ViewPager) parent);
}
@Override
protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    // Unbind from the pager so the listeners/observers registered in
    // setViewPager() are released along with this view.
    setViewPager(null);
}
/**
 * Horizontally scrolls the tab strip so the tab at {@code tabIndex}, shifted
 * by {@code positionOffset} pixels into it, is brought into view.
 */
private void scrollToTab(int tabIndex, int positionOffset) {
    final int tabStripChildCount = mTabStrip.getChildCount();
    // Ignore out-of-range requests and empty strips.
    if (tabStripChildCount == 0 || tabIndex < 0 || tabIndex >= tabStripChildCount) {
        return;
    }
    View selectedChild = mTabStrip.getChildAt(tabIndex);
    // NOTE(review): when scrolled more than half-way past the LAST tab, the
    // target wraps to the first tab — presumably to support the infinite
    // pager's wrap-around; confirm against InfiniteViewPager's behaviour.
    // (float division by a zero width yields Infinity, not an exception.)
    if (selectedChild != null && (float)positionOffset / selectedChild.getWidth() > 0.5 && tabIndex == tabStripChildCount -1) {
        tabIndex = 0;
        positionOffset = 0;
    }
    // Re-fetch: tabIndex may have been rewound to 0 above.
    selectedChild = mTabStrip.getChildAt(tabIndex);
    if (selectedChild != null) {
        int targetScrollX = selectedChild.getLeft() + positionOffset;
        if (tabIndex > 0 || positionOffset > 0) {
            // If we're not at the first child and are mid-scroll, make sure we obey the offset
            targetScrollX -= mTitleOffset;
        }
        scrollTo(targetScrollX, 0);
    }
}
/**
 * Click handler installed on every tab view: selects the pager page whose
 * tab was tapped. Clicks on views that are no longer strip children are ignored.
 */
private class TabClickListener implements OnClickListener {
    @Override
    public void onClick(View v) {
        // indexOfChild performs the same reference scan the original loop did.
        final int index = mTabStrip.indexOfChild(v);
        if (index < 0) {
            return;
        }
        if (mViewPager instanceof InfiniteViewPager) {
            ((InfiniteViewPager) mViewPager).setRelativeCurrentItem(index);
        } else {
            mViewPager.setCurrentItem(index);
        }
    }
}
/**
 * Slide up the view to remove it from the frame.
 * Only a layout currently in the OPEN state starts hiding; the state is set
 * to SLIDING_UP for the duration of the animation.
 */
private void slideOut() {
    if (slideState == OPEN) {
        slideState = SLIDING_UP;
        Animation slideOut = AnimationUtils.loadAnimation(this.getContext(), R.anim.abc_slide_out_top);
        slideOut.setAnimationListener(new Animation.AnimationListener() {
            @Override
            public void onAnimationEnd(Animation animation) {
                // NOTE(review): if some other code changed slideState while the
                // animation ran (it is no longer SLIDING_UP), the layout is
                // immediately re-opened via slideIn(); otherwise it simply ends
                // CLOSED. Visibility is set GONE on both paths — slideIn()'s
                // animation-start callback makes it VISIBLE again. Confirm this
                // interplay is intentional.
                if (slideState != SLIDING_UP) {
                    slideState = CLOSED;
                    slideIn();
                } else {
                    slideState = CLOSED;
                }
                SlidingTabLayout.this.setVisibility(View.GONE);
            }
            @Override
            public void onAnimationStart(Animation animation) {
            }
            @Override
            public void onAnimationRepeat(Animation animation) {
            }
        });
        this.startAnimation(slideOut);
    }
}
/**
 * Slide down the view so that its contents are visible and can be interacted
 * with. Only a CLOSED layout starts the animation; the state moves through
 * SLIDING_DOWN and ends at OPEN.
 */
private void slideIn() {
    if (slideState != CLOSED) {
        return;
    }
    slideState = SLIDING_DOWN;
    final Animation animation = AnimationUtils.loadAnimation(this.getContext(), R.anim.abc_slide_in_top);
    animation.setAnimationListener(new Animation.AnimationListener() {
        @Override
        public void onAnimationStart(Animation anim) {
            // Reveal the layout the moment the slide begins.
            SlidingTabLayout.this.setVisibility(View.VISIBLE);
        }
        @Override
        public void onAnimationEnd(Animation anim) {
            slideState = OPEN;
        }
        @Override
        public void onAnimationRepeat(Animation anim) {
        }
    });
    this.startAnimation(animation);
}
/**
 * The last position of the pointer in the Y axis.
 */
private float downPositionY = -1;

/**
 * In auto-hide mode, reveals the tabs on a sufficient downward drag and hides
 * them on a sufficient upward drag. Never consumes the event.
 */
@Override
public boolean onTouch(View view, MotionEvent event) {
    if (hidable != HIDE_AUTO) {
        return false;
    }
    final int action = event.getAction();
    if (action == MotionEvent.ACTION_DOWN) {
        // Remember where the gesture started so MOVE deltas can be measured.
        downPositionY = event.getY();
    } else if (action == MotionEvent.ACTION_MOVE) {
        final float currentY = event.getY();
        if (downPositionY + MOVE_THRESHOLD < currentY) {
            // Dragged down past the threshold: reveal the tabs.
            if (slideState == CLOSED) {
                slideIn(defaultDisplayTime);
            }
        } else if (downPositionY - MOVE_THRESHOLD > currentY) {
            // Dragged up past the threshold: hide the tabs immediately.
            if (slideState == OPEN || slideState == SLIDING_DOWN) {
                slideOut(0);
            }
        }
    }
    return false;
}
/**
 * Public slide-in: shows the tabs (animating only if currently closed) and,
 * in auto-hide mode, schedules them to hide again after {@code milliseconds}.
 */
@Override
public void slideIn(long milliseconds) {
    if (hidable == HIDE_NONE) {
        return;
    }
    final boolean currentlyClosed = (slideState == CLOSED);
    if (currentlyClosed) {
        slideIn();
    } else {
        // Already visible (or mid-animation): just pin the state open.
        slideState = OPEN;
    }
    // Restart the auto-hide countdown from scratch.
    removeCallbacks(slideUp);
    if (milliseconds >= 0 && hidable == HIDE_AUTO) {
        postDelayed(slideUp, milliseconds);
    }
}
/**
 * Public slide-out: hides the tabs after {@code milliseconds} (immediately
 * when zero or negative). No-ops when hiding is disabled or already underway.
 */
@Override
public void slideOut(long milliseconds) {
    if (hidable == HIDE_NONE) { return; }
    // Already hidden or mid-hide: nothing to do.
    if (slideState == CLOSED || slideState == SLIDING_UP) { return; }
    removeCallbacks(slideUp);
    // NOTE(review): the state is forced to OPEN before posting slideUp —
    // presumably so the queued runnable (which appears to trigger the private
    // slideOut(), gated on OPEN) will actually run; confirm against slideUp.
    slideState = OPEN;
    if (milliseconds > 0) {
        postDelayed(slideUp, milliseconds);
    } else {
        post(slideUp);
    }
}
/**
 * Bridges pager events to the tab strip: keeps the selection indicator and
 * horizontal scroll in sync with the pager, moves the data-set observer when
 * the adapter is swapped, and drives the auto-hide timer while paging.
 */
private class PageListener extends DataSetObserver implements ViewPager.OnPageChangeListener,
ViewPager.OnAdapterChangeListener {
    // Last scroll state reported by the pager (idle / dragging / settling).
    private int mScrollState;
    @Override
    public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
        if (hidable == HIDE_AUTO) {
            if (slideState == CLOSED) {
                // Hidden tabs: reveal them while the user pages, and if the
                // pager is idle schedule them to auto-hide again shortly.
                slideIn();
                SlidingTabLayout.this.removeCallbacks(slideUp);
                if (mScrollState == ViewPager.SCROLL_STATE_IDLE) {
                    SlidingTabLayout.this.postDelayed(slideUp, DISPLAY_TIME_SHORT);
                }
            } else if (slideState == SLIDING_UP) {
                // NOTE(review): interrupting an in-flight hide; slideOut()'s
                // end-listener appears to treat a non-SLIDING_UP state as a
                // request to re-open — confirm.
                slideState = OPEN;
            } else {
                // Visible: push the auto-hide deadline back.
                SlidingTabLayout.this.removeCallbacks(slideUp);
                SlidingTabLayout.this.postDelayed(slideUp, DISPLAY_TIME_SHORT);
            }
        }
        int tabStripChildCount = mTabStrip.getChildCount();
        if ((tabStripChildCount == 0) || (position < 0) || (position >= tabStripChildCount)) {
            return;
        }
        mTabStrip.onViewPagerPageChanged(position, positionOffset);
        // Scroll so the selected title tracks the drag proportionally to how
        // far into the page transition we are.
        View selectedTitle = mTabStrip.getChildAt(position);
        int extraOffset = (selectedTitle != null)
            ? (int) (positionOffset * selectedTitle.getWidth())
            : 0;
        scrollToTab(position, extraOffset);
    }
    @Override
    public void onPageSelected(int position) {
        // Programmatic selection (pager idle): snap the indicator and scroll.
        // During a drag, onPageScrolled already handles positioning.
        if (mScrollState == ViewPager.SCROLL_STATE_IDLE) {
            mTabStrip.onViewPagerPageChanged(position, 0f);
            scrollToTab(position, 0);
        }
    }
    @Override
    public void onPageScrollStateChanged(int state) {
        mScrollState = state;
    }
    @Override
    public void onAdapterChanged(PagerAdapter oldAdapter, PagerAdapter newAdapter) {
        // Move our data-set observer to the new adapter and rebuild the tabs.
        if (oldAdapter != null) {
            oldAdapter.unregisterDataSetObserver(dataChangeObserver);
        }
        if (newAdapter != null ) {
            newAdapter.registerDataSetObserver(dataChangeObserver);
        }
        populateTabStrip();
    }
    @Override
    public void onChanged() {
        // Adapter contents changed: regenerate the tab titles.
        populateTabStrip();
    }
}
}
| |
package br.jus.cnj.intercomunicacao_2_2;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for tipoPessoa complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="tipoPessoa">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="outroNome" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
* <element name="documento" type="{http://www.cnj.jus.br/intercomunicacao-2.2.2}tipoDocumentoIdentificacao" maxOccurs="unbounded" minOccurs="0"/>
* <element name="endereco" type="{http://www.cnj.jus.br/intercomunicacao-2.2.2}tipoEndereco" maxOccurs="unbounded" minOccurs="0"/>
* <element name="pessoaRelacionada" type="{http://www.cnj.jus.br/intercomunicacao-2.2.2}tipoRelacionamentoPessoal" maxOccurs="unbounded" minOccurs="0"/>
* <element name="pessoaVinculada" type="{http://www.cnj.jus.br/intercomunicacao-2.2.2}tipoPessoa" minOccurs="0"/>
* </sequence>
* <attribute name="nome" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="sexo" use="required" type="{http://www.cnj.jus.br/intercomunicacao-2.2.2}modalidadeGeneroPessoa" />
* <attribute name="nomeGenitor" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="nomeGenitora" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="dataNascimento" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="dataObito" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="numeroDocumentoPrincipal" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="tipoPessoa" use="required" type="{http://www.cnj.jus.br/intercomunicacao-2.2.2}tipoQualificacaoPessoa" />
* <attribute name="cidadeNatural" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="estadoNatural" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="nacionalidade" type="{http://www.w3.org/2001/XMLSchema}string" default="BR" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "tipoPessoa", propOrder = {
    "outroNome",
    "documento",
    "endereco",
    "pessoaRelacionada",
    "pessoaVinculada"
})
public class TipoPessoa {

    // List-valued properties are created lazily by their getters (JAXB
    // convention: the getter exposes the live list and no setter exists).
    protected List<String> outroNome;
    protected List<TipoDocumentoIdentificacao> documento;
    protected List<TipoEndereco> endereco;
    @XmlElement(nillable = true)
    protected List<TipoRelacionamentoPessoal> pessoaRelacionada;
    protected TipoPessoa pessoaVinculada;
    @XmlAttribute(name = "nome")
    protected String nome;
    @XmlAttribute(name = "sexo", required = true)
    protected ModalidadeGeneroPessoa sexo;
    @XmlAttribute(name = "nomeGenitor")
    protected String nomeGenitor;
    @XmlAttribute(name = "nomeGenitora")
    protected String nomeGenitora;
    @XmlAttribute(name = "dataNascimento")
    protected String dataNascimento;
    @XmlAttribute(name = "dataObito")
    protected String dataObito;
    @XmlAttribute(name = "numeroDocumentoPrincipal")
    protected String numeroDocumentoPrincipal;
    @XmlAttribute(name = "tipoPessoa", required = true)
    protected TipoQualificacaoPessoa tipoPessoa;
    @XmlAttribute(name = "cidadeNatural")
    protected String cidadeNatural;
    @XmlAttribute(name = "estadoNatural")
    protected String estadoNatural;
    // Schema default "BR"; the default is applied in getNacionalidade().
    @XmlAttribute(name = "nacionalidade")
    protected String nacionalidade;

    /**
     * Returns the live, never-{@code null} list of alternative names.
     * Modifications to the returned list are reflected in this object, which
     * is why there is no {@code set} method for this property.
     */
    public List<String> getOutroNome() {
        if (outroNome == null) {
            outroNome = new ArrayList<>();
        }
        return this.outroNome;
    }

    /**
     * Returns the live, never-{@code null} list of identification documents
     * ({@link TipoDocumentoIdentificacao}). Modifications to the returned
     * list are reflected in this object; no {@code set} method exists.
     */
    public List<TipoDocumentoIdentificacao> getDocumento() {
        if (documento == null) {
            documento = new ArrayList<>();
        }
        return this.documento;
    }

    /**
     * Returns the live, never-{@code null} list of addresses
     * ({@link TipoEndereco}). Modifications to the returned list are
     * reflected in this object; no {@code set} method exists.
     */
    public List<TipoEndereco> getEndereco() {
        if (endereco == null) {
            endereco = new ArrayList<>();
        }
        return this.endereco;
    }

    /**
     * Returns the live, never-{@code null} list of personal relationships
     * ({@link TipoRelacionamentoPessoal}). Modifications to the returned
     * list are reflected in this object; no {@code set} method exists.
     */
    public List<TipoRelacionamentoPessoal> getPessoaRelacionada() {
        if (pessoaRelacionada == null) {
            pessoaRelacionada = new ArrayList<>();
        }
        return this.pessoaRelacionada;
    }

    /** @return the linked person ({@link TipoPessoa}), or {@code null} if unset */
    public TipoPessoa getPessoaVinculada() {
        return pessoaVinculada;
    }

    /** @param value the linked person ({@link TipoPessoa}) */
    public void setPessoaVinculada(TipoPessoa value) {
        this.pessoaVinculada = value;
    }

    /** @return the {@code nome} attribute, or {@code null} if unset */
    public String getNome() {
        return nome;
    }

    /** @param value the {@code nome} attribute */
    public void setNome(String value) {
        this.nome = value;
    }

    /** @return the required {@code sexo} attribute ({@link ModalidadeGeneroPessoa}) */
    public ModalidadeGeneroPessoa getSexo() {
        return sexo;
    }

    /** @param value the required {@code sexo} attribute */
    public void setSexo(ModalidadeGeneroPessoa value) {
        this.sexo = value;
    }

    /** @return the {@code nomeGenitor} attribute, or {@code null} if unset */
    public String getNomeGenitor() {
        return nomeGenitor;
    }

    /** @param value the {@code nomeGenitor} attribute */
    public void setNomeGenitor(String value) {
        this.nomeGenitor = value;
    }

    /** @return the {@code nomeGenitora} attribute, or {@code null} if unset */
    public String getNomeGenitora() {
        return nomeGenitora;
    }

    /** @param value the {@code nomeGenitora} attribute */
    public void setNomeGenitora(String value) {
        this.nomeGenitora = value;
    }

    /** @return the {@code dataNascimento} attribute, or {@code null} if unset */
    public String getDataNascimento() {
        return dataNascimento;
    }

    /** @param value the {@code dataNascimento} attribute */
    public void setDataNascimento(String value) {
        this.dataNascimento = value;
    }

    /** @return the {@code dataObito} attribute, or {@code null} if unset */
    public String getDataObito() {
        return dataObito;
    }

    /** @param value the {@code dataObito} attribute */
    public void setDataObito(String value) {
        this.dataObito = value;
    }

    /** @return the {@code numeroDocumentoPrincipal} attribute, or {@code null} if unset */
    public String getNumeroDocumentoPrincipal() {
        return numeroDocumentoPrincipal;
    }

    /** @param value the {@code numeroDocumentoPrincipal} attribute */
    public void setNumeroDocumentoPrincipal(String value) {
        this.numeroDocumentoPrincipal = value;
    }

    /** @return the required {@code tipoPessoa} attribute ({@link TipoQualificacaoPessoa}) */
    public TipoQualificacaoPessoa getTipoPessoa() {
        return tipoPessoa;
    }

    /** @param value the required {@code tipoPessoa} attribute */
    public void setTipoPessoa(TipoQualificacaoPessoa value) {
        this.tipoPessoa = value;
    }

    /** @return the {@code cidadeNatural} attribute, or {@code null} if unset */
    public String getCidadeNatural() {
        return cidadeNatural;
    }

    /** @param value the {@code cidadeNatural} attribute */
    public void setCidadeNatural(String value) {
        this.cidadeNatural = value;
    }

    /** @return the {@code estadoNatural} attribute, or {@code null} if unset */
    public String getEstadoNatural() {
        return estadoNatural;
    }

    /** @param value the {@code estadoNatural} attribute */
    public void setEstadoNatural(String value) {
        this.estadoNatural = value;
    }

    /**
     * @return the {@code nacionalidade} attribute, falling back to the schema
     *         default {@code "BR"} when unset
     */
    public String getNacionalidade() {
        if (nacionalidade == null) {
            return "BR";
        } else {
            return nacionalidade;
        }
    }

    /** @param value the {@code nacionalidade} attribute (schema default {@code "BR"}) */
    public void setNacionalidade(String value) {
        this.nacionalidade = value;
    }
}
| |
/*
Copyright 2017 Jasen Sanders (EnRandomLabs).
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.enrandomlabs.jasensanders.v1.folio;
import android.content.ContentValues;
import android.content.Intent;
import android.content.res.Resources;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityOptionsCompat;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.widget.ShareActionProvider;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.enrandomlabs.jasensanders.v1.folio.database.DataContract;
import com.google.firebase.analytics.FirebaseAnalytics;
import com.google.firebase.crash.FirebaseCrash;
import static com.enrandomlabs.jasensanders.v1.folio.database.DataContract.W_COL_FIVE;
import static com.enrandomlabs.jasensanders.v1.folio.database.DataContract.W_COL_NINE;
/**
* Created by Jasen Sanders on 10/11/2016.
* A simple {@link Fragment} subclass used to display items in the WishList.
*
* Use the {@link DetailWishFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class DetailWishFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor>{
private static final String LOG_TAG = DetailWishFragment.class.getSimpleName();
private static final String ACTIVITY_NAME = "DetailWishFragment";
// the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
public static final String WISH_DETAIL_URI = "WISH_URI";
// Bundle keys preserving in-progress user edits across configuration changes.
private static final String DETAIL_WISH_CURRENT_STORE = "DETAIL_WISH_CURRENT_STORE";
private static final String DETAIL_WISH_CURRENT_NOTES = "DETAIL_WISH_CURRENT_NOTES";
//Branding send to URLs
private static final String GOOGLE_BOOKS = "https://books.google.com/";
private static final String TMDB_SITE = "https://www.themoviedb.org/";
// Loader id for the single wish-item detail cursor.
private static final int DETAIL_WISH_LOADER = 202;
private FirebaseAnalytics mFirebaseAnalytics;
// Share provider from the options menu; null until onCreateOptionsMenu runs.
private ShareActionProvider mShareActionProvider;
// Content URI of the wish item being displayed (from fragment arguments).
private Uri mParam1;
// Human-readable item summary used as the share-intent text.
private String mDetailShare;
private Resources mResources;
// Row status; a "MOVIE" prefix distinguishes movies from books.
private String mStatus;
// Edits restored from saved instance state (null when none were saved).
private String mStateSavedStore;
private String mStateSavedNotes;
// Inflated layout root and its child views, cached by initializeViews().
private View mRootView;
private TextView mError;
private TextView mTitle;
private TextView mByline;
private TextView mAuthors;
private ImageView mPosterImage;
private ImageView mBranding;
private ImageView mBarcodeImage;
private TextView mReleaseDate;
private TextView mSubTextOne;
private TextView mSubTextTwo;
private TextView mSynopsis;
// User-editable fields.
private EditText mStore;
private EditText mNotes;
private LinearLayout mTrailerScroll;
private LinearLayout mDetailView;
private CheckBox mFavButton;
private Button mSearchRetail;
private Button mDeleteButton;
private Button mSaveButton;
/** Required empty public constructor — the framework instantiates fragments reflectively. */
public DetailWishFragment() {
    // Required empty public constructor
}
/**
 * Factory method: creates a DetailWishFragment configured to display the
 * wish-list item at the given content URI.
 *
 * @param param1 content URI of the wish-list row to display
 * @return A new instance of fragment DetailWishFragment.
 */
public static DetailWishFragment newInstance(Uri param1) {
    final Bundle arguments = new Bundle();
    arguments.putParcelable(WISH_DETAIL_URI, param1);
    final DetailWishFragment fragment = new DetailWishFragment();
    fragment.setArguments(arguments);
    return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // This fragment contributes the share action to the options menu.
    setHasOptionsMenu(true);
    mFirebaseAnalytics = FirebaseAnalytics.getInstance(getContext());
    logActionEvent(ACTIVITY_NAME, "ActivityStarted", "action");
    mResources = getResources();
    if (getArguments() != null) {
        // URI of the wish-list item to display, supplied by newInstance().
        mParam1 = getArguments().getParcelable(WISH_DETAIL_URI);
    }
}
@Override
public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
    // Inflate the layout for this fragment
    mRootView = inflater.inflate(R.layout.fragment_detail, container, false);
    initializeViews();
    // Kick off the cursor load; onLoadFinished will populate the views.
    getLoaderManager().initLoader(DETAIL_WISH_LOADER, null, this);
    return mRootView;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    if (savedInstanceState != null) {
        // Restore last state.
        // In-progress store/notes edits stashed in onSaveInstanceState;
        // inflateViews() prefers these over the database values.
        mStateSavedStore = savedInstanceState.getString(DETAIL_WISH_CURRENT_STORE);
        mStateSavedNotes = savedInstanceState.getString(DETAIL_WISH_CURRENT_NOTES);
    }
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
    // Inflate the menu; this adds items to the action bar if it is present.
    inflater.inflate(R.menu.detail_fragment_menu, menu);
    // Retrieve the share menu item
    MenuItem menuItem = menu.findItem(R.id.action_share);
    // Get the provider and hold onto it to set/change the share intent.
    mShareActionProvider = (ShareActionProvider) MenuItemCompat.getActionProvider(menuItem);
    // If onLoadFinished happens before this, we can go ahead and set the share intent now.
    if (mShareActionProvider != null) {
        mShareActionProvider.setShareIntent(createShareIntent(mDetailShare));
        // Log an analytics event each time the user picks a share target;
        // returning false leaves the default share handling untouched.
        mShareActionProvider.setOnShareTargetSelectedListener(new ShareActionProvider.OnShareTargetSelectedListener() {
            @Override
            public boolean onShareTargetSelected(ShareActionProvider source, Intent intent) {
                logShareEvent(ACTIVITY_NAME, "ShareButton", mDetailShare);
                return false;
            }
        });
    }
}
@Override
public void onSaveInstanceState(@NonNull Bundle outState) {
    super.onSaveInstanceState(outState);
    // Preserve unsaved user edits to the store/notes fields so they survive
    // rotation; restored in onActivityCreated and applied in inflateViews.
    outState.putString(DETAIL_WISH_CURRENT_STORE, mStore.getText().toString());
    outState.putString(DETAIL_WISH_CURRENT_NOTES, mNotes.getText().toString());
}
/**
 * Builds the ACTION_SEND share intent for this item.
 *
 * @param movieDesc item description to share; when {@code null} (cursor not
 *                  yet loaded) the placeholder share text is used instead
 * @return a plain-text ACTION_SEND intent, never {@code null}
 */
private Intent createShareIntent(String movieDesc) {
    // FIX: the original duplicated the entire intent construction in both
    // branches; the branches differed only in the EXTRA_TEXT payload.
    final String shareText = (movieDesc != null)
        ? movieDesc
        : getString(R.string.placeholder_share);
    Intent shareIntent = new Intent(Intent.ACTION_SEND);
    shareIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
    shareIntent.setType("text/plain");
    shareIntent.putExtra(Intent.EXTRA_TEXT, shareText);
    return shareIntent;
}
/**
 * Looks up and caches every child view of the inflated detail layout.
 * Must run after mRootView is inflated (called from onCreateView).
 */
private void initializeViews(){
    //Input Area
    mError = mRootView.findViewById(R.id.error);
    // Error banner stays hidden until needed.
    mError.setVisibility(View.GONE);
    //Movie Details Area
    mDetailView = mRootView.findViewById(R.id.details);
    mTitle = mRootView.findViewById(R.id.detail_view_title);
    mByline = mRootView.findViewById(R.id.detail_view_byline);
    mAuthors = mRootView.findViewById(R.id.detail_view_authors);
    mPosterImage = mRootView.findViewById(R.id.posterView);
    mBranding = mRootView.findViewById(R.id.branding);
    mBarcodeImage = mRootView.findViewById(R.id.upcBarcodeImage);
    mReleaseDate = mRootView.findViewById(R.id.releaseDate);
    mSubTextOne = mRootView.findViewById(R.id.detail_subtext1);
    mSubTextTwo = mRootView.findViewById(R.id.detail_subtext2);
    mFavButton = mRootView.findViewById(R.id.FavButton);
    mSearchRetail = mRootView.findViewById(R.id.search_retail);
    mSynopsis = mRootView.findViewById(R.id.synopsis);
    // User-editable fields.
    mStore = mRootView.findViewById(R.id.store);
    mNotes = mRootView.findViewById(R.id.notes);
    mTrailerScroll = mRootView.findViewById(R.id.trailer_scroll);
    mDeleteButton = mRootView.findViewById(R.id.delete_button);
    mSaveButton = mRootView.findViewById(R.id.save_button);
}
/**
 * Populates every detail view from one wish-list cursor row, wires the
 * branding / search / delete / save handlers, and refreshes the share intent.
 * A wish item is either a movie or a book; W_COL_STATUS (prefix "MOVIE")
 * selects which variant of the layout is shown.
 */
private void inflateViews(Cursor row){
    //A Wish Item in database could be a Book or a Movie
    //Find out which one we have
    mStatus = row.getString(DataContract.W_COL_STATUS);
    mTitle.setText(row.getString(DataContract.W_COL_TITLE));
    if(mStatus.startsWith("MOVIE")) {
        //Its a movie so load views accordingly
        Glide.with(getActivity()).load(row.getString(DataContract.W_COL_ELEVEN)).fitCenter().into(mPosterImage);
        String ratingRuntime = String.format(mResources.getString(R.string.rating_runtime),
            row.getString(DataContract.W_COL_FIFTEEN), row.getString(W_COL_NINE) );
        mSubTextOne.setText(ratingRuntime);
        mSubTextTwo.setText(row.getString(W_COL_FIVE));
        // Branding logo links out to the data source (see sendToBrand()).
        mBranding.setImageResource(R.drawable.tmdb_brand_120_47);
        mBranding.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                sendToBrand();
            }
        });
        // Byline/authors rows are book-only.
        mByline.setVisibility(View.GONE);
        mAuthors.setVisibility(View.GONE);
        String overview = String.format(mResources.getString(R.string.overview), row.getString(DataContract.W_COL_SIXTEEN));
        mSynopsis.setText(overview);
        addTrailers(mTrailerScroll, row.getString(DataContract.W_COL_SEVENTEEN));
        //ALLy content descriptions for dynamic content
        String description =getActivity().getResources().getString(R.string.movie_detail_view_description,
            row.getString(DataContract.W_COL_TITLE), row.getString(W_COL_FIVE), row.getString(DataContract.W_COL_DATE),
            row.getString(DataContract.W_COL_FIFTEEN));
        mDetailView.setContentDescription(description);
        mSynopsis.setContentDescription(overview);
        mDetailShare = mResources.getString(R.string.detail_movie_desc, row.getString(DataContract.W_COL_TITLE),
            row.getString(DataContract.W_COL_FIVE), row.getString(DataContract.W_COL_DATE),
            row.getString(DataContract.W_COL_FIFTEEN), row.getString(DataContract.W_COL_UPC));
    }
    else{
        //Its a book so load views accordingly
        Glide.with(getActivity()).load(row.getString(DataContract.W_COL_THUMB)).fitCenter().into(mPosterImage);
        mSubTextOne.setText(row.getString(DataContract.W_COL_ELEVEN));
        mSubTextTwo.setText(row.getString(DataContract.W_COL_FIFTEEN));
        mBranding.setImageResource(R.drawable.google_logo);
        mBranding.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                sendToBrand();
            }
        });
        mByline.setText(row.getString(DataContract.W_COL_FIVE));
        mAuthors.setText(row.getString(W_COL_NINE));
        String overview = String.format(mResources.getString(R.string.overview), row.getString(DataContract.W_COL_SIX));
        mSynopsis.setText(overview);
        // Books have no trailers.
        mTrailerScroll.setVisibility(View.GONE);
        //ALLy content descriptions for dynamic content
        String description = mResources.getString(R.string.book_detail_view_description,
            row.getString(DataContract.W_COL_TITLE), row.getString(W_COL_NINE), row.getString(DataContract.W_COL_DATE));
        mDetailView.setContentDescription(description);
        mSynopsis.setContentDescription(overview);
        mDetailShare = getActivity().getResources().getString(R.string.detail_book_desc, row.getString(DataContract.W_COL_TITLE),
            row.getString(W_COL_NINE), row.getString(DataContract.W_COL_DATE), row.getString(DataContract.W_COL_UPC));
    }
    // Common to both item kinds: barcode image and release year.
    Glide.with(this).load(row.getString(DataContract.W_COL_SEVEN)).fitCenter().into(mBarcodeImage);
    mReleaseDate.setText(Utility.dateToYear(row.getString(DataContract.W_COL_DATE)));
    //Set the checkbox accordingly.
    // Wish items are always marked as favorites; the checkbox is locked checked.
    if(!mFavButton.isChecked()){
        mFavButton.setChecked(true);
        mFavButton.setEnabled(false);
    }
    //Restore from Saved State if necessary.
    // In-progress user edits (saved instance state) win over database values.
    if(mStateSavedStore != null ){
        mStore.setText(mStateSavedStore);
    }else {
        mStore.setText(row.getString(DataContract.W_COL_STORE));
    }
    if(mStateSavedNotes != null){
        mNotes.setText(mStateSavedNotes);
    }else {
        mNotes.setText(row.getString(DataContract.W_COL_NOTES));
    }
    //Rest of A11Y content descriptions
    String artDesc = mResources.getString(R.string.poster_description, row.getString(DataContract.W_COL_TITLE));
    mPosterImage.setContentDescription(artDesc);
    String barcodeDesc = mResources.getString(R.string.barcode_description, row.getString(DataContract.W_COL_UPC));
    mBarcodeImage.setContentDescription(barcodeDesc);
    // If onCreateOptionsMenu has already happened, we need to update the share intent now.
    if (mShareActionProvider != null) {
        mShareActionProvider.setShareIntent(createShareIntent(mDetailShare));
    }
    //Setup Delete, Save and Search Retail Buttons for Movie View, Book View and WishList View
    final String CurrentUPC = mParam1.getLastPathSegment();
    //Set searchRetail click listener
    mSearchRetail.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            logActionEvent(ACTIVITY_NAME, "SearchRetailersButton", "action");
            Uri send = mParam1;
            Bundle bundle = ActivityOptionsCompat.makeSceneTransitionAnimation(getActivity()).toBundle();
            Intent SearchRetailIntent = new Intent(getActivity(), RetailerSearchActivity.class);
            SearchRetailIntent.setData(send);
            startActivity(SearchRetailIntent, bundle);
        }
    });
    // Delete removes this item's row by UPC and toasts on success.
    mDeleteButton.setOnClickListener(new View.OnClickListener(){
        public void onClick(View v){
            //Determine from which list we are deleting
            Uri deleteItem = mParam1;
            String deleteSelection = DataContract.WishEntry.COLUMN_UPC + " = ?";
            //Attempt delete
            int rowsDeleted = getActivity().getContentResolver().delete(deleteItem,
                deleteSelection,
                new String[]{CurrentUPC});
            //Notify User
            if(rowsDeleted == 1){
                Toast.makeText(getActivity(), getString(R.string.detailItemRemoved), Toast.LENGTH_SHORT).show();
            }
        }
    });
    // Save writes the edited store/notes fields back to the row by UPC.
    mSaveButton.setOnClickListener(new View.OnClickListener(){
        public void onClick(View v){
            //Get input changes
            String Store = mStore.getText().toString();
            String Notes = mNotes.getText().toString();
            //Determine from which list we are updating
            Uri updateMovie = mParam1;
            ContentValues update = Utility.makeUpdateValues(Store, Notes, Utility.WISH_ITEM_BY_UPC);
            String saveSelection = DataContract.WishEntry.COLUMN_UPC + " = ?";
            int rowsUpdated;
            //Attempt update
            rowsUpdated = getActivity().getContentResolver().update(updateMovie,update,
                saveSelection,
                new String[]{CurrentUPC});
            if(rowsUpdated == 1){
                Toast.makeText(getActivity(), getString(R.string.detailItemUpdated), Toast.LENGTH_SHORT).show();
            }
        }
    });
}
/**
 * Populates {@code view} with one tappable row per trailer URL.
 *
 * @param view     container that receives the inflated trailer rows; any
 *                 previously added children are removed first
 * @param trailers comma-separated list of trailer URLs; may be null or empty,
 *                 in which case the container is only cleared
 */
private void addTrailers(LinearLayout view, String trailers) {
    // Clear any previously inflated rows (e.g. on a re-load).
    if (view.getChildCount() > 0) {view.removeAllViews();}
    // If there are no trailers, then nothing to do.
    if(trailers == null || trailers.equals("")){
        return;
    }
    LayoutInflater vi = getActivity().getLayoutInflater();
    final String[] tempTrail;
    //Log.v("AddNew: ", trailers);
    int i = 0;  // 0-based index; shown to the user as "trailer i + 1"
    try {
        tempTrail = trailers.split(",");
        if (tempTrail.length > 0) {
            for (String url : tempTrail) {
                View v = vi.inflate(R.layout.content_trailer_list_item, view, false);
                TextView listText = v.findViewById(R.id.list_item_trailer_text);
                String text = mResources.getString(R.string.trailer_play_description, String.valueOf(i + 1));
                // A11y content description for trailers.
                v.setContentDescription(text);
                v.setFocusable(true);
                listText.setText(text);
                final Uri trailerUrl = Uri.parse(url);
                v.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        // Hand off to any app that can play the trailer URL.
                        Intent intent = new Intent(Intent.ACTION_VIEW, trailerUrl);
                        if (intent.resolveActivity(getActivity().getPackageManager()) != null) {
                            startActivity(intent);
                        }
                    }
                });
                view.addView(v, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
                i++;
                //Log.v("AddNew: " + String.valueOf(i), text + " : " + trailerUrl);
            }
        }
    } catch (NullPointerException e) {
        // Defensive: report (rather than crash) if inflation or lookup failed.
        //Log.e(LOG_TAG, "Error splitting and Adding Trailers", e);
        FirebaseCrash.log("Error splitting and Adding Trailers");
    }
}
/**
 * Opens the attribution website for the provider that supplied this item's
 * metadata: TMDb for movies, Google Books for books. Does nothing when the
 * status is not yet loaded, is unrecognized, or no browser can handle the URL.
 */
public void sendToBrand(){
    if (mStatus == null) {
        // Nothing loaded yet; avoid NPE on startsWith.
        return;
    }
    // MOVIE/BOOK prefixes are mutually exclusive, so else-if is safe.
    if (mStatus.startsWith("MOVIE")) {
        launchBrandSite(TMDB_SITE);
    } else if (mStatus.startsWith("BOOK")) {
        launchBrandSite(GOOGLE_BOOKS);
    }
}

/** Launches a browser for {@code url} if some activity can handle it. */
private void launchBrandSite(String url) {
    Intent result = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
    result.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    if (result.resolveActivity(getActivity().getPackageManager()) != null) {
        startActivity(result);
    }
}
@Override
@NonNull
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
    // Load the wish-list row for the item URI this fragment was started with
    // (mParam1). No selection is needed: the URI already identifies the item.
    return new CursorLoader(getActivity(),
            mParam1,
            DataContract.WISH_COLUMNS,
            null,   // selection
            null,   // selectionArgs
            null);  // sortOrder
}
/**
 * Populates the detail views once the wish-list row has loaded.
 * A CursorLoader can deliver a null cursor on failure, so guard before
 * dereferencing; an empty result set is silently ignored.
 */
@Override
public void onLoadFinished(@NonNull Loader<Cursor> loader, Cursor data) {
    if (data != null && data.moveToFirst()) {
        inflateViews(data);
    }
}
@Override
public void onLoaderReset(@NonNull Loader<Cursor> loader) {
    // Intentionally empty: the views are populated eagerly in onLoadFinished
    // and no reference to the loader's cursor is retained, so there is
    // nothing to release here.
}
/**
 * Reports a share action to Firebase Analytics as a SHARE event.
 *
 * @param activity   originating screen name (logged as ITEM_ID)
 * @param buttonName UI control that triggered the share (logged as ITEM_NAME)
 * @param shareable  the shared payload text (logged as SEARCH_TERM)
 */
private void logShareEvent(String activity, String buttonName, String shareable){
    final Bundle params = new Bundle();
    params.putString(FirebaseAnalytics.Param.CONTENT_TYPE, "wish_string");
    params.putString(FirebaseAnalytics.Param.SEARCH_TERM, shareable);
    params.putString(FirebaseAnalytics.Param.ITEM_NAME, buttonName);
    params.putString(FirebaseAnalytics.Param.ITEM_ID, activity);
    mFirebaseAnalytics.logEvent(FirebaseAnalytics.Event.SHARE, params);
}
/**
 * Reports a user interaction to Firebase Analytics as a SELECT_CONTENT event.
 *
 * @param activity   originating screen name (logged as ITEM_ID)
 * @param actionName UI control or action that was taken (logged as ITEM_NAME)
 * @param type       free-form category string (logged as CONTENT_TYPE)
 */
private void logActionEvent(String activity, String actionName, String type ){
    final Bundle params = new Bundle();
    params.putString(FirebaseAnalytics.Param.CONTENT_TYPE, type);
    params.putString(FirebaseAnalytics.Param.ITEM_NAME, actionName);
    params.putString(FirebaseAnalytics.Param.ITEM_ID, activity);
    mFirebaseAnalytics.logEvent(FirebaseAnalytics.Event.SELECT_CONTENT, params);
}
}
| |
/*
* Copyright (C) 2015 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.javapoet;
import java.io.IOException;
import java.io.StringWriter;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.Modifier;
import static com.squareup.javapoet.Util.checkArgument;
import static com.squareup.javapoet.Util.checkNotNull;
import static com.squareup.javapoet.Util.checkState;
import static com.squareup.javapoet.Util.hasDefaultModifier;
import static com.squareup.javapoet.Util.requireExactlyOneOf;
/** A generated class, interface, or enum declaration. */
/** A generated class, interface, or enum declaration. */
public final class TypeSpec {
  public final Kind kind;
  public final String name;
  /** Non-null only for anonymous types: the constructor args of {@code new Foo(args) { ... }}. */
  public final CodeBlock anonymousTypeArguments;
  public final CodeBlock javadoc;
  public final List<AnnotationSpec> annotations;
  public final Set<Modifier> modifiers;
  public final List<TypeVariableName> typeVariables;
  public final TypeName superclass;
  public final List<TypeName> superinterfaces;
  public final Map<String, TypeSpec> enumConstants;
  public final List<FieldSpec> fieldSpecs;
  public final CodeBlock staticBlock;
  public final CodeBlock initializerBlock;
  public final List<MethodSpec> methodSpecs;
  public final List<TypeSpec> typeSpecs;
  public final List<Element> originatingElements;

  private TypeSpec(Builder builder) {
    this.kind = builder.kind;
    this.name = builder.name;
    this.anonymousTypeArguments = builder.anonymousTypeArguments;
    this.javadoc = builder.javadoc.build();
    this.annotations = Util.immutableList(builder.annotations);
    this.modifiers = Util.immutableSet(builder.modifiers);
    this.typeVariables = Util.immutableList(builder.typeVariables);
    this.superclass = builder.superclass;
    this.superinterfaces = Util.immutableList(builder.superinterfaces);
    this.enumConstants = Util.immutableMap(builder.enumConstants);
    this.fieldSpecs = Util.immutableList(builder.fieldSpecs);
    this.staticBlock = builder.staticBlock.build();
    this.initializerBlock = builder.initializerBlock.build();
    this.methodSpecs = Util.immutableList(builder.methodSpecs);
    this.typeSpecs = Util.immutableList(builder.typeSpecs);
    // Originating elements include those of every nested type so that
    // annotation processors can attribute the whole generated file.
    List<Element> originatingElementsMutable = new ArrayList<>();
    originatingElementsMutable.addAll(builder.originatingElements);
    for (TypeSpec typeSpec : builder.typeSpecs) {
      originatingElementsMutable.addAll(typeSpec.originatingElements);
    }
    this.originatingElements = Util.immutableList(originatingElementsMutable);
  }

  /** Returns true if this type declaration carries {@code modifier}. */
  public boolean hasModifier(Modifier modifier) {
    return modifiers.contains(modifier);
  }

  public static Builder classBuilder(String name) {
    return new Builder(Kind.CLASS, checkNotNull(name, "name == null"), null);
  }

  public static Builder classBuilder(ClassName className) {
    return classBuilder(checkNotNull(className, "className == null").simpleName());
  }

  public static Builder interfaceBuilder(String name) {
    return new Builder(Kind.INTERFACE, checkNotNull(name, "name == null"), null);
  }

  public static Builder interfaceBuilder(ClassName className) {
    return interfaceBuilder(checkNotNull(className, "className == null").simpleName());
  }

  public static Builder enumBuilder(String name) {
    return new Builder(Kind.ENUM, checkNotNull(name, "name == null"), null);
  }

  public static Builder enumBuilder(ClassName className) {
    return enumBuilder(checkNotNull(className, "className == null").simpleName());
  }

  public static Builder anonymousClassBuilder(String typeArgumentsFormat, Object... args) {
    return new Builder(Kind.CLASS, null, CodeBlock.builder()
        .add(typeArgumentsFormat, args)
        .build());
  }

  public static Builder annotationBuilder(String name) {
    return new Builder(Kind.ANNOTATION, checkNotNull(name, "name == null"), null);
  }

  public static Builder annotationBuilder(ClassName className) {
    return annotationBuilder(checkNotNull(className, "className == null").simpleName());
  }

  /** Returns a new builder initialized with this type's members. */
  public Builder toBuilder() {
    Builder builder = new Builder(kind, name, anonymousTypeArguments);
    builder.javadoc.add(javadoc);
    builder.annotations.addAll(annotations);
    builder.modifiers.addAll(modifiers);
    builder.typeVariables.addAll(typeVariables);
    builder.superclass = superclass;
    builder.superinterfaces.addAll(superinterfaces);
    builder.enumConstants.putAll(enumConstants);
    builder.fieldSpecs.addAll(fieldSpecs);
    builder.methodSpecs.addAll(methodSpecs);
    builder.typeSpecs.addAll(typeSpecs);
    builder.initializerBlock.add(initializerBlock);
    builder.staticBlock.add(staticBlock);
    return builder;
  }

  /**
   * Emits this type declaration to {@code codeWriter}.
   *
   * @param enumName non-null when emitting this type as an enum constant body
   * @param implicitModifiers modifiers implied by the enclosing context and
   *     therefore omitted from output
   */
  void emit(CodeWriter codeWriter, String enumName, Set<Modifier> implicitModifiers)
      throws IOException {
    // Nested classes interrupt wrapped line indentation. Stash the current wrapping state and put
    // it back afterwards when this type is complete.
    int previousStatementLine = codeWriter.statementLine;
    codeWriter.statementLine = -1;

    try {
      codeWriter.pushType(this);
      if (enumName != null) {
        // Enum constant: "NAME(args) { ... }".
        codeWriter.emitJavadoc(javadoc);
        codeWriter.emitAnnotations(annotations, false);
        codeWriter.emit("$L", enumName);
        if (!anonymousTypeArguments.formatParts.isEmpty()) {
          codeWriter.emit("(");
          codeWriter.emit(anonymousTypeArguments);
          codeWriter.emit(")");
        }
        if (fieldSpecs.isEmpty() && methodSpecs.isEmpty() && typeSpecs.isEmpty()) {
          return; // Avoid unnecessary braces "{}".
        }
        codeWriter.emit(" {\n");
      } else if (anonymousTypeArguments != null) {
        // Anonymous class: "new Supertype(args) { ... }".
        TypeName supertype = !superinterfaces.isEmpty() ? superinterfaces.get(0) : superclass;
        codeWriter.emit("new $T(", supertype);
        codeWriter.emit(anonymousTypeArguments);
        codeWriter.emit(") {\n");
      } else {
        // Named type declaration.
        codeWriter.emitJavadoc(javadoc);
        codeWriter.emitAnnotations(annotations, false);
        codeWriter.emitModifiers(modifiers, Util.union(implicitModifiers, kind.asMemberModifiers));
        if (kind == Kind.ANNOTATION) {
          codeWriter.emit("$L $L", "@interface", name);
        } else {
          codeWriter.emit("$L $L", kind.name().toLowerCase(Locale.US), name);
        }
        codeWriter.emitTypeVariables(typeVariables);

        List<TypeName> extendsTypes;
        List<TypeName> implementsTypes;
        if (kind == Kind.INTERFACE) {
          // Interfaces "extend" their superinterfaces and implement nothing.
          extendsTypes = superinterfaces;
          implementsTypes = Collections.emptyList();
        } else {
          extendsTypes = superclass.equals(ClassName.OBJECT)
              ? Collections.<TypeName>emptyList()
              : Collections.singletonList(superclass);
          implementsTypes = superinterfaces;
        }

        if (!extendsTypes.isEmpty()) {
          codeWriter.emit(" extends");
          boolean firstType = true;
          for (TypeName type : extendsTypes) {
            if (!firstType) codeWriter.emit(",");
            codeWriter.emit(" $T", type);
            firstType = false;
          }
        }
        if (!implementsTypes.isEmpty()) {
          codeWriter.emit(" implements");
          boolean firstType = true;
          for (TypeName type : implementsTypes) {
            if (!firstType) codeWriter.emit(",");
            codeWriter.emit(" $T", type);
            firstType = false;
          }
        }

        codeWriter.emit(" {\n");
      }

      codeWriter.indent();
      boolean firstMember = true;
      // Enum constants come first; a trailing ";" is required only if other
      // members follow.
      for (Iterator<Map.Entry<String, TypeSpec>> i = enumConstants.entrySet().iterator();
          i.hasNext(); ) {
        Map.Entry<String, TypeSpec> enumConstant = i.next();
        if (!firstMember) codeWriter.emit("\n");
        enumConstant.getValue()
            .emit(codeWriter, enumConstant.getKey(), Collections.<Modifier>emptySet());
        firstMember = false;
        if (i.hasNext()) {
          codeWriter.emit(",\n");
        } else if (!fieldSpecs.isEmpty() || !methodSpecs.isEmpty() || !typeSpecs.isEmpty()) {
          codeWriter.emit(";\n");
        } else {
          codeWriter.emit("\n");
        }
      }

      // Static fields.
      for (FieldSpec fieldSpec : fieldSpecs) {
        if (!fieldSpec.hasModifier(Modifier.STATIC)) continue;
        if (!firstMember) codeWriter.emit("\n");
        fieldSpec.emit(codeWriter, kind.implicitFieldModifiers);
        firstMember = false;
      }

      if (!staticBlock.isEmpty()) {
        if (!firstMember) codeWriter.emit("\n");
        codeWriter.emit(staticBlock);
        firstMember = false;
      }

      // Non-static fields.
      for (FieldSpec fieldSpec : fieldSpecs) {
        if (fieldSpec.hasModifier(Modifier.STATIC)) continue;
        if (!firstMember) codeWriter.emit("\n");
        fieldSpec.emit(codeWriter, kind.implicitFieldModifiers);
        firstMember = false;
      }

      // Initializer block.
      if (!initializerBlock.isEmpty()) {
        if (!firstMember) codeWriter.emit("\n");
        codeWriter.emit(initializerBlock);
        firstMember = false;
      }

      // Constructors.
      for (MethodSpec methodSpec : methodSpecs) {
        if (!methodSpec.isConstructor()) continue;
        if (!firstMember) codeWriter.emit("\n");
        methodSpec.emit(codeWriter, name, kind.implicitMethodModifiers);
        firstMember = false;
      }

      // Methods (static and non-static).
      for (MethodSpec methodSpec : methodSpecs) {
        if (methodSpec.isConstructor()) continue;
        if (!firstMember) codeWriter.emit("\n");
        methodSpec.emit(codeWriter, name, kind.implicitMethodModifiers);
        firstMember = false;
      }

      // Types.
      for (TypeSpec typeSpec : typeSpecs) {
        if (!firstMember) codeWriter.emit("\n");
        typeSpec.emit(codeWriter, null, kind.implicitTypeModifiers);
        firstMember = false;
      }

      codeWriter.unindent();
      codeWriter.emit("}");
      if (enumName == null && anonymousTypeArguments == null) {
        codeWriter.emit("\n"); // If this type isn't also a value, include a trailing newline.
      }
    } finally {
      codeWriter.popType();
      codeWriter.statementLine = previousStatementLine;
    }
  }

  @Override public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null) return false;
    if (getClass() != o.getClass()) return false;
    return toString().equals(o.toString());
  }

  @Override public int hashCode() {
    return toString().hashCode();
  }

  @Override public String toString() {
    StringWriter out = new StringWriter();
    try {
      CodeWriter codeWriter = new CodeWriter(out);
      emit(codeWriter, null, Collections.<Modifier>emptySet());
      return out.toString();
    } catch (IOException e) {
      // StringWriter never throws IOException; preserve the cause anyway so a
      // surprise failure is diagnosable instead of an opaque AssertionError.
      throw new AssertionError(e);
    }
  }

  /** The flavor of type declaration, plus the modifiers it implies on its members. */
  public enum Kind {
    CLASS(
        Collections.<Modifier>emptySet(),
        Collections.<Modifier>emptySet(),
        Collections.<Modifier>emptySet(),
        Collections.<Modifier>emptySet()),

    INTERFACE(
        Util.immutableSet(Arrays.asList(Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL)),
        Util.immutableSet(Arrays.asList(Modifier.PUBLIC, Modifier.ABSTRACT)),
        Util.immutableSet(Arrays.asList(Modifier.PUBLIC, Modifier.STATIC)),
        Util.immutableSet(Arrays.asList(Modifier.STATIC))),

    ENUM(
        Collections.<Modifier>emptySet(),
        Collections.<Modifier>emptySet(),
        Collections.<Modifier>emptySet(),
        Collections.singleton(Modifier.STATIC)),

    ANNOTATION(
        Util.immutableSet(Arrays.asList(Modifier.PUBLIC, Modifier.STATIC, Modifier.FINAL)),
        Util.immutableSet(Arrays.asList(Modifier.PUBLIC, Modifier.ABSTRACT)),
        Util.immutableSet(Arrays.asList(Modifier.PUBLIC, Modifier.STATIC)),
        Util.immutableSet(Arrays.asList(Modifier.STATIC)));

    private final Set<Modifier> implicitFieldModifiers;
    private final Set<Modifier> implicitMethodModifiers;
    private final Set<Modifier> implicitTypeModifiers;
    private final Set<Modifier> asMemberModifiers;

    Kind(Set<Modifier> implicitFieldModifiers,
        Set<Modifier> implicitMethodModifiers,
        Set<Modifier> implicitTypeModifiers,
        Set<Modifier> asMemberModifiers) {
      this.implicitFieldModifiers = implicitFieldModifiers;
      this.implicitMethodModifiers = implicitMethodModifiers;
      this.implicitTypeModifiers = implicitTypeModifiers;
      this.asMemberModifiers = asMemberModifiers;
    }
  }

  public static final class Builder {
    private final Kind kind;
    private final String name;
    private final CodeBlock anonymousTypeArguments;

    private final CodeBlock.Builder javadoc = CodeBlock.builder();
    private final List<AnnotationSpec> annotations = new ArrayList<>();
    private final List<Modifier> modifiers = new ArrayList<>();
    private final List<TypeVariableName> typeVariables = new ArrayList<>();
    private TypeName superclass = ClassName.OBJECT;
    private final List<TypeName> superinterfaces = new ArrayList<>();
    private final Map<String, TypeSpec> enumConstants = new LinkedHashMap<>();
    private final List<FieldSpec> fieldSpecs = new ArrayList<>();
    private final CodeBlock.Builder staticBlock = CodeBlock.builder();
    private final CodeBlock.Builder initializerBlock = CodeBlock.builder();
    private final List<MethodSpec> methodSpecs = new ArrayList<>();
    private final List<TypeSpec> typeSpecs = new ArrayList<>();
    private final List<Element> originatingElements = new ArrayList<>();

    private Builder(Kind kind, String name,
        CodeBlock anonymousTypeArguments) {
      checkArgument(name == null || SourceVersion.isName(name), "not a valid name: %s", name);
      this.kind = kind;
      this.name = name;
      this.anonymousTypeArguments = anonymousTypeArguments;
    }

    public Builder addJavadoc(String format, Object... args) {
      javadoc.add(format, args);
      return this;
    }

    public Builder addAnnotations(Iterable<AnnotationSpec> annotationSpecs) {
      checkArgument(annotationSpecs != null, "annotationSpecs == null");
      for (AnnotationSpec annotationSpec : annotationSpecs) {
        this.annotations.add(annotationSpec);
      }
      return this;
    }

    public Builder addAnnotation(AnnotationSpec annotationSpec) {
      // Keep the null contract consistent with addAnnotations: fail fast here
      // rather than later inside emit().
      checkNotNull(annotationSpec, "annotationSpec == null");
      this.annotations.add(annotationSpec);
      return this;
    }

    public Builder addAnnotation(ClassName annotation) {
      return addAnnotation(AnnotationSpec.builder(annotation).build());
    }

    public Builder addAnnotation(Class<?> annotation) {
      return addAnnotation(ClassName.get(annotation));
    }

    public Builder addModifiers(Modifier... modifiers) {
      checkState(anonymousTypeArguments == null, "forbidden on anonymous types.");
      Collections.addAll(this.modifiers, modifiers);
      return this;
    }

    public Builder addTypeVariables(Iterable<TypeVariableName> typeVariables) {
      checkState(anonymousTypeArguments == null, "forbidden on anonymous types.");
      checkArgument(typeVariables != null, "typeVariables == null");
      for (TypeVariableName typeVariable : typeVariables) {
        this.typeVariables.add(typeVariable);
      }
      return this;
    }

    public Builder addTypeVariable(TypeVariableName typeVariable) {
      checkState(anonymousTypeArguments == null, "forbidden on anonymous types.");
      typeVariables.add(typeVariable);
      return this;
    }

    public Builder superclass(TypeName superclass) {
      checkState(this.superclass == ClassName.OBJECT,
          "superclass already set to " + this.superclass);
      checkArgument(!superclass.isPrimitive(), "superclass may not be a primitive");
      this.superclass = superclass;
      return this;
    }

    public Builder superclass(Type superclass) {
      return superclass(TypeName.get(superclass));
    }

    public Builder addSuperinterfaces(Iterable<? extends TypeName> superinterfaces) {
      checkArgument(superinterfaces != null, "superinterfaces == null");
      for (TypeName superinterface : superinterfaces) {
        this.superinterfaces.add(superinterface);
      }
      return this;
    }

    public Builder addSuperinterface(TypeName superinterface) {
      this.superinterfaces.add(superinterface);
      return this;
    }

    public Builder addSuperinterface(Type superinterface) {
      return addSuperinterface(TypeName.get(superinterface));
    }

    public Builder addEnumConstant(String name) {
      return addEnumConstant(name, anonymousClassBuilder("").build());
    }

    public Builder addEnumConstant(String name, TypeSpec typeSpec) {
      checkState(kind == Kind.ENUM, "%s is not enum", this.name);
      checkArgument(typeSpec.anonymousTypeArguments != null,
          "enum constants must have anonymous type arguments");
      checkArgument(SourceVersion.isName(name), "not a valid enum constant: %s", name);
      enumConstants.put(name, typeSpec);
      return this;
    }

    public Builder addFields(Iterable<FieldSpec> fieldSpecs) {
      checkArgument(fieldSpecs != null, "fieldSpecs == null");
      for (FieldSpec fieldSpec : fieldSpecs) {
        addField(fieldSpec);
      }
      return this;
    }

    public Builder addField(FieldSpec fieldSpec) {
      if (kind == Kind.INTERFACE || kind == Kind.ANNOTATION) {
        // Interface/annotation fields are implicitly public static final;
        // require those modifiers explicitly so the spec is self-describing.
        requireExactlyOneOf(fieldSpec.modifiers, Modifier.PUBLIC, Modifier.PRIVATE);
        Set<Modifier> check = EnumSet.of(Modifier.STATIC, Modifier.FINAL);
        checkState(fieldSpec.modifiers.containsAll(check), "%s %s.%s requires modifiers %s",
            kind, name, fieldSpec.name, check);
      }
      fieldSpecs.add(fieldSpec);
      return this;
    }

    public Builder addField(TypeName type, String name, Modifier... modifiers) {
      return addField(FieldSpec.builder(type, name, modifiers).build());
    }

    public Builder addField(Type type, String name, Modifier... modifiers) {
      return addField(TypeName.get(type), name, modifiers);
    }

    public Builder addStaticBlock(CodeBlock block) {
      staticBlock.beginControlFlow("static").add(block).endControlFlow();
      return this;
    }

    public Builder addInitializerBlock(CodeBlock block) {
      if ((kind != Kind.CLASS && kind != Kind.ENUM)) {
        throw new UnsupportedOperationException(kind + " can't have initializer blocks");
      }
      initializerBlock.add("{\n")
          .indent()
          .add(block)
          .unindent()
          .add("}\n");
      return this;
    }

    public Builder addMethods(Iterable<MethodSpec> methodSpecs) {
      checkArgument(methodSpecs != null, "methodSpecs == null");
      for (MethodSpec methodSpec : methodSpecs) {
        addMethod(methodSpec);
      }
      return this;
    }

    public Builder addMethod(MethodSpec methodSpec) {
      if (kind == Kind.INTERFACE) {
        requireExactlyOneOf(methodSpec.modifiers, Modifier.ABSTRACT, Modifier.STATIC, Util.DEFAULT);
        requireExactlyOneOf(methodSpec.modifiers, Modifier.PUBLIC, Modifier.PRIVATE);
      } else if (kind == Kind.ANNOTATION) {
        checkState(methodSpec.modifiers.equals(kind.implicitMethodModifiers),
            "%s %s.%s requires modifiers %s",
            kind, name, methodSpec.name, kind.implicitMethodModifiers);
      }
      if (kind != Kind.ANNOTATION) {
        checkState(methodSpec.defaultValue == null, "%s %s.%s cannot have a default value",
            kind, name, methodSpec.name);
      }
      if (kind != Kind.INTERFACE) {
        checkState(!hasDefaultModifier(methodSpec.modifiers), "%s %s.%s cannot be default",
            kind, name, methodSpec.name);
      }
      methodSpecs.add(methodSpec);
      return this;
    }

    public Builder addTypes(Iterable<TypeSpec> typeSpecs) {
      checkArgument(typeSpecs != null, "typeSpecs == null");
      for (TypeSpec typeSpec : typeSpecs) {
        addType(typeSpec);
      }
      return this;
    }

    public Builder addType(TypeSpec typeSpec) {
      checkArgument(typeSpec.modifiers.containsAll(kind.implicitTypeModifiers),
          "%s %s.%s requires modifiers %s", kind, name, typeSpec.name,
          kind.implicitTypeModifiers);
      typeSpecs.add(typeSpec);
      return this;
    }

    public Builder addOriginatingElement(Element originatingElement) {
      originatingElements.add(originatingElement);
      return this;
    }

    /**
     * Builds the immutable TypeSpec, validating cross-member invariants that
     * can only be checked once all members are known.
     */
    public TypeSpec build() {
      checkArgument(kind != Kind.ENUM || !enumConstants.isEmpty(),
          "at least one enum constant is required for %s", name);

      boolean isAbstract = modifiers.contains(Modifier.ABSTRACT) || kind != Kind.CLASS;
      for (MethodSpec methodSpec : methodSpecs) {
        checkArgument(isAbstract || !methodSpec.hasModifier(Modifier.ABSTRACT),
            "non-abstract type %s cannot declare abstract method %s", name, methodSpec.name);
      }

      boolean superclassIsObject = superclass.equals(ClassName.OBJECT);
      int interestingSupertypeCount = (superclassIsObject ? 0 : 1) + superinterfaces.size();
      checkArgument(anonymousTypeArguments == null || interestingSupertypeCount <= 1,
          "anonymous type has too many supertypes");

      return new TypeSpec(this);
    }
  }
}
| |
package zendesk.belvedere;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.provider.Settings;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import androidx.annotation.IdRes;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import zendesk.belvedere.ui.R;
/**
* Main entry-point for interacting the UI components of Belvedere.
* <p>
* There are two different UIs available: - Dialog (from 1.x) - ImageStream (BottomSheet)
*/
public class BelvedereUi {
private final static String FRAGMENT_TAG = "BelvedereDialog";
private final static String EXTRA_MEDIA_INTENT = "extra_intent";
private final static String FRAGMENT_TAG_POPUP = "belvedere_image_stream";
private final static String INTENT_URI_SCHEMA = "package";
/**
 * Gets the builder for showing the ImageStream.
 *
 * @param context context used by the builder to construct media intents;
 *                not retained beyond the builder
 * @return a new {@link ImageStreamBuilder}
 */
public static ImageStreamBuilder imageStream(@NonNull Context context) {
    return new ImageStreamBuilder(context);
}
/**
* Install the ImageStream to an {@link AppCompatActivity}
*
* @param activity the activity that will show the ImageStream
* @return an {@link ImageStream}
*/
/**
 * Installs (or re-attaches) the headless ImageStream fragment on the given
 * activity and wires up its keyboard helper.
 *
 * @param activity the activity that will show the ImageStream
 * @return the installed {@link ImageStream}
 */
public static ImageStream install(@NonNull AppCompatActivity activity) {
    final FragmentManager fm = activity.getSupportFragmentManager();
    final Fragment existing = fm.findFragmentByTag(FRAGMENT_TAG_POPUP);

    ImageStream imageStream;
    if (existing instanceof ImageStream) {
        // Reuse the retained instance (e.g. after a configuration change).
        imageStream = (ImageStream) existing;
    } else {
        imageStream = new ImageStream();
        fm.beginTransaction()
                .add(imageStream, FRAGMENT_TAG_POPUP)
                .commitNow();
    }

    imageStream.setKeyboardHelper(KeyboardHelper.inject(activity));
    return imageStream;
}
/**
 * Fluent configuration for showing the ImageStream bottom sheet.
 * Obtain via {@link BelvedereUi#imageStream(Context)} and finish with
 * {@link #showPopup(AppCompatActivity)}.
 */
public static class ImageStreamBuilder {
    private final Context context;
    // Always true: picked media is resolved into local MediaResults.
    private final boolean resolveMedia = true;
    private final List<MediaIntent> mediaIntents = new ArrayList<>();
    private List<MediaResult> selectedItems = new ArrayList<>();
    private List<MediaResult> extraItems = new ArrayList<>();
    private List<Integer> touchableItems = new ArrayList<>();
    private long maxFileSize = -1L;  // -1 means "no limit"
    private boolean fullScreenOnly = false;

    private ImageStreamBuilder(Context context) {
        this.context = context;
    }

    /**
     * Allow the user to select an image from the camera.
     */
    public ImageStreamBuilder withCameraIntent() {
        final MediaIntent cameraIntent = Belvedere.from(context).camera().build();
        this.mediaIntents.add(cameraIntent);
        return this;
    }

    /**
     * Allow the user to select files of the specified content type from the system. Only one of the following
     * should be called as they are mutually exclusive:
     *
     * <li>{@link ImageStreamBuilder#withDocumentIntent(String, boolean)}</li>
     * <li>{@link ImageStreamBuilder#withDocumentIntent(List, boolean)}</li>
     *
     * @param contentType restrict the files to a content type
     * @param allowMultiple allow the user to select multiple attachments in a third party app or the system file
     * picker
     */
    public ImageStreamBuilder withDocumentIntent(@NonNull String contentType, boolean allowMultiple) {
        final MediaIntent mediaIntent = Belvedere.from(context)
                .document()
                .allowMultiple(allowMultiple)
                .contentType(contentType)
                .build();
        this.mediaIntents.add(mediaIntent);
        return this;
    }

    /**
     * Allow the user to select files of any specified content type from the system. This can be used when allowing
     * the selection of files from a disjoint set (e.g. "image/*" and "text/*"). Only one of the following
     * should be called as they are mutually exclusive:
     *
     * <li>{@link ImageStreamBuilder#withDocumentIntent(String, boolean)}</li>
     * <li>{@link ImageStreamBuilder#withDocumentIntent(List, boolean)}</li>
     *
     * @param contentTypes restrict the files to the content types
     * @param allowMultiple allow the user to select multiple attachments in a third party app or the system file
     * picker
     */
    public ImageStreamBuilder withDocumentIntent(@NonNull List<String> contentTypes, boolean allowMultiple) {
        final MediaIntent mediaIntent = Belvedere.from(context)
                .document()
                .allowMultiple(allowMultiple)
                .contentTypes(contentTypes)
                .build();
        this.mediaIntents.add(mediaIntent);
        return this;
    }

    /**
     * Pass in files that are should be marked as selected. The provided list is copied.
     */
    public ImageStreamBuilder withSelectedItems(List<MediaResult> mediaResults) {
        this.selectedItems = new ArrayList<>(mediaResults);
        return this;
    }

    /**
     * Pass in files that are not selected but should show up in the ImageStream.
     * The provided list is copied.
     */
    public ImageStreamBuilder withExtraItems(List<MediaResult> mediaResults) {
        this.extraItems = new ArrayList<>(mediaResults);
        return this;
    }

    /**
     * Specify a list of ids from your activity that should be clickable although the ImageStream is visible.
     */
    public ImageStreamBuilder withTouchableItems(@IdRes int... ids) {
        // Box the primitive ids so they can travel in the UiConfig Parcelable.
        final List<Integer> objects = new ArrayList<>(ids.length);
        for (int id : ids) {
            objects.add(id);
        }
        this.touchableItems = objects;
        return this;
    }

    /**
     * Define a maximum file size. Files bigger than the provided value are not selectable.
     *
     * @param maxFileSize maximum file size in bytes
     */
    public ImageStreamBuilder withMaxFileSize(long maxFileSize) {
        this.maxFileSize = maxFileSize;
        return this;
    }

    /**
     * Always show the image picker in full screen.
     *
     * @param enabled {@code true} if the picker should be shown full screen to the user, {@code false} if the
     * picker should be drawn above the keyboard
     */
    public ImageStreamBuilder withFullScreenOnly(boolean enabled) {
        this.fullScreenOnly = enabled;
        return this;
    }

    /**
     * Show the ImageStream to the user.
     * <p>
     * Requests any missing runtime permissions first; on grant the popup is
     * shown over the activity's decor view, on denial a bottom-sheet hint is
     * shown with a shortcut to the app's settings screen.
     */
    public void showPopup(final AppCompatActivity activity) {
        final ImageStream popupBackend = BelvedereUi.install(activity);
        popupBackend.handlePermissions(mediaIntents, new PermissionManager.PermissionCallback() {
            @Override
            public void onPermissionsGranted(final List<MediaIntent> mediaIntents) {
                final Activity appCompatActivity = popupBackend.getActivity();
                // Skip if the activity is mid-recreation; the retained
                // fragment will be reinstalled on the new instance.
                if (appCompatActivity != null && !appCompatActivity.isChangingConfigurations()) {
                    final ViewGroup decorView = (ViewGroup) appCompatActivity.getWindow().getDecorView();
                    // Post so the decor view is laid out before the popup measures itself.
                    decorView.post(new Runnable() {
                        @Override
                        public void run() {
                            final UiConfig uiConfig = new UiConfig(mediaIntents, selectedItems, extraItems,
                                    resolveMedia, touchableItems, maxFileSize, fullScreenOnly);
                            final ImageStreamUi show = ImageStreamUi.show(
                                    appCompatActivity,
                                    decorView,
                                    popupBackend,
                                    uiConfig);
                            popupBackend.setImageStreamUi(show, uiConfig);
                        }
                    });
                }
            }

            @Override
            public void onPermissionsDenied() {
                final Activity appCompatActivity = popupBackend.getActivity();
                if (appCompatActivity != null) {
                    final ViewGroup parentView = appCompatActivity.findViewById(android.R.id.content);
                    // Explain why, and offer a jump to the app's settings page.
                    Utils.showBottomSheetDialog(
                            parentView,
                            appCompatActivity.getString(R.string.belvedere_permissions_rationale),
                            5000L,
                            appCompatActivity.getString(R.string.belvedere_navigate_to_settings),
                            new OnClickListener() {
                                @Override
                                public void onClick(View v) {
                                    Intent settingsIntent = new Intent();
                                    settingsIntent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
                                    settingsIntent.setData(
                                            Uri.fromParts(
                                                    INTENT_URI_SCHEMA,
                                                    appCompatActivity.getPackageName(),
                                                    null
                                            )
                                    );
                                    appCompatActivity.startActivity(settingsIntent);
                                }
                            });
                }
            }
        });
    }
}
/**
* Show the Belvedere dialog to the user
*
* @param fm a valid {@link FragmentManager}
* @param mediaIntent a list of {@link MediaIntent}
*/
/**
 * Shows the Belvedere dialog, listing one row per media intent.
 * No-op when there are no intents to offer.
 */
public static void showDialog(FragmentManager fm, List<MediaIntent> mediaIntent) {
    if (mediaIntent == null || mediaIntent.isEmpty()) {
        return;
    }
    final Bundle args = getBundle(mediaIntent,
            new ArrayList<MediaResult>(0),
            new ArrayList<MediaResult>(0),
            true,
            new ArrayList<Integer>(0));
    final BelvedereDialog dialog = new BelvedereDialog();
    dialog.setArguments(args);
    dialog.show(fm, FRAGMENT_TAG);
}
/**
* Show the Belvedere dialog to the user
*
* @param fm a valid {@link FragmentManager}
* @param mediaIntent a list of {@link MediaIntent}
*/
/**
 * Varargs convenience overload of {@link #showDialog(FragmentManager, List)}.
 * No-op when no intents are given.
 */
public static void showDialog(FragmentManager fm, MediaIntent... mediaIntent) {
    if (mediaIntent != null && mediaIntent.length > 0) {
        showDialog(fm, Arrays.asList(mediaIntent));
    }
}
/**
 * Packs the dialog configuration into a fragment-arguments Bundle.
 * Each input list is defensively copied; null lists are treated as empty.
 *
 * @param mediaIntent   intents to offer; may be null
 * @param selectedItems items initially marked as selected; may be null
 * @param extraItems    unselected items to display; may be null
 * @param resolveMedia  whether picked media should be resolved locally
 * @param touchableIds  activity view ids that remain clickable
 */
private static Bundle getBundle(List<MediaIntent> mediaIntent, List<MediaResult> selectedItems,
                                List<MediaResult> extraItems, boolean resolveMedia,
                                List<Integer> touchableIds) {
    final List<MediaIntent> intents = new ArrayList<>();
    final List<MediaResult> selected = new ArrayList<>();
    final List<MediaResult> extra = new ArrayList<>();

    if (mediaIntent != null) {
        intents.addAll(mediaIntent);
    }
    if (selectedItems != null) {
        selected.addAll(selectedItems);
    }
    if (extraItems != null) {
        extra.addAll(extraItems);
    }

    // -1L = no file size limit, false = not full-screen-only.
    final UiConfig uiConfig = new UiConfig(intents, selected, extra, resolveMedia, touchableIds, -1L, false);
    final Bundle bundle = new Bundle();
    bundle.putParcelable(EXTRA_MEDIA_INTENT, uiConfig);
    return bundle;
}
/** Reads the {@link UiConfig} out of the dialog arguments, falling back to defaults when absent. */
static UiConfig getUiConfig(Bundle bundle) {
    final UiConfig config = bundle.getParcelable(EXTRA_MEDIA_INTENT);
    return config != null ? config : new UiConfig();
}
/**
 * Parcelable configuration for the Belvedere dialog: which media intents to
 * offer, already-selected and extra media items, which view ids remain
 * touchable, and presentation flags. All fields are final; instances are
 * effectively immutable once built.
 */
public static class UiConfig implements Parcelable {
    // Media intents the dialog offers to the user.
    private final List<MediaIntent> intents;
    // Items the user has already selected.
    private final List<MediaResult> selectedItems;
    // Additional items shown alongside the selection.
    private final List<MediaResult> extraItems;
    // View ids that stay touchable while the dialog is visible.
    private final List<Integer> touchableElements;
    // Whether picked media should be resolved after selection.
    // NOTE(review): exact semantics of "resolve" defined by the consumer — confirm.
    private final boolean resolveMedia;
    // Maximum allowed file size in bytes; -1 appears to mean "no limit" (see getBundle) — confirm.
    private final long maxFileSize;
    // Whether the dialog must always be presented full screen.
    private final boolean fullScreenOnly;
    /** Default configuration: empty lists, resolveMedia on, no size limit, not full-screen-only. */
    UiConfig() {
        this.intents = new ArrayList<>();
        this.selectedItems = new ArrayList<>();
        this.extraItems = new ArrayList<>();
        this.touchableElements = new ArrayList<>();
        this.resolveMedia = true;
        this.maxFileSize = -1L;
        this.fullScreenOnly = false;
    }
    /** Full constructor; the supplied lists are stored as-is (no defensive copy here). */
    UiConfig(List<MediaIntent> intents, List<MediaResult> selectedItems,
             List<MediaResult> extraItems, boolean resolveMedia,
             List<Integer> touchableElements, long maxFileSize,
             boolean fullScreenOnly) {
        this.intents = intents;
        this.selectedItems = selectedItems;
        this.extraItems = extraItems;
        this.resolveMedia = resolveMedia;
        this.touchableElements = touchableElements;
        this.maxFileSize = maxFileSize;
        this.fullScreenOnly = fullScreenOnly;
    }
    /**
     * Restores from a Parcel.
     * NOTE: the read order here must exactly mirror the write order in
     * {@link #writeToParcel(Parcel, int)} — keep both in sync.
     */
    UiConfig(Parcel in) {
        this.intents = in.createTypedArrayList(MediaIntent.CREATOR);
        this.selectedItems = in.createTypedArrayList(MediaResult.CREATOR);
        this.extraItems = in.createTypedArrayList(MediaResult.CREATOR);
        this.touchableElements = new ArrayList<>();
        in.readList(touchableElements, Integer.class.getClassLoader());
        this.resolveMedia = in.readInt() == 1;
        this.maxFileSize = in.readLong();
        this.fullScreenOnly = in.readInt() == 1;
    }
    List<MediaIntent> getIntents() {
        return intents;
    }
    List<MediaResult> getSelectedItems() {
        return selectedItems;
    }
    List<MediaResult> getExtraItems() {
        return extraItems;
    }
    List<Integer> getTouchableElements() {
        return touchableElements;
    }
    long getMaxFileSize() {
        return maxFileSize;
    }
    boolean showFullScreenOnly() {
        return fullScreenOnly;
    }
    public static final Creator<UiConfig> CREATOR = new Creator<UiConfig>() {
        @Override
        public UiConfig createFromParcel(Parcel in) {
            return new UiConfig(in);
        }
        @Override
        public UiConfig[] newArray(int size) {
            return new UiConfig[size];
        }
    };
    @Override
    public int describeContents() {
        return 0;
    }
    /** Serializes to a Parcel; the field order must match {@link #UiConfig(Parcel)}. */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeTypedList(intents);
        dest.writeTypedList(selectedItems);
        dest.writeTypedList(extraItems);
        dest.writeList(touchableElements);
        dest.writeInt(resolveMedia ? 1 : 0);
        dest.writeLong(maxFileSize);
        dest.writeInt(fullScreenOnly ? 1 : 0);
    }
}
}
| |
/**
* File: Graph.java
*
* Author: Brian Westerman
*
* Creates a graph that stores Vertex objects and associated data
*/
import java.util.*;
/**
 * A graph of {@link Vertex} objects supporting bidirectional and
 * unidirectional edges, single-source shortest paths on unit-weight edges,
 * connectivity and cycle checks.
 *
 * NOTE(review): the type parameter T is never used by this class; it is kept
 * for source compatibility with existing parameterized declarations.
 */
public class Graph<T> {
    // All vertices currently in the graph.
    private ArrayList<Vertex> vertices;
    // Cached vertex count; kept in sync with vertices.size().
    private int size;

    public Graph() {
        // Fix: removed an unused java.util.Random the old constructor allocated.
        this.vertices = new ArrayList<>();
        this.size = 0;
    }

    // Accessor for vertices (returns the live internal list, as before).
    public ArrayList<Vertex> getVertices() {
        return this.vertices;
    }

    // Counts the number of vertices in the graph
    public int vertexCount() {
        return this.size;
    }

    // Adds v to the graph (no duplicate check; see ensureVertex for set semantics).
    public void addVertex(Vertex v) {
        this.vertices.add(v);
        this.size++;
    }

    // Removes vertex from the graph, reporting an error if it is absent.
    public void remove(Vertex vertex) {
        if (this.vertices.remove(vertex)) {
            this.size--;
        } else {
            System.out.println("Error: vertex not found.");
        }
    }

    // Clears all vertices from the graph
    public void clear() {
        this.vertices.clear();
        this.size = 0;
    }

    // Adds v to the graph unless an equal vertex is already present.
    // (Extracted from the three addEdge variants, which previously triplicated
    // this membership scan inline.)
    private void ensureVertex(Vertex v) {
        for (Vertex existing : this.vertices) {
            if (v.equals(existing)) {
                return;
            }
        }
        this.addVertex(v);
    }

    // Adds v1 and v2 to the graph (if necessary) and adds edges connecting v1 to v2
    // via direction dir and v2 to v1 via the opposite direction (bidirectional).
    public void addEdge(Vertex v1, Vertex.Direction dir, Vertex v2) {
        ensureVertex(v1);
        ensureVertex(v2);
        v1.connect(v2, dir);
        v2.connect(v1, Vertex.opposite(dir));
    }

    // Adds v1 and v2 to the graph (if necessary) and connects them in both directions.
    public void addEdge(Vertex v1, Vertex v2) {
        ensureVertex(v1);
        ensureVertex(v2);
        v1.connect(v2);
        v2.connect(v1);
    }

    // Adds v1 and v2 to the graph (if necessary) and adds a unidirectional edge v1 -> v2.
    public void addEdgeUnidirectional(Vertex v1, Vertex v2) {
        ensureVertex(v1);
        ensureVertex(v2);
        v1.connect(v2);
    }

    // Single-source shortest path (Dijkstra on unit-weight edges). Afterwards each
    // vertex's cost is its distance from v0; unreachable vertices keep Integer.MAX_VALUE.
    public void shortestPath(Vertex v0) {
        for (Vertex v : this.vertices) {
            v.setMarked(false);
            v.setCost(Integer.MAX_VALUE);
        }
        PriorityQueue<Vertex> pq = new PriorityQueue<>();
        v0.setCost(0);
        pq.add(v0);
        while (!pq.isEmpty()) {
            Vertex v = pq.poll();
            v.setMarked(true);
            // Careful if using non-cardinal direction vertices - use getFreeNeighbors() instead
            for (Vertex neighbor : v.getNeighbors()) {
                if (!neighbor.isMarked() && v.getCost() + 1 < neighbor.getCost()) {
                    neighbor.setCost(v.getCost() + 1);
                    // Remove and re-add so the priority queue re-orders the neighbor
                    // under its reduced cost.
                    pq.remove(neighbor);
                    pq.add(neighbor);
                }
            }
        }
    }

    // Depth-first visit marking every vertex reachable from v.
    public void visit(Vertex v) {
        v.setMarked(true);
        for (Vertex n : v.getFreeNeighbors()) {
            if (!n.isMarked()) {
                visit(n);
            }
        }
    }

    // Checks whether every vertex in the graph is reachable from start.
    public boolean isConnected(Vertex start) {
        for (Vertex v : this.vertices) {
            v.setMarked(false);
        }
        visit(start);
        for (Vertex n : this.vertices) {
            if (!n.isMarked()) {
                return false;
            }
        }
        return true;
    }

    // BFS-based cycle check from start: a marked neighbor that is still queued was
    // reached along two distinct paths, i.e. a cycle.
    // NOTE(review): assumes MyQueue is Iterable (it is iterated below) — confirm.
    public boolean isAcyclic(Vertex start) {
        for (Vertex v : this.vertices) {
            v.setMarked(false);
        }
        MyQueue<Vertex> C = new MyQueue<>();
        C.add(start);
        while (!C.isEmpty()) {
            Vertex n = C.remove();
            n.setMarked(true);
            for (Vertex p : n.getNeighbors()) {
                if (!p.isMarked()) {
                    C.add(p);
                } else {
                    for (Vertex v : C) {
                        if (p.equals(v)) {
                            return false;
                        }
                    }
                }
            }
        }
        return true;
    }

    // Small smoke test of graph construction and shortest paths.
    public static void main(String[] args) {
        Graph<Object> graph = new Graph<>(); // fix: was declared with the raw type Graph
        Vertex vertex1 = new Vertex("Brian", 0, 0);
        Vertex vertex2 = new Vertex("Kenny", 1, 1);
        Vertex vertex3 = new Vertex("Anne", 2, 2);
        Vertex vertex4 = new Vertex("Gary", 3, 3);
        Vertex vertex5 = new Vertex("Grammy", 4, 4);
        Vertex vertex6 = new Vertex("Grandad", 5, 5);
        graph.addVertex(vertex1);
        graph.addVertex(vertex2);
        graph.addVertex(vertex3);
        graph.addVertex(vertex4);
        graph.addVertex(vertex5);
        graph.addVertex(vertex6);
        graph.addEdge(vertex1, Vertex.Direction.NORTH, vertex2);
        graph.addEdge(vertex1, Vertex.Direction.EAST, vertex3);
        graph.addEdge(vertex1, Vertex.Direction.SOUTH, vertex4);
        graph.addEdge(vertex1, Vertex.Direction.WEST, vertex5);
        // Skip an edge to vertex6 for testing purposes
        // Run Dijkstra's algorithm
        graph.shortestPath(vertex1);
        vertex1.compareTo(vertex2); // result unused; kept from the original — presumably a leftover check
        System.out.println(vertex1.getCost());
        System.out.println(vertex2.getCost());
        System.out.println(vertex3.getCost());
        System.out.println(vertex4.getCost());
        System.out.println(vertex5.getCost());
        System.out.println(vertex6.getCost());
    }
}
| |
package com.doubtech.universalremote.providers.providerdo;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.database.Cursor;
import android.net.Uri;
import com.doubtech.universalremote.listeners.IconLoaderListener;
import com.doubtech.universalremote.providers.AbstractUniversalRemoteProvider;
import com.doubtech.universalremote.providers.URPContract;
import com.doubtech.universalremote.providers.URPContract.Parents;
/**
 * A node in a universal-remote provider's browse tree, addressed by a
 * provider authority plus a path of id segments. Leaf nodes are
 * {@link Button}s (a subclass).
 *
 * <p>Fix: the class now implements {@link Cloneable}. Previously
 * {@link ParentBuilder#build()} called {@link #clone()}, which always threw
 * {@link CloneNotSupportedException} because Cloneable was missing, so
 * build() silently returned the builder's internal mutable instance —
 * later builder mutations leaked into already-"built" objects.
 */
public class Parent implements Cloneable {
    protected String mAuthority;
    protected String[] mPath;
    protected String mName;
    // Lazily computed hash of the path string (0 = not yet computed).
    private int mHashCode;
    private boolean mHasButtonSets;
    private String mDescription;
    // True while this node is a stub whose fields still need to be fetched.
    private boolean mNeedsToFetch = true;
    private Parent[] mChildren = new Parent[0];
    private String mLevelName;
    private Parent mParent;

    /** Builder producing shallow copies of a configured Parent. */
    public static class ParentBuilder {
        Parent mParent;

        ParentBuilder(Parent parent) {
            mParent = parent;
        }

        public ParentBuilder(String authority, String[] path) {
            mParent = new Parent(authority, path, false);
        }

        public ParentBuilder setName(String name) {
            mParent.mName = name;
            return this;
        }

        public ParentBuilder setHasButtonSets(boolean hasButtonSets) {
            mParent.mHasButtonSets = hasButtonSets;
            return this;
        }

        public ParentBuilder setDescription(String description) {
            mParent.mDescription = description;
            return this;
        }

        public ParentBuilder setLevelName(String levelName) {
            mParent.mLevelName = levelName;
            return this;
        }

        /**
         * Returns a shallow copy of the configured node so later builder
         * mutations do not affect already-built instances.
         */
        public Parent build() {
            try {
                return (Parent) mParent.clone();
            } catch (CloneNotSupportedException e) {
                // Unreachable now that Parent implements Cloneable; kept as a
                // defensive fallback.
                return mParent;
            }
        }
    }

    protected Parent() {
    }

    public Parent(String authority, String[] path, boolean needsToFetch) {
        mAuthority = authority;
        mPath = path;
        mNeedsToFetch = needsToFetch;
    }

    public Parent(String authority, String id, boolean needsToFetch) {
        mAuthority = authority;
        mPath = new String[] {id};
        mNeedsToFetch = needsToFetch;
    }

    /**
     * Fills parent's authority and path from uri (the first path segment is
     * the table path and is skipped) and returns the cached instance if one
     * exists for that address.
     */
    public static Parent fromUri(Parent parent, Uri uri) {
        List<String> segments = uri.getPathSegments();
        String[] path = new String[segments.size() - 1];
        for (int i = 0; i < path.length; i++) {
            path[i] = segments.get(i + 1);
        }
        parent.mAuthority = uri.getAuthority();
        parent.mPath = path;
        return getCached(parent);
    }

    public static Parent fromUri(Uri uri) {
        return fromUri(new Parent(), uri);
    }

    /** Content uri for this node under the buttons table. */
    public Uri getUri() {
        return URPContract.getUri(mAuthority, URPContract.TABLE_BUTTONS_PATH, getPath());
    }

    /**
     * Builds a Parent — or a Button when COLUMN_TYPE says the row is not a
     * "parent" — from a provider cursor row. Rows without a type column are
     * treated as parents.
     */
    public static Parent fromCursor(Cursor cursor) {
        int idx;
        idx = cursor.getColumnIndex(Parents.COLUMN_AUTHORITY);
        String authority = cursor.getString(idx);
        String parent = "";
        idx = cursor.getColumnIndex(Parents.COLUMN_PATH);
        if (idx >= 0) {
            parent = cursor.getString(idx);
        }
        idx = cursor.getColumnIndex(Parents.COLUMN_TYPE);
        boolean isParent = -1 == idx || "parent".equals(cursor.getString(idx));
        return fromCursor(isParent ? new Parent(authority, getPath(parent), true) :
                new Button(authority, getPath(parent), true), cursor);
    }

    /** Fills node's optional fields from whichever cursor columns are present. */
    public static Parent fromCursor(Parent node, Cursor cursor) {
        int idx = cursor.getColumnIndex(Parents.COLUMN_NAME);
        if (idx >= 0) {
            node.mName = cursor.getString(idx);
        }
        idx = cursor.getColumnIndex(Parents.COLUMN_LEVEL);
        if (idx >= 0) {
            node.mLevelName = cursor.getString(idx);
        }
        idx = cursor.getColumnIndex(Parents.COLUMN_HAS_BUTTONSETS);
        if (idx >= 0) {
            node.mHasButtonSets = cursor.getInt(idx) != 0;
        }
        if (node instanceof Button) {
            node = Button.fromCursor((Button) node, cursor);
        }
        return node;
    }

    /** Splits a slash-separated path string into uri-decoded segments. */
    static String[] getPath(String path) {
        String[] segments = path.split("/");
        for (int i = 0; i < segments.length; i++) {
            segments[i] = Uri.decode(segments[i]);
        }
        return segments;
    }

    /** Returns parent's path segments with obj's "id" appended as the last segment. */
    static String[] getPath(String parent, JSONObject obj) throws JSONException {
        String[] path = getPath(parent);
        String[] fullPath = new String[path.length + 1];
        for (int i = 0; i < path.length; i++) {
            fullPath[i] = path[i];
        }
        fullPath[path.length] = obj.getString("id");
        return fullPath;
    }

    /**
     * Parses a provider JSON payload ({"objectType", "parent", "objects": [...]})
     * into child nodes of parentNode, caching each one and attaching them as
     * parentNode's children.
     */
    public static Parent[] fromJson(AbstractUniversalRemoteProvider provider, Parent parentNode, String json) throws JSONException {
        if (null == json || json.length() == 0) return new Parent[0];
        JSONObject obj = new JSONObject(json);
        boolean isParent = "parent".equals(obj.getString("objectType"));
        JSONArray array = obj.getJSONArray("objects");
        Parent[] models = new Parent[array.length()];
        String parent = obj.getString("parent");
        String authority = provider.getAuthority();
        String levelName = null;
        boolean hasButtonSets = false;
        // Top-level defaults; each object may still override description below.
        if (obj.has(Parents.COLUMN_HAS_BUTTONSETS)) {
            hasButtonSets = obj.getBoolean(Parents.COLUMN_HAS_BUTTONSETS);
        }
        if (obj.has(Parents.COLUMN_LEVEL)) {
            levelName = obj.getString(Parents.COLUMN_LEVEL);
        }
        for (int i = 0; i < array.length(); i++) {
            obj = array.getJSONObject(i);
            String[] path = getPath(parent, obj);
            Parent node = isParent ? new Parent(authority, path, false)
                    : new Button(authority, path, false);
            node.mName = obj.getString(Parents.COLUMN_NAME);
            node.mHasButtonSets = hasButtonSets || provider.hasButtonSets(node);
            node.mLevelName = levelName;
            if (obj.has(Parents.COLUMN_DESCRIPTION)) {
                node.mDescription = obj.getString(Parents.COLUMN_DESCRIPTION);
            } else {
                node.mDescription = provider.getDescription(node);
            }
            if (!isParent) {
                node = Button.fromJson(provider, obj, (Button) node);
            }
            if (null == node.mParent) {
                node.mParent = parentNode;
            }
            models[i] = getCached(node);
        }
        if (null != parentNode) {
            parentNode.setChildren(models);
        }
        return models;
    }

    public String[] getColumns() {
        return URPContract.Parents.ALL;
    }

    /** Serializes this node into a cursor row; order presumably matches Parents.ALL — confirm. */
    public Object[] toRow() {
        Object[] row = new Object[] {
                hashCode(),
                getAuthority(),
                getPathString(),
                getLevelName(),
                getName(),
                getDescription(),
                Parents.TYPE_PARENT,
                hasButtonSets() ? 1 : 0
        };
        return row;
    }

    public String getLevelName() {
        return mLevelName;
    }

    public String getName() {
        return mName;
    }

    public String getDescription() {
        return mDescription;
    }

    public String[] getPath() {
        return mPath;
    }

    /** Path as a single encoded string, e.g. "/a/b/c"; duplicate slashes collapsed. */
    public String getPathString() {
        StringBuilder path = new StringBuilder();
        for (String segment : getPath()) {
            path.append("/");
            path.append(Uri.encode(segment));
        }
        return path.toString().replaceAll("//+", "/");
    }

    public String getAuthority() {
        return mAuthority;
    }

    @Override
    public int hashCode() {
        // Cached on first use; a path string hashing to 0 simply recomputes.
        if (0 == mHashCode) {
            mHashCode = getPathString().hashCode();
        }
        return mHashCode;
    }

    @Override
    public boolean equals(Object o) {
        // Equal iff authority and encoded path match; consistent with hashCode()
        // (equal paths imply equal path-string hashes).
        if (o instanceof Parent) {
            Parent b = (Parent) o;
            return getAuthority().equals(b.getAuthority()) &&
                    getPathString().equals(b.getPathString());
        }
        return false;
    }

    // Process-wide cache of fully-fetched nodes, keyed by authority/path equality.
    // NOTE(review): unbounded; entries are never evicted.
    private static ConcurrentHashMap<Parent, Parent> mCache = new ConcurrentHashMap<Parent, Parent>();

    /** Returns the cached equivalent of button, caching it if complete and not yet known. */
    public static Parent getCached(Parent button) {
        Parent b = mCache.get(button);
        if (null == b) {
            b = button;
            // Only cache objects that have all of their data.
            if (!button.needsToFetch()) {
                mCache.put(b, b);
            }
        }
        return b;
    }

    public boolean hasButtonSets() {
        return mHasButtonSets;
    }

    public boolean needsToFetch() {
        return mNeedsToFetch;
    }

    public void setNeedsToFetch(boolean needsToFetch) {
        mNeedsToFetch = needsToFetch;
    }

    public void setChildren(Parent[] children) {
        mChildren = children;
    }

    public Parent[] getChildren() {
        return mChildren;
    }

    /** Last path segment. */
    public String getId() {
        return getPath()[getPath().length - 1];
    }

    /** Lazily derives the parent node by dropping the last path segment. */
    public Parent getParent() {
        if (null == mParent && mPath.length > 0) {
            String[] path = new String[mPath.length - 1];
            for (int i = 0; i < path.length; i++) {
                path[i] = mPath[i];
            }
            mParent = getCached(new Parent(mAuthority, path, true));
        }
        return mParent;
    }
}
| |
package com.yermoon.server.util;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
/**
* Utilities for byte process
*/
/**
 * Utilities for byte processing: UTF-8 string conversion, power-of-two
 * capacity normalization, hex formatting, and fast integer-to-ASCII
 * rendering (ported from the JDK's Integer/Long implementations).
 */
public final class ByteUtils {
    public static final String DEFAULT_CHARSET_NAME = "utf-8";
    public static final Charset DEFAULT_CHARSET = Charset
            .forName(DEFAULT_CHARSET_NAME);
    /**
     * All possible chars for representing a number as a String
     */
    final static byte[] digits = {'0', '1', '2', '3', '4', '5', '6', '7', '8',
            '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l',
            'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y',
            'z'};
    // Tens digit of n for n in 0..99; lets getBytes emit two digits per division.
    final static byte[] DigitTens = {'0', '0', '0', '0', '0', '0', '0', '0',
            '0', '0', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '2',
            '2', '2', '2', '2', '2', '2', '2', '2', '2', '3', '3', '3', '3',
            '3', '3', '3', '3', '3', '3', '4', '4', '4', '4', '4', '4', '4',
            '4', '4', '4', '5', '5', '5', '5', '5', '5', '5', '5', '5', '5',
            '6', '6', '6', '6', '6', '6', '6', '6', '6', '6', '7', '7', '7',
            '7', '7', '7', '7', '7', '7', '7', '8', '8', '8', '8', '8', '8',
            '8', '8', '8', '8', '9', '9', '9', '9', '9', '9', '9', '9', '9',
            '9',};
    // Ones digit of n for n in 0..99.
    final static byte[] DigitOnes = {'0', '1', '2', '3', '4', '5', '6', '7',
            '8', '9', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0',
            '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '1', '2', '3',
            '4', '5', '6', '7', '8', '9', '0', '1', '2', '3', '4', '5', '6',
            '7', '8', '9', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '1', '2',
            '3', '4', '5', '6', '7', '8', '9', '0', '1', '2', '3', '4', '5',
            '6', '7', '8', '9', '0', '1', '2', '3', '4', '5', '6', '7', '8',
            '9',};
    // Upper bound for each decimal length 1..10; used by stringSize(int).
    final static int[] sizeTable = {9, 99, 999, 9999, 99999, 999999, 9999999,
            99999999, 999999999, Integer.MAX_VALUE};
    // Decimal string length of each byte value, indexed by (b & 0xFF).
    final static int[] byte_len_array = new int[256];
    static {
        for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; ++i) {
            int size = (i < 0) ? stringSize(-i) + 1 : stringSize(i);
            byte_len_array[i & 0xFF] = size;
        }
    }
    // Hex digit table for byte2hex; hoisted to a constant so it is not
    // re-allocated on every call.
    private static final char[] HEX_CHARS = {'0', '1', '2', '3', '4', '5',
            '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
    /**
     * if it is testing,check key argument even if use binary protocol. The user
     * must never change this value at all.
     */
    public static boolean testing;
    // NOTE(review): unused within this class — possibly a leftover from a
    // key-length check elsewhere; confirm before removing.
    private static int maxKeyLength = 250;

    private ByteUtils() {
        // Utility class; not instantiable.
    }

    /**
     * Returns true if string is a non-empty, optionally '-'-prefixed run of
     * decimal digits. null, "" and a lone "-" are not numbers.
     */
    public static boolean isNumber(String string) {
        if (string == null || string.isEmpty()) {
            return false;
        }
        int i = 0;
        if (string.charAt(0) == '-') {
            if (string.length() > 1) {
                i++;
            } else {
                return false;
            }
        }
        for (; i < string.length(); i++) {
            if (!Character.isDigit(string.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    /**
     * Encodes k as UTF-8.
     *
     * @throws IllegalArgumentException if k is null or empty
     */
    public static byte[] getBytes(String k) {
        if (k == null || k.length() == 0) {
            throw new IllegalArgumentException("Key must not be blank");
        }
        // The Charset overload cannot throw UnsupportedEncodingException,
        // removing the old try/catch-and-rethrow.
        return k.getBytes(DEFAULT_CHARSET);
    }

    /**
     * Rounds requestedCapacity up to the next power of two. Exact powers of
     * two (and 0 / Integer.MAX_VALUE) are returned unchanged; values whose
     * next power of two would overflow clamp to Integer.MAX_VALUE.
     */
    public static int normalizeCapacity(int requestedCapacity) {
        switch (requestedCapacity) {
            case 0:
            case 1 << 0:
            case 1 << 1:
            case 1 << 2:
            case 1 << 3:
            case 1 << 4:
            case 1 << 5:
            case 1 << 6:
            case 1 << 7:
            case 1 << 8:
            case 1 << 9:
            case 1 << 10:
            case 1 << 11:
            case 1 << 12:
            case 1 << 13:
            case 1 << 14:
            case 1 << 15:
            case 1 << 16:
            case 1 << 17:
            case 1 << 18:
            case 1 << 19:
            case 1 << 20: // fix: this fast-path case was missing (19 jumped straight to 21)
            case 1 << 21:
            case 1 << 22:
            case 1 << 23:
            case 1 << 24:
            case 1 << 25:
            case 1 << 26:
            case 1 << 27:
            case 1 << 28:
            case 1 << 29:
            case 1 << 30:
            case Integer.MAX_VALUE:
                return requestedCapacity;
        }
        int newCapacity = 1;
        while (newCapacity < requestedCapacity) {
            newCapacity <<= 1;
            if (newCapacity < 0) {
                // Shifted past 1 << 30: overflowed to negative, clamp.
                return Integer.MAX_VALUE;
            }
        }
        return newCapacity;
    }

    /**
     * Advances buffer's position by remaining bytes if that many are
     * available; otherwise leaves the buffer untouched and returns false.
     */
    public static boolean stepBuffer(ByteBuffer buffer, int remaining) {
        if (buffer.remaining() >= remaining) {
            buffer.position(buffer.position() + remaining);
            return true;
        } else {
            return false;
        }
    }

    /** Decodes bytes as UTF-8. */
    public static String getString(byte[] bytes) {
        // Charset overload: no checked exception to wrap.
        return new String(bytes, DEFAULT_CHARSET);
    }

    /** Appends the two-character uppercase hex form of b to buf. */
    public static void byte2hex(byte b, StringBuffer buf) {
        int high = ((b & 0xf0) >> 4);
        int low = (b & 0x0f);
        buf.append(HEX_CHARS[high]);
        buf.append(HEX_CHARS[low]);
    }

    /** Appends the hex form of a to str. */
    public static void int2hex(int a, StringBuffer str) {
        str.append(Integer.toHexString(a));
    }

    /** Appends the hex form of a to str (note: no truncation to 16 bits is performed). */
    public static void short2hex(int a, StringBuffer str) {
        str.append(Integer.toHexString(a));
    }

    /**
     * Places the decimal representation of i into buf, backwards starting at
     * index (exclusive). buf must be pre-sized by the caller (see
     * {@link #stringSize(long)}). Will fail for Long.MIN_VALUE (negation
     * overflows).
     */
    public static void getBytes(long i, int index, byte[] buf) {
        long q;
        int r;
        int pos = index;
        byte sign = 0;
        if (i < 0) {
            sign = '-';
            i = -i;
        }
        // Get 2 digits/iteration using longs until quotient fits into an int
        while (i > Integer.MAX_VALUE) {
            q = i / 100;
            // really: r = i - (q * 100);
            r = (int) (i - ((q << 6) + (q << 5) + (q << 2)));
            i = q;
            buf[--pos] = DigitOnes[r];
            buf[--pos] = DigitTens[r];
        }
        // Get 2 digits/iteration using ints
        int q2;
        int i2 = (int) i;
        while (i2 >= 65536) {
            q2 = i2 / 100;
            // really: r = i2 - (q * 100);
            r = i2 - ((q2 << 6) + (q2 << 5) + (q2 << 2));
            i2 = q2;
            buf[--pos] = DigitOnes[r];
            buf[--pos] = DigitTens[r];
        }
        // Fall thru to fast mode for smaller numbers
        // assert(i2 <= 65536, i2);
        for (; ; ) {
            q2 = (i2 * 52429) >>> (16 + 3); // q2 = i2 / 10 via fixed-point multiply
            r = i2 - ((q2 << 3) + (q2 << 1)); // r = i2-(q2*10) ...
            buf[--pos] = digits[r];
            i2 = q2;
            if (i2 == 0)
                break;
        }
        if (sign != 0) {
            buf[--pos] = sign;
        }
    }

    /**
     * Places characters representing the integer i into the character array
     * buf. The characters are placed into the buffer backwards starting with
     * the least significant digit at the specified index (exclusive), and
     * working backwards from there.
     * <p/>
     * Will fail if i == Integer.MIN_VALUE
     */
    static void getBytes(int i, int index, byte[] buf) {
        int q, r;
        int pos = index;
        byte sign = 0;
        if (i < 0) {
            sign = '-';
            i = -i;
        }
        // Generate two digits per iteration
        while (i >= 65536) {
            q = i / 100;
            // really: r = i - (q * 100);
            r = i - ((q << 6) + (q << 5) + (q << 2));
            i = q;
            buf[--pos] = DigitOnes[r];
            buf[--pos] = DigitTens[r];
        }
        // Fall thru to fast mode for smaller numbers
        // assert(i <= 65536, i);
        for (; ; ) {
            q = (i * 52429) >>> (16 + 3); // q = i / 10 via fixed-point multiply
            r = i - ((q << 3) + (q << 1)); // r = i-(q*10) ...
            buf[--pos] = digits[r];
            i = q;
            if (i == 0)
                break;
        }
        if (sign != 0) {
            buf[--pos] = sign;
        }
    }

    // Requires positive x: number of decimal digits in x.
    public static final int stringSize(int x) {
        for (int i = 0; ; i++)
            if (x <= sizeTable[i])
                return i + 1;
    }

    // Requires positive x: number of decimal digits in x (at most 19).
    public static int stringSize(long x) {
        long p = 10;
        for (int i = 1; i < 19; i++) {
            if (x < p)
                return i;
            p = 10 * p;
        }
        return 19;
    }
}
| |
package org.zstack.storage.volume;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.cascade.CascadeConstant;
import org.zstack.core.cascade.CascadeFacade;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.header.core.NopeCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.thread.ThreadFacade;
import org.zstack.core.workflow.*;
import org.zstack.header.core.Completion;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.image.ImageInventory;
import org.zstack.header.image.ImageVO;
import org.zstack.header.message.APIDeleteMessage.DeletionMode;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.MessageReply;
import org.zstack.header.storage.primary.*;
import org.zstack.header.storage.snapshot.CreateVolumeSnapshotMsg;
import org.zstack.header.storage.snapshot.CreateVolumeSnapshotReply;
import org.zstack.header.storage.snapshot.VolumeSnapshotConstant;
import org.zstack.header.storage.snapshot.VolumeSnapshotVO;
import org.zstack.header.vm.*;
import org.zstack.header.volume.*;
import org.zstack.header.volume.VolumeDeletionPolicyManager.VolumeDeletionPolicy;
import org.zstack.identity.AccountManager;
import org.zstack.tag.TagManager;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.function.ForEachFunction;
import org.zstack.utils.logging.CLogger;
import javax.persistence.TypedQuery;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.zstack.utils.CollectionDSL.list;
/**
* Created with IntelliJ IDEA.
* User: frank
* Time: 9:20 PM
* To change this template use File | Settings | File Templates.
*/
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class VolumeBase implements Volume {
private static final CLogger logger = Utils.getLogger(VolumeBase.class);
@Autowired
private CloudBus bus;
@Autowired
private DatabaseFacade dbf;
@Autowired
private ThreadFacade thdf;
@Autowired
private ErrorFacade errf;
@Autowired
private CascadeFacade casf;
@Autowired
private AccountManager acntMgr;
@Autowired
private TagManager tagMgr;
@Autowired
private PluginRegistry pluginRgty;
@Autowired
private VolumeDeletionPolicyManager deletionPolicyMgr;
private VolumeVO self;
// Wraps the given volume database record; all message handlers operate on it via `self`.
public VolumeBase(VolumeVO vo) {
    self = vo;
}
// Re-reads the volume record from the database so subsequent logic sees fresh state.
protected void refreshVO() {
    self = dbf.reload(self);
}
/**
 * Entry point for all messages targeting this volume. API messages and
 * internal messages are dispatched separately; any exception is logged with a
 * message dump and converted into an error reply by the bus.
 */
@Override
public void handleMessage(Message msg) {
    try {
        if (!(msg instanceof APIMessage)) {
            handleLocalMessage(msg);
        } else {
            handleApiMessage((APIMessage) msg);
        }
    } catch (Exception e) {
        bus.logExceptionWithMessageDump(msg, e);
        bus.replyErrorByMessageType(msg, e);
    }
}
// Dispatches internal (non-API) messages to their type-specific handlers;
// anything unrecognized is handed back to the bus.
private void handleLocalMessage(Message msg) {
    if (msg instanceof VolumeDeletionMsg) {
        handle((VolumeDeletionMsg) msg);
    } else if (msg instanceof DeleteVolumeMsg) {
        handle((DeleteVolumeMsg) msg);
    } else if (msg instanceof CreateDataVolumeTemplateFromDataVolumeMsg) {
        handle((CreateDataVolumeTemplateFromDataVolumeMsg) msg);
    } else if (msg instanceof ExpungeVolumeMsg) {
        handle((ExpungeVolumeMsg) msg);
    } else if (msg instanceof RecoverVolumeMsg) {
        handle((RecoverVolumeMsg) msg);
    } else {
        bus.dealWithUnknownMessage(msg);
    }
}
// Recovers a (delay-deleted) volume and replies once recovery completes or
// fails. recoverVolume() is defined elsewhere in this class.
private void handle(final RecoverVolumeMsg msg) {
    final RecoverVolumeReply reply = new RecoverVolumeReply();
    recoverVolume(new Completion(msg) {
        @Override
        public void success() {
            bus.reply(msg ,reply);
        }
        @Override
        public void fail(ErrorCode errorCode) {
            reply.setError(errorCode);
            bus.reply(msg ,reply);
        }
    });
}
/**
 * Permanently removes this volume. Only legal once the volume is in Deleted
 * status; otherwise an OperationFailureException is thrown. If the volume
 * lives on a primary storage, that storage is asked to delete the bits first
 * and the database record is removed only on success; volumes with no primary
 * storage are removed from the database directly.
 */
private void expunge(final Completion completion) {
    if (self.getStatus() != VolumeStatus.Deleted) {
        throw new OperationFailureException(errf.stringToOperationError(
                String.format("the volume[uuid:%s, name:%s] is not deleted yet, can't expunge it",
                        self.getUuid(), self.getName())
        ));
    }
    if (self.getPrimaryStorageUuid() != null) {
        DeleteVolumeOnPrimaryStorageMsg dmsg = new DeleteVolumeOnPrimaryStorageMsg();
        dmsg.setVolume(getSelfInventory());
        dmsg.setUuid(self.getPrimaryStorageUuid());
        bus.makeTargetServiceIdByResourceUuid(dmsg, PrimaryStorageConstant.SERVICE_ID, self.getPrimaryStorageUuid());
        bus.send(dmsg, new CloudBusCallBack(completion) {
            @Override
            public void run(MessageReply r) {
                if (!r.isSuccess()) {
                    completion.fail(r.getError());
                } else {
                    // Bits are gone from storage; now drop the database record.
                    dbf.remove(self);
                    completion.success();
                }
            }
        });
    } else {
        // Never instantiated on a primary storage; nothing to clean up there.
        dbf.remove(self);
        completion.success();
    }
}
// Expunges the volume and replies with the outcome.
// NOTE(review): the reply is an ExpungeVmReply rather than a volume-specific
// reply type — looks like a copy/paste from the VM code path; confirm what
// reply class the senders of ExpungeVolumeMsg expect before changing it.
private void handle(final ExpungeVolumeMsg msg) {
    final ExpungeVmReply reply = new ExpungeVmReply();
    expunge(new Completion(msg) {
        @Override
        public void success() {
            bus.reply(msg, reply);
        }
        @Override
        public void fail(ErrorCode errorCode) {
            reply.setError(errorCode);
            bus.reply(msg, reply);
        }
    });
}
// Creates a data-volume template on the requested backup storage by asking
// this volume's primary storage to export the volume, then relays the
// resulting install path and format back to the sender.
private void handle(final CreateDataVolumeTemplateFromDataVolumeMsg msg) {
    final CreateTemplateFromVolumeOnPrimaryStorageMsg cmsg = new CreateTemplateFromVolumeOnPrimaryStorageMsg();
    cmsg.setBackupStorageUuid(msg.getBackupStorageUuid());
    cmsg.setImageInventory(ImageInventory.valueOf(dbf.findByUuid(msg.getImageUuid(), ImageVO.class)));
    cmsg.setVolumeInventory(getSelfInventory());
    bus.makeTargetServiceIdByResourceUuid(cmsg, PrimaryStorageConstant.SERVICE_ID, self.getPrimaryStorageUuid());
    bus.send(cmsg, new CloudBusCallBack(msg) {
        @Override
        public void run(MessageReply r) {
            CreateDataVolumeTemplateFromDataVolumeReply reply = new CreateDataVolumeTemplateFromDataVolumeReply();
            if (!r.isSuccess()) {
                reply.setError(r.getError());
            } else {
                CreateTemplateFromVolumeOnPrimaryStorageReply creply = r.castReply();
                String backupStorageInstallPath = creply.getTemplateBackupStorageInstallPath();
                reply.setFormat(creply.getFormat());
                reply.setInstallPath(backupStorageInstallPath);
                reply.setMd5sum(null); // md5 is not computed here — NOTE(review): confirm consumers tolerate null
                reply.setBackupStorageUuid(msg.getBackupStorageUuid());
            }
            bus.reply(msg, reply);
        }
    });
}
// Deletes the volume (forced, optionally detaching it from its VM first,
// per the message flag) and replies with the outcome. delete() is defined
// elsewhere in this class.
private void handle(final DeleteVolumeMsg msg) {
    final DeleteVolumeReply reply = new DeleteVolumeReply();
    delete(true, msg.isDetachBeforeDeleting(), new Completion(msg) {
        @Override
        public void success() {
            logger.debug(String.format("deleted data volume[uuid: %s]", msg.getUuid()));
            bus.reply(msg, reply);
        }
        @Override
        public void fail(ErrorCode errorCode) {
            reply.setError(errorCode);
            bus.reply(msg, reply);
        }
    });
}
/**
 * Deletes this volume according to the deletion policy, serialized on the
 * volume's sync queue.
 *
 * Fix: the previous version queued a ChainTask that did no work and never
 * called chain.next() — stalling every later task on this volume's sync
 * signature — while the deletion flow itself ran outside the queue. The
 * whole deletion now runs inside the queued task and releases the chain
 * when it finishes.
 */
private void handle(final VolumeDeletionMsg msg) {
    final VolumeDeletionReply reply = new VolumeDeletionReply();
    thdf.chainSubmit(new ChainTask() {
        @Override
        public String getSyncSignature() {
            return getName();
        }

        @Override
        public void run(final SyncTaskChain chain) {
            self = dbf.reload(self);
            if (self.getStatus() == VolumeStatus.Deleted) {
                // The volume has already been deleted. We run into this case
                // because the cascading framework sends duplicate messages when
                // deleting a VM, as it has no knowledge of the volume's status.
                bus.reply(msg, reply);
                chain.next();
                return;
            }
            doDeleteVolume(msg, reply, chain);
        }

        @Override
        public String getName() {
            return String.format("delete-volume-%s", self.getUuid());
        }
    });
}

// Runs the actual deletion workflow: notifies extension points, optionally
// detaches the volume from its VM, deletes the bits (policy Direct), returns
// primary-storage capacity, updates/removes the database record, then always
// replies to msg and releases the sync chain.
private void doDeleteVolume(final VolumeDeletionMsg msg, final VolumeDeletionReply reply, final SyncTaskChain taskChain) {
    for (VolumeDeletionExtensionPoint extp : pluginRgty.getExtensionList(VolumeDeletionExtensionPoint.class)) {
        extp.preDeleteVolume(getSelfInventory());
    }
    CollectionUtils.safeForEach(pluginRgty.getExtensionList(VolumeDeletionExtensionPoint.class), new ForEachFunction<VolumeDeletionExtensionPoint>() {
        @Override
        public void run(VolumeDeletionExtensionPoint arg) {
            arg.beforeDeleteVolume(getSelfInventory());
        }
    });

    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("delete-volume-%s", self.getUuid()));
    // for NotInstantiated Volume, no flow to execute
    chain.allowEmptyFlow();
    chain.then(new ShareFlow() {
        // Policy from the message if present, otherwise the configured policy.
        VolumeDeletionPolicy deletionPolicy;

        {
            if (msg.getDeletionPolicy() == null) {
                deletionPolicy = deletionPolicyMgr.getDeletionPolicy(self.getUuid());
            } else {
                deletionPolicy = VolumeDeletionPolicy.valueOf(msg.getDeletionPolicy());
            }
        }

        @Override
        public void setup() {
            if (self.getVmInstanceUuid() != null && self.getType() == VolumeType.Data && msg.isDetachBeforeDeleting()) {
                flow(new NoRollbackFlow() {
                    String __name__ = "detach-volume-from-vm";

                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        DetachDataVolumeFromVmMsg dmsg = new DetachDataVolumeFromVmMsg();
                        dmsg.setVolume(getSelfInventory());
                        bus.makeTargetServiceIdByResourceUuid(dmsg, VmInstanceConstant.SERVICE_ID, dmsg.getVmInstanceUuid());
                        bus.send(dmsg, new CloudBusCallBack(trigger) {
                            @Override
                            public void run(MessageReply reply) {
                                // Deletion proceeds regardless of the detach outcome.
                                self.setVmInstanceUuid(null);
                                self = dbf.updateAndRefresh(self);
                                trigger.next();
                            }
                        });
                    }
                });
            }

            if (deletionPolicy == VolumeDeletionPolicy.Direct) {
                flow(new NoRollbackFlow() {
                    String __name__ = "delete-data-volume-from-primary-storage";

                    @Override
                    public void run(final FlowTrigger trigger, Map data) {
                        if (self.getStatus() == VolumeStatus.Ready) {
                            DeleteVolumeOnPrimaryStorageMsg dmsg = new DeleteVolumeOnPrimaryStorageMsg();
                            dmsg.setVolume(getSelfInventory());
                            dmsg.setUuid(self.getPrimaryStorageUuid());
                            bus.makeTargetServiceIdByResourceUuid(dmsg, PrimaryStorageConstant.SERVICE_ID, self.getPrimaryStorageUuid());
                            logger.debug(String.format("Asking primary storage[uuid:%s] to remove data volume[uuid:%s]", self.getPrimaryStorageUuid(),
                                    self.getUuid()));
                            bus.send(dmsg, new CloudBusCallBack(trigger) {
                                @Override
                                public void run(MessageReply reply) {
                                    // Best effort: log and continue even on failure.
                                    if (!reply.isSuccess()) {
                                        logger.warn(String.format("failed to delete volume[uuid:%s, name:%s], %s", self.getUuid(), self.getName(), reply.getError()));
                                    }
                                    trigger.next();
                                }
                            });
                        } else {
                            // Not instantiated on storage; nothing to delete there.
                            trigger.next();
                        }
                    }
                });
            }

            if (self.getPrimaryStorageUuid() != null) {
                flow(new NoRollbackFlow() {
                    String __name__ = "return-primary-storage-capacity";

                    @Override
                    public void run(FlowTrigger trigger, Map data) {
                        ReturnPrimaryStorageCapacityMsg rmsg = new ReturnPrimaryStorageCapacityMsg();
                        rmsg.setPrimaryStorageUuid(self.getPrimaryStorageUuid());
                        rmsg.setDiskSize(self.getSize());
                        bus.makeTargetServiceIdByResourceUuid(rmsg, PrimaryStorageConstant.SERVICE_ID, self.getPrimaryStorageUuid());
                        bus.send(rmsg);
                        trigger.next();
                    }
                });
            }

            done(new FlowDoneHandler(msg) {
                @Override
                public void handle(Map data) {
                    // Direct removes the record; Delay/Never keep it in Deleted
                    // status (for later expunge/recovery).
                    if (deletionPolicy == VolumeDeletionPolicy.Direct) {
                        dbf.remove(self);
                    } else if (deletionPolicy == VolumeDeletionPolicy.Delay) {
                        self.setStatus(VolumeStatus.Deleted);
                        self = dbf.updateAndRefresh(self);
                    } else if (deletionPolicy == VolumeDeletionPolicy.Never) {
                        self.setStatus(VolumeStatus.Deleted);
                        self = dbf.updateAndRefresh(self);
                    }
                    CollectionUtils.safeForEach(pluginRgty.getExtensionList(VolumeDeletionExtensionPoint.class), new ForEachFunction<VolumeDeletionExtensionPoint>() {
                        @Override
                        public void run(VolumeDeletionExtensionPoint arg) {
                            arg.afterDeleteVolume(getSelfInventory());
                        }
                    });
                    bus.reply(msg, reply);
                    taskChain.next();
                }
            });

            error(new FlowErrorHandler(msg) {
                @Override
                public void handle(final ErrorCode errCode, Map data) {
                    CollectionUtils.safeForEach(pluginRgty.getExtensionList(VolumeDeletionExtensionPoint.class), new ForEachFunction<VolumeDeletionExtensionPoint>() {
                        @Override
                        public void run(VolumeDeletionExtensionPoint arg) {
                            arg.failedToDeleteVolume(getSelfInventory(), errCode);
                        }
                    });
                    reply.setError(errCode);
                    bus.reply(msg, reply);
                    taskChain.next();
                }
            });
        }
    }).start();
}
/**
 * Routes an incoming API message to the handler overload matching its concrete type.
 * The explicit casts are required for static overload resolution; any message type
 * not listed here is reported back through the bus as unknown.
 */
private void handleApiMessage(APIMessage msg) {
    if (msg instanceof APIChangeVolumeStateMsg) {
        handle((APIChangeVolumeStateMsg) msg);
    } else if (msg instanceof APICreateVolumeSnapshotMsg) {
        handle((APICreateVolumeSnapshotMsg) msg);
    } else if (msg instanceof APIDeleteDataVolumeMsg) {
        handle((APIDeleteDataVolumeMsg) msg);
    } else if (msg instanceof APIDetachDataVolumeFromVmMsg) {
        handle((APIDetachDataVolumeFromVmMsg) msg);
    } else if (msg instanceof APIAttachDataVolumeToVmMsg) {
        handle((APIAttachDataVolumeToVmMsg) msg);
    } else if (msg instanceof APIGetDataVolumeAttachableVmMsg) {
        handle((APIGetDataVolumeAttachableVmMsg) msg);
    } else if (msg instanceof APIUpdateVolumeMsg) {
        handle((APIUpdateVolumeMsg) msg);
    } else if (msg instanceof APIRecoverDataVolumeMsg) {
        handle((APIRecoverDataVolumeMsg) msg);
    } else if (msg instanceof APIExpungeDataVolumeMsg) {
        handle((APIExpungeDataVolumeMsg) msg);
    } else {
        bus.dealWithUnknownMessage(msg);
    }
}
/**
 * Handles the expunge API call by delegating to {@code expunge()} (defined elsewhere
 * in this class). The event is published on both outcomes; on failure it carries
 * the error code.
 */
private void handle(APIExpungeDataVolumeMsg msg) {
    final APIExpungeDataVolumeEvent evt = new APIExpungeDataVolumeEvent(msg.getId());
    expunge(new Completion(msg) {
        @Override
        public void success() {
            bus.publish(evt);
        }
        @Override
        public void fail(ErrorCode errorCode) {
            evt.setErrorCode(errorCode);
            bus.publish(evt);
        }
    });
}
/**
 * Recovers a soft-deleted data volume: notifies extension points, flips the
 * status back (Ready if the volume was instantiated on a primary storage,
 * NotInstantiated otherwise), persists the change, and reports success.
 */
protected void recoverVolume(Completion completion) {
    final VolumeInventory inv = getSelfInventory();
    final List<RecoverDataVolumeExtensionPoint> extensions =
            pluginRgty.getExtensionList(RecoverDataVolumeExtensionPoint.class);

    // Synchronous pre-check; an extension may veto recovery by throwing.
    for (RecoverDataVolumeExtensionPoint extension : extensions) {
        extension.preRecoverDataVolume(inv);
    }

    CollectionUtils.safeForEach(extensions, new ForEachFunction<RecoverDataVolumeExtensionPoint>() {
        @Override
        public void run(RecoverDataVolumeExtensionPoint extension) {
            extension.beforeRecoverDataVolume(inv);
        }
    });

    // An install path means the volume exists on a primary storage already.
    self.setStatus(self.getInstallPath() != null ? VolumeStatus.Ready : VolumeStatus.NotInstantiated);
    self = dbf.updateAndRefresh(self);

    CollectionUtils.safeForEach(extensions, new ForEachFunction<RecoverDataVolumeExtensionPoint>() {
        @Override
        public void run(RecoverDataVolumeExtensionPoint extension) {
            extension.afterRecoverDataVolume(inv);
        }
    });

    completion.success();
}
/**
 * Handles the recover API call. The inventory is attached to the event in both
 * branches so callers always see the volume's current state.
 */
private void handle(APIRecoverDataVolumeMsg msg) {
    final APIRecoverDataVolumeEvent evt = new APIRecoverDataVolumeEvent(msg.getId());
    recoverVolume(new Completion(msg) {
        @Override
        public void success() {
            evt.setInventory(getSelfInventory());
            bus.publish(evt);
        }
        @Override
        public void fail(ErrorCode errorCode) {
            evt.setInventory(getSelfInventory());
            evt.setErrorCode(errorCode);
            bus.publish(evt);
        }
    });
}
/**
 * Updates the volume's name and/or description. Only non-null fields in the
 * message are applied; the database is touched only when something changed.
 */
private void handle(APIUpdateVolumeMsg msg) {
    boolean dirty = false;

    String newName = msg.getName();
    if (newName != null) {
        self.setName(newName);
        dirty = true;
    }

    String newDescription = msg.getDescription();
    if (newDescription != null) {
        self.setDescription(newDescription);
        dirty = true;
    }

    if (dirty) {
        self = dbf.updateAndRefresh(self);
    }

    APIUpdateVolumeEvent evt = new APIUpdateVolumeEvent(msg.getId());
    evt.setInventory(getSelfInventory());
    bus.publish(evt);
}
/**
 * Finds the VMs this data volume can be attached to, restricted to what the
 * given account may access.
 *
 * A null uuid list from the account manager means the account is unrestricted
 * (all VMs are considered); an empty list means it can access none. For a Ready
 * volume, candidate VMs must share the volume's primary storage via their
 * cluster and run a hypervisor supporting the volume's format; a NotInstantiated
 * volume only requires the VM state filter. Extension points may prune the
 * result further.
 */
@Transactional(readOnly = true)
private List<VmInstanceVO> getCandidateVmForAttaching(String accountUuid) {
    List<String> vmUuids = acntMgr.getResourceUuidsCanAccessByAccount(accountUuid, VmInstanceVO.class);
    // Empty (non-null) list: the account can access no VM at all.
    if (vmUuids != null && vmUuids.isEmpty()) {
        return new ArrayList<VmInstanceVO>();
    }
    TypedQuery<VmInstanceVO> q = null;
    String sql;
    if (vmUuids == null) {
        // all vms
        if (self.getStatus() == VolumeStatus.Ready) {
            sql = "select vm from VmInstanceVO vm, PrimaryStorageClusterRefVO ref, VolumeVO vol where vm.state in (:vmStates) and vol.uuid = :volUuid and vm.hypervisorType in (:hvTypes) and vm.clusterUuid = ref.clusterUuid and ref.primaryStorageUuid = vol.primaryStorageUuid group by vm.uuid";
            q = dbf.getEntityManager().createQuery(sql, VmInstanceVO.class);
            q.setParameter("volUuid", self.getUuid());
            List<String> hvTypes = VolumeFormat.valueOf(self.getFormat()).getHypervisorTypesSupportingThisVolumeFormatInString();
            q.setParameter("hvTypes", hvTypes);
        } else if (self.getStatus() == VolumeStatus.NotInstantiated) {
            sql = "select vm from VmInstanceVO vm where vm.state in (:vmStates) group by vm.uuid";
            q = dbf.getEntityManager().createQuery(sql, VmInstanceVO.class);
        } else {
            // NOTE(review): if Assert does not throw here, q stays null and the
            // setParameter below would NPE — presumably Assert(false, ...) throws.
            DebugUtils.Assert(false, String.format("should not reach here, volume[uuid:%s]", self.getUuid()));
        }
    } else {
        if (self.getStatus() == VolumeStatus.Ready) {
            sql = "select vm from VmInstanceVO vm, PrimaryStorageClusterRefVO ref, VolumeVO vol where vm.uuid in (:vmUuids) and vm.state in (:vmStates) and vol.uuid = :volUuid and vm.hypervisorType in (:hvTypes) and vm.clusterUuid = ref.clusterUuid and ref.primaryStorageUuid = vol.primaryStorageUuid group by vm.uuid";
            q = dbf.getEntityManager().createQuery(sql, VmInstanceVO.class);
            q.setParameter("volUuid", self.getUuid());
            List<String> hvTypes = VolumeFormat.valueOf(self.getFormat()).getHypervisorTypesSupportingThisVolumeFormatInString();
            q.setParameter("hvTypes", hvTypes);
        } else if (self.getStatus() == VolumeStatus.NotInstantiated) {
            sql = "select vm from VmInstanceVO vm where vm.uuid in (:vmUuids) and vm.state in (:vmStates) group by vm.uuid";
            q = dbf.getEntityManager().createQuery(sql, VmInstanceVO.class);
        } else {
            DebugUtils.Assert(false, String.format("should not reach here, volume[uuid:%s]", self.getUuid()));
        }
        q.setParameter("vmUuids", vmUuids);
    }
    // Only running or stopped VMs are attach candidates.
    q.setParameter("vmStates", Arrays.asList(VmInstanceState.Running, VmInstanceState.Stopped));
    List<VmInstanceVO> vms = q.getResultList();
    if (vms.isEmpty()) {
        return vms;
    }
    // Give extensions a chance to filter the candidate list.
    VolumeInventory vol = getSelfInventory();
    for (VolumeGetAttachableVmExtensionPoint ext : pluginRgty.getExtensionList(VolumeGetAttachableVmExtensionPoint.class)) {
        vms = ext.returnAttachableVms(vol, vms);
    }
    return vms;
}
/**
 * Replies with the inventories of all VMs this volume can be attached to,
 * scoped to the requesting session's account.
 */
private void handle(APIGetDataVolumeAttachableVmMsg msg) {
    APIGetDataVolumeAttachableVmReply reply = new APIGetDataVolumeAttachableVmReply();
    String accountUuid = msg.getSession().getAccountUuid();
    List<VmInstanceVO> candidates = getCandidateVmForAttaching(accountUuid);
    reply.setInventories(VmInstanceInventory.valueOf(candidates));
    bus.reply(msg, reply);
}
/**
 * Attaches this data volume to a VM. The vmInstanceUuid is set optimistically
 * before the attach message is sent and rolled back to null if the attach
 * fails; the statement order here is load-bearing.
 */
private void handle(final APIAttachDataVolumeToVmMsg msg) {
    // Optimistically claim the volume for the VM so concurrent operations see it as attached.
    self.setVmInstanceUuid(msg.getVmInstanceUuid());
    self = dbf.updateAndRefresh(self);
    AttachDataVolumeToVmMsg amsg = new AttachDataVolumeToVmMsg();
    amsg.setVolume(getSelfInventory());
    amsg.setVmInstanceUuid(msg.getVmInstanceUuid());
    bus.makeTargetServiceIdByResourceUuid(amsg, VmInstanceConstant.SERVICE_ID, amsg.getVmInstanceUuid());
    bus.send(amsg, new CloudBusCallBack(msg) {
        @Override
        public void run(MessageReply reply) {
            final APIAttachDataVolumeToVmEvent evt = new APIAttachDataVolumeToVmEvent(msg.getId());
            // Reload: the row may have changed while the attach was in flight.
            self = dbf.reload(self);
            if (reply.isSuccess()) {
                AttachDataVolumeToVmReply ar = reply.castReply();
                self.setVmInstanceUuid(msg.getVmInstanceUuid());
                // The volume format follows the master hypervisor type of the VM it attached to.
                self.setFormat(VolumeFormat.getVolumeFormatByMasterHypervisorType(ar.getHypervisorType()).toString());
                self = dbf.updateAndRefresh(self);
                evt.setInventory(getSelfInventory());
            } else {
                // Roll back the optimistic claim.
                self.setVmInstanceUuid(null);
                dbf.update(self);
                evt.setErrorCode(reply.getError());
            }
            bus.publish(evt);
        }
    });
}
/**
 * Detaches this data volume from its VM. On success the vm reference and the
 * device id are cleared; on failure the volume record is left untouched.
 */
private void handle(final APIDetachDataVolumeFromVmMsg msg) {
    DetachDataVolumeFromVmMsg dmsg = new DetachDataVolumeFromVmMsg();
    dmsg.setVolume(getSelfInventory());
    // The target VM uuid comes from the volume inventory set above.
    bus.makeTargetServiceIdByResourceUuid(dmsg, VmInstanceConstant.SERVICE_ID, dmsg.getVmInstanceUuid());
    bus.send(dmsg, new CloudBusCallBack(msg) {
        @Override
        public void run(MessageReply reply) {
            APIDetachDataVolumeFromVmEvent evt = new APIDetachDataVolumeFromVmEvent(msg.getId());
            if (reply.isSuccess()) {
                self.setVmInstanceUuid(null);
                self.setDeviceId(null);
                self = dbf.updateAndRefresh(self);
                evt.setInventory(getSelfInventory());
            } else {
                evt.setErrorCode(reply.getError());
            }
            bus.publish(evt);
        }
    });
}
/** Converts the current database record ({@code self}) into its inventory (DTO) form. */
protected VolumeInventory getSelfInventory() {
    return VolumeInventory.valueOf(self);
}
/**
 * Deletes the volume, detaching it from its VM first by default.
 *
 * @param forceDelete when true the cascade deletion-check step is skipped
 */
private void delete(boolean forceDelete, final Completion completion) {
    delete(forceDelete, true, completion);
}
/**
 * Deletes the volume through the cascade framework.
 *
 * Graceful deletion runs a deletion-check flow before the delete flow; force
 * deletion runs the force-delete flow directly. On success a fire-and-forget
 * cleanup cascade is issued before signaling the completion.
 *
 * @param forceDelete          skip the cascade deletion check when true
 * @param detachBeforeDeleting detach from the owning VM before deleting
 */
private void delete(boolean forceDelete, boolean detachBeforeDeleting, final Completion completion) {
    final String issuer = VolumeVO.class.getSimpleName();
    VolumeDeletionStruct struct = new VolumeDeletionStruct(getSelfInventory());
    struct.setDetachBeforeDeleting(detachBeforeDeleting);
    final List<VolumeDeletionStruct> ctx = list(struct);
    FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
    // Plain literal; the original used a no-argument String.format() here.
    chain.setName("delete-data-volume");
    if (!forceDelete) {
        chain.then(makeCascadeFlow(CascadeConstant.DELETION_CHECK_CODE, issuer, ctx))
             .then(makeCascadeFlow(CascadeConstant.DELETION_DELETE_CODE, issuer, ctx));
    } else {
        chain.then(makeCascadeFlow(CascadeConstant.DELETION_FORCE_DELETE_CODE, issuer, ctx));
    }
    chain.done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            // Cleanup is best-effort: a NopeCompletion means we do not wait for it.
            casf.asyncCascadeFull(CascadeConstant.DELETION_CLEANUP_CODE, issuer, ctx, new NopeCompletion());
            completion.success();
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            completion.fail(errCode);
        }
    }).start();
}

/**
 * Builds a no-rollback flow that runs one cascade action and maps its
 * success/failure onto the flow trigger. Extracted from three previously
 * duplicated anonymous flows in {@code delete}.
 */
private NoRollbackFlow makeCascadeFlow(final String cascadeCode, final String issuer, final List<VolumeDeletionStruct> ctx) {
    return new NoRollbackFlow() {
        @Override
        public void run(final FlowTrigger trigger, Map data) {
            casf.asyncCascade(cascadeCode, issuer, ctx, new Completion(trigger) {
                @Override
                public void success() {
                    trigger.next();
                }
                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.fail(errorCode);
                }
            });
        }
    };
}
/**
 * Handles the delete API call. Enforcing deletion mode maps to a force delete
 * (cascade check skipped); failures are wrapped in DELETE_RESOURCE_ERROR.
 */
private void handle(APIDeleteDataVolumeMsg msg) {
    final APIDeleteDataVolumeEvent evt = new APIDeleteDataVolumeEvent(msg.getId());
    delete(msg.getDeletionMode() == DeletionMode.Enforcing, new Completion(msg) {
        @Override
        public void success() {
            bus.publish(evt);
        }
        @Override
        public void fail(ErrorCode errorCode) {
            evt.setErrorCode(errf.instantiateErrorCode(SysErrors.DELETE_RESOURCE_ERROR, errorCode));
            bus.publish(evt);
        }
    });
}
/**
 * Creates a snapshot of this volume. Requests are serialized per volume via
 * the task chain (sync signature keyed by volume uuid) so concurrent snapshot
 * operations on the same volume cannot interleave.
 */
private void handle(final APICreateVolumeSnapshotMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return String.format("create-snapshot-for-volume-%s", self.getUuid());
        }

        @Override
        public void run(final SyncTaskChain chain) {
            CreateVolumeSnapshotMsg cmsg = new CreateVolumeSnapshotMsg();
            cmsg.setName(msg.getName());
            cmsg.setDescription(msg.getDescription());
            cmsg.setResourceUuid(msg.getResourceUuid());
            cmsg.setAccountUuid(msg.getSession().getAccountUuid());
            cmsg.setVolumeUuid(msg.getVolumeUuid());
            bus.makeLocalServiceId(cmsg, VolumeSnapshotConstant.SERVICE_ID);
            bus.send(cmsg, new CloudBusCallBack(chain) {
                @Override
                public void run(MessageReply reply) {
                    APICreateVolumeSnapshotEvent evt = new APICreateVolumeSnapshotEvent(msg.getId());
                    if (reply.isSuccess()) {
                        // castReply() for consistency with the attach handler's style.
                        CreateVolumeSnapshotReply creply = reply.castReply();
                        evt.setInventory(creply.getInventory());
                        // Propagate user-defined tags from the API message to the new snapshot.
                        tagMgr.createTagsFromAPICreateMessage(msg, creply.getInventory().getUuid(), VolumeSnapshotVO.class.getSimpleName());
                    } else {
                        evt.setErrorCode(reply.getError());
                    }
                    bus.publish(evt);
                    // Always release the chain so queued snapshot tasks can run.
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            // Keep the task name in lock-step with the sync signature
            // (previously a duplicated format string).
            return getSyncSignature();
        }
    });
}
/**
 * Enables or disables the volume according to the requested state event,
 * persists the change, and publishes the resulting inventory.
 */
private void handle(APIChangeVolumeStateMsg msg) {
    VolumeStateEvent stateEvent = VolumeStateEvent.valueOf(msg.getStateEvent());
    VolumeState targetState = stateEvent == VolumeStateEvent.enable
            ? VolumeState.Enabled
            : VolumeState.Disabled;
    self.setState(targetState);
    self = dbf.updateAndRefresh(self);

    APIChangeVolumeStateEvent evt = new APIChangeVolumeStateEvent(msg.getId());
    evt.setInventory(VolumeInventory.valueOf(self));
    bus.publish(evt);
}
}
| |
package org.indiarose.backend.activity;
/*
* This work is licensed under the Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License.
* To view a copy of this license, visit http://creativecommons.org/licenses/by-nc-sa/4.0/.
*/
import org.indiarose.R;
import org.indiarose.backend.view.element.EnableDisableChoice;
import org.indiarose.backend.view.element.NumberSlider;
import org.indiarose.lib.AppData;
import org.indiarose.lib.model.Settings;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.WindowManager;
/**
 * Full-screen settings menu of the application. Each entry navigates to a
 * dedicated settings activity; "reinitialisation" resets all settings after a
 * confirmation dialog. (Method names ajouterVues/ajouterListeners are French:
 * "add views"/"add listeners"; kept for interface compatibility.)
 */
public class AppSettings extends Activity implements View.OnClickListener
{
    View backgroundColor;
    View indiagramProperty;
    View lecture;
    View reinitialisation;
    View previous;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        this.setContentView(R.layout.activity_app_settings);
        AppData.current_activity = this;
        // Hide the status bar: this screen is full-screen.
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        ajouterVues();
        ajouterListeners();
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.app_settings_background_properties:
                launchBackgroundColor();
                break;
            case R.id.app_settings_indiagram_properties:
                launchIndiagramProperty();
                break;
            case R.id.app_settings_lecture_properties:
                launchListenProperty();
                break;
            case R.id.app_settings_reinitialisation:
                openResetSettingsDialog();
                break;
            case R.id.app_settings_previous:
                onBackPressed();
                break;
        }
    }

    @Override
    public void onBackPressed()
    {
        // BACK returns to the home screen rather than popping the task stack.
        navigate(Home.class);
    }

    /** Looks up the clickable entries of the layout. */
    protected void ajouterVues() {
        backgroundColor = findViewById(R.id.app_settings_background_properties);
        indiagramProperty = findViewById(R.id.app_settings_indiagram_properties);
        lecture = findViewById(R.id.app_settings_lecture_properties);
        reinitialisation = findViewById(R.id.app_settings_reinitialisation);
        previous = findViewById(R.id.app_settings_previous);
    }

    /** Registers this activity as the click listener for every entry. */
    protected void ajouterListeners() {
        backgroundColor.setOnClickListener(this);
        indiagramProperty.setOnClickListener(this);
        lecture.setOnClickListener(this);
        reinitialisation.setOnClickListener(this);
        previous.setOnClickListener(this);
    }

    protected void launchBackgroundColor() {
        navigate(BackgroundColor.class);
    }

    protected void launchIndiagramProperty() {
        navigate(IndiaProperty.class);
    }

    protected void launchListenProperty() {
        navigate(ListenProperty.class);
    }

    /**
     * Starts {@code target} and finishes this activity so BACK does not return
     * here. Extracted from four previously duplicated Intent sequences.
     */
    private void navigate(Class<?> target) {
        Intent i = new Intent(this, target);
        startActivity(i);
        finish();
    }

    /**
     * Asks for confirmation, then resets all application settings to their
     * defaults and persists the change via {@code AppData.settingsChanged()}.
     */
    protected void openResetSettingsDialog() {
        AlertDialog.Builder adb = new AlertDialog.Builder(this);
        adb.setTitle(R.string.resetSettingsQuestion);
        adb.setIcon(android.R.drawable.ic_dialog_alert);
        adb.setPositiveButton(R.string.okText, new DialogInterface.OnClickListener()
        {
            public void onClick(DialogInterface _dialog, int _which)
            {
                AppData.settings = new Settings();
                AppData.settingsChanged();
            }
        });
        adb.setNegativeButton(R.string.cancelText, new DialogInterface.OnClickListener()
        {
            public void onClick(DialogInterface _dialog, int _which)
            {
                _dialog.dismiss();
            }
        });
        adb.setOnCancelListener(new DialogInterface.OnCancelListener()
        {
            public void onCancel(DialogInterface _dialog)
            {
                _dialog.dismiss();
            }
        });
        adb.show();
    }
}
| |
/*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.materials.PackageMaterialConfig;
import com.thoughtworks.go.config.materials.PluggableSCMMaterialConfig;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterialConfig;
import com.thoughtworks.go.config.materials.git.GitMaterialConfig;
import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry;
import com.thoughtworks.go.domain.config.*;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.helper.GoConfigMother;
import com.thoughtworks.go.helper.StageConfigMother;
import com.thoughtworks.go.metrics.service.MetricsProbeService;
import com.thoughtworks.go.presentation.TriStateSelection;
import com.thoughtworks.go.security.GoCipher;
import com.thoughtworks.go.server.dao.DatabaseAccessHelper;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.service.ConfigRepository;
import com.thoughtworks.go.util.GoConfigFileHelper;
import com.thoughtworks.go.util.GoConstants;
import com.thoughtworks.go.util.SystemEnvironment;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.io.ByteArrayOutputStream;
import java.util.Arrays;
import java.util.UUID;
import static com.thoughtworks.go.util.TestUtils.contains;
import static junit.framework.TestCase.assertFalse;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:WEB-INF/applicationContext-global.xml",
"classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
"classpath:WEB-INF/applicationContext-acegi-security.xml"
})
public class PipelineConfigServiceIntegrationTest {
// Disable JS compression once for the whole JVM, before the Spring context spins up.
static {
    new SystemEnvironment().setProperty(GoConstants.USE_COMPRESSED_JAVASCRIPT, "false");
}
// --- Spring-wired collaborators ---
@Autowired
private PipelineConfigService pipelineConfigService;
@Autowired
private GoConfigService goConfigService;
@Autowired
private GoConfigDao goConfigDao;
@Autowired
private DatabaseAccessHelper dbHelper;
@Autowired
private ConfigRepository configRepository;
@Autowired
private ConfigCache configCache;
@Autowired
private ConfigElementImplementationRegistry registry;
@Autowired
private MetricsProbeService metricsProbeService;
// --- Per-test fixture state (populated in setup()) ---
private GoConfigFileHelper configHelper;
private PipelineConfig pipelineConfig;
private Username user;
// Config-repo HEAD captured before each test; used to assert commit (non-)creation.
private String headCommitBeforeUpdate;
private HttpLocalizedOperationResult result;
private String groupName = "jumbo";
/**
 * Boots a real config DAO + config repository, creates a pipeline in group
 * "jumbo", grants the "current" user system-admin rights, and captures the
 * config-repo HEAD so tests can assert whether an update produced a commit.
 */
@Before
public void setup() throws Exception {
    configHelper = new GoConfigFileHelper();
    dbHelper.onSetUp();
    configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
    configHelper.onSetUp();
    goConfigService.forceNotifyListeners();
    user = new Username(new CaseInsensitiveString("current"));
    // Random pipeline name avoids collisions across test runs.
    pipelineConfig = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), new GitMaterialConfig("FOO"));
    goConfigService.addPipeline(pipelineConfig, groupName);
    GoCipher goCipher = new GoCipher();
    goConfigService.updateServerConfig(new MailHost(goCipher), new LdapConfig(goCipher), new PasswordFileConfig("path"), false, goConfigService.configFileMd5(), "artifacts", null, null, "0", null, null, "foo");
    // Make "current" a GO_SYSTEM_ADMIN so it may edit pipelines.
    UpdateConfigCommand command = goConfigService.modifyAdminPrivilegesCommand(Arrays.asList(user.getUsername().toString()), new TriStateSelection(Admin.GO_SYSTEM_ADMIN, TriStateSelection.Action.add));
    goConfigService.updateConfig(command);
    result = new HttpLocalizedOperationResult();
    headCommitBeforeUpdate = configRepository.getCurrentRevCommit().name();
}
/** Tears down config helper first, then the database fixture (reverse of setup order). */
@After
public void tearDown() throws Exception {
    configHelper.onTearDown();
    dbHelper.onTearDown();
}
// Creating a pipeline in an existing group should persist it and commit to the
// config repository with matching MD5s for config and config-for-edit.
@Test
public void shouldCreatePipelineConfigWhenPipelineGroupExists() throws GitAPIException {
    GoConfigHolder goConfigHolderBeforeUpdate = goConfigDao.loadConfigHolder();
    pipelineConfig = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), new GitMaterialConfig("FOO"));
    pipelineConfigService.createPipelineConfig(user, pipelineConfig, result, groupName);
    assertThat(result.toString(), result.isSuccessful(), is(true));
    assertThat(goConfigDao.loadConfigHolder(), is(not(goConfigHolderBeforeUpdate)));
    PipelineConfig savedPipelineConfig = goConfigDao.loadForEditing().getPipelineConfigByName(pipelineConfig.name());
    assertThat(savedPipelineConfig, is(pipelineConfig));
    // A new commit attributed to the acting user must exist...
    assertThat(configRepository.getCurrentRevCommit().name(), is(not(headCommitBeforeUpdate)));
    assertThat(configRepository.getCurrentRevision().getUsername(), is(user.getDisplayName()));
    // ...whose MD5 matches both the runtime config and the config-for-edit.
    assertThat(configRepository.getCurrentRevision().getMd5(), is(not(goConfigHolderBeforeUpdate.config.getMd5())));
    assertThat(configRepository.getCurrentRevision().getMd5(), is(goConfigDao.loadConfigHolder().config.getMd5()));
    assertThat(configRepository.getCurrentRevision().getMd5(), is(goConfigDao.loadConfigHolder().configForEdit.getMd5()));
}
// Renamed from "...WhenPipelineGroupDoesExist": the test targets the absent
// group "does-not-exist", expecting creation to implicitly create the group.
// JUnit discovers tests reflectively, so the rename has no callers to break.
@Test
public void shouldCreatePipelineConfigWhenPipelineGroupDoesNotExist() throws GitAPIException {
    GoConfigHolder goConfigHolderBeforeUpdate = goConfigDao.loadConfigHolder();
    PipelineConfig downstream = GoConfigMother.createPipelineConfigWithMaterialConfig(UUID.randomUUID().toString(), new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
    pipelineConfigService.createPipelineConfig(user, downstream, result, "does-not-exist");
    assertThat(result.toString(), result.isSuccessful(), is(true));
    assertThat(goConfigDao.loadConfigHolder(), is(not(goConfigHolderBeforeUpdate)));
    PipelineConfig savedPipelineConfig = goConfigDao.loadForEditing().getPipelineConfigByName(downstream.name());
    assertThat(savedPipelineConfig, is(downstream));
    assertThat(configRepository.getCurrentRevCommit().name(), is(not(headCommitBeforeUpdate)));
    assertThat(configRepository.getCurrentRevision().getUsername(), is(user.getDisplayName()));
}
// A duplicate pipeline name must be rejected with 422 and leave both the config
// and the config repository untouched.
@Test
public void shouldNotCreatePipelineConfigWhenAPipelineBySameNameAlreadyExists() throws GitAPIException {
    GoConfigHolder goConfigHolderBeforeUpdate = goConfigDao.loadConfigHolder();
    // Lower-cased name: pipeline names are case-insensitive, so this still collides.
    PipelineConfig pipelineBeingCreated = GoConfigMother.createPipelineConfigWithMaterialConfig(pipelineConfig.name().toLower(), new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
    pipelineConfigService.createPipelineConfig(user, pipelineBeingCreated, result, groupName);
    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    assertFalse(pipelineBeingCreated.errors().isEmpty());
    assertThat(pipelineBeingCreated.errors().on(PipelineConfig.NAME), is(String.format("You have defined multiple pipelines named '%s'. Pipeline names must be unique.", pipelineConfig.name())));
    assertThat(goConfigDao.loadConfigHolder(), is(goConfigHolderBeforeUpdate));
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
}
// An invalid group name must be rejected with 422, the error mapped onto the
// pipeline's GROUP field, and no config/commit changes made.
@Test
public void shouldNotCreatePipelineConfigWhenInvalidGroupNameIsPassed() throws GitAPIException {
    GoConfigHolder goConfigHolderBeforeUpdate = goConfigDao.loadConfigHolder();
    PipelineConfig pipelineBeingCreated = GoConfigMother.createPipelineConfigWithMaterialConfig(pipelineConfig.name().toLower(), new DependencyMaterialConfig(pipelineConfig.name(), pipelineConfig.first().name()));
    pipelineConfigService.createPipelineConfig(user, pipelineBeingCreated, result, "%$-with-invalid-characters");
    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.httpCode(), is(422));
    assertFalse(pipelineBeingCreated.errors().isEmpty());
    assertThat(pipelineBeingCreated.errors().on(PipelineConfigs.GROUP), contains("Invalid group name '%$-with-invalid-characters'"));
    assertThat(goConfigDao.loadConfigHolder(), is(goConfigHolderBeforeUpdate));
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
}
// Adding a stage through updatePipelineConfig should persist the stage and
// commit to the config repository attributed to the acting user.
@Test
public void shouldUpdatePipelineConfig() throws GitAPIException {
    GoConfigHolder goConfigHolderBeforeUpdate = goConfigDao.loadConfigHolder();
    pipelineConfig.add(new StageConfig(new CaseInsensitiveString("additional_stage"), new JobConfigs(new JobConfig(new CaseInsensitiveString("addtn_job")))));
    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, result);
    assertThat(result.toString(), result.isSuccessful(), is(true));
    assertThat(goConfigDao.loadConfigHolder(), is(not(goConfigHolderBeforeUpdate)));
    StageConfig newlyAddedStage = goConfigDao.loadForEditing().getPipelineConfigByName(pipelineConfig.name()).getStage(new CaseInsensitiveString("additional_stage"));
    assertThat(newlyAddedStage, is(not(nullValue())));
    assertThat(newlyAddedStage.getJobs().isEmpty(), is(false));
    assertThat(newlyAddedStage.getJobs().first().name().toString(), is("addtn_job"));
    assertThat(configRepository.getCurrentRevCommit().name(), is(not(headCommitBeforeUpdate)));
    assertThat(configRepository.getCurrentRevision().getUsername(), is(user.getDisplayName()));
}
// A validation failure (bad label template) must leave both the commit and the
// in-memory config holders exactly as they were.
@Test
public void shouldNotUpdatePipelineConfigInCaseOfValidationErrors() throws GitAPIException {
    GoConfigHolder goConfigHolder = goConfigDao.loadConfigHolder();
    // "LABEL" lacks a count/variable token, which makes the template invalid.
    pipelineConfig.setLabelTemplate("LABEL");
    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, result);
    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(pipelineConfig.errors().on(PipelineConfig.LABEL_TEMPLATE), contains("Invalid label"));
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
    assertThat(goConfigDao.loadConfigHolder().configForEdit, is(goConfigHolder.configForEdit));
    assertThat(goConfigDao.loadConfigHolder().config, is(goConfigHolder.config));
}
// Referencing a template whose parameter the pipeline does not define must fail
// during preprocessing and leave config and repository untouched.
@Test
public void shouldNotUpdatePipelineWhenPreprocessingFails() throws Exception {
    CaseInsensitiveString templateName = new CaseInsensitiveString("template_with_param");
    saveTemplateWithParamToConfig(templateName);
    GoConfigHolder goConfigHolder = goConfigDao.loadConfigHolder();
    // Clear local stages, then point at the template without supplying SOME_PARAM.
    pipelineConfig.clear();
    pipelineConfig.setTemplateName(templateName);
    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, result);
    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.toString(), result.toString().contains("Parameter 'SOME_PARAM' is not defined"), is(true));
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
    assertThat(goConfigDao.loadConfigHolder().configForEdit, is(goConfigHolder.configForEdit));
    assertThat(goConfigDao.loadConfigHolder().config, is(goConfigHolder.config));
}
// A pipeline may reference a template OR define local stages, never both;
// the update must fail with errors on both the "stages" and "template" fields.
@Test
public void shouldNotUpdatePipelineWhenPipelineIsAssociatedWithTemplateAsWellAsHasStagesDefinedLocally() throws Exception {
    CaseInsensitiveString templateName = new CaseInsensitiveString("template_with_param");
    saveTemplateWithParamToConfig(templateName);
    GoConfigHolder goConfigHolder = goConfigDao.loadConfigHolder();
    pipelineConfig.clear();
    pipelineConfig.setTemplateName(templateName);
    // Bypass the validity assertion so the invalid combination reaches the service.
    pipelineConfig.addStageWithoutValidityAssertion(StageConfigMother.stageConfig("local-stage"));
    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, result);
    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(pipelineConfig.errors().on("stages"), is(String.format("Cannot add stages to pipeline '%s' which already references template '%s'", pipelineConfig.name(), templateName)));
    assertThat(pipelineConfig.errors().on("template"), is(String.format("Cannot set template '%s' on pipeline '%s' because it already has stages defined", templateName, pipelineConfig.name())));
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
    assertThat(goConfigDao.loadConfigHolder().configForEdit, is(goConfigHolder.configForEdit));
    assertThat(goConfigDao.loadConfigHolder().config, is(goConfigHolder.config));
}
// An unauthorized user must get a 401 and cause no change to the config or the
// config repository.
@Test
public void shouldCheckForUserPermissionBeforeUpdatingPipelineConfig() throws Exception {
    CaseInsensitiveString templateName = new CaseInsensitiveString("template_with_param");
    saveTemplateWithParamToConfig(templateName);
    GoConfigHolder goConfigHolderBeforeUpdate = goConfigDao.loadConfigHolder();
    pipelineConfigService.updatePipelineConfig(new Username(new CaseInsensitiveString("unauthorized_user")), pipelineConfig, result);
    assertThat(result.toString(), result.isSuccessful(), is(false));
    assertThat(result.toString(), result.httpCode(), is(401));
    assertThat(result.toString(), result.toString().contains("UNAUTHORIZED_TO_EDIT_PIPELINE"), is(true));
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
    assertThat(goConfigDao.loadConfigHolder().configForEdit, is(goConfigHolderBeforeUpdate.configForEdit));
    assertThat(goConfigDao.loadConfigHolder().config, is(goConfigHolderBeforeUpdate.config));
}
@Test
public void shouldMapErrorsBackToScmMaterials() throws Exception {
    // Snapshot the config before the attempted update so we can assert it was untouched.
    GoConfigHolder originalHolder = goConfigDao.loadConfigHolder();
    String scmId = "scmid";
    saveScmMaterialToConfig(scmId);
    PluggableSCMMaterialConfig scmMaterial = new PluggableSCMMaterialConfig(scmId);
    pipelineConfig.materialConfigs().add(scmMaterial);

    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, result);

    assertThat(result.toString(), result.isSuccessful(), is(false));
    // Validation errors should land on the material config itself.
    assertThat(scmMaterial.errors().on(PluggableSCMMaterialConfig.FOLDER), is("Destination directory is required when specifying multiple scm materials"));
    assertThat(scmMaterial.errors().on(PluggableSCMMaterialConfig.SCM_ID), is("Could not find plugin for scm-id: [scmid]."));
    // The failed update must leave both the repo head and the loaded config unchanged.
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
    assertThat(goConfigDao.loadConfigHolder().configForEdit, is(originalHolder.configForEdit));
    assertThat(goConfigDao.loadConfigHolder().config, is(originalHolder.config));
}
@Test
public void shouldMapErrorsBackToPackageMaterials() throws Exception {
    // Snapshot the config so we can assert nothing changed after the failed update.
    GoConfigHolder goConfigHolder = goConfigDao.loadConfigHolder();
    String packageid = "packageid";
    // NOTE(review): this saves an SCM material (not a package repository) under the given id —
    // presumably intentional so no package repository exists for the id, making the update fail;
    // confirm this is the intended setup.
    saveScmMaterialToConfig(packageid);
    PackageMaterialConfig packageMaterialConfig = new PackageMaterialConfig(packageid);
    pipelineConfig.materialConfigs().add(packageMaterialConfig);
    pipelineConfigService.updatePipelineConfig(user, pipelineConfig, result);
    assertThat(result.toString(), result.isSuccessful(), is(false));
    // The missing-repository error should be attached to the material config itself.
    assertThat(packageMaterialConfig.errors().on(PackageMaterialConfig.PACKAGE_ID), is("Could not find repository for given package id:[packageid]"));
    // The failed update must leave both the repo head and the loaded config unchanged.
    assertThat(configRepository.getCurrentRevCommit().name(), is(headCommitBeforeUpdate));
    assertThat(goConfigDao.loadConfigHolder().configForEdit, is(goConfigHolder.configForEdit));
    assertThat(goConfigDao.loadConfigHolder().config, is(goConfigHolder.config));
}
// Adds a pipeline template (one stage, one job) whose job references the #{SOME_PARAM}
// parameter via an environment variable, then saves the config.
private void saveTemplateWithParamToConfig(CaseInsensitiveString templateName) throws Exception {
    ExecTask lsTask = new ExecTask();
    lsTask.setCommand("ls");
    JobConfig job = new JobConfig(new CaseInsensitiveString("job"));
    job.addTask(lsTask);
    job.addVariable("ENV_VAR", "#{SOME_PARAM}");
    PipelineTemplateConfig template = new PipelineTemplateConfig(templateName, new StageConfig(new CaseInsensitiveString("stage"), new JobConfigs(job)));
    CruiseConfig configForEdit = goConfigDao.loadConfigHolder().configForEdit;
    configForEdit.addTemplate(template);
    saveConfig(configForEdit);
}
// Registers an SCM entry (id doubles as the plugin id and the SCM name) with a single
// key/value configuration property, then saves the config.
private void saveScmMaterialToConfig(String id) throws Exception {
    Configuration configuration = new Configuration(new ConfigurationProperty(new ConfigurationKey("key"), new ConfigurationValue("value")));
    SCM scm = new SCM(id, new PluginConfiguration(id, "1.0"), configuration);
    scm.setName(id);
    CruiseConfig configForEdit = goConfigDao.loadConfigHolder().configForEdit;
    configForEdit.getSCMs().add(scm);
    saveConfig(configForEdit);
}
// Serializes the given config with the XML writer.
// NOTE(review): the serialized bytes are written to an in-memory buffer and then discarded —
// nothing is persisted here. Presumably write() is invoked for its validation side effect;
// confirm this is intended and not a missing persistence step.
private void saveConfig(CruiseConfig cruiseConfig) throws Exception {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    new MagicalGoConfigXmlWriter(configCache, registry, metricsProbeService).write(cruiseConfig, buffer, false);
}
}
| |
/*
* Copyright (C) 2018 The DNA Authors
* This file is part of The DNA library.
*
* The DNA is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The DNA is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with The DNA. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.github.DNAProject.network.rest;
import java.io.IOException;
import com.github.DNAProject.common.ErrorCode;
import com.github.DNAProject.common.Helper;
import com.github.DNAProject.core.block.Block;
import com.github.DNAProject.io.Serializable;
import com.github.DNAProject.network.connect.AbstractConnector;
import com.github.DNAProject.network.exception.ConnectorException;
import com.github.DNAProject.core.transaction.Transaction;
import com.alibaba.fastjson.JSON;
import com.github.DNAProject.network.exception.RestfulException;
/**
 * {@link AbstractConnector} implementation that talks to a DNA node over its RESTful
 * HTTP interface.
 *
 * Every call follows the same protocol: issue the request through {@link Interfaces},
 * parse the JSON body into a {@link Result}, and either return the payload (when
 * {@code Error == 0}) or throw a {@link RestfulException} carrying the serialized
 * error result. That shared protocol is factored into {@link #parse(String)}, which
 * also makes the previously inconsistent success/error check styles uniform.
 */
public class RestClient extends AbstractConnector {

    // Low-level HTTP wrapper around the node's REST endpoints.
    private Interfaces api;
    // API version and action name sent with "send raw transaction" requests.
    private String version = "v1.0.0", action = "sendrawtransaction";

    public RestClient(String restUrl) {
        api = new Interfaces(restUrl);
    }

    @Override
    public String getUrl() {
        return api.getUrl();
    }

    /**
     * Parses a raw REST response and enforces the common success check.
     *
     * @param rs JSON response body returned by the node
     * @return the parsed {@link Result} (its {@code Error} field is guaranteed to be 0)
     * @throws RestfulException if the node reported a non-zero error code
     */
    private Result parse(String rs) throws RestfulException {
        Result rr = JSON.parseObject(rs, Result.class);
        if (rr.Error != 0) {
            throw new RestfulException(to(rr));
        }
        return rr;
    }

    @Override
    public String sendRawTransaction(String hexData) throws RestfulException {
        // Non-pre-exec send with no user id; the node's full response is returned on success.
        String rs = api.sendTransaction(false, null, action, version, hexData);
        parse(rs);
        return rs;
    }

    @Override
    public String sendRawTransaction(boolean preExec, String userid, String hexData) throws RestfulException {
        String rs = api.sendTransaction(preExec, userid, action, version, hexData);
        parse(rs);
        return rs;
    }

    @Override
    public Transaction getRawTransaction(String txhash) throws RestfulException {
        Result rr = parse(api.getTransaction(txhash, true));
        try {
            // The result payload is the hex-serialized transaction; decode and deserialize it.
            return Transaction.deserializeFrom(Helper.hexToBytes((String) rr.Result));
        } catch (IOException e) {
            throw new RestfulException(ErrorCode.TxDeserializeError, e);
        }
    }

    @Override
    public int getNodeCount() throws RestfulException {
        return (int) parse(api.getNodeCount()).Result;
    }

    @Override
    public int getBlockHeight() throws RestfulException {
        return (int) parse(api.getBlockHeight()).Result;
    }

    @Override
    public Block getBlock(int height) throws RestfulException {
        // "1" requests the raw (hex-serialized) block representation.
        Result rr = parse(api.getBlock(height, "1"));
        try {
            return Serializable.from(Helper.hexToBytes((String) rr.Result), Block.class);
        } catch (InstantiationException | IllegalAccessException e) {
            throw new RestfulException(ErrorCode.BlockDeserializeError, e);
        }
    }

    @Override
    public Block getBlock(String hash) throws RestfulException {
        Result rr = parse(api.getBlock(hash, "1"));
        try {
            return Serializable.from(Helper.hexToBytes((String) rr.Result), Block.class);
        } catch (InstantiationException | IllegalAccessException e) {
            throw new RestfulException(ErrorCode.BlockDeserializeError, e);
        }
    }

    @Override
    public Object getBalance(String address) throws RestfulException {
        return parse(api.getBalance(address)).Result;
    }

    @Override
    public Object getRawTransactionJson(String txhash) throws RestfulException, IOException {
        Result rr = parse(api.getTransaction(txhash, true));
        // Decode the hex transaction and re-serialize its JSON form.
        return JSON.toJSONString(Transaction.deserializeFrom(Helper.hexToBytes((String) rr.Result)).json());
    }

    @Override
    public Object getBlockJson(int height) throws RestfulException {
        // "0" requests the JSON block representation.
        return parse(api.getBlock(height, "0")).Result;
    }

    @Override
    public Object getBlockJson(String hash) throws RestfulException {
        return parse(api.getBlock(hash, "0")).Result;
    }

    @Override
    public Object getContract(String hash) throws RestfulException {
        return parse(api.getContract(hash)).Result;
    }

    @Override
    public Object getContractJson(String hash) throws RestfulException {
        return parse(api.getContractJson(hash)).Result;
    }

    @Override
    public Object getSmartCodeEvent(int height) throws ConnectorException, IOException {
        return parse(api.getSmartCodeEvent(height)).Result;
    }

    @Override
    public Object getSmartCodeEvent(String hash) throws ConnectorException, IOException {
        return parse(api.getSmartCodeEvent(hash)).Result;
    }

    @Override
    public int getBlockHeightByTxHash(String hash) throws ConnectorException, IOException {
        return (int) parse(api.getBlockHeightByTxHash(hash)).Result;
    }

    @Override
    public String getStorage(String codehash, String key) throws ConnectorException, IOException {
        return (String) parse(api.getStorage(codehash, key)).Result;
    }

    @Override
    public Object getMerkleProof(String hash) throws ConnectorException, IOException {
        return parse(api.getMerkleProof(hash)).Result;
    }

    @Override
    public String getAllowance(String asset, String from, String to) throws ConnectorException, IOException {
        return (String) parse(api.getAllowance(asset, from, to)).Result;
    }

    @Override
    public Object getMemPoolTxCount() throws ConnectorException, IOException {
        return parse(api.getMemPoolTxCount()).Result;
    }

    @Override
    public Object getMemPoolTxState(String hash) throws ConnectorException, IOException {
        return parse(api.getMemPoolTxState(hash)).Result;
    }

    @Override
    public String getVersion() throws ConnectorException, IOException {
        return (String) parse(api.getVersion()).Result;
    }

    @Override
    public String getGrantOng(String address) throws ConnectorException, IOException {
        return (String) parse(api.getGrantOng(address)).Result;
    }

    @Override
    public int getNetworkId() throws ConnectorException, IOException {
        return (int) parse(api.getNetworkId()).Result;
    }

    @Override
    public Object getNodeSyncStatus() throws ConnectorException, IOException {
        return parse(api.getNodeSyncStatus()).Result;
    }

    @Override
    public String getSideChainData(String sideChainID) throws ConnectorException, IOException {
        // Not supported over the REST interface.
        return null;
    }

    /** Serializes an error {@link Result} so it can travel inside a {@link RestfulException}. */
    private String to(Result rr) {
        return JSON.toJSONString(rr);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.planner;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import org.apache.calcite.DataContext;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.config.CalciteConnectionConfig;
import org.apache.calcite.config.CalciteConnectionConfigImpl;
import org.apache.calcite.config.CalciteConnectionProperty;
import org.apache.calcite.interpreter.BindableConvention;
import org.apache.calcite.interpreter.BindableRel;
import org.apache.calcite.interpreter.Bindables;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Enumerator;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.prepare.Prepare;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.core.Sort;
import org.apache.calcite.rel.logical.LogicalSort;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlExplain;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlInsert;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.validate.SqlValidator;
import org.apache.calcite.sql.validate.SqlValidatorUtil;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.Planner;
import org.apache.calcite.tools.RelConversionException;
import org.apache.calcite.tools.ValidationException;
import org.apache.calcite.util.Pair;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.guava.BaseSequence;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.segment.DimensionHandlerUtils;
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.Resource;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.server.security.ResourceType;
import org.apache.druid.sql.calcite.rel.DruidConvention;
import org.apache.druid.sql.calcite.rel.DruidQuery;
import org.apache.druid.sql.calcite.rel.DruidRel;
import org.apache.druid.sql.calcite.rel.DruidUnionRel;
import org.apache.druid.sql.calcite.run.QueryMaker;
import org.apache.druid.sql.calcite.run.QueryMakerFactory;
import org.apache.druid.utils.Throwables;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
public class DruidPlanner implements Closeable
{
private static final EmittingLogger log = new EmittingLogger(DruidPlanner.class);

// Calcite framework configuration this planner was built with; also consulted when
// constructing the standalone validator in getValidator().
private final FrameworkConfig frameworkConfig;
// Underlying Calcite planner; reset via resetPlanner() before each validate/prepare/plan call.
private final Planner planner;
private final PlannerContext plannerContext;
private final QueryMakerFactory queryMakerFactory;
// Created lazily in plan(), because the planner's type factory only exists after parsing.
private RexBuilder rexBuilder;
/**
 * Creates a planner wrapping a fresh Calcite {@link Planner} built from the given
 * framework config.
 *
 * @param frameworkConfig   Calcite framework configuration (parser, schemas, context)
 * @param plannerContext    per-query context (SQL text, parameters, authorization state)
 * @param queryMakerFactory produces the {@link QueryMaker} that executes native queries
 */
DruidPlanner(
    final FrameworkConfig frameworkConfig,
    final PlannerContext plannerContext,
    final QueryMakerFactory queryMakerFactory
)
{
    this.frameworkConfig = frameworkConfig;
    this.planner = Frameworks.getPlanner(frameworkConfig);
    this.plannerContext = plannerContext;
    this.queryMakerFactory = queryMakerFactory;
}
/**
* Validates a SQL query and populates {@link PlannerContext#getResourceActions()}.
*
* @return set of {@link Resource} corresponding to any Druid datasources or views which are taking part in the query.
*/
public ValidationResult validate() throws SqlParseException, ValidationException
{
    resetPlanner();
    final ParsedNodes parsed = ParsedNodes.create(planner.parse(plannerContext.getSql()));
    final SqlValidator validator = getValidator();

    // Calcite surfaces validation problems as unchecked exceptions; convert them into the
    // checked ValidationException callers expect.
    final SqlNode validatedNode;
    try {
        validatedNode = validator.validate(rewriteDynamicParameters(parsed.getQueryNode()));
    }
    catch (RuntimeException e) {
        throw new ValidationException(e);
    }

    // Walk the validated tree to collect the datasources/views the query reads.
    final SqlResourceCollectorShuttle shuttle = new SqlResourceCollectorShuttle(validator, plannerContext);
    validatedNode.accept(shuttle);
    final Set<ResourceAction> actions = new HashSet<>(shuttle.getResourceActions());

    // An INSERT additionally requires WRITE permission on its target datasource.
    if (parsed.getInsertNode() != null) {
        final String targetDataSource = validateAndGetDataSourceForInsert(parsed.getInsertNode());
        actions.add(new ResourceAction(new Resource(targetDataSource, ResourceType.DATASOURCE), Action.WRITE));
    }

    plannerContext.setResourceActions(actions);
    return new ValidationResult(actions);
}
/**
* Prepare an SQL query for execution, including some initial parsing and validation and any dynamic parameter type
* resolution, to support prepared statements via JDBC.
*
* In some future this could perhaps re-use some of the work done by {@link #validate()}
* instead of repeating it, but that day is not today.
*/
public PrepareResult prepare() throws SqlParseException, ValidationException, RelConversionException
{
    resetPlanner();

    final ParsedNodes parsed = ParsedNodes.create(planner.parse(plannerContext.getSql()));
    final SqlNode queryNode = planner.validate(parsed.getQueryNode());
    final RelRoot queryRel = planner.rel(queryNode);

    // A separate validator instance is used to recover the dynamic-parameter row type.
    final SqlValidator validator = getValidator();
    final RelDataTypeFactory typeFactory = queryRel.rel.getCluster().getTypeFactory();
    final RelDataType parameterTypes = validator.getParameterRowType(validator.validate(queryNode));

    // EXPLAIN returns the fixed explain-output row shape; otherwise the query maker decides.
    final RelDataType returnedRowType = parsed.getExplainNode() != null
        ? getExplainStructType(typeFactory)
        : buildQueryMaker(queryRel, parsed.getInsertNode()).getResultType();

    return new PrepareResult(returnedRowType, parameterTypes);
}
/**
* Plan an SQL query for execution, returning a {@link PlannerResult} which can be used to actually execute the query.
*
* Ideally, the query can be planned into a native Druid query, using {@link #planWithDruidConvention}, but will
* fall-back to {@link #planWithBindableConvention} if this is not possible.
*
* In some future this could perhaps re-use some of the work done by {@link #validate()}
* instead of repeating it, but that day is not today.
*/
public PlannerResult plan() throws SqlParseException, ValidationException, RelConversionException
{
    resetPlanner();
    final ParsedNodes parsed = ParsedNodes.create(planner.parse(plannerContext.getSql()));
    // the planner's type factory is not available until after parsing
    this.rexBuilder = new RexBuilder(planner.getTypeFactory());
    // Substitute dynamic parameter values before validation and rel conversion.
    final SqlNode parameterizedQueryNode = rewriteDynamicParameters(parsed.getQueryNode());
    final SqlNode validatedQueryNode = planner.validate(parameterizedQueryNode);
    final RelRoot rootQueryRel = planner.rel(validatedQueryNode);
    try {
        // Preferred path: plan into a native Druid query.
        return planWithDruidConvention(rootQueryRel, parsed.getExplainNode(), parsed.getInsertNode());
    }
    catch (Exception e) {
        // Only fall back to bindable when planning failed specifically because no native
        // plan exists (CannotPlanException somewhere in the cause chain).
        Throwable cannotPlanException = Throwables.getCauseOfType(e, RelOptPlanner.CannotPlanException.class);
        if (null == cannotPlanException) {
            // Not a CannotPlanningException, rethrow without trying with bindable
            throw e;
        }
        if (parsed.getInsertNode() != null) {
            // Cannot INSERT with BINDABLE.
            throw e;
        }
        // Try again with BINDABLE convention. Used for querying Values and metadata tables.
        try {
            return planWithBindableConvention(rootQueryRel, parsed.getExplainNode());
        }
        catch (Exception e2) {
            // Keep the bindable failure attached to the original error for debugging.
            e.addSuppressed(e2);
            Logger logger = log;
            if (!QueryContexts.isDebug(plannerContext.getQueryContext())) {
                // Suppress the stack trace unless the query was run with debug enabled.
                logger = log.noStackTrace();
            }
            String errorMessage = buildSQLPlanningErrorMessage(cannotPlanException);
            logger.warn(e, errorMessage);
            throw new UnsupportedSQLQueryException(errorMessage);
        }
    }
}
/**
 * Returns the per-query context this planner was created with.
 */
public PlannerContext getPlannerContext()
{
    return plannerContext;
}
// Releases the underlying Calcite planner's resources.
@Override
public void close()
{
    planner.close();
}
/**
* While the actual query might not have changed, if the druid planner is re-used, we still have the need to reset the
* {@link #planner} since we do not re-use artifacts or keep track of state between
* {@link #validate}, {@link #prepare}, and {@link #plan} and instead repeat parsing and validation
* for each step.
*
* Currently, that state tracking is done in {@link org.apache.druid.sql.SqlLifecycle}, which will create a new
* planner for each of the corresponding steps so this isn't strictly necessary at this time, this method is here as
* much to make this situation explicit and provide context for a future refactor as anything else (and some tests
* do re-use the planner between validate, prepare, and plan, which will run into this issue).
*
* This could be improved by tying {@link org.apache.druid.sql.SqlLifecycle} and {@link DruidPlanner} states more
* closely with the state of {@link #planner}, instead of repeating parsing and validation between each of these
* steps.
*/
private void resetPlanner()
{
    // close() releases the planner's current state; reset() makes it usable again for a
    // fresh parse/validate/transform cycle. Order matters.
    planner.close();
    planner.reset();
}
/**
* Construct a {@link PlannerResult} for a {@link RelNode} that is directly translatable to a native Druid query.
*/
private PlannerResult planWithDruidConvention(
    final RelRoot root,
    @Nullable final SqlExplain explain,
    @Nullable final SqlInsert insert
) throws ValidationException, RelConversionException
{
    // Apply the web-console outer limit (if present in the query context) before planning.
    final RelRoot possiblyLimitedRoot = possiblyWrapRootWithOuterLimitFromContext(root);
    final QueryMaker queryMaker = buildQueryMaker(root, insert);
    plannerContext.setQueryMaker(queryMaker);

    // Bind dynamic parameter values into the rel tree before handing it to the planner.
    RelNode parameterized = rewriteRelDynamicParameters(possiblyLimitedRoot.rel);
    final DruidRel<?> druidRel = (DruidRel<?>) planner.transform(
        Rules.DRUID_CONVENTION_RULES,
        planner.getEmptyTraitSet()
            .replace(DruidConvention.instance())
            .plus(root.collation),
        parameterized
    );

    if (explain != null) {
        return planExplanation(druidRel, explain, true);
    } else {
        final Supplier<Sequence<Object[]>> resultsSupplier = () -> {
            // sanity check: the plan may reference fewer datasources than were authorized
            // during validation (empty tables get eliminated), but never more.
            final Set<ResourceAction> readResourceActions =
                plannerContext.getResourceActions()
                    .stream()
                    .filter(action -> action.getAction() == Action.READ)
                    .collect(Collectors.toSet());
            Preconditions.checkState(
                readResourceActions.isEmpty() == druidRel.getDataSourceNames().isEmpty()
                // The resources found in the plannerContext can be less than the datasources in
                // the query plan, because the query planner can eliminate empty tables by replacing
                // them with InlineDataSource of empty rows.
                || readResourceActions.size() >= druidRel.getDataSourceNames().size(),
                "Authorization sanity check failed"
            );
            return druidRel.runQuery();
        };
        return new PlannerResult(resultsSupplier, queryMaker.getResultType());
    }
}
/**
* Construct a {@link PlannerResult} for a fall-back 'bindable' rel, for things that are not directly translatable
* to native Druid queries such as system tables and just a general purpose (but definitely not optimized) fall-back.
*
* See {@link #planWithDruidConvention} which will handle things which are directly translatable
* to native Druid queries.
*/
private PlannerResult planWithBindableConvention(
    final RelRoot root,
    @Nullable final SqlExplain explain
) throws RelConversionException
{
    BindableRel bindableRel = (BindableRel) planner.transform(
        Rules.BINDABLE_CONVENTION_RULES,
        planner.getEmptyTraitSet().replace(BindableConvention.INSTANCE).plus(root.collation),
        root.rel
    );

    if (!root.isRefTrivial()) {
        // Add a projection on top to accommodate root.fields.
        final List<RexNode> projects = new ArrayList<>();
        final RexBuilder rexBuilder = bindableRel.getCluster().getRexBuilder();
        for (int field : Pair.left(root.fields)) {
            projects.add(rexBuilder.makeInputRef(bindableRel, field));
        }
        bindableRel = new Bindables.BindableProject(
            bindableRel.getCluster(),
            bindableRel.getTraitSet(),
            bindableRel,
            projects,
            root.validatedRowType
        );
    }

    if (explain != null) {
        return planExplanation(bindableRel, explain, false);
    } else {
        // Execute by binding the rel to a DataContext and adapting the resulting
        // Enumerator to a Sequence. The enumerator is closed when the sequence's
        // baggage is released.
        final BindableRel theRel = bindableRel;
        final DataContext dataContext = plannerContext.createDataContext(
            (JavaTypeFactory) planner.getTypeFactory(),
            plannerContext.getParameters()
        );
        final Supplier<Sequence<Object[]>> resultsSupplier = () -> {
            final Enumerable<?> enumerable = theRel.bind(dataContext);
            final Enumerator<?> enumerator = enumerable.enumerator();
            return Sequences.withBaggage(new BaseSequence<>(
                new BaseSequence.IteratorMaker<Object[], EnumeratorIterator<Object[]>>()
                {
                    @Override
                    public EnumeratorIterator<Object[]> make()
                    {
                        return new EnumeratorIterator<>(new Iterator<Object[]>()
                        {
                            @Override
                            public boolean hasNext()
                            {
                                return enumerator.moveNext();
                            }

                            @Override
                            public Object[] next()
                            {
                                return (Object[]) enumerator.current();
                            }
                        });
                    }

                    @Override
                    public void cleanup(EnumeratorIterator<Object[]> iterFromMake)
                    {
                        // No per-iterator cleanup; the enumerator itself is closed via baggage.
                    }
                }
            ), enumerator::close);
        };
        return new PlannerResult(resultsSupplier, root.validatedRowType);
    }
}
/**
* Construct a {@link PlannerResult} for an 'explain' query from a {@link RelNode}
*/
private PlannerResult planExplanation(
    final RelNode rel,
    final SqlExplain explain,
    final boolean isDruidConventionExplanation
)
{
    // Default explanation: Calcite's textual plan dump.
    String explanation = RelOptUtil.dumpPlan("", rel, explain.getFormat(), explain.getDetailLevel());
    String resourcesString;
    try {
        if (isDruidConventionExplanation && rel instanceof DruidRel) {
            // Show the native queries instead of Calcite's explain if the legacy flag is turned off
            if (plannerContext.getPlannerConfig().isUseNativeQueryExplain()) {
                DruidRel<?> druidRel = (DruidRel<?>) rel;
                try {
                    explanation = explainSqlPlanAsNativeQueries(druidRel);
                }
                catch (Exception ex) {
                    // Fall back to the Calcite dump rather than failing the EXPLAIN outright.
                    log.warn(ex, "Unable to translate to a native Druid query. Resorting to legacy Druid explain plan");
                }
            }
        }
        final Set<Resource> resources =
            plannerContext.getResourceActions().stream().map(ResourceAction::getResource).collect(Collectors.toSet());
        resourcesString = plannerContext.getJsonMapper().writeValueAsString(resources);
    }
    catch (JsonProcessingException jpe) {
        // this should never happen, we create the Resources here, not a user
        log.error(jpe, "Encountered exception while serializing Resources for explain output");
        resourcesString = null;
    }
    // An EXPLAIN result is a single row: (plan text, JSON-serialized resource set).
    final Supplier<Sequence<Object[]>> resultsSupplier = Suppliers.ofInstance(
        Sequences.simple(ImmutableList.of(new Object[]{explanation, resourcesString})));
    return new PlannerResult(resultsSupplier, getExplainStructType(rel.getCluster().getTypeFactory()));
}
/**
* This method doesn't utilize the Calcite's internal {@link RelOptUtil#dumpPlan} since that tends to be verbose
* and not indicative of the native Druid Queries which will get executed
* This method assumes that the Planner has converted the RelNodes to DruidRels, and thereby we can implictly cast it
*
* @param rel Instance of the root {@link DruidRel} which is formed by running the planner transformations on it
* @return A string representing an array of native queries that correspond to the given SQL query, in JSON format
* @throws JsonProcessingException
*/
private String explainSqlPlanAsNativeQueries(DruidRel<?> rel) throws JsonProcessingException
{
    final ObjectMapper jsonMapper = plannerContext.getJsonMapper();
    // Flatten any top-level unions so each native query appears individually.
    final List<DruidQuery> druidQueryList = flattenOutermostRel(rel)
        .stream()
        .map(druidRel -> druidRel.toDruidQuery(false))
        .collect(Collectors.toList());

    // Putting the queries as object node in an ArrayNode, since directly returning a list causes issues when
    // serializing the "queryType". Another method would be to create a POJO containing query and signature, and then
    // serializing it using normal list method.
    final ArrayNode nativeQueriesArrayNode = jsonMapper.createArrayNode();
    for (DruidQuery druidQuery : druidQueryList) {
        final Query<?> nativeQuery = druidQuery.getQuery();
        final ObjectNode objectNode = jsonMapper.createObjectNode();
        // Fix: use set() instead of the deprecated ObjectNode.put(String, JsonNode) overload;
        // behavior is identical.
        objectNode.set("query", jsonMapper.convertValue(nativeQuery, ObjectNode.class));
        objectNode.set("signature", jsonMapper.convertValue(druidQuery.getOutputRowSignature(), ArrayNode.class));
        nativeQueriesArrayNode.add(objectNode);
    }
    return jsonMapper.writeValueAsString(nativeQueriesArrayNode);
}
/**
* Given a {@link DruidRel}, this method recursively flattens the Rels if they are of the type {@link DruidUnionRel}
* It is implicitly assumed that the {@link DruidUnionRel} can never be the child of a non {@link DruidUnionRel}
* node
* For eg, a DruidRel structure of kind:
* DruidUnionRel
* DruidUnionRel
* DruidRel (A)
* DruidRel (B)
* DruidRel(C)
* will return [DruidRel(A), DruidRel(B), DruidRel(C)]
* @param outermostDruidRel The outermost rel which is to be flattened
* @return a list of DruidRel's which donot have a DruidUnionRel nested in between them
*/
// Entry point for the recursive flattening below: collects the leaf DruidRels of any
// nested DruidUnionRel structure into a fresh list, in DFS order.
private List<DruidRel<?>> flattenOutermostRel(DruidRel<?> outermostDruidRel)
{
    final List<DruidRel<?>> accumulator = new ArrayList<>();
    flattenOutermostRel(outermostDruidRel, accumulator);
    return accumulator;
}
/**
* Recursive function (DFS) which traverses the nodes and collects the corresponding {@link DruidRel} into a list if
* they are not of the type {@link DruidUnionRel} or else calls the method with the child nodes. The DFS order of the
* nodes are retained, since that is the order in which they will actually be called in {@link DruidUnionRel#runQuery()}
* @param druidRel The current relNode
* @param flattendListAccumulator Accumulator list which needs to be appended by this method
*/
// DFS helper: leaves (non-union rels) are appended to the accumulator; unions are
// recursed into, preserving input order.
private void flattenOutermostRel(DruidRel<?> druidRel, List<DruidRel<?>> flattendListAccumulator)
{
    if (!(druidRel instanceof DruidUnionRel)) {
        flattendListAccumulator.add(druidRel);
        return;
    }
    for (RelNode innerRelNode : ((DruidUnionRel) druidRel).getInputs()) {
        // This cast should always be possible: union inputs are themselves DruidRels.
        flattenOutermostRel((DruidRel<?>) innerRelNode, flattendListAccumulator);
    }
}
/**
* This method wraps the root with a {@link LogicalSort} that applies a limit (no ordering change). If the outer rel
* is already a {@link Sort}, we can merge our outerLimit into it, similar to what is going on in
* {@link org.apache.druid.sql.calcite.rule.SortCollapseRule}.
*
* The {@link PlannerContext#CTX_SQL_OUTER_LIMIT} flag that controls this wrapping is meant for internal use only by
* the web console, allowing it to apply a limit to queries without rewriting the original SQL.
*
* @param root root node
* @return root node wrapped with a limiting logical sort if a limit is specified in the query context.
*/
// Fix: removed the incorrect @Nullable return annotation — every code path below returns
// a non-null RelRoot (either the original root or a newly wrapped one).
private RelRoot possiblyWrapRootWithOuterLimitFromContext(RelRoot root)
{
    Object outerLimitObj = plannerContext.getQueryContext().get(PlannerContext.CTX_SQL_OUTER_LIMIT);
    Long outerLimit = DimensionHandlerUtils.convertObjectToLong(outerLimitObj, true);
    if (outerLimit == null) {
        // No outer limit in the query context: leave the root untouched.
        return root;
    }
    final LogicalSort newRootRel;
    if (root.rel instanceof Sort) {
        Sort sort = (Sort) root.rel;
        // Merge the context limit into the existing sort's offset/limit.
        final OffsetLimit originalOffsetLimit = OffsetLimit.fromSort(sort);
        final OffsetLimit newOffsetLimit = originalOffsetLimit.andThen(new OffsetLimit(0, outerLimit));
        if (newOffsetLimit.equals(originalOffsetLimit)) {
            // nothing to do, don't bother to make a new sort
            return root;
        }
        newRootRel = LogicalSort.create(
            sort.getInput(),
            sort.collation,
            newOffsetLimit.getOffsetAsRexNode(rexBuilder),
            newOffsetLimit.getLimitAsRexNode(rexBuilder)
        );
    } else {
        // Wrap the root in a limit-only sort (no ordering change).
        newRootRel = LogicalSort.create(
            root.rel,
            root.collation,
            null,
            new OffsetLimit(0, outerLimit).getLimitAsRexNode(rexBuilder)
        );
    }
    return new RelRoot(newRootRel, root.validatedRowType, root.kind, root.fields, root.collation);
}
/**
 * Constructs an SQL validator equivalent to the one {@link #planner} uses internally.
 *
 * The planner's own validator (nicely seeded after validating a query) is private with no
 * accessor, so we assemble a fresh one from the same framework configuration — e.g. to inspect
 * parameter types. This still beats writing our own Prepare and Planner implementations.
 */
private SqlValidator getValidator()
{
  Preconditions.checkNotNull(planner.getTypeFactory());

  final CalciteConnectionConfig connectionConfig;
  if (frameworkConfig.getContext() == null) {
    // No context to unwrap a config from; build a minimal one matching the parser's case rules.
    final Properties properties = new Properties();
    properties.setProperty(
        CalciteConnectionProperty.CASE_SENSITIVE.camelName(),
        String.valueOf(PlannerFactory.PARSER_CONFIG.caseSensitive())
    );
    connectionConfig = new CalciteConnectionConfigImpl(properties);
  } else {
    connectionConfig = frameworkConfig.getContext().unwrap(CalciteConnectionConfig.class);
  }

  final Prepare.CatalogReader catalogReader = new CalciteCatalogReader(
      CalciteSchema.from(frameworkConfig.getDefaultSchema().getParentSchema()),
      CalciteSchema.from(frameworkConfig.getDefaultSchema()).path(null),
      planner.getTypeFactory(),
      connectionConfig
  );

  return SqlValidatorUtil.newValidator(
      frameworkConfig.getOperatorTable(),
      catalogReader,
      planner.getTypeFactory(),
      DruidConformance.instance()
  );
}
/**
 * Uses {@link SqlParameterizerShuttle} to rewrite the parse tree, swapping out each
 * {@link org.apache.calcite.sql.SqlDynamicParam} early for its
 * {@link org.apache.calcite.sql.SqlLiteral} replacement. The node is returned untouched when no
 * parameters are bound in the planner context.
 */
private SqlNode rewriteDynamicParameters(SqlNode parsed)
{
  if (plannerContext.getParameters().isEmpty()) {
    return parsed;
  }
  return parsed.accept(new SqlParameterizerShuttle(plannerContext));
}
/**
 * Fall-back dynamic parameter substitution using {@link RelParameterizerShuttle}, for the event
 * that {@link #rewriteDynamicParameters(SqlNode)} was unable to successfully substitute all
 * parameter values; causes a failure if any dynamic parameters are not bound.
 */
private RelNode rewriteRelDynamicParameters(RelNode rootRel)
{
  return rootRel.accept(new RelParameterizerShuttle(plannerContext));
}
/**
 * Builds the {@link QueryMaker} for this query: an insert-flavored one when {@code insert} is
 * present (after validating the INSERT target datasource), otherwise a select-flavored one.
 *
 * @throws ValidationException if the INSERT target is not a supported Druid datasource
 */
private QueryMaker buildQueryMaker(
    final RelRoot rootQueryRel,
    @Nullable final SqlInsert insert
) throws ValidationException
{
  if (insert == null) {
    return queryMakerFactory.buildForSelect(rootQueryRel, plannerContext);
  }
  final String targetDataSource = validateAndGetDataSourceForInsert(insert);
  return queryMakerFactory.buildForInsert(targetDataSource, rootQueryRel, plannerContext);
}
/**
 * Row type for EXPLAIN output: two VARCHAR columns named "PLAN" and "RESOURCES".
 */
private static RelDataType getExplainStructType(RelDataTypeFactory typeFactory)
{
  final ImmutableList<String> fieldNames = ImmutableList.of("PLAN", "RESOURCES");
  return typeFactory.createStructType(
      ImmutableList.of(
          Calcites.createSqlType(typeFactory, SqlTypeName.VARCHAR),
          Calcites.createSqlType(typeFactory, SqlTypeName.VARCHAR)
      ),
      fieldNames
  );
}
/**
 * Extracts the target datasource from a {@link SqlInsert}, validating that the INSERT is of a
 * form we support. The INSERT target must be either an unqualified name, or a name qualified by
 * the default schema.
 *
 * @throws ValidationException if the statement is an UPSERT, has a target column list, or targets
 *         something that is not a Druid datasource
 */
private String validateAndGetDataSourceForInsert(final SqlInsert insert) throws ValidationException
{
  if (insert.isUpsert()) {
    throw new ValidationException("UPSERT is not supported.");
  }
  if (insert.getTargetColumnList() != null) {
    throw new ValidationException("INSERT with target column list is not supported.");
  }
  final SqlIdentifier tableIdentifier = (SqlIdentifier) insert.getTargetTable();
  final int nameCount = tableIdentifier.names.size();
  if (nameCount == 0) {
    // Defensive: a parsed INSERT should always carry a target table name.
    throw new ValidationException("INSERT requires target table.");
  }
  if (nameCount == 1) {
    // Unqualified name resolves directly to a datasource.
    return Iterables.getOnlyElement(tableIdentifier.names);
  }
  // Qualified name: only "<defaultSchema>.<table>" is accepted.
  final String defaultSchemaName =
      Iterables.getOnlyElement(CalciteSchema.from(frameworkConfig.getDefaultSchema()).path(null));
  if (nameCount == 2 && defaultSchemaName.equals(tableIdentifier.names.get(0))) {
    return tableIdentifier.names.get(1);
  }
  throw new ValidationException(
      StringUtils.format("Cannot INSERT into [%s] because it is not a Druid datasource.", tableIdentifier)
  );
}
/**
 * Builds the user-facing message for a failed planning attempt. Prefers the planner context's
 * recorded planning error, then the message of an {@link UnsupportedSQLQueryException}, and
 * finally a generic pointer at the broker logs. The original SQL is always included.
 */
private String buildSQLPlanningErrorMessage(Throwable exception)
{
  String hint = plannerContext.getPlanningError();
  if (hint == null && exception instanceof UnsupportedSQLQueryException) {
    hint = exception.getMessage();
  }
  // Planning errors are more like hints than definitive causes, so re-phrase them accordingly.
  final String detail =
      hint == null ? "Please check broker logs for more details" : "Possible error: " + hint;
  // Finally, add the query itself to the error message that the user will get.
  return StringUtils.format("Cannot build plan for query: %s. %s", plannerContext.getSql(), detail);
}
/**
 * Trivial {@link Iterator} wrapper that forwards every call to the iterator it was given.
 */
private static class EnumeratorIterator<T> implements Iterator<T>
{
  private final Iterator<T> delegate;

  EnumeratorIterator(Iterator<T> it)
  {
    this.delegate = it;
  }

  @Override
  public boolean hasNext()
  {
    return delegate.hasNext();
  }

  @Override
  public T next()
  {
    return delegate.next();
  }
}
/**
 * Holds the decomposed parts of a parsed SQL statement: an optional EXPLAIN wrapper, an optional
 * INSERT wrapper, and the underlying query node itself. Instances are immutable: all fields are
 * set once at construction.
 */
private static class ParsedNodes
{
  @Nullable
  private final SqlExplain explain;

  @Nullable
  private final SqlInsert insert;

  private final SqlNode query;

  private ParsedNodes(@Nullable SqlExplain explain, @Nullable SqlInsert insert, SqlNode query)
  {
    this.explain = explain;
    this.insert = insert;
    this.query = query;
  }

  /**
   * Unwraps {@code node}, peeling off an EXPLAIN wrapper and then an INSERT wrapper when present,
   * and validates that what remains is actually a query.
   *
   * @throws ValidationException if the innermost node is not a query
   */
  static ParsedNodes create(final SqlNode node) throws ValidationException
  {
    SqlExplain explain = null;
    SqlInsert insert = null;
    SqlNode query = node;
    if (query.getKind() == SqlKind.EXPLAIN) {
      explain = (SqlExplain) query;
      query = explain.getExplicandum();
    }
    if (query.getKind() == SqlKind.INSERT) {
      insert = (SqlInsert) query;
      query = insert.getSource();
    }
    if (!query.isA(SqlKind.QUERY)) {
      throw new ValidationException(StringUtils.format("Cannot execute [%s].", query.getKind()));
    }
    return new ParsedNodes(explain, insert, query);
  }

  /** @return the EXPLAIN node, or null if the statement was not an EXPLAIN */
  @Nullable
  public SqlExplain getExplainNode()
  {
    return explain;
  }

  /** @return the INSERT node, or null if the statement was not an INSERT */
  @Nullable
  public SqlInsert getInsertNode()
  {
    return insert;
  }

  /** @return the innermost query node, with any EXPLAIN/INSERT wrappers removed */
  public SqlNode getQueryNode()
  {
    return query;
  }
}
}
| |
/*
* Copyright (c) 2009-2014, Peter Abeles. All Rights Reserved.
*
* This file is part of Efficient Java Matrix Library (EJML).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ejml.data;
import org.ejml.ops.MatrixIO;
/**
 * Dense matrix of complex numbers. Internally the data is stored in a single row-major array with
 * the real and imaginary components interleaved, in that order: element (row,col) occupies index
 * (row*numCols + col)*2 for the real part and the following slot for the imaginary part, so the
 * total number of array elements is numRows*numCols*2.
 *
 * @author Peter Abeles
 */
public class CDenseMatrix64F extends CD1Matrix64F {

    /**
     * <p>
     * Creates a matrix whose shape and values are defined by the 2D array 'data'. It is assumed
     * that 'data' has a row-major formatting:<br>
     * <br>
     * data[ row ][ column ]
     * </p>
     * @param data 2D array representation of the matrix. Not modified.
     */
    public CDenseMatrix64F( double data[][] ) {
        this.numRows = data.length;
        this.numCols = data[0].length/2;
        this.data = new double[ numRows * numCols * 2 ];

        // Each row must hold exactly numCols interleaved (real,imaginary) pairs.
        int expectedRowLength = numCols*2;
        for (int row = 0; row < numRows; row++) {
            double[] src = data[row];
            if( src.length != expectedRowLength )
                throw new IllegalArgumentException("Unexpected row size in input data at row "+row);
            System.arraycopy(src,0,this.data,row*expectedRowLength,src.length);
        }
    }

    /**
     * Creates a matrix of the given shape, initialized from the supplied 1D array.
     *
     * @param numRows number of rows
     * @param numCols number of columns
     * @param rowMajor true if 'data' is row-major, false if column-major
     * @param data interleaved real/imaginary values; must contain exactly numRows*numCols*2 values
     */
    public CDenseMatrix64F(int numRows, int numCols, boolean rowMajor, double... data) {
        if( data.length != numRows*numCols*2 )
            throw new RuntimeException("Unexpected length for data");

        // Allocate storage before delegating to set(), which copies from 'data'.
        this.data = new double[ numRows * numCols * 2 ];
        this.numRows = numRows;
        this.numCols = numCols;
        set(numRows,numCols, rowMajor, data);
    }

    /**
     * Creates a new {@link org.ejml.data.CDenseMatrix64F} which is a copy of the passed in matrix.
     * @param original Matrix which is to be copied
     */
    public CDenseMatrix64F(CDenseMatrix64F original) {
        this(original.numRows, original.numCols);
        set(original);
    }

    /**
     * Creates a new zero-filled matrix with the specified number of rows and columns.
     *
     * @param numRows number of rows
     * @param numCols number of columns
     */
    public CDenseMatrix64F(int numRows, int numCols) {
        this.numRows = numRows;
        this.numCols = numCols;
        this.data = new double[numRows*numCols*2];
    }

    @Override
    public int getIndex(int row, int col) {
        // Index of the real component; the imaginary component immediately follows it.
        return 2*(row*numCols + col);
    }

    @Override
    public void reshape( int numRows , int numCols ) {
        int required = numRows*numCols*2;
        // Only grow the backing array; an existing larger array is reused as-is.
        if( required > data.length ) {
            data = new double[required];
        }
        this.numRows = numRows;
        this.numCols = numCols;
    }

    @Override
    public void get(int row, int col, Complex64F output) {
        int index = getIndex(row,col);
        output.real = data[index];
        output.imaginary = data[index+1];
    }

    @Override
    public void set(int row, int col, double real, double imaginary) {
        int index = getIndex(row,col);
        data[index] = real;
        data[index+1] = imaginary;
    }

    @Override
    public double getReal(int row, int col) {
        return data[getIndex(row,col)];
    }

    @Override
    public void setReal(int row, int col, double val) {
        data[getIndex(row,col)] = val;
    }

    @Override
    public double getImaginary(int row, int col) {
        return data[getIndex(row,col) + 1];
    }

    @Override
    public void setImaginary(int row, int col, double val) {
        data[getIndex(row,col) + 1] = val;
    }

    @Override
    public int getDataLength() {
        // Logical element count, which can be smaller than data.length after a shrinking reshape.
        return numRows*numCols*2;
    }

    /**
     * Sets this matrix equal to 'original', growing internal storage if needed.
     *
     * @param original matrix to copy from. Not modified.
     */
    public void set( CDenseMatrix64F original ) {
        reshape(original.numRows,original.numCols);
        // Rows are stored contiguously, so the whole matrix can be copied in a single pass.
        System.arraycopy(original.data,0,data,0,numRows*numCols*2);
    }

    @Override
    public CDenseMatrix64F copy() {
        return new CDenseMatrix64F(this);
    }

    @Override
    public void set(Matrix original) {
        reshape(original.getNumRows(),original.getNumCols());

        ComplexMatrix64F src = (ComplexMatrix64F)original;
        Complex64F value = new Complex64F();
        for (int row = 0; row < numRows; row++) {
            for (int col = 0; col < numCols; col++) {
                src.get(row,col,value);
                set(row,col,value.real,value.imaginary);
            }
        }
    }

    @Override
    public void print() {
        MatrixIO.print(System.out, this);
    }

    /**
     * Number of array elements occupied by one matrix row (two per complex element).
     */
    public int getRowStride() {
        return 2*numCols;
    }

    /**
     * Sets this matrix equal to the matrix encoded in the array.
     *
     * @param numRows The number of rows.
     * @param numCols The number of columns.
     * @param rowMajor If the array is encoded in a row-major or a column-major format.
     * @param data The formatted 1D array. Not modified.
     */
    public void set(int numRows, int numCols, boolean rowMajor, double ...data)
    {
        reshape(numRows,numCols);
        int length = numRows*numCols*2;

        if( length > data.length )
            throw new RuntimeException("Passed in array not long enough");

        if( rowMajor ) {
            System.arraycopy(data,0,this.data,0,length);
        } else {
            // Column-major input: gather each row's elements out of the column-ordered array.
            int dst = 0;
            int stride = numRows*2;
            for( int r = 0; r < numRows; r++ ) {
                for( int c = 0; c < numCols; c++ ) {
                    this.data[dst++] = data[c*stride + r*2];
                    this.data[dst++] = data[c*stride + r*2 + 1];
                }
            }
        }
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.route53.model;
import java.io.Serializable;
/**
* <p>
* A complex type that contains information about a
* <code>GeoLocation</code> .
* </p>
*/
public class GeoLocationDetails implements Serializable, Cloneable {
/**
* The code for a continent geo location. Note: only continent locations
* have a continent code.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>2 - 2<br/>
*/
private String continentCode;
/**
* The name of the continent. This element is only present if
* <code>ContinentCode</code> is also present.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 32<br/>
*/
private String continentName;
/**
* The code for a country geo location. The default location uses '*' for
* the country code and will match all locations that are not matched by
* a geo location. <p>The default geo location uses a <code>*</code> for
* the country code. All other country codes follow the ISO 3166
* two-character code.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 2<br/>
*/
private String countryCode;
/**
* The name of the country. This element is only present if
* <code>CountryCode</code> is also present.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
*/
private String countryName;
/**
* The code for a country's subdivision (e.g., a province of Canada). A
* subdivision code is only valid with the appropriate country code.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 3<br/>
*/
private String subdivisionCode;
/**
* The name of the subdivision. This element is only present if
* <code>SubdivisionCode</code> is also present.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
*/
private String subdivisionName;
/**
* The code for a continent geo location. Note: only continent locations
* have a continent code.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>2 - 2<br/>
*
* @return The code for a continent geo location. Note: only continent locations
* have a continent code.
*/
public String getContinentCode() {
return continentCode;
}
/**
* The code for a continent geo location. Note: only continent locations
* have a continent code.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>2 - 2<br/>
*
* @param continentCode The code for a continent geo location. Note: only continent locations
* have a continent code.
*/
public void setContinentCode(String continentCode) {
this.continentCode = continentCode;
}
/**
* The code for a continent geo location. Note: only continent locations
* have a continent code.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>2 - 2<br/>
*
* @param continentCode The code for a continent geo location. Note: only continent locations
* have a continent code.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GeoLocationDetails withContinentCode(String continentCode) {
this.continentCode = continentCode;
return this;
}
/**
* The name of the continent. This element is only present if
* <code>ContinentCode</code> is also present.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 32<br/>
*
* @return The name of the continent. This element is only present if
* <code>ContinentCode</code> is also present.
*/
public String getContinentName() {
return continentName;
}
/**
* The name of the continent. This element is only present if
* <code>ContinentCode</code> is also present.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 32<br/>
*
* @param continentName The name of the continent. This element is only present if
* <code>ContinentCode</code> is also present.
*/
public void setContinentName(String continentName) {
this.continentName = continentName;
}
/**
* The name of the continent. This element is only present if
* <code>ContinentCode</code> is also present.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 32<br/>
*
* @param continentName The name of the continent. This element is only present if
* <code>ContinentCode</code> is also present.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GeoLocationDetails withContinentName(String continentName) {
this.continentName = continentName;
return this;
}
/**
* The code for a country geo location. The default location uses '*' for
* the country code and will match all locations that are not matched by
* a geo location. <p>The default geo location uses a <code>*</code> for
* the country code. All other country codes follow the ISO 3166
* two-character code.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 2<br/>
*
* @return The code for a country geo location. The default location uses '*' for
* the country code and will match all locations that are not matched by
* a geo location. <p>The default geo location uses a <code>*</code> for
* the country code. All other country codes follow the ISO 3166
* two-character code.
*/
public String getCountryCode() {
return countryCode;
}
/**
* The code for a country geo location. The default location uses '*' for
* the country code and will match all locations that are not matched by
* a geo location. <p>The default geo location uses a <code>*</code> for
* the country code. All other country codes follow the ISO 3166
* two-character code.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 2<br/>
*
* @param countryCode The code for a country geo location. The default location uses '*' for
* the country code and will match all locations that are not matched by
* a geo location. <p>The default geo location uses a <code>*</code> for
* the country code. All other country codes follow the ISO 3166
* two-character code.
*/
public void setCountryCode(String countryCode) {
this.countryCode = countryCode;
}
/**
* The code for a country geo location. The default location uses '*' for
* the country code and will match all locations that are not matched by
* a geo location. <p>The default geo location uses a <code>*</code> for
* the country code. All other country codes follow the ISO 3166
* two-character code.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 2<br/>
*
* @param countryCode The code for a country geo location. The default location uses '*' for
* the country code and will match all locations that are not matched by
* a geo location. <p>The default geo location uses a <code>*</code> for
* the country code. All other country codes follow the ISO 3166
* two-character code.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GeoLocationDetails withCountryCode(String countryCode) {
this.countryCode = countryCode;
return this;
}
/**
* The name of the country. This element is only present if
* <code>CountryCode</code> is also present.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
*
* @return The name of the country. This element is only present if
* <code>CountryCode</code> is also present.
*/
public String getCountryName() {
return countryName;
}
/**
* The name of the country. This element is only present if
* <code>CountryCode</code> is also present.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
*
* @param countryName The name of the country. This element is only present if
* <code>CountryCode</code> is also present.
*/
public void setCountryName(String countryName) {
this.countryName = countryName;
}
/**
* The name of the country. This element is only present if
* <code>CountryCode</code> is also present.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
*
* @param countryName The name of the country. This element is only present if
* <code>CountryCode</code> is also present.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GeoLocationDetails withCountryName(String countryName) {
this.countryName = countryName;
return this;
}
/**
* The code for a country's subdivision (e.g., a province of Canada). A
* subdivision code is only valid with the appropriate country code.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 3<br/>
*
* @return The code for a country's subdivision (e.g., a province of Canada). A
* subdivision code is only valid with the appropriate country code.
*/
public String getSubdivisionCode() {
return subdivisionCode;
}
/**
* The code for a country's subdivision (e.g., a province of Canada). A
* subdivision code is only valid with the appropriate country code.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 3<br/>
*
* @param subdivisionCode The code for a country's subdivision (e.g., a province of Canada). A
* subdivision code is only valid with the appropriate country code.
*/
public void setSubdivisionCode(String subdivisionCode) {
this.subdivisionCode = subdivisionCode;
}
/**
* The code for a country's subdivision (e.g., a province of Canada). A
* subdivision code is only valid with the appropriate country code.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 3<br/>
*
* @param subdivisionCode The code for a country's subdivision (e.g., a province of Canada). A
* subdivision code is only valid with the appropriate country code.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GeoLocationDetails withSubdivisionCode(String subdivisionCode) {
this.subdivisionCode = subdivisionCode;
return this;
}
/**
* The name of the subdivision. This element is only present if
* <code>SubdivisionCode</code> is also present.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
*
* @return The name of the subdivision. This element is only present if
* <code>SubdivisionCode</code> is also present.
*/
public String getSubdivisionName() {
return subdivisionName;
}
/**
* The name of the subdivision. This element is only present if
* <code>SubdivisionCode</code> is also present.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
*
* @param subdivisionName The name of the subdivision. This element is only present if
* <code>SubdivisionCode</code> is also present.
*/
public void setSubdivisionName(String subdivisionName) {
this.subdivisionName = subdivisionName;
}
/**
* The name of the subdivision. This element is only present if
* <code>SubdivisionCode</code> is also present.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
*
* @param subdivisionName The name of the subdivision. This element is only present if
* <code>SubdivisionCode</code> is also present.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GeoLocationDetails withSubdivisionName(String subdivisionName) {
this.subdivisionName = subdivisionName;
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getContinentCode() != null) sb.append("ContinentCode: " + getContinentCode() + ",");
if (getContinentName() != null) sb.append("ContinentName: " + getContinentName() + ",");
if (getCountryCode() != null) sb.append("CountryCode: " + getCountryCode() + ",");
if (getCountryName() != null) sb.append("CountryName: " + getCountryName() + ",");
if (getSubdivisionCode() != null) sb.append("SubdivisionCode: " + getSubdivisionCode() + ",");
if (getSubdivisionName() != null) sb.append("SubdivisionName: " + getSubdivisionName() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getContinentCode() == null) ? 0 : getContinentCode().hashCode());
hashCode = prime * hashCode + ((getContinentName() == null) ? 0 : getContinentName().hashCode());
hashCode = prime * hashCode + ((getCountryCode() == null) ? 0 : getCountryCode().hashCode());
hashCode = prime * hashCode + ((getCountryName() == null) ? 0 : getCountryName().hashCode());
hashCode = prime * hashCode + ((getSubdivisionCode() == null) ? 0 : getSubdivisionCode().hashCode());
hashCode = prime * hashCode + ((getSubdivisionName() == null) ? 0 : getSubdivisionName().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof GeoLocationDetails == false) return false;
GeoLocationDetails other = (GeoLocationDetails)obj;
if (other.getContinentCode() == null ^ this.getContinentCode() == null) return false;
if (other.getContinentCode() != null && other.getContinentCode().equals(this.getContinentCode()) == false) return false;
if (other.getContinentName() == null ^ this.getContinentName() == null) return false;
if (other.getContinentName() != null && other.getContinentName().equals(this.getContinentName()) == false) return false;
if (other.getCountryCode() == null ^ this.getCountryCode() == null) return false;
if (other.getCountryCode() != null && other.getCountryCode().equals(this.getCountryCode()) == false) return false;
if (other.getCountryName() == null ^ this.getCountryName() == null) return false;
if (other.getCountryName() != null && other.getCountryName().equals(this.getCountryName()) == false) return false;
if (other.getSubdivisionCode() == null ^ this.getSubdivisionCode() == null) return false;
if (other.getSubdivisionCode() != null && other.getSubdivisionCode().equals(this.getSubdivisionCode()) == false) return false;
if (other.getSubdivisionName() == null ^ this.getSubdivisionName() == null) return false;
if (other.getSubdivisionName() != null && other.getSubdivisionName().equals(this.getSubdivisionName()) == false) return false;
return true;
}
@Override
public GeoLocationDetails clone() {
try {
return (GeoLocationDetails) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!",
e);
}
}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.block.stream;
import alluxio.conf.AlluxioConfiguration;
import alluxio.conf.PropertyKey;
import alluxio.Seekable;
import alluxio.client.BoundedStream;
import alluxio.client.PositionedReadable;
import alluxio.client.ReadType;
import alluxio.client.file.FileSystemContext;
import alluxio.client.file.URIStatus;
import alluxio.client.file.options.InStreamOptions;
import alluxio.exception.PreconditionMessage;
import alluxio.exception.status.NotFoundException;
import alluxio.grpc.ReadRequest;
import alluxio.network.protocol.databuffer.DataBuffer;
import alluxio.proto.dataserver.Protocol;
import alluxio.util.io.BufferUtils;
import alluxio.util.network.NettyUtils;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.wire.BlockInfo;
import alluxio.wire.WorkerNetAddress;
import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import javax.annotation.concurrent.NotThreadSafe;
/**
* Provides an {@link InputStream} implementation that is based on {@link DataReader}s to
* stream data chunk by chunk.
*/
@NotThreadSafe
public class BlockInStream extends InputStream implements BoundedStream, Seekable,
PositionedReadable {
private static final Logger LOG = LoggerFactory.getLogger(BlockInStream.class);
/** Source of the block data: the local worker, a remote worker, or the under file system (UFS). */
public enum BlockInStreamSource {
LOCAL, REMOTE, UFS
}
private final WorkerNetAddress mAddress;
private final BlockInStreamSource mInStreamSource;
/** The id of the block or UFS file to which this instream provides access. */
private final long mId;
/** The size in bytes of the block. */
private final long mLength;
private final byte[] mSingleByte = new byte[1];
/** Current position of the stream, relative to the start of the block. */
private long mPos = 0;
/** The current data chunk. */
private DataBuffer mCurrentChunk;
private DataReader mDataReader;
private final DataReader.Factory mDataReaderFactory;
private boolean mClosed = false;
private boolean mEOF = false;
/**
 * Creates a {@link BlockInStream}.
 *
 * One of several read behaviors:
 *
 * 1. Domain socket - if the data source is the local worker and the local worker has a domain
 * socket server
 * 2. Short-Circuit - if the data source is the local worker
 * 3. Local Loopback Read - if the data source is the local worker and short circuit is disabled
 * 4. Read from remote worker - if the data source is a remote worker
 * 5. UFS Read from worker - if the data source is UFS, read from the UFS policy's designated
 * worker (ufs -> local or remote worker -> client)
 *
 * @param context the file system context
 * @param info the block info
 * @param dataSource the Alluxio worker which should read the data
 * @param dataSourceType the source location of the block
 * @param options the instream options
 * @return the {@link BlockInStream} object
 */
public static BlockInStream create(FileSystemContext context, BlockInfo info,
    WorkerNetAddress dataSource, BlockInStreamSource dataSourceType, InStreamOptions options)
    throws IOException {
  ReadType readType = ReadType.fromProto(options.getOptions().getReadType());
  long blockId = info.getBlockId();
  long blockSize = info.getLength();

  // Construct the partial read request, including UFS fallback options.
  ReadRequest.Builder builder =
      ReadRequest.newBuilder().setBlockId(blockId).setPromote(readType.isPromote());
  builder.setOpenUfsBlockOptions(options.getOpenUfsBlockOptions(blockId));

  AlluxioConfiguration alluxioConf = context.getClusterConf();
  boolean shortCircuit = alluxioConf.getBoolean(PropertyKey.USER_SHORT_CIRCUIT_ENABLED);
  boolean sourceSupportsDomainSocket = NettyUtils.isDomainSocketSupported(dataSource);
  boolean sourceIsLocal = dataSourceType == BlockInStreamSource.LOCAL;

  // Short circuit: read the block file directly when it lives on this host and no domain socket
  // server is available.
  if (sourceIsLocal && shortCircuit && !sourceSupportsDomainSocket) {
    LOG.debug("Creating short circuit input stream for block {} @ {}", blockId, dataSource);
    try {
      return createLocalBlockInStream(context, dataSource, blockId, blockSize, options);
    } catch (NotFoundException e) {
      // Failed to do short circuit read because the block is not available in Alluxio.
      // We will try to read via gRPC instead; log the cause so the fallback is diagnosable
      // rather than silently discarding the exception.
      LOG.warn("Failed to create short circuit input stream for block {} @ {}. Falling back to "
          + "network transfer", blockId, dataSource, e);
    }
  }

  // gRPC
  LOG.debug("Creating gRPC input stream for block {} @ {} from client {} reading through {}",
      blockId, dataSource, NetworkAddressUtils.getClientHostName(alluxioConf), dataSource);
  return createGrpcBlockInStream(context, dataSource, dataSourceType, builder.buildPartial(),
      blockSize, options);
}
/**
* Creates a {@link BlockInStream} to read from a local file.
*
* @param context the file system context
* @param address the network address of the gRPC data server to read from
* @param blockId the block ID
* @param length the block length
* @param options the in stream options
* @return the {@link BlockInStream} created
*/
private static BlockInStream createLocalBlockInStream(FileSystemContext context,
WorkerNetAddress address, long blockId, long length, InStreamOptions options)
throws IOException {
long chunkSize = context.getClusterConf().getBytes(
PropertyKey.USER_LOCAL_READER_CHUNK_SIZE_BYTES);
return new BlockInStream(
new LocalFileDataReader.Factory(context, address, blockId, chunkSize, options),
address, BlockInStreamSource.LOCAL, blockId, length);
}
  /**
   * Creates a {@link BlockInStream} to read from a gRPC data server.
   *
   * @param context the file system context
   * @param address the address of the gRPC data server
   * @param blockSource the source location of the block
   * @param readRequestPartial the partial read request (block ID and UFS fallback options set)
   * @param blockSize the block size
   * @param options the in stream options
   * @return the {@link BlockInStream} created
   */
  private static BlockInStream createGrpcBlockInStream(FileSystemContext context,
      WorkerNetAddress address, BlockInStreamSource blockSource,
      ReadRequest readRequestPartial, long blockSize, InStreamOptions options) {
    ReadRequest.Builder readRequestBuilder = readRequestPartial.toBuilder();
    // Complete the partial request with the configured network read chunk size.
    long chunkSize = context.getClusterConf().getBytes(
        PropertyKey.USER_NETWORK_READER_CHUNK_SIZE_BYTES);
    readRequestBuilder.setChunkSize(chunkSize);
    DataReader.Factory factory =
        new GrpcDataReader.Factory(context, address, readRequestBuilder.build());
    return new BlockInStream(factory, address, blockSource, readRequestPartial.getBlockId(),
        blockSize);
  }
/**
* Creates a {@link BlockInStream} to read from a specific remote server. Should only be used
* in cases where the data source and method of reading is known, ie. worker - worker
* communication.
*
* @param context the file system context
* @param blockId the block id
* @param address the address of the gRPC data server
* @param blockSource the source location of the block
* @param blockSize the size of the block
* @param ufsOptions the ufs read options
* @return the {@link BlockInStream} created
*/
public static BlockInStream createRemoteBlockInStream(FileSystemContext context, long blockId,
WorkerNetAddress address, BlockInStreamSource blockSource, long blockSize,
Protocol.OpenUfsBlockOptions ufsOptions) {
long chunkSize =
context.getClusterConf()
.getBytes(PropertyKey.USER_NETWORK_READER_CHUNK_SIZE_BYTES);
ReadRequest readRequest = ReadRequest.newBuilder().setBlockId(blockId)
.setOpenUfsBlockOptions(ufsOptions).setChunkSize(chunkSize).buildPartial();
DataReader.Factory factory = new GrpcDataReader.Factory(context, address,
readRequest.toBuilder().buildPartial());
return new BlockInStream(factory, address, blockSource, blockId, blockSize);
}
/**
* Creates an instance of {@link BlockInStream}.
*
* @param dataReaderFactory the data reader factory
* @param address the address of the gRPC data server
* @param blockSource the source location of the block
* @param id the ID (either block ID or UFS file ID)
* @param length the length
*/
protected BlockInStream(DataReader.Factory dataReaderFactory, WorkerNetAddress address,
BlockInStreamSource blockSource, long id, long length) {
mDataReaderFactory = dataReaderFactory;
mAddress = address;
mInStreamSource = blockSource;
mId = id;
mLength = length;
}
  // Current read position within the block, advanced by read() and moved by seek()/skip().
  @Override
  public long getPos() {
    return mPos;
  }
@Override
public int read() throws IOException {
int bytesRead = read(mSingleByte);
if (bytesRead == -1) {
return -1;
}
Preconditions.checkState(bytesRead == 1);
return BufferUtils.byteToInt(mSingleByte[0]);
}
  // Convenience overload: fill the whole buffer, per the InputStream contract.
  @Override
  public int read(byte[] b) throws IOException {
    return read(b, 0, b.length);
  }
@Override
public int read(byte[] b, int off, int len) throws IOException {
checkIfClosed();
Preconditions.checkArgument(b != null, PreconditionMessage.ERR_READ_BUFFER_NULL);
Preconditions.checkArgument(off >= 0 && len >= 0 && len + off <= b.length,
PreconditionMessage.ERR_BUFFER_STATE.toString(), b.length, off, len);
if (len == 0) {
return 0;
}
readChunk();
if (mCurrentChunk == null) {
mEOF = true;
}
if (mEOF) {
closeDataReader();
Preconditions
.checkState(mPos >= mLength, PreconditionMessage.BLOCK_LENGTH_INCONSISTENT.toString(),
mId, mLength, mPos);
return -1;
}
int toRead = Math.min(len, mCurrentChunk.readableBytes());
mCurrentChunk.readBytes(b, off, toRead);
mPos += toRead;
return toRead;
}
  /**
   * Reads up to {@code len} bytes at block offset {@code pos} without affecting the stream
   * position. A dedicated {@link DataReader} is created for this call and closed before
   * returning.
   *
   * @param pos the offset within the block to start reading at
   * @param b the destination buffer
   * @param off the offset in the buffer to write to
   * @param len the maximum number of bytes to read
   * @return the number of bytes read, or -1 if {@code pos} is out of range or no data was read
   */
  @Override
  public int positionedRead(long pos, byte[] b, int off, int len) throws IOException {
    if (len == 0) {
      return 0;
    }
    if (pos < 0 || pos >= mLength) {
      return -1;
    }
    // Remember the requested length so we can compute how many bytes were consumed.
    int lenCopy = len;
    try (DataReader reader = mDataReaderFactory.create(pos, len)) {
      // We try to read len bytes instead of returning after reading one chunk because
      // it is not free to create/close a DataReader.
      while (len > 0) {
        DataBuffer dataBuffer = null;
        try {
          dataBuffer = reader.readChunk();
          if (dataBuffer == null) {
            // Reader has no more data for this range.
            break;
          }
          Preconditions.checkState(dataBuffer.readableBytes() <= len);
          int toRead = dataBuffer.readableBytes();
          dataBuffer.readBytes(b, off, toRead);
          len -= toRead;
          off += toRead;
        } finally {
          // Always release the chunk buffer, even if copying out of it failed.
          if (dataBuffer != null) {
            dataBuffer.release();
          }
        }
      }
    }
    if (lenCopy == len) {
      // Nothing could be read at all.
      return -1;
    }
    return lenCopy - len;
  }
@Override
public long remaining() {
return mEOF ? 0 : mLength - mPos;
}
@Override
public void seek(long pos) throws IOException {
checkIfClosed();
Preconditions.checkArgument(pos >= 0, PreconditionMessage.ERR_SEEK_NEGATIVE.toString(), pos);
Preconditions
.checkArgument(pos <= mLength, PreconditionMessage.ERR_SEEK_PAST_END_OF_REGION.toString(),
mId);
if (pos == mPos) {
return;
}
if (pos < mPos) {
mEOF = false;
}
closeDataReader();
mPos = pos;
}
@Override
public long skip(long n) throws IOException {
checkIfClosed();
if (n <= 0) {
return 0;
}
long toSkip = Math.min(remaining(), n);
mPos += toSkip;
closeDataReader();
return toSkip;
}
@Override
public void close() throws IOException {
try {
closeDataReader();
} finally {
mDataReaderFactory.close();
}
mClosed = true;
}
  /**
   * @return whether the reader is reading data directly from a local file
   *         (short circuit), rather than over the network
   */
  public boolean isShortCircuit() {
    return mDataReaderFactory.isShortCircuit();
  }
  /**
   * Ensures {@code mCurrentChunk} holds a chunk with unread bytes, fetching the next chunk
   * from the data reader once the current one is exhausted. The data reader itself is
   * created lazily, covering the range from the current position to the end of the block.
   * After this call {@code mCurrentChunk} may still be null, which indicates end of stream.
   */
  private void readChunk() throws IOException {
    if (mDataReader == null) {
      // Lazily open a reader from the current position to the end of the block.
      mDataReader = mDataReaderFactory.create(mPos, mLength - mPos);
    }
    if (mCurrentChunk != null && mCurrentChunk.readableBytes() == 0) {
      // The current chunk is fully consumed; release its buffer before fetching more.
      mCurrentChunk.release();
      mCurrentChunk = null;
    }
    if (mCurrentChunk == null) {
      mCurrentChunk = mDataReader.readChunk();
    }
  }
/**
* Close the current data reader.
*/
private void closeDataReader() throws IOException {
if (mCurrentChunk != null) {
mCurrentChunk.release();
mCurrentChunk = null;
}
if (mDataReader != null) {
mDataReader.close();
}
mDataReader = null;
}
  /**
   * Convenience method to ensure the stream is not closed.
   *
   * @throws IllegalStateException if {@link #close()} has already completed
   */
  private void checkIfClosed() {
    Preconditions.checkState(!mClosed, PreconditionMessage.ERR_CLOSED_BLOCK_IN_STREAM);
  }
  /**
   * @return the address of the data server serving this stream
   */
  public WorkerNetAddress getAddress() {
    return mAddress;
  }
  /**
   * @return the source of the block location (local worker, remote worker, or UFS)
   */
  public BlockInStreamSource getSource() {
    return mInStreamSource;
  }
  /**
   * @return the block ID (or UFS file ID, depending on how the stream was constructed)
   */
  public long getId() {
    return mId;
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.